diff --git a/.gitignore b/.gitignore index 3f8e89eae70d86d1660b746dd1e4a616d4b947e2..41a50284b4e969ce88dc386911f2f1542680f364 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,7 @@ /test/pose3d_test/results/ /pose3d_vhm/models/ /test/pose3d_vhm_test/results/ +/tracking/models/ +/test/tracking_test/results/ +/detection_cbnet/models/ +/test/detection_cbnet_test/results/ diff --git a/common/communication/messages_pb2.py b/common/communication/messages_pb2.py index 85b1bfbd562d8ea1593f4ac83f6135b85d2a986c..47c07c92a9e166dda1bf32766a26d2f04fe48acb 100644 --- a/common/communication/messages_pb2.py +++ b/common/communication/messages_pb2.py @@ -19,7 +19,7 @@ DESCRIPTOR = _descriptor.FileDescriptor( name='messages.proto', package='', syntax='proto3', - serialized_pb=_b('\n\x0emessages.proto\"\x8c\x02\n\x11PerceptionRequest\x12\x12\n\nmanager_id\x18\x01 \x01(\x05\x12\x10\n\x08video_id\x18\x02 \x01(\x03\x12\x10\n\x08\x66rame_id\x18\x03 \x01(\x03\x12\"\n\tdetection\x18\x04 \x01(\x0b\x32\r.DetectionReqH\x00\x12 \n\x08tracking\x18\x05 \x01(\x0b\x32\x0c.TrackingReqH\x00\x12\x1c\n\x06pose3d\x18\x06 \x01(\x0b\x32\n.Pose3DReqH\x00\x12 \n\x08\x64istance\x18\x07 \x01(\x0b\x32\x0c.DistanceReqH\x00\x12\x1e\n\x07\x61\x63tions\x18\x08 \x01(\x0b\x32\x0b.ActionsReqH\x00\x12\x12\n\x08\x66inished\x18\t \x01(\x08H\x00\x42\x05\n\x03req\"\x86\x02\n\x12PerceptionResponse\x12\x10\n\x08video_id\x18\x01 \x01(\x03\x12\x10\n\x08\x66rame_id\x18\x02 \x01(\x03\x12\x19\n\x03\x65rr\x18\x03 \x01(\x0b\x32\n.ErrorRespH\x00\x12#\n\tdetection\x18\x04 \x01(\x0b\x32\x0e.DetectionRespH\x00\x12!\n\x08tracking\x18\x05 \x01(\x0b\x32\r.TrackingRespH\x00\x12\x1d\n\x06pose3d\x18\x06 \x01(\x0b\x32\x0b.Pose3DRespH\x00\x12!\n\x08\x64istance\x18\x07 \x01(\x0b\x32\r.DistanceRespH\x00\x12\x1f\n\x07\x61\x63tions\x18\x08 \x01(\x0b\x32\x0c.ActionsRespH\x00\x42\x06\n\x04resp\"\x1d\n\x0c\x44\x65tectionReq\x12\r\n\x05image\x18\x01 \x01(\x0c\"1\n\rDetectionResp\x12 \n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x0f.DetectionEntry\"\x99\x01\n\x0e\x44\x65tectionEntry\x12\x16\n\x0e\x62ox_top_left_x\x18\x01 \x01(\x05\x12\x16\n\x0e\x62ox_top_left_y\x18\x02 \x01(\x05\x12\x1a\n\x12\x62ox_bottom_right_x\x18\x03 \x01(\x05\x12\x1a\n\x12\x62ox_bottom_right_y\x18\x04 \x01(\x05\x12\x10\n\x08\x63lass_id\x18\x05 \x01(\x05\x12\r\n\x05score\x18\x06 \x01(\x01\"\x1c\n\tPose3DReq\x12\x0f\n\x07patches\x18\x01 \x03(\x0c\"3\n\nPose3DResp\x12%\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x14.PoseEstimationEntry\"/\n\x13PoseEstimationEntry\x12\x18\n\x06joints\x18\x01 \x03(\x0b\x32\x08.Point3d\"*\n\x07Point3d\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02\x12\t\n\x01z\x18\x03 \x01(\x02\"\r\n\x0bTrackingReq\"\x0e\n\x0cTrackingResp\"\r\n\x0b\x44istanceReq\"\x0e\n\x0c\x44istanceResp\"\x0c\n\nActionsReq\"\r\n\x0b\x41\x63tionsResp\"\x18\n\tErrorResp\x12\x0b\n\x03msg\x18\x01 \x01(\tb\x06proto3') + serialized_pb=_b('\n\x0emessages.proto\"\x8c\x02\n\x11PerceptionRequest\x12\x12\n\nmanager_id\x18\x01 \x01(\x05\x12\x10\n\x08video_id\x18\x02 \x01(\x03\x12\x10\n\x08\x66rame_id\x18\x03 \x01(\x03\x12\"\n\tdetection\x18\x04 \x01(\x0b\x32\r.DetectionReqH\x00\x12 \n\x08tracking\x18\x05 \x01(\x0b\x32\x0c.TrackingReqH\x00\x12\x1c\n\x06pose3d\x18\x06 \x01(\x0b\x32\n.Pose3DReqH\x00\x12 \n\x08\x64istance\x18\x07 \x01(\x0b\x32\x0c.DistanceReqH\x00\x12\x1e\n\x07\x61\x63tions\x18\x08 \x01(\x0b\x32\x0b.ActionsReqH\x00\x12\x12\n\x08\x66inished\x18\t \x01(\x08H\x00\x42\x05\n\x03req\"\x86\x02\n\x12PerceptionResponse\x12\x10\n\x08video_id\x18\x01 \x01(\x03\x12\x10\n\x08\x66rame_id\x18\x02 
\x01(\x03\x12\x19\n\x03\x65rr\x18\x03 \x01(\x0b\x32\n.ErrorRespH\x00\x12#\n\tdetection\x18\x04 \x01(\x0b\x32\x0e.DetectionRespH\x00\x12!\n\x08tracking\x18\x05 \x01(\x0b\x32\r.TrackingRespH\x00\x12\x1d\n\x06pose3d\x18\x06 \x01(\x0b\x32\x0b.Pose3DRespH\x00\x12!\n\x08\x64istance\x18\x07 \x01(\x0b\x32\r.DistanceRespH\x00\x12\x1f\n\x07\x61\x63tions\x18\x08 \x01(\x0b\x32\x0c.ActionsRespH\x00\x42\x06\n\x04resp\"\x1d\n\x0c\x44\x65tectionReq\x12\r\n\x05image\x18\x01 \x01(\x0c\"1\n\rDetectionResp\x12 \n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x0f.DetectionEntry\"\x99\x01\n\x0e\x44\x65tectionEntry\x12\x16\n\x0e\x62ox_top_left_x\x18\x01 \x01(\x05\x12\x16\n\x0e\x62ox_top_left_y\x18\x02 \x01(\x05\x12\x1a\n\x12\x62ox_bottom_right_x\x18\x03 \x01(\x05\x12\x1a\n\x12\x62ox_bottom_right_y\x18\x04 \x01(\x05\x12\x10\n\x08\x63lass_id\x18\x05 \x01(\x05\x12\r\n\x05score\x18\x06 \x01(\x01\"\x1c\n\tPose3DReq\x12\x0f\n\x07patches\x18\x01 \x03(\x0c\"3\n\nPose3DResp\x12%\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x14.PoseEstimationEntry\"/\n\x13PoseEstimationEntry\x12\x18\n\x06joints\x18\x01 \x03(\x0b\x32\x08.Point3d\"*\n\x07Point3d\x12\t\n\x01x\x18\x01 \x01(\x02\x12\t\n\x01y\x18\x02 \x01(\x02\x12\t\n\x01z\x18\x03 \x01(\x02\">\n\x0bTrackingReq\x12\r\n\x05image\x18\x01 \x01(\x0c\x12 \n\x07\x65ntries\x18\x02 \x03(\x0b\x32\x0f.DetectionEntry\"/\n\x0cTrackingResp\x12\x1f\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x0e.TrackingEntry\"?\n\rTrackingEntry\x12\"\n\tdetection\x18\x01 \x01(\x0b\x32\x0f.DetectionEntry\x12\n\n\x02id\x18\x02 \x01(\x05\"\r\n\x0b\x44istanceReq\"\x0e\n\x0c\x44istanceResp\"\x0c\n\nActionsReq\"\r\n\x0b\x41\x63tionsResp\"\x18\n\tErrorResp\x12\x0b\n\x03msg\x18\x01 \x01(\tb\x06proto3') ) _sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -472,6 +472,20 @@ _TRACKINGREQ = _descriptor.Descriptor( file=DESCRIPTOR, containing_type=None, fields=[ + _descriptor.FieldDescriptor( + name='image', full_name='TrackingReq.image', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value=_b(""), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='entries', full_name='TrackingReq.entries', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -485,7 +499,7 @@ _TRACKINGREQ = _descriptor.Descriptor( oneofs=[ ], serialized_start=968, - serialized_end=981, + serialized_end=1030, ) @@ -496,6 +510,51 @@ _TRACKINGRESP = _descriptor.Descriptor( file=DESCRIPTOR, containing_type=None, fields=[ + _descriptor.FieldDescriptor( + name='entries', full_name='TrackingResp.entries', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1032, + serialized_end=1079, +) + + +_TRACKINGENTRY = _descriptor.Descriptor( + name='TrackingEntry', + full_name='TrackingEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='detection', full_name='TrackingEntry.detection', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, 
default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='id', full_name='TrackingEntry.id', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -508,8 +567,8 @@ _TRACKINGRESP = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=983, - serialized_end=997, + serialized_start=1081, + serialized_end=1144, ) @@ -532,8 +591,8 @@ _DISTANCEREQ = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=999, - serialized_end=1012, + serialized_start=1146, + serialized_end=1159, ) @@ -556,8 +615,8 @@ _DISTANCERESP = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=1014, - serialized_end=1028, + serialized_start=1161, + serialized_end=1175, ) @@ -580,8 +639,8 @@ _ACTIONSREQ = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=1030, - serialized_end=1042, + serialized_start=1177, + serialized_end=1189, ) @@ -604,8 +663,8 @@ _ACTIONSRESP = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=1044, - serialized_end=1057, + serialized_start=1191, + serialized_end=1204, ) @@ -635,8 +694,8 @@ _ERRORRESP = _descriptor.Descriptor( extension_ranges=[], oneofs=[ ], - serialized_start=1059, - serialized_end=1083, + serialized_start=1206, + serialized_end=1230, ) _PERCEPTIONREQUEST.fields_by_name['detection'].message_type = _DETECTIONREQ @@ -689,6 +748,9 @@ _PERCEPTIONRESPONSE.fields_by_name['actions'].containing_oneof = _PERCEPTIONRESP _DETECTIONRESP.fields_by_name['entries'].message_type = _DETECTIONENTRY _POSE3DRESP.fields_by_name['entries'].message_type = _POSEESTIMATIONENTRY _POSEESTIMATIONENTRY.fields_by_name['joints'].message_type = _POINT3D +_TRACKINGREQ.fields_by_name['entries'].message_type = _DETECTIONENTRY +_TRACKINGRESP.fields_by_name['entries'].message_type = _TRACKINGENTRY +_TRACKINGENTRY.fields_by_name['detection'].message_type = _DETECTIONENTRY DESCRIPTOR.message_types_by_name['PerceptionRequest'] = _PERCEPTIONREQUEST DESCRIPTOR.message_types_by_name['PerceptionResponse'] = _PERCEPTIONRESPONSE DESCRIPTOR.message_types_by_name['DetectionReq'] = _DETECTIONREQ @@ -700,6 +762,7 @@ DESCRIPTOR.message_types_by_name['PoseEstimationEntry'] = _POSEESTIMATIONENTRY DESCRIPTOR.message_types_by_name['Point3d'] = _POINT3D DESCRIPTOR.message_types_by_name['TrackingReq'] = _TRACKINGREQ DESCRIPTOR.message_types_by_name['TrackingResp'] = _TRACKINGRESP +DESCRIPTOR.message_types_by_name['TrackingEntry'] = _TRACKINGENTRY DESCRIPTOR.message_types_by_name['DistanceReq'] = _DISTANCEREQ DESCRIPTOR.message_types_by_name['DistanceResp'] = _DISTANCERESP DESCRIPTOR.message_types_by_name['ActionsReq'] = _ACTIONSREQ @@ -783,6 +846,13 @@ TrackingResp = _reflection.GeneratedProtocolMessageType('TrackingResp', (_messag )) _sym_db.RegisterMessage(TrackingResp) +TrackingEntry = _reflection.GeneratedProtocolMessageType('TrackingEntry', (_message.Message,), dict( + DESCRIPTOR = _TRACKINGENTRY, + __module__ = 'messages_pb2' + # @@protoc_insertion_point(class_scope:TrackingEntry) + )) +_sym_db.RegisterMessage(TrackingEntry) + DistanceReq = _reflection.GeneratedProtocolMessageType('DistanceReq', (_message.Message,), dict( DESCRIPTOR = _DISTANCEREQ, __module__ = 'messages_pb2' diff --git 
a/common/communication/proto/messages.proto b/common/communication/proto/messages.proto index fccd023919d48d9bc14ff152c0f04ac13370828b..8e98974009a75f0c31074c8865e7c18bf8d9f507 100644 --- a/common/communication/proto/messages.proto +++ b/common/communication/proto/messages.proto @@ -83,11 +83,21 @@ message Point3d { } message TrackingReq { - + bytes image = 1; + repeated DetectionEntry entries = 2; } message TrackingResp { + repeated TrackingEntry entries = 1; +} +message TrackingEntry { + // Returns the corrected bounding-box positions of the detected objects, as well as newly + // detected objects. + // Hence there is no one-to-one correspondence with the objects in the TrackingReq request! + DetectionEntry detection = 1; + // A global identifier that stays the same for one and the same object across different frames + int32 id = 2; } message DistanceReq {
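A minimal usage sketch of the new tracking messages (illustrative only: it assumes the regenerated messages_pb2 module is importable, and the frame bytes, box values, and payload_from_service below are placeholders for data supplied by the surrounding pipeline):

import messages_pb2  # generated from messages.proto

# Build a tracking request: one encoded frame plus the detections to track.
req = messages_pb2.PerceptionRequest(manager_id=0, video_id=1, frame_id=42)
req.tracking.image = b'<jpeg bytes>'  # placeholder frame payload
entry = req.tracking.entries.add()    # a DetectionEntry from the detection stage
entry.box_top_left_x, entry.box_top_left_y = 10, 20
entry.box_bottom_right_x, entry.box_bottom_right_y = 110, 220
entry.class_id = 0
entry.score = 0.9
wire_bytes = req.SerializeToString()  # bytes handed to the transport

# Read a tracking response. Entries do not map one-to-one onto the request
# (the tracker may correct, drop, or add boxes), so objects are matched
# across frames by their global id.
resp = messages_pb2.PerceptionResponse()
resp.ParseFromString(payload_from_service)  # placeholder: bytes received back
if resp.WhichOneof('resp') == 'tracking':
    for tracked in resp.tracking.entries:
        d = tracked.detection
        print(tracked.id, (d.box_top_left_x, d.box_top_left_y,
                           d.box_bottom_right_x, d.box_bottom_right_y))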
diff --git a/detection/docker-build-context/Dockerfile b/detection/docker-build-context/Dockerfile index 0250e51f50ce61d9a489aa1f55716822b75aa973..38c56c0a6bb0a17544075518dc7ab332a6621a32 100644 --- a/detection/docker-build-context/Dockerfile +++ b/detection/docker-build-context/Dockerfile @@ -15,8 +15,8 @@ WORKDIR /opt/detection/ COPY basic-requirements.txt ./ COPY requirements.txt ./ # Dependencies that are needed at the time the dependencies from additional-requirements.txt are installed -RUN python3.6 -m pip install -r basic-additional-requirements.txt -RUN python3.6 -m pip install -r additional-requirements.txt +RUN python3.6 -m pip install -r basic-requirements.txt +RUN python3.6 -m pip install -r requirements.txt WORKDIR /opt/detection/logs # The program files are attached via a bind mount. The container itself keeps only the dependencies and the logs written while the system runs diff --git a/tracking/Dockerfile b/detection_cbnet/__init__.py similarity index 100% rename from tracking/Dockerfile rename to detection_cbnet/__init__.py diff --git a/detection_cbnet/docker-build-context/Dockerfile b/detection_cbnet/docker-build-context/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..3d1c2df654ed9f4c3aec5e0398ecc266802a68df --- /dev/null +++ b/detection_cbnet/docker-build-context/Dockerfile @@ -0,0 +1,23 @@ +FROM pytorch/pytorch:1.6.0-cuda10.1-cudnn7-devel + +RUN apt update +RUN apt install -y less nano +# For pycurl and wheel +RUN apt install -y libcurl4-openssl-dev libssl-dev libcairo2-dev libgirepository1.0-dev libcairo2-dev python3-cairo-dev +# cv2 +RUN apt install -y ffmpeg libsm6 libxext6 libxrender-dev + +RUN pip install mmcv-full==1.3.8 -f https://download.openmmlab.com/mmcv/dist/cu101/torch1.6.0/index.html +WORKDIR /opt/detection_cbnet/install +COPY cbnetv2 ./cbnetv2 +WORKDIR /opt/detection_cbnet/install/cbnetv2 +RUN pip install -r requirements/build.txt +RUN pip install -v -e . + +WORKDIR /opt/detection_cbnet/install +COPY additional-requirements.txt ./ +RUN pip install -r additional-requirements.txt + +WORKDIR /opt/detection_cbnet/logs +# The program files are attached via a bind mount. The container itself keeps only the dependencies and the logs written while the system runs +WORKDIR /opt/detection_cbnet/src \ No newline at end of file diff --git a/detection_cbnet/docker-build-context/additional-requirements.txt b/detection_cbnet/docker-build-context/additional-requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..e6f419a89404d63c958367cf72e0659fd95e5de3 --- /dev/null +++ b/detection_cbnet/docker-build-context/additional-requirements.txt @@ -0,0 +1,8 @@ +# packages missing from the byte_track requirements +cython-bbox +kafka-python==2.0.2 +matplotlib==2.2.2 +opencv-python==4.1.2.30 +plotly==4.3.0 +protobuf==3.17.3 +pycurl==7.43.0 \ No newline at end of file diff --git a/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/batch_test_list.py b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/batch_test_list.py new file mode 100644 index 0000000000000000000000000000000000000000..29c33ec24d29d440260d8df96c512ed8a6a0214b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/batch_test_list.py @@ -0,0 +1,344 @@ +# yapf: disable +atss = dict( + config='configs/atss/atss_r50_fpn_1x_coco.py', + checkpoint='atss_r50_fpn_1x_coco_20200209-985f7bd0.pth', + eval='bbox', + metric=dict(bbox_mAP=39.4), +) +autoassign = dict( + config='configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py', + checkpoint='auto_assign_r50_fpn_1x_coco_20210413_115540-5e17991f.pth', + eval='bbox', + metric=dict(bbox_mAP=40.4), +) +carafe = dict( + config='configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py', + checkpoint='faster_rcnn_r50_fpn_carafe_1x_coco_bbox_mAP-0.386_20200504_175733-385a75b7.pth', # noqa + eval='bbox', + metric=dict(bbox_mAP=38.6), +) +cascade_rcnn = [ + dict( + config='configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py', + checkpoint='cascade_rcnn_r50_fpn_1x_coco_20200316-3dc56deb.pth', + eval='bbox', + metric=dict(bbox_mAP=40.3), + ), + dict( + config='configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py', + checkpoint='cascade_mask_rcnn_r50_fpn_1x_coco_20200203-9d4dcb24.pth', + eval=['bbox', 'segm'], + metric=dict(bbox_mAP=41.2, segm_mAP=35.9), + ), +] +cascade_rpn = dict( + config='configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py', + checkpoint='crpn_faster_rcnn_r50_caffe_fpn_1x_coco-c8283cca.pth', + eval='bbox', + metric=dict(bbox_mAP=40.4), +) +centripetalnet = dict( + config='configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py', # noqa + checkpoint='centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804-3ccc61e5.pth', # noqa + eval='bbox', + metric=dict(bbox_mAP=44.7), +) +cornernet = dict( + config='configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py', + checkpoint='cornernet_hourglass104_mstest_10x5_210e_coco_20200824_185720-5fefbf1c.pth', # noqa + eval='bbox', + metric=dict(bbox_mAP=41.2), +) +dcn = dict( + config='configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py', + checkpoint='faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-d68aed1e.pth', + eval='bbox', + metric=dict(bbox_mAP=41.3), +) +deformable_detr = dict( + config='configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py', + checkpoint='deformable_detr_r50_16x2_50e_coco_20210419_220030-a12b9512.pth', # noqa + eval='bbox', + metric=dict(bbox_mAP=44.5), +) +detectors = dict( + config='configs/detectors/detectors_htc_r50_1x_coco.py', + checkpoint='detectors_htc_r50_1x_coco-329b1453.pth', + eval=['bbox', 'segm'], + metric=dict(bbox_mAP=49.1, segm_mAP=42.6), +) +detr = dict( +
config='configs/detr/detr_r50_8x2_150e_coco.py', + checkpoint='detr_r50_8x2_150e_coco_20201130_194835-2c4b8974.pth', + eval='bbox', + metric=dict(bbox_mAP=40.1), +) +double_heads = dict( + config='configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py', + checkpoint='dh_faster_rcnn_r50_fpn_1x_coco_20200130-586b67df.pth', + eval='bbox', + metric=dict(bbox_mAP=40.0), +) +dynamic_rcnn = dict( + config='configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py', + checkpoint='dynamic_rcnn_r50_fpn_1x-62a3f276.pth', + eval='bbox', + metric=dict(bbox_mAP=38.9), +) +empirical_attention = dict( + config='configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py', # noqa + checkpoint='faster_rcnn_r50_fpn_attention_1111_1x_coco_20200130-403cccba.pth', # noqa + eval='bbox', + metric=dict(bbox_mAP=40.0), +) +faster_rcnn = dict( + config='configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py', + checkpoint='faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth', + eval='bbox', + metric=dict(bbox_mAP=37.4), +) +fcos = dict( + config='configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py', # noqa + checkpoint='fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco-0a0d75a8.pth', # noqa + eval='bbox', + metric=dict(bbox_mAP=38.7), +) +foveabox = dict( + config='configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py', + checkpoint='fovea_align_r50_fpn_gn-head_4x4_2x_coco_20200203-8987880d.pth', + eval='bbox', + metric=dict(bbox_mAP=37.9), +) +free_anchor = dict( + config='configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py', + checkpoint='retinanet_free_anchor_r50_fpn_1x_coco_20200130-0f67375f.pth', + eval='bbox', + metric=dict(bbox_mAP=38.7), +) +fsaf = dict( + config='configs/fsaf/fsaf_r50_fpn_1x_coco.py', + checkpoint='fsaf_r50_fpn_1x_coco-94ccc51f.pth', + eval='bbox', + metric=dict(bbox_mAP=37.4), +) +gcnet = dict( + config='configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py', # noqa + checkpoint='mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200202-587b99aa.pth', # noqa + eval=['bbox', 'segm'], + metric=dict(bbox_mAP=40.4, segm_mAP=36.2), +) +gfl = dict( + config='configs/gfl/gfl_r50_fpn_1x_coco.py', + checkpoint='gfl_r50_fpn_1x_coco_20200629_121244-25944287.pth', + eval='bbox', + metric=dict(bbox_mAP=40.2), +) +gn = dict( + config='configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py', + checkpoint='mask_rcnn_r50_fpn_gn-all_2x_coco_20200206-8eee02a6.pth', + eval=['bbox', 'segm'], + metric=dict(bbox_mAP=40.1, segm_mAP=36.4), +) +gn_ws = dict( + config='configs/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py', + checkpoint='faster_rcnn_r50_fpn_gn_ws-all_1x_coco_20200130-613d9fe2.pth', + eval='bbox', + metric=dict(bbox_mAP=39.7), +) +grid_rcnn = dict( + config='configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py', + checkpoint='grid_rcnn_r50_fpn_gn-head_2x_coco_20200130-6cca8223.pth', + eval='bbox', + metric=dict(bbox_mAP=40.4), +) +groie = dict( + config='configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py', + checkpoint='faster_rcnn_r50_fpn_groie_1x_coco_20200604_211715-66ee9516.pth', # noqa + eval='bbox', + metric=dict(bbox_mAP=38.3), +) +guided_anchoring = [ + dict( + config='configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py', # noqa + checkpoint='ga_retinanet_r50_caffe_fpn_1x_coco_20201020-39581c6f.pth', + eval='bbox', + metric=dict(bbox_mAP=36.9), + ), + dict( + config='configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py', + 
checkpoint='ga_faster_r50_caffe_fpn_1x_coco_20200702_000718-a11ccfe6.pth', # noqa + eval='bbox', + metric=dict(bbox_mAP=39.6), + ), +] +hrnet = dict( + config='configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py', + checkpoint='faster_rcnn_hrnetv2p_w18_1x_coco_20200130-56651a6d.pth', + eval='bbox', + metric=dict(bbox_mAP=36.9), +) +htc = dict( + config='configs/htc/htc_r50_fpn_1x_coco.py', + checkpoint='htc_r50_fpn_1x_coco_20200317-7332cf16.pth', + eval=['bbox', 'segm'], + metric=dict(bbox_mAP=42.3, segm_mAP=37.4), +) +libra_rcnn = dict( + config='configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py', + checkpoint='libra_faster_rcnn_r50_fpn_1x_coco_20200130-3afee3a9.pth', + eval='bbox', + metric=dict(bbox_mAP=38.3), +) +mask_rcnn = dict( + config='configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py', + checkpoint='mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth', + eval=['bbox', 'segm'], + metric=dict(bbox_mAP=38.2, segm_mAP=34.7), +) +ms_rcnn = dict( + config='configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py', + checkpoint='ms_rcnn_r50_caffe_fpn_1x_coco_20200702_180848-61c9355e.pth', + eval=['bbox', 'segm'], + metric=dict(bbox_mAP=38.2, segm_mAP=36.0), +) +nas_fcos = dict( + config='configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py', # noqa + checkpoint='nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200520-1bdba3ce.pth', # noqa + eval='bbox', + metric=dict(bbox_mAP=39.4), +) +nas_fpn = dict( + config='configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py', + checkpoint='retinanet_r50_nasfpn_crop640_50e_coco-0ad1f644.pth', + eval='bbox', + metric=dict(bbox_mAP=40.5), +) +paa = dict( + config='configs/paa/paa_r50_fpn_1x_coco.py', + checkpoint='paa_r50_fpn_1x_coco_20200821-936edec3.pth', + eval='bbox', + metric=dict(bbox_mAP=40.4), +) +pafpn = dict( + config='configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py', + checkpoint='faster_rcnn_r50_pafpn_1x_coco_bbox_mAP-0.375_20200503_105836-b7b4b9bd.pth', # noqa + eval='bbox', + metric=dict(bbox_mAP=37.5), +) +pisa = dict( + config='configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py', + checkpoint='pisa_faster_rcnn_r50_fpn_1x_coco-dea93523.pth', + eval='bbox', + metric=dict(bbox_mAP=38.4), +) +point_rend = dict( + config='configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py', + checkpoint='point_rend_r50_caffe_fpn_mstrain_1x_coco-1bcb5fb4.pth', + eval=['bbox', 'segm'], + metric=dict(bbox_mAP=38.4, segm_mAP=36.3), +) +regnet = dict( + config='configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py', + checkpoint='mask_rcnn_regnetx-3.2GF_fpn_1x_coco_20200520_163141-2a9d1814.pth', # noqa + eval=['bbox', 'segm'], + metric=dict(bbox_mAP=40.4, segm_mAP=36.7), +) +reppoints = dict( + config='configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py', + checkpoint='reppoints_moment_r50_fpn_1x_coco_20200330-b73db8d1.pth', + eval='bbox', + metric=dict(bbox_mAP=37.0), +) +res2net = dict( + config='configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py', + checkpoint='faster_rcnn_r2_101_fpn_2x_coco-175f1da6.pth', + eval='bbox', + metric=dict(bbox_mAP=43.0), +) +resnest = dict( + config='configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py', # noqa + checkpoint='faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco_20200926_125502-20289c16.pth', # noqa + eval='bbox', + metric=dict(bbox_mAP=42.0), +) +retinanet = dict( + config='configs/retinanet/retinanet_r50_fpn_1x_coco.py', + checkpoint='retinanet_r50_fpn_1x_coco_20200130-c2398f9e.pth', + eval='bbox', + metric=dict(bbox_mAP=36.5), +) +rpn = dict( + 
config='configs/rpn/rpn_r50_fpn_1x_coco.py', + checkpoint='rpn_r50_fpn_1x_coco_20200218-5525fa2e.pth', + eval='proposal_fast', + metric=dict(AR_1000=58.2), +) +sabl = [ + dict( + config='configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py ', + checkpoint='sabl_retinanet_r50_fpn_1x_coco-6c54fd4f.pth', + eval='bbox', + metric=dict(bbox_mAP=37.7), + ), + dict( + config='configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py', + checkpoint='sabl_faster_rcnn_r50_fpn_1x_coco-e867595b.pth', + eval='bbox', + metric=dict(bbox_mAP=39.9), + ), +] +scnet = dict( + config='configs/scnet/scnet_r50_fpn_1x_coco.py', + checkpoint='scnet_r50_fpn_1x_coco-c3f09857.pth', + eval='bbox', + metric=dict(bbox_mAP=43.5), +) +sparse_rcnn = dict( + config='configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py', + checkpoint='sparse_rcnn_r50_fpn_1x_coco_20201222_214453-dc79b137.pth', + eval='bbox', + metric=dict(bbox_mAP=37.9), +) +ssd = dict( + config='configs/ssd/ssd300_coco.py', + checkpoint='ssd300_coco_20200307-a92d2092.pth', + eval='bbox', + metric=dict(bbox_mAP=25.6), +) +tridentnet = dict( + config='configs/tridentnet/tridentnet_r50_caffe_1x_coco.py', + checkpoint='tridentnet_r50_caffe_1x_coco_20201230_141838-2ec0b530.pth', + eval='bbox', + metric=dict(bbox_mAP=37.6), +) +vfnet = dict( + config='configs/vfnet/vfnet_r50_fpn_1x_coco.py', + checkpoint='vfnet_r50_fpn_1x_coco_20201027-38db6f58.pth', + eval='bbox', + metric=dict(bbox_mAP=41.6), +) +yolact = dict( + config='configs/yolact/yolact_r50_1x8_coco.py', + checkpoint='yolact_r50_1x8_coco_20200908-f38d58df.pth', + eval=['bbox', 'segm'], + metric=dict(bbox_mAP=31.2, segm_mAP=29.0), +) +yolo = dict( + config='configs/yolo/yolov3_d53_320_273e_coco.py', + checkpoint='yolov3_d53_320_273e_coco-421362b6.pth', + eval='bbox', + metric=dict(bbox_mAP=27.9), +) +yolof = dict( + config='configs/yolof/yolof_r50_c5_8x8_1x_coco.py', + checkpoint='yolof_r50_c5_8x8_1x_coco_20210425_024427-8e864411.pth', + eval='bbox', + metric=dict(bbox_mAP=37.5), +) +centernet = dict( + config='configs/centernet/centernet_resnet18_dcnv2_140e_coco.py', + checkpoint='centernet_resnet18_dcnv2_140e_coco_20210520_101209-da388ba2.pth', # noqa + eval='bbox', + metric=dict(bbox_mAP=29.5), +) +# yapf: enable diff --git a/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/batch_train_list.txt b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/batch_train_list.txt new file mode 100644 index 0000000000000000000000000000000000000000..7fe77b46d01710d2be3ea62faf2bd21ceace079a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/batch_train_list.txt @@ -0,0 +1,63 @@ +configs/atss/atss_r50_fpn_1x_coco.py +configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py +configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py +configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py +configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py +configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py +configs/detectors/detectors_htc_r50_1x_coco.py +configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py +configs/detr/detr_r50_8x2_150e_coco.py +configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py +configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py +configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py +configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py +configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py +configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py +configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py 
+configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py +configs/fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py +configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py +configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py +configs/fsaf/fsaf_r50_fpn_1x_coco.py +configs/gfl/gfl_r50_fpn_1x_coco.py +configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py +configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py +configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py +configs/htc/htc_r50_fpn_1x_coco.py +configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py +configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py +configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py +configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py +configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py +configs/paa/paa_r50_fpn_1x_coco.py +configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py +configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py +configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py +configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py +configs/rpn/rpn_r50_fpn_1x_coco.py +configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py +configs/ssd/ssd300_coco.py +configs/tridentnet/tridentnet_r50_caffe_1x_coco.py +configs/vfnet/vfnet_r50_fpn_1x_coco.py +configs/yolact/yolact_r50_1x8_coco.py +configs/yolo/yolov3_d53_320_273e_coco.py +configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py +configs/scnet/scnet_r50_fpn_1x_coco.py +configs/yolof/yolof_r50_c5_8x8_1x_coco.py +configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py +configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py +configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py +configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py +configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py +configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py +configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py +configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py +configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py +configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py +configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py +configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py +configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py +configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py +configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py +configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py +configs/centernet/centernet_resnet18_dcnv2_140e_coco.py diff --git a/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/benchmark_filter.py b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/benchmark_filter.py new file mode 100644 index 0000000000000000000000000000000000000000..81b363ed8e071db00a02e224187c3f59a02819f5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/benchmark_filter.py @@ -0,0 +1,166 @@ +import argparse +import os +import os.path as osp + + +def parse_args(): + parser = argparse.ArgumentParser(description='Filter configs to train') + parser.add_argument( + '--basic-arch', + action='store_true', + help='to train models in basic arch') + parser.add_argument( + '--datasets', action='store_true', help='to train models in dataset') + parser.add_argument( + '--data-pipeline', + action='store_true', + help='to train models related to data pipeline, e.g. 
augmentations') + parser.add_argument( + '--nn-module', + action='store_true', + help='to train models related to neural network modules') + parser.add_argument( + '--model-options', + nargs='+', + help='custom options to special model benchmark') + parser.add_argument( + '--out', + type=str, + default='batch_train_list.txt', + help='output path of gathered metrics to be stored') + args = parser.parse_args() + return args + + +basic_arch_root = [ + 'atss', 'autoassign', 'cascade_rcnn', 'cascade_rpn', 'centripetalnet', + 'cornernet', 'detectors', 'deformable_detr', 'detr', 'double_heads', + 'dynamic_rcnn', 'faster_rcnn', 'fcos', 'foveabox', 'fp16', 'free_anchor', + 'fsaf', 'gfl', 'ghm', 'grid_rcnn', 'guided_anchoring', 'htc', 'ld', + 'libra_rcnn', 'mask_rcnn', 'ms_rcnn', 'nas_fcos', 'paa', 'pisa', + 'point_rend', 'reppoints', 'retinanet', 'rpn', 'sabl', 'ssd', 'tridentnet', + 'vfnet', 'yolact', 'yolo', 'sparse_rcnn', 'scnet', 'yolof', 'centernet' +] + +datasets_root = [ + 'wider_face', 'pascal_voc', 'cityscapes', 'lvis', 'deepfashion' +] + +data_pipeline_root = ['albu_example', 'instaboost'] + +nn_module_root = [ + 'carafe', 'dcn', 'empirical_attention', 'gcnet', 'gn', 'gn+ws', 'hrnet', + 'pafpn', 'nas_fpn', 'regnet', 'resnest', 'res2net', 'groie' +] + +benchmark_pool = [ + 'configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py', + 'configs/atss/atss_r50_fpn_1x_coco.py', + 'configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py', + 'configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py', + 'configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py', + 'configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py', + 'configs/centernet/centernet_resnet18_dcnv2_140e_coco.py', + 'configs/centripetalnet/' + 'centripetalnet_hourglass104_mstest_16x6_210e_coco.py', + 'configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py', + 'configs/cornernet/' + 'cornernet_hourglass104_mstest_8x6_210e_coco.py', + 'configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py', + 'configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py', + 'configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py', + 'configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py', + 'configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py', + 'configs/detectors/detectors_htc_r50_1x_coco.py', + 'configs/detr/detr_r50_8x2_150e_coco.py', + 'configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py', + 'configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py', + 'configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py', # noqa + 'configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py', + 'configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py', + 'configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py', + 'configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py', + 'configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py', + 'configs/fcos/fcos_center_r50_caffe_fpn_gn-head_4x4_1x_coco.py', + 'configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py', + 'configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py', + 'configs/fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py', + 'configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py', + 'configs/fsaf/fsaf_r50_fpn_1x_coco.py', + 'configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py', + 'configs/gfl/gfl_r50_fpn_1x_coco.py', + 'configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py', + 'configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py', + 'configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py', + 'configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py', + 'configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py', 
+ 'configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py', + 'configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py', + 'configs/htc/htc_r50_fpn_1x_coco.py', + 'configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py', + 'configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py', + 'configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py', + 'configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py', + 'configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py', + 'configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py', + 'configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py', + 'configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py', + 'configs/paa/paa_r50_fpn_1x_coco.py', + 'configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py', + 'configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py', + 'configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py', + 'configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py', + 'configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py', + 'configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py', + 'configs/resnest/' + 'mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py', + 'configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py', + 'configs/rpn/rpn_r50_fpn_1x_coco.py', + 'configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py', + 'configs/ssd/ssd300_coco.py', + 'configs/tridentnet/tridentnet_r50_caffe_1x_coco.py', + 'configs/vfnet/vfnet_r50_fpn_1x_coco.py', + 'configs/yolact/yolact_r50_1x8_coco.py', + 'configs/yolo/yolov3_d53_320_273e_coco.py', + 'configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py', + 'configs/scnet/scnet_r50_fpn_1x_coco.py', + 'configs/yolof/yolof_r50_c5_8x8_1x_coco.py', +] + + +def main(): + args = parse_args() + + benchmark_type = [] + if args.basic_arch: + benchmark_type += basic_arch_root + if args.datasets: + benchmark_type += datasets_root + if args.data_pipeline: + benchmark_type += data_pipeline_root + if args.nn_module: + benchmark_type += nn_module_root + + special_model = args.model_options + if special_model is not None: + benchmark_type += special_model + + config_dpath = 'configs/' + benchmark_configs = [] + for cfg_root in benchmark_type: + cfg_dir = osp.join(config_dpath, cfg_root) + configs = os.scandir(cfg_dir) + for cfg in configs: + config_path = osp.join(cfg_dir, cfg.name) + if (config_path in benchmark_pool + and config_path not in benchmark_configs): + benchmark_configs.append(config_path) + + print(f'Totally found {len(benchmark_configs)} configs to benchmark') + with open(args.out, 'w') as f: + for config in benchmark_configs: + f.write(config + '\n') + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/benchmark_inference_fps.py b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/benchmark_inference_fps.py new file mode 100644 index 0000000000000000000000000000000000000000..2befc901f73161ce55a98f37adf494efb0895a58 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/benchmark_inference_fps.py @@ -0,0 +1,93 @@ +import argparse +import os +import os.path as osp + +import mmcv +from mmcv import Config, DictAction +from mmcv.runner import init_dist +from tools.analysis_tools.benchmark import measure_inferense_speed + + +def parse_args(): + parser = argparse.ArgumentParser( + description='MMDet benchmark a model of FPS') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint_root', help='Checkpoint file root path') + parser.add_argument( + '--round-num', + type=int, + 
default=1, + help='round a number to a given precision in decimal digits') + parser.add_argument( + '--out', type=str, help='output path of gathered fps to be stored') + parser.add_argument( + '--max-iter', type=int, default=400, help='num of max iter') + parser.add_argument( + '--log-interval', type=int, default=40, help='interval of logging') + parser.add_argument( + '--fuse-conv-bn', + action='store_true', + help='Whether to fuse conv and bn, this will slightly increase' + 'the inference speed') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', type=int, default=0) + args = parser.parse_args() + if 'LOCAL_RANK' not in os.environ: + os.environ['LOCAL_RANK'] = str(args.local_rank) + return args + + +if __name__ == '__main__': + args = parse_args() + assert args.round_num >= 0 + + config = Config.fromfile(args.config) + + if args.launcher == 'none': + raise NotImplementedError('Only supports distributed mode') + else: + init_dist(args.launcher) + + result_dict = {} + for model_key in config: + model_infos = config[model_key] + if not isinstance(model_infos, list): + model_infos = [model_infos] + for model_info in model_infos: + record_metrics = model_info['metric'] + cfg_path = model_info['config'].strip() + cfg = Config.fromfile(cfg_path) + checkpoint = osp.join(args.checkpoint_root, + model_info['checkpoint'].strip()) + try: + fps = measure_inferense_speed(cfg, checkpoint, args.max_iter, + args.log_interval, + args.fuse_conv_bn) + print( + f'{cfg_path} fps : {fps:.{args.round_num}f} img / s, ' + f'times per image: {1000/fps:.{args.round_num}f} ms / img', + flush=True) + result_dict[cfg_path] = dict( + fps=round(fps, args.round_num), + ms_times_pre_image=round(1000 / fps, args.round_num)) + except Exception as e: + print(f'{config} error: {repr(e)}') + result_dict[cfg_path] = 0 + + if args.out: + mmcv.mkdir_or_exist(args.out) + mmcv.dump(result_dict, osp.join(args.out, 'batch_inference_fps.json')) diff --git a/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/benchmark_test_image.py b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/benchmark_test_image.py new file mode 100644 index 0000000000000000000000000000000000000000..cf37382f7a16f4b66b089f4b9e4a7c3bce27cf26 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/benchmark_test_image.py @@ -0,0 +1,101 @@ +import logging +import os.path as osp +from argparse import ArgumentParser + +from mmcv import Config + +from mmdet.apis import inference_detector, init_detector, show_result_pyplot +from mmdet.utils import get_root_logger + + +def parse_args(): + parser = ArgumentParser() + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint_root', help='Checkpoint file root path') + parser.add_argument('--img', default='demo/demo.jpg', help='Image file') + parser.add_argument('--aug', action='store_true', help='aug test') + parser.add_argument('--model-name', help='model name to inference') + 
parser.add_argument('--show', action='store_true', help='show results') + parser.add_argument( + '--wait-time', + type=float, + default=1, + help='the interval of show (s), 0 is block') + parser.add_argument( + '--device', default='cuda:0', help='Device used for inference') + parser.add_argument( + '--score-thr', type=float, default=0.3, help='bbox score threshold') + args = parser.parse_args() + return args + + +def inference_model(config_name, checkpoint, args, logger=None): + cfg = Config.fromfile(config_name) + if args.aug: + if 'flip' in cfg.data.test.pipeline[1]: + cfg.data.test.pipeline[1].flip = True + else: + if logger is not None: + logger.error(f'{config_name}: unable to start aug test') + else: + print(f'{config_name}: unable to start aug test', flush=True) + + model = init_detector(cfg, checkpoint, device=args.device) + # test a single image + result = inference_detector(model, args.img) + + # show the results + if args.show: + show_result_pyplot( + model, + args.img, + result, + score_thr=args.score_thr, + wait_time=args.wait_time) + return result + + +# Sample test whether the inference code is correct +def main(args): + config = Config.fromfile(args.config) + + # test single model + if args.model_name: + if args.model_name in config: + model_infos = config[args.model_name] + if not isinstance(model_infos, list): + model_infos = [model_infos] + model_info = model_infos[0] + config_name = model_info['config'].strip() + print(f'processing: {config_name}', flush=True) + checkpoint = osp.join(args.checkpoint_root, + model_info['checkpoint'].strip()) + # build the model from a config file and a checkpoint file + inference_model(config_name, checkpoint, args) + return + else: + raise RuntimeError('model name input error.') + + # test all model + logger = get_root_logger( + log_file='benchmark_test_image.log', log_level=logging.ERROR) + + for model_key in config: + model_infos = config[model_key] + if not isinstance(model_infos, list): + model_infos = [model_infos] + for model_info in model_infos: + print('processing: ', model_info['config'], flush=True) + config_name = model_info['config'].strip() + checkpoint = osp.join(args.checkpoint_root, + model_info['checkpoint'].strip()) + try: + # build the model from a config file and a checkpoint file + inference_model(config_name, checkpoint, args, logger) + except Exception as e: + logger.error(f'{config_name} " : {repr(e)}') + + +if __name__ == '__main__': + args = parse_args() + main(args) diff --git a/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/convert_test_benchmark_script.py b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/convert_test_benchmark_script.py new file mode 100644 index 0000000000000000000000000000000000000000..bb3cefbb67a023b6f25d448e688ce7c889d7411d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/convert_test_benchmark_script.py @@ -0,0 +1,118 @@ +import argparse +import os +import os.path as osp + +from mmcv import Config + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Convert benchmark model list to script') + parser.add_argument('config', help='test config file path') + parser.add_argument('--port', type=int, default=29666, help='dist port') + parser.add_argument( + '--work-dir', + default='tools/batch_test', + help='the dir to save metric') + parser.add_argument( + '--run', action='store_true', help='run script directly') + parser.add_argument( + '--out', type=str, help='path to save model benchmark script') + + args = 
parser.parse_args() + return args + + +def process_model_info(model_info, work_dir): + config = model_info['config'].strip() + fname, _ = osp.splitext(osp.basename(config)) + job_name = fname + work_dir = osp.join(work_dir, fname) + checkpoint = model_info['checkpoint'].strip() + if not isinstance(model_info['eval'], list): + evals = [model_info['eval']] + else: + evals = model_info['eval'] + eval = ' '.join(evals) + return dict( + config=config, + job_name=job_name, + work_dir=work_dir, + checkpoint=checkpoint, + eval=eval) + + +def create_test_bash_info(commands, model_test_dict, port, script_name, + partition): + config = model_test_dict['config'] + job_name = model_test_dict['job_name'] + checkpoint = model_test_dict['checkpoint'] + work_dir = model_test_dict['work_dir'] + eval = model_test_dict['eval'] + + echo_info = f' \necho \'{config}\' &' + commands.append(echo_info) + commands.append('\n') + + command_info = f'GPUS=8 GPUS_PER_NODE=8 ' \ + f'CPUS_PER_TASK=2 {script_name} ' + + command_info += f'{partition} ' + command_info += f'{job_name} ' + command_info += f'{config} ' + command_info += f'$CHECKPOINT_DIR/{checkpoint} ' + command_info += f'--work-dir {work_dir} ' + + command_info += f'--eval {eval} ' + command_info += f'--cfg-option dist_params.port={port} ' + command_info += ' &' + + commands.append(command_info) + + +def main(): + args = parse_args() + if args.out: + out_suffix = args.out.split('.')[-1] + assert args.out.endswith('.sh'), \ + f'Expected out file path suffix is .sh, but get .{out_suffix}' + assert args.out or args.run, \ + ('Please specify at least one operation (save/run/ the ' + 'script) with the argument "--out" or "--run"') + + commands = [] + partition_name = 'PARTITION=$1 ' + commands.append(partition_name) + commands.append('\n') + + checkpoint_root = 'CHECKPOINT_DIR=$2 ' + commands.append(checkpoint_root) + commands.append('\n') + + script_name = osp.join('tools', 'slurm_test.sh') + port = args.port + work_dir = args.work_dir + + cfg = Config.fromfile(args.config) + + for model_key in cfg: + model_infos = cfg[model_key] + if not isinstance(model_infos, list): + model_infos = [model_infos] + for model_info in model_infos: + print('processing: ', model_info['config']) + model_test_dict = process_model_info(model_info, work_dir) + create_test_bash_info(commands, model_test_dict, port, script_name, + '$PARTITION') + port += 1 + + command_str = ''.join(commands) + if args.out: + with open(args.out, 'w') as f: + f.write(command_str) + if args.run: + os.system(command_str) + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/convert_train_benchmark_script.py b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/convert_train_benchmark_script.py new file mode 100644 index 0000000000000000000000000000000000000000..db4181b8bca78284fe2614255649bfa672f42629 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/convert_train_benchmark_script.py @@ -0,0 +1,98 @@ +import argparse +import os +import os.path as osp + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Convert benchmark model json to script') + parser.add_argument( + 'txt_path', type=str, help='txt path output by benchmark_filter') + parser.add_argument( + '--partition', + type=str, + default='openmmlab', + help='slurm partition name') + parser.add_argument( + '--max-keep-ckpts', + type=int, + default=1, + help='The maximum checkpoints to keep') + parser.add_argument( + '--run', 
action='store_true', help='run script directly') + parser.add_argument( + '--out', type=str, help='path to save model benchmark script') + + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + if args.out: + out_suffix = args.out.split('.')[-1] + assert args.out.endswith('.sh'), \ + f'Expected out file path suffix is .sh, but get .{out_suffix}' + assert args.out or args.run, \ + ('Please specify at least one operation (save/run/ the ' + 'script) with the argument "--out" or "--run"') + + partition = args.partition # cluster name + + root_name = './tools' + train_script_name = osp.join(root_name, 'slurm_train.sh') + # stdout is no output + stdout_cfg = '>/dev/null' + + max_keep_ckpts = args.max_keep_ckpts + + commands = [] + with open(args.txt_path, 'r') as f: + model_cfgs = f.readlines() + for i, cfg in enumerate(model_cfgs): + cfg = cfg.strip() + if len(cfg) == 0: + continue + # print cfg name + echo_info = f'echo \'{cfg}\' &' + commands.append(echo_info) + commands.append('\n') + + fname, _ = osp.splitext(osp.basename(cfg)) + out_fname = osp.join(root_name, 'work_dir', fname) + # default setting + if cfg.find('16x') >= 0: + command_info = f'GPUS=16 GPUS_PER_NODE=8 ' \ + f'CPUS_PER_TASK=2 {train_script_name} ' + elif cfg.find('gn-head_4x4_1x_coco.py') >= 0 or \ + cfg.find('gn-head_4x4_2x_coco.py') >= 0: + command_info = f'GPUS=4 GPUS_PER_NODE=4 ' \ + f'CPUS_PER_TASK=2 {train_script_name} ' + else: + command_info = f'GPUS=8 GPUS_PER_NODE=8 ' \ + f'CPUS_PER_TASK=2 {train_script_name} ' + command_info += f'{partition} ' + command_info += f'{fname} ' + command_info += f'{cfg} ' + command_info += f'{out_fname} ' + if max_keep_ckpts: + command_info += f'--cfg-options ' \ + f'checkpoint_config.max_keep_ckpts=' \ + f'{max_keep_ckpts}' + ' ' + command_info += f'{stdout_cfg} &' + + commands.append(command_info) + + if i < len(model_cfgs): + commands.append('\n') + + command_str = ''.join(commands) + if args.out: + with open(args.out, 'w') as f: + f.write(command_str) + if args.run: + os.system(command_str) + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/gather_models.py b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/gather_models.py new file mode 100644 index 0000000000000000000000000000000000000000..0c1048f4faf8e402e34ba6be0eabedab30e8c8e9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/gather_models.py @@ -0,0 +1,257 @@ +import argparse +import glob +import json +import os.path as osp +import shutil +import subprocess +from collections import OrderedDict + +import mmcv +import torch +import yaml + + +def ordered_yaml_dump(data, stream=None, Dumper=yaml.SafeDumper, **kwds): + + class OrderedDumper(Dumper): + pass + + def _dict_representer(dumper, data): + return dumper.represent_mapping( + yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, data.items()) + + OrderedDumper.add_representer(OrderedDict, _dict_representer) + return yaml.dump(data, stream, OrderedDumper, **kwds) + + +def process_checkpoint(in_file, out_file): + checkpoint = torch.load(in_file, map_location='cpu') + # remove optimizer for smaller file size + if 'optimizer' in checkpoint: + del checkpoint['optimizer'] + # if it is necessary to remove some sensitive data in checkpoint['meta'], + # add the code here. 
+ if torch.__version__ >= '1.6': + torch.save(checkpoint, out_file, _use_new_zipfile_serialization=False) + else: + torch.save(checkpoint, out_file) + sha = subprocess.check_output(['sha256sum', out_file]).decode() + final_file = out_file.rstrip('.pth') + '-{}.pth'.format(sha[:8]) + subprocess.Popen(['mv', out_file, final_file]) + return final_file + + +def get_final_epoch(config): + cfg = mmcv.Config.fromfile('./configs/' + config) + return cfg.runner.max_epochs + + +def get_real_epoch(config): + cfg = mmcv.Config.fromfile('./configs/' + config) + epoch = cfg.runner.max_epochs + if cfg.data.train.type == 'RepeatDataset': + epoch *= cfg.data.train.times + return epoch + + +def get_final_results(log_json_path, epoch, results_lut): + result_dict = dict() + with open(log_json_path, 'r') as f: + for line in f.readlines(): + log_line = json.loads(line) + if 'mode' not in log_line.keys(): + continue + + if log_line['mode'] == 'train' and log_line['epoch'] == epoch: + result_dict['memory'] = log_line['memory'] + + if log_line['mode'] == 'val' and log_line['epoch'] == epoch: + result_dict.update({ + key: log_line[key] + for key in results_lut if key in log_line + }) + return result_dict + + +def get_dataset_name(config): + # If there are more dataset, add here. + name_map = dict( + CityscapesDataset='Cityscapes', + CocoDataset='COCO', + DeepFashionDataset='Deep Fashion', + LVISV05Dataset='LVIS v0.5', + LVISV1Dataset='LVIS v1', + VOCDataset='Pascal VOC', + WIDERFaceDataset='WIDER Face') + cfg = mmcv.Config.fromfile('./configs/' + config) + return name_map[cfg.dataset_type] + + +def convert_model_info_to_pwc(model_infos): + pwc_files = {} + for model in model_infos: + cfg_folder_name = osp.split(model['config'])[-2] + pwc_model_info = OrderedDict() + pwc_model_info['Name'] = osp.split(model['config'])[-1].split('.')[0] + pwc_model_info['In Collection'] = 'Please fill in Collection name' + pwc_model_info['Config'] = osp.join('configs', model['config']) + + # get metadata + memory = round(model['results']['memory'] / 1024, 1) + epochs = get_real_epoch(model['config']) + meta_data = OrderedDict() + meta_data['Training Memory (GB)'] = memory + meta_data['Epochs'] = epochs + pwc_model_info['Metadata'] = meta_data + + # get dataset name + dataset_name = get_dataset_name(model['config']) + + # get results + results = [] + # if there are more metrics, add here. 
+ if 'bbox_mAP' in model['results']: + metric = round(model['results']['bbox_mAP'] * 100, 1) + results.append( + OrderedDict( + Task='Object Detection', + Dataset=dataset_name, + Metrics={'box AP': metric})) + if 'segm_mAP' in model['results']: + metric = round(model['results']['segm_mAP'] * 100, 1) + results.append( + OrderedDict( + Task='Instance Segmentation', + Dataset=dataset_name, + Metrics={'mask AP': metric})) + pwc_model_info['Results'] = results + + link_string = 'https://download.openmmlab.com/mmdetection/v2.0/' + link_string += '{}/{}'.format(model['config'].rstrip('.py'), + osp.split(model['model_path'])[-1]) + pwc_model_info['Weights'] = link_string + if cfg_folder_name in pwc_files: + pwc_files[cfg_folder_name].append(pwc_model_info) + else: + pwc_files[cfg_folder_name] = [pwc_model_info] + return pwc_files + + +def parse_args(): + parser = argparse.ArgumentParser(description='Gather benchmarked models') + parser.add_argument( + 'root', + type=str, + help='root path of benchmarked models to be gathered') + parser.add_argument( + 'out', type=str, help='output path of gathered models to be stored') + + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + models_root = args.root + models_out = args.out + mmcv.mkdir_or_exist(models_out) + + # find all models in the root directory to be gathered + raw_configs = list(mmcv.scandir('./configs', '.py', recursive=True)) + + # filter configs that is not trained in the experiments dir + used_configs = [] + for raw_config in raw_configs: + if osp.exists(osp.join(models_root, raw_config)): + used_configs.append(raw_config) + print(f'Find {len(used_configs)} models to be gathered') + + # find final_ckpt and log file for trained each config + # and parse the best performance + model_infos = [] + for used_config in used_configs: + exp_dir = osp.join(models_root, used_config) + # check whether the exps is finished + final_epoch = get_final_epoch(used_config) + final_model = 'epoch_{}.pth'.format(final_epoch) + model_path = osp.join(exp_dir, final_model) + + # skip if the model is still training + if not osp.exists(model_path): + continue + + # get the latest logs + log_json_path = list( + sorted(glob.glob(osp.join(exp_dir, '*.log.json'))))[-1] + log_txt_path = list(sorted(glob.glob(osp.join(exp_dir, '*.log'))))[-1] + cfg = mmcv.Config.fromfile('./configs/' + used_config) + results_lut = cfg.evaluation.metric + if not isinstance(results_lut, list): + results_lut = [results_lut] + # case when using VOC, the evaluation key is only 'mAP' + results_lut = [key + '_mAP' for key in results_lut if 'mAP' not in key] + model_performance = get_final_results(log_json_path, final_epoch, + results_lut) + + if model_performance is None: + continue + + model_time = osp.split(log_txt_path)[-1].split('.')[0] + model_infos.append( + dict( + config=used_config, + results=model_performance, + epochs=final_epoch, + model_time=model_time, + log_json_path=osp.split(log_json_path)[-1])) + + # publish model for each checkpoint + publish_model_infos = [] + for model in model_infos: + model_publish_dir = osp.join(models_out, model['config'].rstrip('.py')) + mmcv.mkdir_or_exist(model_publish_dir) + + model_name = osp.split(model['config'])[-1].split('.')[0] + + model_name += '_' + model['model_time'] + publish_model_path = osp.join(model_publish_dir, model_name) + trained_model_path = osp.join(models_root, model['config'], + 'epoch_{}.pth'.format(model['epochs'])) + + # convert model + final_model_path = process_checkpoint(trained_model_path, 
+ publish_model_path) + + # copy log + shutil.copy( + osp.join(models_root, model['config'], model['log_json_path']), + osp.join(model_publish_dir, f'{model_name}.log.json')) + shutil.copy( + osp.join(models_root, model['config'], + osp.splitext(model['log_json_path'])[0]), + osp.join(model_publish_dir, f'{model_name}.log')) + + # copy config to guarantee reproducibility + config_path = model['config'] + config_path = osp.join( + 'configs', + config_path) if 'configs' not in config_path else config_path + target_config_path = osp.split(config_path)[-1] + shutil.copy(config_path, + osp.join(model_publish_dir, target_config_path)) + + model['model_path'] = final_model_path + publish_model_infos.append(model) + + models = dict(models=publish_model_infos) + print(f'Gathered {len(publish_model_infos)} models in total') + mmcv.dump(models, osp.join(models_out, 'model_info.json')) + + pwc_files = convert_model_info_to_pwc(publish_model_infos) + for name in pwc_files: + with open(osp.join(models_out, name + '_metafile.yml'), 'w') as f: + ordered_yaml_dump(pwc_files[name], f, encoding='utf-8') + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/gather_test_benchmark_metric.py b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/gather_test_benchmark_metric.py new file mode 100644 index 0000000000000000000000000000000000000000..c8f3f6629a5b8ab0f326465d2247c60da3c3c127 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/gather_test_benchmark_metric.py @@ -0,0 +1,95 @@ +import argparse +import glob +import os.path as osp + +import mmcv +from mmcv import Config + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Gather metrics of benchmarked models') + parser.add_argument('config', help='test config file path') + parser.add_argument( + 'root', + type=str, + help='root path of benchmarked models to be gathered') + parser.add_argument( + '--out', type=str, help='output path of gathered metrics to be stored') + parser.add_argument( + '--not-show', action='store_true', help='do not show metrics') + parser.add_argument( + '--show-all', action='store_true', help='show all model metrics') + + args = parser.parse_args() + return args + + +if __name__ == '__main__': + args = parse_args() + + root_path = args.root + metrics_out = args.out + result_dict = {} + + cfg = Config.fromfile(args.config) + + for model_key in cfg: + model_infos = cfg[model_key] + if not isinstance(model_infos, list): + model_infos = [model_infos] + for model_info in model_infos: + record_metrics = model_info['metric'] + config = model_info['config'].strip() + fname, _ = osp.splitext(osp.basename(config)) + metric_json_dir = osp.join(root_path, fname) + if osp.exists(metric_json_dir): + json_list = glob.glob(osp.join(metric_json_dir, '*.json')) + if len(json_list) > 0: + log_json_path = list(sorted(json_list))[-1] + + metric = mmcv.load(log_json_path) + if config in metric.get('config', {}): + + new_metrics = dict() + for record_metric_key in record_metrics: + record_metric_key_bk = record_metric_key + old_metric = record_metrics[record_metric_key] + if record_metric_key == 'AR_1000': + record_metric_key = 'AR@1000' + if record_metric_key not in metric['metric']: + raise KeyError( + 'record_metric_key does not exist, ' + 'please check your config') + new_metric = round( + metric['metric'][record_metric_key] * 100, 1) + new_metrics[record_metric_key_bk] = new_metric + + if args.show_all: + result_dict[config] = dict(
before=record_metrics, after=new_metrics) + else: + for record_metric_key in record_metrics: + old_metric = record_metrics[record_metric_key] + new_metric = new_metrics[record_metric_key] + if old_metric != new_metric: + result_dict[config] = dict( + before=record_metrics, + after=new_metrics) + break + else: + print(f'{config} is not included in: {log_json_path}') + else: + print(f'{config}: no json file in {metric_json_dir}') + else: + print(f'{config}: directory does not exist: {metric_json_dir}') + + if metrics_out: + mmcv.mkdir_or_exist(metrics_out) + mmcv.dump(result_dict, + osp.join(metrics_out, 'batch_test_metric_info.json')) + if not args.not_show: + print('===================================') + for config_name, metrics in result_dict.items(): + print(config_name, metrics) + print('===================================') diff --git a/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/gather_train_benchmark_metric.py b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/gather_train_benchmark_metric.py new file mode 100644 index 0000000000000000000000000000000000000000..1ad602ab1f5b851308b6d77f8b7251b7bcd71e45 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/gather_train_benchmark_metric.py @@ -0,0 +1,149 @@ +import argparse +import glob +import os.path as osp + +import mmcv +from gather_models import get_final_results + +try: + import xlrd +except ImportError: + xlrd = None +try: + import xlutils + from xlutils.copy import copy +except ImportError: + xlutils = None + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Gather metrics of benchmarked models') + parser.add_argument( + 'root', + type=str, + help='root path of benchmarked models to be gathered') + parser.add_argument( + 'txt_path', type=str, help='txt path output by benchmark_filter') + parser.add_argument( + '--out', type=str, help='output path of gathered metrics to be stored') + parser.add_argument( + '--not-show', action='store_true', help='do not show metrics') + parser.add_argument( + '--excel', type=str, help='input path of excel to be recorded') + parser.add_argument( + '--ncol', type=int, help='number of column to be modified or appended') + + args = parser.parse_args() + return args + + +if __name__ == '__main__': + args = parse_args() + + if args.excel: + assert args.ncol, 'Please specify "--excel" and "--ncol" ' \ + 'at the same time' + if xlrd is None: + raise RuntimeError( + 'xlrd is not installed, ' + 'please use "pip install xlrd==1.2.0" to install it') + if xlutils is None: + raise RuntimeError( + 'xlutils is not installed, ' + 'please use "pip install xlutils==2.0.0" to install it') + readbook = xlrd.open_workbook(args.excel) + sheet = readbook.sheet_by_name('Sheet1') + sheet_info = {} + total_nrows = sheet.nrows + for i in range(3, sheet.nrows): + sheet_info[sheet.row_values(i)[0]] = i + xlrw = copy(readbook) + table = xlrw.get_sheet(0) + + root_path = args.root + metrics_out = args.out + + result_dict = {} + with open(args.txt_path, 'r') as f: + model_cfgs = f.readlines() + for i, config in enumerate(model_cfgs): + config = config.strip() + if len(config) == 0: + continue + + config_name = osp.split(config)[-1] + config_name = osp.splitext(config_name)[0] + result_path = osp.join(root_path, config_name) + if osp.exists(result_path): + # 1 read config + cfg = mmcv.Config.fromfile(config) + total_epochs = cfg.runner.max_epochs + final_results = cfg.evaluation.metric + if not isinstance(final_results, list): + final_results = [final_results] + final_results_out = [] + for
key in final_results: + if 'proposal_fast' in key: + final_results_out.append('AR@1000') # RPN + elif 'mAP' not in key: + final_results_out.append(key + '_mAP') + + # 2 determine whether the total_epochs checkpoint exists + ckpt_path = f'epoch_{total_epochs}.pth' + if osp.exists(osp.join(result_path, ckpt_path)): + log_json_path = list( + sorted(glob.glob(osp.join(result_path, + '*.log.json'))))[-1] + + # 3 read metric + model_performance = get_final_results( + log_json_path, total_epochs, final_results_out) + if model_performance is None: + print(f'log file error: {log_json_path}') + continue + for performance in model_performance: + if performance in ['AR@1000', 'bbox_mAP', 'segm_mAP']: + metric = round( + model_performance[performance] * 100, 1) + model_performance[performance] = metric + result_dict[config] = model_performance + + # update and append excel content + if args.excel: + if 'AR@1000' in model_performance: + metrics = f'{model_performance["AR@1000"]}' \ + f'(AR@1000)' + elif 'segm_mAP' in model_performance: + metrics = f'{model_performance["bbox_mAP"]}/' \ + f'{model_performance["segm_mAP"]}' + else: + metrics = f'{model_performance["bbox_mAP"]}' + + row_num = sheet_info.get(config, None) + if row_num: + table.write(row_num, args.ncol, metrics) + else: + table.write(total_nrows, 0, config) + table.write(total_nrows, args.ncol, metrics) + total_nrows += 1 + + else: + print(f'{config}: {ckpt_path} does not exist') + else: + print(f'result path does not exist: {config}') + + # 4 save or print results + if metrics_out: + mmcv.mkdir_or_exist(metrics_out) + mmcv.dump(result_dict, + osp.join(metrics_out, 'model_metric_info.json')) + if not args.not_show: + print('===================================') + for config_name, metrics in result_dict.items(): + print(config_name, metrics) + print('===================================') + if args.excel: + filename, suffix = osp.splitext(args.excel) + xlrw.save(f'{filename}_o{suffix}') + print(f'>>> Output {filename}_o{suffix}') diff --git a/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/linter.sh b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/linter.sh new file mode 100644 index 0000000000000000000000000000000000000000..b0fe0acfa492820d6e556cf76d6d48e46c64e5e0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/linter.sh @@ -0,0 +1,3 @@ +yapf -r -i mmdet/ configs/ tests/ tools/ +isort -rc mmdet/ configs/ tests/ tools/ +flake8 .
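+# Note (an assumption, not part of the original script): the commands above
+# require yapf, isort and flake8 to be installed, and the -rc flag requires
+# isort<5 (isort 5.0 removed -rc and made recursive traversal the default).
+# On a modern isort the equivalent call is likely:
+#   isort mmdet/ configs/ tests/ tools/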
diff --git a/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/test_benchmark.sh b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/test_benchmark.sh new file mode 100644 index 0000000000000000000000000000000000000000..eb086725f4a2f75e549ff4d2e2dd77e6c31f379c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/test_benchmark.sh @@ -0,0 +1,115 @@ +PARTITION=$1 +CHECKPOINT_DIR=$2 + +echo 'configs/atss/atss_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION atss_r50_fpn_1x_coco configs/atss/atss_r50_fpn_1x_coco.py $CHECKPOINT_DIR/atss_r50_fpn_1x_coco_20200209-985f7bd0.pth --work-dir tools/batch_test/atss_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29666 & +echo 'configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION autoassign_r50_fpn_8x2_1x_coco configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py $CHECKPOINT_DIR/auto_assign_r50_fpn_1x_coco_20210413_115540-5e17991f.pth --work-dir tools/batch_test/autoassign_r50_fpn_8x2_1x_coco --eval bbox --cfg-option dist_params.port=29667 & +echo 'configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION faster_rcnn_r50_fpn_carafe_1x_coco configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py $CHECKPOINT_DIR/faster_rcnn_r50_fpn_carafe_1x_coco_bbox_mAP-0.386_20200504_175733-385a75b7.pth --work-dir tools/batch_test/faster_rcnn_r50_fpn_carafe_1x_coco --eval bbox --cfg-option dist_params.port=29668 & +echo 'configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION cascade_rcnn_r50_fpn_1x_coco configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py $CHECKPOINT_DIR/cascade_rcnn_r50_fpn_1x_coco_20200316-3dc56deb.pth --work-dir tools/batch_test/cascade_rcnn_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29669 & +echo 'configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION cascade_mask_rcnn_r50_fpn_1x_coco configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py $CHECKPOINT_DIR/cascade_mask_rcnn_r50_fpn_1x_coco_20200203-9d4dcb24.pth --work-dir tools/batch_test/cascade_mask_rcnn_r50_fpn_1x_coco --eval bbox segm --cfg-option dist_params.port=29670 & +echo 'configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION crpn_faster_rcnn_r50_caffe_fpn_1x_coco configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py $CHECKPOINT_DIR/crpn_faster_rcnn_r50_caffe_fpn_1x_coco-c8283cca.pth --work-dir tools/batch_test/crpn_faster_rcnn_r50_caffe_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29671 & +echo 'configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION centripetalnet_hourglass104_mstest_16x6_210e_coco configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py $CHECKPOINT_DIR/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804-3ccc61e5.pth --work-dir tools/batch_test/centripetalnet_hourglass104_mstest_16x6_210e_coco --eval bbox --cfg-option dist_params.port=29672 & +echo 'configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION cornernet_hourglass104_mstest_8x6_210e_coco 
configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py $CHECKPOINT_DIR/cornernet_hourglass104_mstest_10x5_210e_coco_20200824_185720-5fefbf1c.pth --work-dir tools/batch_test/cornernet_hourglass104_mstest_8x6_210e_coco --eval bbox --cfg-option dist_params.port=29673 & +echo 'configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py $CHECKPOINT_DIR/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-d68aed1e.pth --work-dir tools/batch_test/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco --eval bbox --cfg-option dist_params.port=29674 & +echo 'configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION deformable_detr_r50_16x2_50e_coco configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py $CHECKPOINT_DIR/deformable_detr_r50_16x2_50e_coco_20210419_220030-a12b9512.pth --work-dir tools/batch_test/deformable_detr_r50_16x2_50e_coco --eval bbox --cfg-option dist_params.port=29675 & +echo 'configs/detectors/detectors_htc_r50_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION detectors_htc_r50_1x_coco configs/detectors/detectors_htc_r50_1x_coco.py $CHECKPOINT_DIR/detectors_htc_r50_1x_coco-329b1453.pth --work-dir tools/batch_test/detectors_htc_r50_1x_coco --eval bbox segm --cfg-option dist_params.port=29676 & +echo 'configs/detr/detr_r50_8x2_150e_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION detr_r50_8x2_150e_coco configs/detr/detr_r50_8x2_150e_coco.py $CHECKPOINT_DIR/detr_r50_8x2_150e_coco_20201130_194835-2c4b8974.pth --work-dir tools/batch_test/detr_r50_8x2_150e_coco --eval bbox --cfg-option dist_params.port=29677 & +echo 'configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION dh_faster_rcnn_r50_fpn_1x_coco configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py $CHECKPOINT_DIR/dh_faster_rcnn_r50_fpn_1x_coco_20200130-586b67df.pth --work-dir tools/batch_test/dh_faster_rcnn_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29678 & +echo 'configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION dynamic_rcnn_r50_fpn_1x_coco configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py $CHECKPOINT_DIR/dynamic_rcnn_r50_fpn_1x-62a3f276.pth --work-dir tools/batch_test/dynamic_rcnn_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29679 & +echo 'configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION faster_rcnn_r50_fpn_attention_1111_1x_coco configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py $CHECKPOINT_DIR/faster_rcnn_r50_fpn_attention_1111_1x_coco_20200130-403cccba.pth --work-dir tools/batch_test/faster_rcnn_r50_fpn_attention_1111_1x_coco --eval bbox --cfg-option dist_params.port=29680 & +echo 'configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION faster_rcnn_r50_fpn_1x_coco configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py $CHECKPOINT_DIR/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth --work-dir tools/batch_test/faster_rcnn_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29681 & +echo 
'configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py $CHECKPOINT_DIR/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco-0a0d75a8.pth --work-dir tools/batch_test/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco --eval bbox --cfg-option dist_params.port=29682 & +echo 'configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION fovea_align_r50_fpn_gn-head_4x4_2x_coco configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py $CHECKPOINT_DIR/fovea_align_r50_fpn_gn-head_4x4_2x_coco_20200203-8987880d.pth --work-dir tools/batch_test/fovea_align_r50_fpn_gn-head_4x4_2x_coco --eval bbox --cfg-option dist_params.port=29683 & +echo 'configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION retinanet_free_anchor_r50_fpn_1x_coco configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py $CHECKPOINT_DIR/retinanet_free_anchor_r50_fpn_1x_coco_20200130-0f67375f.pth --work-dir tools/batch_test/retinanet_free_anchor_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29684 & +echo 'configs/fsaf/fsaf_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION fsaf_r50_fpn_1x_coco configs/fsaf/fsaf_r50_fpn_1x_coco.py $CHECKPOINT_DIR/fsaf_r50_fpn_1x_coco-94ccc51f.pth --work-dir tools/batch_test/fsaf_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29685 & +echo 'configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py $CHECKPOINT_DIR/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200202-587b99aa.pth --work-dir tools/batch_test/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco --eval bbox segm --cfg-option dist_params.port=29686 & +echo 'configs/gfl/gfl_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION gfl_r50_fpn_1x_coco configs/gfl/gfl_r50_fpn_1x_coco.py $CHECKPOINT_DIR/gfl_r50_fpn_1x_coco_20200629_121244-25944287.pth --work-dir tools/batch_test/gfl_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29687 & +echo 'configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION mask_rcnn_r50_fpn_gn-all_2x_coco configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py $CHECKPOINT_DIR/mask_rcnn_r50_fpn_gn-all_2x_coco_20200206-8eee02a6.pth --work-dir tools/batch_test/mask_rcnn_r50_fpn_gn-all_2x_coco --eval bbox segm --cfg-option dist_params.port=29688 & +echo 'configs/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION faster_rcnn_r50_fpn_gn_ws-all_1x_coco configs/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py $CHECKPOINT_DIR/faster_rcnn_r50_fpn_gn_ws-all_1x_coco_20200130-613d9fe2.pth --work-dir tools/batch_test/faster_rcnn_r50_fpn_gn_ws-all_1x_coco --eval bbox --cfg-option dist_params.port=29689 & +echo 'configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION 
grid_rcnn_r50_fpn_gn-head_2x_coco configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py $CHECKPOINT_DIR/grid_rcnn_r50_fpn_gn-head_2x_coco_20200130-6cca8223.pth --work-dir tools/batch_test/grid_rcnn_r50_fpn_gn-head_2x_coco --eval bbox --cfg-option dist_params.port=29690 & +echo 'configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION faster_rcnn_r50_fpn_groie_1x_coco configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py $CHECKPOINT_DIR/faster_rcnn_r50_fpn_groie_1x_coco_20200604_211715-66ee9516.pth --work-dir tools/batch_test/faster_rcnn_r50_fpn_groie_1x_coco --eval bbox --cfg-option dist_params.port=29691 & +echo 'configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION ga_retinanet_r50_caffe_fpn_1x_coco configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py $CHECKPOINT_DIR/ga_retinanet_r50_caffe_fpn_1x_coco_20201020-39581c6f.pth --work-dir tools/batch_test/ga_retinanet_r50_caffe_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29692 & +echo 'configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION ga_faster_r50_caffe_fpn_1x_coco configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py $CHECKPOINT_DIR/ga_faster_r50_caffe_fpn_1x_coco_20200702_000718-a11ccfe6.pth --work-dir tools/batch_test/ga_faster_r50_caffe_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29693 & +echo 'configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION faster_rcnn_hrnetv2p_w18_1x_coco configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py $CHECKPOINT_DIR/faster_rcnn_hrnetv2p_w18_1x_coco_20200130-56651a6d.pth --work-dir tools/batch_test/faster_rcnn_hrnetv2p_w18_1x_coco --eval bbox --cfg-option dist_params.port=29694 & +echo 'configs/htc/htc_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION htc_r50_fpn_1x_coco configs/htc/htc_r50_fpn_1x_coco.py $CHECKPOINT_DIR/htc_r50_fpn_1x_coco_20200317-7332cf16.pth --work-dir tools/batch_test/htc_r50_fpn_1x_coco --eval bbox segm --cfg-option dist_params.port=29695 & +echo 'configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION libra_faster_rcnn_r50_fpn_1x_coco configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py $CHECKPOINT_DIR/libra_faster_rcnn_r50_fpn_1x_coco_20200130-3afee3a9.pth --work-dir tools/batch_test/libra_faster_rcnn_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29696 & +echo 'configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION mask_rcnn_r50_fpn_1x_coco configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py $CHECKPOINT_DIR/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth --work-dir tools/batch_test/mask_rcnn_r50_fpn_1x_coco --eval bbox segm --cfg-option dist_params.port=29697 & +echo 'configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION ms_rcnn_r50_caffe_fpn_1x_coco configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py $CHECKPOINT_DIR/ms_rcnn_r50_caffe_fpn_1x_coco_20200702_180848-61c9355e.pth --work-dir tools/batch_test/ms_rcnn_r50_caffe_fpn_1x_coco --eval bbox segm --cfg-option dist_params.port=29698 & +echo 'configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 
CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py $CHECKPOINT_DIR/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200520-1bdba3ce.pth --work-dir tools/batch_test/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco --eval bbox --cfg-option dist_params.port=29699 & +echo 'configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION retinanet_r50_nasfpn_crop640_50e_coco configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py $CHECKPOINT_DIR/retinanet_r50_nasfpn_crop640_50e_coco-0ad1f644.pth --work-dir tools/batch_test/retinanet_r50_nasfpn_crop640_50e_coco --eval bbox --cfg-option dist_params.port=29700 & +echo 'configs/paa/paa_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION paa_r50_fpn_1x_coco configs/paa/paa_r50_fpn_1x_coco.py $CHECKPOINT_DIR/paa_r50_fpn_1x_coco_20200821-936edec3.pth --work-dir tools/batch_test/paa_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29701 & +echo 'configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION faster_rcnn_r50_pafpn_1x_coco configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py $CHECKPOINT_DIR/faster_rcnn_r50_pafpn_1x_coco_bbox_mAP-0.375_20200503_105836-b7b4b9bd.pth --work-dir tools/batch_test/faster_rcnn_r50_pafpn_1x_coco --eval bbox --cfg-option dist_params.port=29702 & +echo 'configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION pisa_faster_rcnn_r50_fpn_1x_coco configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py $CHECKPOINT_DIR/pisa_faster_rcnn_r50_fpn_1x_coco-dea93523.pth --work-dir tools/batch_test/pisa_faster_rcnn_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29703 & +echo 'configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION point_rend_r50_caffe_fpn_mstrain_1x_coco configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py $CHECKPOINT_DIR/point_rend_r50_caffe_fpn_mstrain_1x_coco-1bcb5fb4.pth --work-dir tools/batch_test/point_rend_r50_caffe_fpn_mstrain_1x_coco --eval bbox segm --cfg-option dist_params.port=29704 & +echo 'configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION mask_rcnn_regnetx-3.2GF_fpn_1x_coco configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py $CHECKPOINT_DIR/mask_rcnn_regnetx-3.2GF_fpn_1x_coco_20200520_163141-2a9d1814.pth --work-dir tools/batch_test/mask_rcnn_regnetx-3.2GF_fpn_1x_coco --eval bbox segm --cfg-option dist_params.port=29705 & +echo 'configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION reppoints_moment_r50_fpn_1x_coco configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py $CHECKPOINT_DIR/reppoints_moment_r50_fpn_1x_coco_20200330-b73db8d1.pth --work-dir tools/batch_test/reppoints_moment_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29706 & +echo 'configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION faster_rcnn_r2_101_fpn_2x_coco configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py $CHECKPOINT_DIR/faster_rcnn_r2_101_fpn_2x_coco-175f1da6.pth --work-dir tools/batch_test/faster_rcnn_r2_101_fpn_2x_coco --eval bbox --cfg-option 
dist_params.port=29707 & +echo 'configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py $CHECKPOINT_DIR/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco_20200926_125502-20289c16.pth --work-dir tools/batch_test/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco --eval bbox --cfg-option dist_params.port=29708 & +echo 'configs/retinanet/retinanet_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION retinanet_r50_fpn_1x_coco configs/retinanet/retinanet_r50_fpn_1x_coco.py $CHECKPOINT_DIR/retinanet_r50_fpn_1x_coco_20200130-c2398f9e.pth --work-dir tools/batch_test/retinanet_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29709 & +echo 'configs/rpn/rpn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION rpn_r50_fpn_1x_coco configs/rpn/rpn_r50_fpn_1x_coco.py $CHECKPOINT_DIR/rpn_r50_fpn_1x_coco_20200218-5525fa2e.pth --work-dir tools/batch_test/rpn_r50_fpn_1x_coco --eval proposal_fast --cfg-option dist_params.port=29710 & +echo 'configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION sabl_retinanet_r50_fpn_1x_coco configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py $CHECKPOINT_DIR/sabl_retinanet_r50_fpn_1x_coco-6c54fd4f.pth --work-dir tools/batch_test/sabl_retinanet_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29711 & +echo 'configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION sabl_faster_rcnn_r50_fpn_1x_coco configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py $CHECKPOINT_DIR/sabl_faster_rcnn_r50_fpn_1x_coco-e867595b.pth --work-dir tools/batch_test/sabl_faster_rcnn_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29712 & +echo 'configs/scnet/scnet_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION scnet_r50_fpn_1x_coco configs/scnet/scnet_r50_fpn_1x_coco.py $CHECKPOINT_DIR/scnet_r50_fpn_1x_coco-c3f09857.pth --work-dir tools/batch_test/scnet_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29713 & +echo 'configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION sparse_rcnn_r50_fpn_1x_coco configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py $CHECKPOINT_DIR/sparse_rcnn_r50_fpn_1x_coco_20201222_214453-dc79b137.pth --work-dir tools/batch_test/sparse_rcnn_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29714 & +echo 'configs/ssd/ssd300_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION ssd300_coco configs/ssd/ssd300_coco.py $CHECKPOINT_DIR/ssd300_coco_20200307-a92d2092.pth --work-dir tools/batch_test/ssd300_coco --eval bbox --cfg-option dist_params.port=29715 & +echo 'configs/tridentnet/tridentnet_r50_caffe_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION tridentnet_r50_caffe_1x_coco configs/tridentnet/tridentnet_r50_caffe_1x_coco.py $CHECKPOINT_DIR/tridentnet_r50_caffe_1x_coco_20201230_141838-2ec0b530.pth --work-dir tools/batch_test/tridentnet_r50_caffe_1x_coco --eval bbox --cfg-option dist_params.port=29716 & +echo 'configs/vfnet/vfnet_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 
tools/slurm_test.sh $PARTITION vfnet_r50_fpn_1x_coco configs/vfnet/vfnet_r50_fpn_1x_coco.py $CHECKPOINT_DIR/vfnet_r50_fpn_1x_coco_20201027-38db6f58.pth --work-dir tools/batch_test/vfnet_r50_fpn_1x_coco --eval bbox --cfg-option dist_params.port=29717 & +echo 'configs/yolact/yolact_r50_1x8_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION yolact_r50_1x8_coco configs/yolact/yolact_r50_1x8_coco.py $CHECKPOINT_DIR/yolact_r50_1x8_coco_20200908-f38d58df.pth --work-dir tools/batch_test/yolact_r50_1x8_coco --eval bbox segm --cfg-option dist_params.port=29718 & +echo 'configs/yolo/yolov3_d53_320_273e_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION yolov3_d53_320_273e_coco configs/yolo/yolov3_d53_320_273e_coco.py $CHECKPOINT_DIR/yolov3_d53_320_273e_coco-421362b6.pth --work-dir tools/batch_test/yolov3_d53_320_273e_coco --eval bbox --cfg-option dist_params.port=29719 & +echo 'configs/yolof/yolof_r50_c5_8x8_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION yolof_r50_c5_8x8_1x_coco configs/yolof/yolof_r50_c5_8x8_1x_coco.py $CHECKPOINT_DIR/yolof_r50_c5_8x8_1x_coco_20210425_024427-8e864411.pth --work-dir tools/batch_test/yolof_r50_c5_8x8_1x_coco --eval bbox --cfg-option dist_params.port=29720 & +echo 'configs/centernet/centernet_resnet18_dcnv2_140e_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 tools/slurm_test.sh $PARTITION centernet_resnet18_dcnv2_140e_coco configs/centernet/centernet_resnet18_dcnv2_140e_coco.py $CHECKPOINT_DIR/centernet_resnet18_dcnv2_140e_coco_20210520_101209-da388ba2.pth --work-dir tools/batch_test/centernet_resnet18_dcnv2_140e_coco --eval bbox --cfg-option dist_params.port=29721 & diff --git a/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/train_benchmark.sh b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/train_benchmark.sh new file mode 100644 index 0000000000000000000000000000000000000000..a263ca05299e7d213e21475b8adaea6eaa04d481 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.dev_scripts/train_benchmark.sh @@ -0,0 +1,128 @@ +echo 'configs/atss/atss_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab atss_r50_fpn_1x_coco configs/atss/atss_r50_fpn_1x_coco.py ./tools/work_dir/atss_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab autoassign_r50_fpn_8x2_1x_coco configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py ./tools/work_dir/autoassign_r50_fpn_8x2_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab cascade_mask_rcnn_r50_fpn_1x_coco configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py ./tools/work_dir/cascade_mask_rcnn_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab crpn_faster_rcnn_r50_caffe_fpn_1x_coco configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py ./tools/work_dir/crpn_faster_rcnn_r50_caffe_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/centernet/centernet_resnet18_dcnv2_140e_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 
./tools/slurm_train.sh openmmlab centernet_resnet18_dcnv2_140e_coco configs/centernet/centernet_resnet18_dcnv2_140e_coco.py ./tools/work_dir/centernet_resnet18_dcnv2_140e_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py' & +GPUS=16 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab centripetalnet_hourglass104_mstest_16x6_210e_coco configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py ./tools/work_dir/centripetalnet_hourglass104_mstest_16x6_210e_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab cornernet_hourglass104_mstest_8x6_210e_coco configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py ./tools/work_dir/cornernet_hourglass104_mstest_8x6_210e_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/detectors/detectors_htc_r50_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab detectors_htc_r50_1x_coco configs/detectors/detectors_htc_r50_1x_coco.py ./tools/work_dir/detectors_htc_r50_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py' & +GPUS=16 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab deformable_detr_r50_16x2_50e_coco configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py ./tools/work_dir/deformable_detr_r50_16x2_50e_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/detr/detr_r50_8x2_150e_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab detr_r50_8x2_150e_coco configs/detr/detr_r50_8x2_150e_coco.py ./tools/work_dir/detr_r50_8x2_150e_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab dh_faster_rcnn_r50_fpn_1x_coco configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py ./tools/work_dir/dh_faster_rcnn_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab dynamic_rcnn_r50_fpn_1x_coco configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py ./tools/work_dir/dynamic_rcnn_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab faster_rcnn_r50_fpn_1x_coco configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py ./tools/work_dir/faster_rcnn_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab faster_rcnn_r50_caffe_dc5_mstrain_1x_coco configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py ./tools/work_dir/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab faster_rcnn_r50_caffe_fpn_mstrain_1x_coco 
configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py ./tools/work_dir/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab faster_rcnn_r50_caffe_fpn_1x_coco configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py ./tools/work_dir/faster_rcnn_r50_caffe_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab faster_rcnn_r50_fpn_ohem_1x_coco configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py ./tools/work_dir/faster_rcnn_r50_fpn_ohem_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py' & +GPUS=4 GPUS_PER_NODE=4 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab fovea_align_r50_fpn_gn-head_4x4_2x_coco configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py ./tools/work_dir/fovea_align_r50_fpn_gn-head_4x4_2x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab mask_rcnn_r50_fpn_fp16_1x_coco configs/fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py ./tools/work_dir/mask_rcnn_r50_fpn_fp16_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab retinanet_r50_fpn_fp16_1x_coco configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py ./tools/work_dir/retinanet_r50_fpn_fp16_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab retinanet_free_anchor_r50_fpn_1x_coco configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py ./tools/work_dir/retinanet_free_anchor_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/fsaf/fsaf_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab fsaf_r50_fpn_1x_coco configs/fsaf/fsaf_r50_fpn_1x_coco.py ./tools/work_dir/fsaf_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/gfl/gfl_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab gfl_r50_fpn_1x_coco configs/gfl/gfl_r50_fpn_1x_coco.py ./tools/work_dir/gfl_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab retinanet_ghm_r50_fpn_1x_coco configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py ./tools/work_dir/retinanet_ghm_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab grid_rcnn_r50_fpn_gn-head_2x_coco configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py ./tools/work_dir/grid_rcnn_r50_fpn_gn-head_2x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 
./tools/slurm_train.sh openmmlab ga_faster_r50_caffe_fpn_1x_coco configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py ./tools/work_dir/ga_faster_r50_caffe_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/htc/htc_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab htc_r50_fpn_1x_coco configs/htc/htc_r50_fpn_1x_coco.py ./tools/work_dir/htc_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab ld_r18_gflv1_r101_fpn_coco_1x configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py ./tools/work_dir/ld_r18_gflv1_r101_fpn_coco_1x --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab libra_faster_rcnn_r50_fpn_1x_coco configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py ./tools/work_dir/libra_faster_rcnn_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py ./tools/work_dir/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab ms_rcnn_r50_caffe_fpn_1x_coco configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py ./tools/work_dir/ms_rcnn_r50_caffe_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py' & +GPUS=4 GPUS_PER_NODE=4 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py ./tools/work_dir/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/paa/paa_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab paa_r50_fpn_1x_coco configs/paa/paa_r50_fpn_1x_coco.py ./tools/work_dir/paa_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab pisa_mask_rcnn_r50_fpn_1x_coco configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py ./tools/work_dir/pisa_mask_rcnn_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab point_rend_r50_caffe_fpn_mstrain_1x_coco configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py ./tools/work_dir/point_rend_r50_caffe_fpn_mstrain_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab reppoints_moment_r50_fpn_gn-neck+head_1x_coco configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py 
./tools/work_dir/reppoints_moment_r50_fpn_gn-neck+head_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab retinanet_r50_caffe_fpn_1x_coco configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py ./tools/work_dir/retinanet_r50_caffe_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/rpn/rpn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab rpn_r50_fpn_1x_coco configs/rpn/rpn_r50_fpn_1x_coco.py ./tools/work_dir/rpn_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab sabl_retinanet_r50_fpn_1x_coco configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py ./tools/work_dir/sabl_retinanet_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/ssd/ssd300_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab ssd300_coco configs/ssd/ssd300_coco.py ./tools/work_dir/ssd300_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/tridentnet/tridentnet_r50_caffe_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab tridentnet_r50_caffe_1x_coco configs/tridentnet/tridentnet_r50_caffe_1x_coco.py ./tools/work_dir/tridentnet_r50_caffe_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/vfnet/vfnet_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab vfnet_r50_fpn_1x_coco configs/vfnet/vfnet_r50_fpn_1x_coco.py ./tools/work_dir/vfnet_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/yolact/yolact_r50_8x8_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab yolact_r50_8x8_coco configs/yolact/yolact_r50_8x8_coco.py ./tools/work_dir/yolact_r50_8x8_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/yolo/yolov3_d53_320_273e_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab yolov3_d53_320_273e_coco configs/yolo/yolov3_d53_320_273e_coco.py ./tools/work_dir/yolov3_d53_320_273e_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab sparse_rcnn_r50_fpn_1x_coco configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py ./tools/work_dir/sparse_rcnn_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/scnet/scnet_r50_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab scnet_r50_fpn_1x_coco configs/scnet/scnet_r50_fpn_1x_coco.py ./tools/work_dir/scnet_r50_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/yolof/yolof_r50_c5_8x8_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab yolof_r50_c5_8x8_1x_coco configs/yolof/yolof_r50_c5_8x8_1x_coco.py ./tools/work_dir/yolof_r50_c5_8x8_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab mask_rcnn_r50_fpn_carafe_1x_coco 
configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py ./tools/work_dir/mask_rcnn_r50_fpn_carafe_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab faster_rcnn_r50_fpn_mdpool_1x_coco configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py ./tools/work_dir/faster_rcnn_r50_fpn_mdpool_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py ./tools/work_dir/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab faster_rcnn_r50_fpn_dpool_1x_coco configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py ./tools/work_dir/faster_rcnn_r50_fpn_dpool_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py ./tools/work_dir/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py ./tools/work_dir/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py ./tools/work_dir/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab mask_rcnn_r50_fpn_gn-all_2x_coco configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py ./tools/work_dir/mask_rcnn_r50_fpn_gn-all_2x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab mask_rcnn_r50_fpn_gn_ws-all_2x_coco configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py ./tools/work_dir/mask_rcnn_r50_fpn_gn_ws-all_2x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab mask_rcnn_hrnetv2p_w18_1x_coco configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py ./tools/work_dir/mask_rcnn_hrnetv2p_w18_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab faster_rcnn_r50_pafpn_1x_coco configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py ./tools/work_dir/faster_rcnn_r50_pafpn_1x_coco --cfg-options 
checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab retinanet_r50_nasfpn_crop640_50e_coco configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py ./tools/work_dir/retinanet_r50_nasfpn_crop640_50e_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab mask_rcnn_regnetx-3.2GF_fpn_1x_coco configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py ./tools/work_dir/mask_rcnn_regnetx-3.2GF_fpn_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py ./tools/work_dir/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab faster_rcnn_r2_101_fpn_2x_coco configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py ./tools/work_dir/faster_rcnn_r2_101_fpn_2x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab faster_rcnn_r50_fpn_groie_1x_coco configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py ./tools/work_dir/faster_rcnn_r50_fpn_groie_1x_coco --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & +echo 'configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py' & +GPUS=8 GPUS_PER_NODE=8 CPUS_PER_TASK=2 ./tools/slurm_train.sh openmmlab mask_rcnn_r50_fpn_1x_cityscapes configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py ./tools/work_dir/mask_rcnn_r50_fpn_1x_cityscapes --cfg-options checkpoint_config.max_keep_ckpts=1 >/dev/null & diff --git a/detection_cbnet/docker-build-context/cbnetv2/.github/CODE_OF_CONDUCT.md b/detection_cbnet/docker-build-context/cbnetv2/.github/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000000000000000000000000000000000..efd4305798630a5cd7b17d7cf893b9a811d5501f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.github/CODE_OF_CONDUCT.md @@ -0,0 +1,76 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, sex characteristics, gender identity and expression, +level of experience, education, socio-economic status, nationality, personal +appearance, race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting the project team at chenkaidev@gmail.com. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see +https://www.contributor-covenant.org/faq diff --git a/detection_cbnet/docker-build-context/cbnetv2/.github/CONTRIBUTING.md b/detection_cbnet/docker-build-context/cbnetv2/.github/CONTRIBUTING.md new file mode 100644 index 0000000000000000000000000000000000000000..c6696262b4a1469314c1bf76b36e4e24519aeec9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.github/CONTRIBUTING.md @@ -0,0 +1 @@ +We appreciate all contributions to improve MMDetection. Please refer to [CONTRIBUTING.md](https://github.com/open-mmlab/mmcv/blob/master/CONTRIBUTING.md) in MMCV for more details about the contributing guideline. 
diff --git a/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/config.yml b/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/config.yml
new file mode 100644
index 0000000000000000000000000000000000000000..56bbd88fddfd7a1ceb50a1cb406e80318a8d0370
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/config.yml
@@ -0,0 +1,9 @@
+blank_issues_enabled: false
+
+contact_links:
+  - name: Common Issues
+    url: https://mmdetection.readthedocs.io/en/latest/faq.html
+    about: Check if your issue already has solutions
+  - name: MMDetection Documentation
+    url: https://mmdetection.readthedocs.io/en/latest/
+    about: Check if your question is answered in docs
diff --git a/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/error-report.md b/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/error-report.md
new file mode 100644
index 0000000000000000000000000000000000000000..23cb9c1aae43bc0ae7daa108f176ac4a04149d60
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/error-report.md
@@ -0,0 +1,47 @@
+---
+name: Error report
+about: Create a report to help us improve
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+Thanks for your error report and we appreciate it a lot.
+
+**Checklist**
+
+1. I have searched related issues but cannot get the expected help.
+2. I have read the [FAQ documentation](https://mmdetection.readthedocs.io/en/latest/faq.html) but cannot get the expected help.
+3. The bug has not been fixed in the latest version.
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**Reproduction**
+
+1. What command or script did you run?
+
+```none
+A placeholder for the command.
+```
+
+2. Did you make any modifications to the code or config? Did you understand what you have modified?
+3. What dataset did you use?
+
+**Environment**
+
+1. Please run `python mmdet/utils/collect_env.py` to collect necessary environment information and paste it here.
+2. You may add additional information that may be helpful for locating the problem, such as
+    - How you installed PyTorch [e.g., pip, conda, source]
+    - Other environment variables that may be related (such as `$PATH`, `$LD_LIBRARY_PATH`, `$PYTHONPATH`, etc.)
+
+**Error traceback**
+If applicable, paste the error traceback here.
+
+```none
+A placeholder for traceback.
+```
+
+**Bug fix**
+If you have already identified the reason, you can provide the information here. If you are willing to create a PR to fix it, please also leave a comment here and that would be much appreciated!
diff --git a/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/feature_request.md b/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 0000000000000000000000000000000000000000..33f9d5f2354dcb019cee5f6fbddf36f3a408fba3
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,22 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+**Describe the feature**
+
+**Motivation**
+A clear and concise description of the motivation of the feature.
+Ex1. It is inconvenient when [....].
+Ex2. There is a recent paper [....], which is very helpful for [....].
+
+**Related resources**
+If there is an official code release or third-party implementations, please also provide the information here, which would be very helpful.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.
+If you would like to implement the feature and create a PR, please leave a comment here and that would be much appreciated.
diff --git a/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/general_questions.md b/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/general_questions.md
new file mode 100644
index 0000000000000000000000000000000000000000..b5a6451a6cbf81ad1c4d0cae8541621b7991b99b
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/general_questions.md
@@ -0,0 +1,8 @@
+---
+name: General questions
+about: Ask general questions to get help
+title: ''
+labels: ''
+assignees: ''
+
+---
diff --git a/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/reimplementation_questions.md b/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/reimplementation_questions.md
new file mode 100644
index 0000000000000000000000000000000000000000..6b358387701e4cc4cbd7fcdabd2f9606aca045bc
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/.github/ISSUE_TEMPLATE/reimplementation_questions.md
@@ -0,0 +1,68 @@
+---
+name: Reimplementation Questions
+about: Ask questions about model reimplementation
+title: ''
+labels: 'reimplementation'
+assignees: ''
+
+---
+
+**Notice**
+
+There are several common situations in reimplementation issues, as listed below
+
+1. Reimplement a model in the model zoo using the provided configs
+2. Reimplement a model in the model zoo on other datasets (e.g., custom datasets)
+3. Reimplement a custom model but all the components are implemented in MMDetection
+4. Reimplement a custom model with new modules implemented by yourself
+
+There are several things to do for the different cases, as below.
+
+- For case 1 & 3, please follow the steps in the following sections so that we can quickly identify the issue.
+- For case 2 & 4, please understand that we are not able to help much here because we usually do not know the full code, and users should be responsible for the code they write.
+- One suggestion for case 2 & 4 is that users should first check whether the bug lies in the self-implemented code or the original code. For example, users can first make sure that the same model runs well on supported datasets. If you still need help, please describe what you have done and what you obtained in the issue, follow the steps in the following sections, and be as clear as possible so that we can better help you.
+
+**Checklist**
+
+1. I have searched related issues but cannot get the expected help.
+2. The issue has not been fixed in the latest version.
+
+**Describe the issue**
+
+A clear and concise description of the problem you met and what you have done.
+
+**Reproduction**
+
+1. What command or script did you run?
+
+```none
+A placeholder for the command.
+```
+
+2. What config did you run?
+
+```none
+A placeholder for the config.
+```
+
+3. Did you make any modifications to the code or config? Did you understand what you have modified?
+4. What dataset did you use?
+
+**Environment**
+
+1. Please run `python mmdet/utils/collect_env.py` to collect necessary environment information and paste it here.
+2. You may add additional information that may be helpful for locating the problem, such as
+    1. How you installed PyTorch [e.g., pip, conda, source]
+    2. Other environment variables that may be related (such as `$PATH`, `$LD_LIBRARY_PATH`, `$PYTHONPATH`, etc.)
+
+**Results**
+
+If applicable, paste the related results here, e.g., what you expect and what you get.
+
+```none
+A placeholder for results comparison
+```
+
+**Issue fix**
+
+If you have already identified the reason, you can provide the information here. If you are willing to create a PR to fix it, please also leave a comment here and that would be much appreciated!
diff --git a/detection_cbnet/docker-build-context/cbnetv2/.github/pull_request_template.md b/detection_cbnet/docker-build-context/cbnetv2/.github/pull_request_template.md
new file mode 100644
index 0000000000000000000000000000000000000000..8f8e28983ff2798a4a1c05dcfe9159f23b34b1c0
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/.github/pull_request_template.md
@@ -0,0 +1,25 @@
+Thanks for your contribution and we appreciate it a lot. The following instructions will make your pull request healthier and more likely to receive feedback. If you do not understand some items, don't worry; just make the pull request and seek help from maintainers.
+
+## Motivation
+
+Please describe the motivation of this PR and the goal you want to achieve through it.
+
+## Modification
+
+Please briefly describe what modification is made in this PR.
+
+## BC-breaking (Optional)
+
+Does the modification introduce changes that break the backward compatibility of the downstream repos?
+If so, please describe how it breaks the compatibility and how the downstream projects should modify their code to keep compatibility with this PR.
+
+## Use cases (Optional)
+
+If this PR introduces a new feature, it is better to list some use cases here and update the documentation.
+
+## Checklist
+
+1. Pre-commit or other linting tools are used to fix the potential lint issues.
+2. The modification is covered by complete unit tests. If not, please add more unit tests to ensure correctness.
+3. If the modification has potential influence on downstream projects, this PR should be tested with downstream projects, like MMDet or MMCls.
+4. The documentation has been modified accordingly, like docstring or example tutorials.
diff --git a/detection_cbnet/docker-build-context/cbnetv2/.github/workflows/build.yml b/detection_cbnet/docker-build-context/cbnetv2/.github/workflows/build.yml new file mode 100644 index 0000000000000000000000000000000000000000..0d56d621239eb43579bc1dd4ca852dbc0906f2fd --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.github/workflows/build.yml @@ -0,0 +1,161 @@ +name: build + +on: [push, pull_request] + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.7 + uses: actions/setup-python@v2 + with: + python-version: 3.7 + - name: Install pre-commit hook + run: | + pip install pre-commit + pre-commit install + - name: Linting + run: pre-commit run --all-files + - name: Check docstring coverage + run: | + pip install interrogate + interrogate -v --ignore-init-method --ignore-module --ignore-nested-functions --ignore-regex "__repr__" --fail-under 80 mmdet + + build_cpu: + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.7] + torch: [1.3.1, 1.5.1, 1.6.0] + include: + - torch: 1.3.1 + torchvision: 0.4.2 + mmcv: "latest+torch1.3.0+cpu" + - torch: 1.5.1 + torchvision: 0.6.1 + mmcv: "latest+torch1.5.0+cpu" + - torch: 1.6.0 + torchvision: 0.7.0 + mmcv: "latest+torch1.6.0+cpu" + - torch: 1.7.0 + torchvision: 0.8.1 + mmcv: "latest+torch1.7.0+cpu" + - torch: 1.8.0 + torchvision: 0.9.0 + mmcv: "latest+torch1.8.0+cpu" + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install Pillow + run: pip install Pillow==6.2.2 + if: ${{matrix.torchvision == '0.4.2'}} + - name: Install PyTorch + run: pip install torch==${{matrix.torch}}+cpu torchvision==${{matrix.torchvision}}+cpu -f https://download.pytorch.org/whl/torch_stable.html + - name: Install MMCV + run: | + pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cpu/torch${{matrix.torch}}/index.html + python -c 'import mmcv; print(mmcv.__version__)' + - name: Install unittest dependencies + run: pip install -r requirements/tests.txt -r requirements/optional.txt + - name: Build and install + run: rm -rf .eggs && pip install -e . 
+ - name: Run unittests and generate coverage report + run: | + coverage run --branch --source mmdet -m pytest tests/ + coverage xml + coverage report -m + + build_cuda: + runs-on: ubuntu-latest + + env: + CUDA: 10.1.105-1 + CUDA_SHORT: 10.1 + UBUNTU_VERSION: ubuntu1804 + strategy: + matrix: + python-version: [3.7] + torch: [1.3.1, 1.5.1+cu101, 1.6.0+cu101, 1.7.0+cu101, 1.8.0+cu101] + include: + - torch: 1.3.1 + torch_version: torch1.3.1 + torchvision: 0.4.2 + mmcv: "latest+torch1.3.0+cu101" + - torch: 1.5.1+cu101 + torch_version: torch1.5.1 + torchvision: 0.6.1+cu101 + mmcv: "latest+torch1.5.0+cu101" + - torch: 1.6.0+cu101 + torch_version: torch1.6.0 + torchvision: 0.7.0+cu101 + mmcv: "latest+torch1.6.0+cu101" + - torch: 1.6.0+cu101 + torch_version: torch1.6.0 + torchvision: 0.7.0+cu101 + mmcv: "latest+torch1.6.0+cu101" + python-version: 3.6 + - torch: 1.6.0+cu101 + torch_version: torch1.6.0 + torchvision: 0.7.0+cu101 + mmcv: "latest+torch1.6.0+cu101" + python-version: 3.8 + - torch: 1.7.0+cu101 + torch_version: torch1.7.0 + torchvision: 0.8.1+cu101 + mmcv: "latest+torch1.7.0+cu101" + - torch: 1.8.0+cu101 + torch_version: torch1.8.0 + torchvision: 0.9.0+cu101 + mmcv: "latest+torch1.8.0+cu101" + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + - name: Install CUDA + run: | + export INSTALLER=cuda-repo-${UBUNTU_VERSION}_${CUDA}_amd64.deb + wget http://developer.download.nvidia.com/compute/cuda/repos/${UBUNTU_VERSION}/x86_64/${INSTALLER} + sudo dpkg -i ${INSTALLER} + wget https://developer.download.nvidia.com/compute/cuda/repos/${UBUNTU_VERSION}/x86_64/7fa2af80.pub + sudo apt-key add 7fa2af80.pub + sudo apt update -qq + sudo apt install -y cuda-${CUDA_SHORT/./-} cuda-cufft-dev-${CUDA_SHORT/./-} + sudo apt clean + export CUDA_HOME=/usr/local/cuda-${CUDA_SHORT} + export LD_LIBRARY_PATH=${CUDA_HOME}/lib64:${CUDA_HOME}/include:${LD_LIBRARY_PATH} + export PATH=${CUDA_HOME}/bin:${PATH} + - name: Install Pillow + run: pip install Pillow==6.2.2 + if: ${{matrix.torchvision < 0.5}} + - name: Install PyTorch + run: pip install torch==${{matrix.torch}} torchvision==${{matrix.torchvision}} -f https://download.pytorch.org/whl/torch_stable.html + - name: Install mmdet dependencies + run: | + pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu101/${{matrix.torch_version}}/index.html + pip install -r requirements.txt + python -c 'import mmcv; print(mmcv.__version__)' + - name: Build and install + run: | + rm -rf .eggs + python setup.py check -m -s + TORCH_CUDA_ARCH_LIST=7.0 pip install . 
+ - name: Run unittests and generate coverage report + run: | + coverage run --branch --source mmdet -m pytest tests/ + coverage xml + coverage report -m + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v1.0.10 + with: + file: ./coverage.xml + flags: unittests + env_vars: OS,PYTHON + name: codecov-umbrella + fail_ci_if_error: false diff --git a/detection_cbnet/docker-build-context/cbnetv2/.github/workflows/build_pat.yml b/detection_cbnet/docker-build-context/cbnetv2/.github/workflows/build_pat.yml new file mode 100644 index 0000000000000000000000000000000000000000..7da6a9b81eab08ab0f542bd4b0021d3b25ac1e5b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.github/workflows/build_pat.yml @@ -0,0 +1,24 @@ +name: build_pat + +on: push + +jobs: + build_parrots: + runs-on: ubuntu-latest + container: + image: ghcr.io/zhouzaida/parrots-mmcv:1.3.4 + credentials: + username: zhouzaida + password: ${{ secrets.CR_PAT }} + + steps: + - uses: actions/checkout@v2 + - name: Install mmdet dependencies + run: | + git clone https://github.com/open-mmlab/mmcv.git && cd mmcv + MMCV_WITH_OPS=1 python setup.py install + cd .. && rm -rf mmcv + python -c 'import mmcv; print(mmcv.__version__)' + pip install -r requirements.txt + - name: Build and install + run: rm -rf .eggs && pip install -e . diff --git a/detection_cbnet/docker-build-context/cbnetv2/.github/workflows/deploy.yml b/detection_cbnet/docker-build-context/cbnetv2/.github/workflows/deploy.yml new file mode 100644 index 0000000000000000000000000000000000000000..2f9458b95c74c4778543022df5c7b8fcff159d8d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.github/workflows/deploy.yml @@ -0,0 +1,24 @@ +name: deploy + +on: push + +jobs: + build-n-publish: + runs-on: ubuntu-latest + if: startsWith(github.event.ref, 'refs/tags') + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.7 + uses: actions/setup-python@v2 + with: + python-version: 3.7 + - name: Install torch + run: pip install torch + - name: Install wheel + run: pip install wheel + - name: Build MMDetection + run: python setup.py sdist bdist_wheel + - name: Publish distribution to PyPI + run: | + pip install twine + twine upload dist/* -u __token__ -p ${{ secrets.pypi_password }} diff --git a/detection_cbnet/docker-build-context/cbnetv2/.gitignore b/detection_cbnet/docker-build-context/cbnetv2/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..77ca0d7c808c77d27777041e64cd8a01054433fc --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.gitignore @@ -0,0 +1,121 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +data/ +data +.vscode +.idea +.DS_Store + +# custom +*.pkl +*.pkl.json +*.log.json +work_dirs/ + +# Pytorch +*.pth +*.py~ +*.sh~ diff --git a/detection_cbnet/docker-build-context/cbnetv2/.pre-commit-config.yaml b/detection_cbnet/docker-build-context/cbnetv2/.pre-commit-config.yaml new file mode 100644 index 0000000000000000000000000000000000000000..64b489838c477c6952d7631d9b3aee952f072429 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.pre-commit-config.yaml @@ -0,0 +1,40 @@ +repos: + - repo: https://gitlab.com/pycqa/flake8.git + rev: 3.8.3 + hooks: + - id: flake8 + - repo: https://github.com/asottile/seed-isort-config + rev: v2.2.0 + hooks: + - id: seed-isort-config + - repo: https://github.com/timothycrosley/isort + rev: 4.3.21 + hooks: + - id: isort + - repo: https://github.com/pre-commit/mirrors-yapf + rev: v0.30.0 + hooks: + - id: yapf + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.1.0 + hooks: + - id: trailing-whitespace + - id: check-yaml + - id: end-of-file-fixer + - id: requirements-txt-fixer + - id: double-quote-string-fixer + - id: check-merge-conflict + - id: fix-encoding-pragma + args: ["--remove"] + - id: mixed-line-ending + args: ["--fix=lf"] + - repo: https://github.com/jumanjihouse/pre-commit-hooks + rev: 2.1.4 + hooks: + - id: markdownlint + args: ["-r", "~MD002,~MD013,~MD024,~MD029,~MD033,~MD034,~MD036", "-t", "allow_different_nesting"] + - repo: https://github.com/myint/docformatter + rev: v1.3.1 + hooks: + - id: docformatter + args: ["--in-place", "--wrap-descriptions", "79"] diff --git a/detection_cbnet/docker-build-context/cbnetv2/.readthedocs.yml b/detection_cbnet/docker-build-context/cbnetv2/.readthedocs.yml new file mode 100644 index 0000000000000000000000000000000000000000..73ea4cb7e95530cd18ed94895ca38edd531f0d94 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/.readthedocs.yml @@ -0,0 +1,7 @@ +version: 2 + +python: + version: 3.7 + install: + - requirements: requirements/docs.txt + - requirements: requirements/readthedocs.txt diff --git a/detection_cbnet/docker-build-context/cbnetv2/LICENSE b/detection_cbnet/docker-build-context/cbnetv2/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..7c7dc7a721325c1880f25e7dbc0a108349fd0e28 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2021 VDIGPKU + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/detection_cbnet/docker-build-context/cbnetv2/MANIFEST.in b/detection_cbnet/docker-build-context/cbnetv2/MANIFEST.in
new file mode 100644
index 0000000000000000000000000000000000000000..c6fcd8a5b5c9192a6c878dbabb0dc6183faeb174
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/MANIFEST.in
@@ -0,0 +1,6 @@
+include requirements/*.txt
+include mmdet/VERSION
+include mmdet/model-index.yml
+include mmdet/demo/*/*
+recursive-include mmdet/configs *.py *.yml
+recursive-include mmdet/tools *.sh *.py
diff --git a/detection_cbnet/docker-build-context/cbnetv2/README.md b/detection_cbnet/docker-build-context/cbnetv2/README.md
new file mode 100755
index 0000000000000000000000000000000000000000..50236af739443c5670219cec699e5f90a3cdf537
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/README.md
@@ -0,0 +1,118 @@
+# CBNetV2: A Novel Composite Backbone Network Architecture for Object Detection
+[![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/cbnetv2-a-composite-backbone-network/object-detection-on-coco)](https://paperswithcode.com/sota/object-detection-on-coco?p=cbnetv2-a-composite-backbone-network)
+[![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/cbnetv2-a-composite-backbone-network/instance-segmentation-on-coco)](https://paperswithcode.com/sota/instance-segmentation-on-coco?p=cbnetv2-a-composite-backbone-network)
+[![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/cbnetv2-a-composite-backbone-network/object-detection-on-coco-minival)](https://paperswithcode.com/sota/object-detection-on-coco-minival?p=cbnetv2-a-composite-backbone-network)
+[![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/cbnetv2-a-composite-backbone-network/instance-segmentation-on-coco-minival)](https://paperswithcode.com/sota/instance-segmentation-on-coco-minival?p=cbnetv2-a-composite-backbone-network)
+
+By [Tingting Liang](https://github.com/tingtingliangvs)\*, [Xiaojie Chu](https://github.com/chuxiaojie)\*, [Yudong Liu](https://github.com/PKUbahuangliuhe)\*, Yongtao Wang, Zhi Tang, Wei Chu, Jingdong Chen, Haibin Ling.
+
+This repo is the official implementation of [CBNetV2](http://arxiv.org/abs/2107.00420). It is based on [mmdetection](https://github.com/open-mmlab/mmdetection) and [Swin Transformer for Object Detection](https://github.com/SwinTransformer/Swin-Transformer-Object-Detection).
+
+Contact us at tingtingliang@pku.edu.cn, chuxiaojie@stu.pku.edu.cn, or wyt@pku.edu.cn.
+## Introduction
+*CBNetV2* achieves strong single-model performance on COCO object detection (`60.1 box AP` and `52.3 mask AP` on test-dev) without extra training data.
+ +![teaser](figures/cbnetv2.png) + + +## Partial Results and Models +**More results and models can be found in [model zoo](model_zoo.md)** + +### Faster R-CNN +| Backbone | Lr Schd | box mAP (minival) | #params | FLOPs | config | log | model | +| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | +| DB-ResNet50 | 1x | 40.8 | 69M | 284G | [config](configs/cbnet/faster_rcnn_cbv2d1_r50_fpn_1x_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/faster_rcnn_cbv2d1_r50_fpn_1x_coco.log.json)| [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/faster_rcnn_cbv2d1_r50_fpn_1x_coco.pth.zip)| + + +### Mask R-CNN + +| Backbone | Lr Schd | box mAP (minival) | mask mAP (minival) | #params | FLOPs | config | log | model | +| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | +| DB-Swin-T | 3x | 50.2 | 44.5 | 76M | 357G | [config](configs/cbnet/mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.log.json) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.pth.zip) | + +### Cascade Mask R-CNN (1600x1400) +| Backbone | Lr Schd | box mAP (minival/test-dev)| mask mAP (minival/test-dev)| #params | FLOPs | config | model | +| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | +| DB-Swin-S | 3x | 56.3/56.9 | 48.6/49.1 | 156M | 1016G | [config](configs/cbnet/cascade_mask_rcnn_cbv2_swin_small_patch4_window7_mstrain_400-1400_adamw_3x_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/cascade_mask_rcnn_cbv2_swin_small_patch4_window7_mstrain_400-1400_adamw_3x_coco.pth.zip)| + +### Improved HTC (1600x1400) +*We use ImageNet-22k pretrained checkpoints of Swin-B and Swin-L. Compared to regular HTC, our HTC uses 4conv1fc in bbox head.* +| Backbone | Lr Schd | box mAP (minival/test-dev) | mask mAP (minival/test-dev) | #params | FLOPs | config | model | +| :---: |:---: | :---: | :---: | :---: | :---: | :---: | :---: | +| DB-Swin-B | 20e | 58.4/58.7 | 50.7/51.1 | 235M | 1348G | [config](configs/cbnet/htc_cbv2_swin_base_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_20e_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/htc_cbv2_swin_base22k_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_20e_coco.pth.zip) | +| DB-Swin-L | 1x | 59.1/59.4 | 51.0/51.6 | 453M | 2162G | [config (test only)](configs/cbnet/htc_cbv2_swin_large_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/htc_cbv2_swin_large22k_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.pth.zip) | +| DB-Swin-L (TTA) | 1x | 59.6/60.1 | 51.8/52.3 | 453M | - | [config (test only)](configs/cbnet/htc_cbv2_swin_large_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/htc_cbv2_swin_large22k_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.pth.zip) | + +TTA denotes test time augmentation. + +**Notes**: + +- **Pre-trained models of Swin Transformer can be downloaded from [Swin Transformer for ImageNet Classification](https://github.com/microsoft/Swin-Transformer)**. 
+
+## Usage
+
+### Installation
+
+Please refer to [get_started.md](https://github.com/open-mmlab/mmdetection/blob/master/docs/get_started.md) for installation and dataset preparation.
+
+### Inference
+```
+# single-gpu testing (w/o segm result)
+python tools/test.py <CONFIG_FILE> <DET_CHECKPOINT_FILE> --eval bbox
+
+# multi-gpu testing (w/ segm result)
+tools/dist_test.sh <CONFIG_FILE> <DET_CHECKPOINT_FILE> <GPU_NUM> --eval bbox segm
+```
+
+### Training
+
+To train a detector with pre-trained models, run:
+```
+# multi-gpu training
+tools/dist_train.sh <CONFIG_FILE> <GPU_NUM>
+```
+For example, to train a Faster R-CNN model with a `Dual-ResNet50` backbone and 8 GPUs, run:
+```
+# path of pre-training model (resnet50) is already in config
+tools/dist_train.sh configs/cbnet/faster_rcnn_cbv2d1_r50_fpn_1x_coco.py 8
+```
+
+As another example, to train a Mask R-CNN model with a `Dual-Swin-T` backbone and 8 GPUs, run:
+```
+tools/dist_train.sh configs/cbnet/mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.py 8 --cfg-options model.pretrained=<PRETRAINED_MODEL>
+```
+
+### Apex (optional)
+Following [Swin Transformer for Object Detection](https://github.com/SwinTransformer/Swin-Transformer-Object-Detection), we use apex for mixed precision training by default. To install apex, run:
+```
+git clone https://github.com/NVIDIA/apex
+cd apex
+pip install -v --disable-pip-version-check --no-cache-dir --global-option="--cpp_ext" --global-option="--cuda_ext" ./
+```
+
+### Documents and Tutorials
+*We list some documents and tutorials from [MMDetection](https://github.com/open-mmlab/mmdetection), which may be helpful to you.*
+* [Learn about Configs](https://github.com/open-mmlab/mmdetection/blob/master/docs/tutorials/config.md)
+* [Train with customized datasets](https://github.com/open-mmlab/mmdetection/blob/master/docs/2_new_data_model.md)
+* [Finetuning Models](https://github.com/open-mmlab/mmdetection/blob/master/docs/tutorials/finetune.md)
+
+
+## Citation
+If you use our code/model, please consider citing our paper [CBNetV2: A Novel Composite Backbone Network Architecture for Object Detection](http://arxiv.org/abs/2107.00420).
+```
+@article{liang2021cbnetv2,
+  title={CBNetV2: A Composite Backbone Network Architecture for Object Detection},
+  author={Tingting Liang and Xiaojie Chu and Yudong Liu and Yongtao Wang and Zhi Tang and Wei Chu and Jingdong Chen and Haibin Ling},
+  journal={arXiv preprint arXiv:2107.00420},
+  year={2021}
+}
+```
+
+## License
+The project is free only for academic research purposes, but requires authorization for commercial use. For permission, please contact wyt@pku.edu.cn.
+
+
+## Other Links
+> **Original CBNet**: See [CBNet: A Novel Composite Backbone Network Architecture for Object Detection](https://github.com/VDIGPKU/CBNet).
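For scripted single-image inference, the same configs and checkpoints can also be driven through MMDetection's high-level Python API instead of the test scripts above. The following is a minimal sketch, assuming mmdet is installed as described in the README and a checkpoint has been downloaded locally; the checkpoint path and demo image are hypothetical.

```python
# A minimal sketch of single-image inference via MMDetection's Python API.
# Assumptions: mmdet/mmcv-full are installed; the checkpoint path below is a
# hypothetical local download of the model referenced in the README tables.
from mmdet.apis import init_detector, inference_detector

config_file = 'configs/cbnet/faster_rcnn_cbv2d1_r50_fpn_1x_coco.py'
checkpoint_file = 'checkpoints/faster_rcnn_cbv2d1_r50_fpn_1x_coco.pth'  # hypothetical path

# Build the detector from the config and load the trained weights.
model = init_detector(config_file, checkpoint_file, device='cuda:0')

# Run inference on one image. For a detector without a mask head, the result
# is a list with one ndarray per class, each row being [x1, y1, x2, y2, score].
result = inference_detector(model, 'demo/demo.jpg')
```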
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/cityscapes_detection.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/cityscapes_detection.py new file mode 100644 index 0000000000000000000000000000000000000000..e341b59d6fa6265c2d17dc32aae2341871670a3d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/cityscapes_detection.py @@ -0,0 +1,56 @@ +# dataset settings +dataset_type = 'CityscapesDataset' +data_root = 'data/cityscapes/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', img_scale=[(2048, 800), (2048, 1024)], keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=1, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=8, + dataset=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_train.json', + img_prefix=data_root + 'leftImg8bit/train/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_val.json', + img_prefix=data_root + 'leftImg8bit/val/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_test.json', + img_prefix=data_root + 'leftImg8bit/test/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/cityscapes_instance.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/cityscapes_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..4e3c34e2c85b4fc2ba854e1b409af70dc2c34e94 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/cityscapes_instance.py @@ -0,0 +1,56 @@ +# dataset settings +dataset_type = 'CityscapesDataset' +data_root = 'data/cityscapes/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', img_scale=[(2048, 800), (2048, 1024)], keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(2048, 1024), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', 
keys=['img']), + ]) +] +data = dict( + samples_per_gpu=1, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=8, + dataset=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_train.json', + img_prefix=data_root + 'leftImg8bit/train/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_val.json', + img_prefix=data_root + 'leftImg8bit/val/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/instancesonly_filtered_gtFine_test.json', + img_prefix=data_root + 'leftImg8bit/test/', + pipeline=test_pipeline)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/coco_detection.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/coco_detection.py new file mode 100644 index 0000000000000000000000000000000000000000..149f590bb45fa65c29fd4c005e4a237d7dd2e117 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/coco_detection.py @@ -0,0 +1,49 @@ +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/coco_instance.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/coco_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..9901a858414465d19d8ec6ced316b460166176b4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/coco_instance.py @@ -0,0 +1,49 @@ +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + 
dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/coco_instance_semantic.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/coco_instance_semantic.py new file mode 100644 index 0000000000000000000000000000000000000000..6c8bf07b278f615e7ff5e67490d7a92068574b5b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/coco_instance_semantic.py @@ -0,0 +1,54 @@ +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', with_bbox=True, with_mask=True, with_seg=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 8), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + seg_prefix=data_root + 'stuffthingmaps/train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/deepfashion.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/deepfashion.py new file mode 100644 index 
0000000000000000000000000000000000000000..308b4b2ac4d9e3516ba4a57e9d3b6af91e97f24b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/deepfashion.py @@ -0,0 +1,53 @@ +# dataset settings +dataset_type = 'DeepFashionDataset' +data_root = 'data/DeepFashion/In-shop/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(750, 1101), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(750, 1101), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + imgs_per_gpu=2, + workers_per_gpu=1, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/DeepFashion_segmentation_query.json', + img_prefix=data_root + 'Img/', + pipeline=train_pipeline, + data_root=data_root), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/DeepFashion_segmentation_query.json', + img_prefix=data_root + 'Img/', + pipeline=test_pipeline, + data_root=data_root), + test=dict( + type=dataset_type, + ann_file=data_root + + 'annotations/DeepFashion_segmentation_gallery.json', + img_prefix=data_root + 'Img/', + pipeline=test_pipeline, + data_root=data_root)) +evaluation = dict(interval=5, metric=['bbox', 'segm']) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/lvis_v0.5_instance.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/lvis_v0.5_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..207e0053c24d73e05e78c764d05e65c102675320 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/lvis_v0.5_instance.py @@ -0,0 +1,24 @@ +# dataset settings +_base_ = 'coco_instance.py' +dataset_type = 'LVISV05Dataset' +data_root = 'data/lvis_v0.5/' +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + _delete_=True, + type='ClassBalancedDataset', + oversample_thr=1e-3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v0.5_train.json', + img_prefix=data_root + 'train2017/')), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v0.5_val.json', + img_prefix=data_root + 'val2017/'), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v0.5_val.json', + img_prefix=data_root + 'val2017/')) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/lvis_v1_instance.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/lvis_v1_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..be791edd79495dce88d010eea63e33d398f242b0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/lvis_v1_instance.py @@ -0,0 +1,24 @@ +# dataset settings +_base_ = 'coco_instance.py' +dataset_type = 'LVISV1Dataset' +data_root = 'data/lvis_v1/' 
+data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + _delete_=True, + type='ClassBalancedDataset', + oversample_thr=1e-3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_train.json', + img_prefix=data_root)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root)) +evaluation = dict(metric=['bbox', 'segm']) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/voc0712.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..ae09acdd5c9580217815300abbad9f08b71b37ed --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/voc0712.py @@ -0,0 +1,55 @@ +# dataset settings +dataset_type = 'VOCDataset' +data_root = 'data/VOCdevkit/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1000, 600), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1000, 600), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=[ + data_root + 'VOC2007/ImageSets/Main/trainval.txt', + data_root + 'VOC2012/ImageSets/Main/trainval.txt' + ], + img_prefix=[data_root + 'VOC2007/', data_root + 'VOC2012/'], + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt', + img_prefix=data_root + 'VOC2007/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'VOC2007/ImageSets/Main/test.txt', + img_prefix=data_root + 'VOC2007/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='mAP') diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/wider_face.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/wider_face.py new file mode 100644 index 0000000000000000000000000000000000000000..d1d649be42bca2955fb56a784fe80bcc2fdce4e1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/datasets/wider_face.py @@ -0,0 +1,63 @@ +# dataset settings +dataset_type = 'WIDERFaceDataset' +data_root = 'data/WIDERFace/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + 
type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=60, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=2, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'train.txt', + img_prefix=data_root + 'WIDER_train/', + min_size=17, + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'val.txt', + img_prefix=data_root + 'WIDER_val/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'val.txt', + img_prefix=data_root + 'WIDER_val/', + pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/default_runtime.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/default_runtime.py new file mode 100644 index 0000000000000000000000000000000000000000..55097c5b242da66c9735c0b45cd84beefab487b1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/default_runtime.py @@ -0,0 +1,16 @@ +checkpoint_config = dict(interval=1) +# yapf:disable +log_config = dict( + interval=50, + hooks=[ + dict(type='TextLoggerHook'), + # dict(type='TensorboardLoggerHook') + ]) +# yapf:enable +custom_hooks = [dict(type='NumClassCheckHook')] + +dist_params = dict(backend='nccl') +log_level = 'INFO' +load_from = None +resume_from = None +workflow = [('train', 1)] diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/cascade_mask_rcnn_r50_fpn.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/cascade_mask_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..2902ccae5a8ffaa6ae9c49212b68a71035c83e60 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/cascade_mask_rcnn_r50_fpn.py @@ -0,0 +1,196 @@ +# model settings +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='CascadeRoIHead', + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 
32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=dict( + type='FCNMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False) + ]), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/cascade_mask_rcnn_swin_fpn.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/cascade_mask_rcnn_swin_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..c8ae9235d0cf1e1fdc1781a253ea43d110dcfa35 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/cascade_mask_rcnn_swin_fpn.py @@ -0,0 +1,207 @@ +# model settings +model = dict( + type='CascadeRCNN', + pretrained=None, + backbone=dict( + type='SwinTransformer', + embed_dim=96, + depths=[2, 2, 6, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + mlp_ratio=4., + qkv_bias=True, + qk_scale=None, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0.2, + ape=False, + patch_norm=True, + out_indices=(0, 1, 2, 3), + use_checkpoint=False), + neck=dict( + type='FPN', + in_channels=[96, 192, 384, 768], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='CascadeRoIHead', + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=dict( + type='FCNMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))), + # model training and testing settings + train_cfg = dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + 
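+                # neg_pos_ub=-1 puts no upper bound on the negative:positive ratio among the 256 sampled anchors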
add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_across_levels=False, + nms_pre=2000, + nms_post=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False) + ]), + test_cfg = dict( + rpn=dict( + nms_across_levels=False, + nms_pre=1000, + nms_post=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/cascade_rcnn_r50_fpn.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/cascade_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..42f74ae748a32bdce10ab9003fd45f87721d02ff --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/cascade_rcnn_r50_fpn.py @@ -0,0 +1,179 @@ +# model settings +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='CascadeRoIHead', + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + 
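+            # stages 2-3 reuse the same head but tighten target_stds (0.1 -> 0.05 -> 0.033) as the assigner IoU thresholds rise (0.5 -> 0.6 -> 0.7)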
dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ]), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False) + ]), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/cascade_rcnn_swin_fpn.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/cascade_rcnn_swin_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..cd9336fe04d6f0c7fff38711655fe614218766e7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/cascade_rcnn_swin_fpn.py @@ -0,0 +1,186 @@ +# model settings +model = dict( + type='CascadeRCNN', + pretrained=None, + backbone=dict( + type='SwinTransformer', + embed_dim=96, + depths=[2, 2, 6, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + mlp_ratio=4., + qkv_bias=True, + qk_scale=None, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0.2, + ape=False, + patch_norm=True, + out_indices=(0, 1, 2, 3), + use_checkpoint=False), + neck=dict( + type='FPN', + in_channels=[96, 192, 384, 768], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + 
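+            # one scale per level: base anchor size = scale * stride, i.e. 32-512 px across P2-P6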
type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='CascadeRoIHead', + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ]), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False) + ]), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', 
iou_threshold=0.5), + max_per_img=100))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/fast_rcnn_r50_fpn.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/fast_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..9982fe0956d60022a2c702a824ffaff192e93e1e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/fast_rcnn_r50_fpn.py @@ -0,0 +1,62 @@ +# model settings +model = dict( + type='FastRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)), + test_cfg=dict( + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/faster_rcnn_r50_caffe_c4.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/faster_rcnn_r50_caffe_c4.py new file mode 100644 index 0000000000000000000000000000000000000000..51b5db469e83cc6b3cf2adae92b5d5741825ab35 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/faster_rcnn_r50_caffe_c4.py @@ -0,0 +1,114 @@ +# model settings +norm_cfg = dict(type='BN', requires_grad=False) +model = dict( + type='FasterRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=3, + strides=(1, 2, 2), + dilations=(1, 1, 1), + out_indices=(2, ), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + rpn_head=dict( + type='RPNHead', + in_channels=1024, + feat_channels=1024, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + shared_head=dict( + type='ResLayer', + depth=50, + stage=3, + stride=2, + dilation=1, + style='caffe', + norm_cfg=norm_cfg, + norm_eval=True), + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + 
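+            # C4 variant: RoIs are pooled at 14x14 from the single stride-16 res4 map, then refined by the shared res5 stage above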
roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=1024, + featmap_strides=[16]), + bbox_head=dict( + type='BBoxHead', + with_avg_pool=True, + roi_feat_size=7, + in_channels=2048, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=12000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=6000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/faster_rcnn_r50_caffe_dc5.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/faster_rcnn_r50_caffe_dc5.py new file mode 100644 index 0000000000000000000000000000000000000000..a377a6f09664b5eca189fa77dcb47c69842fdbf2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/faster_rcnn_r50_caffe_dc5.py @@ -0,0 +1,105 @@ +# model settings +norm_cfg = dict(type='BN', requires_grad=False) +model = dict( + type='FasterRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + strides=(1, 2, 2, 1), + dilations=(1, 1, 1, 2), + out_indices=(3, ), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + rpn_head=dict( + type='RPNHead', + in_channels=2048, + feat_channels=2048, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=2048, + featmap_strides=[16]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=2048, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + 
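+        # RPN assignment: anchors with IoU >= 0.7 become positives, < 0.3 negatives, and the rest are ignored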
assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=12000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms=dict(type='nms', iou_threshold=0.7), + nms_pre=6000, + max_per_img=1000, + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/faster_rcnn_r50_fpn.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/faster_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..1ef8e7b2579504e7614429609524ae38239701cc --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/faster_rcnn_r50_fpn.py @@ -0,0 +1,108 @@ +# model settings +model = dict( + type='FasterRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + 
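+                # ignore_iof_thr=-1 disables dropping proposals that fall inside annotated ignore regions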
ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100) + # soft-nms is also supported for rcnn testing + # e.g., nms=dict(type='soft_nms', iou_threshold=0.5, min_score=0.05) + )) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/htc_without_semantic_swin_fpn.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/htc_without_semantic_swin_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..618cf81fb9d25dbddf5f04208a0eab3a1e25002f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/htc_without_semantic_swin_fpn.py @@ -0,0 +1,221 @@ +# model settings +model = dict( + type='HybridTaskCascade', + pretrained=None, + backbone=dict( + type='SwinTransformer', + embed_dim=96, + depths=[2, 2, 6, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + mlp_ratio=4., + qkv_bias=True, + qk_scale=None, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0.2, + ape=False, + patch_norm=True, + out_indices=(0, 1, 2, 3), + use_checkpoint=False), + neck=dict( + type='FPN', + in_channels=[96, 192, 384, 768], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='HybridTaskCascadeRoIHead', + interleaved=True, + mask_info_flow=True, + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + 
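+        # HTC interleaves the bbox and mask branches and passes mask features stage to stage (interleaved / mask_info_flow above)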
mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=[ + dict( + type='HTCMaskHead', + with_conv_res=False, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + dict( + type='HTCMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + dict( + type='HTCMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)) + ], + ), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False) + ]), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/mask_rcnn_r50_caffe_c4.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/mask_rcnn_r50_caffe_c4.py new file mode 100644 index 0000000000000000000000000000000000000000..122202e1a5d6b3367de9a8c632864cf168ca5b9d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/mask_rcnn_r50_caffe_c4.py @@ -0,0 +1,125 @@ +# model settings +norm_cfg = dict(type='BN', requires_grad=False) +model = dict( + type='MaskRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=3, + strides=(1, 2, 2), + dilations=(1, 1, 1), + out_indices=(2, ), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + rpn_head=dict( + type='RPNHead', + in_channels=1024, + feat_channels=1024, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 
1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + shared_head=dict( + type='ResLayer', + depth=50, + stage=3, + stride=2, + dilation=1, + style='caffe', + norm_cfg=norm_cfg, + norm_eval=True), + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=1024, + featmap_strides=[16]), + bbox_head=dict( + type='BBoxHead', + with_avg_pool=True, + roi_feat_size=7, + in_channels=2048, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + mask_roi_extractor=None, + mask_head=dict( + type='FCNMaskHead', + num_convs=0, + in_channels=2048, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=12000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=False, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=14, + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=6000, + nms=dict(type='nms', iou_threshold=0.7), + max_per_img=1000, + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/mask_rcnn_r50_fpn.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/mask_rcnn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..d903e55e2d95135b1448e566d4d5ec8146597a6a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/mask_rcnn_r50_fpn.py @@ -0,0 +1,120 @@ +# model settings +model = dict( + type='MaskRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + 
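+        # standard two-branch head: 7x7 RoIAlign feeds the bbox branch, 14x14 the FCN mask branch below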
bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=dict( + type='FCNMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/mask_rcnn_swin_fpn.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/mask_rcnn_swin_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..e3d42197f4646cd9ecafac2095d3f8e079f0a729 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/mask_rcnn_swin_fpn.py @@ -0,0 +1,127 @@ +# model settings +model = dict( + type='MaskRCNN', + pretrained=None, + backbone=dict( + type='SwinTransformer', + embed_dim=96, + depths=[2, 2, 6, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + mlp_ratio=4., + qkv_bias=True, + qk_scale=None, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0.2, + ape=False, + patch_norm=True, + out_indices=(0, 1, 2, 3), + use_checkpoint=False), + neck=dict( + type='FPN', + in_channels=[96, 192, 384, 768], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, 
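+            # sampling_ratio=0 samples each RoI bin densely instead of with a fixed point count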
sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=dict( + type='FCNMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/retinanet_r50_fpn.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/retinanet_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..56e43fa7764cb0f48510415f21888ba0df0c6eb5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/retinanet_r50_fpn.py @@ -0,0 +1,60 @@ +# model settings +model = dict( + type='RetinaNet', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_input', + num_outs=5), + bbox_head=dict( + type='RetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), 
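+        # no sampler entry: FocalLoss is applied over all anchors of this single-stage head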
+ allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/rpn_r50_caffe_c4.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/rpn_r50_caffe_c4.py new file mode 100644 index 0000000000000000000000000000000000000000..8b32ca99258e5ddf249d11eadcd46630d88bd55e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/rpn_r50_caffe_c4.py @@ -0,0 +1,58 @@ +# model settings +model = dict( + type='RPN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=3, + strides=(1, 2, 2), + dilations=(1, 1, 1), + out_indices=(2, ), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + neck=None, + rpn_head=dict( + type='RPNHead', + in_channels=1024, + feat_channels=1024, + anchor_generator=dict( + type='AnchorGenerator', + scales=[2, 4, 8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[16]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=12000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/rpn_r50_fpn.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/rpn_r50_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..edaf4d4b06b64b88a4ddd64419fc026e64a6af1d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/rpn_r50_fpn.py @@ -0,0 +1,58 @@ +# model settings +model = dict( + type='RPN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + 
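+            # pos_weight=-1 keeps the default loss weight (1.0) for positive anchors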
debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/ssd300.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/ssd300.py new file mode 100644 index 0000000000000000000000000000000000000000..f17df010069e300f9f0b6eb456f87e61b8582787 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/models/ssd300.py @@ -0,0 +1,56 @@ +# model settings +input_size = 300 +model = dict( + type='SingleStageDetector', + backbone=dict( + type='SSDVGG', + depth=16, + with_last_pool=False, + ceil_mode=True, + out_indices=(3, 4), + out_feature_indices=(22, 34), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://vgg16_caffe')), + neck=dict( + type='SSDNeck', + in_channels=(512, 1024), + out_channels=(512, 1024, 512, 256, 256, 256), + level_strides=(2, 2, 1, 1), + level_paddings=(1, 1, 0, 0), + l2_norm_scale=20), + bbox_head=dict( + type='SSDHead', + in_channels=(512, 1024, 512, 256, 256, 256), + num_classes=80, + anchor_generator=dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=input_size, + basesize_ratio_range=(0.15, 0.9), + strides=[8, 16, 32, 64, 100, 300], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2])), + # model training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0., + ignore_iof_thr=-1, + gt_max_assign_all=False), + smoothl1_beta=1., + allowed_border=-1, + pos_weight=-1, + neg_pos_ratio=3, + debug=False), + test_cfg=dict( + nms_pre=1000, + nms=dict(type='nms', iou_threshold=0.45), + min_bbox_size=0, + score_thr=0.02, + max_per_img=200)) +cudnn_benchmark = True diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/schedules/schedule_1x.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/schedules/schedule_1x.py new file mode 100644 index 0000000000000000000000000000000000000000..13b3783cbbe93b6c32bc415dc50f633dffa4aec7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/schedules/schedule_1x.py @@ -0,0 +1,11 @@ +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[8, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/schedules/schedule_20e.py b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/schedules/schedule_20e.py new file mode 100644 index 0000000000000000000000000000000000000000..00e859022156dcbef6501c04d03f335639f2c1f6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/schedules/schedule_20e.py @@ -0,0 +1,11 @@ +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/schedules/schedule_2x.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/schedules/schedule_2x.py new file mode 100644 index 0000000000000000000000000000000000000000..69dc9ee8080649ce3646b5775b0ca2e9c863d0f5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/_base_/schedules/schedule_2x.py @@ -0,0 +1,11 @@ +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/albu_example/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/albu_example/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b11ae56ffb484ed599690ae2f766625f6c0e1466 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/albu_example/README.md @@ -0,0 +1,19 @@ +# Albu Example + + + +``` +@article{2018arXiv180906839B, + author = {A. Buslaev and A. Parinov and E. Khvedchenya and V.~I. Iglovikov and A.~A. Kalinin}, + title = "{Albumentations: fast and flexible image augmentations}", + journal = {ArXiv e-prints}, + eprint = {1809.06839}, + year = 2018 +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | |:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| | R-50 | pytorch | 1x | 4.4 | 16.6 | 38.0 | 34.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/albu_example/mask_rcnn_r50_fpn_albu_1x_coco/mask_rcnn_r50_fpn_albu_1x_coco_20200208-ab203bcd.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/albu_example/mask_rcnn_r50_fpn_albu_1x_coco/mask_rcnn_r50_fpn_albu_1x_coco_20200208_225520.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b3f879a6c573871ea17b2bf158173aadf14457b6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/albu_example/mask_rcnn_r50_fpn_albu_1x_coco.py @@ -0,0 +1,73 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +albu_train_transforms = [ + dict( + type='ShiftScaleRotate', + shift_limit=0.0625, + scale_limit=0.0, + rotate_limit=0, + interpolation=1, + p=0.5), + dict( + type='RandomBrightnessContrast', + brightness_limit=[0.1, 0.3], + contrast_limit=[0.1, 0.3], + p=0.2), + dict( + type='OneOf', + transforms=[ + dict( + type='RGBShift', + r_shift_limit=10, + g_shift_limit=10, + b_shift_limit=10, + p=1.0), + dict( + type='HueSaturationValue', + hue_shift_limit=20, + sat_shift_limit=30, + val_shift_limit=20, + p=1.0) + ], + p=0.1), + dict(type='JpegCompression', quality_lower=85, quality_upper=95, p=0.2), + dict(type='ChannelShuffle', p=0.1), + dict( + type='OneOf', + transforms=[ + dict(type='Blur', blur_limit=3, p=1.0), + dict(type='MedianBlur', blur_limit=3, p=1.0) + ], + p=0.1), +] +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
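+    # the Albu wrapper below expects 'pascal_voc' (x1, y1, x2, y2) boxes and drops instances that spatial transforms push out of view (filter_lost_elements=True)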
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='Pad', size_divisor=32), + dict( + type='Albu', + transforms=albu_train_transforms, + bbox_params=dict( + type='BboxParams', + format='pascal_voc', + label_fields=['gt_labels'], + min_visibility=0.0, + filter_lost_elements=True), + keymap={ + 'img': 'image', + 'gt_masks': 'masks', + 'gt_bboxes': 'bboxes' + }, + update_pad_shape=False, + skip_img_without_anno=True), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'img_norm_cfg', + 'pad_shape', 'scale_factor')) +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/atss/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/atss/README.md new file mode 100644 index 0000000000000000000000000000000000000000..8aa7746e9c556fdab01e43a5cf45869ce62abc81 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/atss/README.md @@ -0,0 +1,21 @@ +# Bridging the Gap Between Anchor-based and Anchor-free Detection via Adaptive Training Sample Selection + +## Introduction + + + +```latex +@article{zhang2019bridging, + title = {Bridging the Gap Between Anchor-based and Anchor-free Detection via Adaptive Training Sample Selection}, + author = {Zhang, Shifeng and Chi, Cheng and Yao, Yongqiang and Lei, Zhen and Li, Stan Z.}, + journal = {arXiv preprint arXiv:1912.02424}, + year = {2019} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | 3.7 | 19.7 | 39.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/atss/atss_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r50_fpn_1x_coco/atss_r50_fpn_1x_coco_20200209-985f7bd0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r50_fpn_1x_coco/atss_r50_fpn_1x_coco_20200209_102539.log.json) | +| R-101 | pytorch | 1x | 5.6 | 12.3 | 41.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/atss/atss_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r101_fpn_1x_coco/atss_r101_fpn_1x_20200825-dfcadd6f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r101_fpn_1x_coco/atss_r101_fpn_1x_20200825-dfcadd6f.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/atss/atss_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/atss/atss_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5225d2ab672738d4d427eba252e92bd554252476 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/atss/atss_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './atss_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/atss/atss_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/atss/atss_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..42ff4c598f94f221ded7c91ce330e43310beddae --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/atss/atss_r50_fpn_1x_coco.py @@ -0,0 
+1,62 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='ATSS', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='ATSSHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=2.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/atss/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/atss/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..92187e340c3f6a52f3e550f5179aa11eb2ee25dd --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/atss/metafile.yml @@ -0,0 +1,55 @@ +Collections: + - Name: ATSS + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - ATSS + - FPN + - ResNet + Paper: https://arxiv.org/abs/1912.02424 + README: configs/atss/README.md + +Models: + - Name: atss_r50_fpn_1x_coco + In Collection: ATSS + Config: configs/atss/atss_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.7 + inference time (ms/im): + - value: 50.76 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r50_fpn_1x_coco/atss_r50_fpn_1x_coco_20200209-985f7bd0.pth + + - Name: atss_r101_fpn_1x_coco + In Collection: ATSS + Config: configs/atss/atss_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.6 + inference time (ms/im): + - value: 81.3 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/atss/atss_r101_fpn_1x_coco/atss_r101_fpn_1x_20200825-dfcadd6f.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/autoassign/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/autoassign/README.md new file mode 100644 index 0000000000000000000000000000000000000000..4d5f37640367c0956ab682deebb95e55553fb160 --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/autoassign/README.md @@ -0,0 +1,25 @@ +# AutoAssign: Differentiable Label Assignment for Dense Object Detection + +## Introduction + + + +``` +@article{zhu2020autoassign, + title={AutoAssign: Differentiable Label Assignment for Dense Object Detection}, + author={Zhu, Benjin and Wang, Jianfeng and Jiang, Zhengkai and Zong, Fuhang and Liu, Songtao and Li, Zeming and Sun, Jian}, + journal={arXiv preprint arXiv:2007.03496}, + year={2020} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:------:|:------:|:--------:| +| R-50 | caffe | 1x | 4.08 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/autoassign/auto_assign_r50_fpn_1x_coco/auto_assign_r50_fpn_1x_coco_20210413_115540-5e17991f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/autoassign/auto_assign_r50_fpn_1x_coco/auto_assign_r50_fpn_1x_coco_20210413_115540-5e17991f.log.json) | + +**Note**: + +1. We find that performance is unstable under the 1x schedule and may fluctuate by about 0.3 mAP; results between 40.3 and 40.6 mAP are acceptable. The same fluctuation also appears in the original implementation. +2. You can obtain more stable results (~40.6 mAP) with a 13-epoch schedule in which the learning rate is divided by 10 at the 10th and 13th epochs (sketched as a comment in the config below). diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..db548dc3ca4e54f631668f880eb53586bc17579c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py @@ -0,0 +1,85 @@ +# We follow the original implementation which +# adopts the Caffe pre-trained backbone.
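+# Per the README note above, more stable results (~40.6 mAP) can reportedly be
+# obtained with a 13-epoch schedule. A sketch of that variant (an assumption,
+# not shipped in this patch) keeps the warmup settings used below but moves
+# the decay steps:
+#   lr_config = dict(policy='step', warmup='linear', warmup_iters=1000,
+#                    warmup_ratio=1.0 / 1000, step=[10, 13])
+#   total_epochs = 13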
+_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='AutoAssign', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5, + relu_before_extra_convs=True, + init_cfg=dict(type='Caffe2Xavier', layer='Conv2d')), + bbox_head=dict( + type='AutoAssignHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + loss_bbox=dict(type='GIoULoss', loss_weight=5.0)), + train_cfg=None, + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(lr=0.01, paramwise_cfg=dict(norm_decay_mult=0.)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=1.0 / 1000, + step=[8, 11]) +total_epochs = 12 diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/autoassign/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/autoassign/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..88caebf9be783196a067b1242ae30c05d0648892 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/autoassign/metafile.yml @@ -0,0 +1,28 @@ +Collections: + - Name: AutoAssign + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - AutoAssign + - FPN + - ResNet + Paper: https://arxiv.org/abs/2007.03496 + README: configs/autoassign/README.md + +Models: + - Name: autoassign_r50_fpn_8x2_1x_coco + In Collection: AutoAssign + Config: configs/autoassign/autoassign_r50_fpn_8x2_1x_coco.py + Metadata: + Training Memory (GB): 4.08 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/autoassign/auto_assign_r50_fpn_1x_coco/auto_assign_r50_fpn_1x_coco_20210413_115540-5e17991f.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/carafe/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/carafe/README.md new file mode 
100644 index 0000000000000000000000000000000000000000..768cb9840dc0b2605df8dbde8358eeea645f80a9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/carafe/README.md @@ -0,0 +1,32 @@ +# CARAFE: Content-Aware ReAssembly of FEatures + +## Introduction + + + +We provide config files to reproduce the object detection & instance segmentation results in the ICCV 2019 Oral paper for [CARAFE: Content-Aware ReAssembly of FEatures](https://arxiv.org/abs/1905.02188). + +``` +@inproceedings{Wang_2019_ICCV, + title = {CARAFE: Content-Aware ReAssembly of FEatures}, + author = {Wang, Jiaqi and Chen, Kai and Xu, Rui and Liu, Ziwei and Loy, Chen Change and Lin, Dahua}, + booktitle = {The IEEE International Conference on Computer Vision (ICCV)}, + month = {October}, + year = {2019} +} +``` + +## Results and Models + +The results on COCO 2017 val are shown in the table below. + +| Method | Backbone | Style | Lr schd | Test Proposal Num | Inf time (fps) | Box AP | Mask AP | Config | Download | +|:--------------------:|:--------:|:-------:|:-------:|:-----------------:|:--------------:|:------:|:-------:|:------:|:--------:| +| Faster R-CNN w/ CARAFE | R-50-FPN | pytorch | 1x | 1000 | 16.5 | 38.6 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/carafe/faster_rcnn_r50_fpn_carafe_1x_coco/faster_rcnn_r50_fpn_carafe_1x_coco_bbox_mAP-0.386_20200504_175733-385a75b7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/carafe/faster_rcnn_r50_fpn_carafe_1x_coco/faster_rcnn_r50_fpn_carafe_1x_coco_20200504_175733.log.json) | +| - | - | - | - | 2000 | | | | | +| Mask R-CNN w/ CARAFE | R-50-FPN | pytorch | 1x | 1000 | 14.0 | 39.3 | 35.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/carafe/mask_rcnn_r50_fpn_carafe_1x_coco/mask_rcnn_r50_fpn_carafe_1x_coco_bbox_mAP-0.393__segm_mAP-0.358_20200503_135957-8687f195.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/carafe/mask_rcnn_r50_fpn_carafe_1x_coco/mask_rcnn_r50_fpn_carafe_1x_coco_20200503_135957.log.json) | +| - | - | - | - | 2000 | | | | | + +## Implementation + +The CUDA implementation of CARAFE can be found at https://github.com/myownskyW7/CARAFE.
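As a quick sanity check for the CARAFE configs vendored above, the following minimal sketch runs single-image inference through mmdetection 2.x's high-level API. The checkpoint filename is taken from the results table; the local paths and the `demo.jpg` input image are assumptions.

```python
# Minimal inference sketch (assumes an mmdet 2.x environment and that the
# checkpoint from the results table above has been downloaded locally).
from mmdet.apis import inference_detector, init_detector

config_file = 'configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py'
checkpoint_file = ('faster_rcnn_r50_fpn_carafe_1x_coco_'
                   'bbox_mAP-0.386_20200504_175733-385a75b7.pth')

model = init_detector(config_file, checkpoint_file, device='cuda:0')
# For a pure detector, the result is a per-class list of
# [x1, y1, x2, y2, score] arrays.
result = inference_detector(model, 'demo.jpg')
model.show_result('demo.jpg', result, out_file='result.jpg')
```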
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..dedac3f46b4710d16a8bc66f00663e379b2ebdc7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/carafe/faster_rcnn_r50_fpn_carafe_1x_coco.py @@ -0,0 +1,50 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + neck=dict( + type='FPN_CARAFE', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5, + start_level=0, + end_level=-1, + norm_cfg=None, + act_cfg=None, + order=('conv', 'norm', 'act'), + upsample_cfg=dict( + type='carafe', + up_kernel=5, + up_group=1, + encoder_kernel=3, + encoder_dilation=1, + compressed_channels=64))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..668c023981b9d421e5b51a48757c3819d090307f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/carafe/mask_rcnn_r50_fpn_carafe_1x_coco.py @@ -0,0 +1,60 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + neck=dict( + type='FPN_CARAFE', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5, + start_level=0, + end_level=-1, + norm_cfg=None, + act_cfg=None, + order=('conv', 'norm', 'act'), + upsample_cfg=dict( + type='carafe', + up_kernel=5, + up_group=1, + encoder_kernel=3, + encoder_dilation=1, + compressed_channels=64)), + roi_head=dict( + mask_head=dict( + upsample_cfg=dict( + type='carafe', + scale_factor=2, + up_kernel=5, + up_group=1, + encoder_kernel=3, + encoder_dilation=1, + compressed_channels=64)))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + 
flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..0adcc3e20ad6650c366152d13f06be4ac9d5575a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/README.md @@ -0,0 +1,55 @@ +# Cascade R-CNN: High Quality Object Detection and Instance Segmentation + +## Introduction + + + +```latex +@article{Cai_2019, + title={Cascade R-CNN: High Quality Object Detection and Instance Segmentation}, + ISSN={1939-3539}, + url={http://dx.doi.org/10.1109/tpami.2019.2956516}, + DOI={10.1109/tpami.2019.2956516}, + journal={IEEE Transactions on Pattern Analysis and Machine Intelligence}, + publisher={Institute of Electrical and Electronics Engineers (IEEE)}, + author={Cai, Zhaowei and Vasconcelos, Nuno}, + year={2019}, + pages={1–1} +} +``` + +## Results and models + +### Cascade R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: |:------:|:--------:| +| R-50-FPN | caffe | 1x | 4.2 | | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco/cascade_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.404_20200504_174853-b857be87.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco/cascade_rcnn_r50_caffe_fpn_1x_coco_20200504_174853.log.json) | +| R-50-FPN | pytorch | 1x | 4.4 | 16.1 | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco/cascade_rcnn_r50_fpn_1x_coco_20200316-3dc56deb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco/cascade_rcnn_r50_fpn_1x_coco_20200316_214748.log.json) | +| R-50-FPN | pytorch | 20e | - | - | 41.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco/cascade_rcnn_r50_fpn_20e_coco_bbox_mAP-0.41_20200504_175131-e9872a90.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco/cascade_rcnn_r50_fpn_20e_coco_20200504_175131.log.json) | +| R-101-FPN | caffe | 1x | 6.2 | | 42.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco/cascade_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.423_20200504_175649-cab8dbd5.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco/cascade_rcnn_r101_caffe_fpn_1x_coco_20200504_175649.log.json) | +| R-101-FPN | pytorch | 1x | 6.4 | 13.5 | 42.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco/cascade_rcnn_r101_fpn_1x_coco_20200317-0b6a2fbf.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco/cascade_rcnn_r101_fpn_1x_coco_20200317_101744.log.json) | +| R-101-FPN | pytorch | 20e | - | - | 42.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco/cascade_rcnn_r101_fpn_20e_coco_bbox_mAP-0.425_20200504_231812-5057dcc5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco/cascade_rcnn_r101_fpn_20e_coco_20200504_231812.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.6 | 10.9 | 43.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco/cascade_rcnn_x101_32x4d_fpn_1x_coco_20200316-95c2deb6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco/cascade_rcnn_x101_32x4d_fpn_1x_coco_20200316_055608.log.json) | +| X-101-32x4d-FPN | pytorch | 20e | 7.6 | | 43.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco/cascade_rcnn_x101_32x4d_fpn_20e_coco_20200906_134608-9ae0a720.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco/cascade_rcnn_x101_32x4d_fpn_20e_coco_20200906_134608.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.7 | | 44.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco/cascade_rcnn_x101_64x4d_fpn_1x_coco_20200515_075702-43ce6a30.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco/cascade_rcnn_x101_64x4d_fpn_1x_coco_20200515_075702.log.json) | +| X-101-64x4d-FPN | pytorch | 20e | 10.7 | | 44.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco/cascade_rcnn_x101_64x4d_fpn_20e_coco_20200509_224357-051557b1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco/cascade_rcnn_x101_64x4d_fpn_20e_coco_20200509_224357.log.json)| + +### Cascade Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 5.9 | | 41.2 | 36.0 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco/cascade_mask_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.412__segm_mAP-0.36_20200504_174659-5004b251.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco/cascade_mask_rcnn_r50_caffe_fpn_1x_coco_20200504_174659.log.json) | +| R-50-FPN | pytorch | 1x | 6.0 | 11.2 | 41.2 | 35.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco/cascade_mask_rcnn_r50_fpn_1x_coco_20200203-9d4dcb24.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco/cascade_mask_rcnn_r50_fpn_1x_coco_20200203_170449.log.json) | +| R-50-FPN | pytorch | 20e | - | - | 41.9 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco/cascade_mask_rcnn_r50_fpn_20e_coco_bbox_mAP-0.419__segm_mAP-0.365_20200504_174711-4af8e66e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco/cascade_mask_rcnn_r50_fpn_20e_coco_20200504_174711.log.json)| +| R-101-FPN | caffe | 1x | 7.8 | | 43.2 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco/cascade_mask_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.432__segm_mAP-0.376_20200504_174813-5c1e9599.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco/cascade_mask_rcnn_r101_caffe_fpn_1x_coco_20200504_174813.log.json)| +| R-101-FPN | pytorch | 1x | 7.9 | 9.8 | 42.9 | 37.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco/cascade_mask_rcnn_r101_fpn_1x_coco_20200203-befdf6ee.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco/cascade_mask_rcnn_r101_fpn_1x_coco_20200203_092521.log.json) | +| R-101-FPN | pytorch | 20e | - | - | 43.4 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco/cascade_mask_rcnn_r101_fpn_20e_coco_bbox_mAP-0.434__segm_mAP-0.378_20200504_174836-005947da.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco/cascade_mask_rcnn_r101_fpn_20e_coco_20200504_174836.log.json)| +| X-101-32x4d-FPN | pytorch | 1x | 9.2 | 8.6 | 44.3 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco_20200201-0f411b1f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco_20200201_052416.log.json) | +| X-101-32x4d-FPN | pytorch | 20e | 9.2 | - | 45.0 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco_20200528_083917-ed1f4751.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco_20200528_083917.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 12.2 | 6.7 | 45.3 | 39.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco_20200203-9a2db89d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco_20200203_044059.log.json) | +| X-101-64x4d-FPN | pytorch | 20e | 12.2 | | 45.6 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco_20200512_161033-bdb5126a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco_20200512_161033.log.json) | + +**Notes:** + +- The `20e` schedule in Cascade (Mask) R-CNN means the learning rate is decreased at epochs 16 and 19, for a total of 20 epochs (see the schedule sketch below).
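For reference, the `20e` schedule named in the note corresponds to the `_base_/schedules/schedule_20e.py` file introduced earlier in this patch:

```python
# configs/_base_/schedules/schedule_20e.py (added above): the step policy
# divides the learning rate by 10 after epochs 16 and 19, training 20 epochs.
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=0.001,
    step=[16, 19])
runner = dict(type='EpochBasedRunner', max_epochs=20)
```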
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5ee6231034a2fccc42b11b99830f748091551851 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f59c155848d6a40ec31c4de880f7900d9067c6ab --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..45ab7edffd33063022e95c6e2b44e503e69eda2c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py @@ -0,0 +1,6 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e9e09b51a1a1e721e460a69265e409cf2b1c4c67 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,39 @@ +_base_ = ['./cascade_mask_rcnn_r50_fpn_1x_coco.py'] + +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), norm_eval=True, style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor',
keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..49ab539aa4cdf7c396b6f109efe2dc7a6d596a2a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1296dc45dd89da9c0801e1242080c67957cace74 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_20e.py', '../_base_/default_runtime.py' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..06cbbe70dc84f25ba588e80d0061c634e63e94f9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4e352362b17919bb2ebfffb5b442292880cfb27a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7dbef5fa2a3a3d962df78ffb1b0b4357b783fd67 --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..579b1aca49383f9d3874f4797bc1dbb2a1311e7c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1e90f4bb004798265af98489d6ed584a6a09d434 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5c077760dd20dc5e00b3b2a1ca6de89347657231 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b1719c25d59bc6dbe1c0ef71f08160057c21d5bf --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py @@ -0,0 +1,6 @@ +_base_ = './cascade_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..696bcfb939e91c16898c2e039ec9a05d23105d1e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,42 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..87e21fbff82763caf0e14ba641493870a15578b1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6f886e1c407ff9376929a7092f82e5508d2b1ac9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py @@ -0,0 +1,4 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5ac02c10d743d0ce4b9cc4bb5f1e29cbc6aff06a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..486e45ead418d83a80224f241bc2355b82877640 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..78229f0da3f5a1ac1dfc628821327efd5f34668d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,15 @@ +_base_ = './cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..58812dec5a85d86d85b79d7b53ba33bc6327a815 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py @@ -0,0 +1,15 @@ +_base_ = './cascade_rcnn_r50_fpn_20e_coco.py' +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..803b5d0faadb75f464f6f3217354e4ef66337132 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rcnn/metafile.yml @@ -0,0 +1,401 @@ +Collections: + - Name: Cascade R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Cascade R-CNN + - FPN + - RPN + - ResNet + - RoIAlign + Paper: http://dx.doi.org/10.1109/tpami.2019.2956516 + README: configs/cascade_rcnn/README.md + +Models: + - Name: cascade_rcnn_r50_caffe_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 
+ Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_caffe_fpn_1x_coco/cascade_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.404_20200504_174853-b857be87.pth + + - Name: cascade_rcnn_r50_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 62.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco/cascade_rcnn_r50_fpn_1x_coco_20200316-3dc56deb.pth + + - Name: cascade_rcnn_r50_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 62.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco/cascade_rcnn_r50_fpn_20e_coco_bbox_mAP-0.41_20200504_175131-e9872a90.pth + + - Name: cascade_rcnn_r101_caffe_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.2 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_caffe_fpn_1x_coco/cascade_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.423_20200504_175649-cab8dbd5.pth + + - Name: cascade_rcnn_r101_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 74.07 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco/cascade_rcnn_r101_fpn_1x_coco_20200317-0b6a2fbf.pth + + - Name: cascade_rcnn_r101_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 74.07 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_r101_fpn_20e_coco/cascade_rcnn_r101_fpn_20e_coco_bbox_mAP-0.425_20200504_231812-5057dcc5.pth + + - Name: cascade_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 91.74 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_1x_coco/cascade_rcnn_x101_32x4d_fpn_1x_coco_20200316-95c2deb6.pth + + - Name: 
cascade_rcnn_x101_32x4d_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco.py + Metadata: + Training Memory (GB): 7.6 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_32x4d_fpn_20e_coco/cascade_rcnn_x101_32x4d_fpn_20e_coco_20200906_134608-9ae0a720.pth + + - Name: cascade_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.7 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_1x_coco/cascade_rcnn_x101_64x4d_fpn_1x_coco_20200515_075702-43ce6a30.pth + + - Name: cascade_rcnn_x101_64x4d_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco.py + Metadata: + Training Memory (GB): 10.7 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_rcnn_x101_64x4d_fpn_20e_coco/cascade_rcnn_x101_64x4d_fpn_20e_coco_20200509_224357-051557b1.pth + + - Name: cascade_mask_rcnn_r50_caffe_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.9 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_caffe_fpn_1x_coco/cascade_mask_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.412__segm_mAP-0.36_20200504_174659-5004b251.pth + + - Name: cascade_mask_rcnn_r50_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 89.29 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco/cascade_mask_rcnn_r50_fpn_1x_coco_20200203-9d4dcb24.pth + + - Name: cascade_mask_rcnn_r50_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 89.29 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco/cascade_mask_rcnn_r50_fpn_20e_coco_bbox_mAP-0.419__segm_mAP-0.365_20200504_174711-4af8e66e.pth + + - Name: cascade_mask_rcnn_r101_caffe_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + 
Metrics: + box AP: 43.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_caffe_fpn_1x_coco/cascade_mask_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.432__segm_mAP-0.376_20200504_174813-5c1e9599.pth + + - Name: cascade_mask_rcnn_r101_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.9 + inference time (ms/im): + - value: 102.04 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco/cascade_mask_rcnn_r101_fpn_1x_coco_20200203-befdf6ee.pth + + - Name: cascade_mask_rcnn_r101_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco.py + Metadata: + Training Memory (GB): 7.9 + inference time (ms/im): + - value: 102.04 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r101_fpn_20e_coco/cascade_mask_rcnn_r101_fpn_20e_coco_bbox_mAP-0.434__segm_mAP-0.378_20200504_174836-005947da.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 9.2 + inference time (ms/im): + - value: 116.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco_20200201-0f411b1f.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco.py + Metadata: + Training Memory (GB): 9.2 + inference time (ms/im): + - value: 116.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco/cascade_mask_rcnn_x101_32x4d_fpn_20e_coco_20200528_083917-ed1f4751.pth + + - Name: cascade_mask_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 12.2 + inference time (ms/im): + - value: 149.25 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.2 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco/cascade_mask_rcnn_x101_64x4d_fpn_1x_coco_20200203-9a2db89d.pth + + - Name: cascade_mask_rcnn_x101_64x4d_fpn_20e_coco + In Collection: Cascade R-CNN + Config: configs/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco.py + Metadata: + Training Memory (GB): 12.2 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco/cascade_mask_rcnn_x101_64x4d_fpn_20e_coco_20200512_161033-bdb5126a.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rpn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rpn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..aa7782c31db60e20b87b03e15c66b99f44d8adcd --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rpn/README.md @@ -0,0 +1,29 @@ +# Cascade RPN + + + +We provide the code for reproducing experiment results of [Cascade RPN](https://arxiv.org/abs/1909.06720). + +``` +@inproceedings{vu2019cascade, + title={Cascade RPN: Delving into High-Quality Region Proposal Network with Adaptive Convolution}, + author={Vu, Thang and Jang, Hyunjun and Pham, Trung X and Yoo, Chang D}, + booktitle={Conference on Neural Information Processing Systems (NeurIPS)}, + year={2019} +} +``` + +## Benchmark + +### Region proposal performance + +| Method | Backbone | Style | Mem (GB) | Train time (s/iter) | Inf time (fps) | AR 1000 | Download | +|:------:|:--------:|:-----:|:--------:|:-------------------:|:--------------:|:-------:|:--------------------------------------:| +| CRPN | R-50-FPN | caffe | - | - | - | 72.0 | [model](https://drive.google.com/file/d/1qxVdOnCgK-ee7_z0x6mvAir_glMu2Ihi/view?usp=sharing) | + +### Detection performance + +| Method | Proposal | Backbone | Style | Schedule | Mem (GB) | Train time (s/iter) | Inf time (fps) | box AP | Download | +|:-------------:|:-----------:|:--------:|:-------:|:--------:|:--------:|:-------------------:|:--------------:|:------:|:--------------------------------------------:| +| Fast R-CNN | Cascade RPN | R-50-FPN | caffe | 1x | - | - | - | 39.9 | [model](https://drive.google.com/file/d/1NmbnuY5VHi8I9FE8xnp5uNvh2i-t-6_L/view?usp=sharing) | +| Faster R-CNN | Cascade RPN | R-50-FPN | caffe | 1x | - | - | - | 40.4 | [model](https://drive.google.com/file/d/1dS3Q66qXMJpcuuQgDNkLp669E5w1UMuZ/view?usp=sharing) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..29f5d0745b5689178bcbadc3c30b91ecc8cd5140 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rpn/crpn_fast_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,77 @@ +_base_ = '../fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + roi_head=dict( + bbox_head=dict( + bbox_coder=dict(target_stds=[0.04, 0.04, 
0.08, 0.08]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.5), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + assigner=dict( + pos_iou_thr=0.65, neg_iou_thr=0.65, min_pos_iou=0.65), + sampler=dict(num=256))), + test_cfg=dict(rcnn=dict(score_thr=1e-3))) +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=300), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=300), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='ToTensor', keys=['proposals']), + dict( + type='ToDataContainer', + fields=[dict(key='proposals', stack=False)]), + dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + train=dict( + proposal_file=data_root + + 'proposals/crpn_r50_caffe_fpn_1x_train2017.pkl', + pipeline=train_pipeline), + val=dict( + proposal_file=data_root + + 'proposals/crpn_r50_caffe_fpn_1x_val2017.pkl', + pipeline=test_pipeline), + test=dict( + proposal_file=data_root + + 'proposals/crpn_r50_caffe_fpn_1x_val2017.pkl', + pipeline=test_pipeline)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..bad86e6ddf084b5b7e145463c88a8d2d887d6a53 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rpn/crpn_faster_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,92 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py' +rpn_weight = 0.7 +model = dict( + rpn_head=dict( + _delete_=True, + type='CascadeRPNHead', + num_stages=2, + stages=[ + dict( + type='StageCascadeRPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[1.0], + strides=[4, 8, 16, 32, 64]), + adapt_cfg=dict(type='dilation', dilation=3), + bridged_feature=True, + sampling=False, + with_cls=False, + reg_decoded_bbox=True, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=(.0, .0, .0, .0), + target_stds=(0.1, 0.1, 0.5, 0.5)), + loss_bbox=dict( + type='IoULoss', linear=True, + loss_weight=10.0 * rpn_weight)), + dict( + type='StageCascadeRPNHead', + in_channels=256, + feat_channels=256, + adapt_cfg=dict(type='offset'), + bridged_feature=False, + sampling=True, + with_cls=True, + reg_decoded_bbox=True, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=(.0, .0, .0, .0), + target_stds=(0.05, 0.05, 0.1, 0.1)), + loss_cls=dict( + 
type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0 * rpn_weight), + loss_bbox=dict( + type='IoULoss', linear=True, + loss_weight=10.0 * rpn_weight)) + ]), + roi_head=dict( + bbox_head=dict( + bbox_coder=dict(target_stds=[0.04, 0.04, 0.08, 0.08]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.5), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=[ + dict( + assigner=dict( + type='RegionAssigner', center_ratio=0.2, ignore_ratio=0.5), + allowed_border=-1, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False) + ], + rpn_proposal=dict(max_per_img=300, nms=dict(iou_threshold=0.8)), + rcnn=dict( + assigner=dict( + pos_iou_thr=0.65, neg_iou_thr=0.65, min_pos_iou=0.65), + sampler=dict(type='RandomSampler', num=256))), + test_cfg=dict( + rpn=dict(max_per_img=300, nms=dict(iou_threshold=0.8)), + rcnn=dict(score_thr=1e-3))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rpn/crpn_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rpn/crpn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5562e696a8d16514fc2139874799ab2ef1df74a1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cascade_rpn/crpn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,77 @@ +_base_ = '../rpn/rpn_r50_caffe_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='CascadeRPNHead', + num_stages=2, + stages=[ + dict( + type='StageCascadeRPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[1.0], + strides=[4, 8, 16, 32, 64]), + adapt_cfg=dict(type='dilation', dilation=3), + bridged_feature=True, + sampling=False, + with_cls=False, + reg_decoded_bbox=True, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=(.0, .0, .0, .0), + target_stds=(0.1, 0.1, 0.5, 0.5)), + loss_bbox=dict(type='IoULoss', linear=True, loss_weight=10.0)), + dict( + type='StageCascadeRPNHead', + in_channels=256, + feat_channels=256, + adapt_cfg=dict(type='offset'), + bridged_feature=False, + sampling=True, + with_cls=True, + reg_decoded_bbox=True, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=(.0, .0, .0, .0), + target_stds=(0.05, 0.05, 0.1, 0.1)), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', linear=True, loss_weight=10.0)) + ]), + train_cfg=dict(rpn=[ + dict( + assigner=dict( + type='RegionAssigner', center_ratio=0.2, ignore_ratio=0.5), + allowed_border=-1, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.3, + ignore_iof_thr=-1, + iou_calculator=dict(type='BboxOverlaps2D')), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + pos_weight=-1, + debug=False) + ]), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.8), + min_bbox_size=0))) +optimizer_config = dict( 
+ _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_mask_rcnn_cbv2_swin_base_patch4_window7_mstrain_400-1400_adamw_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_mask_rcnn_cbv2_swin_base_patch4_window7_mstrain_400-1400_adamw_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3bbe47d746cc161b2e1fa6320b41a5b3bc452ce8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_mask_rcnn_cbv2_swin_base_patch4_window7_mstrain_400-1400_adamw_3x_coco.py @@ -0,0 +1,58 @@ +_base_ = [ + '../swin/cascade_mask_rcnn_swin_base_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_3x_coco.py' +] + +model = dict( + backbone=dict( + type='CBSwinTransformer', + ), + neck=dict( + type='CBFPN', + ), + test_cfg = dict( + rcnn=dict( + score_thr=0.001, + nms=dict(type='soft_nms'), + ) + ) +) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from HTC +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='Resize', + img_scale=[(1600, 400), (1600, 1400)], + multiscale_mode='range', + keep_ratio=True), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1600, 1400), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +samples_per_gpu=1 +data = dict(samples_per_gpu=samples_per_gpu, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(lr=0.0001*(samples_per_gpu/2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_mask_rcnn_cbv2_swin_small_patch4_window7_mstrain_400-1400_adamw_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_mask_rcnn_cbv2_swin_small_patch4_window7_mstrain_400-1400_adamw_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b3cd95fea49f407aea6c8e8865aab344aa6a0e01 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_mask_rcnn_cbv2_swin_small_patch4_window7_mstrain_400-1400_adamw_3x_coco.py @@ -0,0 +1,58 @@ +_base_ = [ + '../swin/cascade_mask_rcnn_swin_small_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_3x_coco.py' +] + +model = dict( + backbone=dict( + type='CBSwinTransformer', + ), + neck=dict( + type='CBFPN', + ), + test_cfg = dict( + rcnn=dict( + score_thr=0.001, + nms=dict(type='soft_nms'), + ) + ) +) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from HTC +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='Resize', + img_scale=[(1600, 400), (1600, 1400)], + multiscale_mode='range', + keep_ratio=True), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', 
size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1600, 1400), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +samples_per_gpu=1 +data = dict(samples_per_gpu=samples_per_gpu, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(lr=0.0001*(samples_per_gpu/2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..35be1c7a25f672a8e897c04ac25c45d7ecb5baff --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.py @@ -0,0 +1,12 @@ +_base_ = [ + '../swin/cascade_mask_rcnn_swin_tiny_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_3x_coco.py' +] + +model = dict( + backbone=dict( + type='CBSwinTransformer', + ), + neck=dict( + type='CBFPN', + ), +) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_rcnn_cbv2d1_r2_101_mdconv_fpn_20e_fp16_ms400-1400_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_rcnn_cbv2d1_r2_101_mdconv_fpn_20e_fp16_ms400-1400_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..dfd746c8c9194c52e42502b85704f81fe34b8294 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_rcnn_cbv2d1_r2_101_mdconv_fpn_20e_fp16_ms400-1400_coco.py @@ -0,0 +1,66 @@ +_base_ = '../res2net/cascade_rcnn_r2_101_fpn_20e_coco.py' + +model = dict( + backbone=dict( + type='CBRes2Net', + cb_del_stages=1, + cb_inplanes=[64, 256, 512, 1024, 2048], + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True) + ), + neck=dict( + type='CBFPN', + ), + test_cfg = dict( + rcnn=dict( + score_thr=0.001, + nms=dict(type='soft_nms'), + ) + ) +) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from HTC +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=[(1600, 400), (1600, 1400)], multiscale_mode='range', keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1600, 1400), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict(train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + 
test=dict(pipeline=test_pipeline)) + +# do not use mmdet version fp16 +runner = dict(type='EpochBasedRunnerAmp', max_epochs=20) +fp16 = None +optimizer_config = dict( + type="DistOptimizerHook", + update_interval=1, + grad_clip=None, + coalesce=True, + bucket_size_mb=-1, + use_fp16=True, +) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_rcnn_cbv2d1_r2_101_mdconv_fpn_20e_fp16_ms400-1400_giou_4conv1f_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_rcnn_cbv2d1_r2_101_mdconv_fpn_20e_fp16_ms400-1400_giou_4conv1f_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..167d437963c3a6343f93af71a1361766d0672fc8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/cascade_rcnn_cbv2d1_r2_101_mdconv_fpn_20e_fp16_ms400-1400_giou_4conv1f_coco.py @@ -0,0 +1,127 @@ +_base_ = '../res2net/cascade_rcnn_r2_101_fpn_20e_coco.py' + +model = dict( + backbone=dict( + type='CBRes2Net', + cb_del_stages=1, + cb_inplanes=[64, 256, 512, 1024, 2048], + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True) + ), + neck=dict( + type='CBFPN', + ), + roi_head=dict( + bbox_head=[ + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)), + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)), + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)) + ] + ), + test_cfg = dict( + rcnn=dict( + score_thr=0.001, + nms=dict(type='soft_nms'), + ) + ) +) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from HTC +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=[(1600, 400), (1600, 1400)], multiscale_mode='range', keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + 
dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1600, 1400), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict(train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +# do not use mmdet version fp16 +runner = dict(type='EpochBasedRunnerAmp', max_epochs=20) +fp16 = None +optimizer_config = dict( + type="DistOptimizerHook", + update_interval=1, + grad_clip=None, + coalesce=True, + bucket_size_mb=-1, + use_fp16=True, +) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/faster_rcnn_cbv2d1_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/faster_rcnn_cbv2d1_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f1c22dc1eda62c25107fa85856a987d8903ffc8e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/faster_rcnn_cbv2d1_r50_fpn_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + type='CBResNet', + cb_del_stages=1, + cb_inplanes=[64, 256, 512, 1024, 2048], + ), + neck=dict( + type='CBFPN', + ) +) \ No newline at end of file diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/htc_cbv2_swin_base_patch4_window7_mstrain_400-1400_adamw_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/htc_cbv2_swin_base_patch4_window7_mstrain_400-1400_adamw_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..444c10b7ed85490facf23f1a0623f53ac97bf970 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/htc_cbv2_swin_base_patch4_window7_mstrain_400-1400_adamw_20e_coco.py @@ -0,0 +1,112 @@ +_base_ = [ + '../_base_/models/htc_without_semantic_swin_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + type='CBSwinTransformer', + embed_dim=128, + depths=[2, 2, 18, 2], + num_heads=[4, 8, 16, 32], + window_size=7, + ape=False, + drop_path_rate=0.3, + patch_norm=True, + use_checkpoint=False + ), + neck=dict( + type='CBFPN', + in_channels=[128, 256, 512, 1024] + ), + roi_head=dict( + semantic_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[8]), + semantic_head=dict( + type='FusedSemanticHead', + num_ins=5, + fusion_level=1, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=183, + ignore_label=255, + loss_weight=0.2) + ), + test_cfg = dict( + rcnn=dict( + score_thr=0.001, + nms=dict(type='soft_nms'), + ) + ) +) + +data_root = 'data/coco/' + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from HTC +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', with_bbox=True, with_mask=True, with_seg=True), + dict( + type='Resize', + img_scale=[(1600, 400), (1600, 1400)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 8), + 
dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1600, 1400), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +samples_per_gpu=1 +data = dict(samples_per_gpu=samples_per_gpu, + train=dict( + seg_prefix=data_root + 'stuffthingmaps/train2017/', + pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(_delete_=True, type='AdamW', lr=0.0001*(samples_per_gpu/2), betas=(0.9, 0.999), weight_decay=0.05, + paramwise_cfg=dict(custom_keys={'absolute_pos_embed': dict(decay_mult=0.), + 'relative_position_bias_table': dict(decay_mult=0.), + 'norm': dict(decay_mult=0.)})) + +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunnerAmp', max_epochs=20) + +# do not use mmdet version fp16 +fp16 = None +optimizer_config = dict( + type="DistOptimizerHook", + update_interval=1, + grad_clip=None, + coalesce=True, + bucket_size_mb=-1, + use_fp16=True, +) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/htc_cbv2_swin_base_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/htc_cbv2_swin_base_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..51edfd62abf70b1fc31358d447a1d3184242c600 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/htc_cbv2_swin_base_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_20e_coco.py @@ -0,0 +1,65 @@ +_base_ = 'htc_cbv2_swin_base_patch4_window7_mstrain_400-1400_adamw_20e_coco.py' + +model = dict( + roi_head=dict( + bbox_head=[ + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)), + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)), + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + 
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)) + ] + ) +) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/htc_cbv2_swin_large_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/htc_cbv2_swin_large_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6c8f47c0bb41fdfd1374d9fb260e9928a2933e78 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/htc_cbv2_swin_large_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.py @@ -0,0 +1,18 @@ +_base_ = 'htc_cbv2_swin_base_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_20e_coco.py' + +model = dict( + backbone=dict( + embed_dim=192, + depths=[2, 2, 18, 2], + num_heads=[6, 12, 24, 48], + window_size=7, + ape=False, + drop_path_rate=0.2, + patch_norm=True, + use_checkpoint=False + ), + neck=dict(in_channels=[192, 384, 768, 1536]) +) + +lr_config = dict(step=[8, 11]) +runner = dict(type='EpochBasedRunnerAmp', max_epochs=12) \ No newline at end of file diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/htc_cbv2_swin_large_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco_tta.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/htc_cbv2_swin_large_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco_tta.py new file mode 100644 index 0000000000000000000000000000000000000000..e5b929a1e505753560aecda62f85d8f53258e498 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/htc_cbv2_swin_large_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco_tta.py @@ -0,0 +1,24 @@ +_base_ = 'htc_cbv2_swin_large_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.py' + + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=[(1600, 1000), (1600, 1400), (1800, 1200), (1800, 1600)], + flip=True, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) \ No newline at end of file diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..90de33deae198442dd03f400e5101b89d03e6c30 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cbnet/mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.py @@ -0,0 +1,12 @@ +_base_ = [ + '../swin/mask_rcnn_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.py' +] + +model = dict( + backbone=dict( + type='CBSwinTransformer', + ), + neck=dict( + type='CBFPN', + ), +) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/centernet/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/centernet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..be3b53ccd05b659caf2db18470f25d12a888c5b9 --- /dev/null 
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/centernet/README.md @@ -0,0 +1,30 @@ +# CenterNet + +## Introduction + + + +```latex +@article{zhou2019objects, + title={Objects as Points}, + author={Zhou, Xingyi and Wang, Dequan and Kr{\"a}henb{\"u}hl, Philipp}, + journal={arXiv preprint arXiv:1904.07850}, + year={2019} +} +``` + +## Results and models + +| Backbone | DCN | Mem (GB) | Box AP | Flip box AP | Config | Download | +| :-------------: | :--------: |:----------------: | :------: | :------------: | :----: | :----: | +| ResNet-18 | N | 3.45 | 26.0 | 27.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/centernet/centernet_resnet18_140e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_140e_coco/centernet_resnet18_140e_coco_20210519_092334-eafe8ccd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_140e_coco/centernet_resnet18_140e_coco_20210519_092334.log.json) | +| ResNet-18 | Y | 3.47 | 29.5 | 31.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/centernet/centernet_resnet18_dcnv2_140e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_dcnv2_140e_coco/centernet_resnet18_dcnv2_140e_coco_20210520_101209-da388ba2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_dcnv2_140e_coco/centernet_resnet18_dcnv2_140e_coco_20210520_101209.log.json) | + +Note: + +- The Flip box AP setting is single-scale with `flip=True`. +- Due to the complex data augmentation, we find that performance is unstable and may fluctuate by about 0.4 mAP; results in the range 29.4 ~ 29.8 mAP are expected for ResNet-18-DCNv2. +- Compared to the source code, we follow [CenterNet-Better](https://github.com/FateScript/CenterNet-better) and make the following changes: + - Fix the incorrect image mean and variance used in image normalization, to be compatible with the pre-trained backbone. + - Use SGD rather than the Adam optimizer, and add warmup and gradient clipping (see the warmup sketch below). + - Use DistributedDataParallel, as for other models in MMDetection, rather than DataParallel.
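+
+As a minimal illustration (ours, not part of the upstream configs), the warmup-plus-step policy described above behaves as follows, assuming `base_lr=0.02` from the inherited `schedule_1x.py` and mmcv's linear warmup rule:
+
+```python
+# Illustrative sketch only: effective LR under linear warmup + step decay,
+# matching warmup_iters=1000, warmup_ratio=1/1000 and step=[90, 120] below.
+def effective_lr(epoch, it, base_lr=0.02, warmup_iters=1000,
+                 warmup_ratio=1.0 / 1000, steps=(90, 120), gamma=0.1):
+    if epoch == 0 and it < warmup_iters:
+        # mmcv linear warmup: ramp from base_lr * warmup_ratio up to base_lr
+        k = (1 - it / warmup_iters) * (1 - warmup_ratio)
+        return base_lr * (1 - k)
+    # after warmup: decay by gamma at each milestone epoch
+    return base_lr * gamma ** sum(epoch >= s for s in steps)
+
+print(effective_lr(0, 0))      # ~2e-05 (base_lr * warmup_ratio)
+print(effective_lr(0, 1000))   # 0.02  (warmup finished)
+print(effective_lr(100, 0))    # 0.002 (after the step at epoch 90)
+```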
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/centernet/centernet_resnet18_140e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/centernet/centernet_resnet18_140e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..52c86a5eca27086dbc5ee2449aca749c550e852f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/centernet/centernet_resnet18_140e_coco.py @@ -0,0 +1,3 @@ +_base_ = './centernet_resnet18_dcnv2_140e_coco.py' + +model = dict(neck=dict(use_dcn=False)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/centernet/centernet_resnet18_dcnv2_140e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/centernet/centernet_resnet18_dcnv2_140e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..178b80a78cfd29cd6b09dc8887206eb2f1bf742a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/centernet/centernet_resnet18_dcnv2_140e_coco.py @@ -0,0 +1,113 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='CenterNet', + backbone=dict( + type='ResNet', + depth=18, + norm_eval=False, + norm_cfg=dict(type='BN'), + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet18')), + neck=dict( + type='CTResNetNeck', + in_channel=512, + num_deconv_filters=(256, 128, 64), + num_deconv_kernels=(4, 4, 4), + use_dcn=True), + bbox_head=dict( + type='CenterNetHead', + num_classes=80, + in_channel=64, + feat_channel=64, + loss_center_heatmap=dict(type='GaussianFocalLoss', loss_weight=1.0), + loss_wh=dict(type='L1Loss', loss_weight=0.1), + loss_offset=dict(type='L1Loss', loss_weight=1.0)), + train_cfg=None, + test_cfg=dict(topk=100, local_maximum_kernel=3, max_per_img=100)) + +# We fixed the incorrect img_norm_cfg problem in the source code. 
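+# (Editorial note: the values below are the standard ImageNet RGB mean/std
+# that the torchvision-pretrained ResNet-18 backbone expects; the source
+# repo normalized with different statistics.)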
+img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True, color_type='color'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(512, 512), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + mean=[0, 0, 0], + std=[1, 1, 1], + to_rgb=True, + test_pad_mode=None), + dict(type='Resize', img_scale=(512, 512), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict( + type='RandomCenterCropPad', + ratios=None, + border=None, + mean=[0, 0, 0], + std=[1, 1, 1], + to_rgb=True, + test_mode=True, + test_pad_mode=['logical_or', 31], + test_pad_add_pix=1), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'flip_direction', + 'img_norm_cfg', 'border'), + keys=['img']) + ]) +] +data = dict( + samples_per_gpu=16, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +# optimizer +# Following the default settings of modern detectors, we use SGD, which +# performs better than the Adam optimizer used in the source code; with +# Adam and lr=5e-4, the mAP is 29.1. +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) + +# learning policy +# Following the default settings of modern detectors, we added warmup settings.
+lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=1.0 / 1000, + step=[90, 120]) +runner = dict(max_epochs=140) + +# Avoid evaluation and saving weights too frequently +evaluation = dict(interval=5, metric='bbox') +checkpoint_config = dict(interval=5) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/centernet/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/centernet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..9d5bcc392457faddf82dc83e7b5d9e7cea1a1df2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/centernet/metafile.yml @@ -0,0 +1,41 @@ +Collections: + - Name: CenterNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - ResNet + Paper: https://arxiv.org/abs/1904.07850 + README: configs/centernet/README.md + +Models: + - Name: centernet_resnet18_dcnv2_140e_coco + In Collection: CenterNet + Config: configs/centernet/centernet_resnet18_dcnv2_140e_coco.py + Metadata: + Batch Size: 128 + Training Memory (GB): 3.47 + Epochs: 140 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 29.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_dcnv2_140e_coco/centernet_resnet18_dcnv2_140e_coco_20210520_101209-da388ba2.pth + + - Name: centernet_resnet18_140e_coco + In Collection: CenterNet + Config: configs/centernet/centernet_resnet18_140e_coco.py + Metadata: + Batch Size: 128 + Training Memory (GB): 3.45 + Epochs: 140 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 26.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/centernet/centernet_resnet18_140e_coco/centernet_resnet18_140e_coco_20210519_092334-eafe8ccd.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/centripetalnet/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/centripetalnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..543cf065f509ccf98fae35537e8b79f689cb2655 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/centripetalnet/README.md @@ -0,0 +1,26 @@ +# CentripetalNet + +## Introduction + + + +```latex +@InProceedings{Dong_2020_CVPR, +author = {Dong, Zhiwei and Li, Guoxuan and Liao, Yue and Wang, Fei and Ren, Pengju and Qian, Chen}, +title = {CentripetalNet: Pursuing High-Quality Keypoint Pairs for Object Detection}, +booktitle = {Proceedings of the IEEE/CVF Conference on Computer Vision and Pattern Recognition (CVPR)}, +month = {June}, +year = {2020} +} +``` + +## Results and models + +| Backbone | Batch Size | Step/Total Epochs | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :--------: |:----------------: | :------: | :------------: | :----: | :------: | :--------: | +| HourglassNet-104 | [16 x 6](./centripetalnet_hourglass104_mstest_16x6_210e_coco.py) | 190/210 | 16.7 | 3.7 | 44.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804-3ccc61e5.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804.log.json) | + +Note: + +- TTA setting is single-scale and `flip=True`. +- The model we released is the best checkpoint rather than the latest checkpoint (box AP 44.8 vs 44.6 in our experiment). diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e9c5defd1cda850f9702c05a86e0671880ef5e38 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CentripetalHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=0, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1), + loss_guiding_shift=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=0.05), + loss_centripetal_shift=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=1)), + # training and testing settings + train_cfg=None, + test_cfg=dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms=dict(type='soft_nms', iou_threshold=0.5, method='gaussian'))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=6, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer 
+optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[190]) +runner = dict(type='EpochBasedRunner', max_epochs=210) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/centripetalnet/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/centripetalnet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..be3503965404b5f828851f59d44521abac6c4105 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/centripetalnet/metafile.yml @@ -0,0 +1,34 @@ +Collections: + - Name: CentripetalNet + Metadata: + Training Data: COCO + Training Techniques: + - Adam + Training Resources: 16x V100 GPUs + Architecture: + - Corner Pooling + - Stacked Hourglass Network + Paper: https://arxiv.org/abs/2003.09119 + README: configs/centripetalnet/README.md + +Models: + - Name: centripetalnet_hourglass104_mstest_16x6_210e_coco + In Collection: CentripetalNet + Config: configs/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco.py + Metadata: + Batch Size: 96 + Training Memory (GB): 16.7 + inference time (ms/im): + - value: 270.27 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 210 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/centripetalnet/centripetalnet_hourglass104_mstest_16x6_210e_coco/centripetalnet_hourglass104_mstest_16x6_210e_coco_20200915_204804-3ccc61e5.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cityscapes/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/cityscapes/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b790771e999d0ed6cca83e9d16a36ccc6826f77d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cityscapes/README.md @@ -0,0 +1,33 @@ +# Cityscapes Dataset + + + +``` +@inproceedings{Cordts2016Cityscapes, + title={The Cityscapes Dataset for Semantic Urban Scene Understanding}, + author={Cordts, Marius and Omran, Mohamed and Ramos, Sebastian and Rehfeld, Timo and Enzweiler, Markus and Benenson, Rodrigo and Franke, Uwe and Roth, Stefan and Schiele, Bernt}, + booktitle={Proc. of the IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + year={2016} +} +``` + +## Common settings + +- All baselines were trained using 8 GPUs with a total batch size of 8 (1 image per GPU), using the [linear scaling rule](https://arxiv.org/abs/1706.02677) to scale the learning rate (see the sketch after this list). +- All models were trained on `cityscapes_train` and tested on `cityscapes_val`. +- The 1x training schedule indicates 64 epochs, which corresponds to slightly less than the 24k iterations reported in the original schedule from the [Mask R-CNN paper](https://arxiv.org/abs/1703.06870). +- COCO pre-trained weights are used to initialize the models. +- A conversion [script](../../tools/dataset_converters/cityscapes.py) is provided to convert Cityscapes into COCO format. Please refer to [install.md](../../docs/1_exist_data_model.md#prepare-datasets) for details. +- `CityscapesDataset` implements three evaluation methods: `bbox` and `segm` are the standard COCO bbox/mask AP, while `cityscapes` is the official Cityscapes evaluation, whose numbers may be slightly higher than the COCO ones.
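+
+As a minimal sketch (our illustration, not part of the upstream README), the linear scaling rule simply rescales the reference learning rate by the ratio of total batch sizes; MMDetection's reference schedules use lr=0.02 at a total batch size of 16, so a batch size of 8 gives lr=0.01, matching the optimizer settings in the configs below:
+
+```python
+# Hypothetical helper for the linear scaling rule (Goyal et al., 2017).
+def scaled_lr(total_batch_size, base_lr=0.02, base_batch_size=16):
+    # LR scales linearly with the total (cross-GPU) batch size.
+    return base_lr * total_batch_size / base_batch_size
+
+print(scaled_lr(8))  # -> 0.01, as in the Cityscapes configs below
+```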
+ +### Faster R-CNN + +| Backbone | Style | Lr schd | Scale | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :---: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 800-1024 | 5.2 | - | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes_20200502-829424c0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes_20200502_114915.log.json) | + +### Mask R-CNN + +| Backbone | Style | Lr schd | Scale | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------: | :------------: | :----: | :-----: | :------: | :------: | +| R-50-FPN | pytorch | 1x | 800-1024 | 5.3 | - | 40.9 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes/mask_rcnn_r50_fpn_1x_cityscapes_20201211_133733-d2858245.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes/mask_rcnn_r50_fpn_1x_cityscapes_20201211_133733.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py new file mode 100644 index 0000000000000000000000000000000000000000..c6da80ce85fa02a70b884bcc1900ea06d4478a38 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cityscapes/faster_rcnn_r50_fpn_1x_cityscapes.py @@ -0,0 +1,39 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/cityscapes_detection.py', + '../_base_/default_runtime.py' +] +model = dict( + backbone=dict(init_cfg=None), + roi_head=dict( + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=8, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)))) +# optimizer +# lr is set for a batch size of 8 +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + # [7] yields higher performance than [6] + step=[7]) +runner = dict( + type='EpochBasedRunner', max_epochs=8) # actual epoch = 8 * 8 = 64 +log_config = dict(interval=100) +# For better, more stable performance initialize from COCO +load_from = 'https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth' # noqa diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py new file mode 100644 index 0000000000000000000000000000000000000000..679890d8a03cf1c9858ddf114fe4385a9d30c941 --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py @@ -0,0 +1,46 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/cityscapes_instance.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict(init_cfg=None), + roi_head=dict( + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=8, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + mask_head=dict( + type='FCNMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=8, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)))) +# optimizer +# lr is set for a batch size of 8 +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + # [7] yields higher performance than [6] + step=[7]) +runner = dict( + type='EpochBasedRunner', max_epochs=8) # actual epoch = 8 * 8 = 64 +log_config = dict(interval=100) +# For better, more stable performance initialize from COCO +load_from = 'https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth' # noqa diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/common/mstrain-poly_3x_coco_instance.py b/detection_cbnet/docker-build-context/cbnetv2/configs/common/mstrain-poly_3x_coco_instance.py new file mode 100644 index 0000000000000000000000000000000000000000..c22ed9457197be61ec76117568f2351575573d43 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/common/mstrain-poly_3x_coco_instance.py @@ -0,0 +1,80 @@ +_base_ = '../_base_/default_runtime.py' +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + 
type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric=['bbox', 'segm']) + +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) + +# learning policy +# Experiments show that using step=[9, 11] has higher performance +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[9, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/common/mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/common/mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..80ec8b8dbf0f76a99395bf615b6f2a60cafdd7e5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/common/mstrain_3x_coco.py @@ -0,0 +1,76 @@ +_base_ = '../_base_/default_runtime.py' +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') + +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) + +# learning policy +# Experiments show that using step=[9, 11] has higher performance +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[9, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/common/mstrain_3x_coco_instance.py b/detection_cbnet/docker-build-context/cbnetv2/configs/common/mstrain_3x_coco_instance.py new file mode 100644 index 
0000000000000000000000000000000000000000..50f39bef3fe3c6e0f99259135745e89e000745ea --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/common/mstrain_3x_coco_instance.py @@ -0,0 +1,76 @@ +_base_ = '../_base_/default_runtime.py' +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric=['bbox', 'segm']) + +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) + +# learning policy +# Experiments show that using step=[9, 11] has higher performance +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[9, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..a060196a21c6b0920cb45c79af913e37643fa980 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/README.md @@ -0,0 +1,33 @@ +# CornerNet + +## Introduction + + + +```latex +@inproceedings{law2018cornernet, + title={Cornernet: Detecting objects as paired keypoints}, + author={Law, Hei and Deng, Jia}, + booktitle={15th European Conference on Computer Vision, ECCV 2018}, + pages={765--781}, + year={2018}, + organization={Springer Verlag} +} +``` + +## Results and models + +| Backbone | Batch Size | Step/Total Epochs | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :--------: |:----------------: | :------: | :------------: | :----: | :------: | :--------: | +| HourglassNet-104 | [10 x 5](./cornernet_hourglass104_mstest_10x5_210e_coco.py) | 180/210 | 13.9 | 4.2 | 41.2 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco/cornernet_hourglass104_mstest_10x5_210e_coco_20200824_185720-5fefbf1c.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco/cornernet_hourglass104_mstest_10x5_210e_coco_20200824_185720.log.json) |
+| HourglassNet-104 | [8 x 6](./cornernet_hourglass104_mstest_8x6_210e_coco.py) | 180/210 | 15.9 | 4.2 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco/cornernet_hourglass104_mstest_8x6_210e_coco_20200825_150618-79b44c30.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco/cornernet_hourglass104_mstest_8x6_210e_coco_20200825_150618.log.json) |
+| HourglassNet-104 | [32 x 3](./cornernet_hourglass104_mstest_32x3_210e_coco.py) | 180/210 | 9.5 | 3.9 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco/cornernet_hourglass104_mstest_32x3_210e_coco_20200819_203110-1efaea91.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco/cornernet_hourglass104_mstest_32x3_210e_coco_20200819_203110.log.json) |
+
+Note:
+
+- The TTA setting is single-scale with `flip=True`.
+- Experiments with `images_per_gpu=6` were conducted on Tesla V100-SXM2-32GB GPUs, while experiments with `images_per_gpu=3` were conducted on GeForce GTX 1080 Ti GPUs.
+- Here are the descriptions of each experiment setting:
+  - 10 x 5: 10 GPUs with 5 images per GPU. This is the same setting as reported in the original paper.
+  - 8 x 6: 8 GPUs with 6 images per GPU. The total batch size is similar to that of the paper and needs only 1 node to train.
+  - 32 x 3: 32 GPUs with 3 images per GPU. This is the default setting for 1080 Ti GPUs and needs 4 nodes to train.
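These three settings differ only in the data loader's batch layout; the model, optimizer, and schedule are shared. As a sketch of how one setting can be derived from another through the `_base_` inheritance used throughout these config files (the override file below is a hypothetical illustration, not a file shipped in this repository):

```python
# Hypothetical override config (illustration only): reuse the 8 x 6
# CornerNet recipe but load 3 images per GPU, e.g. for 1080 Ti cards.
_base_ = './cornernet_hourglass104_mstest_8x6_210e_coco.py'

# With 32 GPUs this matches the "32 x 3" setting in the table above
# (total batch size 32 * 3 = 96); only the data loader changes.
data = dict(samples_per_gpu=3, workers_per_gpu=3)
```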
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..89f387641207512ae1b1c91ca56965004e5eb868 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CornerHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=1, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_embedding=dict( + type='AssociativeEmbeddingLoss', + pull_weight=0.10, + push_weight=0.10), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1)), + # training and testing settings + train_cfg=None, + test_cfg=dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms=dict(type='soft_nms', iou_threshold=0.5, method='gaussian'))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=5, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[180]) +runner = dict(type='EpochBasedRunner', max_epochs=210) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..873d59844f4b487a32186b0c6fd5ffea6459b373 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CornerHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=1, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_embedding=dict( + type='AssociativeEmbeddingLoss', + pull_weight=0.10, + push_weight=0.10), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1)), + # training and testing settings + train_cfg=None, + test_cfg=dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms=dict(type='soft_nms', iou_threshold=0.5, method='gaussian'))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=3, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[180]) +runner = dict(type='EpochBasedRunner', max_epochs=210) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..ef749ccc8ddafd84da852c56821d7624a0111eb2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/default_runtime.py', '../_base_/datasets/coco_detection.py' +] + +# model settings +model = dict( + type='CornerNet', + backbone=dict( + type='HourglassNet', + downsample_times=5, + num_stacks=2, + stage_channels=[256, 256, 384, 384, 384, 512], + stage_blocks=[2, 2, 2, 2, 2, 4], + norm_cfg=dict(type='BN', requires_grad=True)), + neck=None, + bbox_head=dict( + type='CornerHead', + num_classes=80, + in_channels=256, + num_feat_levels=2, + corner_emb_channels=1, + loss_heatmap=dict( + type='GaussianFocalLoss', alpha=2.0, gamma=4.0, loss_weight=1), + loss_embedding=dict( + type='AssociativeEmbeddingLoss', + pull_weight=0.10, + push_weight=0.10), + loss_offset=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1)), + # training and testing settings + train_cfg=None, + test_cfg=dict( + corner_topk=100, + local_maximum_kernel=3, + distance_threshold=0.5, + score_thr=0.05, + max_per_img=100, + nms=dict(type='soft_nms', iou_threshold=0.5, method='gaussian'))) +# data settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3), + test_mode=False, + test_pad_mode=None, + **img_norm_cfg), + dict(type='Resize', img_scale=(511, 511), keep_ratio=False), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + flip=True, + transforms=[ + dict(type='Resize'), + dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + test_mode=True, + test_pad_mode=['logical_or', 127], + **img_norm_cfg), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict( + type='Collect', + keys=['img'], + meta_keys=('filename', 'ori_shape', 'img_shape', 'pad_shape', + 'scale_factor', 'flip', 'img_norm_cfg', 'border')), + ]) +] +data = dict( + samples_per_gpu=6, + workers_per_gpu=3, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='Adam', lr=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[180]) +runner = dict(type='EpochBasedRunner', max_epochs=210) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..d2b23960d38572a40e89452783d1ec6010974eb3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/cornernet/metafile.yml @@ -0,0 +1,78 @@ +Collections: + - Name: CornerNet + Metadata: + 
Training Data: COCO + Training Techniques: + - Adam + Training Resources: 8x V100 GPUs + Architecture: + - Corner Pooling + - Stacked Hourglass Network + Paper: https://arxiv.org/abs/1808.01244 + README: configs/cornernet/README.md + +Models: + - Name: cornernet_hourglass104_mstest_10x5_210e_coco + In Collection: CornerNet + Config: configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py + Metadata: + Training Resources: 10x V100 GPUs + Batch Size: 50 + Training Memory (GB): 13.9 + inference time (ms/im): + - value: 238.1 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 210 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco/cornernet_hourglass104_mstest_10x5_210e_coco_20200824_185720-5fefbf1c.pth + + - Name: cornernet_hourglass104_mstest_8x6_210e_coco + In Collection: CornerNet + Config: configs/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco.py + Metadata: + Batch Size: 48 + Training Memory (GB): 15.9 + inference time (ms/im): + - value: 238.1 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 210 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_8x6_210e_coco/cornernet_hourglass104_mstest_8x6_210e_coco_20200825_150618-79b44c30.pth + + - Name: cornernet_hourglass104_mstest_32x3_210e_coco + In Collection: CornerNet + Config: configs/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco.py + Metadata: + Training Resources: 32x V100 GPUs + Batch Size: 96 + Training Memory (GB): 9.5 + inference time (ms/im): + - value: 256.41 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 210 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/cornernet/cornernet_hourglass104_mstest_32x3_210e_coco/cornernet_hourglass104_mstest_32x3_210e_coco_20200819_203110-1efaea91.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..044ff2953199ece753115c2cbf005c4377015584 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/README.md @@ -0,0 +1,52 @@ +# Deformable Convolutional Networks + +## Introduction + + + +```none +@inproceedings{dai2017deformable, + title={Deformable Convolutional Networks}, + author={Dai, Jifeng and Qi, Haozhi and Xiong, Yuwen and Li, Yi and Zhang, Guodong and Hu, Han and Wei, Yichen}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + year={2017} +} +``` + + + +``` +@article{zhu2018deformable, + title={Deformable ConvNets v2: More Deformable, Better Results}, + author={Zhu, Xizhou and Hu, Han and Lin, Stephen and Dai, Jifeng}, + journal={arXiv preprint arXiv:1811.11168}, + year={2018} +} +``` + +## Results and Models + +| Backbone | Model | Style | Conv | Pool | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:----------------:|:------------:|:-------:|:-------------:|:------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | Faster | pytorch | dconv(c3-c5) | - | 1x | 4.0 | 17.8 | 
41.3 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-d68aed1e.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130_212941.log.json) |
+| R-50-FPN | Faster | pytorch | mdconv(c3-c5) | - | 1x | 4.1 | 17.6 | 41.4 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200130-d099253b.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200130_222144.log.json) |
+| *R-50-FPN (dg=4) | Faster | pytorch | mdconv(c3-c5) | - | 1x | 4.2 | 17.4 | 41.5 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco_20200130-01262257.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco_20200130_222058.log.json) |
+| R-50-FPN | Faster | pytorch | - | dpool | 1x | 5.0 | 17.2 | 38.9 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dpool_1x_coco/faster_rcnn_r50_fpn_dpool_1x_coco_20200307-90d3c01d.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dpool_1x_coco/faster_rcnn_r50_fpn_dpool_1x_coco_20200307_203250.log.json) |
+| R-50-FPN | Faster | pytorch | - | mdpool | 1x | 5.8 | 16.6 | 38.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco/faster_rcnn_r50_fpn_mdpool_1x_coco_20200307-c0df27ff.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco/faster_rcnn_r50_fpn_mdpool_1x_coco_20200307_203304.log.json) |
+| R-101-FPN | Faster | pytorch | dconv(c3-c5) | - | 1x | 6.0 | 12.5 | 42.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203-1377f13d.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203_230019.log.json) |
+| X-101-32x4d-FPN | Faster | pytorch | dconv(c3-c5) | - | 1x | 7.3 | 10.0 | 44.5 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco_20200203-4f85c69c.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco_20200203_001325.log.json) |
+| R-50-FPN | Mask | pytorch | dconv(c3-c5) | - | 1x | 4.5 | 15.4 | 41.8 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200203-4d9ad43b.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200203_061339.log.json) |
+| R-50-FPN | Mask | pytorch | mdconv(c3-c5) | - | 1x | 4.5 | 15.1 | 41.5 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200203-ad97591f.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200203_063443.log.json) |
+| R-101-FPN | Mask | pytorch | dconv(c3-c5) | - | 1x | 6.5 | 11.7 | 43.5 | 38.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200216-a71f5bce.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200216_191601.log.json) |
+| R-50-FPN | Cascade | pytorch | dconv(c3-c5) | - | 1x | 4.5 | 14.6 | 43.8 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-2f1fca44.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130_220843.log.json) |
+| R-101-FPN | Cascade | pytorch | dconv(c3-c5) | - | 1x | 6.4 | 11.0 | 45.0 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203-3b2f0594.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203_224829.log.json) |
+| R-50-FPN | Cascade Mask | pytorch | dconv(c3-c5) | - | 1x | 6.0 | 10.0 | 44.4 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200202-42e767a2.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200202_010309.log.json) |
+| R-101-FPN | Cascade Mask | pytorch | dconv(c3-c5) | - | 1x | 8.0 | 8.6 | 45.8 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200204-df0c5f10.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200204_134006.log.json) |
+| X-101-32x4d-FPN | Cascade Mask | pytorch | dconv(c3-c5) | - | 1x | 9.2 | | 47.3 | 41.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco-e75f90c8.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco-20200606_183737.log.json) |
+
+**Notes:**
+
+- `dconv` and `mdconv` denote deformable convolution and modulated deformable convolution, respectively; `c3-c5` means adding dconv in ResNet stages 3 to 5. Likewise, `dpool` and `mdpool` denote deformable and modulated deformable RoI pooling.
+- The dcn ops are modified from https://github.com/chengdazhi/Deformable-Convolution-V2-PyTorch, which should be more memory efficient and slightly faster.
+- (*) For R-50-FPN (dg=4), dg is short for deformable_group. This model was trained and tested on an Amazon EC2 p3dn.24xlarge instance.
+- **The memory and train/inference time figures are outdated.**
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..081b998f6f54d3d805dbab38b26750a378c0d93f
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py
@@ -0,0 +1,5 @@
+_base_ = '../cascade_rcnn/cascade_mask_rcnn_r101_fpn_1x_coco.py'
+model = dict(
+    backbone=dict(
+        dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False),
+        stage_with_dcn=(False, True, True, True)))
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..3b3683af235f46df36d8793e52c2b9c52e0defeb
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py
@@ -0,0 +1,5 @@
+_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py'
+model = dict(
+    backbone=dict(
+        dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False),
+        stage_with_dcn=(False, True, True, True)))
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..daaa4729c8280107b19107607ec399230713cf93
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py
@@ -0,0 +1,5 @@
+_base_ = 
'../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a01df33c94e1f8b5f51a51a780b30a77ce99b2c0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..aa664bd61c78873a74af229caa8f62feca8daa5e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f5fee7e13cdfd531bf24d7c261e843855124f762 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8787088f27a09a3f8fd0d05a1144c0abdedd0a21 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1b695f0e19049dc91b7656d7684df151896b7727 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + _delete_=True, + type='DeformRoIPoolPack', + output_size=7, + 
output_channels=256), + out_channels=256, + featmap_strides=[4, 8, 16, 32]))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d1bcf3c102fb660641eda2a1398db3df520caa3a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d0ab89c261f970e16a9c4407620bd16a0df9e9e9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=4, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ad7b0346a63dfa3c3ca246b624155fc4fd331a3f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + _delete_=True, + type='ModulatedDeformRoIPoolPack', + output_size=7, + output_channels=256), + out_channels=256, + featmap_strides=[4, 8, 16, 32]))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e3bea1950ac8b1227b97d9eacafb208c4724f8eb --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cb340022ea27f563b8c4a570cf89b5f09e6434cd --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ababe58dc3fdfbbc6c366f48271db31bf6e2e9e2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5ca2a67cde62bff078b7c4c0d696a585265e4c3a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..d5b53f42d1382db511af1ae482fb6a298df004ab --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dcn/metafile.yml @@ -0,0 +1,330 @@ +Collections: + - Name: Deformable Convolutional Networks + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Deformable Convolution + Paper: https://arxiv.org/abs/1811.11168 + README: configs/dcn/README.md + +Models: + - Name: faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 56.18 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-d68aed1e.pth + + - Name: faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.1 + inference time (ms/im): + - value: 56.82 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200130-d099253b.pth + + - Name: 
faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + inference time (ms/im): + - value: 57.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco/faster_rcnn_r50_fpn_mdconv_c3-c5_group4_1x_coco_20200130-01262257.pth + + - Name: faster_rcnn_r50_fpn_dpool_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r50_fpn_dpool_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + inference time (ms/im): + - value: 58.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_dpool_1x_coco/faster_rcnn_r50_fpn_dpool_1x_coco_20200307-90d3c01d.pth + + - Name: faster_rcnn_r50_fpn_mdpool_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco.py + Metadata: + Training Memory (GB): 5.8 + inference time (ms/im): + - value: 60.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r50_fpn_mdpool_1x_coco/faster_rcnn_r50_fpn_mdpool_1x_coco_20200307-c0df27ff.pth + + - Name: faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 80 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco/faster_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203-1377f13d.pth + + - Name: faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.3 + inference time (ms/im): + - value: 100 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/faster_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco_20200203-4f85c69c.pth + + - Name: mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + inference time (ms/im): + - value: 64.94 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.4 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200203-4d9ad43b.pth + + - Name: mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + inference time (ms/im): + - value: 66.23 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_mdconv_c3-c5_1x_coco_20200203-ad97591f.pth + + - Name: mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 6.5 + inference time (ms/im): + - value: 85.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200216-a71f5bce.pth + + - Name: cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + inference time (ms/im): + - value: 68.49 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200130-2f1fca44.pth + + - Name: cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200203-3b2f0594.pth + + - Name: cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: configs/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 100 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r50_fpn_dconv_c3-c5_1x_coco_20200202-42e767a2.pth + + - Name: cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco + In Collection: Deformable Convolutional Networks + Config: 
configs/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco.py
+    Metadata:
+      Training Memory (GB): 8.0
+      inference time (ms/im):
+        - value: 116.28
+          hardware: V100
+          backend: PyTorch
+          batch size: 1
+          mode: FP32
+          resolution: (800, 1333)
+      Epochs: 12
+    Results:
+      - Task: Object Detection
+        Dataset: COCO
+        Metrics:
+          box AP: 45.8
+      - Task: Instance Segmentation
+        Dataset: COCO
+        Metrics:
+          mask AP: 39.7
+    Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_r101_fpn_dconv_c3-c5_1x_coco_20200204-df0c5f10.pth
+
+  - Name: cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco
+    In Collection: Deformable Convolutional Networks
+    Config: configs/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py
+    Metadata:
+      Training Memory (GB): 9.2
+      Epochs: 12
+    Results:
+      - Task: Object Detection
+        Dataset: COCO
+        Metrics:
+          box AP: 47.3
+      - Task: Instance Segmentation
+        Dataset: COCO
+        Metrics:
+          mask AP: 41.1
+    Weights: https://download.openmmlab.com/mmdetection/v2.0/dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco-e75f90c8.pth
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/deepfashion/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/deepfashion/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..68e57e4fa5b29b66a8381a87949762d89d791a2e
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/deepfashion/README.md
@@ -0,0 +1,56 @@
+# DeepFashion
+
+
+
+[MMFashion](https://github.com/open-mmlab/mmfashion) develops a "fashion parsing and segmentation" module
+based on the dataset
+[DeepFashion-Inshop](https://drive.google.com/drive/folders/0B7EVK8r0v71pVDZFQXRsMDZCX1E?usp=sharing).
+Its annotations follow the COCO style.
+To use it, you first need to download the data. Note that we only use "img_highres" in this task.
+The file tree should look like this:
+
+```sh
+mmdetection
+├── mmdet
+├── tools
+├── configs
+├── data
+│   ├── DeepFashion
+│   │   ├── In-shop
+│   │   ├── Anno
+│   │   │   ├── segmentation
+│   │   │   │   ├── DeepFashion_segmentation_train.json
+│   │   │   │   ├── DeepFashion_segmentation_query.json
+│   │   │   │   ├── DeepFashion_segmentation_gallery.json
+│   │   │   ├── list_bbox_inshop.txt
+│   │   │   ├── list_description_inshop.json
+│   │   │   ├── list_item_inshop.txt
+│   │   │   └── list_landmarks_inshop.txt
+│   │   ├── Eval
+│   │   │   └── list_eval_partition.txt
+│   │   ├── Img
+│   │   │   ├── img
+│   │   │   │   └── XXX.jpg
+│   │   │   └── img_highres
+│   │   │       └── XXX.jpg
+
+```
+
+After that, you can train the Mask R-CNN R-50 model on the DeepFashion-In-shop dataset by launching training with the `mask_rcnn_r50_fpn_1x.py` config
+or by creating your own config file.
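Since the annotations follow the COCO style, pointing a dataset config at the tree above is mostly bookkeeping. The following is a minimal sketch assuming a plain `CocoDataset` loader; the actual `_base_/datasets/deepfashion.py` referenced by the config below may differ in dataset type and pipeline details:

```python
# Minimal sketch with illustrative values: a COCO-style dataset config
# wired to the DeepFashion-In-shop paths shown in the tree above.
dataset_type = 'CocoDataset'
data_root = 'data/DeepFashion/In-shop/'
data = dict(
    train=dict(
        type=dataset_type,
        ann_file=data_root + 'Anno/segmentation/DeepFashion_segmentation_train.json',
        img_prefix=data_root + 'Img/'),
    val=dict(
        type=dataset_type,
        ann_file=data_root + 'Anno/segmentation/DeepFashion_segmentation_query.json',
        img_prefix=data_root + 'Img/'),
    test=dict(
        type=dataset_type,
        ann_file=data_root + 'Anno/segmentation/DeepFashion_segmentation_gallery.json',
        img_prefix=data_root + 'Img/'))
```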
+
+```
+@inproceedings{liuLQWTcvpr16DeepFashion,
+  author = {Liu, Ziwei and Luo, Ping and Qiu, Shi and Wang, Xiaogang and Tang, Xiaoou},
+  title = {DeepFashion: Powering Robust Clothes Recognition and Retrieval with Rich Annotations},
+  booktitle = {Proceedings of IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
+  month = {June},
+  year = {2016}
+}
+```
+
+## Model Zoo
+
+| Backbone | Model type | Dataset | bbox detection Average Precision | segmentation Average Precision | Config | Download (Google) |
+| :---------: | :----------: | :-----------------: | :--------------------------------: | :----------------------------: | :---------: | :-------------------------: |
+| ResNet50 | Mask RCNN | DeepFashion-In-shop | 0.599 | 0.584 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py) | [model](https://drive.google.com/open?id=1q6zF7J6Gb-FFgM87oIORIt6uBozaXp5r) &#124; [log](https://drive.google.com/file/d/1qTK4Dr4FFLa9fkdI6UVko408gkrfTRLP/view?usp=sharing) |
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py b/detection_cbnet/docker-build-context/cbnetv2/configs/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py
new file mode 100644
index 0000000000000000000000000000000000000000..c4e86387e3ce4aad3dd68d7613160fced4d3785b
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/deepfashion/mask_rcnn_r50_fpn_15e_deepfashion.py
@@ -0,0 +1,10 @@
+_base_ = [
+    '../_base_/models/mask_rcnn_r50_fpn.py',
+    '../_base_/datasets/deepfashion.py', '../_base_/schedules/schedule_1x.py',
+    '../_base_/default_runtime.py'
+]
+model = dict(
+    roi_head=dict(
+        bbox_head=dict(num_classes=15), mask_head=dict(num_classes=15)))
+# runtime settings
+runner = dict(type='EpochBasedRunner', max_epochs=15)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..fe68002b49ac19ce82ea67db31df9a5fe50e4527
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/README.md
@@ -0,0 +1,31 @@
+# Deformable DETR
+
+## Introduction
+
+
+
+We provide the config files for Deformable DETR: [Deformable DETR: Deformable Transformers for End-to-End Object Detection](https://arxiv.org/abs/2010.04159).
+
+```
+@inproceedings{
+zhu2021deformable,
+title={Deformable DETR: Deformable Transformers for End-to-End Object Detection},
+author={Xizhou Zhu and Weijie Su and Lewei Lu and Bin Li and Xiaogang Wang and Jifeng Dai},
+booktitle={International Conference on Learning Representations},
+year={2021},
+url={https://openreview.net/forum?id=gZ9hCDWe6ke}
+}
+```
+
+## Results and Models
+
+| Backbone | Model | Lr schd | box AP | Config | Download |
+|:------:|:--------:|:--------------:|:------:|:------:|:--------:|
+| R-50 | Deformable DETR | 50e | 44.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_r50_16x2_50e_coco/deformable_detr_r50_16x2_50e_coco_20210419_220030-a12b9512.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_r50_16x2_50e_coco/deformable_detr_r50_16x2_50e_coco_20210419_220030-a12b9512.log.json) |
+| R-50 | + iterative bounding box refinement | 50e | 46.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco/deformable_detr_refine_r50_16x2_50e_coco_20210419_220503-5f5dff21.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco/deformable_detr_refine_r50_16x2_50e_coco_20210419_220503-5f5dff21.log.json) |
+| R-50 | ++ two-stage Deformable DETR | 50e | 46.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco/deformable_detr_twostage_refine_r50_16x2_50e_coco_20210419_220613-9d28ab72.pth) &#124; [log](https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco/deformable_detr_twostage_refine_r50_16x2_50e_coco_20210419_220613-9d28ab72.log.json) |
+
+## NOTE
+
+1. All models are trained with a batch size of 32.
+2. The performance is unstable. `Deformable DETR` and `iterative bounding box refinement` may fluctuate by about 0.3 mAP, and `two-stage Deformable DETR` may fluctuate by about 0.2 mAP.
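For reference, the `16x2` infix in these config names encodes the training batch layout; a small sketch of the arithmetic (the decoding follows the usual mmdetection naming convention and is consistent with note 1 above):

```python
# Decoding "deformable_detr_r50_16x2_50e_coco" (naming convention assumed
# from common mmdetection practice, consistent with note 1 above).
num_gpus = 16          # first factor: GPUs used for training
samples_per_gpu = 2    # second factor: images per GPU
total_batch_size = num_gpus * samples_per_gpu
assert total_batch_size == 32  # matches "trained with a batch size of 32"
max_epochs = 50                # the "50e" suffix
```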
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0a58d9a241bc45f2ca57f817789894502c010a16 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py @@ -0,0 +1,172 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +model = dict( + type='DeformableDETR', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='ChannelMapper', + in_channels=[512, 1024, 2048], + kernel_size=1, + out_channels=256, + act_cfg=None, + norm_cfg=dict(type='GN', num_groups=32), + num_outs=4), + bbox_head=dict( + type='DeformableDETRHead', + num_query=300, + num_classes=80, + in_channels=2048, + sync_cls_avg_factor=True, + as_two_stage=False, + transformer=dict( + type='DeformableDetrTransformer', + encoder=dict( + type='DetrTransformerEncoder', + num_layers=6, + transformerlayers=dict( + type='BaseTransformerLayer', + attn_cfgs=dict( + type='MultiScaleDeformableAttention', embed_dims=256), + feedforward_channels=1024, + ffn_dropout=0.1, + operation_order=('self_attn', 'norm', 'ffn', 'norm'))), + decoder=dict( + type='DeformableDetrTransformerDecoder', + num_layers=6, + return_intermediate=True, + transformerlayers=dict( + type='DetrTransformerDecoderLayer', + attn_cfgs=[ + dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + dropout=0.1), + dict( + type='MultiScaleDeformableAttention', + embed_dims=256) + ], + feedforward_channels=1024, + ffn_dropout=0.1, + operation_order=('self_attn', 'norm', 'cross_attn', 'norm', + 'ffn', 'norm')))), + positional_encoding=dict( + type='SinePositionalEncoding', + num_feats=128, + normalize=True, + offset=-0.5), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=2.0), + loss_bbox=dict(type='L1Loss', loss_weight=5.0), + loss_iou=dict(type='GIoULoss', loss_weight=2.0)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='HungarianAssigner', + cls_cost=dict(type='FocalLossCost', weight=2.0), + reg_cost=dict(type='BBoxL1Cost', weight=5.0, box_format='xywh'), + iou_cost=dict(type='IoUCost', iou_mode='giou', weight=2.0))), + test_cfg=dict(max_per_img=100)) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +# train_pipeline, NOTE the img_scale and the Pad's size_divisor is different +# from the default setting in mmdet. 
+train_pipeline = [
+    dict(type='LoadImageFromFile'),
+    dict(type='LoadAnnotations', with_bbox=True),
+    dict(type='RandomFlip', flip_ratio=0.5),
+    dict(
+        type='AutoAugment',
+        policies=[
+            [
+                dict(
+                    type='Resize',
+                    img_scale=[(480, 1333), (512, 1333), (544, 1333),
+                               (576, 1333), (608, 1333), (640, 1333),
+                               (672, 1333), (704, 1333), (736, 1333),
+                               (768, 1333), (800, 1333)],
+                    multiscale_mode='value',
+                    keep_ratio=True)
+            ],
+            [
+                dict(
+                    type='Resize',
+                    # The aspect ratio of all images in the train dataset
+                    # is < 7, following the original implementation.
+                    img_scale=[(400, 4200), (500, 4200), (600, 4200)],
+                    multiscale_mode='value',
+                    keep_ratio=True),
+                dict(
+                    type='RandomCrop',
+                    crop_type='absolute_range',
+                    crop_size=(384, 600),
+                    allow_negative_crop=True),
+                dict(
+                    type='Resize',
+                    img_scale=[(480, 1333), (512, 1333), (544, 1333),
+                               (576, 1333), (608, 1333), (640, 1333),
+                               (672, 1333), (704, 1333), (736, 1333),
+                               (768, 1333), (800, 1333)],
+                    multiscale_mode='value',
+                    override=True,
+                    keep_ratio=True)
+            ]
+        ]),
+    dict(type='Normalize', **img_norm_cfg),
+    dict(type='Pad', size_divisor=1),
+    dict(type='DefaultFormatBundle'),
+    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels'])
+]
+# test_pipeline, NOTE the Pad's size_divisor is different from the default
+# setting (size_divisor=32). There is little effect on performance whether
+# we use the default setting or size_divisor=1.
+test_pipeline = [
+    dict(type='LoadImageFromFile'),
+    dict(
+        type='MultiScaleFlipAug',
+        img_scale=(1333, 800),
+        flip=False,
+        transforms=[
+            dict(type='Resize', keep_ratio=True),
+            dict(type='RandomFlip'),
+            dict(type='Normalize', **img_norm_cfg),
+            dict(type='Pad', size_divisor=1),
+            dict(type='ImageToTensor', keys=['img']),
+            dict(type='Collect', keys=['img'])
+        ])
+]
+data = dict(
+    samples_per_gpu=2,
+    workers_per_gpu=2,
+    train=dict(filter_empty_gt=False, pipeline=train_pipeline),
+    val=dict(pipeline=test_pipeline),
+    test=dict(pipeline=test_pipeline))
+# optimizer
+optimizer = dict(
+    type='AdamW',
+    lr=2e-4,
+    weight_decay=0.0001,
+    paramwise_cfg=dict(
+        custom_keys={
+            'backbone': dict(lr_mult=0.1),
+            'sampling_offsets': dict(lr_mult=0.1),
+            'reference_points': dict(lr_mult=0.1)
+        }))
+optimizer_config = dict(grad_clip=dict(max_norm=0.1, norm_type=2))
+# learning policy
+lr_config = dict(policy='step', step=[40])
+runner = dict(type='EpochBasedRunner', max_epochs=50)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..01f13df4886558366625bc4f3a367cb8a5154462
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py
@@ -0,0 +1,2 @@
+_base_ = 'deformable_detr_r50_16x2_50e_coco.py'
+model = dict(bbox_head=dict(with_box_refine=True))
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..2aa840d9e961f62307f05e8dde2d8520edef8cad
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py
@@ -0,0 +1,2 @@
+_base_ = 'deformable_detr_refine_r50_16x2_50e_coco.py'
+model = dict(bbox_head=dict(as_two_stage=True))
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/metafile.yml
new file mode 100644
index 0000000000000000000000000000000000000000..a63ffbeb43ad48667dc49d0f8cca008aef3f328b
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/deformable_detr/metafile.yml
@@ -0,0 +1,51 @@
+Collections:
+  - Name: Deformable DETR
+    Metadata:
+      Training Data: COCO
+      Training Techniques:
+        - AdamW
+        - Multi Scale Train
+        - Gradient Clip
+      Training Resources: 8x V100 GPUs
+      Architecture:
+        - ResNet
+        - Transformer
+    Paper: https://openreview.net/forum?id=gZ9hCDWe6ke
+    README: configs/deformable_detr/README.md
+
+Models:
+  - Name: deformable_detr_r50_16x2_50e_coco
+    In Collection: Deformable DETR
+    Config: configs/deformable_detr/deformable_detr_r50_16x2_50e_coco.py
+    Metadata:
+      Epochs: 50
+    Results:
+      - Task: Object Detection
+        Dataset: COCO
+        Metrics:
+          box AP: 44.5
+    Weights: https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_r50_16x2_50e_coco/deformable_detr_r50_16x2_50e_coco_20210419_220030-a12b9512.pth
+
+  - Name: deformable_detr_refine_r50_16x2_50e_coco
+    In Collection: Deformable DETR
+    Config: configs/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco.py
+    Metadata:
+      Epochs: 50
+    Results:
+      - Task: Object Detection
+        Dataset: COCO
+        Metrics:
+          box AP: 46.1
+    Weights: https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_refine_r50_16x2_50e_coco/deformable_detr_refine_r50_16x2_50e_coco_20210419_220503-5f5dff21.pth
+
+  - Name: deformable_detr_twostage_refine_r50_16x2_50e_coco
+    In Collection: Deformable DETR
+    Config: configs/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco.py
+    Metadata:
+      Epochs: 50
+    Results:
+      - Task: Object Detection
+        Dataset: COCO
+        Metrics:
+          box AP: 46.8
+    Weights: https://download.openmmlab.com/mmdetection/v2.0/deformable_detr/deformable_detr_twostage_refine_r50_16x2_50e_coco/deformable_detr_twostage_refine_r50_16x2_50e_coco_20210419_220613-9d28ab72.pth
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..37c151f358aa89573a1ee38703b98d22b81445f5
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/README.md
@@ -0,0 +1,59 @@
+# DetectoRS
+
+## Introduction
+
+We provide the config files for [DetectoRS: Detecting Objects with Recursive Feature Pyramid and Switchable Atrous Convolution](https://arxiv.org/pdf/2006.02334.pdf).
+
+```BibTeX
+@article{qiao2020detectors,
+  title={DetectoRS: Detecting Objects with Recursive Feature Pyramid and Switchable Atrous Convolution},
+  author={Qiao, Siyuan and Chen, Liang-Chieh and Yuille, Alan},
+  journal={arXiv preprint arXiv:2006.02334},
+  year={2020}
+}
+```
+
+## Dataset
+
+DetectoRS requires the COCO and [COCO-stuff](http://calvin.inf.ed.ac.uk/wp-content/uploads/data/cocostuffdataset/stuffthingmaps_trainval2017.zip) datasets for training. You need to download and extract them under the COCO dataset path.
+The directory structure should look like this:
+
+```none
+mmdetection
+├── mmdet
+├── tools
+├── configs
+├── data
+│   ├── coco
+│   │   ├── annotations
+│   │   ├── train2017
+│   │   ├── val2017
+│   │   ├── test2017
+│   │   ├── stuffthingmaps
+```
+
+## Results and Models
+
+DetectoRS includes two major components:
+
+- Recursive Feature Pyramid (RFP).
+- Switchable Atrous Convolution (SAC).
+
+They can be used independently; combining them yields DetectoRS.
+The results on COCO 2017 val are shown in the table below.
+
+| Method | Detector | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download |
+|:------:|:--------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:|
+| RFP | Cascade + ResNet-50 | 1x | 7.5 | - | 44.8 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_rfp_1x_coco/cascade_rcnn_r50_rfp_1x_coco-8cf51bfd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_rfp_1x_coco/cascade_rcnn_r50_rfp_1x_coco_20200624_104126.log.json) |
+| SAC | Cascade + ResNet-50 | 1x | 5.6 | - | 45.0 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/cascade_rcnn_r50_sac_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_sac_1x_coco/cascade_rcnn_r50_sac_1x_coco-24bfda62.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_sac_1x_coco/cascade_rcnn_r50_sac_1x_coco_20200624_104402.log.json) |
+| DetectoRS | Cascade + ResNet-50 | 1x | 9.9 | - | 47.4 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_cascade_rcnn_r50_1x_coco/detectors_cascade_rcnn_r50_1x_coco-32a10ba0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_cascade_rcnn_r50_1x_coco/detectors_cascade_rcnn_r50_1x_coco_20200706_001203.log.json) |
+| RFP | HTC + ResNet-50 | 1x | 11.2 | - | 46.6 | 40.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/htc_r50_rfp_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_rfp_1x_coco/htc_r50_rfp_1x_coco-8ff87c51.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_rfp_1x_coco/htc_r50_rfp_1x_coco_20200624_103053.log.json) |
+| SAC | HTC + ResNet-50 | 1x | 9.3 | - | 46.4 | 40.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/htc_r50_sac_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_sac_1x_coco/htc_r50_sac_1x_coco-bfa60c54.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_sac_1x_coco/htc_r50_sac_1x_coco_20200624_103111.log.json) |
+| DetectoRS | HTC + ResNet-50 | 1x | 13.6 | - | 49.1 | 42.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/detectors_htc_r50_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r50_1x_coco/detectors_htc_r50_1x_coco-329b1453.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r50_1x_coco/detectors_htc_r50_1x_coco_20200624_103659.log.json) |
+| DetectoRS | HTC + ResNet-101 | 20e | 19.6 | - | 50.5 | 43.9 |
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detectors/detectors_htc_r101_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r101_20e_coco/detectors_htc_r101_20e_coco_20210419_203638-348d533b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r101_20e_coco/detectors_htc_r101_20e_coco_20210419_203638.log.json) | + +*Note*: This is a re-implementation based on MMDetection-V2. +The original implementation is based on MMDetection-V1. diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4430d8a677e48f84552eb23403bc874c56bda506 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py @@ -0,0 +1,28 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/cascade_rcnn_r50_sac_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/cascade_rcnn_r50_sac_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ccd9319b2d1badebf3b891c8e3bdd55a435a4b7c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/cascade_rcnn_r50_sac_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f76040434f1ff07608c83202f779dfacfe91c323 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py @@ -0,0 +1,32 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + 
norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/detectors_htc_r101_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/detectors_htc_r101_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..93d7d2b1adeb3fbdb7bac0107edf4433669e8015 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/detectors_htc_r101_20e_coco.py @@ -0,0 +1,28 @@ +_base_ = '../htc/htc_r101_fpn_20e_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + pretrained='torchvision://resnet101', + style='pytorch'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/detectors_htc_r50_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/detectors_htc_r50_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0d2fc4f77fcca715c1dfb613306d214b636aa0c0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/detectors_htc_r50_1x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True), + pretrained='torchvision://resnet50', + style='pytorch'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/htc_r50_rfp_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/htc_r50_rfp_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..496104e12550a1985f9c9e3748a343f69d7df6d8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/htc_r50_rfp_1x_coco.py @@ -0,0 +1,24 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + output_img=True), + neck=dict( + type='RFP', + rfp_steps=2, + aspp_out_channels=64, + aspp_dilations=(1, 3, 6, 1), + rfp_backbone=dict( + rfp_inplanes=256, + type='DetectoRS_ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + conv_cfg=dict(type='ConvAWS'), + pretrained='torchvision://resnet50', + style='pytorch'))) 
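+The configs above build the DetectoRS variants purely by config composition: `sac`/`stage_with_sac` switch SAC on per backbone stage, and the `RFP` neck wraps a second copy of the backbone. A small sketch of inspecting the merged result with mmcv's `Config` API, assuming mmcv is installed and the script runs from a checkout containing these configs:
+
+```python
+# Sketch: inspect how a DetectoRS config composes SAC and RFP
+# (assumes mmcv is installed and the configs above exist on disk).
+from mmcv import Config
+
+cfg = Config.fromfile('configs/detectors/detectors_htc_r50_1x_coco.py')
+
+# SAC is enabled per backbone stage:
+print(cfg.model.backbone.sac)             # SAC plugin settings
+print(cfg.model.backbone.stage_with_sac)  # (False, True, True, True)
+
+# RFP wraps a second backbone inside the neck:
+print(cfg.model.neck.type)                # 'RFP'
+print(cfg.model.neck.rfp_backbone.depth)  # 50
+```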
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/htc_r50_sac_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/htc_r50_sac_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..72d4db963ffd95851b945911b3db9941426583ab --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/htc_r50_sac_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + type='DetectoRS_ResNet', + conv_cfg=dict(type='ConvAWS'), + sac=dict(type='SAC', use_deform=True), + stage_with_sac=(False, True, True, True))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..bf503375f4b17887f313cc81c0b074684fa6ae36 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/detectors/metafile.yml @@ -0,0 +1,109 @@ +Collections: + - Name: DetectoRS + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - ASPP + - FPN + - RFP + - RPN + - ResNet + - RoIAlign + - SAC + Paper: https://arxiv.org/abs/2006.02334 + README: configs/detectors/README.md + +Models: + - Name: cascade_rcnn_r50_rfp_1x_coco + In Collection: DetectoRS + Config: configs/detectors/cascade_rcnn_r50_rfp_1x_coco.py + Metadata: + Training Memory (GB): 7.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_rfp_1x_coco/cascade_rcnn_r50_rfp_1x_coco-8cf51bfd.pth + + - Name: cascade_rcnn_r50_sac_1x_coco + In Collection: DetectoRS + Config: configs/detectors/cascade_rcnn_r50_sac_1x_coco.py + Metadata: + Training Memory (GB): 5.6 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/cascade_rcnn_r50_sac_1x_coco/cascade_rcnn_r50_sac_1x_coco-24bfda62.pth + + - Name: detectors_cascade_rcnn_r50_1x_coco + In Collection: DetectoRS + Config: configs/detectors/detectors_cascade_rcnn_r50_1x_coco.py + Metadata: + Training Memory (GB): 9.9 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_cascade_rcnn_r50_1x_coco/detectors_cascade_rcnn_r50_1x_coco-32a10ba0.pth + + - Name: htc_r50_rfp_1x_coco + In Collection: DetectoRS + Config: configs/detectors/htc_r50_rfp_1x_coco.py + Metadata: + Training Memory (GB): 11.2 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_rfp_1x_coco/htc_r50_rfp_1x_coco-8ff87c51.pth + + - Name: htc_r50_sac_1x_coco + In Collection: DetectoRS + Config: configs/detectors/htc_r50_sac_1x_coco.py + Metadata: + Training Memory (GB): 9.3 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/htc_r50_sac_1x_coco/htc_r50_sac_1x_coco-bfa60c54.pth + + - Name: detectors_htc_r50_1x_coco + In Collection: DetectoRS + Config: 
configs/detectors/detectors_htc_r50_1x_coco.py + Metadata: + Training Memory (GB): 13.6 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 49.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 42.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detectors/detectors_htc_r50_1x_coco/detectors_htc_r50_1x_coco-329b1453.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/detr/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/detr/README.md new file mode 100644 index 0000000000000000000000000000000000000000..617a8fbd591081f702ed64aadfd16ef3b2232c37 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/detr/README.md @@ -0,0 +1,27 @@ +# DETR + +## Introduction + + + +We provide the config files for DETR: [End-to-End Object Detection with Transformers](https://arxiv.org/abs/2005.12872). + +```BibTeX +@inproceedings{detr, + author = {Nicolas Carion and + Francisco Massa and + Gabriel Synnaeve and + Nicolas Usunier and + Alexander Kirillov and + Sergey Zagoruyko}, + title = {End-to-End Object Detection with Transformers}, + booktitle = {ECCV}, + year = {2020} +} +``` + +## Results and Models + +| Backbone | Model | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:------:|:--------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | DETR |150e |7.9| | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/detr/detr_r50_8x2_150e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/detr/detr_r50_8x2_150e_coco/detr_r50_8x2_150e_coco_20201130_194835-2c4b8974.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/detr/detr_r50_8x2_150e_coco/detr_r50_8x2_150e_coco_20201130_194835.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/detr/detr_r50_8x2_150e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/detr/detr_r50_8x2_150e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..892447dec15f3ac0411c5b8d36725b84a40ecfec --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/detr/detr_r50_8x2_150e_coco.py @@ -0,0 +1,150 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +model = dict( + type='DETR', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(3, ), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + bbox_head=dict( + type='DETRHead', + num_classes=80, + in_channels=2048, + transformer=dict( + type='Transformer', + encoder=dict( + type='DetrTransformerEncoder', + num_layers=6, + transformerlayers=dict( + type='BaseTransformerLayer', + attn_cfgs=[ + dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + dropout=0.1) + ], + feedforward_channels=2048, + ffn_dropout=0.1, + operation_order=('self_attn', 'norm', 'ffn', 'norm'))), + decoder=dict( + type='DetrTransformerDecoder', + return_intermediate=True, + num_layers=6, + transformerlayers=dict( + type='DetrTransformerDecoderLayer', + attn_cfgs=dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + dropout=0.1), + feedforward_channels=2048, + ffn_dropout=0.1, + operation_order=('self_attn', 'norm', 'cross_attn', 'norm', + 'ffn', 'norm')), + )), + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=128, normalize=True), + 
loss_cls=dict( + type='CrossEntropyLoss', + bg_cls_weight=0.1, + use_sigmoid=False, + loss_weight=1.0, + class_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=5.0), + loss_iou=dict(type='GIoULoss', loss_weight=2.0)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='HungarianAssigner', + cls_cost=dict(type='ClassificationCost', weight=1.), + reg_cost=dict(type='BBoxL1Cost', weight=5.0, box_format='xywh'), + iou_cost=dict(type='IoUCost', iou_mode='giou', weight=2.0))), + test_cfg=dict(max_per_img=100)) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +# train_pipeline, NOTE the img_scale and the Pad's size_divisor is different +# from the default setting in mmdet. +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='AutoAugment', + policies=[[ + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict( + type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ]]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=1), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +# test_pipeline, NOTE the Pad's size_divisor is different from the default +# setting (size_divisor=32). While there is little effect on the performance +# whether we use the default setting or use size_divisor=1. 
+test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=1), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='AdamW', + lr=0.0001, + weight_decay=0.0001, + paramwise_cfg=dict( + custom_keys={'backbone': dict(lr_mult=0.1, decay_mult=1.0)})) +optimizer_config = dict(grad_clip=dict(max_norm=0.1, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[100]) +runner = dict(type='EpochBasedRunner', max_epochs=150) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/detr/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/detr/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..4c6a86ab4aa79a31378c09f24a142b93ec34df9f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/detr/metafile.yml @@ -0,0 +1,28 @@ +Collections: + - Name: DETR + Metadata: + Training Data: COCO + Training Techniques: + - AdamW + - Multi Scale Train + - Gradient Clip + Training Resources: 8x V100 GPUs + Architecture: + - ResNet + - Transformer + Paper: https://arxiv.org/abs/2005.12872 + README: configs/detr/README.md + +Models: + - Name: detr_r50_8x2_150e_coco + In Collection: DETR + Config: configs/detr/detr_r50_8x2_150e_coco.py + Metadata: + Training Memory (GB): 7.9 + Epochs: 150 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/detr/detr_r50_8x2_150e_coco/detr_r50_8x2_150e_coco_20201130_194835-2c4b8974.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/double_heads/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/double_heads/README.md new file mode 100644 index 0000000000000000000000000000000000000000..872211d26076ea414dfe2b95b2042e9b4ba7758e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/double_heads/README.md @@ -0,0 +1,22 @@ +# Rethinking Classification and Localization for Object Detection + +## Introduction + + + +```latex +@article{wu2019rethinking, + title={Rethinking Classification and Localization for Object Detection}, + author={Yue Wu and Yinpeng Chen and Lu Yuan and Zicheng Liu and Lijuan Wang and Hongzhi Li and Yun Fu}, + year={2019}, + eprint={1904.06493}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 6.8 | 9.5 | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/double_heads/dh_faster_rcnn_r50_fpn_1x_coco/dh_faster_rcnn_r50_fpn_1x_coco_20200130-586b67df.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/double_heads/dh_faster_rcnn_r50_fpn_1x_coco/dh_faster_rcnn_r50_fpn_1x_coco_20200130_220238.log.json) | diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9b8118b4b633c78120c370f877f47e951c2fdb38 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,23 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + type='DoubleHeadRoIHead', + reg_roi_scale_factor=1.3, + bbox_head=dict( + _delete_=True, + type='DoubleConvFCBBoxHead', + num_convs=4, + num_fcs=2, + in_channels=256, + conv_out_channels=1024, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=2.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=2.0)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/double_heads/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/double_heads/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..3a2a2fdbd4f539cbbabe2b9991781724e4748d3f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/double_heads/metafile.yml @@ -0,0 +1,36 @@ +Collections: + - Name: Rethinking Classification and Localization for Object Detection + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - RPN + - ResNet + - RoIAlign + Paper: https://arxiv.org/pdf/1904.06493 + README: configs/double_heads/README.md + +Models: + - Name: dh_faster_rcnn_r50_fpn_1x_coco + In Collection: Rethinking Classification and Localization for Object Detection + Config: configs/double_heads/dh_faster_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.8 + inference time (ms/im): + - value: 105.26 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/double_heads/dh_faster_rcnn_r50_fpn_1x_coco/dh_faster_rcnn_r50_fpn_1x_coco_20200130-586b67df.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dynamic_rcnn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/dynamic_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..2b087c3b9284a2cc23d0c5d7031aa652ae9b12a2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dynamic_rcnn/README.md @@ -0,0 +1,20 @@ +# Dynamic R-CNN: Towards High Quality Object Detection via Dynamic Training + +## Introduction + + + +``` +@article{DynamicRCNN, + author = {Hongkai Zhang and Hong Chang and Bingpeng Ma and Naiyan Wang and Xilin Chen}, + title = {Dynamic {R-CNN}: Towards High Quality Object Detection via Dynamic Training}, + journal = {arXiv preprint arXiv:2004.06002}, + year = {2020} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | 3.8 | | 38.9 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x/dynamic_rcnn_r50_fpn_1x-62a3f276.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x/dynamic_rcnn_r50_fpn_1x_20200618_095048.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f2deb99e44cba92fd79d0a2cd258ddf6927703c0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + type='DynamicRoIHead', + bbox_head=dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict(nms=dict(iou_threshold=0.85)), + rcnn=dict( + dynamic_rcnn=dict( + iou_topk=75, + beta_topk=10, + update_iter_interval=100, + initial_iou=0.4, + initial_beta=1.0))), + test_cfg=dict(rpn=dict(nms=dict(iou_threshold=0.85)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/dynamic_rcnn/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/dynamic_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..3297255e7d3a3b9b3d94ff3f06d9287d70c6ae21 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/dynamic_rcnn/metafile.yml @@ -0,0 +1,30 @@ +Collections: + - Name: Dynamic R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Dynamic R-CNN + - FPN + - RPN + - ResNet + - RoIAlign + Paper: https://arxiv.org/pdf/2004.06002 + README: configs/dynamic_rcnn/README.md + +Models: + - Name: dynamic_rcnn_r50_fpn_1x_coco + In Collection: Dynamic R-CNN + Config: configs/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/dynamic_rcnn/dynamic_rcnn_r50_fpn_1x/dynamic_rcnn_r50_fpn_1x-62a3f276.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/README.md new file mode 100644 index 0000000000000000000000000000000000000000..f8647bd89812160787f7ad965e42d2cdcaf597a3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/README.md @@ -0,0 +1,23 @@ +# An Empirical Study of Spatial Attention Mechanisms in Deep Networks + +## Introduction + + + +```latex +@article{zhu2019empirical, + title={An Empirical Study of Spatial Attention Mechanisms in Deep Networks}, + author={Zhu, Xizhou and Cheng, Dazhi and Zhang, Zheng and Lin, Stephen and Dai, Jifeng}, + journal={arXiv preprint arXiv:1904.05873}, + 
year={2019} +} +``` + +## Results and Models + +| Backbone | Attention Component | DCN | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------------------:|:----:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | 1111 | N | 1x | 8.0 | 13.8 | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco/faster_rcnn_r50_fpn_attention_1111_1x_coco_20200130-403cccba.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco/faster_rcnn_r50_fpn_attention_1111_1x_coco_20200130_210344.log.json) | +| R-50 | 0010 | N | 1x | 4.2 | 18.4 | 39.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco/faster_rcnn_r50_fpn_attention_0010_1x_coco_20200130-7cb0c14d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco/faster_rcnn_r50_fpn_attention_0010_1x_coco_20200130_210125.log.json) | +| R-50 | 1111 | Y | 1x | 8.0 | 12.7 | 42.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco_20200130-8b2523a6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco_20200130_204442.log.json) | +| R-50 | 0010 | Y | 1x | 4.2 | 17.1 | 42.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco_20200130-1a2e831d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco_20200130_210410.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a544e3ab636aea0efe56007a0ea40608b6e71ad4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='0010', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..bbefd27aa02f427e27068b37ecf4d30fbd49b519 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='0010', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ], + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..13a4645bfdb50d5a2f04cee49ecc5f7647d10acf --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='1111', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b1f26c081da27811f856fe9973eb444c82604727 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + plugins=[ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='1111', + kv_stride=2), + stages=(False, False, True, True), + position='after_conv2') + ], + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..853b78657e72812ac6fd0e150308805816b56035 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/empirical_attention/metafile.yml @@ -0,0 +1,98 @@ +Collections: + - Name: Empirical Attention + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Deformable Convolution + - FPN + - RPN + - ResNet + - RoIAlign + - Spatial Attention + Paper: https://arxiv.org/pdf/1904.05873 + README: configs/empirical_attention/README.md + +Models: + - Name: faster_rcnn_r50_fpn_attention_1111_1x_coco + In Collection: Empirical Attention + Config: configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco.py + Metadata: + 
Training Memory (GB): 8.0 + inference time (ms/im): + - value: 72.46 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_1x_coco/faster_rcnn_r50_fpn_attention_1111_1x_coco_20200130-403cccba.pth + + - Name: faster_rcnn_r50_fpn_attention_0010_1x_coco + In Collection: Empirical Attention + Config: configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + inference time (ms/im): + - value: 54.35 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_1x_coco/faster_rcnn_r50_fpn_attention_0010_1x_coco_20200130-7cb0c14d.pth + + - Name: faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco + In Collection: Empirical Attention + Config: configs/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco.py + Metadata: + Training Memory (GB): 8.0 + inference time (ms/im): + - value: 78.74 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco/faster_rcnn_r50_fpn_attention_1111_dcn_1x_coco_20200130-8b2523a6.pth + + - Name: faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco + In Collection: Empirical Attention + Config: configs/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + inference time (ms/im): + - value: 58.48 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/empirical_attention/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco/faster_rcnn_r50_fpn_attention_0010_dcn_1x_coco_20200130-1a2e831d.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..3be862a3c8aba383c7cc83fe5d8f5d0d40a6c32e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/README.md @@ -0,0 +1,16 @@ +# Fast R-CNN + +## Introduction + + + +```latex +@inproceedings{girshick2015fast, + title={Fast r-cnn}, + author={Girshick, Ross}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + year={2015} +} +``` + +## Results and models diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r101_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0929fd8e5d9fb519ae148324207040baab16c474 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = './fast_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + 
pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..83852b24e7c8d23f812733f7b2fd24fc0d0f38f8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fast_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r101_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c22088579ea4a5b2d8e32a8349da63d2dc8b5f7f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r101_fpn_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fast_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f1b29ef30c7662d821921851c994d7ea78aeca34 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,48 @@ +_base_ = './fast_rcnn_r50_fpn_1x_coco.py' + +model = dict( + backbone=dict( + norm_cfg=dict(type='BN', requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=2000), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=None), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='ToTensor', keys=['proposals']), + dict( + type='ToDataContainer', + fields=[dict(key='proposals', stack=False)]), + dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d2f080e9d3b1ddade22341aa38c6258eaee78a50 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,52 @@ +_base_ = [ + '../_base_/models/fast_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=2000), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=None), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='ToTensor', keys=['proposals']), + dict( + type='ToDataContainer', + fields=[dict(key='proposals', stack=False)]), + dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + proposal_file=data_root + 'proposals/rpn_r50_fpn_1x_train2017.pkl', + pipeline=train_pipeline), + val=dict( + proposal_file=data_root + 'proposals/rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline), + test=dict( + proposal_file=data_root + 'proposals/rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r50_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..228e85645c1c7d1556810d209679d49abcd86f8f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fast_rcnn/fast_rcnn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = './fast_rcnn_r50_fpn_1x_coco.py' + +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..772a6c4f08999985d042b719a98734b3c0e5ffdd --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/README.md @@ -0,0 +1,67 @@ +# Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks + +## Introduction + + + +```latex +@article{Ren_2017, + title={Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks}, + journal={IEEE Transactions on Pattern Analysis and Machine Intelligence}, + publisher={Institute of Electrical and Electronics Engineers (IEEE)}, + author={Ren, Shaoqing and He, Kaiming and Girshick, Ross and Sun, Jian}, + year={2017}, + month={Jun}, +} +``` + +## Results and 
models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-DC5 | caffe | 1x | - | - | 37.2 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco/faster_rcnn_r50_caffe_dc5_1x_coco_20201030_151909-531f0f43.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco/faster_rcnn_r50_caffe_dc5_1x_coco_20201030_151909.log.json) | +| R-50-FPN | caffe | 1x | 3.8 | | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco/faster_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.378_20200504_180032-c5925ee5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco/faster_rcnn_r50_caffe_fpn_1x_coco_20200504_180032.log.json) | +| R-50-FPN | pytorch | 1x | 4.0 | 21.4 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| R-50-FPN | pytorch | 2x | - | - | 38.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_20200504_210434.log.json) | +| R-101-FPN | caffe | 1x | 5.7 | | 39.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco/faster_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.398_20200504_180057-b269e9dd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco/faster_rcnn_r101_caffe_fpn_1x_coco_20200504_180057.log.json) | +| R-101-FPN | pytorch | 1x | 6.0 | 15.6 | 39.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_1x_coco/faster_rcnn_r101_fpn_1x_coco_20200130-f513f705.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_1x_coco/faster_rcnn_r101_fpn_1x_coco_20200130_204655.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 39.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_2x_coco/faster_rcnn_r101_fpn_2x_coco_bbox_mAP-0.398_20200504_210455-1d2dac9c.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_2x_coco/faster_rcnn_r101_fpn_2x_coco_20200504_210455.log.json) |
+| X-101-32x4d-FPN | pytorch | 1x | 7.2 | 13.8 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco/faster_rcnn_x101_32x4d_fpn_1x_coco_20200203-cff10310.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco/faster_rcnn_x101_32x4d_fpn_1x_coco_20200203_000520.log.json) |
+| X-101-32x4d-FPN | pytorch | 2x | - | - | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco/faster_rcnn_x101_32x4d_fpn_2x_coco_bbox_mAP-0.412_20200506_041400-64a12c0b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco/faster_rcnn_x101_32x4d_fpn_2x_coco_20200506_041400.log.json) |
+| X-101-64x4d-FPN | pytorch | 1x | 10.3 | 9.4 | 42.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco/faster_rcnn_x101_64x4d_fpn_1x_coco_20200204-833ee192.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco/faster_rcnn_x101_64x4d_fpn_1x_coco_20200204_134340.log.json) |
+| X-101-64x4d-FPN | pytorch | 2x | - | - | 41.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco/faster_rcnn_x101_64x4d_fpn_2x_coco_20200512_161033-5961fa95.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco/faster_rcnn_x101_64x4d_fpn_2x_coco_20200512_161033.log.json) |
+
+## Different regression loss
+
+We trained Faster R-CNN with the R-50-FPN PyTorch-style backbone and the 1x schedule, varying only the box-regression loss.
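+
+Each variant overrides just `loss_bbox` in the bbox head. A minimal sketch of the GIoU variant, mirroring the `faster_rcnn_r50_fpn_giou_1x_coco.py` config added later in this diff:
+
+```python
+_base_ = './faster_rcnn_r50_fpn_1x_coco.py'
+model = dict(
+    roi_head=dict(
+        bbox_head=dict(
+            # decode regression deltas back to absolute boxes so the
+            # IoU-based loss can be computed on box coordinates
+            reg_decoded_bbox=True,
+            loss_bbox=dict(type='GIoULoss', loss_weight=10.0))))
+```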
+
+| Backbone | Loss type | Mem (GB) | Inf time (fps) | box AP | Config | Download |
+| :-------------: | :-------: | :------: | :------------: | :----: | :------: | :--------: |
+| R-50-FPN | L1Loss | 4.0 | 21.4 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) |
+| R-50-FPN | IoULoss | | | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_iou_1x_coco-fdd207f3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_iou_1x_coco_20200506_095954.log.json) |
+| R-50-FPN | GIoULoss | | | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_giou_1x_coco-0eada910.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_giou_1x_coco_20200505_161120.log.json) |
+| R-50-FPN | BoundedIoULoss | | | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_bounded_iou_1x_coco-98ad993b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_bounded_iou_1x_coco_20200505_160738.log.json) |
+
+## Pre-trained Models
+
+We also train some models with longer schedules and multi-scale training; users can fine-tune them for downstream tasks.
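+
+The "mstrain" variants differ from the single-scale configs mainly in the train-time `Resize` step. A sketch of the resize shared by the 3x schedules (see `faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py` later in this diff):
+
+```python
+train_pipeline = [
+    dict(type='LoadImageFromFile'),
+    dict(type='LoadAnnotations', with_bbox=True),
+    dict(
+        type='Resize',
+        # short edge sampled uniformly from [640, 800], long edge capped at 1333
+        img_scale=[(1333, 640), (1333, 800)],
+        multiscale_mode='range',
+        keep_ratio=True),
+    dict(type='RandomFlip', flip_ratio=0.5),
+    # Normalize / Pad / DefaultFormatBundle / Collect as in the 1x configs
+]
+```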
+ +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| [R-50-DC5](./faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py) | caffe | 1x | - | | 37.4 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco_20201028_233851-b33d21b9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco_20201028_233851.log.json) | +| [R-50-DC5](./faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py) | caffe | 3x | - | | 38.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco_20201028_002107-34a53b2c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco_20201028_002107.log.json) | +| [R-50-FPN](./faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py) | caffe | 2x | 3.7 | | 39.7 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco_bbox_mAP-0.397_20200504_231813-10b2de58.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco_20200504_231813.log.json) | +| [R-50-FPN](./faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | caffe | 3x | 3.7 | | 39.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210526_095054-1f77628b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210526_095054.log.json) | +| [R-50-FPN](./faster_rcnn_r50_fpn_mstrain_3x_coco.py) | pytorch | 3x | 3.9 | | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco/faster_rcnn_r50_fpn_mstrain_3x_coco_20210524_110822-e10bd31c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco/faster_rcnn_r50_fpn_mstrain_3x_coco_20210524_110822.log.json) | +| [R-101-FPN](./faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py) | caffe | 3x | 5.6 | | 42.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210526_095742-a7ae426d.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210526_095742.log.json) |
+| [R-101-FPN](./faster_rcnn_r101_fpn_mstrain_3x_coco.py) | pytorch | 3x | 5.8 | | 41.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco/faster_rcnn_r101_fpn_mstrain_3x_coco_20210524_110822-4d4d2ca8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco/faster_rcnn_r101_fpn_mstrain_3x_coco_20210524_110822.log.json) |
+| [X-101-32x4d-FPN](./faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py) | pytorch | 3x | 7.0 | | 42.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210524_124151-16b9b260.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210524_124151.log.json) |
+| [X-101-32x8d-FPN](./faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py) | pytorch | 3x | 10.1 | | 42.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210604_182954-002e082a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210604_182954.log.json) |
+| [X-101-64x4d-FPN](./faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py) | pytorch | 3x | 10.0 | | 43.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210524_124528-26c63de6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210524_124528.log.json) |
+
+We further fine-tune some pre-trained models on COCO subsets that contain only a few of the 80 categories.
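+
+A subset config shrinks the classification head to the kept categories, restricts the dataset classes, and initializes from a full-COCO checkpoint via `load_from`. A minimal sketch, mirroring the `faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py` config added later in this diff:
+
+```python
+_base_ = './faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py'
+# one output class per kept category
+model = dict(roi_head=dict(bbox_head=dict(num_classes=3)))
+classes = ('person', 'bicycle', 'car')
+data = dict(
+    train=dict(classes=classes),
+    val=dict(classes=classes),
+    test=dict(classes=classes))
+# start from the 80-class 3x checkpoint rather than ImageNet weights
+load_from = 'https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_bbox_mAP-0.398_20200504_163323-30042637.pth'  # noqa
+```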
+
+| Backbone | Style | Class name | Pre-trained model | Mem (GB) | box AP | Config | Download |
+| ------------------------------------------------------------ | ----- | ------------------ | ------------------------------------------------------------ | -------- | ------ | ------------------------------------------------------------ | ------------------------------------------------------------ |
+| [R-50-FPN](./faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py) | caffe | person | [R-50-FPN-Caffe-3x](./faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | 3.7 | 55.8 | [config](./faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person/faster_rcnn_r50_fpn_1x_coco-person_20201216_175929-d022e227.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person/faster_rcnn_r50_fpn_1x_coco-person_20201216_175929.log.json) |
+| [R-50-FPN](./faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py) | caffe | person-bicycle-car | [R-50-FPN-Caffe-3x](./faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py) | 3.7 | 44.1 | [config](./faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car_20201216_173117-6eda6d92.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car/faster_rcnn_r50_fpn_1x_coco-person-bicycle-car_20201216_173117.log.json) |
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..c6f078c771d7b7188a2d66ae73b56206c3e84a95
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py
@@ -0,0 +1,7 @@
+_base_ = './faster_rcnn_r50_caffe_fpn_1x_coco.py'
+model = dict(
+    backbone=dict(
+        depth=101,
+        init_cfg=dict(
+            type='Pretrained',
+            checkpoint='open-mmlab://detectron2/resnet101_caffe')))
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..6a13fe9ff692d18927f9ada0604e675b2cd0bea9
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py
@@ -0,0 +1,49 @@
+_base_ = 'faster_rcnn_r50_fpn_mstrain_3x_coco.py'
+
+model = dict(
+    backbone=dict(
+        depth=101,
+        norm_cfg=dict(requires_grad=False),
+        norm_eval=True,
+        style='caffe',
+        init_cfg=dict(
+            type='Pretrained',
+            checkpoint='open-mmlab://detectron2/resnet101_caffe')))
+
+# use caffe img_norm
+img_norm_cfg = dict(
+    mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False)
+train_pipeline = [
+    dict(type='LoadImageFromFile'),
+    dict(type='LoadAnnotations', with_bbox=True),
+    dict(
+        type='Resize',
+        img_scale=[(1333, 640), (1333, 800)],
+        multiscale_mode='range',
+        keep_ratio=True),
+    dict(type='RandomFlip', flip_ratio=0.5),
+    dict(type='Normalize', **img_norm_cfg),
+    dict(type='Pad', size_divisor=32),
+    dict(type='DefaultFormatBundle'),
+
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1de53a6cdfcd64541c2ddf0f4f699b7f8d003029 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0d41599430ae5ca371969076c6d53706ae92e975 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0b498bb687c6d3ac941061584aeba3653df97fe1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py @@ -0,0 +1,7 @@ +_base_ = 'faster_rcnn_r50_fpn_mstrain_3x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..92344a151be9af53659845b51e4ece7f0a7b636f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_c4_1x_coco.py @@ -0,0 +1,39 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_c4.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + 
dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ee2010c64a4c24e18b81c0be7e002ea474c57a44 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py @@ -0,0 +1,37 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_dc5.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..14eaef2dffea606027001b69d12d11cb46693e1c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py @@ -0,0 +1,42 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_dc5.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + 
dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..403747f127e0f7a301771e53e75bf0e83a1736c9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..56c01bdcf55cbbb18b7519a46c9b8ce18797011a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,41 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py new file mode 100644 index 0000000000000000000000000000000000000000..4f1f376c33a0ad884a8930833c6205339966f82b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person-bicycle-car.py @@ -0,0 +1,9 @@ +_base_ = 
'./faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' +model = dict(roi_head=dict(bbox_head=dict(num_classes=3))) +classes = ('person', 'bicycle', 'car') +data = dict( + train=dict(classes=classes), + val=dict(classes=classes), + test=dict(classes=classes)) + +load_from = 'https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_bbox_mAP-0.398_20200504_163323-30042637.pth' # noqa diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py new file mode 100644 index 0000000000000000000000000000000000000000..b5dfb4fe447472b2fabb7d193778dbf2fbf2ce25 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-person.py @@ -0,0 +1,9 @@ +_base_ = './faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' +model = dict(roi_head=dict(bbox_head=dict(num_classes=1))) +classes = ('person', ) +data = dict( + train=dict(classes=classes), + val=dict(classes=classes), + test=dict(classes=classes)) + +load_from = 'https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_bbox_mAP-0.398_20200504_163323-30042637.pth' # noqa diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f807a19abce803dd99f82c5d1c4cec502d16253f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,46 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..df58973fc009949d37e8a87e4d3ac39e2c313c65 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9eeaaceaf5e7533105f83b736ca7ce454159aedb --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,47 @@ +_base_ = 'faster_rcnn_r50_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_90k_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_90k_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..74dca24f26422967501e7ba31c3f39ca324e031c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_90k_coco.py @@ -0,0 +1,15 @@ +_base_ = 'faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' + +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[60000, 80000]) + +# Runner type +runner = dict(_delete_=True, type='IterBasedRunner', max_iters=90000) + +checkpoint_config = dict(interval=10000) +evaluation = dict(interval=10000, metric='bbox') diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..009bd93d06b3284c7b31f33f82d636f774e86b74 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + 
'../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e77a7fa8d6b8c1ad7fe293bc932d621464287e0c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..648081f19ca7d3ca9a7362a4a41e514d753ce4e8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_head=dict( + reg_decoded_bbox=True, + loss_bbox=dict(type='BoundedIoULoss', loss_weight=10.0)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5556c4977e221182b013b68fef4b73d1b0605bf3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_head=dict( + reg_decoded_bbox=True, + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ddf663e4f0e1525490a493674b32b3dc4c781bb2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + bbox_head=dict( + reg_decoded_bbox=True, + loss_bbox=dict(type='IoULoss', loss_weight=10.0)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..faf8f92437d839eda456187a29827907a5a9532b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py @@ -0,0 +1,3 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..f897e7c55c8b8f0ef7a5db92f29ef1c2415965db --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict(train_cfg=dict(rcnn=dict(sampler=dict(type='OHEMSampler')))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_soft_nms_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_soft_nms_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..759ae3a7acec07daa75213835f1bc41d5c6de4a5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_r50_fpn_soft_nms_1x_coco.py @@ -0,0 +1,12 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + test_cfg=dict( + rcnn=dict( + score_thr=0.05, + nms=dict(type='soft_nms', iou_threshold=0.5), + max_per_img=100))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3808c9f2870d632feae36e521d0537141b7271d5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e93f5d8173dd4b22c1022dadf5258e455d4b3fd5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './faster_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f55985d61cec9aff95c78c8e287baad6ba1300d9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py @@ -0,0 +1,16 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', 
requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a5d5aebbdebb63b89dcac9e8bf4a4e88f5d980d3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py @@ -0,0 +1,62 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnext101_32x8d'))) + +# ResNeXt-101-32x8d model trained with Caffe2 at FB, +# so the mean and std need to be changed. +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8bf2b65a6a97173e2cb563c8f79c501936a2ee09 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7ea9b2da14da6b86f3497bfc3c56862a5c05730b --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './faster_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..80397f4b18acb094f8f6e132ea21050c75b2de48 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py @@ -0,0 +1,16 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..c314df89366453de072e93032101d1fb613b5776 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/faster_rcnn/metafile.yml @@ -0,0 +1,378 @@ +Collections: + - Name: Faster R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - RPN + - ResNet + - RoIPool + Paper: https://arxiv.org/abs/1506.01497 + README: configs/faster_rcnn/README.md + +Models: + - Name: faster_rcnn_r50_caffe_dc5_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_1x_coco/faster_rcnn_r50_caffe_dc5_1x_coco_20201030_151909-531f0f43.pth + + - Name: faster_rcnn_r50_caffe_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco/faster_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.378_20200504_180032-c5925ee5.pth + + - Name: faster_rcnn_r50_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 46.73 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth + + - Name: faster_rcnn_r50_fpn_2x_coco + In Collection: 
Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 46.73 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_2x_coco/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth + + - Name: faster_rcnn_r101_caffe_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.7 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_1x_coco/faster_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.398_20200504_180057-b269e9dd.pth + + - Name: faster_rcnn_r101_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 64.1 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_1x_coco/faster_rcnn_r101_fpn_1x_coco_20200130-f513f705.pth + + - Name: faster_rcnn_r101_fpn_2x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 64.1 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_2x_coco/faster_rcnn_r101_fpn_2x_coco_bbox_mAP-0.398_20200504_210455-1d2dac9c.pth + + - Name: faster_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.2 + inference time (ms/im): + - value: 72.46 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco/faster_rcnn_x101_32x4d_fpn_1x_coco_20200203-cff10310.pth + + - Name: faster_rcnn_x101_32x4d_fpn_2x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 7.2 + inference time (ms/im): + - value: 72.46 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_2x_coco/faster_rcnn_x101_32x4d_fpn_2x_coco_bbox_mAP-0.412_20200506_041400-64a12c0b.pth + + - Name: faster_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time (ms/im): + - value: 106.38 + hardware: V100 + backend: PyTorch + batch size: 1 + 
mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_1x_coco/faster_rcnn_x101_64x4d_fpn_1x_coco_20200204-833ee192.pth + + - Name: faster_rcnn_x101_64x4d_fpn_2x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time (ms/im): + - value: 106.38 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_2x_coco/faster_rcnn_x101_64x4d_fpn_2x_coco_20200512_161033-5961fa95.pth + + - Name: faster_rcnn_r50_fpn_iou_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_iou_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_iou_1x_coco-fdd207f3.pth + + - Name: faster_rcnn_r50_fpn_giou_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_giou_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_giou_1x_coco-0eada910.pth + + - Name: faster_rcnn_r50_fpn_bounded_iou_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_bounded_iou_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_bounded_iou_1x_coco-98ad993b.pth + + - Name: faster_rcnn_r50_caffe_dc5_mstrain_1x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco/faster_rcnn_r50_caffe_dc5_mstrain_1x_coco_20201028_233851-b33d21b9.pth + + - Name: faster_rcnn_r50_caffe_dc5_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco/faster_rcnn_r50_caffe_dc5_mstrain_3x_coco_20201028_002107-34a53b2c.pth + + - Name: faster_rcnn_r50_caffe_fpn_mstrain_2x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco.py + Metadata: + Training Memory (GB): 4.3 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco/faster_rcnn_r50_caffe_fpn_mstrain_2x_coco_bbox_mAP-0.397_20200504_231813-10b2de58.pth + + - Name: faster_rcnn_r50_caffe_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: 
configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.7 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco/faster_rcnn_r50_caffe_fpn_mstrain_3x_coco_20210526_095054-1f77628b.pth + + - Name: faster_rcnn_r50_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.9 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_mstrain_3x_coco/faster_rcnn_r50_fpn_mstrain_3x_coco_20210524_110822-e10bd31c.pth + + - Name: faster_rcnn_r101_caffe_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.6 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco/faster_rcnn_r101_caffe_fpn_mstrain_3x_coco_20210526_095742-a7ae426d.pth + + - Name: faster_rcnn_r101_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.8 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r101_fpn_mstrain_3x_coco/faster_rcnn_r101_fpn_mstrain_3x_coco_20210524_110822-4d4d2ca8.pth + + - Name: faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 7.0 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x4d_fpn_mstrain_3x_coco_20210524_124151-16b9b260.pth + + - Name: faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 10.1 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco/faster_rcnn_x101_32x8d_fpn_mstrain_3x_coco_20210604_182954-002e082a.pth + + - Name: faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco + In Collection: Faster R-CNN + Config: configs/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 10.0 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco/faster_rcnn_x101_64x4d_fpn_mstrain_3x_coco_20210524_124528-26c63de6.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/README.md new file mode 100644 index 0000000000000000000000000000000000000000..ae5470af3665f0001d6ebc25a0d325925c291e78 --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/README.md @@ -0,0 +1,35 @@ +# FCOS: Fully Convolutional One-Stage Object Detection + +## Introduction + + + +```latex +@article{tian2019fcos, + title={FCOS: Fully Convolutional One-Stage Object Detection}, + author={Tian, Zhi and Shen, Chunhua and Chen, Hao and He, Tong}, + journal={arXiv preprint arXiv:1904.01355}, + year={2019} +} +``` + +## Results and Models + +| Backbone | Style | GN | MS train | Tricks | DCN | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:-------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | caffe | Y | N | N | N | 1x | 3.6 | 22.7 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco/fcos_r50_caffe_fpn_gn-head_1x_coco-821213aa.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco/20201227_180009.log.json) | +| R-50 | caffe | Y | N | Y | N | 1x | 3.7 | - | 38.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco-0a0d75a8.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco/20210105_135818.log.json)| +| R-50 | caffe | Y | N | Y | Y | 1x | 3.8 | - | 42.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco-ae4d8b3d.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco/20210105_224556.log.json)| +| R-101 | caffe | Y | N | N | N | 1x | 5.5 | 17.3 | 39.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco/fcos_r101_caffe_fpn_gn-head_1x_coco-0e37b982.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco/20210103_155046.log.json) | + +| Backbone | Style | GN | MS train | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | caffe | Y | Y | 2x | 2.6 | 22.9 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco-d92ceeea.pth) | 
[log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco/20201227_161900.log.json) | +| R-101 | caffe | Y | Y | 2x | 5.5 | 17.3 | 40.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco-511424d6.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco/20210103_155046.log.json) | +| X-101 | pytorch | Y | Y | 2x | 10.0 | 9.7 | 42.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco-ede514a8.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco/20210114_133041.log.json) | + +**Notes:** + +- The X-101 backbone is X-101-64x4d. +- Tricks means setting `norm_on_bbox`, `centerness_on_reg` and `center_sampling` to `True`. +- DCN means using `DCNv2` in both the backbone and the head. diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2699bdb979bdf2dce3f4f26946304aa1ed2f4751 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py @@ -0,0 +1,54 @@ +_base_ = 'fcos_r50_caffe_fpn_gn-head_1x_coco.py' + +model = dict( + backbone=dict( + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + bbox_head=dict( + norm_on_bbox=True, + centerness_on_reg=True, + dcn_on_last_conv=False, + center_sampling=True, + conv_bias=True, + loss_bbox=dict(type='GIoULoss', loss_weight=1.0)), + # training and testing settings + test_cfg=dict(nms=dict(type='nms', iou_threshold=0.6))) + +# dataset settings +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer_config = dict(_delete_=True, grad_clip=None) +
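+# Note: `_delete_=True` above swaps out the inherited optimizer_config wholesale, dropping the base grad_clip instead of merging keys into it.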
+lr_config = dict(warmup='linear') diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cf93c91e7128c277d1263b680beb108cfadbbc49 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py @@ -0,0 +1,56 @@ +_base_ = 'fcos_r50_caffe_fpn_gn-head_1x_coco.py' + +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + bbox_head=dict( + norm_on_bbox=True, + centerness_on_reg=True, + dcn_on_last_conv=True, + center_sampling=True, + conv_bias=True, + loss_bbox=dict(type='GIoULoss', loss_weight=1.0)), + # training and testing settings + test_cfg=dict(nms=dict(type='nms', iou_threshold=0.6))) + +# dataset settings +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer_config = dict(_delete_=True, grad_clip=None) + +lr_config = dict(warmup='linear') diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_center_r50_caffe_fpn_gn-head_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_center_r50_caffe_fpn_gn-head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9f502e7b465f789a90100d96e881c60c84d9bf91 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_center_r50_caffe_fpn_gn-head_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +model = dict(bbox_head=dict(center_sampling=True, center_sample_radius=1.5)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..45bea48dc38881d0a0f41ef820723a1ac854c854 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet101_caffe'))) 
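The FCOS variants above are thin overlays on `fcos_r50_caffe_fpn_gn-head_1x_coco.py`: each file names a `_base_` config and overrides only the keys that differ, and the config loader merges the child dicts over the base dicts recursively at load time. A minimal sketch of loading and inspecting such a config, assuming the repo layout shown in the diff and the mmcv 1.x `Config` API this code base builds on:

```python
from mmcv import Config

# `fromfile` resolves the `_base_` chain and merges the override
# dicts key by key over the inherited ones.
cfg = Config.fromfile(
    'configs/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py')

print(cfg.model.backbone.depth)   # 101 -- overridden by the child config
print(cfg.model.bbox_head.type)   # 'FCOSHead' -- inherited from the base
```

Nested dicts merge recursively, so the child only needs to state `depth` and `init_cfg`; a dict containing `_delete_=True` is instead swapped in wholesale, discarding the inherited keys.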
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f4d36f1eeed8fe152c2c4cad702d3736bc56172c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py @@ -0,0 +1,47 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet101_caffe'))) +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..955787bab9413f93908cc4542da89f1bdd31c492 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py @@ -0,0 +1,106 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='FCOS', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet50_caffe')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', # use P5 + num_outs=5, + relu_before_extra_convs=True), + bbox_head=dict( + type='FCOSHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + 
allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='constant', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[8, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2816b16f64dbcbfecd779650aaae0ca6cee0d810 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,4 @@ +# TODO: Remove this config after benchmarking all related configs +_base_ = 'fcos_r50_caffe_fpn_gn-head_1x_coco.py' + +data = dict(samples_per_gpu=4, workers_per_gpu=4) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..497d03f6f702ecb47cccbe0089089b5a002ebcca --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py @@ -0,0 +1,39 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', 
**img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e70e4651230cbf58129b139d30de68c35e9c0e2d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py @@ -0,0 +1,60 @@ +_base_ = './fcos_r50_caffe_fpn_gn-head_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..88db4d40b2c348b50e2390c257300a6ba935abed --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fcos/metafile.yml @@ -0,0 +1,141 @@ +Collections: + - Name: FCOS + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - Group Normalization + - ResNet + Paper: https://arxiv.org/abs/1904.01355 + README: configs/fcos/README.md + +Models: + - Name: fcos_r50_caffe_fpn_gn-head_1x_coco + In Collection: FCOS + Config: configs/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco.py + Metadata: + Training Memory (GB): 3.6 + inference time (ms/im): + - value: 44.05 + hardware: V100 + backend: 
PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.6 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_1x_coco/fcos_r50_caffe_fpn_gn-head_1x_coco-821213aa.pth + + - Name: fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco + In Collection: FCOS + Config: configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco.py + Metadata: + Training Memory (GB): 3.7 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.7 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_1x_coco-0a0d75a8.pth + + - Name: fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco + In Collection: FCOS + Config: configs/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco.py + Metadata: + Training Memory (GB): 3.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.3 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco/fcos_center-normbbox-centeronreg-giou_r50_caffe_fpn_gn-head_dcn_1x_coco-ae4d8b3d.pth + + - Name: fcos_r101_caffe_fpn_gn-head_1x_coco + In Collection: FCOS + Config: configs/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + inference time (ms/im): + - value: 57.8 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_1x_coco/fcos_r101_caffe_fpn_gn-head_1x_coco-0e37b982.pth + + - Name: fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco + In Collection: FCOS + Config: configs/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py + Metadata: + Training Memory (GB): 2.6 + inference time (ms/im): + - value: 43.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.5 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco/fcos_r50_caffe_fpn_gn-head_mstrain_640-800_2x_coco-d92ceeea.pth + + - Name: fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco + In Collection: FCOS + Config: configs/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco.py + Metadata: + Training Memory (GB): 5.5 + inference time (ms/im): + - value: 57.8 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.8 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco/fcos_r101_caffe_fpn_gn-head_mstrain_640-800_2x_coco-511424d6.pth + + - Name: fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco + In Collection: FCOS + Config: configs/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco.py + Metadata: + Training Memory (GB): 10.0 + inference time (ms/im): + - value: 103.09 + hardware: V100 + backend: PyTorch + 
batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.6 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/fcos/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco/fcos_x101_64x4d_fpn_gn-head_mstrain_640-800_2x_coco-ede514a8.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b9b5684e07178e3b01646182d98bf37e47444f79 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/README.md @@ -0,0 +1,41 @@ +# FoveaBox: Beyond Anchor-based Object Detector + + + +FoveaBox is an accurate, flexible and completely anchor-free object detection framework, as presented in our paper [https://arxiv.org/abs/1904.03797](https://arxiv.org/abs/1904.03797): +Different from previous anchor-based methods, FoveaBox directly learns the probability that an object exists and the bounding box coordinates without anchor references. This is achieved by: (a) predicting category-sensitive semantic maps for the probability of object existence, and (b) producing a category-agnostic bounding box for each position that potentially contains an object. + +## Main Results + +### Results on R50/101-FPN + +| Backbone | Style | align | ms-train| Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | N | N | 1x | 5.6 | 24.1 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_1x_coco/fovea_r50_fpn_4x4_1x_coco_20200219-ee4d5303.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_1x_coco/fovea_r50_fpn_4x4_1x_coco_20200219_223025.log.json) | +| R-50 | pytorch | N | N | 2x | 5.6 | - | 37.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_2x_coco/fovea_r50_fpn_4x4_2x_coco_20200203-2df792b1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_2x_coco/fovea_r50_fpn_4x4_2x_coco_20200203_112043.log.json) | +| R-50 | pytorch | Y | N | 2x | 8.1 | 19.4 | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco/fovea_align_r50_fpn_gn-head_4x4_2x_coco_20200203-8987880d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco/fovea_align_r50_fpn_gn-head_4x4_2x_coco_20200203_134252.log.json) | +| R-50 | pytorch | Y | Y | 2x | 8.1 | 18.3 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200205-85ce26cb.pth) |
[log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200205_112557.log.json) | +| R-101 | pytorch | N | N | 1x | 9.2 | 17.4 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_1x_coco/fovea_r101_fpn_4x4_1x_coco_20200219-05e38f1c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_1x_coco/fovea_r101_fpn_4x4_1x_coco_20200219_011740.log.json) | +| R-101 | pytorch | N | N | 2x | 11.7 | - | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_2x_coco/fovea_r101_fpn_4x4_2x_coco_20200208-02320ea4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_2x_coco/fovea_r101_fpn_4x4_2x_coco_20200208_202059.log.json) | +| R-101 | pytorch | Y | N | 2x | 11.7 | 14.7 | 40.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco/fovea_align_r101_fpn_gn-head_4x4_2x_coco_20200208-c39a027a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco/fovea_align_r101_fpn_gn-head_4x4_2x_coco_20200208_203337.log.json) | +| R-101 | pytorch | Y | Y | 2x | 11.7 | 14.7 | 42.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200208-649c5eb6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200208_202124.log.json) | + +[1] *1x and 2x mean the model is trained for 12 and 24 epochs, respectively.* \ +[2] *Align means utilizing deformable convolution to align the cls branch.* \ +[3] *All results are obtained with a single model and without any test time data augmentation.*\ +[4] *We use 4 GPUs for training.* + +Any pull requests or issues are welcome. + +## Citations + +Please consider citing our paper in your publications if the project helps your research. BibTeX reference is as follows. 
+ +```latex +@article{kong2019foveabox, + title={FoveaBox: Beyond Anchor-based Object Detector}, + author={Kong, Tao and Sun, Fuchun and Liu, Huaping and Jiang, Yuning and Shi, Jianbo}, + journal={arXiv preprint arXiv:1904.03797}, + year={2019} +} +``` diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c5d178492d1031f03915e5a8e273f2b4b12a7e97 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py @@ -0,0 +1,12 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cc5affefe85150d8e8d372920221e00c27646375 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,29 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e7265bcdbef2a7ab5e8ba6b3fe13f02cb718b40a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py @@ -0,0 +1,10 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8fc39beaac540a8d3e00bf968f1af08450f9d4cc --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,25 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + bbox_head=dict( + with_deform=True, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9201af11b88f4c161730f43e957c4d9c53828262 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1ef5243f93f5df47d9f1dab318655ea757e6c676 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fovea_r50_fpn_4x4_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7e986ebcd59f0fe59c760739d291a693f9b7a02e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py @@ -0,0 +1,52 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='FOVEA', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, 
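+ # start_level=1 skips the C2 feature map; with num_outs=5 the FPN appends two extra levels computed from the backbone input (add_extra_convs='on_input' below)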
+ num_outs=5, + add_extra_convs='on_input'), + bbox_head=dict( + type='FoveaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + base_edge_list=[16, 32, 64, 128, 256], + scale_ranges=((1, 64), (32, 128), (64, 256), (128, 512), (256, 2048)), + sigma=0.4, + with_deform=False, + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=1.50, + alpha=0.4, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0)), + # training and testing settings + train_cfg=dict(), + test_cfg=dict( + nms_pre=1000, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) +data = dict(samples_per_gpu=4, workers_per_gpu=4) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..68ce4d250ac673a274d1458963eb02614e4f5f98 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './fovea_r50_fpn_4x4_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..f5f892fbab189d63137c3b3d6fd75874b6fe5005 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/foveabox/metafile.yml @@ -0,0 +1,167 @@ +Collections: + - Name: FoveaBox + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 4x V100 GPUs + Architecture: + - FPN + - ResNet + Paper: https://arxiv.org/abs/1904.03797 + README: configs/foveabox/README.md + +Models: + - Name: fovea_r50_fpn_4x4_1x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_r50_fpn_4x4_1x_coco.py + Metadata: + Training Memory (GB): 5.6 + inference time (ms/im): + - value: 41.49 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_1x_coco/fovea_r50_fpn_4x4_1x_coco_20200219-ee4d5303.pth + + - Name: fovea_r50_fpn_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_r50_fpn_4x4_2x_coco.py + Metadata: + Training Memory (GB): 5.6 + inference time (ms/im): + - value: 41.49 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r50_fpn_4x4_2x_coco/fovea_r50_fpn_4x4_2x_coco_20200203-2df792b1.pth + + - Name: fovea_align_r50_fpn_gn-head_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py + Metadata: + Training Memory (GB): 8.1 + inference time (ms/im): + - value: 51.55 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.9 + 
Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco/fovea_align_r50_fpn_gn-head_4x4_2x_coco_20200203-8987880d.pth + + - Name: fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py + Metadata: + Training Memory (GB): 8.1 + inference time (ms/im): + - value: 54.64 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200205-85ce26cb.pth + + - Name: fovea_r101_fpn_4x4_1x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_r101_fpn_4x4_1x_coco.py + Metadata: + Training Memory (GB): 9.2 + inference time (ms/im): + - value: 57.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_1x_coco/fovea_r101_fpn_4x4_1x_coco_20200219-05e38f1c.pth + + - Name: fovea_r101_fpn_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_r101_fpn_4x4_2x_coco.py + Metadata: + Training Memory (GB): 11.7 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_r101_fpn_4x4_2x_coco/fovea_r101_fpn_4x4_2x_coco_20200208-02320ea4.pth + + - Name: fovea_align_r101_fpn_gn-head_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco.py + Metadata: + Training Memory (GB): 11.7 + inference time (ms/im): + - value: 68.03 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_4x4_2x_coco/fovea_align_r101_fpn_gn-head_4x4_2x_coco_20200208-c39a027a.pth + + - Name: fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco + In Collection: FoveaBox + Config: configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py + Metadata: + Training Memory (GB): 11.7 + inference time (ms/im): + - value: 68.03 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco_20200208-649c5eb6.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/README.md new file mode 100644 index 0000000000000000000000000000000000000000..171aa1b95982f55692c9e2ca0b587a6ba9426050 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/README.md @@ -0,0 +1,24 @@ +# Mixed Precision Training + +## Introduction + + + +```latex +@article{micikevicius2017mixed, + title={Mixed precision training}, + author={Micikevicius, Paulius and Narang, Sharan and Alben, 
Jonah and Diamos, Gregory and Elsen, Erich and Garcia, David and Ginsburg, Boris and Houston, Michael and Kuchaiev, Oleksii and Venkatesh, Ganesh and others}, + journal={arXiv preprint arXiv:1710.03740}, + year={2017} +} +``` + +## Results and Models + +| Architecture | Backbone | Style | Conv | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:------------:|:---------:|:-------:|:------------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| Faster R-CNN | R-50 | pytorch | - | 1x | 3.4 | 28.8 | 37.5 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/faster_rcnn_r50_fpn_fp16_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fp16/faster_rcnn_r50_fpn_fp16_1x_coco/faster_rcnn_r50_fpn_fp16_1x_coco_20200204-d4dc1471.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fp16/faster_rcnn_r50_fpn_fp16_1x_coco/faster_rcnn_r50_fpn_fp16_1x_coco_20200204_143530.log.json) | +| Mask R-CNN | R-50 | pytorch | - | 1x | 3.6 | 24.1 | 38.1 | 34.7 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_1x_coco/mask_rcnn_r50_fpn_fp16_1x_coco_20200205-59faf7e4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_1x_coco/mask_rcnn_r50_fpn_fp16_1x_coco_20200205_130539.log.json) | +| Mask R-CNN | R-50 | pytorch | dconv(c3-c5) | 1x | 3.0 | | 41.9 | 37.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco_20210520_180247-c06429d2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco_20210520_180247.log.json) | +| Mask R-CNN | R-50 | pytorch | mdconv(c3-c5)| 1x | 3.1 | | 42.0 | 37.6 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco_20210520_180434-cf8fefa5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco_20210520_180434.log.json) | +| Retinanet | R-50 | pytorch | - | 1x | 2.8 | 31.6 | 36.4 | |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fp16/retinanet_r50_fpn_fp16_1x_coco/retinanet_r50_fpn_fp16_1x_coco_20200702-0dbfb212.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fp16/retinanet_r50_fpn_fp16_1x_coco/retinanet_r50_fpn_fp16_1x_coco_20200702_020127.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/faster_rcnn_r50_fpn_fp16_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/faster_rcnn_r50_fpn_fp16_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..78fa5b6c6a895cb04e1813462ed6a7eefd8c1fa6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/faster_rcnn_r50_fpn_fp16_1x_coco.py @@ -0,0 +1,3 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +# 
fp16 settings +fp16 = dict(loss_scale=512.) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f506ea815fedd6faefad9a06d7f466b86e8d2622 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py @@ -0,0 +1,3 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +# fp16 settings +fp16 = dict(loss_scale=512.) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ee5cca7d535bc0a3e181f690a46ab42c42f1b9b1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) + +fp16 = dict(loss_scale=512.) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7e21454bd96e4accdf0693d5fc805622f605be7c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True))) + +fp16 = dict(loss_scale=512.) 
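The fp16 configs above each enable mixed-precision training through the single line `fp16 = dict(loss_scale=512.)`, which MMDetection turns into a static loss-scaling hook. The mechanism, sketched here in plain PyTorch rather than the actual hook (the `model`/`optimizer` names and the training-step wrapper are illustrative, not part of this code base):

```python
import torch

SCALE = 512.0  # static loss scale, mirroring fp16 = dict(loss_scale=512.)

def fp16_train_step(model: torch.nn.Module,
                    optimizer: torch.optim.Optimizer,
                    loss: torch.Tensor) -> None:
    # Scale the loss up before backward so small FP16 gradients
    # do not underflow to zero.
    (loss * SCALE).backward()
    # Unscale the gradients before the update so the effective step
    # matches the FP32 learning-rate schedule.
    for p in model.parameters():
        if p.grad is not None:
            p.grad.div_(SCALE)
    optimizer.step()
    optimizer.zero_grad()
```

A fixed scale of 512 trades robustness for simplicity: unlike dynamic loss scaling it never adapts, so it relies on the chosen value being small enough to avoid gradient overflow for these models.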
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..af577c73f1251f5337fc22f17cab2c8946e51fe3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/metafile.yml @@ -0,0 +1,108 @@ +Collections: + - Name: FP16 + Metadata: + Training Data: COCO + Training Techniques: + - Mixed Precision Training + Training Resources: 8x V100 GPUs + Paper: https://arxiv.org/abs/1710.03740 + README: configs/fp16/README.md + +Models: + - Name: faster_rcnn_r50_fpn_fp16_1x_coco + In Collection: FP16 + Config: configs/fp16/faster_rcnn_r50_fpn_fp16_1x_coco.py + Metadata: + Training Memory (GB): 3.4 + inference time (ms/im): + - value: 34.72 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP16 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fp16/faster_rcnn_r50_fpn_fp16_1x_coco/faster_rcnn_r50_fpn_fp16_1x_coco_20200204-d4dc1471.pth + + - Name: mask_rcnn_r50_fpn_fp16_1x_coco + In Collection: FP16 + Config: configs/fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py + Metadata: + Training Memory (GB): 3.6 + inference time (ms/im): + - value: 41.49 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP16 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_1x_coco/mask_rcnn_r50_fpn_fp16_1x_coco_20200205-59faf7e4.pth + + - Name: mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco + In Collection: FP16 + Config: configs/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 3.0 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_dconv_c3-c5_1x_coco_20210520_180247-c06429d2.pth + + - Name: mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco + In Collection: FP16 + Config: configs/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 3.1 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fp16/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco/mask_rcnn_r50_fpn_fp16_mdconv_c3-c5_1x_coco_20210520_180434-cf8fefa5.pth + + - Name: retinanet_r50_fpn_fp16_1x_coco + In Collection: FP16 + Config: configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py + Metadata: + Training Memory (GB): 2.8 + inference time (ms/im): + - value: 31.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP16 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fp16/retinanet_r50_fpn_fp16_1x_coco/retinanet_r50_fpn_fp16_1x_coco_20200702-0dbfb212.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..519c4dbacb1a876dcd973f2a82ddeef98787619d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fp16/retinanet_r50_fpn_fp16_1x_coco.py @@ -0,0 +1,3 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +# fp16 settings +fp16 = dict(loss_scale=512.) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/README.md new file mode 100644 index 0000000000000000000000000000000000000000..10243e0854b1f9b7cfa2593e582bc4f6b57dc862 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/README.md @@ -0,0 +1,29 @@ +# Feature Pyramid Grids + +## Introduction + +```latex +@article{chen2020feature, + title={Feature pyramid grids}, + author={Chen, Kai and Cao, Yuhang and Loy, Chen Change and Lin, Dahua and Feichtenhofer, Christoph}, + journal={arXiv preprint arXiv:2004.03580}, + year={2020} +} +``` + +## Results and Models + +We benchmark the new training schedule (crop training, large batch, unfrozen BN, 50 epochs) introduced in NAS-FPN. +All backbones are Resnet-50 in pytorch style. + +| Method | Neck | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:------------:|:-----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:-------:|:--------:| +| Faster R-CNN | FPG | 50e | 20.0 | - | 42.2 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg_crop640_50e_coco/faster_rcnn_r50_fpg_crop640_50e_coco-76220505.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg_crop640_50e_coco/20210218_223520.log.json) | +| Faster R-CNN | FPG-chn128 | 50e | 11.9 | - | 41.2 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco/faster_rcnn_r50_fpg-chn128_crop640_50e_coco-24257de9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco/20210218_221412.log.json) | +| Mask R-CNN | FPG | 50e | 23.2 | - | 42.7 | 37.8 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg_crop640_50e_coco/mask_rcnn_r50_fpg_crop640_50e_coco-c5860453.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg_crop640_50e_coco/20210222_205447.log.json) | +| Mask R-CNN | FPG-chn128 | 50e | 15.3 | - | 41.7 | 36.9 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco/mask_rcnn_r50_fpg-chn128_crop640_50e_coco-5c6ea10d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco/20210223_025039.log.json) | +| RetinaNet | FPG | 50e | 20.8 | - | 40.5 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/retinanet_r50_fpg_crop640_50e_coco.py) 
|[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg_crop640_50e_coco/retinanet_r50_fpg_crop640_50e_coco-46fdd1c6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg_crop640_50e_coco/20210225_143957.log.json) | +| RetinaNet | FPG-chn128 | 50e | 19.9 | - | 40.3 | - |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco/retinanet_r50_fpg-chn128_crop640_50e_coco-5cf33c76.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco/20210225_184328.log.json) | + +**Note**: Chn128 reduces the number of feature and conv channels from 256 (the default) to 128 in the +neck and bbox head, which greatly decreases memory consumption without sacrificing much precision. diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4535034efa3f4c4a09064a753a2bbde68b6cd2f2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py @@ -0,0 +1,9 @@ +_base_ = 'faster_rcnn_r50_fpg_crop640_50e_coco.py' + +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + neck=dict(out_channels=128, inter_channels=128), + rpn_head=dict(in_channels=128), + roi_head=dict( + bbox_roi_extractor=dict(out_channels=128), + bbox_head=dict(in_channels=128))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3ab2a2c5ef04fc38a686065167df62eb3d67266d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py @@ -0,0 +1,48 @@ +_base_ = 'faster_rcnn_r50_fpn_crop640_50e_coco.py' + +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + neck=dict( + type='FPG', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + inter_channels=256, + num_outs=5, + stack_times=9, + paths=['bu'] * 9, + same_down_trans=None, + same_up_trans=dict( + type='conv', + kernel_size=3, + stride=2, + padding=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_lateral_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_down_trans=dict( + type='interpolation_conv', + mode='nearest', + kernel_size=3, + norm_cfg=norm_cfg, + order=('act', 'conv', 'norm'), + inplace=False), + across_up_trans=None, + across_skip_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + output_trans=dict( + type='last_conv', + kernel_size=3, + order=('act', 'conv', 'norm'), + inplace=False), + norm_cfg=norm_cfg, + skip_inds=[(0, 1, 2, 3), (0, 1, 2), (0, 1), (0, ), ()])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/faster_rcnn_r50_fpn_crop640_50e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/faster_rcnn_r50_fpn_crop640_50e_coco.py new file mode 100644 index
0000000000000000000000000000000000000000..95f4e91f203bad8367942fc24b838da9fbf62947 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/faster_rcnn_r50_fpn_crop640_50e_coco.py @@ -0,0 +1,68 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + backbone=dict(norm_cfg=norm_cfg, norm_eval=False), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict(bbox_head=dict(norm_cfg=norm_cfg))) +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=(640, 640), + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=(640, 640)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=(640, 640)), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(640, 640), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +optimizer = dict( + type='SGD', + lr=0.08, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[30, 40]) +# runtime settings +runner = dict(max_epochs=50) +evaluation = dict(interval=2) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..baa4a5affc9b3ead0080d993b14f0d00392c2de5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py @@ -0,0 +1,10 @@ +_base_ = 'mask_rcnn_r50_fpg_crop640_50e_coco.py' + +model = dict( + neck=dict(out_channels=128, inter_channels=128), + rpn_head=dict(in_channels=128), + roi_head=dict( + bbox_roi_extractor=dict(out_channels=128), + bbox_head=dict(in_channels=128), + mask_roi_extractor=dict(out_channels=128), + mask_head=dict(in_channels=128))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3c9ea27617c85c54309ac454fff253a6d0462735 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py @@ -0,0 +1,48 @@ +_base_ = 'mask_rcnn_r50_fpn_crop640_50e_coco.py' + +norm_cfg = dict(type='BN', requires_grad=True) +model = 
dict( + neck=dict( + type='FPG', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + inter_channels=256, + num_outs=5, + stack_times=9, + paths=['bu'] * 9, + same_down_trans=None, + same_up_trans=dict( + type='conv', + kernel_size=3, + stride=2, + padding=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_lateral_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_down_trans=dict( + type='interpolation_conv', + mode='nearest', + kernel_size=3, + norm_cfg=norm_cfg, + order=('act', 'conv', 'norm'), + inplace=False), + across_up_trans=None, + across_skip_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + output_trans=dict( + type='last_conv', + kernel_size=3, + order=('act', 'conv', 'norm'), + inplace=False), + norm_cfg=norm_cfg, + skip_inds=[(0, 1, 2, 3), (0, 1, 2), (0, 1), (0, ), ()])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/mask_rcnn_r50_fpn_crop640_50e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/mask_rcnn_r50_fpn_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8dfdbb44a49bf75e7460c49285046b7f38cdfc75 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/mask_rcnn_r50_fpn_crop640_50e_coco.py @@ -0,0 +1,74 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + backbone=dict(norm_cfg=norm_cfg, norm_eval=False), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + norm_cfg=norm_cfg, + num_outs=5), + roi_head=dict( + bbox_head=dict(norm_cfg=norm_cfg), mask_head=dict(norm_cfg=norm_cfg))) +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=(640, 640), + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=(640, 640)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=(640, 640)), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(640, 640), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +optimizer = dict( + type='SGD', + lr=0.08, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[30, 40]) +# runtime settings +runner = dict(max_epochs=50) +evaluation = dict(interval=2) diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..31e80a71a49c7ca163f1ee62b997f92ffe24f96e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/metafile.yml @@ -0,0 +1,99 @@ +Collections: + - Name: Feature Pyramid Grids + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Feature Pyramid Grids + Paper: https://arxiv.org/abs/2004.03580 + README: configs/fpg/README.md + +Models: + - Name: faster_rcnn_r50_fpg_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/faster_rcnn_r50_fpg_crop640_50e_coco.py + Metadata: + Training Memory (GB): 20.0 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg_crop640_50e_coco/faster_rcnn_r50_fpg_crop640_50e_coco-76220505.pth + + - Name: faster_rcnn_r50_fpg-chn128_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco.py + Metadata: + Training Memory (GB): 11.9 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/faster_rcnn_r50_fpg-chn128_crop640_50e_coco/faster_rcnn_r50_fpg-chn128_crop640_50e_coco-24257de9.pth + + - Name: mask_rcnn_r50_fpg_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/mask_rcnn_r50_fpg_crop640_50e_coco.py + Metadata: + Training Memory (GB): 23.2 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg_crop640_50e_coco/mask_rcnn_r50_fpg_crop640_50e_coco-c5860453.pth + + - Name: mask_rcnn_r50_fpg-chn128_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco.py + Metadata: + Training Memory (GB): 15.3 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/mask_rcnn_r50_fpg-chn128_crop640_50e_coco/mask_rcnn_r50_fpg-chn128_crop640_50e_coco-5c6ea10d.pth + + - Name: retinanet_r50_fpg_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/retinanet_r50_fpg_crop640_50e_coco.py + Metadata: + Training Memory (GB): 20.8 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg_crop640_50e_coco/retinanet_r50_fpg_crop640_50e_coco-46fdd1c6.pth + + - Name: retinanet_r50_fpg-chn128_crop640_50e_coco + In Collection: Feature Pyramid Grids + Config: configs/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py + Metadata: + Training Memory (GB): 19.9 + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco/retinanet_r50_fpg-chn128_crop640_50e_coco-5cf33c76.pth diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9a6cf7e56a4f23a42d3905560a9b8035d6d935ff --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/retinanet_r50_fpg-chn128_crop640_50e_coco.py @@ -0,0 +1,5 @@ +_base_ = 'retinanet_r50_fpg_crop640_50e_coco.py' + +model = dict( + neck=dict(out_channels=128, inter_channels=128), + bbox_head=dict(in_channels=128)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/retinanet_r50_fpg_crop640_50e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/retinanet_r50_fpg_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..504ed5ec5040559b3d10f7caf8a970005a1a92d7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fpg/retinanet_r50_fpg_crop640_50e_coco.py @@ -0,0 +1,53 @@ +_base_ = '../nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py' + +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + neck=dict( + _delete_=True, + type='FPG', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + inter_channels=256, + num_outs=5, + add_extra_convs=True, + start_level=1, + stack_times=9, + paths=['bu'] * 9, + same_down_trans=None, + same_up_trans=dict( + type='conv', + kernel_size=3, + stride=2, + padding=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_lateral_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + across_down_trans=dict( + type='interpolation_conv', + mode='nearest', + kernel_size=3, + norm_cfg=norm_cfg, + order=('act', 'conv', 'norm'), + inplace=False), + across_up_trans=None, + across_skip_trans=dict( + type='conv', + kernel_size=1, + norm_cfg=norm_cfg, + inplace=False, + order=('act', 'conv', 'norm')), + output_trans=dict( + type='last_conv', + kernel_size=3, + order=('act', 'conv', 'norm'), + inplace=False), + norm_cfg=norm_cfg, + skip_inds=[(0, 1, 2, 3), (0, 1, 2), (0, 1), (0, ), ()])) + +evaluation = dict(interval=2) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5014bc07e64a490dc0b7aa870511168e5c92e5c7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/README.md @@ -0,0 +1,27 @@ +# FreeAnchor: Learning to Match Anchors for Visual Object Detection + +## Introduction + + + +```latex +@inproceedings{zhang2019freeanchor, + title = {{FreeAnchor}: Learning to Match Anchors for Visual Object Detection}, + author = {Zhang, Xiaosong and Wan, Fang and Liu, Chang and Ji, Rongrong and Ye, Qixiang}, + booktitle = {Neural Information Processing Systems}, + year = {2019} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:--------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | 4.9 | 18.4 | 38.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco/retinanet_free_anchor_r50_fpn_1x_coco_20200130-0f67375f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco/retinanet_free_anchor_r50_fpn_1x_coco_20200130_095625.log.json) | +| R-101 | pytorch | 1x | 6.8 | 14.9 | 40.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco/retinanet_free_anchor_r101_fpn_1x_coco_20200130-358324e6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco/retinanet_free_anchor_r101_fpn_1x_coco_20200130_100723.log.json) | +| X-101-32x4d | pytorch | 1x | 8.1 | 11.1 | 41.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco/retinanet_free_anchor_x101_32x4d_fpn_1x_coco_20200130-d4846968.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco/retinanet_free_anchor_x101_32x4d_fpn_1x_coco_20200130_095627.log.json) | + +**Notes:** + +- We use 8 GPUs with 2 images/GPU. +- For more settings and models, please refer to the [official repo](https://github.com/zhangxiaosong18/FreeAnchor). diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..7e78cf4e80c617b04bd3a654119a334b1915cc7d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/metafile.yml @@ -0,0 +1,74 @@ +Collections: + - Name: FreeAnchor + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FreeAnchor + - ResNet + Paper: https://arxiv.org/abs/1909.02466 + README: configs/free_anchor/README.md + +Models: + - Name: retinanet_free_anchor_r50_fpn_1x_coco + In Collection: FreeAnchor + Config: configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.9 + inference time (ms/im): + - value: 54.35 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco/retinanet_free_anchor_r50_fpn_1x_coco_20200130-0f67375f.pth + + - Name: retinanet_free_anchor_r101_fpn_1x_coco + In Collection: FreeAnchor + Config: configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.8 + inference time (ms/im): + - value: 67.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco/retinanet_free_anchor_r101_fpn_1x_coco_20200130-358324e6.pth + + - Name: retinanet_free_anchor_x101_32x4d_fpn_1x_coco + In Collection: FreeAnchor + Config: 
configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 8.1 + inference time (ms/im): + - value: 90.09 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco/retinanet_free_anchor_x101_32x4d_fpn_1x_coco_20200130-d4846968.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f4aea53cc39f4fd441ae9c9f3a6f541b2fa36929 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/retinanet_free_anchor_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './retinanet_free_anchor_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..28f983c29edd071b32a50f18ac7b3f5c1bfdda88 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py @@ -0,0 +1,22 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +model = dict( + bbox_head=dict( + _delete_=True, + type='FreeAnchorRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=0.75))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..65f8a9e2a4d221732dcf55a4a4d4b07041271668 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/free_anchor/retinanet_free_anchor_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './retinanet_free_anchor_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/README.md new file mode 100644 index 0000000000000000000000000000000000000000..4d64392dbb67b2636a51be536eb818460536c07e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/README.md @@ -0,0 +1,45 @@ +# Feature Selective Anchor-Free Module for Single-Shot Object Detection + + 
+ +FSAF is an anchor-free method published in CVPR2019 ([https://arxiv.org/pdf/1903.00621.pdf](https://arxiv.org/pdf/1903.00621.pdf)). +In essence, it is equivalent to an anchor-based method with only one anchor at each feature-map position in each FPN level, +and this is how we implement it. +Only the anchor-free branch is released, for better compatibility with the current framework and a smaller computational budget. + +In the original paper, locations within the central 0.2-0.5 region of a ground-truth box are tagged as ignored. However, +it is empirically found that a hard threshold (0.2-0.2) gives a further gain in performance (see the table below). + +## Main Results + +### Results on R50/R101/X101-FPN + +| Backbone | ignore range | ms-train| Lr schd |Train Mem (GB)| Train time (s/iter) | Inf time (fps) | box AP | Config | Download | +|:----------:| :-------: |:-------:|:-------:|:------------:|:---------------:|:--------------:|:-------------:|:------:|:--------:| +| R-50 | 0.2-0.5 | N | 1x | 3.15 | 0.43 | 12.3 | 36.0 (35.9) | | [model](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco_20200715-b555b0e0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco/fsaf_pscale0.2_nscale0.5_r50_fpn_1x_coco_20200715_094657.log.json) | +| R-50 | 0.2-0.2 | N | 1x | 3.15 | 0.43 | 13.0 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fsaf/fsaf_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r50_fpn_1x_coco/fsaf_r50_fpn_1x_coco-94ccc51f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r50_fpn_1x_coco/fsaf_r50_fpn_1x_coco_20200428_072327.log.json)| +| R-101 | 0.2-0.2 | N | 1x | 5.08 | 0.58 | 10.8 | 39.3 (37.9) | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fsaf/fsaf_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r101_fpn_1x_coco/fsaf_r101_fpn_1x_coco-9e71098f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r101_fpn_1x_coco/fsaf_r101_fpn_1x_coco_20200428_160348.log.json)| +| X-101 | 0.2-0.2 | N | 1x | 9.38 | 1.23 | 5.6 | 42.4 (41.0) | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_x101_64x4d_fpn_1x_coco/fsaf_x101_64x4d_fpn_1x_coco-e3f6e6fd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_x101_64x4d_fpn_1x_coco/fsaf_x101_64x4d_fpn_1x_coco_20200428_160424.log.json)| + +**Notes:** + +- *1x means the model is trained for 12 epochs.* +- *AP values in the brackets represent those reported in the original paper.* +- *All results are obtained with a single model and single-scale test.* +- *X-101 backbone represents ResNeXt-101-64x4d.* +- *All pretrained backbones use pytorch style.* +- *All models are trained on 8 Titan-XP GPUs and tested on a single GPU.* + +## Citations + +The BibTeX reference is as follows.
+ +```latex +@inproceedings{zhu2019feature, + title={Feature Selective Anchor-Free Module for Single-Shot Object Detection}, + author={Zhu, Chenchen and He, Yihui and Savvides, Marios}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + pages={840--849}, + year={2019} +} +``` diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/fsaf_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/fsaf_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..12b49fed5b6cd617aa9c05d76ed737d755992a34 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/fsaf_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './fsaf_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/fsaf_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/fsaf_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..67f3ec1c4c16fb9bd041dbb3a24d269a83145f26 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/fsaf_r50_fpn_1x_coco.py @@ -0,0 +1,48 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +# model settings +model = dict( + type='FSAF', + bbox_head=dict( + type='FSAFHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + reg_decoded_bbox=True, + # Only anchor-free branch is implemented. The anchor generator only + # generates 1 anchor at each feature point, as a substitute of the + # grid of features. + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=1, + scales_per_octave=1, + ratios=[1.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict(_delete_=True, type='TBLRBBoxCoder', normalizer=4.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0, + reduction='none'), + loss_bbox=dict( + _delete_=True, + type='IoULoss', + eps=1e-6, + loss_weight=1.0, + reduction='none')), + # training and testing settings + train_cfg=dict( + assigner=dict( + _delete_=True, + type='CenterRegionAssigner', + pos_scale=0.2, + neg_scale=0.2, + min_pos_iof=0.01), + allowed_border=-1, + pos_weight=-1, + debug=False)) +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=10, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..89c0c6344aba6e6eae5657eff60745645dd1e8dc --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './fsaf_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/metafile.yml new file mode 100644 index 
0000000000000000000000000000000000000000..012e891b0d9129f39feef2329691cf6cdda88c50 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/fsaf/metafile.yml @@ -0,0 +1,75 @@ +Collections: + - Name: FSAF + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x Titan-XP GPUs + Architecture: + - FPN + - FSAF + - ResNet + Paper: https://arxiv.org/abs/1903.00621 + README: configs/fsaf/README.md + +Models: + - Name: fsaf_r50_fpn_1x_coco + In Collection: FSAF + Config: configs/fsaf/fsaf_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.15 + inference time (ms/im): + - value: 76.92 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r50_fpn_1x_coco/fsaf_r50_fpn_1x_coco-94ccc51f.pth + + - Name: fsaf_r101_fpn_1x_coco + In Collection: FSAF + Config: configs/fsaf/fsaf_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.08 + inference time (ms/im): + - value: 92.59 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_r101_fpn_1x_coco/fsaf_r101_fpn_1x_coco-9e71098f.pth + + - Name: fsaf_x101_64x4d_fpn_1x_coco + In Collection: FSAF + Config: configs/fsaf/fsaf_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 9.38 + inference time (ms/im): + - value: 178.57 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/fsaf/fsaf_x101_64x4d_fpn_1x_coco/fsaf_x101_64x4d_fpn_1x_coco-e3f6e6fd.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..0ee6a947f4675573a854038fd01121dddf2052df --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/README.md @@ -0,0 +1,59 @@ +# GCNet for Object Detection + +By [Yue Cao](http://yue-cao.me), [Jiarui Xu](http://jerryxu.net), [Stephen Lin](https://scholar.google.com/citations?user=c3PYmxUAAAAJ&hl=en), Fangyun Wei, [Han Hu](https://sites.google.com/site/hanhushomepage/). + +We provide config files to reproduce the results in the paper for +["GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond"](https://arxiv.org/abs/1904.11492) on COCO object detection. + +## Introduction + + + +**GCNet** was initially described in [arxiv](https://arxiv.org/abs/1904.11492). By absorbing the advantages of Non-Local Networks (NLNet) and Squeeze-Excitation Networks (SENet), GCNet provides a simple, fast and effective approach to global context modeling, which generally outperforms both NLNet and SENet on major benchmarks for various recognition tasks.
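[Editor's note: as orientation for the `ContextBlock` plugin configs added below, here is a minimal, self-contained sketch of the global context computation described above, in plain PyTorch. It is illustrative only: the class name `GCBlock` and its layout are assumptions of this sketch, not mmcv's actual `ContextBlock` API (which also supports other pooling and fusion types).]

```python
import torch
import torch.nn as nn


class GCBlock(nn.Module):
    """Illustrative Global Context block (not mmcv's ContextBlock API)."""

    def __init__(self, channels, ratio=1. / 16):
        super().__init__()
        # `r4`/`r16` in the tables below correspond to ratio 1/4 and 1/16.
        hidden = max(1, int(channels * ratio))
        self.attn = nn.Conv2d(channels, 1, kernel_size=1)  # attention logits per position
        self.transform = nn.Sequential(                    # channel-wise bottleneck transform
            nn.Conv2d(channels, hidden, kernel_size=1),
            nn.LayerNorm([hidden, 1, 1]),
            nn.ReLU(inplace=True),
            nn.Conv2d(hidden, channels, kernel_size=1))

    def forward(self, x):
        n, c, h, w = x.shape
        # Context modeling: softmax attention over all H*W positions, then a
        # weighted sum that yields one global context vector per sample.
        weights = torch.softmax(self.attn(x).reshape(n, 1, h * w), dim=-1)   # (N, 1, HW)
        context = torch.bmm(x.reshape(n, c, h * w), weights.transpose(1, 2))  # (N, C, 1)
        # Fusion: transform the context and add it back at every position.
        return x + self.transform(context.reshape(n, c, 1, 1))
```

With `channels=256`, `GCBlock(256, ratio=1. / 4)` matches the `r4` rows of the result tables and `ratio=1. / 16` the `r16` rows.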
+ +## Citing GCNet + +```latex +@article{cao2019GCNet, + title={GCNet: Non-local Networks Meet Squeeze-Excitation Networks and Beyond}, + author={Cao, Yue and Xu, Jiarui and Lin, Stephen and Wei, Fangyun and Hu, Han}, + journal={arXiv preprint arXiv:1904.11492}, + year={2019} +} +``` + +## Results and models + +The results on COCO 2017val are shown in the tables below. + +| Backbone | Model | Context | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------: | :--------------: | :------------: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | Mask | GC(c3-c5, r16) | 1x | 5.0 | | 39.7 | 35.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco_20200515_211915-187da160.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco_20200515_211915.log.json) | +| R-50-FPN | Mask | GC(c3-c5, r4) | 1x | 5.1 | 15.0 | 39.9 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco_20200204-17235656.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco_20200204_024626.log.json) | +| R-101-FPN | Mask | GC(c3-c5, r16) | 1x | 7.6 | 11.4 | 41.3 | 37.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco_20200205-e58ae947.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco_20200205_192835.log.json) | +| R-101-FPN | Mask | GC(c3-c5, r4) | 1x | 7.8 | 11.6 | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco_20200206-af22dc9d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco_20200206_112128.log.json) | + +| Backbone | Model | Context | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------: | :--------------: | :------------: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :-------: | +| R-50-FPN | Mask | - | 1x | 4.4 | 16.6 | 38.4 | 34.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco_20200202-bb3eb55c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco_20200202_214122.log.json) | +| R-50-FPN | Mask | GC(c3-c5, r16) | 1x | 5.0 | 15.5 | 
40.4 | 36.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200202-587b99aa.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200202_174907.log.json) | +| R-50-FPN | Mask | GC(c3-c5, r4) | 1x | 5.1 | 15.1 | 40.7 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202-50b90e5c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202_085547.log.json) | +| R-101-FPN | Mask | - | 1x | 6.4 | 13.3 | 40.5 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco_20200210-81658c8a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco_20200210_220422.log.json) | +| R-101-FPN | Mask | GC(c3-c5, r16) | 1x | 7.6 | 12.0 | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200207-945e77ca.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200207_015330.log.json) | +| R-101-FPN | Mask | GC(c3-c5, r4) | 1x | 7.8 | 11.8 | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206-8407a3f0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206_142508.log.json) | +| X-101-FPN | Mask | - | 1x | 7.6 | 11.3 | 42.4 | 37.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200211-7584841c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200211_054326.log.json) | +| X-101-FPN | Mask | GC(c3-c5, r16) | 1x | 8.8 | 9.8 | 43.5 | 38.6 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211-cbed3d2c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211_164715.log.json) | +| X-101-FPN | Mask | GC(c3-c5, r4) | 1x | 9.0 | 9.7 | 43.9 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200212-68164964.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200212_070942.log.json) | +| X-101-FPN | Cascade Mask | - | 1x | 9.2 | 8.4 | 44.7 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200310-d5ad2a5e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200310_115217.log.json) | +| X-101-FPN | Cascade Mask | GC(c3-c5, r16) | 1x | 10.3 | 7.7 | 46.2 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211-10bf2463.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211_184154.log.json) | +| X-101-FPN | Cascade Mask | GC(c3-c5, r4) | 1x | 10.6 | | 46.4 | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200703_180653-ed035291.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200703_180653.log.json) | +| X-101-FPN | DCN Cascade Mask | - | 1x | | | 47.5 | 40.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco_20210615_211019-abbc39ea.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco_20210615_211019.log.json)| +| X-101-FPN | DCN Cascade Mask | GC(c3-c5, r16) | 1x | | | 48.0 | 41.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco_20210615_215648-44aa598a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco_20210615_215648.log.json) | +| X-101-FPN | DCN Cascade Mask | GC(c3-c5, r4) | 1x | | | 47.9 | 41.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco_20210615_161851-720338ec.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco_20210615_161851.log.json) | + +**Notes:** + +- `SyncBN` is added to the backbone for all models in the second table. +- `GC` denotes that a Global Context (GC) block is inserted after the 1x1 conv of the backbone bottlenecks. +- `DCN` denotes that the 3x3 conv is replaced with a 3x3 deformable convolution in the `c3-c5` stages of the backbone. +- `r4` and `r16` denote ratio 4 and ratio 16 in the GC block, respectively.
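[Editor's note: the plugin syntax used by the configs that follow is worth decoding once. `stages=(False, True, True, True)` enables the block in the last three ResNet stages (`c3`-`c5`) but not `c2`, and `position='after_conv3'` inserts it after the bottleneck's final 1x1 conv, which matches the `gcb_c3-c5` file names. Below is a hedged usage sketch for running one of the released models, assuming an mmdetection v2-style environment; the local checkpoint path and test image name are placeholders, not part of this change.]

```python
# Hedged sketch: build a GCNet config from this directory and run inference
# with mmdetection's high-level API. File names below are illustrative and
# assume the checkpoint from the table above was downloaded locally.
from mmdet.apis import inference_detector, init_detector

config_file = 'configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py'
checkpoint_file = 'mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco_20200204-17235656.pth'

# The backbone plugins (GC blocks after conv3 in stages c3-c5) are
# instantiated from the config before the released weights are loaded.
model = init_detector(config_file, checkpoint_file, device='cuda:0')

# For a Mask R-CNN model this returns (bbox_results, segm_results).
result = inference_detector(model, 'demo.jpg')
```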
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5118895f00345a42fdbc6d2edba084ccd3f1a3c8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..413499dd6d3fe88e91e357a62461f47f037fcedf --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..50689aadf6cab9414aab1a7a9e72ef8231355e4f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..13672312a8f5c57c5799ca6df4d52fed103287b4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..50883ffeb16369ea6210f2ece8fc2d7e084b0134 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..31fdd070595ac0512a39075bb045dd18035d3f14 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ad6ad47696e6aeb2b3505abab0bd2d49d3b7aa83 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..29f91674c6d54bfa6fdcfcb5b7e2ec2a2bbf81fa --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6e1c5d0cadfb9fb3a4f8645e28a8e67fc499e900 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..781dba78d68e77fa7eee15f5bbcc539731f8378d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..32972de857b3c4f43170dcd3e7fbce76425f094d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d299b69f576a2547de1f7d9edd171d56ab002d0a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5ac908e60c1f964bdd6c3e61933a37c04d487bfb --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict(plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0308a567c147413688c9da679d06f93b0e154d88 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e04780c50f96929997c279b23fe5fa427657039b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..980f8191d4c07eb35e338bd87e3b73b06b3214ad --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f0c96e58b6131f2958f28c56b9d8384d5b4746f7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), norm_eval=False)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7fb8e82ece225ab6f88f1f4f83bea56a42cf1a57 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b1ddbee3b4b79e79bb2a3faf30604f2465612728 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + plugins=[ + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 4), + stages=(False, True, True, True), + position='after_conv3') + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..5a15a9e5ae04b33a383374617817cf36ceb74eab --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gcnet/metafile.yml @@ -0,0 +1,435 @@ +Collections: + - Name: GCNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Global Context Block + - FPN + - RPN + - ResNet + - ResNeXt + Paper: https://arxiv.org/abs/1904.11492 + README: configs/gcnet/README.md + +Models: + - Name: mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r16_gcb_c3-c5_1x_coco_20200515_211915-187da160.pth + + - Name: mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.1 + inference time (ms/im): + - value: 66.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_r4_gcb_c3-c5_1x_coco_20200204-17235656.pth + + - Name: mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 87.72 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r16_gcb_c3-c5_1x_coco_20200205-e58ae947.pth + + - Name: mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.8 + inference time (ms/im): + - value: 86.21 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_r4_gcb_c3-c5_1x_coco_20200206-af22dc9d.pth + + - Name: mask_rcnn_r50_fpn_syncbn-backbone_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 60.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + 
resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_1x_coco_20200202-bb3eb55c.pth + + - Name: mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + inference time (ms/im): + - value: 64.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200202-587b99aa.pth + + - Name: mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.1 + inference time (ms/im): + - value: 66.23 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202-50b90e5c.pth + + - Name: mask_rcnn_r101_fpn_syncbn-backbone_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 75.19 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_1x_coco_20200210-81658c8a.pth + + - Name: mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 83.33 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200207-945e77ca.pth + + - Name: mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 7.8 + inference time (ms/im): + - value: 84.75 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: 
Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206-8407a3f0.pth + + - Name: mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200211-7584841c.pth + + - Name: mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 8.8 + inference time (ms/im): + - value: 102.04 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211-cbed3d2c.pth + + - Name: mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 9.0 + inference time (ms/im): + - value: 103.09 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200212-68164964.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco.py + Metadata: + Training Memory (GB): 9.2 + inference time (ms/im): + - value: 119.05 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_1x_coco_20200310-d5ad2a5e.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time (ms/im): + - value: 129.87 + hardware: V100 + backend: PyTorch + 
batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r16_gcb_c3-c5_1x_coco_20200211-10bf2463.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 10.6 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200703_180653-ed035291.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_1x_coco_20210615_211019-abbc39ea.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 48.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco_20210615_215648-44aa598a.pth + + - Name: cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco + In Collection: GCNet + Config: configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r4_gcb_c3-c5_1x_coco_20210615_161851-720338ec.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/README.md new file mode 100644 index 0000000000000000000000000000000000000000..51a6aa1b6abee57fd234af3f9aeed0314a32e141 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/README.md @@ -0,0 +1,32 @@ +# Generalized Focal Loss: Learning Qualified and Distributed Bounding Boxes for Dense Object Detection + +## Introduction + + + +We provide config 
files to reproduce the object detection results in the paper [Generalized Focal Loss: Learning Qualified and Distributed Bounding Boxes for Dense Object Detection](https://arxiv.org/abs/2006.04388) + +```latex +@article{li2020generalized, + title={Generalized Focal Loss: Learning Qualified and Distributed Bounding Boxes for Dense Object Detection}, + author={Li, Xiang and Wang, Wenhai and Wu, Lijun and Chen, Shuo and Hu, Xiaolin and Li, Jun and Tang, Jinhui and Yang, Jian}, + journal={arXiv preprint arXiv:2006.04388}, + year={2020} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Multi-scale Training| Inf time (fps) | box AP | Config | Download | +|:-----------------:|:-------:|:-------:|:-------------------:|:--------------:|:------:|:------:|:--------:| +| R-50 | pytorch | 1x | No | 19.5 | 40.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_1x_coco/gfl_r50_fpn_1x_coco_20200629_121244-25944287.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_1x_coco/gfl_r50_fpn_1x_coco_20200629_121244.log.json) | +| R-50 | pytorch | 2x | Yes | 19.5 | 42.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_mstrain_2x_coco/gfl_r50_fpn_mstrain_2x_coco_20200629_213802-37bb1edc.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_mstrain_2x_coco/gfl_r50_fpn_mstrain_2x_coco_20200629_213802.log.json) | +| R-101 | pytorch | 2x | Yes | 14.7 | 44.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126-dd12f847.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126.log.json) | +| R-101-dcnv2 | pytorch | 2x | Yes | 12.9 | 47.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20200630_102002-134b07df.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20200630_102002.log.json) | +| X-101-32x4d | pytorch | 2x | Yes | 12.1 | 45.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco/gfl_x101_32x4d_fpn_mstrain_2x_coco_20200630_102002-50c1ffdb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco/gfl_x101_32x4d_fpn_mstrain_2x_coco_20200630_102002.log.json) | +| X-101-32x4d-dcnv2 | pytorch | 2x | Yes | 10.7 | 48.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco_20200630_102002-14a2bf25.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco_20200630_102002.log.json) | + +[1] *1x and 2x mean the model is trained for 90K and 180K iterations, respectively.* \ +[2] *All results are obtained with a single model and without any test-time data augmentation such as multi-scale testing or flipping.* \ +[3] *`dcnv2` denotes deformable convolutional networks v2.* \ +[4] *FPS is tested with a single GeForce RTX 2080Ti GPU, using a batch size of 1.* diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b72c2b6eddfb51a0a61610826e00296e2b76f827 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,15 @@ +_base_ = './gfl_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e33b5c0d27883d5b495c4dae88f550ffbb26a318 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py @@ -0,0 +1,13 @@ +_base_ = './gfl_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cfd4b02391a3d4cae0c060990be1f99b3edebabe --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_r50_fpn_1x_coco.py @@ -0,0 +1,57 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='GFL', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='GFLHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + loss_cls=dict( + type='QualityFocalLoss', + use_sigmoid=True, + beta=2.0, + loss_weight=1.0),
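+        # GFL (https://arxiv.org/abs/2006.04388): QualityFocalLoss above
+        # trains classification against soft IoU-quality targets instead of
+        # hard one-hot labels; DistributionFocalLoss below supervises the
+        # regression branch, which predicts each box side as a discrete
+        # distribution over reg_max + 1 bins, and GIoULoss acts on the
+        # decoded boxes.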
+ loss_dfl=dict(type='DistributionFocalLoss', loss_weight=0.25), + reg_max=16, + loss_bbox=dict(type='GIoULoss', loss_weight=2.0)), + # training and testing settings + train_cfg=dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b8be60145758c191543ef0683234e63f02d8fe60 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py @@ -0,0 +1,22 @@ +_base_ = './gfl_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) +# multi-scale training +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..25398075cb866db8dd49d0bbd48cad19566e77e5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py @@ -0,0 +1,18 @@ +_base_ = './gfl_r50_fpn_mstrain_2x_coco.py' +model = dict( + type='GFL', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, False, True, True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..effda195cb0f18b3137c2b923d59f8cba025ba8e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py @@ -0,0 +1,16 @@ +_base_ = './gfl_r50_fpn_mstrain_2x_coco.py' +model = dict( + type='GFL', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..d11eae00a69c5da8cca35c60a26e32d6f1db3537 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gfl/metafile.yml @@ -0,0 +1,129 @@ +Collections: + - Name: Generalized Focal Loss + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Generalized Focal Loss + - FPN + - ResNet + Paper: https://arxiv.org/abs/2006.04388 + README: configs/gfl/README.md + +Models: + - Name: gfl_r50_fpn_1x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_r50_fpn_1x_coco.py + Metadata: + inference time (ms/im): + - value: 51.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_1x_coco/gfl_r50_fpn_1x_coco_20200629_121244-25944287.pth + + - Name: gfl_r50_fpn_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_r50_fpn_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - value: 51.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r50_fpn_mstrain_2x_coco/gfl_r50_fpn_mstrain_2x_coco_20200629_213802-37bb1edc.pth + + - Name: gfl_r101_fpn_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - value: 68.03 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126-dd12f847.pth + + - Name: gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20200630_102002-134b07df.pth + + - Name: gfl_x101_32x4d_fpn_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - value: 82.64 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_mstrain_2x_coco/gfl_x101_32x4d_fpn_mstrain_2x_coco_20200630_102002-50c1ffdb.pth + + - Name: gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco + In Collection: Generalized Focal Loss + Config: configs/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco.py + Metadata: + inference time (ms/im): + - 
value: 93.46 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 48.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco/gfl_x101_32x4d_fpn_dconv_c4-c5_mstrain_2x_coco_20200630_102002-14a2bf25.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/README.md new file mode 100644 index 0000000000000000000000000000000000000000..2a51ed8abc3e1ca925390c5bc227bf29c3dd6a34 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/README.md @@ -0,0 +1,23 @@ +# Gradient Harmonized Single-stage Detector + +## Introduction + + + +``` +@inproceedings{li2019gradient, + title={Gradient Harmonized Single-stage Detector}, + author={Li, Buyu and Liu, Yu and Wang, Xiaogang}, + booktitle={AAAI Conference on Artificial Intelligence}, + year={2019} +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 4.0 | 3.3 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r50_fpn_1x_coco/retinanet_ghm_r50_fpn_1x_coco_20200130-a437fda3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r50_fpn_1x_coco/retinanet_ghm_r50_fpn_1x_coco_20200130_004213.log.json) | +| R-101-FPN | pytorch | 1x | 6.0 | 4.4 | 39.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r101_fpn_1x_coco/retinanet_ghm_r101_fpn_1x_coco_20200130-c148ee8f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r101_fpn_1x_coco/retinanet_ghm_r101_fpn_1x_coco_20200130_145259.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.2 | 5.1 | 40.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco/retinanet_ghm_x101_32x4d_fpn_1x_coco_20200131-e4333bd0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco/retinanet_ghm_x101_32x4d_fpn_1x_coco_20200131_113653.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.3 | 5.2 | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco/retinanet_ghm_x101_64x4d_fpn_1x_coco_20200131-dd381cef.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco/retinanet_ghm_x101_64x4d_fpn_1x_coco_20200131_113723.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..9c4c934325cb0e37ec9c61ba97cb8252b43e6c4b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/metafile.yml @@ -0,0 +1,96 @@ +Collections: + - Name: GHM + 
Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - GHM-C + - GHM-R + - FPN + - ResNet + Paper: https://arxiv.org/abs/1811.05181 + README: configs/ghm/README.md + +Models: + - Name: retinanet_ghm_r50_fpn_1x_coco + In Collection: GHM + Config: configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 303.03 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r50_fpn_1x_coco/retinanet_ghm_r50_fpn_1x_coco_20200130-a437fda3.pth + + - Name: retinanet_ghm_r101_fpn_1x_coco + In Collection: GHM + Config: configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 227.27 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_r101_fpn_1x_coco/retinanet_ghm_r101_fpn_1x_coco_20200130-c148ee8f.pth + + - Name: retinanet_ghm_x101_32x4d_fpn_1x_coco + In Collection: GHM + Config: configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.2 + inference time (ms/im): + - value: 196.08 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco/retinanet_ghm_x101_32x4d_fpn_1x_coco_20200131-e4333bd0.pth + + - Name: retinanet_ghm_x101_64x4d_fpn_1x_coco + In Collection: GHM + Config: configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time (ms/im): + - value: 192.31 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco/retinanet_ghm_x101_64x4d_fpn_1x_coco_20200131-dd381cef.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..aaf6fc26d323a99a92b0ce266c7c7dc8a919d6f3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/retinanet_ghm_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './retinanet_ghm_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..61b9751057f10f2173b8e7edde12cca53ebbd2d0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/retinanet_ghm_r50_fpn_1x_coco.py @@ -0,0 +1,19 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +model = 
dict( + bbox_head=dict( + loss_cls=dict( + _delete_=True, + type='GHMC', + bins=30, + momentum=0.75, + use_sigmoid=True, + loss_weight=1.0), + loss_bbox=dict( + _delete_=True, + type='GHMR', + mu=0.02, + bins=10, + momentum=0.7, + loss_weight=10.0))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cd2e4cc34b4526ff32d193c30d5884b16c6adf5c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/retinanet_ghm_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './retinanet_ghm_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b6107d8c31bd64dee3a70a1ea5e0167247af6b73 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ghm/retinanet_ghm_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './retinanet_ghm_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/README.md new file mode 100644 index 0000000000000000000000000000000000000000..8001b0d6ef922df5ccf75f037de9005ef6811a46 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/README.md @@ -0,0 +1,44 @@ +# Weight Standardization + +## Introduction + + + +``` +@article{weightstandardization, + author = {Siyuan Qiao and Huiyu Wang and Chenxi Liu and Wei Shen and Alan Yuille}, + title = {Weight Standardization}, + journal = {arXiv preprint arXiv:1903.10520}, + year = {2019}, +} +``` + +## Results and Models + +Faster R-CNN + +| Backbone | Style | Normalization | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:---------:|:-------:|:-------------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | pytorch | GN+WS | 1x | 5.9 | 11.7 | 39.7 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco/faster_rcnn_r50_fpn_gn_ws-all_1x_coco_20200130-613d9fe2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco/faster_rcnn_r50_fpn_gn_ws-all_1x_coco_20200130_210936.log.json) | +| R-101-FPN | pytorch | GN+WS | 1x | 8.9 | 9.0 | 41.7 | - | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco/faster_rcnn_r101_fpn_gn_ws-all_1x_coco_20200205-a93b0d75.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco/faster_rcnn_r101_fpn_gn_ws-all_1x_coco_20200205_232146.log.json) | +| X-50-32x4d-FPN | pytorch | GN+WS | 1x | 7.0 | 10.3 | 40.7 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco_20200203-839c5d9d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco_20200203_220113.log.json) | +| X-101-32x4d-FPN | pytorch | GN+WS | 1x | 10.8 | 7.6 | 42.1 | - | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco_20200212-27da1bc2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco_20200212_195302.log.json) | + +Mask R-CNN + +| Backbone | Style | Normalization | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:---------:|:-------:|:-------------:|:---------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | pytorch | GN+WS | 2x | 7.3 | 10.5 | 40.6 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco/mask_rcnn_r50_fpn_gn_ws-all_2x_coco_20200226-16acb762.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco/mask_rcnn_r50_fpn_gn_ws-all_2x_coco_20200226_062128.log.json) | +| R-101-FPN | pytorch | GN+WS | 2x | 10.3 | 8.6 | 42.0 | 37.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco/mask_rcnn_r101_fpn_gn_ws-all_2x_coco_20200212-ea357cd9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco/mask_rcnn_r101_fpn_gn_ws-all_2x_coco_20200212_213627.log.json) | +| X-50-32x4d-FPN | pytorch | GN+WS | 2x | 8.4 | 9.3 | 41.1 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco_20200216-649fdb6f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco_20200216_201500.log.json) | +| X-101-32x4d-FPN | pytorch | GN+WS | 2x | 12.2 | 7.1 | 42.1 | 37.9 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco_20200319-33fb95b5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco_20200319_104101.log.json) | +| R-50-FPN | pytorch | GN+WS | 20-23-24e | 7.3 | - | 41.1 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco_20200213-487d1283.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco_20200213_035123.log.json) | +| R-101-FPN | pytorch | GN+WS | 20-23-24e | 10.3 | - | 43.1 | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco_20200213-57b5a50f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco_20200213_130142.log.json) | +| X-50-32x4d-FPN | pytorch | GN+WS | 20-23-24e | 8.4 | - | 42.1 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200226-969bcb2c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200226_093732.log.json) | +| X-101-32x4d-FPN | pytorch | GN+WS | 20-23-24e | 12.2 | - | 42.7 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200316-e6cd35ef.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200316_013741.log.json) | + +Note: + +- GN+WS requires about 5% more memory than GN, and it is only 5% slower than GN. +- In the paper, a 20-23-24e lr schedule is used instead of 2x. +- The X-50-GN and X-101-GN pretrained models are also shared by the authors. 
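+
+As a rough illustration of what the `conv_cfg=dict(type='ConvWS')` setting in these configs does, below is a minimal PyTorch sketch of a weight-standardized convolution (illustrative only, not mmcv's actual `ConvWS` implementation; the `WSConv2d` name is made up for this sketch):
+
+```python
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+
+class WSConv2d(nn.Conv2d):
+    """Conv2d that standardizes each output filter's weights (zero mean,
+    unit std over in_channels x kH x kW) before every forward pass, per
+    Weight Standardization (https://arxiv.org/abs/1903.10520)."""
+
+    def forward(self, x):
+        w = self.weight
+        mean = w.mean(dim=(1, 2, 3), keepdim=True)
+        std = w.std(dim=(1, 2, 3), keepdim=True) + 1e-5
+        return F.conv2d(x, (w - mean) / std, self.bias, self.stride,
+                        self.padding, self.dilation, self.groups)
+
+
+# Paired with 32-group GroupNorm, mirroring norm_cfg in these configs.
+block = nn.Sequential(WSConv2d(64, 64, 3, padding=1),
+                      nn.GroupNorm(32, 64), nn.ReLU(inplace=True))
+out = block(torch.randn(2, 64, 56, 56))  # shape preserved: (2, 64, 56, 56)
+```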
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cd2cb2b6348a9555b8c80c3f1398d8989ef3f7a0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://jhu/resnet101_gn_ws'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1b326b88e7309ee217646b5550a23a6796ad5c0b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://jhu/resnet50_gn_ws')), + neck=dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f64ae89178ed351dbe4be80318b9a1da385853c2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,18 @@ +_base_ = './faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://jhu/resnext101_32x4d_gn_ws'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..246851b9f2be4d0e0f129d20692d22acf194308a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py @@ -0,0 +1,18 @@ +_base_ = './faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + type='ResNeXt', + depth=50, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://jhu/resnext50_32x4d_gn_ws'))) diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a790d932152420f5be0a05b21ac122087d315398 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a9fa6a2445020979a217ee3b648d49e5577d2357 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://jhu/resnet101_gn_ws'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..55168085cd085c241bfbb85a76bb230241378faa --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..63be60ff8c117402aa46811ef86ba16aebc76a45 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,20 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://jhu/resnet50_gn_ws')), + neck=dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg), + mask_head=dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg))) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cfa14c99543382328b2cb4ac7c2d0dbb2a562017 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = 
'./mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6498b03fb4fda52a995b5b76da8b02385697ebc1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,19 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +# model settings +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://jhu/resnext101_32x4d_gn_ws'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..79ce0adf1bf760c371bd1a1c3a9b028cef51c4b4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py' +# learning policy +lr_config = dict(step=[20, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7fac3175e3a4e900f5051bd0385a6dd828cef9c7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py @@ -0,0 +1,19 @@ +_base_ = './mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py' +# model settings +conv_cfg = dict(type='ConvWS') +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + type='ResNeXt', + depth=50, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://jhu/resnext50_32x4d_gn_ws'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..6aa87829ab36b554e5df22ce0d2c881409e9b558 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn+ws/metafile.yml @@ -0,0 +1,258 @@ +Collections: + - Name: Weight Standardization + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Group Normalization + - Weight Standardization + Paper: https://arxiv.org/abs/1903.10520 + README: configs/gn+ws/README.md + +Models: + - Name: faster_rcnn_r50_fpn_gn_ws-all_1x_coco + In Collection: Weight Standardization + Config: 
configs/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco.py + Metadata: + Training Memory (GB): 5.9 + inference time (ms/im): + - value: 85.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r50_fpn_gn_ws-all_1x_coco/faster_rcnn_r50_fpn_gn_ws-all_1x_coco_20200130-613d9fe2.pth + + - Name: faster_rcnn_r101_fpn_gn_ws-all_1x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco.py + Metadata: + Training Memory (GB): 8.9 + inference time (ms/im): + - value: 111.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_r101_fpn_gn_ws-all_1x_coco/faster_rcnn_r101_fpn_gn_ws-all_1x_coco_20200205-a93b0d75.pth + + - Name: faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 97.09 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x50_32x4d_fpn_gn_ws-all_1x_coco_20200203-839c5d9d.pth + + - Name: faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco.py + Metadata: + Training Memory (GB): 10.8 + inference time (ms/im): + - value: 131.58 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco/faster_rcnn_x101_32x4d_fpn_gn_ws-all_1x_coco_20200212-27da1bc2.pth + + - Name: mask_rcnn_r50_fpn_gn_ws-all_2x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco.py + Metadata: + Training Memory (GB): 7.3 + inference time (ms/im): + - value: 95.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_2x_coco/mask_rcnn_r50_fpn_gn_ws-all_2x_coco_20200226-16acb762.pth + + - Name: mask_rcnn_r101_fpn_gn_ws-all_2x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco.py + Metadata: + Training Memory (GB): 10.3 + inference time (ms/im): + - value: 116.28 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.7 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_2x_coco/mask_rcnn_r101_fpn_gn_ws-all_2x_coco_20200212-ea357cd9.pth + + - Name: mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco.py + Metadata: + Training Memory (GB): 8.4 + inference time (ms/im): + - value: 107.53 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_2x_coco_20200216-649fdb6f.pth + + - Name: mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco.py + Metadata: + Training Memory (GB): 12.2 + inference time (ms/im): + - value: 140.85 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_2x_coco_20200319-33fb95b5.pth + + - Name: mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco.py + Metadata: + Training Memory (GB): 7.3 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r50_fpn_gn_ws-all_20_23_24e_coco_20200213-487d1283.pth + + - Name: mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco.py + Metadata: + Training Memory (GB): 10.3 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_r101_fpn_gn_ws-all_20_23_24e_coco_20200213-57b5a50f.pth + + - Name: mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco.py + Metadata: + Training Memory (GB): 8.4 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x50_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200226-969bcb2c.pth + + - Name: mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco + In Collection: Weight Standardization + Config: configs/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco.py + Metadata: + Training Memory (GB): 12.2 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + - Task: Instance Segmentation + 
Dataset: COCO + Metrics: + mask AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn%2Bws/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco/mask_rcnn_x101_32x4d_fpn_gn_ws-all_20_23_24e_coco_20200316-e6cd35ef.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5c6e41f383ac48dff0d48d4c5704c867d5aef8d4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/README.md @@ -0,0 +1,31 @@ +# Group Normalization + +## Introduction + + + +```latex +@inproceedings{wu2018group, + title={Group Normalization}, + author={Wu, Yuxin and He, Kaiming}, + booktitle={Proceedings of the European Conference on Computer Vision (ECCV)}, + year={2018} +} +``` + +## Results and Models + +| Backbone | model | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:-------------:|:----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN (d) | Mask R-CNN | 2x | 7.1 | 11.0 | 40.2 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_2x_coco/mask_rcnn_r50_fpn_gn-all_2x_coco_20200206-8eee02a6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_2x_coco/mask_rcnn_r50_fpn_gn-all_2x_coco_20200206_050355.log.json) | +| R-50-FPN (d) | Mask R-CNN | 3x | 7.1 | - | 40.5 | 36.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_3x_coco/mask_rcnn_r50_fpn_gn-all_3x_coco_20200214-8b23b1e5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_3x_coco/mask_rcnn_r50_fpn_gn-all_3x_coco_20200214_063512.log.json) | +| R-101-FPN (d) | Mask R-CNN | 2x | 9.9 | 9.0 | 41.9 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_2x_coco/mask_rcnn_r101_fpn_gn-all_2x_coco_20200205-d96b1b50.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_2x_coco/mask_rcnn_r101_fpn_gn-all_2x_coco_20200205_234402.log.json) | +| R-101-FPN (d) | Mask R-CNN | 3x | 9.9 | | 42.1 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_3x_coco/mask_rcnn_r101_fpn_gn-all_3x_coco_20200513_181609-0df864f4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_3x_coco/mask_rcnn_r101_fpn_gn-all_3x_coco_20200513_181609.log.json) | +| R-50-FPN (c) | Mask R-CNN | 2x | 7.1 | 10.9 | 40.0 | 36.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco_20200207-20d3e849.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco_20200207_225832.log.json) | +| R-50-FPN (c) | Mask R-CNN | 3x | 
7.1 | - | 40.1 | 36.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco_20200225-542aefbc.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco_20200225_235135.log.json) | + +**Notes:** + +- (d) denotes a pretrained model converted from Detectron, and (c) denotes a model contributed and pretrained by [@thangvubk](https://github.com/thangvubk). +- The `3x` schedule decays the learning rate at epochs 28 and 34 and runs for 36 epochs in total. +- **The memory and train/inference time figures are outdated.** diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a505ba0e26246772c9d18874a5552831e2efe33f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py @@ -0,0 +1,7 @@ +_base_ = './mask_rcnn_r50_fpn_gn-all_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet101_gn'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..12a9d17e5592ade405605e3ffb2d4d2fa632d03e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py @@ -0,0 +1,5 @@ +_base_ = './mask_rcnn_r101_fpn_gn-all_2x_coco.py' + +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1de7d98e1034f7330552958cae5ef3ad402caed7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py @@ -0,0 +1,49 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet50_gn')), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg), + mask_head=dict(norm_cfg=norm_cfg))) +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + 
dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f9177196cb91c6bbc6dd4383837819f053b334bb --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py @@ -0,0 +1,5 @@ +_base_ = './mask_rcnn_r50_fpn_gn-all_2x_coco.py' + +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2f430fdab1a825211582b48b0eacab98b55c2167 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py @@ -0,0 +1,17 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + norm_cfg=norm_cfg, + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://contrib/resnet50_gn')), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg), + mask_head=dict(norm_cfg=norm_cfg))) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..66834f08ba398e7621aa8c5a3bfe12a646aecde2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py @@ -0,0 +1,5 @@ +_base_ = './mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py' + +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/gn/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..08209cfce1e97f016c2145f1c2eaa16ec1b347e7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/gn/metafile.yml @@ -0,0 +1,157 @@ +Collections: + - Name: Group Normalization + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Group Normalization + Paper: https://arxiv.org/abs/1803.08494 + README: configs/gn/README.md + +Models: + - Name: mask_rcnn_r50_fpn_gn-all_2x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r50_fpn_gn-all_2x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 90.91 + 
hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_2x_coco/mask_rcnn_r50_fpn_gn-all_2x_coco_20200206-8eee02a6.pth + + - Name: mask_rcnn_r50_fpn_gn-all_3x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r50_fpn_gn-all_3x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_3x_coco/mask_rcnn_r50_fpn_gn-all_3x_coco_20200214-8b23b1e5.pth + + - Name: mask_rcnn_r101_fpn_gn-all_2x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r101_fpn_gn-all_2x_coco.py + Metadata: + Training Memory (GB): 9.9 + inference time (ms/im): + - value: 111.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_2x_coco/mask_rcnn_r101_fpn_gn-all_2x_coco_20200205-d96b1b50.pth + + - Name: mask_rcnn_r101_fpn_gn-all_3x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r101_fpn_gn-all_3x_coco.py + Metadata: + Training Memory (GB): 9.9 + inference time (ms/im): + - value: 111.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r101_fpn_gn-all_3x_coco/mask_rcnn_r101_fpn_gn-all_3x_coco_20200513_181609-0df864f4.pth + + - Name: mask_rcnn_r50_fpn_gn-all_contrib_2x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 91.74 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco/mask_rcnn_r50_fpn_gn-all_contrib_2x_coco_20200207-20d3e849.pth + + - Name: mask_rcnn_r50_fpn_gn-all_contrib_3x_coco + In Collection: Group Normalization + Config: configs/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 91.74 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.2 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/gn/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco/mask_rcnn_r50_fpn_gn-all_contrib_3x_coco_20200225-542aefbc.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..30cb6e06aa349ebaf6edfe09072addd078132fdd --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/README.md @@ -0,0 +1,35 @@ +# Grid R-CNN + +## Introduction + + + +```latex +@inproceedings{lu2019grid, + title={Grid r-cnn}, + author={Lu, Xin and Li, Buyu and Yue, Yuxin and Li, Quanquan and Yan, Junjie}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} + +@article{lu2019gridplus, + title={Grid R-CNN Plus: Faster and Better}, + author={Lu, Xin and Li, Buyu and Yue, Yuxin and Li, Quanquan and Yan, Junjie}, + journal={arXiv preprint arXiv:1906.05688}, + year={2019} +} +``` + +## Results and Models + +| Backbone | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50 | 2x | 5.1 | 15.0 | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco/grid_rcnn_r50_fpn_gn-head_2x_coco_20200130-6cca8223.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco/grid_rcnn_r50_fpn_gn-head_2x_coco_20200130_221140.log.json) | +| R-101 | 2x | 7.0 | 12.6 | 41.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco/grid_rcnn_r101_fpn_gn-head_2x_coco_20200309-d6eca030.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco/grid_rcnn_r101_fpn_gn-head_2x_coco_20200309_164224.log.json) | +| X-101-32x4d | 2x | 8.3 | 10.8 | 42.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco_20200130-d8f0e3ff.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco_20200130_215413.log.json) | +| X-101-64x4d | 2x | 11.3 | 7.7 | 43.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco_20200204-ec76a754.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco_20200204_080641.log.json) | + +**Notes:** + +- All models are trained with 8 GPUs instead of the 32 GPUs used in the original paper. +- The warm-up lasts for 1 epoch, and `2x` here indicates 25 epochs. 
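As a quick usage note, the trained models in the table above can be exercised with mmdetection's high-level inference API. A minimal sketch, assuming an mmdetection 2.x install, a checkpoint downloaded from the table, and a placeholder image path `demo.jpg`:

```python
from mmdet.apis import inference_detector, init_detector

config_file = 'configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py'
checkpoint_file = 'grid_rcnn_r50_fpn_gn-head_2x_coco_20200130-6cca8223.pth'

# Build the detector from the config and load the downloaded weights.
model = init_detector(config_file, checkpoint_file, device='cuda:0')

# For a detection-only model, `result` is a per-class list of
# [x1, y1, x2, y2, score] arrays.
result = inference_detector(model, 'demo.jpg')
model.show_result('demo.jpg', result, score_thr=0.3, out_file='demo_out.jpg')
```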
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1bb5889bc0ce4013ae3e6bf87d04f94417e84ff5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py @@ -0,0 +1,7 @@ +_base_ = './grid_rcnn_r50_fpn_gn-head_2x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4aa00ece55280697fc67bd727077a8c9a58cfa44 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = ['grid_rcnn_r50_fpn_gn-head_2x_coco.py'] +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[8, 11]) +checkpoint_config = dict(interval=1) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..df63cd5d82a3c622ffad6d044e80ebe5f7c8c122 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py @@ -0,0 +1,131 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='GridRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='GridRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + type='Shared2FCBBoxHead', + with_reg=False, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False), + grid_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + grid_head=dict( + 
type='GridHead', + grid_points=9, + num_convs=8, + in_channels=256, + point_feat_channels=64, + norm_cfg=dict(type='GN', num_groups=36), + loss_grid=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=15))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + pos_radius=1, + pos_weight=-1, + max_num_grid=192, + debug=False)), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.03, + nms=dict(type='nms', iou_threshold=0.3), + max_per_img=100))) +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=3665, + warmup_ratio=1.0 / 80, + step=[17, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=25) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3bc8516e223e3f74b003b5566876706ee8398fb1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py @@ -0,0 +1,24 @@ +_base_ = './grid_rcnn_r50_fpn_gn-head_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) +# optimizer +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=3665, + warmup_ratio=1.0 / 80, + step=[17, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=25) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c78f8f6501130a3e4f76269030b92f7f9e29fe07 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py @@ -0,0 +1,13 @@ +_base_ = './grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..c37fb0dc3a15f23b7e09fed7614e26a0b4956a4e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/grid_rcnn/metafile.yml @@ -0,0 +1,96 @@ +Collections: + - Name: Grid R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RPN + - Dilated Convolution + - ResNet + - RoIAlign + Paper: https://arxiv.org/abs/1906.05688 + README: configs/grid_rcnn/README.md + +Models: + - Name: grid_rcnn_r50_fpn_gn-head_2x_coco + In Collection: Grid R-CNN + Config: configs/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py + Metadata: + Training Memory (GB): 5.1 + inference time (ms/im): + - value: 66.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco/grid_rcnn_r50_fpn_gn-head_2x_coco_20200130-6cca8223.pth + + - Name: grid_rcnn_r101_fpn_gn-head_2x_coco + In Collection: Grid R-CNN + Config: configs/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 79.37 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_r101_fpn_gn-head_2x_coco/grid_rcnn_r101_fpn_gn-head_2x_coco_20200309-d6eca030.pth + + - Name: grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco + In Collection: Grid R-CNN + Config: configs/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco.py + Metadata: + Training Memory (GB): 8.3 + inference time (ms/im): + - value: 92.59 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_32x4d_fpn_gn-head_2x_coco_20200130-d8f0e3ff.pth + + - Name: grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco + In Collection: Grid R-CNN + Config: configs/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco.py + Metadata: + Training Memory (GB): 11.3 + inference time (ms/im): + - value: 129.87 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/grid_rcnn/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco/grid_rcnn_x101_64x4d_fpn_gn-head_2x_coco_20200204-ec76a754.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/groie/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/groie/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c38b70b64e673d2ab3dbe18c20913ec462cd834c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/groie/README.md @@ -0,0 +1,65 @@ +# GRoIE + +## A novel Region of Interest Extraction Layer for Instance Segmentation + +By Leonardo Rossi, Akbar Karimi and Andrea Prati from 
+[IMPLab](http://implab.ce.unipr.it/). + +We provide configs to reproduce the results of the paper +"*A novel Region of Interest Extraction Layer for Instance Segmentation*" +on COCO object detection. + +## Introduction + + + +This paper is motivated by the need to overcome the limitations of existing +RoI extractors, which select only one (the best) layer from the FPN. + +Our intuition is that all the layers of FPN retain useful information. + +Therefore, the proposed layer (called Generic RoI Extractor - **GRoIE**) +introduces non-local building blocks and attention mechanisms to boost the +performance. + +## Results and models + +The results on COCO 2017 minival (5k images) are shown in the table below. +The trained models are available +[here](https://drive.google.com/drive/folders/19ssstbq_h0Z1cgxHmJYFO8s1arf3QJbT). + +### Application of GRoIE to different architectures + +| Backbone | Method | Lr schd | box AP | mask AP | Config | Download| +| :-------: | :--------------: | :-----: | :----: | :-----: | :-------:| :--------:| +| R-50-FPN | Faster Original | 1x | 37.4 | | [config](../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| R-50-FPN | + GRoIE | 1x | 38.3 | | [config](./faster_rcnn_r50_fpn_groie_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/groie/faster_rcnn_r50_fpn_groie_1x_coco/faster_rcnn_r50_fpn_groie_1x_coco_20200604_211715-66ee9516.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/faster_rcnn_r50_fpn_groie_1x_coco/faster_rcnn_r50_fpn_groie_1x_coco_20200604_211715.log.json) | +| R-50-FPN | Grid R-CNN | 1x | 39.1 | | [config](./grid_rcnn_r50_fpn_gn-head_1x_coco.py)| [model](https://download.openmmlab.com/mmdetection/v2.0/groie/grid_rcnn_r50_fpn_gn-head_1x_coco/grid_rcnn_r50_fpn_gn-head_1x_coco_20200605_202059-64f00ee8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/grid_rcnn_r50_fpn_gn-head_1x_coco/grid_rcnn_r50_fpn_gn-head_1x_coco_20200605_202059.log.json) | +| R-50-FPN | + GRoIE | 1x | | | [config](./grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py)|| +| R-50-FPN | Mask R-CNN | 1x | 38.2 | 34.7 | [config](../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py)| [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205_050542.log.json) | +| R-50-FPN | + GRoIE | 1x | 39.0 | 36.0 | [config](./mask_rcnn_r50_fpn_groie_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_groie_1x_coco/mask_rcnn_r50_fpn_groie_1x_coco_20200604_211715-50d90c74.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_groie_1x_coco/mask_rcnn_r50_fpn_groie_1x_coco_20200604_211715.log.json) | +| R-50-FPN | GC-Net | 1x | 40.7 | 36.5 | [config](../gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202-50b90e5c.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200202_085547.log.json) | +| R-50-FPN | + GRoIE | 1x | 41.0 | 37.8 | [config](./mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py) |[model](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200604_211715-42eb79e1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200604_211715-42eb79e1.pth) | +| R-101-FPN | GC-Net | 1x | 42.2 | 37.8 | [config](../gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206-8407a3f0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco_20200206_142508.log.json) | +| R-101-FPN | + GRoIE | 1x | | | [config](./mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py)| [model](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200607_224507-8daae01c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200607_224507.log.json) | + +## Citation + +If you use this work or benchmark in your research, please cite this project. + +```latex +@misc{rossi2020novel, + title={A novel Region of Interest Extraction Layer for Instance Segmentation}, + author={Leonardo Rossi and Akbar Karimi and Andrea Prati}, + year={2020}, + eprint={2004.13665}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +## Contact + +The implementation of GRoIE is currently maintained by +[Leonardo Rossi](https://github.com/hachreak/). 
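The core idea behind `GenericRoIExtractor` can be sketched in plain PyTorch. This is a conceptual illustration only, not the mmdetection implementation: the class name `GroiePoolSketch` is made up, the 5x5 `pre` conv stands in for the `pre_cfg` module of the configs that follow, and the attention-based `post_cfg` stage is omitted.

```python
import torch.nn as nn
from torchvision.ops import roi_align


class GroiePoolSketch(nn.Module):
    """Pool each RoI from every FPN level and sum the results,
    instead of assigning the RoI to a single 'best' level."""

    def __init__(self, channels=256, output_size=7,
                 featmap_strides=(4, 8, 16, 32)):
        super().__init__()
        self.output_size = output_size
        self.featmap_strides = featmap_strides
        # Shared 5x5 conv applied per level before aggregation
        # (the role played by `pre_cfg` in the configs below).
        self.pre = nn.Conv2d(channels, channels, kernel_size=5, padding=2)

    def forward(self, feats, rois):
        # feats: list of FPN maps, one [N, C, H_l, W_l] tensor per level;
        # rois: [K, 5] boxes as (batch_index, x1, y1, x2, y2) in image coords.
        acc = 0
        for feat, stride in zip(feats, self.featmap_strides):
            pooled = roi_align(feat, rois, self.output_size,
                               spatial_scale=1.0 / stride, sampling_ratio=2)
            acc = acc + self.pre(pooled)  # aggregation='sum' over levels
        return acc
```

In the real extractor, an attention block (configured via `post_cfg`) then refines the aggregated features before they reach the bbox/mask heads.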
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0fc528bfd49bfc9a262692db78a5f94b46c285af --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py @@ -0,0 +1,25 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8e4b4ab23513a97adf4471ab3b33ca8abdb6dbe5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../grid_rcnn/grid_rcnn_r50_fpn_gn-head_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + grid_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8b83722197c69a51907f43bcb05883deedc37f0c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + 
type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + mask_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/groie/mask_rcnn_r50_fpn_groie_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/groie/mask_rcnn_r50_fpn_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..81dfb4873bdb587626200a3007dc4d57a92c0fd9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/groie/mask_rcnn_r50_fpn_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + mask_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..852c5ca7c5c4ba04f6a5f7dd6dbaf6b2c357a2fa --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = '../gcnet/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py' +# model settings +model = dict( + roi_head=dict( + bbox_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='sum', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)), + mask_roi_extractor=dict( + type='GenericRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32], + pre_cfg=dict( + type='ConvModule', + in_channels=256, + out_channels=256, + kernel_size=5, + padding=2, + inplace=False, + ), + post_cfg=dict( + type='GeneralizedAttention', + 
in_channels=256, + spatial_range=-1, + num_heads=6, + attention_type='0100', + kv_stride=2)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/groie/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/groie/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..5f9ce13f45bb8386079f7d2c6279637165c46938 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/groie/metafile.yml @@ -0,0 +1,88 @@ +Collections: + - Name: GRoIE + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Generic RoI Extractor + - FPN + - RPN + - ResNet + - RoIAlign + Paper: https://arxiv.org/abs/2004.13665 + README: configs/groie/README.md + +Models: + - Name: faster_rcnn_r50_fpn_groie_1x_coco + In Collection: GRoIE + Config: configs/groie/faster_rcnn_r50_fpn_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/groie/faster_rcnn_r50_fpn_groie_1x_coco/faster_rcnn_r50_fpn_groie_1x_coco_20200604_211715-66ee9516.pth + + - Name: grid_rcnn_r50_fpn_gn-head_groie_1x_coco + In Collection: GRoIE + Config: configs/groie/grid_rcnn_r50_fpn_gn-head_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + + - Name: mask_rcnn_r50_fpn_groie_1x_coco + In Collection: GRoIE + Config: configs/groie/mask_rcnn_r50_fpn_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_groie_1x_coco/mask_rcnn_r50_fpn_groie_1x_coco_20200604_211715-50d90c74.pth + + - Name: mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco + In Collection: GRoIE + Config: configs/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r50_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200604_211715-42eb79e1.pth + + - Name: mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco + In Collection: GRoIE + Config: configs/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco_20200607_224507-8daae01c.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/README.md new file mode 100644 index 0000000000000000000000000000000000000000..34d6b0dbe49678461ed8f7d14e2a02b48d30bed3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/README.md @@ -0,0 +1,49 @@ +# Region Proposal by Guided Anchoring + +## Introduction + + + +We provide 
config files to reproduce the results in the CVPR 2019 paper for [Region Proposal by Guided Anchoring](https://arxiv.org/abs/1901.03278). + +```latex +@inproceedings{wang2019region, + title={Region Proposal by Guided Anchoring}, + author={Jiaqi Wang and Kai Chen and Shuo Yang and Chen Change Loy and Dahua Lin}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} +``` + +## Results and Models + +The results on COCO 2017 val are shown in the table below (results on test-dev are usually slightly higher than on val). + +| Method | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | AR 1000 | Config | Download | +| :----: | :-------------: | :-----: | :-----: | :------: | :------------: | :-----: | :------: | :--------: | +| GA-RPN | R-50-FPN | caffe | 1x | 5.3 | 15.8 | 68.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco/ga_rpn_r50_caffe_fpn_1x_coco_20200531-899008a6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco/ga_rpn_r50_caffe_fpn_1x_coco_20200531_011819.log.json) | +| GA-RPN | R-101-FPN | caffe | 1x | 7.3 | 13.0 | 69.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco/ga_rpn_r101_caffe_fpn_1x_coco_20200531-ca9ba8fb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco/ga_rpn_r101_caffe_fpn_1x_coco_20200531_011812.log.json) | +| GA-RPN | X-101-32x4d-FPN | pytorch | 1x | 8.5 | 10.0 | 70.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco/ga_rpn_x101_32x4d_fpn_1x_coco_20200220-c28d1b18.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco/ga_rpn_x101_32x4d_fpn_1x_coco_20200220_221326.log.json) | +| GA-RPN | X-101-64x4d-FPN | pytorch | 1x | 7.1 | 7.5 | 71.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco/ga_rpn_x101_64x4d_fpn_1x_coco_20200225-3c6e1aa2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco/ga_rpn_x101_64x4d_fpn_1x_coco_20200225_152704.log.json) | + +| Method | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :------------: | :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| GA-Faster RCNN | R-50-FPN | caffe | 1x | 5.5 | | 39.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco/ga_faster_r50_caffe_fpn_1x_coco_20200702_000718-a11ccfe6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco/ga_faster_r50_caffe_fpn_1x_coco_20200702_000718.log.json) | +| GA-Faster RCNN | R-101-FPN | caffe | 1x | 7.5 
| | 41.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco/ga_faster_r101_caffe_fpn_1x_coco_bbox_mAP-0.415_20200505_115528-fb82e499.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco/ga_faster_r101_caffe_fpn_1x_coco_20200505_115528.log.json) | +| GA-Faster RCNN | X-101-32x4d-FPN | pytorch | 1x | 8.7 | 9.7 | 43.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco/ga_faster_x101_32x4d_fpn_1x_coco_20200215-1ded9da3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco/ga_faster_x101_32x4d_fpn_1x_coco_20200215_184547.log.json) | +| GA-Faster RCNN | X-101-64x4d-FPN | pytorch | 1x | 11.8 | 7.3 | 43.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco/ga_faster_x101_64x4d_fpn_1x_coco_20200215-0fa7bde7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco/ga_faster_x101_64x4d_fpn_1x_coco_20200215_104455.log.json) | +| GA-RetinaNet | R-50-FPN | caffe | 1x | 3.5 | 16.8 | 36.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco/ga_retinanet_r50_caffe_fpn_1x_coco_20201020-39581c6f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco/ga_retinanet_r50_caffe_fpn_1x_coco_20201020_225450.log.json) | +| GA-RetinaNet | R-101-FPN | caffe | 1x | 5.5 | 12.9 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco/ga_retinanet_r101_caffe_fpn_1x_coco_20200531-6266453c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco/ga_retinanet_r101_caffe_fpn_1x_coco_20200531_012847.log.json) | +| GA-RetinaNet | X-101-32x4d-FPN | pytorch | 1x | 6.9 | 10.6 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco/ga_retinanet_x101_32x4d_fpn_1x_coco_20200219-40c56caa.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco/ga_retinanet_x101_32x4d_fpn_1x_coco_20200219_223025.log.json) | +| GA-RetinaNet | X-101-64x4d-FPN | pytorch | 1x | 9.9 | 7.7 | 41.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco/ga_retinanet_x101_64x4d_fpn_1x_coco_20200226-ef9f7f1f.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco/ga_retinanet_x101_64x4d_fpn_1x_coco_20200226_221123.log.json) | + +- In the Guided Anchoring paper, `score_thr` is set to 0.001 in Fast/Faster RCNN and 0.05 in RetinaNet for both baselines and Guided Anchoring. + +- Performance on the COCO test-dev benchmark is shown as follows. + +| Method | Backbone | Style | Lr schd | Aug Train | Score thr | AP | AP_50 | AP_75 | AP_small | AP_medium | AP_large | Download | +| :------------: | :-------: | :---: | :-----: | :-------: | :-------: | :---: | :---: | :---: | :------: | :-------: | :------: | :------: | +| GA-Faster RCNN | R-101-FPN | caffe | 1x | F | 0.05 | | | | | | | | +| GA-Faster RCNN | R-101-FPN | caffe | 1x | F | 0.001 | | | | | | | | +| GA-RetinaNet | R-101-FPN | caffe | 1x | F | 0.05 | | | | | | | | +| GA-RetinaNet | R-101-FPN | caffe | 2x | T | 0.05 | | | | | | | | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_fast_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_fast_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8fc203c6ed2b31b4672ae4525c65afbcdc6579ed --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_fast_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,65 @@ +_base_ = '../fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + roi_head=dict( + bbox_head=dict(bbox_coder=dict(target_stds=[0.05, 0.05, 0.1, 0.1]))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + assigner=dict(pos_iou_thr=0.6, neg_iou_thr=0.6, min_pos_iou=0.6), + sampler=dict(num=256))), + test_cfg=dict(rcnn=dict(score_thr=1e-3))) +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=300), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'proposals', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadProposals', num_max_proposals=None), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img', 'proposals']), + ]) +] +data = dict( + train=dict( + proposal_file=data_root + 'proposals/ga_rpn_r50_fpn_1x_train2017.pkl', + pipeline=train_pipeline), + val=dict( + proposal_file=data_root + 'proposals/ga_rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline), + test=dict( + proposal_file=data_root + 'proposals/ga_rpn_r50_fpn_1x_val2017.pkl', + pipeline=test_pipeline)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff 
--git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a40e7c6fd7e2355081e7a31b40a893314e4eb303 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './ga_faster_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b0add92c398b62aa8fd2141f595cf0941f55d421 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,65 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + roi_head=dict( + bbox_head=dict(bbox_coder=dict(target_stds=[0.05, 0.05, 0.1, 0.1]))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5), + rpn_proposal=dict(nms_post=1000, max_per_img=300), + rcnn=dict( + assigner=dict(pos_iou_thr=0.6, neg_iou_thr=0.6, min_pos_iou=0.6), + sampler=dict(type='RandomSampler', num=256))), + test_cfg=dict( + rpn=dict(nms_post=1000, max_per_img=300), rcnn=dict(score_thr=1e-3))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e3d8238956f4d4874de1fde662a1a3ded1918189 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_r50_fpn_1x_coco.py @@ -0,0 +1,65 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + 
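# GA-RPN replaces the anchor-based RPN head of the base config: the + # approx_anchor_generator yields the dense anchors used only for target + # assignment, while the square_anchor_generator provides one square prior + # per location whose position and shape the loc/shape branches predict. + 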
rpn_head=dict( + _delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + roi_head=dict( + bbox_head=dict(bbox_coder=dict(target_stds=[0.05, 0.05, 0.1, 0.1]))), + # model training and testing settings + train_cfg=dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5), + rpn_proposal=dict(nms_post=1000, max_per_img=300), + rcnn=dict( + assigner=dict(pos_iou_thr=0.6, neg_iou_thr=0.6, min_pos_iou=0.6), + sampler=dict(type='RandomSampler', num=256))), + test_cfg=dict( + rpn=dict(nms_post=1000, max_per_img=300), rcnn=dict(score_thr=1e-3))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f1dda9495c2595b2743e3056abf65a1795ea5971 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_faster_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fb9e2afc9cff8c8c94b2ace544785a026a61f45e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_faster_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1b1cccd0dd15123c35044367001e465b691f6f24 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './ga_retinanet_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_mstrain_2x.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_mstrain_2x.py new file mode 100644 index 0000000000000000000000000000000000000000..260895b401106c91a6133a054260ab94e92c75c5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_mstrain_2x.py @@ -0,0 +1,169 @@ +_base_ = '../_base_/default_runtime.py' + +# model settings +model = dict( + type='RetinaNet', + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5), + bbox_head=dict( + type='GARetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.04, loss_weight=1.0))) +# training and testing settings +train_cfg = dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.4, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + center_ratio=0.2, + ignore_ratio=0.5, + debug=False) +test_cfg = dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + 
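# 'mstrain': with multiscale_mode='range', the Resize step below samples a + # random training scale between (1333, 480) and (1333, 960) per image. + 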
dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 960)], + keep_ratio=True, + multiscale_mode='range'), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='bbox') +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=1.0 / 3, + step=[16, 22]) +checkpoint_config = dict(interval=1) +# yapf:disable +log_config = dict( + interval=50, + hooks=[ + dict(type='TextLoggerHook'), + # dict(type='TensorboardLoggerHook') + ]) +# yapf:enable +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..33512011abb612ff5c762e75ee4492b382902fa4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,62 @@ +_base_ = '../retinanet/retinanet_r50_caffe_fpn_1x_coco.py' +model = dict( + bbox_head=dict( + _delete_=True, + type='GARetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.04, loss_weight=1.0)), + # training and testing settings + 
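# ga_assigner/ga_sampler supervise the anchor location and shape branches + # on the approx anchors; the plain assigner keeps RetinaNet's usual IoU + # assignment for the final classification and regression targets. + 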
train_cfg=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.4, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + assigner=dict(neg_iou_thr=0.5, min_pos_iou=0.0), + center_ratio=0.2, + ignore_ratio=0.5)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..769472352d06a8f2c30d73ae1f57c393f77adfa2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,62 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +model = dict( + bbox_head=dict( + _delete_=True, + type='GARetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=0.04, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.4, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + assigner=dict(neg_iou_thr=0.5, min_pos_iou=0.0), + center_ratio=0.2, + ignore_ratio=0.5)) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c5eb34f5fa2d1061c7eb4a3adfb8b7e1ede51b55 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5c69a6f848f278b0b81082a8f38b01e154db0e84 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..039703ec6635f6665be16919baf157511c7b3431 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = './ga_rpn_r50_caffe_fpn_1x_coco.py' +# model settings +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7830894af1b5824d9ff442f6aa90f6e68c9ef29c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,58 @@ +_base_ = '../rpn/rpn_r50_caffe_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5)), + test_cfg=dict(rpn=dict(nms_post=1000))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_r50_fpn_1x_coco.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..27ab3e733bda1fb1c7c50cbd0f26597650b4c2e7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_r50_fpn_1x_coco.py @@ -0,0 +1,58 @@ +_base_ = '../rpn/rpn_r50_fpn_1x_coco.py' +model = dict( + rpn_head=dict( + _delete_=True, + type='GARPNHead', + in_channels=256, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.14, 0.14]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.07, 0.07, 0.11, 0.11]), + loc_filter_thr=0.01, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + # model training and testing settings + train_cfg=dict( + rpn=dict( + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5)), + test_cfg=dict(rpn=dict(nms_post=1000))) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cccc985f9eb2c3e9c06f91af6107ec909aefd9d1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_rpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4e134d23ad428eaca19bc8069325a9545683cd8e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ga_rpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..6364104054a861876727ca76b5117321fdeb9779 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/guided_anchoring/metafile.yml @@ -0,0 +1,241 @@ +Collections: + - Name: Guided Anchoring + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - Guided Anchoring + - ResNet + Paper: https://arxiv.org/abs/1901.03278 + README: configs/guided_anchoring/README.md + +Models: + - Name: ga_rpn_r50_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.3 + inference time (ms/im): + - value: 63.29 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Region Proposal + Dataset: COCO + Metrics: + AR@1000: 68.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r50_caffe_fpn_1x_coco/ga_rpn_r50_caffe_fpn_1x_coco_20200531-899008a6.pth + + - Name: ga_rpn_r101_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.3 + inference time (ms/im): + - value: 76.92 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Region Proposal + Dataset: COCO + Metrics: + AR@1000: 69.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_r101_caffe_fpn_1x_coco/ga_rpn_r101_caffe_fpn_1x_coco_20200531-ca9ba8fb.pth + + - Name: ga_rpn_x101_32x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 8.5 + inference time (ms/im): + - value: 100 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Region Proposal + Dataset: COCO + Metrics: + AR@1000: 70.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_32x4d_fpn_1x_coco/ga_rpn_x101_32x4d_fpn_1x_coco_20200220-c28d1b18.pth + + - Name: ga_rpn_x101_64x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 133.33 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Region Proposal + Dataset: COCO + Metrics: + AR@1000: 70.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_rpn_x101_64x4d_fpn_1x_coco/ga_rpn_x101_64x4d_fpn_1x_coco_20200225-3c6e1aa2.pth + + - Name: ga_faster_r50_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r50_caffe_fpn_1x_coco/ga_faster_r50_caffe_fpn_1x_coco_20200702_000718-a11ccfe6.pth + + - Name: ga_faster_r101_caffe_fpn_1x_coco + In Collection: Guided Anchoring + 
Config: configs/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_r101_caffe_fpn_1x_coco/ga_faster_r101_caffe_fpn_1x_coco_bbox_mAP-0.415_20200505_115528-fb82e499.pth + + - Name: ga_faster_x101_32x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 8.7 + inference time (ms/im): + - value: 103.09 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_32x4d_fpn_1x_coco/ga_faster_x101_32x4d_fpn_1x_coco_20200215-1ded9da3.pth + + - Name: ga_faster_x101_64x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 11.8 + inference time (ms/im): + - value: 136.99 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_faster_x101_64x4d_fpn_1x_coco/ga_faster_x101_64x4d_fpn_1x_coco_20200215-0fa7bde7.pth + + - Name: ga_retinanet_r50_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.5 + inference time (ms/im): + - value: 59.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r50_caffe_fpn_1x_coco/ga_retinanet_r50_caffe_fpn_1x_coco_20201020-39581c6f.pth + + - Name: ga_retinanet_r101_caffe_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_r101_caffe_fpn_1x_coco/ga_retinanet_r101_caffe_fpn_1x_coco_20200531-6266453c.pth + + - Name: ga_retinanet_x101_32x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.9 + inference time (ms/im): + - value: 94.34 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_32x4d_fpn_1x_coco/ga_retinanet_x101_32x4d_fpn_1x_coco_20200219-40c56caa.pth + + - Name: ga_retinanet_x101_64x4d_fpn_1x_coco + In Collection: Guided Anchoring + Config: configs/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 9.9 + inference time (ms/im): + - 
value: 129.87 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/guided_anchoring/ga_retinanet_x101_64x4d_fpn_1x_coco/ga_retinanet_x101_64x4d_fpn_1x_coco_20200226-ef9f7f1f.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..bff5ce04dc16cb91e4eeae45bf74e9fc3f3e67d0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/README.md @@ -0,0 +1,88 @@ +# High-resolution networks (HRNets) for object detection + +## Introduction + + + +```latex +@inproceedings{SunXLW19, + title={Deep High-Resolution Representation Learning for Human Pose Estimation}, + author={Ke Sun and Bin Xiao and Dong Liu and Jingdong Wang}, + booktitle={CVPR}, + year={2019} +} + +@article{SunZJCXLMWLW19, + title={High-Resolution Representations for Labeling Pixels and Regions}, + author={Ke Sun and Yang Zhao and Borui Jiang and Tianheng Cheng and Bin Xiao + and Dong Liu and Yadong Mu and Xinggang Wang and Wenyu Liu and Jingdong Wang}, + journal = {CoRR}, + volume = {abs/1904.04514}, + year={2019} +} +``` + +## Results and Models + +### Faster R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:| :--------:| +| HRNetV2p-W18 | pytorch | 1x | 6.6 | 13.4 | 36.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco/faster_rcnn_hrnetv2p_w18_1x_coco_20200130-56651a6d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco/faster_rcnn_hrnetv2p_w18_1x_coco_20200130_211246.log.json) | +| HRNetV2p-W18 | pytorch | 2x | 6.6 | - | 38.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco/faster_rcnn_hrnetv2p_w18_2x_coco_20200702_085731-a4ec0611.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco/faster_rcnn_hrnetv2p_w18_2x_coco_20200702_085731.log.json) | +| HRNetV2p-W32 | pytorch | 1x | 9.0 | 12.4 | 40.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco/faster_rcnn_hrnetv2p_w32_1x_coco_20200130-6e286425.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco/faster_rcnn_hrnetv2p_w32_1x_coco_20200130_204442.log.json) | +| HRNetV2p-W32 | pytorch | 2x | 9.0 | - | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco/faster_rcnn_hrnetv2p_w32_2x_coco_20200529_015927-976a9c15.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco/faster_rcnn_hrnetv2p_w32_2x_coco_20200529_015927.log.json) | +| HRNetV2p-W40 | pytorch | 1x | 
10.4 | 10.5 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco/faster_rcnn_hrnetv2p_w40_1x_coco_20200210-95c1f5ce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco/faster_rcnn_hrnetv2p_w40_1x_coco_20200210_125315.log.json) | +| HRNetV2p-W40 | pytorch | 2x | 10.4 | - | 42.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco/faster_rcnn_hrnetv2p_w40_2x_coco_20200512_161033-0f236ef4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco/faster_rcnn_hrnetv2p_w40_2x_coco_20200512_161033.log.json) | + +### Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:|:------:|:--------:| +| HRNetV2p-W18 | pytorch | 1x | 7.0 | 11.7 | 37.7 | 34.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco/mask_rcnn_hrnetv2p_w18_1x_coco_20200205-1c3d78ed.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco/mask_rcnn_hrnetv2p_w18_1x_coco_20200205_232523.log.json) | +| HRNetV2p-W18 | pytorch | 2x | 7.0 | - | 39.8 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco/mask_rcnn_hrnetv2p_w18_2x_coco_20200212-b3c825b1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco/mask_rcnn_hrnetv2p_w18_2x_coco_20200212_134222.log.json) | +| HRNetV2p-W32 | pytorch | 1x | 9.4 | 11.3 | 41.2 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco/mask_rcnn_hrnetv2p_w32_1x_coco_20200207-b29f616e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco/mask_rcnn_hrnetv2p_w32_1x_coco_20200207_055017.log.json) | +| HRNetV2p-W32 | pytorch | 2x | 9.4 | - | 42.5 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco/mask_rcnn_hrnetv2p_w32_2x_coco_20200213-45b75b4d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco/mask_rcnn_hrnetv2p_w32_2x_coco_20200213_150518.log.json) | +| HRNetV2p-W40 | pytorch | 1x | 10.9 | | 42.1 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco/mask_rcnn_hrnetv2p_w40_1x_coco_20200511_015646-66738b35.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco/mask_rcnn_hrnetv2p_w40_1x_coco_20200511_015646.log.json) | +| HRNetV2p-W40 | pytorch | 2x | 10.9 | 
| 42.8 | 38.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco/mask_rcnn_hrnetv2p_w40_2x_coco_20200512_163732-aed5e4ab.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco/mask_rcnn_hrnetv2p_w40_2x_coco_20200512_163732.log.json) | + +### Cascade R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------: | :--------: | +| HRNetV2p-W18 | pytorch | 20e | 7.0 | 11.0 | 41.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco/cascade_rcnn_hrnetv2p_w18_20e_coco_20200210-434be9d7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco/cascade_rcnn_hrnetv2p_w18_20e_coco_20200210_105632.log.json) | +| HRNetV2p-W32 | pytorch | 20e | 9.4 | 11.0 | 43.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco/cascade_rcnn_hrnetv2p_w32_20e_coco_20200208-928455a4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco/cascade_rcnn_hrnetv2p_w32_20e_coco_20200208_160511.log.json) | +| HRNetV2p-W40 | pytorch | 20e | 10.8 | | 43.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco/cascade_rcnn_hrnetv2p_w40_20e_coco_20200512_161112-75e47b04.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco/cascade_rcnn_hrnetv2p_w40_20e_coco_20200512_161112.log.json) | + +### Cascade Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:|:------:|:--------:| +| HRNetV2p-W18 | pytorch | 20e | 8.5 | 8.5 |41.6 |36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco/cascade_mask_rcnn_hrnetv2p_w18_20e_coco_20200210-b543cd2b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco/cascade_mask_rcnn_hrnetv2p_w18_20e_coco_20200210_093149.log.json) | +| HRNetV2p-W32 | pytorch | 20e | | 8.3 |44.3 |38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco/cascade_mask_rcnn_hrnetv2p_w32_20e_coco_20200512_154043-39d9cf7b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco/cascade_mask_rcnn_hrnetv2p_w32_20e_coco_20200512_154043.log.json) | +| HRNetV2p-W40 | pytorch | 20e | 12.5 | |45.1 |39.3 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco/cascade_mask_rcnn_hrnetv2p_w40_20e_coco_20200527_204922-969c4610.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco/cascade_mask_rcnn_hrnetv2p_w40_20e_coco_20200527_204922.log.json) | + +### Hybrid Task Cascade (HTC) + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :-------------:|:------:| :------:|:------:|:--------:| +| HRNetV2p-W18 | pytorch | 20e | 10.8 | 4.7 | 42.8 | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/htc_hrnetv2p_w18_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w18_20e_coco/htc_hrnetv2p_w18_20e_coco_20200210-b266988c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w18_20e_coco/htc_hrnetv2p_w18_20e_coco_20200210_182735.log.json) | +| HRNetV2p-W32 | pytorch | 20e | 13.1 | 4.9 | 45.4 | 39.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/htc_hrnetv2p_w32_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w32_20e_coco/htc_hrnetv2p_w32_20e_coco_20200207-7639fa12.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w32_20e_coco/htc_hrnetv2p_w32_20e_coco_20200207_193153.log.json) | +| HRNetV2p-W40 | pytorch | 20e | 14.6 | | 46.4 | 40.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/htc_hrnetv2p_w40_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w40_20e_coco/htc_hrnetv2p_w40_20e_coco_20200529_183411-417c4d5b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w40_20e_coco/htc_hrnetv2p_w40_20e_coco_20200529_183411.log.json) | + +### FCOS + +| Backbone | Style | GN | MS train | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:--------:|:-------:|:------:|:------:|:------:|:------:|:--------:| +|HRNetV2p-W18| pytorch | Y | N | 1x | 13.0 | 12.9 | 35.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco_20201212_100710-4ad151de.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco_20201212_100710.log.json) | +|HRNetV2p-W18| pytorch | Y | N | 2x | 13.0 | - | 38.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco_20201212_101110-5c575fa5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco_20201212_101110.log.json) | +|HRNetV2p-W32| pytorch | Y | N | 1x | 17.5 | 12.9 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco_20201211_134730-cb8055c0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco_20201211_134730.log.json) | +|HRNetV2p-W32| pytorch | Y | N | 2x | 17.5 | - | 40.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco_20201212_112133-77b6b9bb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco_20201212_112133.log.json) | +|HRNetV2p-W18| pytorch | Y | Y | 2x | 13.0 | 12.9 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco_20201212_111651-441e9d9f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco_20201212_111651.log.json) | +|HRNetV2p-W32| pytorch | Y | Y | 2x | 17.5 | 12.4 | 41.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco_20201212_090846-b6f2b49f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco_20201212_090846.log.json) | +|HRNetV2p-W40| pytorch | Y | Y | 2x | 20.3 | 10.8 | 42.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco_20201212_124752-f22d2ce5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco_20201212_124752.log.json) | + +**Note:** + +- The `20e` schedule indicates decreasing the lr at 16 and 19 epochs, with a total of 20 epochs. +- HRNetV2 ImageNet pretrained models are in [HRNets for Image Classification](https://github.com/HRNet/HRNet-Image-Classification). 
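+ +## Usage + +A minimal inference sketch (an editorial addition, not part of the upstream configs; it assumes `mmdet` and `mmcv-full` are installed, that the W32 Faster R-CNN checkpoint linked above has been downloaded, and that `demo.jpg` stands in for any test image): + +```python +from mmdet.apis import init_detector, inference_detector + +config_file = 'configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py' +checkpoint_file = 'faster_rcnn_hrnetv2p_w32_1x_coco_20200130-6e286425.pth' + +# build the detector and run single-image inference; the result is a +# list of per-class bounding-box arrays +model = init_detector(config_file, checkpoint_file, device='cuda:0') +result = inference_detector(model, 'demo.jpg') +``` + +The same calls work for any config/checkpoint pair in this folder; only the two paths change. 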
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..839cf3eb62590368ab0e99efdadcbdd4ad81eeb5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py @@ -0,0 +1,11 @@ +_base_ = './cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9942602762d8eba5d4c3ad20f2190fdb9f1df906 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py @@ -0,0 +1,40 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..10d5e83c67ebfb7f3017abc164d9559681185268 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py @@ -0,0 +1,12 @@ +_base_ = './cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ebd5e202d955e87870b3cf8efd94683668dd5929 --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py @@ -0,0 +1,11 @@ +_base_ = './cascade_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e7f89a9edae81d02a2229229b1c66cf50a9282e0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py @@ -0,0 +1,40 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..265e8d636f53f448f59372074c9bbe590cb26d9a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py @@ -0,0 +1,12 @@ +_base_ = './cascade_rcnn_hrnetv2p_w32_20e_coco.py' +# model settings +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1df2c3db1c00a6c0c34f96bc71cf35bfc0e0fbe6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = './faster_rcnn_hrnetv2p_w32_1x_coco.py' +# model settings +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + 
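# HRFPN's in_channels must mirror the stage4 branch widths set above. + 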
neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a4b987a19ae32453d524fc2f7a4fb6b6b87f1f32 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = './faster_rcnn_hrnetv2p_w18_1x_coco.py' + +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..be058099a4c59b06ec5598ea25d194163e45601a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py @@ -0,0 +1,37 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..63c8717182f2284ff1062be31bae43b4360c6887 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_hrnetv2p_w32_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..886a7c90a453e684b3c0646b2eb3dea903671358 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = './faster_rcnn_hrnetv2p_w32_1x_coco.py' +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..585cc2c332fd88a9f0164b14084d45d7a3783b11 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './faster_rcnn_hrnetv2p_w40_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fd662bd10e3eb84fccbda080d9c902084f2fb490 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py @@ -0,0 +1,10 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py' +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..34975959f27f0ef8b985ab7d2857c7f2d70e47ae --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..37bfdae98f177914cbaa99d5b117c7928b6f84dd --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,10 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py' +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..10617f24c46f8dee164f06babecb00ae5d289466 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py @@ -0,0 +1,70 @@ +_base_ = '../fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + 
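+                # Stage 1 is HRNet's single-branch stem; each later stage
+                # adds one more parallel branch at a lower resolution.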
block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256, + stride=2, + num_outs=5)) +img_norm_cfg = dict( + mean=[103.53, 116.28, 123.675], std=[57.375, 57.12, 58.395], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7b3813071c7591caa72412e5622e4101f7c05920 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..482f88729ff6c08e482a5ca5c6d48b75f14f7ca8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,39 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py' +img_norm_cfg = dict( + mean=[103.53, 116.28, 123.675], std=[57.375, 57.12, 58.395], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + 
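+            # flip=False above disables test-time flipping, so RandomFlip
+            # here is a pass-through placeholder required by MultiScaleFlipAug.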
dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0ae9dbe3aca8d9d6e0af785dd60131909f420a89 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py @@ -0,0 +1,11 @@ +_base_ = './fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py' +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_hrnetv2p_w18_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_hrnetv2p_w18_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3c2eb1dd4e08830d0e57ecfe321f0353c8bf6cb1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_hrnetv2p_w18_20e_coco.py @@ -0,0 +1,10 @@ +_base_ = './htc_hrnetv2p_w32_20e_coco.py' +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_hrnetv2p_w32_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_hrnetv2p_w32_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..545cb83eaca50f9d5de1fa6b3f3e569faab7d5f2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_hrnetv2p_w32_20e_coco.py @@ -0,0 +1,37 @@ +_base_ = '../htc/htc_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_hrnetv2p_w40_20e_coco.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_hrnetv2p_w40_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..94bff1bc01c09a98579f469dcac19df27cfc60b9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_hrnetv2p_w40_20e_coco.py @@ -0,0 +1,11 @@ +_base_ = './htc_hrnetv2p_w32_20e_coco.py' +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_hrnetv2p_w40_28e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_hrnetv2p_w40_28e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7067e8b602efb4f61549d376ec393e89deee8c3e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_hrnetv2p_w40_28e_coco.py @@ -0,0 +1,4 @@ +_base_ = './htc_hrnetv2p_w40_20e_coco.py' +# learning policy +lr_config = dict(step=[24, 27]) +runner = dict(type='EpochBasedRunner', max_epochs=28) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..815f2857f99791232664ecc9e82ea860fdcaa268 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/htc_x101_64x4d_fpn_16x1_28e_coco.py @@ -0,0 +1,4 @@ +_base_ = '../htc/htc_x101_64x4d_fpn_16x1_20e_coco.py' +# learning policy +lr_config = dict(step=[24, 27]) +runner = dict(type='EpochBasedRunner', max_epochs=28) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cb12200edb5fe0a31b0cba8966e858ad06024b7c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py @@ -0,0 +1,10 @@ +_base_ = './mask_rcnn_hrnetv2p_w32_1x_coco.py' +model = dict( + backbone=dict( + extra=dict( + stage2=dict(num_channels=(18, 36)), + stage3=dict(num_channels=(18, 36, 72)), + stage4=dict(num_channels=(18, 36, 72, 144))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w18')), + neck=dict(type='HRFPN', in_channels=[18, 36, 72, 144], out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ca62682a3b2d328cc9a8fd08887bcc1bac53104d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_hrnetv2p_w18_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..d5f0eb56b7e97bc764b98a2b88a277a69633caa6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py @@ -0,0 +1,37 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + _delete_=True, + type='HRNet', + extra=dict( + stage1=dict( + num_modules=1, + num_branches=1, + block='BOTTLENECK', + num_blocks=(4, ), + num_channels=(64, )), + stage2=dict( + num_modules=1, + num_branches=2, + block='BASIC', + num_blocks=(4, 4), + num_channels=(32, 64)), + stage3=dict( + num_modules=4, + num_branches=3, + block='BASIC', + num_blocks=(4, 4, 4), + num_channels=(32, 64, 128)), + stage4=dict( + num_modules=3, + num_branches=4, + block='BASIC', + num_blocks=(4, 4, 4, 4), + num_channels=(32, 64, 128, 256))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w32')), + neck=dict( + _delete_=True, + type='HRFPN', + in_channels=[32, 64, 128, 256], + out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..63d5d139e7b56843f5dcc85bda48945d56cfc49e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_hrnetv2p_w32_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5a76f4b056367f0cc69b5fc601ae5cdb1ac98cf8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py @@ -0,0 +1,11 @@ +_base_ = './mask_rcnn_hrnetv2p_w18_1x_coco.py' +model = dict( + backbone=dict( + type='HRNet', + extra=dict( + stage2=dict(num_channels=(40, 80)), + stage3=dict(num_channels=(40, 80, 160)), + stage4=dict(num_channels=(40, 80, 160, 320))), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://msra/hrnetv2_w40')), + neck=dict(type='HRFPN', in_channels=[40, 80, 160, 320], out_channels=256)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3a2a510689308e556af803968a641dcf2594fe77 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_hrnetv2p_w40_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..fce29bdbe57820f297282f97417290266c04c78d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/hrnet/metafile.yml @@ -0,0 +1,599 @@ +Collections: + - Name: HRNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + 
Training Resources: 8x V100 GPUs + Architecture: + - HRNet + Paper: https://arxiv.org/abs/1904.04514 + README: configs/hrnet/README.md + +Models: + - Name: faster_rcnn_hrnetv2p_w18_1x_coco + In Collection: HRNet + Config: configs/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco.py + Metadata: + Training Memory (GB): 6.6 + inference time (ms/im): + - value: 74.63 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_1x_coco/faster_rcnn_hrnetv2p_w18_1x_coco_20200130-56651a6d.pth + + - Name: faster_rcnn_hrnetv2p_w18_2x_coco + In Collection: HRNet + Config: configs/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco.py + Metadata: + Training Memory (GB): 6.6 + inference time (ms/im): + - value: 74.63 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w18_2x_coco/faster_rcnn_hrnetv2p_w18_2x_coco_20200702_085731-a4ec0611.pth + + - Name: faster_rcnn_hrnetv2p_w32_1x_coco + In Collection: HRNet + Config: configs/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco.py + Metadata: + Training Memory (GB): 9.0 + inference time (ms/im): + - value: 80.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_1x_coco/faster_rcnn_hrnetv2p_w32_1x_coco_20200130-6e286425.pth + + - Name: faster_rcnn_hrnetv2p_w32_2x_coco + In Collection: HRNet + Config: configs/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco.py + Metadata: + Training Memory (GB): 9.0 + inference time (ms/im): + - value: 80.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w32_2x_coco/faster_rcnn_hrnetv2p_w32_2x_coco_20200529_015927-976a9c15.pth + + - Name: faster_rcnn_hrnetv2p_w40_1x_coco + In Collection: HRNet + Config: configs/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco.py + Metadata: + Training Memory (GB): 10.4 + inference time (ms/im): + - value: 95.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_1x_coco/faster_rcnn_hrnetv2p_w40_1x_coco_20200210-95c1f5ce.pth + + - Name: faster_rcnn_hrnetv2p_w40_2x_coco + In Collection: HRNet + Config: configs/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco.py + Metadata: + Training Memory (GB): 10.4 + inference time (ms/im): + - value: 95.24 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/faster_rcnn_hrnetv2p_w40_2x_coco/faster_rcnn_hrnetv2p_w40_2x_coco_20200512_161033-0f236ef4.pth + + - Name: mask_rcnn_hrnetv2p_w18_1x_coco + In Collection: HRNet + Config: 
configs/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 85.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_1x_coco/mask_rcnn_hrnetv2p_w18_1x_coco_20200205-1c3d78ed.pth + + - Name: mask_rcnn_hrnetv2p_w18_2x_coco + In Collection: HRNet + Config: configs/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 85.47 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w18_2x_coco/mask_rcnn_hrnetv2p_w18_2x_coco_20200212-b3c825b1.pth + + - Name: mask_rcnn_hrnetv2p_w32_1x_coco + In Collection: HRNet + Config: configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py + Metadata: + Training Memory (GB): 9.4 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco/mask_rcnn_hrnetv2p_w32_1x_coco_20200207-b29f616e.pth + + - Name: mask_rcnn_hrnetv2p_w32_2x_coco + In Collection: HRNet + Config: configs/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco.py + Metadata: + Training Memory (GB): 9.4 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w32_2x_coco/mask_rcnn_hrnetv2p_w32_2x_coco_20200213-45b75b4d.pth + + - Name: mask_rcnn_hrnetv2p_w40_1x_coco + In Collection: HRNet + Config: configs/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco.py + Metadata: + Training Memory (GB): 10.9 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_1x_coco/mask_rcnn_hrnetv2p_w40_1x_coco_20200511_015646-66738b35.pth + + - Name: mask_rcnn_hrnetv2p_w40_2x_coco + In Collection: HRNet + Config: configs/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco.py + Metadata: + Training Memory (GB): 10.9 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/mask_rcnn_hrnetv2p_w40_2x_coco/mask_rcnn_hrnetv2p_w40_2x_coco_20200512_163732-aed5e4ab.pth + + - Name: cascade_rcnn_hrnetv2p_w18_20e_coco + In Collection: HRNet + Config: configs/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time 
(ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w18_20e_coco/cascade_rcnn_hrnetv2p_w18_20e_coco_20200210-434be9d7.pth + + - Name: cascade_rcnn_hrnetv2p_w32_20e_coco + In Collection: HRNet + Config: configs/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco.py + Metadata: + Training Memory (GB): 9.4 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w32_20e_coco/cascade_rcnn_hrnetv2p_w32_20e_coco_20200208-928455a4.pth + + - Name: cascade_rcnn_hrnetv2p_w40_20e_coco + In Collection: HRNet + Config: configs/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco.py + Metadata: + Training Memory (GB): 10.8 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_rcnn_hrnetv2p_w40_20e_coco/cascade_rcnn_hrnetv2p_w40_20e_coco_20200512_161112-75e47b04.pth + + - Name: cascade_mask_rcnn_hrnetv2p_w18_20e_coco + In Collection: HRNet + Config: configs/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco.py + Metadata: + Training Memory (GB): 8.5 + inference time (ms/im): + - value: 117.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w18_20e_coco/cascade_mask_rcnn_hrnetv2p_w18_20e_coco_20200210-b543cd2b.pth + + - Name: cascade_mask_rcnn_hrnetv2p_w32_20e_coco + In Collection: HRNet + Config: configs/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco.py + Metadata: + inference time (ms/im): + - value: 120.48 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w32_20e_coco/cascade_mask_rcnn_hrnetv2p_w32_20e_coco_20200512_154043-39d9cf7b.pth + + - Name: cascade_mask_rcnn_hrnetv2p_w40_20e_coco + In Collection: HRNet + Config: configs/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco.py + Metadata: + Training Memory (GB): 12.5 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/cascade_mask_rcnn_hrnetv2p_w40_20e_coco/cascade_mask_rcnn_hrnetv2p_w40_20e_coco_20200527_204922-969c4610.pth + + - Name: htc_hrnetv2p_w18_20e_coco + In Collection: HRNet + Config: configs/hrnet/htc_hrnetv2p_w18_20e_coco.py + Metadata: + Training Memory (GB): 10.8 + inference time (ms/im): + - value: 212.77 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + - Task: 
Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w18_20e_coco/htc_hrnetv2p_w18_20e_coco_20200210-b266988c.pth + + - Name: htc_hrnetv2p_w32_20e_coco + In Collection: HRNet + Config: configs/hrnet/htc_hrnetv2p_w32_20e_coco.py + Metadata: + Training Memory (GB): 13.1 + inference time (ms/im): + - value: 204.08 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w32_20e_coco/htc_hrnetv2p_w32_20e_coco_20200207-7639fa12.pth + + - Name: htc_hrnetv2p_w40_20e_coco + In Collection: HRNet + Config: configs/hrnet/htc_hrnetv2p_w40_20e_coco.py + Metadata: + Training Memory (GB): 14.6 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/htc_hrnetv2p_w40_20e_coco/htc_hrnetv2p_w40_20e_coco_20200529_183411-417c4d5b.pth + + - Name: fcos_hrnetv2p_w18_gn-head_4x4_1x_coco + In Collection: HRNet + Config: configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 13.0 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 35.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco/fcos_hrnetv2p_w18_gn-head_4x4_1x_coco_20201212_100710-4ad151de.pth + + - Name: fcos_hrnetv2p_w18_gn-head_4x4_2x_coco + In Collection: HRNet + Config: configs/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 13.0 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_4x4_2x_coco_20201212_101110-5c575fa5.pth + + - Name: fcos_hrnetv2p_w32_gn-head_4x4_1x_coco + In Collection: HRNet + Config: configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 17.5 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco/fcos_hrnetv2p_w32_gn-head_4x4_1x_coco_20201211_134730-cb8055c0.pth + + - Name: fcos_hrnetv2p_w32_gn-head_4x4_2x_coco + In Collection: HRNet + Config: configs/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco.py + Metadata: + Training Resources: 4x V100 GPUs + Batch Size: 16 + Training Memory (GB): 17.5 + inference time (ms/im): + - value: 77.52 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: 
Object Detection
+        Dataset: COCO
+        Metrics:
+          box AP: 40.8
+    Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_4x4_2x_coco_20201212_112133-77b6b9bb.pth
+
+  - Name: fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco
+    In Collection: HRNet
+    Config: configs/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco.py
+    Metadata:
+      Training Resources: 4x V100 GPUs
+      Batch Size: 16
+      Training Memory (GB): 13.0
+      inference time (ms/im):
+        - value: 77.52
+          hardware: V100
+          backend: PyTorch
+          batch size: 1
+          mode: FP32
+          resolution: (800, 1333)
+      Epochs: 24
+    Results:
+      - Task: Object Detection
+        Dataset: COCO
+        Metrics:
+          box AP: 38.3
+    Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w18_gn-head_mstrain_640-800_4x4_2x_coco_20201212_111651-441e9d9f.pth
+
+  - Name: fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco
+    In Collection: HRNet
+    Config: configs/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco.py
+    Metadata:
+      Training Resources: 4x V100 GPUs
+      Batch Size: 16
+      Training Memory (GB): 17.5
+      inference time (ms/im):
+        - value: 80.65
+          hardware: V100
+          backend: PyTorch
+          batch size: 1
+          mode: FP32
+          resolution: (800, 1333)
+      Epochs: 24
+    Results:
+      - Task: Object Detection
+        Dataset: COCO
+        Metrics:
+          box AP: 41.9
+    Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w32_gn-head_mstrain_640-800_4x4_2x_coco_20201212_090846-b6f2b49f.pth
+
+  - Name: fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco
+    In Collection: HRNet
+    Config: configs/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco.py
+    Metadata:
+      Training Resources: 4x V100 GPUs
+      Batch Size: 16
+      Training Memory (GB): 20.3
+      inference time (ms/im):
+        - value: 92.59
+          hardware: V100
+          backend: PyTorch
+          batch size: 1
+          mode: FP32
+          resolution: (800, 1333)
+      Epochs: 24
+    Results:
+      - Task: Object Detection
+        Dataset: COCO
+        Metrics:
+          box AP: 42.7
+    Weights: https://download.openmmlab.com/mmdetection/v2.0/hrnet/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco/fcos_hrnetv2p_w40_gn-head_mstrain_640-800_4x4_2x_coco_20201212_124752-f22d2ce5.pth
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/htc/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..162582723a78ee21cec5ab81dd9343e3dc784e6e
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/README.md
@@ -0,0 +1,57 @@
+# Hybrid Task Cascade for Instance Segmentation
+
+## Introduction
+
+We provide config files to reproduce the results of the CVPR 2019 paper [Hybrid Task Cascade](https://arxiv.org/abs/1901.07518).
+
+```latex
+@inproceedings{chen2019hybrid,
+  title={Hybrid task cascade for instance segmentation},
+  author={Chen, Kai and Pang, Jiangmiao and Wang, Jiaqi and Xiong, Yu and Li, Xiaoxiao and Sun, Shuyang and Feng, Wansen and Liu, Ziwei and Shi, Jianping and Ouyang, Wanli and Loy, Chen Change and Lin, Dahua},
+  booktitle={IEEE Conference on Computer Vision and Pattern Recognition},
+  year={2019}
+}
+```
+
+## Dataset
+
+HTC requires the COCO and [COCO-stuff](http://calvin.inf.ed.ac.uk/wp-content/uploads/data/cocostuffdataset/stuffthingmaps_trainval2017.zip) datasets for training. You need to download COCO-stuff and extract it inside the COCO dataset path.
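+A minimal download-and-extract sketch (stdlib only; it assumes the repository root as the working directory and that the archive unpacks to `train2017`/`val2017` folders at its top level):
+
+```python
+import urllib.request
+import zipfile
+
+URL = ('http://calvin.inf.ed.ac.uk/wp-content/uploads/data/'
+       'cocostuffdataset/stuffthingmaps_trainval2017.zip')
+
+archive, _ = urllib.request.urlretrieve(URL)  # downloads to a temp file
+with zipfile.ZipFile(archive) as zf:
+    zf.extractall('data/coco/stuffthingmaps')
+```
+
+The directory should look like this.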
+
+```none
+mmdetection
+├── mmdet
+├── tools
+├── configs
+├── data
+│   ├── coco
+│   │   ├── annotations
+│   │   ├── train2017
+│   │   ├── val2017
+│   │   ├── test2017
+│   │   ├── stuffthingmaps
+```
+
+## Results and Models
+
+The results on COCO 2017val are shown in the table below (results on test-dev are usually slightly higher than on val).
+
+| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download |
+|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:|
+| R-50-FPN | pytorch | 1x | 8.2 | 5.8 | 42.3 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_1x_coco/htc_r50_fpn_1x_coco_20200317-7332cf16.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_1x_coco/htc_r50_fpn_1x_coco_20200317_070435.log.json) |
+| R-50-FPN | pytorch | 20e | 8.2 | - | 43.3 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_r50_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_20e_coco/htc_r50_fpn_20e_coco_20200319-fe28c577.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_20e_coco/htc_r50_fpn_20e_coco_20200319_070313.log.json) |
+| R-101-FPN | pytorch | 20e | 10.2 | 5.5 | 44.8 | 39.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_r101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r101_fpn_20e_coco/htc_r101_fpn_20e_coco_20200317-9b41b48f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r101_fpn_20e_coco/htc_r101_fpn_20e_coco_20200317_153107.log.json) |
+| X-101-32x4d-FPN | pytorch | 20e | 11.4 | 5.0 | 46.1 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_32x4d_fpn_16x1_20e_coco/htc_x101_32x4d_fpn_16x1_20e_coco_20200318-de97ae01.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_32x4d_fpn_16x1_20e_coco/htc_x101_32x4d_fpn_16x1_20e_coco_20200318_034519.log.json) |
+| X-101-64x4d-FPN | pytorch | 20e | 14.5 | 4.4 | 47.0 | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_16x1_20e_coco/htc_x101_64x4d_fpn_16x1_20e_coco_20200318-b181fd7a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_16x1_20e_coco/htc_x101_64x4d_fpn_16x1_20e_coco_20200318_081711.log.json) |
+
+- In the HTC paper and COCO 2018 Challenge, `score_thr` is set to 0.001 for both baselines and HTC.
+- We use 8 GPUs with 2 images/GPU for R-50 and R-101 models, and 16 GPUs with 1 image/GPU for X-101 models.
+  If you would like to train X-101 HTC with 8 GPUs, you need to change the lr from 0.02 to 0.01, as in the sketch below.
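+
+A minimal sketch of that override, assuming a hypothetical derived config file and the standard mmdetection SGD settings (the halved lr follows the linear scaling rule: 8 GPUs x 1 image halves the effective batch size of 16):
+
+```python
+# htc_x101_64x4d_fpn_8x1_20e_coco.py -- hypothetical, not part of this diff.
+_base_ = './htc_x101_64x4d_fpn_16x1_20e_coco.py'
+
+# The base schedule uses lr=0.02 for an effective batch size of 16 images.
+optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
+```
+
+We also provide a more powerful HTC with DCN and multi-scale training. No test-time augmentation is used.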
+ +| Backbone | Style | DCN | training scales | Lr schd | box AP | mask AP | Config | Download | +|:----------------:|:-------:|:-----:|:---------------:|:-------:|:------:|:-------:|:------:|:--------:| +| X-101-64x4d-FPN | pytorch | c3-c5 | 400~1400 | 20e | 50.4 | 43.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco_20200312-946fd751.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco_20200312_203410.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_r101_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_r101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b42297bf14723f4068ebddaffdeb84a29d2fee44 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_r101_fpn_20e_coco.py @@ -0,0 +1,9 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..929cf464f6091f8380fd1057b282f29f4f7a8b5f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_r50_fpn_1x_coco.py @@ -0,0 +1,56 @@ +_base_ = './htc_without_semantic_r50_fpn_1x_coco.py' +model = dict( + roi_head=dict( + semantic_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[8]), + semantic_head=dict( + type='FusedSemanticHead', + num_ins=5, + fusion_level=1, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=183, + ignore_label=255, + loss_weight=0.2))) +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', with_bbox=True, with_mask=True, with_seg=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 8), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict( + seg_prefix=data_root + 'stuffthingmaps/train2017/', + pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + 
test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_r50_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_r50_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7d2e0116e7d3533d3d6e9567f310a0d1d86cdb42 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_r50_fpn_20e_coco.py @@ -0,0 +1,4 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_without_semantic_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_without_semantic_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..565104f4aa984eb0685548e3bbdf2497cf72b5e9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_without_semantic_r50_fpn_1x_coco.py @@ -0,0 +1,236 @@ +_base_ = [ + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + type='HybridTaskCascade', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + type='RPNHead', + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='HybridTaskCascadeRoIHead', + interleaved=True, + mask_info_flow=True, + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + 
use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=[ + dict( + type='HTCMaskHead', + with_conv_res=False, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + dict( + type='HTCMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + dict( + type='HTCMaskHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)) + ]), + # model training and testing settings + train_cfg=dict( + rpn=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False), + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=[ + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0.5, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.6, + neg_iou_thr=0.6, + min_pos_iou=0.6, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False), + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.7, + min_pos_iou=0.7, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True), + mask_size=28, + pos_weight=-1, + debug=False) + ]), + test_cfg=dict( + rpn=dict( + nms_pre=1000, + max_per_img=1000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + score_thr=0.001, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100, + mask_thr_binary=0.5))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + val=dict(pipeline=test_pipeline), test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0c834f28357a506cdf520b2c23cfe396b5c68709 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py @@ -0,0 +1,19 @@ +_base_ = 
'./htc_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) +data = dict(samples_per_gpu=1, workers_per_gpu=1) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8b0d962b2e920121f6c31df406e8fb6159cbe9f0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py @@ -0,0 +1,19 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) +data = dict(samples_per_gpu=1, workers_per_gpu=1) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c8d870334c31fdbbe16a87b15b34d11b5b90fb81 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py @@ -0,0 +1,43 @@ +_base_ = './htc_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', with_bbox=True, with_mask=True, with_seg=True), + dict( + type='Resize', + img_scale=[(1600, 400), (1600, 1400)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='SegRescale', scale_factor=1 / 8), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg']), +] +data = dict( + samples_per_gpu=1, workers_per_gpu=1, train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/htc/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/metafile.yml new file mode 100644 index 
0000000000000000000000000000000000000000..ad006a5e74c199086ab25fff57e782d1b4778bd1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/htc/metafile.yml @@ -0,0 +1,160 @@ +Collections: + - Name: HTC + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - HTC + - RPN + - ResNet + - ResNeXt + - RoIAlign + Paper: https://arxiv.org/abs/1901.07518 + README: configs/htc/README.md + +Models: + - Name: htc_r50_fpn_1x_coco + In Collection: HTC + Config: configs/htc/htc_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 8.2 + inference time (ms/im): + - value: 172.41 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_1x_coco/htc_r50_fpn_1x_coco_20200317-7332cf16.pth + + - Name: htc_r50_fpn_20e_coco + In Collection: HTC + Config: configs/htc/htc_r50_fpn_20e_coco.py + Metadata: + Training Memory (GB): 8.2 + inference time (ms/im): + - value: 172.41 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r50_fpn_20e_coco/htc_r50_fpn_20e_coco_20200319-fe28c577.pth + + - Name: htc_r101_fpn_20e_coco + In Collection: HTC + Config: configs/htc/htc_r101_fpn_20e_coco.py + Metadata: + Training Memory (GB): 10.2 + inference time (ms/im): + - value: 181.82 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_r101_fpn_20e_coco/htc_r101_fpn_20e_coco_20200317-9b41b48f.pth + + - Name: htc_x101_32x4d_fpn_16x1_20e_coco + In Collection: HTC + Config: configs/htc/htc_x101_32x4d_fpn_16x1_20e_coco.py + Metadata: + Training Resources: 16x V100 GPUs + Batch Size: 16 + Training Memory (GB): 11.4 + inference time (ms/im): + - value: 200 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_32x4d_fpn_16x1_20e_coco/htc_x101_32x4d_fpn_16x1_20e_coco_20200318-de97ae01.pth + + - Name: htc_x101_64x4d_fpn_16x1_20e_coco + In Collection: HTC + Config: configs/htc/htc_x101_64x4d_fpn_16x1_20e_coco.py + Metadata: + Training Resources: 16x V100 GPUs + Batch Size: 16 + Training Memory (GB): 14.5 + inference time (ms/im): + - value: 227.27 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.4 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_16x1_20e_coco/htc_x101_64x4d_fpn_16x1_20e_coco_20200318-b181fd7a.pth
+
+  - Name: htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco
+    In Collection: HTC
+    Config: configs/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco.py
+    Metadata:
+      Training Resources: 16x V100 GPUs
+      Batch Size: 16
+      Epochs: 20
+    Results:
+      - Task: Object Detection
+        Dataset: COCO
+        Metrics:
+          box AP: 50.4
+      - Task: Instance Segmentation
+        Dataset: COCO
+        Metrics:
+          mask AP: 43.8
+    Weights: https://download.openmmlab.com/mmdetection/v2.0/htc/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco/htc_x101_64x4d_fpn_dconv_c3-c5_mstrain_400_1400_16x1_20e_coco_20200312-946fd751.pth
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..02e9676c1765aebaf989e99acb889d0479d61e2b
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/README.md
@@ -0,0 +1,44 @@
+# InstaBoost for MMDetection
+
+Configs in this directory are the implementation of the ICCV 2019 paper "InstaBoost: Boosting Instance Segmentation Via Probability Map Guided Copy-Pasting" and are provided by the authors of the paper. InstaBoost is a data augmentation method for object detection and instance segmentation. The paper has been released on [`arXiv`](https://arxiv.org/abs/1908.07801).
+
+```latex
+@inproceedings{fang2019instaboost,
+  title={Instaboost: Boosting instance segmentation via probability map guided copy-pasting},
+  author={Fang, Hao-Shu and Sun, Jianhua and Wang, Runzhong and Gou, Minghao and Li, Yong-Lu and Lu, Cewu},
+  booktitle={Proceedings of the IEEE International Conference on Computer Vision},
+  pages={682--691},
+  year={2019}
+}
+```
+
+## Usage
+
+### Requirements
+
+You need to install `instaboostfast` before using it.
+
+```shell
+pip install instaboostfast
+```
+
+The code and more details can be found [here](https://github.com/GothicAi/Instaboost).
+
+### Integration with MMDetection
+
+InstaBoost has already been integrated into the data pipeline, so all you need to do is add or change the **InstaBoost** configuration after **LoadImageFromFile**. We have provided examples like [this](mask_rcnn_r50_fpn_instaboost_4x#L121), and you can refer to [`InstaBoostConfig`](https://github.com/GothicAi/InstaBoost-pypi#instaboostconfig) for more details.
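+
+A trimmed sketch of that pattern, with the values used by the InstaBoost configs added later in this diff (see `mask_rcnn_r50_fpn_instaboost_4x_coco.py`; the remaining transforms are the usual Mask R-CNN pipeline):
+
+```python
+train_pipeline = [
+    dict(type='LoadImageFromFile'),
+    dict(
+        type='InstaBoost',
+        action_candidate=('normal', 'horizontal', 'skip'),
+        action_prob=(1, 0, 0),
+        scale=(0.8, 1.2),
+        dx=15,
+        dy=15,
+        theta=(-1, 1),
+        color_prob=0.5,
+        hflag=False,
+        aug_ratio=0.5),  # augment roughly half of the training images
+    dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
+    # ... Resize, RandomFlip, Normalize, Pad, DefaultFormatBundle, Collect
+]
+```
+
+## Results and Models
+
+- All models were trained on `coco_2017_train` and tested on `coco_2017_val` for convenience of evaluation and comparison. In the paper, the results are obtained from `test-dev`.
+- To balance accuracy and training time when using InstaBoost, models released on this page are all trained for 48 epochs. Other training and testing configs strictly follow the original framework.
+- For results and models in MMDetection V1.x, please refer to [Instaboost](https://github.com/GothicAi/Instaboost).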
+ +| Network | Backbone | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :--------: | :-----: | :------: | :------------: | :------:| :-----: | :------: | :-----------------: | +| Mask R-CNN | R-50-FPN | 4x | 4.4 | 17.5 | 40.6 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco/mask_rcnn_r50_fpn_instaboost_4x_coco_20200307-d025f83a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco/mask_rcnn_r50_fpn_instaboost_4x_coco_20200307_223635.log.json) | +| Mask R-CNN | R-101-FPN | 4x | 6.4 | | 42.5 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco/mask_rcnn_r101_fpn_instaboost_4x_coco_20200703_235738-f23f3a5f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco/mask_rcnn_r101_fpn_instaboost_4x_coco_20200703_235738.log.json) | +| Mask R-CNN | X-101-64x4d-FPN | 4x | 10.7 | | 44.7 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco_20200515_080947-8ed58c1b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco_20200515_080947.log.json) | +| Cascade R-CNN | R-50-FPN | 4x | 6.0 | 12.0 | 43.7 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco_20200307-c19d98d9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco_20200307_223646.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/cascade_mask_rcnn_r101_fpn_instaboost_4x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/cascade_mask_rcnn_r101_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9d0515d73d4276883f495d8b30b793afd9fa2dc5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/cascade_mask_rcnn_r101_fpn_instaboost_4x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a89a81f5c76586d6d1b15abf74f3740e9f439762 --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='InstaBoost', + action_candidate=('normal', 'horizontal', 'skip'), + action_prob=(1, 0, 0), + scale=(0.8, 1.2), + dx=15, + dy=15, + theta=(-1, 1), + color_prob=0.5, + hflag=False, + aug_ratio=0.5), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[32, 44]) +runner = dict(type='EpochBasedRunner', max_epochs=48) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/cascade_mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/cascade_mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d67b7992ab1fa5d8190ff1a0d0c52a0e832c205d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/cascade_mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py @@ -0,0 +1,14 @@ +_base_ = './cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ebbb43e918753e464a8e1e7f9ff1fed702c1b64d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_instaboost_4x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..55ca62b7bc6c9cdc97018bcfbe5b109038470dd3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py @@ -0,0 +1,28 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='InstaBoost', + action_candidate=('normal', 'horizontal', 'skip'), + action_prob=(1, 0, 0), + scale=(0.8, 1.2), + dx=15, + dy=15, + theta=(-1, 1), + color_prob=0.5, + hflag=False, + aug_ratio=0.5), + dict(type='LoadAnnotations', 
with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# learning policy +lr_config = dict(step=[32, 44]) +runner = dict(type='EpochBasedRunner', max_epochs=48) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2010f44819f625f7da5196270f3721274a390881 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_instaboost_4x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..f85fdaaf18f5b09e76795c436faba675989446c5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/instaboost/metafile.yml @@ -0,0 +1,94 @@ +Collections: + - Name: InstaBoost + Metadata: + Training Data: COCO + Training Techniques: + - InstaBoost + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Paper: https://arxiv.org/abs/1908.07801 + README: configs/instaboost/README.md + +Models: + - Name: mask_rcnn_r50_fpn_instaboost_4x_coco + In Collection: InstaBoost + Config: configs/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 57.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 48 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r50_fpn_instaboost_4x_coco/mask_rcnn_r50_fpn_instaboost_4x_coco_20200307-d025f83a.pth + + - Name: mask_rcnn_r101_fpn_instaboost_4x_coco + In Collection: InstaBoost + Config: configs/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco.py + Metadata: + Training Memory (GB): 6.4 + Epochs: 48 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_r101_fpn_instaboost_4x_coco/mask_rcnn_r101_fpn_instaboost_4x_coco_20200703_235738-f23f3a5f.pth + + - Name: mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco + In Collection: InstaBoost + Config: configs/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco.py + Metadata: + Training Memory (GB): 10.7 + Epochs: 48 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask 
AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/instaboost/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco/mask_rcnn_x101_64x4d_fpn_instaboost_4x_coco_20200515_080947-8ed58c1b.pth + + - Name: cascade_mask_rcnn_r50_fpn_instaboost_4x_coco + In Collection: InstaBoost + Config: configs/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco.py + Metadata: + Training Memory (GB): 6.0 + inference time (ms/im): + - value: 83.33 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 48 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/instaboost/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco/cascade_mask_rcnn_r50_fpn_instaboost_4x_coco_20200307-c19d98d9.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ld/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/ld/README.md new file mode 100644 index 0000000000000000000000000000000000000000..0177f1e676bd84ecc1f78f28259b31d8b7c00c2d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ld/README.md @@ -0,0 +1,31 @@ +# Localization Distillation for Object Detection + +## Introduction + + + +```latex +@Article{zheng2021LD, + title={Localization Distillation for Object Detection}, + author={Zheng, Zhaohui and Ye, Rongguang and Wang, Ping and Wang, Jun and Ren, Dongwei and Zuo, Wangmeng}, + journal={arXiv:2102.12252}, + year={2021} +} +``` + +### GFocalV1 with LD + +| Teacher | Student | Training schedule | Mini-batch size | AP (val) | AP50 (val) | AP75 (val) | Config | +| :-------: | :-----: | :---------------: | :-------------: | :------: | :--------: | :--------: | :--------------: | +| -- | R-18 | 1x | 6 | 35.8 | 53.1 | 38.2 | | +| R-101 | R-18 | 1x | 6 | 36.5 | 52.9 | 39.3 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py) | +| -- | R-34 | 1x | 6 | 38.9 | 56.6 | 42.2 | | +| R-101 | R-34 | 1x | 6 | 39.8 | 56.6 | 43.1 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/ld/ld_r34_gflv1_r101_fpn_coco_1x.py) | +| -- | R-50 | 1x | 6 | 40.1 | 58.2 | 43.1 | | +| R-101 | R-50 | 1x | 6 | 41.1 | 58.7 | 44.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/ld/ld_r50_gflv1_r101_fpn_coco_1x.py) | +| -- | R-101 | 2x | 6 | 44.6 | 62.9 | 48.4 | | +| R-101-DCN | R-101 | 2x | 6 | 45.4 | 63.1 | 49.5 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/ld/ld_r101_gflv1_r101dcn_fpn_coco_2x.py) | + +## Note + +- Meaning of the config name: ld_r18(student model)_gflv1(based on GFocalV1)_r101(teacher model)_fpn(neck)_coco(dataset)_1x(12 epochs).py diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ld/ld_r101_gflv1_r101dcn_fpn_coco_2x.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ld/ld_r101_gflv1_r101dcn_fpn_coco_2x.py new file mode 100644 index 0000000000000000000000000000000000000000..1cbdb4cf5a5d5afa60327d80b31475500d5f3c6c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ld/ld_r101_gflv1_r101dcn_fpn_coco_2x.py @@ -0,0 +1,44 @@ +_base_ = ['./ld_r18_gflv1_r101_fpn_coco_1x.py'] +teacher_ckpt = 'https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco_20200630_102002-134b07df.pth' # noqa +model = dict( + teacher_config='configs/gfl/gfl_r101_fpn_dconv_c3-c5_mstrain_2x_coco.py', + 
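# the GFL R-101-DCN model referenced by teacher_config above serves only as the distillation teacher; its weights are loaded from teacher_ckpt and are not updated while the student trains +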
teacher_ckpt=teacher_ckpt, + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5)) + +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) +# multi-scale training +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py new file mode 100644 index 0000000000000000000000000000000000000000..18dce814be9036e6af70389fc60a5b4e42bc8efe --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py @@ -0,0 +1,62 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +teacher_ckpt = 'https://download.openmmlab.com/mmdetection/v2.0/gfl/gfl_r101_fpn_mstrain_2x_coco/gfl_r101_fpn_mstrain_2x_coco_20200629_200126-dd12f847.pth' # noqa +model = dict( + type='KnowledgeDistillationSingleStageDetector', + teacher_config='configs/gfl/gfl_r101_fpn_mstrain_2x_coco.py', + teacher_ckpt=teacher_ckpt, + backbone=dict( + type='ResNet', + depth=18, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet18')), + neck=dict( + type='FPN', + in_channels=[64, 128, 256, 512], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='LDHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + loss_cls=dict( + type='QualityFocalLoss', + use_sigmoid=True, + beta=2.0, + loss_weight=1.0), + loss_dfl=dict(type='DistributionFocalLoss', loss_weight=0.25), + loss_ld=dict( + type='KnowledgeDistillationKLDivLoss', loss_weight=0.25, T=10), + reg_max=16, + loss_bbox=dict(type='GIoULoss', loss_weight=2.0)), + # training and testing settings + train_cfg=dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) + +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ld/ld_r34_gflv1_r101_fpn_coco_1x.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/ld/ld_r34_gflv1_r101_fpn_coco_1x.py new file mode 100644 index 0000000000000000000000000000000000000000..3b6996d49b06ffcd0803e86cb33f8a35b02911dc --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ld/ld_r34_gflv1_r101_fpn_coco_1x.py @@ -0,0 +1,19 @@ +_base_ = ['./ld_r18_gflv1_r101_fpn_coco_1x.py'] +model = dict( + backbone=dict( + type='ResNet', + depth=34, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet34')), + neck=dict( + type='FPN', + in_channels=[64, 128, 256, 512], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ld/ld_r50_gflv1_r101_fpn_coco_1x.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ld/ld_r50_gflv1_r101_fpn_coco_1x.py new file mode 100644 index 0000000000000000000000000000000000000000..2b18785ae41f6fd11a933ca046a34b967306f9b6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ld/ld_r50_gflv1_r101_fpn_coco_1x.py @@ -0,0 +1,19 @@ +_base_ = ['./ld_r18_gflv1_r101_fpn_coco_1x.py'] +model = dict( + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ld/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/ld/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..dc05e5a7a522f9aa3cb837b2c6160d4ba3cead0e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ld/metafile.yml @@ -0,0 +1,67 @@ +Collections: + - Name: Localization Distillation + Metadata: + Training Data: COCO + Training Techniques: + - Localization Distillation + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + Paper: https://arxiv.org/abs/2102.12252 + README: configs/ld/README.md + +Models: + - Name: ld_r18_gflv1_r101_fpn_coco_1x + In Collection: Localization Distillation + Config: configs/ld/ld_r18_gflv1_r101_fpn_coco_1x.py + Metadata: + Teacher: R-101 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.5 + box AP@0.5: 52.9 + box AP@0.75: 39.3 + + - Name: ld_r34_gflv1_r101_fpn_coco_1x + In Collection: Localization Distillation + Config: configs/ld/ld_r34_gflv1_r101_fpn_coco_1x.py + Metadata: + Teacher: R-101 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.8 + box AP@0.5: 56.6 + box AP@0.75: 43.1 + + - Name: ld_r50_gflv1_r101_fpn_coco_1x + In Collection: Localization Distillation + Config: configs/ld/ld_r50_gflv1_r101_fpn_coco_1x.py + Metadata: + Teacher: R-101 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + box AP@0.5: 58.7 + box AP@0.75: 44.9 + + - Name: ld_r101_gflv1_r101dcn_fpn_coco_2x + In Collection: Localization Distillation + Config: configs/ld/ld_r101_gflv1_r101dcn_fpn_coco_2x.py + Metadata: + Teacher: R-101-DCN + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.4 + box AP@0.5: 63.1 + 
box AP@0.75: 49.5 diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/README.md new file mode 100644 index 0000000000000000000000000000000000000000..38a2a0e4721f0ada69acd7e927ef5adf197e68b0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/README.md @@ -0,0 +1,53 @@ +# Legacy Configs in MMDetection V1.x + + + +Configs in this directory implement the legacy configs used by MMDetection V1.x and its model zoos. + +To help users convert their models from V1.x to MMDetection V2.0, we provide V1.x configs for running inference with the converted V1.x models. +Due to the BC-breaking changes introduced in MMDetection V2.0, running inference with the same model weights in these two versions will produce different results. The difference is within 1% absolute AP, as shown in the table below. + +## Usage + +To upgrade the model version, users need to take the following steps. + +### 1. Convert model weights + +There are three main differences in the model weights between the V1.x and V2.0 codebases. + +1. Since the class order in every detector's classification branch is reordered, all legacy model weights need to go through the conversion process. +2. The regression and segmentation heads no longer contain the background channel. Weights in these background channels must be removed to be compatible with the current codebase. +3. For two-stage detectors, their weights need to be upgraded since MMDetection V2.0 refactors all two-stage detectors with `RoIHead`. + +Users can apply the same modifications to their self-implemented +detectors. We provide a script `tools/model_converters/upgrade_model_version.py` to convert the model weights in the V1.x model zoo. + +```bash +python tools/model_converters/upgrade_model_version.py ${OLD_MODEL_PATH} ${NEW_MODEL_PATH} --num-classes ${NUM_CLASSES} +``` + +- OLD_MODEL_PATH: the path to the model weights in the 1.x version. +- NEW_MODEL_PATH: the path to save the converted model weights in the 2.0 version. +- NUM_CLASSES: the number of classes of the original model weights, usually 81 for the COCO dataset and 21 for the VOC dataset. + The number of classes in a V2.0 model equals that of the corresponding V1.x model minus 1. For example, converting a V1.x COCO model could look like `python tools/model_converters/upgrade_model_version.py old_coco.pth new_coco.pth --num-classes 81` (the file paths here are placeholders). + +### 2. Use configs with legacy settings + +After converting the model weights, check out the v1.2 release to find the corresponding config file that uses the legacy settings. +The V1.x models usually need these three legacy modules: `LegacyAnchorGenerator`, `LegacyDeltaXYWHBBoxCoder`, and `RoIAlign(align=False)`. +For models using ResNet Caffe backbones, they also need to change the pretrained-model name and the corresponding `img_norm_cfg`. +An example is [`retinanet_r50_caffe_fpn_1x_coco_v1.py`](retinanet_r50_caffe_fpn_1x_coco_v1.py). +Then use the config to test the model weights. For most models, the obtained results should be close to those in V1.x. +We provide configs of some common structures in this directory. + +## Performance + +The performance changes after converting the models in this directory are listed below. 
+| Method | Style | Lr schd | V1.x box AP | V1.x mask AP | V2.0 box AP | V2.0 mask AP | Config | Download | | :-------------: | :-----: | :-----: | :------:| :-----: |:------:| :-----: | :-------: |:---------:| +| Mask R-CNN R-50-FPN | pytorch | 1x | 37.3 | 34.2 | 36.8 | 33.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/mask_rcnn_r50_fpn_1x_20181010-069fa190.pth)| +| RetinaNet R-50-FPN | caffe | 1x | 35.8 | - | 35.4 | - | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/retinanet_r50_caffe_fpn_1x_coco_v1.py) | - | +| RetinaNet R-50-FPN | pytorch | 1x | 35.6 |-|35.2| -| [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/retinanet_r50_fpn_1x_20181125-7b0c2548.pth) | +| Cascade Mask R-CNN R-50-FPN | pytorch | 1x | 41.2 | 35.7 |40.8| 35.6| [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/cascade_mask_rcnn_r50_fpn_1x_20181123-88b170c9.pth) | +| SSD300-VGG16 | caffe | 120e | 25.7 |-|25.4|-| [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/legacy_1.x/ssd300_coco_v1.py) | [model](https://s3.ap-northeast-2.amazonaws.com/open-mmlab/mmdetection/models/ssd300_coco_vgg16_caffe_120e_20181221-84d7110b.pth) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..fc9d0048188406348416fe5012af9985f62bbb56 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py @@ -0,0 +1,79 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='CascadeRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + rpn_head=dict( + anchor_generator=dict(type='LegacyAnchorGenerator', center_offset=0.5), + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0])), + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False)), + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + reg_class_agnostic=True, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2])), + dict( + type='Shared2FCBBoxHead', + reg_class_agnostic=True, + in_channels=256, + fc_out_channels=1024, 
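+ # second cascade stage: same head as stage 1, but the halved target_stds below tighten the box-delta normalization for the refined proposals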
+ roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1])), + dict( + type='Shared2FCBBoxHead', + reg_class_agnostic=True, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067])), + ], + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=14, + sampling_ratio=2, + aligned=False)))) +dist_params = dict(backend='nccl', port=29515) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/faster_rcnn_r50_fpn_1x_coco_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/faster_rcnn_r50_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..8c573bef34628babaee43183b260cd06e22b7c46 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/faster_rcnn_r50_fpn_1x_coco_v1.py @@ -0,0 +1,38 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='FasterRCNN', + backbone=dict( + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + rpn_head=dict( + type='RPNHead', + anchor_generator=dict( + type='LegacyAnchorGenerator', + center_offset=0.5, + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + type='StandardRoIHead', + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=dict( + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn_proposal=dict(max_per_img=2000), + rcnn=dict(assigner=dict(match_low_quality=True)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..04581bbc901d0fda0ec8c6b4a8078ae04f21473a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/mask_rcnn_r50_fpn_1x_coco_v1.py @@ -0,0 +1,34 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + rpn_head=dict( + anchor_generator=dict(type='LegacyAnchorGenerator', center_offset=0.5), + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False)), + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict( + type='RoIAlign', + output_size=14, + sampling_ratio=2, + aligned=False)), + bbox_head=dict( + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + 
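+ # note: all RoI layers above use RoIAlign with aligned=False, matching the V1.x RoIAlign(align=False) behavior that converted legacy weights expect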
+ # model training and testing settings + train_cfg=dict( + rpn_proposal=dict(max_per_img=2000), + rcnn=dict(assigner=dict(match_low_quality=True)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/retinanet_r50_caffe_fpn_1x_coco_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/retinanet_r50_caffe_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..a63d248c435c8b7035f00299a6f97f1fc18e3be5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/retinanet_r50_caffe_fpn_1x_coco_v1.py @@ -0,0 +1,41 @@ +_base_ = './retinanet_r50_fpn_1x_coco_v1.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[102.9801, 115.9465, 122.7717], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..6198b9717957374ce734ca74de5f54dda44123b9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/retinanet_r50_fpn_1x_coco_v1.py @@ -0,0 +1,17 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + bbox_head=dict( + type='RetinaHead', + anchor_generator=dict( + type='LegacyAnchorGenerator', + center_offset=0.5, + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict(type='LegacyDeltaXYWHBBoxCoder'), + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/ssd300_coco_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/ssd300_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..b194e7651ede006c5101bff1056749edf4d249cd --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/legacy_1.x/ssd300_coco_v1.py @@ -0,0 +1,79 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +# model settings +input_size = 300 +model = dict( + bbox_head=dict( + type='SSDHead', + anchor_generator=dict( + 
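# LegacySSDAnchorGenerator reproduces the V1.x SSD anchor layout, so converted V1.x SSD weights keep their accuracy +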
type='LegacySSDAnchorGenerator', + scale_major=False, + input_size=input_size, + basesize_ratio_range=(0.15, 0.9), + strides=[8, 16, 32, 64, 100, 300], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]]), + bbox_coder=dict( + type='LegacyDeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]))) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=3, + train=dict( + _delete_=True, + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=2e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict(_delete_=True) +dist_params = dict(backend='nccl', port=29555) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..ef2f6fe257305d4a236c16fb5a73d56d77e7d512 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/README.md @@ -0,0 +1,28 @@ +# Libra R-CNN: Towards Balanced Learning for Object Detection + +## Introduction + + + +We provide config files to reproduce the results in the CVPR 2019 paper [Libra R-CNN](https://arxiv.org/pdf/1904.02701.pdf). + +```latex +@inproceedings{pang2019libra, + title={Libra R-CNN: Towards Balanced Learning for Object Detection}, + author={Pang, Jiangmiao and Chen, Kai and Shi, Jianping and Feng, Huajun and Ouyang, Wanli and Lin, Dahua}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019} +} +``` + +## Results and models + +The results on COCO 2017val are shown in the table below. 
(results on test-dev are usually slightly higher than val) + +| Architecture | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:------------:|:---------------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| Faster R-CNN | R-50-FPN | pytorch | 1x | 4.6 | 19.0 | 38.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco/libra_faster_rcnn_r50_fpn_1x_coco_20200130-3afee3a9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco/libra_faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +| Fast R-CNN | R-50-FPN | pytorch | 1x | | | | | +| Faster R-CNN | R-101-FPN | pytorch | 1x | 6.5 | 14.4 | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco/libra_faster_rcnn_r101_fpn_1x_coco_20200203-8dba6a5a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco/libra_faster_rcnn_r101_fpn_1x_coco_20200203_001405.log.json) | +| Faster R-CNN | X-101-64x4d-FPN | pytorch | 1x | 10.8 | 8.5 | 42.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco/libra_faster_rcnn_x101_64x4d_fpn_1x_coco_20200315-3a7d0488.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco/libra_faster_rcnn_x101_64x4d_fpn_1x_coco_20200315_231625.log.json) | +| RetinaNet | R-50-FPN | pytorch | 1x | 4.2 | 17.7 | 37.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_retinanet_r50_fpn_1x_coco/libra_retinanet_r50_fpn_1x_coco_20200205-804d94ce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_retinanet_r50_fpn_1x_coco/libra_retinanet_r50_fpn_1x_coco_20200205_112757.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_fast_rcnn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_fast_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..efbedc863c7eeeaef331121416141334906fef3d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_fast_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,50 @@ +_base_ = '../fast_rcnn/fast_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + neck=[ + dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + dict( + type='BFP', + in_channels=256, + num_levels=5, + refine_level=2, + refine_type='non_local') + ], + roi_head=dict( + bbox_head=dict( + loss_bbox=dict( + _delete_=True, + type='BalancedL1Loss', + alpha=0.5, + gamma=1.5, + beta=1.0, + loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + sampler=dict( + _delete_=True, + type='CombinedSampler', + num=512, + pos_fraction=0.25, + add_gt_as_proposals=True, + pos_sampler=dict(type='InstanceBalancedPosSampler'), + 
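# IoU-balanced negative sampling (Libra R-CNN): negative candidates are bucketed into num_bins IoU intervals and sampled evenly across them, so hard negatives are not swamped by easy ones +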
neg_sampler=dict( + type='IoUBalancedNegSampler', + floor_thr=-1, + floor_fraction=0, + num_bins=3))))) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +data = dict( + train=dict(proposal_file=data_root + + 'libra_proposals/rpn_r50_fpn_1x_train2017.pkl'), + val=dict(proposal_file=data_root + + 'libra_proposals/rpn_r50_fpn_1x_val2017.pkl'), + test=dict(proposal_file=data_root + + 'libra_proposals/rpn_r50_fpn_1x_val2017.pkl')) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e899706b8ca7780a95b41de14b85b05b427f9595 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './libra_faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..89a0d7b2bd83216dfc4db120fe9f610b23376681 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,41 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +# model settings +model = dict( + neck=[ + dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5), + dict( + type='BFP', + in_channels=256, + num_levels=5, + refine_level=2, + refine_type='non_local') + ], + roi_head=dict( + bbox_head=dict( + loss_bbox=dict( + _delete_=True, + type='BalancedL1Loss', + alpha=0.5, + gamma=1.5, + beta=1.0, + loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rpn=dict(sampler=dict(neg_pos_ub=5), allowed_border=-1), + rcnn=dict( + sampler=dict( + _delete_=True, + type='CombinedSampler', + num=512, + pos_fraction=0.25, + add_gt_as_proposals=True, + pos_sampler=dict(type='InstanceBalancedPosSampler'), + neg_sampler=dict( + type='IoUBalancedNegSampler', + floor_thr=-1, + floor_fraction=0, + num_bins=3))))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..06740a778f821d74b5206a9cada969bfee0a84cf --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './libra_faster_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..be2742098fb8f1e46bbb16c9d3e2e20c2e3083aa --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,26 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' +# model settings +model = dict( + neck=[ + dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_input', + num_outs=5), + dict( + type='BFP', + in_channels=256, + num_levels=5, + refine_level=1, + refine_type='non_local') + ], + bbox_head=dict( + loss_bbox=dict( + _delete_=True, + type='BalancedL1Loss', + alpha=0.5, + gamma=1.5, + beta=0.11, + loss_weight=1.0))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..574cac6076eb809e33d23dde57bd840c4f7f6877 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/libra_rcnn/metafile.yml @@ -0,0 +1,94 @@ +Collections: + - Name: Libra R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - IoU-Balanced Sampling + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Balanced Feature Pyramid + Paper: https://arxiv.org/abs/1904.02701 + README: configs/libra_rcnn/README.md + +Models: + - Name: libra_faster_rcnn_r50_fpn_1x_coco + In Collection: Libra R-CNN + Config: configs/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.6 + inference time (ms/im): + - value: 52.63 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r50_fpn_1x_coco/libra_faster_rcnn_r50_fpn_1x_coco_20200130-3afee3a9.pth + + - Name: libra_faster_rcnn_r101_fpn_1x_coco + In Collection: Libra R-CNN + Config: configs/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.5 + inference time (ms/im): + - value: 69.44 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_r101_fpn_1x_coco/libra_faster_rcnn_r101_fpn_1x_coco_20200203-8dba6a5a.pth + + - Name: libra_faster_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Libra R-CNN + Config: configs/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.8 + inference time (ms/im): + - value: 117.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_faster_rcnn_x101_64x4d_fpn_1x_coco/libra_faster_rcnn_x101_64x4d_fpn_1x_coco_20200315-3a7d0488.pth + + - Name: libra_retinanet_r50_fpn_1x_coco + In Collection: Libra R-CNN + Config: configs/libra_rcnn/libra_retinanet_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + inference time (ms/im): + - value: 56.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box 
AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/libra_rcnn/libra_retinanet_r50_fpn_1x_coco/libra_retinanet_r50_fpn_1x_coco_20200205-804d94ce.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/README.md new file mode 100644 index 0000000000000000000000000000000000000000..157e8724267e7ab4877b8b7620bc5b8449b0533e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/README.md @@ -0,0 +1,44 @@ +# LVIS dataset + +## Introduction + + + +```latex +@inproceedings{gupta2019lvis, + title={{LVIS}: A Dataset for Large Vocabulary Instance Segmentation}, + author={Gupta, Agrim and Dollar, Piotr and Girshick, Ross}, + booktitle={Proceedings of the {IEEE} Conference on Computer Vision and Pattern Recognition}, + year={2019} +} +``` + +## Common Settings + +* Please follow the [install guide](../../docs/install.md#install-mmdetection) to install the open-mmlab forked cocoapi first. +* Run the following command to install the lvis-api. + + ```shell + pip install git+https://github.com/lvis-dataset/lvis-api.git + ``` + +* All experiments use the oversampling strategy described [here](../../docs/tutorials/new_dataset.md#class-balanced-dataset) with an oversample threshold of `1e-3`. +* LVIS v0.5 is about half the size of COCO, so schedule `2x` on LVIS runs roughly the same number of iterations as `1x` on COCO. + +## Results and models of LVIS v0.5 + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | | :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: |:--------: | +| R-50-FPN | pytorch | 2x | - | - | 26.1 | 25.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis-dbd06831.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_20200531_160435.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 27.1 | 27.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis-54582ee2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_20200601_134748.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 26.7 | 26.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis-3cf55ea2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_20200531_221749.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 26.4 | 26.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis-1c99a5ad.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_20200601_194651.log.json) | + +## Results and models of LVIS v1 + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | pytorch | 1x | 9.1 | - | 22.5 | 21.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1-aa78ac3d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1-20200829_061305.log.json) | +| R-101-FPN | pytorch | 1x | 10.8 | - | 24.6 | 23.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1-ec55ce32.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1-20200829_070959.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 11.8 | - | 26.7 | 25.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-ebbc5c81.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-20200829_071317.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 14.6 | - | 27.2 | 25.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-43d9edfe.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1-20200830_060206.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..0f017f585c78d9d8e1eebaeca0a9a6c518a6295a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff 
--git a/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 0000000000000000000000000000000000000000..637f4a63a55d24133a994eacc1e7a6521bfa3b9f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_r101_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..92ddb526d7ea7a011e10aa82cbd1bd62773b35d6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,31 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/lvis_v1_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict(num_classes=1203), mask_head=dict(num_classes=1203)), + test_cfg=dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(dataset=dict(pipeline=train_pipeline))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 0000000000000000000000000000000000000000..d53c5dc6a1470e4cca209a26c8261dd66c60e9b1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,31 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/lvis_v0.5_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict(num_classes=1230), mask_head=dict(num_classes=1230)), + test_cfg=dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', 
**img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(dataset=dict(pipeline=train_pipeline))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..a6115c1ad03317e32915102212cf878101fa671d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 0000000000000000000000000000000000000000..96b625230f37906e32ad872b6e947285432f60d6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_x101_32x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..0f95a7321d9a7b7f9cb98adf31d6238156c21de6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_1x_lvis_v1.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_1x_lvis_v1.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py new file mode 100644 index 0000000000000000000000000000000000000000..986acda589899e49c7d22df6455200e22bc5a940 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/lvis/mask_rcnn_x101_64x4d_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_mstrain_2x_lvis_v0.5.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + 
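# ResNeXt-101 64x4d: cardinality (groups) of 64 with a bottleneck width of 4. +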
base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..77cfabea9844598a60d364324358aeb11310afce --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/README.md @@ -0,0 +1,48 @@ +# Mask R-CNN + +## Introduction + + + +```latex +@article{He_2017, + title={Mask R-CNN}, + journal={2017 IEEE International Conference on Computer Vision (ICCV)}, + publisher={IEEE}, + author={He, Kaiming and Gkioxari, Georgia and Dollar, Piotr and Girshick, Ross}, + year={2017}, + month={Oct} +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 4.3 | | 38.0 | 34.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco/mask_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.38__segm_mAP-0.344_20200504_231812-0ebd1859.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco/mask_rcnn_r50_caffe_fpn_1x_coco_20200504_231812.log.json) | +| R-50-FPN | pytorch | 1x | 4.4 | 16.1 | 38.2 | 34.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205_050542.log.json) | +| R-50-FPN | pytorch | 2x | - | - | 39.2 | 35.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_2x_coco/mask_rcnn_r50_fpn_2x_coco_bbox_mAP-0.392__segm_mAP-0.354_20200505_003907-3e542a40.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_2x_coco/mask_rcnn_r50_fpn_2x_coco_20200505_003907.log.json) | +| R-101-FPN | caffe | 1x | | | 40.4 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco/mask_rcnn_r101_caffe_fpn_1x_coco_20200601_095758-805e06c1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco/mask_rcnn_r101_caffe_fpn_1x_coco_20200601_095758.log.json)| +| R-101-FPN | pytorch | 1x | 6.4 | 13.5 | 40.0 | 36.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204-1efe0ed5.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204_144809.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 40.8 | 36.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_2x_coco/mask_rcnn_r101_fpn_2x_coco_bbox_mAP-0.408__segm_mAP-0.366_20200505_071027-14b391c7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_2x_coco/mask_rcnn_r101_fpn_2x_coco_20200505_071027.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.6 | 11.3 | 41.9 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205-478d0b67.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205_034906.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 42.2 | 37.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco/mask_rcnn_x101_32x4d_fpn_2x_coco_bbox_mAP-0.422__segm_mAP-0.378_20200506_004702-faef898c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco/mask_rcnn_x101_32x4d_fpn_2x_coco_20200506_004702.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.7 | 8.0 | 42.8 | 38.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco/mask_rcnn_x101_64x4d_fpn_1x_coco_20200201-9352eb0d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco/mask_rcnn_x101_64x4d_fpn_1x_coco_20200201_124310.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 42.7 | 38.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco/mask_rcnn_x101_64x4d_fpn_2x_coco_20200509_224208-39d6f70c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco/mask_rcnn_x101_64x4d_fpn_2x_coco_20200509_224208.log.json) | +| X-101-32x8d-FPN | pytorch | 1x | - | - | 42.8 | 38.3 | | + +## Pre-trained Models + +We also train some models with longer schedules and multi-scale training. Users can fine-tune them for downstream tasks, as sketched below.
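+
+As a concrete example, fine-tuning usually amounts to inheriting one of these configs and pointing `load_from` at the released checkpoint. A minimal sketch (the `num_classes` value and the shortened schedule below are illustrative placeholders, not part of any released config):
+
+```python
+_base_ = './mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py'
+
+# Initialize from the released multi-scale 3x checkpoint instead of the
+# ImageNet-pretrained backbone.
+load_from = 'https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_fpn_mstrain-poly_3x_coco_20210524_201154-21b550bb.pth'  # noqa
+
+# Adapt both heads to the downstream label set (10 classes is a placeholder).
+model = dict(
+    roi_head=dict(
+        bbox_head=dict(num_classes=10), mask_head=dict(num_classes=10)))
+
+# A short schedule with a reduced learning rate is usually enough when
+# starting from COCO weights.
+optimizer = dict(type='SGD', lr=0.0025, momentum=0.9, weight_decay=0.0001)
+lr_config = dict(step=[8, 11])
+runner = dict(type='EpochBasedRunner', max_epochs=12)
+```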
+ +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| [R-50-FPN](./mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py) | caffe | 2x | 4.3 | | 40.3 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco_bbox_mAP-0.403__segm_mAP-0.365_20200504_231822-a75c98ce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco_20200504_231822.log.json) +| [R-50-FPN](./mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py) | caffe | 3x | 4.3 | | 40.8 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_20200504_163245.log.json) +| [R-50-FPN](./mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py) | pytorch | 3x | 4.1 | | 40.9 | 37.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_fpn_mstrain-poly_3x_coco_20210524_201154-21b550bb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_fpn_mstrain-poly_3x_coco_20210524_201154.log.json) +| [R-101-FPN](./mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py) | caffe | 3x | 5.9 | | 42.9 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco_20210526_132339-3c33ce02.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco_20210526_132339.log.json) +| [R-101-FPN](./mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py) | pytorch | 3x | 6.1 | | 42.7 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_fpn_mstrain-poly_3x_coco_20210524_200244-5675c317.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_fpn_mstrain-poly_3x_coco_20210524_200244.log.json) +| [X-101-32x4d-FPN](./mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py) | pytorch | 3x | 7.3 | | 43.6 | 39.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py) |
[model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco_20210524_201410-abcd7859.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco_20210524_201410.log.json) +| [X-101-32x8d-FPN](./mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py) | pytorch | 1x | - | | 43.6 | 39.0 | +| [X-101-32x8d-FPN](./mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py) | pytorch | 3x | 10.3 | | 44.3 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco_20210607_161042-8bd2c639.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco_20210607_161042.log.json) +| [X-101-64x4d-FPN](./mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py) | pytorch | 3x | 10.4 | | 44.5 | 39.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco_20210526_120447-c376f129.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco_20210526_120447.log.json) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..95b324f59144e6a894ad30e01859af148aa699d6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './mask_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..252e0616f9b036602433b5627a5f7f7b33a1f713 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,53 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + pretrained='open-mmlab://detectron2/resnet101_caffe', + backbone=dict( + depth=101, + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe')) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip',
flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b7986e8576642e631cfcdc9b274c49a17671e8b1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c9059d5385a960172dfe01c6d9a25d3089d96649 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1be11c37e91c21179e971a892f4ac50422f4a81a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,6 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict(pretrained='torchvision://resnet101', backbone=dict(depth=101)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_c4_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_c4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a44c01831b508da0a5e1ca3720bb437bcea086d1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_c4_1x_coco.py @@ -0,0 +1,39 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_caffe_c4.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + 
dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5a23f8c7cd21ef5025def03d4743d03103d821c5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,40 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6308e40416a2be6ab5cbcc6826faff8556bc0b16 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py @@ -0,0 +1,49 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + 
img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4f7150ca718e2ead46eb63e74b6be06f50aa0fce --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1b48a2104baf0df935954897ae4a991b38684d78 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = './mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..bebbaaab05c099f575d94fbb5ae6bef57d4b4177 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,45 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] 
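+# Note: multiscale_mode='value' makes Resize pick one of the six fixed
+# scales above per image; the mstrain 3x configs instead use
+# multiscale_mode='range' to sample the short side between 640 and 800.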
+test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..46eb8e3ad60f3d858ecb1a6318309a5bd6122756 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py @@ -0,0 +1,60 @@ +_base_ = './mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + style='caffe', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnet50_caffe_bgr')), + rpn_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)), + roi_head=dict( + bbox_roi_extractor=dict( + roi_layer=dict( + type='RoIAlign', + output_size=7, + sampling_ratio=2, + aligned=False)), + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)), + mask_roi_extractor=dict( + roi_layer=dict( + type='RoIAlign', + output_size=14, + sampling_ratio=2, + aligned=False)))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6a6c92460f1d58b8e8d361fb56ee123f2668ad9f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..932b1f905155a0d3285daefc4891f5194705e30d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b3d9242cd222d9da0bb7cc531130456f1031266f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9eb6d57e0d25370a59472a4ceb1a3b9da6574608 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py @@ -0,0 +1,23 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a8b3799b3482c840a4fcb5201a7dede23a0e073c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2cd3cee5a102b49750e5b265ec6775907f1a9545 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_r101_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, 
+ groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b02e49a824c8f678de8112f44e6f540933b1c5d5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + pretrained='open-mmlab://resnext101_32x4d', + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..108ea4e34717953be59795b63f4f932f4329468f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py @@ -0,0 +1,65 @@ +_base_ = './mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnext101_32x8d'))) + +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6b912f692b7a833556e6f7ef02b483c4e33781ef --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py @@ -0,0 +1,60 @@ +_base_ = './mask_rcnn_r101_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnext101_32x8d'))) + +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8ba0e9c200fdc4ff196184c0b8e2320804037fbb --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,85 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=8, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnext101_32x8d'))) + +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], + std=[57.375, 57.120, 58.395], + to_rgb=False) + +# In mstrain 3x config, img_scale=[(1333, 640), (1333, 800)], +# multiscale_mode='range' +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + 
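# Normalize with detectron2-style BGR statistics (to_rgb=False) to match the resnext101_32x8d caffe weights selected above. +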
dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +# Use RepeatDataset to speed up training +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2333b03a835a7d1d09df09749ebdc492db499f63 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_x101_32x4d_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6074cca29f462e821206a6509ff8869ec86b5a68 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './mask_rcnn_x101_32x4d_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4334b703c318c23359a0b7c8a3181a7b621f70c7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + pretrained='open-mmlab://resnext101_64x4d', + backbone=dict( + type='ResNeXt', + depth=101, + 
groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch')) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..b8e72a67965deefcf9ab7907ce7133453959f83c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/mask_rcnn/metafile.yml @@ -0,0 +1,414 @@ +Collections: + - Name: Mask R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Softmax + - RPN + - Convolution + - Dense Connections + - FPN + - ResNet + - RoIAlign + Paper: https://arxiv.org/abs/1703.06870v3 + README: configs/mask_rcnn/README.md + +Models: + - Name: mask_rcnn_r50_caffe_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.3 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco/mask_rcnn_r50_caffe_fpn_1x_coco_bbox_mAP-0.38__segm_mAP-0.344_20200504_231812-0ebd1859.pth + + - Name: mask_rcnn_r50_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 62.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 34.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth + + - Name: mask_rcnn_r50_fpn_2x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py + Metadata: + Training Memory (GB): 4.4 + inference time (ms/im): + - value: 62.11 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_2x_coco/mask_rcnn_r50_fpn_2x_coco_bbox_mAP-0.392__segm_mAP-0.354_20200505_003907-3e542a40.pth + + - Name: mask_rcnn_r101_caffe_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_1x_coco/mask_rcnn_r101_caffe_fpn_1x_coco_20200601_095758-805e06c1.pth + + - Name: mask_rcnn_r101_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 74.07 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + 
Metrics: + box AP: 40.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204-1efe0ed5.pth + + - Name: mask_rcnn_r101_fpn_2x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 6.4 + inference time (ms/im): + - value: 74.07 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_2x_coco/mask_rcnn_r101_fpn_2x_coco_bbox_mAP-0.408__segm_mAP-0.366_20200505_071027-14b391c7.pth + + - Name: mask_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205-478d0b67.pth + + - Name: mask_rcnn_x101_32x4d_fpn_2x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 7.6 + inference time (ms/im): + - value: 88.5 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_2x_coco/mask_rcnn_x101_32x4d_fpn_2x_coco_bbox_mAP-0.422__segm_mAP-0.378_20200506_004702-faef898c.pth + + - Name: mask_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.7 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_1x_coco/mask_rcnn_x101_64x4d_fpn_1x_coco_20200201-9352eb0d.pth + + - Name: mask_rcnn_x101_64x4d_fpn_2x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 10.7 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_2x_coco/mask_rcnn_x101_64x4d_fpn_2x_coco_20200509_224208-39d6f70c.pth + + - Name: mask_rcnn_x101_32x8d_fpn_1x_coco + In Collection: Mask R-CNN + 
Config: configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.7 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.3 + + - Name: mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco.py + Metadata: + Training Memory (GB): 4.3 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_2x_coco_bbox_mAP-0.403__segm_mAP-0.365_20200504_231822-a75c98ce.pth + + - Name: mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 4.3 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth + + - Name: mask_rcnn_r50_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 4.1 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_fpn_mstrain-poly_3x_coco_20210524_201154-21b550bb.pth + + - Name: mask_rcnn_r101_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 6.1 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_fpn_mstrain-poly_3x_coco_20210524_200244-5675c317.pth + + - Name: mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 5.9 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r101_caffe_fpn_mstrain-poly_3x_coco_20210526_132339-3c33ce02.pth + + - Name: mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 7.3 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.6 + - Task: Instance Segmentation + Dataset: COCO + 
Metrics: + mask AP: 39.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x4d_fpn_mstrain-poly_3x_coco_20210524_201410-abcd7859.pth + + - Name: mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.0 + + - Name: mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 10.3 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_32x8d_fpn_mstrain-poly_3x_coco_20210607_161042-8bd2c639.pth + + - Name: mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco + In Collection: Mask R-CNN + Config: configs/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco.py + Metadata: + Epochs: 36 + Training Memory (GB): 10.4 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco/mask_rcnn_x101_64x4d_fpn_mstrain-poly_3x_coco_20210526_120447-c376f129.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..f36b64c55398b3db48aaa1901fdc19be933bb740 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/README.md @@ -0,0 +1,26 @@ +# Mask Scoring R-CNN + +## Introduction + + + +``` +@inproceedings{huang2019msrcnn, + title={Mask Scoring R-CNN}, + author={Zhaojin Huang and Lichao Huang and Yongchao Gong and Chang Huang and Xinggang Wang}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2019}, +} +``` + +## Results and Models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:-------------:|:----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | caffe | 1x | 4.5 | | 38.2 | 36.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco/ms_rcnn_r50_caffe_fpn_1x_coco_20200702_180848-61c9355e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco/ms_rcnn_r50_caffe_fpn_1x_coco_20200702_180848.log.json) | +| R-50-FPN | caffe | 2x | - | - | 38.8 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco/ms_rcnn_r50_caffe_fpn_2x_coco_bbox_mAP-0.388__segm_mAP-0.363_20200506_004738-ee87b137.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco/ms_rcnn_r50_caffe_fpn_2x_coco_20200506_004738.log.json) | +| R-101-FPN | caffe | 1x | 6.5 | | 40.4 | 37.6 |
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco/ms_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.404__segm_mAP-0.376_20200506_004755-b9b12a37.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco/ms_rcnn_r101_caffe_fpn_1x_coco_20200506_004755.log.json) | +| R-101-FPN | caffe | 2x | - | - | 41.1 | 38.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco/ms_rcnn_r101_caffe_fpn_2x_coco_bbox_mAP-0.411__segm_mAP-0.381_20200506_011134-5f3cc74f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco/ms_rcnn_r101_caffe_fpn_2x_coco_20200506_011134.log.json) | +| R-X101-32x4d | pytorch | 1x | 7.9 | 11.0 | 41.8 | 38.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco/ms_rcnn_x101_32x4d_fpn_1x_coco_20200206-81fd1740.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco/ms_rcnn_x101_32x4d_fpn_1x_coco_20200206_100113.log.json) | +| R-X101-64x4d | pytorch | 1x | 11.0 | 8.0 | 43.0 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco/ms_rcnn_x101_64x4d_fpn_1x_coco_20200206-86ba88d2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco/ms_rcnn_x101_64x4d_fpn_1x_coco_20200206_091744.log.json) | +| R-X101-64x4d | pytorch | 2x | 11.0 | 8.0 | 42.6 | 39.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco/ms_rcnn_x101_64x4d_fpn_2x_coco_20200308-02a445e2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco/ms_rcnn_x101_64x4d_fpn_2x_coco_20200308_012247.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..997e443efa300c2a86938bfb69c7edc4114b044b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/metafile.yml @@ -0,0 +1,154 @@ +Collections: + - Name: Mask Scoring R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RPN + - FPN + - ResNet + - RoIAlign + Paper: https://arxiv.org/abs/1903.00241 + README: configs/ms_rcnn/README.md + +Models: + - Name: ms_rcnn_r50_caffe_fpn_1x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.0 + Weights:
https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco/ms_rcnn_r50_caffe_fpn_1x_coco_20200702_180848-61c9355e.pth + + - Name: ms_rcnn_r50_caffe_fpn_2x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco/ms_rcnn_r50_caffe_fpn_2x_coco_bbox_mAP-0.388__segm_mAP-0.363_20200506_004738-ee87b137.pth + + - Name: ms_rcnn_r101_caffe_fpn_1x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco/ms_rcnn_r101_caffe_fpn_1x_coco_bbox_mAP-0.404__segm_mAP-0.376_20200506_004755-b9b12a37.pth + + - Name: ms_rcnn_r101_caffe_fpn_2x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco/ms_rcnn_r101_caffe_fpn_2x_coco_bbox_mAP-0.411__segm_mAP-0.381_20200506_011134-5f3cc74f.pth + + - Name: ms_rcnn_x101_32x4d_fpn_1x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.9 + inference time (ms/im): + - value: 90.91 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco/ms_rcnn_x101_32x4d_fpn_1x_coco_20200206-81fd1740.pth + + - Name: ms_rcnn_x101_64x4d_fpn_1x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 11.0 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco/ms_rcnn_x101_64x4d_fpn_1x_coco_20200206-86ba88d2.pth + + - Name: ms_rcnn_x101_64x4d_fpn_2x_coco + In Collection: Mask Scoring R-CNN + Config: configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 11.0 + inference time (ms/im): + - value: 125 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.5 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco/ms_rcnn_x101_64x4d_fpn_2x_coco_20200308-02a445e2.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9b7dcbbf145bb9705ae9628440349f6a5fecc438 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './ms_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..202bccedae84657737b0315394199208d0307ae4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r101_caffe_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ms_rcnn_r101_caffe_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5845125a7b3ee70deeaa545c16d1211b4fcb1d06 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_caffe_fpn_1x_coco.py' +model = dict( + type='MaskScoringRCNN', + roi_head=dict( + type='MaskScoringRoIHead', + mask_iou_head=dict( + type='MaskIoUHead', + num_convs=4, + num_fcs=2, + roi_feat_size=14, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + num_classes=80)), + # model training and testing settings + train_cfg=dict(rcnn=dict(mask_thr_binary=0.5))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..008a70ae67454c3fd470c29ffd000b18db391c8e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r50_caffe_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ms_rcnn_r50_caffe_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0a163ce445c35d51a9d8940e46697c5c6a39d354 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,16 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +model = dict( + type='MaskScoringRCNN', + roi_head=dict( + type='MaskScoringRoIHead', + mask_iou_head=dict( + type='MaskIoUHead', + num_convs=4, + num_fcs=2, + roi_feat_size=14, + in_channels=256, + conv_out_channels=256, + 
fc_out_channels=1024, + num_classes=80)), + # model training and testing settings + train_cfg=dict(rcnn=dict(mask_thr_binary=0.5))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..20479bbd70ce039789d8df346d270fde898bbc26 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ms_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ee5b7341663049f6eb8b99c8fec1f54964c698aa --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './ms_rcnn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..54c605b94aa5fc8b1ddf2267ed349c2fcd08cc9e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ms_rcnn/ms_rcnn_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './ms_rcnn_x101_64x4d_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fcos/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fcos/README.md new file mode 100644 index 0000000000000000000000000000000000000000..21a2b22139f5ca713509b967b39f7a113ca23523 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fcos/README.md @@ -0,0 +1,25 @@ +# NAS-FCOS: Fast Neural Architecture Search for Object Detection + +## Introduction + + + +```latex +@article{wang2019fcos, + title={Nas-fcos: Fast neural architecture search for object detection}, + author={Wang, Ning and Gao, Yang and Chen, Hao and Wang, Peng and Tian, Zhi and Shen, Chunhua}, + journal={arXiv preprint arXiv:1906.04423}, + year={2019} +} +``` + +## Results and Models + +| Head | Backbone | Style | GN-head | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:---------:|:-------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| NAS-FCOSHead | R-50 | caffe | Y | 1x | | | 39.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200520-1bdba3ce.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200520.log.json) | +| FCOSHead | R-50 | caffe | Y | 1x | | | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200521-7fdcbce0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200521.log.json) | + +**Notes:** + +- To be consistent with the author's implementation, we use 4 GPUs with 4 images/GPU. diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fcos/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fcos/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..09811313d6299e77812bcf0989b3f2d54b41e1fc --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fcos/metafile.yml @@ -0,0 +1,39 @@ +Collections: + - Name: NAS-FCOS + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 4x V100 GPUs + Architecture: + - FPN + - NAS-FCOS + - ResNet + Paper: https://arxiv.org/abs/1906.04423 + README: configs/nas_fcos/README.md + +Models: + - Name: nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco + In Collection: NAS-FCOS + Config: configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200520-1bdba3ce.pth + + - Name: nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco + In Collection: NAS-FCOS + Config: configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco_20200521-7fdcbce0.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a455c9285cc892c8766df28d526fcd106272a09e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fcos/nas_fcos_fcoshead_r50_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,100 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='NASFCOS', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False, eps=0), + style='caffe', + init_cfg=dict( + 
type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + neck=dict( + type='NASFCOS_FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5, + norm_cfg=dict(type='BN'), + conv_cfg=dict(type='DCNv2', deform_groups=2)), + bbox_head=dict( + type='FCOSHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + norm_cfg=dict(type='GN', num_groups=32), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) + +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) + +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] + +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + samples_per_gpu=4, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b779492527850ca8ea52f7aa8c17d6c3543fa368 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fcos/nas_fcos_nashead_r50_caffe_fpn_gn-head_4x4_1x_coco.py @@ -0,0 +1,99 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='NASFCOS', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False, eps=0), + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + neck=dict( + type='NASFCOS_FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs=True, + num_outs=5, + norm_cfg=dict(type='BN'), + conv_cfg=dict(type='DCNv2', deform_groups=2)), + bbox_head=dict( + type='NASFCOSHead', + num_classes=80, + in_channels=256, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + norm_cfg=dict(type='GN', num_groups=32), + loss_cls=dict( + 
type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) + +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) + +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] + +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + samples_per_gpu=4, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fpn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fpn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9862b9a0156ab23948ae22a5f3ace69145683198 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fpn/README.md @@ -0,0 +1,26 @@ +# NAS-FPN: Learning Scalable Feature Pyramid Architecture for Object Detection + +## Introduction + + + +```latex +@inproceedings{ghiasi2019fpn, + title={Nas-fpn: Learning scalable feature pyramid architecture for object detection}, + author={Ghiasi, Golnaz and Lin, Tsung-Yi and Le, Quoc V}, + booktitle={Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition}, + pages={7036--7045}, + year={2019} +} +``` + +## Results and Models + +We benchmark the new training schedule (crop training, large batch, unfrozen BN, 50 epochs) introduced in NAS-FPN. RetinaNet is used in the paper. 
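Concretely, the schedule differs from the default 1x recipe in batch size, crop-based resizing, BN handling, and training length. The sketch below condenses the relevant fragments of the two configs added further down in this diff; it is an orientation aid drawn from those files, not a standalone config.

```python
# Condensed from retinanet_r50_fpn_crop640_50e_coco.py (the full file appears
# later in this diff); every value below is taken from that config.
model = dict(backbone=dict(norm_eval=False))  # BN stays unfrozen during training
train_crop = [
    dict(
        type='Resize',
        img_scale=(640, 640),
        ratio_range=(0.8, 1.2),
        keep_ratio=True),  # scale jitter first ...
    dict(type='RandomCrop', crop_size=(640, 640)),  # ... then a fixed 640 crop
]
data = dict(samples_per_gpu=8)  # large batch: 8 images/GPU on 8x V100
optimizer = dict(type='SGD', lr=0.08, momentum=0.9, weight_decay=0.0001)
runner = dict(type='EpochBasedRunner', max_epochs=50)  # 50-epoch schedule
```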
+ +| Backbone | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| R-50-FPN | 50e | 12.9 | 22.9 | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_fpn_crop640_50e_coco/retinanet_r50_fpn_crop640_50e_coco-9b953d76.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_fpn_crop640_50e_coco/retinanet_r50_fpn_crop640_50e_coco_20200529_095329.log.json) | +| R-50-NASFPN | 50e | 13.2 | 23.0 | 40.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco/retinanet_r50_nasfpn_crop640_50e_coco-0ad1f644.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco/retinanet_r50_nasfpn_crop640_50e_coco_20200528_230008.log.json) | + +**Note**: We find that it is unstable to train NAS-FPN and there is a small chance that results can be 3% mAP lower. diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fpn/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fpn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..a2ef30cb44ff47401a7419919c1c07a1ed02a6ec --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fpn/metafile.yml @@ -0,0 +1,54 @@ +Collections: + - Name: NAS-FPN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - NAS-FPN + - ResNet + Paper: https://arxiv.org/abs/1904.07392 + README: configs/nas_fpn/README.md + +Models: + - Name: retinanet_r50_fpn_crop640_50e_coco + In Collection: NAS-FPN + Config: configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py + Metadata: + Training Memory (GB): 12.9 + inference time (ms/im): + - value: 43.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_fpn_crop640_50e_coco/retinanet_r50_fpn_crop640_50e_coco-9b953d76.pth + + - Name: retinanet_r50_nasfpn_crop640_50e_coco + In Collection: NAS-FPN + Config: configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py + Metadata: + Training Memory (GB): 13.2 + inference time (ms/im): + - value: 43.48 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 50 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco/retinanet_r50_nasfpn_crop640_50e_coco-0ad1f644.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6ea44a05f52143694365737af5da0eb750c282f5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fpn/retinanet_r50_fpn_crop640_50e_coco.py @@ -0,0 +1,80 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', 
+ '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +cudnn_benchmark = True +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + relu_before_extra_convs=True, + no_norm_on_lateral=True, + norm_cfg=norm_cfg), + bbox_head=dict(type='RetinaSepBNHead', num_ins=5, norm_cfg=norm_cfg), + # training and testing settings + train_cfg=dict(assigner=dict(neg_iou_thr=0.5))) +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=(640, 640), + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=(640, 640)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=(640, 640)), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(640, 640), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=64), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', + lr=0.08, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[30, 40]) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=50) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3e039199ae6fb1b17af258c1fafa678625ddb3ea --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/nas_fpn/retinanet_r50_nasfpn_crop640_50e_coco.py @@ -0,0 +1,79 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', '../_base_/default_runtime.py' +] +cudnn_benchmark = True +# model settings +norm_cfg = dict(type='BN', requires_grad=True) +model = dict( + type='RetinaNet', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict(type='NASFPN', stack_times=7, norm_cfg=norm_cfg), + bbox_head=dict(type='RetinaSepBNHead', num_ins=5, norm_cfg=norm_cfg), + # training and testing settings + train_cfg=dict(assigner=dict(neg_iou_thr=0.5))) +# dataset settings +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), 
+ dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=(640, 640), + ratio_range=(0.8, 1.2), + keep_ratio=True), + dict(type='RandomCrop', crop_size=(640, 640)), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size=(640, 640)), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(640, 640), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=128), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict( + type='SGD', + lr=0.08, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[30, 40]) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=50) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/paa/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/README.md new file mode 100644 index 0000000000000000000000000000000000000000..3e9b6e31ccc9c92d8d9d429c4d338dbcc49d7f57 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/README.md @@ -0,0 +1,35 @@ +# Probabilistic Anchor Assignment with IoU Prediction for Object Detection + + + +```latex +@inproceedings{paa-eccv2020, + title={Probabilistic Anchor Assignment with IoU Prediction for Object Detection}, + author={Kim, Kang and Lee, Hee Seok}, + booktitle = {ECCV}, + year={2020} +} +``` + +## Results and Models + +We provide config files to reproduce the object detection results in the +ECCV 2020 paper for Probabilistic Anchor Assignment with IoU +Prediction for Object Detection. 
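The `Score voting` column in the table below corresponds to the `score_voting` flag of `PAAHead`; the released `paa_r50_fpn_1x_coco.py` further down in this diff sets it to `True`. As a hedged sketch only, the `False` rows could be obtained with a derived config like the hypothetical file below, which is not part of this diff:

```python
# Hypothetical derived config (not included in this diff): reuse the released
# PAA config and flip only the score_voting flag to match the "False" rows.
_base_ = './paa_r50_fpn_1x_coco.py'
model = dict(bbox_head=dict(score_voting=False))
```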
+ +| Backbone | Lr schd | Mem (GB) | Score voting | box AP | Config | Download | +|:-----------:|:-------:|:--------:|:------------:|:------:|:------:|:--------:| +| R-50-FPN | 12e | 3.7 | True | 40.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1x_coco/paa_r50_fpn_1x_coco_20200821-936edec3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1x_coco/paa_r50_fpn_1x_coco_20200821-936edec3.log.json) | +| R-50-FPN | 12e | 3.7 | False | 40.2 | - | +| R-50-FPN | 18e | 3.7 | True | 41.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_1.5x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1.5x_coco/paa_r50_fpn_1.5x_coco_20200823-805d6078.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1.5x_coco/paa_r50_fpn_1.5x_coco_20200823-805d6078.log.json) | +| R-50-FPN | 18e | 3.7 | False | 41.2 | - | +| R-50-FPN | 24e | 3.7 | True | 41.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_2x_coco/paa_r50_fpn_2x_coco_20200821-c98bfc4e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_2x_coco/paa_r50_fpn_2x_coco_20200821-c98bfc4e.log.json) | +| R-50-FPN | 36e | 3.7 | True | 43.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r50_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_mstrain_3x_coco/paa_r50_fpn_mstrain_3x_coco_20210121_145722-06a6880b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_mstrain_3x_coco/paa_r50_fpn_mstrain_3x_coco_20210121_145722.log.json) | +| R-101-FPN | 12e | 6.2 | True | 42.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_1x_coco/paa_r101_fpn_1x_coco_20200821-0a1825a4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_1x_coco/paa_r101_fpn_1x_coco_20200821-0a1825a4.log.json) | +| R-101-FPN | 12e | 6.2 | False | 42.4 | - | +| R-101-FPN | 24e | 6.2 | True | 43.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_2x_coco/paa_r101_fpn_2x_coco_20200821-6829f96b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_2x_coco/paa_r101_fpn_2x_coco_20200821-6829f96b.log.json) | +| R-101-FPN | 36e | 6.2 | True | 45.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/paa/paa_r101_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_mstrain_3x_coco/paa_r101_fpn_mstrain_3x_coco_20210122_084202-83250d22.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_mstrain_3x_coco/paa_r101_fpn_mstrain_3x_coco_20210122_084202.log.json) | + +**Note**: + +1. We find that the performance is unstable with 1x setting and may fluctuate by about 0.2 mAP. We report the best results. 
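For orientation, the 36e rows correspond to the `mstrain_3x` configs; the fragment below condenses the schedule-defining lines of `paa_r50_fpn_mstrain_3x_coco.py` (added in full later in this diff), with everything else inherited from the 1x config:

```python
# Condensed from paa_r50_fpn_mstrain_3x_coco.py: multi-scale training draws
# the short side from the [640, 800] range, and the LR steps and epoch count
# are scaled to the 3x schedule.
train_resize = dict(
    type='Resize',
    img_scale=[(1333, 640), (1333, 800)],
    multiscale_mode='range',
    keep_ratio=True)
lr_config = dict(step=[28, 34])
runner = dict(type='EpochBasedRunner', max_epochs=36)
```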
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/paa/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..8b6e1c6e28d9c92c71cf532fb9bc76cd986bad1f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/metafile.yml @@ -0,0 +1,99 @@ +Collections: + - Name: PAA + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - Probabilistic Anchor Assignment + - ResNet + Paper: https://arxiv.org/abs/2007.08103 + README: configs/paa/README.md + +Models: + - Name: paa_r50_fpn_1x_coco + In Collection: PAA + Config: configs/paa/paa_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.7 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1x_coco/paa_r50_fpn_1x_coco_20200821-936edec3.pth + + - Name: paa_r50_fpn_1.5x_coco + In Collection: PAA + Config: configs/paa/paa_r50_fpn_1.5x_coco.py + Metadata: + Training Memory (GB): 3.7 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_1.5x_coco/paa_r50_fpn_1.5x_coco_20200823-805d6078.pth + + - Name: paa_r50_fpn_2x_coco + In Collection: PAA + Config: configs/paa/paa_r50_fpn_2x_coco.py + Metadata: + Training Memory (GB): 3.7 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_2x_coco/paa_r50_fpn_2x_coco_20200821-c98bfc4e.pth + + - Name: paa_r50_fpn_mstrain_3x_coco + In Collection: PAA + Config: configs/paa/paa_r50_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.7 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r50_fpn_mstrain_3x_coco/paa_r50_fpn_mstrain_3x_coco_20210121_145722-06a6880b.pth + + - Name: paa_r101_fpn_1x_coco + In Collection: PAA + Config: configs/paa/paa_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_1x_coco/paa_r101_fpn_1x_coco_20200821-0a1825a4.pth + + - Name: paa_r101_fpn_2x_coco + In Collection: PAA + Config: configs/paa/paa_r101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 6.2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_2x_coco/paa_r101_fpn_2x_coco_20200821-6829f96b.pth + + - Name: paa_r101_fpn_mstrain_3x_coco + In Collection: PAA + Config: configs/paa/paa_r101_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 6.2 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/paa/paa_r101_fpn_mstrain_3x_coco/paa_r101_fpn_mstrain_3x_coco_20210122_084202-83250d22.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..94f1c278dc16c1befbca510ca0ac5ba407969f6d --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './paa_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r101_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..641ef764d2713184845b624b20db1771cfcd6739 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r101_fpn_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './paa_r101_fpn_1x_coco.py' +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r101_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r101_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..71858ed65c7fa998fdc960161689be083bdb4e62 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r101_fpn_mstrain_3x_coco.py @@ -0,0 +1,6 @@ +_base_ = './paa_r50_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r50_fpn_1.5x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r50_fpn_1.5x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..aabce4af987aa5504e1748e10b9955f760a013e1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r50_fpn_1.5x_coco.py @@ -0,0 +1,3 @@ +_base_ = './paa_r50_fpn_1x_coco.py' +lr_config = dict(step=[12, 16]) +runner = dict(type='EpochBasedRunner', max_epochs=18) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4c9c4aa73e1190da0edf1f20ffc3e60654cf87b1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r50_fpn_1x_coco.py @@ -0,0 +1,70 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='PAA', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', + num_outs=5), + bbox_head=dict( + type='PAAHead', + reg_decoded_bbox=True, + score_voting=True, + topk=9, + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.3), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, 
loss_weight=0.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.1, + neg_iou_thr=0.1, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r50_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..663d2c0ded52086663360a8a3dce89702584fc1f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r50_fpn_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './paa_r50_fpn_1x_coco.py' +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r50_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r50_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..91fa28cde470cb323f90f89a56d8acb6f9f0a22e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/paa/paa_r50_fpn_mstrain_3x_coco.py @@ -0,0 +1,20 @@ +_base_ = './paa_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pafpn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/pafpn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..9602c6da38c50660370c3bfc1cda55af1faad3b1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pafpn/README.md @@ -0,0 +1,24 @@ +# Path Aggregation Network for Instance Segmentation + +## Introduction + + + +``` +@inproceedings{liu2018path, + author = {Shu Liu and + Lu Qi and + Haifang Qin and + Jianping Shi and + Jiaya Jia}, + title = {Path Aggregation Network for Instance Segmentation}, + booktitle = {Proceedings of IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + year = {2018} +} +``` + +## Results and Models + +| Backbone | style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +|:-------------:|:----------:|:-------:|:--------:|:--------------:|:------:|:-------:|:------:|:--------:| +| R-50-FPN | pytorch | 1x | 4.0 | 17.2 | 37.5 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pafpn/faster_rcnn_r50_pafpn_1x_coco/faster_rcnn_r50_pafpn_1x_coco_bbox_mAP-0.375_20200503_105836-b7b4b9bd.pth) |
[log](https://download.openmmlab.com/mmdetection/v2.0/pafpn/faster_rcnn_r50_pafpn_1x_coco/faster_rcnn_r50_pafpn_1x_coco_20200503_105836.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b2fdef91c5cc8396baee9c2d8a09556162443078 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py @@ -0,0 +1,8 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' + +model = dict( + neck=dict( + type='PAFPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + num_outs=5)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pafpn/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/pafpn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..14a96c581f4cee9ebf20bfe37680e7b03b986982 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pafpn/metafile.yml @@ -0,0 +1,33 @@ +Collections: + - Name: PAFPN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - PAFPN + Paper: https://arxiv.org/abs/1803.01534 + README: configs/pafpn/README.md + +Models: + - Name: faster_rcnn_r50_pafpn_1x_coco + In Collection: PAFPN + Config: configs/pafpn/faster_rcnn_r50_pafpn_1x_coco.py + Metadata: + Training Memory (GB): 4.0 + inference time (ms/im): + - value: 58.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pafpn/faster_rcnn_r50_pafpn_1x_coco/faster_rcnn_r50_pafpn_1x_coco_bbox_mAP-0.375_20200503_105836-b7b4b9bd.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/README.md new file mode 100644 index 0000000000000000000000000000000000000000..69e0220199b0f9cebc3f0dd3e3669759d6259d60 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/README.md @@ -0,0 +1,23 @@ +# PASCAL VOC Dataset + + + +``` +@Article{Everingham10, + author = "Everingham, M. and Van~Gool, L. and Williams, C. K. I. and Winn, J. 
and Zisserman, A.", + title = "The Pascal Visual Object Classes (VOC) Challenge", + journal = "International Journal of Computer Vision", + volume = "88", + year = "2010", + number = "2", + month = jun, + pages = "303--338", +} +``` + +## Results and Models + +| Architecture | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:------------:|:---------:|:-------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| Faster R-CNN | R-50 | pytorch | 1x | 2.6 | - | 79.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712/faster_rcnn_r50_fpn_1x_voc0712_20200624-c9895d40.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712/20200623_015208.log.json) | +| Retinanet | R-50 | pytorch | 1x | 2.1 | - | 77.3 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pascal_voc/retinanet_r50_fpn_1x_voc0712.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/retinanet_r50_fpn_1x_voc0712/retinanet_r50_fpn_1x_voc0712_20200617-47cbdd0e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pascal_voc/retinanet_r50_fpn_1x_voc0712/retinanet_r50_fpn_1x_voc0712_20200616_014642.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py b/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..7866acebea689e7a863a836c326b1407de733fe8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py @@ -0,0 +1,14 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict(roi_head=dict(bbox_head=dict(num_classes=20))) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +# actual epoch = 3 * 3 = 9 +lr_config = dict(policy='step', step=[3]) +# runtime settings +runner = dict( + type='EpochBasedRunner', max_epochs=4) # actual epoch = 4 * 3 = 12 diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712_cocofmt.py b/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712_cocofmt.py new file mode 100644 index 0000000000000000000000000000000000000000..12eee2c1ecdaa5f9e84a3bd2084b00493f2f76c0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712_cocofmt.py @@ -0,0 +1,75 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict(roi_head=dict(bbox_head=dict(num_classes=20))) + +CLASSES = ('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', + 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', + 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor') + +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/VOCdevkit/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', 
img_scale=(1000, 600), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1000, 600), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type='RepeatDataset', + times=3, + dataset=dict( + type=dataset_type, + ann_file='data/voc0712_trainval.json', + img_prefix='data/VOCdevkit', + pipeline=train_pipeline, + classes=CLASSES)), + val=dict( + type=dataset_type, + ann_file='data/voc07_test.json', + img_prefix='data/VOCdevkit', + pipeline=test_pipeline, + classes=CLASSES), + test=dict( + type=dataset_type, + ann_file='data/voc07_test.json', + img_prefix='data/VOCdevkit', + pipeline=test_pipeline, + classes=CLASSES)) +evaluation = dict(interval=1, metric='bbox') + +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +# actual epoch = 3 * 3 = 9 +lr_config = dict(policy='step', step=[3]) +# runtime settings +runner = dict( + type='EpochBasedRunner', max_epochs=4) # actual epoch = 4 * 3 = 12 diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/retinanet_r50_fpn_1x_voc0712.py b/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/retinanet_r50_fpn_1x_voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..b4b050dda5d2d752c0db3c83c434879c8765a272 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/retinanet_r50_fpn_1x_voc0712.py @@ -0,0 +1,14 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict(bbox_head=dict(num_classes=20)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) +optimizer_config = dict(grad_clip=None) +# learning policy +# actual epoch = 3 * 3 = 9 +lr_config = dict(policy='step', step=[3]) +# runtime settings +runner = dict( + type='EpochBasedRunner', max_epochs=4) # actual epoch = 4 * 3 = 12 diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/ssd300_voc0712.py b/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/ssd300_voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..271ebe32ea354c0748d7745fad2c55960ac305d1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/ssd300_voc0712.py @@ -0,0 +1,69 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/voc0712.py', + '../_base_/default_runtime.py' +] +model = dict( + bbox_head=dict( + num_classes=20, anchor_generator=dict(basesize_ratio_range=(0.2, + 0.9)))) +# dataset settings +dataset_type = 'VOCDataset' +data_root = 'data/VOCdevkit/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + 
hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=3, + train=dict( + type='RepeatDataset', times=10, dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=1e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict() +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.001, + step=[16, 20]) +checkpoint_config = dict(interval=1) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/ssd512_voc0712.py b/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/ssd512_voc0712.py new file mode 100644 index 0000000000000000000000000000000000000000..ea2b69f33cb01bff4bab4ea357f88e74ed99b4a4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pascal_voc/ssd512_voc0712.py @@ -0,0 +1,52 @@ +_base_ = 'ssd300_voc0712.py' +input_size = 512 +model = dict( + bbox_head=dict( + in_channels=(512, 1024, 512, 256, 256, 256, 256), + anchor_generator=dict( + input_size=input_size, + strides=[8, 16, 32, 64, 128, 256, 512], + basesize_ratio_range=(0.15, 0.9), + ratios=([2], [2, 3], [2, 3], [2, 3], [2, 3], [2], [2])))) +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(512, 512), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(512, 512), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..3ae3392ae4e5fdf9bb9472af9befb0689dc70858 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/README.md @@ -0,0 +1,40 @@ +# Prime Sample Attention in Object Detection + +## Introduction + + + +```latex +@inproceedings{cao2019prime, + title={Prime sample attention in object detection}, + author={Cao, Yuhang and Chen, Kai and Loy, Chen Change and Lin, Dahua}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2020} +} +``` + +## Results and models + +| PISA | Network | Backbone | Lr schd | box AP | mask AP | Config | Download | +|:----:|:-------:|:-------------------:|:-------:|:------:|:-------:|:------:|:--------:| +| × | Faster R-CNN | R-50-FPN | 1x | 36.4 | | - | +| √ | Faster R-CNN | R-50-FPN | 1x | 38.4 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_r50_fpn_1x_coco/pisa_faster_rcnn_r50_fpn_1x_coco-dea93523.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_r50_fpn_1x_coco/pisa_faster_rcnn_r50_fpn_1x_coco_20200506_185619.log.json) | +| × | Faster R-CNN | X101-32x4d-FPN | 1x | 40.1 | | - | +| √ | Faster R-CNN | X101-32x4d-FPN | 1x | 41.9 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco-e4accec4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco_20200505_181503.log.json) | +| × | Mask R-CNN | R-50-FPN | 1x | 37.3 | 34.2 | - | +| √ | Mask R-CNN | R-50-FPN | 1x | 39.1 | 35.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_mask_rcnn_r50_fpn_1x_coco/pisa_mask_rcnn_r50_fpn_1x_coco-dfcedba6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_mask_rcnn_r50_fpn_1x_coco/pisa_mask_rcnn_r50_fpn_1x_coco_20200508_150500.log.json) | +| × | Mask R-CNN | X101-32x4d-FPN | 1x | 41.1 | 37.1 | - | +| √ | Mask R-CNN | X101-32x4d-FPN | 1x | | | | +| × | RetinaNet | R-50-FPN | 1x | 35.6 | | - | +| √ | RetinaNet | R-50-FPN | 1x | 36.9 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_r50_fpn_1x_coco/pisa_retinanet_r50_fpn_1x_coco-76409952.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_r50_fpn_1x_coco/pisa_retinanet_r50_fpn_1x_coco_20200504_014311.log.json) | +| × | RetinaNet | X101-32x4d-FPN | 1x | 39.0 | | - | +| √ | RetinaNet | X101-32x4d-FPN | 1x | 40.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco/pisa_retinanet_x101_32x4d_fpn_1x_coco-a0c13c73.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco/pisa_retinanet_x101_32x4d_fpn_1x_coco_20200505_001404.log.json) | +| × | SSD300 | VGG16 | 1x | 25.6 | | - | +| √ | SSD300 | VGG16 | 1x | 27.6 | | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_ssd300_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd300_coco/pisa_ssd300_coco-710e3ac9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd300_coco/pisa_ssd300_coco_20200504_144325.log.json) | +| × | SSD512 | VGG16 | 1x | 29.3 | | - | +| √ | SSD512 | VGG16 | 1x | 31.8 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/pisa/pisa_ssd512_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd512_coco/pisa_ssd512_coco-247addee.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd512_coco/pisa_ssd512_coco_20200508_131030.log.json) | + +**Notes:** + +- In the original paper, all models are trained and tested on mmdet v1.x, so the results may not exactly match those of this v2.0 release. +- Note that PISA only modifies the training pipeline, so inference time remains the same as the baseline. diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..7f017cfb10f8468319c7ff6a43ca93871004e2ad --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/metafile.yml @@ -0,0 +1,105 @@ +Collections: + - Name: PISA + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - PISA + - RPN + - ResNet + - RoIPool + Paper: https://arxiv.org/abs/1904.04821 + README: configs/pisa/README.md + +Models: + - Name: pisa_faster_rcnn_r50_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_r50_fpn_1x_coco/pisa_faster_rcnn_r50_fpn_1x_coco-dea93523.pth + + - Name: pisa_faster_rcnn_x101_32x4d_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco-e4accec4.pth + + - Name: pisa_mask_rcnn_r50_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 35.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_mask_rcnn_r50_fpn_1x_coco/pisa_mask_rcnn_r50_fpn_1x_coco-dfcedba6.pth + + - Name: pisa_retinanet_r50_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_r50_fpn_1x_coco/pisa_retinanet_r50_fpn_1x_coco-76409952.pth + + - Name: pisa_retinanet_x101_32x4d_fpn_1x_coco + In Collection: PISA + Config: configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.7 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco/pisa_retinanet_x101_32x4d_fpn_1x_coco-a0c13c73.pth + + - Name: pisa_ssd300_coco + In Collection: PISA + Config: configs/pisa/pisa_ssd300_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 27.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd300_coco/pisa_ssd300_coco-710e3ac9.pth + + - Name: pisa_ssd512_coco + In Collection: PISA + Config: configs/pisa/pisa_ssd512_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 31.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/pisa/pisa_ssd512_coco/pisa_ssd512_coco-247addee.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..71e65b0b2bc72379f4db73e491f76fc767cb786b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,30 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..16edd99de295161a3c246243e8c482ede4e5bdae --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_faster_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,30 @@ +_base_ = '../faster_rcnn/faster_rcnn_x101_32x4d_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..047a293466a20ea90501e3054d7fcfe23fcdcb39 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_mask_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,30 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' + +model = dict( + roi_head=dict( 
+ type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_mask_rcnn_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_mask_rcnn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2186a8f695ae6de9f27f5e96e398766f7a0e74bd --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_mask_rcnn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,30 @@ +_base_ = '../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py' + +model = dict( + roi_head=dict( + type='PISARoIHead', + bbox_head=dict( + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))), + train_cfg=dict( + rpn_proposal=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0), + rcnn=dict( + sampler=dict( + type='ScoreHLRSampler', + num=512, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2, bias=0), + carl=dict(k=1, bias=0.2))), + test_cfg=dict( + rpn=dict( + nms_pre=2000, + max_per_img=2000, + nms=dict(type='nms', iou_threshold=0.7), + min_bbox_size=0))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..70f89e227ec64b5c7224375aac0cf7ae3a10a29e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = '../retinanet/retinanet_r50_fpn_1x_coco.py' + +model = dict( + bbox_head=dict( + type='PISARetinaHead', + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0)), + train_cfg=dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b97b6720f0522ee19e3f8353bf490b74a5835308 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_retinanet_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = '../retinanet/retinanet_x101_32x4d_fpn_1x_coco.py' + +model = dict( + bbox_head=dict( + type='PISARetinaHead', + loss_bbox=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0)), + train_cfg=dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_ssd300_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_ssd300_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b5cc006477eacaa9ab40d463312dc2156a59d634 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_ssd300_coco.py @@ -0,0 +1,8 @@ +_base_ = '../ssd/ssd300_coco.py' 
+ +model = dict( + bbox_head=dict(type='PISASSDHead'), + train_cfg=dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2))) + +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_ssd512_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_ssd512_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3219d6d667cb185e6fa4f1954d632ccad9512a48 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/pisa/pisa_ssd512_coco.py @@ -0,0 +1,8 @@ +_base_ = '../ssd/ssd512_coco.py' + +model = dict( + bbox_head=dict(type='PISASSDHead'), + train_cfg=dict(isr=dict(k=2., bias=0.), carl=dict(k=1., bias=0.2))) + +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/point_rend/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/point_rend/README.md new file mode 100644 index 0000000000000000000000000000000000000000..fafb29c95ed1ab9b695074af7ea49347a006ed16 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/point_rend/README.md @@ -0,0 +1,23 @@ +# PointRend + +## Introduction + + + +```latex +@InProceedings{kirillov2019pointrend, + title={{PointRend}: Image Segmentation as Rendering}, + author={Alexander Kirillov and Yuxin Wu and Kaiming He and Ross Girshick}, + journal={ArXiv:1912.08193}, + year={2019} +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 4.6 | | 38.4 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco/point_rend_r50_caffe_fpn_mstrain_1x_coco-1bcb5fb4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco/point_rend_r50_caffe_fpn_mstrain_1x_coco_20200612_161407.log.json) | +| R-50-FPN | caffe | 3x | 4.6 | | 41.0 | 38.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco/point_rend_r50_caffe_fpn_mstrain_3x_coco-e0ebb6b7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco/point_rend_r50_caffe_fpn_mstrain_3x_coco_20200614_002632.log.json) | + +Note: All models are trained with multi-scale training; the shorter side of the input image is randomly scaled to one of (640, 672, 704, 736, 768, 800).
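+
+For reference, a minimal sketch of the multi-scale `Resize` step this corresponds to, following the pipeline convention used by the other configs in this repo (the actual pipeline lives in the inherited `mask_rcnn_r50_caffe_fpn_mstrain_1x_coco` base config, so treat this as illustrative):
+
+```python
+dict(
+    type='Resize',
+    # the shorter side is randomly picked from this list,
+    # with the longer side capped at 1333
+    img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736),
+               (1333, 768), (1333, 800)],
+    multiscale_mode='value',
+    keep_ratio=True)
+```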
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/point_rend/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/point_rend/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..015d9f9a62da34efdb80e633bcb64d22794373ed --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/point_rend/metafile.yml @@ -0,0 +1,49 @@ +Collections: + - Name: PointRend + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - PointRend + - FPN + - ResNet + Paper: https://arxiv.org/abs/1912.08193 + README: configs/point_rend/README.md + +Models: + - Name: point_rend_r50_caffe_fpn_mstrain_1x_coco + In Collection: PointRend + Config: configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py + Metadata: + Training Memory (GB): 4.6 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco/point_rend_r50_caffe_fpn_mstrain_1x_coco-1bcb5fb4.pth + + - Name: point_rend_r50_caffe_fpn_mstrain_3x_coco + In Collection: PointRend + Config: configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.6 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco/point_rend_r50_caffe_fpn_mstrain_3x_coco-e0ebb6b7.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0c0e563d6fe307d05fbd3862cd28b6dc2a3e52b2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,44 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py' +# model settings +model = dict( + type='PointRend', + roi_head=dict( + type='PointRendRoIHead', + mask_roi_extractor=dict( + type='GenericRoIExtractor', + aggregation='concat', + roi_layer=dict( + _delete_=True, type='SimpleRoIAlign', output_size=14), + out_channels=256, + featmap_strides=[4]), + mask_head=dict( + _delete_=True, + type='CoarseMaskHead', + num_fcs=2, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + num_classes=80, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + point_head=dict( + type='MaskPointHead', + num_fcs=3, + in_channels=256, + fc_channels=256, + num_classes=80, + coarse_pred_each_layer=True, + loss_point=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))), + # model training and testing settings + train_cfg=dict( + rcnn=dict( + mask_size=7, + num_points=14 * 14, + oversample_ratio=3, + importance_sample_ratio=0.75)), + test_cfg=dict( + rcnn=dict( + subdivision_steps=5, + subdivision_num_points=28 * 28, + scale_factor=2))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..169278e5738b0abd4ae5e99594e4adbaaefa2d96 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/point_rend/point_rend_r50_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = './point_rend_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..ed3769b110d007917079b92ecca445eb7a1d5b34 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/README.md @@ -0,0 +1,105 @@ +# Designing Network Design Spaces + +## Introduction + +[BACKBONE] + +We implement RegNetX and RegNetY models in detection systems and provide their first results on Mask R-CNN, Faster R-CNN and RetinaNet. + +The pre-trained models are converted from the [model zoo of pycls](https://github.com/facebookresearch/pycls/blob/master/MODEL_ZOO.md). + +```latex +@article{radosavovic2020designing, + title={Designing Network Design Spaces}, + author={Ilija Radosavovic and Raj Prateek Kosaraju and Ross Girshick and Kaiming He and Piotr Dollár}, + year={2020}, + eprint={2003.13678}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +## Usage + +To use a RegNet model, there are two steps: + +1. Convert the model to the ResNet style supported by MMDetection +2. Modify the backbone and neck in the config accordingly, as shown in the sketch after this section + +### Convert model + +We already provide models with FLOPs ranging from 400MF to 12GF in our model zoo. + +For more general usage, we also provide the script `regnet2mmdet.py` in the tools directory to convert the keys of models pretrained by [pycls](https://github.com/facebookresearch/pycls/) to +ResNet-style checkpoints used in MMDetection. + +```bash +python -u tools/model_converters/regnet2mmdet.py ${PRETRAIN_PATH} ${STORE_PATH} +``` + +This script converts the model at `PRETRAIN_PATH` and stores the converted model at `STORE_PATH`. + +### Modify config + +Users can modify the backbone's `depth` and the corresponding keys in `arch` in the config, according to the configs in the [pycls model zoo](https://github.com/facebookresearch/pycls/blob/master/MODEL_ZOO.md). +The FPN parameter `in_channels` can be found in Figures 15 & 16 of the paper (`wi` in the legend). +This directory already provides some configs, together with their performance, for RegNetX models from the 800MF to the 12GF level. +For other pre-trained models or self-implemented RegNet models, users are responsible for checking these parameters themselves. + +**Note**: Although Figs. 15 & 16 also provide `w0`, `wa`, `wm`, `group_w`, and `bot_mul` for `arch`, they are quantized and thus inaccurate; using them sometimes produces a backbone whose keys do not match those of the pre-trained model.
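+
+For example, a minimal sketch of what step 2 looks like when switching a Faster R-CNN base config to RegNetX-800MF (assembled from the configs in this directory; the FPN `in_channels` are the RegNetX-800MF stage widths, i.e. the `wi` values):
+
+```python
+model = dict(
+    backbone=dict(
+        _delete_=True,  # drop the ResNet backbone settings inherited from the base config
+        type='RegNet',
+        arch='regnetx_800mf',
+        out_indices=(0, 1, 2, 3),
+        frozen_stages=1,
+        norm_cfg=dict(type='BN', requires_grad=True),
+        norm_eval=True,
+        style='pytorch',
+        init_cfg=dict(
+            type='Pretrained', checkpoint='open-mmlab://regnetx_800mf')),
+    neck=dict(
+        type='FPN',
+        in_channels=[64, 128, 288, 672],  # stage widths (wi) of RegNetX-800MF
+        out_channels=256,
+        num_outs=5))
+```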
+ +## Results + +### Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :---------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| [R-50-FPN](../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py)| pytorch | 1x | 4.4 | 12.0 | 38.2 | 34.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205_050542.log.json) | +|[RegNetX-3.2GF-FPN](./mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py)| pytorch | 1x |5.0 ||40.3|36.6|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_1x_coco_20200520_163141-2a9d1814.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_1x_coco_20200520_163141.log.json) | +|[RegNetX-4.0GF-FPN](./mask_rcnn_regnetx-4GF_fpn_1x_coco.py)| pytorch | 1x |5.5||41.5|37.4|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco/mask_rcnn_regnetx-4GF_fpn_1x_coco_20200517_180217-32e9c92d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco/mask_rcnn_regnetx-4GF_fpn_1x_coco_20200517_180217.log.json) | +| [R-101-FPN](../mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py)| pytorch | 1x | 6.4 | 10.3 | 40.0 | 36.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204-1efe0ed5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r101_fpn_1x_coco/mask_rcnn_r101_fpn_1x_coco_20200204_144809.log.json) | +|[RegNetX-6.4GF-FPN](./mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py)| pytorch | 1x |6.1 ||41.0|37.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco/mask_rcnn_regnetx-6.4GF_fpn_1x_coco_20200517_180439-3a7aae83.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco/mask_rcnn_regnetx-6.4GF_fpn_1x_coco_20200517_180439.log.json) | +| [X-101-32x4d-FPN](../mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py) | pytorch | 1x | 7.6 | 9.4 | 41.9 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205-478d0b67.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_x101_32x4d_fpn_1x_coco/mask_rcnn_x101_32x4d_fpn_1x_coco_20200205_034906.log.json) | +|[RegNetX-8.0GF-FPN](./mask_rcnn_regnetx-8GF_fpn_1x_coco.py)| pytorch | 1x |6.4 
||41.7|37.5|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco/mask_rcnn_regnetx-8GF_fpn_1x_coco_20200517_180515-09daa87e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco/mask_rcnn_regnetx-8GF_fpn_1x_coco_20200517_180515.log.json) | +|[RegNetX-12GF-FPN](./mask_rcnn_regnetx-12GF_fpn_1x_coco.py)| pytorch | 1x |7.4 ||42.2|38|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco/mask_rcnn_regnetx-12GF_fpn_1x_coco_20200517_180552-b538bd8b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco/mask_rcnn_regnetx-12GF_fpn_1x_coco_20200517_180552.log.json) | +|[RegNetX-3.2GF-FPN-DCN-C3-C5](./mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py)| pytorch | 1x |5.0 ||40.3|36.6|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco_20200520_172726-75f40794.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco_20200520_172726.log.json) | + +### Faster R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :---------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| [R-50-FPN](../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py)| pytorch | 1x | 4.0 | 18.2 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130_204655.log.json) | +|[RegNetX-3.2GF-FPN](./faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py)| pytorch | 1x | 4.5||39.9|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco/faster_rcnn_regnetx-3.2GF_fpn_1x_coco_20200517_175927-126fd9bf.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco/faster_rcnn_regnetx-3.2GF_fpn_1x_coco_20200517_175927.log.json) | +|[RegNetX-3.2GF-FPN](./faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py)| pytorch | 2x | 4.5||41.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco/faster_rcnn_regnetx-3.2GF_fpn_2x_coco_20200520_223955-e2081918.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco/faster_rcnn_regnetx-3.2GF_fpn_2x_coco_20200520_223955.log.json) | + +### RetinaNet + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| 
:---------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| [R-50-FPN](../retinanet/retinanet_r50_fpn_1x_coco.py) | pytorch | 1x | 3.8 | 16.6 | 36.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130-c2398f9e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130_002941.log.json) | +|[RegNetX-800MF-FPN](./retinanet_regnetx-800MF_fpn_1x_coco.py)| pytorch | 1x |2.5||35.6|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-800MF_fpn_1x_coco/retinanet_regnetx-800MF_fpn_1x_coco_20200517_191403-f6f91d10.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-800MF_fpn_1x_coco/retinanet_regnetx-800MF_fpn_1x_coco_20200517_191403.log.json) | +|[RegNetX-1.6GF-FPN](./retinanet_regnetx-1.6GF_fpn_1x_coco.py)| pytorch | 1x |3.3||37.3|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco/retinanet_regnetx-1.6GF_fpn_1x_coco_20200517_191403-37009a9d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco/retinanet_regnetx-1.6GF_fpn_1x_coco_20200517_191403.log.json) | +|[RegNetX-3.2GF-FPN](./retinanet_regnetx-3.2GF_fpn_1x_coco.py)| pytorch | 1x |4.2 ||39.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco/retinanet_regnetx-3.2GF_fpn_1x_coco_20200520_163141-cb1509e8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco/retinanet_regnetx-3.2GF_fpn_1x_coco_20200520_163141.log.json) | + +### Pre-trained models + +We also train some models with longer schedules and multi-scale training. Users can fine-tune them for downstream tasks. 
+ +| Method | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-----: | :-----: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +|Faster RCNN |[RegNetX-400MF-FPN](./faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py)| pytorch | 3x |2.3 ||37.1|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210526_095112-e1967c37.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210526_095112.log.json) | +|Faster RCNN |[RegNetX-800MF-FPN](./faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py)| pytorch | 3x |2.8 ||38.8|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210526_095118-a2c70b20.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210526_095118.log.json) | +|Faster RCNN |[RegNetX-1.6GF-FPN](./faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |3.4 ||40.5|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-1_20210526_095325-94aa46cc.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-1_20210526_095325.log.json) | +|Faster RCNN |[RegNetX-3.2GF-FPN](./faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |4.4 ||42.3|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-3_20210526_095152-e16a5227.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-3_20210526_095152.log.json) | +|Faster RCNN |[RegNetX-4GF-FPN](./faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |4.9 ||42.8|-|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210526_095201-65eaf841.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210526_095201.log.json) | +|Mask RCNN |[RegNetX-3.2GF-FPN](./mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py)| pytorch | 3x |5.0 ||43.1|38.7|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221-99879813.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221.log.json) | +|Mask RCNN |[RegNetX-400MF-FPN](./mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py)| pytorch | 3x |2.5 ||37.6|34.4|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco_20210601_235443-803b87a2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco_20210601_235443.log.json) | +|Mask RCNN |[RegNetX-800MF-FPN](./mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py)| pytorch | 3x |2.9 ||39.5|36.1|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco_20210602_210641-e843d02e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco_20210602_210641.log.json) | +|Mask RCNN |[RegNetX-1.6GF-FPN](./mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco.py)| pytorch | 3x |3.6 ||40.9|37.5|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-1_20210602_210641-6e63e19c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-1_20210602_210641.log.json) | +|Mask RCNN |[RegNetX-3.2GF-FPN](./mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | pytorch | 3x |5.0 ||43.1|38.7|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221-99879813.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221.log.json) | +|Mask RCNN |[RegNetX-4GF-FPN](./mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco.py) | pytorch | 3x |5.1 ||43.4|39.2|[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco_20210602_032621-c5900e99.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco_20210602_032621.log.json) | + +### Notice + +1. The models are trained using a different weight decay, i.e., `weight_decay=5e-5` according to the setting in ImageNet training. 
This brings an improvement of at least 0.7 AP absolute but does not improve the ResNet-50-based model. +2. RetinaNets using RegNets are trained with a learning rate of 0.02 and gradient clipping. We find that a learning rate of 0.02 improves the results by at least 0.7 AP absolute and that gradient clipping is necessary to stabilize training. However, this does not improve the performance of the ResNet-50-FPN RetinaNet. diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..385b5ca73b5f7432ad60b0a1528ee8c992b31d44 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_1.6gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_1.6gf')), + neck=dict( + type='FPN', + in_channels=[72, 168, 408, 912], + out_channels=256, + num_outs=5)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..88d270e3ce76f631acbef116cd3f7d3e6853ab59 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py @@ -0,0 +1,57 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..612490b4342a1b6fc164ec80bbe0a6c6df147d76 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b7e6e1a3125d67f4fd7d99c0ef856bf02402ddb6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,61 @@ +_base_ = [ + '../common/mstrain_3x_coco.py', '../_base_/models/faster_rcnn_r50_fpn.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] + +data = dict( + train=dict(dataset=dict(pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +optimizer = dict(weight_decay=0.00005) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0a05f6e4e3c6aa2e85f5473872b5633cdb8bfc50 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_400mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', 
checkpoint='open-mmlab://regnetx_400mf')), + neck=dict( + type='FPN', + in_channels=[32, 64, 160, 384], + out_channels=256, + num_outs=5)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..98b3fc2b5b6cd122a42cab4754336fd355d40cfb --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_4.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_4.0gf')), + neck=dict( + type='FPN', + in_channels=[80, 240, 560, 1360], + out_channels=256, + num_outs=5)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..67f448bdb797459da8898d1846b7e97786163cf4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py @@ -0,0 +1,17 @@ +_base_ = 'faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_800mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_800mf')), + neck=dict( + type='FPN', + in_channels=[64, 128, 288, 672], + out_channels=256, + num_outs=5)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..963aaf6ad45a017e1c22bf4cecc3748785cb775e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,25 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + pretrained='open-mmlab://regnetx_1.6gf', + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_1.6gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[72, 168, 408, 912], + out_channels=256, + num_outs=5)) + +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ce3661cffbfee0aa4206c889c2f8517d6d1e0e58 --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_12gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_12gf')), + neck=dict( + type='FPN', + in_channels=[224, 448, 896, 2240], + out_channels=256, + num_outs=5)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..44bf0d1176bf3fd585b65dc10fbac455ce01c59c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py @@ -0,0 +1,58 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + # Images are converted to float32 directly after loading in PyCls + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..5b53428125e5a8732bfd489195b0f6e179420b47 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = 'mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', 
checkpoint='open-mmlab://regnetx_3.2gf'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..aca64d335c7b299d985621adb254d9e4f471cca7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py @@ -0,0 +1,66 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ac78c80ae12823318d144b0bd8244e6c7a137dc9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,25 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + pretrained='open-mmlab://regnetx_400mf', + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_400mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[32, 64, 160, 384], + out_channels=256, + num_outs=5)) + +optimizer = dict(type='SGD', lr=0.02, 
momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..874d485bec139ec2bfd8253ac82e8f5861d3f9c2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_4.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_4.0gf')), + neck=dict( + type='FPN', + in_channels=[80, 240, 560, 1360], + out_channels=256, + num_outs=5)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0981298f51c192a88ab6afaa722b4d958bb863c1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,25 @@ +_base_ = [ + '../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + pretrained='open-mmlab://regnetx_4.0gf', + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_4.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[80, 240, 560, 1360], + out_channels=256, + num_outs=5)) + +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..99387d8655eaa8bca5276dff7f2b7505afe185ed --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_6.4gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_6.4gf')), + neck=dict( + type='FPN', + in_channels=[168, 392, 784, 1624], + out_channels=256, + num_outs=5)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7af20783ae4d05e3370c1b1d0bada8c0b2965310 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py @@ -0,0 +1,25 @@ +_base_ = [ + 
'../common/mstrain-poly_3x_coco_instance.py', + '../_base_/models/mask_rcnn_r50_fpn.py' +] + +model = dict( + pretrained='open-mmlab://regnetx_800mf', + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_800mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch'), + neck=dict( + type='FPN', + in_channels=[64, 128, 288, 672], + out_channels=256, + num_outs=5)) + +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1e7832ff2605346e9743e54023dfd5872dc55567 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_8.0gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_8.0gf')), + neck=dict( + type='FPN', + in_channels=[80, 240, 720, 1920], + out_channels=256, + num_outs=5)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..455e82def26e1d0e8d42ca5ff646cb82cf15a9d4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/metafile.yml @@ -0,0 +1,347 @@ +Collections: + - Name: RegNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - RegNet + Paper: https://arxiv.org/abs/2003.13678 + README: configs/regnet/README.md + +Models: + - Name: mask_rcnn_regnetx-3.2GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_1x_coco_20200520_163141-2a9d1814.pth + + - Name: mask_rcnn_regnetx-4GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_1x_coco/mask_rcnn_regnetx-4GF_fpn_1x_coco_20200517_180217-32e9c92d.pth + + - Name: mask_rcnn_regnetx-6.4GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.1 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.0 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.1 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-6.4GF_fpn_1x_coco/mask_rcnn_regnetx-6.4GF_fpn_1x_coco_20200517_180439-3a7aae83.pth + + - Name: mask_rcnn_regnetx-8GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 6.4 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-8GF_fpn_1x_coco/mask_rcnn_regnetx-8GF_fpn_1x_coco_20200517_180515-09daa87e.pth + + - Name: mask_rcnn_regnetx-12GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.4 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-12GF_fpn_1x_coco/mask_rcnn_regnetx-12GF_fpn_1x_coco_20200517_180552-b538bd8b.pth + + - Name: mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco.py + Metadata: + Training Memory (GB): 5.0 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco/mask_rcnn_regnetx-3.2GF_fpn_mdconv_c3-c5_1x_coco_20200520_172726-75f40794.pth + + - Name: faster_rcnn_regnetx-3.2GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_1x_coco/faster_rcnn_regnetx-3.2GF_fpn_1x_coco_20200517_175927-126fd9bf.pth + + - Name: faster_rcnn_regnetx-3.2GF_fpn_2x_coco + In Collection: RegNet + Config: configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco.py + Metadata: + Training Memory (GB): 4.5 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_2x_coco/faster_rcnn_regnetx-3.2GF_fpn_2x_coco_20200520_223955-e2081918.pth + + - Name: retinanet_regnetx-800MF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 2.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 35.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-800MF_fpn_1x_coco/retinanet_regnetx-800MF_fpn_1x_coco_20200517_191403-f6f91d10.pth + + - Name: retinanet_regnetx-1.6GF_fpn_1x_coco + In Collection: RegNet + Config: configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.3 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco/retinanet_regnetx-1.6GF_fpn_1x_coco_20200517_191403-37009a9d.pth + + - Name: retinanet_regnetx-3.2GF_fpn_1x_coco + In Collection: 
RegNet + Config: configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py + Metadata: + Training Memory (GB): 4.2 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco/retinanet_regnetx-3.2GF_fpn_1x_coco_20200520_163141-cb1509e8.pth + + - Name: faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 2.3 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-400MF_fpn_mstrain_3x_coco_20210526_095112-e1967c37.pth + + - Name: faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 2.8 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-800MF_fpn_mstrain_3x_coco_20210526_095118-a2c70b20.pth + + - Name: faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.4 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-1_20210526_095325-94aa46cc.pth + + - Name: faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.4 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-3_20210526_095152-e16a5227.pth + + - Name: faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 4.9 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco/faster_rcnn_regnetx-4GF_fpn_mstrain_3x_coco_20210526_095201-65eaf841.pth + + - Name: mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.0 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.1 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco/mask_rcnn_regnetx-3.2GF_fpn_mstrain_3x_coco_20200521_202221-99879813.pth + + - Name: mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 2.5 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.6 + - Task: Instance 
Segmentation + Dataset: COCO + Metrics: + mask AP: 34.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-400MF_fpn_mstrain-poly_3x_coco_20210601_235443-803b87a2.pth + + - Name: mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco.py + Metadata: + Training Memory (GB): 2.9 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 36.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-800MF_fpn_mstrain-poly_3x_coco_20210602_210641-e843d02e.pth + + - Name: mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 3.6 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.9 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-1.6GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-1_20210602_210641-6e63e19c.pth + + - Name: mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco + In Collection: RegNet + Config: configs/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain_3x_coco.py + Metadata: + Training Memory (GB): 5.1 + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/regnet/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco/mask_rcnn_regnetx-4GF_fpn_mstrain-poly_3x_coco_20210602_032621-c5900e99.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7395c1bfbfa16670294c721f9f3135da9b9e69ae --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/retinanet_regnetx-1.6GF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './retinanet_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_1.6gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_1.6gf')), + neck=dict( + type='FPN', + in_channels=[72, 168, 408, 912], + out_channels=256, + num_outs=5)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..f05307c4364c565d410de35cc720db70d22be947 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/retinanet_regnetx-3.2GF_fpn_1x_coco.py @@ -0,0 +1,59 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict( + _delete_=True, + type='RegNet', + arch='regnetx_3.2gf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_3.2gf')), + neck=dict( + type='FPN', + in_channels=[96, 192, 432, 1008], + out_channels=256, + num_outs=5)) +img_norm_cfg = dict( + # The mean and std are used in PyCls when training RegNets + mean=[103.53, 116.28, 123.675], + std=[57.375, 57.12, 58.395], + to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.00005) +optimizer_config = dict( + _delete_=True, grad_clip=dict(max_norm=35, norm_type=2)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f6f8989320d6ffbcd55148471f62a962c52f9131 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/regnet/retinanet_regnetx-800MF_fpn_1x_coco.py @@ -0,0 +1,17 @@ +_base_ = './retinanet_regnetx-3.2GF_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='RegNet', + arch='regnetx_800mf', + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://regnetx_800mf')), + neck=dict( + type='FPN', + in_channels=[64, 128, 288, 672], + out_channels=256, + num_outs=5)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/README.md new file mode 100644 index 0000000000000000000000000000000000000000..fc589158b009ebe778255620969d4dadc9db0d39 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/README.md @@ -0,0 +1,54 @@ +# RepPoints: Point Set Representation for Object Detection + +By [Ze Yang](https://yangze.tech/), [Shaohui Liu](http://b1ueber2y.me/), and [Han Hu](https://ancientmooner.github.io/). 
+ +We provide code support and configuration files to reproduce the results in the paper for +["RepPoints: Point Set Representation for Object Detection"](https://arxiv.org/abs/1904.11490) on COCO object detection. + +## Introduction + + + +**RepPoints**, initially described in [arXiv](https://arxiv.org/abs/1904.11490), is a new representation method for visual objects, on which visual understanding tasks are typically centered. Visual object representation, aiming at both geometric description and appearance feature extraction, is conventionally achieved by `bounding box + RoIPool (RoIAlign)`. The bounding box representation is convenient to use; however, it provides only a rectangular localization of objects that lacks geometric precision and may consequently degrade feature quality. Our new representation, RepPoints, models objects by a `point set` instead of a `bounding box`; the points learn to adaptively position themselves over an object in a manner that circumscribes the object’s `spatial extent` and enables `semantically aligned feature extraction`. This richer and more flexible representation maintains the convenience of bounding boxes while facilitating various visual understanding applications. This repo demonstrates the effectiveness of RepPoints for COCO object detection. + +Another feature of this repo is the demonstration of an `anchor-free detector`, which can be as effective as state-of-the-art anchor-based detection methods. The anchor-free detector can utilize either `bounding box` or `RepPoints` as the basic object representation. + +
+<div align="center"> +  <img src="reppoints.png"/> +  <p>Learning RepPoints in Object Detection.</p> +</div>
+ +## Citing RepPoints + +``` +@inproceedings{yang2019reppoints, + title={RepPoints: Point Set Representation for Object Detection}, + author={Yang, Ze and Liu, Shaohui and Hu, Han and Wang, Liwei and Lin, Stephen}, + booktitle={The IEEE International Conference on Computer Vision (ICCV)}, + month={Oct}, + year={2019} +} +``` + +## Results and models + +The results on COCO 2017val are shown in the table below. + +| Method | Backbone | GN | Anchor | convert func | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +|:---------:|:-------------:|:---:|:------:|:------------:|:-------:|:--------:|:--------------:|:------:|:------:|:--------:| +| BBox | R-50-FPN | Y | single | - | 1x | 3.9 | 15.9 | 36.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco_20200329-c98bfa96.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco_20200329_145916.log.json) | +| BBox | R-50-FPN | Y | none | - | 1x | 3.9 | 15.4 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco_20200330-00f73d58.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco_20200330_233609.log.json) | +| RepPoints | R-50-FPN | N | none | moment | 1x | 3.3 | 18.5 | 37.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_1x_coco/reppoints_moment_r50_fpn_1x_coco_20200330-b73db8d1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_1x_coco/reppoints_moment_r50_fpn_1x_coco_20200330_233609.log.json) | +| RepPoints | R-50-FPN | Y | none | moment | 1x | 3.9 | 17.5 | 38.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco_20200329-4b38409a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco_20200329_145952.log.json) | +| RepPoints | R-50-FPN | Y | none | moment | 2x | 3.9 | - | 38.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco_20200329-91babaa2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco_20200329_150020.log.json) | +| RepPoints | R-101-FPN | Y | none | moment | 2x | 5.8 | 13.7 | 40.5 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco_20200329-4fbc7310.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco_20200329_132205.log.json) | +| RepPoints | R-101-FPN-DCN | Y | none | moment | 2x | 5.9 | 12.1 | 42.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329-3309fbf2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329_132134.log.json) | +| RepPoints | X-101-FPN-DCN | Y | none | moment | 2x | 7.1 | 9.3 | 44.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329-f87da1ea.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329_132201.log.json) | + +**Notes:** + +- `R-xx`, `X-xx` denote the ResNet and ResNeXt architectures, respectively. +- `DCN` denotes replacing the 3x3 conv with a 3x3 deformable convolution in the `c3-c5` stages of the backbone. +- `none` in the `anchor` column means a 2-d `center point` (x,y) is used to represent the initial object hypothesis; `single` denotes that one 4-d anchor box (x,y,w,h) with an IoU-based label assignment criterion is adopted. +- `moment`, `partial MinMax`, `MinMax` in the `convert func` column are three functions to convert a point set to a pseudo box (see the sketch after these notes). +- Note that the results here are slightly different from those reported in the paper due to a framework change: while the original paper uses an [MXNet](https://mxnet.apache.org/) implementation, we re-implement the method in [PyTorch](https://pytorch.org/) based on mmdetection. 
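The conversion functions named in the notes above are compact enough to spell out. The sketch below is an editor's illustration, not the repo's implementation: the function name, the `(..., N, 2)` tensor layout, the 4-point subset for `partial_minmax`, and the `transfer_*` scalars (learned parameters in the real head) are all assumptions; in mmdet this logic lives inside the RepPoints head and is selected by `transform_method` in the configs of this directory.

```python
import torch

def points_to_pseudo_box(pts, method='moment', transfer_w=0.0, transfer_h=0.0):
    """Convert a point set of shape (..., N, 2) to a pseudo box (x1, y1, x2, y2)."""
    x, y = pts[..., 0], pts[..., 1]
    if method == 'minmax':
        # Tightest axis-aligned box around all N points.
        return torch.stack([x.min(-1).values, y.min(-1).values,
                            x.max(-1).values, y.max(-1).values], dim=-1)
    if method == 'partial_minmax':
        # Same idea, but only a fixed subset of points (here: the first 4) votes.
        x, y = x[..., :4], y[..., :4]
        return torch.stack([x.min(-1).values, y.min(-1).values,
                            x.max(-1).values, y.max(-1).values], dim=-1)
    if method == 'moment':
        # Box center = mean of the points; half extents = std of the points,
        # rescaled by exp() of a per-axis scalar that is learned in the real head.
        cx, cy = x.mean(-1), y.mean(-1)
        half_w = x.std(-1) * torch.exp(torch.as_tensor(transfer_w))
        half_h = y.std(-1) * torch.exp(torch.as_tensor(transfer_h))
        return torch.stack([cx - half_w, cy - half_h,
                            cx + half_w, cy + half_h], dim=-1)
    raise ValueError(f'unknown convert func: {method}')
```

For example, `points_to_pseudo_box(torch.rand(8, 9, 2), 'minmax')` maps eight 9-point sets (the configs below use `num_points=9`) to eight boxes. Only `moment` carries learnable state, which is why the `reppoints_moment_*` configs set `transform_method='moment'`.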
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b24c8db768423de12d1e8582bb26dd71218f52ee --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +model = dict(bbox_head=dict(transform_method='minmax', use_grid_points=True)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8d5013d30a059f067c71e877dbc0bcef94790154 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +model = dict( + bbox_head=dict(transform_method='minmax', use_grid_points=True), + # training and testing settings + train_cfg=dict( + init=dict( + assigner=dict( + _delete_=True, + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..8ec2056d66746b3a01319095d6c23c30ab6a44b5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/metafile.yml @@ -0,0 +1,176 @@ +Collections: + - Name: RepPoints + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Group Normalization + - FPN + - RepPoints + - ResNet + Paper: https://arxiv.org/abs/1904.11490 + README: configs/reppoints/README.md + +Models: + - Name: bbox_r50_grid_fpn_gn-neck+head_1x_coco + In Collection: RepPoints + Config: configs/reppoints/bbox_r50_grid_fpn_gn-neck+head_1x_coco.py + Metadata: + Training Memory (GB): 3.9 + inference time (ms/im): + - value: 62.89 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_fpn_gn-neck%2Bhead_1x_coco_20200329-c98bfa96.pth + + - Name: bbox_r50_grid_center_fpn_gn-neck+head_1x_coco + In Collection: RepPoints + Config: configs/reppoints/bbox_r50_grid_center_fpn_gn-neck+head_1x_coco.py + Metadata: + Training Memory (GB): 3.9 + inference time (ms/im): + - value: 64.94 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco/bbox_r50_grid_center_fpn_gn-neck%2Bhead_1x_coco_20200330-00f73d58.pth + + - Name: reppoints_moment_r50_fpn_1x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.3 + inference time (ms/im): + - value: 54.05 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_1x_coco/reppoints_moment_r50_fpn_1x_coco_20200330-b73db8d1.pth + + - Name: reppoints_moment_r50_fpn_gn-neck+head_1x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py + Metadata: + Training Memory (GB): 3.9 + inference time (ms/im): + - value: 57.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_1x_coco_20200329-4b38409a.pth + + - Name: reppoints_moment_r50_fpn_gn-neck+head_2x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py + Metadata: + Training Memory (GB): 3.9 + inference time (ms/im): + - value: 57.14 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r50_fpn_gn-neck%2Bhead_2x_coco_20200329-91babaa2.pth + + - Name: reppoints_moment_r101_fpn_gn-neck+head_2x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py + Metadata: + Training Memory (GB): 5.8 + inference time (ms/im): + - value: 72.99 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_gn-neck%2Bhead_2x_coco_20200329-4fbc7310.pth + + - Name: reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py + Metadata: + Training Memory (GB): 5.9 + inference time (ms/im): + - value: 82.64 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329-3309fbf2.pth + + - Name: reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco + In Collection: RepPoints + Config: configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py + Metadata: + Training Memory (GB): 7.1 + inference time (ms/im): + - value: 107.53 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.2 + Weights: 
https://download.openmmlab.com/mmdetection/v2.0/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck%2Bhead_2x_coco_20200329-f87da1ea.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints.png b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints.png new file mode 100644 index 0000000000000000000000000000000000000000..a9306d9ba6c659a670822213bf198099f9e125b1 Binary files /dev/null and b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints.png differ diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_minmax_r50_fpn_gn-neck+head_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_minmax_r50_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0f56a46b3c002cdec630bb06df66a4fc9e7804a8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_minmax_r50_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +model = dict(bbox_head=dict(transform_method='minmax')) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e223d80fab5eabf99da7ee28668d81d0f059d9cc --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py @@ -0,0 +1,8 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..118547096e67abb82c563ad128dd1a18309dd775 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r101_fpn_gn-neck+head_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..158a90670b86a78d872e7db4cf80db72401481b8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r50_fpn_1x_coco.py @@ -0,0 +1,67 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='RepPointsDetector', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + 
init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_input', + num_outs=5), + bbox_head=dict( + type='RepPointsHead', + num_classes=80, + in_channels=256, + feat_channels=256, + point_feat_channels=256, + stacked_convs=3, + num_points=9, + gradient_mul=0.1, + point_strides=[8, 16, 32, 64, 128], + point_base_scale=4, + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_init=dict(type='SmoothL1Loss', beta=0.11, loss_weight=0.5), + loss_bbox_refine=dict(type='SmoothL1Loss', beta=0.11, loss_weight=1.0), + transform_method='moment'), + # training and testing settings + train_cfg=dict( + init=dict( + assigner=dict(type='PointAssigner', scale=4, pos_num=1), + allowed_border=-1, + pos_weight=-1, + debug=False), + refine=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) +optimizer = dict(lr=0.01) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..337f167c820979f345eef120a936195d8f5975c2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,4 @@ +_base_ = './reppoints_moment_r50_fpn_1x_coco.py' +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict(neck=dict(norm_cfg=norm_cfg), bbox_head=dict(norm_cfg=norm_cfg)) +optimizer = dict(lr=0.01) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..feca44aa67126b3326e45b1c9fbbf9e9c3bec11a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py @@ -0,0 +1,3 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c0a12d00615aaa347ad6790c110be1304458501d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_moment_x101_fpn_dconv_c3-c5_gn-neck+head_2x_coco.py @@ -0,0 +1,16 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + dcn=dict(type='DCN', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, 
True, True), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_partial_minmax_r50_fpn_gn-neck+head_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_partial_minmax_r50_fpn_gn-neck+head_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9a63bd0862be6d5f363c5d481bade3e8e2e8433a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/reppoints/reppoints_partial_minmax_r50_fpn_gn-neck+head_1x_coco.py @@ -0,0 +1,2 @@ +_base_ = './reppoints_moment_r50_fpn_gn-neck+head_1x_coco.py' +model = dict(bbox_head=dict(transform_method='partial_minmax')) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/README.md new file mode 100644 index 0000000000000000000000000000000000000000..864a891d8674face7da1bd335d9a00b3a0c294c7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/README.md @@ -0,0 +1,65 @@ +# Res2Net for object detection and instance segmentation + +## Introduction + + + +We propose a novel building block for CNNs, namely Res2Net, by constructing hierarchical residual-like connections within one single residual block. The Res2Net represents multi-scale features at a granular level and increases the range of receptive fields for each network layer. + +| Backbone |Params. | GFLOPs | top-1 err. | top-5 err. | +| :-------------: |:----: | :-----: | :--------: | :--------: | +| ResNet-101 |44.6 M | 7.8 | 22.63 | 6.44 | +| ResNeXt-101-64x4d |83.5M | 15.5 | 20.40 | - | +| HRNetV2p-W48 | 77.5M | 16.1 | 20.70 | 5.50 | +| Res2Net-101 | 45.2M | 8.3 | 18.77 | 4.64 | + +Compared with other backbone networks, Res2Net requires fewer parameters and FLOPs. + +**Note:** + +- GFLOPs for classification are calculated with image size (224x224). 
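The "hierarchical residual-like connections within one single residual block" can be made concrete with a short sketch. This is an editor's simplification under stated assumptions (class and variable names are invented; the 1x1 convs, BN/ReLU, and stride handling of the real bottleneck are omitted), not the repo's `Res2Net` backbone:

```python
import torch
import torch.nn as nn

class Res2NetUnit(nn.Module):
    """The 3x3 stage of a Res2Net bottleneck: split channels into `scales`
    groups and wire them with hierarchical residual-like connections."""

    def __init__(self, channels: int, scales: int = 4):
        super().__init__()
        assert channels % scales == 0, 'channels must split evenly across scales'
        self.scales = scales
        width = channels // scales
        # One 3x3 conv per group except the first, which passes through.
        self.convs = nn.ModuleList(
            nn.Conv2d(width, width, 3, padding=1, bias=False)
            for _ in range(scales - 1))

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        xs = torch.chunk(x, self.scales, dim=1)    # channel-wise split
        out, y = [xs[0]], None                     # first group: identity
        for i, conv in enumerate(self.convs, start=1):
            y = xs[i] if y is None else xs[i] + y  # feed the previous output
            y = conv(y)                            # into the next 3x3 conv
            out.append(y)
        return torch.cat(out, dim=1)               # regroup to original width

# e.g. Res2NetUnit(104, scales=4) -> four 26-wide groups, matching the
# scales=4 / base_width=26 settings used by the configs in this directory.
```

Because each later group also receives the output of the group before it, the effective receptive field grows within a single block, which is the paper's route to multi-scale features at a granular level with fewer parameters and FLOPs.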
+ +```latex +@article{gao2019res2net, + title={Res2Net: A New Multi-scale Backbone Architecture}, + author={Gao, Shang-Hua and Cheng, Ming-Ming and Zhao, Kai and Zhang, Xin-Yu and Yang, Ming-Hsuan and Torr, Philip}, + journal={IEEE TPAMI}, + year={2020}, + doi={10.1109/TPAMI.2019.2938758}, +} +``` + +## Results and Models + +### Faster R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +|R2-101-FPN | pytorch | 2x | 7.4 | - | 43.0 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/res2net/faster_rcnn_r2_101_fpn_2x_coco/faster_rcnn_r2_101_fpn_2x_coco-175f1da6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/res2net/faster_rcnn_r2_101_fpn_2x_coco/faster_rcnn_r2_101_fpn_2x_coco_20200514_231734.log.json) | + +### Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +|R2-101-FPN | pytorch | 2x | 7.9 | - | 43.6 | 38.7 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/mask_rcnn_r2_101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/res2net/mask_rcnn_r2_101_fpn_2x_coco/mask_rcnn_r2_101_fpn_2x_coco-17f061e8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/res2net/mask_rcnn_r2_101_fpn_2x_coco/mask_rcnn_r2_101_fpn_2x_coco_20200515_002413.log.json) | + +### Cascade R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +|R2-101-FPN | pytorch | 20e | 7.8 | - | 45.7 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_rcnn_r2_101_fpn_20e_coco/cascade_rcnn_r2_101_fpn_20e_coco-f4b7b7db.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_rcnn_r2_101_fpn_20e_coco/cascade_rcnn_r2_101_fpn_20e_coco_20200515_091644.log.json) | + +### Cascade Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +|R2-101-FPN | pytorch | 20e | 9.5 | - | 46.4 | 40.0 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco/cascade_mask_rcnn_r2_101_fpn_20e_coco-8a7b41e1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco/cascade_mask_rcnn_r2_101_fpn_20e_coco_20200515_091645.log.json) | + +### Hybrid Task Cascade (HTC) + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +| R2-101-FPN | pytorch | 20e | - | - | 47.5 | 41.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/res2net/htc_r2_101_fpn_20e_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/res2net/htc_r2_101_fpn_20e_coco/htc_r2_101_fpn_20e_coco-3a8d2112.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/res2net/htc_r2_101_fpn_20e_coco/htc_r2_101_fpn_20e_coco_20200515_150029.log.json) | + +- Res2Net ImageNet pretrained models are in [Res2Net-PretrainedModels](https://github.com/Res2Net/Res2Net-PretrainedModels). +- More applications of Res2Net are in [Res2Net-Github](https://github.com/Res2Net/). diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6b6c0010a44be43131defb002767eeb5b5d15600 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py @@ -0,0 +1,10 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://res2net101_v1d_26w_4s'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..10dddbb467993a023f8e498b57f86775b142ce4f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py @@ -0,0 +1,10 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://res2net101_v1d_26w_4s'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fc2221cbabf293b55098d543ef9f14d9f75f1909 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py @@ -0,0 +1,10 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://res2net101_v1d_26w_4s'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/htc_r2_101_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/htc_r2_101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..22d0c5da57aa00daa62ebccab73d29fbe5620938 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/htc_r2_101_fpn_20e_coco.py @@ -0,0 +1,13 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://res2net101_v1d_26w_4s'))) +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/mask_rcnn_r2_101_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/mask_rcnn_r2_101_fpn_2x_coco.py new file mode 
100644 index 0000000000000000000000000000000000000000..33aef1a54d4e6c7d30eb2a2abc67937005a24aae --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/mask_rcnn_r2_101_fpn_2x_coco.py @@ -0,0 +1,10 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://res2net101_v1d_26w_4s'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..a91bab2b6328c05228d39e26c3fe6de2ec52b771 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/res2net/metafile.yml @@ -0,0 +1,89 @@ +Collections: + - Name: Res2Net + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Res2Net + Paper: https://arxiv.org/abs/1904.01169 + README: configs/res2net/README.md + +Models: + - Name: faster_rcnn_r2_101_fpn_2x_coco + In Collection: Res2Net + Config: configs/res2net/faster_rcnn_r2_101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 7.4 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/res2net/faster_rcnn_r2_101_fpn_2x_coco/faster_rcnn_r2_101_fpn_2x_coco-175f1da6.pth + + - Name: mask_rcnn_r2_101_fpn_2x_coco + In Collection: Res2Net + Config: configs/res2net/mask_rcnn_r2_101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 7.9 + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/res2net/mask_rcnn_r2_101_fpn_2x_coco/mask_rcnn_r2_101_fpn_2x_coco-17f061e8.pth + + - Name: cascade_rcnn_r2_101_fpn_20e_coco + In Collection: Res2Net + Config: configs/res2net/cascade_rcnn_r2_101_fpn_20e_coco.py + Metadata: + Training Memory (GB): 7.8 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_rcnn_r2_101_fpn_20e_coco/cascade_rcnn_r2_101_fpn_20e_coco-f4b7b7db.pth + + - Name: cascade_mask_rcnn_r2_101_fpn_20e_coco + In Collection: Res2Net + Config: configs/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco.py + Metadata: + Training Memory (GB): 9.5 + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/res2net/cascade_mask_rcnn_r2_101_fpn_20e_coco/cascade_mask_rcnn_r2_101_fpn_20e_coco-8a7b41e1.pth + + - Name: htc_r2_101_fpn_20e_coco + In Collection: Res2Net + Config: configs/res2net/htc_r2_101_fpn_20e_coco.py + Metadata: + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/res2net/htc_r2_101_fpn_20e_coco/htc_r2_101_fpn_20e_coco-3a8d2112.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/README.md new file mode 100644 index 
0000000000000000000000000000000000000000..a23a39aeb919858a260518cbe1052eea03224fc9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/README.md @@ -0,0 +1,44 @@ +# ResNeSt: Split-Attention Networks + +## Introduction + +```latex +@article{zhang2020resnest, +title={ResNeSt: Split-Attention Networks}, +author={Zhang, Hang and Wu, Chongruo and Zhang, Zhongyue and Zhu, Yi and Zhang, Zhi and Lin, Haibin and Sun, Yue and He, Tong and Muller, Jonas and Manmatha, R. and Li, Mu and Smola, Alexander}, +journal={arXiv preprint arXiv:2004.08955}, +year={2020} +} +``` + +## Results and Models + +### Faster R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +|S-50-FPN | pytorch | 1x | 4.8 | - | 42.0 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/faster_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20200926_125502-20289c16.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/faster_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco-20200926_125502.log.json) | +|S-101-FPN | pytorch | 1x | 7.1 | - | 44.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/faster_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20201006_021058-421517f1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/faster_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco-20201006_021058.log.json) | + +### Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +|S-50-FPN | pytorch | 1x | 5.5 | - | 42.6 | 38.1 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20200926_125503-8a2c3d47.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco-20200926_125503.log.json) | +|S-101-FPN | pytorch | 1x | 7.8 | - | 45.2 | 40.2 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20201005_215831-af60cdf9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco-20201005_215831.log.json) | + +### Cascade R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +|S-50-FPN | pytorch | 1x | - | - | 44.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/cascade_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20201122_213640-763cc7b5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/cascade_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco-20201122_213640.log.json) | +|S-101-FPN | pytorch | 1x | 8.4 | - | 46.8 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/cascade_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20201005_113242-b9459f8f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/cascade_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco-20201005_113242.log.json) | + +### Cascade Mask R-CNN + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :-----: | :------: | :--------: | +|S-50-FPN | pytorch | 1x | - | - | 45.4 | 39.5 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/cascade_mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20201122_104428-99eca4c7.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/cascade_mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco-20201122_104428.log.json) | +|S-101-FPN | pytorch | 1x | 10.5 | - | 47.7 | 41.4 |[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/cascade_mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20201005_113243-42607475.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/cascade_mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco-20201005_113243.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..406f39db91bb5c5abacb76db969b9181df453466 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py' +model = dict( + backbone=dict( + stem_channels=128, + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='open-mmlab://resnest101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..83d75372fc561935e43542743c8814ca2734414d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py @@ -0,0 +1,118 @@ +_base_ = '../cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + backbone=dict( + type='ResNeSt', + stem_channels=64, + depth=50, + radix=2, + reduction_factor=4, + avg_down_stride=True, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='open-mmlab://resnest50')), + roi_head=dict( + bbox_head=[ + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_head=dict(norm_cfg=norm_cfg))) +# # use ResNeSt img_norm +img_norm_cfg = dict( + mean=[123.68, 116.779, 103.939], std=[58.393, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + 
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0a7476a3748b6ce80d25188284facfec13d9f86e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py' +model = dict( + backbone=dict( + stem_channels=128, + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='open-mmlab://resnest101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6ed7730104ca42e23a004827bb7aa0a114fa5e70 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py @@ -0,0 +1,116 @@ +_base_ = '../cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + backbone=dict( + type='ResNeSt', + stem_channels=64, + depth=50, + radix=2, + reduction_factor=4, + avg_down_stride=True, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='open-mmlab://resnest50')), + roi_head=dict( + bbox_head=[ + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared4Conv1FCBBoxHead', + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + norm_cfg=norm_cfg, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 
0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], )) +# # use ResNeSt img_norm +img_norm_cfg = dict( + mean=[123.68, 116.779, 103.939], std=[58.393, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=False, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..40a2f1f2c9d62f173e88893e4ef809e70e2cbf5b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py' +model = dict( + backbone=dict( + stem_channels=128, + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='open-mmlab://resnest101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..eb1ecd224cb86d6c296363ab53fb733848f6224c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py @@ -0,0 +1,62 @@ +_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + backbone=dict( + type='ResNeSt', + stem_channels=64, + depth=50, + radix=2, + reduction_factor=4, + avg_down_stride=True, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='open-mmlab://resnest50')), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg))) +# # use ResNeSt img_norm +img_norm_cfg = dict( + mean=[123.68, 116.779, 103.939], std=[58.393, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=False, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 
800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c882ba1421afdcc7100995da7ab10eb16bd3db25 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py' +model = dict( + backbone=dict( + stem_channels=128, + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='open-mmlab://resnest101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4e50deacbdecdccace68f77636edac7a29d4ef57 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py @@ -0,0 +1,64 @@ +_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py' +norm_cfg = dict(type='SyncBN', requires_grad=True) +model = dict( + backbone=dict( + type='ResNeSt', + stem_channels=64, + depth=50, + radix=2, + reduction_factor=4, + avg_down_stride=True, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=norm_cfg, + norm_eval=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='open-mmlab://resnest50')), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg), + mask_head=dict(norm_cfg=norm_cfg))) +# # use ResNeSt img_norm +img_norm_cfg = dict( + mean=[123.68, 116.779, 103.939], std=[58.393, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='LoadAnnotations', + with_bbox=True, + with_mask=True, + poly2mask=False), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', 
**img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..beb8ab283c10ee525c5f8dd696e6c6140360431e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/resnest/metafile.yml @@ -0,0 +1,131 @@ +Collections: + - Name: ResNeSt + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - ResNeSt + Paper: https://arxiv.org/abs/2004.08955 + README: configs/resnest/README.md + +Models: + - Name: faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco + In Collection: ResNeSt + Config: configs/resnest/faster_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py + Metadata: + Training Memory (GB): 4.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/faster_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20200926_125502-20289c16.pth + + - Name: faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco + In Collection: ResNeSt + Config: configs/resnest/faster_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py + Metadata: + Training Memory (GB): 7.1 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/faster_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/faster_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20201006_021058-421517f1.pth + + - Name: mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco + In Collection: ResNeSt + Config: configs/resnest/mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.6 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 38.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20200926_125503-8a2c3d47.pth + + - Name: mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco + In Collection: ResNeSt + Config: configs/resnest/mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py + Metadata: + Training Memory (GB): 7.8 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.2 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20201005_215831-af60cdf9.pth + + - Name: cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco + In Collection: ResNeSt + Config: configs/resnest/cascade_rcnn_s50_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + Weights:
https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/cascade_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20201122_213640-763cc7b5.pth + + - Name: cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco + In Collection: ResNeSt + Config: configs/resnest/cascade_rcnn_s101_fpn_syncbn-backbone+head_mstrain-range_1x_coco.py + Metadata: + Training Memory (GB): 8.4 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco/cascade_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain-range_1x_coco_20201005_113242-b9459f8f.pth + + - Name: cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco + In Collection: ResNeSt + Config: configs/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone+head_mstrain_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.4 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/cascade_mask_rcnn_s50_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20201122_104428-99eca4c7.pth + + - Name: cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco + In Collection: ResNeSt + Config: configs/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone+head_mstrain_1x_coco.py + Metadata: + Training Memory (GB): 10.5 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.7 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 41.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/resnest/cascade_mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco/cascade_mask_rcnn_s101_fpn_syncbn-backbone%2Bhead_mstrain_1x_coco_20201005_113243-42607475.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..c499fd299783468906347be47c4a8b8ddc41b9a7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/README.md @@ -0,0 +1,29 @@ +# Focal Loss for Dense Object Detection + +## Introduction + + + +```latex +@inproceedings{lin2017focal, + title={Focal loss for dense object detection}, + author={Lin, Tsung-Yi and Goyal, Priya and Girshick, Ross and He, Kaiming and Doll{\'a}r, Piotr}, + booktitle={Proceedings of the IEEE international conference on computer vision}, + year={2017} +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 3.5 | 18.6 | 36.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_caffe_fpn_1x_coco/retinanet_r50_caffe_fpn_1x_coco_20200531-f11027c5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_caffe_fpn_1x_coco/retinanet_r50_caffe_fpn_1x_coco_20200531_012518.log.json) | +| R-50-FPN | pytorch | 1x | 3.8 | 19.0 | 36.5 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130-c2398f9e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130_002941.log.json) | +| R-50-FPN | pytorch | 2x | - | - | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r50_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_2x_coco/retinanet_r50_fpn_2x_coco_20200131-fdb43119.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_2x_coco/retinanet_r50_fpn_2x_coco_20200131_114738.log.json) | +| R-101-FPN | caffe | 1x | 5.5 | 14.7 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_caffe_fpn_1x_coco/retinanet_r101_caffe_fpn_1x_coco_20200531-b428fa0f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_caffe_fpn_1x_coco/retinanet_r101_caffe_fpn_1x_coco_20200531_012536.log.json) | +| R-101-FPN | pytorch | 1x | 5.7 | 15.0 | 38.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_1x_coco/retinanet_r101_fpn_1x_coco_20200130-7a93545f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_1x_coco/retinanet_r101_fpn_1x_coco_20200130_003055.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 38.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_r101_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_2x_coco/retinanet_r101_fpn_2x_coco_20200131-5560aee8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_2x_coco/retinanet_r101_fpn_2x_coco_20200131_114859.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.0 | 12.1 | 39.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_1x_coco/retinanet_x101_32x4d_fpn_1x_coco_20200130-5c8b7ec4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_1x_coco/retinanet_x101_32x4d_fpn_1x_coco_20200130_003004.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 40.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_2x_coco/retinanet_x101_32x4d_fpn_2x_coco_20200131-237fc5e1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_2x_coco/retinanet_x101_32x4d_fpn_2x_coco_20200131_114812.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.0 | 8.7 | 41.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_1x_coco/retinanet_x101_64x4d_fpn_1x_coco_20200130-366f5af1.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_1x_coco/retinanet_x101_64x4d_fpn_1x_coco_20200130_003008.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 40.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_2x_coco/retinanet_x101_64x4d_fpn_2x_coco_20200131-bca068ab.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_2x_coco/retinanet_x101_64x4d_fpn_2x_coco_20200131_114833.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..363d1bf704ecde29057d5df9879844f299c9ae18 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/metafile.yml @@ -0,0 +1,207 @@ +Collections: + - Name: RetinaNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Focal Loss + - FPN + - ResNet + Paper: https://arxiv.org/abs/1708.02002 + README: configs/retinanet/README.md + +Models: + - Name: retinanet_r50_caffe_fpn_1x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.5 + inference time (ms/im): + - value: 53.76 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_caffe_fpn_1x_coco/retinanet_r50_caffe_fpn_1x_coco_20200531-f11027c5.pth + + - Name: retinanet_r50_fpn_1x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 3.8 + inference time (ms/im): + - value: 52.63 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 36.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_1x_coco/retinanet_r50_fpn_1x_coco_20200130-c2398f9e.pth + + - Name: retinanet_r50_fpn_2x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r50_fpn_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.4 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r50_fpn_2x_coco/retinanet_r50_fpn_2x_coco_20200131-fdb43119.pth + + - Name: retinanet_r101_caffe_fpn_1x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.5 + inference time (ms/im): + - value: 68.03 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_caffe_fpn_1x_coco/retinanet_r101_caffe_fpn_1x_coco_20200531-b428fa0f.pth + + - Name: retinanet_r101_fpn_1x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r101_fpn_1x_coco.py + Metadata: + Training Memory (GB): 5.7 + inference time (ms/im): + - 
value: 66.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_1x_coco/retinanet_r101_fpn_1x_coco_20200130-7a93545f.pth + + - Name: retinanet_r101_fpn_2x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_r101_fpn_2x_coco.py + Metadata: + Training Memory (GB): 5.7 + inference time (ms/im): + - value: 66.67 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_r101_fpn_2x_coco/retinanet_r101_fpn_2x_coco_20200131-5560aee8.pth + + - Name: retinanet_x101_32x4d_fpn_1x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 82.64 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_1x_coco/retinanet_x101_32x4d_fpn_1x_coco_20200130-5c8b7ec4.pth + + - Name: retinanet_x101_32x4d_fpn_2x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 82.64 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.1 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_32x4d_fpn_2x_coco/retinanet_x101_32x4d_fpn_2x_coco_20200131-237fc5e1.pth + + - Name: retinanet_x101_64x4d_fpn_1x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py + Metadata: + Training Memory (GB): 10.0 + inference time (ms/im): + - value: 114.94 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_1x_coco/retinanet_x101_64x4d_fpn_1x_coco_20200130-366f5af1.pth + + - Name: retinanet_x101_64x4d_fpn_2x_coco + In Collection: RetinaNet + Config: configs/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py + Metadata: + Training Memory (GB): 10.0 + inference time (ms/im): + - value: 114.94 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/retinanet/retinanet_x101_64x4d_fpn_2x_coco/retinanet_x101_64x4d_fpn_2x_coco_20200131-bca068ab.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..56eaae200fb839eddabc95f18a7a6889cb830100 --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './retinanet_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..a7f06002413dcdf2716975655a582a3eefaf007a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r101_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..721112a221953bb86dc3259e3991d7f0f740b26c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r101_fpn_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './retinanet_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..04c9af5898971b4a13c46d71362c111e8cabbbaf --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,41 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..4d7b8f2bd04598d64f1cf24cfaf9c155f9b21e87 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_1x_coco.py @@ -0,0 +1,46 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..eea9690eb159fe03865825bb9f9ca5fd6ff99d70 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './retinanet_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 23]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..8057650736eaab0b7b01a7957339124f73d6d6b0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = './retinanet_r50_caffe_fpn_mstrain_1x_coco.py' +# learning policy +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..04bd696b9589e37ad34c9fdd035b97e271d3b214 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, 
weight_decay=0.0001) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..927915fa8c63d380cc4bd62a580ffaad8b1ce386 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_r50_fpn_2x_coco.py @@ -0,0 +1,4 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..765a4c2cc0f69bf13891bf371c94c17b6cd5f30c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..14de96faf70180d7828a670630a8f48a3cd1081d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_x101_32x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './retinanet_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..948cd18e4d995d18d947b345ba7229b5cad60eb1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_x101_64x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './retinanet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ad04b6eea793add40c81d1d7096481597357d5bd --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/retinanet/retinanet_x101_64x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './retinanet_r50_fpn_2x_coco.py' +model = dict( + 
backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..03b293d9e13ff46ab90f98202b774ba6b5331ef8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/README.md @@ -0,0 +1,29 @@ +# Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks + +## Introduction + + + +```latex +@inproceedings{ren2015faster, + title={Faster r-cnn: Towards real-time object detection with region proposal networks}, + author={Ren, Shaoqing and He, Kaiming and Girshick, Ross and Sun, Jian}, + booktitle={Advances in neural information processing systems}, + year={2015} +} +``` + +## Results and models + +| Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | AR1000 | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| R-50-FPN | caffe | 1x | 3.5 | 22.6 | 58.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r50_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_caffe_fpn_1x_coco/rpn_r50_caffe_fpn_1x_coco_20200531-5b903a37.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_caffe_fpn_1x_coco/rpn_r50_caffe_fpn_1x_coco_20200531_012334.log.json) | +| R-50-FPN | pytorch | 1x | 3.8 | 22.3 | 58.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_fpn_1x_coco/rpn_r50_fpn_1x_coco_20200218-5525fa2e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_fpn_1x_coco/rpn_r50_fpn_1x_coco_20200218_151240.log.json) | +| R-50-FPN | pytorch | 2x | - | - | 58.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r50_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_fpn_2x_coco/rpn_r50_fpn_2x_coco_20200131-0728c9b3.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r50_fpn_2x_coco/rpn_r50_fpn_2x_coco_20200131_190631.log.json) | +| R-101-FPN | caffe | 1x | 5.4 | 17.3 | 60.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r101_caffe_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_caffe_fpn_1x_coco/rpn_r101_caffe_fpn_1x_coco_20200531-0629a2e2.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_caffe_fpn_1x_coco/rpn_r101_caffe_fpn_1x_coco_20200531_012345.log.json) | +| R-101-FPN | pytorch | 1x | 5.8 | 16.5 | 59.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_fpn_1x_coco/rpn_r101_fpn_1x_coco_20200131-2ace2249.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_fpn_1x_coco/rpn_r101_fpn_1x_coco_20200131_191000.log.json) | +| R-101-FPN | pytorch | 2x | - | - | 60.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_r101_fpn_2x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_fpn_2x_coco/rpn_r101_fpn_2x_coco_20200131-24e3db1a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_r101_fpn_2x_coco/rpn_r101_fpn_2x_coco_20200131_191106.log.json) | +| X-101-32x4d-FPN | pytorch | 1x | 7.0 | 13.0 | 60.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_x101_32x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_32x4d_fpn_1x_coco/rpn_x101_32x4d_fpn_1x_coco_20200219-b02646c6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_32x4d_fpn_1x_coco/rpn_x101_32x4d_fpn_1x_coco_20200219_012037.log.json) | +| X-101-32x4d-FPN | pytorch | 2x | - | - | 61.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_x101_32x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_32x4d_fpn_2x_coco/rpn_x101_32x4d_fpn_2x_coco_20200208-d22bd0bb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_32x4d_fpn_2x_coco/rpn_x101_32x4d_fpn_2x_coco_20200208_200752.log.json) | +| X-101-64x4d-FPN | pytorch | 1x | 10.1 | 9.1 | 61.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_x101_64x4d_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_64x4d_fpn_1x_coco/rpn_x101_64x4d_fpn_1x_coco_20200208-cde6f7dd.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_64x4d_fpn_1x_coco/rpn_x101_64x4d_fpn_1x_coco_20200208_200752.log.json) | +| X-101-64x4d-FPN | pytorch | 2x | - | - | 61.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/rpn/rpn_x101_64x4d_fpn_2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_64x4d_fpn_2x_coco/rpn_x101_64x4d_fpn_2x_coco_20200208-c65f524f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/rpn/rpn_x101_64x4d_fpn_2x_coco/rpn_x101_64x4d_fpn_2x_coco_20200208_200752.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r101_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r101_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..27be94638a989f238972e85f9c14e1bcba0d09ac --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r101_caffe_fpn_1x_coco.py @@ -0,0 +1,7 @@ +_base_ = './rpn_r50_caffe_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet101_caffe'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..962728ff08abb4652c617a085649575b6cfdcbf8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './rpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r101_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ac7671c1c2421c0caa7b42d012cc3a2edc068934 --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r101_fpn_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './rpn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r50_caffe_c4_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r50_caffe_c4_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6da0ee94906fd8febaf69786976e478ef8f35c9e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r50_caffe_c4_1x_coco.py @@ -0,0 +1,38 @@ +_base_ = [ + '../_base_/models/rpn_r50_caffe_c4.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# dataset settings +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_label=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +evaluation = dict(interval=1, metric='proposal_fast') diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r50_caffe_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r50_caffe_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..68c36fa8caa0d0715128b02da03d14e7f5b27862 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r50_caffe_fpn_1x_coco.py @@ -0,0 +1,41 @@ +_base_ = './rpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + norm_cfg=dict(requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe'))) +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_label=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + 
test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..26f95a3402f9fd2d54c5919484e2f4958beb8a34 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r50_fpn_1x_coco.py @@ -0,0 +1,18 @@ +_base_ = [ + '../_base_/models/rpn_r50_fpn.py', '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_label=False), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes']), +] +data = dict(train=dict(pipeline=train_pipeline)) +evaluation = dict(interval=1, metric='proposal_fast') diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r50_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r50_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2f264bfe4234c870839ee77e3a671464aacc7813 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_r50_fpn_2x_coco.py @@ -0,0 +1,5 @@ +_base_ = './rpn_r50_fpn_1x_coco.py' + +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_x101_32x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_x101_32x4d_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d0c73948ac56afa34b9d6c8d22d6158271306b8c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_x101_32x4d_fpn_1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './rpn_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_x101_32x4d_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_x101_32x4d_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c6880b762abc8f5d3bf12f278054d76958756fb2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_x101_32x4d_fpn_2x_coco.py @@ -0,0 +1,14 @@ +_base_ = './rpn_r50_fpn_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_x101_64x4d_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_x101_64x4d_fpn_1x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..96e691a912c424f09add038c75631a2e1fefeffc
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_x101_64x4d_fpn_1x_coco.py
@@ -0,0 +1,14 @@
+_base_ = './rpn_r50_fpn_1x_coco.py'
+model = dict(
+    backbone=dict(
+        type='ResNeXt',
+        depth=101,
+        groups=64,
+        base_width=4,
+        num_stages=4,
+        out_indices=(0, 1, 2, 3),
+        frozen_stages=1,
+        norm_cfg=dict(type='BN', requires_grad=True),
+        style='pytorch',
+        init_cfg=dict(
+            type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d')))
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_x101_64x4d_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_x101_64x4d_fpn_2x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..4182a39667c47d774a1df9d34a1bc2fe60b45538
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/rpn/rpn_x101_64x4d_fpn_2x_coco.py
@@ -0,0 +1,14 @@
+_base_ = './rpn_r50_fpn_2x_coco.py'
+model = dict(
+    backbone=dict(
+        type='ResNeXt',
+        depth=101,
+        groups=64,
+        base_width=4,
+        num_stages=4,
+        out_indices=(0, 1, 2, 3),
+        frozen_stages=1,
+        norm_cfg=dict(type='BN', requires_grad=True),
+        style='pytorch',
+        init_cfg=dict(
+            type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d')))
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..c3bf16987b5d2d4a33e249222d4615e7151f2d2e
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/README.md
@@ -0,0 +1,37 @@
+# Side-Aware Boundary Localization for More Precise Object Detection
+
+## Introduction
+
+
+
+We provide config files to reproduce the object detection results in the ECCV 2020 Spotlight paper for [Side-Aware Boundary Localization for More Precise Object Detection](https://arxiv.org/abs/1912.04260).
+
+```latex
+@inproceedings{Wang_2020_ECCV,
+  title = {Side-Aware Boundary Localization for More Precise Object Detection},
+  author = {Jiaqi Wang and Wenwei Zhang and Yuhang Cao and Kai Chen and Jiangmiao Pang and Tao Gong and Jianping Shi and Chen Change Loy and Dahua Lin},
+  booktitle = {ECCV},
+  year = {2020}
+}
+```
+
+## Results and Models
+
+The results on COCO 2017 val are shown in the table below (results on test-dev are usually slightly higher than on val).
+Single-scale testing (1333x800) is adopted in all results.
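+
+As a quick usage reference, any config in this folder can be run through MMDetection's high-level inference API. The snippet below is a minimal sketch, assuming an MMDetection 2.x environment; the checkpoint filename and test image are placeholders (weights can be fetched from the download links in the tables).
+
+```python
+from mmdet.apis import inference_detector, init_detector
+
+# Config shipped in this folder; the checkpoint path is a placeholder for a
+# file downloaded from the results table below.
+config_file = 'configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py'
+checkpoint_file = 'sabl_faster_rcnn_r50_fpn_1x_coco-e867595b.pth'
+
+model = init_detector(config_file, checkpoint_file, device='cuda:0')
+result = inference_detector(model, 'demo.jpg')  # list of per-class bbox arrays
+```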
+ +| Method | Backbone | Lr schd | ms-train | box AP | Config | Download | +| :----------------: | :-------: | :-----: | :------: | :----: | :----------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| SABL Faster R-CNN | R-50-FPN | 1x | N | 39.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r50_fpn_1x_coco/sabl_faster_rcnn_r50_fpn_1x_coco-e867595b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r50_fpn_1x_coco/20200830_130324.log.json) | +| SABL Faster R-CNN | R-101-FPN | 1x | N | 41.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r101_fpn_1x_coco/sabl_faster_rcnn_r101_fpn_1x_coco-f804c6c1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r101_fpn_1x_coco/20200830_183949.log.json) | +| SABL Cascade R-CNN | R-50-FPN | 1x | N | 41.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco/sabl_cascade_rcnn_r50_fpn_1x_coco-e1748e5e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco/20200831_033726.log.json) | +| SABL Cascade R-CNN | R-101-FPN | 1x | N | 43.0 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco/sabl_cascade_rcnn_r101_fpn_1x_coco-2b83e87c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco/20200831_141745.log.json) | + +| Method | Backbone | GN | Lr schd | ms-train | box AP | Config | Download | +| :------------: | :-------: | :---: | :-----: | :---------: | :----: | :---------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: | +| SABL RetinaNet | R-50-FPN | N | 1x | N | 37.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_1x_coco/sabl_retinanet_r50_fpn_1x_coco-6c54fd4f.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_1x_coco/20200830_053451.log.json) | +| SABL RetinaNet | R-50-FPN | Y | 1x | N | 38.8 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py) | 
[model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_gn_1x_coco/sabl_retinanet_r50_fpn_gn_1x_coco-e16dfcf1.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_gn_1x_coco/20200831_141955.log.json) | +| SABL RetinaNet | R-101-FPN | N | 1x | N | 39.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_1x_coco/sabl_retinanet_r101_fpn_1x_coco-42026904.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_1x_coco/20200831_034256.log.json) | +| SABL RetinaNet | R-101-FPN | Y | 1x | N | 40.5 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_1x_coco/sabl_retinanet_r101_fpn_gn_1x_coco-40a893e8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_1x_coco/20200830_201422.log.json) | +| SABL RetinaNet | R-101-FPN | Y | 2x | Y (640~800) | 42.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco-1e63382c.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco/20200830_144807.log.json) | +| SABL RetinaNet | R-101-FPN | Y | 2x | Y (480~960) | 43.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco-5342f857.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco/20200830_164537.log.json) | diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..7680afd9a5424292c19296ff2158a1dfa7938c28 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/metafile.yml @@ -0,0 +1,135 @@ +Collections: + - Name: SABL + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + - SABL + Paper: https://arxiv.org/abs/1912.04260 + README: configs/sabl/README.md + +Models: + - Name: sabl_faster_rcnn_r50_fpn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r50_fpn_1x_coco/sabl_faster_rcnn_r50_fpn_1x_coco-e867595b.pth + + - Name: sabl_faster_rcnn_r101_fpn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r101_fpn_1x_coco/sabl_faster_rcnn_r101_fpn_1x_coco-f804c6c1.pth + + - Name: 
sabl_cascade_rcnn_r50_fpn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco/sabl_cascade_rcnn_r50_fpn_1x_coco-e1748e5e.pth + + - Name: sabl_cascade_rcnn_r101_fpn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco/sabl_cascade_rcnn_r101_fpn_1x_coco-2b83e87c.pth + + - Name: sabl_retinanet_r50_fpn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_1x_coco/sabl_retinanet_r50_fpn_1x_coco-6c54fd4f.pth + + - Name: sabl_retinanet_r50_fpn_gn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 38.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_gn_1x_coco/sabl_retinanet_r50_fpn_gn_1x_coco-e16dfcf1.pth + + - Name: sabl_retinanet_r101_fpn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 39.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_1x_coco/sabl_retinanet_r101_fpn_1x_coco-42026904.pth + + - Name: sabl_retinanet_r101_fpn_gn_1x_coco + In Collection: SABL + Config: configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_1x_coco/sabl_retinanet_r101_fpn_gn_1x_coco-40a893e8.pth + + - Name: sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco + In Collection: SABL + Config: configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco-1e63382c.pth + + - Name: sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco + In Collection: SABL + Config: configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco-5342f857.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..64fe2304c0f34c366ff443d4531ae07c48d915d8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,90 @@ +_base_ 
= [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + roi_head=dict(bbox_head=[ + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.7), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)), + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.5), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)), + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.3), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, loss_weight=1.0)) + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4b28a59280e6701d31afeeaae7ae12cdbd4fb95e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,86 @@ +_base_ = [ + '../_base_/models/cascade_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + roi_head=dict(bbox_head=[ + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.7), + 
loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)), + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.5), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)), + dict( + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.3), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, loss_weight=1.0)) + ])) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e48d4259b78aa4494a9de1deabdf40c0d37d9816 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py @@ -0,0 +1,38 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + roi_head=dict( + bbox_head=dict( + _delete_=True, + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.7), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..732c7ba3f607e2ac68f16acceddd16b1269aa2cf --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,34 @@ 
+_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict( + _delete_=True, + type='SABLHead', + num_classes=80, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=1.7), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox_reg=dict(type='SmoothL1Loss', beta=0.1, + loss_weight=1.0)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b08e916c9f9d158dd89a3a13418cc51bd25ef953 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py @@ -0,0 +1,54 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..fc30d63dc58b44deda01790e6f432db0fe957a1e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py @@ -0,0 +1,56 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + 
checkpoint='torchvision://resnet101')), + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + norm_cfg=norm_cfg, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e8fe16646278fba3aba64742bb9912984720489b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py @@ -0,0 +1,73 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +# model settings +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + norm_cfg=norm_cfg, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 960)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 
'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..30c43399f7bf2ec1f67aee3265565a8067fe2b6a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py @@ -0,0 +1,73 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +# model settings +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + norm_cfg=norm_cfg, + bbox_coder=dict( + type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +data = dict(train=dict(pipeline=train_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6fe6bd660230eedf70f87072e5abec66036d865f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py @@ -0,0 +1,50 @@ +_base_ = [ + '../_base_/models/retinanet_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# model settings +model = dict( + bbox_head=dict( + _delete_=True, + type='SABLRetinaHead', + num_classes=80, + in_channels=256, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + 
            type='AnchorGenerator',
+            octave_base_scale=4,
+            scales_per_octave=3,
+            ratios=[0.5, 1.0, 2.0],
+            strides=[8, 16, 32, 64, 128]),
+        square_anchor_generator=dict(
+            type='AnchorGenerator',
+            ratios=[1.0],
+            scales=[4],
+            strides=[8, 16, 32, 64, 128]),
+        bbox_coder=dict(
+            type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0),
+        loss_cls=dict(
+            type='FocalLoss',
+            use_sigmoid=True,
+            gamma=2.0,
+            alpha=0.25,
+            loss_weight=1.0),
+        loss_bbox_cls=dict(
+            type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5),
+        loss_bbox_reg=dict(
+            type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5)),
+    # training and testing settings
+    train_cfg=dict(
+        assigner=dict(
+            type='ApproxMaxIoUAssigner',
+            pos_iou_thr=0.5,
+            neg_iou_thr=0.4,
+            min_pos_iou=0.0,
+            ignore_iof_thr=-1),
+        allowed_border=-1,
+        pos_weight=-1,
+        debug=False))
+# optimizer
+optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py
new file mode 100644
index 0000000000000000000000000000000000000000..6acf080afe1b04e50467b16b60700feb5c12e886
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py
@@ -0,0 +1,52 @@
+_base_ = [
+    '../_base_/models/retinanet_r50_fpn.py',
+    '../_base_/datasets/coco_detection.py',
+    '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
+]
+# model settings
+norm_cfg = dict(type='GN', num_groups=32, requires_grad=True)
+model = dict(
+    bbox_head=dict(
+        _delete_=True,
+        type='SABLRetinaHead',
+        num_classes=80,
+        in_channels=256,
+        stacked_convs=4,
+        feat_channels=256,
+        approx_anchor_generator=dict(
+            type='AnchorGenerator',
+            octave_base_scale=4,
+            scales_per_octave=3,
+            ratios=[0.5, 1.0, 2.0],
+            strides=[8, 16, 32, 64, 128]),
+        square_anchor_generator=dict(
+            type='AnchorGenerator',
+            ratios=[1.0],
+            scales=[4],
+            strides=[8, 16, 32, 64, 128]),
+        norm_cfg=norm_cfg,
+        bbox_coder=dict(
+            type='BucketingBBoxCoder', num_buckets=14, scale_factor=3.0),
+        loss_cls=dict(
+            type='FocalLoss',
+            use_sigmoid=True,
+            gamma=2.0,
+            alpha=0.25,
+            loss_weight=1.0),
+        loss_bbox_cls=dict(
+            type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.5),
+        loss_bbox_reg=dict(
+            type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5)),
+    # training and testing settings
+    train_cfg=dict(
+        assigner=dict(
+            type='ApproxMaxIoUAssigner',
+            pos_iou_thr=0.5,
+            neg_iou_thr=0.4,
+            min_pos_iou=0.0,
+            ignore_iof_thr=-1),
+        allowed_border=-1,
+        pos_weight=-1,
+        debug=False))
+# optimizer
+optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..3757be13357876528cf154f4d464ead5aa7f13eb
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/README.md
@@ -0,0 +1,51 @@
+# SCNet
+
+## Introduction
+
+
+
+We provide the code for reproducing the experimental results of [SCNet](https://arxiv.org/abs/2012.10150).
+
+```
+@inproceedings{vu2019cascade,
+  title={SCNet: Training Inference Sample Consistency for Instance Segmentation},
+  author={Vu, Thang and Haeyong, Kang and Yoo, Chang D},
+  booktitle={AAAI},
+  year={2021}
+}
+```
+
+## Dataset
+
+SCNet requires the COCO and [COCO-stuff](http://calvin.inf.ed.ac.uk/wp-content/uploads/data/cocostuffdataset/stuffthingmaps_trainval2017.zip) datasets for training. You need to download COCO-stuff and extract it into the COCO dataset path.
+The directory structure should look like this:
+
+```none
+mmdetection
+├── mmdet
+├── tools
+├── configs
+├── data
+│   ├── coco
+│   │   ├── annotations
+│   │   ├── train2017
+│   │   ├── val2017
+│   │   ├── test2017
+│   │   ├── stuffthingmaps
+```
+
+## Results and Models
+
+The results on COCO 2017 val are shown in the table below (results on test-dev are usually slightly higher than on val).
+
+| Backbone | Style | Lr schd | Mem (GB) | Inf speed (fps) | box AP | mask AP | TTA box AP | TTA mask AP | Config | Download |
+|:---------------:|:-------:|:-------:|:--------:|:---------------:|:------:|:-------:|:----------:|:-----------:|:------:|:------------:|
+| R-50-FPN | pytorch | 1x | 7.0 | 6.2 | 43.5 | 39.2 | 44.8 | 40.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scnet/scnet_r50_fpn_1x_coco.py) | [model](https://drive.google.com/file/d/1K5_8-P0EC43WZFtoO3q9_JE-df8pEc7J/view?usp=sharing) \| [log](https://drive.google.com/file/d/1ZFS6QhFfxlOnDYPiGpSDP_Fzgb7iDGN3/view?usp=sharing) |
+| R-50-FPN | pytorch | 20e | 7.0 | 6.2 | 44.5 | 40.0 | 45.8 | 41.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scnet/scnet_r50_fpn_20e_coco.py) | [model](https://drive.google.com/file/d/15VGLCt5-IO5TbzB4Kw6ZyoF6QH0Q511A/view?usp=sharing) \| [log](https://drive.google.com/file/d/1-LnkOXN8n5ojQW34H0qZ625cgrnWpqSX/view?usp=sharing) |
+| R-101-FPN | pytorch | 20e | 8.9 | 5.8 | 45.8 | 40.9 | 47.3 | 42.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scnet/scnet_r101_fpn_20e_coco.py) | [model](https://drive.google.com/file/d/1aeCGHsOBdfIqVBnBPp0JUE_RSIau3583/view?usp=sharing) \| [log](https://drive.google.com/file/d/1iRx-9GRgTaIDsz-we3DGwFVH22nbvCLa/view?usp=sharing) |
+| X-101-64x4d-FPN | pytorch | 20e | 13.2 | 4.9 | 47.5 | 42.3 | 48.9 | 44.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scnet/scnet_x101_64x4d_fpn_20e_coco.py) | [model](https://drive.google.com/file/d/1YjgutUKz4TTPpqSWGKUTkZJ8_X-kyCfY/view?usp=sharing) \| [log](https://drive.google.com/file/d/1OsfQJ8gwtqIQ61k358yxY21sCvbUcRjs/view?usp=sharing) |
+
+### Notes
+
+- Training hyper-parameters are identical to those of [HTC](https://github.com/open-mmlab/mmdetection/tree/master/configs/htc).
+- TTA means Test Time Augmentation, which applies horizontal flipping and multi-scale testing. Refer to [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scnet/scnet_r50_fpn_1x_coco.py).
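+
+Because a missing `stuffthingmaps` folder only surfaces once training reaches the semantic head, it can help to sanity-check the layout up front. The snippet below is a minimal sketch, assuming the default `data/coco` root from the base config (adjust the path if yours differs):
+
+```python
+import os.path as osp
+
+from mmcv import Config
+
+cfg = Config.fromfile('configs/scnet/scnet_r50_fpn_1x_coco.py')
+print(cfg.model.type)  # 'SCNet'
+
+# SCNet's semantic head consumes COCO-stuff maps, so they must be extracted
+# under the dataset root before training starts.
+stuff_dir = 'data/coco/stuffthingmaps'
+if not osp.isdir(stuff_dir):
+    raise FileNotFoundError(f'extract COCO-stuff into {stuff_dir} first')
+```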
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..c6429bba83c860538fd360ceada089cfbeaf6f88 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/metafile.yml @@ -0,0 +1,111 @@ +Collections: + - Name: SCNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + - SCNet + Paper: https://arxiv.org/abs/2012.10150 + README: configs/scnet/README.md + +Models: + - Name: scnet_r50_fpn_1x_coco + In Collection: SCNet + Config: configs/scnet/scnet_r50_fpn_1x_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 161.29 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 39.2 + Weights: https://drive.google.com/file/d/1K5_8-P0EC43WZFtoO3q9_JE-df8pEc7J/view?usp=sharing + + - Name: scnet_r50_fpn_20e_coco + In Collection: SCNet + Config: configs/scnet/scnet_r50_fpn_20e_coco.py + Metadata: + Training Memory (GB): 7.0 + inference time (ms/im): + - value: 161.29 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.0 + Weights: https://drive.google.com/file/d/15VGLCt5-IO5TbzB4Kw6ZyoF6QH0Q511A/view?usp=sharing + + - Name: scnet_r101_fpn_20e_coco + In Collection: SCNet + Config: configs/scnet/scnet_r101_fpn_20e_coco.py + Metadata: + Training Memory (GB): 8.9 + inference time (ms/im): + - value: 172.41 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.8 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 40.9 + Weights: https://drive.google.com/file/d/1aeCGHsOBdfIqVBnBPp0JUE_RSIau3583/view?usp=sharing + + - Name: scnet_x101_64x4d_fpn_20e_coco + In Collection: SCNet + Config: configs/scnet/scnet_x101_64x4d_fpn_20e_coco.py + Metadata: + Training Memory (GB): 13.2 + inference time (ms/im): + - value: 204.08 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (800, 1333) + Epochs: 20 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 47.5 + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 42.3 + Weights: https://drive.google.com/file/d/1YjgutUKz4TTPpqSWGKUTkZJ8_X-kyCfY/view?usp=sharing diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_r101_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_r101_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ebba52978b23c07a68e3563033c860a95dd515b6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_r101_fpn_20e_coco.py @@ -0,0 +1,6 @@ +_base_ = './scnet_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_r50_fpn_1x_coco.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e4215a6d2d0b90f8ccd9c1291f6ca222c0ff554f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_r50_fpn_1x_coco.py @@ -0,0 +1,136 @@ +_base_ = '../htc/htc_r50_fpn_1x_coco.py' +# model settings +model = dict( + type='SCNet', + roi_head=dict( + _delete_=True, + type='SCNetRoIHead', + num_stages=3, + stage_loss_weights=[1, 0.5, 0.25], + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='SCNetBBoxHead', + num_shared_fcs=2, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='SCNetBBoxHead', + num_shared_fcs=2, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='SCNetBBoxHead', + num_shared_fcs=2, + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + mask_head=dict( + type='SCNetMaskHead', + num_convs=12, + in_channels=256, + conv_out_channels=256, + num_classes=80, + conv_to_res=True, + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)), + semantic_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=0), + out_channels=256, + featmap_strides=[8]), + semantic_head=dict( + type='SCNetSemanticHead', + num_ins=5, + fusion_level=1, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=183, + ignore_label=255, + loss_weight=0.2, + conv_to_res=True), + glbctx_head=dict( + type='GlobalContextHead', + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_weight=3.0, + conv_to_res=True), + feat_relay_head=dict( + type='FeatureRelayHead', + in_channels=1024, + out_conv_channels=256, + roi_feat_size=7, + scale_factor=2))) + +# uncomment below code to enable test time augmentations +# img_norm_cfg = dict( +# mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +# test_pipeline = [ +# dict(type='LoadImageFromFile'), +# dict( +# type='MultiScaleFlipAug', +# img_scale=[(600, 900), (800, 1200), (1000, 1500), (1200, 1800), +# (1400, 2100)], +# flip=True, +# transforms=[ +# dict(type='Resize', keep_ratio=True), +# dict(type='RandomFlip', 
flip_ratio=0.5), +# dict(type='Normalize', **img_norm_cfg), +# dict(type='Pad', size_divisor=32), +# dict(type='ImageToTensor', keys=['img']), +# dict(type='Collect', keys=['img']), +# ]) +# ] +# data = dict( +# val=dict(pipeline=test_pipeline), +# test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_r50_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_r50_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..3b121a6a2836ac7626f7b383ada9508f8b9d972d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_r50_fpn_20e_coco.py @@ -0,0 +1,4 @@ +_base_ = './scnet_r50_fpn_1x_coco.py' +# learning policy +lr_config = dict(step=[16, 19]) +runner = dict(type='EpochBasedRunner', max_epochs=20) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_x101_64x4d_fpn_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_x101_64x4d_fpn_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..1e54b030fa68f76f22edf66e3594d66a13c2c672 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_x101_64x4d_fpn_20e_coco.py @@ -0,0 +1,15 @@ +_base_ = './scnet_r50_fpn_20e_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_x101_64x4d_fpn_8x1_20e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_x101_64x4d_fpn_8x1_20e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9f3ce6d14e6b3474d78c8de3f3565b0029dc067e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/scnet/scnet_x101_64x4d_fpn_8x1_20e_coco.py @@ -0,0 +1,3 @@ +_base_ = './scnet_x101_64x4d_fpn_20e_coco.py' +data = dict(samples_per_gpu=1, workers_per_gpu=1) +optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/scratch/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/scratch/README.md new file mode 100644 index 0000000000000000000000000000000000000000..6b304471a7a2613d163fb980610de0306fa7374d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/scratch/README.md @@ -0,0 +1,25 @@ +# Rethinking ImageNet Pre-training + +## Introduction + + + +```latex +@article{he2018rethinking, + title={Rethinking imagenet pre-training}, + author={He, Kaiming and Girshick, Ross and Doll{\'a}r, Piotr}, + journal={arXiv preprint arXiv:1811.08883}, + year={2018} +} +``` + +## Results and Models + +| Model | Backbone | Style | Lr schd | box AP | mask AP | Config | Download | +|:------------:|:---------:|:-------:|:-------:|:------:|:-------:|:------:|:--------:| +| Faster R-CNN | R-50-FPN | pytorch | 6x | 40.7 | | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_faster_rcnn_r50_fpn_gn_6x_bbox_mAP-0.407_20200201_193013-90813d01.pth) | 
[log](https://download.openmmlab.com/mmdetection/v2.0/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_faster_rcnn_r50_fpn_gn_6x_20200201_193013.log.json) | +| Mask R-CNN | R-50-FPN | pytorch | 6x | 41.2 | 37.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_mask_rcnn_r50_fpn_gn_6x_bbox_mAP-0.412__segm_mAP-0.374_20200201_193051-1e190a40.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_mask_rcnn_r50_fpn_gn_6x_20200201_193051.log.json) | + +Note: + +- The above models are trained with 16 GPUs. diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..55aa3a6e73b9e56fb1d285272b3011fad8e9e11f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py @@ -0,0 +1,24 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_fpn.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + frozen_stages=-1, + zero_init_residual=False, + norm_cfg=norm_cfg, + init_cfg=None), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg))) +# optimizer +optimizer = dict(paramwise_cfg=dict(norm_decay_mult=0)) +optimizer_config = dict(_delete_=True, grad_clip=None) +# learning policy +lr_config = dict(warmup_ratio=0.1, step=[65, 71]) +runner = dict(type='EpochBasedRunner', max_epochs=73) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..cc52cb8f7618f57f280f4e5d640f99839bf66278 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py @@ -0,0 +1,25 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +norm_cfg = dict(type='GN', num_groups=32, requires_grad=True) +model = dict( + backbone=dict( + frozen_stages=-1, + zero_init_residual=False, + norm_cfg=norm_cfg, + init_cfg=None), + neck=dict(norm_cfg=norm_cfg), + roi_head=dict( + bbox_head=dict( + type='Shared4Conv1FCBBoxHead', + conv_out_channels=256, + norm_cfg=norm_cfg), + mask_head=dict(norm_cfg=norm_cfg))) +# optimizer +optimizer = dict(paramwise_cfg=dict(norm_decay_mult=0)) +optimizer_config = dict(_delete_=True, grad_clip=None) +# learning policy +lr_config = dict(warmup_ratio=0.1, step=[65, 71]) +runner = dict(type='EpochBasedRunner', max_epochs=73) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/scratch/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/scratch/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..cbcfa4359978422ed55373d6e26a19e68583b5ed --- 
/dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/scratch/metafile.yml
@@ -0,0 +1,43 @@
+Collections:
+  - Name: Rethinking ImageNet Pre-training
+    Metadata:
+      Training Data: COCO
+      Training Techniques:
+        - SGD with Momentum
+        - Weight Decay
+      Training Resources: 8x V100 GPUs
+      Architecture:
+        - FPN
+        - RPN
+        - ResNet
+    Paper: https://arxiv.org/abs/1811.08883
+    README: configs/scratch/README.md
+
+Models:
+  - Name: faster_rcnn_r50_fpn_gn-all_scratch_6x_coco
+    In Collection: Rethinking ImageNet Pre-training
+    Config: configs/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco.py
+    Metadata:
+      Epochs: 72
+    Results:
+      - Task: Object Detection
+        Dataset: COCO
+        Metrics:
+          box AP: 40.7
+    Weights: https://download.openmmlab.com/mmdetection/v2.0/scratch/faster_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_faster_rcnn_r50_fpn_gn_6x_bbox_mAP-0.407_20200201_193013-90813d01.pth
+
+  - Name: mask_rcnn_r50_fpn_gn-all_scratch_6x_coco
+    In Collection: Rethinking ImageNet Pre-training
+    Config: configs/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco.py
+    Metadata:
+      Epochs: 72
+    Results:
+      - Task: Object Detection
+        Dataset: COCO
+        Metrics:
+          box AP: 41.2
+      - Task: Instance Segmentation
+        Dataset: COCO
+        Metrics:
+          mask AP: 37.4
+    Weights: https://download.openmmlab.com/mmdetection/v2.0/scratch/mask_rcnn_r50_fpn_gn-all_scratch_6x_coco/scratch_mask_rcnn_r50_fpn_gn_6x_bbox_mAP-0.412__segm_mAP-0.374_20200201_193051-1e190a40.pth
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..47cbff82e9171c4093ccbca2dab39400cd8364a3
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/README.md
@@ -0,0 +1,39 @@
+# Seesaw Loss for Long-Tailed Instance Segmentation (CVPR 2021)
+
+## Introduction
+
+
+
+We provide config files to reproduce the instance segmentation performance in the CVPR 2021 paper for [Seesaw Loss for Long-Tailed Instance Segmentation](https://arxiv.org/abs/2008.10032).
+
+```latex
+@inproceedings{wang2021seesaw,
+  title={Seesaw Loss for Long-Tailed Instance Segmentation},
+  author={Jiaqi Wang and Wenwei Zhang and Yuhang Zang and Yuhang Cao and Jiangmiao Pang and Tao Gong and Kai Chen and Ziwei Liu and Chen Change Loy and Dahua Lin},
+  booktitle={Proceedings of the {IEEE} Conference on Computer Vision and Pattern Recognition},
+  year={2021}
+}
+```
+
+
+* Please set up the [LVIS dataset](../lvis/README.md) for MMDetection.
+
+* RFS (repeat factor sampling) indicates the use of the oversampling strategy described [here](../../docs/tutorials/customize_dataset.md#class-balanced-dataset) with an oversample threshold of `1e-3`; see the config sketch below.
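+
+The RFS wrapper referenced above corresponds to MMDetection's `ClassBalancedDataset`. The fragment below is a hypothetical standalone sketch of that wrapping in config syntax; the annotation paths are placeholders and the train pipeline is elided, with the authoritative settings living in the `*_sample1e-3_*` configs of this folder:
+
+```python
+# Hypothetical fragment: the real configs inherit this wrapping from the
+# LVIS v1 base config rather than spelling it out.
+data = dict(
+    train=dict(
+        _delete_=True,
+        type='ClassBalancedDataset',
+        oversample_thr=1e-3,  # the oversample threshold quoted above
+        dataset=dict(
+            type='LVISV1Dataset',
+            ann_file='data/lvis_v1/annotations/lvis_v1_train.json',  # placeholder
+            img_prefix='data/lvis_v1/')))
+```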
+
+## Results and models of Seesaw Loss on LVIS v1 dataset
+
+
+| Method | Backbone | Style | Lr schd | Data Sampler | Norm Mask | box AP | mask AP | Config | Download |
+| :----------------: | :-------: | :-----: | :-----: | :----------: | :-------: | :----: | :-----: | :------: | :------: |
+| Mask R-CNN | R-50-FPN | pytorch | 2x | random | N | 25.6 | 25.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1-a698dd3d.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.log.json) |
+| Mask R-CNN | R-50-FPN | pytorch | 2x | random | Y | 25.6 | 25.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-a1c11314.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.log.json) |
+| Mask R-CNN | R-101-FPN | pytorch | 2x | random | N | 27.4 | 26.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1-8e6e6dd5.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.log.json) |
+| Mask R-CNN | R-101-FPN | pytorch | 2x | random | Y | 27.2 | 27.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-a0b59c42.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.log.json) |
+| Mask R-CNN | R-50-FPN | pytorch | 2x | RFS | N | 27.6 | 26.4 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1-392a804b.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.log.json) |
+| Mask R-CNN | R-50-FPN | pytorch | 2x | RFS | Y | 27.6 | 26.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py) |
[model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-cd0f6a12.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.log.json) | +| Mask R-CNN | R-101-FPN | pytorch | 2x | RFS | N | 28.9 | 27.6 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1-e68eb464.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.log.json) | +| Mask R-CNN | R-101-FPN | pytorch | 2x | RFS | Y | 28.9 | 28.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-1d817139.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.log.json) | +| Cascade Mask R-CNN | R-101-FPN | pytorch | 2x | random | N | 33.1 | 29.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1-71e2215e.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.log.json) | +| Cascade Mask R-CNN | R-101-FPN | pytorch | 2x | random | Y | 33.0 | 30.0 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-8b5a6745.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.log.json) | +| Cascade Mask R-CNN | R-101-FPN | pytorch | 2x | RFS | N | 30.0 | 29.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1-5d8ca2a4.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.log.json) | +| Cascade Mask R-CNN | R-101-FPN | pytorch | 2x | RFS | Y | 32.8 | 30.1 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1-c8551505.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.log.json) | diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..beeb0d1e5cd221c822641a1f64a4f27ad0cf25e5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py @@ -0,0 +1,132 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + roi_head=dict( + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=1203, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=1203, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=1203, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_head=dict(num_classes=1203)), + test_cfg=dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +dataset_type = 'LVISV1Dataset' +data_root = 
'data/lvis_v1/' +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_train.json', + img_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root, + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root, + pipeline=test_pipeline)) +evaluation = dict(interval=24, metric=['bbox', 'segm']) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..0f299484940db4ee1a1edd55006e2e145d99af2b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py @@ -0,0 +1,5 @@ +_base_ = './cascade_mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py' # noqa: E501 +model = dict( + roi_head=dict( + mask_head=dict( + predictor_cfg=dict(type='NormedConv2d', tempearture=20)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..bb88750fe5f4d8cefb81222d65de6ce8e4c7dcc9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py @@ -0,0 +1,98 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_r50_fpn.py', + '../_base_/datasets/lvis_v1_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101')), + roi_head=dict( + bbox_head=[ + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=1203, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=True, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=1203, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=True, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0)), + dict( + type='Shared2FCBBoxHead', + in_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=1203, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=True, + 
cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)) + ], + mask_head=dict(num_classes=1203)), + test_cfg=dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(dataset=dict(pipeline=train_pipeline))) +evaluation = dict(interval=24, metric=['bbox', 'segm']) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..262e76bdd5e26091670f33534b43172e0664d3ba --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py @@ -0,0 +1,5 @@ +_base_ = './cascade_mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py' # noqa: E501 +model = dict( + roi_head=dict( + mask_head=dict( + predictor_cfg=dict(type='NormedConv2d', tempearture=20)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..57deab10863a0d375e4393e051abad96545c73d7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..a539929252c0b760a13a208883b867d085ba8821 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r101_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py' # noqa: E501 +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..1f5065e799a90e1458da2db737bd496d9dc11349 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..13d0b5f2304fdc8af9d65cdb591c5dc6ee035097 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r101_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py @@ -0,0 +1,6 @@ +_base_ = './mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py' # noqa: E501 +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..743f5f2617d01639cbcf855abb59e9cd94ed3c8a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py @@ -0,0 +1,75 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict( + num_classes=1203, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0)), + mask_head=dict(num_classes=1203)), + test_cfg=dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', 
keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +dataset_type = 'LVISV1Dataset' +data_root = 'data/lvis_v1/' +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_train.json', + img_prefix=data_root, + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root, + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/lvis_v1_val.json', + img_prefix=data_root, + pipeline=test_pipeline)) +evaluation = dict(interval=24, metric=['bbox', 'segm']) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..0af89210777d31e1ebf8c2852669fd397ab8c8bc --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py @@ -0,0 +1,5 @@ +_base_ = './mask_rcnn_r50_fpn_random_seesaw_loss_mstrain_2x_lvis_v1.py' +model = dict( + roi_head=dict( + mask_head=dict( + predictor_cfg=dict(type='NormedConv2d', tempearture=20)))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py new file mode 100644 index 0000000000000000000000000000000000000000..4fc15049c6c6184506095483c1c16aabc5e55328 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py @@ -0,0 +1,41 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/lvis_v1_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +model = dict( + roi_head=dict( + bbox_head=dict( + num_classes=1203, + cls_predictor_cfg=dict(type='NormedLinear', tempearture=20), + loss_cls=dict( + type='SeesawLoss', + p=0.8, + q=2.0, + num_classes=1203, + loss_weight=1.0)), + mask_head=dict(num_classes=1203)), + test_cfg=dict( + rcnn=dict( + score_thr=0.0001, + # LVIS allows up to 300 + max_per_img=300))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(dataset=dict(pipeline=train_pipeline))) +evaluation = dict(interval=12, metric=['bbox', 'segm']) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py new file mode 100644 
index 0000000000000000000000000000000000000000..0ef6bd2ce4301287cba1b48d89efbbcccecfe3bc --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/seesaw_loss/mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py @@ -0,0 +1,5 @@
+_base_ = './mask_rcnn_r50_fpn_sample1e-3_seesaw_loss_mstrain_2x_lvis_v1.py'
+model = dict(
+    roi_head=dict(
+        mask_head=dict(
+            predictor_cfg=dict(type='NormedConv2d', tempearture=20))))
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/README.md new file mode 100644 index 0000000000000000000000000000000000000000..ad5c69f3d2013106d3bf9fb7a1237428ecc625ec --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/README.md @@ -0,0 +1,95 @@
+# Backbones Trained by Self-Supervised Algorithms
+
+## Introduction
+
+We support applying backbone models pre-trained by different self-supervised methods in detection systems, and we provide their results on Mask R-CNN.
+
+The pre-trained models are converted from [MoCo](https://github.com/facebookresearch/moco) and downloaded from [SwAV](https://github.com/facebookresearch/swav).
+
+For SwAV, please cite
+
+```latex
+@inproceedings{caron2020unsupervised,
+  title={Unsupervised Learning of Visual Features by Contrasting Cluster Assignments},
+  author={Caron, Mathilde and Misra, Ishan and Mairal, Julien and Goyal, Priya and Bojanowski, Piotr and Joulin, Armand},
+  booktitle={Proceedings of Advances in Neural Information Processing Systems (NeurIPS)},
+  year={2020}
+}
+```
+
+For MoCo, please cite
+
+```latex
+@Article{he2019moco,
+  author  = {Kaiming He and Haoqi Fan and Yuxin Wu and Saining Xie and Ross Girshick},
+  title   = {Momentum Contrast for Unsupervised Visual Representation Learning},
+  journal = {arXiv preprint arXiv:1911.05722},
+  year    = {2019},
+}
+@Article{chen2020mocov2,
+  author  = {Xinlei Chen and Haoqi Fan and Ross Girshick and Kaiming He},
+  title   = {Improved Baselines with Momentum Contrastive Learning},
+  journal = {arXiv preprint arXiv:2003.04297},
+  year    = {2020},
+}
+```
+
+## Usage
+
+To use a self-supervised pretrained backbone, there are two steps:
+
+1. Download the model and convert it to a PyTorch-style checkpoint supported by MMDetection
+2. Modify the config and change the training settings accordingly
+
+### Convert model
+
+For more general usage, we also provide the script `selfsup2mmdet.py` in the tools directory to convert the keys of models pretrained by different self-supervised methods to the PyTorch-style checkpoints used in MMDetection.
+
+```bash
+python -u tools/model_converters/selfsup2mmdet.py ${PRETRAIN_PATH} ${STORE_PATH} --selfsup ${method}
+```
+
+This script converts the model at `PRETRAIN_PATH` and stores the converted model in `STORE_PATH`.
+
+For example, to use a ResNet-50 backbone released by MoCo, you can download it from [here](https://dl.fbaipublicfiles.com/moco/moco_checkpoints/moco_v2_800ep/moco_v2_800ep_pretrain.pth.tar) and use the following command:
+
+```bash
+python -u tools/model_converters/selfsup2mmdet.py ./moco_v2_800ep_pretrain.pth.tar mocov2_r50_800ep_pretrain.pth --selfsup moco
+```
+
+To use the ResNet-50 backbone released by SwAV, you can download it from [here](https://dl.fbaipublicfiles.com/deepcluster/swav_800ep_pretrain.pth.tar) and convert it in the same way with `--selfsup swav`.
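+For intuition, the conversion essentially renames checkpoint keys. The following is a simplified sketch of ours, not the upstream script; it assumes MoCo v2 checkpoints keep the query encoder under the `module.encoder_q.` prefix:
+
+```python
+# Illustrative only -- the real logic lives in
+# tools/model_converters/selfsup2mmdet.py.
+import torch
+
+def moco_to_mmdet(src_path, dst_path):
+    checkpoint = torch.load(src_path, map_location='cpu')
+    backbone_state = {}
+    for key, value in checkpoint['state_dict'].items():
+        # keep the query-encoder backbone, drop the MLP projection head
+        if key.startswith('module.encoder_q.') and '.fc.' not in key:
+            backbone_state[key[len('module.encoder_q.'):]] = value
+    torch.save({'state_dict': backbone_state}, dst_path)
+
+moco_to_mmdet('moco_v2_800ep_pretrain.pth.tar',
+              'mocov2_r50_800ep_pretrain.pth')
+```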
+### Modify config
+
+The backbone requires SyncBN, and `frozen_stages` needs to be changed. A config that uses the MoCo backbone looks like this (matching `mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco.py` in this directory):
+
+```python
+_base_ = [
+    '../_base_/models/mask_rcnn_r50_fpn.py',
+    '../_base_/datasets/coco_instance.py',
+    '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
+]
+
+model = dict(
+    backbone=dict(
+        frozen_stages=0,
+        norm_cfg=dict(type='SyncBN', requires_grad=True),
+        norm_eval=False,
+        init_cfg=dict(
+            type='Pretrained', checkpoint='./mocov2_r50_800ep_pretrain.pth')))
+```
+
+## Results
+
+| Method | Backbone | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | mask AP | Config | Download |
+| :----: | :------: | :---: | :-----: | :------: | :------------: | :----: | :-----: | :----: | :------: |
+| Mask R-CNN | [R50 by MoCo v2](./mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco.py) | pytorch | 1x | | | 38.0 | 34.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco_20210604_114614-a8b63483.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco_20210604_114614.log.json) |
+| Mask R-CNN | [R50 by MoCo v2](./mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco.py) | pytorch | multi-scale 2x | | | 40.8 | 36.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco_20210605_163717-d95df20a.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco_20210605_163717.log.json) |
+| Mask R-CNN | [R50 by SwAV](./mask_rcnn_r50_fpn_swav-pretrain_1x_coco.py) | pytorch | 1x | | | 39.1 | 35.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_1x_coco/mask_rcnn_r50_fpn_swav-pretrain_1x_coco_20210604_114640-7b9baf28.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_1x_coco/mask_rcnn_r50_fpn_swav-pretrain_1x_coco_20210604_114640.log.json) |
+| Mask R-CNN | [R50 by SwAV](./mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco.py) | pytorch | multi-scale 2x | | | 41.3 | 37.3 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco_20210605_163717-08e26fca.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco_20210605_163717.log.json) |
+
+### Notice
+
+1. We only provide single-scale 1x and multi-scale 2x configs as examples to show how to use backbones trained by self-supervised algorithms. We will try to reproduce the results from the corresponding papers with the released backbones in the future. Please stay tuned.
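+2. Before training, it can be worth sanity-checking the converted checkpoint; a quick check of ours, assuming the filename produced by the conversion example above:
+
+```python
+# After conversion, the checkpoint should expose plain torchvision-style
+# ResNet parameter names with the self-supervised prefixes stripped.
+import torch
+
+state = torch.load('mocov2_r50_800ep_pretrain.pth', map_location='cpu')
+print(sorted(state['state_dict'])[:5])
+# expected names look like 'conv1.weight', 'layer1.0.conv1.weight', ...
+```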
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f1e061524e656409e37d3ae80b290c368a47d6a6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + frozen_stages=0, + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + init_cfg=dict( + type='Pretrained', checkpoint='./mocov2_r50_800ep_pretrain.pth'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..09aa15608decb610a2f0b1181e50cbe1b8c6387a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/mask_rcnn_r50_fpn_mocov2-pretrain_ms-2x_coco.py @@ -0,0 +1,32 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + frozen_stages=0, + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + init_cfg=dict( + type='Pretrained', checkpoint='./mocov2_r50_800ep_pretrain.pth'))) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']) +] + +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f92a3453dd1d5e8460a4279764845ce3e9c3ed81 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_1x_coco.py @@ -0,0 +1,13 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + frozen_stages=0, + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + init_cfg=dict( + type='Pretrained', checkpoint='./swav_800ep_pretrain.pth.tar'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..fe473613492b5388ceb50b1669317539360b8e2f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/selfsup_pretrain/mask_rcnn_r50_fpn_swav-pretrain_ms-2x_coco.py @@ -0,0 +1,32 @@ +_base_ = [ + '../_base_/models/mask_rcnn_r50_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + frozen_stages=0, + norm_cfg=dict(type='SyncBN', requires_grad=True), + norm_eval=False, + init_cfg=dict( + type='Pretrained', checkpoint='./swav_800ep_pretrain.pth.tar'))) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 800)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']) +] + +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/README.md new file mode 100644 index 0000000000000000000000000000000000000000..bd5f1571fd4b9ff8ea83bb567919e8de96975e91 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/README.md @@ -0,0 +1,28 @@ +# Sparse R-CNN: End-to-End Object Detection with Learnable Proposals + +## Introduction + + + +``` +@article{peize2020sparse, + title = {{SparseR-CNN}: End-to-End Object Detection with Learnable Proposals}, + author = {Peize Sun and Rufeng Zhang and Yi Jiang and Tao Kong and Chenfeng Xu and Wei Zhan and Masayoshi Tomizuka and Lei Li and Zehuan Yuan and Changhu Wang and Ping Luo}, + journal = {arXiv preprint arXiv:2011.12450}, + year = {2020} +} +``` + +## Results and Models + +| Model | Backbone | Style | Lr schd | Number of Proposals |Multi-Scale| RandomCrop | box AP | Config | Download | +|:------------:|:---------:|:-------:|:-------:|:-------: |:-------: |:---------:|:------:|:------:|:--------:| +| Sparse R-CNN | R-50-FPN | pytorch | 1x | 100 | False | False | 37.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco/sparse_rcnn_r50_fpn_1x_coco_20201222_214453-dc79b137.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco/sparse_rcnn_r50_fpn_1x_coco_20201222_214453-dc79b137.log.json) | +| Sparse R-CNN | R-50-FPN | pytorch | 3x | 100 | True | False | 42.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco_20201218_154234-7bc5c054.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco_20201218_154234-7bc5c054.log.json) | +| Sparse R-CNN | R-50-FPN | pytorch | 3x | 300 | True | True | 45.0 | 
[config](https://github.com/open-mmlab/mmdetection/tree/master/configs/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20201223_024605-9fe92701.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20201223_024605-9fe92701.log.json) |
+| Sparse R-CNN | R-101-FPN | pytorch | 3x | 100 | True | False | 44.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco_20201223_121552-6c46c9d6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco_20201223_121552-6c46c9d6.log.json) |
+| Sparse R-CNN | R-101-FPN | pytorch | 3x | 300 | True | True | 46.2 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20201223_023452-c23c3564.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20201223_023452-c23c3564.log.json) |
+
+### Notes
+
+We observe about 0.3 AP noise between runs, especially when using ResNet-101 as the backbone.
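+For intuition, the "Number of Proposals" column above counts a set of proposal boxes and features that are free parameters of the model rather than outputs of a region proposal network. A rough sketch of the idea, simplified by us from mmdet's `EmbeddingRPNHead` (names and shapes here are assumptions, not the exact implementation):
+
+```python
+import torch.nn as nn
+
+class LearnableProposals(nn.Module):
+    """Sketch of Sparse R-CNN's learnable proposals."""
+
+    def __init__(self, num_proposals=100, proposal_feature_channel=256):
+        super().__init__()
+        # normalized (cx, cy, w, h) boxes, learned end-to-end with the detector
+        self.init_proposal_boxes = nn.Embedding(num_proposals, 4)
+        self.init_proposal_features = nn.Embedding(num_proposals,
+                                                   proposal_feature_channel)
+
+    def forward(self, batch_size):
+        # every image starts from the same learned proposals; the iterative
+        # DII heads then refine them per image
+        boxes = self.init_proposal_boxes.weight.unsqueeze(0)
+        feats = self.init_proposal_features.weight.unsqueeze(0)
+        return (boxes.expand(batch_size, -1, -1),
+                feats.expand(batch_size, -1, -1))
+
+boxes, feats = LearnableProposals()(batch_size=2)
+print(boxes.shape, feats.shape)  # [2, 100, 4] and [2, 100, 256]
+```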
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..53280cc3db4a32c8cb87481bbf093330c1636557 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/metafile.yml @@ -0,0 +1,75 @@ +Collections: + - Name: Sparse R-CNN + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + - Sparse R-CNN + Paper: https://arxiv.org/abs/2011.12450 + README: configs/sparse_rcnn/README.md + +Models: + - Name: sparse_rcnn_r50_fpn_1x_coco + In Collection: Sparse R-CNN + Config: configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco/sparse_rcnn_r50_fpn_1x_coco_20201222_214453-dc79b137.pth + + - Name: sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco + In Collection: Sparse R-CNN + Config: configs/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 42.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco_20201218_154234-7bc5c054.pth + + - Name: sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco + In Collection: Sparse R-CNN + Config: configs/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 45.0 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20201223_024605-9fe92701.pth + + - Name: sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco + In Collection: Sparse R-CNN + Config: configs/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco_20201223_121552-6c46c9d6.pth + + - Name: sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco + In Collection: Sparse R-CNN + Config: configs/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.2 + Weights: https://download.openmmlab.com/mmdetection/v2.0/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco_20201223_023452-c23c3564.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..de323bdfaad7a092373da57d8f5ce99441bd48cf --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r101_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py @@ -0,0 +1,7 @@ +_base_ = './sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ab4c5f68178a55d89a74bfa2911d48befb8869f8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r101_fpn_mstrain_480-800_3x_coco.py @@ -0,0 +1,7 @@ +_base_ = './sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b383ee48598c9ae73c6f44dbb539cdfa6c052d80 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py @@ -0,0 +1,95 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +num_stages = 6 +num_proposals = 100 +model = dict( + type='SparseRCNN', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=0, + add_extra_convs='on_input', + num_outs=4), + rpn_head=dict( + type='EmbeddingRPNHead', + num_proposals=num_proposals, + proposal_feature_channel=256), + roi_head=dict( + type='SparseRoIHead', + num_stages=num_stages, + stage_loss_weights=[1] * num_stages, + proposal_feature_channel=256, + bbox_roi_extractor=dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2), + out_channels=256, + featmap_strides=[4, 8, 16, 32]), + bbox_head=[ + dict( + type='DIIHead', + num_classes=80, + num_ffn_fcs=2, + num_heads=8, + num_cls_fcs=1, + num_reg_fcs=3, + feedforward_channels=2048, + in_channels=256, + dropout=0.0, + ffn_act_cfg=dict(type='ReLU', inplace=True), + dynamic_conv_cfg=dict( + type='DynamicConv', + in_channels=256, + feat_channels=64, + out_channels=256, + input_feat_shape=7, + act_cfg=dict(type='ReLU', inplace=True), + norm_cfg=dict(type='LN')), + loss_bbox=dict(type='L1Loss', loss_weight=5.0), + loss_iou=dict(type='GIoULoss', loss_weight=2.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=2.0), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + clip_border=False, + target_means=[0., 0., 0., 0.], + target_stds=[0.5, 0.5, 1., 1.])) for _ in range(num_stages) + ]), + # training and testing settings + train_cfg=dict( + rpn=None, + rcnn=[ + dict( + assigner=dict( + type='HungarianAssigner', + cls_cost=dict(type='FocalLossCost', weight=2.0), + reg_cost=dict(type='BBoxL1Cost', 
weight=5.0), + iou_cost=dict(type='IoUCost', iou_mode='giou', + weight=2.0)), + sampler=dict(type='PseudoSampler'), + pos_weight=1) for _ in range(num_stages) + ]), + test_cfg=dict(rpn=None, rcnn=dict(max_per_img=num_proposals))) + +# optimizer +optimizer = dict(_delete_=True, type='AdamW', lr=0.000025, weight_decay=0.0001) +optimizer_config = dict(_delete_=True, grad_clip=dict(max_norm=1, norm_type=2)) +# learning policy +lr_config = dict(policy='step', step=[8, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..36f1d62eba62bb9c3266864cd4250caedea95a21 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r50_fpn_300_proposals_crop_mstrain_480-800_3x_coco.py @@ -0,0 +1,52 @@ +_base_ = './sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py' +num_proposals = 300 +model = dict( + rpn_head=dict(num_proposals=num_proposals), + test_cfg=dict( + _delete_=True, rpn=None, rcnn=dict(max_per_img=num_proposals))) +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from DETR. +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict( + type='AutoAugment', + policies=[[ + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict( + type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict( + type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ]]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..2fa2a807190427c857ddbea8ed7efd9434e5ef0f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/sparse_rcnn/sparse_rcnn_r50_fpn_mstrain_480-800_3x_coco.py @@ -0,0 +1,23 @@ +_base_ = './sparse_rcnn_r50_fpn_1x_coco.py' + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +min_values = (480, 512, 544, 576, 608, 640, 672, 704, 736, 768, 800) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, value) for value in min_values], + multiscale_mode='value', + keep_ratio=True), + 
dict(type='RandomFlip', flip_ratio=0.5),
+    dict(type='Normalize', **img_norm_cfg),
+    dict(type='Pad', size_divisor=32),
+    dict(type='DefaultFormatBundle'),
+    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels'])
+]
+
+data = dict(train=dict(pipeline=train_pipeline))
+lr_config = dict(policy='step', step=[27, 33])
+runner = dict(type='EpochBasedRunner', max_epochs=36)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ssd/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/ssd/README.md new file mode 100644 index 0000000000000000000000000000000000000000..61e391ac5c0723ba97dcf6576b597a3260e7980b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ssd/README.md @@ -0,0 +1,35 @@
+# SSD: Single Shot MultiBox Detector
+
+## Introduction
+
+```latex
+@inproceedings{Liu_2016,
+  title={SSD: Single Shot MultiBox Detector},
+  booktitle={ECCV},
+  author={Liu, Wei and Anguelov, Dragomir and Erhan, Dumitru and Szegedy, Christian and Reed, Scott and Fu, Cheng-Yang and Berg, Alexander C.},
+  year={2016},
+}
+```
+
+## Results and models
+
+| Backbone | Size | Style | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download |
+| :------: | :---: | :---: | :-----: | :------: | :------------: | :----: | :------: | :--------: |
+| VGG16 | 300 | caffe | 120e | 9.9 | 43.7 | 25.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ssd/ssd300_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ssd/ssd300_coco/ssd300_coco_20210604_193052-b61137df.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ssd/ssd300_coco/ssd300_coco_20210604_193052.log.json) |
+| VGG16 | 512 | caffe | 120e | 19.4 | 30.7 | 29.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/ssd/ssd512_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/ssd/ssd512_coco/ssd512_coco_20210604_111835-d3eba047.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/ssd/ssd512_coco/ssd512_coco_20210604_111835.log.json) |
+
+## Notice
+
+In v2.14.0, [PR5291](https://github.com/open-mmlab/mmdetection/pull/5291) refactored the SSD neck and head for more flexible usage. If users want to use SSD checkpoints trained with older versions, we provide a script `tools/model_converters/upgrade_ssd_version.py` to convert the model weights.
+
+```bash
+python tools/model_converters/upgrade_ssd_version.py ${OLD_MODEL_PATH} ${NEW_MODEL_PATH}
+```
+
+- OLD_MODEL_PATH: the path of the old-version SSD model to load.
+- NEW_MODEL_PATH: the path to save the converted model weights.
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ssd/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/ssd/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..bb93173c8d8d7b3fb33fc0b69eab9a259e42782a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ssd/metafile.yml @@ -0,0 +1,51 @@ +Collections: + - Name: SSD + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - VGG + Paper: https://arxiv.org/abs/1512.02325 + README: configs/ssd/README.md + +Models: + - Name: ssd300_coco + In Collection: SSD + Config: configs/ssd/ssd300_coco.py + Metadata: + Training Memory (GB): 9.9 + inference time (ms/im): + - value: 22.88 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (300, 300) + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 25.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ssd/ssd300_coco/ssd300_coco_20210604_193052-b61137df.pth + + - Name: ssd512_coco + In Collection: SSD + Config: configs/ssd/ssd512_coco.py + Metadata: + Training Memory (GB): 19.4 + inference time (ms/im): + - value: 32.57 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (512, 512) + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 29.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/ssd/ssd512_coco/ssd512_coco_20210604_111835-d3eba047.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ssd/ssd300_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ssd/ssd300_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..75c5e4e5b81a320a7e6bd7bc31e7d5cf49a0b92d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ssd/ssd300_coco.py @@ -0,0 +1,62 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_2x.py', '../_base_/default_runtime.py' +] +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(300, 300), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(300, 300), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=3, + train=dict( + _delete_=True, + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), 
+ val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=2e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict(_delete_=True) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/ssd/ssd512_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/ssd/ssd512_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0bbbd3f6239b6444122d1f35a2d5aedfb4792f76 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/ssd/ssd512_coco.py @@ -0,0 +1,75 @@ +_base_ = 'ssd300_coco.py' +input_size = 512 +model = dict( + neck=dict( + out_channels=(512, 1024, 512, 256, 256, 256, 256), + level_strides=(2, 2, 2, 2, 1), + level_paddings=(1, 1, 1, 1, 1), + last_kernel_size=4), + bbox_head=dict( + in_channels=(512, 1024, 512, 256, 256, 256, 256), + anchor_generator=dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=input_size, + basesize_ratio_range=(0.1, 0.9), + strides=[8, 16, 32, 64, 128, 256, 512], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2, 3], [2], [2]]))) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(512, 512), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(512, 512), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=3, + train=dict( + _delete_=True, + type='RepeatDataset', + times=5, + dataset=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline)), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=2e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict(_delete_=True) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/swin/cascade_mask_rcnn_swin_base_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/swin/cascade_mask_rcnn_swin_base_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e3e44ee6121f3c8b5f83f263303bbcd4370eea71 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/swin/cascade_mask_rcnn_swin_base_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_3x_coco.py @@ -0,0 +1,140 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_swin_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' 
+] + +model = dict( + backbone=dict( + embed_dim=128, + depths=[2, 2, 18, 2], + num_heads=[4, 8, 16, 32], + window_size=7, + ape=False, + drop_path_rate=0.3, + patch_norm=True, + use_checkpoint=False + ), + neck=dict(in_channels=[128, 256, 512, 1024]), + roi_head=dict( + bbox_head=[ + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)), + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)), + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)) + ])) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from DETR / Sparse RCNN +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='AutoAugment', + policies=[ + [ + dict(type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict(type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict(type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ] + ]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) + +optimizer = dict(_delete_=True, type='AdamW', lr=0.0001, betas=(0.9, 0.999), weight_decay=0.05, + paramwise_cfg=dict(custom_keys={'absolute_pos_embed': dict(decay_mult=0.), + 'relative_position_bias_table': dict(decay_mult=0.), + 'norm': dict(decay_mult=0.)})) +lr_config = dict(step=[27, 33]) 
+runner = dict(type='EpochBasedRunnerAmp', max_epochs=36) + +# do not use mmdet version fp16 +fp16 = None +optimizer_config = dict( + type="DistOptimizerHook", + update_interval=1, + grad_clip=None, + coalesce=True, + bucket_size_mb=-1, + use_fp16=True, +) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/swin/cascade_mask_rcnn_swin_small_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/swin/cascade_mask_rcnn_swin_small_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..816d206f5735c008cd6bca6e3cbf7a81fdd9b619 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/swin/cascade_mask_rcnn_swin_small_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_3x_coco.py @@ -0,0 +1,140 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_swin_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + embed_dim=96, + depths=[2, 2, 18, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + ape=False, + drop_path_rate=0.2, + patch_norm=True, + use_checkpoint=False + ), + neck=dict(in_channels=[96, 192, 384, 768]), + roi_head=dict( + bbox_head=[ + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)), + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)), + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)) + ])) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from DETR / Sparse RCNN +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='AutoAugment', + policies=[ + [ + dict(type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ 
+ dict(type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict(type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ] + ]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) + +optimizer = dict(_delete_=True, type='AdamW', lr=0.0001, betas=(0.9, 0.999), weight_decay=0.05, + paramwise_cfg=dict(custom_keys={'absolute_pos_embed': dict(decay_mult=0.), + 'relative_position_bias_table': dict(decay_mult=0.), + 'norm': dict(decay_mult=0.)})) +lr_config = dict(step=[27, 33]) +runner = dict(type='EpochBasedRunnerAmp', max_epochs=36) + +# do not use mmdet version fp16 +fp16 = None +optimizer_config = dict( + type="DistOptimizerHook", + update_interval=1, + grad_clip=None, + coalesce=True, + bucket_size_mb=-1, + use_fp16=True, +) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/swin/cascade_mask_rcnn_swin_tiny_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/swin/cascade_mask_rcnn_swin_tiny_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..36e3acd0a4b6ad08e5af3c7b9c639eff028431f7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/swin/cascade_mask_rcnn_swin_tiny_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_1x_coco.py @@ -0,0 +1,140 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_swin_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + embed_dim=96, + depths=[2, 2, 6, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + ape=False, + drop_path_rate=0.0, + patch_norm=True, + use_checkpoint=False + ), + neck=dict(in_channels=[96, 192, 384, 768]), + roi_head=dict( + bbox_head=[ + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)), + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)), + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + 
fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)) + ])) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from DETR / Sparse RCNN +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='AutoAugment', + policies=[ + [ + dict(type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict(type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict(type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ] + ]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) + +optimizer = dict(_delete_=True, type='AdamW', lr=0.0001, betas=(0.9, 0.999), weight_decay=0.05, + paramwise_cfg=dict(custom_keys={'absolute_pos_embed': dict(decay_mult=0.), + 'relative_position_bias_table': dict(decay_mult=0.), + 'norm': dict(decay_mult=0.)})) +lr_config = dict(step=[8, 11]) +runner = dict(type='EpochBasedRunnerAmp', max_epochs=12) + +# do not use mmdet version fp16 +fp16 = None +optimizer_config = dict( + type="DistOptimizerHook", + update_interval=1, + grad_clip=None, + coalesce=True, + bucket_size_mb=-1, + use_fp16=True, +) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/swin/cascade_mask_rcnn_swin_tiny_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/swin/cascade_mask_rcnn_swin_tiny_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e01a9eff6197fb80e3a541910c9b94c00510323e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/swin/cascade_mask_rcnn_swin_tiny_patch4_window7_mstrain_480-800_giou_4conv1f_adamw_3x_coco.py @@ -0,0 +1,140 @@ +_base_ = [ + '../_base_/models/cascade_mask_rcnn_swin_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + embed_dim=96, + depths=[2, 2, 6, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + ape=False, + drop_path_rate=0.2, + patch_norm=True, + use_checkpoint=False + ), + neck=dict(in_channels=[96, 192, 384, 768]), + roi_head=dict( + bbox_head=[ + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + 
num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)), + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.05, 0.05, 0.1, 0.1]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)), + dict( + type='ConvFCBBoxHead', + num_shared_convs=4, + num_shared_fcs=1, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + roi_feat_size=7, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.033, 0.033, 0.067, 0.067]), + reg_class_agnostic=False, + reg_decoded_bbox=True, + norm_cfg=dict(type='SyncBN', requires_grad=True), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=10.0)) + ])) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from DETR / Sparse RCNN +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='AutoAugment', + policies=[ + [ + dict(type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict(type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict(type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ] + ]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) + +optimizer = dict(_delete_=True, type='AdamW', lr=0.0001, betas=(0.9, 0.999), weight_decay=0.05, + paramwise_cfg=dict(custom_keys={'absolute_pos_embed': dict(decay_mult=0.), + 'relative_position_bias_table': dict(decay_mult=0.), + 'norm': dict(decay_mult=0.)})) +lr_config = dict(step=[27, 33]) +runner = dict(type='EpochBasedRunnerAmp', max_epochs=36) + +# do not use mmdet version fp16 +fp16 = None +optimizer_config = dict( + type="DistOptimizerHook", + update_interval=1, + grad_clip=None, + coalesce=True, + bucket_size_mb=-1, + use_fp16=True, +) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/swin/mask_rcnn_swin_small_patch4_window7_mstrain_480-800_adamw_3x_coco.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/swin/mask_rcnn_swin_small_patch4_window7_mstrain_480-800_adamw_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ee15134ba3f0a0788cbf4eb69cf080d01e08ddab --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/swin/mask_rcnn_swin_small_patch4_window7_mstrain_480-800_adamw_3x_coco.py @@ -0,0 +1,80 @@ +_base_ = [ + '../_base_/models/mask_rcnn_swin_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + embed_dim=96, + depths=[2, 2, 18, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + ape=False, + drop_path_rate=0.2, + patch_norm=True, + use_checkpoint=False + ), + neck=dict(in_channels=[96, 192, 384, 768])) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from DETR / Sparse RCNN +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='AutoAugment', + policies=[ + [ + dict(type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict(type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict(type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ] + ]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) + +optimizer = dict(_delete_=True, type='AdamW', lr=0.0001, betas=(0.9, 0.999), weight_decay=0.05, + paramwise_cfg=dict(custom_keys={'absolute_pos_embed': dict(decay_mult=0.), + 'relative_position_bias_table': dict(decay_mult=0.), + 'norm': dict(decay_mult=0.)})) +lr_config = dict(step=[27, 33]) +runner = dict(type='EpochBasedRunnerAmp', max_epochs=36) + +# do not use mmdet version fp16 +fp16 = None +optimizer_config = dict( + type="DistOptimizerHook", + update_interval=1, + grad_clip=None, + coalesce=True, + bucket_size_mb=-1, + use_fp16=True, +) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/swin/mask_rcnn_swin_tiny_patch4_window7_mstrain_480-800_adamw_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/swin/mask_rcnn_swin_tiny_patch4_window7_mstrain_480-800_adamw_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..dd42cba7ca95c008218e966aca6becb2a2dabc8d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/swin/mask_rcnn_swin_tiny_patch4_window7_mstrain_480-800_adamw_1x_coco.py @@ -0,0 +1,80 @@ +_base_ = [ + '../_base_/models/mask_rcnn_swin_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + embed_dim=96, + depths=[2, 2, 6, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + ape=False, 
+ drop_path_rate=0.1, + patch_norm=True, + use_checkpoint=False + ), + neck=dict(in_channels=[96, 192, 384, 768])) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from DETR / Sparse RCNN +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='AutoAugment', + policies=[ + [ + dict(type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + keep_ratio=True) + ], + [ + dict(type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict(type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ] + ]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) + +optimizer = dict(_delete_=True, type='AdamW', lr=0.0001, betas=(0.9, 0.999), weight_decay=0.05, + paramwise_cfg=dict(custom_keys={'absolute_pos_embed': dict(decay_mult=0.), + 'relative_position_bias_table': dict(decay_mult=0.), + 'norm': dict(decay_mult=0.)})) +lr_config = dict(step=[8, 11]) +runner = dict(type='EpochBasedRunnerAmp', max_epochs=12) + +# do not use mmdet version fp16 +fp16 = None +optimizer_config = dict( + type="DistOptimizerHook", + update_interval=1, + grad_clip=None, + coalesce=True, + bucket_size_mb=-1, + use_fp16=True, +) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/swin/mask_rcnn_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/swin/mask_rcnn_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c58057747d7d922293b6838e6eb1e13aa520aa3a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/swin/mask_rcnn_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.py @@ -0,0 +1,80 @@ +_base_ = [ + '../_base_/models/mask_rcnn_swin_fpn.py', + '../_base_/datasets/coco_instance.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + backbone=dict( + embed_dim=96, + depths=[2, 2, 6, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + ape=False, + drop_path_rate=0.2, + patch_norm=True, + use_checkpoint=False + ), + neck=dict(in_channels=[96, 192, 384, 768])) + +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) + +# augmentation strategy originates from DETR / Sparse RCNN +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='AutoAugment', + policies=[ + [ + dict(type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), (576, 1333), + (608, 1333), (640, 1333), (672, 1333), (704, 1333), + (736, 1333), (768, 1333), (800, 1333)], + multiscale_mode='value', + 
keep_ratio=True) + ], + [ + dict(type='Resize', + img_scale=[(400, 1333), (500, 1333), (600, 1333)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomCrop', + crop_type='absolute_range', + crop_size=(384, 600), + allow_negative_crop=True), + dict(type='Resize', + img_scale=[(480, 1333), (512, 1333), (544, 1333), + (576, 1333), (608, 1333), (640, 1333), + (672, 1333), (704, 1333), (736, 1333), + (768, 1333), (800, 1333)], + multiscale_mode='value', + override=True, + keep_ratio=True) + ] + ]), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +data = dict(train=dict(pipeline=train_pipeline)) + +optimizer = dict(_delete_=True, type='AdamW', lr=0.0001, betas=(0.9, 0.999), weight_decay=0.05, + paramwise_cfg=dict(custom_keys={'absolute_pos_embed': dict(decay_mult=0.), + 'relative_position_bias_table': dict(decay_mult=0.), + 'norm': dict(decay_mult=0.)})) +lr_config = dict(step=[27, 33]) +runner = dict(type='EpochBasedRunnerAmp', max_epochs=36) + +# do not use mmdet version fp16 +fp16 = None +optimizer_config = dict( + type="DistOptimizerHook", + update_interval=1, + grad_clip=None, + coalesce=True, + bucket_size_mb=-1, + use_fp16=True, +) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b6263f271c7fcbd314a9fc6843a59816fe961be3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/README.md @@ -0,0 +1,28 @@ +# Scale-Aware Trident Networks for Object Detection + +## Introduction + + + +``` +@InProceedings{li2019scale, + title={Scale-Aware Trident Networks for Object Detection}, + author={Li, Yanghao and Chen, Yuntao and Wang, Naiyan and Zhang, Zhaoxiang}, + booktitle={The International Conference on Computer Vision (ICCV)}, + year={2019} +} +``` + +## Results and models + +We report the test results using only one branch for inference (see the config sketch below).
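+
+In the configs that follow, single-branch inference is selected via `test_branch_idx`. A minimal sketch of the relevant switches (a config fragment for illustration, not a complete config):
+
+```python
+model = dict(
+    # three parallel trident branches are built for training ...
+    backbone=dict(type='TridentResNet', num_branch=3, test_branch_idx=1),
+    # ... but only the branch with index 1 is used at inference
+    roi_head=dict(type='TridentRoIHead', num_branch=3, test_branch_idx=1))
+```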
+ +| Backbone | Style | mstrain | Lr schd | Mem (GB) | Inf time (fps) | box AP | Download | +| :-------------: | :-----: | :-----: | :-----: | :------: | :------------: | :----: | :------: | +| R-50 | caffe | N | 1x | | | 37.7 |[model](https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_1x_coco/tridentnet_r50_caffe_1x_coco_20201230_141838-2ec0b530.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_1x_coco/tridentnet_r50_caffe_1x_coco_20201230_141838.log.json) | +| R-50 | caffe | Y | 1x | | | 37.6 |[model](https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco/tridentnet_r50_caffe_mstrain_1x_coco_20201230_141839-6ce55ccb.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco/tridentnet_r50_caffe_mstrain_1x_coco_20201230_141839.log.json) | +| R-50 | caffe | Y | 3x | | | 40.3 |[model](https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco/tridentnet_r50_caffe_mstrain_3x_coco_20201130_100539-46d227ba.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco/tridentnet_r50_caffe_mstrain_3x_coco_20201130_100539.log.json) | + +**Note** + +Similar to [Detectron2](https://github.com/facebookresearch/detectron2/tree/master/projects/TridentNet), we haven't implemented the Scale-aware Training Scheme in section 4.2 of the paper. diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..871c0bc99c42e3e9934cf05dfded2e409be1b647 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/metafile.yml @@ -0,0 +1,50 @@ +Collections: + - Name: TridentNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - ResNet + - TridentNet Block + Paper: https://arxiv.org/abs/1901.01892 + README: configs/tridentnet/README.md + +Models: + - Name: tridentnet_r50_caffe_1x_coco + In Collection: TridentNet + Config: configs/tridentnet/tridentnet_r50_caffe_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_1x_coco/tridentnet_r50_caffe_1x_coco_20201230_141838-2ec0b530.pth + + - Name: tridentnet_r50_caffe_mstrain_1x_coco + In Collection: TridentNet + Config: configs/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.6 + Weights: https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco/tridentnet_r50_caffe_mstrain_1x_coco_20201230_141839-6ce55ccb.pth + + - Name: tridentnet_r50_caffe_mstrain_3x_coco + In Collection: TridentNet + Config: configs/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco.py + Metadata: + Epochs: 36 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 40.3 + Weights: https://download.openmmlab.com/mmdetection/v2.0/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco/tridentnet_r50_caffe_mstrain_3x_coco_20201130_100539-46d227ba.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/tridentnet_r50_caffe_1x_coco.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/tridentnet_r50_caffe_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d779f75f8395c9d25345b936029ffc1628b5d4cb --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/tridentnet_r50_caffe_1x_coco.py @@ -0,0 +1,55 @@ +_base_ = [ + '../_base_/models/faster_rcnn_r50_caffe_c4.py', + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] + +model = dict( + type='TridentFasterRCNN', + backbone=dict( + type='TridentResNet', + trident_dilations=(1, 2, 3), + num_branch=3, + test_branch_idx=1, + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron2/resnet50_caffe')), + roi_head=dict(type='TridentRoIHead', num_branch=3, test_branch_idx=1), + train_cfg=dict( + rpn_proposal=dict(max_per_img=500), + rcnn=dict( + sampler=dict(num=128, pos_fraction=0.5, + add_gt_as_proposals=False)))) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c73d9eaa96c7f88dd33eb55f21848db2421bea1e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/tridentnet_r50_caffe_mstrain_1x_coco.py @@ -0,0 +1,22 @@ +_base_ = 'tridentnet_r50_caffe_1x_coco.py' + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736), + (1333, 768), (1333, 800)], + multiscale_mode='value', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] + +data = dict(train=dict(pipeline=train_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0f402826d3a22714078d8c50ed6bd8959018e4e7 --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/tridentnet/tridentnet_r50_caffe_mstrain_3x_coco.py @@ -0,0 +1,4 @@ +_base_ = 'tridentnet_r50_caffe_mstrain_1x_coco.py' + +lr_config = dict(step=[28, 34]) +runner = dict(type='EpochBasedRunner', max_epochs=36) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/README.md new file mode 100644 index 0000000000000000000000000000000000000000..363f1b900498a140a5225f97dba5a8838f82b023 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/README.md @@ -0,0 +1,43 @@ +# VarifocalNet: An IoU-aware Dense Object Detector + +## Introduction + + + +**VarifocalNet (VFNet)** learns to predict the IoU-aware classification score which mixes the object presence confidence and localization accuracy together as the detection score for a bounding box. The learning is supervised by the proposed Varifocal Loss (VFL), based on a new star-shaped bounding box feature representation (the features at nine yellow sampling points). Given the new representation, the object localization accuracy is further improved by refining the initially regressed bounding box. The full paper is available at: [https://arxiv.org/abs/2008.13367](https://arxiv.org/abs/2008.13367). + +
+*Figure: Learning to Predict the IoU-aware Classification Score.*
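+
+The loss can be summarized in a short PyTorch sketch (our own minimal rendering of the Varifocal Loss as defined in the paper, not mmdetection's `VarifocalLoss` API; the function name and signature are illustrative):
+
+```python
+import torch
+import torch.nn.functional as F
+
+def varifocal_loss_sketch(logits, q, alpha=0.75, gamma=2.0):
+    # q is the IoU-aware classification target: the IoU between the
+    # predicted box and its ground-truth box for positives, 0 for negatives.
+    p = logits.sigmoid()
+    # Positives are weighted by the continuous target q itself; negatives
+    # get the focal-style down-weighting alpha * p**gamma.
+    weight = torch.where(q > 0, q, alpha * p.pow(gamma))
+    bce = F.binary_cross_entropy_with_logits(logits, q, reduction='none')
+    return (weight * bce).sum()
+```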
+ +## Citing VarifocalNet + +```latex +@article{zhang2020varifocalnet, + title={VarifocalNet: An IoU-aware Dense Object Detector}, + author={Zhang, Haoyang and Wang, Ying and Dayoub, Feras and S{\"u}nderhauf, Niko}, + journal={arXiv preprint arXiv:2008.13367}, + year={2020} +} +``` + +## Results and Models + +| Backbone | Style | DCN | MS train | Lr schd |Inf time (fps) | box AP (val) | box AP (test-dev) | Config | Download | +|:------------:|:---------:|:-------:|:--------:|:-------:|:-------------:|:------------:|:-----------------:|:------:|:--------:| +| R-50 | pytorch | N | N | 1x | - | 41.6 | 41.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r50_fpn_1x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_1x_coco/vfnet_r50_fpn_1x_coco_20201027-38db6f58.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_1x_coco/vfnet_r50_fpn_1x_coco.json)| +| R-50 | pytorch | N | Y | 2x | - | 44.5 | 44.8 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mstrain_2x_coco/vfnet_r50_fpn_mstrain_2x_coco_20201027-7cc75bd2.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mstrain_2x_coco/vfnet_r50_fpn_mstrain_2x_coco.json)| +| R-50 | pytorch | Y | Y | 2x | - | 47.8 | 48.0 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-6879c318.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.json)| +| R-101 | pytorch | N | N | 1x | - | 43.0 | 43.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r101_fpn_1x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_1x_coco/vfnet_r101_fpn_1x_coco_20201027pth-c831ece7.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_1x_coco/vfnet_r101_fpn_1x_coco.json)| +| R-101 | pytorch | N | Y | 2x | - | 46.2 | 46.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mstrain_2x_coco/vfnet_r101_fpn_mstrain_2x_coco_20201027pth-4a5d53f1.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mstrain_2x_coco/vfnet_r101_fpn_mstrain_2x_coco.json)| +| R-101 | pytorch | Y | Y | 2x | - | 49.0 | 49.2 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-7729adb5.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.json)| +| X-101-32x4d | pytorch | Y | Y | 2x | - | 49.7 | 50.0 | 
[config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-d300a6fc.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.json)| +| X-101-64x4d | pytorch | Y | Y | 2x | - | 50.4 | 50.8 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-b5f6da5e.pth) | [log](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.json)| + +**Notes:** + +- The MS-train scale range is 1333x[480:960] (`range` mode) and the inference scale is kept at 1333x800. +- DCN means using `DCNv2` in both the backbone and the head. +- Inference time will be updated soon. +- More results and pre-trained models can be found in [VarifocalNet-Github](https://github.com/hyz-xmaster/VarifocalNet). diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..dc6e78b15f3055c5b1b07f1f3cf491cf7d1f372a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/metafile.yml @@ -0,0 +1,111 @@ +Collections: + - Name: VFNet + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + - Varifocal Loss + Paper: https://arxiv.org/abs/2008.13367 + README: configs/vfnet/README.md + +Models: + - Name: vfnet_r50_fpn_1x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_r50_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 41.6 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_1x_coco/vfnet_r50_fpn_1x_coco_20201027-38db6f58.pth + + - Name: vfnet_r50_fpn_mstrain_2x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 44.8 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mstrain_2x_coco/vfnet_r50_fpn_mstrain_2x_coco_20201027-7cc75bd2.pth + + - Name: vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 48.0 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-6879c318.pth + + - Name: vfnet_r101_fpn_1x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_r101_fpn_1x_coco.py + Metadata: + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 43.6 + Weights:
https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_1x_coco/vfnet_r101_fpn_1x_coco_20201027pth-c831ece7.pth + + - Name: vfnet_r101_fpn_mstrain_2x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 46.7 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mstrain_2x_coco/vfnet_r101_fpn_mstrain_2x_coco_20201027pth-4a5d53f1.pth + + - Name: vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 49.2 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-7729adb5.pth + + - Name: vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 50.0 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-d300a6fc.pth + + - Name: vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco + In Collection: VFNet + Config: configs/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py + Metadata: + Epochs: 24 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 50.8 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco_20201027pth-b5f6da5e.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r101_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r101_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b296a07959e43517d792f36f356404a232fb0dc3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r101_fpn_1x_coco.py @@ -0,0 +1,6 @@ +_base_ = './vfnet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r101_fpn_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r101_fpn_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..27962f3a88d850edb38360b6988584f7438691b7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r101_fpn_2x_coco.py @@ -0,0 +1,8 @@ +_base_ = './vfnet_r50_fpn_1x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e438c247cf4c9c5b2b5aabffda535bec61d4a21e --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r101_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,15 @@ +_base_ = './vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNet', + depth=101, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..eae69a01e801ae0422cdb8f8e58fd02a1720fee9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r101_fpn_mstrain_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r2_101_fpn_mdconv_c3-c5_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r2_101_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..815a36e079111ee605c46d27bda9962dabdd6cdd --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r2_101_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,18 @@ +_base_ = './vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://res2net101_v1d_26w_4s'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r2_101_fpn_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r2_101_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..58022e0eeac5fba20b2360e0578aa9b9c781f287 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r2_101_fpn_mstrain_2x_coco.py @@ -0,0 +1,16 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='Res2Net', + depth=101, + scales=4, + base_width=26, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://res2net101_v1d_26w_4s'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r50_fpn_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r50_fpn_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7de64296cd78ce12a1d3df281bdffb8c393543be --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r50_fpn_1x_coco.py @@ -0,0 +1,107 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +# 
model settings +model = dict( + type='VFNet', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_output', # use P5 + num_outs=5, + relu_before_extra_convs=True), + bbox_head=dict( + type='VFNetHead', + num_classes=80, + in_channels=256, + stacked_convs=3, + feat_channels=256, + strides=[8, 16, 32, 64, 128], + center_sampling=False, + dcn_on_last_conv=False, + use_atss=True, + use_vfl=True, + loss_cls=dict( + type='VarifocalLoss', + use_sigmoid=True, + alpha=0.75, + gamma=2.0, + iou_weighted=True, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.5), + loss_bbox_refine=dict(type='GIoULoss', loss_weight=2.0)), + # training and testing settings + train_cfg=dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) + +# data setting +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=2, + workers_per_gpu=2, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) + +# optimizer +optimizer = dict( + lr=0.01, paramwise_cfg=dict(bias_lr_mult=2., bias_decay_mult=0.)) +optimizer_config = dict(grad_clip=None) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.1, + step=[8, 11]) +runner = dict(type='EpochBasedRunner', max_epochs=12) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..24d2093b8b537a365c3e07261921b120b422918c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,6 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True)), + bbox_head=dict(dcn_on_last_conv=True)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..6078bb98cacc04da23dcb7a661047902e0adefb3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_r50_fpn_mstrain_2x_coco.py @@ -0,0 +1,39 @@ +_base_ = './vfnet_r50_fpn_1x_coco.py' +img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict( + type='Resize', + img_scale=[(1333, 480), (1333, 960)], + multiscale_mode='range', + keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) +# learning policy +lr_config = dict(step=[16, 22]) +runner = dict(type='EpochBasedRunner', max_epochs=24) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7efa0517eb72395a2ff24992318fcb4667fc033d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_x101_32x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,17 @@ +_base_ = './vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_x101_32x4d_fpn_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_x101_32x4d_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..49a4312107d9ff045bc626802fa23cf01f54d10e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_x101_32x4d_fpn_mstrain_2x_coco.py @@ -0,0 +1,15 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=32, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_32x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py 
b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..7e1ee429f3dbaa895018a1b280ff312d01965e03 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_x101_64x4d_fpn_mdconv_c3-c5_mstrain_2x_coco.py @@ -0,0 +1,17 @@ +_base_ = './vfnet_r50_fpn_mdconv_c3-c5_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + dcn=dict(type='DCNv2', deform_groups=1, fallback_on_stride=False), + stage_with_dcn=(False, True, True, True), + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_x101_64x4d_fpn_mstrain_2x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_x101_64x4d_fpn_mstrain_2x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e51064e7ec003604edb99c2759b3f5fe4b95423e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/vfnet/vfnet_x101_64x4d_fpn_mstrain_2x_coco.py @@ -0,0 +1,15 @@ +_base_ = './vfnet_r50_fpn_mstrain_2x_coco.py' +model = dict( + backbone=dict( + type='ResNeXt', + depth=101, + groups=64, + base_width=4, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + style='pytorch', + init_cfg=dict( + type='Pretrained', checkpoint='open-mmlab://resnext101_64x4d'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/wider_face/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/wider_face/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b8fe474257b69381dfb5656feffe3ad3389b25dd --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/wider_face/README.md @@ -0,0 +1,43 @@ +# WIDER Face Dataset + + + +To use the WIDER Face dataset you need to download it +and extract it to the `data/WIDERFace` folder. Annotations in VOC format +can be found in this [repo](https://github.com/sovrasov/wider-face-pascal-voc-annotations.git). +You should move the annotation files from the `WIDER_train_annotations` and `WIDER_val_annotations` folders +to the `Annotations` folders inside the corresponding directories `WIDER_train` and `WIDER_val`. +The annotation lists `val.txt` and `train.txt` should also be copied to `data/WIDERFace` from `WIDER_train_annotations` and `WIDER_val_annotations`. +The directory structure should look like this: + +``` +mmdetection +├── mmdet +├── tools +├── configs +├── data +│ ├── WIDERFace +│ │ ├── WIDER_train +│ │ │ ├── 0--Parade +│ │ │ ├── ... +│ │ │ ├── Annotations +│ │ ├── WIDER_val +│ │ │ ├── 0--Parade +│ │ │ ├── ... +│ │ │ ├── Annotations +│ │ ├── val.txt +│ │ ├── train.txt + +``` + +After that, you can train SSD300 on WIDER Face by launching training with the `ssd300_wider_face.py` config, or +create your own config based on the presented one (a sketch follows below).
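+
+For example, a derived config might look like this (a minimal sketch; the file name and the overridden values are illustrative assumptions, not tuned settings):
+
+```python
+# my_ssd300_wider_face.py -- hypothetical file name
+_base_ = './ssd300_wider_face.py'
+
+# e.g. train longer with a halved learning rate
+optimizer = dict(type='SGD', lr=0.006, momentum=0.9, weight_decay=5e-4)
+lr_config = dict(step=[20, 26])
+runner = dict(type='EpochBasedRunner', max_epochs=30)
+```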
+ +``` +@inproceedings{yang2016wider, + Author = {Yang, Shuo and Luo, Ping and Loy, Chen Change and Tang, Xiaoou}, + Booktitle = {IEEE Conference on Computer Vision and Pattern Recognition (CVPR)}, + Title = {WIDER FACE: A Face Detection Benchmark}, + Year = {2016} +} +``` diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/wider_face/ssd300_wider_face.py b/detection_cbnet/docker-build-context/cbnetv2/configs/wider_face/ssd300_wider_face.py new file mode 100644 index 0000000000000000000000000000000000000000..5a3eb38df3dc75af176cc6972af88e76124ba4dc --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/wider_face/ssd300_wider_face.py @@ -0,0 +1,18 @@ +_base_ = [ + '../_base_/models/ssd300.py', '../_base_/datasets/wider_face.py', + '../_base_/default_runtime.py' +] +model = dict(bbox_head=dict(num_classes=1)) +# optimizer +optimizer = dict(type='SGD', lr=0.012, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict() +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.001, + step=[16, 20]) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=24) +log_config = dict(interval=1) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/README.md new file mode 100644 index 0000000000000000000000000000000000000000..da3559bbe8395fd9f84d803ce818a230f5721365 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/README.md @@ -0,0 +1,71 @@ +# **Y**ou **O**nly **L**ook **A**t **C**oefficien**T**s + + + +``` + ██╗ ██╗ ██████╗ ██╗ █████╗ ██████╗████████╗ + ╚██╗ ██╔╝██╔═══██╗██║ ██╔══██╗██╔════╝╚══██╔══╝ + ╚████╔╝ ██║ ██║██║ ███████║██║ ██║ + ╚██╔╝ ██║ ██║██║ ██╔══██║██║ ██║ + ██║ ╚██████╔╝███████╗██║ ██║╚██████╗ ██║ + ╚═╝ ╚═════╝ ╚══════╝╚═╝ ╚═╝ ╚═════╝ ╚═╝ +``` + +A simple, fully convolutional model for real-time instance segmentation. This is the code for our paper: + +- [YOLACT: Real-time Instance Segmentation](https://arxiv.org/abs/1904.02689) + + +For a real-time demo, check out our ICCV video: +[![IMAGE ALT TEXT HERE](https://img.youtube.com/vi/0pMfmo8qfpQ/0.jpg)](https://www.youtube.com/watch?v=0pMfmo8qfpQ) + +## Evaluation + +Here are our YOLACT models along with their FPS on a Titan Xp and mAP on COCO's `val`: + +| Image Size | GPU x BS | Backbone | *FPS | mAP | Weights | Configs | Download | +|:----------:|:--------:|:-------------:|:-----:|:----:|:-------:|:------:|:--------:| +| 550 | 1x8 | Resnet50-FPN | 42.5 | 29.0 | | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/yolact/yolact_r50_1x8_coco.py) |[model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/yolact/yolact_r50_1x8_coco_20200908-f38d58df.pth) | +| 550 | 8x8 | Resnet50-FPN | 42.5 | 28.4 | | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/yolact/yolact_r50_8x8_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/yolact/yolact_r50_8x8_coco_20200908-ca34f5db.pth) | +| 550 | 1x8 | Resnet101-FPN | 33.5 | 30.4 | | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/yolact/yolact_r101_1x8_coco.py) | [model](https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/yolact/yolact_r101_1x8_coco_20200908-4cbe9101.pth) | + +*Note: The FPS is evaluated by the [original implementation](https://github.com/dbolya/yolact). When calculating FPS, only the model inference time is taken into account. 
Data loading and post-processing operations such as converting masks to RLE code, generating COCO JSON results, and image rendering are not included. + +## Training + +All the aforementioned models except the 8x8 one are trained with a single GPU. Training typically takes ~12GB of VRAM when using resnet-101 as the backbone. If you want to try multi-GPU training, you may have to modify the configuration files accordingly, such as adjusting the training schedule and freezing batch norm. + +```Shell +# Trains using the resnet-101 backbone with a batch size of 8 on a single GPU. +./tools/dist_train.sh configs/yolact/yolact_r101_1x8_coco.py 1 +``` + +## Testing + +Please refer to [mmdetection/docs/getting_started.md](https://github.com/open-mmlab/mmdetection/blob/master/docs/getting_started.md#inference-with-pretrained-models). + +## Citation + +If you use YOLACT or this code base in your work, please cite: + +```latex +@inproceedings{yolact-iccv2019, + author = {Daniel Bolya and Chong Zhou and Fanyi Xiao and Yong Jae Lee}, + title = {YOLACT: {Real-time} Instance Segmentation}, + booktitle = {ICCV}, + year = {2019}, +} +``` + + diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..0baa3faf114fbb58c8188a81f4cd17478c628475 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/metafile.yml @@ -0,0 +1,73 @@ +Collections: + - Name: YOLACT + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - FPN + - ResNet + Paper: https://arxiv.org/abs/1904.02689 + README: configs/yolact/README.md + +Models: + - Name: yolact_r50_1x8_coco + In Collection: YOLACT + Config: configs/yolact/yolact_r50_1x8_coco.py + Metadata: + Training Resources: 1x V100 GPU + Batch Size: 8 + inference time (ms/im): + - value: 23.53 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (550, 550) + Results: + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 29.0 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/yolact/yolact_r50_1x8_coco_20200908-f38d58df.pth + + - Name: yolact_r50_8x8_coco + In Collection: YOLACT + Config: configs/yolact/yolact_r50_8x8_coco.py + Metadata: + Batch Size: 64 + inference time (ms/im): + - value: 23.53 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (550, 550) + Results: + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 28.4 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/yolact/yolact_r50_8x8_coco_20200908-ca34f5db.pth + + - Name: yolact_r101_1x8_coco + In Collection: YOLACT + Config: configs/yolact/yolact_r101_1x8_coco.py + Metadata: + Training Resources: 1x V100 GPU + Batch Size: 8 + inference time (ms/im): + - value: 29.85 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (550, 550) + Results: + - Task: Instance Segmentation + Dataset: COCO + Metrics: + mask AP: 30.4 + Weights: https://openmmlab.oss-cn-hangzhou.aliyuncs.com/mmdetection/v2.0/yolact/yolact_r101_1x8_coco_20200908-4cbe9101.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/yolact_r101_1x8_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/yolact_r101_1x8_coco.py new file mode 100644 index
0000000000000000000000000000000000000000..532631dd5f8483dfb61488e4f445f1f50a71fbde --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/yolact_r101_1x8_coco.py @@ -0,0 +1,7 @@ +_base_ = './yolact_r50_1x8_coco.py' + +model = dict( + backbone=dict( + depth=101, + init_cfg=dict(type='Pretrained', + checkpoint='torchvision://resnet101'))) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/yolact_r50_1x8_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/yolact_r50_1x8_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..9d2103c99ff2da6120b9c20a744c90bc7b5f6fc8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/yolact_r50_1x8_coco.py @@ -0,0 +1,160 @@ +_base_ = '../_base_/default_runtime.py' + +# model settings +img_size = 550 +model = dict( + type='YOLACT', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(0, 1, 2, 3), + frozen_stages=-1, # do not freeze stem + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=False, # update the statistics of bn + zero_init_residual=False, + style='pytorch', + init_cfg=dict(type='Pretrained', checkpoint='torchvision://resnet50')), + neck=dict( + type='FPN', + in_channels=[256, 512, 1024, 2048], + out_channels=256, + start_level=1, + add_extra_convs='on_input', + num_outs=5, + upsample_cfg=dict(mode='bilinear')), + bbox_head=dict( + type='YOLACTHead', + num_classes=80, + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=3, + scales_per_octave=1, + base_sizes=[8, 16, 32, 64, 128], + ratios=[0.5, 1.0, 2.0], + strides=[550.0 / x for x in [69, 35, 18, 9, 5]], + centers=[(550 * 0.5 / x, 550 * 0.5 / x) + for x in [69, 35, 18, 9, 5]]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + reduction='none', + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.5), + num_head_convs=1, + num_protos=32, + use_ohem=True), + mask_head=dict( + type='YOLACTProtonet', + in_channels=256, + num_protos=32, + num_classes=80, + max_masks_to_train=100, + loss_mask_weight=6.125), + segm_head=dict( + type='YOLACTSegmHead', + num_classes=80, + in_channels=256, + loss_segm=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0., + ignore_iof_thr=-1, + gt_max_assign_all=False), + # smoothl1_beta=1., + allowed_border=-1, + pos_weight=-1, + neg_pos_ratio=3, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + iou_thr=0.5, + top_k=200, + max_per_img=100)) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict( + mean=[123.68, 116.78, 103.94], std=[58.40, 57.12, 57.38], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True, with_mask=True), + dict(type='FilterAnnotations', min_gt_bbox_wh=(4.0, 4.0)), + dict( + type='PhotoMetricDistortion', + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 4)), + dict( + type='MinIoURandomCrop', + min_ious=(0.1, 0.3, 0.5, 0.7, 
0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(img_size, img_size), keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']), +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(img_size, img_size), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=False), + dict(type='Normalize', **img_norm_cfg), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=1e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict() +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=500, + warmup_ratio=0.1, + step=[20, 42, 49, 52]) +runner = dict(type='EpochBasedRunner', max_epochs=55) +cudnn_benchmark = True +evaluation = dict(metric=['bbox', 'segm']) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/yolact_r50_8x8_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/yolact_r50_8x8_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..b3adcb74a6155a0ab7303ab9ae90ee120f3eb4ad --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolact/yolact_r50_8x8_coco.py @@ -0,0 +1,11 @@ +_base_ = 'yolact_r50_1x8_coco.py' + +optimizer = dict(type='SGD', lr=8e-3, momentum=0.9, weight_decay=5e-4) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config = dict( + policy='step', + warmup='linear', + warmup_iters=1000, + warmup_ratio=0.1, + step=[20, 42, 49, 52]) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/README.md new file mode 100644 index 0000000000000000000000000000000000000000..25af46ad28cc9769aef80b6db4368305789038ff --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/README.md @@ -0,0 +1,36 @@ +# YOLOv3 + +## Introduction + + + +```latex +@misc{redmon2018yolov3, + title={YOLOv3: An Incremental Improvement}, + author={Joseph Redmon and Ali Farhadi}, + year={2018}, + eprint={1804.02767}, + archivePrefix={arXiv}, + primaryClass={cs.CV} +} +``` + +## Results and Models + +| Backbone | Scale | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| DarkNet-53 | 320 | 273e | 2.7 | 63.9 | 27.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolo/yolov3_d53_320_273e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_320_273e_coco/yolov3_d53_320_273e_coco-421362b6.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_320_273e_coco/yolov3_d53_320_273e_coco-20200819_172101.log.json) | +| DarkNet-53 | 416 | 273e | 3.8 
| 61.2 | 30.9 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolo/yolov3_d53_mstrain-416_273e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-416_273e_coco/yolov3_d53_mstrain-416_273e_coco-2b60fcd9.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-416_273e_coco/yolov3_d53_mstrain-416_273e_coco-20200819_173424.log.json) | +| DarkNet-53 | 608 | 273e | 7.4 | 48.1 | 33.7 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolo/yolov3_d53_mstrain-608_273e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-608_273e_coco/yolov3_d53_mstrain-608_273e_coco_20210518_115020-a2c3acb8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-608_273e_coco/yolov3_d53_mstrain-608_273e_coco_20210518_115020.log.json) | + +## Mixed Precision Training + +We also train YOLOv3 with mixed precision; the one-line config override is sketched at the end of this README. + +| Backbone | Scale | Lr schd | Mem (GB) | Inf time (fps) | box AP | Config | Download | +| :-------------: | :-----: | :-----: | :------: | :------------: | :----: | :------: | :--------: | +| DarkNet-53 | 608 | 273e | 4.7 | 48.1 | 33.8 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolo/yolov3_d53_fp16_mstrain-608_273e_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_fp16_mstrain-608_273e_coco/yolov3_d53_fp16_mstrain-608_273e_coco_20210517_213542-4bc34944.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_fp16_mstrain-608_273e_coco/yolov3_d53_fp16_mstrain-608_273e_coco_20210517_213542.log.json) | + +## Credit + +This implementation originates from the project of Haoyu Wu (@wuhy08) at Western Digital.
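+
+The mixed-precision run above needs only a one-line switch on top of the FP32 schedule. A minimal sketch of such an override config; the `'dynamic'` value mirrors the `yolov3_d53_fp16_mstrain-608_273e_coco.py` config added later in this diff, while the fixed-scale alternative in the comment is an assumption carried over from other mmdetection fp16 configs:
+
+```python
+# Inherit everything from the FP32 608 schedule and enable mixed precision.
+_base_ = './yolov3_d53_mstrain-608_273e_coco.py'
+# 'dynamic' lets the loss scaler grow and shrink automatically on gradient
+# overflow; a fixed value such as 512. is the usual alternative when dynamic
+# scaling proves unstable.
+fp16 = dict(loss_scale='dynamic')
+```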
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..4e290df4127b2be089b0e936f43ad847c6f822b9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/metafile.yml @@ -0,0 +1,93 @@ +Collections: + - Name: YOLOv3 + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - DarkNet + Paper: https://arxiv.org/abs/1804.02767 + README: configs/yolo/README.md + +Models: + - Name: yolov3_d53_320_273e_coco + In Collection: YOLOv3 + Config: configs/yolo/yolov3_d53_320_273e_coco.py + Metadata: + Training Memory (GB): 2.7 + inference time (ms/im): + - value: 15.65 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (320, 320) + Epochs: 273 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 27.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_320_273e_coco/yolov3_d53_320_273e_coco-421362b6.pth + + - Name: yolov3_d53_mstrain-416_273e_coco + In Collection: YOLOv3 + Config: configs/yolo/yolov3_d53_mstrain-416_273e_coco.py + Metadata: + Training Memory (GB): 3.8 + inference time (ms/im): + - value: 16.34 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (416, 416) + Epochs: 273 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 30.9 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-416_273e_coco/yolov3_d53_mstrain-416_273e_coco-2b60fcd9.pth + + - Name: yolov3_d53_mstrain-608_273e_coco + In Collection: YOLOv3 + Config: configs/yolo/yolov3_d53_mstrain-608_273e_coco.py + Metadata: + Training Memory (GB): 7.4 + inference time (ms/im): + - value: 20.79 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP32 + resolution: (608, 608) + Epochs: 273 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 33.7 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_mstrain-608_273e_coco/yolov3_d53_mstrain-608_273e_coco_20210518_115020-a2c3acb8.pth + + - Name: yolov3_d53_fp16_mstrain-608_273e_coco + In Collection: YOLOv3 + Config: configs/yolo/yolov3_d53_fp16_mstrain-608_273e_coco.py + Metadata: + Training Memory (GB): 4.7 + inference time (ms/im): + - value: 20.79 + hardware: V100 + backend: PyTorch + batch size: 1 + mode: FP16 + resolution: (608, 608) + Epochs: 273 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 33.8 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolo/yolov3_d53_fp16_mstrain-608_273e_coco/yolov3_d53_fp16_mstrain-608_273e_coco_20210517_213542-4bc34944.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/yolov3_d53_320_273e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/yolov3_d53_320_273e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..87359f6fb66d94de10b8e3797ee3eec93a19cb26 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/yolov3_d53_320_273e_coco.py @@ -0,0 +1,42 @@ +_base_ = './yolov3_d53_mstrain-608_273e_coco.py' +# dataset settings +img_norm_cfg = dict(mean=[0, 0, 0], std=[255., 255., 255.], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='PhotoMetricDistortion'), + dict( + 
type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 2)), + dict( + type='MinIoURandomCrop', + min_ious=(0.4, 0.5, 0.6, 0.7, 0.8, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=(320, 320), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(320, 320), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/yolov3_d53_fp16_mstrain-608_273e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/yolov3_d53_fp16_mstrain-608_273e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..4ef2422dada278c1e28b48d333437c7994832eba --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/yolov3_d53_fp16_mstrain-608_273e_coco.py @@ -0,0 +1,3 @@ +_base_ = './yolov3_d53_mstrain-608_273e_coco.py' +# fp16 settings +fp16 = dict(loss_scale='dynamic') diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/yolov3_d53_mstrain-416_273e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/yolov3_d53_mstrain-416_273e_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..d029b5cdd6b3dad09b16a6f2a23e66be684a6412 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/yolov3_d53_mstrain-416_273e_coco.py @@ -0,0 +1,42 @@ +_base_ = './yolov3_d53_mstrain-608_273e_coco.py' +# dataset settings +img_norm_cfg = dict(mean=[0, 0, 0], std=[255., 255., 255.], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='PhotoMetricDistortion'), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 2)), + dict( + type='MinIoURandomCrop', + min_ious=(0.4, 0.5, 0.6, 0.7, 0.8, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=[(320, 320), (416, 416)], keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(416, 416), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/yolov3_d53_mstrain-608_273e_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/yolov3_d53_mstrain-608_273e_coco.py new file mode 100644 index 
0000000000000000000000000000000000000000..83026b0f742dd57400114bac1402e188596de772 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolo/yolov3_d53_mstrain-608_273e_coco.py @@ -0,0 +1,127 @@ +_base_ = '../_base_/default_runtime.py' +# model settings +model = dict( + type='YOLOV3', + backbone=dict( + type='Darknet', + depth=53, + out_indices=(3, 4, 5), + init_cfg=dict(type='Pretrained', checkpoint='open-mmlab://darknet53')), + neck=dict( + type='YOLOV3Neck', + num_scales=3, + in_channels=[1024, 512, 256], + out_channels=[512, 256, 128]), + bbox_head=dict( + type='YOLOV3Head', + num_classes=80, + in_channels=[512, 256, 128], + out_channels=[1024, 512, 256], + anchor_generator=dict( + type='YOLOAnchorGenerator', + base_sizes=[[(116, 90), (156, 198), (373, 326)], + [(30, 61), (62, 45), (59, 119)], + [(10, 13), (16, 30), (33, 23)]], + strides=[32, 16, 8]), + bbox_coder=dict(type='YOLOBBoxCoder'), + featmap_strides=[32, 16, 8], + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0, + reduction='sum'), + loss_conf=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0, + reduction='sum'), + loss_xy=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=2.0, + reduction='sum'), + loss_wh=dict(type='MSELoss', loss_weight=2.0, reduction='sum')), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='GridAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0)), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + conf_thr=0.005, + nms=dict(type='nms', iou_threshold=0.45), + max_per_img=100)) +# dataset settings +dataset_type = 'CocoDataset' +data_root = 'data/coco/' +img_norm_cfg = dict(mean=[0, 0, 0], std=[255., 255., 255.], to_rgb=True) +train_pipeline = [ + dict(type='LoadImageFromFile', to_float32=True), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='PhotoMetricDistortion'), + dict( + type='Expand', + mean=img_norm_cfg['mean'], + to_rgb=img_norm_cfg['to_rgb'], + ratio_range=(1, 2)), + dict( + type='MinIoURandomCrop', + min_ious=(0.4, 0.5, 0.6, 0.7, 0.8, 0.9), + min_crop_size=0.3), + dict(type='Resize', img_scale=[(320, 320), (608, 608)], keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(608, 608), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']) + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=4, + train=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_train2017.json', + img_prefix=data_root + 'train2017/', + pipeline=train_pipeline), + val=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline), + test=dict( + type=dataset_type, + ann_file=data_root + 'annotations/instances_val2017.json', + img_prefix=data_root + 'val2017/', + pipeline=test_pipeline)) +# optimizer +optimizer = dict(type='SGD', lr=0.001, momentum=0.9, weight_decay=0.0005) +optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2)) +# learning policy +lr_config 
= dict( + policy='step', + warmup='linear', + warmup_iters=2000, # same as burn-in in darknet + warmup_ratio=0.1, + step=[218, 246]) +# runtime settings +runner = dict(type='EpochBasedRunner', max_epochs=273) +evaluation = dict(interval=1, metric=['bbox']) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolof/README.md b/detection_cbnet/docker-build-context/cbnetv2/configs/yolof/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e16e8a2790c0098083fc36eee3013c28b0200f8a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolof/README.md @@ -0,0 +1,25 @@ +# You Only Look One-level Feature + +## Introduction + + + +``` +@inproceedings{chen2021you, + title={You Only Look One-level Feature}, + author={Chen, Qiang and Wang, Yingming and Yang, Tong and Zhang, Xiangyu and Cheng, Jian and Sun, Jian}, + booktitle={IEEE Conference on Computer Vision and Pattern Recognition}, + year={2021} +} +``` + +## Results and Models + +| Backbone | Style | Epoch | Lr schd | Mem (GB) | box AP | Config | Download | +|:---------:|:-------:|:-------:|:-------:|:--------:|:------:|:------:|:--------:| +| R-50-C5 | caffe | Y | 1x | 8.3 | 37.5 | [config](https://github.com/open-mmlab/mmdetection/tree/master/configs/yolof/yolof_r50_c5_8x8_1x_coco.py) | [model](https://download.openmmlab.com/mmdetection/v2.0/yolof/yolof_r50_c5_8x8_1x_coco/yolof_r50_c5_8x8_1x_coco_20210425_024427-8e864411.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/yolof/yolof_r50_c5_8x8_1x_coco/yolof_r50_c5_8x8_1x_coco_20210425_024427.log.json) | + +**Note**: + +1. We find that the performance is unstable and may fluctuate by about 0.3 mAP; an mAP between 37.4 and 37.7 is acceptable for YOLOF_R_50_C5_1x. The same fluctuation can also be observed in the [original implementation](https://github.com/chensnathan/YOLOF). +2. In addition to this instability, training sometimes shows large loss fluctuations and NaN values, so there may still be problems with this implementation; these will be improved in subsequent updates.
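+
+The iter-based variant of this schedule (added later in this diff as `yolof_r50_c5_8x8_iter-1x_coco.py`) converts the 1x epoch schedule into iterations. A minimal sketch reproducing the equivalences quoted in that config's comments, assuming 117266 filtered COCO images and 8 GPUs with 8 images each:
+
+```python
+# One training iteration consumes 8 GPUs x 8 images = 64 images.
+images = 117266
+batch = 8 * 8
+iters_per_epoch = images / batch  # ~1832.3 iterations per epoch
+
+for iters in (15000, 20000, 22500):
+    # Prints ~8.2, ~10.9 and ~12.3 epochs, matching the config comments.
+    print(f'{iters} iters ~= {iters / iters_per_epoch:.1f} epochs')
+```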
diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolof/metafile.yml b/detection_cbnet/docker-build-context/cbnetv2/configs/yolof/metafile.yml new file mode 100644 index 0000000000000000000000000000000000000000..f2b22921bb2939bfbd4541b51cb61020375a6421 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolof/metafile.yml @@ -0,0 +1,27 @@ +Collections: + - Name: YOLOF + Metadata: + Training Data: COCO + Training Techniques: + - SGD with Momentum + - Weight Decay + Training Resources: 8x V100 GPUs + Architecture: + - Dilated Encoder + - ResNet + Paper: https://arxiv.org/abs/2103.09460 + README: configs/yolof/README.md + +Models: + - Name: yolof_r50_c5_8x8_1x_coco + In Collection: YOLOF + Config: configs/yolof/yolof_r50_c5_8x8_1x_coco.py + Metadata: + Training Memory (GB): 8.3 + Epochs: 12 + Results: + - Task: Object Detection + Dataset: COCO + Metrics: + box AP: 37.5 + Weights: https://download.openmmlab.com/mmdetection/v2.0/yolof/yolof_r50_c5_8x8_1x_coco/yolof_r50_c5_8x8_1x_coco_20210425_024427-8e864411.pth diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolof/yolof_r50_c5_8x8_1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/yolof/yolof_r50_c5_8x8_1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..0e3b5016296b266c1cdd2d362fd2e8e72f13578a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolof/yolof_r50_c5_8x8_1x_coco.py @@ -0,0 +1,105 @@ +_base_ = [ + '../_base_/datasets/coco_detection.py', + '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py' +] +model = dict( + type='YOLOF', + backbone=dict( + type='ResNet', + depth=50, + num_stages=4, + out_indices=(3, ), + frozen_stages=1, + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='caffe', + init_cfg=dict( + type='Pretrained', + checkpoint='open-mmlab://detectron/resnet50_caffe')), + neck=dict( + type='DilatedEncoder', + in_channels=2048, + out_channels=512, + block_mid_channels=128, + num_residual_blocks=4), + bbox_head=dict( + type='YOLOFHead', + num_classes=80, + in_channels=512, + reg_decoded_bbox=True, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[1, 2, 4, 8, 16], + strides=[32]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1., 1., 1., 1.], + add_ctr_clamp=True, + ctr_clamp=32), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.0)), + # training and testing settings + train_cfg=dict( + assigner=dict( + type='UniformAssigner', pos_ignore_thr=0.15, neg_ignore_thr=0.7), + allowed_border=-1, + pos_weight=-1, + debug=False), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) +# optimizer +optimizer = dict( + type='SGD', + lr=0.12, + momentum=0.9, + weight_decay=0.0001, + paramwise_cfg=dict( + norm_decay_mult=0., custom_keys={'backbone': dict(lr_mult=1. 
/ 3)})) +lr_config = dict(warmup_iters=1500, warmup_ratio=0.00066667) + +# use caffe img_norm +img_norm_cfg = dict( + mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_bbox=True), + dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + dict(type='RandomFlip', flip_ratio=0.5), + dict(type='RandomShift', shift_ratio=0.5, max_shift_px=32), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='DefaultFormatBundle'), + dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict( + type='MultiScaleFlipAug', + img_scale=(1333, 800), + flip=False, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ]) +] +data = dict( + samples_per_gpu=8, + workers_per_gpu=8, + train=dict(pipeline=train_pipeline), + val=dict(pipeline=test_pipeline), + test=dict(pipeline=test_pipeline)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/configs/yolof/yolof_r50_c5_8x8_iter-1x_coco.py b/detection_cbnet/docker-build-context/cbnetv2/configs/yolof/yolof_r50_c5_8x8_iter-1x_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..c95c02da103bdd499063312c36ade30601bb7380 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/configs/yolof/yolof_r50_c5_8x8_iter-1x_coco.py @@ -0,0 +1,14 @@ +_base_ = './yolof_r50_c5_8x8_1x_coco.py' + +# We implemented the iter-based config according to the source code. +# The COCO dataset has 117266 images after filtering. We train with +# 8 GPUs and a batch size of 8 per GPU, so 22500 iterations are +# equivalent to 22500/(117266/(8x8))=12.3 epochs, 15000 to 8.2 epochs, +# and 20000 to 10.9 epochs. Because the lr (0.12) is large, the +# iter-based and epoch-based settings differ by about 0.2 in the mAP +# evaluation value. +lr_config = dict(step=[15000, 20000]) +runner = dict(_delete_=True, type='IterBasedRunner', max_iters=22500) +checkpoint_config = dict(interval=2500) +evaluation = dict(interval=4500) +log_config = dict(interval=20) diff --git a/detection_cbnet/docker-build-context/cbnetv2/demo/MMDet_Tutorial.ipynb b/detection_cbnet/docker-build-context/cbnetv2/demo/MMDet_Tutorial.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..c95c6211301c64781ca74ad58325aa2e740f6dfa --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/demo/MMDet_Tutorial.ipynb @@ -0,0 +1,1656 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "tJxJHruNLb7Y" + }, + "source": [ + "<a href=\"https://colab.research.google.com/github/open-mmlab/mmdetection/blob/master/demo/MMDet_Tutorial.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "aGYwt_UjIrqp" + }, + "source": [ + "# MMDetection Tutorial\n", + "\n", + "Welcome to MMDetection! This is the official Colab tutorial for using MMDetection.
In this tutorial, you will learn\n", + "- Perform inference with a MMDet detector.\n", + "- Train a new detector with a new dataset.\n", + "\n", + "Let's start!\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "Wi4LPmsR66sy", + "outputId": "8eb8aadf-1c70-42dd-9105-1a3dad85c504" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "nvcc: NVIDIA (R) Cuda compiler driver\n", + "Copyright (c) 2005-2019 NVIDIA Corporation\n", + "Built on Sun_Jul_28_19:07:16_PDT_2019\n", + "Cuda compilation tools, release 10.1, V10.1.243\n", + "gcc (Ubuntu 7.5.0-3ubuntu1~18.04) 7.5.0\n", + "Copyright (C) 2017 Free Software Foundation, Inc.\n", + "This is free software; see the source for copying conditions. There is NO\n", + "warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n", + "\n" + ] + } + ], + "source": [ + "# Check nvcc version\n", + "!nvcc -V\n", + "# Check GCC version\n", + "!gcc --version" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "gkGnB9WyHSXB", + "outputId": "f1360573-c24a-4a8f-98cd-cc654c1d7d05" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Looking in links: https://download.pytorch.org/whl/torch_stable.html\n", + "Collecting torch==1.5.1+cu101\n", + "\u001b[?25l Downloading https://download.pytorch.org/whl/cu101/torch-1.5.1%2Bcu101-cp36-cp36m-linux_x86_64.whl (704.4MB)\n", + "\u001b[K |████████████████████████████████| 704.4MB 26kB/s \n", + "\u001b[?25hCollecting torchvision==0.6.1+cu101\n", + "\u001b[?25l Downloading https://download.pytorch.org/whl/cu101/torchvision-0.6.1%2Bcu101-cp36-cp36m-linux_x86_64.whl (6.6MB)\n", + "\u001b[K |████████████████████████████████| 6.6MB 60.4MB/s \n", + "\u001b[?25hRequirement already satisfied, skipping upgrade: numpy in /usr/local/lib/python3.6/dist-packages (from torch==1.5.1+cu101) (1.19.5)\n", + "Requirement already satisfied, skipping upgrade: future in /usr/local/lib/python3.6/dist-packages (from torch==1.5.1+cu101) (0.16.0)\n", + "Requirement already satisfied, skipping upgrade: pillow>=4.1.1 in /usr/local/lib/python3.6/dist-packages (from torchvision==0.6.1+cu101) (7.0.0)\n", + "Installing collected packages: torch, torchvision\n", + " Found existing installation: torch 1.7.0+cu101\n", + " Uninstalling torch-1.7.0+cu101:\n", + " Successfully uninstalled torch-1.7.0+cu101\n", + " Found existing installation: torchvision 0.8.1+cu101\n", + " Uninstalling torchvision-0.8.1+cu101:\n", + " Successfully uninstalled torchvision-0.8.1+cu101\n", + "Successfully installed torch-1.5.1+cu101 torchvision-0.6.1+cu101\n", + "Collecting mmcv-full\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/30/f6/763845494c67ec6469992c8196c2458bdc12ff9c749de14d20a000da765d/mmcv-full-1.2.6.tar.gz (226kB)\n", + "\u001b[K |████████████████████████████████| 235kB 15.8MB/s \n", + "\u001b[?25hCollecting addict\n", + " Downloading https://files.pythonhosted.org/packages/6a/00/b08f23b7d7e1e14ce01419a467b583edbb93c6cdb8654e54a9cc579cd61f/addict-2.4.0-py3-none-any.whl\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from mmcv-full) (1.19.5)\n", + "Requirement already satisfied: Pillow in /usr/local/lib/python3.6/dist-packages (from mmcv-full) (7.0.0)\n", + "Requirement already satisfied: pyyaml in /usr/local/lib/python3.6/dist-packages (from 
mmcv-full) (3.13)\n", + "Collecting yapf\n", + "\u001b[?25l Downloading https://files.pythonhosted.org/packages/c1/5d/d84677fe852bc5e091739acda444a9b6700ffc6b11a21b00dd244c8caef0/yapf-0.30.0-py2.py3-none-any.whl (190kB)\n", + "\u001b[K |████████████████████████████████| 194kB 53.0MB/s \n", + "\u001b[?25hBuilding wheels for collected packages: mmcv-full\n", + " Building wheel for mmcv-full (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for mmcv-full: filename=mmcv_full-1.2.6-cp36-cp36m-linux_x86_64.whl size=20243694 sha256=8742a849334b62e8e3f7b695fd546b033111501586a94fe5612aab54f7edebfa\n", + " Stored in directory: /root/.cache/pip/wheels/40/39/64/7c5ab43621826eb41d31f1df14a8acabf74d879fdf33dc9d79\n", + "Successfully built mmcv-full\n", + "Installing collected packages: addict, yapf, mmcv-full\n", + "Successfully installed addict-2.4.0 mmcv-full-1.2.6 yapf-0.30.0\n", + "Cloning into 'mmdetection'...\n", + "remote: Enumerating objects: 50, done.\u001b[K\n", + "remote: Counting objects: 100% (50/50), done.\u001b[K\n", + "remote: Compressing objects: 100% (49/49), done.\u001b[K\n", + "remote: Total 15882 (delta 7), reused 5 (delta 1), pack-reused 15832\u001b[K\n", + "Receiving objects: 100% (15882/15882), 16.93 MiB | 33.41 MiB/s, done.\n", + "Resolving deltas: 100% (10915/10915), done.\n", + "/content/mmdetection\n", + "Obtaining file:///content/mmdetection\n", + "Requirement already satisfied: matplotlib in /usr/local/lib/python3.6/dist-packages (from mmdet==2.9.0) (3.2.2)\n", + "Collecting mmpycocotools\n", + " Downloading https://files.pythonhosted.org/packages/99/51/1bc1d79f296347eeb2d1a2e0606885ab1e4682833bf275fd39c189952e26/mmpycocotools-12.0.3.tar.gz\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.6/dist-packages (from mmdet==2.9.0) (1.19.5)\n", + "Requirement already satisfied: six in /usr/local/lib/python3.6/dist-packages (from mmdet==2.9.0) (1.15.0)\n", + "Collecting terminaltables\n", + " Downloading https://files.pythonhosted.org/packages/9b/c4/4a21174f32f8a7e1104798c445dacdc1d4df86f2f26722767034e4de4bff/terminaltables-3.1.0.tar.gz\n", + "Requirement already satisfied: python-dateutil>=2.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->mmdet==2.9.0) (2.8.1)\n", + "Requirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->mmdet==2.9.0) (2.4.7)\n", + "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib->mmdet==2.9.0) (1.3.1)\n", + "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.6/dist-packages (from matplotlib->mmdet==2.9.0) (0.10.0)\n", + "Requirement already satisfied: setuptools>=18.0 in /usr/local/lib/python3.6/dist-packages (from mmpycocotools->mmdet==2.9.0) (53.0.0)\n", + "Requirement already satisfied: cython>=0.27.3 in /usr/local/lib/python3.6/dist-packages (from mmpycocotools->mmdet==2.9.0) (0.29.21)\n", + "Building wheels for collected packages: mmpycocotools, terminaltables\n", + " Building wheel for mmpycocotools (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for mmpycocotools: filename=mmpycocotools-12.0.3-cp36-cp36m-linux_x86_64.whl size=265912 sha256=1e5525c4339f76072ed09fecd12765fe7544e94745b91fb76fca95658e3dea7b\n", + " Stored in directory: /root/.cache/pip/wheels/a2/b0/8d/3307912785a42bc80f673946fac676d5c596eee537af7a599c\n", + " Building wheel for terminaltables (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + " Created wheel for terminaltables: filename=terminaltables-3.1.0-cp36-none-any.whl size=15358 sha256=93fdde0610537c38e16b17f6df08bbc2be3c1b19e266b5d4e5fd7aef039bb218\n", + " Stored in directory: /root/.cache/pip/wheels/30/6b/50/6c75775b681fb36cdfac7f19799888ef9d8813aff9e379663e\n", + "Successfully built mmpycocotools terminaltables\n", + "Installing collected packages: mmpycocotools, terminaltables, mmdet\n", + " Running setup.py develop for mmdet\n", + "Successfully installed mmdet mmpycocotools-12.0.3 terminaltables-3.1.0\n", + "Requirement already satisfied: Pillow==7.0.0 in /usr/local/lib/python3.6/dist-packages (7.0.0)\n" + ] + } + ], + "source": [ + "# install dependencies: (use cu101 because colab has CUDA 10.1)\n", + "!pip install -U torch==1.5.1+cu101 torchvision==0.6.1+cu101 -f https://download.pytorch.org/whl/torch_stable.html\n", + "\n", + "# install mmcv-full thus we could use CUDA operators\n", + "!pip install mmcv-full\n", + "\n", + "# Install mmdetection\n", + "!rm -rf mmdetection\n", + "!git clone https://github.com/open-mmlab/mmdetection.git\n", + "%cd mmdetection\n", + "\n", + "!pip install -e .\n", + "\n", + "# install Pillow 7.0.0 back in order to avoid bug in colab\n", + "!pip install Pillow==7.0.0" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "6hD0mmMixT0p", + "outputId": "5316598c-233a-4140-db12-64d3a0df216b" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "1.5.1+cu101 True\n", + "2.9.0\n", + "10.1\n", + "GCC 7.5\n" + ] + } + ], + "source": [ + "# Check Pytorch installation\n", + "import torch, torchvision\n", + "print(torch.__version__, torch.cuda.is_available())\n", + "\n", + "# Check MMDetection installation\n", + "import mmdet\n", + "print(mmdet.__version__)\n", + "\n", + "# Check mmcv installation\n", + "from mmcv.ops import get_compiling_cuda_version, get_compiler_version\n", + "print(get_compiling_cuda_version())\n", + "print(get_compiler_version())" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "gi9zw03oM4CH" + }, + "source": [ + "## Perform inference with a MMDet detector\n", + "MMDetection already provides high level APIs to do inference and training." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "j4doHX4exvS1", + "outputId": "688ef595-5742-4210-90d0-b841044a7892" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2021-02-20 03:03:09-- https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 47.252.96.35\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|47.252.96.35|:80... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 177867103 (170M) [application/octet-stream]\n", + "Saving to: ‘checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth’\n", + "\n", + "checkpoints/mask_rc 100%[===================>] 169.63M 8.44MB/s in 21s \n", + "\n", + "2021-02-20 03:03:32 (8.19 MB/s) - ‘checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth’ saved [177867103/177867103]\n", + "\n" + ] + } + ], + "source": [ + "!mkdir checkpoints\n", + "!wget -c https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth \\\n", + " -O checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "id": "8M5KUnX7Np3h" + }, + "outputs": [], + "source": [ + "from mmdet.apis import inference_detector, init_detector, show_result_pyplot\n", + "\n", + "# Choose to use a config and initialize the detector\n", + "config = 'configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco.py'\n", + "# Setup a checkpoint file to load\n", + "checkpoint = 'checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth'\n", + "# initialize the detector\n", + "model = init_detector(config, checkpoint, device='cuda:0')" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "Wi6DRpsQPEmV", + "outputId": "8ea1de7e-d20f-44cf-9967-24578d51ff16" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/content/mmdetection/mmdet/datasets/utils.py:66: UserWarning: \"ImageToTensor\" pipeline is replaced by \"DefaultFormatBundle\" for batch inference. 
It is recommended to manually replace it in the test data pipeline in your config file.\n", + " 'data pipeline in your config file.', UserWarning)\n" + ] + } + ], + "source": [ + "# Use the detector to do inference\n", + "img = 'demo/demo.jpg'\n", + "result = inference_detector(model, img)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 576 + }, + "id": "UsJU5D-QPX8L", + "outputId": "04df7cef-6393-4147-da43-ab89f3b29a56" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/content/mmdetection/mmdet/apis/inference.py:205: UserWarning: \"block\" will be deprecated in v2.9.0,Please use \"wait_time\"\n", + " warnings.warn('\"block\" will be deprecated in v2.9.0,'\n", + "/content/mmdetection/mmdet/apis/inference.py:207: UserWarning: \"fig_size\" are deprecated and takes no effect.\n", + " warnings.warn('\"fig_size\" are deprecated and takes no effect.')\n", + "/content/mmdetection/mmdet/core/visualization/image.py:75: UserWarning: \"font_scale\" will be deprecated in v2.9.0,Please use \"font_size\"\n", + " warnings.warn('\"font_scale\" will be deprecated in v2.9.0,'\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAqoAAAHJCAYAAABNDRsDAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOzdd6xm6X3Y9+/TTnvb7XPvzOzM7M4uyS3cXXF3SUmsKqRkUQWkLMmO5NgGKEBWYCeRECQRIoQKEMRx4ACxkYYgQID4nxgIZMWKbIeyxYgqK7Nv4fbd2Wl3bn/raU/LH+eaEoJEiBIpXFDnAwxeYO5bzrnveZ/3154ZEWOk1+v1er1er9d7p5Hf6gPo9Xq9Xq/X6/X+r/SBaq/X6/V6vV7vHakPVHu9Xq/X6/V670h9oNrr9Xq9Xq/Xe0fqA9Ver9fr9Xq93jtSH6j2er1er9fr9d6R+kC11+v1/l8SQnxeCPGZb/Vx9Hq93rerPlDt9Xq9PwVCiL8mhPidb/Vx9Hq93reTPlDt9Xrf9oQQ+lt9DL1er9f7k+sD1V6v921JCHFDCPHvCiGeA1ZCiA8JIX5PCDEVQnxdCPGxP3LfvyaEeFMIsRBCvCWE+Onzv/+sEOIf/JH7XRNCxP9z4CuEeBj4b4DvEkIshRDT/3/Ostfr9b699VWGXq/37ewvA58EAvAc8FeAfwp8H/A/CyHeA5TA3wOeiTG+IoTYAzb+JC8SY3xJCPFzwGdijB/60zyBXq/X+/Osr6j2er1vZ38vxngL+BngN2KMvxFjDDHGzwFfAn7o/H4BeEwIkccY92OML36rDrjX6/V6f6gPVHu93rezW+e3V4GfOG/7T89b8x8C9mKMK+CngJ8D9oUQ/+t5pbXX6/V632J9oNrr9b6dxfPbW8D/GGNc+yN/BjHGvw0QY/xnMcaPA3vAy8B/d/64FVD8kefb/X/wWr1er9f7U9IHqr1e78+DfwD8iBDiB4QQSgiRCSE+JoS4LIS4IIT4MSHEAGiAJd0oAMDXgI8IIa4IISbAv//HvMYBcFkIkfyZnkmv1+v9OdIHqr1e79ve+ZzqjwG/BBzRVVj/Hbo1UAK/ANwFToGPAn/j/HGfA/4nuo1YXwZ+/Y95mX8BvAjcE0Ic/5mcSK/X6/05I2Lsu1W9Xq/X6/V6vXeevqLa6/V6vV6v13tH6gPVXq/X6/V6vd47Uh+o9nq9Xq/X6/XekfpAtdfr9Xq9Xq/3jtQHqr1er9fr9Xq9dyT9x/3w6of3oo2BNM+IQmIbQQwLZDBsbhbMy7tUVYOtA64BHyHG7l+9FgZkkIQ2oExOng2JziO0JChLDC2Nr0AM8G3DM48+yuLlfW76U/Sa4dOf+DQ710fMTr/Ol7/8IoffKFCFRFQr2v0Vw2GOuyK4caPivU88yCD1/PY/vsUj3zECo6jiCTJpCXWOSQYc3G1Yu1Zz76aHaY6ZrBAhZZAEGidIR56NUc7bN1aQZjz8rifZNCOOT94gRMXadspzL9wmGVRIanSUtFVg5aB1AoioVNBaAIkm0kaNn0E4dAQbkKOE3auBvHC0PicKhwwZZ0cL9vbWSIZL9m8qqlKQ555YWIpxShIbrJRoPcaWMxoDMSiCEzS1JcvgoQc3mR6XXNrbZb5omc0cZYwIcopsRFmtUFpjTEJd10gREcKhg+HozVtoY7DHLSKCMiBGA+SFCbbxDMeGK9sb+Bg5OTnCTo8JypA3kqaU3LtbIUKkWGvRa0BlqE8FTYAgW95z9QqVWzI7ndPElGwkEMkKrQLtNGE1k9ShJck1k60JeuwROqNtZ8gKRFYQjWEwyDidHjE7KgnOARIhA0IY8kKQ5i22TbBLQ5ZX+BBIWoNSjtpEZJLTzGtkUDjjwWka77iwrlitchZ3Gi5cEwwvOprGYxKF9Z7R5ojj18AeB1ITEVFQ+oC3QACdQGbWqOsVIdZoZQjeEPFAxHuPlF1OKIAQHArQMRLxROkxqSTNhyyWDdY79nZ3WMyWxCjx3qNEZDat+cTHP8l7H3ucX/vHv8bh0QEIj5SCtnXkeU5EsFyuyHKFcApFBVKh8k30sMaFOYtjRVXXiJAwzC1bkwTFhKqJbF/c4L1PPMbXn/sGr716A+c8xhiMMbRti7WWEAOtbwnBkRhD8JEQAlmaIYTA+poLF1Ns4zg7FqgkRyjITE5bl6ytj1isKt599V1gBS/fvEHZtjjbkGaR6/fvgQjEkGPEGm0zo27POJvWaJOzvjFiOJRkA030S47uTjk+sRAUAI4aIxRparCxARRS
OkKwrK9dYLI2JE1SlmVNkiesr13gxo03OT25R5IYbOuIwRGjIDEFNjTovGGybUiHOSRwfKtmeldz8fIFxiPDzbfuUi8bjFE4F4ihJYqI0glFkRNxpBmkacJ82tC0lhgjzgaUUqQmRQqNbR3WthTDlCzLqFuLEIIYBS54bNPS1DVCCLJBzmh9DF5x99YhRZIyHK2zalb4WKKTHBEj1jV477CtReIxGoQQKJGgtKEsK4QQaJOChKapkUIgBQgECIGUkJqEEMB7363zAZx3hOhRSmGMwXvfvQe+RWuFRIJSRCHIBzlXHrhMwHN0fAIyRQjJlcsXkbHm9ddeY7WqGI4HXNjdxIeG2aJBJ0PWJjnDoeDo1oqzkwXRJKwNh2RJyeuv7SNjRgwegSRGQZ4XDIcGF8/Y3lPMTz2H+y1KGbRK2dhJuXh1RFmvcXDzkNq2SBuBBd5krMolwXsSLclTTVNFXCtQShJCZDAcsbm1weHRXYKXCOloao+QgrquSHSBMYa6ciBrQgikaQ4EfGyJURNjShBLhsMhSZpycHiKDzAYFqSFQMrA6eGKne2C42VEa0umBNU8sjFIKBc1jcvwviZJhoTgUcbiPRAVaappAxiVEkWLyRpK27C+s4ZwNQc3LDiDVA6pIEkSFvMSlODSpT3mizNsJbCuJAaQShNFwLZgTIY2wPlnP00NzjuiEKSpoa5rnHO4xmGMIYTunyXO85wQAs45sixDK4NAkCQJbdsCgSTVNG3bPZ+D1tZIpc4fU9A4y9bWFjF66nLFZKIYrcG9O54ohlh5SOscgoRRsUmqE44ODrG1RUkJEaRW3bqaGKSUJEoTY6RtW6qqoshTANq2JU1ThOyuq7qumUxGDAaGqlqijGQymXD3zhFN1TIa7OGcJckrqtLSOk+UiiANOho0kjqUBGHJoiAGRSJzrKxpRUZgzjgfUZ+UfM9PPsXsnuXwpRvc/8MTXr5jyZqLXH6P5NW7zyLu7QGC4WAPpyyurdjcHXP25uvcenvJ1qUh83JFkSpWJzAcBZAtizmEAGmSYgnkaXeuw0mBdQMad8L6WIERvP6bJ+JPJdL8/0h99rOf/b/94X/+9/+jzxZ5gYuRum2J1iOx5EaymB/hvMXZQBSAFFgHSWoQMiLgPOgxhBgJLiKNJs8yjBA4bxmRo4oaHQ1nhyfc95FNioHk+LkZqpIsTi7w5X92zP6NFbb1TJtjPvWvXeZrz1VceWaNW/eOCIxZW9PMbpWc3q65/phg5xoYU5CuVey/aUiySJYnnB0s2VxPKcuKqBJa0SJ1gVSS8SgyO61I0gGuKXntS7f51A/+FH/rb/0M0QmIBfcO3oAAUrQYUjICMUZ0Zqh9IAbF+dKOJhCIROVYnyQMBpJL903Q+RIfYDQGgSMRnmaqqJYtg4lhe8+RxJzFviUdC9AWaokZCFaxwkeD0BG59LRtoMgVoyJhOV+wu3udpoV7J0tiUpAPtzE6RwRIjSHKgPUeKUEJh5YpXkSyrTEbly9iFzWyaSkMNKZF1pbcBqr5gtsnJ5ikYmfrIjJfJzSRSoJiyXhYs7XhuO/BTZy0ICNOtuhyQLlqCDLhbFqzeXHC2rs0DB1yCAwNxUDiFOxe3CaclviVI8hINoR5ucDWmnSUE3zLanZMMgRnW4I1ZEYzGmkigqYWZFlgXESUiCiRQAJ+5aFNEJkgqIBrA94GfDR46RDSEJ0HAX6RMBoWFJstUWlaH5BEjJKIdkR9Bq4MeBtBamKISFJGgzWImqpegIgIFCEInLNIKZBSIgTEGAjRkSQJPgQigWGqGKcKrTTzqmHVWIiCqmxwvnu8Dy0mNQyGQ+bzijfeeIt7h/voJCM5DyCbukUIwcbmJsPhiPl8Rh0aHn/yaWrvWVTHnJ3MCE6zNUkJ1uOsRxlFEAXLUrC+tckHP/wRtB7y0osvMZtOkVKB6BZo7xwhBJqm4bEnn+DqlQdZzpZ4b1FK0X0XRdJM4Z2jaRwhdAFN9JYIvO+p95NmkqPDO3zyxz7NJ374h7h583V0DPyNz/wMAkG1gAs72+wfv4VjRWISTmfHYBZsXdggSVKsc6TJJjFR1G6JROIlJEPN3u4YqQKn8xnOt6RGI4TChcCiqvDBQ5Bok/Ho40/hnOLWnTcQyuKcJwSB1hlSDaitxYsWKQLN1DLfh9N7FltWGNXifKAoMoQIlGWN0gIfIy5oQugSEyEU3kfKqqKqWgiaGCQhQAgRoyVIR8ST5yk+eGzjkMrgg0ebBKk08/miW2+kxBhN8J7FdAnWsz4uaENFU88YGklmDK5tsHWNaxu8bZmMh1y+dJHJZAJRsSprlFYIYpdIyQhSIKTsrs8IWhuIAd8KyrLG2pYQIEtzhIx4H0hSgw+uO1alkVKipCJNM4gS6JK7umlZLVrWNyYsV2fd70G0HBwcslrU3X2dRgpDmit0Hsk2cmQTODmcsjirWSxXbO1usD7IWB7UeDFguVxQrkqEkCR6QJpGdi4M0EmNxzNZS1kuWoLTaBNRWlLXK+7cWHJ4Z4kzS5yr0KIlJpEYUgoZGeZjRNDYNmDMkCtXrrJYnqETTesgRE/glHIZELFgNC5YnwyRpAQX2dzYwMcVPtSkqcZ7SVNHtNQIYUlMRDOkrSyrVYOIgizLMGlCPkgYTyYU60P0fM52tsaDF9/F7VcPKdIBu2ubXL34EA8+cp3lsmQ2OwYcbePIswxEF4wEH3FEfLAIP8QkA7KhxtY57UyhEo+SBq1yBIokSTBaMhoVnBwdo0R3DUjVJdxG5qSJxqhIbkbd++67REUAhHj+P2UIQujiANs6pJLk+eD88xXJ8wIpFYvlEqM11lqstRitaFuL9448y7GuhS68YG1tgneOtumS2qap8Q6iHTA/CzRNS+tqimzCZLyNs9CUFVVVQQzEELq1WIJznhgjIUY2NjbwMWDblslkBMTzJFOhpEQphffdeSaJQSrB2ekpaapp6pa7t2fEdp3xJKd1p1RVt8YHL4l16PrWIYILROEQAjKZ4VtNayxaBmoBAwy5H2JrRTbQfP93foQvfvVlPvSzF2is4ud+8pcYhJRnv/Qsp7MlLuSEZEQeF1SzSHN8QD7wFEmLS2rauWR9mLCcr5ACpIlEKVguuzXEW0hGGUpFgtTgh6QbkfseaIkLy8IP+MW//ou/8mcQd/6J/bGB6t/5+7/0WRdsl324gJGaRAo8XWVU+IiRkhgiPnQLcl07jFLEGMFD1F02nsiIKQpc4zi4cURVWharFrmIZGPFfFrz+henLA8sbMK92T7D5AXS9ITttU2uPnqB8WDB8eGKuorcnUesDdhoWRsI4lmC55RWwGrmOHh1xa0Dj/ORpm7Y2MyQIePwVonKBcE6hBBIqdEyARfY3LzA/r0z0ixnb3OXyc46jz3xML/6q5/nC3/wOSabEh9qjNCEpiUzAlUklC5S14EQIlJJhOg+oAqBVQleNeAM+2+XFENDFJCuJzhjsDJQXHAsfOB0ZTguW1rRoEYBTjRr2wnJeiR4gbQBCIQ2QoBRUWCMoK5aNtb3ePK9H+alV16n9YKsGOC
qaBzDlSl9CKFj3NyXRJu5RMphtMB0uMrXlwv+L0ZEXIMnauXme8ljFd32YktgjDBhF2KGTBG1/9KvP5irUtwXjP8/CBJ99I2d4dMj/JsZVmkMgI4+4yvNN4lyBFgrUR6yJkRPhkWcLGVoHQC6QSFNmU5bKKIOyiiCGV0BGcpO1KTOuwnUCSMsjX2Vhfp6qWpKnm8OgRn/6pv8QH3/sqy7M5Z6cLitGA+aqksitG0wRpHctZTZYM8cFiXMXWxjYffvWjvPmdb7FYzABDnknSFPJCRq9wJ9GpJCiHlx2jNUVaCFAaeb7CElF960wcGs8Hj3OAe3wQJRErJyS2S3E2R2kIKISUKG2j8mczUp2QFB2zZc1q7lnOGqrKIog+WGOIcH+haW3PWW4aqrLi4OiAyszRicV1FXVbEaQjyL6isQ+22dZjGovpu8mtO/d+cZHCjYOTRQRBlmZopTG9iheTvgpvPW3jQUiMrwky1lWW5QohEsajEU2z4O3bb3OymINKCEjq5YrTg8es5nNWbYMXYDroSrjz3Yc8efKYn/2rP8u/8bN/mbJacHL2gKo+xXQWhMOFgNZpDx2PQ6oUCVpnMblfG8qyjTgjFeHhzhqcs2RZHtUpF/vXlRaxhTJEJUuriADSOkOJhBAKpJqQ5QkQKBeBQAuiYrksWS4WUcU0FtuZ3gKg0Srtk9wRfaWTONzNZnO2tjcpioKDJwdkWXKxxkySlCRNGQ4HbG5u4F2gaVpGo+GF/zGuruNWyhjTq4Dioh1MCHlRtRvXyT1AHoFOZN/9LrHGUldlTIALixQpTWOj7zhRzOcr2q4mBMlwsMv1p5+NrOOwz4vPfQ937r3FZDOjtTnbe2vMl2dUHVhSNi8r1i93WB9wy1g17bynXDUQJF3jOHh8ijMGEWJDHFhscBAEqS6wrmMwyiOuUGqybMAzz1zF+0DTtuhE935oSZooikHOYJgzn88JQD4Y0rYOEeLhMlqvkn5V36BUgjVtvKaIHtAsyxmNhvEQLSU7O9usVqsLIP0fHwDPfZLORguMINpzrPUEby9oEsZ0KC1RWpNnA0BeUBxARkWceCBPU02eZaRpijGmr0AdkCQJ5aqKAToiGqvrOoDebqCoquritTVN07/GGLYzJoZw19fXmUzGUbUXsDZdQxpLFxakKkMIhQklQgsIihGe1lmM6S489VKJC4/uu5Wr0LVt7LuXAm/P74GBpuk4L2wBLhBh56irGGLsA27OkuiEPMvoyV/xPtPzVrMsQylFWZYxAOk9xhrSNLv4bLzrj7Wx3KSqOTubk2cTjNE8uX8GLiXVGW3Z8sU/vM3J2QwfPOVyRrMIZCrn2pV1Rsk2x/MV69NdTk9qnjw8xBqYrg3Js4zZ8hQRXGTDymFsEu1tJRcoQZVQDJJYFBHAB8tgsB6fMQ7+w1/8UxCm+q9/5TOf0SogQ7xxLI8nvPTBfdJCE7TjO38kef1LZ3zh7z/hS7+x4rRdkjQT/t1f+DR7L17ia3/4O/hhRzYI1I/2aGfHXL2puf/YkilB0wWODgV71wI2WE7OAmUTT3ROeBalIwRPIhVGO4S3FDqlTgSFLqiaDmFbholj+3LGj336BX7o5/c5O234rf+lROYatREoncHOJc1hx2gk+Fv/1Yf4C3/lI3z+t2/xjW822PyQtR3D8d3A5a0MoU/48z/9Il974wHzVc1gMMa4eFpLU4V1ka/3iU++yHJpWR43JAPJWAfe8yHBoBgyWfc8fgiruma86cmGOYIMbTNkXTFC8NbbNVdubjFNco4Pj2jrlNPbB5SLmny7RmYZ092OahagcmRKU9aGna1LHD45JRlqVB5QJnDwoKE8ari0l9EZQYWApCDNFSu/JM9HmIEl057yRDBrAgGJ6CtwsyQlH0Q/WzCaNItNQuBj7akEYyIeJaqMgWExRSUGlTisEZhOobQjK0AE2Zv8U6yPepxKARJsJyiShDAMFLsFjdM41dDlCWWbcfqwZWdjwvauim1gpx6RJFgXuHPrLvfvP+Hn/uJf5+mrn+T1r73JO9/5JlVZkU8Uww3BomrxXtK1hkdv1aw6y/paTrVoWNQaTEuWFiwWC4xteghyrFKVKqGsKtJcsr5VMB5NqUqHNR6t47qqbZuIFRIgQiDRFiUCu5tPs7dzmf3LOzw+uM/x4YKyqfi9f/4HrI4WPLh9i1W94IX3voeTk0NsV3HypOW9L73Kp37wozx68hBFYDzJ0DLjycFjympBvYx+zp39Qexrd4KubdnY3IyNL86SZRnBS5yTKJ2wtbnL3u4uxpmLB4j3Hi0F3lmUFD3axaGUp64dwWWkmSDQ0bUW06/pBBKFZ9x7edGS7f11Mp1TZGN2ty8hhY+BsK7FulixmmjJeD0jG6cgNCpkpCQEJyBkJHmOIzAarzFd26Q1js52GN/iRYdQEeqvlML1w1JU586HG3o/bBz40iyu/trOkiSaK1ef4uq1K7gAy4XB2XMuZEBrydUbV9m7cgVrO+azM7JEsz4d0dQlBMt73/8enn/xWcp5RVO2kHqMqlBK8f0f/kEu79zgH/76P2N39yk+9Wc/zL17t1nNIuJlVdbMTiP1whpBXRtMZ7EmfhbSTPcYHoF1DqkUgyKuW5erEh8CUsXmnfFUM5mmdG2N66JK5lyDyjwqFVjfsrW7zv7eNk8eHmC7jnJV0rWB8WBC27TxwRk8WkvyPMX3XtXORJxSTD3HtXFZlszns/4hHNVqKRVN0+JCbBlCqjjkSEHXtbRtHD5c7+k7X6GeJ6u99xFGLgRSy4sDslQybmaUghCQIsEYjxCeEGxPEYjPhCtPbbNa1hgT24ikisOttS0P7i24f3vFwZM579x+izQXBK8pz0q+9bVb1KuKJKkxbUu11FRLgWtS2rKNFaYtKJmiJHS2QesIRxdB4jpwNkWnnizNyLOE6XRCWS0Zjwp8EGRpjpI5x0fHFEVKa+qI3SPFGMuzz97guWdf5s033yIf5EzGa8jE4Ex8D3k2hCAiSlBopFB0Xd1jrTRpmmGMjSgpIEkUdV1TVeXFwTME15ckRBVQSRWH935VL2TksJ77lJXSvRczetKDl1Fh9DEARlDkecp4PAYCi8UiKog6guGFEHRtDFEqHQedNM0QgovhM8uK3pPLhZo4GAz6Suqosp//XpIkVNWKwWDAZDKlqitWTYOTGUKmdCGghER6QYekRSFcFzdVSiOV7DdICoQk2PNwmuuvSQU+9Id098d8tvpfmnvOD2bn2Kn4+TAQ4vtKswxj2ugDTvOL99Y0DXVd07YxyCWk6CkO74ZUzxFfET8W64S9E1gnqecGrKZZttjaEKzDN8c0sxXtrCPRQ7qV5Sd/6qf43k99kLduHXDr3htY1/HWt98hzRS2SwDF8fGCybjAOUmWrOFcwGMQsiHNI4P9vF7aWBvLOqTCNLGtTEiHdfAf/Xt/CgbVX/p7f/szMU/hSdNocr9/Z8HNl8fMZw2T8YjP/uNb/ORfGXPrdcVr/+wJO3tjfubf+QX+3j/8DSr/DWqrmN/OefBazc7mkOk1w/HMkfgMMsvG
RoF3ggf3IxtRSHBeIJUkkdAGgTSCfBrY3RtDnTDd2qWpLZNJTp7k5FnNyf0B/8f/9ojT1uIf7/LaP79PPrGEUUC3A8QyRQ80e89KvnvH8Ev/w9eZVYeMNxxXnknYvd6RTS/xoe/5NG9/Z863X7uDQZDlIKVmdtqgJAgde5OtCbz3A7u4QYOddVQngmee3uXVj91g+gw8+2zBYpHg1RBrHFIl1MpT6DVGScbmtWf4wPe+yOJkyYM3j+lWNq48TEnnaj768T0aeUZwgtkDTYLDGokThrYz2EYyXdfkA4+rMmaHmlGi2H+qA+nRkyFaJDHlm+eoIqHBoDrL0a2AnnisCaQppDpB0JHngUE2oi5blIiNID5Y8kFEVsVVsooGdwLOyeiXw5EPIhC/WlkSmZPlHcFFzycShJMIJQhuwFp2g9QJgrIUXjDQKbYcMk43ubx3CecMy9MlwzXNcCIYDdbIkzGLsxMGE0WaFxw+OeC3f+PX+e53v0GiS7Yup2xd0tiuwdmcQSbJpEILj8saqlVHkiacrQLCQb3KqOpln9wERGC5NAgJOrEEn3Jy2DA/C6yWHUKBUjAYDPuHeqzbS3XC1naKd10ccBDkgxQh4ejoLkoH1kZjmuodkmLJqrU8fjhnlLYktmVzZ8qNF1/CSM2Dh7fpVi1rkxuMhlfo7AzPkk9+6vv40R/7Ub7xxl2W80CaCj7ykY/RVEse3H9ItWpQaJpVIHiFNw6tNNeevkYgUFVVXFlKeZFuDd73g4vCe3A2slMHI0+Wg/eO4FT0IwrNeJST5YrNzSk7O5f5gT/z01zZv8Hbb77N6fGCJI3BkHiN5KRJgfCel9/3EtdffIbFbI4vW7zraF2D1LA2Hsa63K5jd2eLwTCPTWsqRYq8h/VHJFLbRoUm0hQEpjN9WpdYLSkgzRSJjoeqpm7JiyHjyQbL1YrBUDMaj6ibBq1j1eudO3e5e+82VVWSCE1b1ZydnmK6GqUDQgyp65SynONdSYomFXF4+8OvvcWXvvYa001N1634+mvf4vH9UxAejycvRqyvr+OM7YeAuFjVOkGrWMkrZaxrPO9V9+eDIzGkEQ8JguFgytr6Os66Hp7v4hDiEtI8J8sKTo9W3L9/D+eqGJbSKVefusLly7tY19E0FUmi2N3dZTpdZ3a27HFlgc5E716iZd+jHpWiweCcBRtZrBGIHliVq/h7SiBF6FWzOCQopftWJdGvUSOmyEUvSFTSfBxQiyIHJN6GixWs95asiPWyvgfNeyPZuzzmA69e57tvP2S5rEFaREjI9ITOLlE6MF1PYxOfaNnc2uD973+Oq/tD1qYZw2yd5WmDAqajgraymMZjbd2HAQUIh3UtUvnoIfWetPBs7oyYbma0rWBtbUC5muM6zQc/8F6cyaK/Oy+ZnZ0xHGscK5aLeK9UOpDnIx49PObu3dtMNx1J6jg5qRBobNextbWBkjpuTry/uNYlcQOQZdkF1D5J4jXTNA1VGT3xQDxsK9EzTc27h4Ag+8BXiGKBEhdqporsuTjcqpRExxajpln1wxoUg4TZ7CzeQ3o2aNf/vE1ncc6jkvgzP1fKx+NJ33IksJ0hz3LSJIVAFDpCFDyi8tr2yqrorwcoyxWr1QpjDdO1NZZHp9jKUHhJknqWbUcuEnzTohPZX5fghbwoOQg+9GEwhdK699nKCxuKELIv0Yih4hDePWCdH7LOr2Nro0/3XA1drWrSVDEej2lNFz23IXqJozoL9DmNLM3JMt0fJnoPqNSRs9oZvBOkeWSSe9eiJCQ6VvB6G0iHhiLd5Id//OMslscs2pKqLvniF/8Fb731DtONIft7+3hvqVfLHsVlSRKJSg3eR+ZrZ1akuSFJY0OnMwHTaQKmt+8YlEoYDFOsNf3BQfLv/9t/CsJU/+3f/U8/I9AI6Ym8MJifevZ2thmOK44WZ9z5Q8udbwrqmWArbTEBfvfzX+b0zluoKwd0peY7v2fJM43Tls39QFU5MAqpYX3TU61ivZd1gTQTEGJHu5aQDkAbT2DA/MySU7Hz1IA83cPWnmxdoKYlZplw5WrF3vMFn/vfv814LccmCq8r2tOOwiUYK3Ci4ehRjU5X5EPwQTHcAF0oOn/Mt771ZVBL5mVLXVXxRl03KCFwVlysw52Drq4JMuPSUxN2djJ2L1/j1Vd+jDo8i2MAZsH9ry/Z2N/DDJeMyXj7y4dcee4GZ6bmye0zxvoKV5+5yp037xGsYqFqvBQc3a1JsynSDWkXC3IVaDrQ6YiyrvEOsJZqkbKYL2ibmr1LGVeelhGO7TXCa9xqwWi6hhKBTGZ0JzW4lnwqWBsOKXo/02RNRAi1iUlu4XOEbi9qcpM0oW08zp0/cCVeeKzrvU9CIETHuBggbGBYjCgKyWIRkMKCl0hS8mKCttucPTykXlTkWYHMV+TDKVJn7F25wtXn3seJn3F6OifRDcPdjsWyYz4TbFzZxHYJTx7f4/pTC3TeIqWiM5KtacGLNyeYzmCrMWerjs2rGRtbQ5bHNaZp0ElKOtCsylNwMaBjjCXPphjbkecJ49GU1WqFdzGpLaRgMs7Z2d3h2pVnWS0tRTHCdh5YIaVjWGywsa3oOolUKUHA6ckp3rWYleFD73uVbLDGooOuWnJlDTIl2Nt/H2ezkm+8+TVsC6kc8dT1yzhRIpTn5s3n+bf+zb/F8UHN7//+F3n/qy9x47krvP7619nZusJPffrTrMolR0fHuNBgnYEQG3weP3nMyekppjVIztPxEq00WmlUr46LSL0mYMnzjNFoENXKxpNq2Ls0ZDKdMF+UrG9s8MrL38MXPv86X/i9z+FcS9vF6kRrW0KQQEIQAucVs7OGJ/cOOXlyQlk2fWNZ4OZzN1jfmPDsjRt4K2iWlr3ty6wWFeVyhTNd9FD2Sd64Ok77wERM8HofE75CvuuD1JqY1hcx2Xvn3m3qqmQyHdAZQ1VWaC1I05wiHZGKNLJXhcHREbxDS4lEcHZ8xNHjhzjb4WUAFVBJwBqLCCWektl8ycOH9yhXM6YbKeubE/JsE+80dV0xHBSMRiPquupVpvhziAii9kLZicpNrzoJgZBRdbQGypXh5GhG1xoGgyImjL3Ge4EzHUpKklRTDBRZLljfnrC9t0ndVFSriqapGQwLWtMxXyyomgaVJDh8v8b0PW3C9MpcPIier5IjxkhSDAt0oplOJ4wmI5qmwtru4utiYw8XvveY6O7T5oj+z+PXBuJh6HzFHHxEN2W5ICs60kzhncIbSQiap67u4EPNw/tnWAs6AaUExSDhxvUXePmVZ3j8+Dj6pO2A8XiK8BOePATnE7QeIrTA+I62sz1eKdp3Ah1Ih9IglF+Gye4AACAASURBVMBYASjSXLJ9aZvLV5/DhxFCNqzKFXmyzc/+3M+wvrbN5z73BbZ31hmOQmwFEobxJKNrE3Z39+hsRd3O0SmkiUYngJd0tYhUkCwleMHZ2Wk/4CiuPf0UVbWIdaU9TuicazocDi9S+mmWXiTvheRCqQMuuJnOxoE7qtgerVRsj0LF7RmQ6B5hlw8QMmBdrGGNP/t
3W5rOqQHnP0cEZHksHEjTjDRNLtL+Fx5NIWjb9uKaOG+5cs5RldXFgSUWUSz7z0FcpSupqFc13/8X/lX+0i/+NYK1XHvmOtMb+xweH/HJT3yKg0ePI9mCHt3Vp/V1vzFCRNpAfA324h54nsw/X8NLqS5e9/l/IRJNovJ8ju9yjMYFaZbRtLGYB7go0zj/++efB6XkxcB7MSTLeGAdDAbxcOYM3reEYOnahp2dbdrO4IKi8i2XLr9I2VRU3Zwf+bHvpeuWXLm8SZGMmFcdBIV3DXVVkmYBgsS6mr39NZSK6MDBMCNJNfOZZTqZMp6MWcxXJFpEcUF4tEoQIhaBmK4F4fibv/inAPj/3/+vn/mM1DG84G2CDJr6TPA7v3bI9vqET//cB7nzxgnf+SczsrJhuAPl3HPntftoccbu09tka4ZUKR7e8eQTSTYYsVpZBpmn7lwciBAELG0bHzzGBJI00AiBbeIHYyVach3ABg5WC5q2Y3OiUMqSy22kPcQj+c7bLYkraNqAAxKXop1BMMIXDX4YWHvasHUpQ+QKs7LsTgVXr+bsPJVCKpkvKkIXkEHEtYIAHwIbmxOcAe8CWRoTtZtqk6PTgvU9yfv/zKscHMx4cOeA8doGl64MaQ/mPHp4hGss8qilO+x4+613+PDNZ2n9A37/n/xfHB8vQQlW81NGqUZZMFikN/HfbjucT6hqCcKikhQhDKOBI/iW1SKNCdvMMjtRFFmB1YZycUq9tKzvTxCyRegzDm57sjAmzyzLk4a2DGxtJ2htmZ/ZfkUKPrTIJKp3gthV3tYRUi1FBK4bG9tRBBbnRGwxkR0uWI4fG3b2BqRFQGeOVEUEksob0nTOQAwYpDtsXtql0CNCOGTr2gZ37s64/fUTtPTYpiIJA5JJgvWWQb7NvQcHzB4vcGXLjWsFnU148sCQiAG7mwkpipPThqprqTvPWSVoliO0BBBcvjKKqeRTjVAOqWA62URqxWQyZDRap65C7Dr3Gc7BU1eu8lOf/tepq4Zvv/lVpO5YrZZAXKXGxK3C+po83+Dpa88zn884OjqGEFCkrG1s8/DolNPFgt3tlGv7goNZgDRQLu4wGYyYzzo+9YM/wCsffD9vvfMWx0enKN3xD371V/mtf/pP0WnHpz75r1DOE+49vMXHP/Fn+aEf/nPUXcu3v/0mjWkRWsXEW3+yDuH84RTVgLT3rkb1SuBdF5WXnn9ZLl3f+pQghWNvb5O19U2OTpd0rmE2P+NrX3mD5eouo6kmLwYMxinPv/gMm1u7rMoG503kF9Lh2gpTlTgZyEY5a2trJGnKBz72UV7+yMdZtYHvfvcWOlNs7GzgQvTr5dkgcly965UfekUnBh7O4ffRvndenRitOVkWO8Z9cIwmBdO1Nc7OViwXdbxeJZGj6xqMKftiA0GiUpRIaBpH03hkKtA5BGtpSkvwgiRVxLBBQms6ylXD9q5kOLLcu1UyOyuxrouKUBsb+U6OT/DOkaYa8CAcSRoPe6Kvzry4Iferwji1gtKCJIn+xSJPKDJB1xhcsCgNiSpQSrC5scmVS9dxLiXV60ynOxwenLKYL0FKyqrCGNsPNQHrOmQ8n6CVIkmTqHYRvbAheEzXIYRifX0d6xzOO0ajIUrC7PSE4Bw+RE9q8PHAd/4rttXFgUSr6EF9N2CTRMU+xPuJgFirGQJpAjv7A5SGcmEJXsUCEqE4OSyxVtF0LVprEp0SvGQ2m/Pmd+5QrxyTyTrGzVktO+7dv8fR6RMeHTzg6OwxTdNhOkFTu1irbGIo1AcIIqc1DidM9H1rx+Vr66ytXWUxEyyXS5rVimeuvsB//l9+hu/5yEf45V/+FeaLE4QghinTHCUTbAdSOAIVybAhSSVd6+hahxQJqR6yvb1LWS57dJREZzEshdBcv36Nvd1tHj46iFW2LlwM/E3TXHg8z/mrSqseXC/jml9KpIpYq1hzasmyBKUFxhqk1Cgt8RiMNUBkriotsLbrMVnRx/lunau4QGhZaymKIaPRmOFwRJqm/c827T3I9YUCmSZptCX1A5wQItoS+uCRs5ZyVcZrsFdbvbfRVx0Cxgvu3znmC7/3OR6/9SZl5XF5wtG9eyS+4OjsEcbEryc4grP99RvwwSN1HDKjahrVzlitGj9zvufOCnGOaOrVdeAc6m+tu+C0KhXvq7EcwWPduygwH3wfVBX99072FhLXD8Pvfs6lEgyHA/IswVpHVTU4F0jTHCEVUQ8SjJJNnjz+LgdPDvj+T/0gL730EY5mK3aeznjtW39EvfLIEAOWTdmilaKt40Z6Meto6yi2KK0Q9Cq4DVRl0ydtIc0j7SfLUhAOQcr1Z/ex1vI3/tp/8Cd/UP1v/u7f/ozHkxZgmyHTSWD2cEguBIvjBUmzQ+lm7N0sOLjdQhMIiUJMBzx4XLKznqG3K55/aZ07bzRsbm1TDK5Sm45xnnLjlUC5khw89DSlQqhzU3iC6EMW2SDFWwMOlB6QBIl0gS5pCb7j8e2Ke6/XXMlvcmnrGgffvo/uNDJtIS0ixD5JaVXOwi3JCk+mCnzXkooE7y3TXcnW5ZzFUc78qObxHYVDkCsQKuCCIE1SBiNoy46u8mxv5eQ7nsXjGXv7OUfHCaflCVv7GcKc8Tu/8y+Y1bfYebGge6J4+NU5AYUsHNNszOvffosrTw94dj/j8M3HiOBBOWTQDHROCIZhvsF83rC1lTHaEizmklT6mAbuUtY2c7QQLFcNg2JIPgncu1vTlYLh5oBmUYIMqKRjcXfFaKKYP3RUD1eM8g02xhucPFkxGqV0XUtdCZSKytNgDHmRUteOtoG2DoQQVytKxapV5+jDOApE9CRZJ9BJjsoUq4WjcQ1VKWOtaJKwbDumU4kq13l49BhXC84OG6q6I+iSurZUyycsHz1Gak+qL/H4YTz1l6slwywhEZYPv/wqL954hbdv3cW6CqU8QVZc2rvG2WrB9t42iRuyrDqSzJDlHc4ZxuuW4VCTWE8+gc5ENc2Hmtms5ux0BVi6VrBcLbC2pmkr/uiPvs5bb93i53/+5/nEn/kEX/nK61TVDCFSOmNZVRWCFOMCJ8cLXnr5BR7cO+bylT10Fjg4OeXw8JS2saQ6oWtGPD4t8apCi479vWuUzYrDoyNu3zpB4jg9PmKxWLKxsUbXrbCm4/XX/4hvf+cNvLd899bb/Mr//Pf55je/hVAC7yUCRRCuv1nKiJORMtbiJTo+qJQkEPDeoGRC17mImBKGpukIIWFrc6tX4DxnM0tZxfBTlmte/dD7SdMpJyfLWMdpGo6PT1nMOqwJtG3Xh3biw6cYjRFJxqUrT/HSK6/wkU98H4vOc1LOML5idnbMaDji9HTBwdGMqjb4wAVUPPSpcnrOYUwJR1QVPZ9R9JWcURkmeiGFwPYHzu3tnCxNqOsW5zu8T7BBo5IUqXKa0lKvWkajgve8cp2PfPIlLu1vsjZc4z2vvsDHfuwVNvcnHD8+xTUO7xS7uzu8/PLTZHnHzZs3+MQnfoCqqXj08EkcRBOJEI7BMCFJo4LlXEyXE1QfzPyXVbA/rs
oUuWYwUDSdiTzfyYCdvTFCEocr6L8/jrZtePDgEcvFnLbpqMp5//dzijxnkBdkWU6WDbDGo0SC7geD4D2JThgUw8g59THJHIjczz6fT5EX/aBUXaw3z7PihOgnlkL1QSL3rjcR+kE1qrZd1/UqboTKJ6kC6XAuMJms8dGPfpDBMMV0ntUqHi6cDRwfL2KRRG9Xif+uI4gSpTuC6EizQJGts1q1KBUoMs0gK/r6UYkzFVoGlARn6t4fnNDaBpV71rdzBlPFjReuopOM6eAys7OKs9MHaFJuXH+Z69c/xu989nf5zd/6x2gVQ2yDwYCdnS1c7/dbLpe0jSEIKHLJ2saI8XBIXXuqMoL5V8sFAUiLPHqAe37s6dkps7M5bduidXqBhzq/Rs49lee/71ysrU6SWFXdtR2uh/orJSBIkjTvQz3xwKWTwGgSQ3RtEwforlthXUcIEq2z/sAXh+C6jszNLIuBwDRNGQ7HiB77dH4dN03DcDhiPJ7QtR17u/t9OMszHA4v1MXRaHRRPhD9qkmvOEbv5vk1kkiFCAsyH9FPx0cPCIuS9730AV77+pdZ2x5iTIdWmixJCN6RJSmyDzkleT8kS4HkXCnt71H94Pn/JBHE+S1er9Ef3NMriASRqEDHwFcMZDUX/vlEJyRJchGccj3+63x7oHWCMZaqqns11ffXsgQUSiVUVcXW9harasnu3iWyQrGz9xQvvOcmn/3s53jj66+Bj9SkrnU447A2ILzGGk+aKuKrVeDpSRrxfTnvIaRIMSDPxiSZuHgfu/v73HzuGY5OTnnuheusVhV//Rd+8U/+oPo//oP/5DNKgXWBzd2M1Wng4I4jl4LdtQ1++9e/TpF4BlcV+dp1Hr1zgJYWrw1UIygEaZHyxmcddJp0mHBwUGJtznyVsnkp4cYLA+7fbvCuQBUVxijSUfQKhjplXQZuPLPBIFeouqF8YhjIXYb5gMn+nKu7moN7NTde/BA/8Zf/Y1774lvcu32IGrYkhQOR4BYwHkjW9xy538a1HY/bjgEedGBZK27dbnnwwDKfd4ROITAIGZBaI/pkeNd1ZFqSpwmrWYN1iqAFwlZ4NcQtLW+88WUChlfe/3Huv1UxuRS4/vx1Tp+cMj9dUUwyGm8pLDz+bkk3DCyOOm4+PWRjusn9BzOMiqe/VDlaO2Y02GG28DjjoAvIxJGlmrNFR1t5RuMU29RsXR6SZmPadkEnJS6FRngmepOTb52yLC3dSlPolpVR+HZFvYLjw5ZiVOAxID35IA4F1crhnaRpAlLEEBlAnqdY66KCqnxM86o4IAWR0BiHkwbhHXUb1ViJJx9b8ommLTN805C5mnlZkqYG20JpHdtXGibjAcXGNXavrXP38AFN3XJ4uGJ57JEB5quKm9ducvbAc//h26SZorGKbOqYLU/RuWRtNCIvGo4PKxK9YiAkZqU4mXkOjmqU1lgnSNLI5NNqiNYwHKW0raFpa4qBxvtYnzqeJCwWJfduLfnDP/gGs8XjfnVWo2RGlkdPb1MbmtpyNntCZ1dU1ZK6bVhVdQRMy4xVteRgPkeEwGLR8mM/+hOsqsDh8SmPHz/i6PAx9eoEF2qG+TqrRcNkmhEpLo7xeIQUluBbBoVACtczTx1SeKSXBGt7T5rCO0vbxhWe1rK/YSryrMD7QJJKglcURc7N57fIC0ldS1Z1hQ0NZb1kZ2cdKTTra9fY3L7Mg0cPOTs7I0k9OnOMRhOKfIix53iaBqkCJnR0pkY5xf7lK+iNgoP6jKcuXWe9WOONL32VkZTsra3z5MEjzk7OcKajqVdY212s5Jz36ET3LEiBVuqPgarjui8mzz2JTkkzhZChX1umPPPMM1gDJ6cR9N11Fmss48mQS09vc/n6BvtX1xmOBhweLFidep5/z8tsXpry5Mkxki02Nq7Q1HNmZ4dMpgWTtYT79x5QrzJGxdPcvX3EbPWYtU1H13iW8/PkvuK8cQj6WmIZ0EksOjh/yJ0HLdama2R9krrrfD+sxNrU6XiXNBmxXCx75Ty2xRnrWN/YZDwecXwcA1VtHQe9qmzwnr4yM4aptATRK0Cuf1iazlyEiM7HS2OjItO2USWuqgr/f1P3Zr+WpWl61++b1rSHM5+IODFUzlVZmVmVVd3tpt3GchnL2BgPiKZBQjKWumXA2HABksWFRYHEJVzABQIhuMJgyUhICBojQEa2bNOmu3rI7srMzsqIjDnOtMc1fhMX39onsiX+gPa5SkVERpyz99prvd/zPs/v8QElRFphqvRwhaRa7VTZ1LVukGN9pR9rKHdDC3BjcXBuQAiHd4FMV7zzzptUE8V6veHVqwVlUZEXgjt3TyhKw3q9SjWlJJLI0Esys0eRV9QbS98HpOyxdhwE6FBSYQeH1G4c8CHLJaenh5hC872fO+X+WzmrdUtRTfBBsLqCdn3NcvOE4/0Dvv3eO/zDf/QP+dVf+xFDGDi9VfLwi0cpYOs8zRbs4Gma1WtKg3bIqNksHetVw9AHzs7uIPTAar0ZB5tUz1oWU8oqo216mm5A65TK3vFPd4pkVVXArp52DBeO15i1yS6T1rjgXJf80CJPwSabajKVdpzcnuGdZ3Hdkmc5gXQoVLJEoJAqqaQpeJVW3K99yIa8LKnrOkHvjeHq6oosy1itVlhrRxrAlqura7zftW3tVu2p+rRtmpvwknNu9NkK2ralKAqE93RhYJblnLcbvNIclAdMj45pV1vO3j5ksViksg+VWL8xePK8SAfbPEcqhXcOreTNZ2CHi/LeU1XVzQFASnlDqtjB8HcVrQl5lQ77cVS4kRI1rvn9GBbchad2Q7H3KXmffi2F2KbTCX0/MIxV3umM7bFDh5KJF1xvt7x8dU5ZTJnOp3z5+Dd5+uwpZZHx5PF1OoSLdB1riuRtjpogG0CQ5wUiRhCR6BUhmJEJ7tFKIGVH4wZOTu4xmWes1y1XiysikcE53nznAX/xX/qlP/iD6n/1t374wyAChpLtVc8XvwlFUIQuUJ7uUZQB2TmuHw3UjSE7kCzOOyo5o2eLsBnf/JmSV+sLHv+W5/6tBzR6w6LpcX3No388cPEc3nx3n9l8hvUbpPZ0vURmMJkJtp8bNheKo1t7vPq9BoVh6yzvPPiIf/pPfIMvPnvF5mLCl599wv/83/8trB0o95ISUxjQsuVyAf/iv/pnySvFxdWavDwmCyWrlcOHgvqqgwtFKEa8RhkQiWmPyQKZ1gjvkEi8F6jcIjPQAjIRWHeBoV8x0RV/5E/+FLV/hd+uiHbCxdOXXH5xzQ/+uZ/h2dNXmFVNdI5aKCoFq60jSs+f+Wd/wO/++JrLq4Y3Twxvnwy8WAmmkz0ePDijiy3tZkP0giZYhFUUk8R1Q0SE1ixe9eQ6sndPshUWYSNVKfDWo8noznukcAQNBIcpciZFj4qRTGeoORRK0COxHkwusV1kOoPB5gQCegQVW5lUktT5neoPtZIQHVKl/1YUTKTHlZKD0lPOMnKjiE7iV4piJukHjRVQ5gV6uSGXGrk/pchuYYqM1dWG6WTO/nRCoQLtEJGy5PrpU
24dL3nv7TNW3SGideRV5PFXBuUU2cEaN9Ts7RXszzVFeYCLgaHzYAvaruf4/jGqLOmHQLAegaTzkdYGhE8G/ywfu6EHi8CDrLFuhXO7h3kcawJTw1cIkTtnJzg3kOWaoe+ZFhMybdBasH98gPUD05mnnMVUGlBXPHz4ivV6wbff/x4nJ3fYP9qj7lZYP+C8o64blosWPxg2mybVHKKJ0dB0lqbrcT55hv2OnYIYeYESRGLkBg/z2R4xpqCEjwPGwNndE6oq4/Kqpu8Fs705eV6QCUXf9dx58AaqyLFdw5effcF6uULItHo0KqNr7KgiJfC+lJFMGRQK21uqyYwsL3jx+AkvvnrFdjOwud7w5hunnNyZcbm6pu0bvI84Z1GakbGZUsoISWZKtCpQOuHOkqczecaliNw6PaYoMpqmRamCLNNoExhsy5c/ecr19WK0BAjyQqFUxnbTslwusV4yPz7g5M4x3bbn4Y+/ZLOpufvBezx45z4XXzzn6cMF9bKha1ccHB/wwUff5sG7c/JZCwIeffWUZ09fgJ/yxhvvcXg0Z7la0dYDeZZhlKTMKw73TnCDu0nK74IXuxRw3/e44JNKHh1GKzKTkDVdM7BYvEyDeJSjt82PGKSS+XxOJLDd1uS5IkbL0LcQPW23xbqGYWjph3RwyrP85uFsnSXPcmD0EtrkyVNSIkQK34kk/iKVpigmhJg8bin0k+HGgWTHltylw/cOj9CFJtCitSQz2YgVShghYoYZ1TSlJINtePjwFdttj3Weph0IXtC2A3XdItU4bPjkn98NOM4lO08kBQaVimN9qMPH5BFOSqOmLCtUXoBynN19k4vLBpMnBu75szV+GFITVd2Ta8PzxxeURlMWgYvLz4lyRb3t6dqINgV1s8CFtMKVUmOdHC1TEkFOnk/JTEGMnuvLa6KIGGUoixKiZxg6yqqkrIpxY+Vw3mKtpe1adhWdVVURYwpJChmQItU2O+vHEFha3Vsb8U5RTXMevF2QTVZcL1YIBEcHd2nqhouLNXmu0WbniU5/hw/ptfLepVYjv7unxBsLQNcMrNdLZtMpEFivV1g73ByEtTYpnIckz0um0/nYutem1rJgyfOSutmMBIc0RDZNqqUuipw+WGKAfFJR6hwVItZbVosrjDF0G5fseDHyrW9/k8EPqWKYSPQR26dSlaqoRi60RypJVhZEITAyERZiACnAOz++zmK0YkSsTfYoYyRaCYhhtEYonB2tBgHyLKeaTGi7LtEujCaGyGw2v2lbk1KPB1I9UjL86NmO45YrYoqSW7dv4x3cu7fPl188Ji8liBSm1CZjPpswNI563RNc2px5nyxFImQQJW0zoDONkIrBDkhlcKP9JAQ7Pr8j63ZA+ZyyqnD9hkWzYXHekZlj/uov/+U/+IPqf/q3/8Mf5lKjguLioUAOPSFmWNFjvMcUisurNaaYsVyv+Nb7Z9QbR9Mm1MHViy3z4wl71W2++uwSsEwOZlgHhICZBvou5/oyZzorMMpTb2r2JhlH+1OGc4dYZXRdj5pZJrNAXgTyecem2XD9FNZXC+pFy9FRThg6ZlUkL2rWF2BmnrLKubrsubrecv5qTWuf8+bbt8izCXa1oaxKJvuW2UkACWEQDE1kUlWgAqur1LDjnaco5vjQJoxPZgjKghbs70+4f7qPaxc8+3Jg7/QWkwfXHJkr2vWEuCnobI2qjokXnsPC0peKIAeE8HRbwWz/PkN2Se+WFKXBlHu8PI/8sT/5x+mblqifsVxvEDFPHDySx7AsSrTO2Wy2TCcVQ2+JmcfnETt45tMKu8mgd1QTiFEjyGm2KYSUF3P0IcR5Qx8Ctg3kWWofMkTqAUIsiXFAFSlJKZxAWYWPIfmerEPrlHbWRqDkaOGQcOU8RhraTjOZGGZHATUXnH/qubzQXL1sCL3geBr4xn3Ho59YfAzcf+s+VXWPg+qExfkSpaec3jsiMrBabIl6y9ndj/n0swavlvzyH/7TPDgfuPuzb7O42rBpPN7lnNwWbBaeixd1wqb0Pd3QEwLsHRzS1h7rLPP5BKyivWoJXWp18l7SNA0Rhzapv3x/75Ck5DREBpROSs1kMkFrycHBnM1mzXK5petaTo/ucHx0gPee1aql67rUYiOSuuAdNN0ls30Y+sh3v/t9TOH44osv2S57PvrgY+7fu8v3vvcRB0d7VPMJH/3091FlldZiwHCTxBU3K3+lX6evd3gWNSowu9pEKRgbWZLC0bYDXTfgQ6rn7LtA2zQcHe+xd1Txznv3qDdLlosVWhmUgulkgjFFGmjUCAuPARElNg54epTWEATNZkPsB+g7ihDZmyuaesmPf/sRr56/wIcFdgjEkKP0WN0oDNooqipHaUGMjhgiQ5fad3bBn5Tg3SF31MjpjKnbXZnUP4/Ax+RxnVRTzu6fog20q4bYg28Md2/d4dsf3QHT8OxiweJyzVQb3n3/Lb71/nvcv/2AetjQDA2xP+DzTxd88XvPePSTlwy94/adWxid0zaWofNjZahj6BJzuJhEBt+z3rQ3UHw/9r2/VnTsDWYojnWv3juk6kF0hGDHYSDB9gWpYa7vB5qmpu/TUBN3KWu4WVVaNzCZTDg9PaVtW7q2Q2vDMFisdXjnyUw2HgBS6AQYLQmMTMnX/eu7r93adNcQlMgSEe9i+ntMie2SWgSavgsIpYkitRj1Q4fJQQnFxcWa66sVV1cr9NiU0/f9jT/zpgZ0tC3slEbnHIjXeCExeme9A+8FWhXMpvvJAykDbT+wbbbM5lNCMBgjWa1ekKk5oZ/i+4x5dcxHH3/A3v4xv/u7nzI7yBn6yMX5NcvLga6JRAacdWR6itZqtNik5p/gIVOpfGNaTblz+xYvnj+jmlTU2xpIFpemadJQNXpEw1iDOp/P00DWdyiRhhxrLW3bYoy6GSB3K3rnPWdnd6nKaVIay4C3EwSOu+80XL+c8e4771E3Sy4u12SZoihSK1jbdsQQx+swWWl2AaKdj/P1FkMQxpKHYRho225M7pfkeVJv8zwnz5JdZBgGDg72E0nCpTa6Ii/RWpMX2Y0NIMbIbDa7uU+F8QC3+z4Sq1Sy2Wyxtme7WTMpK6rJBARcX1+nWuKQwl5KpU2XUrtDE6lBzweKsgQfR1tRuo8kxmhqDJNK4l0q6rhZnft0j8myDCkUchdEjSlQ9ZrBmlqfANq2HTcLqdlqVyjQNA1aq4S+gtHXqomkQTjLcl49X1BNpvR2ICtSZW9mkqWgbbepYMVZskwnVnZIB4m261BaUU0mOGfxweF8n95T78iyIjG228RR7uMW23u8CBiVkxeWy6dX/I1//58APNV/9t/+Rz9sa09vA/szxfrJFB9D6krOYkrbasVq2+M7aOo1Op+y2tYo4cBHpKy4Pu9oNx1ucKxXWybTiv35DBemOF8TCZTGIGzkjbcO6GuBb4+4euzIJ1ve+ynFdTPQE9g7gTxLb2TNFfu3C1RhaH1HdRgxU4WZaUSWsXdQsV7V6BzadoMPLeXUs22uaPpznOvZXgvaTUSWAUFA5SkoJWVgehxZXDgy
E8irVCmp80i9Sck4IyNaRbqtZutr/uW/9gZDXPL5b7zk018deFZHjmeCy42iOtzjo8v/DgAAIABJREFUvduKlw9fMQRwnWfpIAsTpPS8rB9iWdEsNCGUXG9bTm/dYv/kFs8ef8Fy8Yrp/oSLZz1HRzl7R4bNOqWUs8wQCEjt6PtINgMKj1Aa0ZfULwXOOspSo43F2hbFEVF4zEGNmvZ0EaoCTo9ndOuBvIDBQ+dKfOzIEPRt6oIupwDpMOJD2GHrEIA2cgxoZAzOMgs5QQyo3GM7z6Q6JO/g6RcW2w1MtabZ9tx503K+mnH65hnH1T627hHTGVJq6u0l6+018/1D+r4mzxxRar74vZd8+/27vH1vj3/wK/8ne87TZBM++fQJxEi97VktBNVEkpc9y8tAcKBVQZEVeOmJcSDTOSov2WzXfOvdDzg6OOPV4mpcUabTtXMJlSSVoOu26QErU4Asy3K01sxmM66vr6m37c2DwxjN6eEDrq8XfP/732M6PeDy6jneCoYeul5jrcY7ODq8w1ePXvHVV18SgaPTu8z2j1Cm5P33v8fh0R1+4zd+ixfPn0EIrJYLrpeLlA4eod+7Yefrq2QYgwExIlQCYIdx5auUgSBZLTd4DwRB11m0MfjYoTUIqSmKOddXax4/fIqzlunccHrrFpkpaJp2bGNJ9bp2sHgX+PCtb/HTH36HGCKb7YaoIy4GDg4PODo4oW8C15dXSNFRbzZ0TcBZwzA4EAPgR9WvSApRn653Oz5o0nCX2JB9P9B2PV3fj/ieflRBNCKKkXsrEmTfpEDUYrGmbhucD0itmOwXFHsZpqw4vnOXe/dPOT3NCb2m0Hf4Y3/8z/DhRx/y7Nnv8ukXv86XDx/z0Qff5Gf/0MdcXWyYlKdU5ZTLiwuePXnJalkTRURqT4iOo9MpD97aA+XobeJPpqammMD+pKYcM9YdhjBgsgT4T5WbFoGkKGbMJtOEsRm5kLvgTEKEWYzWaG3GoEe6Pk1mRqVTMJvNCT4m9elryWep0rCz85fqLBvVOXezukwUBsYUdRh/3xK+NjSmO0QEGQh+YBhapAwUuUZqxWAt3dBDSKG4/f09bt3Zo24ari42bLf1OGQYtFbsoPfpQJQOYPJrKe3dNb7z+aaUuSAiMblHZwKkoOkakJ7JXDGdK6RULK5XnL9a0HY1p6eHnD/pyWTFndMzts01MZbcf+eEj/7QMZ/89ucsr7fIWLF/kHHnwYz1qkEKjfUtPqSDSXACSUGq+03Bsu1mw3a7oaqqGyD+DYpqDEAprUbPo705GFjrkhUszzE60SS+vo7ftVZprZjNZ6xWK1bLFUCq9JRbrB148pNAkU9Yb9ZcXq6QChw9IcLgXNq8jUNYJCBlOmjALrGu2OX+lDLsWLi79977MPqgC7quI4Rw8z62XYMdhnSIEgnu3zRNeh1E8ocaYzg+Pr7BYPkxwLcLazlnRyVTjkHRyGQyGd/7wKuXr7DDyJSWAqHkDb3Au3R4Kcoiva5CjZuCMbw1zt83wcZxyI1hPKQBbrSSeJ+G1909dhfuvBEApMS59OecszfNWzsu684Xm0gKw839OjGVLSKKRChp60SjKByBgabpETKpt/W2H8knciQrDOl+EvzN6wOBqDzKKAbrIESKIl171ll8sERrUNri8QyXLZO9gmhTE2HwHX/jr//wD/6g+t/81//xD/M9iao85b5j73bBq5drfKspJ4IsL7B+S4yeYBVDD8vVBiEjSnoQkfWiZbOoRwO4wPaBrm7Z399jfnRIXgVC7Fmc9/gg+IVf/AU++eQCLy137h8h97dsbEOUGjXLMUIhoqNuExdydTUgVcf+oWa7nKPznryEQIvzEu8ypPbk00AMEikzlIF8FsimAjcMECU2dxzmKUgkZUUXJItrByGjrR2zuUqhE5PM8QCB9ICospSU/s3fWtMucj58/4DBZqwWgnq74Oh2zrMXzzi6+xHvvXvCxU8ec+5k2sZuunT6U1PqC00uC6LYsF5Z/ugPfsDnX3zG1fkTtJKc3X6bjz58n+dPX6F0YLnsEwONHikqlPIElxN1z2x/n6g0NJLuegMh0jSpk/3oyEBUTL6xjz48ocwCcuLIFLhoaSN4kwEe16cUo5CGv/QXf4G2r9l0DdnU0271GJAAQrpZhhioigKtJnz/m9/jwAeW647gIcrA+Zc9l59nhNCiKosJHj0xOJuzXbTMZwUxFrwSFicC59evIFimU8lXX10go+T73/0Gz5+16Fwxl1Om+QnHH7zDrz7/kkWzYv/+Kav6Emct66tI10j29iqqYoZRVQofCIeKhrncQ/SRvekE5yLlZI+u37BaXUH04w1l7G82AufbpHgJiZT6xm/nvWfow9cQXlBVOdYGHj56xP7+AT5ELi7ORwU6x7sUgGjqhp/9uZ/mT/3pf4btZsvDnzwhzya8/a1vMpmXfPrjz/jff+X/4ke/8Vt0/ZbtZknfNYSxtzoZ5/l9rL+vP7x3UO1d2t86l+wARGzf46xDK41zAecCJktNY3bkl+7tHWJ7z5PHT5JqKTXTsmKwgaurS5zvKAuDQNA2if0pZCQ/OqJVhovrS6K1aA+Hszm3Ts949NUzXrx8SZ5VdG2H9w6tS0ChTAp/RdLDpe87pEiBuhA9EFOLkZJjaMenIWtExTjnmUwmfPe732U2mVLXS6SMBB9To0z0iChQAqqsQMsMF3rWqxVd7Tg6uEMh5qnNSXSgIrnRBJHz5Yuv+OrR32Nx/Yr9vXf4xX/hl7h755s8fvyI5fqC58+f44PnG2+ecXbvFsPg2G62CCGZzmYoPeXyfJPUZ6lp6pFHKnf6CzeJeKU0AkPEIZWlKEoKvUfwgqZucdZjzI6/2qe1odYoyaiii3HIGxu9hBiZj2kAqZuEl9op0alNS0FMDz7r/OjRl3hnMSapX0pHqipL37NIg+ZODUsw9TBej2astE0Pz8OTQ/YPD7A+sFrXBC8wKidGmM9n+GDHWmdBUeVEkTYZznn6vmen5O3+na9/pWtejio7N6vVokhBnelsj5Pbhv0TT9c7mg0I4VONcV4igCqv6LYCEQzf/c57XF1c8OzZU/KJRcuK54+3LK42ZCqj7beE4MjyQN878kKnIhQrIWiqskIIKMtiLC1IKmXf9zchOKVMSn4XBbsa1OQrT+q01oa27fDOY0xOUebozOCtvQlUCZHS48meNKSWqxjJsoyTk2P+yB/9kDff39JsJNeXAzF63nhP8+CdnuePLVmRkRqxBkCOhyKB88mX/PWgUeKQhvE9ToPV7n03xtyk+1/jrNyobEaEiAxDT9e1iJ0liXTg6doOqRJd4vLyIlkaxkPI3v7ejUK++2wku9X4vYikRNbbhqoo8WNFr1QapEAJPQ7UIIUkBoEQaSuw4whbl7ZRu+1GFIy+1AEl1Q2OSyk11h+PQa3xz3+9NCAN9K8DcIJkyxCjZ8YYTZ7nyfrg/c02ZVeukbBZyRJmrWN+mPONtw5ZbK7ZbDvatqdpevo2hVUjjP7WdP3HEJlOJglTKCIIzdAP3Lt/OwUWbY+1ASU
leV7hveGNt47Y39vjp7/zIb/96084vC1ph5Z33/kmf+WX/uof/EH1v/gf/oMfCgVKCmyMlKeRQh2xeFoznUyIIg2kWjI2nMC0KEe1I6JVhhQS533qMo8OJQQBQVnNEFWAWDCb7WP0PoNbc3m1RGWG++/u0foLVt0W21YIFJt6ixaReb7Hpu0IveDF48DxoeDkNGM2kxQmx/YDCk2UBV5s8S4FgrouUG8FQ6+ZTCR7dyuODgYODwV7Z5LloidmOc3WcvHCsllkTCYKokeKiNYFCeib1mh9l+F8wEiJVgERNF/8Y8uP/u+Ob//U9zk8zmm3jmg0t8wxv/l3PuNi3WNNxna5Yj47wEiDCxU+tmRCYExBiBmnhye8860P+O1f+3scHue8elGzufZcr65YrrZMiinrbcOdW0cMrhlZtxrvB45OC3wsyAtPc9FgIjgXqKYFXT3gbYGnI5tWdO0hw+MrXBuoXwjmZYkoLRMK8sFwmEdUCUwtw3ZCW0ea4ZLpJNBuIj6MPjetEDKQZ0kd9NbxnW+9i2xPWCwCsbHU1z1VMYE4EAaF8IJ/6lslZ7dnGAz7RYdzDZ3VfP7lBf1GcPvsgHrV8uKrdAOb7s/YNCsGJzBS8vLhIyDjD//Mx/yjTz6lrA5YXVmGtkXKhONomi7RCXJJ8IG9vQypJYIJUuUIFWi7mt57nl1fcrW9Yn+/hBjpe4sbLAiBNqmJKq1kScxNrWBMpTvrUFpS5BngWK8b+r4jiIG2b3n81XMuLxfpAR6GdEp3gem0Ym9+wHx6m9/53U/49nduk5fw+CeP+eLzT6i3V+QF6HwgiJYoIkGkh2H2taDFjrNYVRVd192wC3cw65FGMj500s8ipEQbc9Nz7ZxjNq2YzaapblFE6nrLZr0i+j6tVNE0TU+9SRw+JSHGNOyenJwiSb/W1zUXz1/iB0c3WCb7B+yfniBMhsok1rf0tsGYgrYbaNo13g/jDV4RQxpGy0qTZeB98nkpocakqwOxQ+ior/2syVf25PEzzs/PyXJBZhIVQcmcSGI8ZnlOXiiCt8iQozB0bcvlyytevbpAyFRnenI2Zf94wssXjzlfPGRw1wxbw/7kDX70a7/N3/yb/x0vzx/R9WskCudSi9t0z1DuBbrBYZ3n5OQ2k+KYq/MFfV+TQmCj6XNM1subsEeyPjgXKcoSbRRdGzE6R6q09htsqjkOo4dXKzOqnJ5ICqCZTGNMRts29H1qNfLO0bYdRptxKyDGcEkYAx6WIECPw4fzLqF+ZALxK/X6HijEa37k7qF7A0snKdhIKPYqLB6ZqbHSt6UsCrJMcng0wVpP1w20Tc/Qu69du34MrrhRYUyvzS5lvlOjvq7mGm3GwUEihMaFNctly3adKkXzLCd4g7MSERO9xHmfCia6SJ4PeN/w5MkXidcK/N7vPmWoA8dHEy4uVrgYqOZTtqvkgbWDgFAxmVRok7YAQsZUzuJcsuLolLJv2yROpIFU3GCmYky+xq9bGTJTjDzaXV+8JY7oqDTwpIFESMF0OqHrujFB7+mHnqGLrK4tn//OkqIsEELSNi3nLzXOpYG+bXp8D6cnp0yq6c1K2pgkRAS/s9SImyE6rZjlzQD39aEtvW+RySTdR5qm4eT4CO8t9+7dJUZPs92kUKgUlGVF8I71Zp3qWd14vY3sVj2m7Pu+x5jX4bJhGAjeJ1yXSMqiyTJ6Z3HB3wzJMSaMWmbMeOCJBJdsBbsD4e4a3v37zrpREXU3h/7d9aa1Hn3zaVMjeF3tnMpIXNrgjKUKeZ7foPWMyW7wb2kzkOwIO9VdyoQKlEKijebO2QOiCFxfr1FqwnQ6vVFvbdcn648Qybng/M1BxxgNMTKblSgpuL5cIqNB67FeOjgOD2cwBJbtJbad8pf+jX+FH//4Uy7Xa0oj2Zsc8Vd++Z+AQfVv/t3/8YfbZUuQPeW+5Kv/V7J6siY3mn7ryCcWgcJbSYjpjdM6DSpKRkIU+DC2roTko0JElPIIAvnBGds6reaVUQgxcH21oO0u+PzHD7GtZTIxuHZNaEHrPbRuWF0PhJjTdYHDW4HjuxltBzJrQGhk4ZBG0vQ101mB1EVaZQ+CvnN4G7k+98wLwdvvzSmOGyYnAh8FXe05OjRo6VkvYW9f8uFHZ7x8ucJkgHBkeUYUHhUF1TQpq/WgyCaSo5OBV19aSgRvfvM9lqtLrpYrtv1ANt9Q7LV88PGcx5+1tE3ETAJRagqTwjgmz2h7y7QyXC+3dNuXCBOot5rtumGx7SkrxXwyR+p0iOiaAWEsdjAUVaDIFM+erphPDbGbEB20TZfaXKSn3lpiVNSbmqvra+K6wfaG1cojhaea5/TXLZnVbF6CrjTFgeP5TxYcz44oJuk13G4ttichd4zFFJHgUg2isz17h2fMqzPaxUMG25J3A74TzCpQWeBbd0E7yXU85IVtOTopsL0GldFZxdXTCwgtWkQWFw1FbpgeFKh8j7zQvPd+yf239/jid55iX33C3p7H+i2uP2deAaLEh4GT4yPO7u/TDwNN7fmZn3sT2+UIB9U0o8wr3rz9DfYKQ6EiRmX4HvreYcd61SLLMCZLKz47EIJDGzBaJqVdKKoyw8eeLFejl6ikmhik0rStxZg07IaY+r210kipOTu7y9Onj/mNH33CZrPl7O4Zzx6vuX3rNnmRMFlSRiYzlXiTzqDIxiT/69UhJFXVmIxt3ZAqFdXXrACgtSIzBjXinHaqT/L8BbRRuBiwbkCbwO07hyAEXdMhYgqupLVYwGjGdROpxlRr8kzdrN67tkOQHqq3bh0xnRQ8+eopd26f8v633sWYitO7Ei9qNusOIcWIRPLjQ8Lj/UCWyVGBNBAEQz9w5+yU+XzOYP3Ng16IHb5nrCiUelR3w4jaSWn/ELgZEJrW0juHF5Z8kkIlAUdeauZHM6b7mqaruXjVYPuWvcOM5WLLlz9+yLOvHrFavyIrBpQCoiYvNLM9jc4Ee/v7CJFxdblF6ci2XhNcx8npFDs4utqiRv5lJFkzpEwP/RBDuqeOr9/JyRHHx0cUFUznOUYXaJWhdWImxsCoJqVDyM67mGUJMO7s678rBSDNjYfVmGz0tyYvXYLxJ7HBucDQdxgtMEaQFwm15YMj+EjwjOnp9O+loJK6WdUKAdWs4uj0gNlhavLRCu6cHtK1Lc5CZnKauh1ZrDu+qks0ivE1SXir1+zLr6/4d+zZXUo73tTtBrq+JoYEs8/ygm6r6BpFlmlcqLE96bA3IgjzIpKVLavlkjwX6V4fNZmacXx4G2Jk255zcueI99//Hu16zdB7lCrS6+J7tNEJedWngKH3Aes8LgS6vkePg1EI8SapD4K8yMcgjh+vUYk2Cu9CQrERcP0wrppfY6raNiXnd+n15BNOFI/l1YYXz2p0PmLERFKUQ0jNZk3TIKNkNpuTZwUJ1N+lw06IN/YUY7KbQTUNowl393XWalEUv29g3R2Wg/M0dc10NhnVz3UiCiiJ9Y7B9uyYz7twrlTJ77yzeezubVpr6rqm67qbg/mu4coHnzzNOn3uQ0wlJjE4hI
wMfQekYU4bTZZpgoO+G9Amhb/sTXUqgLjZ2mitbw4GNwg5XoP9d0pzopRAnhe/T/Xu+548z9Phpe/Z1QqnITX9PyFEhsGN712Fd567997h0aMn+JjQY8R0z05BxHRYMFKNW5hIlmls3zOfzwg+MPSC6MdhnXDzfCDAMHgOj0/ow8CkgF/5lX+A75eoScCIKXk+56/95X/rD/6g+n/86ic/9DHQxRfMM1j/BIZVIK80gwUpB6TOiDGDkGonmz6dxkRM3dJRJNZmcDKl02RgOs3BO+rWU2YZea4TyJ6MvoW9vT0Ojwt0BOs3nB7fpuvXRGVZr5O6eX0RODzUzKsjHn02MK0ylNBgarQKKCXQOmJdxNmBalJA9FRlQDhFXwemq5z8uKc4UxzuzQktdOeR3lqKg5JuHem2lhhXCJFWcMqAwKekoQr4PiJJNWjOCvK5oW806+cLfvT3v+Dg+AF37x9QbxegGy7agaX17B1VrB9vCSIm/0zfU1QFHkfwW7Qsubh4SaElqBnb7ZqzN8+49/YZl88WNF1NWVRcXSzJswqpkqJJ9Gw2HUZPmasJq2XL4dE+xEiRlQidDgtVVTLLSlSAYiKZTxxSBkKvUT7DLjzBR0R5iwfvvkvbLTBG8PKxpW9KTDVQmAznUve3j370M4JWEkHkww++RzMoLj/9lEUIFGVPFUsGnVHqCU9XFZ2Z8MXVC7YS6lDy/HqgFwWtH5C+J9iB+X5B3XYEB0RH2wzICL3t2OiWftMzrfZwpqfd9gQ54fY3Cpq6Zug1NnRsNwPOSga/pm3A9QKV56ho2K+mGBGTX9A4dAnLTZ/q/XbexpD8cc6l6julNFleUDc9wQkG68iLHKkkbnz45FlI1z6B4HLyInJ2N8N2Bq2zETbf0fcpHKGNQCnB+fkr2m5DO9Q0fYOSJX2f/KXCS3CGIjPYsRVo98BOHMOefhjQRo8p2tenfSkYfU3JU4VMQP10800K1q7lpu97YoDBe9brLSJAmSlidPjRt+VH1SLPS7TKaJuO5XJNjHD79h0m1ZSf/yM/z5/983+eD7/7AW+89QYXr654751vUumcX//Rr9L0C/rW0zRDWveHVKghhCDPDXmeHvhd4wg2qRVZZnjw4D5FOUHrhM9Jq7RwA9YWo2c3hoizMfkz86RkEBJn0A89KkJVTtEy4+z2MblRDK3ljTePmc4z2i7QbBsYHNZGHn15zvlXF9h2TcCRPv274ShtlW7dusc3HrzLdtvx6OFXDMOWsgzkhaecRqROB52hTyv4oizSMOLSw2YY0s8ikBBS8v/W7WPeePuU2b7m7OxNFheRxXJFnlonsTYwDCFVM1pLVU64dfsWSiVY/W69mXrZTfLbjddNKksIZCbDjOBy5yxKpnIPMSrUiDEc5nbcTUcMr9fsX2+euoGj24izgbIsKbKKZt2CDRiVUa9bnIfgk82j67tRNU3FCmWZM5/vJ1UWeePF3H1JIchMaiqTQpLlGWWZFKN+SMPAg/v3AEnTbShykCrVtPZdj4x5elahUyI8ekLsmc4qpPKYTOLjQDmRoHLqdsN6e05Z5ty6rbh8dc2LZxfJ78sWIQfqJhCDxrkeIeJNwCbLMiRqrM8MBO9ufn3HBk4BtTSs7JQ751zC542e3J2alixXDudSoUeMkbbp6NqOLMuZTKpx3e6oJtW4hk/ZgcxkhDFgqVUaXMqyGj83vF5DZ9kY1hM3ftTdQHZjTRGCHed4FyhyzrHZjHWezpFlGdPphKZuWVxfkRcKo8DaNv1MiJF9nA5sbrSPTCezUfX8/Vam3WtDBG8T/q2sCmxISmwKDlkIYPtUtHFycojAY0yGs2EMBaZDAMSbEGraLI0BwDwp0MNgMSYjy8q0dRocglSsIMTOu5s2GzsqQ55nxAjbep2eidqM92hBUaQP7c7i0/cDSiVW6w7fdnJyivOeoipAKJy3yeKnoKxy+q4lRhAq5USU3oW6Ai6M5IcQUVLgbApyziYHDL1j6G2inuiIqUouri7Zm9zipz9+k288OObzz66YnwSk2OPf+df/CVBU/5P/8j//4e07b+EWnu2TJUbP2LQtwXlENLhaoGeGIAekt/he4bRPQ0uAKARZPsXaDg04J0AmVaevI+0itSm0dct0PqHtW6Lw4GfIYkE5hbqNXCwuEVoRlUgp5WFgXkG0E9YLwyQraTYwP9wymxum84yhz1I6uM0oKsXyemBoCk5vCWR07E9L6tqSTwLXjwL2ZUaYRZaNJSs0ORIpHUMbqaZAkKgsIKRCSEHflGhjk6VBKaKwqBjR48+/PVdkwbO63vCdj38KW6TT1OFsQuiOePXblqwLCK0Q3qUwRCYxlBjhGYLn6NbAy2eO6eQOtpcc3a+o9gue/+QFe7c0We7wziFUhykEp/ccvg88+MY+iECzWKLyjFevtiNqpibLZgRKijyAzvCiZy3AWIOz0FvP9z7+CBcUuJaz798i5Lc4v7gk5HvUjWfzqkfHA6xtgZSQhCwlKYuk6LRbwby6T/QFv/Y7n+AwhDBnGw290myXG7yOVAcT7tw6Zb3oqRcOv/H0zRKCIghNpg29dUSdU5iKelnTrDaEPlDEimevrjnYz3ny2TV3zh6go+er50vqlaPZCKQ0CN0xeEvXKaqypF5JJDnBK06ngrmo6Tct29azri1XizV1V6fB3QukyBAxWUBitGOCU9J2aTgAyXQ2p+8DwxBp2i5xgIcEY3cu0NaBLB+YTWFxZWmaDiEMs7lg70CRlzmr5ZYPP/iYH/zgBzx+9JR6c02ZG7QsGFzE+hapDYMLbLrVTbL669D4ECLVZIKQCmdH2HRMayjJ66CLGHEocuzCvhnsBIAc1TKJHBmZmdTEEWaOVInaofN0OhcxJbwtOJsOmvfunfHh9z9gfnDMwd4Bf//v/gP+p7/9v9J2jnsP3qKLA0+ePOT6vKZr0qA59CnUo9RrlcbagEBhTA4EjFFMJ1Nevbji+bMXbDdburZNw7dOA85O7RAyhVGIaTDNy4TN8l4ihMJJT+tjUvFcS7tpOTo65eBsDhPD3uEdpBPU1zX1ekvnetra0a9qJBGkwbs4hulMClA4T9O25EWJGwKLxSVaC0Axnc7xQ4aWMw4PT1gu6xSSyTKstTfK2k3fOAKldRr2BsPprTP2D+e8fLFk22zYP1BMq0niVdYtITD2uoNUiumkYjIpGQZL1yVwvlZ6TBmnrnNGxFxelnRDOuTshv7oU+94nhuKwozqb6JfDL1P9owxiLXz4O2Gil19ZJFr8iyjqzfgPRqFBIauxVmL8/24eetQOiCiQYicGCTTyZSIZ7ttbpS23SFLjrVa1qbDmtJpUO26/sbPmLBlnmFocS7inRjLA1KwBDwxeAQGhE9e3z61TN1/c8bQO1bXmtn+Me99dJdstuHZ4w2zyQScYPEyZQOCtzgbaTtPURmm05LZdE6W5bRtw/7+QfL82rS2z/MMrVJ4qKwmzGZT+r6naRuEVDf+Rq01amQG717XLMuSato0MELdd9aepPolNjKRcS0MPqZrw3s75kcM2gSCT9dYluVsN
quReyvQKtXmNm2qOE3K7+593Smq6S4ym00pipKu61mvN1hrmUwmzOdzDo8OUWP18eHhETEGvBvQMuLDkNoChca6YQzoKYRM96Pko/ZpDT5uhPo+MZp34SRr06FJaYWPnum0Yhj69H2S/MpaT4GIMQnP1zapedE7kVBhIZDlX7OvhEAInizLR+8wEJMlT0mFVtnN9a5k+mzA68phIVK4tmlaVqstf+7P/RmWyxXL1YI8K8ZAmGcyqcaNw67I4bWNZTKZjAO0oPdbiBk2tLi4HEstBIeHibnbNJZymugJO8tXEoNThW637m/uI23bpuFaihQkkzLNN4UGI/kL//zSjRziAAAgAElEQVSfwinPb/7od5BGE7uBf+/f/ut/IAZVseu0/f/7mnzzID64e496rSnsl2w2W4q+wgaL9Z7eCuaHGSF2iCjp+wSODiFglESKDKki26YniEh7lfEXfnGPf+3f/DartsY4yezwhL/x737Kjz654NbbR+g8Ipmj8wtsXCJEYOjceKoHZSTtxrB/JFi/zNH9QBErqoOW6u4W1RkO3zZMJg1PHgu2i0gUYCqD62f8iR/8PP/b3/l/mJ1d4m2OjAO59ugThes8koIXn2X8/J8qeProil//x577d3OyXOFMkywEW3j8peTsbjLA4yuEGBBZxEvLtICHv25YPwmIGPjuH/1pbn1wn+X1mvPnX1AVgXYZaS9aFq/WZChiBpaACAqdKZxIq5Fm0xFtifcRoQfyIh8VvYEqlwg5oNjD+g1ZnlKeD96puHoVePawQ+qA7aHIZ+SFwPmBw4M7mMzw6vwR33jvHo+fXbBcrrl9/5AoDKI3GNux3G6Zn9xitdwissBy2aAJDCpn/0hjr5bkucNXkXaQqCApS4fMAa/Zy4+5dfQhv/njX+Pw9A55obi4eMV23XF8sI/cB7vcUFWnXNeXbC/WzIsDDk72uTx/QhRw++QWIitYby7wnSV2OU2/Iao5YqgJmeDbueTkzY/4+Oc/5n/5lb/FTz67ZFZVyKyn7zy37x1SX/fYtkDkjmquCLMp6IG5n+N6RX6yT4g1548fY7sNXavJxJRuGHDCp9WpdSjGNGkyvYyfokD0KcRyMMv54NDy5NpwKdKNtqsHJtMcIUpCyFCmI8QVzSYlsuuNo2kGDo/2eeuNb1K3DV89fojQmzQAdzl1uyXLUmVwUh0tWhYoZej7blQ00rV+fHLCMAxsN01SurTEDT1+HITi7iE2VqemKs+YlCmZOrOFVISgKIqCKJIaY1TO3t4+69V18s2aKcHD4AaUTozB6AIn+8coBJfLNc0QCXhUFonRUeQZeV6yWvY0w5ZqopAykBeCGDRdm1b4UmQgJN7bm+9LCMHhwT5VWbJcLtms66R0j0qK1nqcoR3J86mJXiKFRyjPfDajKA3dsKHvHFpNiAG2tcV5eOude9y+fcjDLx+zWTXMj/YQWhKsp1kn/2RVGaoqo8gDPtRslgNlccydu3e4urrk+auvUJnGDTlSGUyR1nRt23N4eMLhwS2GrqNrEtPUuR47eECNVgdHWk+mNajJ0oCeqynvf/wBt9/NqOtrMjFH6woGuHh2yU8+/y18BOfTelLEiBYZJjMIHM51SXlRI7cUkpcOOaK80rVYFInisNnU5LkeV58WYxTaJG92GANSzvmbFHgKbe0e1uP6VoCS+sY/rBREHC54sjE413btzQM+8TvHaOA4uHiX1DUxeiHjSEnYJapNpslyAyPntR8CghwpDEJEjo/3WK4T77QscrwbRvULCAZj0gALjnv3z2hbT5ZrlqtX+Oh5/3vfQRvPl19+yeZK0TeBEGqKvKIcDyNSOqzfUubHlFXyFjdtz/n5c6aTGR988BFfffWYeluTGU0qfEiqV9t3KUDjI8NIUYARN6U0Wmqs7W/sDsYYhmH4Wrgo1ViHmBBHVTVJa3Dv2d+f09SbxIn2EaFHf6+URA8hjJ99kQ4rZVnRdZtElxAKN4S07vYD/x91bxJjWZaf9/3OdKc3xZAROVRlZo3dVc1uNrvZJkW15CYBkYJtSrZEg5IILQzBC21kArJFyTJtNOyNF156YS9lgYJkwQMIypBIT7IMUjRtiTO7uos1ZOUUGREZb7rTGb04NyLbG23dSiCRucjIN913z3/4vt83hhxTWhclWmZqAyJzmbXWtH1uJrTOzv2maZBCsd29oG+zttoUI95lbaVQw6RvLdh32UyVkkAXhrJ8hbS6Xp9fY7AAyqK6+ftyueTy5cWNi558FSIF9HZEpmucVp7Uex+4dXyClJL1en0jv7g2R2W5gcE5l9O1hoFxHKmqirqZQxIM4wgi4txIM6/RSWTKgRGY/UB5csB2P/KnvvFvMD+Fv/d3f4mYCpRJSDmimCOkneQAFcn1+JCIKm/f8JJZdYAwiaoqWO9bqhU4LwFL0wTWV2Ea7nkOl4dsrrbsNvvplUbqqsoN+GRKSymbxMrCIHVuJEMMuGRpqiXWpun1KITMoQAxRrYfh1crjP8ff/0LJ6r/4J/9wjffvG8IQySaGXE/MPo9PQFdFXgLRWEojCREN3Uf+YJA5OguNwgiWZPY7xN/6Wcbvv6TPaIcOTi55M33j/i/fq3ng+9EmlXO+CZOKUihR6UMVU9RUxlPt4kIFZnNBajE2GuEGzCrHj0rUERe7h3CH5J4gx/8wa/y8cdPETphisjHH1yxftHgk+PwKCBKcFbgx8CiNuwGwbd/o2N/NZLSm3gxsn7ZU9dgaoURMDOSi8cFi0awnBeMrkMtAioZRGdQCIojw/qJYuYaNi8fc3zvFj4ETg4O+fTxd2ju7mjPBclmjYxLUJQFWhU4PyK1ZLac59UHLUI6ClPQ7jsKlXUns6ZAKocyltVyRlUuGIeRF89bdmuoipKqlkiRbw7LRcn911/jww++A8GTlOXJ8xafBPNFgXVgXdYezhuFKTWPP7vg9uo2VT3j8vwlB7ePWBwfMvRrJIp+L1CyRFce5yNRCupG4LclX3jvy2w2nquXV+y2WzZXl/Rtj5FgrSMOIFRijDuiryi1Zj4TLBYl5ZFhDHklvnm+xhqdV0LTgaa0Zow9dSgI1HzlG3+C80+f8IcffcbqcM7ssKSer1jMJYpDNjEw6BYXBNY4duOWmAybXctL17IOe/puZNxvMcwJzrPt+syovElNUSQhGL1HCYk2q1yqKo+SBSlqkoR7t+cMNjIk6AdPCgptYLR76lnWwPlBMHTZ0FA1htfv32Y+m7Pebnj02aesDmb80B/5AmdnV7iwZ3WYGHtL8DmJSoualPLkYzarc4qLEFRliXWWfdvf6KrGMeOc1MRdFQKiyIgZrfTEAsxFd1XWKFXc4J+KIkfEhhD54//qN5BScn72fFqrS3yIORUnBaxzlGWWPxRFRVlV7NodqEg9qzBGTgk+PUkMnJ7e4vbtU7SWOCumJKGAVnVmj479DQYmu/9Hdpsd2+3mxnjgYzZKSAWIbPjJmi9FzLojkH7SOepJiyZIURGn1KUYIjFlbmXwgc1mjXOWRT2jKQqs7RlsT9GUzBcNy8WCUhXYfiRE
yTCObLZXDG6fL07K7DpWkUSYHLgZb3Nxfslus8kYIx+xQ5rSgsRkssghBloZCp3NUVJqvLMEFbj/zl2qWcHZ+UvWm5fYbuDl2Ya+dShZZA1uiJSm4vT0NiF6xmECwyuRo1dTloAkuNEwW+umaMxrHFS4MfWAYBxddgoX1Y0m9VoXB6/kJdcu8ZTSlDef6QOLxRyhBKPzvP7aPXyIdG02d8V0Hd4woX6EwCiNzuR+SBPbc1qvXj+WMQZdliiZMGUFVCilKUuJDxmDZIdAu2+JKRdaRalwoc+peiJNBaPn1skRD964x9Pnz9jt90hZsu86zp6d8+TTM3ZXLSIqCpP1rUrDbCFRemDoe+YHif1uZL3Z0vV7Nlf7aTUOjx49ypNfssPd+yxfcS5P5rwP2cw1sUSvp8YZJP8KfeScuzE9XoeN5EQkR1WXGKMYxyHrgpua/X4LKUua6rqBlLIefdJMAmgDCM/Qj2jVEHwkJTd9vwp0IbNByeYCv1AFTd3Qjz3Oe6RQRNKNflSpXPAKIfJ0PxWTGcwz2p66XGCdx6d+0urnVbibGpKyavJrDHlLI0T+nGOMN6YkH3yWCaQcXxr8NYos3Zj6rHX5OozpZqUfQqQwuZFr23bS1NY3/z5fx0zXfX7smCLKyHwPjZauH5gvZvm+OA5EF4nCEZLFS099OCcMkT/2r/wIj9bn/Nr//hugBpQ01KUiuUgIu8nAmj0+gpCvg5jxdHWRk8eKUrIee4oiUoiBuw81W7unT4myNOzPIoESN45UNbg4TDrVMnO15TU6KyFVNjWWVf5++uhyoELnJhmKpzAS5wND76bvV8Hf+Nmf/56YqP6LI1T/h5//5umtI77z2x9zePsUox19u2MeKkYEKQiG3lGUgoQjhYwS0TrlfFzUtDLwlLph3Fv+/E/9JG88PCSyYXk6oywVv/C3P+bsUnOwOqLUAaUTnQ8YvUW0OflBN4pSesYrTaLOhpC0QhYJ+3JPfQei9DifcKpmu9+yuFvRVPc5u/iMJAYUEMeWYTdwcntOuRgZxYiWoF1B/1xycBu+8qX3OP9WYmd3LA8cRnuKEhZHkiQgjoe0w8hwCacPBS4kFDqvaNTAGCO3TuacfWvHg/sP+fd/7q/zP//Df8Dv/NrvcXr4JicPT9m5c9zG8vJ57hYRaXrvBM0i0PWSq13L2AfuvXbKya0Dtps9y3lJCD2SRN1EjGrwVnK4eIvLFy2LpciRfi4jb5RwpGSpCpNjF0OA6POUqTF0g+DW6YKYPFovWS6XnJ9dsH45osua7b6jbCqOT465utow7na0u4FZXVJVDevLkbKeUdW3ICkQA0Mn2b/Q/Gv/+p+mbwOffvIp7W5LaRQyKmazghQESEv70tFtPV03gpK89vopT56f0ywFJI1ts6DfxJIQFbdOGows6NsRIyXOBfaD5cmTj7GPf5MH925zfPoGYf2Yy03gfBc4X68pjpZUywWHy4rFfAHB4C49905PaeazjKmJkjgEfG/QAfSsJHmPUSajTqQmKYXUCoNAiP7mZlhVEqMFu27k0/M9nZLEJHNBEARKC5bLgqGP+FFS6BltP/Bv/9k/x1tvvsPTJy/QRjOMG5TJuKHPHl9ko5r2aC0zMmZIKFFOE5VIjP4GB3OtC/TOU5qCWZNdwP3QA4myKm4OvRASfqJziATXKWPApBPMN7R92yEQBJ/4+ONPOHv+DOss42gZrceFLNKfzxcURUHf9SQkm/2Ou3fv8v4X3sYlx3bXEn2kqoqpwAloXbBve3bbHd7HjAFL2eFrjL5ZN363kxyy618gGG0+4Isy6xnzCg6qskSkXFylFLJTXRb5MErZGAIqyzrIK2qpwoRTYoo0TFT1jLKpcUNHGC0GSD4XhLrIhYy1I1IpdCEpaklKkhDylG+5akgp0vdd5uzOlxnwPXQIYs5Vx6J0zEQJOUHKpUJJg1JZv0i0pOSJGIIvOH+2ZbfuWC7KbPrcXGXGr8jT65gESpislw4j1uaC0DmPjxEpFFVV411Oo5IyI46UErTtPrMnJ6OM4BXizPs8yUyTm/laVwncTLyvpQRSKqQAqXK60TAMlJXhqz/4ZYwxPHn8FLKAYlqH+un/uYbKvwoYSCkXr9fu6qIoqKqKEANS5Ugh6y3Ot6SYzaJ2zNr5sqp586036Po9R0c1ShSIMKPvW4pSUpZz5vNDrG/ZD2tenK0xhaCqDPPZnLHbo1NJXTYYLRAIvAPnI1IGUnQMY+ZO7zYWKSQuDMSQbvi1RVFM8g5PXVU3kZzWu5v3MUyF6rWEJ8WIiJFhGL+LI5rfI6VeYaG8z0ahTGLIg6MYPTFlCLwxGlMU1FVDXTdst7vJDKkm3mt29v/I179K1STOXjxDMKPQC7RRmNrQdh0gKabGNmtjczGDyAVzxnDpzDPuuyxpMYZ2v0GrRNttSSkya27hvckygJgm82S+7oWUU3CAvLkXxZT1zSEE2rZ9FZCRIkIKDlYHOVXNZQRUnAyFMcTcjCBu2KfX4RdCKA4ODrI3oLOcnt5h1ixo9z3Hx8fs97us7U8R66GZzUhTQ1zVBd6PKC1zAiMQpSAqg3QCNyqMNJxdXXG1fgGuJyWJ1BopS1AJO0biVCuVVUE0ki4CukTFSIlgfrBk7feUM8nyqOD4zpzDo4KLpyPDuqFoEvfefkCpoG03pBQ4PFpRNfnzCi4hVQ3XUfBSYgqJNlmaZwpDjAHBFB07gdAFgVlTUZia0Sb+5l/9m9/7heov/dP/55tUDad379L5itHP2F1eopxlUlPhHTRzQ8SS4iSmV9lANfYWrTIT0UVF2zmCiiwfbng5bHj6aEDXiV/8e4JNGwl+JPYR61vk4jluKJmHkv26xQuDDhVFE8AIki+wzpK0oogReRTwIREQeOUokuSguMNnf9Dhx0uaxQgpw6ZXdwK+cCyYE5SjkCWhiwRvOO8S633H6nYimBYpP8/r929z+fIFRTnno99yfPrbI3Ot6Hclskwc3Z4hRMvlc0NZrZitALEDecQPf+UH+IPf/ITvfPgIO7Q8+/gZJ8e38ipKrHjx2Trn8SYFwqPNDF2VSKmZzSuCD/R7h0glZVER44hWklLWbDeCvoOyFLRtz2azwfsBbzXRS5IPxJQTeoqihKSIgClLoohQ5QLs4vKC7TYRoma72+Spncyd/K4bsUSqqmJ7dYkMAT8mUtC028w2DDjaoUUMkkXd8Oabd3nv3S9yfHTKetPy8UffpqkqCpMNGKqQKFXgu8CBNHztqz/Em18MXDw7p9sHjm4fMtcF3SZxdFLysrXUumGMsCwOUUUiyh1FFUhVSbVMXDze8hM/+g1+5t/5aX71l/8Jp/VzDsqOq8KwenjE7vkVOJjNDfu1RRHZicDLYUu1XLDdt1jfI8ucAtLoBmEgjNmRGgUkn4tmHyIKgRRZC6nRhAgqCCrpmS8PwKtpIjBl1OssoO/aQNfCyekRt27N2W5HPvjWR3z22ZMpEnFktD3XufXBJxCREGTOsJdzjC4pzATfFtwAsrOrPesLs1YvT8EAQoo5sEKbmwMiK9SuIdfpxul
6Ld8BJoxOnsxKLbPBRkqcv8arCOazGcWEkIkxMYwDXT8ym89xMfDixQVdN+BsoJkVHB4tEaJmv+/xwVKWNcFfP3eP0RXHt26RUqJtuxtnb1VVzOdznM3OWz3B60P0zBc1Skm6dsjmRJsz3XPsYQb8x+RvDA9ymqSJyQEstWS727NarXjn3bfpe8vFxRXeWooi3zd2XU8/DKxfbvDDiNa5MCQpvM8mxbKoUdLkBiXl6yUlblKfxLXSKiWE0BAz0zjGzHnNRTPE5HIims5g8uQDYzfy4skFru2phQZXI8WK6FKeXE7XQgqZWdm2W/yUua61wRTF5KzPZpAYr2Mb4830/VUiUKYQhJSwLkyAdDW5msVkYMuTt/yz1c06/noN64OnKDRFmU1Lb731Fkorfuu3fpvgM+vVT2ZFo/XkIs9YpMIYtJQ4m8MLYgg3U0Ah8msYBjvxgHPhVldzvIdbJwcc31qyWa/pdo5uv8Vbx3ZtKasZhycH2GiRWtPvc/Kc9Za+y8W0FGUuKmea4AdAo1WOnSRF9vsBN45oUxCjYhg79vuIFBXBW1KSKKExSk9UjGyQywglOa3IFWIyDxljICWUzChHySSD8H76XsYbV/y1xCfrVQ1S5XTAotAgYBhGUoKyKoCUjTQhB4IkEu+/9z5XVy8Z7cBsVnLvwYz12vH6a19gtLDbtcSQMCUgBpgYuQqJlooYAtvtlrqpbp5HWRU47xiHjH7KWKvEev0SH/a56BkiUkWamWF0O5SeNLtTwRpCYD6fT1KTeGPuhDSlcBmqqvr/SACEEDR1czPBT0lMTZS+mVRfa2qlVJOhaWpmq4rdbkff5XCWsqzROj+GkIIQIocHB8yXDcPYEZLl6OAo03OSJgZB8FkGEIOmmTWY0HF4UjDaLfthj3SQtMc6gSnTFBE9cHhySii3PHh/Qb9XpH5gpkp0TJRF1hX73qJCIu490TT0ZeTsakvsNHHIJt+X6w2LhaTrerwHb/NE2JiJ6esdUmaDczHRWq6/K0M/4l1AiFzYD70lpkBZmhtpZ103/Nxf+bnv/UL1v/w7//03Xzze8Ox5x6NvXXH+/DmVHgmMFGhCTHgPzVxTVhCCIiUJBEgCqU3WKYkKaHEDfPHL8Kd+pqepDaqEaqb5xb/b4qLg6FhxfrZhthpBFATZU41LfuhrP8yQwCdPUY9UM4EOJfOjUx492XG4aikPNOOQ87HrUtFIwf7M0K33XF211M0SaRzJeWI0ubNJHckIpErUq4LLi8DeR5LwPH3RIkSkt5YXZ88ZB2h3jnHjoYNxEwjjyPYCyqrktTdKrh5pPvina46WR8xOBCdvHnKveJdf/1/+N95453V2HsY0sjnf0j1KdMMlQnlef/0IpSMuDoSk6boW0ogbI9FaZtWS9ct1xmNJgRIKqUAqRwj5QOm6gZQUJIMdPZAh/YlIUVaMNrJtR3wS2OBwUTBYOU1eDabWjG5NsJFkDYe3a8oqsd/2hDHgerDRMooRIfQ0sRhRIq8p53PJaqXZrVvefvA17h5/GRf2bK52PH32IQjJODrKmckHn9GMg2W+XPF8uOTgzQrvJKvjmp2LqPkRzl2iyghDzfHqCO/3dH3H6AIPvlAhzQy7tgw2UR8kbFHyi7/0OyyH57z5RmCxKHnytGTXRW4dC+49fJPdHg6agg6LCwPeWcaupYgS345kX9vIreUhdmizCGtKOZEI3DDmqY8QxGQgZNzJKCU2Bj73zm3GkNjtOrwfGAdPUSmaasVoB4oyoo1jGCzPnq85e/6SO/ducf/BHa7WV4w2axYLU3N0qyakEVMYKrNg6AYkBT7kXHClX+kLlcqYoeuYPikls7ohppQPKjKbbxzzCjKkhJb6BhadU4z0dznnc+edC5d8s0sTVByyvq0wGkGiKsssLRBqOihyws5uv6PddRkcLyNVqdBScnR4zDBG+mEPBLp2uDESVVWNVIL1evNd05WMQMpRjT2JPH2J6bo4zBq9EDxGKWJIrJZZy5q5oolEyGiwGJHCMJuVSJNX/j4GnI/UTYmPI8+en2Gtp6wrhNST030y1QHExPHxiqOjRc4tt5GqmtFUc+pqQaEFfd+y27ZYO1Ka7LQmQQpZIZrd4BEfB5QWE/syS2JCzAWeVArvLEoyTZsDWiYODgpM4fnkD5/x6NMzNldrxqHPrMiizHo08kSxKAymqLIOVkqUKQg+FzBCCpqmvlkrXxee1wa7nHiVpusqP+9rvel3BwnE+GrFKoTETeD5qqom57hgsVyx37d8+OFHXKdopZTd3q9UhFlLxzWTNWVtY94A+Ju1eL4mr1FEhsV8wWK+YrfrUdLwAz/wJY6OD5EGvvEn/yiffPqMvs2ylP1uw269RUZFqRQJgbOKxarMusgxUyNS2rHbdGhtpohZOyWiebSRPHj4GkcHhzx+8gylNZIKKQ1FUWYGps6xvVkSlyZzmSaRcjhDCvR9iyk0Wme9IDKbvZiah3Iy6VmbDWPXjcQ11N4Yw/KgzoapCaEkxFSYTWl6y8ODHK2ZEvu2x8fAg4cP6WzPtt0z9Nlc9NGHH3J2/hxTCFwcJv2pYN/uqEyJEvKmUK9n1QSrjyReRZwKQc63Z5KWJJjNDhlsh9Ia7wUxwDBuWB6UuF4y+hFjNIeHBxPO6hoPFaeGR99sgbKMaZwQS5ku0PfDlOSVm/CiKKaGRjKfL6jretJSy5ufU0rRdQMhRJbL+Y1RK393wFlHUeYp91vvfp4H9x+w2+6zxCDJmwbD2pHFicJHT1MJTl+fsV8EktXovsKoBUNwzJuaQkdEHBFxia4TB69XDGrPi0cOJR3zmSFJT4gWRTZtlY2hnGuigtFecbC6S992CDWSvMH1DqXE1ChHUlIIkcNaVkcVzTzS7iPeJ5zPFIc0Gcq0Mhn8ryXIOJlQwYW8dK1rjZADP/dX/uPv/UL1P/vP/9Nv3n9QYaJj/9klyxPNspC0u/0NGy54gVSBeqbw7pXeqjAFWktUSgxDIGlFwPD8xZY/+edus22PKdWOorjD3/9vzolsubjY84UvfREvLhi3ELWljCU/+Rdu8Xt/sOb5iz2y7Og7zefffpfhStOuLU2zRZiQ2WshoXRCi5pbh6e07ZbtRrPbKeZNpFKRsK8oTcCXic0mcrhocB7OvlPy4tsDr90+5uS4hBjxqSWMioSnbMgGp/OCokjUWhJGycXTlkrO2L0YSYPmyUc77BbSueDFs5c8uYrcunuf73v7lMsnZ1xdjaSyY1GW2EEgRUW72zN2khCgVA2r6gglNSJ4ykKR0gAMRJ+diaZ0KJ0ILhGCIGCRKjGbNyglWa/zgV5VDf1g8UFQ14ckIUkygS5xtkXpjLiwKXBwcIrwiqqSbLYv6UZHWVUc3J3hnUeSc7MNirIyJGnAesZ9jx0CD948xcmB73znMS+efps/8vWv8/z5Oc8vPmToLYeHx6hCEFLWSM51Yru2bLd7nny247X7t5nVjmcfX3H5/ApVL9lfCIzZ8me+f8/uactWnGLme3RdcXW1xw6O2szxA3Rhjz5oOb4teXKW+NXfHfCjZO
g8fVCMwTObK4LrGLymFoZgIckMCa9V1hG/9d77rNsNYejzDdMoiJnpaYxCkp2ioggIkQ/UxmjuaMHnKomynhQiQQhCUmgl6XtPVQuqOne2QhWUdU3TzCAxQdkH+r6bHJuShCMmKOWc48M7jMNA23XUVYWQgX4YqKrmZhU7n89vNFoh5LVOjHmlmMgA96qsbiamMXgi+fOPKfNd8yroOuHJTeCYQJoA18B0SCpi8JASXdsTfOC9996jKkv6frgpoJ3Pzm5jAkWR9U9PHl8ShWO+LBjHwGKx4sHDeznhLOUVVca1QJY3xGna+spU9d2Q+etDTZCna0pqbt06oaqqG6akmlbQUmhCDPhgp+CGnCykVYVSEmtHvEucnNzh9r27bPY7vLWoINmt94RpVS5UxBQSUp6YOZfXiONg6drdhK3JyJkYMg3AjTmGdzabcXh4QFFFFiuB0mRywXSgXqOYClPjrMfagUAkJlDa8N6X3ueNdx/y/OwM73IyYF1nRFgMeYrkbCREgTKaqqyBjC5zzmOdZepQJuNapkeE4PMkNSbkxPi11k1Jaobl6oC6qmnbPXbMTm0hRMYQTVpirfNk73oiZqY1vbWezWY7rcXVq+UAACAASURBVGWvma95vVoYjVaSG/YqWeuqphCCXIQZTk5OuH37lLZtb9ip1xPrZpa3TW2745NPnvLZo+fEpDm9+zZdt+Hi7IKiqvnc+29ycq/g+DTSdlt2XWRxUFPPDaujQ4R02CFNWmlJUpCExTsHQiKVyg2rdTw/e4mUiqpakIczmSmNyo58oaCuZzfTwMzODHhvGccBU+SiaRxHlDHTNjLv8KcdB0zrfK1zJHNVlTfXvzaKg8MlwzAAObgBITBVmRsVnXWPbdfnNX5h2O9brHM0zQwfAsEWmEJweGsOKjJaz6I5zJ+DChNaMj/nmEIm/OjcONZVM6H7cnMhyZQIQcJ7m+V/wmHHjE9zfktZasYxQoqUtUErw3K5ZLvd3rBRi8KwWq3Y77c4F24A/845xsHerPJf8WjjTZN0eHjIarFk1mRpS1VX071jYo6agrKsuEazZclJ3nwopW5kOkYXXF1tuLoc6TvHvdt36boeJQRSBhaLIv8+rHj7/Rn1USI0C156y+FpyUwL7GWklhNrOCX0HMZomR8c4N3A+ZOOw9U93v3qmwx1NmMJD3bnKY8WmDslu9EhUsBtA+snI3HMDFuXHNWigJgRiG6MmZNqrnWogb53KJU3mUppnA0IZJYAuUgza0BOBsuUiMByeYhzFh9GpAr8jX/vX4II1X/yO//jN2+/8xp4w+KW4cHX7tJuFetHZ9BoXB9QKjsui1LgfTZ7ZCODJ0QxoWGy0Sp6y1FT85f+8pLLz17w4POBrl3wC//VS/7Yj77F+19b0pxKpH+LJx9c8MabC3Tl+ez5Of2gWBx4gpOk1BPcnJ/6N3+co9lrPHzn86Rxx1W3o6g1MiqefCC4+tjS71s2m5bFiabroK5qotxQ1gViFBTa0q1H+haOjjpEZXh5aVmejlQzSZSKZqYAT1WtOF6UtE8tBZLBJ4QBEQP788wX9WokJU+6nLEfe04LRXc58hf/3b/Mzg7EUfP48WNUoRA+MriRocurOwEIZSlk5HjuUXUWOksSZSWYzxbYocjGCQHjKIFiQrtASHD37j2s62hmBh/zNDWEyDgGiqIihpGQHFoYDlYHKBVQpeH4bo74FF4wjntMsSSJGhdHTt9aUpqaODh0MmggiHyAfeGdd/nhr32N2YHkrXde58tf+wHKxlBwitQFbdcy2DVNs+Dy6gpSyuY7H4lRkOipy4Y7rx2z7Z6zuYz0m5Y4RLwIFMFAaXnfSB6/sDwPnvnBIUl6ytpQ3GnyFHLnaA4a4gz29YgvEs3xIVf9gI+A9xRR0nU5M9ltNrQ+EKQkypxvP3Yjq0XNuLfs1xu0gMXRId1+T2U0qimQRiFiNmHUZUldl7jkSUPgqCpZzix95XncjfiQE2L60RJ9oh8GLi/6vFbTjiQUw2jpe8/Z83MSOXM6hYBUgrKo8aPC2UA3rCduqs968JQ/06ZpMNqw3bS0+/aGZ1k3NUYr+nFA6bx+lTLrV6/z3aXMk4HvzqAWEzA9m3hUbjZ11k+mEPL6V7zia14fruPo2e9bfvzHf4Lbd+7w7OwZztoM3CZPe4KLOJcndPW8IgSwg+Xk6BaFabhaX+F8nobk4kTePM9r96+col+Vyi50IXOxFELKKKuUnb1919N1PTEGlPYoLSBqpFBoozNeSWiqsiQlMTmM8hTJ+8h+t2N3tSFZx9j3zFcLPv9975GEYLfdc3gwpywl3TDiQ57sOtcT04ibYl1XqyVGa4Z+QCuF0YqmqXHesb5a41xAqxlVsaAsmrz6nybFzlusHwCHlgZlmqyRTpEoJOvdwNnTCzS5EDV6RkYVeaSWhJgoyowy2rcd2hiSgBA9i+UMow0pRlKM2HG8WfkrbSjLKk8JlcToVxOtvuuw43BzrRitbyZgSmf01fUaVqk8VZzN5jTNLBeXUjEO9qbpyPiqcjJJ5YbjGiAfr6N+hbqRmGhtWK0OuLi4zFPIELE2MzPXV3uiNxnuXwsOT2qCt3z4Wx8Q2j0HByU+RQ7vHuGMpTw44Bs/8aM8fOM2b737BnfvH/L0xScMXaZblMUCQUXTQPBDPsxjRMkSpeusJxUmo8dETgKq6mwkFGrk8PgALRWmmKGNoZ41KKPYtbucLEfCBZ+B/jFMHgU3vS5P9DlhTEp1M0m9llnkSOvckPWDpSprVqsDrPeTQY58FgdP21mUyJpXZx2mMEip2Gw2OQEq2Mn9X+bpGgHrbDbWmSrLRFLuUeeHDfW8ZNt201ZJUhQl+33WBue1eZpiYiNNU9HUM0LoUELz+uuvc+fukpPXoJp7Ls8tha7ou4FEXrcXRclyuWK/31NVNcYU2DEjAefzBcfHx6+0uZNu/fp6TCkyDv2NrlcIuFpvp03RK+nItSEtT6kLQN5MhZ2zDGPPnTt3iTHSdTu6doOzfZZnKJ3DOgZHVdZs1pbzZwOqXnDrwQHYLU7kZLLhxTl1XRLLgUFBeVCxek2yb1sK3XNrcQ8rE+vNM4bdluQF+IwGlFoRGLH9yMuXLTYohHJEOXB4LLjzWolRWeqh1RTHXRicH9jvOwgS2yuWhw1tu2cYLMFF/MTvlYr8vmomJqHEqMy0D35AItGy5K//7H/0vV+o/v1f/p+++fzsnLbzqLt32e4Em6eO4cWnFDpix6yz8cGxWJQoDdZm12oIOU9cqzzuT0JQlgW2c/zZn1ny5R88oKhKnj2t+Nv/9Uv2V8f86Z/+KX7j93+d3/xnH/DGnRm+t8jlyNU25xR/9fu/Qq0WWPuCdtwzjA13bj9EzW/z6e9/ghVrjLYMjxRF7wkpP7eTNzQP3l3x+LPAvbsNMe5IoeK9H5zzfD3iQsHVdkRIOHxQcXgHvBiQe89urajKvM5oFiuEKzh/vCUJjxUS8Bm6bRpcDIjgKKQkKpGddMnj7I7f+/1PCbHmw0e/gykifu9JQRBRJGX50vffI9oli
wXcvW35oa+csB0tm8tE9AUkz2Yz5BSw4DAqv6dMq2BnJUbXFGXEuoCzkSgCEk1KMPZZ4F/X+UJMDrphpKpK+hEWt2pSjFw83aLkyGxZ4qNDuMh6fUW3DujS0KceYyBKEMHx9R/+MVbHd7h9521+9zc/o6nu8+mnG7717d9js80axPPzpyAkbrQQJEQLPrFaLQjjiJiVOKPoLz3KK7Ra0VQLdHCIWuLahJ8teFYafGwpqyVXVwPt2RVXu8DJwwXz2pG0Z7NrqWLFfHXAvdcesr7aYq8sy/mSYRzwm4iIElFpylpQykAhTNbRlXvaviWEHuUSvbNc7TbI4KkLTX28ImpJo/NUI2ExSlHUFcNoOetHLp3kbB9JvkFkqNp06GcdWjMrefg5OLzVcPlCcf/h/akDdmiddUZZUpZw1oPMiWKzuaasHWWpCL4EEdFSYwdH3/U8ePiA09NTjDF0XctobebbCoFQKk9qUpoi/8SkWeOmsBUTTBwm1+tUhJaVoaqyflCIjJCJMWUNZcrO8Jjg8OiY/+Dn/hpvvfM2v/Irv8Lz588oKjMlJkFwAjdmMb8uEsMIQ+dZLZYMg+PsxTkhWhB5enq9hr7mSWZs1qt896yPS8SUV4xZ6phNPEpInLM4lykAs7lhNpsxDilr+PB4p3DjBPY2JUJEggMps3takKdEKQQKlTBSoEVOZxrGnpASVZNlNs5mqYMyFmUczkFVrtBGYcceNbnXlc7IJsgnfwyJoR+p64qmKRmGHUKmyfyQ+ZZlVU96WksKDp0EVy83BNsyq3LEarMQjENPMyt4+MZdhNB0Q0bojKNFTLrIw+ND3v3cWxweLPHBYe2Is45Z02RndkrT42WahNJ5Sn+tsSyMmVTN3BzyZZVXrXG6rq6Lh2KaFnrv2W7XuZCAGy21vGHliknbnFFX17npwXmcDTdg9jzt6nn27BlKZv23FBIpwWjN6mCR19bO5vVyn+HoehZIwuKcxAbYdZe4MPLxty75+A+27LodH370IR89+ghTu9zsaI8bHWO/Jbr8WpzNU11jakCDzNr/1+/f5a2375JiRT9000Q+QjJoVXFyespiscwc1akQtXbEFIqiyHrca/1pLpoEzrncWIpXUarXDcD1368n1ylIVssDnPX0wzilK43oIp99IhY5gSn6m+bBW5919jKbPY9Olvl5OEXfDYTY5/875vsMAqQWRGG5deeI5eEcOzpImZIRQkCSU5FIoJWaGsBEJZek6PHR4oPl8WeXaH3Iu597j81+z9X5NmuVnaeZzbHWYa1jtTpgvd5QmJKHDx/inGMYhhtjWQjhuwgIr66bwhiGoaft9jnowmW5UlmWN6bM6yIVIlIUmCIXspAHBCkluq4lxoBRkpSyGbBpGs7Pzqf7euLqao1PllqvuHh8hSNRqYTre0TvaI5qLlrP/S8FohBs9x11uSQBd9+4z35I9H6N944vfemHKIoFMkUEkSASFIK7917PxmadSMLhvEDrOUoYup2nLGf0/ZADZLzFucCsydSAuimYzUvaruPW8TFVVdP3/WSmE9SzghRNpg7gsrRrdATH1Jgl/sO/+i+B6/+/+G//1jdv3z7i0bfX7F7uefnxGf3uChU9Uo54kbCdRgjDfKHzRCKWuTPWiVI2jDEiYkIlR1QlLztPsxj46tcbkh549FHBr/zDns0w8vjRlq995YvMDvcc3Hud5lBz8emWqE4Yuh47dkTRUMkeq3esLzZ88p0P+D9+6R+zubjkVqOwW03bKkTyJEZmS40Qh5SmpCqXJJFYHBxy/uIly1PD8UPBbtPhbU3XeYRzWF2x6xNqmyiKBqd6KhEZ9iNPPt0jWwCBThFBjtWE7KSvo0b0kXJR8yd+7EdJmz/k+KjmO+eep48+IIge35FF6liCgoOjGcLBZ4/W3HnziLfee5P/+9cfcXZ2HcmW4yTtOBLSmG/eusyHQRGmLl7lhJtE7v4wpBBv4vCkEiDypKcoKpz3oBJSGYLzvHy2od+OlE2BTTBfHTGODuscyeWDoe96RJTEJLh7+x4jgUefPSIZyf/5j38dUzu+/cnvsj1/xP1bFadlZHlwjz0dyQaOD2qCUkQrqYzA+YE+KqqDhnHXcu+1+xzfOcXvB56vX1I1c0IINMua87anXkTadcnmRYewgXtfeotxa+nPHEnW9HtDe96zOqzZe0e3sQw7C2Q6hQsaYTSjG8GD8J7tGJHBUCro+oi3Jk/wJFgfWZQVtVSIlBi7nnbTMvQtWqbMqI2eQmlizOxK7wNaGRA+N2sqYEqdE3ic4fS1I+6enPC5d97i008v+OzRYzZXW5yNtPue4B2JhC4yDUIrgZSeO/cddhSsL6/XrYYoA8jMuzs9vctqdUo/eq62V0QCKfjMFJQTS2+KZSyKAmKe3mmZD8DZbE5VZ7yNFBKRIs7nww2YDA7cOPEFMjvMiSAVo/X87u98wD/6R/8rTx4/QUifV+w+H+jRB+7eOWU2m7Hd9BAjZS0p6po43Siz8XvKE1cSaa7jVCeuX5oMX4IcG5pkXndyXbjmdamP2UCShKeoMqA+xYkYoPXUSLuJVRoxVUSZXPiGEIgh8z+VhJQcPiZ0YajmLuvCfQIchVEELxgHi5CRkCI+iHxYS3AOVocrDg4WbLdrYgw0zZzCFNgh46K0yWio9XqHDwlpDEJqnIsEFxApYAxoLRBJEaIkkpitGg4OlwRnqZtDivmM/TBwvt6y3W2pCkX0FpJAJw1R0tueJ8+f8ejRE/rtQBgj1uUJbEZJiYwbCinLQvzIOAx46/I1cX0wTBMsrSRlKafJd7yJ3BQyfxZaq8mFnn8sT74D11Gc19ri68lhLsAKRJqmdUbfTMquI1GzOUaiC4kNAzHlg9uOkavdFSkmyqKgKMGOlqFPDIPAmJoYEn0/knxktayQuse1V9QGzh4P9PuC1UKyqDOrdYxZ6uVDgiR4+OYpIXV03Y4MOwisFof0LTx48DpPnn5K262paklZNdhR03eJqoazp4+5vDjH+QHBSJKBFHRuZKUhUZKwpAnWX1WrHH4z4ZK+u6G8/jPERAqKrt/R9bupUVCUZZU9DBJiiBBLjEnoKXEOmYsyoQJGz3E2M0HHoSfGrB+OyVM3FSIp+n6PIkI07DY5fS16x9BJZJUIftKtezAlCB2nTUvEj1uMPsL6HjtCkCMpFcwXBV/4/PdRmcR2t0Uww/mOFB0HqxOsdeiy4MG9t/j4k4+JKU/ht5ttph+IRIoZj1UUBSnGjFgcLfWswTlB08z4C3/xp1mtDN/+1iPKqsTomqo6JMo9UkPwjhCuwzam2FqdY6DzlN8SYh40BLnljc+f5DNfNihlIWpE3NM0NY0+oiwXHLx2wureEXXZU9aKNCzw0nJ0tyCNPgcJJM3zRxfMFwUnDxqsGxm7gXZ0bKJDLoBYQqpYb68ylUcaKlOx23S0O0e7D7RtT1PPcWHHfJ6pDF2XzWcpKuaHC1wceP3+GwSxoWoUCIegBDx+jIBByEhhmkwMKQQhCHxw/Pxf+0++9wvVv/XLf+ebH/7WYy6erTHFgKxK
+k1LYRSEHUoYgpek6DAFVNVsWhvmnODsRiyJQVCZhpgUQ29Rfsa/9ecNmDmPPjb84n/3ksPTA16c77hz+zVqXfLPf/U7LJZfZP3JGafFnMNyycXFyAfP/5DLfYl0iSB76irQHBn2LuZJROzxNmF9IqExes7mZQcuIkXg6mIgMnJ0u+DqbEuUI2HbUC0km72DK4HdOjARGRJNc0BUklEMiCpQphVhHUhCIYwiTDoiVMoTJxQ2BFwa+bE/9kWenF2xe9mz3cPhwYLj5RHnmzWzRnNwVCCNwA6RFCLaFOy7ng8/OOPk8D4HR0fsdm3GsMhrDExeX47jiPMDKZbZterClCUcGdqM+LoWnl/rBYEJ2ZMj3UDR9h0xBmaLiqLShJQQMn9OQ9/lVap1GGXyAaI1IsH65RXKJVrXs91s6QpP63aEsMXUK/R+z5fnl9jmLueXa1SKdKphlCN1M0OpBWJwzIuG/S7D7A+qJR996wmb3YamkUQ/MnSBu6+XfN+X73L53NNuBqTaUZeSYd+BM4TeIQfN+2+/xp/56R9mcC19u+bs6Ro3jMTRsd/2VGVBsyiJRPrRk1xJcppqFpnfahh3e4QP6GqJkJnbGohQalRh6NsBERMSyXK5JKmSYYyMNkGUlNpQaElTlSQSgwOdNGUEay2iCvh94tFn55y9vMSNnhQcadJZ5VMpfzYx5KmlVBIhKt56+P30+4KuG2hmEjtaoqumlTK8vLwkxhFr97TtHqVyNGgMuTtWMic1FaUGArlSzyigkFLW1pGfQ15B5xU5KYPhgw+TySqvIoWIRH+djBWJydH2WyIdUmfdVxSBpAWIEiELhnHAest8NSOS8nfFWvzo8XbKha9qdI7TIU7SISNzEywmZ7SYppKZn5huDheYol1DXmHfuXOXxXxJ2w2EmIhTxOPR4dFkGgvMFzOappoMW4F0U1SlSVqg8c5P8bkKo2bMZwsAXBiwdsyJWs5hBwGUSJkL2eAj7d7S7UeGIfHOW1/hR/7oH2e323JxcT4VnwJJQisxcW6zM1wrhZJACjcxlnHiLKYIy9WKdz7/Fv0wcPb0CSr2hNYzbge0ELlYMTmSM4ZJ3hFzMWkm/WdRGGbzOePgb96LqtKMY4eUOe0ppYz3gdwcrFYLVgdLdvstpshyhhizXjBr3eKksc7F7CsN4XWaEaQpMSkbdq5xV3q6t8mJW+xA5sjcGJn0vkx0gYR1GWfWzAuahWV1lCh0LkS0yVSCg+MZxSKbwJxzWDsgUVT6IBeJ3lDIFYuThs9/7gsM24FhJzh/EfEehl1+bFLi8PCAslRcXFyipL7BCb68umLX7bnoPsIsemSVJRZlldjvryDmKWg/btGmxOdsB6RSNIs8WR6GgCBlHbUTaF0Q4wAx3BA58hQwTw6vzZKFMXiXBxF1nZFXbjLfeR9YLlYYU7Bv15BCpncUDVU1pyg1dT2jbCymkOz3fTYOAFpXkCRFoYiuQ8gaJ8CnET9Y+m2fmzIsyQpunxqqRuNFDhUWKaJdIqwjTkiUkZRzxeBapPQoCZv2kn/+ux/w+u173H/wJk+evUArj3eC5bJhefj/Uvdmv5ZeZ37es6Zv2uMZa64iizMpSqQkUupuy7J6cLe7ZXfbTtKIYQcJfJGbxLCTdqcN5EJXAQzkIgEM3wRBfBEYiYMg9kWn20aP7jFKSy1KoiiKUxVZVafqTPvs6ZvWlIv1VSn+DywCvOAA8px9zt7fWu/7+z2PJi8MH3zwAxAdzgmMLgmx5ZnnL2FdjY8508mEKAJFoanXkohHikT1+PQrn6fZptjPgwf3qMYwnw8xmxAJriDGHx5QH0dSUuY35aS1kvQ2xQY+98ZnGI8LlouWzFTEaBFoEBVSRLrakk0su1dyNouI8zk+V7RecHjlacajG/SNY7Pp2G5gPBKc3w08vLOmyAy3Xoh8dOchys7olg3bk4au69jZmdDU6/T/GH72MXqEeUxq8RBTUUqq9L5o24Yr13NOjxtE0Bw/fERTW5qt5PLVy9R1Td9LpLFUI8PB4Q6npwvkUKidTA8xWcGv/r1f+ff/oPo//ON/+rXxgeZi9ZDKZqwXLWNtONuu2ZEe6z1CKCBQlSmwH/HDDSs1HpVMGJTObYihpWsir79Z8Dd/OaMH3n838ke/U2FKaPuaux/ep9lsabpjlDrG5CN+8pe/wCdHiouTu4z1hNmo4ItPvw6mY+lOuahLbB+YSI1QEYRC+4ymbSnzEmdbjK44e7RkcdZSZjvM5hmXnurRVcXyrKYPPdW0xG8tRhi20XPpUs5f/Mu3ee/dLdAxnRnsesvyfoYuoLEWQzIUZYAIgm3nsSpy5co+0Z3y3tuP+PSP/xQ/97M/w40bh3z3nXusG4dRDVlmUiu6d4OPvERl6SBZZPucnZ1gnU8P201DiElbWRTFsP6IdL3DWbh6/RClJA+PznDWkmearnNPUBVK/7slFGNy+q5PZiKj0Jmmbpq0Gh5A8qToHZnK0wMS0jRFJFTJNlqiLsjpaPolV/f3eebWJe5u1tjW8fzNq5xIzSf3L8ikZ71esFlsiX2NsAK8xEmLEgaQrNYn5MbT2gbrW6pJllBUONarCCJjuTnFOWi6QGw0XaihVFx5bsy1Z27x8CLw8YP3WJ215GMoJoLxZMx8fshisWS9WSJjz+5cYirLdrNhf3TI7nyX4/MzQoTNZj0UPaD1Hl3m1G2LjGCEQuuMxluaPuJiRBlNJjNyBFl0BN8Oq6X0IPfeIqUhCskXXnuGpnYcP1rie4dEJloD+knDPcvzYR0VybMCo0ecn3Y0zZYQEsxfKMd8N01fQKZsUWhp2z5lSOPgwnYONQg5UoY8NZfTATiBreVgAbDOIpE4+xiBk1Sj6esarEQhrdOzTKN1oiK1TcDojKJI6stMF+ky5CXaCXzXo2VAifTQzosKY3K61uL67gnLL8tVQtUQiSJFDIxSaJW+TjVEErRSKVcl4hOOY/pDPMkyPvaQb7db6roZNKLJ2NO0LXXT4IOndzbxDLOSosgQIuUFlUoTxBDT5CmKgLPQtB3L1QUQGI1MOiyH5HO3riZEl/J2VpPlkmLkEbIjLzQ3n7rFtt3w1lt/Towd3nliFIgnpSaJtwERIc/SBQSZpmRKps1NEjGknPFiveGisZhizHhe0rQ9bR0G6H/K+KbXJ2WORcI9ELwf8EaKnd3JsI5OtqO62Q6XJg1BDeB0eOx310aS5walwbr+CbIK0jQ7hjj8bqXfERBPohyPkUrpoCqG/LEaIgIpJ+2DS0pN4hAfA60lXd88WfWmS8OAopAGEXM2K0tTp9Z052t6CyYbs7O3z3g8IYaI7RxVkTMeC4wJ9P2WhgtQgnoLbd/g/YZqLlC5TWvyTlFVGZPJhNXyYjhYB2Jw6SIlBC4mZrKPAlBIYQZFsifPDL2zSGNQhaOcapRJxrLJvGC1TPi6GEELg6BHqVReikECP8QMxfhYwmEwSiX9q3BDVngoQkpBUeYJk6Zzdnf3Wa2OkVLTtYOUR4gU7eodwWdoHbB9QMaMQAOiQwpB33uUMYlqoUFLiRc
cHbd17nH/3C/8Hf/Ct/EZnc55d/8ZsUUc7amWVkGdJdfp7LF5cIln8HdbRXi1C0ozI5xpZEUYPEWqqqqhXwViKVWnCwFuxh6xCyBmQ4hy0dpsj4hR/+JJ98fQyvjz/6gPnnYQvYeuQ7nz+Fu7HPOz98if/I3eBJ4YCPfecyyYcPPDCYUSsVmkCTVCsiFNQTwHqoD63jHnyaEQzTxZMBfOYTH/lSf+dkzJ9/+V2KLMePBIEMyQ3YsvbglEKA8pDGqy25VAPnBNIaTFmweeUM155tk2XL+K5ACM08sVzRlkef7hKvVRzcy+lYy4mbkh0lXLiwxvf86Au89tIBqtvi8RfOs7u7zaNXznH2zDKBv8IrN99GbTim0zFuT3PubMCgP2aeBijRZXIyQ3sRUcNjejjgkSef4NPPfZxf/9VfY+5GSN+yFDZY8xUbLUEgG5xOR6S5pjIZkhlhVbKz3cd3HazOyG2G57e4P7hLeKFABT5GVJwMBvhhk3ZbcnDzJi0Ug6NDbty4j3MNltc6WFnS7lqqKsL3A+bzOdJ5SOEwxieZQpU7tG/JkxmeXiPQPp/55LOc3eqRjcdk6YBJVvA7X3mFFa/H5qcf52Ryghf6tP0NDo76jBLLSu8svaxPr7POKJtw7cmPoWWX3Rv3eOHiYzz+7DM0Wsvs7G7z/q372OI2f/rP/hjPfOHjbHzlKzz25BOkRcE7r36LT37uiyytXWE8Hdd8QHyMKcA5PC0xZk7lHHluWe4so4Rlf3+Xxx5/kvevv8u3X3mdJBtiJQymGe21LvFKG6E9rlw6x/hgm9F2RiuUaDcnzw2FKVFhLfbBOVphTOBCPM/HiJJKWgqT0gjWqOYZp6N9wssx7XYH7YUMM8H+sGQ4rwjakqiQTIY5k5mk22vhWw+V5+RFjpQK6Usql1N5XY5TgZ7kbAYeQsbkto00Bu0C9g6OGGUT1s9cRMoWcbfLJDthnpQ47XFh6zIbqx327tzHGEGj24NizMHAkSiP26ObvHp9RGWn3N++zd7+CfMs4e79U779yhJnN3sstTWTLERGPSo/qSuERQXa5/Bkhzdvx2xdvYC/0gIJo8mYfFLS7iwhwwaRjFnbOkthC5bDHqJa4vh4HzzL4PiY08ldSpcRNlp1FcdaVOjT6HaZjiZ4vkbLCFcZjMtrb1WnH1pJPbj21qqCeoiHaLQWS9V2dg+6ZOJDLf+6molztS2TAuvpmtLlSQwWJxxB4NUWUouENfsAYGofieDe7duMB0M8JergEVtv3/OiWrxUlRhXuwfE7SZOVCjl42mJ9Rx5moGx5MbUnqC+QEkoK0O1CEGQWuGLAOdqVxlsHVmklCYIfTxfY6oSKUKUpyAOSNIMT4HnBRRZRlWVSC0pK8tgOl50nRROQ6vToCWazCYZzaWQMpsyGk5wCLTWKCVJ07Q+NlJRlAXjZIqkJo9robGlo0gzuktddOBR5BnpLEc4BaKkMgpcXVUVRtIMoEwF3bWYZz9zHunPONqbsXl+mTMbEb2zLSpd4IoOJjHs3kno9poY4eMHjjCSaB3hREkQCZZ6IZQVp/0hcbCyEDjnKCnxPUEz7tLfO2HQl7Q3Ql55/+t8/JEWz1+8Rp7+PlcuXWT/aIfS+MSNNsVsibjpOB2MOToJCX1JlhvWVi+wv39CGApaSx5FOWP3/gnz9ISo6ZPOHSvtVYQ2jLMpVjmiaInls+uc7G9j7YwkM2itiJsBlSsJYo0MQxq6CTiSoyGlsFhZ1ZZjsxwslFRIZRDM8ZRPMhbobkjYLjDmlHm+y7SQnJ7LIb9NUDyHH6whXE5aFKigy2qjgSr0gqytah9zZTHOUok6YEMB4uH5ZKhdjiVC1hMo4WoOuROOIAoQMuPgzoucDocsdx9labXJt16+z5nzHVpti51dYmVrC3nwBvOTOVVgKUvN0rImKYY4uYrngedZWr3zRPoSZ89doD86oNnM6PcPeOJjzzBZuUOSzCmDFrqfY0VBKlM86zEZD7l85XGskvxJGd8VqFaeT0dHnL9yiXEyq9XMoxG9lRW6y226F5uc9ncpS5/xNKfZWSF4NKTsTxjt7xE1QyBEIPH8EmsyrK2tg/K8wPO8h60kraCsHFp7uAWHKpuWbF3eYmoqsknF3v0jKuf4n37u70Oe8F/91Z/m6cfbvPnehD+48SaD2W0Ob8HsdJnp8QF+IB62p6QEqR0SVwMxqUE63MJ7DR74sNVWLgIBuiI0PpmsMNrjp37zzT/a0f7Hv8v7o9n/o1Wzyv7R9v0Rw33odyE+eJw8/GNOBoszrV5ACIcvJe9/c8z73/zO7T3W/WAGdpN3AGgD7cXXrLrT5/07fRqLZbZvDSgkfOk//DytRo9Cxbw7vs/2cR+TBAhZ0mku02us8Obb95mlUypX4oqC5ESSlAXp3OPG3RS9dAmO73Ht2kXWL1zlX//8Z/HNW8jxiM3zy3z9tVq49MXPbjIoM4qZ4eRIs7Kl0VHCu+/02b65xw9+/jPsHQwp7Qypz9e+jVnMUX/AnJizqz02aMcYIQAAIABJREFUVyRxW5KZgO27in5/TlWUFGmKqUA4Wbdeo6COljQWUUlKW+LJBInh26++yWD4FI8/+XHOXVwibrZ577bh6Y9d4PKTT/Otb7xENphSpobYm9GcDLl4YYvKGp574ZP4nuS3v/pVnvlTH+OZ555lqbVBpiXJdLRIyBEYKXjjrXc5f/4xVp/+BHeGJ7Rkg09/+UfJqog33rhHtyOJgqgOuPDryUlaSqQwlJUjzB3HxwNOR0ds94+YVQXSSk72DhYiOk2ezHj6iccppMdgmDMd5uzt9jkZpZgyxxRQGYUXeIgA8jyhyhVRHNeA3s5wFqhAVA7lw8rSMu1OwuB4yI13Ba1WE+1nFEVOpCPGk5TpuHYN8XJQaYUSbbzwDJopypNI7RO4lKjdIatKrAwolCKRJ1QiRZCRGIEf9XBmyq29b2Lyina0yqNXnuD49BbDYZ9qeITwwCRzbh3t0Bssc661xMHJiHk6ZnW9watvfYPJMMXLS/Jyjhct0d48iws0b904YFaMETEYJ/AKh/A04ONLSVHOuX3rHVorMecubDEcDOg02pRhxVK7RZ6URJ01vOYK+bBPljh0IVlpnOfy1lVu3XiFyemARmuN9Y0GIksZnAzIM4Mf92gttTg9PoHM4UTtJiFwCA2UtQepMQa1CAZ54GT1YUurDyyuHlRT5UOj/boq9AFvVSv1YKVa+a8kVVowKXOkF9RpUqZAIgm0h3IwPDmmSHM8ISjzGkgrX4GzzJMMKyq079GKl4ibHsZlZFlZh4FIiVASv+lhXYkSmrKsyJMC5Xm1t6uUmMV7qIWlHmEYUeYJxjmyLEFKjdaaIIjqaqYMaDUaBIFPlhZEQUAcBIzHY8qqwlcavyURzidN50yHJVVVoiJJ5QTT8ZhOq0nQCqlKS5GXBMIn8AKSJGGWF0hR2341GxF5XlBVhulojrWWKh/ihELi4axBCo/Ql1TW4vmaF569xGh0yK03B1y91mPryVXyYEQzCmgu+axsRBwdnHL9+pgLl8+R
ZfuM9zLMNGT7+AgXtol98P0GZWFothqk5ZjxuGBwMMeWPsKNUa52HBEY0jRjNBgwn83IkpJqt044PNg/4P33Ik5P9zHUVJFAOEaTPsZ4hO0EvIy94wQlINIdKMb0j+pqcaujyYuMyey0pmTYjGani0HU6ZJaEHQ0+bzEU3Xqmh+ECKXq9CtbEkU+lakImzFaeXTaXVxZMdsdEMdLCDyky+v7vnNQKVJtCSgpEkc6UIjIY24C2p4i7swZ2wlXNp/HVwG2rAGdH0W16LsQKGo6nK15dTXXm7p6qjyFKy0W91ALIxYC89oKs77xCuHI8xOG0z7b29ex3jGj8SF5IhhPIMnH5KXg9OaIZtDl/Z0XGU9OiYJlKjNkOEiIG8v0epvM5wl5ltFu9aiqmHFyynTyCL3uEyw9cRVrfFzpKKYVUU9xOpvTtiFaaYZmDqUic5L793bx1P9PWv96lmPjBvdv3WV41Ed4ivWtM3UmfewxLxMqzycKW+TjlGyW0lteZX6S8/Fnr/HWO+/Vyk7A86HIzUKN6bBGIzyNtWbxASqksDhbJ1RJLUiLKdPZGBk3abQjYt1kc32VpacVaXrCV9/4Oq3sDF//1T9APyIITIif9umtRiCyOlnKk2SZWyjeJVVR1OV5HnxhJHW116G1XqQAAcKSVRW2ESLnHs0wgqxuf9+eCMIgRPsBeV6fRLBIDlnAPSHBVYY063/HMf0owPnMSpt3hzMK8+G/PVDWun9p+f8vhvtuu3EP/8M5yM1Hg+bXTyb/Svt8MNP863/zRco8wzhDmqckyRyhFFJ53Mn6i13XXFJraxeJ+sgIbo1fYvv1V6nKgsoYqtMZb/3Bu3z1n/8aoa+osjlhEJDnljyf80u/5teuCcaSpYa4oZDS1eI+Ad//b0g2Guvs34lYP7fK5vIaJ8MB2b0Zd+/NuXZ1i3h5jTtvHbC7e8pxf0plFirgQlJVCZ5vQBjmeYKvPfwFAPAJKUrIspI0nXD6jd/mzW9/A0920Z4gM47Xqg7X775ILCeEpebS+ZhnXjiD9mpaiq0qMEPQIX/6xx/j5u33ScceJ0eH3Ll3hF+mnB6fcFjMCAPH26/u8PorX2OcG3yleGRzi3F/idHJgG6vR4MuRVFXv4TvYUULKBGk5HnONIhRxYizqzHTcsLk+A2qvCSdFAgVITUI4eNVAaa0+GlKOj9hfLBNOS/ZWF5D9xyTSYIzPmmRUOQSZSTjwwxf1pU6Ha4QtDKkv0tVGar5GBEJeptdDvvb3L27z9LyEuPhlHRaUJaSCmjECiEtyofSppTZGJsnlLkkDHxCM8WlHjENlLa0gjkdCsZWkFWKTrzO5tnLLKVdDga7jKZ9oGKcJPQ6l7i8+TQeCfPRIUkyZl4MEEXJpUaTjpaEUjAZzUmFoHN2HQZD3EigvA6bW9e48thVTgd9Xn/rNY5P77O6tkpyOGQ2S+p0uyJHGXDFjHtvvYk2jl5vndyW6KiB31ghjA1lVuBjiRshVVnS63RZXl0migROJvjNJcJGk/bSBko47r7zBid791g536G5dIabt+5w46230FqhJRgMCP2hLtYiyeqBN+pi0v6Bt3St9n9grl9zUD9YTym5QKpiIZLKEAjkgyISdbVQSo3FEHiSPMtrHmmagrX4ElxVAbWVVVkYKgs6sDSiFg4f4VWoQOMqjVRVHVlsDEhJFIdIJLPpHIlH3GyQ5oZ5mhMEAUpZ/MAjjHUdR1rUziYWaLfbFHlFmed1CmBVcHLcpxnFhFGM70eEUQtnHGUxZjavU70asSCZzxELy6TZaEZsAzw/QHp1YMx8nuArD2ctWZLgaY9GGDObJVhqw38WISy1CNbVzgYlOFHipMNTdU69lLUgTvke3bUIEfg8/31bnL98jswbY3XEaGBo9AQ37t6iqkJQjpe+epveUhNXaqhKSjNF5hLrK4JIkaczlA6IoyUmwymNRhtX1XGcUGIKD60Dmk3BYHgIyrF5DobHijCQTLNDpuUmfjdCtR3TkxEq1LhSoIMJftiktRIxmY2xmWA2nBHIoHYg6SxjKwEupNnLyOcxQawoC8fMn2BKRRA2CXSJqBwba8scH3sYZ1EVeFahlF/7fVtQJqLKJMfTnMde+CF6yze59+7bSDrkBGAsigIhMiLhIY3F0wFYQeFmZLJJ7jUZz+9Qhh7dpafwvGWyYgpOUaWG2JMIo5jLAk9qUCCseyjQrjU5Ai0XeOJD91JHfU44BM4ZZODIizlvvfktPA/ScYul6Ax5eUr/zgzjSd57+5Beo8vymQHF+IRkbMnVGCFywiCgfzAnaki0VyAcjPKEKIJmw1AVJaaskC4kDGKq/IRZvyRa77GxfMjpBCbzGZsrEUHgMeiHCK9Eme8KD/9Yx3d9JWmkYDYHTxGv90gHY2xW0lvv0ektMRxNcPMEBEQNnySxTA6HZEnFl//tn6B/+k/on24TRoKyDPD9irIscIh65mEUAo2ztXm0kqK2bxAsGlCW47uHdDbWaa40KAqHrJp86fu/xL2dI178xou4cUqvIVBll1hlrD0S4nVha/Myrx7fRIoPSdfcIp9JSqSUlCZH1RV7hJAP+VeIhd0KAldkTMqcIGzQe7AZKzDW4coKXwowlqIyCCUWhtcfRAt+eBg0sBAcPIRpi206FvOu2lbjXDOi5Smsg0FWsJ/k37H8+C98mad+/ve4P0tZCX1WIx8t66pnUhn25hnZRwDKldDjXDPiJC3Ym2f1vj/yFfGvDJEfrP9R6323baeVIvSbvPWFy3zmN15hx3oUhaXM7cPcbiU9pBAYDM7UAg4poDJ1dUQqhSsN6XTG+voaBoESEhU0EbKgqhKcsThTW79I6dVVZFOnvlhXIaVgcphBpZGlZXpYcXJnwt7hHYqqQqo2p/2EX/nlr3Lnxg6tsIVWgEhA+uSpIowCZoMRSjmE7zG3U1IHaZpjTQ2+2502QSNA+5ZAFhhOKUyGyTL6u8c0Zytc+b7vI1A+0/4BnrrEdH6PaTKg0W0yr4bcefeI/uGU3d090IJsWjCaFFy5cIb2RszySVi3lVzCIBtgc0Nicnb1lNwuUxQzJuaIWRmzubLK5lobJUPm0wEbGx6+yihKqOSExy6s8/0/9CNkJsGWCx9QCpIqo7IFphSkyTFJmvNoGNCKOzTDL/Dq+2M21h8jbPToH++hsylFaknGtZCRoMKplHMX10gLj6yIKCqLlpZPf+qzTGYzBif7fGJzmXw1Y5wMKWUTveaRZiVlUYsJB7MZy8027VaI167wqIicR1alDLMpDb1G7EFaCUoRggVrJIVWTF1BeTik0VziwtkNxm+8xHQ8J44TZDDHcxUFFhW0WDl7gRzJZDZlYgyrG6tUiWF3d5/ZPGH94ia0Lfv9U1pNi5U+2/t9CjPD2Ix8NuaRJ5/DlR4v/f5XCYWH9sLa77Q0DAYnuDff5tI1gbfcIaoU60sbJDbh8N597h8d0Go3aDciRuWMndsnCGsYzg6p8hw3LzieOi4/coazl86xc+cee0cJ13oxn/n8F4jbbV7/2st1BTLwwJgFCFWLeM4PfFY
fVFM/DFIfANO6C/bgcZ2AVV9TwT6MTRX19VaoeruiLlB4gWN1bR0hDEcHx7WgM00W6VkCJyRGCPKyDpDoNBsoL6coK4rCEEqNMSVSegRBWIthlUJ5AcpXdVteKoqsxJoKp+RC3FVRVsUHtlso0jSnkobSGIwB3/dptFpkSYIQkqqoGJcTirLCoUjTgmbcJAhC8qIkLQqccFhTQiXoLbdwOiZNM8LII88ccRwS90JGwwl24XdY5AW5K/CD2tu6LEsC38MPfOwCpDrj6moxtR2bKSzWeBjhcKpi/cw6fgjJfpNmD/YGR3Q7kv5RSU6F8iSN1hLJzLB5vsfVK4Ibrx2Q5TOMUCyvbZAnKVVZkqcVzUZMks5pBz0CT2PEBN/zCbwIrSTzWUGRGbSOqSqJFZZG26fRyynyBOF6UAqydI6ONUFHM5/OaXcjCuMzGUJ7xUcIjTMRWVoQNHI83+Pk9JDKhhiREcVNSlPQ9JdAOPzVhCoRZKOSLI2I/SWEgSSd1xPa0tFud8FJTFXQanUwlUTIiK2zF2h1t9j64iUkJYe3t0FViECh8lrBL02J1A1KaUj7Jdc+to5NFEtLPWRrk95ghs4l5CmRMGzf26PV7RFuLpOX4AlRW7gtzhVnDUoo9EKw6GTt8fvQcPhDd8S6uuohbEa3dZbv+cSP0ulGjEZDKCOQO9x4+y5H/buk+W1yP+H9G32qYk6sWhQCuu2zNJo+toDDgx3CVkzDX6csHeV4xCxLOK62UYMYX3lcvPIIodYsd1rcuzuizErKdot2FODJksl0hKBHIVIwf3KMVL8rUA0KQ7DUIJvNqaYZWZYyHA9wviDJM2aDOUtrDarKkc5KyqxCakVaJNy6vcuZC5I7dw1hoGvPThVRFCmmklhrqMr64vAg0vMBgLRYbOUQpsLLStLjPg5BEDr29/Z58aVvsfV4gzt37nG269M7/yzTao50fY6zhNE9ODi4QRiEGFO7UlvrQNTgOM8zgkCjlECquhrwDz79DF/cWP6uB+tzv/h1AK52LdRN8Q8drMXP8Duffn/0we+Kig9gc+0w8J2grn50oRlhHLwzmKEEXOk0qJzjOC34qDEp6tSpv/WnrvH5syuU1vKr9475i7//9nds35OC1SggrQw/8ehZfvyRM6yGtRXT6ycT/u5b2xymOc7B08stfuLRc1zrNVFCcH045a+/cpOXDof/0v79hS9lYRZG49RUguBBlWUxLFAs0mo+eLdwvLsNOD73i4fcGs8pF9w0RclWM6Tl6RqwFyWHefGwovoA/qbFrHYl0JosS7l//z7nY5+2p7iXVqS2NvyOpSDCry8ulBTWcTJLmJTVonLt+O9+5pf4qf/03+eRT36K/cGUg71Trj5zlenYcGf/hEY75gtf/gme/cQBv/vrv4ISijjugdDM/BknJ6c8ce3jDIcnjEdjlPPIsoSPPfkEjVbA9t0dlPToNJrMiynj4ZQgdEgrsNrSXZH4pePlF3+b3NYJVM1Lz/KJZ3+M/MY3+JVf+DX6+YA8SZnNxnQbAY6KNEnYOLPFPK945/oNyszhtTSN9TMM+ymmnEPlGB7O2OlOqWSJrODS+Q3mlzzSUpNPB0zGQybzHr3OCmU2p5QZSta8LlQFRuAFPlYVmNziaYkfACYnUIqqqLCxx537I0YnM1aXt7m3s0tWSbZWLbGwuKZi977hwmbA2prEC3KO3jvlYGcfa2zNH4tgba3D2YsVSzpnOnMI9QQCy2g0Q3sN/AjmkwlJmYPzOZjBQerwAo3MDZHfoLuyxkAEHJ+coitJahtMvFWidR8vnxOEDYaTKU77bF69wtnZExzsbSNlQmd1g25rg8H+LQ77OzSXV3j844+zf38bIQv2Dw+ZpRmGlG7ksy5DTrMhVWkIoiaZzUhnY6TJcFlCu7nC2spVLqw9QjPwePlbL5PMZniBQmkfV8JwdEpx/ds88YlPcenKNYSojcIbcRuLhy0tR6MjtJKcjI+5fecGyg/xFZSlwek+aV6x3gg4//hjtDdW0V7Mt97c5vzVZ2m2e3zr936HNE8JAh+blR/4oC7U/A/a/P/XyumHR32trjscD5LxnIOyKiGvt1WU1eK8L1Ceot1t0+61QBiSeR1WkIxmiIXLgHCyFiw5R7NbBxwUZUmSWVbX2iRpgq00zkqKMkWqCGPr4JhmO2Y+LynyDOnqCFbrChoNjcAjTTOUkigpyPOaMqBkhBcofFnzRifTMb72wFq059PpdCmrBKkdWimMLUizKV6k6UVdZvOEJM0XrjCafFFtzgtD3j8l1AGlUjgLtjKIh77dLCbfCqkV1hjSeYIfhcSNmCRJEAIqU6G0whiHtRXaBycMvtdg9+4JnqfQ8ZRZZsnTkv07jtk4Ze18i8Nthc0VrabjaG9OlVWUVY6rPPJM0S9SHClVkRLHLdrtBkoVpJMpng+ep5gNckwFWvoIAVHgkWcVlTMEDUeWGqJWXrfBS41DIbBk04x0GlAWErnikH5JVqXcve3RW/XJiwnCNVGs4OwprXZMsxUwHOdYJH7QpRGfoXSnSE/RXFLMKoWVAbYQTGcFynlgHEoH2BKSNCdqxHhBTNBoI72YRq9LjMeMgKe/54c5ufVzKE4pAS0EjoDCywl8DW6MnEU0zNNsnN8kbDxGZJdYvvwYXhyTygytQ9bOnUEpR55XdfSq8+rvvJIoJXDGUgmDlnXKmanACllzmD9EMRRCIlBYl1NVZe3yokJGwxyI0Z5mOpvROduk4jytxjpGwXAyQUWvE+oGo3HMeHKIr86yvLSKdjHNcJ1Qx0htmSVHDEcz4qjNbG5QQe0YIzNDt2nJM4+94zamkZPPEuYiIgw3MNkpSSaR9o+nm/t/Z3xXoNpcapBOp5R5jic9vLxkvneMSAs6Kz3KacLEJlRGM5mM6LZ6FPmcbqfN6WjI5ScyRtOA3duKqJliSwVIlArR2pImGWVlUXKRH83iJF4Iq6SsM6uLZIIZK0rTIJt7/Ppvvsgny0vIaER/JHhPeDS6EcZmtDuK7VdO6B/2UaHCUUfv5Xm+4GBplFIYW6KVB4vK7X/50vv8Yc62D/ilsVb81LNX+JELayyHPvvzjJ/6+nU24pD/+OlLnG9GJJXhN3aO+Utfu74AVnBtqckgK2n6ilgrdmYZo7yGrg/uBZ6UNH3Ne8MZ1jmMg6MkZyMOOE4LtKgtYz48Cuv4e59/illZ8eQ/+V2udGL+4Q8+y1965hI/88bdh8udb0YczDNWIp+v7Z/ys9fvM8hLtBT81eev8rc/9xTP/NPfQwnB959b4edv7vGVnT7jvOLPPXaWX/jy83zqX7zI7vwDkK7EB6KLD1eRH9za0o+o6n4oXgEA39mHFIfjf+8H+cS/+H12ZhnnGhGVc7wznKGE4Eo7xoSOozT/yA2ahTdkV8uH81frHMbULZnMWHZSQ1ZWOOdoaMmlZkBSluSLPHRw9DZWufDYZznnwXu3B8RasHf3babK442X30KzRLMTU5Q+7VZAu32Ww8MhVy5d5cd/7Dm++MXP8bf/9s/w8rdfx7iSshI0ele5evVjiO
A9imJAI1R86VOf5Q9ev8GN6+8xH9zAU5Y1FbMR+pyagqSa8Nijz5Cc3Ofv/+yrpJMTpqNj0mKMK2uB1jQpkZ5Podrsn4x46kLAp77/EqFsYzF8ffeYJ3pdLi1tkOSQpgU0fPKixFFiRYYrDjm8PyAOmiwveczmM/b2DWdXPPAmlOUI7flIpZHGI0unCF3iSZ8stSRjQZaBFBLlOWY6ZX9/QqxauKTEdwmjsWCqHb6GdJZhCkM68klCRXdZ1zxCJRFSUZWWweldyvGcIIip2hKl6wmltRleOwWZgJQ0W4pAW4Sp8GeO0AmsUlw/3KehSoTtkHYvsLLRI793k9OdY6bljGJmUSpEUJGWQ+6+/R4379yi2Wriex5Frjk5nNM/vE1ezcllk/RwStbOCNbbtPwu09MZB6M9qqKiFUfMVYFpdVg+t4IKDYfHO3QbLc6fOYPIBK+//jbzScXTLzzDcltxcnzCG2++hlQWayVV6VC+xGZzbrzyGr2VCzz5qUe49Y2v4QZ9ti6cZ286QLuc9WaPXnsD5n2Gk5wXXvgecpPx5nu3OO7fpxlskReOwc4JOvKYpAPSYoMnn/88raDN7/7GLzNPk4UoahEAICRCSoypFtck8fDfh2NYH3hNSwlSSZwVC1FVbVpuF5HUtYtKnfAXxzGra2dwwiKFR2ulwe13t8nSAjBIJ7HGIaTA8zQ60ljPIrSkEcd4fkgDSb8/IbO2rt6KKUifIAzAQp4WzGczAh3gqjqiNJ1nFPkch6TZioFayGVKhzGgpMZJR6vTpGkDqrJkNp5iC0sYR8SRX6ddmYqqtNiq1k9orWnEAVoLkrFByAfUCUlR5GhnCQOFLzS7RwdoLwApairaPEVJTZ6nREFAI4xQQtLutJmnKbayD2loZVkhlUAID4dBa79O4pKGe7cO2Ty3yfrFKarSnE4LnAiYDiRPf3yLuzd22LkxxPND0hRCpakKgxcUnB7NCJsVvq/J8oLZNCWKG7WYy2SAReg6CMD3BLbU5KXBYVBS48sGxiQM+zVNT0cCG/o0V1dorzhmR21c2aFQMwJ/Cc+X5LkgnR8TNzSeE+CGSE+Sl455OiNuhDTbkpOTku3tQzqrM8y8oEh8dLWCa4zZPNPj8uVLvPX2a3SXFMapWqDnan5wnuUoz+ApRVk60smEUjUQZcI0nSEChazUoupfx/HaLMHTsLF6ibNXPoeIQlIZEOoVVnCklcUqi1AxOgwIfXDOw4oSz3oEcYz0FEKpulsh63RHrEFbiRKamtNtQDyY+GlwAucVKEvd6aMikC3KMqcZCPZOd/nmt9/kkbOf4MKjXXYP30PNQ1qd8+RzS6/bwLKO73UYjHZphE1KO2U222F55QzN9iWiVkoQLRF3PDylqIxGFgWxp5BqjvULus2A5HSACBWloQ7QoIF1KX9Sxh+i+s/xYx/jDJHysYvWxPh0wHQ6wXOG2cjDV21UkZEmfVIMS0sraFdidZ/P/msF//C/BU97VA6qSlDkjrghiRoBWZYhdd3GNaXhQYlcCEEpcywewsW40iBxCF8jPXjntT2eeqLDzvSE3dtztlZXiHoBd+9YXnvp2wQtTZGXSCXqqEAp6xl/UZP73cLkX0hTVxIWVe7r0zlC1HxTUzpi3UII9R0czv/mM0+y2Qj4N3/lVe7PMi62a9KxLzT/wVff5P3hnAvtiJ//oWc52wjYnWfESvGTT1/kc2dX6Pia3VnGX37xHTbigL/ywiOsRwHzquK3d074ufd2Hrbt3/wzn+Of3dzjB7dWebTb4C+/eJ3//W4dmypFPTO83I75wrkVfvw3XuVSJ8YCf+/te/yFJ8/zd9++R2UdK2E9K56VhtUIdmYZgwcgGdBSsh4HNH3FSVryj97fZT0OGGQ1kP3Zd3f46eeu8txq5yFQVQI24pCOr9FSUFjHzjTFk4L1OCBQtYBhUlTszTIeQNZrS01Os5LWRwB2gLxyWAcNT3GcFvwvX3yGz59doVhUin/yYaX4A94s1EpnLQWbUcTtScI//aHn+OK5Fb78y9/kpcMhBvj85hI/+fRFnuy1UEJwb5rwN165xW/tnQBwfDziP//pn2Ge/i1+/N/9a4yqIUutDoFfcHhwiCmmjEfvM9gPwWXkuWC5d5af/s/+Gh9/7lHeeuMmyawizRICaSgqiS0q9vevk+VHHB+fIJ3l3OYqvp/y5/6dF/jar1p+87e2iWJNW0miJjz7zCe59r3fg1je5Jf+t69w8/orjCY7SGFpBT5OeSRzRdBQKFlSpAmEEdv9kpN+SuTP8DttpjNLJSom0wLlh2jPEhc5rRiENVgXLOzXcmyh2Ogt0R8fcNTfYevcoyg/QggLzlFZg5IVnhIoESCFxm8GWOchvRJrc6rCokQTpXMcOdpfou0HDIYJSoRoIYn8kosbAXG7QnoWLT2EMYgqxeZzlGzjBwH5bMhwOqcTtyEzCFfg+Q4tNVpZZBaQzSRlIyWUhqY2ND3H6bygFYScW1nFsxWn+6c0Nztc2DjD2WbI7vGY/bRP7hzT8QmPPXaJxy5c5r1bt+kf3MUISdxsIM2IThSQTOe4MqW30qNSIcmoREhDWVh85YiVzzxp8vWDBCESzLxPIxuipSH1BAUa48Usn+lAVHI6HZEVKc4rEFoiXc1lFErgLVKdp5MR3/zN32YwGON1oWoJtqdD3FyyurLC0fAALS0/+iOfZ14qJhPFnfs3qJgyyWF/PGTa38dzdbJOah033n+bUX/EJ557jj+z3uMf/9z/SGF4SHmSi4KBFXWyj5RyERIgKMtycXN33xGKUl+L6QX8AAAgAElEQVSrF9QlV4NAJSHPinqb1tT0rlyz3NxieXWJd6+/y87e7doX2apFimFtzF/aOnlN+g4jKrTnE3geSZoRejHCJoBE6xoY6NCnLHPKMiOOfKKgS56WWGkxpiKdOqyBMKgz4T0PfE9RUIcGTGdzwtgnr0qiyKPhB6TpnCItKEtFnpV11Q5QqhZCOWvJs5xJNifUPj6atc0eSVpgFXQ7DZLThF67R38yqvmKrvaDbbRb9JZ7jIdjPK25uHWeKi+YT2c1qE5TJGAWn4fnK5wrsbZECok1CwGyX5JnFYe7fc6uXuT+9Vt0zzSYpmAqyWAvob8zYW19he33j2m3Wii/JFcOW5RcuNxmOp/gXIRWDifnICVZkeBEhRf6yDQl8kNM7qM8h3EZOA9Ph8ymOZ7fpOFbsmKC0hKXphzdK5jNLVkypdVcprcSsn0rJWo4vKBEeRJHTBAGVEmFo8DZED+UIOdMpxrhJwwOp4jQY+uiY3Q8ZXDX8sjWI7hywq0bb1OUgvGkjo51RiCswuYlo/SQyaiP8kIm/fvc8yM6S2eoJiOsNCjpEGWJRGBEic0gjDSJqQh0ydHwmNsvH9HoCFwu8UWELyOS2S6D/QNaYbeeoAU+wrNUpUJHEUZKCivQgU8YhngL3YryPJTwHgoVpZIoWVdYcRqrFZ6sEE4jlcRIqEyKqHK6vSXOnTHcuPMujdVlhuN75JN1pIDV9Rbl3MfKDoHXZOfgDlG0RKPRZjbZp
VAjOq01NMtkRVlPunxFq9NAZB0SU4u4nKxqkZ5uokQT08g4GUpkZZHC/8Pw4x/b+K5AVTiFqCqUtDS7MdksRSiNFK52SNIRQhlKCqxUCOPwhcHmBdJ3lImi96Tj4qOavdvQ6JTIvIUxU4wLweULz1QPnML3fHJbPJzF+yKoLbJkQZVVkHmMBsdc2rqKn8cM9j3On38GUXmMT8bMxxW7O4cUWYUVCcYIpPKQov7C5HkOokR7jqqCSlTEXoCoPuBiWFuzna21BFGEtHW6iRRgHKyEPv/WlQ0+87++xP1ZXdnbnqSA4O4krZNahGN7kvIPru8Q61oB+z989mNc7sT8+d/8Nm+eTrjSjnFAy9f8jVdu8Gv3jjnbjPjnX3oe4xyvHH9ggfVnHz3Hf/Hy+/zqvWMc7iGIdQ4sjke7DUZ5yW/c7xNpRdvTvNYfc6EV0/QUubEsRx43R7Xl0APM/en1Lv/sS8/T8uuvwX//5l1mhcEBqTH4Sj6scD+x1GQ59Hh7MH34ui60YpQQi3a9w1f1ZEAKwb1pSmEsvpJcbMdsxMF38GxXIo8744TUWNQCcH94tH2NcY6/89mPPawUbzZCfuGHn+c/eeYyP/PGnQ81/z8Y5xohJ1nBl7ZWaS/eFws6AkDX1/z6/WP+69dukVWGH9xa43/+gWf41P9J3ZtHW3bd9Z2fvfeZ7/jmmlWDVFVSSVbJkiwsDxjHNKYZHGjDioGGBR3CaoYmGAOdJmmGkBCgAYNJB5pucAwBm8FA8EqC28aWY9myRpekklRSDa/qVb336k13vmfYU/9xXpUkbJykk2669x813LfuPcM7d+/v/v2+wx89XHN2PVjnMXrK5z//p8TdNusywBiNwJLMtokWOpheDtvb2NCxur3Kn3zko8wtzvPU2RVm987jnAIVUxV99h6YY25uHlfW/pfdrMlcE8qd53hp/RG+4o0nOfdCh6qEN/ztB3j+2WuoPYcoo0We+ujDPPHJP0OX2zTjkDIvGE8NMnLEQYCxMB5qhEsIG002BhNW+wW6GhG2QprtNmvDKdOqNkKf786ShpKJmSJzg4igDCSRFiSiIG4mdNotCCbgFO04YsdWCOtJRUVlFdK1iKRACk1mJLkpCZ2i0hIpQkIbYUvIWh0C1WIwmGJyRyOcJW06pCiYuhKHYTLRJJHGewlSYX2EL3NaYUrQaKMriSoaeHJkZmsz8UFE6SvGYQ/tBM3dDPViJ8JPBYNBwbjwjIoW+5OMfOccjy8/xIkD+2gvNOlrXfOSVc7M7H6WDt7J5s4217Yf5eKlFWbnO7QX5tBVzuWNVfbPLNHuzHBlc52xzjEup7QF7ayNEiE20KStJl7N0N8Z4l2MSwQbo3WW9h1jrjVL79omadQhjpvs7IwZre/QaS+yZ3HI9WtraOOJA08hag/TZqiYjFZ46i/XWDh8hMOvOYm0nn5/nX4UMdOqKArHtc2SZraEtDv01p7HVZrhANJkQrOTkZqCvJqyOZiyMDfLnXceIkkiIjXP8dOneerhxwlVQCU0TlqkB6hTA4MAAiV2OajcTKW6wVmFWtLovEFIUTs3+GA3WKCs6QEqxHmFClrMzuxjMhywfnWdYmIReBwaL0VtX1ZZVATdbpPIttFRSRZllBZK08M5iBsxwkoQirRTEcmAQnq2dgYoKcmSCJzGakEQtVBhRVlOKIvxbgXdEQZNlE9RiSVJQqx1VBX01nfodJtEQUjpNLqoHS+98Rhb+//m7Kb7lZKytHhnaicHlbC/O8O58y+SNWNuP3qIaT5mp98jSptIb8Eo2vE8Bw4u8tj24wilWL6wTKsZ4EqBkR4rJEJb5pfa9AaTeo4zjkyk6CrHKkuYgnQdnBnSXUhIFgzzB2sKnbe1qfNoWpAXktGkwkUwdRXdMKMROiYDSz6VOJNRuBF75vfSnZcMB5Z0LmY6NlgjwSuUj6jKkmYzoCgSnPBYJ4mjlCBUdJdgp+cIGwY3LZn0RriigXOGRlIR+gbzS9vkk4h2FuFUxLSsiJoGpzO0z7FO4+ngZUl7JsM6hdAx+WTCzjWL8BmNmZRqavEeTj9wnEcee4xGOlNrS4SnFAava7/daupQLcOoXEdqxcxcCma6u5YFgMVoDaEAQqzLUVbQaC2xr9Ph3z75+8wtzmG1pT+a8po7T3OwrShEjzTM8SVUpqrFfQiyoMOzz71AUVSgbrhhOMDXQk8ZoK3Fuw6oSc0/doZQdKlEQeAjHAYjAGHAxRCO+dq/cycTNyFrTNi4VocHzd8l8bKiGVlevDYiDzZoRi2KMmYyKhlvj+jnMVZssdNPCItZ9h7ax3NnnsbYCGuXubU5ZH+rYKeXICYV+nqEXJBUYkK6ndIOOqz7HjL8/wtQtTUYOnryBBuDPsmeOczOkLLfoxXGBFmTiRkRNpq7HnCC6WCLIAgxRuP8FG9jbnuNYOWliiiR6LzC+YiqdLSaEUZrvLNY/3KL6cbwxqJECAjyvGB+doEjx49jdMVr730Ng62rnD9zntyM8UrSae9n5eoWwmt8UXu0Gi0Iw9p41Ps6IefGMTIVgd1Np3gFb7imICgSkeKlxBh7ExEdbNYk1CujMZF6tZzvzftmeffdx7it2yCSEiUEb/vX27vgdi/f8tEnuTKuQe25fj0JLY9yTs408cDF4ZQPnFvhXbftf9l7Dfi9F6/x0mBCaWv+1is5nnXlMWBYGTw1B7TnNY2wBsitMKAbC65PK/Rf4Zw8cr3PLR/4ON045Adfc4QzW8Ob4O+GOYGgBue/85X38L6nl7k4nN58vR0FXBnl/M/3H+frDi8xl0RcmxT8/U+fZV8W80Onj3JLK6Mwlk+v7fD9n3qWye6m4A++6l7+5QtXedO+WV670OF/+NSzXNj97JpVLFhMI77iwDynP/gQQ20wk5zfe/Ea33n7Qd73zCW0e7VgrRMFRFIycIZ//MAJfuyzz/Ohr7qXQAqSQFIaxx9cWHvV+a+Mr/Bdtx/kgaUOf3axQGvN6rVVvPc88omPvWJBfhkUt+dmMZWmGI+Ym1tgYC7z0PpFHvvsn9LbGVJUFZPxoG4BqohJb8y1KxtEaYrJJ+yodS5cWMb9pcJamGl8hvFoTFFYtneGZNMRZz/2ER76hgf5duCX798D7Pki39D/+PHJrSHf/fhlEm/xaUC1Y3ByjJQxyewsUWlJvUA7xTRvkQVT2o2QuWZMO5IEWYIxjiIPiYMQI3JQcW0U39NkacTe2SOkcg5hLaaaYXZxD52oxU7veVSrS28YsNa7ShlJ7th/mtnGHFPbJ4xTgnAFqQRRLKm8JtcFaSdgdXWDi+srxEFRG7aHDUS1RlhCo9NFWk2+06e0Iw7M3MJSax6/EREXnk67gQot8yZmb9BkdfMqz54dcKC7RNUAm8VYFfP4Y49w5folDi3u49ShA1zpvcjq1WeJbETgHLYYkHZbWNGjUhWdlkTm9eIYVQ1m5vdgI0tLb3D6tad48XKXtWvL7OsmKCMY5jvsTK8yrUqurJxnqdWi2w5oz83y5q/8arauLPOZz30K5wVUBuUqZFq34ISzbF16iWJ7
i4VbDpG0AygKUpWShDMMt9e5srJFFmfIICV0julgk2t5j/m0we37GhxYmmUwtbx07jLrVwrm98xz58mD3HL8JBefX2Z7sEWkQFiHV3WKoLrZorzhl+pfoSOo5xbn6mS/G9oCu+tDXesRFB6Lp0BIxWR6nY9+9MM1QKCm2Wgt64hWoNmd4fChWRppTtJNuHj+OpMNDQ2FairmWosU4xEyDikqjdZ9QrtAGKVMhjnOearKMJ2UBEqgZIKuKpIk5JZjp1i5coW8GGGtxMcFRTWmLCqSsEEcJkgl6bQWSOOASo9ARSAVWpcEoWDv/gMImbBydRXlJHlvQhZKrHagPQmKQ/N7oNK0Oind5gxr61dZmOlgVERRjjBOM5xsc+bsMs5WuMIjSekPR7TSDnGqKMoJSdRiafEw08l5wrIk12DjXaqC8AgjcXJMnHjMOGRrRdNpNFleGTLYzDl+bA9hBq4YMBkIFuc7jEYDxv26elbmBquHgEQbRab2sbRoWL70NLOzbXAF07GAKsIog7UB/Z5BiRZB4pCBQ+saoF+56EiSeVyUYEyBVA4rHE4UEJZM3BQiidIxlZvQ3y6I5AxKGbQ2eB3jpWU6KnEypb9tEdJQTSWShGIYYIwlikK2rw9pd1qsXLlCp6tIUsl0VACGdqfBZJxjAkuWKRwaPaqI0tp7e7KxU9M8hEUoh4oEVrg6XUzUARxp2qSohkiZ02zVPNEoEzRmWnhfoVyMIqWwBqEUCoEXGl1OCaSn0agLa25X/GudI1AJ3o+JVIZlghcGbxWQ4oMBkQpx1hDICpcblGxh0XQXlugNNzHDmMMnFll+YZ1pH0w1IGsknHtxE70lCbpjxExKpiTTUY+ry+dJVJsrz/TpNLchapMGFVfOPEOvl6ONpHN8kRN3HsJV17E+Qkwl6TQjTRVBFNKrKtIgIrDqi64lfxPjSwLV3nCMlBBv7eCVoipL8qJAhAFa1NnJcdbYNc6H0Xhcx9/NhhgscRJSlB7rNTKAUhuixGBLjbERUgYo5ai0vUkwB27aSmlf76CQdXV19do67/zmb+feL3uA+x68n44XjItNcn2dM888zS/8/G9QFRMioYkjh6dOM1FK3RQF3ADDSimMNYRZg6wRwVbdzm5IyY/cc4x3HN7LbBKyNq348UdXWMpCHlvTBLuCg9ctdnnkep8b0qDjnQbv/1v38GtPX+LHH3mBlXHOO2/dC8ChVk0NuDLO6wna7VZEgTfsneUn7z/OvkZCpGpwO6w04W4rHeB6XqCdw7yiMnhjSAGltXSigPQVBr2tsP7VjrVlfzMhVYo9Wa34UlKQBZ40UJwfTBhWmg9fXOMjX/M6LnxkwtM7I9TugRbTmD9++3184uoWP/XYizf5pXL357/4hjvY04h5x795jOVRztF2RiOQNDsZP/fEeS6Pc/Y3En729bfznnuO8VOPvXjzHL/j5AHe9dEneXp7RKIkJ2de9mF1eE50m/RLzaVRTiwlCsFL/QmHWimdKKBfmpt8XSUE+5sJFwdT/vmb7+I3zl5mfbeCK77grtXXMKgMX3Vwnrkk5LmdV/vbnp5v09OWq+NylwP7Mih2ztfeqAjG0wmTyiCDkME4x1UWFdbUlWYckMQK6x2lLupovSCkKPJaTLhrzF4FEQiw3tHv91gb1tX0G+K9/2JDOrav7FBph9q9K9pDcWmnzs72ntWz6yDq/OnPPfRk7ZHpBdqCxFM/YjWX0BhV/1x68AFWA0iSNMVYjQokZVWRl1Ocq3nBt7/pLgabU9bvcJy87TaUUSzMKQpdMNU52mniTHFpfQXbH7E+6LE5GBEGgkTFmHwD8gIlFM4ltELJsZN7MWPHzk6P1gP3svfO28iGYw4sLbLQnqd66lHOX3iewsYEScxaNWY734KRZ227REuNd47WwTvoNlI2t1fpxp4gDkgjR5zklL6q04pySWdmhqDyXN2cYkJDSyYI6entbCPldYJqSpeCdhpTVuusnN9h/cIFGu0WraxCmXUqneFGY+bmD9M4tJeXlpeYAKqq8JOC3iRHCY01AmTCcDhicOYMswtdDj1whLlj+9i3eA9rV6/yzFMfxoSKqN3GiIDjB/fw9q94M8cOH2O4+RxX1pdZqBRvf+uD7A8E584/z3CwzdzSPGGmENsKFaUYl9fdJ2nwzuGdwrhXg9SXh+CGl6rYfZqEdEh5Q0wpak9hkdQTutJoU+FdhERhtMFYW/Mbg4CF2T185dsfYGbxCs+fXePk4eMsX9ngmRe2meQpVkgmE0GQQbPbxBmJLgRjPcZ7Q6OZkQRNbGXZ2d7CoFEK8rHnyoWrCJEgdF0xHg0rjK6N4o0uKe0UGcDMfBttPXES1hGt1rE4u5dAKu49dQ9pmvCkh057hvOloLczIGwIpAyIRIDQln1zC2hboKc5kQrJpyVBJyDJMmzoEZFEVgEUOXGgOHpiia1+QjNIme2mbHuYTj3KC8op6ElI0CwoCo8QKSL2lFVR+1mrmHxnC394PzOzgokOObD/ENvXRrSyBideu8BouxYs67KiLCxWGKJUUBlNFDUJ4iY7kylXP7PJ9mZIkmVEWYbzIybGMM0N3gfUmwu726GUGF2hlMDahEF/Stzs4KzAmgCV1l2BIJ4lDBJWLy/T7dQhPmnDE/uKYhDgvAJRR7166zBao41HhiCkptGKqSYC5yxVlTOdrnPw4P3oXGImXYwMCbKU6aBismEJpUQ0LFVV4H2Cq1IKBlzt9UhlQhKH5M4hZU0XEUrWWhVSrNE0dv1Xs6yDVCEQEacR7bku46sX6w5ApXFUlLqiGQVYoRjn09qHOKyTGY0xuN1Y30pbAglBUHcfvA9QMmNcjTFSIG2Ak45YpUg5QrgCEQT0tqc8+7mKYuw49/wamYFmKRlvNZi75x5uvysj0xUjvUxlPI3sCEIkHDi6Q5IqAr9IHNVK/yAL2HvrPoRKaHUTLn3qs6zuaITyVK4kjWcIy4CqzIm7gknRYzKsaGTd/7Lrz3/G+JJANZ3tEhpHf6NPICTT3hjlPVEag7cIKQmlIlIxk2JCqgLSrF23xKjTIArtWNiTcuRUzqVz0G0KKi3w0mMdCCURpi6T1wrSusfunUdZi5B1xVMKhVOWf/Wh93P3m7+cf/PQGbbGisRXVJtDnvrUWa5f3CCKDM5HFL4iiqKbxrpit2JbCzJqe5UQRxqG6OmEGzFP733jKQ40Gvy3D11keVhyKAsRQpAGksvjihcHBX9+6Tr/8P7j/OhnnufJrTFHWimHWhmRlFweFTy1PeHkTJPvPHmIT17b5sqoBsGHmgkb0/JmdS6Sgt9522n+t7NX+KXPX2CoLX/vjkP80Omj7GskrIxrMnMzrEHZF8ItiKXkyiinE4csZTFr05K9WcxtnQaXR3XE7A1hFtQA7XAro7SOtWl9Xs7XjgChkhxupzyzMyILFbNxyPv/1mn+7eUN/sEjL9w85q4LFt0o5BuP7eXL/ujTXBnlRFKwNim4rdvgoytbuy4Fns284sMX13jr/nl+9omXr+JfvnCVF/uTXQXmq0dpHc0wYKTr65ayPse
dXS/bVCnG0mJs/c40kIRS8p23H+T22SbvPXMRtYumb1IA/sqYTyJ+4Q138P7nr96sFAM3QfqJdkoiBWt5RaIUv/TgSd6yf47KeT50aZOf/NwLaKNxxiClIksVKnAEMkABDx5d4j0n9nNHq1k7K/RHfNenz6IDTywkaVIv8t96bA//6K5b+Wefv8gHrl7jBoW9F9eJQEUlqAqJJCBQEkSIQ2G0x1lD3Q7weGz9XAUhgRIUWlNKze1ZDTBeescbuedPHua6MkShQona8ivXFR5JHErCQCFFiNaaqtDEUUSkBFoXeKUIgrD2LASGoyHtVpsoDOkPd50XpCIMawBbak2ZF5TlFO0NeMHqtVV0MeKlS9vsjJ+jGx/g9O2vIwwSwiAhUKACQX8yob+9Sl5OaFhFvjlmvbQYISG3LC202H+oQ5IFrGxuUU3XyfMxZVnw+rvexMJcl0Ym8WG9uL7m5Gni5B7++CMf4tzKCrKVUE5GWLPNtCwRMmKiP4lyoLwhaiu2Jn3G10q6UUArCekNSxZm9nBw7yE2wlWu9yfkbko5FOybPYJp7+WF7R5VscFsYLFjy2r/BdrNLjIK2B5PWZyZEFnFeDNkbf06mVhGyYK45Qg6ATNZg/ZYcfGFKdfHCqks1k/RkUS5mEGvz8c/+TRx8wAPvPFWLm9cZVTldJJ5msazOu0RxccYDDo8uzwmD5tsjyIy0SEKu+jFA7zu1IOsXHiCj33iL7CVpRHWBuuo2pqv9pGWWGNre7hXWFPBDVeAXeDqXp5Xasuq3QAA4W7SAiQBmDpK0lvQtgRviKMApULitElnzwyjaYXcgH3dFvffe5SLG22MbPDUswOm/ho+SglkBzOtwAYEHpLI0QkiApnSbM6zvrlNI2tgyqo296diMBqiSGpKgrWEPkB6iTAWZzxRGNNsZIQiYOf6Nq12k+NHTjDbXeSrv+FdTAvNeGq574HTvOt7A6gKfuLH3sPGw48SiKzmnyqFtgZtDTIM8FbUlV9fF3iWFvdw5x13sD3qs2dhgXtP3MWJA7fz+KMf532//Vs0sw55lTLb7uBUSdQIkUnBa75sD932Et5bnn5slZ01xd5b9lJVmt5Wjztef5o3fP1hPveZTyHDBIslTRP2z7fQJqEse6xd3cBphfAQpSFeBHTbMUjPZHvKzuplinKCchG6rwmbnv5aiRQBhDlhWt+fKhcIEWC0B5fUqZLKAJ40SpCI2knHBwSBIwoM0k2YbG8wvZ6TdFuIIKSdGeJYYUSFtYZQRXX3AEhjQWEsSZrt8p41qJw4nEHQxpQGE0iq0jE3nyFFhmyMKKoxygkqGxMFKSqRlE2BKwKOHz7CysVLVPVuG+dM3WLHE6oAQYSkJE4gzwsqbfHKUZmcycgRyphhXtSibGNBanAeY6nFj2VdELG62p1/A7yoO7kqcEQyxlhfh1fZGFfd8PvJUF7jvaUqFboKSVINXlFMByx0DmDsCLVTEWQCmTQoreHFM08ihUSritaCA6+42uuhiElSz3SsCcMhYaJQIsWqCuEEIlKEqUWvb+HjBiJ2hCJGmwIbp4RxTDmact9rH2TjUMXG9sYXXTf/JsaXBKrSQ6BCpnlOoXOyLEUimBYTnPfEIShfGzcPh0NaMx2kVQx3tmmo/USpI59Ab0dz8BhcW05xLkYIT6U1ReHqqMNd5Wmd7lCfkveeEk8YhXhvsYUmFYrxtTX+6Q+/h0GumA56CFEhKAmDkjCEqjJIAUkcY6wlDEOMridb527YQ9TXJ5RgPNxGhXVWUjcK+caje3nbR57lws4EIRSXJzVVYHk0rR9S4Ps+dZafft1xfudtp2ntCqN++cxFfurRF/nx+27l5x48yVObQ/7oQi162swr/vTiOu+551be/emzXM8rjrQzEiWJlWSsDZXznOg2+O5ThzCurkqcmm0RSsHUWDbzkkhJtHUc2KUfiN0/tgvNo9d7/MqbTvFPHj9PEki+6bZ9/PbzKwDoXZP8lykDnnce28uHzl9jdVIyE4d8752H0dbx5OaARqi4f7HLr7zxFL//0io/8/hLr3oubjAIWrv0gsvDKaGURErU5y4E9y92+e/vuoUT3SaJkkgh2MjLOjt89/5f2QXigRRfECKgnWOnqOhEIYEUhEKymMXYXcA9qgyvWDuZaMvquOD77zrMuz76JC/0x5yabQGwU9bgthOH5MZSWMfeLObPv+Z+ntgY8JOPnXvVscPdDz43nDLVjsONmJ/5spMMK8OdH/z3HFya5wOvv5WtU4f49QubyEBSVRXORchAoQLF0Tb87/ce55fPrfD9q89QVpqTnRbWVIRB3e5MEsXeKOA7ju7nud64rkS94jzyvFm/4iEKPc6Luq5hPc5UCEytLt0FCM7XPpbeuZvP0CupNFBvMorSUeaW+dTxiw/czlv2zlBaz+9fWOdXXriG8wXGONIsJQgzVBjiCse3H17iW2+ZYyaq/X2f7k1436Ue16wiiStOpYrfe+NxptbtGlt7nh9M+NqPPYkSNd+5GQl6lWW7l7PdX2WhNebu43fTzjpkcUpVlAQyYjgekleOII7JfMZS6zDlwpie3mR1uc9oUnJ1bRujJFhbV2qShFFvzJXVNbJmg0g1iKWikSlKZ3hx+QLr2+tU+aQ2GBcOrx3Sx4hGSiGgmkyIVEzYd2ysjxgPc8LFFguLXY4dWEISsDbYwoaQzCjK4YDZdJ55KVlf2WJ8fZtJpyCM4TUdz95bGmw5zbWdHpO+xnUk803PlavLbE8sR3xE7Ka0mhEu9oRCcOJoxNF2xMc/n7M5VMRRQD6q8LpCBIpi7PnYRz7HpWeu056riFuK4WibdjKLLAvOv/QUl188Q5o0WLrjKGk7Ie22uDi4wMrZJ4niGR68/Rhf8/q38uGrf8iO2Sbd7cBUEpxxKCdqO6VdP0gh6s3iq/ipvm6bQv39rkmqDnaFqkEg6s2TdBgTgBM4X4GwSJGiC4HF0VhsovYuoq3hlsUIa2Yoq4CZ7hKN9pjCrpPKmIYTLEYpw7FBKEjCiJPHb8VVBu8iXJTQy6e0IoGy0N8e1D6kIkJXJVJ68BFOaIwrCO54GSoAACAASURBVGSEchJnDN1WB20rTOmYDgqqRsHV65f4nV/6NVABeRnwZ+//faIoxLmSlavnCOKgVvoHdeW5DpSxGF0RB0kNViJJ6SyjnT7L51bRCCa9LYZrL/D8wg698QUO3b1I1XdMd3JmmjGhDJlp7ePBBwNaR0cI5UgTxbjoUYwLOtFeVgcrGAcXX7rExV86z9yRDu0uuHJKMtdlc2ioKsN4kFDkA5opSGlQQmFNwGirQgWSKJPg2uzbf5RK54RRk72HobDLjPojDhxs0tsoGA6HBKJLGIFXFhEqhHOkqdgNOREUxQRrc1zhiaIQYQvQIXZqiJOSQCSYMiJ3EmcEc3v3M9kxeFtXd0ejOj0yiRsEKmQyrAhDj3QNnJUEQUicZkSRptFSuxG7BkNANtcicCU6t+Q7BWEBUTtl6dbT3HXrMS69+BIylHVkrncE6mW/YGcNWnsgqgWNUURVOYJAISX4oq
AY5lhXEakmVQlR2MDYCuk8tjJESmKtx1iNDEIQoi5gCIsMU6Ty7DvSYOXFnCAKESUoUeJ8gHQCmUkas/spih2q0ZQ4arDV72GNpxmHGF8RKUEUago7QlQpsbSYtZgoFoRFiXdjTCGJnSSwE/TQYWOFtWVNF3GSibR0Gi0mqmJYFAgpSaRCiJiq0oSV4vqVbc6t7vCqBfZveHxJoFqWJZW2RElMkqW4vMQLgRaC0HsKVTvdWq0JQkmeTwiDiFbawEuHELW32Moly12nJbffo3n6YUsYe5wIKQtNkqgvuB83qqBpFNc+biIgCA3WeuKswcVLL+AjSRRYsjDB2ynee4zOUDJChgZj9W6Mc5089UpvQCFBSI8OJMKBNRqI2NOoW+PnNgY4wDtNEAYIJXnTnhbndgpiJZEC/o/nL/Mbzy3zQq9u55+Yybg+3eafP3tl9yLqiTxWtT/n933qGf7Rfcf53a+8h24csjLO+fv//iw//PBz/Ph9t/Hu00d5YnPAH55f49tOHODSqAZxlfP0y9rzM5QQKcnWrp+qEDUIvZ6XfNfHz/BzD97On3z1fZTO8bvnrvIrZy4R7dIBfumNd3CwmfLOf/cE5wdTbutmfPwdr6cdBYy15cnNAd/zyaeZTSJmkoh33baPPY2E7z51iO8+dQi5C6He/fBZPvjSKh743EZtEvuW/XNcnRQ3Vf/XJwV/8XUP8C+eXeY9Dz/HdqH5e3cc4p237rtpuA3crPJa/1frqfX4xLVtfjEKeOv+OVanJdt5xX0LHS6Ppgy04ZZWihKCC8MpHjg502RPFvOn//X9QE0HAPjgf3UvHzh3lV99+hJ72hkHmwnvfeMpPnFtm3/4uXOM9cvCsQCBeQUJeKfUvGa2yZfvm+OeP/w0Q225Mqn4X19Y5QdO7uc3lzdppCk+TnFAWZRUwLvvPsInrw/5wMU+cdRBKMNzucOLCHYVx1Y7fuK+k/zqi8v8N/tqmoh3LwP2aaF2OVQQOY3yntILjN/14fMBog5cqzHCbpVAqbryWlmLUqCEu3mPlQqQ1JZAv/6Guxlrw91//igLUci/evMpCqn4zYtbhB7a3RZSCJQKkTLg4Z2CTw03GFhHMZnwjUsN3nf3Pt7x2Cre136WxnlO/eETEDgKX2JdHaRgrMNbyCtPI8so8oJxuU0wnzG3MIsuBcJXgK65ml5wONnL5mSb69UmW3qHfFOhWpKsm9BRIQu+w84opzuXEqcCL1qkzZDLOxeYnjcsLR1E7FznM09/imcuPM9oMEEKTWlzRoM+3YVZpjaknE4Jq4J+UbdUZzoNtIKqGJMqgadkdXNEXAi6c01SamVumgaYPGCYT7iuRhw8aJjpjPns5SERTRZmNKka0ltzaC9Y2i9IOymulZIuDgm3++hqTKBKmirBBxG6qqASHNifcd9Q8onPWgalI41CnHBUzhCmglGxypmz60SJIkpDbjnQZX6uzfLKgHw6pBiVFMGYotDEcx1Ov+F+jt1yK8OdJ1i9dobPVut8xzu/lZ9+7X387M/+U559+hmiJAT0bsQkdRHhprL/lVXVl7dUQtxItqr/XwPZ+ruDF1hXOwoIHF5Y8AqjA5wtWdzf4JbbFun7hGHu2bd3kTe+5U6W1y8TqwZFVWKDjGbzIE2p2ddJObDQ4rKqSFozzLZSdCHRLqAUkv37DnAEeOyJz3HX6Xs4fFRy7fxlzr90niyNUEJinMA4iVIx1gscFaGSBGG9yZa7Bu6xCslSiZMDpLU0wgC0xlUSIxUJDi0U2LrAUlWafFpQ6rKOBcaS51MEmgYRctLHyA0QlonJ2SnPUp3ei9ovWGq02XxhRNaIacaS5v4WS4czTLDAM5ev0pnJ6F28xqQUfOU3PUg37XLp4jbnn5twdaVPFnWR+zpU2xPKSmGDMb0VR9iIme6MyIImvnRI1cRqh3UaUXksEjMCZ4eUGw6j+sRxm4WFBcx0TBbFOFPPV0kWIFC1Z20zoSgLhNDIQLK4JyHOLGYakXRbRGGDhZk9WGORYZvFg7cQBZbRpI45VbIWf841l9gMrzHVFV6AdlOSoE57FN5hzQRrAyQZaTvES838UsagN0bJBlVuiZMGSEcQgJn2KCrF/a97A7cdO8YDb34rq6Oc5ac+g7WWpBkxnU7r5DGrsMYTxx7nS0SQEmddpmaLqFUQpo4wsGxtjZDSEcqEqdvEVwWCAGsKRGDwVmK1QfhaeOgJMa4uaMVpQiQihqVm4ZiCYFob/MsQ5BhdCIJgSiNLiBpt9h3bx9lnN5HCEMgQg0MzJmrtRRURxJqpFiSySfdgQN6zFKUlVhlOFmjvsM7hkFQOfKBITEgaJ3hlCVSM9ZowkChhUYVnYkraLsWMS0TmyKKMtUlFWVU0g+yLrst/E+M/AFRzYq8Q2uKwTEdj4naTvUcOkY8nJHNNQiMYr+5QGE0j7VBoQ9zIKHxJPBU4BUdOCFTouPVOxaVnLZOdGGt1HWuKxPOyb5/bjSEDMN4QBEkttpIelKLyhiAEJ0uUTtDaEKgUi0VEps7VNcFu1JzG74JVY8zuVTmE8IRhgDMQBikCDQbWJ3UJ/8R8i/OjEmsM1hqyMOE333Kcuz94nZ2yxHvPXBIyn4ZfNHHphl3LKz2txtryP372BX7ysXMU5mXT+8c2Bnx+a8hzO6ObnNSff+rCzfe95oMP3fy3dh7NK4EeGOcxeFYmBd/yfz71Bb/DGw4BP/Tp5171+rsffu5mnOsXg4nf8fEz8PEzX+QnL4+N3Urx9911hB96+Fle7E852s6IlSSUgrM7Y57v1ZXNrz+yB0ndvr8wmGJfcdC/zlf40mjKJ65u8W0nDvDuT58lCwN+8O6j/PbzKwjgel6S716DAB693uOuDz60S6uQVM7x3Ld8Bd/7qWd4bKPPVlHRjULe+8Y7+dD5ulLsXvE78r4GzTcdCHYV2Ld1G4wqw/KoFsI56mriLa2UTAisMXV1SdZJMpKQNyx2+Ph6n99/w23c2kq4Mqn4tYtbfGyygbMWKS3vPLzEVBv+4to637hvz+7m5mVVX6bgR+5e5O0HO8zGirVJxY999jKLWcQP3LWXQ82IqXF8dKXPTzy6wnQ3zeahrz/Fh5d73D+fcddcxi8+dZ6/vLYNgKFBENYUj6/YN8fr/uxz9AuD9YrfPLfG996+n59/6hJhUCcNpWlG6TXTMud835BlGYGKgDqdbSkJ6aQRRVXe3HDKyKOtwRmzmyNfq8Ytno31AcJ4WnMBR2+dR1jDpStX6DQ6SCEw2qHQOFPxwsY2o3JMpx3gnWcyNNhBTiOVzO89zHwnZiS2kNEB9i0cw7ZyStPnpedXeObsU3hzmYWDKaa7xfyeilYTpNxX27CoNbyy4KaUZR3vivVoVzKZKtKgQapSZCxJGt3aN9pXrFy9hhtrshTiJEKYlGHhyINtZjsNSt2p5xM0wm2QFUOSPCOLb2FYTjmzMmCz1PQ2c8IgQ0mHyRVJkDEpLWngaTctxjlOnoxZmJ3y9NmclX5GP8vwVUE+mhKr2g9aW8loUFL5CdoXiJk9vOUb7uWOpb2Yc
shwvM2V5WXOfOYzrI3G7FlYYDGK6fcmPPTkZb757W/j/te/iaeffQoRSAIt0EbghUDctK36Qi/VG8WEWt1cT3hSScKw9ot03u12cWpOoAoVk3FBVUmyRpOjJ/bQWayz1dteUNkxl14c87ujkuv9HsP1ZwjTJi9cG3B4323ctqfNLW3F8gtXOHHkNVgGpDJiJzc8dfEZtsuCxZ0tEhGxNLOHfUsHsFISq4Qjtx3h8088zs7mNnGcgLEIGeCsQcqYIIgIggynLUHcII5ivJc00gZxlCJ307CcK7G+hDBA97YprCaVAucdMghotNswEQSBQCCZm51nYjXtrM1sO6URdwmkp5yMiLsZ3ZMxj559iXziOLR/geRESJwYCtfjiZVPI2RCnKYMt/skSFa3NRfbLxIpSW99yrCnufOuA4xH27z0xCUUU0RzBhFrzGiEHYRIUxFHMbHKQAcgDM5CFCR4p/A+R0aAnRAQEgjHtF8Si3nKqmB9xZGlTUbjHs1WQNZsMcnHOC9pNBtMxyOsjYhzweLcHPc8+AbCsE0oPdujy6jQcfyeA+S9ELeyjjYjtFW1Y0jc3I0Z7VJpSWs2odWJeOm5HaK5FkHoySeeJKtIWpAXCa2ZhOXlFYpiyPzcLAGSPJ8STDMif4jv+YHv5vUP3s0zz57lked7NNuC5YsrKFUD7UCFVEWOChSBFBgrUJGk2W1ydeMqlNc5cDzFySGBUpyauYfuUpMXPzuqW/pS4V1J3BCgY5QM6mLYjblPUtOxdr8f1lgacwlzewX9FUmsPJPSMPWW9swMh44EjEY5m6uWnWt9mEoS2cb4gsWFRZLZjCRVbDw7JrJdgsTxv9y5lwcafyVZ6D9zPNHP+Z+eu4TO9tEbVkg01oz+w2/8f2l8adV/VfvcTfMSrSsaWYMjR44w1AVRw+OsZTrMyfMcKSXd2Rkm0wJbabIgIs8tV9cEx+8IWHnRIQNNuxsx7TuE9HUUqbVoo3d34sGrW5Xeo4SlMhUIQZwo8qpE+YBQtzFqQBDU6joVSIy1KARKAXWIJlVV+3NWVW17dQNIKiUIRITxmtxULJLQrzT/+tImP/PAYX7kkcusjHL2pwFJIImkxLi6EhYryVwS1AKbL1INvAFSxc3SQv23x+OcuCmUEtTAbSMv/9qq4v/Xx/d96hl+/N7b+JOvvv9VleJ3f/o5fvp1J/jVN53iyc0hf3B+lW87cQDt6ojYvzq+6NV7+LufeJr3vvEOPv93vpzS1pXi9565hBS1yfgv71aKv+nfPUHpPKuTEinqVKwbt3RYGYaVIZaSHzp9lP3NhO85dYjvOXXo5qF+5DPP8eEL6xxpZlzdpSQIYD4OaIcBY21edV6D3ecqk9Czpn5WvcQ7h7aa2Tjk6w/O8d898hJP7Uz42gNzvO/0Id68cp2rWnOgkfA9tx7knQ89xSR/2W6sqF6+N7/84AEW05Bv+eQVlvtTDib1s91MQn7wM5d5qZ9zqBnyW285xg+f3ss/O3N1F/V7vunoLH/3E+d5tLfF6VZ48zON0ZRac1tjgUFluW5DlKyw3vJ0f8yhZkIzcIytpjAhdlKnAOVVTlkV3NEQvP/1R2nu2q791qUtCieIowhpDIEUPPzV96AEnNkZ8k+ePs8zvQnW1YCnNSsYb1fIIqDseaZVSWkL0sYepEixPiRJUpgMGA436k2sqbltWatimjuidotGs8toqhnpGD1eZ7Q8pDcyDCcDKDVves0JFH0+/dDTjExJe0+T2X0NJmsWU0ZInSCFJg0CTBYQJxG6DBlNAka5o7BT8JogjrBlQFVOyHWP6XYfb0OSpI2toHKWUW+bcDigne4jSRrEyQSlijpwJGthkojexhab0yEYR2wVxubkLqDvZwmFI/AVHRnTjqGVRYxHIdr0WdgX8HV7W2xsSR46M+X8mkapuHbwkBZvHZEwFOMdnj3bI4xTjruI2ROnOXbbfaRZjhmusbl2hUsbPchCovkTXE7WOXv2Uf7xS+dIzA5J2kYZA8LVboE3UqleEQjwSpAK7ApUd2VVQhKFiiCQlGVZd9OkrO0HPVjrkCrk2PFbeMff/lriJOXxzz9Hs9EmikLO7WwwMjnPPrPBpdWLDPJVks5eXJJQjK7RCzy6iNjMCxaDFsIlVGrM0E8J4op2LIljwem77uPYwSP0d7bpTQvmX38nh/ft494vfxO/9ev/gq3VbdIkRtsS6ev2scMhwrohkTQivLVoX1JoT5FPCaMYJyzWCqqqAlkREBGq6a7oBiZVRW4MGl/HhFvwUjDfnUPgKU1IwQRBgLMprVbIxmQbgoDufAtSj0kFM/N7ufD4U0xyTZKFzN62SF402bOQgrhEZR2DQcX84RQblNxyusn6uQmjScGe/bOsXxOISGCJcaXFJ+luWIAnSgN06YmSDkqAVA7jugQiqsG2ipFxyOWNPq05QUPMoaYBUZTSaMyTFyOMFhgcSdamyCcYHaKNpDmfEKQLlJM+pa1oxhUjM0IPxoRhm2JQMppsEMdtCDKst2zuXKfSJVIFiADC1JM0NYt7m0TEVGNJmtYboNacx/SGWDHEWsfi4gKKkGk1rUVZeGaW5jl3uWD52jluv3WWmfYGD3/+E2yvPkcc1Xx/JWIC5dCuqt0qAo+2Eh9VtOY69MuS62uXSJqKSHbYv6C40nsSkfWw4wmmqlg80KIoRxS9NirTaGsQQmCcRVtDmmWESYw1hkBK5g8nbPeG9Ncke+YDptt9smA/B045kqZho5qQLQZsXL9Se6qGTRYXO0zzPhQajGHPqQSsxKoJP/rRJ/9jlub/5HGxdDi3TWlsvQH1EiHE4/+PHOyvGd77+77Y618SqEZhiPQKk2uSrEE802asc3COVpKwtd2DQuNMSWtuFhmFFNMpe1odJCPaM2BMjHCaZgfKQpJ2DKgAJRPKckwYZQiiXcITSAVSBIzHU0DgAhBBhBSyjiMzEiEclinShwgRIpRDKrC6TniR4W5ogAThE5wvajGV1GRpE2umGO3xssRWnpn2HIxq4PGjT17gR+64hQ++7TjdKGB1WvEz567zM8+t8cg3v4lWIHlmkPPo1phvPDjLWz9+FiHgD95yBz/3+DJ/fH6zNqOW4L3gaNtxrl97ikItHjuYhWSBwgN9bbleGFQYofDsTSJCKbiS1+djdUUoXs3h9MDPPnH+Jlj665gkN8DfF4tA7EQhlS13v2D/90HyWFv+wSMv8BOPnkMIcTMi9XPX+3zg3FWSQFJZj/WeX3jqQh2rysuV4kTJm43E69PyCz5/q6j4to99nlhKPP7/ou69wzTL7vrOzwn33jdWru6uzt0TemZ6oiZISBppJCExBJsVCBDex4DsB3sRyGDELgastUHYlolmiSaZaIFYZSEZISSNUBjNjKTJM93Tcborp7fecOMJ+8e5Vd09M5J2MfCw53mqq/oN973vDef8wjfs7Gskg0PXdqX48m/oPDvV4onf+h8IINGSynm+/77HePN9j+28drv6HSuJ856zg3RnW9eMNTnRS4ObWKR30g0pBRNJ0JhLrUMqiZBhxXY4pHMM
K8PHl3rcv7KJkop3nniWNx2e5lVz4/zOUxf42XuO82snl5gfQl6ImjgAg9SjCUSvbzo4zqvf/zSnexkCx/mhp9HQLK8GhYI4lixVjv9+ZpPXH57gv5xY3bEjftfpNb642ccJKC+DE4TcqaJNRr80pLnBSUFRlqxlIficaEWMho6qysnLoJ3sRTDOeHC9z8s+8RjjCbx6rMtQNREyQmvN+UHJqz70EMsiIcLwzw5P8e5X3cbdH7qfxVGBQ2CGkOgmlY/YWo9oq5hx36TKcooyo6EUTd1gA0uz3UREEhsrrLd0p2LsWsbGxiafzR7G5DmmqEKlw24iRILAsHtmhvbkIUYpHJi5hVFmOfHESR5ZfRpszPTYXvCexYV1HJZGK2JsTKMiR2ElVSaCI5CXFGlOPlygs9UG7ZmcmqbRVKwvj0iHm0BoC5a+4rHNCyg0hS8Y2zfJY1st0uWSrNB4lZEgGZ/ukicNRoVDNQ3r6QBfeows0cIxK8ZYPxcTRXDToTm6sWG9NyKaSNh3cMAXTi9hjafRiNAKhDdYJxAkoWptK558/GFOPfYkzbiJjGMOH5nl3q+9nZfefphuZ46oMU2rczv9/iprK+t86eEHufAlyWikUbpCeLBW4AUIZCBYPUc60NUuTJ7gZ66ihGZL0e5ELM4bpPBUrgQpkXFMVVna7QnufMkrGd9zEydOniQtJMkoCeoX1qKTmLRd0Jjx7J86Tq+Q+Fgx7gQrF84zTNuM793NZm6RoxEdk9MWlj0TM+TxOEk8xXC1z4neKYb5FmlWIRY3KbcyXCK465Wv41Mf+wCNCHqbFUZLYglJJDEYUgE2aWG2Rmz1U6JJjS0tWEez2UA6SxJFZN6SlSmmqALeVklsbtlKR6TZJq12C1QwidG0qLwlLw35cIQEvFSUyxF2y7C5OkDIEafTAd3xMRbG11hd38JUjik1xXC9YGpPh74pibt7WXlmgbZuUaymzE03GPVTnHbcdHcL6zosLFaoosGolyFkgvOWViyJWhI6num5hGxVM1ivsLpk76HdTIxDUcLS5jpZmTM21qCVDLn47IADu65j7+wYRZlxemFIvxgy1u5QDEeYMg9kJNmko9sU5QYqytkcLbK6PkBHJTopaXclUbuiPatI13L6qxWNuMPyygLOAq6gOyWQkxo9rpkwMFzydCe6jHKLkxWpH2EairXeOhurPSanmxS5Ia8KWlGHifYEW4NNBk89RoeKlb9cpL13ErF7hY2VJSIvcU6HNr9QRFJjrUNEGlmUdOPdHLz6OO31NqOth0mXh6y7iqz3ALrhiJua3ZNTbAxK0v0D1uZH7NqdoNolvbygyCrKXNIcj2lOCibmBEuLKY2kQ+kLhssQK4FsamgbYp0i45T5pYrISZq7DFUh2VqGudkZorgM1uG6hfOCSho2NzeY3nWpHX/aOBq+jZIQRU28kDjrMNZQ5hllUQAeqVTgLvhL97HWklhr8rLkmskw719448u5830PcXaQXqnH+A9gfMVANZnokvfTUO30gHFk/WC7t7B8AYFBe0+sIyrjmV9YojnRRmrFsLS0FRw5VqBEQrNrWVtwXHWtIN1ynD+Z4X3AwbZacfDLJbBJKxNIVUL5gG5yDqVq3T5rUFpjhSOOAqZ0OxBTSqFCORVby1F5N0IRI0UCKqeqhuAFzpW4XOOdYnZXE1sHqiPjeNuXTvO2h0/jnKfZbCOk4FN4/uD0chD1qSuzv3RyGe8lUkju+dhTZFmK0hIdRUgZKgv3vf6lvPTdD7MkfWAy4ulkGwCsd2cRQrBXhoxwW/3FedgfBF9CAOR93VYVdTzveffQM3v4amYF9cVXs23r3x5fZ0Twc7/2S2jVAFExO7Ob/lbOX33ivfzBb/8h3oZMUGvN1tYmjUaDTmeMPM0pTYmzHqUcreYYQmiWF8/zPd/93Xzi45/AWsv8+WcReCSCmWZMJ9IoEbC188OMTqSZakQoEUhhw8oylUSc7acY7zk61mIpLRhUhs3LnLKOTXQ42RvutPYdnv2dBt1I430gSC2mxc69dDledbuODdBUIUA9NtHhmd6I0jmUEDuQiERKjPMc6DQYlJblrNi5nk5upSxkJecGKd1Yc7Db5NlBjitKju/tcn6QMawcmArngsNZmBDgic3hDju6KMsQ+EiFVAqlNK+cm+TmqQ7/+ro5vPeMxZpbp8d45dwk3/KRB3f0es/2+kgv6nNvKUvPK/eO84M3z3HVWCPo9UpYyw1FjV323nNuK8XingeI964kSRROC8ZiRWjIB8zreK2OsDmqMMYhhaQ9PkVWKmwxRJLijSfLLUUl+O2lRZ7+lq/hnz54kbNpSaPIcThmCfvxnpPzfOuhXXzXkb18+HxgkN53zwskzCuPwQrQqH/I4cB4+HnuOPL8h15wpD1IL/v/oZnwszOmgEP/Lzf2AqOWtf3U5hZvOXkSLRVeQGEzOm1FUQ14/FQPQZtms0sUd+iOK0bpiP5wSJqXWJfTiASdTgMVOxqxZ83mbJwfIoWl1Ryjc3icTHkqK4gnxrnhGCTO8vCJdba8IGlJtNL4XCCcCcm5MCAkRbVKVTgefniRxx89QavZJGl26UzNcvzmm7nmuuvYf/gQc9cPmdjzOfrPrBKJCGMMFglSoLzbuR8uT3hFjYuWIsxTVVURJw3iWNekVReqdBbIDR6NF56lhWWsfICV5SWsF0zvanJm4QJRBP2tPhtbOdWoSdmUdNuCYVEy2koxZkjuJZubfVSUEJuMpBExNBmro4LRcIG2Xqa/LFjdyPjVmw7x8l0doA/LC5fO22tu+5uf88vHi3Z/mSf+P0r6HDr4/McOXnPl/2vlvM/lA36s26BIS6wY0GrPsjk/otINVGsCnVZcf8ywtpFx3U0HSHyDzaUNTpzfortrCtFwDMoMOhaXG665ahcz+xpkRYnGMxFbpBgny4e4WNOddfTMReLRLOkgIx+laFfhRgKfOVzlyJ1Fa4NXltJcZDRaphzmCC+Zmm4zynNGbp1K5GgtiBueqSmHdUXAL4sKpxStac1IDdjKSuJ2gotTthZzxucazOwxlLpisGbprTtiLRkNMrwLSjVjY+NU2uIGFrl1DteExa1lomSNySmN8QURCqUF3npcaUEGLL8qPcZr4rbjyfvvp737PDLxyMYkcT4kLwswjqPHd7M5TMnyEp0aCqUZzgwRsaJ1KMb7MQ4cP0TWHFK1ljFFjG5GtDoRqbE02hpRNRjmjt27jzA2PYuXQ+Y6gtXBs1Spx5eWuUNdJmY9sYrppTpIcbkUF6Xs2jNLqykY1ZdFU44TKY23tuZH5JR5gTElWim0UlSXw9J8WBmllCgZLFy11Fzez6xMnHDcSQAAIABJREFUha1hkrPNiN94zR1XvWT35FjhnHvP6cW1H/nMk/PPN0V//njX191+9OsOzk6+/iMPnvj4xfUhwOuP7hn//psO775mvN1UQohTW6Ps7Q89M/9XF9eGX217XzFQJa9QBtKiYGp6mqmZKZY2VkFCa7LLZGeCzfVF0o0ekVUIYbHliDMnTjPZblLZCOsr8CXtrma
4Icl6nkg1SeIUvKYoDHEcNCmttWjVIC3K4AolQ8teqeDNa61H6zARJklMkOUR6OgynVS4pPuHxCEoyhQhKkQQ0MCjENIzyiJedM9uXvraW/n4f/44AEdj2LZxrQ/CpfP45QqP27DCnYW2vPR/4EDHEgWj9SvetrayGEgxNWXBORfYs/WCcKlDLnYqf5GOaj/hbUZukCYSIjAUqWW4thUBpFB85tOPIWvJojheYNiDg0dv4ujRqzj1zGm01rW2bFRjhH0NbrcksUD6BmWR0WqOIWNJ6iSVBeMkVgatWgsslg7KEoGgLIL0VWk9G0VF5VztWsWOgoF38NTmECkgqqP0d9SV4slGdMWxMi64fTW0pLDbGN9LIenCKA/nXOwU57cPHR54eK0fKqtK8kLj1NalqGb7vVpK9jaDicEn59f5yTuu4Qf++gnmmhFvvm4ff/DMImVlSJKYJIqJIk1lLA7P755c4P/6mmP83okFHh8WfMPhSQ53Ej61MsA5x53ve4BOtwN16/Rnb97P51e2eOd8IKhdGIbjd2RG8ehKH+egE7eIpOC3XnkV/+GL87zr7AajouJNx2b4347PYW2Jd6HqW3mLs6augl32nX24104MK8ZjzTXjLc4MB0gBt8yMc2FUUEpFHEG72WKs08b1AlkxiZqBMOAFg34OPrDU9yjHaWN4ttd/3nHNrWM5LTjRC3PR37o27N/TuO/1L33Bx18xOU4+csQNTVnkDGzB1PRh2p0GWb5EMeyzubRJu92l3YoY9jKstXjpsM4ylILKViRKkSeOtJVRZJayHPHgiZyV/iTC5fhC0Fs0dLznjuubHD0wzVOnh5w/l7FeFqi2pCl0YPd6cMJjRC2/oyXG5IzynLwYsLp6nnOnHiduT7Pv4NXs3h2T5xIZCZzwKK1xxlFVVVjg6irM5Yz/MLYTIR+Ii16Q54Fdb63bBjsh8Wjl6I53efFLbmH3vjFOn1ZspBk6SWm2PUXliSLDxBQUkSBOcrxPEYXERA2yTkTGJjYf0NYtNqpVTDxJLx8yyAuIEzItGJvV7Op4fuKTj/69XBd/3+OpIuXal+2loec4/fQWuw506IxFjDcniP2Q9vUz9N0mY+1pyuUWR45M0Jlb5PxKio8t1x49gvB5SBJ2d1FbJU89dQ6VdCkKg1QVDkVvtWLv7jZVbCjjeYgqxlUMVYt2Z4LFlTXmZtuMN8YY5COiZkSaFfR6G8RRF932pFXJaOCChKCQkIDTFWpS01IxmysVlRFoKdmcL0PQN1GxubkFok1jKmPqQEJVwLCfMDGpcKVjemKWvBxhTIFB0WgmICqiues5+vKvYbK5xbt/47e48bbXsjp4EKnBVY5IiNpqIqxv1loqGRFHjsEw5eBVCf1UUiwH9Q9blAyyipljE/QGF1k6mzIzN4sVlnwzY6Ab+ApiEZOMw+nzJxGTbSb2KtLNjDhus7IkmZzrIsZ6VL2IqT0TtNvT5EayegHanYiyWiUd9hhrz+GSLeJuQW9hiB5vszGYp9No0BCTFMOMpQsb7K5XqDKrqMjRCspU4pzBmVDs8d7haq1v7+yOY+QlDo3DOpANhxJyB3ooqBNS7/nNVxzHOccN7/zko7PNWL373juu/Yk7rrFvf+iZpa90jX7nNXsnJhP9vNhyMonUbz/57MpHn10d9Etjf+Dmw7N/9Nrbrnnp//3px88OsuqFtrU9vmKgmveHqCjgVtozE6S2YGJqHFOWeGPJq5zKWCandpE6SyPpULmKViK59qpZ3NQprIWyiFDK0d+oUJHn0DHH4oUIhMRjydKSZquFIPhcIwRxHOQehAgBrJS69jwOYv0hFAvVIO8FQgQ5CQHgQ9BnrSOKNFJpnAsSDbpV4X3MxpJk7jDc+vJZtrI1cuFp+L/9Wvd/e+rClfjGehQSxiZCRXi7+huIXQLvJd5JEOECq4WG0EpRFBVpmmGsJdaQJDFSebwvCfXncDxCRTXgpFbXPxGcYmqijq0cpZ2gKEdUpiSOG1hrSZKE0WhEs2lCAC0UxhtiBHiLMSWNpMMTT32J6158E8UopbexVleZCZVq74MgdB0kOSEC4Sa+rNI6KkiUZPdYjBKQGhcqrQ3Nf374DBACg7vf+zme2Lwy2To20ebprZSq5nA4a4ikJBLbFV2FEgEDfGGY0daKXWPBz367ojvdiDi9FSqvR8dbLKcl68Wl+ySuM4Sj3QYnt1LODHL+xX2P8fMvvZ4n3/gKKu/5kzPL/PJj5xBSorTmP77oCPvbCd/z6ZPEWvGR+Q3e/qWz/PrLr2cy0TyzlfE9f32Cc8MUqSWrFnIjsC54gJfOMzCWDRc+ey0v+eT8Gj9/19W844unWEoL9rUjYimJlaQhc/YmKYdmmvyL47vREq6uC5BawJ4mHB97vrNIK4kQw5zz/SGfWe3zk3cc4S33n2S2GfP91x/gvYt92q12UBrQEaYsseUIY3O+95ojfHazYCU3zDYS3nz1LIXzPJMH6bOXzU0xP8w4uZXRijRvuekws42Yv7i4Rl7DD57MszqhkCRJQjNuoVyEcDJMrs5hjaurch5nHWf+ye284oNPs5CacC/UVTwhAh7XGlOTJf0OZjKM7VbXpYRG7PzDDvPxcka7qBMb5x2Fyyhc0AK+/rNPBItR7aFSpHnB+VfdDkAz0WymOYnWCK9ZWOwx1o4pbUZZpGSFw+KpXExZleRZBrIiVjFSR1RpgZIJrqyJpMYjvcbYiJXNDFtV7JvZx/QBwdLFi7jZPah4lQOF484b2pxZtzx6omJtJaXVCfOBNXlwYapA4nDSsT05RjrCYCiGi5x/ZJlzRDjl8LJW4PDhGtJRRLV9H9dz1Lbo/3YxQEmJqM1f8jwnTgSBsCqC2oMXaC3RKmbv0SmINzl34QKiIdnVVZg8o90xDDYEXhhklNKZ1GRFTn8rI80zWmPTtKZitFR0mlNkoxQdtcNnF4I9ezps9LcQNDEoaF5Kch8f5ggZ9sXV3y2c93pW3a4A+HDsldBELojab6894VoLBKlnv+suXvruh7k4Kus5r07sCVJdgXgW4ExuR2mG52fPz6t6iJ3tCSnwwjIs+wgV5tPjnSChKIVic72PHQ5oRJJqo2Jjuc9gMkfYjGbSptOdYG1jC5+NUFuCsUMTTDYd1UCiKoHVls1BCTKhZfrsmetSVhHxZJd+L0NouPGq69hcWaIoS4jGMHaVwliyoYOxPuOHggZnpBSjXp/ueIqx0O+XzM5GrK700B1LtxtjegqbQyNuUAJ5UVLZPjExkfKoWNLoSqx12LKFFw4RGRI3wdragKwnEFGDzmTK2OwcW/0SrR1JMk1epvQH68EJanSRhz/yXoQTdOI2y2eeIosHobhjLJUp8LbuzkoQXlBqUFYQxYYzy19CdFvs3tVm/mKfzcyifExsm5S9nHbUQBhYWawoh02yHFY2B3TGNHOdFkIq/tHXfwePPP4XrK2dpTmj0VMFpciRooRM0FsRdPZNU5gRaboFukNZCLIcVJXjqoJyXHDNoWNc3Fig3Y5JdEwcRZw6sYhQERDm0sgV/MjtB/iGgz
NMJRELacGPf/YMu9sx33/TPg50ElLj+MsLG/zkg+fJKotzjvvfcDvvW9jiruk2N443+bnLyLZSgHeew2Mt7tk7xV1/9tcXN4vKbhaV/fXHzy295eYjc18pUN3VjNXb7rz2wDf/+YMnHvr2u2+6/LnfferCxuX//y+PnF39Vzcf2fuSPZPts4Os9+W2CV8lUFW6ojPWZbY7i0wUxjqE0pSjlP76FnaUMr1rD91mG1OMiHSCrSQTuxrQXGNyFzQ7UGbB07k70abZHjE5LXj8iwXr8wntVpc8L2nEXaSSbGyuE8eaqsqBaCebV6oWGzYGfVmwrlSw+FMyTJq2luQJwasizQZEukkcx1hrECKiP8g4cmw3b/jnN2K1Ji/hG3/sbjwVmRVolSCdQBS2hh9YrDcolYDQCBzOVpeyEqVDwKFrHJdQCBXhioKTHm70+1Ba11asgkhrdKTxJZTWohNBnreYbt/N/OIKo6HBeolwFUIrlBJMjbewecmH3/tBpNoi1hZRjpjYNcm3ftcdRC0DCJSOcMYGmS4tqMwIUymSROOcQXiJtzmTE9MkDzzLtktXUVQ4Z4iiaKd6ogV428AnBlsJIiRV5bnllqs5erxDnvV58guqruICeKwPslCjUQgavPAs5SVLtVA/hGAgN46tQQjgt6Fv65UJ5xP4mYdPM5/loUp8RXQRhMVlrdfobMBglsBoeGXFGh/0VVeyKz97OSt2tvfk5ojnjrzGwZ7cSuvKpmAtr/jujz+K9552t4tAoOIE7x1ZlvGvP/MUgrCQK61oNFv82UKfd80/SrvdQUpFVXmSqEGr0cJaw2AwRGuNVJI3PXiBSEco2aiBFI53fPEU//z6g/zS3TcyFmuW04Kff/g0v/jIaf7ljYd4621XcWJzyF9dWOPrD+36Srcypr5nOs2EtvV02g1+6uQi/8fVe3joG26ldJ73L27yBxdWEM5gHfzEjQfY24j55w+UGO+4aaLBD1y7m45WjIzlS+sD/tkDp1hOc8BzfKrDL738OJNJTGosj673ef1ffIGFNN853tskOCE1skrAKbwUOwmZcxIvZBDHxgdyDyFwNK4CL5iONf/hrv28fK5LYR3vOrXGzzw8z3Y84IUPcl11MNBQgh+9bT/fcGiKbqx4YHnIv33gPPO1yocEphPFv7ltP6/eN46WgguDgu/55EkWhimlK/CmxNmKrKwwhORye0ztarE1X2K9IdYaW2Vsrg9oNBK06hLrAmsq0qHBW4eWMRCFIEhrlJQUrkBZgckckYrpTswgpGK110NKw3gVBVma3R2m9u5jfWMNJQX7rh1jTzTG2IzhsYcvcO78gML4YCOqCMYOSLwWOGFxpgQrkELglMALBz5HYEOiWYGQDiSBJS3YmQ/C/ROgWM5ZtFJIIbC1HE86yrBWoyNFllbgA8/BCsXM1CQ333IVeblGURkq51EjgVQK67doRWPkZc7KxiJJs4tSFqskOmkhS4cvDZkwlMWQorRoqSizAZE3VEOYnmxgI4V1GhlfStDk9p8enLU1NEqGJB6/g78VXpG4aAcrbtkmOG5HtrDdOquMoapKQDDTjPiZl13D3XvHKazjT0+t8Y4vXgAh6m5XkPna3kJDSX70tv18Y30tfn55wE98/ly4Futu2L+4fg//9NpZphsRy1nObz59gfvnN+qv4Ui6kgPHurTjBufmN1ic7+GkYny8QaQjFk/2aCYdZuYkFy5cQPXHaHRitlaWydd7tCdb7Dswi3CeZ9f77Nu1h3bbMuxnjHWaRFqR5RusDwuSuIEsBZFLMKJCNns4mZCuFWz0MobjJZUDZ2C8Aw1RUfUq/MAjvUIkTZIkYjPtUfVHtBpN8kFOJBSuaILJcbbiwvk1TOZpdRvIpsL5jE5sMSPF1HiTpfU+h47uoipKRlmJ1J4yX6ahEmSjjUew9OwTOJOTph1mb+hyvjzJeJHQjAN+s7IVxjqsr7AmQMBiY/BVRJEq+kXKgSmNso6Z6Sb5KCWNU7qTMVZ2kLogTTPWLm4y0Z1gY5AiswgbJzRmJxhGK3z8c58jbm2gm+EaaseW3oZAqyaxjHnZy76FF991nKXeCu9/50fpuQVsWRIlgsqmRCbihqvu4tixIyx+6I+Y1JM024rV1ZQoboEZA1YB+PmXX81cO+Z//dijPLNWcnSiCXgSLfj+T57k1FbOobGE333Ndfzgzfv5uUcuAhohBN96YJLv/vjjPLo25ObJ+LKVQiIQHJ9qslVWnOiNdogjX1jdGu1tN+KJWMteaV4QAfDLd9948PefvrD8zNaofKHnLx8vmh1vjseRfmStn321137FQHXvoVkazS5nzy8QJx2kjumXQ1CauDuBlA2kTHj24rO0ZpokjQ69/gDfUsQNw2NftAy3YNf+kompCG9yFp5tMDmbc82tsHTO4lyB1po064cMXQsqU5HEjdA+8rZe5EPFaxuDKkRYvJSKECJUG7cnFCklKhYUeYaSDYwribRBOEVvXUJiufbWNk9e7JPnwTkiiixaVQjdQKkShSdK2lRl0IjVuhF8ll1OrBsIukjt8cbUAYpFKYc1VQiaEWRR8MCOfAhmpQrfxVsPTlDSQ+omzWbCxsomn/vQe4mjqnaDkciGqiursNZtcf7UOYa9DZIoLG5OtFhd2CIbDKgwWKcxzqKkwlYaVKgGlblAa4tSgon2AYosRohNBqMhQssdAk6SJAyHQ/I8J0kSWnGTUV5hXMC8lbbEGkFV9kAMcTLiu/73e0jUOK7KkKqiwLGrs4v/9GPvRYigU7dtNRBgF2HiD0fI11WxgHcL4uGhevP7yxs0psdo1A3E7W3c93V38ZpPPULaS4mt32kp/22P+17/Uu78s0+R1JahO0PAVVG9eEWC4Gj23MqlA38ZMWy0eeXT21ARBfggjQbsIEZW6kUyM45feewcv/LYuSve/tj6gA+du9I15PdPXNz5+40vwArVNe5o2pRUroJBRT6An1pZv+J17foH4Ne/ENzI9gJo+OWHTvLLz9sybCM/f/PJVX7zyWef93xyWZnzWHK5rErOFdAauAKloITYaUntSzKUDcf05192A6mxfPv/eIhYR/ziy26gVxp+7fFFtrEegbXqQQh+/PaDHJ9q8fUfepyRcfynlxzmd151NV/3wcdDMKUEf/jqa/jS2oh73vcYvdJw7WSTUeWCnqG39NcyokTj0eAMSePS1LmwMkDJoDriscxO7grVUWFoJA286ZFmKVpGqEhSFgbnLSgQQqNlhFMy4Dt9zigf0Gg36chxBv0QTM7b85jCkm1AtrzE7KSi1erSL7tcc/Uruf5FN3Ld8QcpioQTT5/ngc98nFGaUhWGUeUwLqiqxJEmiiKEdKFVCAjvUcKhpMaa4NyHAu/MjmU0UEuN2Z3OjNIyaIbWS5YxHjMqg7A+weHMGc/u/VO87t4XMzs9QVaVqLajKkuU91hhEaIgiXwgaDlJfyuj3fUoHVFmBqtFwMk3ErJhhXaALml1G3hT0F8u8VWMUwKJoeEvdUeCXFAgc0ohcbV0lpIycA2cQ6CJrK6D1EuB6TYfA
CHRQmD89trsduaxX73nWjILL33fo7Sl4I9fex1bpeU3n15GKYmpu2lCSvCef3vHAY5PtviGDz/BqHL8xxcf4r+9+lru/fMncd7z2v0T/PAte/knHzvBlzZG3Dir+bNX3czbyqd5aHWLsrCMtfawvL7G1uYmXiukcORrKWNlQjzdpt3ZIh2mZFkb21Tkw4rJiSnuuHuK5aUlTj2xiVOS/qjADh3Vxhrjs4KtYcnUZEI1MKwupBhRceCaA1gnWR8NiRueTFoqJ2l3I5KO4OLFBfI04uDehCwzTO5qISlY6+XookXey2jEivFmzCDfQEcRSUNjspJYBZtbHLz+m76ZscYE/cGQD3z4z5FJxLCo6HYjMIpEtYljxXDJ04i7FD7DqxQnIpKogXaG3tYQg0DadRokRKLL4bk5zq4/TVqUCA3ey7o7ENYfZ8FHhqq0qFaMzVKcaqLciIZtM7a/hWtYTOZRI4tfgTe//p9iRsvML6/xsfueRJKzsrZF5TyFOU3iU7pjLcSYYe1ESlqF9bNVlWz2PR/+2Kc5f/EZhsMVRMMhcocnaMmOdXaztjpgdeMhpJIMsiVOP1pQDCy2KsD1ONhuMhFHfPPRWb72/V/kwiis82f7KUIIzvW3HScdpzYqfu/JBb7t6t380lNJfT3DO88s8dDiGu3OOKW7JInofFiTO1rTLy89DrCRVxZgIonUCwWq33H13on9nWbynR/94tnnPvfcMddK9O+/5tarfu/pC0tPbg6fz6J+zviKgWprZoYiN0zt3k0xrBj2BlhjAUWRF8QtyWi9j5KCsvKY0qCVpBFLMDljEwKTa3AVjRhK78jzguV5zdxBD1QoHeExJI0Ea8LNr+tKlTUlWgU7R2MtQoSAQOvaY1rUk+iOmLms8Rc+TEba4ypZexNLTCXYM7eX73zT6+nOxvQrEKIkHVY4NF4GjIcnx9qKsjI0pGNrs48pS5wvkDILlS9aKAQ6VigdWHReS5xTSKEoXEVMjHACY22YRFUUWkRaBPxnNEtZWiqX0WhqTp06RZJAWVa0ml3ydAVrFVgQ0jC9a5LSZXgX4UcOYQQ6EcQtjRVlYN8q8N6gY4A4LEg+J4qa4Csyu0GFoNMIAGtjPa6Wo9lu51lrQ+U6TtDaIXSLskhDi7WqWNtY5zp5lMJmNFoNKlMilEWJCOEjimw8VEpsFazjQvyJwNZM4u3AUwSnkLq1hjP1o6HChqiJHF7U8ALHr5y+SL8yXFgPGm/5Cyd2/1NjW1VhOS13yFyXj0fWno/F/LsYf1dB+N/1kAimGzHdOMAwjPMspwVaCn7n1bcw10rIreMzixv86mPndohtf/K6F/GR8yvcNjvOsckOP/vFSy2p7bGnlXDHrgm+86NfYGQsI2P5tScWectNe/n1x5d2MFiBSBkChG86NMWP3n+WjcKC8PzCI/N89ltu4a5dXT6/3OdbrpplLFb8+P3nsHUb9+RmjpSBxJaoBnPtI5wZPE2jKUnixuUActLU0ekqpIPKWrJ8SD6qkJEjqzIKZ7C1vqJ3UJUFKBvMHCqoKFGxxpYGZwWNeBxNk6oqKdwIZ8FkGY2GJhnTPLu6woV1wcyuNkPfxaoupVrm7GnHtTcexXdTdu+/hv0zXb7m9hvopZtcnF9haXGFU2fPsrg44g9fcyuv3T/993dR9LbCzwuOSVAWlIKDx5731BVj7AXePvv8h17JPAC3tVr8k2P7ecXeaWYaEfOjnLc+cII9zYQfOn6IQ51mrUW8wb/7/FkyGxbnB95wB+88uczL5sa5dbbLWz/9DO8/EypZ24nVkYk2r9g7wSs/8BgbacmmFPzXp5b5geN7+K2Tq2xLFPo6SPZ4vvHgFD96/znWsgBV+bmHL3L/t97KnTNtPr/S51An4cmNjC+ujNjdNAyHFWf7KVeNt3lodYubux0+OXcQOAj76y97/XO//eXdlblLf6bA2NXwNS9wDAG2+WEvxDOc3fPC77nq0p+fHo3zQwbiluJAs83mgseJipXlFXSkGZ+cpbc2JBIR3biBKhVD36DbnuXlr3w9jz1xjm7TULkPI4qCVmMCaZogBVMznv7WBpHtMjneYphZZLkb7VKK4QZpmfHhb3kxd7Sj5+/jS2/9Ml/4q4xrL/t7nECgvAEY1p3vXdPwbXe/8Hu3D/vzDlut9nRUwNH6gN8y95zX1F3wsf3A/vCZl41XvvezO+ZEzhccaktow6iC5VRw99wYP3TrAa4ab+6Qbddzg7UOmqHwtZBbxqcm6znu8oA0zH9D4xiLrizATDXCA1svEKTONGL1Uy8+duA7P/rFU19tRT7QaUQf+Ma7rv3M4kb/Rz7z5PxXeTnwVQLVt7fbvGLubygsu7YfuDGUZ3LgzHM+cQF+7g1/s03/T48zT17any8zHjIJv1jOsDVYYWZXG6/XmZnp0GrNBkypLPH1Qqbqaq6xlkajhTUgSXCy3PGbR3qss7ha59JYi4pAEOOqiEg3sSZCNC1xkmCcAh1RZEO0CG5YWV6CjLHWoaTDex3aaBV4Gug4CQDxoqo95y22ckgfoYQGpYOgfaQwrkFva7hD3lJKUVVl3cIP+oilybACGkpjkcQqRkdbLC4NGGZtXCUpco214Xsal1Bpx+Mnhtz9bW9gcyHn8GHL1J4cTZBK8VoQ4TA+SHgF6TCQMkJ4i5QJUkQYm+GokxKjES6i0ZRUtuCNVvOBt78TgOVWvFMR3gaCh7zF1dlhOP7BaSckOPjtQNkh6hLedsX2xL13ceD3P8YvPXqGSqjAqPbb/LavgGH2deC9o85QKzVsg9SlYO1Nr+G2P/ssF4Y5ui6jmvqGeO6WPZemD09oW263YOVlVa7n7UaN67wcT7g9fuZLZ5m3XfqyhfcOXztkBQKdqzGinrIySBmhowCXKX2OcRYhPMZAoxnRbkcoVWP+xLZmbQisd7UStBSc6qeUzpMoiRCSxAve/JnznB1WHBlv8esvPcgbrov4uSdWsBYsknsPz/G9f32eJ3vPkihBYYIN7nzZZqGMOTbdZqs0fHrRcrQOWh5ZHXGwk9COFalxO+fZ1WRLIUBJWcNKartP4PhUi/uX+7xkd5ez/ZxfeNkR7tk7znph+KOTK/zOUyu1iYPk+skbcS7nQn4eqRzYS2esKgv6qUJ6QaPZIG5pGklMWQSIgJIC6SsEFVXlUUIGxnGsQHry3CDKgJnUOqGhY9JsEKownTEUImAOHcyMNWnt6mLwWByiAWujJZ4+uYEvRuTZiPkLSyQipldWnFzIMb7J7NzN3HCj5I50kSefWuanP/UAP/3gl7+c/ybjH0pidWyis7Mve1sNfuXRs/zIZ5+idK7GnwsSKfnQqSVK64mk4MhYg0QbNssSPHz7XzyEEIJffPQZMuOYiNo7hX8hBUoqjk912CpNXcEK2N8nNlMOdBKaAkZ2G55kdwJWUSfq4bfY0ZS+cabNF9ZTPnyxxxuvmeUle8f56wvnuGv3BHOthHedWvj/BSHx9KgPfkgjUXgHRRaOgRcZw/lRbaHtiQ5N0KvtU5Os4N1/+kHWeiMQI1xZ0Om22L1n
jkhBFA+QYxXnny24dnaW6dY4G2eWwMcUXhLHkrZSvPWjf8sX9N9w/F3cB8cmOldsf72VkEE6AAAgAElEQVSG0hnnOLl1Cb7mnOC3Xn0X//7BZ/jjkwtkxvK9NxzgB246RC/fQJQBujNKUwaDPt57Ui6FlqYqsbbi0ZVNxpOII93mDWcHma/3QS+M8nKzqK4stQK3z463Zhpx/N6vv+Py8J4/eM1tV//pqYXVt9YB6dXjrfj933DntR+7sNb7wU8/cfG52/ly4ysGqm/7q78bYdl/COPLsXi3xx264NMfeR+HDk5x652H0bpFqiucBe0kRjk0UGu97wDujamriEIhrSQrHDpKsKVDojA2VImlV1RGY02J1FBkBeghExNzOGfZ7K0jvKfRbLB71y76/R6mqoikr7GuklDC0WQjRzwuGA0KvFI4L6FUGGw9MQjykUTHYL3F+y2EbzIcFkgVY0yFtRXee6IooigM1DZzShhMNSKK2iAKGt0G5071GK6OiMYNVWFB5rhS4X2KEoJhCVXRAmGprKWsDLkboXULjCAzBlBILA6D9yYYNhhTB6waKRReCFxhEYR2jaxA60DC2x6ursjCpaow4hLZw3t32Wt36rj1vyq0/7aBac8NFx1oIdnbjuhEQZlhswi6t88dO2xo79nd1IzV1cTUOBazKgi0158wlSimk5iZRswP33qUO3dNkBvHfz+1yE8/dJq5mbpUUuPoiiKnLApakebf3H6I1+4boxspvrCe8lOPLrGUGUKpWvBth8b47qOTzDY1zw4r3vH4Cg+upQgEv3++4F9920/V2EJJMMWweGEQ3iBdaIcuz88zf+ECeVbRGhvDC8vS8kXSmYzlYkhnPOYVr72GsQlLJDVCxKRmxAf//XuYiCPe/4138vWfeIxe0qQoDUIpKEFawbnUEscxS4XnT872+Kb9Y5gdz1r4k9MbPLEZWlfFZYm7EsHDriVhUFqSOGIbM5HWVbCOlozMduAdHLHA8lcXe/zLG/bwhdUBo9Lxw7fsxXlPp64YTCWal82N87bPn+OHPn2GGyZb/PFrj7GeVbzvfA/nPWWpOD55K37dcD47TRQ3d/ZNK4k0iqSRUKSeUX9IpxVT5o40S1EyEIqSuE2kPHlRYDHgJc5IvPO4qsIZCzGkZcWgZ2i1m8TKY2xJpCNsaRgO++SFZTQq0FnOLjocOTTO/kMxvdU1Op0WnTGPLA3F0DL/7BkWFpfobeTs2b2b6ZkZysuC7Kf6OdZZbH0MdxKyy26Fy6k/YvsJURM32U78IBGSpTe9jpveeR8X0iJcY5dhha+AeFyx0a9MYr1pusWJzax27qvb8sLXsBwoasLS9maeyjMkATrSjTXn+umOlnBRM6Ere6kjV1rHWl4xmegr9nE9r8hN4IkX1u98dyk1USLpJhGDyuKs3YGk9WrycieSDKuQKHnv8S6QpD4+v8Wbb9zLoxsZI+N46637cd7TjQKOdTWr+PCzm/zRq65GiqsB+PH7T/J0Hfxk1vFof7hznpSE2LeIkxaRTupkvdamlmEeNJUN+tP1fHLujTfzmr84zWJaUhQ5VWXqc759HOUOJKueTJEarE5J0wxqJQjnA7RCeMn1rXA/aCXxXoSWug8BeoAEh1a7lCJYoTc6vO7el3P/X34KPRlz+PAB1r/0CE4p9oyPYRSkvWeYmTmCjyMuLgxxWYNuu4OXEc5FaOUYjfrsmT0cAuJ6nBgWtMdaQXBfC/r9USBU1tAOz7abWv3LQaM1QTtqBx1SF875duHjzHfczD1//hTnt4K2u44VtjLBvCfWOBc6kKIlMeWlLnbh2LlGSy9qDL2vZaK2L32xsyaJF7gXdO2kCSFm+Y6/+AInekPW8pIPnl3iHV9zPT/46ce5MMw53G3SUIpECYZVifOGm6bafO8NBxBALBw1LBvlHZF/XrxJrMLnnx9kfGJ+jbe/+Lr4++57rJhuROIHbzkSvfOZ+YXnvQm4b2F9ePu7PnWF1MYjb3zlzT/y2SfPffTZ1QHATdPdxnvuvePa95xZWvvRzz31gtv5cuMry1PVY4VpRM3kdD7gGKyxoaVds0JFHSAEBq2D9hZK252ToCOoykB+Ofe/3Ms9n7yPZ1YzikwgUTgfWL7eb4cR/hI5Yrt14i8FIJdPlJcs/sC7gB8SIiwAQnichyiK8d4zNdsC4fmJoxF/+hsPkqiC7/i+OzHVHL/87/6IN735dfxCGi6Mg1dNMDexm+mZFhcXNvGtRggWvSMSLYyqMB6091hrUCLYa2ohA/7FCCKhsKVB1fsfOY1znkhF5EWFjjSJFBRKgBmyvrEIzgWlg3oi3dzc3NHp3FavCp6+JZIYJRQ4R5VZZCTQkUAah1EK6x3GBi1b6S3elEQqobfcD1ALH7rvcZSQ5xmCCFMV+PoOM1WwIwVPZTxKNOj3Bsyf3+T6O/aRFYF24DVgPZKESDVIooi8N0DpmEjHFKXBOll3TD2RDqLiSiR4HyGFRqvtxUCE82WLejIUIAMpwblt9vJzR10l3bkGw5bczqTgd4LW7eVO1PJeAk8sI8r0uVhTwf52jMNzYiu0rg91YhxBt/SKT68ZxXtaEU0lONXPcV6wrxVxuBNzYisEX20laSnNxWHGO15yPYV1vOEjD7FROt752lvpFYb3LAV8pReBFBjFoVr+b2/exbGxBt/68XOkxvLvbt3Dr961n2/+2Bm893zdvi5vuW6G77nvLCe3Mr79yBS//pJ9vO7DJ1hMwwJ68uRpvA0YaKXCb6E0UghirdCxQkdNDh27gUgkiMgTK8F11x5jqxywsLjI2f5Jqg1NKafIKkNWbBJPh0lvuyW1YIIbjhMOUVhiIu45OM33X7+LI52YWAZCz3puuFTPZgdvperntxMArSTWVAzKirFY0Wg2wAcM/q5uQNX2i9De2o6BttnYb7v/HG+78yAfuPcGpBD8xuMLvHb/BGtpiTWOYWlZGBX8zhNLCAGPbwx579l17j00xZ9f2EJKSTtSFDZi74Hr2FjtkQ8u4Y61UGip0NTKGoUhz4IEkI4jPI7SWFQUKsu2qnG5zoWOgRBUOITyOF/hjERpQZkXwYo6kcQ6xhmHLwXtjmLXVJvZ7mFuuu4OVocDzp9doqET2m0QIqYqYya6be6681aiJGKjf4HKrjDe3U2s9vKeRz4PwMzMBNZ5jK3Is5x023RjJ2+7lNbtnKXtrsHO6wQ7cc2l2xHvPTNJxC+8/Hru2TdNYR1/fHKen3zg1JUk+Mve9KMvuoq33np4Bw4C8IGzy/zwpwNe+mv3z/CWmw9x43QXLQRn+iN+9uEzfOj8yg4pM3RJ7I7knRKC//Ti6/jHR3YzFmk+t7TJ//nASUztMigFTCQR33f8EC/ZM4mWkqU057v+8mFW0lqbmEuBqtYKaYLmdjdSO8Gdc56xJMwhw8oFJL4UuNpiUwrBTz54gR+/fR/vu/d6pID/+sQiX7tvgtW0xBjDD96yj398aJJ7P/g4T24ucWyize+95jYqZ3nnM/N1sCkQdTLWUOPEcacmb9V7WksWuvoEbSfol0fhWV4
wykqwhtlGzH948UFePjcWyImn1/iZRxbC5wjAgakckWjTVhovMn7iRVfzjw78P7y9d5xdV3n3+127njp9RtJII416sSQ32ZblSgwJJA4EQgslIQHyEhyKgdf0agIG4yQkJDFwQ96E4GBjh44BF3CVbcmy1cvMSNM1M5p25tRd1lrvH2ufMyND3nDv5967Px9bmtE5+5y99yrP83t+z++3jCbPYe/kPF87MmiaVrXhBL9lczd/sWU1y9M+AwsVPrLvBE9MzpvKjYJmfw3rezbxlHoM2/I43H+CWlTGUhbZljxr1q/i4ss3Ep6NOHz6MDqXYWY+RVAsQ0rS1t6MDKtYoo3iQplaVG10CngpiyisYQmSZNVQx7SoAxJL8iRtJA2pKUIdJbrhlqERsliRk4mKkGU7dKR9PrV7DVctyxNIzb2DM9x2YISorHDtFDHnI6oWghVpr6F8U44UYxUj2QiwLO3SmTo/FKuvjaMlI5tYR2lPL5QbdLd3PXaEv9q9hR/+3uU0eS7jpRrvevQw73v8GB+7dBO379nGc+cKfCdxhQylYkUmhWMJlmd8ujMpRsu1hhkRmNhudTZFs+dwx/Onef+F6+wTb7g+EyjFv58c07fu62slIeR8+ZoLvJ5cWrzq/v1BTarEYvz8Y7hYXTkbRArgpu29XlfGd968eeWKN29euUII0yn70adODL1QEeCFx28UqJoOTxMhhbWAMAhBKyPNYwuEXZcsSZAtJRGWwvVMoBMEJihJiIqACbayOUEUQJvr8KXdW7m+u52aVNzVN8atz56izl3Uwgw2rU1JOm0LPr5rMy/vTSbKxBzvf+o4Y+VaI9N8y6aV/MUFvSzP+JwpVvmrwyM8NTOHEKaz86EfDvHqP9rN6ZNjDB8VOJbkxle8mKOHJmCD4Y7c+NoXcd+dj7Er3kVvbwuVikXayTJXPUcUemhlNEAt4aKFxPi8J5maFODYxEGIEJbZsBSEMsJ2bCKpTUlfWsgQHMvB8RyUrKCVjWM5ie2kRS0I8HwXYVsNMV5T2vVAGeFeG4vmfB4pFFJF+CkPW2lKpRDXyeCnHDQWnpNhxbI83/3lM2QyWWQsqVbK2HYapTSRjkzXq5T4KQspHWzbSQI647nuunDgqZNs3LmSONZIXLBsHCciCCUyVjiuBGUThppiqWpcaoCU76OlRocKISJsxwFlGVkY4aCkpBZUUWGMwCgSKKVwXRcZSaOusGSLS1s279rYze8sb6XVc5mohXz80BmWZ3z+x7oVrMoYiY6Hp+a57cQIFWlK3L+4/kLuGz3H7vYmtjdn+diBUe6vnL/AtHg2//PitVy5vI32lMtYOeBTz/SxsTnNazd2J/w2yU+Hp/nYM6ZE2OzZ3P07l/DNk2e5ekUrl3Tm+dJzA4yUTAKZdy2GSyFdaZ/rVrZzyT1P4FtQDGP+7tAg77toLf95diTprtZGfF2aJO63V+T50NMjjM4V0Upy274aT736IralNU9PFnnJ8mXc2zfFs2PTpNMp/u34OG/b3MErVub4m4OjoOHIvr0kvhDJ5ms2PrTA8z16168jl8sxdW6SsBaAbZNK58jlm8jls6xcvpGVy3uYOTzC3tnH2bS7k5Wb2umbMoHbRNIk2hIFTC9UEdokZdlUiq/sXs1fHzvHd07PUQokf7yxjbdubjfySclaaQnTqV5PyOobRRhGRHHMifkqTZ5Dp2uTDCk2NfuMlAKKkVxMZKkvN5pSFPOhvYPU/ek3t6TJew7PzlZIpz1OFmtc1Jkjk/FNkCOSpk1hzDu0BjflUSlW6VQdbGnayon4+cY40bFlmj0ltHe1EoQpJiZmDcVAR9gCdBARhQvYtoeTVAtkLFEqMtuoNlJlGiMLpYVCyhhLK6KaRRxFeK5HZLl0dXey6+pVIBU6KjE1MkulUsHOKObmJdVaBc+K0KkKcfskVgYy7RFKNyEskCwG2dIx3f6ub4PtUQuihDpyPgC6GJQm63f9H5Lqhf1CVDSBOL/+oh0Uo5jtdz1GW8rlOy+7mLkg5u8ODi5uBCw5GfD42Tledf+BRjCxoz3beEWL7/L90xN85KkTDC5UePOWlXzt+p289IdPc2jW8NalNNWueiDwycs3sbk1x7Xf3Us1kvz11dv41ksu4mU/3MdMUMOzLH75BxdyYq7EZfc8wVwQ8dI1HZRj2fhajuM0EFhhO4Dk2FyVZs9hTT7FSMVIJ25pSjFcrFGKJLZlLJVNpccAD4Uw4kN7h9CY69/ckuaTl9k8NbmAZQl2duT42cg8p0tmcJ+cL/OToXP8dk+nCVQxQIjWkE+14NipRHarbnCTtJ7qBts/KRYJHK2JkofqWCB0DJbN3169lnIk2f3dQ7T4Nv/2ok3MB5I7j0825pMlIAoktpPi05euZWtLimt/uJeaVtx++VZuu3Irb334IFrDK3qW8aGd63n1Q89ybLbEmzas5O4bLmX3955s7NHNnS0MjJ+mWCtA0aFaTNMsfEZO9+M1gQx8Bs920Zau0dS8hmyum7RbAt8hjizm5gqkW7Koao1aNWDTtlUMjg0B0JTPGoqd1gjbolYtJVXIZD1ZWkCrJzdaYwmB69m4rk0cxThi0dVPgAG+HPjSZasoRzFX3neIZt/hmy/eRDFWfPXYJNGSBqS6hXpn2sOzbfoXakitWZnx6M179BcCEAb0OLcE+PAswabmFPNhnDxvQaRNGB5pO1m1oc33uP3Aad716FEcS7CuKcNsLWLvVIF/PWWSmnrPzheeO82qXIq0bbPj24/iWUbOcW0+w8n5RerAmnya/kKFY3MlpNbsm5xXoVRLyAGLx3seO/p/7Oxv/vpPl1qu8I5HDofveOTw0vcc/z+9f+nxGwWqKc+hFkoq5RJKS1zHNo1B2gSlUi7xvNcaSxiBnTrCZdsJ3O9YDdmPeobuuBZ3XrOdUiTZee9jtLgO97zkEuaCiL87fAbLMk0R9ZVRa/j05ZvZ0Zbn+h/spRLHfOnKbfzHDRdz3fefRKN5ee8yPnzJRl710/0cnl7g7TvX8Y2rNnPdj59ldNY0SbQVYWoypDBvMztRpjQ3DXaJ0fFyI1AdHdKUyoonHhph9eouyioAXUMrSVydpKc3h+WaoNK2bGQcNRZus98JcvkMcRgZlNmycD0XLcB1BLZ2iRNtM1c4plkc28hvadEYZLZt+D7YLNkhwLYdKgsBURiQ1g4qDonjCGyFsl2UlLiAlopydR6ETcpv4uzkAv39U/iOSxhUsW2jYek4RstOWEYXUUqBsByCWoSfBinBthxyTSnGhyfoOzLGhbvWMz45SjrdDEpiY+E7LjJUhnuqICUcUo6mVotQWmILC0vYSEsjEs1MLQEdJNm/xHV9dKywlUFFhRAQaxxR19A1x+d39NKV8viTp04wVo1Yk02htCLrOLzv+QH6ixVWZ1Pceekm/mL9Cu44OdoYq6/t6eIdTw5yqhCSchzON3qAL1+9lZ6cz6t++hxDpRpr82l826In5/EXjx7jxFyZNdkU33rJhbz/wl4+s38geafgjzd384YHD3JyrsyOtgxpZ8m5BWxry1
EIIobLNTY1pUjZFgdniqzJp0kJqCUovYxjNJooNHI/cVgjrFUAjfLNdWxr9dl7djY5tdm0NEkHJ3BBe6bxO9MoKMFOeJK24U7HoaJYVMzP5sjn0khZoxoU0FIwPT6CsBSWZWPbGdqWL6e7ZwUXr78BZkqcOzXNxJRpAJgPI34yMssXd63jw8+MMFaJWJNPkXIdPEswXwupRRGbmj3evKENAaRc16ByJFurMkmwEMZJBozxhwBGigFPT8zzyYva+cKBOZo8h3du6eAnQxNsbP7VUhaYBqxQKmaDiNW5NB++tJf7h6aI4gV60vDk2FnesXUZN21p5gdnJljblOEP1rTwtwdPs8I1ycs/XNi/5Iw+cEXjp8Hf2/1rP/f/0+NgPamqmFV8kw0E5r/ONJBQE46/UPLQNDR9P/lpbSB5y9Yeru1up9lzGS3XuPnxo3Rnfd574TrW5NKUY8lPh6f46NMnqSboy3OvvoZv9Y1x9Yo2Lu5o4r2PH+VHg5OA2Ry7sz5bWrJcv6qdP7x/PxLNfBjx7VPj/PkFPTwwMsVCKDlbCRvI35bWDFnHIu1YXNCWYbQUUnhB5/EPzkyyuTVN33wVhOIbx0e4+cL17FneypHZEqpe28RowhbDmN/vXcZ7nzjGbDXCty3ueO4M+157FRd3NPHA6Cxv3rySFt/l9ucHmE2CgzMLFcrRorObZdmNdUMrg1IOLVR4dGyej1zaw/seH6A5bXHTjm6+dWoKkSgkgG5wpjWK1XmfQMJUNWR9c4rb96zlnv5zDBQDhIZ9U0Ves76Du05N0leAjc1ZfndNJz8enGRdUwZbCL7xW5fw4WdHWJnN8I4t7azKelRjxYPjC9x6YJyqNAjv47+/mbtPz3JlV5YL2zJ8aN8o94/VG0E1SsGaZp9rVjRx3Q+OUIwkZam489gE79q+gq+dnAKM5rGwbZSURFHMjWs6+OBTZwjiFLgxn392gEOvvYYd7U0cWijxit5lfOfMBEfmy2jg3wbGeM/2tbxhQzd/fcRUf2ZGx4gXyriOQbNkUCO0ZrnuhhWUrCo5u8ID9z/I7msvId3sU55WtLS4TJ8tkUlncR2frNWM9ANUtYZeYgOeti1u2tLNDV0ttHlJA93eE6zI+Lx7+5rGmP7ZyBQf23eKciiJHMXTv7+V+wbnuaIzw47WNB/ZN8r3h0wzp5JGWrK3Kc1VXTlu+NFxykpSrii+enySm7Yt586jk1iOBYnoRM7z+PBFa3jF2uU0eQ6jpRrve/wEPfkU//PitSxL+8ncOsfHnj5FJTaBzv5X7+FHg5Nsa8tzcWcT7370CN89Yxr56vrSnmWR9xyOz5WQGpRUTFUDlqX9RH4xecp6ka7T4rmMlGrESuNYMFEJ2NqaI+valBPaqS0spquhQaM151U2lh4C6M6mvBbfsW0hRCmScqRUDSOlGw+iI+U5nWnPdS0hAqnUWLkWlSL56xfp/+b4jQJVWQ24eWsHv9u7jraUw9lKyIeeGmJ5xuGmC7qNsKyUPDAyz6f3DxMm5beHrrmOe0dGuaKtjZ0tzXzk0FF+PHHW3EDMDVydS3N9dwe77nuMhTCkUIv4+yODvG/nOr58+MwSK9T6DdO8onc5733iKDO1CCHgcwf6OfTa67hiWStPTc7yit4VfGfgLIdnFmhqTfON48P85bYe3rBpOX9zdBDbdtl2sc/Bp56ntS1H//FZtBT0rGumZ9XiLXn+kV/Qsybi8FOPEo+3IbI25do8175oF6I7C0mAZ3SFNMJSiMT1SinDSSWSEEVGN9JKsjml0VKhXQcsiSFfemjLxijDWEY/VOnE7MAsdHEcgzZNQSZLsBDaxhYunuMjI7AtzygRxFAM5mnJpHBcgbByaDuiOd/Mgz88QXmyAG15gxZZdtLkZRGGAa7rApowAMtWWMJNqBcmI7QtQSqd5Rc/OkBLxmfztm4WCgtEkSTSFjrS4FhoGZC287SnWvEcQehKYh2jicx5yJkAyjblFrRpNlNKY0mBsjykkvi2g5aalO3RsJXFTLzf7W7nxkePMl4zJgXDFWN8MFozSJVluYxWI+4anuQPVnaABtfyQQu+MzhHf0kiLIdyvFgWSiYZN/Z28cafH2A4cYk6U6ziWQIbSX8hQGs4U6zxjeOjvHajae8sRqab+5snxzk2W6I74xEq1WgSL8eKFs+m1fcoRjHtvotrCUIhWEg2ybxrUQkMb1Al/Nk4jnhwZJabLlzFgakFSlJxy6W9ht/mmefz4Mgsn7h8LfcNTHO6GvPGLctYmfMZLNYam2wcxgklIsJyFNgWSkqUMnNsYW6GsKuT1uYmypUK6aY0maY0rgOuB9WFCudG+pkYGqCru5ue3nW4lSyXbljL/RiK0ieen+I927r499/aSItnM16J+PTzE9x6cJIP7FjOZy5ZyZH5Gj8ZL/LK1c0EUUycoKFxFBKHIcIyqJGqIxoChGXcV976i+f5wp6tfOellxJIxbf7xvjs/r4GAviFK7fSk0vzhgcMx351LsUdV22nLeUxWwu5p3+cO54/3ZC+Ojlf4g0PHODTl2/mf1ywholqjS8c6OdfTow0xsP/Xw0s/x13/v/t45ZLNtCR8rj58aNMVAKk51CpBOQ8m7f/8iCn5iv05tPc9eKL+cBF67j12f7GPPnjzSt5wwPPc2i2QGqJ49vKrM+5akST51EIIp44a5IYW1g8Oj7Hxy7byGQloiPt0JV2maxEDZQo7Vhsb8vzvd+9jFIkeWRshnsHxhNEV5OybaTWhErj2bClNUer7zBcrBoKlFY4ONS13iYSKkN3xmN7e5ZQ6UY6unt5C+OVKjesaufMQoWP79rIZV0tTNci7h+a4suHBhvXpJWiHgvJOEkgteKmR0/xhSvX8ezrLiVMyuZfPT6ZqJvAX13ey8qsx5883IfWmo1NKT57RS/tKYfZWsx3+s/xNwdHEl41/OOhEXKO4K6XbKEttZ25IOLh0Rm+dWqM0XKNUqz5s18OAIKUrnDTowX6CjVW5z3+5foNvHNLB7cdGG1QL/5oXRtve3SQkws10o7dQBDjyNg+b8q5FMKY/pkStmMjLJujs6YhLGsJSrFKrJgN/c71TD3LtV18N4Pr2TQJs25tbM6AMtUZoXWDYmQoeLCjPY+f9giDmCuueQleqsozzzxNc9tyPJFj664Wtm5Nc3a+Sovn03VskqGxWa69dBUdqQqlYIFatUK0MI+QCuEpUkETkRWQzS+i7p++YA1dKY8/fvoUI4WQrqSROec6vOOxI5ycM4DAt264mPfvXMtnDwwgVYjWmtf0tvDWX/RzZLaMb9fNQkysIuOYtSkohDF9s0YDWwvNoXNlenIenpaUwsXA7st7trAi4/Gex46wb6pIW8rolTb7DrfuO8XeiQI5z+Wul1zI+y9ax637+xGYqtLL1y7jjx44yPPn5pK5lXwPUZ8nZh4EchHFq8YKz7aMnnwDCf51BJvzD2MzrrjzyBDTtYCujE+T5xArzXQ1FOdq4QvfwspcykvbtnVyrlyTWuvVubS3vinrn5g3m2WL59jLM747UCgHValUR
8pz1jVl/ONzperSYPY3PX6jQPVzl3WzIufzpl/0c3q+wpqcKYNkHMFfPtZPX6HKmrzPN160mffs6Ob254cafPzXrlrFnz55gFOVIq62qWuvGwRes60pRyGMGCxVDRprWRyaLbAmn6bJtSnFiw0yQoNOyneWqBORTRctwI62PE9NzZpHozWptE82k6a0YEjt29vy5JrSKAU7r9rKJddtZWq4ysTMI/gpl5e8fA8tKwpwwCxuv/X6reRzKUr3HKClo4nt129gbmEey/GoBgo7+VKObROGAShQcYTjGMRQWZJQSWxHE8oAS3tUoxDPtvE9i0pcQekQIQNsZeH6NipxK5Z6GD0AACAASURBVDK0U2UE4S2LVCpFGIaEsbEKFVqgVQ3Lhmqlih9Yhl7haBxH47sW6VwnUWjMAxamI+YLc+SzAWdOT5PKeITVEK00TsZJAiKBa7uJDSem0SpM6BTaNB5JGYMtcV2PUrnMj3/0BLXytSxb1kzXynYKlYBAGMQuFhLh2YRI4lqMZTkooQmVJI41wkoQB1vjWsos/kJjOQ7CsdGxAG01Gq1knZyflEHqfMjBcq1OlUvGlmBPe553rl/BumwKzzI8tNkgRldtaqHpgBsvR0ipG8jj0ipkT86oXYwm564fFrCrs5kv7FnJhuYMvmW6yKeTLsyzlQgNBDJmY3OK6VpMk7aQyS5XiCS1WOJZgmbPJWULSpFCak2Lb6RVytLwoaQ0XFWljG/6x/cO8PEr1vKTV1yMJeCfDo/yO6vbmAslwrG59/QMnSmXf7h+E20pl58Nz/HY2QLzQUy9aUxh3Mw830MREsuYODR8NmELqrUKpXKRXEsrmXwObEg3tSCETca3ae8UdK2OOTc5zfTYKLMz06xas5a1+UWdmpoSfOHINLcdOYfSi/zdp6cqfKt/Ngl0TAD+t8+PAaYz/rd+fCLhl5PodarFSg3185h7/daHDzZ+H0iDxdYf1HsfP5Es6GZT/cnILPePPvmCFdo6r3nhsYkif/noEfoKSzlbi8HXVJA+T/lBCOhwTWXrXFhvrErS7/PK2bpRNq/L5plkzFioSq0IZYiyJI7ncMkjB3FswfreTkqFMgqbUi1g2fIurr3hAtJZo60cxgqVoNCuZTM8MM2Jk1Ncsms9rU0ZHv75EYSy+b0bLyOT00SRRCmZ6E4L+O6TtHguv7Wqgz94/DhzSUB3NoyZK1UYLC9W7U4XK/zziRFet6Ebnu1r/P7fTo5xaHYBgaAmtdHLBZo8l+emS+zqslkIY+p7d6QU55INIGVbzNRiWn2HCR2Z+6Thu6cn+duDxoxhdS7F7Vdt4XO7t/Di7+8DYZqHVMIg60h5/POLLuSfjgwzVq5Rbw9MWzmqmAqDQvPzkXP84bpuvnf6HOVYcsdVW1FaU44VR2bK+LbF7uWtfOjJE7z5gYNc0Jbn3pddwtGZIt8ZmGg82bpahFHHkAhguhrxtodO4LieCcr0Ej6o1nzwydOL4x14cGSWB0fmkuvVpnogBMIyrlZSKT5/YJjbDowQ6yK2EGxozjJerhkqg4KBWYOKDsyZyiXCYrgQ882Tk/zh+g7usM3nC+DugWmOzS6AFpTlYnnZsgyNLO/ZFEOJ69qYBlpJITCvMw1hqvHdZTIfHxor8OdbOnl2qkg1lHzgih6U1mRcB4Tg52PT3LprI9/uP8uxuSJ/urWHVdkUZ4oVglqEEDaDp4c5OzGMlXEpVuaQwxVe+uqriZnDczzSrQ49qyT7+ycZmVpJW1MbsXSYmBhAaIXtQWH2HJZw0AKqoZFaq4MX19//HKMlcCyPGVtQKM0xWKoYQEYIBos1vnFihNet70br09QFUv6jb4ojM0Uj3chiU5lKVE+afJdSJHEcYXSIpaIYmv2o2bMpBWZ8d6Q8XrluGdd99wkWQkln2mWoWENpkErS4pu+gMFilW8cH+N1G5cDgpaE4/yvJ8c5NJ1IMEqFY5lQbV3ep8kV3LCqg7dtW8O1391LZ9rFswTL0j73vmwXO+9+nLFSDSEsNjX7zAaSnGPx0V0bWJ5J8cYHDrEm7/K2bb28at0yvvjcGe4fmuCCthyPvPIqPrj3ON87PcGe5a186cZt4iN7T4Tf7h+vI6HHAR5/5Z4Lb9l7/ExfoTwP4FjCe/o1V+/4w/v3jzw4Ol36+osuXDdRCQrveuzICBinxcdetWfHfQNn5z729MmzZh7pXfyGx38bqLZ4Li9f284NPzjMwGwFhGawaLqM67IcCMGZhRr/68RZXrO+i9sbbU5wz8gofbUitgOxVDT0vhOOSNo2i5llCYPqaE2hjix5LmUZJoGEaJS9fzZ8jnfvWMuz0wUqseIjl6xHaU2T5wCCnw5P8dkrtvDzuQr7x2Z427ZuVmV9BksVykngMTA0SkdXJw/+9CDbdiwn5Wd46Kf7efXbL8KU0MBxs/hejpfceCn3fvkAO/f00t7SQqUWYbkVbO0Qq5DIAu1JLOHhWy6owHSw+xmQsbEVxSJUEZl8GhVVCeMqbiqNZWVwHBcRZ8hkUqgIojDGtW1QYQNNNRwpYbx8pUbJGMf2CGSFiAK59hRKeZSDiNmFKsXpMrWFCebOFQgrNguFBWRkY9uScrFGJu8SJ4mSlAG2bWFZCsetAybJZ7ggiI2PfWS4VlgaN2WRS1nUIs1PH3yWVAquuPoCWpe3YntZAi2pqoBSVMYJIjxbI2NBLI2hAdrF0sogqpFG1FkwAgQBRAJ0jCUE5Zr5PnEUG+Q1GQh1PmRP2mGgbNyPlNI4wFcuXs/tJ0a5r3+OSFu8aX0nb9nQjmuncFIJSi9Mw461NK5IjpEERV2TT9O3ECQdx9Dk2Xx29xY+uW+Afz81Ri1WvG3rKv5yx2pzLzVIpRkrR5yYr+HbghUZl1K9g12bzt5fjM3whT0OrmXjW5qpasw1K/IMFauUY21QTm0qE0pJUJqiVNzyeH9yGs3mlgyf2b2eJ8bmGtzlrxwa4Z+OjOOnUqAkT7zyIr58aLTBx9JaUQtqplPdNtqdru0gLYnWFlEYUlhYINPcQjaVJVDmvK7rEAibMDBUkqZlXWSackyeHuH08WMsTC9y4cNQNoKzuo3k0iixEcclzx9hoXRyzUo1GiLr47D+/zrq9CvHC3+naQS89QB26fuWUFgXOWpL/lEku5YjBCsyDlnH4vu/t4PvDc7zN4cnGih34+MS29uUbXHzhd38dk8zeddm31SJT+4baTSygeD1G7r40y1ddKVdhkoBtz03xtOTRfMFhObqzNW0rs+y/qouxiemmVuoMlus4Hs+IuokIzLYqkoqZaNtTSxtPM9HdlqcOxvS3bYKiwi0g+c5pLwUhAor1liE6FhR7y+sJ3pD5VpDntSybISwuHZ5K7dcvI6NzTmD0ggayVj9yocTJzh93kM1RyAVpShO1mSz2Gddi21tGQBW5TzKcdKNngQDQsDxuRJzgUzOX+O9jx3n6BuuMfOwUEFqA1IsS3vc99JLeGR8hs8fGGBjc9qU/bVp2F16fPzpk3z00k089IorEAK+cmiQl67uZDa5nlIUM1aq8dWj
xqzi+ekF7uk7y8t6u7j3dJ3OYC2W/uvPPaluWRgXKqXqgaBuDK5GV3fCV1Sq7vKVcD8bLouiwSttJDuaRkNYff2xLItMNoOwBFd35bnpgmWsy/uN5sTZIMZPp6ln7uNV09GvEYZytUSyzvNcKsqsacbO3HyPfKKGUQpV0uSM2XO0mdeffOo0H9u1mu+/dDOWEHzt6DgvWdlMIYjQKO7pn2BZyuXr1+2g3Xf5yfA5Hjk7y1xg6Eu2LThx5FGqpQwZWyCiClbeoblpFdJqoj0f4Ts2m3rhSP8gfSeeQUuBq1M05TLMn5sGS+LYrXjNGqlTHD32PJklY3q65pL2HGxLEEQRtm1zVVcT79ve+ysAg8asiwIYq0RYdce1ROYPQNg2nutQjiR5t04/0+fdr2K4iG7WgY6BQgWloT3ts6HZ/G5TS56bL1xDTy6Nu+R7WMIokChgaKF63rK2KmfQ2JFSwMBClfXNNfO5lsXZUBEGNXKuGUspyzz7OnDZ5tsMFQP+4pHj3HrFRh58+WV4tuDvDw/zOz0dFMPY0GSimPFyjTuPDpO2LU4vVLhv4Gz8u71dzpJANRmu4jy0tp7EXdzZlHlwdLpU7zE47y3ABW35DP8Pjv82UK0/+NOFqnH50EDinX318iZuvmgVG5YIy05XIxOiWmayDpeqxBHYltFPq9/8pD+ESpwEmAmXQqFpdg2yVAzjJRyL+iSGjzx9gk9fvokHbtyNAP7x2BAv7eliumZ4bN/uH6c7n+WOC9fRevkmfj42xxMTCxRChVVzUL6kODXLmVMTCFWkVKtBOkXnaoe9zxzj3YlK795nTiFqNdL5TrJdAY8/chirOYP0K7gyR2uzSyrtoS0Nlk2lUiSdzSWBpY1VDXFti0qpQrFSA8cHioi4ytpVyykUy9iWRyZjQ6gpFkrYwkZohygCYRnxfaUUxWIR13FQUmFr8BwH4dqkM3mCss3UUMzMZImJsVlmZ4o4oY2wJLYjENIiq21iC5SlaG7Koa0yKjILopQSz/Wpa/3FMsS2HbRyQMTmuSmB6zQTxGViVcG1PWRkIVPKaE3aguf2HyHbnGbHzutR0qHJb2Hm7AKDp0fxUx4klIaUY2FpG20HWJaFUgKkwLZMKUURGfUtWzSCDiexmrSEtSgFE0b8ZGyaT27v5ZbnBhguVVmXz5LWHp5lUa7ZSHw2Nvn80do2EjY9ljCZvzhv4T5/Vk3XQr5/Zor37FzHdO0kz06XWd+UZm3ex7Us5oOIQCo2t2R527ZVjfd5S6Je3xb0ZD3mgpgg4VpawkiADJeqPDo+w+d3b+Tzz/aT91zes3MN/3rSlDmlUo3NRiVNNj05n0AqJqshG5sz/M21m/j2qQn65ytoIO/aLM/7nCmFtPkOH9jZQzGK+c7AlLEuFSCSDa9SLGG7NgqDWqtkfimlqVZKxGENz7Yp1iqE2qYcxQSqim1BUyZN2vNwvRRX7O5hanyMvr5TDQBSaYnQZlOWSi0mPvUATy/xRyfBHJe0gTeKbcIECH/9/CjztbhBA/jVatbSJb0ekScf9StR7K+eQNSXFwySK5VJjLubfJSGk/M1Xv+zU3zt+nXMVlr52pGzhoqUaItHQYDSmg/v7mVbi8/Lvn+QUiT53J513HntWn77ewdRwI1r2njPjhW8/qdHOFGo8sbNy7nzmnVc/73nOVuJEzvSJtRslrOPRpyanODA1GFwXbRUPPPMIa696kquu3wLLTlQMkKGGidlM+t4+I5NSzaHZUkcWwExrS1NeI5ChcbnUklFnNDE6oneumya6YoJOm3bxXcd/v3FF/Gp/adMMiYVb9+ymr/c0Xve3axXIupl+aU7k28LjsyUaPZd1uTTDBdrrMn7rM5nGCpWeWaqRLvv0JF2z3seJqdZ/JTGM0/+sRpLVmZT/PuLt3L/8BSf3neKrOMSqsQhS3BesyVAMYq5+fFjjZ+3tGb53JVbeHzcJFeHZ4pc3NG8OBgWtxrzscm4EMAdz41QSAKuesCpG2XWemBaH9WLygimKrg4x5aEu4tjP4lvTeVDg73YEOZaglCZykcswdPwD1et4bbnzvKdM3OESvPHG9t56+ZOwiDCTrxjpdJ4rotSkloYUZ+kWkqU1BydKdPkOaxM2QyVDJ1pW4vPcLHGfC2g3nBZ/6pCQCnWfPipQYQYQgjBhqYUn7jM4flpg65LLfnykUG+fGQQELiW4Lk/vJo7Dp9BaU0US4STQrgVI9sXuVhS86PvPYST1jRl01SUz1WX7uSazbD/5DFal2WgGmNJl1xTjiAGTYgOHHw7jZ+ZoDq6OKZXZ1OcXgjQjkWtUkUT8C/X7eYz+/v5Vt8oVal425ZV3HRBLwJDb0MIFALbdhBaUotCrOQ+RmGEUnB8vkKT57DCtzi9UAMt2NJsGujmKtVGklsHOtY1ZThVKDNaDBCWhSvgwZfv5qtHh/i7Q8PMhzF/fkEPN21fjWcJMk6StC+RVRQCmj0TqoXJPvL8uQWafZeOTIqRpBqypTXLSD15XDJRZ4OYqtQIJXlvop6xoy0Nls1nLt/IM1PzKA1HZ4tcmMwDuSSB/3XgwKNjM4V37exd/sTEbLkYxvLTl29aaWTWHBvg58PnCh/dtXHVXafGZvZNzVffvXNtx7KM742UlhBo/28c/22gWn/w61tz9BWqRstIG5Hkb9ywmc8dGOWe030EUvNnW5fzZ5s6zQZDvUyS/JnMwjqwZLJei6PzZZo9lzW5DEOVMiIW7GgzyFIxlixW2+rlPRP53/zEsca6uKUly2cv28wTE3PUl4B/OniWrx2ZQQgL37F57BUX8PdHC1gijaNg2exv0VIpEv52jfnxfqYnz9Dc1MHM9AJ0mEA1kNOois90ZZqW1W3EcYFwooaVsQm8IkFJ43kpdBBQqdSo6ggrsCmWBLmMIas3t6SJrRirpggCheO41HTE9Mg0OnJJWRFSOOg4T1OTxHXMhu5UAty0TSmCnO+AyqJkGavZxyImxMKJYqxmn2OHBqAsEekUruUjpMCzY2wdETo+kV0hnQpY2dHN3EyRWkUbSaK8i+9YWLaiVpEEkSRSHq1NMWHNwtJZUGWKKkapwDSRuSAkCCR2ysIhRttmEbYxCEkkbZwoxLEt5uaKnJuexXfAtnwsp4aUIBJNWT9ldFwtyyHlp5FSk/INn1c4AYIMMk5KptLCwsJxFneR9zx9klu2r+GuPdto9V3OViJuPTTFZw9N8v4LlvGpi7o5Ml/j/rEif7C62SDVyWEJYawU/4vjPY8f5yOXruOfX7STZt9lshLwiX19/M+9J/nUZev566s2c2y2xEOj0/zeGuMG49tmYV6VdVmb95kLYqaqS0puAlbnPDzL54vPDXDzznX84PcuS6R7zvKVw0N0tixHKcWtu1axMuvzpgeOAprNrVk+v2c9bWmX2VrEPX2T3HFgqIGENHk2X79hGz35FJHSPDw6z+t+fpxQmbmjEyOHusxcFMek0j6u5xpr3Nhoe1bKFYJKFT+TpqW5mXx7O1IItFREWuJnskZyrFgh8lJYHd1ky1V+eKXJ+t/yzAq
WdS6nnI4pz5SIZqpYUpHJe/T0rCRcmOP4wBCz5RBbWI2OakO/MHJ1Go3ruziuzcMjZXrcHixXg1YE/lOA4Va9ZWsPVy5rpz3lMF4OueXxPrpzKd51UQ89OZ9yJHlgZJbP7B+iktSg977mEu7um2LP8iYu7Mjxgcf7+cHgLI+88kouv2c/o6WA1fkUT73mUvbc8yy+tuibnufOg2PcdNFK7jw4et7qHSrjuPa7ve3c8uQA55LS6ZeeG+Hp11zKFd0tPHOuxO9v6OJ7Q7OcKIYoBN88Ock7t3fzmvWd/O3BESRwerifoUGLfK6ZfFMLl6UvZzA4zZnaaTpaelnRkcP3NfPzBbT2iIIYuxYwNTfD/HyRweEhbE+zfEUOG8nQyDAp33jAOwaAOS/R++XYNB/bvILbymUmKgGXeOAtb8W3beZDk2Btacnytm09jes9LwwUdWmqxfuxEEaszKYYLQX8cnSGWy/fxPueOEZ3NsXbt63mfx0fxbcE7elk+0kCNa3hd1Z38ODIDDO1iBUZn9uv2sKJuRKDC4aK0JtL83fXbOcHZya5dV8frmXRmcwHkq8hlTxPIH15xufETMy5asyGljR/e+1G7j41xWjJImVl+c/+ed574VreuWMj3zw1yQVteV67oZtPPztKa6YNFcfsbt3N5ELAj09ZrM/2YuVA6hjPTpFKuZSKBeIoSvjzdQUHldwbAwaY67QRQmPZZty7rml6DWoBGogio7pgCYFuNsH1LRdvpjvrcNuBfp6YUPRkbDxhZANnKlUWymU2tWR444Y2E1haViNg0ppEOUSipUrsbetJnGKoUObRcdMQ9v4nBmhLubxzx0q+dWoSlgapIrm5GlblXKpRzHRNsrkty+171nH/0BTDpSoCQ/1YkfE5VSjT4Xt87NINLEQx/9E3ZpqeLNDCRqsIK3aJqeE7WXK5mKBWI6yFxFWLvLuKjTscSqpKNpdnYU4yNDhIKp1GRYowLuCk2ijLObZuWUv/0QkDXgzP88mLV/CJ5yYYLQf05FywsviWYD4MqUnF5uYsb9uyunFxSiQJglREUYiSJnGso+RKmns4VKjy6Ng8H750Ne97vJ9W3+GmnSv595OTCQpu7u90LeR7p43O6fufOMbAQkBPLk3etfFsi8lqwHwk2dyS5a1bDdDR6jtUYrVEkaC+si+ZcUlCM1IO+eX4LJ+6ZB1fPNBPZ3uGmy9cz48HJxdNhurrUxJ19mRTBFIxVTNNpTdftI67To3TX6iwMutyd984N+1Yy9u39nBX3xjrmzK8av0K55Ynj/9KcPmex4+OfOmqbasefsWVWy0BXz82PHH9yvaWmZrxDP76seGZZRnfvfP6HetaPNd5eGx67pnJ+YVC+KtmAb/J8d8GqvNhxI+HZrl11yre+/hphosR65vTpFwbz7IoBJJqpNnQ7PPG9YYnorVGln0Q4LiKTNYEpkobWQwA2wFCxWSs+eXYDJ+4dAPvfeoIzSmPd+9Yy7+eGlmsoPBC/mCaQCkmqwEbm7P8/dXbuevUGH2J1liz67A2n+N0KaTNt/nQJasohpJ7Tp0jUiBimB2Ksd083v5OWuwOVrrtXDQ1gq3thnn5jRMtTNZCejt87CBkLmojsGwKqZDt2XYmCgvMhDGlfI6u5pWkRqaJNjcxm3PxogB8F8/2yfkWTb7PPFArVVAh5Jtd5udtIqdAGoc2kealF6UJfY+gqIgyLiKdZmp6lurINMK1CbN5hGPR61nk083MuVXmazWarSwZWWNCSezYobowx3w1xlI5ClKyTGUp+lmKcY04G4GdRmmHlOchZUjNBemDrChKmRI528fyPMpUsEVIiysII4d5NGkpyOBSFRpchdYeqBpK5PAdhatBuIIgMqYHYJHNWTgCkCSuXhrfT9OaSpPL5ikUSkg0sQqQQlKNbaJYI1RIyk2kcjRIaWEJSRAuBn6lMOavjozzN8fncYWbWMLBofmA748WzxvL/1f/XFKi0Nz48CDAry3714+yVHz0mX4++rQptwvLoJtow89bmmnWX1OMFBd8+4n/8pyR0pwu1fAsEzS95ufPN5BskVARVIJCfmz/KAaxMRPhwZFZHrj7hXJzi3DgeDnk+vuexXEdXNczCKYA23VMoqikaSBS9ZKWIFagY03KT+OmBKVimSAMqQYBfiaNjkIKMzN46Sz5VBZXuNSKNaq6YnR4LYdyIGnu7gFMmXR8YAAdhHT19pDr6KKyTpOOXaySQLi20RiNNLYSICRRLTJdtdrwUi2M3aRG4dg2GoWKjV+2WJJY1BuBXnP/4UTwOoVjOXQ2OXzw2bP0FQJWpCy+evUa3ndxL7cfMo2cQgjeuHkZf/7oaY7NVUk7Fum0qRy5to1tWWxry1IIYs4UK0YgW1gcnS3Rk0+Rcy2KS9ZbnThv1ctdQhinNzcp627vyLF/pmJ0YQHHspAJ4iwEbG/PJki/IN/eRrVYZGpuivmCT2dHJ+vzF0BkMVcuMXx2jt61K8lmc9SqVSxf43ppHJEim8qwvKODTF6zsrMVFdsoEYOO0aqMlBopLCItGrqMb/j5c3SkPXKu0Xg8s1BhohKw/T9+SVvK5YKWLJVY8vs/3keb76KSTe91P3uWs5UAucSUoYbmuu8+ybHZIhqjX/rGBw+wLO2zLOMyVqrxip88w7lqiAL6C2VWZlP0NhmtzdMLZSpxTFfGZXnWRWrNB/ceZy4ICZQpO1dkzGt/9ixKa9Y3L1YRAymR2qwLt125lvV5h1v2GvWbtU0Z7rhqLe0pl9kg5r6Bab64/0xCqxKMV0L++METfPKyXj66azXnapKvHJvmZ2dLOK6DEuA7MUJXsd081aCMqlXRyqZil/E8l1TKM9WvMNEmtpLqjetg2zYZx8b1XDKZdEJvMg2sUsYUCgWkkmhME4xlWUvCfrj50T7+as9qvnzNdvKuw2gp4INPDvChJ/r56K7VfHHPOg5Ol/juwDSv39SFQBAnTjRSSoIgBK2T9cvcI7kEqX73YwN8fvda9r/WNITd3X+Ofzy8qMf++SvXsiqb4s0PnsCyYWNzis/tXkd72mW2FvOfZ2b47sCgmQtakHMsvnH9TnpyJmH++cg5Xn7/PkJlutNtYSPCGCvRLI1qmmwqR2d7C7XqAo7t0tzdSbkwzczMBNmMRVSbIwocWpq6QMQU588hpUClYnJ+Hqu2CEC855cn+PBl6/jGVT20eDZj5YD3PXWUDz59ik9eupE7rtzK89ML3Hv6LG/cuDIJMOv9ADEquXe6Tjmk7mpoLL7/8pE+brtqHc++bhehVHy7b4p/PDzWQMOXZUzV7V2PHuXjl23g3pfuojVlgI4vHOjnM8+c4l071/KxXRt57twC9/ZP8KbN3bT6NuOlX1V8emFVSAgQtuAdvzzMV6/bzo9uvIxQKv6jb4y/OzRMe8pBAF+6ajNbWrK86cGDCAFb27J88crNtKc8imHMXX3jfP7AGcP5FR6T1ZA3PXCAT122mc9csZmZasgXDvSH9w6c/ZXgci6I5FsfPjhU//mijqbURy7d2PPwmCHWauDW/X0Tt+7vmw
DwLUscfcN1O/7+0ODZX7nA3+D4jZqp3vtoH7dc3MN9L91Cq+8yWgr40NNn+MjTg3zkklXctnsNB2fKfH9wltet78BxPGMao42kUWR0hc0C7iYZXcUnlgFC2PzF40e5ffdmDr7qOoMs9Y3x90cGG59/x56trMqmef2Dz4EQbGnNcfuVW2lPuni/3T/ObQfq3D1o8l2+ev0mVuV8IqV5aHSe1/78BLWk5KURLJTnsSMQBRc/5ePv3EDLcBUlQzCGIPgTGdbiscHN0R6VeGyiwpr2DC3VZuZmF1inAy7vzlIVHrOjM8izFTrbVxA4imzkMTqlEdmITbQwP7dAbXWW1oJg/bkKtajGeNrDaZdkzsU40xLPr9Le6TISCazlTVh2kZfQxjlqFNeloZCnzS3BkMXCuZDVF/rQ0oZ4bpy4WdLa7FNREWum21iwYVIU6KrErHPb+XZlgZa4yoVOFq0VZ1pgXYuLOCOZ9CVHkGxclsavVXhauKRcuCbnMjovOKpmaXLhxdYKxtFM2ApXlul0PMpVSSXVRV6GqLRFaGWxJHiWQ0ANy7aRRDh2Fik1tpsjk3bMmIigXKohI8NJdISPLYyTSsqyUcJCKwfHtfBcpjq1DgAAIABJREFUB8/10UR0Lsty8rAJ2DzXJWtncC03yWZeUEZcylVsHEv/npTjXsA7rJ/AaCCC7dqNgLG+cCxJoBfPfN7vfl3Z+QWvX/Kyhri0VMln6KQBRy+NR5f+9ddcz+J5DERpNWgOAoFrGQMDLRVaaGSkjGmHNC5oru0Q65BapYxuaSYOQ2QcEYU1iucmcf00URzh5zIsVMqUaimiaoSTdLQCtPR0UysXqM3myGab6M0tozxTZaYSUVQFpscmKVfKKMtGysi4CDk2AhswUmwN5Fcuyt8pFE5Suqw3Av3Jg88xVg4RCM5WFem0zRMzIZEy922orLhrYJ5X9rbgHJ9tPLu7T89zfCHG81NgC5xEM9NNpcni0pnPUooVmVQatMbRglIiQpl1rIZCA4DjGs3jh0bneef2lTw/XSESgg9cbFyHmnwX27b5xXiBD1+8iu8NznBspszrN3SwMusnXH/zwNyUh59qw0m5lAoLjE+P0xK10tu6kebSNCf3DVIRNTas6aa9KUcsK2S1R4gg0DEVHUIIYU2BpbFEnFSzTUe3khqposZ3DxGMlSNcy4AmkbJwLUVVSs5WzgdSJpb8fGzu19tEPvLKPVxy9yMMLFSJ0CChFFV+7WuB80TC/yvR8KXSbsOlaoMbWx/1L5xlH957ht784vPZOzHHN0/MYVs2jucRx2FinpwYxgjBk+MFXvbDI/ipdMLtWyzPy0gyVQ6xPR8dReigZsxdLI3UMWEoEZZGODYtLa34fqqRlJBQq+JYEgWSuWqFOA7RSCM/l9isWo5LFMX4fipZYxT1KyhFkq8cHuQrhwc5NSuTryV4emKBu/qmEMLGsg0l6h9PTDfkvvZ893AikZU0bKEbAarhgxuznHOVgLc+dOJXOIX140NPLvqNy1jwwPAsD43Oo6TEsmwc16E3AbA9VzAjJTfcv8+4VqGM4o0jaEl5xNJBads4IHoeOgxxbBvXg2XL29Eqg1YOFg6WPc+yriZWrWojn0tz97cfYnoqprOrBWMC5P1v7t48WNPsru/7nOVZ3u3uve89+6LRgjZkYQSUSDlxgokDBlMUBicoZZK4DHbZZnHFQaYgpmwBDtgVB5SYXWDATlFIIghJSIhBaDSLRtPd093Ty+273/vuz3K2/HGe9723NRIekrhQ5Uz1TM/73vsuz3PO7/zO7/ddKCYFp09e4lu+5Rv4n373D4CorPKDf3iNH/xkXJtCCCZuyNPbQ/6Pq3cOrbUD/PizNwCJEJ63/eofNwgWMccWz6BhUYXEo5Rmv7Z890euziffLFYJIg9nJg9VWfhHT1/hHz19BePvd1P8yedv3zd//+kzh9f49b/yB4fwk2ZbmhHcUhnnbOI9wxp+6OkrrE8ME1ORSsnpTkPsFIK/+8mrPLKYMetGf+jOPh+684cAPLmcc3VYYRtS58hE44dPbfb5un/3Rzyw0GK/MmxNq1fbMAIPLXbSwrmwPi7Nk6u9/H/5i6+7+Fs3Nvee3xuVAMtZos518+T5vVF5op3pH/3KR8+MjXP/64u3d7/4LPvTx2tKVK3U/MizG/zIcxvxhNHM6M8e1Hzg5mG7nRD4iecb3T4vece//RMCoNUCITi8B9f8rikVtmhTCcPEGr7zo8/GjcnH1znq0/73PvXSHG8mEHz47i5v+MDHgcOkIfqgxyzh7qTia37jGZqja3PT5SEwWgQm4ylKCZSrqUZDRp9J2V9JmWweBuPfuvcSl04pfvm5wKO2Zn8l5V0nl3jm9gG72vF4q8Wdl/Y4MAM6TyiyBxbY/vDLjPyEYsHx0MICbd/hyv6Yvujj1hV2v+Smh1dsyqWsJG2d4tatfYrzgdWu4sU/2eWK05zOX+Fdq13+5IbFPlQxubjMiWuOjbtDJqXlU5Mx7+h0Of0yFGmH9lcusLK5w8MTzTPPDFnqtXnTuTY9K9ibOJ5qtXnn8TWuXB0RXOD1+RLhrucPbuzztsee5K/m+xyYhKrynM8CJ5Zyzk8LXhx6Hr50nLPVmEfcCtcmBZ9Id3lidZWTE8GNyx123JCvKlfwOXxiv2QcojVrKhwnVlssLx3D1o5iOqWsCkwpkCJFJCmlKcg7Kaa2WBPIs5xUg5SBJO1RlTGge++ZTgukTNhcP9z4pElASmyINTghYsVKCNFUJ5q2t5/5a88IPrFCWhsfq7zEoPQvPr8zB6H7BuaiEjXHVh7dFg/JPjNUJXOG+xf0zIDAj33mRgw4R3bWZvbOgC0EaMhm4X6c3Px0/8VRl0ffa1YFiEYBUZPUN4c0TyTTCCXAgxIKnSQYU1NMi1lPkHIyxZgKoRIyKcm6bbzweOGRRtBu5Ug8TgqcT3DucN20V45HTK6DzfW7BCFIk4TR+jrT/Yqhr6mCQfqoyYwPuKqORD0iZCY0eD+dpGg9I/IBIoasGXb+zjhqKKZa085z0Jq3rrX5nseO8cBCeh/JJNVyvqlslfH/1Vxi7vA6ew/TEEkSSicRiGAs3SRuNuPazaWK4nWO//OPP32b73/TOf79f/YEUgj+9UvbfN1pz16jrvFrN/ZZzRLe947LLGeaD9054OP3BvRrO68UJ2mGlmC8wwpBNZ7Q7+9j64rja8do1VOufOQKdy7d4PiJY0zNmLMXjjPeM4wN7JmCA1/grECoiEsjREMNqSJPYFZ5hFhIum9C/YfPVn+msZanvO+dj/OuM2tUzvPzV+/yPz599Uu+zXc/cZ73PHGBtTx2BJ7ZHfLeT1/lmZ3h/Ge+7uwa/+Ttj3Cx1+bmcMoPfOolfm99b/68tYYvPLxFAXhHqG2U+SM0LndRgkhphZSySSKJEoIhrsXgHdarWJF2FVIpQoO3ly7GD+8iKbAqK2xtopORsVFofv5t/XxeIxptbJpuUROTCEfiyHwcBpQkyRBi5jbVCPwH33y2MIfYzk/LR2wyY2LabPmCSFoUh
5XI2a/Mt0xxJAw1/wrBo2SsDltjGg7F4Wc9cWK12WtVPFKKQAhy3rnSiWY8nrJ1b52lpdOUvqKuKp584vV821/7Hm7euE07WabTUeAMxlmyrAc4fvkXPopSNdNyiBCe5ZUlNrY3WD1+gkcffdP8MwjRVECbCm7AI7WYFx1mLfr7WuyBhi9xv0rHDKM6O3TMk537MMWze3T/8pkR6OK1/0I90hlG/9VRfWbIcrTlf2fcEIe7GS0tMD5wb2rZmBhOtDSqrSmdZ1A7lrMjn/lPG0e2s1uj2JF6YLGNDzRJ6mF191w3T1MpxfUGgvPUaq/1I1/56PmlNNH92trfuLGx94OfujIvwy+lWr7/697wwKlOnlrvw8fu7Q/+0//zj65MrQv8PxivTUe1IXMcAt1nySHwhSHn6M138YJJlRN83UzqxprTKVylMNow81o4WjiS4ggQneZGhnmdCTEDqs+GmM+3GHw8zQYUEfZC+FkdDCFiuyG0LSmaoEGUnmf7KUtPLM9f8tvf+w2cPHWGjXsD2rLHyukV9qt7DD9/i6X0FHnaYuvGOotpQu/hHuVAMT01QqoDTp1qU9qCwX6GdJ6V3kNkZ5e5ffU21cCy1vXkGVy7OyK5dJLjX3WM/UnNzbCP3R9y+o3HePqW5dneBm94rEdNh5fSLe7YMbsHlu5XLFE8ucQnXriCXL7I2gXB+lTRf3bEPhPe8qDm9iuvMHAd1vcLHj+2xm/d3eET2wUPn2zT/vyQsjIMigP6L7/EG1da/Jvrd7ngMi4tt+mvt/mjqqAeT3lcn6JT1PxScZ3O1POfXFiiYz139gUPP3CBtzAhv77NK3ubvOEtj+H+wuv4zLW7vPKZLVpty7QcM50e0E57LCy2sFaAVzhvyXRC9F/1BFFjRI1Os4ihlaCTZsmKBKU8IVRAPteb/P6NR7l47BFcUNQYFLDYatFuJ4yNpzIOLGQiKlXoVFEbi3GOwmUcDEuG5SievI3nnjes/41jAPyL6bt54uuf5B1f/3qe/XfPs3vnAGcD47IkmJqyKCgMlLVlMNin3x8iXEAEcEqj8jYHgwFlaZEonpnAWy9alIRfemfsmnzvi4/T6+Y8cOE8S2urXL99l/VXblO5OpojEOjv7zEaDDB13eTLfs6Sn/OTGriA847TJ8+wtLzEnfUNiqpCaIWvanyosbZurqdo2lk+GlUQgzohspJNXeOMo91tU5UVk8GQoHWsusqE/sEIoT2tdpuFbo8kBYjf6dSxVSYSzHSKyBJ2DnZY7C5y6uIaG+km5brGuxTRishZ46I8UczVBLJxI/Pe4WyN1BrdtNdn7NIZdv5sN+fmyNJutwjRVJyffsc5fuy5LT5ws08dAt/x4Arf9fBqY4s4KzSL6EDjouHErHKitUJKz0v9koVUcbadsDGp8c7z+GqbO6My+rsf2QVUoyE6dfAPP/VKfJ1E8+BCyg++6Sx/uDmcE+J+5nMb/MsX42Fei8AffMNTvO+5u3FTkgLjPHVtQEhanS5ZmmGHY8rRmPV7jpPHT/Bg+gCv3Pg8n99eR6QZewcHuKmllSyws5+TLXqcSfFB4EKNCNHmUwrw3N/F+2JEiaMx9bUmrlqIuS7t0a3oX3/NU4yN5fFfjFCCX/9Lb+agMrzv2Ztf9HU+fHuHX3t5g/3KkEjBe564wC+8+008/ou/D8DFXouff/cb+dsff4HfvLHJX7l0kp9/9xt5+699Yl5pjUnB/XbIUor5XD90V2feIXE+4L1rSH6NUkUQRBeWgDd1U4NVIANZkuHqCt/sXTEJDJRl0Rwqm0RFNPtQk7SEIKKBCVENRgTR6AN7lBAs9Hq0Wy10ori5/9zsIx5+D53F+2hKnIkJ+eH7zUw9DhPdWfEmmu9EXOz7nrvH2B1WjQ8TuMM99Oit92GWNsX9NybqUSFiRsybjSztEeayGrMNXUZcqg+4EHkqvaVleostsrULXFpZoXPsMv/bL/wRV6/cpL/f5xv+8l/g0sU1qIaMx3c42O8zGExodVOKakoQisqW2GAweD7z2SNOcQ3pbJZsH8lIZzecwyLWjKAp5ocTCHNlDELgJ1/YYGxcA086qjxyOI9mlfmjC+q+w8ZRj2EhkGGm8jDLSo7Ml1nif/TGK8GPPXOdZ/bHjOpDNYKphQPj0E393QbNTmnnSerVQTV//cNP/QWHIAE2BNajvA6FC/e/N3BnXN6HSfj1G5uDX7+x+TxfYtwcFeZNv/rxz32p5/+s47VVVOcn1MMLGG/R0YSVw+fvOyXEU3y8A1FU/MdnzElEc3Pj78Z5LRqyh0eKiE8D5qV4ANGoKYsj7z17PyEESmukk5F5PJ8QTbACCJ5OW4DQzRpSYAUD3aM8fx4mMeANw2XG+wkiP8ndzSEbL97m1vUXGO7uka7uIJIUW01pZzniOiiXkHZb4KbooaH2lqVuCysrVF+R1gU6LMFCSr0o2NM97EMFqh3Y2qrwVnPuzRfJA7h2F3Xc8+a31ZA5urqFWREce2NJYUpUpmjnqyx/81OMBgWuTFl+8HHap0eccY61E6uMtvqI0YSONLRczqAc84i0XF47we5miSm2OasFx9eOcXNquPzgcbpBoBeP0e8P2dqb8MipLlM8n3llk0HvNJ1ui0JWXB1XlF3DwcEdpjsj3LikLDXHjy9Q3nqOx04e49wbjzOZ7DEsj5F3LrG/n7A32iDvKHxQSDOTK0mwaTavbtUmJnZKR71ZgkDKFnVlaLUWMfZwzbzusSexY8HBaApptL8srUOJFlZYRLAIPEEqEhlwLiY+iW6xvVdRlBOOtTWZDKh2QndhkZlSVmv5NOff8CRXPvYKo50CJTUyjQxMGTw+xJ+9dvVlQlXx+MMPsrS8iJAwGff5408/T1sqzl04Rt5uYcuSTApOnDjBLKl7+5teR5Jr8izDOtAh0Om2SF1ONK7zdHq9hkUfyRl+Vg0Kzcba4B1DCI2kCkgluXTxHLYyTKuCsiywxmCtwRqLaaxZvasxdhZ8BTMHq9IapuMxWimcMfT39inLmsVjxwghsHxshYXlFQgSgUQlh3jBpNfGDvYZTguSTCOsZToY4no5K2dXEdqzuTGlLG0TW5hLwkBMwGPgls3GKaOEG9FNCw6JQH/n9ZfZctvcHtWcbmlSqUilYGQ8FsEjSznf9sDKPEbMxgxyEatRfr6RzPatO+OST2yO+PtvPM0P/OEtOu2E97zuDL90ZQsvuC9RzVo5zlhOZYrCWLaKmku9lB9720V+9eUdXu5HNnAvUZxop1wflSynin/whjMMjeNXr21HvGqDUQzEilun1SZJU/zqCuXBAQfb+2zubHBsbZmL7Ue5a6+z196kXxZIm2JCYH1/nbZykYgmUhKVxuTIS7RKI25chDnZaHuU8nfedJa/fGmJpTTh3sTwE89dYy1P+asPnr7PmeoHn74SyR4Env2mv8jPX1vnq444U11rrBiDhESlPNxZ5mvOrvHOX/kMwvXoTwX/6rkt/vs3XOBnnzu0cRVaIBOBKS17YwlkLAhIRJQkOt3JOZWuMjaOv/HwWV7YnfA71wse
X+1xdTDhhb0J3/X4g/zkCxtYa3iQR/jIN65wdHzXp06SdduIymMxbNy9w+hghJRJlF1q4DEKQRARFiMDuBAft8E1WD4N3tDKUgpvMJWLrk0hJrozYXznXUxCZ0WT+f4YsaKhed24N4aIG5cS4yztTgvr7Pww/tXv3+Wj3xiJE9/0sdMkytPfvs1of4DWCdYHhBJICa12N1aCnWs6lALhHSE40nyBE6fP8PlK8tTaJttb27S6nYboJWJS27SwZ4dGIWNyLjURW+4C5y5eoNXtUAyn7G7c5sKDl/mpp6K014e/7jvwOsEGQ64D00rRyiW31+/xu//Xc4SQ0upYXrryItNyii0Euxt7/P7O77E/nKATT/CGzc1omWpKQ6DklfVbLK6sURQF42mFEi3Gk4pEtfHVAZ/+488eXq+f20WrDK8cwQv6x+8guzXeCIIX1FXRHO4l3lmEgqwlqfs9To3Oo3STLHrJ8vEz3JTHeevlfZInUoSwZIlqiiigXU7SVowHY5755atYp/jYd/YA+NpfGvF73xr//rc4Q1lJECX4QFso9jYP2Pn4Lqb2HD9xAvdkRrvT4uBjdxgeDGi1u1T5MwC84jJ+6uUD8nSZ450sJtYumlAkSrFG7Chss9yISxw1SYpE2lmhMQAf/cYn+drffomXdwaMQ4lQnke6sUv1/KCEpoNnbf2abU7/Y44/NVGd3fiv/cWiOWlKwCOCxxHZUULI+Zc6PBF4hBakOqMsA1/55tczGOxx794WkyLwu69oHuxcRCwKrq9u0fab1NU2SrdIZY7zktqMSXSbqqrwDVs5toDjZuK9jz7wTVtYEFnMabvL8onLnN2+TJYqrPVUdRRhN8bEJEQGyqJEKEGiLK0sp5gMuDe2bH50G94cLVR/6f0fxMmaTp6zP90iQ+Iqw96wpD3q00pXSFNFP4zIaonPNXqoSXE4N8ELzRa7BJ1TFUN0npDVmsrXyCyl1xZMDRRVybF2DyMTJuMCrxxJiI5UmsDUlizpHpMMVnuLlHVN2Z+gdEqaSUxlMC9EnB+ZxxvNYvsOqZRMbYJuC3JhkO0uk/EOu9UBWdKB9nG2iwm39guEl+TiFNu5Z31Sstg7SbpguKUdWZ6j8pOcW1xkcSlhQ1vKoWNhqUViUzqhxIg2rdoSzndID4Y8dvEBeq9fotvrsLy0xJWXbvE7v/3bPPLwKsZP0VrihYptNVtDiPhJiWRhocd0MqRfCjq9FB9KnDconUBI8d7AtbjRrSwvMainmMpQ14LgDFqU9IsKGRQtDUp6igLyLCY/jsDGxhaj3T4XLy1z/PQxgkiwXmCDgMb6+MJXP0p3ZZHrOy8yKQ2q0RrUCJyS2ABawOlL5yiEp0YxqhLavYy8JxA4Ll6+zJnz58lSjQoeGQRJmsx4RyytLMRqlxTU1tFaWqKXSOzUYG3TBZCAd02baFbXOIqNmmVNolkncZNSUiBjiIr/OBeDl/dUxkYvee+wxlI7G3GrBKz3BOtIdbT7DXmHXm+B4E20vfWQpRmyDEjhEMpRHlTQ4NRuf/ZFJv0+tqoQWUo7z2h1NMF56sJw8tQpLl44h1YJxhj2D/bptLsgAv2DA8p2yU6xyfbBFmrUgnGGqWu0zkhSMY9aP/qZl/mbj53n5955gcVUsT41/MDTd/mBp2/z9153ih9+00me25vyWzf3+eYHVjC28V8nhoHYbhVIJedn7KiHGbVcv+9Tt/nht5zl9//Kk9TO82tXt/lXz9+LB4Ug+N43XOZkK+OvfeQ2hMADxzJ++C0PRdehyvKB63v8s8/ejQL1PtDWkp/+qsuc7aQRO7/e55s/+CKlO4SlaCEIWuOdoCw91kYzh2RpmWWlONjcZmt/n8XFBZbkebp+Fdvd597dHbyyTNZH1ANL1s0QQqNUTsChpMAYj/fR/W42/ud3XuJE+9CZCtflzIKlpRXv+dhzXOlPudjNo4vP66Mz1Wy+fcfDZ/jW332G5/dH5FLy8OKhO5AEHl9pM6gst8dmrjTw4kHB+YaQNjGNKFqImf9MVuqtJ3r87Nc/Qi+Nv/Mzz91j3JDXHl9p89zuoTc5wOf2Cx5bbke9aqlQ8tXbmko6tHrHsUmFCjU62cQHi5CANQgULshmr4lVTymIz4eI70aASCTd7hIhCDoLC7SM5WD/AKEiPCj4iKWe7YeCKLMXYRdRiitJkjkz34dAlqZMx1OKsiDNc7xSFJPD75hlhwoGo82X8Q6sq0nTVjwUIFFJQqfdJc07EAI2RGe7enKAUhllVeKFJ0lTtIxJMniCCwgfE1QXHFutPZSKEnNaa6S3HB+fpJhGbKzz8XBprcM6w8raCqfOngZionpz8w6IDFMFJpM+hU1IRM1wvM3m9nWSZIFkAIP9MSIRTCa7kRciBU5I0q5CuJT1OxuEomQ0Ekhd0R9O0KobTSukpqoKummbuq5QVjPe7kOj0b7T3aSaWmpTk7Q0xxYXqeqYrNemisYKzhNwCJVivUMjGZg9hmbKQ/4SAYkIgkG/z+52wnPTK5y4fQbj6kaxIUOmAik8Wa5oJTU30mucm14EYnI6S1IBfpr16Lx8dFwALswUjBv7Y4bwX6ZA7Op99W/EZy+qGbSqBqZN2/lV0zyOpvgnVZRPs9YhhUSqQ/jDT724w2jOheALKqiHBccvl/HaWv+ujidF75tNL+pZzrgrQYIMARcCMgi0kNjgcc5AUMQZHsAHTDUhWIEnwwnPcqGZrtVoqTHeEVyN9xohNSEEVNJUrxq2opQSpMBWFhkkSZJgTR0rEkEgpWZ1e5myrNBJhlApOhVoFRDCMhoMyNKEycRw6tRxelkbi6NwJVIEevqwldFeknS6a7h+RdXp0G5L9jZ2WegGTj6WcWIhRcmcwZ6jMAm2riLxxKSk7Yy6HmGmrhHcHuFTTW1j5akY7iDtMlWwyMrTZ4oSgmpSI6Qml5C2FuhPtkHAQBcIJ7m9fx1cSpIktBX0D2rqskLlinriyXUHJcasD0ta3TaubFNvlSTJmOAynK1QaTu2Z9yEENpkLYH1knpasry6SkcH9stNSKAlc6wUjPeniEQg2pC6BGqDUwKjElIfsB5aOsGLiklp+a3saVSSsrrc49Txy+ysH3Di3ISltWWCPw7BYJxH6YQQBNa5BvocnatUp0cvmYKYItEo2UP4QJLmeNcGYqJaTT3dXpsTxxapnEf52DI8KAzdlkYlkhAUneBRKkVIqKaG5Xbg8hvOolpdDoqc2Hx2c7gKwO5Bn5s3X+GVWxuM18e0ex2kTppkJtb6pY/OWQvLxxjs7TIpDW23SruV8rq3vBOlFEHmmKAwIiAVmCMBYBokSfDsbR9QVI7t9Q0O9gbkqy3StIWzLlZzmpCiZo1LAQLZtHQF80ynwcB47yJ8onl8JvskRJR+EmmGFrFBmgItDvFtQQhkiHHQEoW+E6Gija6YYahm7ULfbNCHUfP9bz9K7CybP4MvHWDOf7EHW8DFL/rjs+BdWD8nmRwd22P4lo31+x77zes3WAVw8Nc/GPH8q83l4sh/TyRTgmk2hQDvffr+z312afa3wD/
7bCRA1FVk2X7wlYoPvrLTZDeyScJmnaXAnVHFu37z+Wgy0cAuvLdNGTdWi8+fu4CzMZ4VZUllTLRqBHyWkrS7jPb67FW7tNod2q7LabXKiWOnmXRHPPXkGUIYM61Fo+zRxdmAtR4TLLUxyCThHi+ylCb855dXefdvPE/dmJzcGpWgLOuTkiuDiEe7OSr42St3+eYHTkFDWgX436+u8/z+CIGgPJL9xoZ5oJ1KRrW7T9ViRkLrJoqJDYgQQIFIgSrO1ae3Rrzu3/wJvUzyXz10jI3JYQelkyjGzWvOxqC2PDyv6AvSNOcLx2Bnm0kxxQePllCWkzjZfSDI6CSYCkUQgeAdWkBANO35I9+sSaoTnWPqgmkxRWiFUhqdaLRSpEkaOwQyQSqBMaapbgl0kiBVrJw7Hwlv3gaEVAQhUVmGF4JwRN3CmJpZFlZVJXnWJetq9rI9KmMwoULnGaMspyqm1FXUSJUusDZZQwkR92xnKKuSRFqsqUlUSrsrOVjaJYRAnudcWlql22kjZCBNNanKsJOaKy9dQ97sIUULU0raHcVkPObkyVWq0yYuceDa1XXqchoJZE6jlEUoQakLxLGU3dEubjwmtCK7PiQTdCJBRflCJxzOKMrak7YUmStpt5YYDPcZT/cYjQZMxkPa7Q55ltPvb3P8/BlOnT8J1+Lp/8VvevJV9///3dgjJo6zeeiBGenvCFHwW9/y//H7/tkslZ/Zy/knzy2Qioy69rz9LV9B0hVkCq6+cJfdcYnKHdoJni8Eb73guOuv82J2AyFrPvr1kUX+wG++AEphnKe/ffs1u0f9xxyvKVGNrYkQxcGPMKvFjFFMfEgRW/GemLB6H/XglFa44MAFZPBIp7CiRoRAbqBA4YRB0iI+lcROAAAgAElEQVQEiTGGJAUbLEoprHXNfw9JANZ78lQhFVSFRUmFSiWmKqmHU4SsGZQalWmSNCNRPZI0I9MaX9cIAv39ISPVpzIW7wK9Xg+tD0+vSRCE0jMoJozNlKKakK5UPPb4aR5+4jzBgpIaJU5S+4CtK6SSDSHM0MoXoi6rE6TZOWpTUUwqEJKy6BKEwjqDJFY50iRF+Eg8GQ9GTIoxa7KLNRHz1m4nTOvYVmq1ctwkoTaaPFukMBNoTszOSKxr0WqJyKgPOXUJpg4kus1kXGCdw5QC70paSQeVZPRdicBQOaisIREtCiyTYsJkPGJleRFZiSjhk2hKPKYu6Y/HICwma1FOhxRlyUJvEes9g23Btc89i1KeY2cfZjJtY82YLMtQSEJjH5q3chDxfa0zJEnCUneRaTHGO0+WJTgrcc6j0sNNcXvgSDNFe2kBO5iwcW8IUiN04KB/wMrKAmsnV+M8FSqSp2Sg111kNIXxQUVthxA83kbMJ00ycvC5z9B/scVkH6xzHOwekCTRSjRLFEHEx3GCRECv3ab2Fl8cMJ0meJWAtIyLfZTKSNME6+rm8BbHYGQZHuxx+8491vIMe/sq5d27DNMcffIk7ePHIqrQOWJdJ0o3CRof7aZlN4O3zLBOzjU4ytl6bWw7pRBIreYbvZx1Qmbr/EjiJpEEEbF98YSu4yeYJb8cyuiEYHmpWOLRVv+1hZT/n4zCQBojI0ECSGZKC7MRZsbv942AmOlJStHYqjqKoiZpZahEs9TrIrynqB21s2TdJZaOn2K0ucXWnbvIVBOEpsUaF5YepV8POG8fYvlMzvHXrVIZC45YYbMOqRrdThH49K9/eE5IuzkoOdPo3c/wcdEm+PFX2wQfqbwcOlPdD2iLh53A0Bp6qUJJ1dT0aZyqYGLcfXM2KCJRL8j5aw4rx8+9sMnz3/5mrh1MeblfMjGObqruw/8tZpqxdZGZ7Y64KR0Ze7v3CDs0CXOsDCIERTCsqw1ANFhjT0rKaXuCBhsSkUCNasZMX3Nhoc3+fjHfn5IkQSpFRc3dbLuxqpV466lcJAwByAq8t2RakSSayWjE6fI0SsRK8MHOAcEGulk0hwG4Gq4AMQlaz9e5cP4CRYj3M0WQeI2SCqkVqt0i9RZEtBYXNmV/Z8DCbgdtA74uqaXAJ5pjX3sZnTt6reOkKkGpKKUllG7mcSQp6hOOdz10gQ/+6ofI1gN72/copkM6Sx1OvPsSUz+dJ6q616O1vESQcW/UMsoR1t5yd7hDGE8gEcjcUxSOoMQMORqNZ7zCiwlOa1bPXiYcDFlc6PLSlRfBWLoLPUbjMbb0yGVJmnborR5DZAvc6Bzj8mTnVff+z3vc6BzjN8+9vSGLCqSMncTgZMSMN+Eh/pEgJd5FI53vu/rvAfj+nTeTmQ6Lx3uUo5JJf4/JyDEe7bB3sIexBc6PcGZACIKiDrxya43l8+dYlCXXX3qGQWWRKtrwigBJptle3SURCuyRVLCBDagvn4Lqa01UGy9iIUAeNhrn+NBmsxQNs36+1ymJrf3Rl4lYHiCEmBR47zHWkXU1zkT3HKlo8DWBqqpIEj2P+1JKqqqet5K8CygRyQJCCrx1TUszYL1BWokpSxQJMldknQUmgwOUDBhTU9XxrkkJ1llOnj2U2Tlx2mL8gN4xRZALyLTD4kqLC5fO4IPDNRUqGwJ5u0Wr0yMIF0VjQ6z+LWtJohOUhrKecqqVxvaq9VS2IEkSlNSkOqWuDWmiCdZR14vkWQvvPXVdY32IVokhgHSkWUI1MTHh04rRuI9O4oaQJm1CENTFFCEEWZrHAB0E1trGnjNQl5GNLKTAWUeWnqEuK1KVUBRTypHEWIcxKdb2omOHiJaNxaSgKgwheKztQFD4YCiLBeqiR7fbxjpLRFso9va3ePHKFXaGLd72jjeRtBLGZYkX0G632Rv30YkmJAHZUlShprQ1Ia1RKotGAyIg/P3yOv39LQQpSM24mDCtA0IGtNMRpnHjDlObYJ2aJ3CxTecop9N4PYJDBIcUTVW1SVSL0RaiWsSaDsbGe3P81DIKxbioAU2ahmgiIAIL2QpSxsOYqyOg3/iK2sF4UrC3s48xsf080+rdWN9hONyFEOjfvYkf3aXVnpCFCeNbY3b391g4fxahdMQaiqbKI9y8ojo7KM5x2rOqaiMcHu1xoZxOkEqSplmDmztShZ0tUXF/UjVLjENj/ShlPEhWZUXeapGkKTMnnX955wxKnYs+9jZWCiObWjWxQTXr2s/JXLPKsJBNtWoWb2KoiaTK0CB2pKK71gM+eV902rVNi835Ji6FecIVQpQNCgKCdXOY0syA5Cj54aeevcfL+5KxSSMLXwqsNQQbsdRByPl1ngc0QkzXw6HmIsLNW8Vhdk2DOMxdZ2zgOalDx0gsBN3SIq0gSAMywhI6QBYk06qmNCXSBrSH0f4+Pm/R1ymJFXQ6HfqvTCn2HMN1gc4SfPAsn1vg8lsvYYylNIbQlAhnhLSLCxmGGVZYooXnvW9/lH/8Jy/ziy/fo7CO73rkLN/zxIX7rvuXImIJIUDC5w7GLGaa0x3N+sQghOCJ1Q53RhWjmYe8OKxwRIvOgELOSBBIopPbhV7Gy/2CF/cnvOPUIkcz5idW2nxycxQ3enm/Tejhh41wjpKS3W
yXJEsJweGdRYl6flu891hRsdVUz4rak4WMU/YUNHM2y3N29nap6xKNAO+YVH0mKyNanRwtipik+oAJlnwpZbWzSN7KCT6gpKDb7ZDlGYPBlOLOEHvdoJJoVZtnKbU9jHFOH/49ZJ6N4Sa1nV0rEH52QI3QBU+Uy4oufmMKX7Ad7iIrxc2t6wB44cg/18bbOMfUDFgkJSrJSHSsCiedqMrRzTOS1TZV0sdnE+qkRXbqIrd2NhrMfRzDcU3wHp1GPKdQCVIGhJIIkeG8pygrTDXBh4hNN00BStAoMCjBtKyY1lAZyWhcsrffxxhD6SMULdeK6XTMwsIKSqcURc2vHH9zlMRqFlpdWxZXO3zk3/4yL738LKlOcHU8ODR9JVyIBi02aP7m3/pe8u4yVVE2c1hG1FWjBCBnRgqhUWaZ4cmPrAc5h2I1hDwVYSC4eL/knLQlCcKj5OE6iu5lHkHcw44O268YTSaUxtNZXGHtbItzaUZVnaU2Bu9qXF0TjKMsppS1i9yPQZ+N6Q7HTnU4lbUIxjUGEB5H4F6yhavsfesp4OfW7V8u4zUlqonW8wqBgLkkxtFKTPxOPoLEmyjjnUWEwyQzbiA+Wo7OE8+4aWitKafR+k2I2C5xjjkTMUpgxGQ4SRKEbd5LRMmNuqlmZo0LC0JCYxMXgmPQ36e3vECrt8y0rKCaIJRDuJhkSCExlad1exmWYqXgqTefJQiB8RUoCCImeM4ZQNFuZXjvqeoKVxeIRGNdja1jAlnUBcYavARf2djqsY5xMcUYg3aaWnryXGKwaK2ZTOtIohAKa6K8ktYKV1cUkwkqyeMkq2qmtmBYTsjSFOscwUT8jPUS50JzeIC6ihabeZZSUyKSeN1lK48Lz3kyKZAikIeEtZVliumUonR4UeODiRqoqgVBk+UpdT1FC4WpDVIK0jTF1PHE6GqHEBKtFJNpQZKkFMUJPFVshSeBae1RaU5wFutcTLiVnOONpRTIdkLwbQQ5oSFFIRzuKJMlWMAjfYLwBUonuEZIW0lNXZUU0wE2RHJdBNGDMzXONRqTwUUSi/eN7mCzOJY6tPttdkoXGfhJhlaKl154iTu3N0FHlywps5hY4cmSjCxPSVo9sjRDaU+aZ+StFu22ZzwawxHm9WSySzk5IJOCUX+dYMZo4dEZpJWh2N9jnKa0T0Yhbx8CXkbMY0TbxjFvrzarNCblkT0eGp9uQnSnKQtHolOEaoJRQ2aM/xZHIEpi/lzwPsqANZa+iQBXjQlWIVRToRUSY5lHXiHA1TZ+25keYVPhDSE07dFIlgshVnBj0ybKRimlCXIGehAYU3OwUbyqHfbej5xE5Dm+qrDWEaxDCY1upSR5gkrzuCnaOsoJIbANWYba4QS0L54mWVvi208PGN/aIhQB4wrGu+v4siSxIWJqOx1skkaCSkNCEVpBplFaz65gfI6AyrJ4jV3ECEc1A4838V5E8pqLSGIhOLh2BakSdJLGBL8hx/lUonRKVlX4/R3WnGVtaY3W0gKJ1LhJRWVgc1whpUbmCbqVxEPKC5IrT19DKsiPKVYejeXTfm34nbsHvPcrL/LPn7vG5rTibDvlXDchkZKBMZTO8/Bih//60XOH8+zIpnakFn/fCD7wymDKx9b7fP9bzvEPPnmLhVTxnidP8AtXtpllojKR6E5KVRUYZfjOR8/w4bv7bE5rljPNP3zjJUrn+fSgj8sMH7izxXueOs1furzAy8Mh7zqzyutWOnzfH96KZELrCIuWCGg5HJ93VxACWp0WJy+cIG/lmNrEuXfku8hG2k42a8PYqJowujui1+8RgqQ/OJgnJJUxTH3NfrfPhYsXcd6RiZW47prYIoREKk2QsfpaEahKH+25dZuwbLiRfA5lJeenl7i1PmGru08EMcLmtx3O92v/xev58xvHmz+zMQZehq1DOEiEOEjKhiQulEcGhxeGRGmyJMWnHmk1XtJAvkRTzW2R5hopEsbjkrvrm9SVZ0hNq9VhdHBA7WzUXs4SalMRRMZgMMD4iDnXWjX421iUMa6kLCoI0Q3MYRAiHhaq2sa8Ik2QCMajMT60MHUdEzWhY7ItZx2kw5mvlEIrgXfxvWY6tcx7B03RoNFznq0Uh2MW/4UQmLo5TMswhwNFJRd3n6TVsJxgrEFMBaPJkH6a0u72cNhYdPAOvCA4ycLiKqmw1DbgTYnQXU6vPIqrFZaoBBLP1hWsX4PgG1Lf4ZDwBfogf77jNSWqARCN/M0MmxZCQM1qw00mGoG6hyx97wKJliipm4SRiOVRGoNDNYz8mARpQohAbe9ju1FKTQgeY2ZgfElVVbE8Hg6rrhCrLQQf379pySZCYTVxw3aBab9P2lqku7jIeHPUtOgidkgpja0dqsyZ4U8SsYB1jpbqEAi4xjknuoo4qklMFBKdYSuD9OCtRJGShhbFqKCjOwij8Qh0mkbnEt+hs9ihtjXWhehiozRVXdFKF3AmBlrrLHmaNz7tJVLWZEmKUVHLDhc3R4EizVPqusRZSyLSpmLVXBclsdZgSkuvtRhZqc6AjJVoYwxJksXDQlmys+HxPkOmFSFEjJu3PibM1lJPDInu4qgIrkWQmqLwJLpNCAZvRygVqIwlT3KClbTUAsHXKASmX+O9IfhILChtjZYK4y1VVdLO2vGg0lTJa1vhXEWWLGNMYFoc4ga9cdxZ3wASrPOU1TTCKbQg1AaloH+whxcqtrkbgergoz+78D4KgAeoKnsfEWP9hU3S3XVa5y/HKqH37O7s0j/YIs1KlErJ2zmJVlhTUk6nlOWE2gj8YAvhFSqJ9FCZZKyuHkOplLo6FDWv64J2K6Xa2kCPx1gX26Cq8WfKraXa3UMsL0Cm0SEQot9fUzs9coBscIAzrWPfPCGEjHOTxp5XKYSatfSPiDnOf36e7gLxNR0WESQSj3cG4wxKxrkuUXOIwRd2i0TDpg6N9M88nHuHdwq0jsmqj77koRH2tCLgGyy8EhKlNbohd33hqO/eIElyJgS2rEG1c7K8RbHrGikbhyDOdQWkaUpLZwgpUAh6nS5mr48vpmArJqMh7SSlrcGbGtWQEYpQUzmNkyA9CKHwRPatbDa94EPE51uLszVBDOPhWmlUU5ENAlAhVkxDYyThgdnveY/xUU5NEQ9tpnJgA8p7hI9QJ1Fa7PZBQz9OsbpEpylaJYhSY8cSIQNBCYr9nKzdoexn7N64O792P/DZu/x3DxznJ77qSRZSzZ1RxU89f51//ux1fuiND/Ljb3uUZ3aH/NrNTb7twdNxftx3E0RTm/qCPaNRo/gfPvIyP/rOy3zym56idp4PXNvjZz67zgzA+yNvv8jZpZzv+PgV2p02X3Fymb/7hkv0EsXYOJ7dH/PXP/p5hlKiWxl3ast3f/IqP/QVF7nYzbk3iWL1twbTZj9wLC4tcYgnbOJE7lFao7qa8XTM/sE+UgicuV+SSiDmCYIPHm8N1sX4tx0UrvQoo8jzDOcczlcELSinFZNnD5gVU7TSiKbIoJM0qgogoyWjiK5lSiv6iSRLJWceO4v0gUQodJZyvnOGT9eKN
6dfTunCnz4+JxeQMo3f3ZqIdRcO4RzolBMnz9A9vkQwir3dbfq7B/Tv3SU0VunO1oxGU6TMWOxOIwxBeMrJlKKY0FvsMd7aBATTskBr3ZC6HLbJJYyZdVIi1KsqS4qmsxga5RStdSSWBkgSHWW2jGc8HqPTJayxJFrjRSD2yeP3m+mihgBWBGrhiVK8TXco/tC8U3R/lTK222frYmYqQAP7wwWkmkmFubkyxGwktsI5QVkWSComZU5d1KDBeoezBu9BCU3WyimqyFFQpCgpSSRsbm4wdFXkYDkQYYQPNhL9jrxXrCL7uRTgl8N4ba1/f/8ZWjbYtznG6Et8ISkjdgaa8nmIiYJvkkwhBXjBqfE5itVbSKmigxUK2QiS23DoDiGlRAo9DySz6pcSEp1mGF/xQHiciw89wmQyob8/YDQZoVMFBpw1TIcHrKytUiUJzgsChpk0T8Dgj4hhO+GRaQpBUluPd8SWkfeEYKntiDRJoTkRhiBwLpDqhKL0KN2Lv2ciWL4oDHmWIr1kuDehpkLphGJakGU54/GUdhoDoFIRs1SEMrJRvcPYCueH0ahAa4SJlV0RmsqYj0xPrZtrFHyDfQvzU2an4xiORxTTgtVjJxiOhjhvkQpUqqjKCTQLJmmleOfRStPJMw6KAo3CTiWZWsTKAbWp6bRXGPYL0kxjbU3eSgiUUOckWtBq5zhvCMKikrgAtcxQKsEUjjzvxdZVEheUVppWrkllQpJGseZWu0WaLlBXIlarfv8jAFx+4ASfe/YZNje38SFBNrqHsbpl0VKwd28DhMQHi0oSHn70MSBeF1NF+IMjkKUJ6uhclj18u8b66POspEARsLVBioxWb5ne0hIheCbjKVkSpamk84SqRueCVneBgGY0KWN1WVWIIxg64QKJkhwMp1S1BJMhqCinMalRWhKcxddT8jRHhBBPuqJpb87ax81DfqbAQZQkwUddUqRAJodYwagnKRtDhDDvdkQw6pHXRBw6WckYvNJUkYRD8XwhfBMLZhE9JjOHhdroATSrpgKIEBDB403VbGgSgmfGyQ8hoEIj2YTAWYGTKV4krw40SmFshQmBVCS0l1ZJOhktrbBKYnwM/HZaxU5BUSDwTSvQMN7ZZrq1hRUBFzw+0XSeeAzT0pT767SrmgnQFyIqGRRF1EGUurlWce3FTx6xS0qnkTDjLErIaGUKESIwi59N0hoFvmbHghjXbFXjvI8STU3MlMHjFLgkRQSJr2qcrREBtJIxeQ0i6s5mKUJrZJqT5ClCFRTplHyyQLt7SDbqmhHvf2nE+1+6/5I+vzfin75wq+lAxCLFjzfksQBzw5V4r199S4SIehPbZcV/87tXmVnMzp3WgCA8f/8z18k6LYyzmNrwtz/50rxzMNOZjFfmcL/50K0dPnRrm8e6cS48tzuNMAEh0EnGeOHVn2fzW97+6ge/7Merk9T/NjzUmJfIuJc2UBQpJEI186e5HzIIwEX8qowk0HhSinCvmITNbKEjDGOOXZ9BKJyN+3KYaQ3H4pAUAY+j213CGEdZlmRZRuocQsRk0BOwIb6OUJKD3V1u3L2OHVvqyQhswNloGiCbjo3EgTNYU6KEI9EwqQvKsiRNJMfW1hiOIhFOCYlIU/I8B2OikoKYFb8AqUgSj8AjRSD4CD/xIZp06CTHugql4vdVSpGkGh9cNEbBHnaDBSBBCt0kpXHNem/m83IWil1o4mGIsMTZmGdRDUQyattGslkMBPF3mMEYjxgp5CqwtbVJFRKSXCFVjiknsRMl46HIOc/KseO0WilFXWOtpNUOlFODlppyOsaEklRGx0fna/KFjGIyjRXZZsyu4Qxe+eUwXiNG9f4RN6DD9v2XSlid84hE3RekYqslzG+EQJLaFiPr5xjX+KKxrd/OW7jgG7MAP794pq4RSXJf0uydJ5dtVldXqY1l7eRJWsOUvb1dEDq+Tl2gQgTS17YmvlzcImKyefj5B6NpFAnXkvFkTCvPGPWrphVuyJM2o8kQa0zENlkbJaWUxhtPlnUoqoo0TWm3uyiZUlY1k/EEZy0mlLTbbUxl8KomWI/qtbBlTavXppN3o9OJC3S6yzgbkNIjlYvg6SQQHCwvrUa9S9tUngUoKcmSFtPpNPorK0VV10ghObEcsNagteTkwmpsvWgVMYFSopIouG5DCxumOD9B64AtBYu942Sqy/LSKkmSoBNNq9UjhCjPIsWsuuaRUQUenSQRSC5CI82SAtGtxDlLmqYxP5IqHkQa/UHpLUppvIunWx9cxIBK5olqkC0uP/AAzhUU4xG2DtTGUBZ1dOdppmqsaHjOnr+AkhIfIM8li4s9Ep00bSNNkqTACwBkvS4ii+0UKR39g32q8RRrPbotSbKUTrvHZDqmsgk+SDq5QPqAlylpJyFv9/DO0ko0Cws9JpMRg/EhU7SajLnzynX8YEAIHuU9mW8wjU1aWWsXyYgiNM5SEtls4rMT/qyDH5r6lhAiKnE4D809iZVWGVvsUs9lmGZOLXObogY6+H9z9+bBtmX3Xd9nTXs407v3vqlnqVsttWRhyZMky5jEdlQpOzhFIBR2SDkkFaBSQCinkj+gyGAqE1V2FRRFkaSACkUoXA4EKhRxPIBjE8uWB2xZ3W5JbrV6et395nvvmfa0hvzxW3uf+7pbsnASy7Cl7n733XPPOXefvdf6/b7TT1bn7FAfc0ZHpCCrAlIaI4ZyQTFWW0kxjaOZNtyD7kk000kcyRFE1ylF3Ei/kp9/RIsTHUq9ffPujRVdu/c4o5kVDqcdfragPLmCtsIqpAjN5j63Xn4R2yZW8xnD+hy/lgk4aIjBM0TNMDQsjx9iPbvEK/fOaVNCJ83MWYzRYsZBpoypFFF5fQoI0qozUmFMQuHzJpavxyj5hwowUZE4/APyfBZ5zhTyNGoVSUaToiF5JQBATOggLbYnMEQvGdI60Wz3ol20BfP5guXJEX7Yc/PlOxS2eNs5fOuxC3HKA32gCSF//u/wx/F4/2rOkb24tYyDV9565IvF78EB7re0HfGhK7MHvv5HH3r7vPR/GY7fKI946PI1SDLYI6ac+JEHdaDVFLavtcZkOvri/iySoJgzqwNhXBPUmJnK9PMjSqiNBPurLM3ROf/Vx4G6XHDzzVucHC9YLOf0fYeAADIBj1TC0GEKw53GkLQnWE8wMjrZYYUejyFnmkYUHqvh8vElBu/pzu8ACR88TdPS9z3LS5fw+57ZrGa5WhDWW7Q12Lz/BZ+ICpZznRvpSMKiVMqFvjAswiomhig6Q6VHlZIMcAihO/hz0Lmg0yKv0Ub8BugpKmpknbU6ZOhOx1vqpJQSPrZYWxDDoeHI4WaYC1LrRpU8+p7HuPHFVxlaT1UHhn7DfiuGvpgCYVAim1jvib5D20DfG7Alukzsdnc5O98KEh0Uy0XNlScv89pux8XCx8dIYe2XBCC/GsdX7vp/4FATRXhRcPvWPyfAWj3pLaSjkw8xpkM3p2CC+WMaiF7j7GE6TdcfzFMjiqq1FvSwMHSDp7BFNnFIcPLp2Rk+BB575CGatmW3E52p
Hzq2Z+fMFkuqWaDtWgY/ZOQqMasXjG7Lk+Ia1bzAWk1TtDhbkRIUhaNwQuf2vSQTOCtIa1EUhEEm2XShZb1b0/c9u33LrLzE0eqEax+4Tl3PpBCfzeTOSBpjHc4WeO+xOZ4rKSnurS3o+4jVGm1i7g7FnZl8zKaTMHXUReFodgNFIU7OlM0KPgSM1WIgQudsUun8VEa2QpRNKiaPs5qQEsMgxb02Cj8M0nl6i1YFZW1FQhGdTJuqSlS0DIh7f+Sgx88oxojRVqJgbHbhhpCHwGhBmLTBhwalK3wWmRszI8aecCES5Of/75/neHnMv/Id34kmEJuWrmu4t96xbTqC72EcqTp4ympGSDlg3paSnToEUoTdviPGHnIc5Ouv3WFV1iwXK6GmPQSrMVqQm6RLoVtQzAqLj0aMVVqitmazFYUrOd3eROFZ3zhHRPSHBUAVmpNHrpBOlhJD07aSY5onq/kEZeFwZcGQJ1JpL3RUTB6lMo2Uu3yJ1BmTUxUhDsQhUJTZ9BQTSiWsNlkPNd6zolM6aFbH+zibBnIgeTrApONKMBH+04APkM82F2MXi86RGlUkSGFC18a1pGs72rbD58bVWU01m1GUNXHoCeHtc+ODNaR9gw8RU1XiqLYWvTjG1JdYlOAKQ7Kae2GP9pFCaypjCSHh00DUCZMEDSmPj7FGc+sLL7K+fYdKSdM8qEBIovvFKHRUxBAISRgTXZTMqwoFDF2bKbyxoRANakKQk2mKTWaXRqPWOOrXaJl4FVOeMa6kqU/JSNGqEyiD0dIk6BTxKmGWNW4xY3f7FOehKMXA8oc/cMoHr41s0X6K+PpSx9xonpm/NfjxNz/+58++xnZ4x/HgvyOOV1bX+bGnvhkysisFG7n3OhR1cilrghLpRYg+s1L52ym76vM+lmJCYcbxNLnpkCZF8l1lbZXpV6CMIsSEVTKXUfZBuQ6kORNwx6s+G7BkMtvlFFEpo5rGEL10qSopSfmYEkAks1nnaW2SLHGYwDYWJmNsltyX4x4+ZtomYhogWZQtSLEH0yOMh6GkhLxv1PWMysnUujDJ/yAFYf9MaaldhVFG9nZToCME+iwLEvNP0uCHSNu0LGZzQhq44QcK5+iHnjh4uq7Ddi06CHJ7dOlIZGJGZEIxBULOWq8qiw9+amILIL8AACAASURBVH6T0EAoFcSUbXPCQYS6qqirGotC6URUBhWL6dzE3FclUtac+/y9C5MyR4+gFundIXQ/nxDIJiw563VR4WxJ1/XSQGg1XT3hwjo7tI7jR1Y88i44P1+TYqDZRWGLI8RsSDu7+yr/7FO35RrK9YHSBcZ6tmf30ElJVOLgSWXBaj7HKs3wVvReKfzbxr5+9Y7fWgvLARX5Use4OZksSp+2shQlMw65GQ+Pl7BtrRMY6WxSSKJPzReZUgrnLG3TTF/3fkBrGYtZFCVD53nz1k1m8wXb+/fQRcHlhx5m98LLmHwT7/Z7qrlM8DjQHUJJf/R7fzd89u8B8PGP/T5QQgJYa9E4eaMqMLZKstFqyX31YdKGMo65TJ6QWoYQCD7Rd566Kun7HpLj7LTHOUfXdxg9UFVQ1zV9kkgqYyxGWba7DavVgqEfSCGh9QxsIvmIqywxiK4GLVq/mBTVosDaUXoRqcqC6KPc8H1PQvIAY5COs+8HZrM5IfQY59ARhii64tliLl21VhSFYrvdYQ2UM0cIER8iSg+oBG3bo+hISbPfNiJw15rX7r/KyeXLaGtlXKd2OFcQY8BZjc2LsUpM5qXAgDJiG4qxJ5GI/nDdfNef+D384//zn/Lms2+SvEMpi8qLpUoJZ0r6QWgMjeb8fMMQIkNe9Nq+E6omZC1mSvBuee7rVxYs3Zzd3ovmVQWGtqFaLTk+PmExnzNbFKSF4eQ44IdIP0TadiBqoWCD0hi3pO3OGVJH9KLbnm7AqmS+fEw2sX5P6Bp88pJskPJM+USespZI+lAIOgpCiHStFOQhU7TjdDbnNK5whOCFOrNFnoEdCLHJ1zKTNk+az0NzrVRelEdtFimzU+NmnsaeNRff8sNpRG/HlSJdxAszKiFcNjEJurPb7bl79x77XYMf0oQ4is7P4Zyjni1YHj04cQhAvfdp7K2buPsblCpxTcCoSLfbcrbfcp4CrqxZLGbsT8/RES7NFpTKcNp3DL6XyBglZgdb1cR6QXPzNqprmOXiIRkJ+M5dBFElkhWdaV0vsUUlbIDTaFfS7c4zIgMQ85KRw9ajNBMxI+KMU4xy1RQnw2CQe3rSDQ/5vIrkIhZZLhHFAU7f0d4fKKzDlY697zlr1vyJTz73Jdfq/y+Pv/m5135bXufLHf/qP/i5L/v9u7OfBh5srC4e3/9n/hwppUkfPRaRMtY7PoCUJWOEus6Rbzqlaf1SAEFjx+dQCqMKKeSi6NAjCp0iLkslIgGskc8/KWyUZBlFHtOarxOj8/ViYm5gslxCIRQyCjUBDlLopkwPyyADLQ3tyHrIJQSMa0DK9LQRtkOJrl4hYANGgBFrDSH02Pw1pBxrJ+tQ0AKuKKPBQuc7htbTNTtMkNfRVuKYvJd1tqpmDEOgHwLORkk2UArnCrxzlJXDGEXT7hmGgB1jtUZ9aMqSqVGiEMEVAipJk4D4UawWVEd6RGxeH402UrcoQWOzag7y4CHrLM7abNyScypNgJkagck0GfI9rWDkyMZR1QpFObdUZU3XtvL7K003dPR9y+DHkH/Yrs9JqSP6SFUvsIXj6OgyzRMR3+1Jn+9kHx4iIXm6fk/fe4ie2O3ot4GynkEf6MKQpRs9MZhJwjUeWpt3Ikq+qsdXZqa6cAEfuL3xG+oCK3Thewg6YKzKs3eFJjOa3HGNm3WCZFA6YYwjhB6lBEGzmXIoi1IKUiUoqysKib9B5408kQiknKO3PV/z0BPv5trDV1Gqoi5n3HQ3JHjZOVKM2ZQFVhvK0tF1LYtFzQe/6T2Qh4ZpU6FUmmRo2lpJPIg5aibJxq2NoXCWPvWyiBhBq3T+r9ISn6O1g6TkJkKiciSeS36u6zpIYnza7XeCZkaoyppnn3uO973vfaxWK2HL+gabTC68W1JK9F0n0zhcIQXn0JOSxFrt93tu3brNYrHKTLCkMgx5WIL3A7vdjqqqGYYhI6se7z1KKcqqQmFomg5tLF3bMV+U1HUpiLcW807pSoYhTnl8290WZzVFWXJ2dkbx2msUpcN70RRVpaDUVVVRFBU20/BKaZRHOvqsXOy6nphk03h3vnpM7fjIt3095ls1P/HX/y+++PlXSGiCjzib+MhHv4E7d095/bVTrl97mLt3brFZn1I4yzPPPInWM8wTgZ0/w4d+GtEJ0LUNrz33GkfXrrC6cglXlNSlIGYxONHEKU1dz7BGidEopjwBKIrT12j8smDbLLh39x77zamgZ/lYn+4JUfRWKbbo1EMaBGUgZgZdPVBMpihGj74LbNZb9s1e3M4RRsGgUjIBq65qjo5XFKWh6xusLTDajXfe4Ra8YBoYb+M4PiJnsJqsq5RQlozAqGyOSjA5R1O210ily0WzzcE5m3NdtWK/a7l
58y7rzf7Q1KaUEV4tJrlmz363Y9+0PEhDw+u3X+ehy1d49Jn3E/sO33YMQdGe36dtGrxP+KS4rRTRd1QRUtPR9FtsSBRVjm/zgaqeUxpNf+cuab8Hq2mSWHNSkgIlwmSO0TGJ1KMb2A+Rte/wWnTHRUqYrB9PajypMp6TrFFLKs9pn87K+HnkzzwhhYciN/e5SBVr3FRdjGg1IaCigAH7NHC72z1gK/rbn/gGfuhXXuVHvvgyv52H05qnVjNePN/j3yHnFOBS4cTQtW3e8fu/1cNng6TNQ2P8un3g+9cfevjBHxhp2+lLBXlQhuxbgqLFfDFoJSxQCgfHt6BqGsxhUlXK95nKReZYACs9RrZlE04USYyPXqK65F0IOqeEqpcEmay/HMEJ8p2RhHGUlmdkrGQdV6TDvZpkb1P5vhaTJoftnaxlVTLeOGpBjU32QCitBQhJVvwoBhQSRxfz9ShDQgJJaSqz4MnHnqRpG5qzFX0Lu9PbtPse5+aY0uFjwNgKlKdpW+ZLg48DVidBWncNVmloBlKIlNUMVwJqwLhaBkikIpN4PcZrWa9S9jZohTaQdCR50CqBNgRlKG3NamFpNCQCkS2Vq2naNegenzq22w0nx9co6hPu3jmnrp3s22hh3rSVxiWzMNFE6mpO4Qr2u4YUA9rIHWy0xaSe/aaRNSAljDWcHFV4r9ntDyjnr3e/zPWjE7rQ0m0HQjQMTSCcaYwJpKZHqQJrZlTVDCPWGgq9EMPYsMHWDnwta4byqLJGm0g9K+jPD0WxQlD7i/rar/bxFSKqYxSVFKYXO9G8F02o9qGozdFRRvQdepzbnVKmEEfwW2iwsizoGtFYBp8pyqwbnA51oP5lQfcYXTL0DTENaDUnpcilSyvOz9esLl9m2A6UlaKaLRhOGyBRV3O6MCByE5ntfHJyiT/y5/8drly5NL3cbFZKtl8Qisj7gDJKxrgmQX6NNsQQuH3nNovFkhBknnyKkX23Zz4XnShowhDxPrLbb6kqh/cRHwb2+x3zubx3owvW6zVN07BYFLRty2K5Yrlc8YUvvEhZVkDKeaBjrqWXGKysRZ3NZhIX1fVCgWYdTdP19OGcSMoShUHihnJECLrARyNxOyiqHL91yL7UzFczSWkoLCTFvhmQzEAw1rLZdQz9MFEvcvl4UlzjQ2CfWkGZkW7TGTuN/Ex5ce97Eb1bnSNGfMdowkhR9Ef/ev6MPvnJnyPhmc1KHvnYCa/feJX96Z6UoPWeX/v0Z5jNF4TQcOONl9hvd/ih4crVa6w+WNCnHrsoqMxVQuilOGtvAtBsNtw7u4udWapVJVNWrCGGyHa/Y7ffo815vgki1oz6SkHenTG4yqF1gCHSNXsCidQdSofHH38IawtOT884P+1Znzd0zV6o9nx7ih4tEoOf7q0YI127J3g/3Y3jphQzfex9pNl3rNcbrl47YXVpno0TmaXIm+5bmRG5dzmgRyNNSGKy3Sum8lM9+K/D36U0oQnTaxh5fwEpfH2I3Lt7yma9kzVCiyxAhgoIshrzOKwYA9vNGXD8wPv13Z5u19LPe2xZUCwLtLOU6lgKd6UElY/5volR0CYfWe07+v2OsNtjtw11UVIvjyjqiraouDNbcefOHXTXocffx+SGIUSUT0StuDPsWSdPRBGyoa/ShpV2VEnl/iHTxOpC1nQ6GFjGVAYQF67CZPdvnO4l2eLyUAHSZJBJWpoXoxUmKVrfcR49bQpvcelLjM949F7uOafl+b2yxBj4h9/1jfz063f5oU9/8VCwcVjfv9yhlOI/+/r38JFrl/jD/+Qz09/J82sGDoj73X//O/jw3/0kN7YtAU1CMXDYIBfW8HBtKbSmj5E3G8/Wj4VuunCtHo6TwnC5sFgle/LtznM6jMZbxfXKsXCSkbnxkdpq1v7txfO4JsWMLh7ig+TVtDZieEmS560UeZ8bo4bG2EYpLqWhG2MD5fMjyhQquWfHAjnnHyuRhEmBKoS8sCvj9XGxmGYygsY4FtBmah7H576o/744MYykRU6SG9DDEaeC2qoLzGcOb40RnHM4qzPVrbMb32cEORBwFC5R2woTZygfYShJrPFBwChtBkJKtF0ktIHSBmxp0Ulxeu8UrSzb7SmtF2mZ14rBKBb1itLNqIoaV87kGg0JtMW6gjIEfOhJUQY1BJUkOxclngen0MpiteXWzZdo4+fZbvfEQTGEhs12TUqBspIINe/9dA9qLRPJJDXHkZJ4Q5x12YAeOdvumc+XLOo5fd9SlQVjPRVCRJkISWegSmRFSiv6TozWfzx/Cp/+Nx7Pf5rzlR+j8dQjk/6mKzt/L+fzPvQwcGjWlJHlRekH142v5vEVFapqwr7zRc94Q43dPPm/4/floSHIvPrJpT9GXMUJqyGBFCN9J+H92jL4Hls4oX9Rkwv+ohEjpYQ1Fh8E+h98wBWi6+y6jiuPXuPGG6/h1573fvg9hBRkvm/XsvMRTMRVBVZb9o/s+Ne+5/fS2pZPfuoX+EP5vf36889PyBFkN242TSiVx+HlUPDbd24xn8/IcAhFUdAPnqos6YeO0TaotKZpGmazaorfUhq6vpHhBrZE6UQ9K1Fo6tkS7xMQca4Ut2KmHLwXqYS1pVA/Ss7zbtdy//5aFiqtGHov+t0gtH9CJBlGJ5qmm0xqWgu9IoVvwObEhvV6jSskVkw6cy10eQxZTyt6YeccwQdifm/tvqWqSqxRU4MhMU8ysBRtpsgOhWQgaqVEM2wOWXhSgKVsqJPNYLxfb7z6BZQCV1iWywWPf8tVbn7qLtvTBh8M282e7Xaffz5S2JKnPv4unvnwI/jCUdhlnv8eJsqMN6VQffj6k5TVCh0Vs2JBUcqggBACXrUEnyRxwQtlF4bIEGWYQ1SS5CzB84GcFYYzlguAKq03+GZg30Hba0J0JFOBiaITS4ngs+s+VwpKQde3DMMhCPwiqzFOqhIDm2LwkTu379G1PYtlTVWXgvgrMyEvUxM66SbzdKUoSKKsXLKCjQXLRbpoLFlH2nBsadFkhHVEZ2UjSSGiC8d2t2Wz3U3PJ7SkbAA6Sw9ihDCkrGV7e0Gxd3u2b97k1hu3sEYKvKStmC1Lg3IKrEbrAoWwGhEwWlFYg4mawtXYE9nodk3P2XrHfrfnbLum7TqmjEQtDvuR3tVasVWRtYqilU2COqkETQwQe4wuqbTk1pK1xCojV6MOPZEROoRNMKMBlYRGYbMJZNIQj4WO1VKNRUHWFIqQYJsCWyKz+YrjoyWvbi7a+i+Udeodvn6AIXvrn76y4y8++woXh79cfCIlAaNvsVfJI49Lxw989H182yMndCHyY6/c4b/6pRc4GwIrZ3hiXvDCpmXISPIfff+j/LEPPM6V2hEiPH+64X987hV+5uYZgx/4/g89yX/8oScZYRGFYuYMf+2zr/Gf/9ILPFYX/Pdf+wh/8ldvPPhu1OFEXMwLv3iklHPAcyMhj4ugUr5aFFq7C43gg8kXYljKJlRtLiCZh5QbUMSocjrHocg0WpBYlaRZ0Safa6WEtsxM3wiqKG3yczAVp0qLb2
HSkMcHM2XldaTYiVmDpDUErTCmyHuXJwwdVeFQOmtlg7ChSimGpEhpwBrLrHRszm+z3txle7qhHyJ2iMyKQprx2GJ1wlKgg8d4xdnmHufnp1ka0FGYyNBtcIVDhYZ9O9D6QFCadr+TfG9jaZuWpFvM7pQYW3TWfIfBk1TMkiNF9DIVU5mBN978Dc52LbF3WFXgikAYNmLYWjgaPYBykHSOoougIlYljEmSJuIsziq6fYtzltJ0xD7SpT2zusYQ8n6cmbgiMAwdIcJqZogp0bYdTsukxE+T+Dr7Dkkn/z8dP3N7kxFVjf0XrVAdEa/pkF0lIyaHLhGQzjGbORQR6+y0GYYwZinCiMymGHnJvczj1mK0hPgXRSF6nOzI01qo47ZtpxiJtm2lsDUJnWQggcgGFFoZVpeW3PvFe3zg/U+zOr5Es++4+vBDHJ/UPP57rvLYo9dkbKsuSLVhuazZZZp6PLbNXi7KFElRHOshRmKQTE2VKXetFWUxp+9kkYpBaGqSot03OW9PNGoj5X1+tscPvcgDYpTA/pTjeLLBDOWxtsjUwqEDnhbFKWPWTtMm5OOJeC+yCx88hZEJNUM/yEaRC8nCadmEtcKagr7vKMuSfpCQY6MU/TCwXp+zXC5wRZHH2MpmqsmfT6YIxkgLrUXQ3g2RfmiwTqj48ToqCidRKn4geDGjFUWBkWFAWCeaxIRMLRHjmpEs2CHgjIXuFID/abU9XJf9mYBt32WB5Ze5onewewF2X/66/y+u/yxc//KP+X97vP7SF9luNwx9i/cdMGQEx6CVFCkpbyo2x0bJYIJc7KMODt0Lm6DkQop+TiUY+sD9++doA84ZdFlmSjNv4NJCo/QhExWtJzPGqMGSyXQHNOetAJsUsSk/b5pQJAET00Q7aR0x1tENa0LIUg+Vx1iq3KAkMaUZY3BOmIN3Mo9H3ROCxXvEaIQwCGNEV9Qyr4cgVLwQ5zGbT/SBDiJToGksCgMxhixFUjkEQeXxh1H02Dqx1omgDaWxuFKa574dCCrRpUCTvCQw9GkqPA8lXJrA6JQRVWGoDo+5+OgpCyIX/0kricoSOgKjBNHtiNjCsVzMWR0djf5QAJ5YVjyxqCmNpguJN3Y9PsfyHTvDSVEwt4YrlWPh7GSOul6XLJxhNwQuV5IccLftubk/0Ia11Twyq5g5WRPakHh530/X5cIarhSGyug8nerBIvi/+dj7uNMO/K4f+Vneu5rx17/9a/m+9z/GX372Fc6HwOUQOS4Mt1t5Tz9x4x7/2xdvcdp5Kqv4Mx9+kh/8lq/ha//uz2KBv/SZl/ihZ18FFEUKfPtjl/lfPvH1/K8v3iTExJ1u4NuuLXmoNLyZn3MYhqmohQPiHUZ1TGYXU0xZpkTOXx5RUbln0Exxh0qJEfWQNZ41r3FsxM3UNIx31iEt5/AeBEWP0zquVHb4jyxlRnVDlBQNjJ4aAzUVqkyu9Jjs9LwT4zkhu4kQJVtahqUoCRhQmpAgJY0OmhgUMRlS6Enk4UAqEGJCpVKMwYOlLkqcCZTW0duSMGgiW0iyb4YYSRjKsma73jPEjt12RzWfs9/uKEpHPzQYA02zZbVasd28wk/++I8QlewtbbNBpURhDdp64r7B2YT3SERiytniWozX2jgkVC5CajHKs1gsKF1NosMOS3yIxNagQ02zb9FaBkHM6opuaGn2LWWhqOwMFQ1FWqBsgUJzPKvp+x6nDJUqqKuK3gwMvUcHjfYLTOwFDY8Jo6CcV4QAfd/zPzRbbAjUswJXKNbbBqsqrDZoNdBsPCiPxzOfL6nKJcfLI+alpaoMZ9sd83rJcuGYzbQM6FGKxeISL714mxt3N+x2O27dust+3zAMA+9/5nEZmPLbWCD/ZsdXiKgeFszx6weo+1yIjnSfdF+RFD1GiUPRX5j4I6CA/M9o2IYtw1CSkqZrZTTlRRRzzEeTjVX0V6O0IEaLpM8kEo4UE02zJYbEIw89wfs+9Awv3XiToR949LGHeO93Pc5WeU4bRZ88KXmGu55bdpuf77ATvvjCC2MpwGg2kvgnxeADOiMccTJAxLwIGNEQMWYngvdhcl2mLGnQWXsm1EEx0SXj762NCOm7rpuKEWMMbS/C67o8ZK6mmMRFmmMl5LPS+KGXka5IASu5r1KsxOBFY9pJllxZlVI05uLAWocZBuaLxTRFqHCSDzktZinirEzSIZ8HY0wGkEXXJYauA9UsX48IngRx11Wdf8cRqR5D4LMT0oji3Q+idX7thR2Pr2/9c13sv9OOz+2PUNpCCqTUYZQ0ATZvhCEKOk2mD1MCa52MTwzy+Wp9KFRjPKByY+FqrUhTUkDujV2PVg1KD2hXUJTV1BDK/8RrKuMCpXGIUcxn1jqMNQcnq75AE6rD+pC/hZ7SQQ90t1JynUqUvWUIB4BQjwNEkrh1jS7o+p7B+xwTZ1H+7dyzTyUmIzoxjUaGPLFFng6pGiJ98IxDQcZyFsaxtEJ8SAEr/6TxUs262wlNTpoI7FSk1YrlcsW1Kyf0Q8duv8c6z3a9ISboSdT5nIiMOElDSz5nKUmU1NQ05DU3HfKrBZQb584zFTVmNPUAxEhE0yU5x6RE2zdS4I+Hgn/v/Q/zV3/9RboQeWhW8YPf8gzf8vAxV6qC203Pf/LJz7LL7Mbf/sTX8U3XjrjT9Pzgr77Ic/c3nHUeBfzg7/4aPnC8AOCXbp/xZ3/+s1TWcLvp+d5nHuNj1474Iz8l1P8v/oFv5idfvcPXXlnxNccLvv/nPsenb59d+BQTj84rPnb9mG/8ez/HZgjcbTv+xmdf4z/8wOP85WdfAaAJkepCbs8rm4OetTay3jw8L/m64zmNDzQ+8Ebn8bmr+jfffZ3n7m/51XsbuNAMfGBVc7OTptdZ+7YObGTxxnzXiT3UeYjNCHCkkdaXD8mMjKLUYlIcMiLjaZqANd6DjMh9bvpTGlmK/EZiBoVGmj8DP9qYbOo5MEOHKXP5+vYX121ZW6XxOvxO4756aDNVzkA/XJPWZJRYS9j8tm2wZcngI7Yo0UA3bFEGbDoHrUmpo6ojVVnKgJWZR9kdui8I+Z4sC0fbJkiR5cLS7u8wbO+KvlJ1zMrItavXCcmTVGK2mJNCYGhfl8QAAkeVZjmbc7Rasd2dcnfoiL4XZBk5z973kkakE6gOrWcMbeLyyVXe876adj/QtQk/GBaLFU0j5qbFbIbVss/PcJRlQdFXXJprqrJm37QURYlVmkJbnJOJmfNyxqyuGYYWrTSlLbFKIqmCl3QGHzuMzTmyA1ilCEPLwhRoHShjoAhzVqsVi7qicol5VbBv9xRFge89NvuB+mHNopgxq+aYGGm2p2CPQReUpsK4SAo7jNvx1GOOsnqEu7crtpsdVy5fJvhIDClPmvydcXxFhWoMQSjMt2nZsrzc5KlQeTMyWgxFqjC4IrsiM9853hA6R92EGIk6ycab5O+ck+B48nMnxP0/6Zz8AAiVbeyocTH4EHmzfg23mfPqK69y/ZEjtq3ni8+/xtxp3LtavvDS83RqoEgFUQn10rYtxhqKskB4T/n97
tx8U37PMWvNDxhrIKmcSWqwNk8Nzo6rGAJFWaKAvh+o6xnBp4wUAUrGukr0icp0BBAT1hWMZizR5ymsdtiipHAFJrvnlwgNXpQun0s7SSycE32kFCklwUslMGp9nbVTIZKSP4wtzcjS5E41olMVisTkxUoMb8Y4fPB5Sq1IL7Q66Le0EwlGBqKQhVcy3mLu2slub0U2XmqNjxGfEj5YQh9xWvSJKuYHKUhRCrCfeObb5JrILs1xsIEAgaIddsZgdMT7Nv8uSHeMNA5ajSi2bAhSrAX+1HM/CsCf/qmSh089UZdsn3iEjZU4o6AVKXk0Dm1d/j08VgtDEKNn6Fu6dqBrG2KAsl5wvDpisahxdUXTJ27dvMHm/C7RtznmBqQ6ynpT1DSzfjpSktzeFA7UoVJTI2jy/eK9R6GzGxaUloippm1IIwKkO4zZi2Y8n5+Ui7KYRguUbJR+GARh1VPpyRhcrXJ1NVG9SuX4JfldmBpbuR5ClNSMoihou440GhYzijSbzZjNSmZ1RdP0nK+FfteMOroHDz94go/47NxP2qBtQV1WkhzS9qJby/PeJbMxHhCxmKn3zPCMAw8So0Zxqgun4tEoRUekJeFcidOGwpX0Q0/XdaxWl1AR7q/P8AmSR6KIRuIpJbEIjkVpRrtUHk19QO4OdGzKfzUiqnLaD9ifVhKP1PoBVRbMF0sS8QH3sELxw79xizZfL//lR97Lk6ua/+Aff5p9iMyd46yXtffffs/D/Kc/+zz/0U8/yx94z0P8hY+/n+/+P36Ju23P9VnBf/vLL3C/69n7wA985Bn+2rd/mD/+05/hdtNPW8U2xKl9+e53X+f7fuoz/PKdNZUxzOxFLBU+cDxn03tezsWnUYrn7m9417Jm6QybIRASlDz4cx+7dokf/sSHWRayxv2tz93gufMGEz0PzUoeqwte3vdYDd/5xDX+yrMvZ5mx4mopP7NwZmrw6+qg50vZ6T3eF4fOLGWTcHxQlhZHSYDGBw9BsqDHtVWADJ3jiyR/M4WxMMzPm+Q1VVL4mEewcrjPgOkaSUZYM7IhU+Z3qMN9ORaYOQtU6uM4ueQl5pCM3upJcjWO8qxKR9c1aGOo65K+b2iaDbu95HFudmccLXsMd/Fxg5QUiUUZ6LqGwlXowmPxWO24VC3Qg0I5Rbvd46MnJUNh5rQhEEOH9wNGw2c+8yyFP2fb3KcoCsrC0PQbyqogEjk7v82ymFGVBc4Yrpxc4crxHBVl7VyUV7Em8hn/RZStAE+MYI2DPHrUFhqtI8EPEAoMS/abU5yr0CqyOQvMZwswDXEILOZzjC6I0cjvtgr4XhJXTi6tctqA5/79U8pixnKRARilUcmSQqQqapQzspfqnhgDVXWZuirQSOyVimCt4fioRhG5c/ces/oYbRxVVWJ05Gg1YwgtR6tj9psdvu+pXi/JDAAAIABJREFU64Ju6NGuQLuCq/0x+61IwOpZjVIGnwY2mzOeec8zlIUi+sQH3v0U52dnLBa1MHBdT1U+mE/81Ty+okJ1kfP0jJGL2WaDzhjJZq0UVtaKjrH6GjJCqLn20MOCKtiGL7z0BRIBbSSeyFjJIYw+MgyBGMc5vTEXq2IYGqMexi7Te481hmQsfeioXS03YGzZxDWzes7jjz3GdrPn07/866zv3eaZ7yy49LBh3SCayUHmlWulcIsZTdPitJ0c/gCLSyfi7LZi+BkTeK2xxJRwxSEU1xVFXqRkLChJgxrQVrSmZLREG3EOjyHKMgJV4X2kcI6YUpY+yHx5ax0xSAFcVhU+L3whhikzL+bCX+idwyi3MftScktzZ6/0RA0Z7fDeU1qRBoybYP6/FNEhoDL+NUJOSWm0GYtkcgxdkik81hERF6oeN9x8Qn0QikvlTWJEsUBChmNickcnFD6N0T16QqJHimrofY5akucOIWK1mbS4SUPfJ1BBKNE8Jk+rPIHECsI4IVNZZ3tR4VI+8Rj30218O3Dz5ms0KTdTKKySlImYZIqa0gmtHNYUWGcpqprF4piHHi549dUXSbdex53ew2tDMCXnRc15e0rCZxrHZA1ukFSJjEbGEUnL+YveB/qhP2xaSuWhCY7FYiFFujH0fc9mvZkKWlBCcEXwQyAaMEah8KQhN4wZOZGXlEJ0pAnjGMeGNAIHcxCMF/dFTeLBNZ21lLnKUnkTDCESehkTLJup3FtFUWCMZhh69HxO8J7CClPSd/2FQLvDMbOa5XyFUxDSwJASfZJGMQYZL6h8mM5Z0im7nEEnLQVsiJiQZJtVOiPZIy2vc+Ge0SsgqURHwhvJ1W2aPfv9Hq0t8/mSk5PLqKg43awlCYIkcVxM3cBE646nO104Y2NhnMa7YbwwM0I2JkAMuaFICpmaRqJPkbqqeOLxJ9BWs9ld4P2BG1txvV+pCn7/Uw/xPT/+K9xsei5Xjj54aqOZW8PPvH6PF9c7rFb8zc/d4L/75g/wyKzk14Bfvy/o49OXZux95C/8yhf41B/81reUkEwadIB/9PItPn1X3ksbIvU0+UZ+au7MAxmsISV22QS1dJbNEJCBhQ82K79w+5yn/s4/5dFZyR/9wKM8f7aT7OWUuNf2vGs5QyOFslGKH331Nk8vSlKCu7283mk/mnQT0ctwjemUj+h2zkzV6tA8GFMxUceAMilT/4pCW0gWMZvKWqmnPHAvIILy8mkmNdHxo+wqESkyg0fK62lmDJTO1bOO2Ch6S+usrE9TEkTAOQnnnwbRpEQIskfJPnPIYp6ut3x/phipC0MoanmMjlTGMC9nHC0disT1K4YPPvkutFbEVJKiJYWEU5G0SIDh/uY283rF3PUsqh4XDI9dOmZ3pWS/99y69QYqtQwo7m/3VGXN449cY5k2JN3wzHuuo1C89+n38tyzz3L55ATrDHVVcXR0GWMTRhVUZUnpIk3T4FyBqy1FnQi9yCC0tqAtSke6vsGYGYku+1wKFIn16T126zXXrl2n2+4pTSmDdLDU9ZyykpSauq7xQRhWM6+mnPfVasngW1YruS60KrAmm+VSiSJRODF6paiwbsEo53HWsZjNKawmepFIXLpkMKrgaHkFpRM+JNrWY6whJLB2jjEFyrTMFiucq4hJIvHW5xtaP3B0dIWUAn0Y0BQMfY+1Jdv1ho1xLJcrtpuWs/M9ex+pSifxW+43Hwzy23V8RYXq5W+ay4WezQLOORmRFsRIZJwUnrNZJchd7QWB7APFkWXXdpiV4uixY27fvsHQS3QPKZBUoqoqnCvxvWxw3nusk1gJwoEy7/t+QgaVGtG+coq5Maog4LmzuMWdO3d54/U169MbPPGJa3zoOz+K3i04KTU2JZTVOJXDs3NKr7WO0hbwyb8DwIe+8eN0fZed0kJzyPQi6RpDCqIXjAdnpMiHDt2xRFLpjCp5oXqCdLYuo0MxJVRIWDfGBonrU8cgC5zVaJvRRSsZoTEbnbTRqCBGsZH+NUac6TCVCQwh5nivlF2/msEHjLF0Pmad1SHvTynwfQ7m5yIKnuUROZMvASqqg5Deh8kwknJYeRz1UVpwAxHwZz1unoDD
hACI0cZkxN0Ym3NaQ6azNTEcilSl5DmU0oIqKoVS8vsnBqRAS0QliHzIFFCMnqQE6Uh4dM670xe22qvf+jSfPz3jrG3wAygj7vsUcpplnr4UfS7UUqJXFkm0jNxV4vrf79ecNDtis8UkMBTEWUUxM0Qt4VtKO4wyeSPxMlpwLB5jnGi8kNkNhRTfKSVcUXD12lWqquTWzZu0TcvJ5ROOjlec3j+XYldpkpI4o5B1lx5FiLksyvXTFACuZMNSiJlJ4rYy2hLiCO3liVVS+CnBA2DCYsnFXi7GxsI6I1Gxj8QQhKlErltj5P5fzGcURUnv79P0DcZaUjaVvfWIVrNpe8xI5CsFGEzSOJOwlct3pMrIUQIibdvSDj1VVVElRdw2MBzkOkpJ6LZWCq0tcWSMYqJPno4IyqCjoPTN0LGYLVitjiYpReksqe8ZTCJopoi9ceKWROfkuz5JiDuKac3JXcN0f0iNK3QhKksqFNiExPakhEmKoRu48fobVHVJUR82nAQ8vpQRqo/nUapv7GTIhI+JW53ntO3Z+cDzZ1uevb8d3wTApD19clnzX3/sGT52/ZiZM1OSwPVZyW+cH4YyHJzvPKBlfadjNwQWF6ZTtSFxpZI1cZM/l8poMcO+7VDcbnv+/otv8r//3o/ya/e3vHS6fuAR3/fM4/z9L77Jb2wO72NpNW2IPHveTtepsSOTkOn1TJGP0XVjRqoxmkhDCANl6QixZ2yku67FuQJjstM+p8WPYzHLWvSKOpXyvEmQPptZJp3ZqpgC0YtpVRuJiWKSeIFSMuZTZ7OU90Hyo7MLnTTmeo470oj45rgtJSyiXGpSrCttZKwniiH0B2Q/KbSS5nJWXJK9R2tSKEj09MOOlCL7/Zo27rlz+yZ1pQkp0JxJo/aeR6+xqAroBwpjqMuae/ceRSmYzRakZKiLmrosWM4qIpIqMPhAXc145rGHubRaYYxh1zS0MdG2O6pqLikJtKyOFgQUpjLcvv8m3keqWcnQBcj3nzEFISB7K44QBo4uLXj66RV35ndZLVY89cRl2q4nRUVZ18znC7p+oO1ayrpku91S1wUxQtt0k/ygqirmdc35+hyNZ1aVhACXj0/wXSuNiBE5nO8HttuNrH3KEIaWpCzWKIyNbLaKsvCgpXlv+5Z6Pmez3tP3ltL5rBHWaGfxQwc6UhjH3M2Yzwx+CNhC0fWes92azWbLybUj7t87ZVY4+lYYEIXOkYc7QOH9bb7jy96xv33HV1So/q0n4cExeF9q8sgFd4oCKuC2aJQ4AT7xFPDUb+Ft/laOl+HrAWbAFn7lp/65n+FXnn8Bk3V+Lo9rdc7JxlY6tIpY4yQaoygpCsljbNuWpmnwQ5dp7kDM1HIIB1QwR9kJ5QgM3mdq8cLmnpLMIlaSJ3mBILpADcskp0nXFgWhSmksnM2EDuhcQci85kxwjsjBBWnHSHkeNqlMDY+vnUbH6+jolgLx4mSjsQEY0WT5txTJiTTptka90+G1s94XcvwOoFIueg/fh8lUnt/VOA9mzKoc37k6IFHZ56D0iOYcBk9IkXN4I1cev4z/zvfzuR97jtPX70szYQvZRcg6nqHPNJ4s/Cn66fdTKPZref/FIw+zB+LgGfqedd8RBmk0fIyZcpO4l+Alxuxgjsv/UlkvhqAuKZ/b1WqJDHSQLNqm6SiKktnsEl3vJfpJi6kiZtgkJkkskC8jk9hAC+KfLqCrY1HFGMumgGkiTz7HWp5n1Myl8YymkdSUsmz8fAUQCrmhyvrbwnH5yokwDdqyz5m9y+UlQvT0Xf9OXire9e5389IvvoT3YYQmM10qbYokFYwuaXnvKkUZTao1M2+ptAarUUrKXZUlDqSDHlELbExUMhTWx4AyhusPP8J8MafrJZN4s9mgtRZUx1gGAkkZUf+qBFahohH6X24kuSuU0LcGJaavCdE9FHtaidZ7nD4kSR8KExUmIiyEkiZys9lgrWG5XEznSgHf897r/KXPvMBrGVl917LixfMdWg1cKx1NLgoVgnL6mOiyVGBs4/7it36QN/cd3/sT/4wbu46T0vGpP/itlEZzrSqmRmVmD4XlW5HQtx6fPd2xLCzvWlS8sm056z2/62TJa9uGzRC45Ay10dzYv/OI1CHCzgdKrXh6OeOV0w2Xy4Ktj7z30oyPP3TMD/zi5wWVTVAZxUOV42+8fIdN6A6/nL6JtZrBDyQVhEVAIXH2EUyi71pCTj2JEWxnQHkpLCMMvsMVCkKeUIQiBWQwDIqqLNh0Ch0ci9mKulpS2hkok4tNi8JgtWNxsgLAD2IWLlzBFJqaP2ubGc6+l4LbuiKPEZX7wXtZixNiApRlVWf/x1gUy5oe4zDdv1bniXZA07YT+jpEz367puk2bDc7UvSEbqDUhtJ55jPNUW1IqWezX+OjJ6TE1UuXuHx0TG1Lji6tGGJiubqEdUoitbRlsVywPj9jIBGTpul7jILd+pRiVnNzfYpzBdpomm6HtoqgO7pBpHRlWRIDxNhxf78jUmJSDWGP1nKPaGPofUT5iK0SQ2iZLWYcH18h9oajxTFlZYh+wNpxSqSsYevNnqAG+tgRdmJd3G5b5ssj/G5DSonF/JguBK5dWrKYLyhsgdWGPgVWl1Zsty198DjnuPbQdbyHth3wQWqEFCKhTyhX0HftIfVDFdw93YM1zOoanQa6YU+zHSAdYYtEHwMv3bjL0eXLnJ+v6doWlQJVYcUPsrT0oaGcOZIu2Gy2oGExX9GHgKot55tzYvwXxEz18+vIx1fvRLb9y3981q5wLo/zNKB01nEmL7mtoct0ikzQUPsWrXaMWqQQZHivykjriEpmLlX28FFElA4uzFzyTQjmeEhxdSgBxr1/LNgeyLtMB+fmdGQEWl3YK6akBpge+0DkkD7k7KW3bDIXX0sKlgtfp/Tg80yPT9MCKKjg+NtMVfEDz3+g+8ciQfb5mN/3iH6OL/VgjMwhIP/wLi4+//i4B//uQq1Os+04eeQhvvHfmvPmp16lPd1z69YdietyBdooCu3y+LtM2ydZLMc0h/H939usUSjGj7BPEd808tgLn5VsGPHBCLfIpFubTBjIcIv5rCYGz/17a5xzLJYLiqpksVoRw0BVljS2EZpby5hgoSFHijC/Tr4WY8pFfcqFZcrqzUQ2/IzF8gGhUUqBTxPKOn4oKlex8vxpulZgRHge/Ey0ElnFfLag6wa22zXWGS6tLrFenzO6g996LFfHRF4iEqfzI1NzZNPq+0HG03rFbFZzfHRCcdXQVDvO16fcuPM6+qxgMVw6SD+8XDOanJKQpMmwSlOagqhy0U+i6z163zP4HmM1PvToZCgKmcDnjGFR1uLPShKNMzZmPsr6kXLjERRYV04GyVESEHMcXEpePojIYfiATrnoVQwIcguRWT3j+Pj4wQYU+OEXbnOtLiiN5p/cuMuf/NCTfP5sy2vblkcXMz5+rWRuDVergut1wRu77m2f1bKwvLjesxk8x6Xjz33j04CYm5aF5UplmVnN1dKy8+9cWD54KG7sWn7h1ik/8JH38qd/9nmOS8c
Us1Rr+l2lMro60pSpZSDnJG4GxjBPwkLnpHj8+LaZk1SsNRhjsdZKbHYF4yzPPvfCUecvvSiaMM9tQdJxezOSYkJjGQ+Ft97akWMLRFitsNazWW8w1tD5rrmziI+xuJ1o+n7Ft771be4/7Alhj3MbCoF+6BthkhmGntVqAFXRXSWUgLcDc4ESPSpWiZU30shY1QX7R5mUKyEkcq4oLH2/QmsjVQsrUgeKzN8FzTxHfOcFsM5Z3HSwAmaLQqmeqtpyXVsKGaOUEEnG4vQV2IgqgZWR+OzMLMlTHWyDIu0Dr6UZbwxaJ7rOglZs9xNozVkR4i98e0+gutvu7wK1trKwVjoevTdoY7DO0/cr7LV0+5UiUaAxj3z5q1/D+05YOBI5T+x2E1SNtlJG7YqUQaw1aC2DqAxUi67JQpEV8aKRTDm11bYhJRoDoUlpFpo8TKQUUAhwlSr2OfMZ0fZUspaBTx9BODTWsZ5KU7WcBl7ZJFxvOTd3O9QX0HT6HnACY1VrFnZg2edSnn6nhdXJ/ot6+gzRMpo7+zbNdH2ZCKo+P667QHVhfU/7OAH203Wvd47j+C8lViELc6KygHDRwNA6W0+fUUr7/kqx5C3XBsaPwLZ1o8s5aPtpnyM6YiOAswFSVRuwVIpaxIt0YY8aOdMAdxs4W4l/AQ+FBNpIolEposWu8K3N03xo9zr/bPvh316/f4034saRcxUWx2hsTuA9WoubQC1iA5NLOd5TJRemPLUEK0sImWncCdtpJflLSqrijSsAT8a/ru9JpchrSiYEYWdKzeRScVY1naXc272VYynN0s1ZK3GJtESzoqBmjNIoAzEEUg4Yo7Gub4s2kfPMYSIkhfOOVBTzPFJrZjVsACOxqqZIIYUeZ/vj+co1so8nE//dfs8UZ1LOfPd73yeXiNcGcmPvasFaz9NP3afv13h/zWbzEkr1EkJgpaO6UoU1tFYWj0sFAt42prXnexkd2phTmkQAXY9jQy0cpTonoCvVjZRkUj/p5uUFzlkpJU8jrGWPvcp03SkOcjCqVbsM0dTG2GY6I6ydG9Yi0XCGwzhT5oxrjYwxCnBRzTy/5MiqG5jGmVozuoH1VT/Q+Z7eCugqtSPGmUpls16z30uscUoJ7z0xCshe3xeHgporN7c3qBzRuqKdRWCJxus143SDyVZY+Gls59qJVKkUSBlve/KsCHPFuYE0i5TJYtG6YJXDD5lVr7naKMZxZI4z2gjrGUPBdQPjOFKrQhvNNPfShR5mQgxoLc4HUBnDLD6/LjNPUhVwXvFkTwN6hVwSOUeMsmLfmMR6S9j4hHGWkkfGcRTcUDS28zy5HUXWZQxhmmXRGCNVV7a3gXmKOKOJoZAj4r06JdAFqxWPHotxsC6e7fYWax1911OqppSJkgqXV5rCgdde27JaG8Biuw4wjGNAaUhlQluP92KZF2IlE6g1cph2hJBIc6LrO5SqFB6x2+0kJtsaCeyo0tBVaiVNGec7rDGtkVdTqyakwuXlBd5Y+n7AGN+qqIYcIjkXcoGCoWTp08kxNGs03cIbPFVZYlXUKu4cpYrkLVNQ3YpiInPVeOfZp4TWle5igzIS5fvDsr23PVVj+gTdCCuWMizHP09mKZYfNTWdtbIq0prV2reSeuCZZx7w4Q//CLc3e1KO5BSZ5iC+j2VmCrP4PhJbqXlhHAU85FyOWsJzQOmcpZKOg1mtpRnnRlKOx8Qb0X1KefCoDW2rKpqWk1biXRjDc5B20lCeQGet+uzf8vqlrCzHfSo33cWeVVjI1tV+DlTFDqZ9JueA8gRUl/+TtJAFxC4ZQXe9UpfPkIO4C0TPTBuOx1hR1KxOLGeL6Vt8UI8NESByDBq4RkPRR9qkosg1nDSo7SSJh2djyTWUuGhIzbEB4njuKw10i8bTmJZ/3kT/d1ly1c6NaaUZ3SbPeGRy1cKEpYSxFoV4/GklUZelJBSFX3/xp+T/Sccse2sNzllymp82KcQAACAASURBVBmnA0ZrrKvs97ekNLPb30omdE4MvUWpjNE93SpidUZh8M5g1EquUQHtOnk65gPOi3ewMgZtRLfdWUfJsxyvEWDUdY5SwWhHjCOUSufWkpJjhHF2vmeOmZxj6xqubLe3KJbnyNF1AzGNzGFCa3uMS3R2hdYWrcVkW+kZ6wwpGCllmyTds8qjqrCzFXGfKFkWgp3rGJoGrOREKVIKP/rIAlo5WXgUiTrVWuJbje7aEmyW7uVScc630qtYBoU0UWrGmA5rHLoU5mbgH1MSV1TTGuuMwzhDTpWUSnNAAFSmYMm5sD8IQ6W1byDV4nxrCNIGqsZWjdGOECfmMFKRErCwH0GaLAa5n0qGzeaSlCOHcYvzHmOlcYcEVSnGIiEG/cpTMW1ykbFLA1pX1q6Te6rIeKJ1lQz5zmKaUTgN3CkMUwxynrNkl9daqNHAWam/8x1dvz7+XFSiUNjuM9/+zutULUk7KVaGoafzG5xZUbKilBX3738M58QeQWuDVoZYz8B8G19qXeJ3z8afukTnClhfgh9QZ1WP43goYBXuuocsDZCnBT7iDlG1RN1q0Tr74wIcvFX07gRUr/oV2mhiLFS7kt07cN4jJedCyhHrNEZlaqqn7nkyOYz4vsMZw+BWlJKoRjyKre0owJwCiogqitpkC711koSEZmXFvs67ppkeDzhrsb5nnCZqVVgFZHBGH3PmO78CpZhdJQZJF7LuuskXZG7c7Z/gNj3TFLnZ7VEFBj8cz2s/DIRZLLjmKUj3einEKTD0K6x1bLdbeq0J+xssihBnqnXoIsA+1hmlIs533N5u5RpozRgTsS0cOu85HEac7Ump0vcdIUwyz1gloKvMrSJomOYJh2sMukJbTQwBVWBscgDnLDkmkYJoja4dw1BAZXo7MOmA6jW1wGrtwHhSjKwvJKDBGMd4mLDOYbTFW083eHLSoCtdb9nvd3Qri9PXVJvafORkwUFhtxP7rpSk6qKU4nDYy0KzSRLMzqOqoliJ4tVW2O9ahWktJaNR5BQo2yxjrpJwmNx6Qx5tDYNfSQCKccQ54n0n+tiU2xw+QNVQNfM84r1FG0tJGqU8VmlyyXTdgKqWHGaUtW38NBhdUFUaXo22LE+dVgbj+aHZ3hOoSnb5ie0SEHRq2glRBiitxBSdAoc4QoVcMqvDit4P9N2am5stL7/8TTQWYy2roePy8orr63t4r3n2KbEiCUH8T40xzPNI13XULKXtBSgurOqyShP/uNTYUQEuYQ5UxPKkFprOSDfq/Qw4LqXns5KU0fpUdj8ysCdW+dS92n6jzsAS56xre+jUSet63pVvlL0rT1DCtB4bojhlsrfpndpCCs71s3ePj7ufd66dFSwioBJ1/Kwjq1FaLGk96TrzeZOWEg+6I4Nby3FSkmPJR3Z08Xk6NlHRSvVNL6u1pRZ5OCQBTHxjjZEu11oyi2ZRgJHs3yC63ZwLnfVipk2lqIpu8af6uGCxVITtEi1axipD5w3GZGKY6TpFpRDjhHZy1mOc6ZxG68zQD0cWutZKMjPGRmm+sxltRbN5//41XeeJUQ
HlOm3qHTmFrl/P7RY/vPP319CAoA//CPfyImocMoxkG7N46U0VoKG7w1WCcHoFbFux+8Q+nGcQfHGwu1MY8Q3XEcBD8zL++yrnVh7s6zxQx27Gl54thXSo1Yp/FmRmnPT2+34V8W5bn0Qk4ZozwhHPz+9//Eclp4fnkmpYhzhRijoKasG2GqSiuicD+dn9AKbvuVfat8fj8RjOFvf/tbqlZk3bDBsVjDESPeSWiwtYK3HWsG0cZalilwHAfzcqYWseFY4wSJ5BreabrrOK15fjrTgdo65XkmJWHvNjq5Cq4RZdhV5OzFu6+MiA2n5UwtGRcm/CTe2FQyqewoJVOsfdt5fnrB6M62BbLuTNM0LIESyCsvgmvKpRGnyrwEak2YavBe48MJrSdizLTe8NaR8wgXGUPMBh88+7GTYiIagzKSHVgWh9GG62VDt8SnECB45vnEfiRRtwlYpzifT+wxc2yCyJKw6SRqchc/dTUelSKmZZYQOPZC2ROvs3yOJTcw9ZFVoRmc0thTAN2ZMVIRmw6e/uaJmMTjP03zsMk4Suko06i9YsyMVooYPNZMD5tlaxrVFdaIMGisHMT+pbx+faPq5MsTR6GGLv41PapHUZk7bJl7UEcBSqPoWKWkkvLhY2Ik1eH+0Os908aiqLXCO0+slZI/xuAygjYP1dI6M6wHemxeq3hOBij9zlL9/T//nr/9t7/h29cD6y3aIRBfrSRI4kShOp9OvDw9Py7UMjaHtWZizhzHTkyJ/TiEZ5iyqDbdCsDbKJRRGG3o97E8CMPTDhRO0zRVQUmwA+Wgl0drkVaV3rOEMEYS3ihpupkmz7J4YlxJ7aCR8KeKYsdbzfXtG59eP1PrFa0rNli+fPtKz00qbHMmHhfQZ2rvlCOjdOPr2xdCqizziVrElB+3DWs82+FAiXJuraXkxLom5iAMOjNO7lqPeV6rhFkgxr0KJPu8vKIQ7EYr4J0jph2oOGcoOeG9YZ4sx54x2uNn4Rx690xKmZwSr6+vshA1mPw8xrbCqLx7zZyb0COcM8+B2/UKSvxIvXWsMdJspjTbthKCwyuN9ZqYA910zvPENHmiKyzTidaeUKqxbjemF49S0kZkgkNrxfn5BaM9Cism+bjLdCBMlJoIy4ILAdUK58+fRcGolefTmfV6YQlnNIbzdBJPUvBYZkouLMuJvRT2I/P125/IpdGNQ3XNfkQqBuMDR8rElMjrBd0Nc1hwznAkcFYWW20MMYqit66JRqPWg1hWcjnoXeDgrWWMNczzTI674N2GDzCnRqt3D6kiV0unjWBbETVm2CKWZeHYj0cYUivHNC20Xti3ld6g9gOlG6rJJKFVpKO7ZkrZUFjCbNDaI31f0kintaSrcysEJdzEnCspF2rZiHmjq441Ho0VD/Cor0slcX27YkwYinfC2QljPDEeg6/b6Ygiuu07vYvvOOY4piqiNGndCEPFrFVUjdbreJC3wYPUlNzJWTNNM61nrtcrCgm9tVyHZ1kqpEsb0z7VuO07f377StOd1t5R2LHUilUj5Qql4Y0eYUNNCLMEQY14xI8jjutKwwhG/Nf/6x/4Kz4/XE2324pa9xFW/Qhy6jv2TksZgh14pruN4F6/OwUYRvyhOJpHGPXexHcf59/97Xe6xwdJRSZsKaWh5nwk/dWYJj3g/siGVGxjQ/lV902qbGx1LXB7g5EF+fr1qxgdFlGIjqOgtSUe5WGzamo81FMV8UIrCUzqu7Cg2doggyg1phQG5RT9vUmwGAkE2sv7OKhJLsI5KS+xyqKUrBXiOc3yOXdBCSllHuQEentM+FCKy/s75/MZa/9AyhHumZDWKLniQ6AUwX5ppcVvXQs+KJ6fPvF1O0jHxluMvDyfUaph90ZJmXW/4K3n6fSC1p1aDrx1pF08lM/PzxxHIyJeyZw657Mnph3VHKU7lHbYeea271ze3yUQpTXWG97XK8u8YLQUWngHZy+lCcpklIbgF7ItaDswg92JtaBklIF0FKqSzVwtjaNeMdrhzwqonL1Yr0KQzMH1doEjc/r0zPPzC7f1nWPPhMmSs2FZzpTcsUZ843ePMwqWGnh6emHdVlLaaK3IprY18dsbR//hCasURh1S5xpmUGbkPqw8X7xj3VZUt1jtKDVhjed2k3XReU2KwqNvJaGUZtt2pjBjrSi4l/djWEcsqQieTGtD7QVUFy/ztpOUZnYTT/OZvGSxgKXE6/MrShm248KeDnKuEhzFIplfoYNY40AlapI2JaA7qwAAIABJREFUSa0Uz0//SsJUqg880QOAL8dWpSRV28Y494EUQdKwdElRzgNfUkrBWv9QX4VL0uhF+sa1tsR8yCanCwooRkF03F/3VL94nj4wRnWQCe4LotZ6oGlAsDhKLkYkiWhth1rZVUNpL8GD1h6LsdJKRv9KMXk/xqxnXl+dLMRKRrr0Rsm72AjiwbZtlFJlrJDG+GMsstYaUG0syoZWRQ0uPdKqIL4aK84apvDE6eXMy2vAOzGN//zljxwx8n75MyldQGVO54n5/D05bty2P6D1V5ZponZo0eBd4u3yFWcd63Zl297p6kd6lwYebw2nU6By4zhWnHki7ZnWMs4vEgTCYZ0srEbZB87LDe/Z4k9oo7BOs2+7gNKHsig3fqa2JCOilHHmhEFUXhcM2TpybuS8YbTBuYmcxHtIm3g5v1BChQq2SyOI6fLQ2PcNpTSnsEi9HYZ9jczzgtcBypU5eHzTLMsLJRXpIy+Vv/70I8syy0MpVZJ1GGNGaj/xOp+YwoxiJoSF/BSRxh9Hb5Xr9Rvn5QnnROHLqVB7RgPWeIKfWLeV3ivHtnNskdP8RKqFlJI0zyhDrpmaIqlkfJh4u9449kP6v7/dWNM7KUeu1yvLyXHd36GLJeay3oZXWawlMSaCO7Ong6ocvRr6fqOUndwyRi+0XCglkutGTJu07lgZf7Yu40iqolU5UJTS6Bzi1dULuntq22lIkj7nMt7D6FofGxXxllmcW1CMzV9XKBVkLKwyCodWDmPV2Jw2rNN0HMbL2HpPN6ybiTmSS4HuqFXhvQMKW9wJfsKo8AhV5SaL+Z5XNJrgLbkmSi7UXsbhpozNUsPZGbphvW0fD3pbQXe2NY8WlzaqdUVNNk7jvMa7CWcDtRUZ3eVfoNDu6+Ugf6DfxDZVGtaIRadkwZ8FP9Fs5/LlZ2ptnJczVVtSXclHRKmB4lEOY7wo9VbjZkcYrNichOWINuTSSbs0LB35wEYFn+Whc9l3Cn+C830UPoJX5a5i/oKAwt3apH7x7481+UMt1Y+1V5UPG5X426Va9oOS0vGMzel4yNyDp7fbDUEO+keIk97H4eEjpHXnsPb7Q+ruku0fhQMpx8dGNeZdPMNjo3onYEjo9Z7Irzy40l0UL62lIlwPRFlHDZ5tHypWIcWCdwFrHTFleoKiCi1VSi90U0lRgrA0RYwSEtYWepfvvzUI00IpUkXqnWUKDkbASmuLCY5YMrkJw9RYTU2F2mR6mbvMPe/1y2Q5MJlquWx/QCN/nvn5K856jOvUrDlyBGT9md3TIE7
cJMAWDTbA09OJnAs5V4KXtPr56UTLkf164IPFe888TeSS2W43pjDhrMVPIk6doggNLy/P9CIYJwk5jvzGIHzkktj3DauDeLpdxzOjdUSrwOl8JsdEjoYwif3g/f2dZTlhzF1Ag9l48umVZmemINeTUZHT80SrYnOwxmNNJ8aDZZrZ9nUwbzU5rzw9BVI27McV74UVr8jj/pulLUo58fUHzymc6U1jscxTkMmYFx/6PJ1oWRTfHz9J+cs0BVpulJLQWkLpOSXmeZb1oWbaX4so15US/2kFlB58dJkgrLeVlLPYUzqD0Wsw5gwUjlh4WjxWN+yTBLdSTWI11HI996KYZ0/cM84b5sWT4y/GEP+DX7+6Ue1j0Wr3jeDYZAq0XXrT6U2UlYEYoo/WJ6MxVpFKpAzcAsbKSbR30Jpm5Chszb2fXZFSxJjhfzkqx3HQBoJIVFMZbYcwjXBL5t41f8dW1So/c+wbxmq67uLZGgGAOjh+Hw0akqSNaUdrzaGGbxU3FFsZ7ekHo1MwPOK50Tw9feL1u88yLkS8orlkQZ2kxrreKDmSohQhGFO4XP5AMV+Y/Qvear78+f/j3/+7/8jz6YzTO3/3//4f/E+//VuOFLnd/sx1vRAmg1OV53li1haXG7l2zj/+SGlNEE3j4YE2PIUXWt/Rs+N5/gFnBbFS8kFMHa/PTPP31Cr8v+Iiry8/8vXLhfM5yMZ7jzg/Szq+RfbjRmtaKAzOM81hnM5GjWNraMQ/mlUmaEFfLD5gmhK8lZmw3RBsx3Sp3k1x53Q6k7unmYZWni8//cRvfvMb6ArjZ3LKoqxQ+avv/w3v71e88UyzqHjfn0eiXcMPr99xmgLOSojPGMOx77QmozrvAjY43i/fmCbx0KZYMGj29ca+vjEtJ26bpGYx4jHWRvF+uXJdMy4sooCXIhxgq6itELyXBDLStJJb4bqtHMN39O3tGyE4rF9IOXG8X+TEHCMpR5qSkWsv4pmbvcErz7p+HYnuirOJbavMpxnnLNN0wppA74XL9Q1nn2htpZPY90zwEMxCbgddV6zztK6pLY/xagXVKPVg3S7UFqklDhJHkIe16tIk1yQ41zoYBPlmnNyzHcil4v3C5O/TkyKbuSRJcKUdqjtyUexbHYfLTm2F1jW9QleZ1jNHlIaeUpOM8mKhXhtaTbTaqLWwzAulJo6USFmoIMZAbUmsMbWTUx8II6GC9NbIuXAcP8uUR4kyJaGoPkbCkOqoDh6H6JQOSBobrWwM+ybWnt6GMiFrYikJpTxaVxqykZWKZ1GKjB4HV2PJqXNLN2I6oMN1v6FUJ6dd/OhGQ1HUusvmwxqC88Ss2GOUIGYT7FUdJJPeIdZIV3VMxOS1x8jREpw/ATw2mCCTKa2EzSlr6H0TO0a28Ags6SFWjD/kLzawv0T4qaF4PjynStRWCZaosRG8B04ZoHdRE5133NnSqJHqz1mCnEMsECX3HrBqwwbbByJuXH3tLxvnUs7s+45zbmxU73YFixnEhLsgEYsE1pwL1LGJ7r1jah/Ts451Guc8zksdb+sdYzqtSAFCKVLokHJBjxCOG1B94zzWBnJtlB45LzNP5wWtKmE0E13eb+iuR+lAo9LQvbPHjRhlmkhtmNFWpnoXK5yVat99v1GSBtOk2lcHUJne9XjmFfRu+dbFr6nUQS2dyb/S4saXyxdabw+OZ60Feh33k8Na8wgayfN5hA/DhB5TLa1vUhT08xd6L8zLRMkJFTPPL2diEs9oq+pRd1xrpVtLPA6cm/j0+sy37Q1vJHy0pSJB7jYPpntnnjzrvvL+fmWaA8Ws/PTTH0k58nz+zPVd2M6MA76zE6flM0qBtR3nRwtZEIye5YC048MseYGe8WbCm0nWyBrl542iNYU1HmMdKR4Yb3hyrxjdCcGTWoEKp5Mf/G5oZHofBQHIhFghFkfnJuan+cF93badmDPWGbZd/KjWOZ6eltHmJmjN83mSliprKS2yrkIcyDkR5hMlH+Ry4P2JWzo4zYG0F7RqTD9IkCvMltvbv5qN6i+S6+3OtesCOVKCc5BkvnuMesTk3tHGclaauq5s71eccVSlKEqjjCHWyDRK4HuV+q8+/EZNNbTVWCttGdYaUTtH2lyS/pI4h/tiax4Wgd5lg7Cu26OhYlKWriDFjFJtVMT18fNyqlVq1MV1ScTnliloequ0ch9JyGK9RYtehS6gjcZbPzqItWxincOFE+cnx3L2zPPM7fLG3/3d/0nTK6eXROiWdPyJTy9nvrNPzOxsP/3f/O53f0MoX0nbmevtxpc//4HLdmOeJr5/fubkTgSlUAVizHSr2LNgP3JJ8uDeM05rUgFv5gErj8yz5vn0iff3jSlMBOdIvfK0PNNbwnSHI2FxdDLnJTBPJ7RW3G6FczizH5Gn+Zncdvb1Sp+CfBet4a1l2y48PZ8waLbrTquK5/OTHDhKlQTqXjC98Lx8xzJNHPHKaZoIL58oJcs/61d6jkzTLGqxdzw/v7AdG0rDpB3Oe2qtLPPMMp9l1EJnj5rTMrPedlrPnIKn9ApasafEGjM5R1qLsjlC/IatgXWCnVK3XVBPltGYI2Ea5y1/uv6MepPCCecC6y0xzTNHurDuldbAWwH6W+cpvYzAnyBQbsfBPEae6EJMArHvKqNUp9QD1CH4mhbQRE7nV2pPKC2J6MlkpnBCdWmB03ak3JFFrKmMMY7Xl+9xIZBLIu1XseCoE7ROjqIYliYUhtoqzWy0ngCLYqY3N+65zLbtww4ii2ouZUwlqiSStaP1Tso7wXnBjlWhSZSS8H7CmSD1y+0gpfvD/0AsIQsxSuuKMQ56J3gLqlB7wzn5ri5vf2S9CZR93WV9KsWwHTsxSaOettB1oxWF7gFjNM4IFspYJan0Lu1SfbT+CG5N1r1KRTtNK1BqBKWx/l7OoUYARdL7MCgO7V68oahdSkfutao1V1rckBZNJ958k6mMETDChS75GDanPg5QsnnyXhMmK4G3JGzPGHdirNRSMRaUllKNjqjaosp9cErXm2x++eEvN6oycpa6Smvdw/5035iqoabef1cJa9ZR4PGLNr+HdWBwUtX9v32Es0RJVY/P6U4gUEomSNLypQaQ3GGdG5XZ97DW/QElz5yCHBLuflnfi3Bwx2tbb1JROV4579SayOWgUXHWy2eCKFR3brOMz+vHc0UpGdNbCRSH4LHGUdb0uB7uv2/pRTBF5d5Zr5nnwHFAKpGurJAjwgTKoHt5qJW9a44kGxh5v+L/NsZSamHdpE1tCgteG8rwZLdUUFZhVEc1EYGKylhnZWRfJLwqh7BGSjtWObZ9xeiKNYpmACTc5vyJ3mBbNxGYRIKALgdEPwlL1XY48oZWaoTiGnvMLEV45rW+YayHLsq10khlei30CtPPHu9HNqN6zucF6yWYuF+vbNsFrRZucSXFQ+D8KuLsmZbhzsyupfLd64usATGitCanCzl1pmlm3wTzVbLCeakMPs0vlPxVJj3I9zxNgdKE77rMk1ijyjvLlHHOYFVCa2mDc0YxTQGpR5ayI5UbWgdq6VjEMq
W6HFBqr+yH3DPX9QKIGv2+JbQ2xE1YuN458nFgs7DMT6eJrCypRppOlFaIWXjTelhqjNUsfgYMtQ/OdizMp7NYWFzDmkprhZOfqFnzfJqoOTF5R++JJRhkuFJ4evpXgqeajXv4hKoarQs0tJIzulXt0fyTU6INbEtX8qGpePAf/tbz9r/+jl7f6CrQMeQCyll60vSm0dZQS6bmhp89pcqDT5Ax+qGU3jmrpYhvzlphtt43sKKsil9LktOrdLurTkkROwnM22kJVNEVqvcxsgbtDLVU6XxX95FmQTG4pGMj3ZXUGNaSmOcTMRbysY2FVyrbZLxdebv8ga9f/4nvXl+YJs9xXHh+mmmps8XAFKx0IRfLLQnu6Nvlz3z6/nuO4+ByvbBvG/t2o+aDYDpPc6BbRyKz14P9OCitSY2ttry/faOWylEPUha80Tw9UVLHh4mUZSHct2/Ms8O7E7dL5HZ9E6uCCeTY+fb+xrJIovN0ehajd21M04mUknxvUXx+ugsSJ+7ykP3y5QufXz8R9wtP5wXVd3Rz/JtPvwEFMa7EDZ6c4zzN/M0Pn0kxYYzj8p44Py3M07/ldDqRx0O5dakRPM2Cior7Ti2SSO21k5IAwAVzZvj6fsEYS6uJ9etKSlEan8IMyrLv76KCuRO1JopupHrgVcBNC71EaJVWHNKCJBOCnJMo+P3GsVds9hypUJVH2zHSivlRKRjzldv+lRAWSXQzKAhRrrM7deIeRFRK0QfVQLZMhhgzSkWMdVinOPaCnxS53oT3qaDWnVoPOsdIexvxmpWNXA+24xulvWGMWBxaFRVzzE4kSIIoJb1Ir3TqO7Ve8c2AMpSa8V6DHpD1Mb7b4wFkXl9fhFMbM0dcMUUawIyeOC2f6F1jjDygjes400i5S8JZtYFFyfQOe6nEI1F7Z48rMd/T65VjSxx7ph8V7y3OTsQo338vPEam1ioK0Kqmlk7q6eG77uTHGLmMZhZrDJ0Mo8rZWjVU1rtFsg+IehkWJtmU1VIeQRgJFkndqtZCA+itjeS4eNeNvntA29hoWPFZa4UxXWwqDWqqo9VG44xC9y7FGCBouSIYH+ftIEaI/SZHafwy1lI+9mzDkvCBaupdPUJJWgtoPpdMbYO0UetjzN7H3/0o/xhjeTXS+/qXXlR1946Oxj314d2/bzi1kqYypT7e0sMz2kQVVUQ45P9z//8L7mqIFUqNg0V/2NIwH78fSK1q+wX5IJdI110UPd2pbTTjNRE/jNOPkG3L0FURLrPSY7NlxmeUcW7GGNlsO2sHqsyM9zl4oFVwZ603ygh9VTrXbWc7xJsOjDKETt2utFrZ9ob3E8aJFe3IonJPyxnrLFYbsIbQu1xT4znKsDOUKgcYazuld+bpTCpynwojvWF14BReyVXsea0XejdYPSZQqqFGK5819sFq1Zhh+crkIzKHRSxQI8zae+e277TaKangfMfbQD4i1mpilnbK4gxZdVyV+5VSuOUdHyDGXeyH2mLJ3NYD1Rv0irOBZV7o5zIONh1VFZfjQqmNqjolJUoSJT/XxJ+//p49XvH2PDBVjnRkUoyUWmi9jnYwI+B+rZimGessl7dvnE+LfFdNlPSSq/jNkenyHMIQ8eR6UF1g+koZ2S9ZuQe8l0mcNqBMYJo9x75TihBGlJZD2nGsOHcGwPsNbRrWiq2haos/vfB8njmiPJNySVzXxHKa2dYd1Tt2Cmin6EqCq3tcx98XMTbg3NjflYLzQhJKNZJbwunl17aH/11fv7pR9eofSHn06paEG1WHVQnIv4yF24ZAyjdKTugepB7TW76RmJ4D/8v/9hsK/42SHBYvakXtxGLoqrP4T5R6o9SVoJDQVnc4J194SlLFed+EOiseTzG2a9r9NPyLU70xhn17QxHxpvOnL3/k6WVBK2ka8d6x7zv7fvDy8kxtneMWmecJM3ixXk9s+451luDMeC8DmYWmAedgKBZOkyhbtUSmyRKPDRcuzGrixf4GHyxPT0+clv+ItwutdrqBeTqjJOshY0kUfTD/Wu58+rHyn9RQd4ZaYfSAhVtZXO9dv3SFdYIQ6UpRy2XQE9T4nAA0pRdezldShDB1ejfUZJnCE8YIPWHfNs7n74b30PN+ubFFaWHpSVTMeZpZTmeOI+FNkBBWFa/qt8s3vn/5xPff/4hujfN8IqeO05WUM0/LwuIdl+vPhKCpeWHfhQVq/IR0UGXKCGzFtKFQLPMT376+CdKmHRzpGEgXg9KSflVI8UHvmu+++0SJiePYJbwgrghKjgMOv8Nog1pOJ3pz0MRfprSmu0JJmd4EQ610ZdtWlK5450i5olrBuETXjXn+jpItioDRu6ixVTYDtURykpPOFGY6mf346F0vZShUI5wSjMHZSZQp30b3ciFF2dAdcaVWxxyeqSWimqc2A0rqGbWa2PeIUoleHbVmvH+iVincbXrDeNmQtQZKG0GqqIw2gc6B0pngBRfWesO4Mq5Rj3zICGu5dwkKKU2wXnrDD/EMO5eYp8bLy/f0puldyXXfHconatmY/QtHLGxXaV4Kk6c3CRjEfeVyvbJukd4lKKKVFBLk0sktUMah2VqL9S+CheuKdiDYlV5IeRPKgLFoqygZGkqS2WFCYYSBrIx455UF7iriqA2uCtWtWAXuqmPrlBHIKCN13Rto41BNSWCtNZQS5FnOmYIcbJ13GGXkgd52OaRYI0UnrY7vUTZntVZyLCM1JMzqe8C1taFkGsOyTDjbqKWhkWvt/jpPf5nkvSuqQjURWkRtglVTSmGbHozdPnz85qFqaq3x3v7FuivWn7t6eldix9qsRkBqeHjVsJChJLjUuqB/lP4ocBkLO62rkb6XEO0dMwZtVMjK94CCa1/55/T3wG/G96MfAS5gBEpksiFV2nddVw9Vt6G1jN3bsIsIOUWjpRtv2NcMpRdqlQ12LJmc5Rq8h73u5RVG3+0T8r6PfC+uibhsHgeF8cFhtKPkjLEHdYR42ghYOesotdKqYBydczKVrGqEVAtKdYyzeGPJNcrBS7uBkRyoNhWpZqUr2bx7C7P3GO0pWawxJYudRSgLjZKKTAhUZU+V3mQyIAhFM2grktFQTaayGAkIFpXpttPMOIwrgzdSjlGqJR8RVEEb8MnTqHjtSBFq2+gtcj4v1BrRKK77V0rZhYVsR8ioDv9yh640MYGmCcRMdeZlRnURpFrJVHVgrWW7boLq04bbuuO8lQP4caCjqPrKKLb9IEyO1hKpVKz2kkVYd1bv5XvvYEf9q3V+fH6KFDOtNqZFCkaMCWjKBxGji0g0TWLnSinibKaUOixrAR8MYZcihpI7x1EHM7exHxu9N15e5dDUWoWWiO+Cssq10JoilsrlsnE+nbAlclomehNcmLPSMpqG5fNfyutXN6op/RPOaFCN4K0oZklQFufFc0THfhxMJ2GqTm2iVQnkWMk7QEt89+SpHBzpNpAbHUtnsZ5UQfc/k0uj1JVUJTWakmFerMjuo32oFrn5nQvkFHEuyPhnJP3lxD2A90aRjgvB/gHbv/Gb7zRu2ohrpteNiTPKb
+hWeZruCKg3aJrT8oK3C05BUBtKS/3jFBaqF+9t7RWrQLeEn8/88ONnjmPnSB5lHJ8//YBVBfNXAgUutWJMI9iJkiqtR3Jt6DoPuLac2GOOMjJCM00Twdnha5XQiyRgtdT0jVGdNgIV7kXGd/o8zNajd7w1OW07b4lpjHpLomEJk3ixaqwCeW8JpQcXtgnHTms9WknywzskHd2VY9+5rRe225VcK35ylD3y8vxKPmQ8prX43jqJbX/HWSvJ0OmJaZJ63X2T4oFnrZnmE8d2sN1uNAN+Dhx7JMfEc8pcLhe2uNGUoHv6OIHKyDmBKrSc0Uxcr6MpDfFHeu/kIdMbtR04b4jpBiqgdBA0kzGUeqMhalrXDe0lSEaHHz/9wHGs1Br59PwqdXrOSbNMs8Rtp3Xw3mF0Fx5xlXHqeRGg+u32hnUSo2u1Dj5lx1lNLfdWnzDqHMXgDrLYeS8KkW6BMJ2xRg4noiaPDSSZ4J5JeaW0nU7BmxO6K/GXWcNsZXRnjNxHtUtJQSPTmkIFwbp4e6LUnSNdsMOS01plW3cZu/lObwlvZ461ks0+NlAz8ais1413fXBb21CdMkZPlLJzvSTW9aD2SC3STiXeUgckYr7Su8X1M2c/DeuPLKBGzzjnKBVqVfRSHhvGME2iKqWNVA86CqO8KKNISYFxBt14KHG9N3Ip0gKnhOUKUIuESluXcaxWjAe4xxlPjGmocvK51NpB2XH/xRGEMuQimD09NnTOGmpupCq96xo9OKCdENx4TxVlFWBlI2A0JTdaBR9mgq2k1Abn12GcKHNae9poaEN9qIl7yh8bQBA7ihL4vFT1FjoVZYbH1GhpmUui0lmnhzeYBwYO5BZrg8RyFxXu437FCDr1v2zK424ZUB3Q4jdENjdyuL6Xkyh6liIB6wzaALWjjHiL779NS9JetbfM31++cN+o/vz1Iu/tt/Jzt20fCuxYG7VY2ET1lVGy1vKZNe6FAqJga61GWYwh1Qalg5IxtdBdOrlK9qHWNtYbwTDeX+Iz76IgdjUOp8i6O6xtxoxrokEuhf04UKOs5m4laQ+6xAjcDPpNrRXdRQ1sGCCOaeA+SAx5WGMUyot3+2w9p9OHxeKIWUA/KqPsJP7snB68Z5R4MvUQQGrfJbB1/zKqwgVRj5WxCKRCPLu5iBfcugmt+qhXVtTOwz5QykGYAnko9ChZ8/YY0VpRyiZ0h8xYT2TzqZSS2nUEzC/1rY2YrWDp1KjVjglp4WtYOm6ZKTHBQM99+fqNZVkEAWdlHTiOym1d0TeFccLKBkuYAtM4wO3XndfXF5RKNLWhVKAybIO2C8oxrSjtkONwJe2JEDzeeHKFtjf6KlSHt/gNYxxussR1RW8WP7i3qWxcLkHUfq0wJtBaI43niXOB0hIl3/GiMv3w3qKtJdadUhPXbed8nmmqcMSV0/ID9DwKSP5lvH5dUbWOyRsMna4MSnms0tKiURMQWRZFr5kWG59ezlzfvkka1mhS07RamCzkJqzM7Az7euN8XnDGcpSCc5mcqiRqjaKWnVIbpczDhypX/8fIp0jHey6PG1UwOffkpix+Rz2Yl8jLOTOFJ2LdePaW65pw4cAQOU+a53Nkve08LwmlNWHeUT3x+Wx56xeCM8QYeV4spSpa2ygUrPM4XaBtXL++4e1Mvl2wLhAPyzVVXl6/ozRFmM7EvaKCNGcpNaGNqF26G6yZsFrhJkvKbSRToZWCN4aasnwOSrBDujeUmYi1ULP0QRucGMUp1Jpx2jOKpii9ko+MHfgshcUoDUUUMBY5QRs9o5TCB/HLBS/qyTQ9ixJi1Dg1NnlPp056PcjpEMXSICn8wb899hVM5xYrca947UeTmebyfsE6y7FFdp2ZlpnreiVm6Vm23nDbrlzf/iQjGTRL9ZyeJ3Tu5LVhmkc64A3WOybrRe2vO7k0Xp5lkS1FEC61NlLMOOvo/Yyxnpa/YX2gxEqwHqs63p9prTDPZ+IhnrnaM8E6Fu9RpdHVxMvzM/uxy6l83+jAp5cztRZOp1c5CJQ+KlgrlSjNSMkQgrBP78UVMR0ShvHS/taaQVsJYqyHmPZLLRzbQWuJU/hEjpnYZbKgB9f3Xi/sJ4PSM3mVYEZvjZIikz/JIq0LxlVCmCXE1SJocOZEilLjWdCi2FB4On9HmBy1JfYtYbWi5opxFe8CnCRdiy7QPDk1lE7cbpFtX1n/tA02b2HfMtZAOhrrJmGu1oZiqQrOBMIk3thOIUc1FF9JWyvV8M7S68SeEr1GLDJdsUZjmqgr3kygKrkegKYmwWp1lUF1uQ4qrPshD+CuqUWBZlSPKlHcOnQyxkgF4R36XaMA8lsrYo+ywiKkiydXdYNqTtRPErUVKqIgVgW1VPHHdaBL7adWEPt9o9doDpwVj63WI9hpxDdbUqZWebiTNxnP1y7rVJVxvBxyfwDgut5EeRwvpT6A+jKSL2O9vSvJfSiewx5hZTfXepfnArI+3INXIR28rF+5h07l33xYVAdqqvdGchPfltfxZ6jhX1XDY9FlnWKqAAAgAElEQVQf9tVa5f6x1kp5xwNT1R7WAekUeBhgqe0jCCLe4Y8CgDg2RlrJlOzeqFSyHOD0qO2V8haZ7ZSBAMulUs3dzypIK6UZfkhptVJGFHKFHgofspEdG3OxCtzDvB+HJMY4uHeZ6IB6nDG0lcBwrjI5QAk2LOWM6WJ3Kq3Is8Q5jv3A9or1WvISRmx2NI11p+G7zIRZFPcYE8rIwbnTmE4G7yfKuI9LBt1FTEi5DyyfoapIyRVdKwonkwQNqisqDHa45aMQR5HigcLI89NZUixC7/BOKp+9R3XY1vg4CLWW0LrRYsbogLYW5ydsMKRvb6z7iupKEIDDwqGNwjmxNB3HQON18cimERQFKLuQG7z31BFEs9YOdVxLnqU3OhIw7h1UNfSembwmFSmC0Npg3UzJUm4Qwoljl2B2GkHSWhVKWYSjOq5B5TiOSnMVekM1aYEMIQhDmMyRIjlFwLJMDWnik4ayfb8yTzP0Y1BlJKxldBgoXEfOB4Iuk6nLNE+U0nBG8fOXN77/8TPejX3T1wvohlbbr20P/7u+fnWjejInYaGOq0/wnpZU5UTfUsYGTclpYF/2MTYxlFYH5kmTSiPmirYO5ybe88b1kpgmjz8tTEFj2Jn8M0Ynmi7YUSEotAHxJkmjlTyMvXcyCh0PaJr8/C/9QXHbSNcLUy1MpaJrQWuHO73QrRUTPDDFiFMdf3plPTb29zdiSRyT/f/Ze5cd2bZtPevr9zFGRGTOy7ptr8PNsuRjuwBFoEaNEhIlKtQRL8Ar8BqIIiXewAJLIEAgKhQAHw4+Z5+191prXjIzYlz6nULrkXNtG5YtS/a2kLs0pXnNjBkxxuitt/b/38+erqAnlFU8rx+k46EFQXM7CsF6QVvcIt989S25fOR0fsOHDx+ZpxM5v7DtnduzpfWK1ROn5S3r+iTMwF6ZJoc7XbiuhXn5Cu8WoSY4h9UGZy0dOUHnLAgaQxftWrmf9ouMpnvHKksrjVyzdJ0rWK2pBTGFoQhuQhk1
uqvxtYMh3UYGp7QO4PvYFF6NFXKBay9wbOc9UQvxwBmDOgEdSlcc+4afDJrOcRw4Y+k189ArR9nJKeIXcYXPy0QqB+u+CjJEzeRUWa8rbx4fJDIzSSGmu2Pxjzjn2LZNpAfKUWsk74VpeuR0tpzmN6zrRo439m0Ts0kVPXOOReJDh2g+xhVvPMexs8wTp1kCCCYn8OQtZrSXDrQPE5MXtIdE8CVKjYRJ2LwdEdC35HFWk8tG64XaROP5zftvBxKoU7JoAr3r1CZJVkordJcinB5x3slm2SOtK7RzdCWsxpYFAu/Pb6ALG9Z7Qep4d+LxMlHawXGIqURpKUhazWJ+qvKZGx2Y5hmlNaplKWB0IZWbbKTdUnLFuiCdVd2ZHk4cx0ZtO94HrFnEMd0SKclhYZol+erpacfaCWM1+3ZDdUdKlWXLKOXoRDqZObzBu0kmGFUKslqgVgnKyEVCClCwbQdNVYJS1Oo5dmGoOqtxWviDtWZ0djR1R6Y1gfZTUcbgnUf3OsDyhmZEfiNQfzWoI5XJO5wTzE+MiVKk0LUacr4XLlLIHUcUacCouU4n4Szuw3SitKLkhPNywMw5j+AD6Zp6b9HmHrUqsO6cMjIZAI2mlCa0j3Y3AUkXrjZFKWm42M2r3hbgfJ7/oGhTowg12gpycHTppJYdkoc+isHaxshSAZXWNQbwaaNff8s/aD/jqHyIm3TNtCGEAINiAaOxd6cJGItd3/M33XccduLp9EZoAPXL2L/1Rmti4tFWpAK9S7eptS6HCDNq2lGkyuv+Mup3xuLunV9EI6y0E0KKEUoLSEFsBhf6bqrTWr3eH0oLoQYt10Zvhq6lKw6dVqVQykl4xK3dOa1t6M+leMolQ/+yT4HEUBsDqD5Mh04S40YzRlBqEjgTk+hulVI0Bz445jmwrbehgXTkemD8SHjTCj97IRMYj3WOlHZqSxxxk2vSQGlC0Nn3DF26oL03YsykKE2gVDK1g3NKGhKm4IKj0jCDvauNIu0ZbSzK+C9pYa0wFGr0EdyTc6QUBnFD4wyoYDFKYa1inuWwum0RRre1G9lP6hZZphkXnFxnXZBvILroWir4cThSmm4yiopWDq/DQOM1VBNxQGmyL6iuOJ3PrNuNVHcx9VmD1p15DuTah4Y5s/UrOpthGpUu+bZHvDeiYx2UI20MvcjkR8zhDqU8+3HDWYvSnZTlvnDGIWI/YZ7GnAYSUD73XAu1ZPZeqRiCC+zjeaSV5vl2xRlLjDvTMuN6o5WM1oYjF/Z9J4RJJhKi4WH9i58wJg3u+JXWO1P4wwj5P+b61UL1Fm/4bknpkKLIaI5caLGwLBO5KfZbxviMso2P24ELjqPuoC1tvWEH07SUSo4b1gWKlsz30m8ct41bLORkOV9m4rFJhu19/MwXsLGMRAz3WNK7mep+wgZeC9XWGtt1ZX1+wRyVtjdOj5acduia55efsalyOs3UUlBW9EbTMnG9PeONoqckOJEuDxvVo4z7lcdZz9YKedtFqB4UWu9YU9m3T7x51JKelFcezwu3lxv0hDeCu5htQhUBnD/fbjx90PjlHaU8c348EXPjuGoeH95wvQkWpOQup9wsMoemM9ZO6DrLyKkM5mL3OH+mtURvCloTxJZWo9uSSVmkE13JyPBIggGzw1ChtLiGWxuygm5EcpAqjTujliESH1zYWqgjNcxoS9AadzIoK+J6ZU5jRNjJOTH5N1zOll7le2AkzlBdn7hYw2IVD28yX3+d8M6T90hwhnV9xli5rmo7wFau18rpNFF7JdaVtGbyp8jL0/MYxSpSzlhvSFm6Z346c9tf2LfE6eEtxnZybtTWebm+cDqd2bYDpaRgD0G0RdsmiBzOE70JizTGgzmIszXtIhPZj4zSCT9p9nSlVkPrWXSkHWIUaYRCNjjn5HoupYnmLHhyrUxuIoTAERMpJkqT7nppkTBN2NTJWRKberc4u2Btp9ZIbwWw9FYFdp0iMWeW5SSA/VxIWVznMe1DVyevSTuNbkOe0EQHmEtExcgyXzBGEpyOQzb7ZisxifnQ+iZpaeYsqJ5umcMwKeqGevde9OI5c6SV1hpTOInmsXbojtNyovYiAQ850ZqjZIOxUkBc9ytb3Cn5RGudeGg+f7pSSibnNAyWTpiNZiaVijFVNu8cpWiqhtoVWgeZDFWhDQjs3aAn6apK505Ys0bJyNaoLtnkg+3cWhtcWU3zFjN4qVpbnJdxeAh31NfgjVojxb+VwslqSQMsrUHLMOQHOSXZpJG8bhmfW5kX64QxCt2doOaMxgUnzM9/qJtYe3mdzsIoVJWYD1uP1FpoTeRN4gOQYkUpKcTrwGPdYf3aGIKCXl5Yy2/H+B8yBYNFZemQdtrAoAEMiY6GpfyeB/uB7+a/CedHeU1DQiHN1T70vlKk3nVz9+ZpH8Wk0tCUmLjkPf9DTarSX/7XIhWtOG9RSvS4rfXXeHAZ21cowq7tXczBKSdCMKK97FYwhHUV6UiX71vrXbbTxvvjxpTjfh/d33ct5iwtOCfpOguFAq2xXqQfFrm3ZMysxiFbphzHsUsB1hUxNmI6ZOqIyDe0QZLmIiMdKonpscszsNR9ZMFX/Igqr7WJhrdpwA5+8T1N7MtefFt3WoOkHM4qwiyHc62FMWy0kxCbJt3mMDk6cuDLWhohox8v0hPnMdqKl+M8Da8INA72LdK7f50ySpp4gV4oRfS6D6eZnDM5j8PBXcpzKGpeoSu0ba/vda8K2zWlV/wU6KWRa6UMvOJ6fZGQoabEsKdhT2mg7JD713RQMt2tsY3P+JCCMHWsEU2wApxS5NKGDAgx83WRV6SS5BDiHMGJ0RUauUVKlnhXpQ37nlBGEfMu11IFYxV5hGh05PopOZNNwdqJ7bjBMSazXmR4jUbMkZbkviqtUotlPsHLSwZVmeYzXf3ChflHXr9aqK515emaOJ9PvFyfZGNylm2/EatDqUmMNN2yTI48bqLSdmrR1F7QtZHjMT70ynbUcdNEutIcUaGpvKx9cMFmahLdjxomAhSgFTlLy9tYNYpYGd3AGPvAcI+KpvO2RQqG89uZfCheBpeyYVC2EYLk8sZSSCVitCcYz9d/8g1vH9/w6dPKMs/CNLPweLkI3aBoNIp3Dw8c8ZAuZ1N8/vATRjUu04lpmin1oJbIsW8EbbGTQxuDVop1r1hbefv2Qtgt+7FzPje6Oih7puUDtR3EMjGFGV29QO2viVrBOU+MlpIs8VCcTid+/OkH3n/zFqMnnJnxIQASK2vcfYO0GCezONfEDdlqxmmFtl6E21Vc9MqKJqq0SqkNbabRzTWIz0QK4NYEt2NeXb3yGXQlmsM74gWBJkgB4x21S+Rn12I0ielgDgF1uuCtwZjAfEfidI26KFrJTMtX2GCHPqqQ0sHL84uAstVbHnWjt84xUEytS6hESplGISwPxBSZXeCyLHxIn9menvHecVw3jI2CADpuaC03fjxuaKVwzstG2DJPnw6MtSjVR4SffC7zvJBLI+edEAwpjhhNbXg4PxDTjZhuhHAaFh0pFmLKvHvzhhgj27a
SixTvCkstwujDanwP1FZxfkJrh19k4zHGCIrJdE7nmVpnbrcrcGC1IkakIDOZRiW4SVRStZC7dBev27O87skzqwutN4I7U7WMokpZSeWQDb1qYjvQKjAFL5DrPNBcbiKXY6S4ScSyTEEkjc1ZkRNY23kzXUQysoPqBmMzOWb2PXIch4zySGzrlettA8Rck1IaD+gqY7KqWKbKuu6sx4YyjFGeIhfB/qhuxT3rKuAwBOmOdZm23FnQbaQQaaUw1klXs0roSO8SPuAmh1JRSA+Tkq9nhWoQY5GNRUvYSO/70I6Owk/XYQYaKU4MjWJvYrRExn8GjVKNpgp64MNKH+5gJSNlrfpAhMnsqSNj6tKkg/9LxqnQGb50SsSRr7knchkrhV1NYjKTjmsH3dFtBL106RqrMfL/WJ746fi/OdpGG9ezTMMi9IaxsjmLrvGuVZUNfm2ZPy+Fa1q4tO/hVZqkhuTA0LREcSvVX2ULSonEq3fh4kqndUz+hynrvpwXOcTrpqetRHrmiPPSge10fDASLVzuDQ95X8QsNLqrvSB2EyV6ECWdqj5G/b23MTZO0k0rIp/QmkGakAMKTb0WU1rrUZwgnV7jOWIVkDyKnDLO3Q19Ih3oXUI+elds2w6qj4NAxzkxZL70naEoICeF8wBGNNd6dPmaaI5TMSjt2fcVNfTQOWVKjuM6kfdDNMhOzJg0GoIIE3pGoZY6JiuS2ij+ETX2cSUdeW0oqkGX16SyuNmXObyO47f9xmmZ5QBVG/PiKbcd6/XQXYup7TiEEBKT6E6HWIWcktQZ3VB7BBQpVoyB1hPHnoQd3iMhyPusGEzhYRb0Vrw3DTiiYNPmSaRkOWcUDeMGBSIlTqeF2hJQRfpTm0yjgdqlMNdoLEakVr3gjOaIO60HdCnUImY1e78Pu2JylnW9YqeF5/UzKe9czmdiKizMcq+XTCkiB0EbulZUpchNonatUqIpomCsaE/jFpGBa6c2aF1j/UwpN1KO+PDlsPfHXr+Op1oCOhlcmPFZYPXWGpbTmVQShp35QdNLQduAMcLyM93hJ8NeGtSOtnIKC36iKBk916wx04QOCX/qYCPr8Zn3X5/YY6IWjXN6GHvaq2PyLmwXrI+MVL4AowX2D11MMqsUCH7yoBXXbWdyUI6DKUimdCoFpQyzm7CTJ9dE7J0tJqxTnNTEx08fOJ/forRwCycvcPbQNZfpzC3vxH1HqcRy8dSY6eP1GqM4toI2mrB0OhUfZo6cBJ9RHcFPLNOFUjf29TPOeRZrqJMnpUihYYxgT3qN9FI5UuPBPnK73TC5UqpDx2f6Fmk68HnLKKuGflNwTNoYtiMynx5YljPXIu+dc2KyOp9PtIEDs85T80bJSbRMYcKaobVkFLAVzMDotNpwzgMdbcQs0mqnZoErG63RVh4HAgpvMr6tVZBNSmONGEqmsIiWpmtSjnRGQpoWjqVuokk22gMCOA7TIhggJP7RGEN7J50kYV3KeC7XJBG8NIgSpfvV+8zz9WdyipzmKsgOZdHWsR8bzlZyTsRjl8K7JekC0HAU4QHmHWscs5s4h8B6OzBVE9cV6wI1ZdyiqHknrYVWDcZlwS81hQ8npvmBIyq2XaD/1hhakUJfG3GTOtOZgqfkytE7H54+M4UZ5wMfPz5Dh3maWdedXi3THDhf5ACSojBmu1Zo5WS8l5M8/GsVnbN/5DgmaMJJtmaG7uktD2OCoTeD8Z5aGt4HvA/Qm2gmR5yocU4A/GXFu7PA/DPUdh0u8gdCCAM/JzggZSTu01jpIuxD36x0k6xsfYMh+Xl5XtFOc7qILMNZg/MSGLCtFx4eZyqVI3du10iwSniSTliUMdpR2DTBVuWRDmPsKxpJNt+E7vcD8whi6Eo2PAq1iglxCoKhkcKj4RzUFsj17jgXzE9L0hXVSqQ5qkvB32qjKpCseYV1MuarrSFRr7N0wEuH141JCmGrNdoYrIN43HmXI8mPNugFsnIrQnYYS4pYPcIWhLhgrciEci5Yp2STQ3SbtbQxymUUIh2P4VFP/Fw+Uiv0Lvdqqw1nBjVAfXH/tz4MUEpDMzynA8wLD3roQ5WR7pvSKO1xU8e4RsmJqtoXw86Y99ci3b57IdebEr3vfZNz+lUjCfIaSi2j825H0pkkPmljcH6ij/Gu1g1lBl7MDi3w0NiiJH9d6S/JbK2N980G0Wa2MjSvX9BcrTV6K6imh+b/Hjxj6KVLiFWrcr11yFmaOBKAoEj3ru8wDyolExmlNKp1ah5Nm4G9KwW2GoVAgoSu+CBRwEb7YYIS2cfkJnJOGOWwIZDVfYJpRFTdLRQZq0/zicQmJsAkndlpduQYoRas08NIxyDOyP1jlMEbJ3HDNeOdRfVCOlbWUljmwL5vXJ0ZhWonzE54pMqMIl9el9ayz1xvN+yQHlQaKReM8/KcN+31MCZA/U7XlXF3EJOM/K3WpBSx1hEmT6yZWjtuSLykFBdzXW1ipipxTGaQhpfIgqJg3lKSyZi1guscpr2uBkYN+VxzreRd9o/P2wuNztu3D1LjUNlvm0yUbUE7OZhV3dHBCF6wgbJmdHu1oDG1pynw0wnjArOfUEqz77tcZ0oRZkOtByiRQbVmhmRIk4s0GP5FWb9aqG7pBW0cv/v5B4w2LJMkltQqGe3rdsUqsGbiuu30JtB0VJBitEisJOb+TFLkCk5rlHek4iSbO4j7utRE6R5tLa0JgNYaMwTIGmP8SJX5xfhnCNTvqBgRE0kBm0vh89PGb74ZpqwexLFu7lF5gosxKM7hhPUTH58l/SceEaMbMd5wTnRDrVRyriyTQpvCuq4oErkWuilM80xuhlI7La7Uxsizn6kxc/0s3aEjfcQHi+uZlx8/URuEOWC0Yw4naknCk9wLtTXSvuNsGQ/KJGPs4+BzTTxeTqhFRnKnv/Y1FUvOO48PhpJ34pGYTeV63fjw8TPnyyMfP/2OD0qj/cJpnnm4nLg+fyI++YHPOBHCRGyVbbtxu105Xx559/Y7WrO0rijNYuA16UchQHh6Jx4JN3mstsLFvevTuhI0kOmkJG7o1uT6QOkBERedTclFRPnaQK/iPKYRUxWnd5bAACivzk43InAlnELMV/QuBcpg7H4xPHeqdTQqplXeu0dqPWQkpz1GK0qHOR1SJLYqG+WIhU1pR5WCVh1jO+t6lU6ZVbxsG/F++s6OXBtHTMRykFOkVSujliXKQaQoQtKUojniQYwb0+LpJYPSLPOZ7iz78UKuifNZNoqqOtt68OnDZ3LRfPx4RQHeWXKSxJ3zeeLrb9+LxnYVece63rDW4sOMMYaHx2Wk6RRBm3VFjoUwa3KOxGOlU2g9UkqioznNZXSeEta+4Lyld5EPaCtdvFI0OYtebJ5lLJ2y5GFbOxFT53rdOdKVUg+m6UJr0uG1zpBTIyXJArcvnlQOQlhAW+w0XNNGuuyxJ25r5NiTFNNdsd4KMWXpQKHFOR1Fi3jsGWgjvUcmM3J9VBmFA0YSA4RkUkcy0sC91d7HNQmqKmqRjaKUzJ4T0qW7h5hI6hCtobRimWeMFRj87C
70btnWmxhGBi7qcgm4oIdJNHCsmlIUl9MD6Mpt/UzORWgHmOHEBh9kFCxGm7Fh6y/dRWv7kJjIUvoeeCLBDXQr95wtgmkrXZ6RVo3x+ehOjqKzNsWiHJpFJksMGVCWTbsbO+65BsNcVFsV1/SQOmAGR/iOytJCShHlV0fT0G2A/vuX4pou2CF6R3eDGv1cY+ygRsjat/yluGVImagY46jVUOsw4w7tsugxX/8ysedhltH05l8lEMLFHEU8cE8n0yOW0hhxld/jWlut1C6JWZKOJJHQtclImy5BHvb14+koY8UQpOyrTKO1RtzTSNaKr7piXjvKo7HTK9OkXzW0Psy0Wtn3jXwI4xXTUUZ0oCkJ0F6jSWmT70nAWZHh1VKlj9oqOUZUr2SVaC3hvRSR3YreVEyEoHVFa0svjdYyJWe893jtkHdC47UcxluOiDqgE2ygV5l0qu647qsUjJ3Rza4ydu8d1SWIpw0yg7HSFEnDeyHplfK+CX0go6wGKgbBq7ng6U2kD7VE2iA59A4lyXuN07xsN2qRWNR2n9YMJNltBDHs6WDSGu2kIGyDOJOrsJbjVnDagQJNH9p4+a9fjw3rLGs8uBsVS2ssy0yqMrlz4cSeZGrYjUZ3QcKVLHtjrpG0RaZwoo7JROkdo7rI34w891zwmKyoSj53uiKmXeRoRogY/6KsXy1US1vp3dJUwfuFWKWtnVuUVKSWoc/UXOm1M7uF0sqIBRsnjy7EUaW1AMl1Q1vH037F6TPrsaIng1KB1jzpkII0azFoGR3oPY6NV06NtYr2THQqUqjWWochSM4+DHH0Dz/8xN/6G2/JKTL7E9okUAbtAz7J+DHVg6wkQjKmTAgGWuR6y+SSsEGz7lc0XsDiCcCxbjdCkPhMpRRpV8Q9441jiy+4aaKVBLbTSheNm+4c8WA+X1AlsF6f8ZPjtm6obpiDJAZ5l9mOKKBl69jijcfwAFnCEKCyl8SjPZNzYjs23n71tXQ7emaPK7keeO/Z0hPWK95/FXj37sSHj58ppeN1YWJHHc+Y/TOmerwxsD2Rdk0sFUXF142n3/+Wtv5M74EwnwAxk10uD7QqkHtjhf3Xe8WXwLJ8JSOi3l5vet0KRsk4U4vNV5zPqtCQkVXvCqO1/F6tg3Uo43srrXQMDWMbvVlx1/YoEoGRlobaac2OqMcmN7MWNqW6P+zLgcLSu3RA6AG0XEetNzGDODGJaRsgSLqWHlraVgVf1VpmeYi0VihVWIqnB+l+llrYj531eObp+cbL805Kn0mpoJVh2zZOp5l53vDOYHRnco5UEj/+9JltTyjjCPNMVVnwZUApBe8upLLTWpYQh73QiXgP5+WR1go/f/jM7SZIsjtaLKcqaCEtQPgPP3s5cHSIKUrYRhPUWO+B1hOtb2Pkpgn2kZf+mVw3rJkFjN4LWk/UWghTB15Q2lHKAeoqHR8UuexYY/A+s+4r635QW6fUBOqFlOT6tk46KSnLWFNGj4p5PomLN25AHrxCgetbE0ipUOozxgpBQKSORrR6NcuYXEloSOsVUyVZz5jRFesN7YbBSoaBaO3JuRDbGPPSRdduJGaRpjk2wfKBIKF6vzvkNao3CcSwGmsck/NYC2YKnKZH6JbLEtCmirGnN05nI91SEzAGtqlQxyFdjEontuPgOCpTEI166xntO9Ps0cqRs6FWTUpfuonfffteumKvS9zL2giiqmYpLI0VY1hNAx+kzCh4pWuoNcg0fRQoXYyuks4kCYWgxOQ5zKeidW1o3XF+FKY0tO24aQQAKBn9g2YKEkV8xINc1NAnqjHuF9e00SIDGVhTQNFzpfcvEapy7XwZY1rrRQLTNSFoOk0ibu0XjSpKpjjKOHQrIgVWRhz9veHMoMt0ia3sGvn3LYt0ioEKHHriPugFeohrha/aMcoKYcAKdUArJVKPFumqoQfBRhlNr2L+uXdFe1Pi7O7jLK+E1yleA5EhlDimV1is8hQK3ooMybuZlKI84/qQmZguGMjBEr4TdbTusje0EWSwjENUt6QoiXG1Soe0tYLTHppCaYtR8r5qpZm953Q+obvhmq8EfwJViTnTcuG8PHB7WYcEpw73/ESpGq0rzggmEwVHLhw5ozBMYWY7xmfeRarSVCPHitaee4pmR5GSNFhQjZolLGBdRS/KOGykcghHu4kHYzt2kWoANRWaSkIcwqEsHPkKGoyd0U0A+1WV4buotJqovQ0KhEgfjJb3NqWD5TSJVMQ1tIfr8SId2DYkGbHQVRKPgLVDd13p3dJTJqcbRnlMifR6oBGT7hETl8uZ1iSRbNtvolUtciBMsTKdHC5o4tGwToyktSXSL5Bqf+z1q4Vq1klEw0GR2eiqj27YBq1LFGIUw5MdMZ45ywjxtMwcLRJjAeVoNbEsAVMNxnSq0nxef+a0BCbvWPfM0VbeaEeqMlqVi7+MMY6cXLz3KCp3h1+YJtF0DRai1ppa5UHXNMSiOD044lHw3vJ8TZig6EdGO8Pp8sBi3xNTYU+RajRJI+5O1VGqcVpmijO8vKyESWHszB08flkuYqKhM/kTPx0/EPOB8zOtH2AK4Xzh6fOGtZ3l4ji/f0su0G2DaunO8vnTZ/61f/V71uuK1Z41J5pqhLPndJppnwq9VR4eF663JxGbZ0UjMZ1m7HzielsxznBaFtLxgmkR5wSTU1RleXdmeuv59vIVRzrou2wkRWXcu0AIJ2pplF7F2PZyRbXOvEy4YqGt3D7/jOEt6Sg8ffxAOgVckNz4aT5zWzfm5YR/+54P28r58gZjAkoP12drArHvQwtEQYUBLdAAACAASURBVHuBq9es5CFn+9gsxSVbojiczXDGykGli8u5Be4sxt4E25JyFgZmm6jD1Vwb5CzmFjVGccYwTA937Q6g1OiUGGrO6KaH7qlKj8xIFrSxEyGICzzmxLRcXjv7zoqbUtHIJdOpvHkX2XcxPLy8fKIjWKF92/DBoslo1Xg4zVAFLxbmR663F0qNHDESU6XsG3vchGpBJefI5eHE4mZ0O2hI4R+PgnUdoxvp2IfzVKFbx43uDa3SUuOIcXQcuhwWdMeaiXJI4aV0H51KSZ6rLRGC5u2br1Dacl1v7Ie0xr1xqAr7tlNaJDeRDNUsCCDlLKlkFM/3yS3Kip5LMFCifSvVUlKWiOIRhVqrJucVpQ5SjijV8NaLuxeFt1I8KTwtG7ouGNVoudLoaCxODc2oaXTsKEY7k52xxpCjTHH8fWxIwWgtgR4K9v0g5U5Rkk7Ue0MZJdrGWjFO9jutLaobMaZ0hbMzuip6VaRSSLqigI3PUhBYJcZTpJh9TqJxrG1D2cY8zZguiWe5Zo5aKM2QW4WSMFoObOnYWa9RumNoFh+YwpeEmfK8cdtW+BvfALBuPzGFRzEYNcFqaVPGmFJhvKG1TM0dG4xsjm3oSLtoNSVty46mghRudiStbeVAjGeWRqHRhMigzAhB6HJw7XYUoiJH8t6LPOn2MgwpmtrGQfQ19WukYXHnksrkrfZO5hfcUnf3LnxZiv4ai3vXn2o02g7WrupiRkNRqhr6Y
i0j+96xVlBMwCv79o6cUiMhzFon4TC1o1DCuEUkE9YEGRm3jrWTjAdNEeZqqlIgq7tpSGgt1gZiOvDe4U6BlDNOn0aXWUyyatjq+8Ag1VSotWMdQ8YVqTVhzQTInim0iC4mQ0RqY91gXzeki2eEJOKsTFWdvbDtG7P3FK/ks61FDo5FgSooJEBEK8EG5rLjreXh9IhRmrePj8QtjojkRsqJb77+lt/+5V9I0tMUyFXMYVN4kIPMwL+V0onJU9U9Kl0Rj0TrncvpQq47yjTaHHDecewbSnum2XC7VXw4o2wk75HaLeu6YozoiLW9G7QFnQWaEiUCO3jxtTjnSLGRu4zKS9XQpca4+ybAY3wBLUmDvVWJTraGFDe0O1FHx93NTTS0SDy9M44jrtCNPNTqjtKitS9VJkVKQbaJmg+0qaN7LHVTV5WYKko7KoWXbZMUtbBIaldPqB655UxLj5iayKnhXIWSSVnkSf+irF/vqNaM1YHL8oafP/wO4zJOSUKPVTPWebByA2sct9shrXff0E3GZZN/EBlA9VyW96St4sPEyUUmt/L+/QXdDxavsEpSKnoFq4zw7drQpQx9ixqn0XpHSaiRhqLvqSKFWhvOg2ma25OMYmpKbOUDxgi/NR2V2K+0l8Z5ecPz50+gO+8ezpJM0zTzGVo9o61F9wp2J7YIzbCcHL1OHC0O3M7Gu2++5dLf8/T0xPntG7wTvZS1njdeU2pkmj3GeGwWDetyuvD0csO5gLae5aIxBF4+rxiviDWR10w3mqMUdFVsJTFPojk9WudIBec9T9uVy8OJvF3JCh4e3mB9QJXET59/Jq439Hyho1j3nVsUl6joCjt1vQm0XhtOqfG0/SwuYispQM4o+rnx8fgZ1Q3TY2deGtY0ciq0/Jk3s4JesFkzN0f8+QMoj59m9IjO2wpM85lpfqRRSA2sDsS4sW9PnE5nvDsN9p90SrwK5Jpl3KELRXW08lhjBWmjDb0nSoPFX9C605tsQtaKBrNW6XKkmEQH1qWzJoDyIqdpxDGstcKaLkam1l5NIMftoJTMESP7foi5Dgmh0EZjjWWZA85YwuzResQiNpGixAgwY4wkTrkgJ2mjxUF9O2DfbhxHwtp3hPN7TD1o9gVc4tG9451VKJ041s5+rHhvCGFhqZ59fyHGxhFvKMzgaErnxxqLnWaOJLy92jbinsR8Z2e08iitcR6C9xg9GIAlMQWL1pZlfqB3xbJ4MTjkIoc1H2hV45zhiKKnKyOgQ+kq5hoaqlVqdzQtCKRSC7XnofcqaGPpOJSxdG1I2pCqZLIri4xSG0zeD/d2Z5oXtBJzlVFCqxAWex8j34Ye+rwcM60rlNXouw64dmpuOGYMHQsE63He0tpBPA56FRRUPrJcV62ie8fqwXodspeepRAzxokBronRyRlhJ5Yiyji0wmoJ8yg104wi9YbBEbwwYFMqpFzoOrFNFaMs1oj+szQnxrWtEZFpjFOGx/OFx7dfMYXAmzfveP/1n3DswM//GwD/wb//H/M//6//I/cO45//X/8Ly+nM+68eeKve8FVsHEGxnd99MaZaTR+hAlpZ6mBuiR4UopvQ53+FfyNf+fPye9Gx0pmmiSncOasNZ91IzGkYDBgnJttyUPMw5Fg5REoKlqVUGYsbLWgfMeZprOujyzkSkOrQQnY5WCn7C9d/EknHfaWYkAK0UtugOyBUG3m+jJGn0pRBh7h7JESXC1rJgcv5O6S+k1LDaIdzhpgF0N9afX12KN2HPALoUgC1VlFNxvq9jcOkFTSdyNUayhgpPJF72Bg90HQdY+qrgcwYjergvSQfBu9f455lCpTQRjG7RaRQNTNNYRz8DbnKgU8OVo7gA1olljnIGH+wgUuqxC1zfnhAmzhMR5J8p7Tm5CbivuO9oSJyOaP6cKhXnl5eoHfOlxOxV1KRDvx0WrjmSHi84KzBO0PfxSxVu0EbkcPEeFBqYTpNnB7PfPzYOZ0v/KZ+K3IKp2g6Mc12mAR3cn7DPM9oXdj2A1Aom+lV9oBcH0TSkArv3r7niLuY1LqlN8vHD8/DQtfxwaGVZl1XYky04ghzYJrOKJ1xfaO0hvcGYx29V+bFsW+Rr756z+Vs+fjxR2KMrEfh7fsZbzVhVlyfI1ppHh4mti3RsdQietjL5ZFti9QaCVOkFkPOhZwSzsuBw+oAOpBSxXmPtoqXY6O2hFONkjvWOeIhuMRgAtdtk4ZGrBLU1DXGBLKN/xQl5T+b9auFqq4LtcDRN6wyTC7Qu2yqwU/kKDoYrS3L9MDD8o5SN+K+4Zl49zDh3ZmYVkpJLFPgHBTHUVAWvnn866zbC9//5h1/FlZJRxqZvZ1MU+L27OoXOo76C7zK0KfKmEV+9268klFY4ccfn1A2ULVmvgRKCeS68vPz75EgySvv3nWWy8JPH3/P0VfCdKYWePN2Ih8DzmsV4XwhpYOP10RSncU7MZB0h1kWPq5PmHnmjZ9BN4rW1NpJtbG8eUCrhlGaVjXzsmCwXB4emJZP/OY3fyJaXjpxP3DHjp+9dJK6QvnAsW+i7zo/8PunH3Fh5rS8Zd+e6GxUBZOeMVaRauWprNB2Go2jF7wy/P7DD+QCkxVXtjGKjx9+xjnLtJyFkYvm5XqjlSthnml0rusmIztnSETCPNGLQc8e1UTn00tGzZaWG1vvaHbJllaanib6oUlJUlj28pESZ0qrHAes646xkVJWjtsbvL/g/YIxDjvNLMs3yEjf0JWM/lpX7GkjRtFOKqNJSUwVpSWUsjhriWnj+fpMLZ31dgjzrnW5yXOWDXWWDkMt5TX7GZWEldiRsSqOGOVhnnMmpn183y/Rh85YMYZpYeVJJnodTExBzXRVZaPqYhwpTbRFzlqOeJDiIZ1ZPxEmL6PArjDmjLEaHzxaV5YgXbiYZSw/aYHiK3NjWjSzO9FqQZGpRT6/4DS6OSbtuZzPwjPFU4rmq/ffcr19ovcdqyu5iCifatAtiACuNaw2xCOSU+Y0OSZtWTnQFBQWNyuCNuy3TIpiOAohoAduhy6xf9pq/HJhXQ9SKphZ4VzgtiaOfKOYhp88kwqUXEY2O3L4Ug5NEW0lYrooWkbLIFze4B0GQ5IWoDzu9H1ca9Bk6ewpTc2AbnjXUbqQ0gutebxzQ3taJdJSScqbbvdZs8SXeluHJEdjxucfgsdMM9oIK7P3TvdD66garYAzAVQdWfRS+FltMT6QgkI5R3BQc5VnXy+UVLAF3rkz5+++5XRemOY3XM7v+e433/Pw+DWlgnWe7Yj8vf/278FIUf1bf/rv8e79n8Lf/S8A+P67C58+f+Tl08bL8Zfo2fB9/55nE3iaTtKpVApjlBSqWnLLEQUurXWKC0zhDV/bB37Lj2MsDynHUUzpQd7Q4xnexYgpQFiUEkOndEmNSHyMGUVWwRg7TFpgnci6WtXCbB4dKWlVMjTBCk943ceMQrqxv9g3rLXD9S7Z9M440SgjXc460n16a0P7F+RgVUSG0gxyeIkSFypqsy++boUlHQ1rQA9zSq9tMFtFslDqPUb1y55FFzlRHuYk
tLjRW2uUHEeB2cah2KBUgibdX+scvTasUXQFzhoxbRoZhdeK3J9mkk62k2fWvovBVHS90jm9HxTmSa5j+bqGGPcx7bKcLpcxaXES7hJ3Hh8fmWZNLgdzOPF0Xbler9yuG0FP0BVHWgHF+pRYD9GQl5peyTFiUJeutDMGYxTrTag8b9+dBNPWO8ZW+PRESpE3uXM+P2Kd4tPTM34yrCmhOpwfhDDTsMSUMO7EvDhiWjE2YC6dUjuqCQLNKMc0v2WaV7w1WD0RponL5ZH9uIm50Vl+/Cnz9fSe3uRA8/hw5uX6xDQ9EA/ZJ6zWxHQw+RNPbcU0TUud7776DbVUMpVcdqZwEslJ1bx9+zU5P6NMIMXEdd/w9i3Wdh4uknwZponbNXHETu+eaQpYK4edWizrrYE6eHgI1KyZ/TuUTjQ7UqyYSfmgFkU4gTYHk4OcCnSRcK3b7Z+khvznsn61UFXd4Z1HqY7VE97OMipuCa08l2kGmjjPehM0RXjk66++B9qo/gsPlwfW7TM573z17juMW9n3TK6Fnz585F//62+47Rs4gZ13LUxIazwoaIgDUls98s4BRJysjXDNam2vGiprPbUUqlY8HSu70rw0h7YL61HpePybB64vBykdfPrdn/P+/XtK7/zV737H5XEhTI78/IZjA+sU0+T5+PkHUl6peWLNCw8XRymRnGCeT8TrjRAmccU7NSIhm+Qxp4NWM8tsoYHVC8HMFAdbi8JxzQ2nNFuJtElzfrew7oKsMFPH2cC2HpwvM+W509uBKTes77xcd3JULLPmw4cnOp3zZWaetADtTzPzNFNrZlt36InTdKKUg8fzhLOax4dHYlF8fr5hjcMvFjV0VtpPTJPnSDt+ksSMcLmQSpKOSBG27bpX5nBBOcOnDz/w5vHCtm64lKBqgveo3qgUcjzIWToUpdw4zxfcdEHRuT79xOEd5zePpP3g4/MTtzUzLWeen6+C7qhi1FEaQvA4N5NL5+npM3/1w1+glBvd0EZpYgqh6yHpYBxupPAwWmgESouLuJZCV5WSB3y8J5QyOBPofUSe6mGmGB1ppTQ5S1Z4K3XwOIc2TyvRVoHoF83QttEppbJulf/EvvC3VfxyZzbg/y0c5PpPcms3yM9ffqnHrZ4b8s2BCJgH+X0D3D4DCtQyvC8D/zbZL4+K3uAXuemkX7L2FMTKHyxjGEwYvjhaNBjR1rE2wIP34+sDywT8YSb9H3XNf+wXwD/mNVSIH+XHx//jH/nT/+gX//bf/q/+8z/4s//y6w2+vv+F8Rnw+/HjF8vy6x/JBbgY4E9/7YX+Y9Z/J9fkL9f9DPiLS/AfWW78+OV6/NuvP/0f/s6/+Qd/9D/9nX/rn/4l/n+s//72wn/2w59Loa00upuh9QRtpPtLUxjtsYPTS0c0zMNbcdfnWm2pVJnkGU0twsbV5h4VXYd73Yrhr7bXr++cH+mESLAAgsyTtrN01ERrrbBODpx0RTwq2YDSGmvhiDLSXo+KswLBD5MfemWB8T9vT5QiDvics0wG7ITeM8vJc3u+sh+RoxykXkakdhAuqFIcuVBLJIQFheF6u2KUHAqtNUi0r2jkRQNdOD4Jv5VuyOUQ859S/PRxJYQPAyW1yUTISWyydXdsmKPVzOk8czqdeHn5yCnMoCAlIdqcTo71tuHDgtINZxW9GkpprNcXcttI8cAYx74bUmq0vkHtGJvYVs0yvee0JIxVvHk4Q4NaFO/eJP7+n/3v9PaW0/QWN2Wc2nj6lMhd8/K80RSs27N0/bXQGS6XgHGd2lfSIaY4nTzGNhYTmGeHtR6rFdZDPBoPjxO5PHOe3/BwTljneX5auVy+Fv22UuTUwXSmcGbfP/Pdt9/y27/8M1Ls5KhR9VfLw3+u61dfiXHg/YSzJ5wpLCdPbYlt32gFHt49UnLh09MHtviR1qFdLZXvBBGxr9RSebi8JbfMvh2U+gGtJa7z6fY7moMfPv6etSTsNDOQ1li30FvmzsurTTQj971OWvptyKMHk250WO/ideccHz4+8/ufX/h8u4rmtgvc+fHhr4HaeXn5GdV2rvtnKSS0XOjbkfhcN7T2+KDQayOXDVQlN4k9fdkPeuls+8Z6rEzTxH57IuYdpWBaLhxxxRiPt2dqKbgjU0vF6IXL8o4Px0dyyUzTgkLTa+E4rpxOni17rtvGut/gmokx0orB+W84LydiXjEmM09ngpmleLLQSqa0xu2pEdwjs5u57Td+/PhXnE4erTvx2HB6Ix6J83LG6rt+r/DVw8y6HbgeUBquN4mRXawXVd/Ik6ZrSlxxFs5ngQnHFDmOJwyK+WTRXrifWksMrQ1CLpBxnud5u6Ft4/G7M85cyHuh1MhL3pjdI5+ebxQin54/8X/+/X8ASnMckqrUuuBfLpczU1jo3WFtkGzsfYVuhTgwrgdB1ovBSis5xWtj0FmNhCExxkgMp0ebxnleRsrMzrYdGO0GgUKA2KXKQc1aQROVnHHe0IzBGsklr2XkVTMwKFawY3Uk7DA6Tf/pf/13/1nf7/+/Wf/Nf/jv/rFfwr9c/3K9rn/n/IBGitOaD7TSLLNDaUfridO0EGMWjbtS9JbxVvB6d7zi3XillKYOo54EEXTh5jaRPbQRf2qtQ6tZkrLGeF+63dKwSSmj6Dj8qylV63v4gaKWPoxlDYYEKmbpjBojyKsji9beWsvLKqfmVOsohiW1qyEBEZ3K7Xc/o1XlfFlIUTihrTeOKK7z1jLWiybY2oCxBesszjsaipo7qCzPRdXp2CGNCiOdbhBkBpzfedHn1lpJ221MWLucAbKW4jZmlpOlNY3uiuu6o/pnUIXVNUrLdAoxHtghJzvSJ7QV3rP4I/zgm1paq9R2w3lDfVmHJ2Dn8SVD1WzbDwRnmBZNztKccd5hfOeb799yWb6hFoMyicfLjPML+1bYjkglUlkpTaRgvU8jbGknpsLt1rAJejOgGg/nQEziBbLa83ZeqPXKNE2o7Mk58O7rN9S2E0uiKU3rjmly+LnhvcXoibdvJ5yb+f777+lNsW2F/foPn/7+eOtXC9XUdrbrQXBnWjVsh8I6TYyrpNjQ6T3TVGHPhdY6Kd/YfniiUXHW4+3C/rSKy7HAbTtGTnBlLRsGzQ8/P/H5ZaS/IHgM3aC0MlBDwgNVVfhpdMnjVb8oTCWpRX4t+cmG86T58ONH/uovf8T4K8d2QulCI7Hvhu14oXIwnzw5RxGp+3f0XjjiQekR6624OYvBOxFpa22otdNNIufO6XLiiAe3/cAFg5smcpZMedUlYtaojZwzKVVaB2s0qf1uIEdEh6cHB48uWeg1G+Ih3Mnr7TPTonFuYbvt/w917xar25rmdf3e4zh93/zmnOu01967dlVX16HLrorB1tY0JCiQCAQM0RCQRPtKjAJCjIlBkStIIAYJHoiRKCaABCNGLzgY0xAxLQ1I213du6urq3ZV7fPa6zAP32Ec3qMXz5hr7TZtabzoyLjZe675zbXmN+cY7/u8z/P///5sNw1VyXjtdLzhfPOIplGEPHF574IXV8+JaSLFDoPFGYt
Ub1pFbYjR1NNVwYUKpnmZ/QdsFiSbOok7SMUYFWbwSvScVifGNZzvR+T0kZFRrnlwvGdExTIrWZ20Wjm+br0xNGWe7vT4TOEdfKf/vj39P38jzm2xWlX3h6uhF6C03z9deVePxCzQmUJljPbm/JSUML0l5VPKlVuoOnc4Z5LgRn6DrL5VyJc8QHJ33UeeF83vHyTfF8+RnjK/3unmm6kmMjrQUNtOKoyI+uVQPoynWRCsJlvrHGTGpZSgqapuXCmiZ2fU9uRQaSMqHQlNToO4OqnuACxQl8vNVKrpVxHKEUdDOM/Z6cJXVfSUzX9ObvSVk6pmtcUcUTQgAS1in6MNJKwnjD/eGBZbkSfMd8K9w97DkcRm7zjZf1Kvch0NsdCs03Gg+P92ilecqLtBeZxqp+86h2A+hFUwp4DyFYYTq2gjfQhV7OfxTGwVEJONvjfYfzdatlFUW5xEQ2Mhg93u94OQ+0DIe9NPmYfaAVQ06i3Cwx0orCO4s1WvynzQh5AU3fD0Al5RmadJgbY1jjTC0G1KZqaIe2lliEFuFDwJitsUcZVEnksnLIe96rR+7akS5bChljHN7uMXrgFS/bWiOmmb7XHE+DHJJf61HL5m3XCmv9259vDTmkIs0ualOxlJJ++bil860JKOy2upb7przQqFI8gaxStRKFaonTFqyyb4Nja5ByknS+sYDZIPKibuWSNgyRQ2vh1coqdVNIjcI6jwyRr6qotAU1VejCiA89IP5gpRS5FXJNWOewtuN1hGQbxte4sKwru3qH2mqRlNJQhVO8rosoyC1s6t2r9qYIXSdowixlHCiNQJXEC21eAzoqbiv77Z4aabKqjpTzppTyNhD1vaMfAl++3sglY6tCZYcBjK6MO7+phg5l1OvLsA0+keYbcUloOqpme/2EqGBMpqBIRQo89rsj07Ty05/O9HeP7O4V2iuIidqgsz13u0Zne0qqxPhMa5GyKsq6Evq9VEGngukNNUPnDtRWycWy73rS+vntut0fH+lcIRWN6xpTmxh3HU41nDJ0rWG6TEmZr/ONnT9RFkhEQreT4VUpDCPoQkkJYxspVyQ/D8YWNF6Ei2qoLRHLipq3wJ6BYK18vvTGafVuG0Qb1rIF4BIKaUys1YA328FIIlGtiGKfayaXVQK3JULbUF7WbvaTItaY12bBf4MgS6mgtgkjZ4Xkx34jRIjPWooI2A4pzlmeniac91hnCU7wl6Fz9KonpVmsJ/kdSityquRUuF4nUXuzDPtrzKR1JhdDmhfZhvDKg5Yh2oeANpaSFctapDLVOILbrhXd8cuLPL7aEp+/vLDf78jPC/M8s9/vWPWNz59ngnfYy8Sffr2yGzt+/fqPIk54hdOWuK7Ub2eUrjyfn3HOcr584RejePf+jrhWTt2R1ASJF2PgNp/ZhRHnBkpNTNPE8/OEMu/wnUWpjDOF2zniTyulXIjzib7rmeMLtMq0XLkrR0y1DLZjPt8wzbOsjWIblMoyrxxOe+I0kefCqX/E+QM/Xf8HGVSNVfKB2XA1JmtSkhSh9QL1t7bHREWO0n5kTMfp+EgpmqfbFdNWtJM187QuLFmjYuH8MuM7xZrh+faC0R23W6ZH0XlHig5lJ7S1rLeVXAsxzeQyE8KAdz1/ev5nxuHAsDfc5onSNNfbla7r3tLn8wqhO5JrwvsDn798w3tHyxUdKsvSoDn2w46Xr4ZmHMOoeHm5oo2iJIVuRsIi08zpcMf96QNfvv4TVml0regMaU0chnuKyjSVSMuMNggLzU2UdsRay+32wuFwoNQVVQvr+kJOM8ZqPhx/x+V8Y7ldRKUL48bd0yhV5T56wJYe3wWm9YoyFu87ujiTJmg14Ycdt89nHg7fg6rcpivOdNggSBlUxYVKSYnDfsf+ZDgcCik2jseRuDZCb7hdFk53e56+PaM1aN3hzI7zZeF4F1jjxH6EZY4c9z13++9IecJ6TauiUNCks95ojTMD1mhSmbF0eBPo9orp0sSjGAq9H8nFUuPMGlcuty84t0dVQ9DQqmdZIiataCenZusLzgqmrNvv5PRcA8MY8FbUMWOUKIEKdr5grCOXZ5wuPJx6np8irTTu9nuM8nw6f8YFT/AKjGMcPOfLlZo6bO/pxoC3ikUrBq/YjQfiWDG24FDYBqPvKEbRTKDzI13XETrLhw93fP3yhPM3nLWM/YHn+tsP3v3dPSlKW1Qfug2gn9FoCQQpTV4r4/0OazTW9ttqeMNieU00kV0/YnTj6XqmtsSHh5HBd7Qim5HbsvWOZyn3qERyWkgxoZAAitYOq3doo5mWZ6wT3mQu4j0TT6pcb2hJPkutqyaVSowV3410gwzGtbS30oRj2fM+PbJXRxSK0hLWBtYVopJBXbsNw1QrKUdpgnoNS21p8lQy83LdVpqWV1h9a1ItmvJK6I4ypG1t8aVk+T6p6zacIkEwpbZ1qRQ3aG039fNVhY2ktOK9wxgjr0GVtXjJcfu7OgnItN/qOWNaSDnRu0EQfJtCS4OUV1JK9P3+DX8mNoONhbkpWGaDtr96BkrJlBwxWrzRr95UeZqVuJEOrB5E1dysDWyDc0wJazskv1a34VmsJFKi4bZBVgbUVxXYOo0PAnkHgzZyYKCJn7TrPUrZjZep3jyTWsE4Si3xPBdy1eQshRQNsF2jGwzrmqlbm15F1PpWK5XIGldSHN7ey1embKuZZhamqEll2NLlhtPdHb/8/DP/+J+f+EMZ2P1NoB4LL7cz0d0RhgPP0xXve4wxdO5Ijhd2XcCqhDad+I5T3tawDuuAlGg20pJ+u25NV/H2JOxkkxh7R22est6w3vLucSSmG3NOnPYW1Z553HWUupLKRb6rbzcWJozzxCj+/RgF/1dbwzqB8F9vC6GzqAYpL6w1ydpfFUw1aAzKN2qV8gJZv1tKWalFI/xkQQ8qbWSY1fLnnLGCB1MGmyO601trbsUY8W9LjaiwT18V/pgipaTtWs3iy14bKeVtqNVb5e9riUKjtoWcMtSKc5IxSCnJEGsNpjnBfgXhZSsUPjiCH1Gmomre7hPFd6vFZlGapdHTIbSf2gqlVGjisRKniMHYyrqs9P1IMKIolyzteSkXctMbsjKTWsecHMsiaKyvL5XzeaLWHh8kuGo0lNkQ84hynssl0QVF5y0RS4mFZemw3uH7xq+fnwi94XI5czsv+C6gzcrT18/ERfHwXiqBQ+eZp5X9rqPEG12/57v79zSEdz1PV378+AOn+wd++vlP1Bw5vHvA6g+UqGH1/PDhnjVOfPl2I3hFTC+UCMfdPU5nptvM2HU8PNzz6dNn7ocf/18Plv9f3/78oOoyXmtKXaEqpuUKRj6s2hhKzdymBYDSFiw9h/1HWgvEeEHrwrokrBbf3zxFfBcoJbKuUTANumOJhTV+FR/frZJCT6sB4yrfvl0ptXDs96A6geXbzG2SE940rzSluKor1+vCMOxoaNZ0I+aK0pFu6MjZE/zAPP/0/1D3Zs2VZFl63TqzT3cCEIjIzMrKqhZpFGn8/z9FJmuK3azqyiEmDPf6eEY+HEdkP5Uk
im00XbN4QWYggDu477P3/taqjE4Fzp24vn5i6FtUaTF6JfqNYAYkR7Qs5LAxtA6nBzq3kfPGeLuxzAsiWLpzxzYHMh6tPWTDuL4w9Hd7UjRQiKzlRpYKVCBGgVQN0zIDCykWojc0p0Bi4XC6kMrIGgPHg2W6rtXlLRPn4wWjHYfhQHjNNL2k1ScGGQibY3aRIBbOfcf7d+9J4hl84PJg6Np7fPrMNB5qkEe0nM8/Is3M9XZFqx5ne3JULOFvWGO5HN9x6E6s242coW16Hh4c2zZyGAzTNNEaQ2vPtH3BB1EDeAl6d2ZaPvPyZeZ8HFi8gBIRvu79HfsObTOqBKyxDIeG9+8vPH0Z637p6oANKQbGceHQHXC25cvnT9hy5P19zzxPlFwY+jPRw+Pje74+/YJpPUYLlNo4NQ3GSqZxxSrD/YcjFM3Xr68IXbg7nmhlvXmfe0cKCiMlTsKPH76vvGC/ENaOvuswKtPYFiULuIwPCmsVjXUV+xU9Q9vgkgAcm9hwrcS1gtP5nq6zpG3gch7IyeCMQpffLSDH9oDqDNZU29TL69e6k90MGFW7cVoK2qFlmTei93TO8HB/JkVPEZppWehsz91l4H06gQi09sDgRhrXEYLh6/PC6/gFn1eMytim4Tjcc7u9kMuMEn4PR7bc392Tn2a28IJUgoKmIMllqxxTA7koSpakEghpwm+BLCxCWZatEh4QfOuEncuZkz4hqOzaxg2UZHn6+ht39z3K7PByIckpsfmJDCjZ8Kb/rK9NtTZ1bfstbFTHpODDQogrgrs63qcafTYf8aGuOkhpEbLebKWsSl5KxuhmLwDhbQ8zhq3uDtvmGx6q7LDPkDYKuepRxRtbta4NbNvCsni6Vu+dq2okSyWzbvMO7zZ7Ujvt3anCtm2EzcNBfyMAQN0VjDGwbQvOWZSs2KpcSi04d3Zn55qqKN1pBrCboVRkaB2Siviqd26qpEIWjFMIpb+ly3dJKpCxLmEbuI01aKlEJXKEWEfq1mliqOsP9XkVVXogC85VffPmC1lSJSXirW2aSTkTgt47wPX1LdQQmGsqSztQKQyl7IG5GoPHuqqLhr1oF4LGNVzuzvz62yeefjF0TcM4fuXa30D1LMvCvFzRq6NxHUVeavhSCrRIuLTBvnNpZMe4LvSu7CmS/AAAIABJREFURTq5Bzp/v186s+FjIQnPtK20Tc9BGlx/YU2JIhVy7wwP/YGQEjHcmGcocaNtEy/Xryy+YHRBNx/Y8krwG8o+EENEWEUIkhhnSqmH1/PdQ+1ua8O2+QrKT6Ui/WJknheMjeRYkCrRNJoSawCyiGoPE7K+BlrttjBdQ9Jd60jVXLIzdgVVaVwPJ1JWb2EtJmpxmXMGH9DKsCweKVL9eqoc4n0vjVJKJSKIvbEhQSpJYxqEqIdeHUVd/5AFqXwtQmOuq0PKVe6xYieo7IfOXHXn2thvDNngPYgqRKjUFblPFwpGiNoECRspRlAK4ZoatvSekjJWSnTXkqHWA66payq6HiCarqEUMNYR92mF0RLttnq9NorWVUJN0yxIGXjoJU/tE1odOPZHPn78DTULLvdHlvErXXtkfA07fiohQ0XQLfPCGMZqs5SSQ2uRBVp3YJsDp65HI+jbE8fBkUIirJnT8UgqLQJD250o0u+TV4FUkbW/0DZHuhaev8Ll/v7/aR35b/74v+moKqQUTNO8j0hB62oukbKGQEIONI1m2xKtcqQEt9dnhFoZxxtCZpyLlKzZ1soSRQTatuO2vLJsn2iaAVEc1gm2bWOeRowBScPtNmGMxG31whRzYFkzqxiRKlHkjA8C7xPTfEWIAqVBqrpPaU1iXWcOw4VpnBmaew59x6/Xv9H1GqUhp3qSvqZq0Iihdnca3TBPC05oYtA07si0fGRdnquuEMMWE0L09F2HazX+2WGMg6yZr6+ocyZGg9ATcc1ILNP8zNAf0Npxm55R1KTgEq94btjukdtV4P3E4dQiZYfWgv7gWGePETNNY3CmJYuCFInDoS5Jn+8cH19+5vvHR/qDxPsqPjBNQapA7y6UmDhfLihUdRH7nmHQrIsnRYsQgXF8wdgzIWlsI2gayzKvPD58QKuG//Jf/pGu64jbEx/eX1D5wsv4Xzkd/8A0zRjjOJzekcQFzV9ozT2fb09QCj5Izvcd9w8/8Ne//SOP7ywle8gO5yTn47Em0YeOaXzPP//Tr9zdHSqLz1oud5plLPzww4Vl+0pYNV3nuL5OnPqBeQooXRl958EihOV0OuC7ma45cHd/4tPHrzV1fmh5//iAv79nXTaGw4Hn1ys//fQeCzye2xrkCQ2tVRyODuscL89XurZHD5rr60LjqtFF6xYpIsfDkdvzDWsd6v5MLKVqJiUQMpfDkb6vO9ZhW3HvHmGsYzMZPW3booTFKsWpH+pF0XUYU2/CUmSUhnd/+AMib7ROcnd+IMa6WzmvnuwLXddxuDwiZCFsgWnUaGNQqqVxC91LrKPMXLDNEaPfc5teeH79F5b5ihUSpQMhXasFTA+ENNP2DiE061qYZ49AoE1kXXNVmhZJKAapwZfdL59A6dplPKczNjlCTkBAacfh8B0ff/3Csm24ttm7ZRGlBCEGQlgwpnYPa7amdnE3X/FMTdPvO6N5T9PXsaVUEqXtnsqpO3gxvKmN23rD2wsqIVTtqJZSJ0lS7wnquncXk69wdFNRUJTybXfVhw2BxKi2Fqp7KCrnxLpW7mp9/TKFerNMMRCSRypdx/dCUkoNCKaUCGFDaY21HWJn9YpdAxq8J4ZI1w7fPO91rAopJqy2dM0eQixxV8DW30EIQdMcCKutumJR6qi9gHGapjWMt41U9iKe6qKvM9HE5j0xmPoaFbHvrkqUKgiRWZaMUhr5zQRbdpVuYJ4zMRYwmZhAyL3TXSJ+U2xrNdblImqYquzILc2O4LI7aeGtc17/rnOKmOsaRyVmVK+Vcw5RIl8+jwzDHUM5EItAnySkTGvrCHy6XQk+IkWGEoi5Q4xXoNIxChklMyVV/vYWFqT03+6XawqEItCsZD8SpST0AV0cxrq6MtUOFAxCaw66sluRBq07pB5ZlyMXoTBdIQWJlboay5gJIdMfDnVsS4O2gqY7cTydGBdPLLCthrbtEWS0bAnZk1KiP0quLxtNa5FYXl6eEaLgQ90njzEgBfVQhCKFmj5ZSmDzlYyR804VyFCyJKaIFAWl6nTBWrOj4BIaRYmZRivcXrwKrUhUC1ahIrJUkTTO1sPSjgvURtUgl5SceocxirZ1GCuR7JILn/fJQMXQVS7t/jmF/QAq8fHt6/VAWSkZqQbBYgLUtwlMtWhqUkj4Ul9XWQRqf38LXdFkpeQqqUiRLDTIukYohKTVApTi2NTpoW56UgSjGiQCbSRNY+pOL4Lj+UdEbhBEhvMPlCQwxjCcFpQ0xDWRy4rWiaBmrLMgVpZ1Ypoj67LUQ1xr+fkvvxJC4IfvL+gE0gum14C1knVZ2X7zGCu4HM7EpNGq4Q+PJ56ff8a5nsG8HaInvvvuQvn/C0d1mTNG64qKyDXRFzZPLnX
5uR/OjNNTTYVvgufXF2IU9P2BUgzzOuGcwJYDIDmeep6ev9IfOqx1NGlgWZ4pydO4A04P3LaMM5p1mXZIcItSknmKCBlY1oWSj5wujnFM5LIh1JHsE30z4IxjnTT9cMIqCH7CGofVjqf5ia45EbYMYiPGG86cK7YorBQCMUTaw8Q6jUh5ADLee+bpC8PRYWzLMtbdV2EFEYszlpAEBM8WNMfjA+NtoesaRJGUKKtCVXgKuu6OzK8MzXtIpqaOpat2J7ny66e/UvZ9n3lZ0fJctaXSsMyv5LQguJBVoh8ObNNGEpItb4TlhVwSD+8ekRqsPjGtiRBGcl6w6o8I+Svv3j3w87888Tz+jC4/0XSWEJ9QumGNXyG3aHGgqvJmTocTT1Mkny1ZQNtr1qkqJI00+NVDaOjNI6of2XxClsLp8j0qFUpwuF5DaSEEuqPDNQe2u59oWo2QC+sa2KaVxlq0cJwuA6dDQTBz9+7Cy/MMRdK0DfP8yuO53yHblnkaOR8blHBY+0di8GxTQKlCipKf3r2jaSXbGhFF8+cPPzC4L1iruJzu0UqBAGNbfv30M0obZIK2lTw9XemOF0x74HBsyFnzz2vhOHSkvKF6S0yRobPIViOKQeuMaCWn4wFpJV+fR0SqSstxmjn1Z/yy7kndBVF+B4c3jaSxNTW+TDMpgdBgTB2j+m1Dmo1GDjSuZWgPpLiwrRFre5xLlLySReL5ZeQ2vdD2Dca0+FCYt4BSkuADXTeglSEBWdYbyvF8IMs7kCttMyBoGZdnbGOR6sg4V/h8KYo4VxGHLBZSVcfO6xVrhh0OXkeM1qm6v4nkHI68jycG4Qi7Aew4fECrlmm+1T0x1701yurhxq8gMta2IHZzE7UzGWJVliLU/rU6loyxamW1tjVNX2qgR5bajSykb2tNbyPMInId9ZaC1qZ+bbftxF1VWgNlDaW8hazKDnovGOMqGaG88UHrukBKCecqJSClVDvLpdT/Fj1N474Vm1DH7DEFINE2HVY3375eu6L157RGY2277+ClbwzNdV3wfqNr6yg65lI7m6XesLXWSOzegYRaKO4hNFHIaas7rKXeIoSohBEhCshASpmc67hYZBCIfb2mwtdDyDtpoj7fRUDTKoyCr7eVjKgillyIKaNEoWn3aUwEbSq9421vNudAiAty68lZVZwTO4GgZITcXyNfg2RQ1yfSHvK6v/RsW+ZvP1/5h+ZAs7V4kcnvLALJ4dSxbYVlG5EIcoqUecW1A7fbCz4n+h2aP60LztRuOfL3m7nfPM52CCT35weKlOToGZeJZhBkEVFUBa60knY4kYEfHu/YoidsDqWOxJAJWpK3xF2n2fyNZdr2MGUkzjeO3R3fffee/tjw5csLbW8Z18B3j0eUkaQkSWHGdgNNM5CLYHAVX9TYC51L5LLgV8cWA7apUovX5xuqCBqtyTGwpZWYUsUc+lB7p7kyiDP1NS6lFlfWGrxfdza2IPga6o2pjv9DqgfiEDOLD/VzUyKS2kmtXNhIEplpDDTGMmuLDJmEx3iN1Yq2tWSRCaEqxFOskqBKwsh7gzdhtNqFHAXnmsrspaC1IudECKGKaKKvdJzWgoDbuFAE+BBh3yUvORNjfc0rS7YavqYy0/UNYVs4Hg6InNFScBxa2taBhJig2bXYhSqEaIYWgcJHcKbH+41h+JGY6xSlyQe06gg51V33UhGSUmYaWWhjRkqN9hPPrx+R0uwB14XX1wWreq7bhjAC/9Xz5csTp2PP3X3Pto08vdzQJtG7I89PXzge3gEruXiMbTBNxzT9npv4X/34u4Vq2Pakr9TkmNBGVlyRVPh5rRfuterRhMh17Igh58C6LRgnQdXuwLyOHM8dZlR7Kg0ogYe7R6QwdfwmNjrXcDrc88oL7NYX2xReX184DMPeSdHIcqbkBYTGyANzunI4NrTdQPYJZzNW3vH0vOD6jnGcaFrL6l8Yl62ahFJE7KPF15cb2ghebzNddCihiPGGMS0Sg9YR5zTzrOn6lpgKMa/ELLF95mV8QccA4sC6Htn8RttYolcIGWnska8vv9A5SyGQU2GapnrzEoLgS12LiJocBe0gud4i6xyxrCzrK4ge53q2+My8BbAG4xR+0fz29ZngJzYfqor2+D0lbygNRvZ1TycGrq83ul6zLDfGaaYbLkzXkddpwhiBVTMhjHz3Q8/tS8YC/fE9KUjIG9fxBdsWLvcPeOe5PlumayD4K6fDO1L0PNzd8+tvT/jtmXkSPJzvkclj5p7z+R4RF9bs2fzIf/oP/0CMGz5sTPMn1jVyOnWso2RwjmgDw58fKTg61dK0imm6YoXiPFhcYymlYbKG/nBkGTONOxHCivCeebrRuiOPj4+M0wuqc0jpOJ0u/HDrca4m143RICTeQ2cesc7VHaUUKdnz/ff32Kbg/cZt3Pjw4QEZN2KC4dQQs+Byd6yqw5iJaUF2tUOckYxXyTotvHu48PPtV4zYk/LRI0vEGge75/7QdeSYaRrFtubK3CXS2ELOimUuFBVZ5oUvX3/jZrv9tY50rsOaaqUKW+E632pY51ng2gbyhs8ZY3ty8hhTSJtnixntFCFdEVIxLVMNqlkqzs22aK3ZNmi6lpQ9Ma7oRtHKASEUPgSQ9c8SXhESfKh2KGUtdxwYQse7cscgGgqZpmnp2+9p3T1+W1jWZR+rWUqpdqaSwYcZSHXnu1BVnaLumsY412AS1WSEeAtSrbUDavo6XuQNkl7DUuktIb8/xK40DbF64JUy39SVQghSjoTgq85T7NzVPWD0pi3V2n77nm/sVR88iIhr3Lfd1frv1TE8gFbVs55T4q10jNGT8oZ1HUqZfc2AnTAQkbLQ9y1uH5W+TaErazOh7W51QtYdTvaOK4WmE2iTqz1pD5qkDIiCNanifZDEwjcsV4XQB5ROuxpUVvbmzgilJJyrbM0UZV31IFMb35LGSYyWBF/255D9ep5BZYxWeJ9IuRaxMkMWuXaI80aMKznW/eS3CXLOlX1r3P5cr6BtRW2pXBAFJIXvvht4uS785S8jP/8i+e47w/pxYTkGltuVR6HpuyPbfCPngJUaouLueOZ1uvHr01fSpyfOx47OnSGN9E3DcDr+fjMVkpg2cswMdxeWMBPHRA6B6euvaNcjZCCFBRUacpYs00jfXei6gS/jFd1YfPgKa8/pdOG2PbFsmdPlj2zrlb/+7TeMamCLXMfMdPvM7frK8f4RnTIqTmipcNYxb884VlyGnA9oF1lTwJqFwQimecV2hlY4TvfvKWiatoeccEogS6Jtjmw+oozBbxvWasgZKQvTMmK1QYiqRa+MVlFf8xjZ1pWQ436vW9h8YtsC8xJxa+Vb189vIu7v7Ugi+hroClnyNbzSOE2KLUZ5uq6q23NJKG3RMlJKxWl5H1G6yj+EkKScaKwGagBXWlNtWbq+b+v/pxGy2zmxlWYxjRMpFTbvK+Ipp6oQF7qaELPFKEfTtEhZcX/IvVg3rgoPSmKLvhIclCKsdf1RKlfFCVtE6YROGSkCMtZOvRARJyEhsUaQ8kLxEiUt1u
gaYhVVSiBlhzse+XB4t0+SaoMipUD0mWWZcK1knQLSfMfdpWMLr3z9OiLkAdfDukysm8Aq6IYaoHt5Xuj6hi2P/2/ryX+zx9/HU4Ww70pRl/CzRCHo2oZtjTy/fmYYDMbC7bbRuCNCmopTEYnGGMAydJZ5fGa+jjWpnT0p3FiWjeGokDxy6B95ev3HCg/PpV4Mc6Gxd4T8lb5r0PLI0Fed2LoGkJmuOyOlxHUNId1oiWhTOzsyd5yOD+QUWbcbShWUrZahbRQMnSSmiZx7tD7wMj6hjSMGTdsckKxIKYhBYHW17yzLzPFoSbnCnEPy5KwYlycG2dN3gpQFPq6ULVCSxmiNawwxgNcj1/ErzvYsaqbvenwIrHNAtS3LEui7Q+Vl5mfWFeZtIsaRlDU5g+0t1xnkIvDTZ6R4AxNH/FUgiudJPPN8feV0uWMYjjglmbcbpcwIOkqJIBdSbBmXv1U0Tmy5Tv9CChPJH/FZojuHsQfG6zMf3jdYWwMs7y7/wK15ISxAydydFdY4lFacTh2Iwu1lpWsU7y49ZVX4lDkNijT2nI+PzOvM3fkI0rPMgWc90394oLVH5sNKYz3rVvDeYcyB5tEg1crrU0vORxo70AyOcd54//gHtBV8/fyZd3d3LPMrp6HBL2CMQ0rN6TTQDTVBH1JC6wN91xHSWoH+uiMLz+l8wFjJFlYolsvdT/R9T0g3np9nnG1xj+ByR8oOREPMieHYAIKSIykqjACjBeuSMd+fOR9b7u7vOfYCpxtepydShtaca6H6Usnhj3cngs/ovvrCD+09MYxYm+mH2mn0OaBURtgZDJxOLa/XJ6KEEhVFLiQR6C4KcsPL7QWtPVY1+Dkg9A3dSYrMpK3QdC1ZBOJ6pXEDZvOYpscNGaU2bN/VYI9cwNRggqOh7+4IPrP6F3SMaCdRrmOePaVk2mbgGI60yfCQLwyl7mlVJWjPof8Oo08IKZiXlZQSp/Np36vcu2Qpk4vHWrl3OWu5JaQk+o3MhrW2hptK+VYoxlgTu9a6uge5737GWIOZ9f+1+95d2cf+YR/vV6c28C0MFLxn25aqiha/UwJqF7AGgqy1KAlvZWMpuTrkdaZxFYNVYWp1NUApwfl8qvu5b6EX+TsDM2WPNsfaueIN9M/OE877fmrdZ2UPYJVc17SarkUKs++Z1l8i5wyy3iRDLOTi6q41Yt/NLFi7/8LZ8maHEkggog00jWOZBWB2/m89jECmacRub1P7KkEFyiMkxtTCMiVZdwe/hfoz1TClmOdAyjWMmHZlssgSIRLISI4SrUTV45b6c6WYUHoDUXFhMYeqS6beuyo5IdK2EuuqXvp0vKNpHOGlcpS9DwT/dednb2xJMTQGv3gO/YF58fgQuI4zfpEYWYh+pqjfl1RjKuQC25Iw40xWBWksS3hluo24oNBNwuqavn+ZvjIvz2yfBH94/4GtBCiF5mA5tg8k6Vl94ngaON+dSbnl5uHx8Y7r9TObXFA50p0HTK9RuXZ1s3CcDgZiR8mZbR0ZesuWAqdDx7J+YR0rHcDY+txuT3/Bmpa+SMbN441mWmf6pDiczsQEpdFsBYTKdQ85ZnIqSGVQ1PubAZZ5oaSK09o2j7WWtwm8EpLWVp1tQeKDJ4ZEzNWMJpTG+92KVyDlgrUSZxRSVPVvjIGuc1hra1dR1dU02I1spWK8KHKfdFQBihB1lbF+xgpaOxQOoUGpmlFQWtG3PSnU64j3HqUr6s0Ys196BJK9AC61MyyV2o/BEqFAKZCqQFG76layrYEc66QmhUAKARIEvSKoxA8h075CUeUOwmecMGgp9+lbIJeIUYXoN2Ku1z2jG1L2JBUx5kjUgsMlE7OiP2QuUSBVYXv+hf58wdqG4WhJaUOYZ3IM6E4gsmZ+/oxOJ2Z//R8oKf9tHn+3UC3JM149IUw83L+rIOEcKmqhJFzjGM6C1Y9M0wYohkOHxKJtRxCFHMBoQ+9OjNcnzqcz94czv/42Y1Tlq/llhHJH9JD8hkgvbOvE4dhjVGS8vqC0YtnCftGMaN1itKaUBZ+WqqwLhW1d65J4Nkg9IssJWRrgmWVdabsehePQa3Rp8WFF2YiPNwSBxt5hTcU1hVBYxxnhMmTJ568rOQe+vv5GCYJtm+j7R6ZpJWaBlqfKRCw1MYwQRLky3jwhT0DEdR3+WTBPH/nup5/4/PpCCp5pDBRx2LshhhAVff+e+TVwfX7m/n7ACEUxHULAdCukPNHaBWUkh8MBP56Z7BONvcdfZ4bhByQnnj9+5Ic/3LPNK0JF1kXRDZIsFMSFLBXfXT5AgXWtHLj5c6nYmxh5fv4F0wuUSMR14cPpHSel8UpzOhgOXcupc8zbAsYQo+enP3/PX//rwvsPH9DC45oLUWg0ihAyQzOgTECVCsc/Hi3afODQnDh2J15vP7PNXznd3zMvAdd1HPqBeX6mVWdiHqEoHh4+8HK7kVUdqT4+nFDFYI2hHQ4oBOs20roO5Q6E8krIG1K3jGGkREMuktuWODewhhun0xljG9Ywc7nc0bUtH7/8yrrcaGzHw4f3PH35xOXuQpYN6xy5LTPCRNal/lzaakRWSC0QynO+a3n33UBIidP9O7wvtGvCaUfXHFjXK/wftVD98P0DIU2oRjMvG1Y6wiaxrtC0BmE75m1hOHa7hrKgzcJBHMmxwThH3DzarRzPD0zTFV0Sp3eP5ORp9UxIG7lIrFJ0x6o6FUKDjDhnGPp3e4dhpWRdD2uumrq2lwmpHMELcvJIE5HFE7eKgnK2w+iOZb7yTt7zI3/AbHKv3QpSglYNbfOIVhX3k6mHy8PJcvdw3FFSO0A8bjgn6PsTWhneXOoVOl4L2BpuUtR/ROz4m4i1b6PxvYiVkpRWYlyrg1y5eu8pO9M01d1V5yqY+w3mXwo1jSzybhGrHQx2m08hMRwsh74np10qsBeQ1QBlscbxJpWSUtTdUBnpG4NRNYTxphSt9ixoG7V3Jvce4o7PSimCCDg3kJP+FpQSVHVqDBtKJYoW33ZspRCEnNEq47RkXFVlPcPvKXpRMFqQsiQX/e1QQal/31pJYzv8LPbEeGVXgqwBxEbgt0JB7W75XQKgJG0r2ba9ENWVbpBy2TvrBtcYXm+ZlCp6TBRByZU3LWTEGkvINUhVn4o3okBAq4Sg7pPXD0UtwOq6R7XMSdVyPhk+ffrEp08jf+rPmM8bw58P9P2B19snrFVsU+0CllIYxxEpFVq1HA8dudTi4nA8sI1XXr++frtfprySc8+WAp9ffuPh4UfMMZOTgKYFK2j7DiUUygri9IVle2XbLCrPtEOHzAZjT3gWptsrjTbYtmPzI/3B8f0fB7y/Mty1UAy9OZF9pGlaXGORNKz+RkgjCcnp7j1kuN1+5fbygrUD0/LCcrO8e9exPH/BuZanj7/QdQ5lWl7HgLQdMS2UZSTHO7ZoMU0DJRKj4Hi8IGR9v3pfaDpDLpEUAkGpKp9JdfWmG3q0TWhX7zuSgtAKIzNFlN1cx14cKmKKvLF+k6+IuBA9MUWsriGormmrTUuJ/cAiahBM1oaay
BElBFkZYk40zlAKbNuKEq6GUkUGUfFWIVS0WzX6SaTVyFKvKwhB3+/Xgb3zmwK7bS7tqmlwtiHGiDUGa+v7NlOFDIhSJxY7CUcbSSmaKATS1DWClPbPkpLEre7IKmHrioKRhBjqaslON5Gy4gN92IhxgVIPqBS/66jr7ryQhSLqwe50uEMcNWFfY1Bm4PzuhCRC2QjxwPndgcaeyWv7/6m4/J/5+LuFqnOaENY6whdVCRnTgs0tyzozHHs2r5hGydAdsa4gS6KxDTHdcNqSdT0FdH1L02eIupoTUHSNo7VHSly5Tj8DBSUUOXs619PZltv8TCkC7yMhjpU9WqqqcFuglBEfNiQt1mputwWpBaV0hHTDqoRRJ3JKGD2QfMTplkPnKkw3eraU8NvE+fTANBZk1tyWK1JbXl8WFnGjcy1StgQfaM8WETXu/EdUGWhPiXG2dN0dL+MnlvEX3t2fmafM6/yZD+/+xPPLz5zuTxjrMOpIKCPhZshr3VFZ0khJgW2JXI6P+16cou0yZdU0neSHhwtSXfhvnz/SmsgtzbhjR44tfWN4137gdtBofeS+PXL+8d/x1//2C//yTz/T6oa+s6weZKtYbhOPw4mDbWgax0M/kLUgrh0qF84Hya9fJh6dwXcWMziEX4leMjSOZf2VzraUe82lcdy1Z15ngS+Z67jg5xUrBDotqKZhaM/42XOdnuiPLW1v6OV7rl++sM0Tum0Z+p7x+on7o+NyuvDb7ZXWGUo2GGeQwuKae1wjWbxkWxa2CMp0zMtXlGhxjeHr5y8cDz1JNKAj6y1gO8PTyxMhPqFVwzAciQXa/sC2bjRN5Vx2XUvrLCnXi6UQHaLofbyleP94z+GuZ3zVTPNIUoESC+hELpFtuwInlMkUk2pCWUMUHq0M8zaihMH2A4f+HUY4tm1hFfHb5y66RAqV59McWrJXFCZ075jDjGwKkkjRQC74OOOXzMP9n/j85YXr+IkiCzkJehTSSc73DzTtmZfXF9YwI0RExAZfMopA9BONveD0UIscnfZxrtt3G7fKWBQWZwdKtqQ0ERNo4VCyRRuPtS0xGGIU3Jc7/p36j5zEHYWNGCsFQ0hJig5BR0o1tet9wLrMD6dL3anbk+i1I5bRRmC1gVSLv0LF9AhR6m4Y1YYmVKnhq5DIJVSlqrDfGI+1U+kp+BpwkJUHWepCKd57cgq7bUvuXdeK1NJGcn9/wujmWwELEGOiaST9oYWkSFHU7o9QpBSxVtLbI9tWQxQ1PCR2S1RCqvreEcLwxnjMKWOspOkOSGERRZJJQL3hCfGmP7Usy37zBxC1E4n0NV9QKv307aeVAqwRKCEg67p+QPm2Iy1VPUisU32O627qHngqeS8wJSWJirUSomKzhECI2tHyW0RqhZL1Oc0lYTQ4p3h9WYhZVsB9rEUkFFwjUaYGtnLzkhcgAAAgAElEQVTKu/MeioScPUYnlHB4VJU2lFrgllI7y11r2JZCLqKui6RMEpm8j1at1ayT4zj0TPPM9Xrj9ann8GPDKhaQkqbt0EDwC6Y1WF1Y/IQstXOWc0J3LY3QSFs7gjn9LupAJHRJKFVB+GmbmUtg3jZM0zCc72lcg5UNPq10xiLv/oRxB7anv9FmSRGJLx9fKS4TvKJrBON4Q6pAFp5lfuLl9YVYLP1wQveSdXmpnXd3YfFzvf8lmNeJgXvWmIhSVB0umRBFLRBJjMuEdj3tYSCGmfHqUfqAs4LWCWTYuD79E9Ns6YYGgSfnA2hByc8Ya7nePEbe7dIZSTM01Y63Rrqup+kGcpFIa5EkcgRpW1oV0bZUvJ1YMLKGuJQWhJRr9794gk8UeYdzEkOiZEcsASkSMRZCLMRQqt9GCmKp1z6jJBKL3KlDdQKSUaYWcDlsZBEq8xmNtpV/rHT6Zo8qOVSJQMq1SwvkEmEPFkpjdstXIZVMphBSIa4V1VV3Zqu0QAqJNm/2w4q7E4i6YmnBew9U5XTJEanq3rcUsgaihSJnMNoRYyCLTE6hKsdLwDqLFHUX38iMzJre1FBjlLEyqbUllYwUCUQipIwoFSWWs6BtH7H2Ae83+tPhf7yy/J/8+LuF6hY9qMzl/B3eR4TO5AxSWqSIiJxZXgWH5gHd+YqA8A3FlN3yVB3MkDjddSxri98E43yjOQyIAjmYuogvFX3noBQE9Q3z/BTpTgYfeubVk5JAl4TRHS8vFZrrvWFbV87ngX7QPD9fGZePKHEko3E2UHI1bGjZ8PzyL3SuxwZJXEfG+RXXniveYlOEMDOOgSID45SRRpCmwvM888fv7plSYblFDIIf//BntsmTwpX39z9wmxZO/QdK+AtaWXIqrC+SD//7n5jXJ+7v/8jPf/lHju2xBg6ur/z44Se0dcj8K+8f3jM+j7wf7hlvX3kNmX//5x/p/tzwfz3/n0jX4MdXVNmwbWZoHG135PZlwQpozw3zbwm5PPP9f/r33OYRXT7x3R+PJDYuF8v11XBqDMbNpG3D6AMHe49VUNoOYTV9qyl5YxoSl/MBd+i5PU8c+g7zYNiWxOZvNLbQ2RZdCsIITpf3LOMrdhhxrLT3B3LeGFTLfP1cXech8PB45tB0lFL4bfqKCJ6X5y8c/vwTYb3y9PKCNh226dASjMyYlFEi4bdE0dB1PXGZuF1fuc0vnM8dOaY6amocrTvtSBKBaztSXOlby9dXSNnDdMUYzXS7UaLHSio+aBiY14zfJu4fHpCqYUtVbVeRHhI/BpQpFF8PYRQDBIrUHPpHtqCQMuDaFu81ScIWFqLa8CljbSKVleeXK1oXlvVKDL/f8JJMbBnCOCIAJQJFTryME5QD0ghisdxmX4skpzHWsPiVxIIPI85dCDGzrpmmfaBxjmW5YuSALCf6rhYfuURSLjUYsN5QGkIs9XPRKNqmqclZGaoCMguM7RAIpHEc5UCKkphnzpeqQc5JsqyBP47/gT+Z/0xjB4TKxLQiyYzjjV9+eSGVTOMAJDFuuEYyDD0palIuSFW7qrkElEiUovY/dXyXU0KbqhqO3u3XmtppS7EmkrVukN9g/lXrmHKq0wLjaldQiB2RRGW16sqrfQtSAcQckTrStR0ld7UoRu7dzUDT1iJu2fK/uoLuyl+d6mgw125QoZrScs60raXtYJs1Mb+RTOto3zWKw+HCujlyKt/S94KMMYKm1TWYtT/ewPqCQNNWW1zY6vcqhW+6UW2q/jPEsv+MuySgVMtcATafEUUhZA0Q1aBZTe2HmEnJVswftTjMJLRR2D01rXfdJ7ATKmroLIZKVhB751gJBTLTtooUIPi6CpBS/v3vS8Fw6MixygqKyPshpuK4lBZoo5nGTIaKgMqQZQ3AtS4hyORSRTSXy4lpnPjy5cbl+3f008pNvNK0seLYlKFpqxHQZI01mvK80rU9xWlyWKtoxSiy//35jwtkWUOM0c9M8SNoRfCx4opaweoDH84HYgkc++/o1UCIV+hPqObMujzxPH7ClQMkyaucKMVgnWd5CWzLwvP1hXGOqJe/8dN3/xtOeabnEazgy9ePKGs5DAMRz6+f/8IS
BN//8D0NhtY4+vtHZNEIZoIwDN1ASZFNBgZnKKXj3bsjX56uuPbE0cB5OFHSwsv1lfF2o++gaTXSe8r6lcjCchtpz3fI6YUsNc8vr3SnH0h5JMQNjcQ2imsYiXMgti1580gM67ZiNVjVoGRmWl4QUmM7xcdf/8Ll4R9whxOsHoEmlhWxRcJWQIGSe4Enq4QIqQgozP6+l7JQEljboXTCs6GMRRfQuh5khcykYIg+scUFkKx+ru9XKufKmGrmY1+JrFrbwjhOQKFtOzYfcK4yiK0zIEDtu6kg9tBZpZlUgkImhBro0tqQUg07QkZpicyCECJa2z00WvfMY0govVJEJgWP7VtutxGpDIn6+0BECF8PvsZ8Uz47U9W9stRw2bqF2pDwsTYEpEGLf+X1/l/8+LuF6jQtdJ0hhMy2bXXXjxaJZujvWcMzKW40TceyJsiGnGFaXzhwrnrGxrF5T9NKnj5HhqND0OB9QGRBipIi38wwkZfrc70glFTHTlIScsSHSNfXfdVxXrH2xMPjiY+/TDStqLYXVbtAPihgQZkL88vKutx49/ADy/bMOE3E4HHGMa0zEQ1ecjwe8KvAKsfxYGj7ho9PK2PauL88soTAelsZLif++rdfMUpgC2TVMI1XLu86xunKw/EOnX9gnK50Q8M/2P/M08cb5/6e+TnweHwPW8/94UQJV757/yNPLxOX4z0/fPge7hKGggwKZVp+uHvgctJE9ye0z4TOI2INaF0ezoSr4dIluq5hjRODcwhmbl/+yrxlbN7qHlJZuQwtZb3RRsflfOGaJ0iS4XJmvj6TN0/XGgZrGKfAoAvLNhFNoqRIXCWmG0AGVM5cjGO+Lbz4Z+gVanZ0pu6GDXZA9YYQVuavn7guI2jHu8s9ujie//bPuBZEXGlsS4mB4CeG4YKSjo9/+yvvP5yQuuoNc0y0jSL4G/NcQea9rjs2WU0MpmONKyVEHg5Hog+gwo70CUzTDdc4OucoAkTeSCnx5fU3DocjrWvBe75+/UjbHZCiw8fMfP2VmEZ0Kawb5LLR6ie2uHHsL5TgKVIRN0suGqMN2W9IDMumWVZPKoEsa2EVkWS/cb1ObGHk0PdQ6v7im2fbb5BivYCu20zRI5qOeVlx9o3lWXcBte7JRTIvG+P0ihSWtnnA+4UiVub1CrLFmsht+kjn7hj6HiESKUa64YhSji1sZAJSKbLfgMi2TjirsE7iQ8Cae0pJxPRa9zGVoXUN6zbTGYOWF3wouEbz7uY4zI+sq4AcUFpj7IGhbfn06415XnFtV0NQRSGERJu641X+VcFZT/oLzkhUaWs3qFSrTYwRUbaavs+/J9NzqgWiMXV36w2ijxDklJAycRgGtOohyxrmEXUn1FpJP5xompY3YD/U1QBtIkor/FK/HaLuJAqRkTqQ0ZQiv43na2GYyXlj2zK5HCuejPytaHRWoaVmSfXnEHLXT5LRhpqcX/XOR2XvDEcEAUomBgFi38fcv6+SBa0KPpS9sI+VQMBeyOpCpLKOa0S6MmmrhCDtxJFahGq5s28TZCKZjRgdBV2DYaTKpCRjNOQ97KqUpIiyr0CAUpmUAzHVMS3ld4e9UgpnaxhvXRPG1eL6Tb1qdS3oX55LLRh24QGlygmkroGalAwpF4TayQgFSol0jdrfSxokNG3HcDj+d+bea8mO7Mq2HFu5PioEVJJJsqp4b/Xttrb+/2/o11ZWlywWkYkEQhzpcst+2B5I1gtfi24GgyEAhDgR7r58rjnH5Ho+cfwy82BaevWZZboye0Vb78EJDrs7BBZjCobzwK6+AxVZfMAKkFpw7n9NRhfKcBpHRCUoVIGfLaVo6UzLOC+M5yNJgZEzS0psqg3BX/Bzn4ttXIBkaLYbRu/Ydi2ju9IPgVpmPvjkZ2RRsq0kyzKxhEDVNoyXM8N0Ywwj0jrSEFC65Hj6Slk0XG9l3nLdfSTpghgdSz+y21eMtyPH6YzpNKYo6ExN0Ugunydks0cVJY25wxSBVCdE6zB19hXPy8g0nHn6cuH+4zvG4Rf++ucrH3+453S8McyeoW/wHlqzZyk7TCO5vPzMdv9HCJG+fwUJ49Sz0KBVwflyRKqC++ITVfWeRMn52lNHw7QsqEaQfELq/CBUVhXz7IgyM6ZZV/TIBYlEq4gssuUoxKxoSlUjU958ZHKHw/u8XQjeY0xBXXWEmGtohcznp0Cta/+EkBZILMu4blbydcN6TwwBJUNuOnRuPbeBlCjLPHpZu24pSJRlgV4rm/M56UlRZmKCC4Swlhqsnu6mLfLmR2YvbV0bYvxbbF5ieEMeioyOy8g7S91UFMqwOCBJtm2NFCoXgVhLXajvD4r/CMff96gKSFIyLTOn8ytVXVHImikOPN7/hsGOCDPTzzPGGIRMzG5mWq4cbwNt1VA5yTTP+BgRdDjrmPoFaRQhLCgZ8IC/9tiwEOKCqTSkRNfWvBzPvJ6PaFnRCFhmyTSN7Lc7plEj9ITzltP5RhB5HaPljnmZaZs9Onlqo2nLA+fLE58+/o7b5cg8OppNR1lsmMdEoSukDhhV0xYGfM3dQ4WUjrBYTKNIr4HaSP75xx8QsmHXNthpoOu2FJXht7//wDj+gqBGBqik58OPv+HzX37i0/0nLscjHx7f4QZNWUTifEeZIBrF+w8/0hWGqBPRCbrmnse2JFyvjCh+t/uInwfmbU0tX5j8QFPtiDYiqpqqluxS4LbZMFUGexroupLd/o5heqEwEiN3NOrKsszcrpayLHM6ep6RUROs57h8o9ndA5GubEiqwC2OtlH4IZvBUZ66LCiqDfPiSV4QJguzZ5KKsttyOl0ZL6+YpiROA0Jr6lLQmIUYDSnNMBv2mwPKNLz/9DuSEVzGAetOdOVCtJbRG8q6RmnB+XpEqUA/fEUvBZ/e/TPT+IrZ7vKNXwXm60Bd1gzLRGkSUQpCgGBhii+IwuBdQ9cK5mXJJ3kIiGDx0eZ0uZXIItF/O9EYQRAOrXJC1wtYxEK7ecD6BWc9ppPYm2e0C/u2WdfVitfLM4u7sb/bI6XBR5hmh3cjwed1Z2VadJQUpgLOAMyDpW1rTKkwqmYcL9hIfkiUEh+XHCDUJVLmC5kPFu8kSpSUlSBxRmtJFCPTMmNt5p6OY58HLllh9IbgI85NJGmRCpSq2G7u0FIzzwNKaoT2SKmxSyDEhbIo0UYRgiJFTwwWFyWsSCF5Kqm/PZKmhkV5gg8kAXVTcjkd+dP//J9gFHXbrN3iIRM4ikSMa3CTFQwTAmBzMj2uuCgyZNx7j5KWEE1ea4m0/p+IlGBMHhzfkE4CtXJVF+pGEb3GLm9bosw0bRpN3ZZ4m5XbfHPJ1ZJlkdZmntziBLkxS8qElD6/Hukt8Z9X+1LmwckHkVF1f9MshXD4aBFWkWJWiVOMa1AKEo5pSoRQrVfkXxUYoVdmGVXej79ds1NEyfyQ40O1BkjywJzX9JCEZ3EShEayDvArBiylgPeKhPmuNAu5qrEpt+55n9Ui8mY/e0nJnCrnPCnl9kIp08o7harMNBjrRG40AhAih1rW4Mm8hOz1J1f
cihiR5GrV/PHXUBxZOc3BMY8UAWdF7mFfKQ1ZQc6e27qqcCECeg3oKXa7HbfriW9fTzSHB3a7hj+/fiHoim3pUYvBmA2FnpmGifvHd7lK3Fm69gNNu+X49BOX8KtHFWNpisBkA/VuR5Q+M3ylgK7KD3cV3K5fEarg5mdCUnSmICZL9I6xn+jniddpYvCJiOPp9RudbWiaLUZ2lPVCVVek+JFxmXDnG0ktPF8t2mikknipUSYwxx67QHr9hU2hOV8L+mmhKBN2DOy6kmF8xnSaKWT6QyoDI5b7Hz7h5gmE5TJ8plM7iq7j9x/3jNcjbhq4TSfG8YLedsi2RaXA6/FEsy2JNnAenvn22fH4/gOBF376OvEv//q/Qjji7REzf+P535/R1YZ2D6+nK5v2E96PlPWMG0CIDsXC5fiM2f+GyfYYnUOWRSEpRYGnYMKjlcnbh0je6iqPSDUSj1JhxapJFrtAstn6k/JQFuOyWgVyuDT4RFFKvIcleCQakSTOZTxbUZSE9bzsui4rnFJTaElI2T4QQiIlj3O5pS1fL+Q6nALi18ILKeSK1ssWpPTWpkUmr7yVfIBYUVmB4GXedKWSobeZ+xzz5igFhy4UJCjMylOOEVZ12PvMnxVCEKzFJZBqRXipN+bzP8bx9wdVkwgiJ9yjCVAkHAPWDZynkn7s0YWnUBBSTjL66PHJrnDjHcPtSiIxDAs+DrAE+qui2+fgg3Weaco8s2mZ2B3a9WllIcaItTnU5AkM0wm/SE6vPQ8HiZsFhSmZx4FN01KkHUncCH5gv/kN+/YRb2bePX7EW0cpBe8/7PkSfmLuZ/7w7gemMVBta4bhSigXnHUURjNPkfttTdkeONn8xHd49xGrew6fHnk9WlTy1EXkw90Dpmzol2fsPKJT4P2uJfrE0h+537dUCMy2YpkCh0PN8DKipUILwfvDBmMaFjtjyoCNFm0SXVXjiNwuN7YdHJ/PLKVhmGb65QyFodYbpEq83HoeDltisKhkcPNMdJbusGE4LVB1gKZpPiKZqFQglQIRDItz9PZGZyqCjwQjGK5nts0jzo00ZYkLI3NYMENBVBYRPIPzxKLkzjzkVVJT8Xw58mGviNaRXI9zlrKtkYWGCPOwICpBu92yabbYqPj6/A0dA0a23MZvpHChMSUuwDJPDPaKIBBcRGpYgsOLisGP9IsnRI3QDh9nQiGZiQx2ZrYjSI2UJVZOGBnx3tEPLySlabs9oZ9Z5jl3sceErkv66YZKPSaWtPUdwrTc+ldC6FFVkXEeLhF1zzA5KhOyv3FZMHc7UlS4sODcmDFV6SPeKUKaMlCcGiUjRhusyy1vf7swTiLCmnqVVCzziaq2OdShNSptSWR8TH+95ZBCXTCERF1XJBaUqolRonVEUjJPlhQ11o5o73IoKwSmZULIvFYSkpz+lB1IRSENzmWvo5CC0Z6yGiZKVGmYpymn5F1EqoSPzyhdsz3/E1xbgoykOOMRKKNZFsGf/u3f+PL1G48f3uFDQCGzV8oPqDABKucNyMxT5xe0TkhhCJk6lIefBOAxJq+JcxAp8zVDcJgCirpgmVQON/Gm0Ga/q9GK2a0NTsicVE8RqfPbsoKSvx8h5ICklBnnlKIAuXo3EeQi9UjweeAT8teVuhAeJSXCF9nXJuCNZypVBAKLDVn9WwfV4COYQMLjnMoBIZmVnkwG8AgZiFGSovxVvRVAygUKUhmEKAkrBSGu3epCBRIO76tVQVlg9aEqBUWpCUESo0RpuSrKeYANcc782igRUq2IqZXNup6bIZIZxFpl5moKgKeuFd5ZvJcopRFxjX+lgDZZyR2nCLLIFgEfQEpUisRocRagzdaIN8JBzINqoTOZJcQ3JSoSyXii2iikhGVMhFXdTTFRFCVN03A+nXn6ueX39ZZt3VFtDxyqCq0EU/Q87HbM88LHux8RpeL//bf/h/fvf6TZ7Bj6V9692wLf8s9JXNjs9qTxxu16oao6TBGIQTKmGSMNImiUMRQ+0l96ZG2Y3Ihzlqau8H7h5fKFl2Hi9mXhsDc47xCj4+X1K3f377hNP6PkFskHytJz2FQoKTjennk47Hl5OVLUWx7vO5zvuQ0z8XDHdv+Op9dnYgrU2vB6OfN88TzuH1FIvPUkbZmGxHG0FJXmdj2hhKUqDE8vJ9AVUmsGOzGOR/x0o24b9r/bcDmf+MPv/sD/IQLH1yNaOgSCRgWIE81G8uEQ8NNn3n9sGS9/YVwuuPhMfz6CrCik55e//N88PNxTFJGf//xXdPuJQmn65yO7ak9tRtx1xsYC0yperz2Pn36HqDoEGUkmgDF6CAKRXG73Wu0oSgWSTFzOV7ybqMqG7eYeUyjAozOBCxcsycW8KU4BTfZ/FsWKzSw1EU0MHikl1DnZH3xEF5kgIlW+XpVVkR9YAG1MrpEWYsVphXVQhuD9ej3LrOuiMKu3XFIUOlswyXWy3kViCng3YrTO9iQpviu0MWUah18/P5nemvQEy+wyJzblj1eURWY9q/I76zn9TVvif/XxdwfVKAqSSsQ0YmpFAHy0+GR56X/CLg4TFPXBEFwi+IxiMLrh7rBDJkEUC/v9I9++PtHbz2iz489/mvhv/1tFKR6IIWXcT1kiJXT1lqG/YK2nui8RznK/v8dZTwoOQk2t9nRVS1nekWjZt1sq02J0zfPrRGta7ncdm0oyJ8Ohg6dfrvzTjw+I1PHhznLTr9RBoBXsuwppewbncs2dFJSN4K5tiX2Pq2vKOqJnRUwWO48E55kvgeQtbhJcXr8R3ZE2dIzjgtwk7vfv+Pb6QlW3XG6vNE3BbCW3aeZ4u7CtElX9DrsILpdvbLYboi15eX2hbgy/3Dyq3dBfRlyS/OXLZ4qDQXjwwXK59cxqopYwjQtBCxYBWkTQC8vs6V+/MdpEdBEVz6goafd7hF3og+V4GXn/7p6v3/7EH3/4HRu14/V4QhLwzmHnXPF3lTNVWcD1FaEkXVMxjCNusjze5672a7+ASjydn7hdR95/+A3WZjtGSgM25rVgHRPLOHAeJva7B27XC4tfiOqIDTNKKa7WUXaeBQt+wE2ebb3hdluQ2uA9HM+v9NNCiCWmnFEqskTPfH3CjiMIz2Z34NZbpAm5P3tMCN1zGQJeWqKYqUpD8JZpdhR1Q9FoRIqoIjIuCfxCP1/QZZUB+FIgpCaIhSThNowoCTIpFueZ5xldCeq6xgSNiCWFFozLmRQd3XabQ0RG46JHSkfVtL+ed1KRhOY2nlgmRxSJiCIy4OcJQbkGbmZIimXKipMQgro2OJdYrGSaLSnWmCKb8m2wmKLAeYH1C+N4YRoDTauoyoLt5sD5/MyyXNE6D6rz4jCzQoiIUVDIgnGccd5ifb7oeR9zwjTOqNeKeKky9mldHYUYUSnx9Zcv/PzzT1hvM0rNOopyVS6xJOFWRXJNuEdB8AumSHmoWmeUvPL1SBKlrohJE6PMOCQyjkizZKQRZvWTrcGdtdErxLUd6W0Qizl5rLRYPwf1fe0fYw42KSWxcx5s8+Z5DfUkj0AhKMkBPMFbxavSGYru3Z
pWJ31fw2utKErNNKyzl8wJ5pgSirC+/8xx+t6OniIxLSA8MRlCjHmABkTKzVsujIhFEnyTX6wUcilXSmTPmkaQCyWkVDlMJUAIhxAe50DIJg/qYrVfeIdSgbKoSK6FkF9vIVYFXAWUBLtkz5tc24dQOYwGFuciUubWru/1sxHKQhK8Z54SUurMyE16/do8Ap+93ulvB9G43uCzJWhe8jbtbbCOKSe0jYGkIKzv760qNqZIVTeY4crp9caHd+/5w6c/Ij8YNlXLMB1xacHbJtvChhnvAre5Z2sX+vMZHxJdu/t+3hrTUdf3qGLH8fKCjxMqdTRVzdVPxDRymxJ103K3uWfyPVUtOL9+QVLhl4V2v6H/jxPHp2esdNTqge3mPdHnoo+/fP6/Mu4sWrpmxtU7jKi423fINNFPA5fLiWLwFFrz7cszzfaO4dZzKrd8ecllJvuwp7/9wjRK7ne/ZRpGpITb+Uix2xGXmcvlwnWa0Mqw+eET0+2VabhQlgXDvDC7yMd3G772gV+eBopq4XR5Zf9uJY20BY1PMEmKTvP4/iM/fgz86fOfiXGH4spxcGzuNmyFZuoth11N3F9pm0BwjmUaQC+4ecSIMz//9f/krt3QH0dCUbCRO6bzyNAqRJtDRiYZ0AWz94jZYQqNj5FpEVR1s/rfWwKW0hjarkKqgqoumaaJKATWRRa/MM4eozVKJ4IIhOgIfkSmiiSz3Sc4m68RWjPP7rvPNFt8YF4sxoSVsSpxNqwlFwIf/boFERl5GdaKY5kf6JTMJQR+LR/ILOs5/5yLhJKSGKGsDZUoV8pIvv6EFLDLSj+IIeOtQnjDdaMUeCfzIF1WOfyVAsZoYvSrJ/4f4/j7HFXrOE03yirx8nylqXZ0uz233jK7Z+q6Rqot4zyy3z1wel0IIZuMH97dMU4Xkq4Jyuf6SKExsmHXKe53G47PE6Upc5IwOPbbmv2mw08zu8MBXTvqoiHFDKP1LnF5Wfjh4T33ux3jvBBDvglu65amFbx+g4/3H+m6PQVkIPzsKTHsm5bT68Jdt6FRimUcaTYJa3siYzZVm5Kx94hi4tvLT0gRqLct83giiNxm0y8z7z488vrliWkamOyVWrcoX6CMJlYlLnludqSst/TziWQsftHEeEM5zawuFKHjMg9Yv2BMxRQthW4ZlwKfArOY6b9eaYoWbWf6eOax/ESBY1GGIAJFKTg+P6NNRalKjpcnrD0SppHD5oFNc0B2NXYYqe823JZnStlyWSaSdAzhxGUIKARLtAhlGF4H7j/uOY+5Zk8VIFKB1rmJyRJpC41fFqZx4lYklFI4r1Ehcr0+MyeJE9ucqG9zm4woFKdxIMw1lQr89Xji5fiKEoZhXogmrwCtS8Rk8dNlbe2qmW6e28uRbbfJCp6MDJPAxhGBzOn/WHB8fcW7id22QKmK6CXjMLK9b9d2Ho8SBkTJPC/UTYdWCu+zMVRFgalKpnlmDiMvQ08lKppOo+rMiZUuI4pM8w7rX+lvF+o6Y39cdGtndUVVSaw954tUWeF9BU1H123phwGZWy84Hc/rkJMPpRLOOZ7On5nmibvdb+jnhcTMrb+ybT8ilST4kdJsuFwviHGhbmquNxCyIBKY5oHoJa1SmbmKyQpcjJRlyWIj1h9RVlGVHW5ZcEvE+xnnNEt06NLTDxnz05UKVQraast1HEkCQnQZ4C4S9bxn9/LPNAsPlNgAACAASURBVO6eoqqQSmZBDRjHnpfnb1hnUTrjUazNzOPgA2WVMkM55BBBBnZnxa0s5YqpIlsFRLYEFKWnqSXjWOYkOiG35qSElFl1jfGNMcr3fxOTxTn1ayuTyKvknLAVxPUa9hakiiFRFNk/6+w6MK7KXoiBwuQGHLv6RX9V/RJS+fwxc5tqHj5XdVOqrHrEmLXZPOSutoVCYbRmcdlfmTmoGaSO8JhSELxAoJAiJ/sTIg/NyoOsiGltkhJ5uE6Qm6NEQYpZ4RFFtk2k4IApA8ZD7nnPNP/EW8tW01ZURcVkS4R4GwpXMoDMKeIU6+84nrR6UZVW2XIQNUpn5FUMKodRhKTQEGwkBbUOvrn+MsXsCWzqkojEB4H6ToLIHsCyVGxbwzz7/+QpzqvPgDGJJCWLWx8e1lBaJFGVmh9/uOevn098+2Xg8dM7QnNmlhEnFLfxwk9BsGkM/fUZ6wM/Hu6RccaniqKqWMZfw1RClZBgsdDtHjH6gokdMYEpChbvcC6yKWqiMWwOG4R2jHPk4f6OUkb6eUDGgg/Ne7p3FURDsIYUDL+92/Dvny/87uO/UogaRMCrhtNrT7QRKQvs7YLEYMSG0+srjd7SqB2nlyNykgg8wTva+x/Qdw+cxILQNtMnQsQtE3/5fKLrCsIyI8NEpKNs7lBLz3y5MNwGXPCIaPBJsIQe7w11VAynF/xQoOqGpoLK1FyHmVYbfFi4BU9dlbh5pu1ano832roDBMllO88f//WfsIsiyQA60A8DMRZ8+uET19uAmy80G41NuZxB+iv9t4FgCjZNRZgd5WaPE5qtkhAEtenwc2QaFrQpKU1LXdV0VU0UV15PL7iwy+ziMaJlg6kKlj5SmZaiXkghJ/bP1yeirdkkTX+DHz59JHiPkopuawg+P0hJqUgRlNRIoQBFDCvmLiU8eXumtcS5QN7CvKHixKqexrXBLiusUspsEfjeYLfWCq8+f+tmQsiVySHlLYxRGfcmZd7SaaNW218mD+QcQCAzn3PgS0iF+5ug4H/18XcHVZlg7ie0rOkvkWhnNjuBkg39LdDWhsJkXqLWmV9ali0vr0c+//wfSFPgF8/yeuVw/4EYdogUOPx3zcYoTnxht9vhXa4HrQqJn3pKaXjYbbExh2u8czRbhRENZ44s08D7XcV/9APWnVFKMF6v7Jqax+2GaQrsyh3TOBHDjfEyUxlJfzrTX4503Z7Ddscv4xVVdpAKFhcwpuawv+c2XVn8zO1iUSJCYRBSMs6Ww90dx3HiehswbUnSM7Vs2LUHTsdXVJH9KY6J0/MLD4ffI8uZ2zQjhWDbJcrmETmVLLJCGnDSYrYtfQDlJnbvW47HC7p0SOHodlum4YndXUVRd3j/kpPNDGj1kSkAFeASdrgQloXLsPD+oaFqKg6HHV+/ZEC92G4ZLleWMHLY7DB+wOvE+8OBYejxMtBtGkRhcvqy93RKUYTMDzTdluAXhrmntOBi5Ok88fHjbzMqJiYe9htsseM0XWg6zSJndKqIIXK6nHClpaoMSkvOpwubTcPiNXXR4pZcpYq2eJlQi6TZ7unthehGytBSVi0+LFg3Yd2Atw5VJmKoSUFQmQ6pBIv1eHuFYPGTxjlLoTXEClOUjO6KUi1uSSSvKbRBiZhxUMEze8ewOO7ffUSIGx64uYHZOoQJBNdyul4IacSUDUWlWJylUB12WRBElC7yA8E007b3qLnIwRipmacFrXIl3+vx/P28C3FivF7x8ca8BJZlpG0Vl0uPswuhAqRnnB19f2FeJpROlHXD4mbGYaSuthSmzKBqq3Eu0JR3LHYmqYEYCtwiVl+h5HSceXZnd
rsDxBo7OYSIVG2D0I5huHI696RO8+7hIxsqlnDl9fyVpr5jGzfUpwO1f0dZN5npuILrQ/D89NMzKSa6rmOaZ5x3Kwy8hOgoS0WpSyarvyNdnM+e0LoscV58b69h9R/ud4a2iblKWWTV9y1UVVW5o/7N+ynWVTDEFbuVYfY5FEVGXam3izVreCmv+kN0SOHXm41ZFeAc1pEi0TQm+1BdWme7N9U0EuJEtJEU21VPTWvgyJLiRIqSGPX3z4O3NLwk++fIA1sS+euKMQ+iuZUrK7+knIxPMQfMylKiVGa2SrV+7SEHMOrKoJVZbQh5yFRivZFplRX3qUAIky0oqwtPa5UbeDCEIBFK5kEcRSLbGJSUkDRKK1LMvuQYBKT1xhyyksqK7oKIVoKmLLJS9TbMrw8JKQRcnAlR4WO2JYnV5xdDIMZAVWS/nfMruSHldxJTICWPKnI5zGKb1ZKREFFAEhgt+PRuz/W28MvTkbu/bvkYas7nHv3bzBie5oUleAgWJQ2zT1QFuOByC+KqdANMy0hQikt/oW02qLbBIUFVNM2BIqo8lMfEdbqyDIGmK6jrHSEFtod3+FfDP//mv9GUG9qHjuO3n1nmmYo7MAud+R98+O0/Eazn5XkhViCiJIXMmb0cHUVxYLfb8NOXb1Sq4fQ8cei2fHp/4OUkMKVGSUm7eY9IZ7QIXOYrO2XYtJHX05lAQbCSu+2OECKDN4i6Q+onQpqp2obbeeR4i9gpsr3bkOyELhTz5BHG0wgyZ3STg1u30wVVdLSbA/31iC4OtI3OhQECtpuReXLEaBBa0TQtgQnKESkEwWketp9Y2omqEUzDjPOBsnK0LShj2OwK/CIIcST1C5OQaAM+XpG65Dr1CFvQ6BavPMsCl5df0LJCuUeCKthuO5IKFGVDHXOGwzQFt1v2AHfthqutGYcTIWqqerOu6hUBjwoRSaCfZoyOaJ3QCEKYsT4iIhRlg5ERG2YSBUYqKAwMI8poArmAQ6ZM6ShMkUWc2ecmTy1JApY5+3QXl7MCEPE+C1hSZU8tq/FJSZ0DjOJXDjEy5HvK2jhaFFUemmMi+F9/tv+rj787qDblPZcw0dV31GWkrhqu5ytal3z4+JGuLNl0kkLueNh9onpfMPUBN0Zi6HFO0jQFWhlUkmw3DyhtGS5XhIe2NBQisulaKr3D2p55GsEahB+pjaJVgsWVuN5hTGRftDxdvhLnhUZLirJCyZr+dGE8B+43LV9uJ6Q4s3iPMY7kwJQ5GCG14nh5whSO3eMOFxVSGNrtAYRhCdDuOxg0Sji0LBmniaruCIVH6Ts0Xzi9nOjqjrv7DeM1sniPagSzt7i0kESk6QyOG2iR04Tqiiy3nPorIQXaTcQuM1IbejcRnGe69ez3Fd5MRD/ihGeYf6HUBV25ZRzOJK3Y1g9cLl+5XXra7p5Bek4vnylFQFd3fHj4PU3aYMeZrtlRmpLj6Yl37zpu/hcqVYHQ7Nr3jEqQ3JFYKupUEqXEqYaUcqJxvFqavUHpgnnRKD+RksNUO3AXZFTIYHC+xxLZdg/gBbONbHdbruceESKSxN12s960Ek3VoFFE1WMdNLLB2SUrWhIQBcs0E/QFWQzUWhHjgpcV/TRQyNXno8BON7wb6bY1bVdhTMHpMjNPC5tuS1PXnN1CP09stncgDdFLhqlHREHXVixLwiVNIWtEzI0q911D3RScrj1RVEQMXlhMXDg+/cK85KYuOxlklW+gp+GE1pGi0DRNR79efGRw3MaJKhjeBoDj6Zw9a0X1/bzrbwPWLlT1Hc6MuDlRbDq0WGjLPdM044cLIUBTPZJGiTF7jNoi5USMM4ud0Uoji4IYAsEnVCupNEjVomROolaFQWuNWwSm2JNSAWlku9H4GAi+IMTApumYY4HRLUoKNlWF9gHfvmMTN9wN74l+S1lvaZo2m/gRFEbz0+fPXM4XtNLUdY11jhAjLnistRQ60jQKgSH6X4M53jtqnRAxraGn9J8qTbU2pOAIgZzqSRBjxjfFGEl+tQSsXZ0xRKQKGdtk31bha/gmRsCu/tGsPubATiIlBzhiKHISPfHdmylSRCqfDa1vYSnehq1ASgGChqjWINWqbqZIio6U2qx0vymfUayge4f3ghjKPPySbQF5yAsEL0ghrwXzm7NSa+1MlAsidX9jQXhL0ds8zAfBm6WVlMfnrNB4wECqyDe39D28JGQEKZjnCMjVm7uqy6v9QKti/d5lVmxkLaUMPdMU8WGT1cy19CCJiFbQ1gW3OecbMlLnTTFKJBwJgbMxrzNjXO0Pbx7gSD94rMtBw6xQ50FXiEwtsIvI6eoyE2WFyIqrkQFpIrvdhq9PV758eeH9/UcOco8fExwyJxhd0pqGFAK3eSEp8HHi9TQizK/3y89PX9jsPrJtJNN4Y3GJsnREVXB3eOR2C1BFXq8XSm0IVlEheXy4Z1yuRATvHz5Q6IBNhsCWw11i9if2zR3DdEWVkdfLkcNmy+G+5DKdudve03UROzmcqnm3f0SqGRMEMki6UvPDbk9dCKybMEXN6eWZD+8+IZRifn0hxZnbBO2u4YcPj7wcb0RdUe8esJcLt+krp9MTe1MS5cwwKuqm49S/8PLSU3cb+v6K3FUkYQnjSFu8w4WRIDKMX5eC0lSotkaMOdRXtkUOWguDVBnL9HI6c7i/53oaqbYdT+cXhJFMy0AIE939BhscgQpdw+7+gDQlm6IjCI86GManV5gHQnmX54wkoJQUm4rzzy88f/53to/v2LYV13FEm9xWN+mC94c/YplwF8ntdEUgUcUd/XHm0+8/EpMh2i1lXVB3LX2/5OuOEdjgKRKUMjEvkVgEVKmJYyIqjXUDGkmM2VccneW6eA5VgVdvG4OFEIr1PIKqLjG6wNqZwlTYJaALw+yndc2/evtFQCuJkrkJM0WBePOZi0zIiCEQyeSQXAYgCD5bbt6sWsZkRTf//o9x/N1Bdb+5YxoW2rLlh3cKISqQI912wzD0dF1N1yX2dcend/9C22r++tefeNjdI0yFKjbUZYuWjtPxhUJq2rrG3wyKxL5skF5Qt7lSTaIRomC321C1iWEOFHXF+TqiSoeuNixhoj0cOA4LxiiGW0QpT9UYQjBMPnH3fsvr+Yr1EGWgNC1LcggioxMIA8ehR9Ub+usrdVWji4y1ud0ulHHO/iVZAyUieqyAzaPmeBpwCUyrGaPFDwPDDR7vG5Y4E5NkCY6uLZhTZEov2EmSjGez2XDtHSE+Z9U1OLwrwGkIiWkcUQlej0d02eQGHDFBMTDPklo3uPEFqSS9BT9KQgVdt4PZMesj7/cfUb6h2RjOTxdEjJyewDqHEAPLmNgcdtjB46cbsdyinAC15+Guxr+ciHXDFCImltS1YBwHTGGQIge5jBmxvWAyjrJu0EHh3ciyOKKCy2kiCUkhEsIZ3GgplKSqa3bdATsHlnnJ6eRmw5QclVoIoUdogaIgioWURppWM9mFomgxRLRW9MOZED2bbUeKDq1qLqeS2V8oK40sZ1zQPL7/Pa/PLwRhUWWJS5Ki3tFutwzTlbpq
mKeFxfdQlXhvMKpDFTWNkQSZcqMMC01T83w+I9iBnHl5PQOS7WaXn8YxJBvQQmHtF4Ss8EGwLI7r5UZV7bhdn3PgI3X0wy9ZpV9OCGe4L7vv510MkXmaUPKBqhTMU2AYNUJt8XbOFZFqhxIhlwaoBSkUbvEs/URdd1h/REhNU+4pS4M2idvtCmLhsH3gdusxSrHZ7tBaMw4BJRuW2VMYRdtopmVByIgSkq48sDGK9+8+ME0TSkvwJQ/iDxymjjiUBNlSVTVVnTErRhumaeTr169YaxGlZLF5SBUyq3/zvFB0eTAMPg+jSSRUUqS4IJUDodZVsVoDR4kYPJfbQFFAiBtEXAfAEFEyEP2CdQUpZY7h23rLmISSChtW6L6Iq98U4A3nJNY0ex4oERnIH3xurMsX/rxsT6vvSwj96yBJ5qQqGambCmcNy/LrYCdSDnNVdUmMKiOVVlvuG/ZKmRzoilEj1/70N2xXrmY0+dc6UCbyJkeoDPhOMd94YsoDXnYOhLVWtoAkV0V1ZdImh0wOQbcOzivHdPXMSuGIQa04rHVQz69AHgoJOEf2A5P/XqSUU/dtjTaJcSryg4ZYFdOU7Q9J+HztQCDT+n2IWbVuumyBuK7BkTzArkr3WtMZolyFgPUEWtVYJbJ32PsiV2EaSRRyVb0D222FEoAwtG3L+XThp58b/vW/v2f5cmKpZiglbb2hbEpOr6/sNjVxHFgcXKyjqP+GuBAlpS5wrgeZw33WO6wbMUNBfxtp6oa6ypD4rrtDETAVqFCz2AmbJkYfmZeZjR6oNUw24sUTMgZu041xdsgETV1n/yWezX6D2ewzJi4ZrA182t/TlC1RFpQCdCz57cf7fO3VLW6csojXB/b1AVFJUtGwKS1Plxt1lZj6Ea13aNmT5pHm4Q9ch1eG64V4ELw89ygt+fz5J5Y5MFwD21YyjT2VPpDEwq03/PjDHdN4I6REP1xwiyW4C+3dnpevVxA921ojlKbrNizTCRUaSl3RasP5dEaVKpN2LgvLQi5/KRLSlYzzgB0ts/AUQTGeJ4yu+fFffkd0C/1twMiCtjCo/chy6zl0ETlcKJWjbu8xKlGUM69f/0RVbVBhZH46sXtocKcLG7Fl20Yu5x4tBLvtNnvFo+flOLI97Ek+MHlBqku0Ghkmw/37hl+eXtnddTzcbfEikJbAHCWF2VKwEIRnGC0bXUCKiGARKJyJLLdLphIlAzIQkiXM2T6TK9s9oAhB5upWafL5IGOmoaS0hrT8dyxeENle5W0uNymKkjflNa//+U+M5v/q4+8OqpuuxN8/UFfw+//9t/z5Tz9z2P+Wpq15Fl8Rq/+tqnacX14piw6pLNu7ElMI9odHnr+eCO5GVViWecLoPUKEDKteU6vjNNM8HJhGsAR2+yabsaua18sNaSSmKrDeECXI8o7LeEOGwPF6pmorurZiTDnFtmk6bjePHUeG4cr93QEfF0xxZbYaaWam0LJMf2VZrhSu4d37T8zOkorA0+WZ2jXIUGQigTRsdpHeFVyWgfv7Hb98/RlVRW7jDcR7nsefKCuPjvdM45EyCo7XE4kefLkiJFqWydK0Bi8Kjq83KtlRtxJVeG63K2WxpW0euAwLspQIFajbHacRCiLbaksUgetyodRbTGcQZqYJNU31QN3USBdwdkZvBOfbK4fCMlDw2G0Z5jNN9wnCC42WYBxptpTNHdXuwPX5hWBHQrIU5QbrPKYy9NNCVSgWP1BKTVEqRjtStTXeOUZ7Q8mC4AcWt7B7+MQ8L0xxQmsoZKQQDqygVgVJjhlsXG4QtqIxknle6DZ7vF2LF5Yr0RxISRLiQjICbRqM96gUEUnjnMM7y3a3YWNgnhMIRT8tSNNTNoLrZaD0mihztd/xeCKmGedn+mFGyMT5ciMkjTQjXi0UpkJJzTL3TEtEFzVGzyx2pDINloCLmv3hI3fbHcOlZ+49wSlqfaPQCesNnprgZm7LwPZQ4K3lcgnMbiAR2e83DOOI97/2Khu9oWskdvIkGdlvHimLBjdNbLcb+uvMx49/wC49wzDQbTUpLYyDzYNUUpSmJhEYp1eE3FGWVVY1VJmTni5RGklVVDjrUdIT4jNdt6Mu9iQ/E0UOAhHzjV8KQQgC6xOn8RfMULDrDXFOzLNZ11MaozVlUaKU5t/+7f/j9XjMgYDgWeySvYlkPIrDARpnwS4hY1eURwhJitmP6YPOXeMpfB+MUloIKWF9mVXTdcqLMWB0rl4l6u8DaDZiRspSoqXO8Pjvgaj8b8qiQEmNXdPjSUTSG4/V5LatjGVK34NUSuWVvPeQsS9vaKSEJKNmgsxtSmL1yYYYKUykLDXTJL8HqWCF8+MRIhJW7+objzSjq/yKkMo2CMEbuSA35hSlpGk6/Pyr1CfWKtOy1LStYhjVf/Jz5vh8RmJp2eCFIYmQleVVuZTKZdVINN8H+azyZotDaSSLlYj8gqyTf7ZCtE2bazFTrp6FuK7m8x9D9IyTy0ruqnZCQhNpSp2Dd0sFKuRZP71N9C5nAoIiolDrjfZt6C8r0DLR9/lnLniPUGVe16dI15YZPxglu/2ep68D3771/PjjPWWrWY6ScTsiKCmNyeEUN+HsjPVQd/e48PT9df74wwfaGl6ee7rtBkXg+DJgtgdO/UAIC51qKLxinGdCvNJWBaPzXPqeWVukTBTNllo7hLeURcPW3DPZG/P8Cqrl8eE9CcHSj8wxd8fPi0BqQd3uuZ2OdN0du8OG2+WFaS7oHu9ZYqQuW5al5N2733M6/YT1kWhKmnqPMp7n6UayDfcP79kXhvO5x2w2MG/4H7975Pl6A1/S1PlcfuhqJht5Ol9ouhZrHfsPPyDtwnj5hbJ9gJSQGEJwjOMrswvUyaNkIIwz2BsAstpzuhxp2z2vpyceD7/len5BRU2cAyk6jLjDeYnSkUv/grnB+8ct9nRmRGBLy73dUzctaVMyOYuLM719RfYjm8MGVXg+vN8Q48QSBlThsCz88Jv/haa2vP7yGV1I0C3v8Shh8UJTF5bp9B8UseHSv/I0vqCM5HyamWIiuXvqUnK+SR4+bAnjV6LfosOWolAINSG8JdUlwgqebzd27QYnEnawCKUwXb5mqUJjXQExMfQ/cVq+IiScr57Hd5/YNAvJGXwoMKbC+wjCEJMjEXFBUhaC5N588HkwfcNivQ2hSulsDfIBH3y21ax+1X+k4+8OqsjcF12WNVUD7cZRFpH/n7r3aJJs2a70PhfHjwqVqirryifQgKEnNFoPe8J/zkGPacamQTSAp27pVKGOdsmBR9Z9nIAD0oxgTLMqqk5ERpzta6/1Le96rq8kpxeXb46NZnGWczcQwoiWmtvrGw59D4Xl8PwVEQ2maDBlxZmJ2SVEldmM/dJT2JZhcfTTguoz1ujt9YbD4wtG5jrVhy8HVqsAWmDlQD8cCIVANAWqXXE8nSmriGLNLAPd/IwNE9rPKFoKUyLSwv70iJItzU5w2D9Th5YbrrAhoQtNSJEvX85UtcP5gJYlVlr6c6IqG4JoWZwnuDNlWVFWgsHucamkNh5
VRYZlROKRNJiqIoSAHRdMYXICWESWpSdi2ey2jHZACI0xVyjZ4vwB71rW6w3jEKk2DefuPT/t/hOTC4QS6tjyODzx/Xe7DD82Dc/9nkJMrMp7XAJRV/jCZFC5XxBLZLYdrdli9MI5eSYGFC2fPr2nkgEhFLrQPO6/Mo4LP737jvM8U1SR2c+4UXJzVSFkwRgsKVqYBO3qBjtaAhEPIBSkwLpaMZ+fkFoyTo6yqVCmJArwKtL1HUpY3JzQdUBpTV1sGGaPtQKfZiojcB7G8zNNY7CzpU9zVsHPJ66uI9erlmmcWUZNjGdO3QsSAQq6cSKJSD89s8wTWpqLmuYoVHWp/xMsCPpupJBrmtWa46GnrVrwM1V5hRQet0wQS4T3HJ+fsaeJ6CxVsYJoqMprJtszLo5q2yDCQt0E6kZxOL2gVcV2/ZaYEnZy1KZiHH5Fgai4pTBbDCciJbvVG0xlGPqe1fqWggkjNaqqmOYzMQiqssIbz9XVDmsD1kUSC5GBaUoo2WDnRJAROy8Uak3bCCQZhh+dREiL1o6qWuUqw6DxVlGWEu8sShrmqSOJDudHqr3JvuigkKJEXtLeINFK8fj4lV/+8gvOWpQuWJy9eCUvambweC7NMDIPgv4CySYouEDulzni7KWJiEgMeUCsqxZrcxOWkHmVGJJDKoEUee2f9c3LLJYCSiQUOoP+LxaCEHL1Z91UeA8hpFyDeQkXlKVAa4NdxIXXGnNNZ8o101KGS+OSvKiNkCc1j11mlkWC+DUsl1Jmf8aQCD6jnvJfjIToESzEFHAu5urF9FrnGpDKImXE2nhRA+HVviBEbqx6vfZvOvFFTS2qvIKNIaue6TKoxxAQeIpCXli5WZ2MKZFCrinNLNNICDnMkS6kgOwZjmilmWL21EIe3gUCqSTGKOb+V37j6/sohaCpCsAT/F/RHr4dAhJNVeBTyn5cGTP/ioQICa0ilZHM02UAFv9XT13dFAiRWKwnoS/826yEK/JNuesdqJqyzJSO07nnw8cjv/ltS/iLQ7wD6pmxmzByjSocHSfGkCiXgbL5tQ89yop+7lCFhtgyuJ4pHKlVmWtwS4GPA4vNRJaQBhQLLmbO+Nrs0AaCEAjlSS6wpIALGlWu0MJSmXv+7m//F5Sa2X/6H3z4yz+w3rachiOp0qxXJVf1/cUz7VCjolIeVUvmc4eyuUCDELh/c8fTg0NvNFQKU5doPyJmKIodzkeK0iMRPH1+ofnNjxgx8TIM7G5axm7kZtcyfP3Ibtty9+Y7UrAgZq6ubuhPJ8p6TbUSzNOIkTWqBKkjwg6UtWF2E0LOTP2Mq1bUVU0/PuGi4Ll/5HQa+P39W4yCKSTmSVAkyTCfsEnRlhAnR7PaMfYjYU7EVrK9WfNl/0f+eHji+t0NFIZpnjgeBraNZrtbcewnzr5Hb3as2hXD/EwINbff/czDy1cOh8Sbt98hTj1qvUHbhbHvUWYDaeaw/8p8lpzHmZ/+5i3z4StWBcalxa8cfnQcX/4FlXqapkYsB7788kT70+9J3cjzwwHe3VKu16gyIp2jqlccDxOCFV2YaXXLdrvhuA+8nB8YnOCtWVPUgaLUGFNdMFiR2eYNj5QLghY7TwihkUJSlr8yUZXSeO/x3uNcBPk6uCZC8JcmOsk3PMB/gMe/O6gKranWiqI0dP1Eu26IIdCde7ZXFRjLy2lg8g4pLNWlpCMR8S7x/stHrne3bK/vCR7KYkPZXlEOgijOoAxGKrySBFHQTXu2Vxv6cWS7bekHj9QmN5Z4jygF++4FaTTDMtAvJ253v6NdVQQS/Rw4TUfaqxv2xz0hLNTrClmBSDWzs5yGR9abLc/PR1qxI6WEdZauPxCj4uvnPdvt+mLWFzlQ0khiLIkcmeeC4/HM7e13PDxOeCsgLmzWbxnHM9Pc0dQ7xtGyXlUYU2QTcxJ03UDVGFwITIOj3awoRYUpax4fXyirDUjJ7GZkIVCypChWuBjptqZqGwAAIABJREFUxw4fHb5InE8LQRvqKmFHTz8HlAi4pee5O3J/12K1Yn86kpJnHjva3YbDfORuveF5GlnXKxbrGZLj6Gfc8kw/TKxbw1q1LN5BYVDVhEsSaRTj0iN1gXVnzoui3VYsfkACw+AJxQlUVnIeHj9RNDVNseI8H1FFRRcEE45lWXAuIZuK0U50zlJqyewdaThidIERLevdLUVV8Yc//+9YD3aRxCVCrCmKFYEZZdboquRwmvGpIAXJMI7M4TNleYu8eB1dbFCyRCBZ1Q1TP2OaLaVeY5dHytpgrWCxlm5IKN3hY8Q5SI1nmQQpWVIMROf59OkXfrj/jjAKnpfP1FXBuvmOxS/088xsC6x1zNWEjzN26KnaOwg5TXq929B1C94dKArB6TB9+9wVuqEfXyhMoCprJnvEpYr15grne5pV4tR9YpgPSKlZ5hpT1KQUmOczTb1DCMM4zazqG5Yl4X1uGSuLrDi5ZaKqFKSC2rTMY0ArDUnj/IJEE52nNRVNLXnuX5BFJDJTtQWyu0W4gugbRFrDX9ViCpGY5pE//flPdF0HZC6fj+ECnH7FDCXihVyhZOYlh+gRSZEuKypE5l/GlLmZQqaLIqmIntwDfuGLxpjVAyE9IapvxQFZaY0omTmazpEV2lwrlZ9XZ/XUeX5daYfshywKcVFoZR6l0muiPeUQmKkZ+l9T7tnnGlBFDihEry9DbFYqlISq0iiZ13c5Qf+ralqY/P+M4TWMlitYY0pUpcptf7MiCQnqwgRIAXkJkaWY/+zFaHAJ74FWiWmc8e6SsiZ+sxsYk+typyl/g78iuGL0CAJlqdHKsEQBOqueImV0VQoLMUpivGiaOc4Pl+ctCpnfD5GH9ERe+wsiTZ0B639d45rSJcymc9XtPAmsy4cJLo2FyafcRS8yEivGHDz55jlO2VdrbSImBUiiDwSRO9cLlTvll57LwRw22y3j9JWvX07c3taUsqA4SYqy4XTukd7z809vGcuGwQ7MywGj3n773HrvacwKaTTTHJiThVISneWqXrE4T1GUDMLinKMoFH03svgzbjHcbH4gMTBPM1N/xEjJkhL9WBCXhNGSUgee9v+MXUZqMaNXBcVqTVomRCNJJWzaN/TDE+Pkqbc3vLvd8XV/Zk6elakR1chpfOa63qBrA6lmshPCbFBqi3VHRjtwmC2FFog+kGTPy8sTP/7wji8P/53H44wdHTJZisoRF8XYWd6+q3j68siqvMbGwMPDX3jX/J7+9J6pg5vrN0jVkSjpJ4euDVEIqrIleEsSOYgXUuDh+EylbxAqsvgBa0rmuFBpzfPnD/z4t/+F7Vbw+IdfUOsbNq2h9vD5+EI3dahZI2pF140UUmNuDB/efyWEW3Y/tBgZmUY4nTv+59/9Dc+fPrKdS1xc8/7DnmkWbJobSis5Hv9CeBIsouDupxtSrLj78Z7leab2uelPS8fzYeH+u1vCPJAIMHfY4cjUfUZqj7QCN3SMh461cWzLhTfVCr8qmfo94fyAsoJoLI1PbLYKbyPf3f3A7e2OIeQArp1m6qZiWnqWJVCUBq
kUZbVlmUZiSMjc4AJCoIoifwc7ly0/l++xEB1VY7CLo6pKlM6NdJWpM2HkP8jj/0ZRrSnb3Jc9TCNSVoxjz5IWFl8zzgvb7RWn/oCpIh8eHiBp2tWa4ekJGxaGaaGpC5KC0c10T1+IAUypcttNcPgoCFEQUbTthm5/ZCk9s4tsVzuG8QtpVPTjSKEnhhG6uWe1WqF1xefPn0lYumFis604HD5iihFd7WhXJVF6hm4huA4pBP/57/8r/+2//a8MfZ9Zl0XN8XRCqxJnI227Iwafe9jtMykq6qoGuRBdiZQapQPbzS3TfEaknMo72B5TbKmbW6Z5T4i5LWWOFucXkInJjzgHzWbNMp4pm4phWYhS0k0HiqJAyZbZy3yK7OH3f/cTf/y3DyR1x8PpiI2OsAQSDZvNDT4aXk5P7K5WyFLRWcsSH0nFQBIwu+7SpBE5a8HD1xNce1QqSUbig8P5ke1uxyISz8OZJQxoIymagpfTC154FBEbEk0tsMIynnJT1KpoqdqSqAPWBjQaxEIprilExSAEk4dlHqnbFdOUOO4/UaxX+JT9dTYIanOFX3rccsQbSV1Gnp/2SCVAWvq5o2DNPGt2VwXHYaCkIYhAFILH5yN3Nzek6GmrVR7654HgM//TOwVa45JlXiJB2stqN1HohqeHjrJRuCWwXd3ilommbYkpMgweIT2FCihZst3eUZZ33L/9iWn5gnUj+2FinA6gJsqiQcuW6B1Vm2t9V6sd527Ghz19l4hBcX9/xbIcuNrugBcATG1ZF5LFJ/qlA+EooqSuNiy2Jy4JoQpiXDKonwXrn6lbibNLhrgjSaHA2VyvG4Nks6mJaWSZAiIFTNXQdWe0LHHxjBINUjcczwfWTUsQmtFPnF5Gzv2ZtjUUtKSnNfVTS3IFMbVIUeSF7gWan2LiZf9A1x2++UNfm4JSunRxvyp5OgEj/TAxjQahWmQUpBCRwoLwOKcv66hLYj5lj6uzC84VJKFzgCoIBDL30cdIiFXGM8mcAi+0pDQF3ZDLHeTFK0lKSBGYlxHnypx4TzEPqjHkMoOQcF5n/NRlmMrqhcj+1AsPUYjsTxUE6krnppikLyGrdCEGSIpsnc3r/dfu+stwrlQixdz89Rr8IYqsJMts69AqFxHkboGsikuZm5xsXmfka08CYsIUkrYpGAZPTK/Kb/a9Spn7x6WMEF8RUfGvvKQOKRLEXNf46wgsIHmQHucvnFORLlGrPCAHPzHPEecvNoxLzW0e6D0+TAyDx3n1muvKYSguobHoGMaICwUp+AvMPavxVWkgwTT57GO+JMRSikgRc6VkcN/CWzGErD4lRaXAh4CPRbbLyEBZtzRNw7nredlbfvvbGwZ7JDyBLzyrOmHnDpUKVs2KGC3D1H+7XSo9Y6odvfXM7gQpIm2BJXEQFqlbwixZhgmCJ4QZJRVGbfByZFp60jyjdW4RQpd4PzO6A26GYBM3QXF++kShYVU3HE8Dk32gakqm5cjYA6lAoNFGIXTFabS8DJ66MgQpabY7TscD/uQxStMNPT4lypCw9sASHVJZmkoQpUKFwN39O6LQvLyc0WXBcT5S6y0qLfg5/y5+ffzCavMb2vYaFRNagg89U/eMkeDUzP75I81dSdefGGeJ3NTs6h1Nq1nGBY2kTIIqBZzU4CaeXgamJRLwOH/ieQzsdldMh0dadYNNM6k788Pv/jM27uHze/7w50+8uf09b24D//B//CNv3rxjd1dRCMXioXML1g0UEaSfGfbPDG5CKo+SDburNRsj2a0T3TJTqoLH4ZGhqIiHFYV2DFawvbpB9Qe6fsGYhDI1w/jAFBPDPFBWFV8ODsnIti3ZrDa8//QRbM3f/qcddj7xj//yb9Tf/cT1riaFHq0Mox14d3VHEieO4yPazLRmRoRAXBIh9AxTx2Kg0C0IiSoUShk82S6ZkgGRmxdRItexXgpLRAKpDO06b3uRGh/B+cxPnd1MdP8/8ageu466VYzzyHbbcDweOE8DyJlDr4gaZA1pVoii5XT6ipACJ2CeLLd3tyghkFrz9HigXVXM/sDURW7lG2Yx0o1nEpHQS4KaOfZHhEn0yx6hdljv6OYzVtWczz1//3c/8PHjJ7QRlHVLP31mXixlk1itDW3bcjh+ZNXesqre0mxK3n99z+I7alOihWF/ONFuK1SRKExDWVc8PX+gpOK7739msT1FqZmnmaZuUKpFF55pkTTNmhgVdgm0q2tCKqmrhAsT4+i4/qkEuVAYQ4x5jRaixwWPlhV9l1ivGspCcfaW49Bh9BptmgzmlYKu7yiqisN4YLu74jTukSpSmoovD1/RxrOuSnobqJWiqFpECV1/oCjg5XSkkkX2o2iIBmZrkbLk+dQRk+dkAzHuaVWNiJIZi5sPWFFkpE5yLIeB7eY2n9CUp60AkZitpGwtzGs8kllENpsGXbZ0X37BNAJZVCRJbgqKgmkZmIOlTDukTtRNgWdm3a459gPn2bOqS9w0UJYeGWdevj7y8PKZn7//nk17xal3zKNFixXPx5nzqWfVNrjgMbVEhISPgd3ulhBq5hhp2xX9qaeqa5J0nMYDKQrq7ZYUNSTHMCeEWRi9J9qSutFcbe4R0iJ04NxPDNMTxkBZbrFu4u7+HpHgtHzMHdEiMYcXhLFcX13hbcLaMyKVtGZDWd+BjLTbSH+eWBZJXW8hGgQld7c3vA6qSYysNzXLfiYx4GzP0PfE9oa6ukOT05pXu7fEqIlxoWryoChVYHFPxFAgkrzcqPNgvtteE0LFPPZUtSYSGcaREM5UlaJdXVOVVwzDhA2JJAPLMuGtI1y2Hm5ZqJ9byqXEsSLQ5uGAdGkzERyPR46nJ0JyWQ2NWRmNGYx62fDmNLqSWYWdRo+1CmU8IiQIAlMnTFnkVq9LHSZREqMn4hDSkNCEGNDyEsBJHmMMPmhCBKEiJEm6oKliVKSYB6ok4iX5npAyr74uwl4O5KQcLFjshFaKGBqSeG16it8YpcFDesXAXJTdQkmKQuFd/tmrzzQrmP6iJudw1mVyz8rupRlHYDLaSXqEVJfXzqFVuAyfl4GZyzWQQftakwsEIiBzLWN+eJROSKG/JfZfY1jZn5pLFV65s1FcXoiY0Dq33SyT/Ia7Sa8BtORQOvtpM0g9XRTl7Bf2qcdZSYzrvx5vSfFy2SIxTQFrBcKoC30hFxfUTYHSBcM44KMGH5BZBEcSKQqDdZ7FCYKMmXxwkYiVyg1CQxcvHlvxV7xIjxAQUsHsxOW1jSAk6/WWaRx5fDxze9sgRUXxKaJ+COhmTUKhtEAHTdVsePbP3+6XRmiO3RPWSRAzrhP4pKm2a0bncXOHEgvJOZQsSTIy9gum2FG3KdMaYsLPjrpdcx4ccbKE6Kn1HWotid6z2t4g44jRFSs9413HqlgxjILIwvn8BVJDszb4JWLDC49PX/jtT79Dp4YoHPNyYJ4DWpVMU0+1vkZJgylbfBhwS0AbxfXNmlIqxj5hlWNZOpRpub/eEc89daF5Pp1QxpCE5Xg6I1Piu7fXaLNhOA18fvqFu+0b2usVnz494
+c1ovTIWNDNA8HPpJXhPDi+e3tLsJYf3vyGh9NH3n96oCnW7HZXfHj8iNSKsqgwpWe2B9QZTFOjREVAQbHl3Xd3fP78zME/Uj5XBCTGJNJ5YmUMTow8Po60dZWJOGVFWEbOU89pSujtHbHN9+1P4wMhClrdcv3zD+DOfPjwB+pNg5bALn+WJmsp1gkXTjx+Xdis3hEiDCycl8S2LolR8PV4xAtNtUt8/fyIMpGDnRB+YDzMuCKi08RwOmPEDskeYyrG8YBeBDIlpNgTrGOcB7SHbnrh5s09ppmRIRDtwO5OkULNpyeb6+21zq1yFPiYueEpRVSAZV6o65ppypiqoshtW+I/zub/3x9UD6cX+lGy2Wzpx7x+qlc7uu6ZzW7F58dfmEJHWW2yebdqKAqBKVr6s8faCSEU45QxI92wsNvVOXIgYRhzNy0JijKw+JnBPlGU6yyTC0+UuYfcx5Ah19HQtGvCnAgRXHT8+PPv6Yc93vcEX9OfK9pGI5Ql+mvm3qPMglAVQnn+/PF/4+rmDXYqCSz44Nmu3+GiQ5mBqZ9w4YjRW9r2mkTJ8bQHNOMQMYUkkJVKU+YP6NB5bm/uEDj67oxUJWVVczy9sFoX1PUVbpaoemHV7Dgcv7Ld3rN/+YRsVii9wnrJECyLPbOubvINQOz545/fI2PDu/trIjMuzCShCeLEYg3O51VX158p64JlXmi3WxBgx4GyaVnGmaWICAzGNDRmzTB2zPNCCC1OCkpNDs7ERKFWhMLl+jVtqNuasdvTrHacTntcSuyqN5ynESt7AorKVTgfOI8dqlR0PtJZyzidWdzMNM5oecLoyNXVdwxhZr264dxDXS2YsmKacmJ0a95Q6A2blUekHafziKAmxsDxPLE/jdy/2RAj2FlwdX3D2D/ysn/k7//mf+IP//YJU26oV4aIxRQDq63h0E/oYsXN7g2FrunHE9JYkkg0dcu7t9/j3ERZGXRRczx/JqYzV1cbunPPqXtht91S1xJvZw6HZ8rqClQEpQnJMrqeZZ5RYkYSCUnjQ2QYDiAdVdkik8IU+aYvVKRe/+qvc8uUfb/es6oqTlOP70EYjTYRjc7K9UpnmoGHZZoIvqCsCqSa6U4TKeZ09+l8xJjMqiQ2VOWMUBPWQdvWDH3HZv0GKSvO/QuqgBAt2tSYokFEjZQFdbPB9IqtuaaIV8TUIDHElFuClArYeaE7P+L8xGId3oXLev1XHe5XLFRAFzlxaheHC5HkPbog47SUykzOlMHZSb5moiJNU6JVSRwuCKQkMiZJOF4TsCnlOsUU8mClVcY3zbMALkPRBdAvZcytSK9oo9egkEik6AmovJZ8bTaKkUKH7Pey+bvoNZz0ylCNPlwwWJc3NuV1m5TZe7pYefFx5yS9jwGlEnVVXKpTExAu6X+JVg6lM2cyxF9xMnltngNNpZGcX9f+l8Et2wDcZWi9NDQpvg2UpFz1GqL+Rhb49fph1VbUpWQcJElkO9TrzyFeKid/tX7EeFGHU6AwYF3Eh1fyAJdDQ6LQitJohiHgI6h4eV4SiEhblxRKYy058f9a3kCi0lBXBYt1uCAyPSDFi/2CvLJOgXnO1y8vfNXsyc38XiUK5sUjyEO60pLVes3xeOB47Pj0yXB/XxFGSyXh8csXlustFAofM2C/Mb82ymlR05/3NO2KrlsQyqB1yeIySzOJmRQyecSTsK6nKFp0GUlKEkKirTSHbiQ6hUuWldmwqa4RUSEKRT+dicKji4KHxxNVVdCUBT7MFHWRvcVeYwdBdbPh+fBEU2vurlsqI0lx5OWlZ3t9T98HSqNIi2I8v3BSNc6vsKHHFBv6cWC9E5yXkb5fqFc1spUoq2nLW8p3NR/f/wkvDff394RPX3DLC+d5otnWuHFGqJbj+AGH4r69Yzaa/jzy45sb+qVHeIGIkdO5w4mCoz1Ryg2/uf+eh/4B2ZZ0PvLu5paVG/j+h+/58rBnWRJRJ9pVxB4FpoHn8RkXB07LhK40aUlY7Xnz7g0311umxwPWemJV4FMuZhiHA1QlsPDD/fcMx47npw+M3cwPP/5A0hBMovcHovRYu7BeKVQFRXKM9oyODqkjS0gUjUTLhrJtwAXwC3eVpJIrpuUF067Rpmb7VnL880Rbr7j7eYO3E0/DmdMYuN1dc3zqeDr8ke21QMk14/yCjFvubkuU9CixInDGdxOFqOkeP2KutqRwwA0L7HaQHG7WBOvQwjFPmTNtZFZfg8qBZJECbpqom5opjUxLR1Nd5SDtf5DHv+9RlZ6UDLPtOHUTm9UtpWkYZY3zAW+hrddELxB42tZcDPeO1bZgHBfihcXZti195ynNO+ax5zwNKNFQ1pplGTCFYLu+oe9P1CtFjAbne4paUFcNIY1stzv2h4Gm3REwXN+84V//9d8QNwIpIm29Zpokbb2jqRucT7zs/0DbFCCumaYzq7tb2nqNsxZByzRYhJ7Yrt9yOp/oxgeifUNhctBhGCzrTYMSW5S2BBKqcEyTZbPeEMXC48OJqmhp6xpnLcM4IvTMqnpLCM/5OdYLPpYgIy+HPbvdlpRKdCnwYuFmc8df3n9GysRmW1CbFSlMRH/GqIZ5USzWsV2VzH5ksrmqbRwiTdNh5A0DT8SkWWxHtzgSJbUsGYYDShmMzOrw4vdINaGNwfkJgcDohrpcczw+AIlz39G0LYeXgR9//C0hRLrhhFA2+xjHgmf7QFGUqELwy6f3rFc9ZeX5+OWRN/cbbD9SGg9qYH88Eaxi6p9ZrWGaQBWRvgtIIfFzQNYKZQxjr7DOc3Vh2/ogMlc11jS1QK3ALh6fPMEvlHXNpr3l+eUTiAnrZ7ZX1/glcj6/sPgDzilO+4W2uUbpmuimrPgtZ7ZtwXlINJXm7dWW52dBEoGX7oVxPrDbFpjinrZ2DPMz11fXVGrN2Z/xLrcWVVUNQXIeXkhxIEVNoSE5i8Ry7vYs/ojWgm1zjxLZLCl0IMaJr49/+fa5q2SFsyM6gvQlN+t7buqAVCZ30F+SzcFN+Diy+IkUG6qiYbFHpCwyNiVZCm1YlkBpDJUpCDOZnFCsCLEkyJECTVs3HPoX+vHMut0ilEcqR6k3DN4jdEGxGK76HaW7Isg1pjaIJHHutY1Lcdw/UtczNnjskltOvil3ZBUuV2oKSJKqlLS1ZOgkzkcyPzUflrRW+JB9wiRJSDlIpeWrWulJ4TIcXZL90uTVcfCvyfx4UTsjSr2yODMkX3DxWJLX+zGKC5T+IlrGSMKSwf+adFlny8uFSBXxfmGeU1bnL3JfjCnjiVCEkAgp5lAfkJLAmMwoHMZIvNQnRhIpBuqqpC4F0+yJCVTKAQkfAloGBIlp9oRYonS2EmQlNqFUgCTxr6vzi00ipkihZW7P8jGrwn9VR2okVAZsgJjjddkSESGJkKHgLq8QlU4Qs10gxADRIwDn5GWln/2t8bJ+L0uDd5lvyyV9nEQkkQ/8SsE0BZKQxChRMpskCqloqwK3WBYXvynyMeYQlqk1dWOYpgkfEibGi2qdDybVuqAsC2ZrL6979hALIZFSUBWCZbbE
oCkKlRnbMiFlQVk12NOR47Hn+lojsJSjwDPw8DKiyyp7nadsD3t9mGZFOVQUKVGpNdv7a6ZxBpmo24oQS+Y5K/vruuR0XNBSs96WHI4943gi2AqUIcaZ+7sb7LQQbIE2nmnpstf/dMRZz/PTifvbW7pzQIgO0cL1bkVwAWct+xdLDJbZCbbtDY9PD7gwU9UblL5luxXY5xd0kdjUBbM9M08L03xitV4xTjPHfYFdPLvdFTEm+n7Gh4CLX9juMod8fXNDs77hautJYiDoTImZ+4GiEbTbBj85nvcjylT4DjA5xFjXKxqpKI1C6AKpAlNY+O//+o+sy4qySchig6xbfvjN70lx4Tzu8UPk9oc7rBc8Ho7Ew4GyOWG0pRstp7mjYmEsNhTAy/MZwkKIAe8UHs/jpxPrq5L56Nj+dAOxQKgGo8/MOjANlp9//p5/+vzPBCwxDYxDpF2vUVIwhYlGVQQpSGXE6UB7f8VK1QTXIpZAZQyhW9jvH+lZ+P32io8vn5H9GrODIQ105z1iLnGlQJuWh2FEGqhLOA0983lCFjbPvapAh4rKTPTTyG63YrNu+PzpK1MnaKqZNATWN9eEcKKVG0ScWEsJ0VLqBhsm2vKWpAPYhN7ODOfEu6sd+3Pg+dRz2Dvadvf/aLj8f/Px7xJdretomy3n44Szke7kcEtuTPE2MycLXTKOPbpcsh80GBY7sd1prnbfs1nd88P3vyeFJndsB4EpWzbrazbbNVVds/iRvh+pygapFNN8unxplTg30rQNWhmkEggp+PT5A/M0UegCU5T86U//g0Jr3tz9jA8dQh9ZZss0WVKC7foOLZt80pwVwzDQDydmO1KVW4IXuHCGFIiuoFBrjLxBiJLjwVKaAqNz41Vgjyk1ShZIUdEPM82qyUUCxwPdcMIYQ4yaru/YbtdAYHEj8zKw2VY4P2HdSHd+pCo1kzsSUqQsE0oIlGqQVYWPYwaMhxqS4Lif0HJHtBUiFShW6KJAVxKfoFmvc6hABpSaOHYvLOnAEgcmuzDbnq7vIWlseAGhqcsfmJY5p1CDZxiOTH1HVWaIeTd07Pc9/TCgSs/jy2dUUSMLSTe/4NJAobd89+57wBPFiPMz52PEhcAwdXRDz3qz5v7dD6xWVxSFoagSq9YgVaBdFRRlwdeHD2gteHP3I8FNLO6M1pqiVKRUYMqG69srmnVDUQtQjrItaNYFSxhp2g1Swv78ibJqKBtDVSdurq6xvmSJhkJtuG7vcHPH0/6JUhWIuaAtK27etHx+eI9gYR57/NLjZwtRYmdYra54c/cOJRuEUFTNjqa9IwlHjDk5vVoZ7DxBEFSmzr9TacC5IyIplKhY7BkhIs5O+HnGnkfC+GuYqjKKShpKo1DSs2rW1HVNu65p2joPiJrcYBITRVFjTIF1M9MQsHNBoWqUTLy9u+P+9p7WrGn1Bhkj48mj/JbkYwbEQ/69ILDb7kiQodZJ5gpbrdixZfOyIp0LpFxTVi2mLFCFRmtFaQqmYWCcH1HljF0i3ouLcf8bBymnwV9T8SJxuy24biLRWbzPIQrnLCnlISD4VxTTayVqRBaRkGasmy9+UcFrm1FZFshLmcPrYJnI9Z6mLNC64jJqfvu5EBf0lss+0DzmpQuf0OVGoyReGwAgXWgJMWBtbse5RL/y370MaS4E3KWO1YfX0FdECUVZrBCpuNAIQCCRUmXmrTY4l3JATSpiBO8XpCLXZFouz/V63QDh4tt9tS+ki6c3s0yrQhO9wNqUPa3fEFsgZEbz53+THHa6vGPROybbM9uQB9HoCSHbIhKBJOYM2g+ZopAtAdnbSwooyTfGbD6sZDWbFBA4nLeESymDIF3wW7k3vW1rptnj3GtxQFbuQ3AYo9GFYp4DMcQL2UCSYlZfk4iMk8cu2UMcI0Bu5TFFgS4k82y/eY4Rr1YPQVUX3NxtkaJicRota4ZfEk4bktas64o6KMTQ0Z8O3z63U9fTlIZjP9Lu3mBMiUsWnyynk6U7gyk1UXQsS4+3kbLRKLFmGi3LMpNiQVM1mKJgmixLEvTuxKlfOJwOPD4+MvYz3aFn26wwKTOyp6nHzg5/ijx9fWFZJvqxJ6ZE9OC95bB/4PHhK935yIf3HyhUhZsts114s73D+wHV5PfvdN5TVRWr5hqjaq4330FUaF1hKkEQgsN5oL2+QhSKQ2/ZvFsjyjV3Nz/Tjz2iVljbkdQ937/7HT6BkAuVFuz7gWpVoU1gWE6sNteUxYpte8WqMbxcdxbPAAAgAElEQVT0HwlK8/bqDW9vvqde1cQk+MuHZ5RJVGUgTob3v3zGBkvwZ07HJ7QsCJNFCYmWE+MZhu7Ax49/yHxhJZh9z7HfY4MFqVitr3j4+sKf/vgXDs8DfedxS+Tr1z2H/ZFhOPH0MLBZv8WFxOeHZx4OTwzW8nx65sPnB2ZVEoWiadd0roc4c7VqKHRiTo4gZ26/byn0AKJjGRdkWaPCmBukmsTbt+/YXgl0U9JcV1TrBik1zaagblq2W8NxP/C4P9DNC0WZmz//9P4jqYi0JHZVhUsz/eGICpbl9AWVZmRw3G4UTTVQr2eev/4z/unPdF//FWFfGI+fOT9/Yj48USyOtZmR4Vdby//Xj39XUS0KxeJOXF3d0HUjKUUWO9F3Ez/9+JayKuiHR5RObNYVSmyYxgdWG4OdNphC8ubNW+ycII0ICT7ONKsNfZ/rRp3VGX7cXtN3C4Vq6IYnMJvcrFMmQsin4X54pip3mCoRRc+HD3+hrg0h5JP1l6+fMUWFKRSHl553P9wTQ75pb7eSaZpoyhv2x6+EJOn7B97c/UCZNgz9QN/3vL35G8piRWSiMIahf+b9+69stmVOyBmPErn9YRx7UohcXd1wPo8sg2O7rVivG6annsV/QegWKQ3eQ9Ua5lFTlRX74wtalMy9p91IHh7+ANLhgcMhMbuP+FkgXcs4HIl46uqO1WrNOCSkmDFmjVAly/LMOH6kbXYoUbOqPCo64vzIlBbqdss8D5yniF08UUqqRnN6HqkqT4wD+2dHoQxte8eXzx95U2tOxwdWq5bT6Uy7NqzWV0QWlAGtC67Mz5mNOSmub65QsmSYP/DmzR3LpNhdr+i7CaNqbm6ukcKw25Y8PT2x295jJ4cpJLW5Q1+XgKWttsQ0UumKsFjKYo3UME9nyjLmIRDJurmlqRKkClNqTvsHtEzUZU2wI4NLGNNSFQoZVywUmMozdjPFKoILWDtQXq9hbqkEnE7n3NyyMtSNYhgtSgjGfsaYksU9ooqETC2d+4qSK6pqiykSh+NDHrqba1JZ5354uUU2Ez4MbHYVJINWNX33zDxDVV6hdUb7KNUCuUZ1tM/YyVOUK4QS9FNPdIHSTExLVjllpRGioCk3mEpz6j4TfKIpd/gwUpmG3eqWqmrzUOIFdplza0ktoZippGQZElXVcB4ObDe3bDctz/uvLIukqW7yzfysqPsKOReU5o663pGUJIaEw2FqQ3foWMYXTDUzu5nT2RMC39qR+Dasvjoqc7a+LCXWW/phzIULUeCdQmqRV+5B4MN
RRrjGyJ0jnDYJGRZTbNmc4du+ORxfQSyZJUZzzef+Dm8kuup1/RRQ2nwxhaWFaSsn3BlJ6+3zDYKUWsOBxrJtO3fP1/UfdmPbJsd3bfb+8d85BTjafOdC/vbTbJbqotwRAM68E2YPgD208GbAi2YQ0ttSyxm31J3uFMNWdWjjHHHvyws4r9RMMGJNsBHFShTlVmZERkxtrrv4avX/P93/0ds2/nNO0WIRT90JFGgkBmRFGCHi1tM5IloFRKEK6xThMwIUwk0/Kc9iA57FZkmUU7jRgNfTeSpRO6cc1ut2dWXpBkirZd0bUD81npq1o7Rzm5YjotqKodVfVEkGmwitFqmn4q2g+OAAAgAElEQVRgNndoaqJMMo6WNM6RYkQ/Zw4BURzhRsW9e2QaJJThGbNZxo0KGEb7D/LtnmOrjhmVwuv6nLO+V10PdEcgipBH5lQSBJEHsCrw3erKG48CFR5jv8AYyzAazKgZuuFopjk2P8Fx5K+O2lPpR7VHRu+5ElMem01eHPPOg0XrHNaao15xxOoBgY9/EjzHNknf267kUV/oWV11TAuw1mH06Hvs4Rj0L5Hq+LfPUgQhXvbRp0X5Y2et9oAEUIEiDEKCICAIAqIoPn7//Pz+ObU2mHFk7Ac/lhtG2rahOuyPtaAeNEnpGd44DomSkCiMUGFAGAYEYUySJORH5tsaL3k4PTUYMx6LFQzj0CKVZhwcaSqQ4+ArbDE454tBtIG+8wy0sV77j7UEgSKKMqxpcab1ln4kSkjCQB1jkvz14zNkfcyAEopJXhKFPdZWnu6Wx8QBacnSgCwOWG/NM52OsRInHIG0CDlwODSMo0LI8GixMseQfcc4NvStz3yV0ms/nXA4AcaOdINg1H6/OF63/hxatGnpeos2vGSoHuMV/CIljLEuYTpPCQIvsUA8pw44nJA8rZ748NNPREGElAGD1ThhCIKUYYxAqBd7nFT+eHmq2JJkEVKCsRVte8dpfI6zI4f+/uV9e/9wSxKHRGFKVdVEqcapDYd9QyFT2q5l7ANm6RRnlsTZyHazZTot0OOEKIWT85D1/vc4W6CiHcJaAgqiNEPIg4+Wc5pRG4LQs/Wb7SMqiCiiU2QgMOYLRldEQY7QimmeUh1uKMsCqzvm04y+h76DNJoxDg5rO/pRYbrWS+c6SVRWVAfHm9ffEKoJYRjw6fonijImyzKMseRTSRTlRLMLrK3YNQ+EaUi9rtjs7immU/aHJXkx5/37XxCGHU9PS4RzPK5u2Vdrqn6P1A11WzGdRJzM3kDSsXpYMV1M+O73v+Orb96RliFVMxIlKVk6xdqMIEiII8eX+x8p0pLLsyvaKqTMrlg+dJydXfLpccX7VydYU7HcjczzFPO0x4UTHB2bzYpRG65enfHp83dM418ym74lSQLKco51HUkRsN2tPaEWl+yrjr5XFPOQqPLHUcUldfvAxatL7q9XTE4Ubt1jspgwSrksLmjMliQeqTcHBFNfe0yN7TWn85KbD7dEkYNAYxuNQRNlgvunj8ym7yiLc4zW9MPI2eIr0olh+9SzmL0nUJLtU8frN7+kmDzyN//hO67+4hvq9oHd+sD79+dUu5ZymhNHATrRYCGNJVaHlPkE3VuMASk1Xfv/E0ZVjz4H7dX5a0a7QwYNMYqh9W01WZJxcvIVVse8fm1pmi3/6B9/y7/510+4uaRrR9rar4+365YiP6FteoTs0NYipWGa/oqngyRNBFcnMYES9OOe69sfuBhOuVz8gv22JooGZJiQJ2c0wzUqyHj35h1ZcsKX689IUfHD999RlClFVOIM9IPXbRgdMnQj/dBQFlNWT0+oIOD9u1dslyGr7Q2gabctaRGDi9jvIU2h7hri9JK4yAhsi0SxfrqnLBKGNkOJFGsGsiSkbbcYWzM9UWxWltX6C46Ri2lC0CVkZUysFlT7mtXjHqUiDhuJVDlaP6BHmC1ybu4eCMKMMAo5Oztn0Hs+fliSpRlV1aNUyCT3cok49BdbkWf84eYnwugdd4/XNP2Kr97OCEk51I/oMUSYGeiSLNtRVSl/8Wf/Nf/yX/wrlLAYHZBGJZXb01YxXQuReCRQCUJEzGcnfPPNL9huaqI4ZjIpSOIpd3ef+Prbc5q6J0sX5PkUY1fUhw4lU8r0jG5sqeotgg1FOkXbPUEUMegYrQ0Py2vKWUKYhDTDDYNQHLoDfW9RLiOOJUGgSRPJdqexssHKnG3ToB08PS2Zzne4YODm4ZFklrBfBwgtGRpDFs9IgxBjLG1V0dkWEUhOzxKESOnrCVk6IQvP0cGOopgdwVBDmlXUzY5JuWCSvacftnS6wiERFPTdPfv9jsJmKBkjGIl1hTkmNiB6smhOfTBYU3HYNXT1yGz2irKcUFVb+qHn/umHf/DOi5nPFEO3p4pqNpsnorggjkNG3R7jeY5jU+HHqn7Ef2QhOeaGCt9hzotj3IK16EEz9t7JfoSSCOH1mGmakGcZcRyRRAHGCowROBeAUB4gWt8Xr0eNRGCVwAbHyCDxHEnlAZBz9liTecyvlALpPJMXqAiCo/5QuBetpOAIRI1FW3NkuDiC2H8ArINjUcAzQD2CUOdg1OalgtWbaY6SASkJAkUQht5YFXhzlxBHN75wjOPgMwSPTKGUnp0WUqGEJIpT0qzwHfHK/53WmmEcGLX21aVdR98PdNvmKIEZcE4QqJg4DvyCQMWEUUSa+hG/CgLiJCLLFVLOcdaiR8Nk4mUKgx4Yhp5xGAhCh9YNceg1ntZ4575zjjAIkS5i6BzWWKx9XthYoighzmaM+y1WDRyH6FjjI8CCyDEaw2Ds0bx2lCLg5RDOGoZBY0SEckf9B8ec0ThCj8d4LYmvanX4CYBt0aP0xQXHJICXBZcAhKbtHdoc5SdHA5iwjkD61jdrB7D+XAnwWanOogKFthFKTYkCgVKef3Y+AwxlYbfd8OOPH3DOm/uM8ZMCKR1ZJtjdtf526JQ34ckQKY+iPykIQksUw35jmMwynp58lvjsbAE8ATCZZgy9JY0n6NGih5rptORp2TKOGiE1QjgO+z2TsmQ0G+puzdnFLxhNj8URxTlSRNRNhdGOaXnFqGucE3SNoaq2JEmCGSTDeODz429odMUiv2K93uFoSMMUaXu2q5aTxVvS3LE73BNE0A01T7snFDNm00u03iOkIIoS9ocDQjYoJZlOZzg3EkYj2u4pk5DlakUYS4RSBJFiaPek0TltMzApE4TsaRpNFMRoU3E47AjCCEhY7275cvMD//jX/xW//uV/yzAu+eHH3/Px+l9z+/uaqMyY5DPGfk3dPCBcjHUxehx59fqUvh+QymA0dF0FTvgYrsEyzU9493pE2gzbN+hxYDHLkQoGp/mLX75D73vs0DG6itoq0JZpcULv7ri7uWHoLLPJFRi4vvnCr/+zf8T+0PK02yCU4+7pnsV8TttWjLahri2b9cD5xRU2e0CIkENfY90UaR3VcE29jpEuxNU7cDkzMiKbst5VXL5eoCvHUzUSRRlDX1NOcuJU03d7wihBBCFpouiGJWGkqOo9V29e8eX6
C2kWIIMErfeo0KGtou464kJwqDeQPHF+ekEQGoQGpGb1tGe5/MK8fE2kcna6ZrNaI9UAZkKzq5AKJuWcNA2p9+7/AaT8j7P9SaC6aw40+hPT9AphcoQ0jFiu76+5vExJsgQlEmbTC7q2RkpF1ziy9ITH5S2nZ3OE6IjjgrfvTwhURBhMCJMDQShYzL+iayzTuSGJ5lTVmnfvL/nyqcG4hjgreXo6cHV1Rd+0TPMp+80Tb9/8mru7hu32ie/X/5Z3b37JZv2IsTfsqxua2lJM5vRmjRInBDImnwg63VA3mr5TtF1NUcwJRE6eTdntl8RpxjgEYCOCQPL48BmDJgwzzs5e8emn35EkeybxW3JVwknCanNPoFLGUfO0XjE7TaiqEW2Nr6XUe26+tJTZa8LIMnaOvveO5321pGseODu/ApcyaEegSg71jyyfIv7JP/6nfPl8y2+ffkNRnBKn0ssS4jPGYUQPA9P8hKZqqOMtX319xmZd44jJZ5qmhdP5xI/00xxjDGGYcLb4iiypuf7YUk4LnBkQTjGO8ObNa4ZeIhNfKTmdzHn79hVfvoy0teDkLOfht3dMpxl9V1OUEf2wo8zP6ZoKa9cEcY92NUk+4Wn/mTiaIkSEPNZI5vEVbaNxtqPvd0RxRtM2jLYhm2bcLu+IoxwjasKkwbYx9d4Rxoq2eSKZeC1q1WtOZgsCG7FaLqm6Dq1WRCIiEjOKMqFva7pWo4KYt69/SRIMrLd3pHGI6y7p+5G63bDZbHjzKmccBG3fMZuXnOY/4/r2A2maMilnrPWSXBUYaxj1ATMYVGAJg9TflHXgA8BljLOKcUyZzs5xRhJGAUPdYgaJkDGPqwN5GdO3HZune/LMj+UAmm7FZPoGHQRYGqpgxUWZUZQBTas8EBAgji1Vxzvy8at8GZk6Z45z+KNj3z0DsuPYNQQQR72pxdqBw75nv9t5JeyxwlKIgCCIkEGICgI/9g7DlwgxKYOXUHhjfS+99SXwx/0AJZwHlUq9VJm6Y3rAs3kIjsBQ8KJVFfL5tR7ZY0/kYZxlHDSDMUfNowesgfT7FEUxcRZ7gHlkap83d4xc8qN8g5XuuD+eETbPznOcB9xHZvZZV+tH/4HXsSqFlLy8piBQJEnCYuFrc63zhSLGDIzDwNBrxrFn6DV921DtN6yMedEGC0LCKCBOAqIkIggVoQpRKvQgM/YZo+Og0SLj6nJKP2ovH+g7Bm1wQcjtqqHXkjiKkBwBu7W0bcXD0rDf915OILxz32uVYXQdddNjrEIE4lkTgpS+gCAMM5zrn9c+xxMi/TmxEAYFjvbYyPV8vH20Vds19H2AEBHWGQIlX669MBQY5/Wt8phg4DdJFIYkScxu16NU5HUDeOlLpEIWswVhMPW1YuLI5ss/JmTsd08sHz+AGJFBjKHnuXHqWbLQtsOxWMOP/ZWURMqDf19tO/J436KHCjUpaZqeIJJkyYxnoFptW2QATbsnCRXT2SUfv9xQlCXabQhERF4krFZLknRGFCSUkxnd0JDlKdtthVQhxvTeuGcr0vQ9kbX0HYzaEUe+716pAKUkd3dLsnRKUxmECMiKgKGLqPsKFcZgR6SLETZn7ATV4Yn2oPj2/a/JCsGn23ukGunamCBMifOOoU74+uuf8du/+46imJAXllFvcVZydfmGtmnoGri8vODm0x7CA02zpCgTMDl5uqCfNAyDRomBrPSL0puH76jbB/6q/i94c/UtX7//J3z71a+4PP0bfvzDv0YFJTfXt2zX3xMWBVmmWd6OlPmcvmvozYGQCZEK6OyBKHQIDr5I53FHEjqqZslicUXX1Ug0SvcEYUg5ybAix+73CGkoFwWNPqA7zaTMGMIEGWnC8IRv/2LBen+LsR23y4q//Kt37D4vMcQszgIeHp4IDYz9is1S8v6rBd3eMcYzquqGze2Ot+9P+fs/fOD09C3tfoOQBY1umKc5uZwz6ohxqMAm5PNzItkgTUQ+OeGwH0iiiN51TNIT3yJoNUo6hNC8fnNBEFm+fPzAm6szuv5Aaw8IFzDqLVmZUxRvKDJH29ekWczTuiGMBBeXV8RqwjC27HcdxvYk8YQoSdkf7gmCkN2u4VeTX5H8f8dL9aeBqkxgqBXtuKNtBoYhJ00jgtgw9DGBnLBcbnEnCUqFpFnMjz/9iBUWIxp2u4xB96y3HZfn78mynKa7xxhDls7Jkoy7+498+/pXLO8cUZgw9CNRmPLm8tcsRM10Cu/PL/juh5+42/yB2t6T9pKTs1N+/On/4M37AkXMcrkliWMcCXGiabuKYh5z2BxIphZERlmestvtiXPNZr9ld1Cczc+oqpYolgx9QyjO+dm3b3laPzCYOU5ohFI4GSPDlFevS2w9YZKf0owdSkZI1SNVSNdLtpuR6TxGypxB75FBgDAl1aEnTgWYHmN72m5D1+84ubgkSQRCeuf8btdxcjKl6xzjIIhigYhyijIhiTMO1ZL5YsH93S1RMCFLJGmS+o71dmCSXREGH8gncx4/tbx+BXHwjuXqgcuLr5DRiofbkUn+iu3+M0kWUFeKLBO4QLNcfyIJ50wnpyxmJ9zcffatOnWF4ycMc57WN8zmOV37yPnJN4x9R9suEUBzEASt4uTsnLoz9HpEBZYiTjFjwP7QcTKbgdT07BGxJ9wMmt32mixNj3q5FJBY0SETS1V1jDoiKUYkKXFscXaBIATVEMqUxVnC03bJw11HJCaU8zlx0LDf1TR7waPYI2VGFOYMXUuaxBz6hkDBZPYK5wxxnFC1kiiOGMaBi7MLmrbBjAEwMo4BSZx7Y1kQMp0o4iin7bYYUzGfv6Zq1ihlieMUrSOgw4gRGU54/+7n7KpHpEw9qxm0ZDNJV6mX992oNfcPN7T9GiENpnSk+5QoUQineG5JB8+a+nu21xH6UbIHsc9xRz6LEp6zNI+k65FR/SMD6wSo0AfS+7pO8Q9AXY/WHThH5zzQRSrkM7sZBkRxQhhGXh+rfAC9D8g/PoazGC1etKJCPetUxYvxKooCovDIjloPNMyxCsobpwKk9LFG7sjsvmhPna/GdNYymgHd+LQCdRzFP792wR9B53NKwDMT/Rwl5Z6NRuJZRQlKeRAcx5EPj3f2GGovMcbnbErpgXovBsSxucmDQIkMYvI495IG6c1m4JnpUY+Mw0DXGYZ+YBgb6vX+GNoPUvmoLSEVUvjjq4IQfGkWMohIlCJyltFYhhEm8xOm0v5RF+wgVP71BYEgS40HYqMvCkAYej1Qt4P/HYuXjDiFQOOkoektQwcY6x3G+OsxCCEIBNW+9n8jjnkOAoQ0nklX0ueTCvViuMM5lHJEka8Whuc2K6+zdjiEMIRBQBgmx0WLP22BUuRFQZaVVE2HkB2BipDqyKA7wf6wY7f6wulFxKHtqSqNCo7vBwRg/KKhd96AeFyEqMDnF4/jSF01BMrxcP+B89M3hJEgzQNkMB7TUvymTUUcnBCoAIlmPi24fxTodsC5jiBISdOUJI2JIsHy8ZE4neCouX/oaRtDOTNUh5Yifc3pSc6Xu79jkp/7iDITkmU
JyJbHxwNXr17hnPW1vaNh0BsILNVe8PrtK3a7PeO44eZ64OLVO6wd2K5b/vLP/xv+6lf/HR9u/zmXp5eMds/tl1tOz06R+M/fh/sdaRpjXE99SAiDmCwdsS4iChXKjui2JIlqRt1S93tfirPTnJzmaN3TtT2L7BStN4yjIUsLkAP/67/8Hzg7OWMxv2BRvOX2/oGsnICKycoTslSxafZo1wAJlp4ggsik3N/9wMniimGoOLQd3379DYdtxcg9sZIkaUzbH82tbs1qf0DlJTkdddfT7jRfvUrYr2rscKDuGgZtyKaa7cFgZUSWztDjLUN1YDafYo2/x7a1RpoCaUriLGC+EDS7DWP7FZNSIYeIv/ov/4q//t/+FwL9jqvTilCVbJo91j2yOHvP1m6J5QQ59LTW+Nzr7kCap9TVAfoFZWooogWzvMNqAWLg7v4aKyxPDyvKWUbXOCQaN2rOF6c8bZYM3cjYCerqwGx6Rhg6nIvYNp8oshlhEHNxOufzp3u6wRHFoCiYlCX76p5JvmAYHLPTBIGiqg9/Ch7+J93+JFCtmjV9p9ncQhBK2uGaLLoiYkJXt2y2S1arHU3rY5Xy/GjuSf2tdDqZgcm5efwNSXLDzS3sqmui2PH+7SXCFLSV4/b2B9Yrydn5lJvrJ/p+9KPg8ZoobmnrEmci7h5umZ7C49MjRSZ4++rXtPUdT4cPXLzOUW7Cxy//nsurc+pDyPurb7nm39N1hv3OMp0uKLKS7f6GopzhnGSzuyYKUsKw5NVFzH4DSRyThAVnUwDLqC3t4SOx2tIdcqQNKcpv6DYBTfsHpouQaXqOEYBzHHY1UbKl61vi2DErDWn4iuXjI69eBURtTK8Nb85fE4gT2sahwo5QFPSNIS8Veem4ufvCn/38K65vPrF6+oySgkiec9j2hGHA6VlBSIEeO9IwZr1+ZJIbzuZnbHeOywtBEc/QIuEvf/7P+HT9Bz59WJEmc6q2pesbXl38kt1yySB8CPrZ6SnYgIfV94Tpgbav+PjxI4dqQxCOfPywpR83KHlCmbxjv9uhZEwcxIyjIQwtzkqeHg2H8QFjNYtFwjhaxqHFmJZd5fPj4lyS5xl3nzcEUU+cJd4gZA31/om4TH2lpnP0csuhH0iCKWFkMd1Ic3hi8faKujYYE3Iyn1NkMX03EqoYZ2BSFgy9ZDa5wtGx3V9zfn7FetMRx3PipKVuWpwVdP2WNIuIQkt1WNJVgklxznx+yd3jD3jjRsRkckrQJAgGuh7W6yXjaJjOUqSI6RuIEsiTkKGvmM0W2LhhO/YokWDsQDEpqN1I34Q0TcRuvwO8oWp72GPMQBqnBKHCmISuUb6fPhiwo2e7nfMRRwjL2fkMKTVN1aEHdwR4HNuink1OzzYqv0nxrAV0R7B7NB4drfvPIAvxRwe4Z/6eGU//aEb3jENLU++PKQAehEl1NHOpEHUctQfR0VGvQqQKXthTa7zU1mmDAf/7QXiMZXKMo8Yaw+jEy7jee2gMDu01tFJ4IPvMoB4BMkfwOeoBPRqvjz0ysR7Ee12sCjx7F0WeNVZS+aQE63hu2FJH1lYBzhi0sx5wOkAYtBAeVEqBMEeQKwXCes2k0SNi8KsC+zxWPzZHBSpkNklfWGDrHNb4MgJjRgat6XtD1/aeYap2PudVa8wRLyklUaFnsIQSiKPWUgUCFfpMWWTIZCYoZ8fHH/3xCALr60kJKIvumBLhMGJEoNFWs28btHNYvAxEGomQPu7M4ePOpPTPK/ExYUoGZLkjiVOketZTqxddslIe4A/agfQ/96fN58lK6dBmpB80EPqEChkQevcih6rxpikJkh4RBAhpqfcbHu4eeHWeEihJ22ogerlun2tc+/6YGxumXpIS+FKEcRjACspZRjDvOV3ktE1N287Ip/C02hCq/OV+mRcThLRou2Mx/YauVsSJwDFF9IYoKBgHQxJNCIOIKEzRvSLNejozIgONNRmnJxcIm5EkMRv3QNdXtN2BLHrPfJGz3xp0HjGOhpPFFeM40vcNrms4n79maO4IXIyyEaPrOXt1AUowjgqCERP/nv/+f/qOrttxejIjihNeXZ0RxzHaKKYLz+iv1x1BHPKwuuHi5FfU3R15NkVrizYDYZAhSFFqxDkPtLUd+XT9exbzc7RpaeqWJJ6yblcEwYgvBJlwfXfD9rDl363/mrunD7x9dcW7d/+UpMiw9sBsVtC1AdPZCUGQ0vVbdruR+ewtQmqSNCGK4Pvv7xg7SPM3WCyr9RNxmFPMC7bLCtTAr79+x92nn/hys+PtxQkPmx0iyXHjyNW7t7RDTdMdaHY+OcRquDj5ljZ7YhSWatcTqxQ9alTgs3iDMEIFMSqEp/U1+yzHWcnNl5AoL6gPNdKWnL46RcSSSAVkmeXzjeHsNMK0lpNFggphu9qwfKqY5BFnJ+ckacaHHz7wOj3FKO0N6ySUxYIgcSSl4OGmZ5KWNM2KTJwziU4Z2NKTcnkx58uXjyRFzGx2xvt3v+TxYclu1fCrn3/Larmm7fckyYxsGpDEU15dLdiuNc5KwtjwuFzR6z/WA/+/vf1JoAprlOyZT16he4uTGmkCokDQ9w1P2y/02jCMvmN7tf1CVQnmJxn7aot0IVEwJU1jbh5+hyDkZP7nbDZ3LFfXKHfP7ESy2+8RoeDNu1/x/XdbNvu/p5i85qenAbdXRNsfSdJXMBYEJiIvS1arFaZcMykvmSQTVpsf2TefSdKY0/lfMJvW/OZv/x3nlzGiiYjjiP22oeu0lx3kJxTJBfvDE0WREMcF1sBoVlTtHUYnaNORpTOqw4ZuWKNCxf19RVkkfP/hmi83f08+idDWovUjaZYz9iN1/UiSwXRyijMKYw8Yo7FmZL37iUn+M4axxLQBbd8ShDldd8DFMcVUIl3G0D9QJpL1ekUUwzgueP3qz1gve/qhZXX9SJ73BEoTqYS27ZGh4+FhxbS4oMxPSOIOxpRqv2NsAxwtSTSlKGMeHrbs14L2dOTx8Zqf/+JbhhEO+4rpdEIYTHh83CFI+fGnH3j77g3DEDIMe5IkYL/dk6UT6vaRoRd89dV7rm9+ZNSWaXlCP1RUfUs5VdTtmoebhlevc0TQ0/VrZNCSB29odytOFjnLp4ZibtkelpwuJiw3a4I+IixHXxm3PhDnmXc5C81i+powgKreEqdnmNGiSJAMKKGYTs7p9wOH+kAxiXCixcmRKIkAD5RG3ZMWUDUeXPR9S2oL+sY7joUQVNXBZzcaUDJCqIHd/gFshLYtbdsiRYYZe25vlty6PWk8RakSHVnCXCKDlKFVTBZTmqFlt29oxwNWK4Ze89On32F0ALEHqvsdDINBF4osDxHE9KFgfjohvjXo0bcleZAqcW7k4rLk1WXuwdPoR+pN07PbNxyq9nhzgXEUDP3AqEfsMT1ACF6YT8/PHVlH/PhePI+Oj6099qhLfPk9EaACOM74X8b4zhqMadG6xXXiiA+8PlUqgQojoijyYfxhTFkUlEXqgaxQWCEwzqK1z2Htu5Fh1Ghjj0wbBMrHUUnlI41UoAiUOEoIJI
E8agyFIwxDRC6OethjasJR66u1ResRow1N075keArptZtR7E1QURwfwbBfIFhrPRt5ZGstIDRo4d358qVq1Z8rpZ4NTL60wAHCPsfxw8iAaPET9eMCQQhBEITkWUZ+xEVW+6pXz75qr6vtBoZhYBh7uqb3+nzjjjIO30gQBAFxnBAnEWEYEYaKJEoIAuVTByxMyinTiUNbX6gwDhpnDVEIWjdEQe/rRYW/VpzxFauCEOfEMfeV42TEHweko2prhjFBKX/b+SOLbeiGjnEIkcKzqPZoDPTtVA5tNONokDJChYo4Tn1VLs5PvQQIFyGEZRw7ttsl69WaUIUkYcjjzQoz+Ig0f0QswhnCSBFGEcYKklAeW8P8dVrXXoNdLkJ2FzvGdiTOU7bbB7JSYq0C1b7cLVVgeXh84s2rb1ChY7t9oK8lwhXMJxOubz8ThJbJpOT29okkLkmiADQURU/f5iTRhLY74FzPemORIiZSCS6E04sUsAQu4+y0pB/3VAfNz958xXr7SNfvMYNjNktpqicmWUYzKCb5hNv7e5xRpLLksD2Q5yVZmGOMou81CC8/iSJJoApmsxmH/RYRZAgFD08fCNOGyJxQlDlP64/87rvfMZfQ+tAAACAASURBVJuUhLEiLQraQ8XlxTlh3JElGYdqxcNqxbu37zg7uyKKfJnErm54/fprjK0Z+pB/8u4b8nzk99//LVIkFIliWkjS6BIVStbba3a7ljyb8P79n/P589+SpadI0dH2axBw/7gnL0MINZ1tuF/eMBhDlivuP9+zq2ofz6giaiPIJimBSdFiRI+CNInZb28IRYgzzl9fzHja3fop5+SSoa8I1Uhe+pryNDkB0aCkoHc9cZjy2x//Z9pecT55zeLyjJvbe16fXYGCer9jHu04NAMuKui3I06GFPGULHfEcUB9uOPQpqiioulCiukFTb1BBQYVSLIoIwp6grChTHK2lUG4AWklk2ROLzskhjiNGF3LbrMhLSYY01EWGQ/3GyyGJL5ARZIsD9g87SiztyTxSFHmfP/jbzk7e8NJHP/fBpT/sbY/3UzVt2TZCZqRdbXiZz/7hpvrNU5pwrzEmJ5yPmJGOLRLRrsnTCbs6xYt9ny5+0KS3NH2K4IwoCgly933COWo+4H5RPG0bVhvOtIs55//7/8j0+w9oLi9u2OeXxCnKR9+WvGLXx8oyoSu79kfbunHjiKYoySM1Z5pcooMLPvOcKgNq+Udu3rFOW+JU01VLxmaDKdaFukFgoQ8j7H2gru7G/7sV5LlnUWqgB8//EQclkgT8/7twsd1BBlpcsrYP6CN4an+zMl5gnMR9b4iiALm85TPDxvevLmiOlScnc7RQ8IwCH73w2+5uMzYbkbq3QekGPlHf/bPuL/vsWGHUZf0VhMBcZCz2X+hTA903YgxgvnkyuvKwgHlJNa1KClZrr6wWFwRZhdsn3Z03RKZNCgl2K073r6esd50tP0TUVTwl7/6hh8//Ia7u9/y1Ve/5tPnT8zmAU+rFft6w2Ab0vwUY0LiMEHbEecC6loTRqBdhx4VUeKra6VyIDT397c4JI+PFU/rjjgdyfKcvhlpqkdUNGHUgnFsGLuaSGaoUhJHMVo78jJj0ANxnDIMiskiJIlS1vsOaw7MohOUgENtaHrBdv8TWRig1IwwtQg7UB3uOHQN0+nkCJpGynKGsZK6GhhtTVs7pHwkTVKqeoc2hjhOmeZfs+q/UDctxlqsntB2G07mU9puhwC2m5rZ7BRrNdZ1xFGBCmCzWbF62lFVWywjeXrOrHuFChxBfKDvrzFDzOu3OUVWspjMiCPJ8mnHYbujKHxPM8fPBT1KApFh7UgUTsBm3KdLCjLCGKiP7KfzN2ljLMvllrMTRZmGhJMcKRV6HDhtc7QWqFjSD2DGkkPVst1sqaqarqvpu84zqkpi3Xg0ID6D0WdQeISuzh3jq+QLA+uZS/ssIjh+tUcgGGBxRyPU8YPFSRwGPTYMfe1lkNbyKCVhGBMEEVGUkCQpcRyjAukzQo8xRpHwH1tK+DTY5zxXYy1mMNhQecNSEpEksWc3j4LJZyb4OZ1AKukBWxAQHl3+zjmMNhhrPIurR8Zx9Jmr1eiZWPwoW0lxTEwIPIsonhnSZ2OX9Syz1TgcRjy3SHF8HL+9SBH+oX9BHDNzj4kDuME3xln9sm/OOh/FJQRJnpIVxQtD614YdYMeR4bBywuGoaPvPMASTrxEmannPNpjcoJ8SUHw7V04RxAUXL32o/3nHbbOV5yKwGHEgTJXR62vlxYIIFAJmo5hMISh16g+N5gFyj9310l/PKRAuePE4MisdoMGYvK8IEoDnFXH4gd/bUokOEPTDDwu72nbmiwpkRLiVKI7//845RcURxCtlKQbB4yVSGcJwtA7/y1YGTAaQzxWSLFhuf8D09nXnIRv6NotSZowDn+Mp9KdZagzTk+/4vrmX+G6EcGUtIg5dDvyPKcoQxwds8Wch+WSi8WCJHpLWoy0ciBOSja7jxTlCYIZUTjC6FCpZOh7hEl5Oyuo2ZBlOe2wJB4HXpUx45Cx2n3GuhxrD2inySaWh80dg5lytbhkEitE4nDJhHG/Zt3tKJMZqcqYlFPa4YnmMDD7+WvqmeT84pTffvd7mv7vmWQ5662vYW3bAKMNZ7MLyknBDz99z7c/uyJQE8JEMPR7kiDl5CShOvRkeUIYBETBGflVQ54lHKqBSVEibI81Aeen71BBT3XwE7txcDgFk9mMPL9EDz2r1SNZOqdrLLP5CW6ecXd7QxD5czCZFjRtTdO1ZGnJtJhjnSMvSsaxY18dvAFJnRJPJENr0GZLVlwxmY1kUQwuwNqaw+6JSJSEQUgcS9Ztx3Y1oKIFra7ptjWTWcLHm09cnlzQhw1v3/45g+gwbcHJ2Stenf8F80XKevMTm6drHjd/jzFzTs6/pWp79CiZvknoq55I5qxWK9Ikx1rHmLR8efgPuDaimM7ZHFYId4YKY04vF5yEJ4jomtXjkiI/wSpBkBs+fNqRlhOyLGPcQbVrGPuRpn5g32yIkpTFScaP3/+IPb/CWMv1lxVhJMknKeV0wqgHyiL6v8KP/8m2PwlUY3uOcBFhHBIXETePH9B2ShjELBYx232LIEKLNVk8pekayrKk7nqEgq6x5JM5+7s9TjqCRLK+uaMoFoxGoBsYzYBwHVJGrHc3WKsJZEwSTmnHinI+5ertnP3WgjJ0XYMQkjgqiMM5Dw9LQgFCgTE5QlT88OHfcDY/5fzkZ/zwh594//UJRoeU04KndcuHjx95+/Y9y82S7jClbQb2Oz/qqusnyvwUPRqQsHraYsWAUCOHasDakTgd2Ww3pHmJEJBmiTcqiSlxfMOkLAikZFfviPJrVDAnmy64vq4QYkSJNZOZgTFgv6wJJgdUHjAMhsV5Sp4suHuQJFlE1xpmsyldfaDabzF2QIqYwKX0TYceItbLjizfMpo1dX+PeZpz9foVYoxwFLx5PefT9e/YbQa++fYtXTewOFW07RpLR1nO+fjpJzqzxJqM3/72PxAFMUM1MJlFhAJuP
68QqgVpmM5KgnjksGsJIsfsNOPxYcvjakk/COqNYbqI6bUlVBlEPWlsaBpfy7bfNFwsLuiGitHsqaseg6beVOTJOWEekCcpu+2W2J4SyYhD0xLphIv8DBNuuL5+QC0uSFKBcBUnJ9+w26wIao0cNXHmOIwjFkeSzGn2ikjBKCo2T3ecLs4xdsf9ww2hnCPsHaN9wmqFFCFDV5PFBV0dkOclva4Iwz0CH7E1mh1YgVIhlpZR1wzjgLaaPDestrc0TUOgIqLYt7s07ZZyAqcnl5ydnvDh45Ll0yPnZxdk2R+ZGast2jaEsdcOSuk72Z1z5FnKfltj7bNbXgCKobdsDxX9oJC0pHnpR68GgigiTmJAYFSOVClRmFCUDW3bsF6uyQrHxauC0Q5YE/D5wxPbbXWUcT6bV0aes0+95tTf+J/lAUKIF83rs8HLwUvD1YtBypmXVIIgeNbmHtk1qxnGkbar2WyNZ3OPof1I5Ue+YUAUhhR5wbT0/eZx/MzUiWPDlfHsnrZHnaQ32AipUIE3gEnpGWTvM3IM2iDMHzWHQRARxMd4qucfCo66QO3/aY3Rhn7sMZ05ZnMeD9dLLJbyukUlseJo4LLmGPrPS3zTC1H9Iq1wL8dVHuO2nD3myh5NTu4oITDWYMzRxAYvEg151IZKKUnTmDxPvYb0+VrDYo31wFwbhmHwMVzaHFvIjvnDYXCsofXMchx5Q52KPBvuzUuW168X/nga7U1kxwVBHEuGoUd3Wx+zZcBoibGaJEkpiozD4YDBvyZ/AMEpgUGyOwjsGCKVYxi9sURaQATgLKPu2G42rDdrpFIUxcwXO9gemcSUp1P07RbpfKOQF80Y4ijEDNqbxvAmKueeTYQOhUWFBhVbivyK+rAlzabEUYpTAYtyAuwAnw5xfloyL3OewglVu8RiEHZLN25ZnJzQ1TXWWrIspcxS0mKC04bV6sB8coEZB2Jxga5hWpYIYejsgbDMWN/f88t3OT/7pkUkcJJNSIoJfRPy6dPfcL+WjCYmDAPScE7tOkIb0RvBLIkY6gfScoKzA323ptEDcQDtoSZLS4SJiESGFhF/+MN3KJny+HCH05b3l/+Ufb0mz2PCqOZ2vePd1S8hdDys1gSxIArnJNGUrOz46+/+liByvPnqG3abBkdD0xuag+D87JKmqbm5/cBs+g5dNzStIUnmJNGCTgpG07N8rPn251+xP+xZL1cgR/r+jrdX32Bsj0Xz+fo7FuVfEKXdSxJIGKY0dUsYRGzXHVkmaPolMhDsqifm8wIZ9Nw9bnBjyNm7kHq/YZIV1MOWw37J7mARakrd35Kpc4ZDhXCdz522NWE056nfU92v6WuLWTgOdzXq7JQkHehExf2nL9jwI//i3y55qn5E94oyLpilIa9OLxDBjrY58Lj6ghynzGYnnJ8vOOw7LDFBPMFWHW8uS+73FW0/8Lm7YWYlsUgRk5LF9BX1fmR0HaP2dcaRmvPrX1zyd7/5kTDJqJcND08rxmSL0oa4atDDGkbH/cOG03mJk46HxxWv376lyDNWD2sYij+NHv8Tbn8SqM6KKZqBcQhIswueNjfEUUVenHPYDhxqS1lA3xmk8HqGMj9DqppdVbNZ3zIpEr59/ws+X9/wh99+5Gz+iv32wNXrgnrteFw+cXYxJWTOvATrYDI95+uvfsl3v/k7fv/j31Lmp5gh5etvTrm773DOkOcFeZ5z/bFiNhesnx4wsuHy4s9x+hZjlvzlr/5zfmMOCBH71qzDgTw/Ic9r0ixhuwYZVKRlx+PjLUEQ42g5P/uWag9W7PhyfYNlA6rCCUeSLcjznHs9MCnestnusDR0/RabJ6TRhM2yQuuGVfsZ43oi5fj2q1/y19d/QxCvaZqEk/krfvzyB9aV4+urSz49fE85myFdxmF7z6w4ARGjRMg0u2K1P2DMDwyd4u3bExQjaXLCx09LnFYYuaacCMrsgrbpGUdNGM2xRGzWT2g3YG3HdrejzAsQ5zTdQB7D4+MTyJFpcUbd7thurlEuQ0WW0E0Zx4FiXrJedSjhKHLF480BYt/atHxY0XcJTW+wwjEYqIeKvouZTARp7Pj48ZqTxSlFESJcQTduWC1jymkIoscYhzYWlGC13jMtYw7/J3Vv1iNJll/5/e69tpu5+R5rLpVZW1f1dBPkDAURFCAM9MX4gfQyehpgJEGANCNi1Nya3c2uLSuXWH1329d79WAeWa0HNSCKA0j2ksiI8HB3C3e3c//3nN/ZW3z92dfski11vef11QuyfEi3zscjAukxtkM8r6XMUpQ11EE+3K8J/CuqquH29sB0lmF7Cx7uU/aHR6R0yMoH6jrDsmySpGC9+sD5RUTT9CjZU1Qrri6eE/gOdWXosfG8Ebvdjh/fvWU0Nfzy6/8K3Wkm8YK28RjFmrx8pCpaHGdMp3dkxQGVK2ypaOqGMHK5eV8ynabYjoNlW6RZix94Tw2q9L2h7XK6RGOwqKtbonBKM36OdE+ijwHwDgZlQZ5V7PZD9aQCFsLGthSu46IsKMsKo2OapqcocoqyoKqG7XRthnBIFHvUreHs7BwlPH7z6zdUVYWSg2Dq+57ZfMRiGVOWLXmW0zaKpu7RpvmIuxq2YdVApTqJUk7BGiPMT2mY4dnyB5rsJHQFlhKIE/NzkFPDYbRBty1l3ZAeD9wjTgloG8f28P2QMAzwfZ/A9/A9H8d2Bvg+w3Z713Vo3aB1/1Eo9qd0d9efrAUnkoBSg2fRPjV/DZPX06TRdvA85/8SzBq22weh13VDSKpre8q6PoW0nkTp0zR1uK06BdNO1tphIaDNYA94Kj/gpwDc03kaWpzkT18zw+98+nmtBZxE50/3/WQneHpuNq7nnggGA3PVmIF8oPvhtk9NWn3XkScpBUPAaxDhgzgSJyavdQoj2Y6F5wVINbx2XNfn5avpx/PddZqubVBKo2RN5NtkWtO0gwVj8A9Lem3R1nL4e8kUqxmKDSwpMVpQ1wl1U9J2EEQxUtm0XU+razxL0LYd6aFGnCphhTEgJNoI2q6maYbK4SfvtTEaaU6VtVJjKxdH+7R1xdXsT7B0RejELK8u2a93H6+Xjh2AViS7DaJzGY0u0cJwSHYUeco0ng4FM8uIh/VbLOFTJRWu6yBMRdPuKStBFE/YHzZIO6TrKtrkSNtFvH7xM6YLg3YcqtxjvxGskoL64FDpERUti8lL0mJFWysW8Ut0V+BbmtfXC95+83uieYhnLdBRjthr0txiNG5YPdwzny6gc3C8AalWdTuyTGLZkqoq6doCJzpnHE+4OM9xvIZWW9hhiuUr9vkb5vaY3U2F0S6WJSmKivFkRnJMcGzBQ/6e5fKS+/strn1JMNIcDw2jUcB+uyUX+YDha0uQGt1YdG2FY3t05kjoz8HKMFJTVz6WchiPR1S1Js8TpHQIY0PymDCfRWhTkGQ5eXFkPp+jrJbDcUdZCVbrDyzPX3J70xM6w/ceVz8Qxws0CsfWNGlCnbvIcY/wbEYXZ6z3R/LtAUf6CNvi+WVEUiTUeU09ylDmQF42eKrCUylz22J+9SX7
omExmaLajP3xyN3mltg9pzM5vcn58e47fvbZ5zxuv+WYp+jAxbDg+vIFxlph7BVdXVPlkpdfX/Ltd9/i3kcEoxPOTVUcHm0+/eSa7d2G+tAy+cLh5m6DVIJl9BzHnSNMws3NLVF8ztiLmc+nJIcdk4XFtz+8YTYfYRRkzfafISn/yxx/VKh6XkxW36G1pOtzzhfPqbs9QRije0mS9STpgeX0GcgdvagpqjWObZHtO5QG+pxkd+Dnr/+M9zcRceximgcW0QsCK6XrDFFo4aiIOJqzPa7ZH1PE+3e4gY0tPIQJGC9ctoe3BH7Mfr9nXT5Q5BWLsxGe54Lw2RZ/i9Yt0/GcrsupyyPPnp3z7v0No1EwbPNUj8SjS8q8x3EA6dL2EoSD64ZorUiSI1LYHHY7lFJ0jeKYpCgrxIQ94SjGj6AqXbwgYr1qQcbcrb9nFFncvsvpxQOTxSUf3m6w7ZrvijdEgeBs8SVCRyznn6AETO0PZN2Gi8tXNE3PbpMQhz5Xiy/IixrPdbm/eWC/Tfn6Fz8j2YOlXPyo4fzynKKq+d/+418zmVn02iee2ByzI87OYxzG/P1v/yeWZ1OMcXGcmB/f3NDpA30XYbtHyjJHqSnPrs/57e++oxMNdVNRJgY7dIlmgs2xxkiFUZpgJMmqHZWpUMYhTw8c9hnx6BOePb/mYXVLPHFpuooyNwSeYPvQIUXPYbNjOfsc6VdYwiOvFdubPRdXAaor8PwQgSbyxrRFQeCPKdsOx2+4uJyRNwlZaYgnYzzXITkkeNYA9V5nPzCezIjHDrodc/fwSHJYUZcahI8SHVlxICtT6iqgbjM8z+B5EWhw/J7N/g7XDWlKi64XPDyuCIMjjjOiKAuMNuR5ju/CyAtYPzyg9QAKn42X+FHI/aqnDiuEdmmbc8KgoqlypAnx3BpneNFxPB6ZTReMggvKquKp0hSgbmuMsOg7zeFYopRgszvyD+o7ZvUYgwVy6FEXZpiadb2h7UA4g+hp+x5hKfq6PdVk2jTNAHM2RtL1Q1tSXTcgW5RlkaYlTVsBK2aLEeOJT/3Y8MQi6juDbVt88dUZiCHU09SC3//2juNeIpRFrztgaDYaqk2tgURwSuYPUzoxwItOHNghyCVPk9lhK3kIij1xOk9czJMTUggJyqAsd/BwAoaOsknIqwPr7YAoEgxCyrYcXMfD9Xxcx8HzPYLAx3XtITRlWYNI7FrarjvVq/bUXUfb9x+DZpIhJPUkVm3bwrUdbMfCshXKHlquPpYTWA6W7SGjwZcqOInPvj9VrnY0zYCtarsO3dSD7aQfQklPjNthIvpTgQGnNihtNEK39PxkZfhIGHhq+fqDUgZzQn09+ZKVkEgj6dueru0+TnOHCe5AYpBisAM47jARFqciho+2D23QvaTratq2oWkNlHJ4XSqDxBkmsq6kqls2m+0pSMfAe5XqNF3XGCHp+pqmLdG9xmhxel4aIZ2TVaCjNiD6wS6idcvZhc90NuJ4hK4fmrEAlIHIs7GVJs/bn2wWRjFYVxRS9jRND0aevM2Stu+RTyUVSMahh2l8PH0gTx8IvAmb3QO9JXBt++P71nVj4pFHnq959/43LC6u8QMPz3ZQwQxXGeqmpMp8aB3GsYcmZ7+rGE8mJEmCpcYck3vquiRNHmn7gufPr7GiiMi95Ps3N7x561JlJe9Xv2dz3PHy7BortKnMPdNQI+UFo/MQXR3ZHzcIY/im+x1l0WDvN9ytv+Xq6md8/vpTPtw9opTFs2ch282B474jCAMs26JVhqqUg23MDRmNBJv1I64XIWTPOJ5zzCoc18dgGC8KPtz+PY46Q9g2x2OP6/c4VkvXGRw7YDo+4/5ugzaa2WTOfrtGCEXgB9wXOWdniqpJSY8Ns9klliN48fwrfvPbvyHJDzy7WrLd3+K5M5L0yLNnL2m7LXWn2e1SHNei7hqkGkoizs6uORwNrutS5BWT+DlZltJUhsX0NYuzC1Y377GjiKI9kKctV/OXGNEROT5nn19j47Pb/Jqs6EjTijytaArB2fkFL168ZLP5lm7vM5oaLFVze7dCihHnn9q8Xl5hC/jbv8u5WMS0fYeSIQ8P91jWGMtWlKVHV9e4kcPdekvZWngjyX5/R1O4vHu85PX1nF2aMZrPaPYp64eMcOrjUHNYZ8ThBbbq8H2X9e4D+22N8ATHw4FwHDGeTdCVQLcW04sxeZWj+gjPMYzCaKAoVEcOx5Rooqi6Ft/3/7m68l/8+KNCNak3JOmRF1czylKR7nui6YhG50jlIe2MyDujpyI/GpQ1QYqQqjry+Wdf0uYvKYoDUnh4bsgnz3/B3/39XzOKJ2htoWwbYTV4Yc9iOsaxJuyzByJXo/uWeDKiaSSO09A0GXQuZdWzWE44bjrO5p/w7Zv/yOXVJfOzC8b9nyNFz3G/w/dt2kogGcIZbdMxHS3AjWn6irLLcWzAaHQbUVQlcprSd4aySlmt1nSVQVodVS2w1TnrVcq3x7/j4mpDUdQ8PkRYtmS/T9Cmw/cOXF8t2R4eabuU0XiK7g13D0fGk5ZR5KK1YjqO6boC40gOSYXvjYjHQ8gh3easH1c8v4j46ue/4Le/+R3H7FtcOUcpizAW3K0eiOOIm/sb8rzHj1wcTzAZT6jaOw7HPa49w3FW9KZmubzmm3+6YbGYkeZrinpHnjXML3xubyCOazbHD6y2Dzx/sWQUTvjrf3zDq89niN6lqTPe/bjnfLlAG8XdfUJHTSQDjgeIwiuErNGdR+DEXD0745tv3lF0Bt0YyqPP4jxgMTtjfZ+B7nn9+hnJ+i0Yxfb+QDQKqdKWY3XkbBHi+Q1xNGefrOjagtCJsKIpo6imLo70veSYb9FUOF6PP+1BK+qyQqkON9Ckact4blPXNlm+5nG9ou8FRbkHO6MpQorCwnVtvDAD0RO6YzbbI02bI+SMY7qmqW8JgqHNYzKJ8ewIYSRllVFUGa4zwnZLyl3HcvaatLghS2AUTtFiR5UfBgj1LMayPBzXQgiFbffs90cm44jZfAk3NQCtrug6jbKGCWNVSYTQ3Nw+EjYeUnnQSxD9CfczCNWi7CjailEYoY8Zo9gB05MVmtA7w7GmtN2QzB22njVt2+AFBttRw/RUyWEibKeMYpfdVtH37cCWVLBZ59x+ODKZWwh6bMc+wfBblBIgrcFHaXqun82YnyuyIqOtbEyr0G1PWdWUVU/bD+UDMAiVXosnA8DJ98rH6esT/3UIzwxtQwxv358mhFIMqXEhECev7BDgaiibirzafaQSPMXFhJRYysb1PBzHQZzsAY7tDfWo7okicGpS0ifPZ5XXNO3AqX3ywCp1KhOQ6oTEkqcp4+nrpwpf2xn8sMoemp6kiE6oKjGUHLTtEIpqaqqqHqpD2/K07fjT45dqaAQbChAkllInwT98fxCSJ4sBwzka8KEDiFefbAlSnRYJp/MuTuuDjg5t2o+T2CeWrZTDJHa4X4VSzokf6w4+XWkNdg8xLDbaTtPrjqYtePvmG/K0OPlLBxKCZTt4jodtC4Q0A4bsxKkVCJRQSNoBD+a
4BGFIFMbYjgJpiMcuh/2RIs+H2tm2/0gyAInjedTtcfBqy4FWoI1GCjMgr0yDsgafr9b9aUHRYUyPsgRGCZKk5YvLrzhk9/S2IU9LHu7fEU9/uphrI7GsiN12y/L8jLrPCB2JaCTxeEQQ2LRlzXGV8Oz5Ocf0kVWqKOsUVVTDzp8pEV2Noy1sU1JWG35YVci9RNXfkWY5i9krYisicit++Rd/Ti9b9oe3mMRjs0k5n7/C9Hvu1w94wZS+rrjbJRwPPe74FYV45Ha/IS8l58tLoihGqy3bbc18cUGSPXDICibxGZMZvLs5YLs2WVqBMFjynMnYJ0t7fCdkt9sS+AFd+RJXxBTlFtuxefbijP1+w2Ff8+L5ZxySe5o2w5iYOJ4TxQ5Fo+kbl/n4FR9UgiUdimOKJX1s6ZJkKUVpIS3F2fwLmqZCSsFqdUM8OqPtJboHbTouri4oii2eN0bJivX6EdcaY6mArpEEXsQo9DDtGG3D4ixin+wZjSIMBVmaM5s84/nlK55fLzCNJhwFWEpw/04zedhQVj2VBRcXY2TQsdm9RdoRTfc7pBpjq5jXX37FcXvEiIhvN4aRzDnkOe7oQN76nM8+Z7KEsu8YRxGxHVHlCQ+rFaO4wQssLNtDVBp/5vP+7jcsx78kSXZE5gVni5i3H94g/TGt29GpgrROabMey04InRHxeQyyoW89aI8E1hjlC3zHI8vXLJcXFHtJEAoe13coxyXNWrK8oiz3bNYPLJfX/6/E5b/k8cebqfoa35kjLBfP9VHzhrLNSbdrlBoRj6b0bYA2B6I4Is23HJIVftASBs+JFhckyZ66MWTdlpvbO0ZnQ797kqW8/vwzuq6mbu85plvaesdkPMNQI/AwOqMsMjwnZPvQ8uxZhhthkgAAIABJREFUSOj7SC2ZjqZsN0d+9sWfcjgmJIcSgUcQtlyefUrXlShjgQrpljV5mnM+W7JdFZT9msNxzfXlM3ptyPIhCVeWQ382MqUqzZD2L1s0gihcImROU/cc9nuM9ljvb4hil7KqqOoC1w44nn5vdnB5/zbBsiIcx0abjrKGb3/4jhcvU3z1nKZqsHAJvJgkXZEWJcmhZ394IBzFbNdr3r79lovnENgj8irj/vGO5GBRVRrHc0mzhtCL2e8esVSJxmY6neK4DodkgzAR3/z+PQ/rt/SmIBh5SDklr79n1CmMaNjuWxYXEV/+4hJH+lwsn1GlDcpakB4N5+cT6qJi5Hus3+9RlkNrGnZ1QVPZnM8W5PmBcCrpWpvVbUUc+4S2Q1vX5PmRWX1OV3foviQMLoe6QUfQVB1KKgI7oqQiCAbT/XFn6Js1VuBRHWxc7VBbhjzbUVcpTe/gWiOSQ0vZrBmlsJyfsVptabqUc3UOQNmWrB7XA9xZNnTGQkx6XF/Rri26pifPBSM9XFjb9kgnUloKDqkaWLaWw2HfI2WDJKZS5aCArG64wKke2x6THg4o5bF97BBC0PcFZdEN3eTKUNYl9b7Hcnqms5iyyIGetq24vXkHXABgOza+P6WqGkBjqaFIQJuBTiDkgHGCk9ySgBZUlcaPrAG63zTsNjWeF2BZPtoN6E5byf0Jsq+1QUgYxQ5dWwICx/NOEzyLxXLM431KlrUnv6OiaWrubvY47gjb+YndCqBPcH+QCGlwA0UQG9yRTeQvsLSDZ/u0bcv2sMPgkiXl0ORUdyRJcZr6msGn25+2/A18xGidtrzNiUowuAqeMvPDvrnWAwLpY5SeYbpoD2iCkx/xJ6as1hVZVgwfeqffNTQnnUSZsrEcF8txT5QCC9tROL57arw6iUw9TKnLpsWYZhhEPwXJ5KnCVgikdbI2nCaf8sSdfaqB/diUZVn4oYXj+8PC4iRU+64b/JcnL2lb1x+rQp/8rOokJu2Tt9SyrEGEn9i4P/laW7p2sAXIjwiu4bZDgYE8eTlOQlebj/0SWg+Crmma03O0UGrY8nfswRLh2Daeo1B2gO96jCcxZVXhOA69GdA7fV+Q5yW2JXn96lOmswV93+B4LlLauLaLEgLbsnE8D9f3MEKgacFA32lGcUgQDu1dVVXR1PWwXd01bHdD+59lDwtBY7ohJtgbykoNntNAnqbd7WA06QfqgjKGoqvY9zlhOcYULn4gCK6ekRx2rG8TnnZDLCdlv0uxLBfpzLAoiOIL/KBCtw1Keux3a2g1th4zUhF7veFqPuNh/Q1t7XB+dsEoWFKQgEwRxiU5bLmeLJDS4E59TLfD88csxyOyY0JuUqSw6SqHq7NziqIgzxNsGTMKQnBtdu8zlAubesXl/GuiMOZXf/+fSZKU5dkZQnUEYcgheU/dtATucvDFI3Bdm+06Y7lcYDvtsAAQEsspOKa39K3D9PKcvDwynkw43N4TegFpmlBkNVp3FHlD6M+xL+H+/h7bjrFlhOf6BNE5vnzFf/tff4E/yvmHf/gV949vePfjb7h8/opXX71ivX7g6uJT3t3+HyjLQzkpVZtQpBWfvfpT0uyBfXJP6EX0uqWuB0tQkuzpexiFI7SpyRI4W5yRFhuqosWzBauNxbOLMf60YbspeNz8I4d9zsNDhd/naKHAjhifPaduamZaE/MNtf4lrR9gOTZX1895f7NG2pK6t5lMX2O7ijwrWS5GXDxz+bB5Q2Uk/guNrEGXHZbj0NY5XdfhBy5NK1BugzIxVxchWbdB1YJDv0XYLn5oofyQ82XE+7uU+eyayYXNYd/jh1P2h1tGQtKLI22tWcyf43DOy4sXFFVBXRdUqxRbjomvJG195Ic3N7z89Eu0ASFSHh8TTGeT7A7/D+Xkf7njjwrV0IsJgin7w5qq6gjHDgaHyJ8NXb+UJM0j6a7H9VqkbKm7A7b22R3f4wUOZVtwv3qD7WW40RmLxYLb2/dD89NO0ouKrgsoa01VZET+JZ7rc0yOTBY9Iz/Gt6dMJzCKIqpcoIRAOA1O4JJmO4riyPXFzzgmK/KswVPnzOZLVvc3aN2jVM8oGCFNijYbJJpnz55Br2kKhe2CEhFBKLi7XaO7AeBuJPR9gDE9eZYyW0xpcknfwcX557zP77BlSzQzFPWGKgtROuL64jW/Xf3IeLZgurCxZYcftRx3Nn1Xsl23NOWGzz4LMI1Leuy5vd9jBw1C2EziKVm64m/+8X/BjxVKLbjf3rNO3lFWCZPoU96++56uFTy7fkFdF0OHNy27VYUXaYx+ZLepCQLJbvcjfqiJRgl361uiKMYJAkwfUdePuF7I3f2Ott8zic7Rjc3zF1MaozB2wW7fMJ1ecrmMCYOcshSsHpdIGoyVkx4r9rshbFM1Ba4bMhotCM8lN+/2xHFPXRraxmU0mnBz+w5lGz7/6hXSqTgeUvT2SNN2NLWmSB+wRUCWpghV0TaQ2AVe0mNJjbJsDseSZJsTjWLK0oNWIZsC159gexGr9QEnsCgbzT7V1L3EUmOKLMG1RnRGoBuFZbV0fUea9sPr7rDHsgzPXlzT1YYyh6YWtJ3Btnwe1wV9WyKVxA9tHBfKck8Ua/xA8f7mG979+IDvD0xOKQKkrWh1i02EcjVFWS
OOR5qqQilFXZ8g+yf0UBTEhGFIltY0TYFSDn1fD3xY1wVpneIgQw+5PoVwtBmETlWVOGpIbLZtShS5SAJ0L2m7IXRjzODXCwIL31dAS98JmqbHsqFrFEFgEYQOeV4CHYZhW/uwz9jtLC6vxzi2i2Vlw4ecFoMd4cQVLfOSrowpyx5dZPiOSyMaOjNUgMaxz9VFjKUsOkdT9jW9NtRlQ56mJHlBXrbUB02bGrp2mM7pftjaHyZgpy1vcaqI/YMA0sDjNCcLwR+Glp7+PwStpJDY9inU9REEP/wrDGjTUlUNukxPHsYT/1Nag7XAdnBtF8txUPbgX7Wsp6Q8p5T5kMBve43unrbpT0L1FPSSEgaU1VBfKoVCCmugBJyez5MdVtkKy7FPobUnf+rAzO36YYra6p62bKDkRGB4CnYpLGtI2jv2gJowH4X78Ng6ozFtj2jNx/MyILYGa8lHQsCJWyvlUD16kvm0fYswkr7rhxS/HKgJllBDhZUZvKICNdANpEYoyWQ+59MvvuSYHOj6nq5p6fuWqunQZQ1JOkw9hTyF4hTKGqbZPCHApI3nSRzHpe81Td0yiT2icAjqdX1Lf0KR2a6D54R0pkOcgmlSga0syjInPRwI3ZgsOKezXbz5QFuI4kt057B7+BtgCsBxtWU+OcfxxuySDDqo0oK63dBWitYNuf7kGe/ffMtqt8GSFSPHYFkBkT2hpaHOc87Pn+NZNheXM8o84Pknr2g2W1okLTXr/T2xH3K40Xx4/JbpxZKispgsZsRxQG80jjVH6mFxkeYZYQSudNhtHxhzxTgEbRKqNgNxjsCiqB4JI4fRaEqaJbSdT2yP8dwYa+wzn74gr75DSZuiyLBUSFEcub3JOb+8BtWxPTxgTE1Vb1iGn1A6BY4Tstk+IsVwjTA9BF7A8jxAWlc0dc2bH3/Hy6tfEngTvnj1b5hPzhGipe6hLSpi/wrbEvjuhLTYUlU219dL6t033D28GUpAdI/G4ri/ZzZZYjshUkOeF/RdAzJjMjrn4e6ORhe4doh0MyJvzNi74sUnz/j3//4/sO9zhHOkISAxFbpuMG3C6iEh/fDIv/2TryjzipvKYkzO8fEe04cEYYtxYH9YYVkFJB1ROGaT1Dzsc5bLn2ONBGWz5+5+w2J8zYe7HxGNx9X5nPy+YDm7xAk0t+92xHOPetXS+jZVWbGIx2yOG4wcsbnZk1Y155fndI1NX+fEniHtFabp0NSsHnY0eUfXx7y7+XbA/ekRy6lFmxU0ScPEtZh5Idm2oEktzhaX7I/fI1QwcM3/P3L8cY+qFSFlRxBNsGzY7TeMQoUyNmVWs0sSgviI78eMxja373K++OIVTe7x9sM/MIomhIGPUpLPv/ict98lPN4/YCuLLNvSNi2W01M1Fo49I4om7FYrXr48x5aargzxPZ88bYnjEePwimy3pRFHIMWzbPKkZTyJQWYgK6IwoNMHksxlPr8kzTP29xlKpWTVnLJsaTAUdYnuOxxrzstnX/Ph5j1VYXj18l/x/fc/0FQO8/MRTZkwnz1jNJI0tY/rtkhhiALBZ59dgt2wWh0IAg+hHS6XVxTlnqY9MpucUaYJtt0wjedsHn/k/CLk+uoLVqs155dnWEw47msW9TXKyymrCtEFoFf02BwOGfvD4LmBht26IZn8DqhwxHM+vLul7jenfvCKOB62xM6uAg77G9ompW5TZC3YJQce72rc8IATNtS1YZ+2jHSNlD5ldmARTVmtP2BJZ0BTkXG5eEXkL6mrko4IFXZopyZyz5lMc/rGIwwNVfPA8dgQBEcE5/S1hWUFvP7sGZPwNZt1ynbd4rshWZnyeHdkPB5RFAYjG6q2ZLfpaZseRxb4nkVb1die5vKZR1FogpHG9iGcKJrG41jlBN4YSUOWZpyNLkjzgi6XyFqRHQteTl9S9iVn52Nu373leBhCX8Ibwiau49M2BZ3SNE1LdnQxusCyNFor8rTD822aqifwJcIGy7HIypL22FK1JcIz4PikRcpk4dI3ml472J5kf0wBF2l3GFOiVU1vLM4uLsmLFGE6wtCDYRcchUWe5ri2j+dM2W0SpLFpuwOFyInsEFHBIAkYRK45NRihMHpoU5P2MOGzlEXXd2jd0fXNaZLWY3RDOAKlBELahKGP7iW73Y7145bFWUwQglIWXXfyNSpFWfbUpcS2XIyWQ2uOGXx9cOpyx+B5IZ7r4douAoXjKpq2GWoibUVKQ2MEuilxnlmoM4umThFtwdgIbC3wu574eIZ+tOlajRSKIm/Is4qm1WR5TZnXFGVNUVYDoL4z+DgEyjvNXZ/4sB/7vH6atp5QRTy1Nw2JLjh5YfXASzpNO38qEuCj+O1oq46mzAcvrnmiDAxTact2h7Yt18U6Vc86J28mPPkshwln2z35c58epWYgLQwTO6nkUEP7NOR8SvaLJ3YtQ2+7tIaCgj8IeQ2inY/2haarMfUguj+2kz0JzVMV7JMNQkrJ02nReiALdH1HbephXiw4TYwNlrJxXG8oHlAMrX5CoIVBWhBPIrgDQ3+61/4jVaLvet6/e4elHOquHc4hEqEYhLZ98v/qoRRA6562rSnLE23hqT5VnBjAxsJIjZQC51R1a9sKacUI0w/yXkPXdShjI9A40kEqQ9+1pGlCpyukDEnzDXUT4fkKv1Ukjxuy8pGL+TWshmm80Ir9dsfiMqSpakI3xJMdt7dbvCBGqZyu7knbPSp38KVhHl8OafpozGFfELhTqiKjLisKT/Ds5RleNEV2Pp8sf0aTr9HNb1gsFuy3b/HcDtf1afUOWy0p0gpbWdhOTHrcgAipW4XjLFlOAgLXwrYd7lcPvHz+KaPxiLzaEgYOWX7A96fYjs900lLlLqZTdLVHmmScLxuSJIegIMt29HpPpw1e2PHh7hviOGa7WWMoke6cLDsOOwBtBaZGubDb75jNp/R9w+phRVGmKBlxt/0VTb/mr//zMBCYTCXj2AFpUwto+wfe/nBgebnkfv0DXWvYbhPCKGa3XnF99Zy+tUnSW8oyZaMLbDWnrvaMw3OqsmAcnjHxr9iWW8oiZXp5Rta02MZwOOxx7yLG4YyispmdfQb5irH/HN2NaKsOy+nh2uGfdEdbnKGtDbt3D4zcS559csHdh5ymH0gpTV/R6XuqTcEo9hgvrpkufR5334Nx8O0xdbWiSgouZs/YHO6Yxgtc2VOnJVX+wM2PJa3WeLHHm29vCYIRRZOj9JHUtLiuTaULHOPgBkce7rco5ZAXQz22E45RAVTFhrwr6TuDp1rKPqY3kufnL8k+3OCpCdt9ytlyghcq1us9X3zxc4rq/ycT1dFowbubW8LRglFkUxWaIs3JykfKOscVI55fvyI3Pfn+R0QrqfOG9KD55NlnPHxY84s/+TmKmPc/HIncCNeN6Fqb0L5EOZrIW5IFe7r+SFt6nC8vh+74sYcUMzbrDaOJj+5j7rc3JGUDak8YTDgeU7zAQkjD9rBlOrfJkhrbMhSVIZUWyAo/eMZ4bLHdHgkmM6haPGmo6xRtKjwn5uL8mqJI8FyPSbxkP
u84Xy4ZBztev/oSY615+/6evJowGlkEnkP2sMb3M4oiwwjNeORSFgdc12d2NsIOD6zWHdN5gDENX3/9ijTfkGYZbuhwPBQooan7nOXzACnGrB41pbynqSzCCPpjRehr6rpHWjlKDf3lunf4+b/6kn/3P/yPPHsZgp1iexMCb07avOXucU04CbgYT9keM7brFdZognR8DvuCM99hm+yJznw+e/YVb3//AWVGpMcVuhEsLiK2mz1xMMJSLtvDHcdqgzdykMZiNE6YjhzyUtDLAuUI8rJiPjmnqgybTUIQzyj2PSN3iYx8lEqYTUYcjzmBrxF9S5aXCKU5bDqUp+ip6VqFJy1aCb2R9FXLdnOkayWX3hK310ynMW23QxQ5uuop2x6pKuq+HK7iymaX1CgRY0mXUPR4dcGl3+D2HZvOp8jMqfmpoS5blOsM/qbeomk0VScQpj21I3UIDE05XPaPeUoUTXBCj7qsyQpY79b0XUkcxrT1AOb3vYD53KOsSoQctsZdFXF2vsSRIbpvGY0hDAK4G953QWjTNj7x6IzHxw+4roDKJQpjHvwN10WIkO5T7nu4keAUWhIoyx2CNAzoyK6zqWqN6TuaU8NR2/ZgOpTVo43Gwjp11huCwCXPSppaE08CnMeMbiirAgbfYXqs6TuJYzsn8SNOE5Ofmqk0PY2uiMKIvreou5Jc7HD8gHYJ3rmmt1se11tsM8LcC7RJkQaaEhqTcyweWDkZ7tmIKBoxjkNGQhC0Hp02zLoQgc3NzQfevX1E9DZN1hM3irGxyIqKPK/oWqC36FqNYxSusPkJ7/VUK/sTxHRI8puf7AWc2JrGPOnYkw9TIE4d8dIa0Erm6Wd0Q1tX1JUe6mBPCwslh6auwYtpD/QHZwh2OSd0lpASjPhYEdt2HU3d0ny0OQyP9w/xU5YYmrOG7f9hsjwk80/3LORQeWs9+XjFR2H+RAMY/paDtaDv2xMf90QTOAWMnvy3PwW8BsEthKBvG9q24wnkKgcVi5BgW0MIUKkhrf8UyBqm8IPdoixr2m6YeDZde3r9Dn7qj6G7k11CKjkE2KRCSVCWPdgm5ElVm2Eh2rWGTvcYM7BsPddHKPs0mRV4ePRaI4C+b0mSI7cfPrDbbvns0+eEoYMyGtmDbmGfJIzckr7pCC5eAt8BIEcW2a4hqBNm0Zxs+8B6leJ7Y4TtoLWD3XuMJucIy8aVAXWpGU3O2ZaPtJ6isHPGkY8T+Pzuw7c8VgV582toNZ75T4wwVFXCQ3xGZWsib0KkIsJJRFFWOE7Ew80jr7+ISVRFUxvikYfpBZ7y8WfnoHscJ+b8/At6cWC7OxCNxszrFqUtXMvgOZ9we3yDbkIsbIosIfTGfPXqL/nbf/hrpjMfIzRVYxG3E/aHexrTEQQBjV6TVwfKomYazimLAxdnZ+TllrP5lDCOufnwI5+c/zmOU+GHE/xwxuP2yItPXzE/k9y933LzsGWoG9P0smOXvOP86i+YhK858HsQa3Yrh9lkgWXc4T1+8hjbyka3LZIJvm3zyeVnvH7+F9zfv6OrEu5uc/orzWQS83if4EeGH27uEf4M0wpGwYT7u/c09YTFPOTh9h1nFy4uDt0h55BvmE4lth3SoYYArqg47BNcv+e43SGVZjKy6VEsF2cIqyY9uFw9d+j7Uw4ma3lsvuGTr/81ZEfy/MiQR63odYhnx7R5he0PdeKOsvDkmEnUkyU9sWyxAo0IarZ3FcFFRZX2yGZJp3PadkrXCEJvhAoF2/WWKHf56vOvcPw5q4ffcmhh/irGwsXzQq6v4PWrL/nHv/31P1NW/ssff1So3tzkbHcHxosQZUs0R6q6xFIwieZcXz5ns36PbTkoEXJ+JdC9pKr3jJVPr1vu7u9ZLi64e/iWaBlT5hazeQgm5N3tG774Ny857GN++OG3vLj+FNd3+PFth/YCQtdiMplgOwNjtMorQJGlJaG/QIopQdRzOBzoapebtKCuU457+OKLz6nMGs+NKKuSyeScXu8ZjUJs2yUY5Xz37ZbJdELdNDyu3qKUi9Az/uxPv+bNj9/zm3/6Nb/4xWdkRcP68HvieMLbImEcu1wsP+W773/LeDHn09dnPKx+4Oo6oDh6NHXBcnLGZALFsqBpE2aLBbv1juXsgsi/ZLXes9ndEAYuSknqpqVtS2xXMF3G/PjDhulC4jgzjKhodwlCGC6vYyxxxt3qd+zSb/DDhiCcopTEsSKMOVK1d3z/3Z5ocoZFirRKonhC3/aEUYxsbTbvH7CinvOrEduHB8r8yCfPvx5Yca1FXtxw9cIl33VkWYmRmslkiT+SKAnLyTl+MOOffvtPzM7OSbMMKS4JwxFFnaJ1x/74jjwZ6klvD1tsG7qypkw7NB29trE17PcpeVbhBhZND8Ju0Hh0TYVjeZSFTWYkQSB4uH1knMa8fHnGdCKwXE1VgO06Q8CqXCG6GKVGSOVj6Ll7eEBowcEWND3UvUdWa6aTCX4UsrrbEI1muJ4gz1pGsYUXhqz2K5qmIgyGD2WpPOq6GLZsLX+YtiYdTavoqgHp4loBgbNEeCnG1symMdv9mrLI8QOfIAzo6p7tdofv1UShjxE9Wkc88alG0YIi72n7lmgS02tNsyvxpEcQW7iFpDkajH4SCgYl1cm7qBmf8ExSCkrZ0/eCsqzA9LRdR6tbyrIkCiXLZQjCkBcljuvQ1i1h5GDbM6RU+L7Nbp1TlcfTdjsIqUnTnDxtiS5DHNdGiOojVklKiTaarusQwiEvcvB7xMiQuDmld8QWMXFhI8UAo+/qAtf16XQDQlJR0PYdnRbkyYr5TLLd1tzdPjCbxXRdR1VXTKZzoihmeeEynp8zm4+ZjpdURUeSljxuKvQ2pcpsdOVS7lP8ImTUR1RVS9N0VGWDaiUu7smb+kQpGITZkC46lRycmLB/OJI16I/cU/NxInoSwUIgsZDWiSsrhkmpNhV9C00DJjs5WU/kg6cCgYFbOjRIWZaN6zooZQ1cWXMqODiFpdqup9bdsKXOMFkVJ4FoPWGjTtWuUoF4ahwDPgJcn4SocrGehrEMRQ5GG/TTIPkURuq7Ft11P01lhRi8i9ZTda7CUgbTm6FCVnfDe0dZ6F4jpTrZNk4MXgNNU9O0DZ7vYxsHgTixWPXJn9sPDWK6G2qZ68Hb+/R8pVAn8a5OZQwS27GGxdup9QozINGeFnlPxQtVWbBaP3B/e0dVFIOtw/VAgmPF2ELT1ik4ikTnKKtmv3v38Xp52HfMpxFNVdAB0rZZjl8wnZ/z5u536L7k5SevkfcCIzMCAXVl6FTGId0xnXsctjnCNAjjMp3apKv3uO4SOyq4u/sRe/kVn/zpz1ndfcvqQ8LLlxcIS7Hbd/SiwgiDEwg+3LwjLfaMwwWLxTn7XQ5GkCYl47GNbwUcjzuyNCUc+wjtMRtf0jeGqs7Z7t7ihgojDbae8q8/uyS/3/DhkCN7ENrF92Y01R26uWU5vSA3HXmTEFoTyqphPPHp9QqhDGEcsVnfgvYRqkB3CtSB3bZmXDVE3pTkeI9tdfRlg6tt4uWSfX6P
tAIao6maI2/e/cDrV18gVh2Ob3C9FU1e8XBY8+LFS87OvuSb737DKHhFnh+5GD/jkD8SRs+peoev/+wvef+9w2p15OGwZVrOGMcjfD9kvd4wm4+R1gGMha1iluNPKOo7ivYR2/ol/91/82/ZHX7gf/3f/wO2LWnKGqNbLKuirxXa5BR5xmGXcHYek6UrBFNev7CpmwzdOGSHnGRfMYuv2faP+G6IL0O8UUcrOtLjlsnykuQARkmm0yWbVYKUDbayGMUe3W6ELTO2Dx3p+w3X0xham3S7RbcWMrQY+zFp12K5PlE8xnU71vd39KbgV3/3KywnptltefHZXxLGY37321/x1efnNOV7duuE2UX8L6Ex/0UO9Vd/9Vf/t9/8n//Tf/9Xljes2A/HHX2r+eT5a9rS55OXLwj8KY8Pj2ixpso7Am+K1iVhMCLwfdoWvv7FVzQVWLZGSU3bgOeH7I8borHHdpfx4f5bpBVyTLfs9lsCN2Y2dThmJcfkESlsPNfD9HBMdjRNSVW2jKI5RpTst0c836euasbxhDCYoinJs57L809JkgRpacrqSOgtKIqUY3qHsiSWBWm6ocgzZjObX/9qS2d25EnHevOeLz/7JY5tsX5MUBJGwRmfvLqg7xxsW/D82SuCICCMOhwrYDKDxeyM+fSaw6ZmFBssR2MrmzQ9Mvo/mXuTXlnSNFvrMfusb7zf/T5NRJxsItu6lKq4RT9BQnCBARNG/JbiV/ATEBISElRxhRADUF3uZXARZFVlZEZmRsRpduO+t3fWm30dA/NzMsUgGYBQ2XjLt9zdXLa+913rWekSKTtEuCOONX3/TJaHPD4+EkQK4U0wakKah0yyOc2pClFJiy8WLOcv8eMeqbe0zcDZ+RzjWHzPYzqL2Tx9oKwKJtM5Q1MTp4L5bEnTw+ubGy5eXhG5EU+Hd6QhWO3jEfCDH6yQ0ifP5sxmGWW7I59mmH5OL1ui1HKsK0J/SZbmHHcNRVvgGkuYuiwvZqhmQCmHsu94etzjJw5p4tB3CmUrokTRtxJf+GNnvROiVExVS1zPxREZ2iRI1YMTjK1Mrod1RiyIVDVhbKhKOBwKwsil6zTH6oAhQAQtnl/z8vqHBMKntz3K9LRNxf5QkM6v8NIljh/T9AXXl5+xWOUMvWI2O6NqDnShZwRLAAAgAElEQVRdi3AieimRssITEMczHN8/rex7hBviOCmOq6nKbgyM+BFRELHIz1jNFyQxTPM5fTfg+2K0ZAif6WSG74fUZY2xGik1x2JLVbb8x2IELP/Xww5jDU3Tsq8admUJ1sH3A1rlMpdz3Mr7A31hwBEYbUhTD99zkFKfBEWIUt6Ir5KatuvGbvC2ZT7zSDOPMEgIgxjX+dg/PzIrHWvIkgCjLftdg9YfJ1owDIo4jpjOY56e9pTFcBICp1pNA8tlRj71aUzPdvGEeaWRLjw9r8dWOOmz223x3RhjeqzboBQ4novCYG1A34+WDscR+MHomcrynCic44cRXb+nbncnkkKIcCaoQVCVHYaAQfekqeTqKuXzN3OuXqRc/uCM65/fMP0yJPpC0i060knK+WqGPzEobxgrWO0f1MH+gcfVdU7Vnaf1MXxkCJwmseMncBKN9gQucDi5ArBm9GeO00nvRAUYJ5+uMwopYyTD0NLUFVV1pCz2FMWe4rinKQuGrsVqNYaYHIcg8IhCnzAY/bEfA1nWGrSy40G/HfmzXd/T94pejocJbcZvbSxK+BjYGr3A2rgjG9Z8HKo6CN8bS2CikCiOCeNwrJYNfDwx+nLHNqxh/D+9+sTslYOkqWu0Vp8sIh8l8UeR7/sBjitomu5TLayUarSrWIPjugShTxRHpOl4+EuSmDDwCU72AAeLlJK6bhGuQxSFWDMeM8bmLvGprUwrxX674+3bb7n/8B4l5Snc5nJ5MSVZGppJzWTiUXV7zlaf49ieri1oVMFfHMb78v+8mZDGIb4T4toGn4B0ktPWFWW9IY2zU6GFTzrJaaqeKDuj0wcGNZClCVE4Zf3wzCSbcHE153B8IEqnrLIVy7MpQ1Oy21XUtuHYPVIeCpp24OrFBU+7B6I4wvcjyvrAfHHJcj5FSkMYRLiug+wd8mzGYdcRhi5dY7BOx+axIIwEm8cnPM9Dm540mZNPM9x+wSIP+OabrzgULTefnTPJX9K3I782clwWyxuq1uJoi2kC0jQkSizHsuDs/IbieESbkOnsNcbf4wcJURLTdFtUX7LZPHN+fkUSC3QV8Pr2FdvdO/bHNbg+h+LIYnmOCDS7/QeKekPknfHy+gWP949cXVyRZzOkbum6jsBPicKMyTIgSTNsp/j21/+cd9/8jiwLSScOfb/D9yOMCklzwfP2jtl0QTdswIYI1yXJErSSzLIYx0a8ePmar7/5BcVec3t1idI9UZDTdk9EwRJPaLa79Vi9m+ZIacjTGcLRdMMToR/hGBffSZjkAVoLPnv9Gj1saaoe7UxwnJbfvvsGYQSz5SWOkAQiwqie1fwC6254WheEvkuWeEReQvXU0vQhkTdnGAzSiQmpCcMFnvUp6zsS55YsnkF05Nt3e9LM42UY8qd/8u+TJdc83P+Kw/4tT0/PWM/gBAN/8sP/4D///0Jo/r+9/uhEtWyP/OjHP+Hx6T1NpbhavUEPFqslj4/f4doVWgqMgYuLc543PZOFwXNnlIeBODFM0nP+5Yd/xnSp8J0lqxcLBBP2h459URInHlUbEMUtUh6Ypjl1Z5iaFdCffH2KvqsIgpzlYobrzXDwGFRF31ZE0YI0yRFOxiALgqSnbWuy7BXPzyVpNuXpcct8NUeImPXjW5aX8RgycUKGYU2eLlBK8Xz8Cv/O4f63OT/9R7dcnGf8+qt7ZFcRLHLihUtThxh3y831iu3+AUzIPL/lWG6YL6YUB0uvnjg7n3LsdkzThKenR9Lc4Vh8wLWG1WrKZn9AU/Bwp0mSnH5ocWwBtmWxWKGVizVwc/EzmsmBIBDsdnv8cODy8priUOP4BYvFirZ2kVriBR7L4PMR4u5IPLcinWmCtaV4brm7/wWrbM7Pvv+KaX6G9Qxn8y94ev41rjtgjMMge6p6IKnm9K1kv99TtgNF5ZCkHb/4+6+ZpDP8RDFfnCOSiLvH96i2xY88zq8mNEeJNB1omE4DoijDCxqieUjbStAhcvAp62ccH5JkSlm3GAyDcogiQdE3GFsRBAHWd8HPkDpheziwEAEPdzVSWaT1KI4ls8kS4YbYSYrDFpcjyyTkPL0k+eklj9s9XdcwzRSBuKRuG7RT0w8OStVI5RDGOWE0pem2+J6P0RFh7NPT8ni/ZZUvETZE+C4OHtM8xPMknmOZTqbM8hVNUyN1z2ySMPgG1Ut8L2CxWPHw8EhVlvi+h0Cj+gFDR6cG8EdSgcWl6fpPXkHduVjjIpXEC8U4iRJjeMlxXIz96FMU4BiGoWUYFFGUIpU4IazG9anSA0Pf47kW4RuwPm0jmc8mYzWsHmuEjVW4VqMVTCYxcRzQ9zU4AsfxcBzJYV8zDBLPcz/B0j9yUT8ilLqup7qqGHLJRKx
YLnK6rkcbOQb8fB9HW7J8CSjW65LIzciSHOn39EPFzfUL5tMVYRjTdg11VxBHHmEYUncKowxaBYClaXoqPRD4FiM7HGnxggBHSJqyZbc2CK/BrvYEkWARC+ZnM9AJwnNQyqUsIuJDQlJGHA8l9bbB6SK61o5UAnXyk5qPk1SBNc6ntb/zkcH5MfXvfCQWjCLe5WPXvR69muYTwBQ+Cd6TJ9Xj0xp9nNSORQLD0GLLEalkcU68Uw/PC/CDEM8XJ0tBMDZICR8XB2s1ygxji5U2DNKcmKyjLcF1/28sWPejfWD0rBoLWmqQfPLFnnwnAJ+IAZ+Yr84o5o0e35mSljCK6LoOz+WTzB8n9WDQ9ENHrNQ41XZHS4tz4rraE2FCSoODRLgurnBOdIMAV3j44ci4FUKM01gAO/bZj4Er52RJtrR1zf39PU+Pjyg1fKrAHUW5pe9bjBEoe8eHx5H/u324Z5bmDN6EYXj89LwM/QGpPIIoxQxbPqz3+EWCUYYwH38fdx8+ULWKpcyoD4pUSUTo4ZqcafI5ZA111dAMB5omR3kRQoR0XU/b7lBNipgOlPtHtAbhCSwDT5sjZ8sXHI8lnnCJkhCtYozuiYKUJPNYbx754rOfsd48MMg1fTvDCRt2u4rz81uen3bEiUeahrz/sCNLLbvNnkHuECpj/mpFIi1eNMMPDcsoZ/v1I+ezz6jKPcOx5rMXr7i6uuZ37/8PXNExSV/y2dWSX//dr5nOZhzbD9i2x3d8ymJADwZHSPwgxFpD3ey5e/uO9foRP+6pG5cgCYgzl/2x4nx1Sdf9mtCd4FmHroHV7JwwEBS7gsf91+TpK4zt8L0I1UCQ+by8WHB23vN2fcfx6RuWnkYHgjI0FPuOOF5wcb4iy0K0mXDcbzEGbl9fM2jL1epLnrdr/tn/9td8+/bXTOIr9s89eX5N03Tcfyj54fffsDr/AqU1sndZzpc8bQ906pG//dt3fPmDPyOJXdbrZ6LYR0tBLECpI8+bZ8LgHCN2uL1hOp0QSUlzPJLkmqFryaIzynJHPg+5uZ6AdggDy7e/+oo8veB7X5wzyZZ89/BbHOGiDxovNMSRT+CmlMWas+WMsouZhA3D4QMf5Az7i3/OUa5ZrAKsG/ObD39PdPBpmn84Yao/OlH95bu//su3327JphaPc+7frZkvxk7kOPZxnIEglIRhhu+F9I1Dlk3BBjT9mtub1+z2B7QSyEHRt5YoinjebXja3uMlAV1XsFxckyYTtO5G471vGNA0VU2aRlgdggs3159RVAX6hMHJZzlJdEHbNfRDSZLGhMGUqmoIwxXz+ZTHzW8QQc8gOxbzS+rSkk0NRfmENYL9riRJYpJUsN8VJJnm1Wdn/ORn3yMMzqi7J+rS0HR7ri6u0Kqj7p5w/YG66ambHQ4BXV9QtnuCcAKEJyLCM0rUbPc1+/0zWRzy0y/+gjc3PyfzF9ghRfcRD/drEAMOEQgXPzE877egB7Su8NyM2Szn3d2v8eMKxy7x3JSyrDgeNZDz9a/fUpVj247Uiqo64jgWhUcQB+jQJY0j5pdT3mQzfpAveGwrWn3g+elAHl9xcfMFh2OBtgXN0HC5eEEgEqp6oCgHHOExqI4gjGg7i3EGri9fsDvUWHcgYoHraYxwuL2+ojV7Aicnz1KW52cEUYIyBV4oeHxQhFFOWe9whYPwI5quxiiFkSleYHl6fiafzoiTjOVyShKl1IXBOg5SK46HCjwXnAjNQBjkoBdsHo8EgY9gPIlLk+B5K4pyQxRmpHlAN9T0sqfvWjZPR4r6CVwFNiBKBIdiiyd8jLasn/YY18HzDUkUY7XEag/PjbDacH6eE/kB56sVruuy222w1lIUBbP5FOEKqrpGK4vjiDGFqiXL6RzXCuaTCUkS8u/pMfb/1+64/m6kQljQQ0NRdwzGIKzi1llhWw9jTgD8E/pISUOauuSTGOG5gKDrPJRxR4GiFdZahn4gywST6eiRHIaevh8nv0obBqkRQowram2ZLaaUZU9Zdp9S744DUiqms4S6bqnKnhOkE/e0xp0vMmzc0+UlyVKw25Z0jUuS5AxS8bx9JAw9JvMcHIE1CY4DUeAyTTNcMfbMzydzXOPTNR0WSz/0hEGI7ztINXC2usUoh6au0MoS+P6p7cdwuTon9mcMrcv56hVXl5+T51OkqTkcjjgmp29cympPUT2h1Vg1atIWrhRFviWZery8OSM9s5y/Trl5uWCSeSSpx3SWkiQ+rqvxfXBdM67VHU41tzA2PX20DJxEq3vCZ1nxyXE6Xqdq2pMv1LEfyQOnvbvjnBLvpxW7cEcx6QIotOrpu4a2LqmqgvJwoDoeqIojdVXQ9w1GaxzG1L4feIRRQBSN01jx0aJgDEoqhkHR95K+G8kHUhqkHNFYStrRa4qL43o4YpxSGge0tRjGaa4x9vdeVxykHKjr6pOPGvuHftlR6PtBRN8NKDkeDJQ6YcvM2MpmT5+DI8b0v+N6WGuRStH3/QkPNxIuhmEschikRA4DQzfQlAXrh3u+/d3veNqsMVqN/l7399+G6wquri+YrlK+ev6G9XbDdJFi2xbXxCxvb5kGU768qwD4xWpCM2zYPO0QYQCBTxDkLBY5fefSDQNni2tm04D99pGq3PB8vEOYkCwJaUpJXbY4ruBYvyf0z5kkNyThWEYxWV4yO5/Rmp55eknuZ0xnC6LE41hWeN540Ll9cUFVNbiuZpIzBh+FNxYydB5DJ/GDFtmGTJYZXWcIE8V3b7/l5Ytrtk8VRVFidE8mMpZnOdKp+PbbBy5WSyLPRXYlrmMoqo7V8gY/7NBNyTzL8OKQwdHEfsgkzXh9NsVpCwbj8Hw8spgs2K8PzCYzgkiRzRPqrmUYavqh5vx2Rdk80ncVUZohURg6XCGoy4owCFGtS55k1G3BcjknzwIe74+Eicv58nuU1RbfD6iqPffbZ9ZP73nePfD49AFrJMo33HcR0/QS4Y2V7E2t8DxB14ArRhLQyxc3FOUDh50iSQVyaJlmM9JJyPv7DW8+fwPWRw6A39NVIS9eLmm7Cj1ETPOIyAk5bCr+9T/7dyiObzG9R1+3XKymFN2R436P71p2zwd2+5o0EghfYjrNsTziIZnnU+q6YLurwQrKvWA2zQm9CbYrmFxdsi02VE2LFDVu17HKF9g4YPPwnkl+zuQsoC6fEXbCajZlmeZUnYvr+bTtM0ZLposVIgzBeCRuzp//2X/4D2Ki+keF6v/6L//pXxp7xHPPcazBmo48XmJtP3qchSYOzrm8eEVT+qjBJU4ilDJM0iVCJOzK32Gtg+pT2rqlH2rcYAAzI8ClbQpeffYK4YXcP72jbCtwJR/u70mDHOEH4GScn5/RDZpjWaOtZrt/IsmgOFjuH+6YzHN++/UjcZxRNxrhG8q6oWpKhGdQlAyNgx84aGfD//g//A3nVxmO49F1FW2niKIJDgYhPLq+Ic5mHI4KKwxhGBCHIYeDIkhL4mxCK6EfaoRn6IaOII5wPOh7xcP6A3XdjIJBJ3huSFMOZP5nrB9KNrt3+JHLL7/6DZ
unLU13YJJnaNNitaE8HHFdw8XZG7Qy4DYMsuZwKJhNz5C6wxEag8uh3FPWR9QwipfHxy3TeY5WDrYL2W4qzvILgulLnuqIID3jm2/vOQ4lQZwSpbDdHzlWLUE4ih/XC1ByoO0kSjs03ZgUX28OeEGMVJqm7dDK8uH+LcvFjMib43nR6EfabAmnDbc3Z1zfnNOplqrpUfZIFMaUVUvXWQQxbdcjtQQdIbTLanaO8H2GrsdzfaQc8F1LWxTMJiHzRUxZFbihxLgufeeg9YB1WubzOe/vRvtD7E84tJJN+Yzj9nieZV8e0aIi8FysSnlab5GyQRmB7wYEQcigNEZ5+CLAGIUXGoxxqMqCLPLJ4gzfE6OPz0pev76kbzuskWMftawIQ29cLbqSdmgYZEsQ+uNa1PTkecx8OsEazZs33yOKY/6N4xj7/yunwHUMQ9/iOQ4uCqkaXFeymi95Hd0yHAxKjlMfTsLIGoc49oiiMVwilUPXBxgrTr6801pea6YzF207PBESzmKieTZSD/oWa8b6QtfxGWSP5/uAx/NTgVTqkydQKUOUBChlqOvh5PsDnDEAk6Yx4ssW50KAnnAqeSJPboAQxx2nvP2g2Gw2OMJhuZowtC2yUxijqdua4lhilGF3+EDd7onjFNf1UcowyJqmbhl6lygMCKOxmMBzUnx3Oq6be80kXZGmEwYp2Txv0aZnuZrguA6y9/BEgh+4CGdCU2mq6kBZlSjpEy1WbIMtT/4d7o2DGwg84xEvfCZnEfPrhItXE85fzJhceEwvBemZIZs65HFCHAWEgeDTUPWTO+CEHTi1cH0sNBgnqi4fq1lPJK5Tsv73gveTP/b0Ws6pbUkID+F5I5TfY2T9WoWSPUM/eujqoqQ8HqiOO5qioK1qZNehpURgCXxBHAUnO4E/tm+JsUzAaD3aR/qOthtO076Wtu0++X61BszHBqvx/YxDdjuyq8uSTyceh080hXH17zOfL/ADH8ezuN44Nf2I4DJGo6WkH3r6thtT+647EhVOlgDc01TYNWPzlQU1dJT7PZvHBx7u3rF+eDjVp44i2Zx+HyekAkHg88WbW9zQcggVypcEIVxMviDJlrTG0ncVP1+3APzdVcpqlbJ/KnD8jOXyAtNBV/QYx3Con5gko/fbWEOS5GgFcWTp+47Hu2dcT4LKEDZnmmbo0uL4crRAWI/Hh2/w3SWeyJlMQ0Lfp+kOWBMwnUe0/SNDL2maiqFviXyPp4eKJFxxufoxV5dTBvnE0BjU4KKNRg6W2XRCliQY5SBcn/Pzc7wgoNI1rVJsDwe0dpgvl0hteD6u+eq3b7m6ukYEDnGYgex5vP9AqwHPIw+m7A7fcf/dnjDMuVtXKNeQZgGr1S03t5eU5TO/++YD2SRH6d34XYoFZfuMIzNmixnK1oDgYnWJkpI8PqfvC4wWzOZz2kYyDA33dx84O1+Ov2/lkWQerSwoqyNBFJMvztiVB4L4cwbOsYEAqRF2Spy5fPfte25vXxPHKVKNlbvT2Zzjc0FzONA2B2azJYOWJMmS0BMMRuH5GVW7ZTKd8c13v0F4gmO5QekAS0sazMmSYLQ5hR6b9TMvX97ym6//Di8JqPYlt7cv0G3Py+vv8eX3bzgUe7ApfX8EbYmDGYfyLUEwQypNFEzJJg775y1ts+fdc8ubNz9AOR1KlMQEDFogPAev7unKDj+KaY8NroGzsyV6cAlTjy+//ILHu3vm04T9viFMBaHwuFgs+PJH/+4/CKH6R1f/ff9E3x847hxm05DFMmCz+ZY8nzObL3h6WoNI8FyXtiuZLzOetw/M5zOEk/NwtyfOz6nrDq22XF+/Zr1+JIgCoiiibTakyRnffVPSmx19B3Gc4mM5m8fEUUxZSJQqKJs91zevURaatgRPsn4ssaZBW03XK86vPmfQNbiSpinxg5ChNwjh0nQtyaSm7N9jjcfPf/5zmqZjkl5waB/wfYe6jpjPLrl+EXEs36O6isPxAYTDYjrHenDo73m1uEV7CR8+/A2+OxBGP0AQ07ceRbUHleHgkcchzbFB24YsTZnEc/6nv/nv8D3Dj3/2mqe3DZc3HrPl57y//x1ny5e0Q00UGbpmIAy9EwJIMvSa0J9TFQ90izuMXHDYVRTtHjd28UMP14lougZIeHHzE0xn6fqKfVXz9dcbKv3EY1fyy1Dz2eKCNPPoyxbXSoyytGXF+VlCsa1wApd+aDkeaurawQ8jZOMQ+gGPD3dM5yly0Pzt+ndM5zHruz231wltXdKWFc/bI7ZS2PKRNmoIowm+DjByRdMOBJFHFPgMnaSsB7Qz0Jc9aWhJ4ilv79YkKQSRoq0H+k5g5Ii/cV3DbDbHDQf6JuBYSBzHEsRzfNfy5vNr1ps79ocNh3KPH3vcXpzTNR35xMMYSx7krOWRPHZYnk14+/6AIMKahraXxH5IVRcjPkj0KGsJPB/TC1q9xxjBzdlL1BCg+o7AFziuxvMdhOdQ1CXgYVxFVTUMg0Rp8N0YpQamkxXTZcx8NQVhTuvf8Qo9h9l0ymIS87w74HmCwPc5Hir6vkbHPZ5wkC5w8kuOQR+F6sdVchwFdJ3A4o5e1dMkz2iN42ocF+I4p7KS+3CDE3tYAxdOSqZTtIHBWAbRU9YPhMGUKA7p+oGPtabGQFNpPH9cK2tjTw1OGsd1UWqsYLSNRoiaPFtSlQP745q636B45mz+GnSEYytC4RF7U6LlnOOxYHd8pGoOhGGKF0YEymdQBt+PRtE1eHSywzolqpMMUUQcR2ipmc81USTZH7cYx6WnpZVH+q6j7Q4EWUDdD3RtTd+7QITWEs/VzOcXWDdgGI74/mRcOaOY+gusFvzq+LeILGM5v6KqD7iOZjaZ03cOxihEMNDpNUu14qp9hZGGsqrom4S+dbAe9HKgO1Uyykoi5Bgw+tiahDUfoVJ8atmyp4MJv1+7j7YO/oDIcFqnW073lMuogMfGJxCjauNjMt6idccgG0xl/iBkdLIT+CORIAgCgjDC9wMC3yeORr7rx9vWaHuaeuqTH3Y4hbrG+3NETI1IKaP639sG/oCi8FGgK6no+w7XFUilcYX3+79xnZG/6gk8xx/LK1yHQQ6ok2fX807tX8GIpJK9pqtq6nrD4binqQe0UmOT2un6RH/A4vLRgmGxSuFZzSSznN/+iP3dA+XQkaxccGFXHT+9hlEV9e6c1eQVQZRyffk5lfee97/do0VGGKY0/UB/KLGuwfPgs9sfocye9cMTi9kZQdhz2B6Z59cECIy/J89WhFHIoXiiLVpubxKeN/d4yYKrxSXv7r5BsufhQ8x8dolRLotpQFkdWMxuMPoDYSQI3BWf33yJbjLmUQHuI3//q9/iORnDMcGzLmerW7y44cOHDYuzFYF95qtffIeRA2EQE4iE9fYDaTRlmvZsNnfM5zOur36MDQ9EE0mcxGirKYsGz5+gM8sv7ypmU4c0YmQS64HdoWC+WHF3/0xT9PixR99JyuM9mZ+PtiXjUBUtN9c/IA2nxJdzmnZDeey5/P4rghC0HCgOktki5+vf/JLl8oYsu
+L9++9YnF3z5uUN9e6AUAmpf8MPX7zmwzd3XL68ZujHDV4Sa4QvR6vT3OXXX6958+b7GNPgaZ8QcGxAsW+o9AEpXfJ0wvuHLRcXOUEUsd9qJjOP7fZAPrnCcwOeyreUVcvV8pKHhw0/PP9TVjcubx/eEeXjYaR8uuM3vy14c3VLGNQ83zssoyvuikeS5BLZb2n7I6F3xWAhSn3ypGX94ODqjunljP7o0XZbynqHtD5qKCibjn/t85+zedzTSIWvBlInJPEzyuIZ103xgoz3m5IvfvKG9d17smRKKyt+9L0fU5fr/2cF+f/T9Ucnqv/Lv/hv/rLYwzRPOFu9JAkmHA47Pv/ilsNOYnVDns/oO8Hj+j2TeYgnIopyBxgGJdG6JvBTYj+n2Fc8Pv8O2Ru+efsrWudAEt3wvP+OtjpSHwoCclbzV2Pwpra8evk9HtYb2qEkSQOet1t2x7e8fnVLGl4ThRlRaimKgeublK7xiJIBX/ik2ZTpXNB1kqf1jigap3RRcEGcjE0+vSqou+OJy2dJswll2RCHE2SlyPM5coDpNGO7e+Zh9y33uzu+fXhLknq4doZLSlWWlGVJVRgwBcvZBMQERUM6idg9H4hCwXQakWUBq/OU7W6P40WkWcoPfnQLFlxXIvuOaTbFCzVKGaqqHaHjjWE2jxB+yPHQ4/gdyjq0fU9VVyitSVKPvh0oyyNPjzsOzVseq3uEahDCoT1s+OlPXzKfCj48bLA2pG07tK2omwHhNwivp+sGhIgZhphe9tR1QZr5GOmxXm8IopihdZCDRfaGrm/JsghjC25ulywvUt6+e8bXAYs0ZTU5Y7feEos5oDBAGIVIWaG1QPgC11FkcQTW0vUlwndIkjnCH1FAcRCxXC6QMqQZBpIoRXYatODV9Q2vX17R1yV1WROHOa5vqZsjWe4xWyUEUURRrulLi1IdTbsl9Hy0UVy9SDHDQNO0uAFYNT704jjG8xy0Bd+1pEFCGqfkacAky4mjFM/1cZ2AJE65X6953u7pug5rDV3fjDLSGQNN1jr0sqZqSo5VQ1EZmrbjeXvgnzB6gv7aLdG6xQ9dfC+layvq6khdj/iga2+F20ZIdZoA2Y+95OPEaTaLyLMpxyMMmnGdehI0Wkui0JJlLoPX83fyd3TJEZyO7fMRd+mhzwN24YF22fMgHvimfcuECXIHTT3wSVWcAkZaGaRUn6pMXcdFK8N0FnH2oxkqsDiio+9rpB6o2xptW9q2BuNxvnpBFCTEQcpkskB4LlKPkPyqLrHWJ01WCBFTlSB7D6VbmuaIMRZPxFgLZXmkqRukami6JxwXtIXnpw1VuSeMIlwnIoympElO1ynKskLKAc8NT9aHmjAU+L5L3yuEE2FthTYlxrgMAwRBhOcJlLTEcYDjSqqqOLFWM4qixViDyKZUseXOfmAtHnCvfPzbGGwsDBgAACAASURBVP9zF/+LluT7DuEXgvlNxIurOcublNV1QjS3eKnF810862KNPHmQnU/Czn5cl39Can0UVx+rYU+Tyo/IMOCT2rOn0e7JxmEdcD0xAvSFGIONLriOxZphbA/sG6qqpCiOFMcDZXGkPJa0VcPQ9WilcDDjoSr0RitB5BPF7ifBaK09WQokauhHpNWnytvTW/g4lrcGpcfWK6UHjFafOK/WaLRWWGs+tXl9Et1GMgw9TdOw3+5Y333gabPm8nrC68+mXF7lLGYxy0VEHHkcjx0fBSoOn2wrDiNz9fImxct73IXDMp8wIUa7iuxmRhymLKY5P/hmB4D+t39MVQ14kUcvj9RaE3oJWTod0/OJxhOwWuRMspTy0JEkU1ZnS+qqGTcAbU1b97y4PmeQB65evAHHkEQZD5vfsZheMp3M8f2Bhw9fc9wPXN4sKOoHjDEIM+Hi7IxjWaClixAeUtV0vaasv+P+wx1//7fv+Yt//K9i3RbTx1zdLPBFShRaij1k0ymd7MYpuK0odgdS1+d5/UwQTpnMPepdw/X5OXEWsX3eoLVHnIVoKxl0Q9ccMQMkYcqmvKNoS87nKa4MySYZQZCy3+6oyp6zZUJbS25uv8fjw4ZFPmc2jcGkWCvxfYfl8pI4jImDhKIokUbjuiFNUzGbxtSFJssz2r4ijhOK8ojnBLy8usAOkrI2LOZnvDp7QdU+cuxdbm5/ROKlbJ7vSfIA2bsYLfD8HqMEUZgQunDYP1Aca85Xr6iLHuFK4kTwdGiJgoAwyDm/nLBZr4mCEOv0yMEwm09pm5KmKImcGaEv2G8botCwfdxxffEZWI0aGozvEXpLlqsFXeUy9A0fHr6m7GuMFoRhAq6D0pLDoSVJYqyOEKLFCo80ybBuw9//8muurq755u1XFH1Pkl/hS0mUh9hIkEYTwiAnSFOkDblIM4qypix2JGLOPJ9Q1FuqbsPuueVf+dP/6B/ERPWPCtX/9r//L/7y8nxFHCagxImTqJhMU46HFmEHlmczHjYPaLfBdROm0wWO13J//8zli59w9+GOLJ7x/u2G9+9/SbgAX/o0w0DXKw67Bs9TCOFzPPT89Cc/oixLsD30EcvlJbv9A7KrCQPB8+6R1ZmH202p+wNBDLe3twxdDVKTpBJXuDTNQBD4tF07TsWU4GyREfoxVdmTZCFN05JcSGpTsZrdkkcLDocjxtGgZuTJAu25+KFkt27B78iiiED72MFi6hnb+wBrB/pmwLMOy8k5i9kUqXbUUhPHKRhwkVgGLBLXFUSRxY8CtHY4HGt8P0EqByESnndrppMlcbLizZufcr66GsHWckc+Tblfl9Sqg8AQJCHW1RyLZrQdNOA5sHnc8MX3bskmKc+PO5JJwtAHKNXhByFGCPJ4QRreAAHPz1tCPyTyLsjzC4pqjW/m9K2L6w3s9xLhOpTFmFavSotrXFxHE3geOrAI2+JgmSQLng8FJhgQKuDzl58xyb/g7btv6LsWP4yxVlIVDU3bEiaCrrMs5ktefvaSvjPM0zlpkqAGTZQmJOEcoQYuz5f4qWCQDdZ2REGKG3kslyu21ZE4aka/mu6ZJAOh43B2lhAkEU/bPVpJ3nzvnPPrjKasmC7SUSg7Ma4H2tW4juZ8esXNy9ejELOWZuhwBpcsyDDKIRABrZbUTcU0ienbgiiJ2B8qjscW2RuqqqTv5NgsZWKUDpHGMKBopeGpOPK4OdB2iqbp+E/TGQD/pdyhbIcbwfZ45FBVKLfDnjiYt94VbhfSS/iUmHbHpPV0kvHq1Yq+txxLizIfhcm4JpZSkmfApOdX6iveHd9TFJq2VbhioKx7PD8lCARadTzebZCdR2QE/W6grc2Y7D+JC601Un2Et48CyHEctNJEi5SbP3lBZQ90rcZ1Qra7R8riiFIeGIVwNVm2oK4V9+sHhmEAC1VxZJAlQeARBiFVc2T99EBZHRhkOT5824YgiAgCOzIJVU8QaIpyR9f1BH5AHOTstjvKY00gHDxPjXXGsiFJopFZCHj+gLWSKEw4lmuadscgW6SW+L6LYwTWejRDQ5As8eIZvapIwhhfRERhQhTH4A64nsYBsjQln05xvADhO0Rx
hJSKw25HWWw57va4NsSbCYZVSfzCYfY98F8947xsOXs5ZTmJCJea85dz8mlMGEAceidfqoPjnr5bc5q6Yn6PCTvdHo51P1EJxqnraBP4vXo9pd/s6E39vT9hrJ713LFr3T21WXnjMBFjNEp3dF1DW1fU5ZHyUFAejlRFRdvU9F2HkgrHWDzHwT+VFkgpkXKE+n+KkH1ktTojqm/oGqxp6NsK1Xc4tkPJFjkMWKPQuj8Fyxr6rqdpKqrywH63Zfe84bAbaS5JGvLZ5zMm04QkyglCS5Jm5PmUDx+eUXq0e32Kdjku1oEojDj7Imd3+8DTocSahHQRQ2B59+0z9aHDovnZXQnAL1/ecnZ5hhcG3Jxf8bD+dqzsvJ4ShYbjvkN4EUmaIxyBEKP1ZXfoME6A70k8JC+urslXOU4qaaWHq6AfFEM3MM8nSFmT+AvyMOKv/up/RgsNRjOJlzhOT7ktmSxu8eKxEvaw35LPz7i6ekGvnhDJeOj4sC7Icg8RCYrDE/M8wIsMrZIIv0frEse4WAau01uuJiEinBInAbLr8KIEK12MaTge1ry++RLXtSip6LoDWubMljNcYwgcjzh6QTyd4YhhtMOFDkX1gGvn3N6sCEKLdgTSSnxvQpZExH6E1T55fI2UG5oKpnOXsmwRQjDPpgSMrXxnF1NU33DYa25uPuMsnyJCj6opcd0AVyjazoAQnC1umOVzuuaZh8cNZaVZLc84W81x1QSrNdvnDdaO9cNFu+Hdu3L0zkuPQXmEkwQjFWeXE3A1m4c9r6/P2TxtiKcz+u5IXUk+f/Oau/tvubl9TT7NSQOHaTJHJx0f7jbMZy+ZLwOKRmNVQKsMi5cJ3377KzzRIHuHxWqCdSS7wwbPyZmv5niRJk0mDE3N07YinMR4dciLz18wtCWqO7Berzm/PaO3iqKU+KFivy8pCpiEHq6zZ1NLhDMwiwKsNfTW8FwfcFrLn/3j/+QfvlD9F//7P/3LNDnDFS6D3PLw+ICxHUW9oWx25FMfPxB89/aO1VXA8+ZIX0M6gb7O+eHrL3n34St66ZGfZ2Tnc372j/4M1AzcAascXFGwvt8Q+RMuzq7J8ymOaBgGC9rj7um3fP75GwLfQzgxq4sFWmbIvsHxOpQzGozjLGO7PZJONLGYkkVn1G3N5nlDOvE4O1vRtP1YIRhEeKGP56fsy5r5bI7nxjw/lKSTgO1+j+P4iETw3eOvEbHCEiJ8MFYRRDmP6w1GNGg0wh+YLV7iioirmxesn99xaEqassYVYxvU2eocz0tQqiNOR3p6FF4jdUE+meI6IWEUUVcdZ6sbXr7+nOaoGVo4HJ5oh4JeH/juuxon9NkfSg6bLZ7r4eHjWEEax+x3O6LI5ers+0zyc457KKqCoTeURYvRY7Bgu6lP9XItu+M7hKcIRITrSlTvkOUJvRZYa1CyIolzwjBHa8HyLCHxJ0Shh+sYEt/DakmnepLgnPXdDieomUY+qJjz5S1NGfGw+xY/NGgtCGcuqecTiRwXODubcHO9ZLXIEVZzdpaTTZcoRxFPIi4urhmqFm0dtNsRRYryaLHSZ3Wd8fbdO9bFhvkiIBTwb/75Of/WX0z5J3/+OZ9lHk0B3RCQx1Om2QTluvTKULUQxiF921H3BX44kPkTtBF0pmW7LTgeS0BjjUJJh7KGQ9PTNi1WgTYORd3Q6o5Bd1RlgekNSoHnRZyfvcL1LG1X4PsBkb84AfUh8H0CIejajv9sdgbAfyXfEUQRsT+hrp5ZzDMiP0OIlom7ZKlX2Fog5bjGdU7eRTUoFrMJL14uKEtJUzvoU73oiENywGoWS5/vkt+ysc90cqDpBuq2RQSWui2QytB2Em0tx6rFjxKatEQXGqcMx7agj+xMOInWjyNWgDHI8pQ3VNkjhg5PZBTlHmUKfN+nH3q6rgPHY7c7ogeP3bbCGMN8do3vCxxXsj8cORz2dH3DevNAWVbM5xN833LYNwhyBtXx9HyHVJZeqvEBtpzSyprHpy1DPyC8gTCMqZqOfmgxtqGq9mjl0rY1TbsfU/PuFKkkQWTwgpHJFAY+Zd2wLRqi85rG+5ZffP0VzVCwOM8R7gRrXJIoQdmKut/i+RlCJNR1zXrzjkEWRGFKJ2usqHCEABsQBAFhNPJ5j8cDXddT1y1SKSpaDskGeVbBZYA/cVnmPtOLmLObCRfXUy5WOVfXS7LMx/MN/xd1b9JryZae5z2xom93f/bpsr+Xt+6tll1ZEgwaMCnJ8MBDwx57YvhX1N/wH/DQHtiALcHQwKYpU2YVWRSrbtXtM09/dht9s2Kt8CBOZkkTDgwYoPbkZA4OcgMZEeuL93vf53VtA9syn6D6+om3+hTOwmB4sg4wGE/MWD4wTN/jp97bDZ7+c8e/P7Fbx3CX+SEQZhjW6Ik1x3/3/fCsn+gETdNQFSVFnpHn6ah6VyVK908BMEbFX/zuOhoYiBOP1x/PWJ26BKHNcjHl8kWI60kcZ8APO6ChK2vubzaUeUaRHqiKgq7uUFJhGMOIH1Imvu+TTFyKrAVMPM8nzTLeXW2eglzi6VJ+r1Eb+L7H6jwiN3dMoyW6lJg6Ip4uWE4nuLECU/D978b1//898zHFBMdPMIaauqjwYwcbDaZFmnVMooDDoaDpLGR3ZLft8GIPNfTEic8kiSiqkrvNhrI0sJxRGGoryfNnP8b1PC4vvocdLLn6+pq/+Muvef69CyzV4/g+wkswDY9vbt+iRUX90GKqEN3tKdsaf+7R5iWqLGnbI6tZyHG/4bjbUpQpCJteSg6Ha9RQ0eMgK4VnWxSd4tXHf0TfCMLAojZS6qagyHtWqxV1c6CX40vqMAhOVmt8byBwY9r6KVCoLU5OT7i6vuPNm8/wwgNFsacuFclsjuf73N5cM50NXD6L+c2/PeA4PsJtedzdYAuf07M5v/3NNwShiZYWStZcXd/zwx9+H8+BvKxYr09p8oqskCxWIWm5wfcW7NMtqnexTM0hu+Xu/p6L1Yy6OODPXDpd8rDJKdsDJgZhaPO4O+DHEUFsYJkuYTBlMg/xA5PbzTWWMKnrhu3jFowKN0ywPJsq3VNXA5PpgsgP2B8yFotTJonPMTuwy3KazmS+Etzc7Lm/2yLlDi+UKOVgGQ5d7bFaR+x2N2jlYZkuQRhSdQeOxwOJ5xEsJ9zfXiMIeXGxIi8z+k5g6BysGNNxaEvF4ZDRVAOHek9hSByv436z5TbdM52dYlmaydyhqTtc1+N8OuHj7/9n//AH1V98/r/8zLSmfPn1rxkUTKYxVS3HVVez5XjMuLu/Ja8qTBEim4quq+hqk93+yGHzlu//9IeUe42pjzAxKLcth90D4XxAZhZBaHN28grXc5ivIrTWXFzMkarAdkqCyYqqe2AxnSKY0ps7etXRdimyrynKjrTIkGx5fNxg4tDXFrbwyKsDg9EQTX1sM0L1FtoYqGvJgMk8Ht9+HBFx3NWUdYoTLNjlN5TySFbsqMuWuttjWD2+69O2HWlxJPR8LpeX5Icex7Koc4OmKvj0zadsNjejD6Q4cvHshHn0Cj20pMe
[elided: truncated base64-encoded binary blob (PNG image data; contains IDAT chunk markers and verbatim duplicated runs). The stream begins and ends mid-line with no surrounding diff headers, captions, or labels, so no readable content is recoverable.]
cbTdEbT7UEoQt+hrRviaIzrTSjbhiAIOF1MiBwXX4fM44xEWhzVsbvb0eYWxwS0hebm+olfff6Wp83A/V3BZvuEUYL99mjKqtuaKIqPW3tpicKQaZrxhz/8h3z26e+w3t6Slwey6QlKarbFDikDfN9DqIFivWWrckwIoZVMozmOzegrQxplvHh+QRC43G5veCi2tCpiUxS0uiELR0TZhPtiw/vr91itqZqGYhjwApcIhW867u9vuXp4olEtThjjRw5DV7B/3KDzFq0H/DQiiieE0QhrWrb7e4qyYjo7JZnMGbqGd2/fUChDWR+g1zhW0KqWd/cPNByfG5GbUumGQe2RUcLMmeJYy+Zwx9N+zxdf/4qnzQbhJNRFTRadoFvDZnMPxsXtHebpigCHar1hqBSz5QXj5SXK5OT3G8TWY3co2B8G2laDAMMxvvJNdOQI4pdIedQSiw+lPVdKLi/mRGGAFEdck5QeSoFF0+kt2WhCGiYYo3j/bo3A/bBMdf82PvOBxfrNIU8gcB2XZx+dMXm+YBQu8VyPt++u0TZin++ZzKecXK6woeZ33g8A/F/Kcjl+ze3Dlm/9zj/iabchL/a03YF+aHFciZQOQo+JYgnW4+TkhLvriulsDE5H0x3wAnjz5h7wKesH/NCh6R8Zho7vfvYjqqJlv98RJzHjLGYyjRllcxwx5vr2K05PV1ij6TvFKD0SQQ77e7LkHNcfuLm5ZZff8fj4xGq15GlziyM9+kEhpWE2m7HePIDTE7kLXA++/PIr+s7B8VrqysEoS5pNkNIyGMVikVLkO8p+S5y5CGsZjIsxNePR5Lg8qFuubz9nt3lCoKnbHiMUbVtR1jvWTxse19cYHfCD3/4DBlXTdT2L2Qnn5y95+/ZruloQxTFKtXz0+gVN2xIHY1aLMbYauH+6R3outne4+M4Fh+09eb6mb3pePP+Estjz8vIzRlnM/c2BV+cJ6/0Nh0rRtpZk5HKo9/R6z2K65P5ujVU+b6+ueP+m4/QsYzLJ0PaO5fyUultzdX1gMZ7w7dcxX/7innDisNu1vDif8NWXnxOJObNlxKHq2BceXuyRt9e4Ycb5+Tl39weScMmLVynb7R3xOKAcJM7EY7dvWUxfsZimzKchX717x7/6V3/F2flzMHt++otfM5nEPDzcgptT1S5xsqAdem6vHxmNIrRuuHl/oOkK1ps90vHRxiWMIk5nz1kuF4yyM9KRzxdffYnnJnz7s484FBv+4If//t/9QfV/+p//5MdWtmTzEEuI7gqE0nhBR1lL4ghcvyIIBEIOVHWP46vjiVUMOBjCaKDZH409s2lMWxzZjsbu6XpDbWr2xT2h57HZPiKGmOk0wnodSZASuSNC12e/3yGEy6EoGcdTZuMpD7ujyebicsFkMmK7q3lc7+i1pekP9MbQqD1KG7TpCQJDNgrxpIM1Cg+PLMuwcqCuFH1rCQKBUhHrx46+fsTz4VDk1LVhMs94+XrO4+Mj+T7HjyKGIUXr/ngZayyg6JXA+GArjVUCJ3Vom5ZqaAmMxhcxkZMhJQhfILEMZkAIj9Dz6TiW0xCGpq9ph5aqrJhNRqhGMNgOK+ZgB6p6S+AndH3P0A6URUk88o+FtLKlqLZ0jcZRMZ0e8B2X6qljcpbRNBmuheVZhtEVWMnd/a/YmRLjlFycJ/R9TBhEeL5kvT8O4nFoETYgyjIW5wH5vsb3Z2jV0NYVUZjRDS2+bzEqADrauqWsSw57BU6P43i0VYeQDlocM5DnZxc0TUlZ7XF9h7JoKHJDNlmw3VbUVQWiwxiOKlPTcvhgbYr8kDgKqdscP/SpW4M0IdHIRXqK8+UzPHdEq1p21T1WSzASRx5Vo25a48Y90XhgtgzYrQ/c39X4gY8TdGzyPdgE3VlULVFDhXCPeeS+tzjOcVgV2sEOhq4wdGogiiPqviTMNI7bE0YhatAM2hL4Auk6aOFgMfzn8yNH9V+UTzgIxmlIFGc4TsiZM8PXIYLg+FLFHKH6gOp7MN/EAL7ZqEIUR/SqpW4brvWer5+ueHjafNhGq2MUpjuimhxHojpBtdPYzmCGCrAMvWVoNYEXAx5NW2MtuI7PRjdETsjEO8oRhJAorZjPpggB+zKn0QODGeit5s/ufsW92vH69TNG4wgvFsyXIwLPwXciVAdRMOKj1xfMxy7LNCN0Y5oesvkcP4wZ1HHY3+2bY7HJAy/McNMlVdtTPd6R5yVPmxwjfMI0Rfiaqq1pO4fZ4pKTxQLTbzkUO9q2oW9y+rZDW0uxr9g+3aPNgBIej9sH/uIv/5K/+uVX9BKS8YhkOmI8WXCyeMHq9BTpeQyNZjodkU0zbD8QhCHGGnb5mtnplNFkxNX1PVXZIsUx2hAGLrvtGt9xieMMpTrW6wcORU4yiSl0zqEqcayLtj1FXaOUxXc0t7fXfPH2ntvNhk53GG15//6Kze6BfdlQdiV5taFsKohi8EZ0Q0NVb6n7Ad8JaMqGRkBrBvbNhn2zJa9KvM5D1IIkmeD4I95dvQc10CiDB0hrGbqOy5MJvuvRFDt260eKbsfNzVeowCMez2gOBabv6ZyA6eKC8Tjh9v6Kx9sHgsBCI9GPBiNdttuGplVIFzBg5TdX8sdDkBQghYf5BtJvFLEf8urllK7foLUhzWbUXUmb12TjhDAKcZQHxuGQN9zfPgL2eMCzmg/drGOxStgPWWs+INUky9MT7CTgz/71r2jte8qu5mS2YLa64MWL7/Lyxfe4unrD7z0cqRp/OorICxchLJ+8/A75oSCKfZTe4rsOgZvx7t0tnpsSRYKmMlw+vwSgqrZkI4e2gXdXj0SRS+AmeJ4izz2S2cB695bAnvL97/4WnSoo6i0GRVV2VNoydIqz5Qn7/QO3XxwQAmTgM5mGtOWWoYoIfY/J3ONpc8swWDb7HavlGcYa7u/WPH9xySHfIMRAX6Yk8ZTtvgDnwKAHLi5e0XclVsP5xZxOPBDIKfn2mIs07Am9U5Su0Z1gMZ+g+5713Ybz0wm3t1d0RjEoQRhOSMcCpWt2Tz1tXyD9Gt9JOT+f0dcFVZHz7OwTBtXy/vqKqjkwnsY4nuX2eoMeNK8/+RgjYzbdwPvia5zweFPoO5aYFJ0PJOGUotrw8cvf4nQa8bO/esvLz36LUexxfb0HR5JGCX1dUm56XNelbnvqZkd+yPn08jOeXYbsdweqpqYu4PLkOzz7aMrt1RbPAcdvieOEbnhAq4SXH72iaJ6IkhFJ4lPXhqY11G3OdLGkzjXPTk543N2QzMZ0XU5dtKzOZ8ShZpFd4sY9n/96y6vPzvjFl1/y4uX3+cGPXrJcjNk+3aGtYjGLgYJ0csbJbMUkzXj/cEMmY9zARwqJwCWbWm6vc04vI37x818itct44qOt4OZqx/nZKa9evMRxHYQ93gz/6Lf/yd/9QfW//m/+2Y+FsiyWI959XpIkksBPUEYzW4U4BLRti+N4OCLBIUF3AqssSejgBx7CxIzHMZOZj1EBbtCRZDGHrcXxXeLEUhX1Mf8qQrzg2KrsVcE0mzOZhFg6qnrDbjswmWXs
dj3aHrBotvs7kihlu32kVz1BEFLWe+qmwVpF11nC2OCHhtPzEaqTDMrgOwlRDEXR/Ib/2NQFrudTVwprJa6NKfYDWrXsDztkoGn6grvbPUmQMomTY9vdKNJkipaSQdZopRHKwSoFNiavckaZT6cGJlHCdDKl3ivKpiFbDOghI02yY6lHNPS2RZuacpCk0QjHM8Sj9JgzimOMa3H0QEfN4nKCmzhE44jW7JjOE4a+ZTFPmc0WFIeOIAyxQnwYeitmJ1MGzzKbeqSxR9cGKG1x8PFihRem6MFlkswJwwnnZy9puxrj1rSdQugI3/eo2oHHh5JBWSQSM0CazckLyf3TDqxDoxTbQ4PnJUymS3pRYWRD4Kb0vcJYi7YarRVFnhNFCZt1wTAItNGk44S8qijKnCBysFi0AqUGQsfDlxFdPRD6LsIKjJKAxHE1w9CT7wa6wsW0hoenB1pzwJMJo8gljl280MXNBrzUMl0F1HXHr//6wPrWIYqSD7FLF3RKXwtcRzD0LXqweMIn9GB1EmNth9WWruuZLeYIV1PWFX4kMbIBOSCkpOsN/dATpRLPcQl8Fz9wiKOA/ySZAPAv6pLl6oQkjfECF+H1LEyKrxMEAcaCNubDChOU6pAfYgbfFKyEEDiewVhDh+bWDhxsjwxA22NbXGhJ7KeMRgFgkE5AW4eE4ZhsFFG3ivxwfPgKKXE9l2wUkWUJZycLXM/lKn84Fr78lMEaequo2prtfs9bueWt3PKz9Vv+ev2WQ1+RJAl//Md/xOWLkw8SywhBgBf44A4MpmJQA8MgsKYjyXyW55dIxyV2PZajjDQEKTqs6lguV+z2hv1hIEhcmqri5t2WIJywOFlhLWyettxcH8j3PlKAJ1qGtqbpJa6T4hiHvlZoIxi0x+7Q0JQKzIfmc7fDDSRhIIj8EFfGdG2FtQNDZ9js1mhhGU1O8OKQp8MOaSMcNyBvCjabnKqsMUJTVgNFrinyiv1+Q1EX7A8H9vucth0w2sUgyOst13fvUFogcUmiiDgZU9cd+7JE+j7DUFMX5RHn1R0oVc+myjkUe7quP7Kuq93xEGNj2tZyd3PHYXNP2xXcb+75i5/8jPXjBo2g1BYjXF6dv6CvK9b1Gpn4RH6MH7hoPZAfKvZ1w66pebzf0Nc9vuNiA0uUrgi8iKeHG07mMyYnCdPVileXLwjiAeNpmnaDtA35ocIxlrCK8LyI7aahqBscT/ytce0DdsxYgyOP6lTkh62ntSSRz9l5RlXn9KpnGBmS7yX0DwWqM/StgKEHjmSPu5stvbLwYfj9JqsqOGZfjbUfClzH0tbpfMHpeEo5qlmcLJgulyThDCcMiBKP6+trtO744X0NwPYPfoBmy+Xlgpvbd3Rdw8cffRffd2ibkrJuOT2b0uo7+k6wWI0xvcP93QNh6CGFw4sXq6PtQA4U5QacgSg6inSqJ4fz+TnbzSOWEeHIo9zVpNkE8BmlGXV5PDD3asN8mdDsSkbJDOn13D8VWFfhuAGe9fno5Ut++eXXfPT6JWkywnMjmrolTRN2a0UYCaQrWJzEYHwuTj8l9EK6oaIqDzgy5ONXv0VxqDEalC5x5YS2Ucwm53R9hRWCusqJgilGaAZKnp52GO1g6ZGOSxSmnCxeMBqPuLn/Ej1A3ynSeMRonrDZ7bm/vaetOy5OP8b14frtFtdNuTib8bO/eM/Q+UymPg9XX5J6ktGoJxARzVASzSPccMVoltH1JV+++QW+lzIOx4RhyC9+9UvidIzrC1wREnoXOJ7lxYtT/Ah2m4rRZMHT/obNfsN4kpLGS7q2JC9qwjDGjxTWGqrCxw00+aHi9GzFZr0mcCek0QmLxYIoUtzebrg4P+flizO2hy2HbcX56hxVlfR9R90Kfvjd79PvS6reIKTmW999xrvrn3DYaL7z2ScYO3D3sMUPPV5ezOjbAUdECBUxXYR8/vYL/vAPf4/d7paH+wPf/91nvH9TIgh4+UnK/f09ZpA4iWa9zlmsxggc4tTj6SnnZz/7HD8U/IMf/Yd/9wfV//5/+K9+nC0EVd3g+i7LecYh35BmAVW9p2oV1jhoA64TYHVPW/f4nsR3XPphoG16ppMFQm7RumN1mpLvDH7gE8YOZ/NTfH9M3uQkI4lFogeFLwKMHpjPZpRlTToOyNJzrNuDVPSqQEqL58aUdYUrE7R2qNuGvnMoy5Ysm5BkPq4XkmQBh8OW5XKOI3yC0B5PE73PoCtcx0ENkr6rUUqjlUtxaIgiF9Vbqq6kbHu22wHPdUiCANNaPDel7TRlc8CIhq5uafOONHLp+g4zSKTnksYOQrpETkwYOUyyBEYKx/Wwfs+zixPGoxhFRRak9ENFGie0amDoCpzBwwtS2qohFJYo9chrRacHelsgPElV15ye+ZydJrx49pwm7xHGEIUefd8hXc1ymRHFPtpp2e82ZOmEIBQMrBFyh8EjzTKmkxFS+XhJxfrpjiRMyU4dvHBgkSwYZSMORcP85BI3SKmrJ6TomWQzHFnz8nTJJInIfI+zUcssLNCq5uFQYEkZig1OmDAYjbYDrivphpa6qomS4980CH2yWcL9ekOcJUj3ONoYLXBdj8CJcF1LEBxzb2XVI12D4xmKqiUKUqLMJZvF9KpglER4rgsotLVoI45GRtEySgWe6Lj7yqDaEencJxq7KAtN6+L6Ka3eU7cHhsHBWIljXZqqwXTw4uwVy/ESVyeU+xLPb0hHEUiJ43toazDC4jiSKA5BKDwR4rk+oFBDy386WgHwv5Z7HPfDRrlo8IRgxRJfj49oKmux5sjbM0ajVY/8zUbIfGjgS+LYJ4xC/nX9N1w1D2h7BPY3VcfQDzS5YvuUU+fHE/Xm8YDvB1g0dVMyWEU6SnE9ge85BIFLkoTEccJkMkVrS15U3LV7Qs/nq/qev8y/5GdPX/DTp6+o7MBqeULghx9axZqhH/ADn8l0RByHKGVQRtL1mrpteNo8sdsfaPuBLB1xtjpDtT1nqxl+IOhVz+WzMxbTKXE0JhsnfPT6hN/9vY/xhOH2zSN+GLM7FJQFBE7A4+2Gula8en3J6eIE3whU1/L12zX3DzltV4EA3wsoS4MaHCSKIi9pOpf5/BWrk5dI49GXPcI4WCuo24Fm0JTlhn5QBI7LevM1eVMReSMOVU7e5OwPB3Zlwbo6sCkreiHQroO20PWSquw5FGu2+y2DOb68jRVIR6PNQN9a6qZnsA2tbtnka8q+pOsUjnQouxLjW6JJRtVAXlYUVYuRHoem5tdvP+d+m4MbEWQ+eVewyVvmq485e/EJMpvhSI9QxtRDT43BDQIcVzJaLnj2asIogZOTc8aLBcuV5Fsfn/DZd3/E93/4nl603QAAIABJREFUx5ydv+Juc8+br94wHk+Jp1Nu1k+8294yNJrIt9xsbvjzn/+CZDTndHHO2E/YvLkhVmO6vudxXdB0Guk48Bv02jdfxf8vo3ocXCVwupzz7NnsuOWfCoaXHu2wp3UeEXsJpieKMgQ+0pXc3T3StBbpOB8Qah8A/x+oFMYYrAHpSKQQrE5mKFlTZw1
ZOsZiOZmcs1yds1nfkiYOpxcZr3/xBMCfLwO8oKMfIMtG5NUDwmS8fvlduqHB8wN6VZBGS8bjGVGQ0TQF89kJceIThws2uz2TyRxtCx4e39EUPlGYIZTP2XLF0PV0aket9kjpEYc+SJe/+PP/l6YvqauSj16fouzAdDTlxbMVug8ocxfHN1w8O8PzPbpGMp7EKD2wXLzk8XGL62mscambHKs9knBKOvPYb9dE7jkfv/qUd+++oCwODKrg9PScqmzZbbacnk/pap9XH52y2e4oyzXDMLA/bHj27BVRNMVxBI4rOTk9J4gdHLfF9yf4gUXYMVVds1rN2BfvyNIJQvb0g8CqgMhPGaVTFsuI+9stWgfMVgGfv3nH+ckn+KOWprmhf3jiYmX4/d9/RXGjGIqau/U9MlTc3/2Km7sN+6qnHxJ++INPuL//krywrE6fMV0s+OLqb7h7fEc6SXGDAHSIlAGXz0+5vb/F80Jm03Nm2SUXz6bcPtwxHZ+jxYbdQVHVHZfPnrE6Sbi5fmI5HzN0DVdfP7A6GbHZNESJout2NGVH1+3xs1OmwQwZlPz8F7d866NP+OLqlovpBUYfaLuWr989sTibU9ctYSy5uX+PHzqMgxWzkc/d+o4kjkEI1vkdt+/esZhdcMiPB2DfDyjKHi9QxLFLEiUE7oR3D1cM3ZTpLKOotyB6Hh5v6O0ebST/6O//R3/3B9X/80//tx8bV2GlJEo7hk7y/GWA7xuGMmWyyuhaj7axhP4Ia1uiMEHpDuk6eGTMpimq33PY9jx/7XF/11BVFXEcEkaC3W2JF0ucOMLzS1wnousNoRPi+R1FYYjHPcKBtvN5f7VnvrSYPsGagCCI6HuXXh2vO7e7PVHi4vmSfK/p7BacgZOzFcNgmY6XSBGx267Rpv2wPe0ZZ6tjoWfkUZZHLWw28elMzaG0WCEJogRXpISex347ENmBOM14e/UAgO9CGsyoO81H370gCyYkMfihQ5b6eKFLm3csVilCtuw2LbK3CL9j6AQPjw9Ivyf0R9TWEAQuTTdQmgPbomAURrjSwY1Smt4yGEPb1GAcYn/OJPNZzASzyZzdrqRqap49OyWINONZQtN2xEGA6ms++vic3W6P5wt8X7Ft73lxseJpq5hMYg6HB4Ru6QyMIh/HqdmrnCDweX66oqorUi+h7grm5x6jScLT/QGQ1LsnTHVDu30HXcUk6phFmizSGB2x3nf0wscaiOKYfbEnjkKMUTjO8Vo7in2iJKOotiCOSJa6yYn8FE/6R5e9NEhPHEUMztHgpFBI18VzA9wkIUwjPJdjpEIpepVQVw2qVozDGWFqiaYuwkm5e+9wsjohSDVKtgjHAekiPeiHGq0VfW84tqQ8HGfCbHGK0Q77dcE4G+P5DjgSK4+ShnZQ9MoQxgmee9yadHWD73ksZhOU7sFIRsmUfxokAPzfIUinYzKJmc5GfI9XBFWCMRGOdLGYI0tSHjfLWP2hGALWCrQ+xgCiyEdIxfv+nsJUSBx2jzVmqHGsod47hL6LKyVNqck3A/tNcTwgTmZcfvSKk7MzIteDdiCQLqMsww0ilBBstyW7bUE/GO67glZY+qbj5OSUP/qjP2KUjXjz5i3b3QbpwHQ2YbFc0HYD200O9piTSlIfP+RYZpE+oT9lGFw0HqPphIvLE05Pl1jhs94OPNzlFNuO2fz5B/OQodzWPLy9QyjFxbPnCFcSTxK82DIw0IuUz79+pDMt04XH1+8e2O5bxtMAJQbUoLEd7B5qioM6IsSsQRAySmfc3j2wedgxTif4boAZKqyWROEUxxUEUUo2HfPm6Q13RUmYrnjcPrDdPuI4AX7g0fYlebFDqQbTDajOHjmSxgUpUbalV0dDnTagrcERIZ47QjNQ1DVNZ+iNYdCWdujZ5g9IV+D7Y5raHBWPqmNX5jxtDxzamlwpirah6XrcaIqfLrGOj595BNMxRhv6oWbfVLSmR1iw0qFH8e7qPXmz56svf8rPf/VT8ragL9Ycbq9Z3z7xeHPNX/yb/4df/fIXvLp8yWx+SjSOmY6O8obZfIF1IrLplMnEww4h89kFXd2xfb8nHGK6vmK9LeiVi5TukZ8qP6hOER8+b+IDAxWMAVc6vHh+QhQbWq/C/YGk6B7Y5fe0poVMwj5kGAxSOiijubvfUDct0vnmDfeNgvgD8koch1U+2NWyNGC08ng/vGe3u+PkYkHoW8q2IowkZbXh6bHmd++PwP9/KXbEaYh0Bg67jmyU4fgW15nTdhW73YEXF79N0/SsZufcvLsjTReo3udp+8TN7RVFfcvt/RV+cCTbfPryu4Rpw939A2k2YzwZUTY7gtBlu96TJA5FW3F5/oyy3bKYT+j7R4pGMR+9pugUh+KBi9MFrh0o84r8YAimEhFqXp19SprMmc2mlPUeo2PC2KNTdzgyBCsQeBjlk+83jCcht3dvcH2N7414eLxCG4iCGWfnS7748heU1VE3vTiJWO/WODJgu9uC2xGFE0J/ymZzRxQHzOYjqirnkN/jBz15XuBIH917uL4gTmO0SpHewMP6hqvrKx7ud/zoR7+PtT0P92tiT3Foduw2e377W3OeHivuH8/46Nsv2dQHktWYpte8eftzpMh4+fFziuI9ozCmaAri6cD9+g6kSxCkfPqtVzTdAw93WwKRMFukrNePDKpnvbllnGXcvL/H0mKwdLpmvd0imHDyHP783/yEj19/j9lsSjbyuLkaWJ2Oae0Vv/jrd6xOVjytb5iNL5nPFggqDtsbktUMHHh8vyWdzlm+Cvj6i/f0joMKDX/vH/5Dfv3rz/HwsG1Hs82pipah7SnUjt2hZDIb0w4909Dj5fkrtsWaw6HCd6doHtmXX9OULm0NXqwp6oLEPeHTb5/xxa+/Zj4/4dnLE3abnl61/ON/8B//3R9U/+Sf/48/tlriu6CGHS4BfTNQbhyWK0m+G9hvD8SxwHMHrN3hSsknH7/CWoeha+lbzelJhEPIdB7xcN0grOTsfEY8GfCExPc1N3cVo1HCYj7CuAojD6SpBUfiuhnd4LN+zPnsOyvub++5OJ9TDZZ2qOlUxz7fYwU40kG6Bj+UzBZjlIXTy1OkDHl4vGE6GqNMyWazR+ASRJr9pqdvDYfymjBIaBtNNglxZExnepJ5QNN1dJWh3FTHqz8Fh2pgkx/QAkbRjL4yBJ7i08+W+L7DNM64vIyQfo8xRw3nyXJMZxqy+Iy7u5rvf3ZO24ZHUPZ8TBx4PD2syXVOX+dIccR6SRsSBQFt1yJdl15ppCPougbfl7hWoFuFtT6bfUUvGkarkN1uwEiBH0coZUgTjyj0GdoIKT2mU5/tfosTSFwdoYUm8hNcxvgedK3DanyJ6znc79Zcnn7CkFvSUcTEX7AvH1lv7rHKI44nVE1N09d0qmddhchsSWMCHO+CuhGkSUQYjmg7yWiU4vouSmmGTtF3A57rEcUh0hHs9yXtUGOtS98NBL5kPllhrXPEgruCrj3mgJXqcT155IEql1CmdLbmcKgotyUoH6U6mi7nZLnkP/gn/x7j1HDYai
LfoytcshACmdA1A9LtieIQ1TU0hWKSTfEdgxgcAmfCODvSLZpeYxzBulpT6Q7tCxp6BmEZ+v4Ya0DTDy1JmBA4EdPRGFdohNVI4eB7EQKPfxpEAPwfdqBpe4bWpa57Zv0IX6Vgg+O2R4I1CikEWg248oiH+2bTaq1FCIvjWVo78H545NA1GGWR1hA6Ps3+OCgp1VMcSqySzKcxl2djXjw/o6s0m9stbz+/OmKbugEvjUjHGWmcEHs+nifxQ0mahAT+MedntMb3fJaLFZ5vCCLFaOqRjQJGk5jRJCYIJdKRNHXPzfUD1zd31E2PKyNG2YTFaspsGTGZTMjLmvUh5+3NHQ/r/GjUyQJaWh42DwwCmn7g6fYWxzOMlhmBH3F6fkrXlRzuH5iNIj755JTpRGL6nMBKJllKEEiGvscRIXbwyLc5vh+yPDmhbmue1ltOFqcsZzMcVxCnIVGSEEf/H3Vv8mtbmp55/b5v9e3u9z7N7e+NiIyMiGyc6bTTZblcUDQDJoVAMOEfYZQSAyTEAARCKgmZgRlQVRIwoIQQElUMsCy77HQ6MyMymtude7rd77367lsfg30zXaOawKC8daQz2NJZax+dtc67nvd5nt+CKlN0ZUPbtuyPBw77LT///Ave3u0I3QgfSd9WVKWiq23y4kjd5jiuhWGeCufLqiArdtR1g2X4SAOKosS2A4LYoyxzVGcym5/RdDW7/RbLsVFa4zgBQTSi7loalSONlvTQkiQFTVsjDR/LNmiqhunsjCj2aVXF3e0NTdVg2w6H7YEya3GC6IS0DmLaXpFXDVevrvnlX/8VWkjm40tsz0WYcDisWd5fc3P7ilW+oogt9mXKu9cpn33/exh+R9V0UJp40qVo9twdtizXa1QJWX5AaEld7aBTRP0IyzRYbUuKSmCYpwexvu/fB6p+/RJIIU/9p2gsQ/Ds6YThUGDEmsxOubm9o6lzVNuSdy2N2zCoAlzbwbId8rRhszlg2iZacZp/tQZ9uqb4NZXqfePAaBwzuHCoZkfml1O84IzDLieaTNhn3yDNlseXP+TJFy8B+Ol5iOpM6vaI6wywnNOmQPUwHofc3t0yGV4wHQ/IjlvqvCOIR1TtgUePzzFtRaOOJGnLIHrBRx9/m7arub9fM5wYvLn6FaE/QgvNcpkwHM057LYYtsVktKAoeuazh5TVDdIITxuLoiVJEnZpyq5d09oFWVFiYXD98hW7VYrvWby7fcnlgyckxYbpbIxtm6RHzXy+YBCNyYsNWbbHdT2kVVEUJX07ZDAcIK0WQ8ZUTULbvh/6peDu/pa2VTiezeXDMULW7DY52VHz4Ycf4ToxZdme7IRqT5E1RL6P70YIK6WsG3Rr8ebtV1w+Oud2+ZqLi0u+892PubvZYjsGaX5gvbmhbBR/53f/ENF1vLy6RYYXFCpjnd4RhZfslvdIMj54+AEfPF7w9OwRkWvQKoN4GNKpmvXmFXE0gTog8IfQ16jCJRz1rJcF08mMF08+oSpSHl48oih37I4ZXqi5ulkzGAXU6oDWNcf9CcTyzdcJ3//+xxyrV6SJxWQR0nU9nhOTHTtco8NRPqOLCevrJYetxapN+eH3X3C1XHH18gp7kJPkp+rAqtyzXyfEwQVZtePhw3MG446b5YbvfvtHSCtjfZ+x8J8zmrq0qqWuFXm1IYoXNFWL0BZNbRGGQxazCePhCNWWdG3J8r5lsz5wTA88evqQH3327/7rP6j+1//Vf/4TU5cMvAnUEYPIQQowcFneHRGdweOH5+i+Q2iI3CGuEeK6Dj0NhlHRNS2W0+AHDqK3iGKTQfSI2blgfBljKklX1YwmIeOJi9AFTaOQjqY62Gz2G7AbMGpsyyWKK4qDhxAFVhRQty2j8amSCW3StQ1trYmiiEOyQimIozlJmuB5kiJLT8xmnaJaiRdYbJY9mpSzi5j9rnt/7gJ6g5vVPZYbUNcNkePhWRZlVYFhEQQxcRTgWYL5JGYynzOYO8RTi7vbmm9/7KE7k4tHDxkPPRzfw7FPFKUnTz7h/PKcILT4+uVb/GFPr2vqVJAcU8J4jG/5HHYVkePiGTaGK1C6pykLDOkiHYnnBVRljkBjSBclNEl5wIpCZO9w+/aWBxdzbu/XfPD4E/arlKdPPkJVLcfkcKIUWR539yvCYEhVl1i2x3gwwsBgEJ/x+NEzurYH4fHdT3+Ln/7Fv0AIAyeMsH3N4ZDRVD1VkZFmWzrdIu0xtu8j3JRjZlL0OdfrhK/ebFDCou4kWbY7UZoahW27J9oUNnlen4J5hkEQBtimg2GYuJZFXTSY0jw9BDUNoTMi3ReEfsBoMOC4zQmsCM/0EbLH8U4eOlNIhDb4u3/wY378ey+IJm+5P15jujaT6ZDRWBFHPk1TsTsm1K1NGPeY0qUqQKsK24aybCjKBCF7XLenKGuavkF6mjSrKLKOJi/oihLftymKguM+wXc9bMOgq2tmoyGWKWmb/lRDo6EqM/6j9x7V//5+RVNKlLJRVMz6wcmfqq33NB3jN7zztquxDfm+5F/xPktFrztsx+Dn/T1v8lu6TuLYHr5j0Raa/aaibwscWzAajpiOAs7OJkzHMa5jofqKYBgynU+4XEx5/uQR88kYP3CJRwMMx0IYmvEoZnE248GDM2bzIZNJjBAdN7dvQHQMhtH7JH2I7i2U0pjmyZcmhcayIIyCk9JcN7RNR1211I2maXqKomW9ylguU+7XKbebI6+ur3l7s2G767lfZtzuNmhTYnoxadOhpUHdNijV4No2wog4Fh3SFIxHCw7bls0uZzgeMB64eKbEkgLLsnjw+AmT+ZS2rTCEiSNC7m637NIlTqw55DuWq4RvPl9zPCoM22W13FAeWvb3NRfTD/nxpz/gcP2Gr375Jemxo6o6qvp0X9K6pywLlHLx3BG2aaP6mrpNabqMOIo4XzyjKTV11WOaNm23o9cNSvX0SmIKl7YrUKpGaBNJSKcMkILh8JxhNMMxfQaDmMEgwLcdsiJFAYasqPI1VV2Tly2b5S3b1Z42r6nyjirvCOyIb33wMY+fPSPLK27urlFKIvuIrnMYjM+xwilmOGHkzpiaY6a+4Hwxpetskv0R1VREsyHSMKEXeKZEGw3bfAVK8PB8RuQ4dOuTB/d+XVI2PaYpTrVmWrwnRnGim72vPtW6R/cK29JMZiaVk3E8r8jrDVmdkNc5ab4mbwpcaREeDER/+jlto7m92WHazq87Bf4lRfXUlNH3p4oqhGBxNsKfNtzxGkmEIQPS4xXH4pqmvaVvfSQO33p12qj9X65mMjnDtk+WnLIs8YOIMBwQBWOk1Czml7iO5Ljd8+bNr3BCmygYMZ895uXLN6xWK6azMx4/esL+sKWoT6AQSzjMRhfE0ZhaNaw2W4SlOOzXCENjuZo06WhKi7pNGMQLWnXkbnmLG4anxgFTIgxFEFq8/OaKeGgy8GKEYXK7vCdJIR56bLc3dF1PFMYcD3scOyTN7zgm9xRFw/F4RHUGaVIjZAkYGKbDdOFSFjsOuwotFHnecLl4wuJsxqtXrzlbnFGkNb47Q
OuOILTJyyVoyWz0lKZsefR4zHFfojqHMJzQNhVt1xNGLrariMMJf/XTz5lMInwvYp9tmc9jNDZhFLG+T9AuPPz2iK+u/oIgmtLXDk27QZoefhxwdf+nfP11TiMN9CHj/nXO+fhDDPNUeYYBPTZa9nhuyPQs4IuvvqZrQn7rt77Du7dLpJHx7voV2nDxQo+ySkB0NG3NbDrl9auXCOFiujb7ZMvDJ1OurlYUeYXnuTx+fI7qUqTOsFTHIJixW+Uow+fZZ1PevnyLIz3ioU1bpwysc3arX3B7fYXjDjl//Ah7qNkuj/TYOISczWKqssd3TJ49e3gKktcKLRStvObdmz2PHjyj1y1FmfH00W8zGhX8+Z9+jmH1GIZJWRik5WvC2CYeufzwk78FZKr/4r/8T39iGwFNpdB9jW1EWEZEUyVMR48ZTUcciy1atPihh2dFuI7N/phTVS1JluEHNr//d3+AqWdc39zy4lsjXC/CcQ164WBri8vFBdgJZlgwDs8oNx3pQbGY+cTBQ/LmGj8I2S1rwiBCypph8IzlIeewTzB0iO46qrLAtVzKKmM4nKJqkzRbkRxTsvedjE8fveDqzQ7bloxGY6SwKcuGs/MIw7ROa/cyoet6XMfB83wsw2QQRUwGcxwjolE5jq8IwiH73Z7buy3zs5iPP7uk1xn79R2GsghjgZA2ih7HDKkqRdMcubh8hNQxoSv5xVff4PpQVjUCh6qG8fkzyqzD8aDKCiw3pNIS3UmKWtEHHa5nUJcS2+2xLJusKBhNYmzHI88rbNsmXRfElo/EIh6ckScplxeXFHnH7c0VSrVY2NS6IckOHI8JKMXhWGBKjWN6HA57OnVah01HEau7W8qqxg0M9sUGy4Gm2RC4IVV+oui4roPtelxMIw7ZATuOUH2F7wQgJZutwjJt4shHSInqFI7jIIUky3Isy8W0bExT0neaIkupsgLbcHAdC9OwaFWFZZuM4xm61biOhRZgmqe6IHRJK8CzY8zOJI56Pvnhh6zyLf/i53/CdnnP6y8FhqGpCoUhHcqyJYp82rJm/fZIWUqmCx/T6OiVwLE9BsOY0cRDmh15Up2QrCrDMQSedAgtB09KPBljOxIwcO2QyXCMLQWmNHBcD8NwyMsKKSWGtOiV4D/0Tp2kf7zf4TgCLUxMp/sNOvW0klSnOh1To/uerq0wDfEb3r1G0/c9ve7QpmLDnmNZYmibZJtSHloso2U2G/H88YLz+YA49hnPQnxnzGA44dHjZywuFywuZoyGQzzPJggDvMBHWgZd11NkOUVWkyUlWZZzOOw57k8hHtu28X0f0zx91uFwwnQ2J4rD9zVAgq7r3q9ZxWmAsgw838a2TTp1wg6nRU1WVkhMLMvEtA00iq6tMbRJ2yl6oyGrKq6vU969W/H27p7VNuWwy3hzfcfNqmCT1Vxv1mz3Nfv0pHZ7Axc3HDCM55ydj3j69JzHT54yCkOmA5dPPnnC8ydzAg/MoKEw79mVNyTFjmOxJ82PSGnjuD5lWbC835IXDR99+gl/8Pd/RFUfefPqHtX01G2O58bEZgxlC1VPk0GaNBRlTduAaUQY0sU0PKqyxnVNZtMZpmVRlTl13uEYAwy6E/q2h7qqEGaPUpIwnOJ6LkUm6Bqbi0uXwTCmbkGpjLJbk9YJvhOTHwrSrMP2LZJsR55uyPIlrU6o2oymrDBMmySv2d5v0bJGWD0SRRQP8YOIeDSk6U0s6XHx4AG9JRHOhFJLhN0zHQ3JdM7V/RtkB57hEHgWvhVye/dzWiXwzDPaZUNXK+7WOa2S/1IdlfE3PlVOA/6vsapaawLfZDDqKUjIzT3DaMrN7TeYRkPkhRRZii8DomZA25wsFavlnt2+QhgSDacg1XtEq6B/H6jqT+9pgeOYTC+HeI9C2rqk6zMenI3YHjZoZRO6F2x27/jt1ame6q8etAhZcXt9eB9AhNnMYTo6x2BAVTYEfkBXmQxiB0v6NKpFYmOaJpbp84Pv/SH/5h/8Ax49eMBXX77k7nbJcNRzPG6RRISDGWW1IwzGfPbt3zndv0pIsorxeEjohzRdzWAgWK03uM4E2/FBGHjmJyRZgmrBs3xif0TXOKw2O4Ql8XyHqq4JApe+bYmjgKZUqBb8UNKqgkE8ZTiYU9clnqdxPZvpPOL69jVJuuLrr94SBgar1T1heAr5xNGAroW27rCkw4tnTwlDg7u7OyzTR5oFVaEZxQGru1t031EVBnH4hE5mRJFJdii4mD7AtQX3NzXf+vjbKDp8x0b3OZZhcXe7pulCopnHF6//ghYHsx+hVYdpKhqVY5kTfD/k5t0t290Nv/jrP+f65p7Af4xhCywvIhxMuVr+iqJd4bkxh3xPFD2k7XKOh4rxzODd9QbLd7DdgCxPKMoM24vouiMPLp8ThC5tl6PNjk7t2W0KpITZfEFZJQjVY7slw+FjBl7I1a++QA9mDB+Muf7iC/KsYjqf8OXrK5oy4/nZM54/GbHf3OLYPpbwiTxJWya4Qci3n31EVVYcc3BMC2F2OL7Pfr3nfnXAdATnZ+cYuMxGD5EmNI3m5vYVDx9d0jQdk1mAMtZ4voXvjfjir1b8e//O34LV/z/5X/+Xn3TKQGsTOCkctmVQJJrj8ZZMVawPa1Rv4nlD0qRnvV3y+t0thu0T+hOqqufZk2+TZAWvXr5D2hW7XcVicc7t2zt0X5JlFQ020vVwmymLyOLRxWMkAcNJyGxxxqeffczmXvPlF9/geh37Q8I2zSjzDqVa6HtMJF1fIk2LvpEUeXbyd3VQpprANSnLhPlsSFs6CEPTNC1haNN14sQelgqhPaTQTMchVSHYbZf0XUfXKO5XNxwOFdOZw9g/pTNnZx8QRjFtvyFPdwhMbN/EsXzKzOKQHIlCB2G1HPcFghGmrXn55gtKGzbbjOEgwLM9yrrieFQ8uTBxteTx80fYjsvt1R1BFFJlDZ7Tk3cdrmgpkopBdEZVHzEsRZH2lLmir1wMo8HsHOquIW0qVustVVcxmA5olUKSEHiStncIvSHrVYugoGoETZFh2w7DwYymKXB9g+u3b1kurzm7GAECXZW0SY4lLXwvwrF88myNF9Zo2ZMcSqKRQ1dvaA4GujfpvQ4tLYzeZBhHpGn2G0593eRo3dMpRY+irU/YUik1gRdiYJzwughU37DfZcSRDaJlvUlIsxqsGmSOYQmULhHCQhkZSdbwq2/uWB1WBANBXmpqoFUlvutgSoFWCqU0bWPSKwOhfDzbxndt4jBgGA3xXR/6hsD3EVKjO4GlTVwpcQ0D17KRlsHwfEhdKVSj8V0P6JHCJPSGFEVDlhf43oi6PA1sUpj8B64PwP8mejQ1ju+impxJM8NjjEDSqRPS1DBMVNdC32Gc5KHfrP37XtOh+Hn1Fa/W7yjTlDLNMbXEMmA0CIiCAKO3EVgEA4coijFMnyCMMGwT2/M47Ev224IkrbhbbdkeU6q6pms6bNPE9xx838b3bOIwJI5jHMcBYWAaNmhoGkWa5vR9Q9uVqE6dUs1a03U9ddPS9epUOt7Up7Q6UDUVtugZ
BDaGDciOrmkRysAybQzbRhpQNSVdI3CRaFXSAbZpYdk2LSfvupIGVSMwZUA0iLFsh77XlE1LLy2TOaU0AAAgAElEQVSiUUAvNH3Tk9yvKPdbwjBiOBjjui5Z17LJGopCUuU2m2XDfl/jWA6Ba2NYJuOLKdFozOJsTtft+PP/+0853DUszsZ4gcXxkFHnBWBQtwbadNmnJVevdhR5T9+D1ibDYYzQHW1xUpurJmEQBjx5/IzJeIppdHieQEiFaQYgNL2uqOsEIQxcX+G6Bl2nSfM1hpCEsY3Witg7xzPGLO+3HA8Huq6jqkuSvEYYPk4c40QuTbOn7G7IyiWOPG2dhosLzucP8ISLtHqKYkddFEizJEk2FMUBWZXsju/w45DYGbA+3tHXFXbQg6exDRNZHSizEsc7w7eGRE0EWnNzf6RpT7VQ4r1HVAuBlCZSiPd44L+xt8SRg7tQrKZbympNXVdIwyRNcso8JysTXt6+prB7xt0MbXR0SrHd1EjTRPc9ghOrWwp5Qh6/r1eT4tR3Op4MuHgx421zg+lqlGjIs4pOG1hOTNMVDIchn77NAHj5rRFldcDzfRxH8+GLj1gtXyO09d56UWAYLoOxYHOnuTj7hE8++5RHj57y8cff5zuf/h4X549RjUeW73j16mvub1/x5PEz/NDg61evMKwQpVpE5/OtDx5yvfoFsh8znZ9RZR1PHj3lcCgZhkOms2fYnuawP+K5PqYh8DyFaiQ//p3fpao2rLdLwlGE600II4cwGJAec4bhhGEwp9c2Qiq8wGR/SJB42JaFZTdIsyZPa7Jyh+lAnqecTz/A81xUp3j8+PxU5ZiUeJ7Dg8szBuEYU0qquuTdVUpbK7L8QJ5tqcuEu3c5WpksHvrc3q9RysfzepqjxMKiq+DZ86cYrmK1W/P67SsC74xhPKduNvRCYVogtU2Z1BRlT9Wk1H1O2SXAgKRM8byIF89+yJNPnxHMwB1qmh6KUpIXa5JkS5FkDCYxGD2H8hrLgSS5Zbdf8+LDD6m6hCRN0b3Lb/3gx0RDg5urWwzpMBwE3C2vSbKCVpUc9xmua1HXGaJ3EJg8WDwmTcDwQuKzGCVTlndLvEGNMBuqPGc0lYjKZBqNOKx7PD/ADmrWyxVD+yGLuQ+WTXFcUjQtZd9hYLJc3XNY9/zej3+b5eaWyWyGwEJ1Hr0+UtRHFhcxzz98ym5Xkec54KI6getENFVNV1v82//GP/jXYlA1/1Vv+o7N3bslpmWiZYvtnEqhpWEyWFzgWCZFpTke706K53FP1xYMxyP2SYpBTppY/A9//E+IpiZOqPiTP2vxPZfVIaVvBaaoef7sEZSwiM65W6340e+94ObmNWNXMj8f8pc/e8V1nyLtJUEwQigIgw6798lMzU2bYNng5g7StGn6HD8PsYyIXlXYXoXh2OyymqqrUZWHZ3g4hsXN7h0Pzs7Iy4ZMF1RVRRxENK3NeH7Ou5tfsHiwwHIrun0LOmM0OsO3DMZzCc5Dtm3B0HdJckladND4TC9cnNBiNJtze1uz3tV0XcHh2DKZmeR9ynp/pGhKfG/B7foWbTboTjKPbZKqYBgIvMmO+2XO73zn2+ydLWaQMfEe8na1xZUun33sIvsMtzvnanVDXbU4tsLyS8aRz36dMx+N+frNHb2hT3Sv+jWL+YRo9CnH4zeEoSApDJ49XrDarCiLhNAYsrzLceyCm7vXzOdjetkym01o64rt+oaz8zPe7rc0LZjSxo9MlCVYHwrGs4ChE1JXOb49wJ3alGVNcbCxlMB0OjDBs8a4uqKoGto6QZg+gRtQZjuEZeB7A6TWSO2wPyxBVoTehNCXiNCjLUo82+OjD3wO6QY39Gm6Bml7qLSibZcMhj6PP1jQYWEaEfPxHFdoLEMShR62adLrnrKqeXf7DuFkdL1PninCcILlQtWlmL7BIBqSHDVKZSymM9pI4dgxbdthSIuq6CmqHdvlDUVVniwaXYspBIekRccCw2gJbAeThvF8Rq87qqL7mwtPm2jpYTkh3+k+xOs94FS9JUR/6nzsNV1TY/xadZIgEUgtMAz4s9VXbMQ9Wjf4rosd+eRpCUKyP7Ys1/dowHEd5mpOd7/BsROiKEOpjl73J0W717Rtj+oUo9GQhw/PWZyNmM0GTEYjbPtUM9PrjrbpKfKGzeZIlpcsVzs26z15UXJ9s0IjT8r5ez+tYxv4gYPue+qmoWya00CCOClqjosqaizLxHEcVFdSVyW2tOjb00ArkFimxLRNfHfOaBTjOBZaaIaWoG1q6rrjkMBul7B9tcYyTc7P50SBSav2JInNKBxgeTbzJ+coLaiES9+4dMJiEFZ8tOjIE4kpxoy/69LrjK5TNLWmaTq8OKSqaupNyra6oknfIN0xwowZemB125Pik2uEF9EqAcrB92IspyMe2IzGY5JdR5tX2FrjdBauYyM72NZLrChgOphjdDWFkYHlsk9qVvUWQ9agJYd9iYVCKcVgOsBwAoqyw5QzPNtAdjYfPv+ALE2pK4Fp9xhmS1FWqDKjD6AWNa4dMBue4bsjVJsiuz2qFPSGQ0+PdG18Q3A8rAlcF982OVYrcATL5c/ZCgM7cDHcAFcMKVa3COOAZTmMxw8xHQ+tGvrGQ9j2CY8q9Xuf6MmLKuG9lUUDPboXaBRSaAZxgONAWS+RjUWX3VMZmq6GsmzJtc1k+DFFcqR3FVK5tM1pgyGBXoDQ74+HPgW3JCcfrD4puFXZ0VY5u+0XmGLMp9/7Frfv1tjdiKYt8MOQkS9/c9laZkCW3TAcehiGwd3tDarz2Ocb7n+54YMPPsUdr9nlKx5/8iMmi3OaxsGwOg5dydurz+lbhzS5PwFuOrBjzWq156MXn1E/69kfbrDMAcNgxNXtPbdvG84vS6qswRIe2/WG0IlwnDOms5Dr65pW3fDm3S1PHn+K6xq0teDd7Q1e8JxwAAIH3xVIMq5fbrAcmy5q+PmX3zCeBidaV61w7QG+N8JyWt7dpic1uh+xGIYc9j3Pn1p88eVPGfrnvHjybep6i9IJ4XCGoOb66pa6g6ePH7O6O55If55gNPwuh0NKq3q+/OYv+PQHP8Cwa7p6zWwyoipWWLGB9sb0RkFaVXTlnrffvCYKXV5/85oPXnyfi4tHrHfXdJ2PMD2S7J5Pn3/CZA5/+dPXSEOxy1/h+DGyFpSDltBwOe56EmvNbHSO71e8efs1hhGg7Jp//s/+H/7O7/+QKjtyf7zifP6M1W3Oq1dfIKgZuEMQDsdNxu54z4fPv8d2mbBTLePxJT/76c8AzdnFkKopWK9W6F5wtnhBWtTM5mdIfcXdm5SuNWl1ynR+SVV+g8bh7u4KVYX89OvPGcweMA581utr6tLgze4bXoQLdusNaZbT9pJHZx9SqzV1UfP8g4iffvHXFLXNQgzoREbHisloyE9f/TVJWhBFQy5mE777rU/5+RefU7cm5w/OSDcZRlH8/zps/n95/SsV1f/2j/6bnxhui+F2BFGANgzqtqLtC5LiSN/nJ/VLu9iOR1mkdK2FY4/J8h3QIy2JGcDhuKW
oNMKUGKZmd9gzDieYls3Lb97Qqx5hdigtWW1W3N4meFFLmje8u1qy31coCh48HUF3TlFI8jzjgw8e0BagCgOqDkdYNHXJ8+ff4vp6Szg08COgN5CGhTQamrpFYNAL8X6d6pFX6uRb7Y5UZcb5YsR2vUGVgsA1SZICM5ogAw9hDEiTJeu6oDMVV/c75mOHwWzKerNlNnuONi0+/3LNen+gVS374wbPGrFbdgwHgpdfXjOYWLimQ4vibnfENmviMCI5auaDMbgt19cb5ouHdAKmQ5NKKlTb8em45g9/3+G7P+j48DPwzIJnI81/8u9P+Xt/eE4vS9z+HDRklUKLliqt8f2MfN1wNp6Si5Tb7Tc0raAscuJBQFm1VG3LYPwITUpRNjRdTqcrLCumKhqULui6FkM6JAeB5/rE8ZBDsqJpa5KkRACRF2KYkKctRZHhezFaajzf4emLKW9fr0F3lEXOaBJQ1zZKV0yGU4pEYhgGw9g/DSmqRasOKWwQiqY9sei1lgyGHgiF6jtsV1I1LWWZMB5HeOYQ0TgMQ4ehH0EZ0ux76iTnzddLrt+u2e+OfPHlL/jlL79iuUzQGLT6lFKPhwFd13A8ZORpyXgQoVuFa8QYQiJ0j+paRsOYsjpyTO5RukRKkyjyiaMQ27YYDIYYhmY8DhiPIxzL4GLxCNuRaF0TxTZ/vzn90/unRkPb1jRpxawbMHAGCGGg3w9x779o6xxTSkzDAn1a+aM1jWp5mVxzKBp0byOEZDgIOL84IXkHwwjf94jCiOlsgmmadF2H73uoXmGYBqpXaDSWZZ8CakhU15HnBWmaURQ1SZqf0Iumie142LaLYbogDPr355NlGUr1WLaNlBKlFIYhUaqjLGuqqqVT+sQiEpK+PxW7C2HSdCc88MlPWYAQ2LaNZZo4ro3rmtiOQBo9htSgFXlekiQZed6wWSesNwnHpGS7SzkcU1TfnUIyoxDXlwxGNpbjo3obzw9x/AhtaBpdsz+eVnqm6TIcnp9+d2OB454GbcvqGcQetmmxfLel3iU8HkfMxia5TtkkBQib4eQh2h5R9lCLBsMSBK6P73ksFlNcO6bKbIqk57DeUR4KDGUwsFzm0ZzFxQcMwzH5/Z5kk9D1NqZpkRx23N9fkyUFTd2TFznHXYXUPlWVU2QVh21Kkq1PjHs8ehSKiij2GI8XzM5GBJFFPBjSI9G6Pa1juyFl3mObLnXe0/aSbbrCtk2atES0PV1vkTQdWV4grJDVPufzr7+h6GpaJRDKI3YChFKoCo6HlP0hRdomRZPS7Dvs1DhZJ1YlnTJPlDOt3/ebvq8K7rv3tWs9WoNl2pydDXCHCuc8wpEBshSovuTqsEd6EybzCeMHD1k8GOPcufRdR5F35GnHKe0v0UKehtVTNPOkTvf6/XEFcRzz7ONLmlHOLLK5f/eWujWJY5PvffgHWGbPdt3z6e0WgD8bK84W30GaDrbVgtIoZZJmCYKaojiQ5jcUaUHfmfyj/+kf8ebqmtXdlq+//pL1ckNV72gqzfGwJzmULM4lSfKOthG0bYftaqazMb5n8ublkrPLKa5vIOwVq+1rkqTDCyy+evmXVFWP5VhsN0uiYMLZ2TlFm5FkBfQ2+/092ui5eDin6Sp832eXbAijAXf3a5pGMhmMWN3fM5/MGYbnSHvLbveOxeySy7MPCYOYyWzIMb3nzZt7pqNnOLbPbPKELN9xt3vL7PyC0/5KU1Q1ya7kfD5mdgk3VyssV5KVGw5bk4++9ZSqVgg9AFy0yIgHHskxwzBbJnODv/yLX1KWDU+fzanqGtscnKwy84DVfYZpelTqHkXBYvwY1zYYT86YxGMs4TEYaLo04/x8frp3GC7DwfRUf1dXLOYzWt4yGV1wcf6QN69vePBoTpY2RMGEi8UHQInuQ4QqyHONcDM6KqRhcDY/47At6fWOxWKB6wZ4/kmt19pEyO69LSdBqSNV0fDLn71lthixmF+yXRXE8YC23eKbIbNJjB+PmC3G3K3XXEye8vTZQ26ubnh3lxAPp5TdjvH4KVE8Ic0LopHkeKwYBDEGgjJJGE5CjoeUkX+GVgLHsDhbDIhcj+s375Da59mLGddv77GJkH3Nb//O3wKE6n/3R//ZT9CKuunwXJ88P+JHJk1bU5Ytpq3R2qGueoRsuLtdUuU9pqNxLJO6NpBGh+eZJ+O6cmnbmq6BtpB4fk+Rl1i2S921bHa3OJ5H2RS0tQF9RBQOGMQLqqZnuy2pm4o0P60d0qrl+t0GVZfopuHszEO6PU3v4UYVbXfEMPXpeDXYlollWxiGTdNqjumegT9GmIIsS6CBrul49vgZsg5IVE0jLI7FPX7kULUFk8mAmy/v8YRg9iTicFBIAaooWK4KejRNJ8jbhkNywLRslLLpO5O63tG2CWl2ZLNbolQJJtRtQVc3eGFJus8YByPaLmcyjun7AU1ZctQZShT0WcPzYMfzM8Vdteerrze8/OLAZGLxvR/NOHvq8b//8zf85TcwjkNeX91hOfBgOuRwXTGeCh48+IztuuLl689pupzR4AwhIjbHHVVVMPAiTKOl7+Hm7p6qrnEcn7v7FZ0qcX2L1TohTQqE9gljk7Ioub+7wzJtukZgGJK+AzR0yqTrHJbLIwjBdrWhqUsMYTEbXZyCbqFD3w3o+gTLaLicPaLKO1SfgpZYloHvukjhMp8PMaTENGEyHpBlCYbpIg0X141Ik4w8zel1hdQeedJy/XbP9bs9725vubp7x6Ha0inYHNcUXUVZKVxzwCg+EbbyIqVtWtLsQNfX+J6H77h4lkNTNUh1Cq9JaYBuT5VmaXIyoUdDXGeAbVrkeYmULn2vCQIX1VZ4todtO6zXKza7awzbwLI8/q3mpI7+8XZHUeZ81j9l2A9wnYi+h647rYiFNBCAavP3RDRAa6TQNCj+dPsVqay5uBwyiG2m0xGe61JXHfv9kapq8DwP07So65q6qXFdF/u9soUA0zSwrdOKXLxngAvDpGkVeVGz2ezZ7TLyrOVwKEiTmuOx5uZ6w93thuvrezabHVVdYxgmQRhjWRaGYdD3PUqp98rV6V6jVEvfq5NKI04VZV3X0ysA8b52S6B7EOKE7Y3jgCgOiEKPwLcwLY3tSrzApe0qqqagqHLW2zVZnqN6jdaSsmzen39FkWsOx5rdISXJD6w2e+43W+5XB7abku32yGZ7JC81ZVmSZAe0dpCGQy8MgnjAoycLbPf0YGJIk1/86pZcjzi7/IjQC0nShiRvKcpTN+x8PCEwTQzd49qC6chjMR0xm3uMpqfAmR8MsYcRm7YmmIwxbJskqehaB6FdqqRE1ScL1P3tnu2uoy5d+taiqWt6pVCNwnUCRsNLirIkKzKGowlZnmDaDr4f0zSwWifkRU/bge9FJ+VeW7iGRHY1bVNRNAlZKZiOv8UgGlLmGfmxpT0WVOst56OYB+MnPJ1/SHJosO0B49GAIi/ZbHbUVY8hIlzPx3Ig7mLm+wWObZzuM7c5PdZvaqL4zTcFokcrfuNTdW3Jg4c+/thDL07Wrq4tiYMhWppkWY
ljCJbXX5NdVVzaF5jCxDQF21UGvTi5TziV+5+e/E4Dat/rEwZLC1zHYfpgBJc2VacoixxbKFQzoukr1usN0pR8drsH4M9nBll1RBqaotjRNw6PHj6h6Sq03rHbrqlrgy9+dkcovkcUB3z19Vc8eTLij/7h/8xmtSaIMn7213/Ccd8yigc4bstmd83F5Zy7uyW9CinLPYfjksHAJ69XpAk8fPAxTdMxGkfYbk9WHAjjAG3m1JViPhuzT1Ku7l9jmA5NVXE2eIrjuRii5fU3txzWJp9++oz7u3um4wXb9ZrheMLiMqZtWxqVcH2zxI8HCFOz2qxpy9P1aTuSri2oCsV8viAIfL78+qdUbXm6t4pzHCvCtgNG0YzxKCLP4MWT32V3fMfdu46Hj5+zO15zOG5wXZ/ZfHaqwjIFYRjSqSO310eCUHI8rtGtz0fffsZ+lzIeTzFNA9M2wKypSwthZDhxyHxxzt3uDqTA8xYY7kk5b7WkLgskAZ4nUX3O9r7G9x2KTHI5/wzTMGlrjR8aWA788ucvEXRUhcvlxTnpriUaOdSqIfRnLFdXHHcHzhePiWLB2zfXBIGFabrUlcC2LV48/4ib6yWeL3nz+oqnj77D+fmM27uX5FnGfPaE0djl/vaecOChuxKkg6FPD/nTcITpSWoFeZnQ95rzs2dMJzPy6p7QCxkPz2mqlOFgwMMnY7zAx3EFx+SOPEm4PHvB2XxKHNsUaUrkz7CNmDKvOJvNacolg0DwyXf+Fgyq//T//Ic/eXB+gSEEliWQWlPlHQKT2XSEZcdUdQOip9cVru0SxSFapJiWgee5DMYmXQu+E5AfSxyzZ+DHOJbEsDkhAa0BVaUwLYPkWGIgOexyjscNRVHRNh2OZ1OWABaGrSirirqqiMcjJPDkbILl9RzbHj8coVRG4IaY0sU1A47JEdeXFGWK6sH3Y4QnqOuC3i2ZXg457HOmixneIOLq7ZqLsw8pyoLZ2IKuZr0/4hsjPHlK+q32p0qsYlfTFoogNmhrSdkk5A2oOgMlKcuUvgfDUEjL5fa2YfbAp2kEdadZv0vRVUvgBKjCJfIdpNdx/XaNH1fE1pR3r4+oXPEouGfiNLzaNjSVILJ8hqOQyaMR/8c/u+If/+MVn/8iIXBtDtuEL7/ecf5swfaQMxybPHs6p8h8anHHcZvg+xG+2TOJfGgljy8umY9CsrxksykRZkYQ2uz3OdKUKCVY3Zd0nUQ1guP+tI7NsxrVNQyHAU2lsRgjhCZJjiANbq9z8qIiTxsWswWhP+FssaDXLcdjgWP7SKukKlosA6LIOf3d+Ca24aKVxjAlZ4vLU0IXgzyrqJscyzFo2oyyytHawHVtpDAx5ADXdwHNaD4gHHkIGybnMdLymIwHLC5HFG2OkJrZZIwpNWmWEA4mTCYj+h6KosFAM5mEFNUR33eYT6c8evCC6WSC7YJhGJimhR/6+F6M6gEtcVwPYfQYFpRFSZ6WNHVHlpbUTUtZ9yitWa42/MfBCID/cb9hEPsMiojAiAmCmF6f0tC90mig71p8F+jfr0R1j5Dws/KaZXWkqStsp8f3PNAW/y91b9IjS5am5z3H7Nhsbj6Ge8x3zJt5szKrsopV6G6p0RwgQRS11oYLbbTQ3+jfoIWkBZeiFlpIgEhABCG2ultAD2yyq7qqs3K4eae4Mfts83iOFhaV3NWKEFoGxMIREQiLCHe3z97vfd+nqQV1DY7bp8GVbtBa4Lo+vh8gpfUwkJoYpsQwLVqlqZoGzwtAPKihUiIME6UFaVpyfXvPzfU9b9984LtX77m6vuf+fs1+n5KlBXGckWUledFX2mRZz+/WWvcDrO8hjD4kZpoGUgqk7AvZ266j7Xr+u207aKUpi7IfrsuaIm9RnYnv+YxGQ+YH054aEzpEwwA/sPBCyWg8YDIdE0UDLLtHXDZNTVEUJGnJepOy2abcr2Lu7ves1hn7fU2a1TRtR6s0SZKw22cUhWB1n7Fax7RKkOUVlzcrdmlHZw54c7PmF199x4fbLWWjwXTIioI43mFg0FSaulH4gxClJZ47wHFtVsst96s1XSfpOo/tPieuEvKqpNjEhJbk6HRGVsd89fVXSEPSdQbbdcNuDbfLgjQWqNYmyzpMw6OpG3b7gqro2O83rJY70kSxXmbstxnC6CiqnPcX72l1hT9w6ZRJEPYJa0NbnBxO6ayaupPQ+UTRKYZlsd4sOZ495cXzj/H8gLo1uL7fIBzJvthhWRZHh48Qysf3Ak4ezbFdF+n0eO3l+4y5OMQQgrYzuLlNUcIC/gNdTes+ga+VplN9ZZWmYzgY8OzZE0xHsZf3JPUez+s7i7WTYPsdhiEZT4bMTk9xDgyO9IKuabm/q2i0iTBEX/b/sFX7zdFjsAENjmNzcBax9u7oNAgFz87PcLwxtci4uXvF4mTMJ293APzbQ03XtiRxQRAM2Sc7UBLDbtDK5Mn5pxhGyOOzx/zyV3/C6dnHnD8asdt94OzklC++eM6Xf/vXOHbAbDZiOHbRrYvreqRpQegfYZqK3XZP4IeYwqbVmrax8HyPwdCnrDr2u5KT45Me6lE1tK3JcHBEXif84ld/SVmUWGLE00cvKOuYsmg5P/oBVbtEqAjTMPF9h66rWcxGOKZLWdRo3SJNkyLPMVTQ27oCm1E0JU1jqiqmyFvQLW3TMJ0ccfVhzw8++TG//7v/gB9/9vd58vgjPnrxhGRfcvbkmMX8jGjScnFxzfzglLOnA7abnHAoSfJLhtEMz3W5vrpnPF5QtfdoFTIeLRhPx1xf36Nam9X6CiU25EXO7c09h8cuWkt8d8ButaSqNcU+ZTQMCUOPwBqiqbi+WuE4Bq2OyfOM4cinKBRd1+LaPpdX9wzHNml1S5kJdDPjxctTjo/GVHWBZTqMZyNubj5g6CFH8zF1nRL4M7So2OyvME2Ls+PHNFXLIJywWW9pm5JJNOf8yRn7ZcD5swF3d3cgc/KsQamcZF8znBwCIJ0eprRe7RiG/ftDVXWMRjamrMmLS3a7LbY0MejY3JZgLvnq2yuCUch6t6aoaqJwge9K4l3JdHqIZfhY0mEQjJgcddzf31EVHXQNw4HPRy//87/7g+q/+tf//A+vrzY4zohBGD3UyAh810MKiRcOUcRI08A0BNISOI6L7WhmC4/rdYEbGKxWeyzTZDYLGA0D3r9eYpoWwhK0rcb3fQxD4ziyp750DdJwEBLqwuDu7pbNOkeYFvtdhcAkiRtEVRLHGaqpKbOO+3WCPx5gCdBVQ5p0GGZLXmaEgwjDsMmLHOnYCCGhs0H3xdl3dyscKQgCyet3r3nx4gwhFXW+ZsiAIs7xRyOSXOE7mngXo0MLhU+TKOwAyiqlLQNMo6bqltjaom4qbMcgiBSr+4yucplMRnj+FM8a8eGbW0zT4enHUyb+mFE4o0XSlAoaC0obezjENg1enJ4yDSQftiWmHeEGHqu1xdffKv7oj28pmwHre41hdJycT+msAaNQ4qiQQehy/Nxlu83pmj2iq2hzm8OjMcvrDZYwODk9xTEDbKMjHES8vbhgM
PDI4o4ib5F2zXq9oWkMooFHFI44mC9QXcN+lzMcHOG5Ey4v3+F7LlqBbXkUBaRpxSgaUJWaeJdTlRVJmtB1LWWp2cVrtJFgiRGDQUhd9j2KnuuhteD4+BjXlWy3a7SCzcN5+J5PmlV0nSYMhhiYtHXNZDRkOBpgPdRUVVVJNBgy8FxkZzL2TTzXJktzomjaBzi6DscdIqRN2uQ0eYlp2Vi2hWWaJPEeJ3AwTBPfC6nrst8QqBrXcanrqg+G0XvsHNemVTW13qFEhmlKRtGU9WZJUVdoLLb7LXkV07SK/2bUk6n+98LCcUKehafMowP0AzK1bTtU1ytMqq0fPJsVWgkMAxoaboEQChwAACAASURBVLuEtK2oqhLVCeoKLMt9UEZNLMtFCBtpBkjLxpIWlrQRRr/1qJuOtu0JPW2jaJq2915WFVpruq6jaZq+xsXQSMsEAW3X9UG4tqHrOgC0NjANu6dvKei6XrHqBw6NVoK6VnStRmBiIFFKILSJlA6mqTFE1w8qXdsHYASorqMoSuqq7i0RSlCWLUlasdvl7Ha9uhvvS9K4osgUXWtS1x2maeL5Ho5jY0hBUZZoBK4XIAwJpoFlW/3AbIMhDcqqpqorDEM8BJBy8qJkvdqxvNmyWWfskoy71ZKqyTiYT5lMDhDSJqsbqqaj0xplGAjLBssirmq07VJpwcX1NUWt6JTNepOT5zW2LRiPfBxpoBVk8Z7bm3uUGPHo2Y8xTEnbFFSNIqtylKkoqoK6zCjrijxrkJaLNjrieEe60SRrSZEnJHHO8j6hLCqSfcL1zZJOtGR5ynqVo5XPeDIhK2AdF/z61Tsur6/YrHcUqWa7vSWJ12SJ4nJ5R9k5HB+fYdqw2fd/k/lkTpamXF7cojpNUdfcrm6odMU+zxmHc6ydgxAOeV5zvy7ohNE/r/pnTw+1EP1A2TZNr6ZrzXjssTgK2BQb9nbO5NExZgtZneJPfY4mU4y2pU4kDiFDO0SsKupGsVrnNK1GaKNXUx/qqdAKQ/QwAeitB5Y0mZ663MhvydYFpjJJmoZSbDG1j7Qkjaj50YcCgG8/Oeejp3+P6cJmuUoZDmfkZQ8sydKaIi9Ybm7Z72LOT16QlwUfPfuY9XLDwfQE33d4/vxzfvazn1FUN30nsRkynoxRWrHavmG1vOP5k59wdHJIWedEkUPbFXiBz2q15erqEiltkn3F06cveff+O65v7pGuQHoVB5MhhjaRMuDR08ck+T3vLt5hhzsMCR8uP/Di04/Z7rccHZ+TbBUH8xl3y7dUdUySZNSli2FV3N7f4Zg2d7c7zs4OGYYHWMaArLjm8vKCw/kps7FFsm9Zb17x/t1XXHxYsV5fk8RLVpsl+23M+3drAn/I63dfEw1Ciqy3DFnGlMmBw4eLLfPFEfPDkKKsubpcM51HuK6N7Wqm0ae8/MER33z7a1x7xKc/+AKtGkR7hKky9ss1QlkUcUZXK0ZRBG1FUVUcLCYU5T2rZYJpappWEUU+liUfLDMu0bjk4rXBk8fPefnpM0xZc3lxj2357ItLLq+uaZqGL370Cft1icBGuoq6U7hujeosfN8HbRHvtqTxkiePntJVJbfbW+gGmM6W6+stWig6ZTEaTEnzS+JtxXQ6xbYtLpcVzx9/hGEXNPuKttUsN0ukpZGWoiwqlvcJbeswGrs0bcfJ4xOyRGE7ClWDow/56NkZ09GQv/x3f0I0DZHS5PXb1+zTjCRtsBwXQ5hMDnyePPmHfycG1d8apqqUYjgN8fyIqtZ0bc3Tj0/ZrnfcXsfYxpamyqirluFwwmYb43qa2cGMfXJLMHHZ7GLKUqN8B6EtiqLi0dNzsnyLZXlI2WCaNWHokGU7XD+iqWr2+x3D6YhWV4TRiLxqQLR9gbComY5NNmsb8o7RSU9jaHLI8hhXeoS+h9YV0jJQSAajiNV2gzBd0AZNVSFJMAyTUTBkGs4x0dhoRvaIqzdL/AOD2aHN5VdLmiZjMhwRRAH77SXjKCRXAnckGHQFznAIaoHveKxXd+S7AWJQ0wkD1x9TV3sG4YiurGnrkmpvcn4SoD95QtkV1F2MgWQxG/Dm37/l4GBOGNiUcUfZ1jhhSKpLXr0qyToLcy85PjnDkYpPnncEVzHnT074qrrHiQwsv8VVOc+OnjF2BjSi5P3dVzSFoEoMpHB49MilUXDy+IzF8QxTurz77i3DyKQuDaQdk8QmZdnhuh7xpiLZgR88YDqtHZttSlO1eL7N8cmM68slgd9zozerBNNyiHctQhvsdzmTaEbXmqzW78gyk65xezSqqajX4AcJ+0QwHi4IPI+ugckkQOuOskxwfYElHUbCxsQhzzNmM4tOmBhGw2ZzR5EpmrphMZ2gVIFrWXQyZ3l/xdnxRwhDk2cJZaUxLIcqazE6p8diJg1lXWGYLXQeSteEkUuXgRfMkVKyjffYokCQIGU/iG23K8o2x/VM6ipHa5vt/h5/4OCFEqVaVG6SxHm/Bnc80qxkOp/1CdO0+/51t7zJGY5Ccr+i8kq6zkBh9phf0YFWqK6jbZq+WEd3KA1/uX3FqsupygbP8xiEw16B1QZSGmijw7I16BYFGMKk6RTCFCitqZsORO+/rKoawzB6JVMr2ralrmuklGigaqqHgVQgdF/w07Vtb60xJG3T9b5iDZZloVQ/yFqmgeW5qJ73imkI6rrElha2I/F9F9/z6JQCHdE2ijQvSdKMtq5Rne4tAJ2kaluWyxX390tA43kOg9DDMDVatximiWnYFEVfhN80HXVdPzDd+0EbobFtD8MoUdQP1VkKHiwPWggM0+zbDIA0i9FK47kuQpmgJUjQhsLowMJFK0krBKrTmEIjpUWLQdW11KrBbDtsy8QLBK3oGB8c4Do2ZVERTkKKskCaBk7go4RB1cBdmlKtYpzNe8LNjjjekm6XuK6FsBxGwwmO1aJ1A6IkLzJ224ROdZwcjXny8RH3yzXvLq5YrTKkYSNF1TcntJLtfkerWubzIVWpuF+9Y7/PCLwJhmgBj7zo2G++xjQKTMvAsvvvAYe/+ncwGobIANIs47uLNUJqVNmwijMc3yQITa7vNnSNRPgac1sTRQ7bXYZCPNRR6e8V1f7mSqB1h+49IBhCYjmw1m+4irZMjFN2F9ek1wWzR8eYdkq9KxC1wqljptEJKi1Q3YCyKem0gq7FsHp/O/Q0Nw3oB9uJ6jNWKA3SthhMXYzJiLaqOD46QZour767IoyGtCL+/nX77u1bbG9AXXcMRxGi63j+g9/hr7/8Iy6vLphNFwxGPvtNimHaOFbJn/35n/DFFz9mvy97dGmecH3/C9b7r6E94Oz4OemdizZq7u5uOJq9pFUFr1+v+dFPPkXoFAXc3m7IsgbXH+AFAZ4dcrt8T16U/PhnL9kll7x/94Gutgg8D9PKKOuctgTDTLi6uaMthjw+f86XX/4ZqnVYzGYstxdc3v8ShGI8PMQdrCibewz7hFX8Fl12RMMTOm1StRWW7RIOItouJ8tu2Cwznj79GdGwY7vZghR89+ZXFMWW5bpiMZsRBBPcsKLurnn7
FoIg5HA2IhoEfPn1NzjBgHX8BsQJmorpbEDV3LL9ILm7yTk/0yS5yeXVe9rZmMfnLpfvrzg8eoQpfbppQF76fP75j2j0kvv9FU3W4oUR+/2Sqmw5WTwiGg748tufMxmf0qk9TeXx8gfnZFnKp58r4n3Ccn2FaBe8/OQzEA1B5BLHr5jOJT//+a8o44if/O4J7y7uqNWeo9lTUPfcXlaEkeT45ID3r1vulx/IkopGmhTGl6x+2TCeHOP5Fm/fXLKYTBkNp1xd35AXPmqniQKfqtjgTQTCrKlSg0ePnxAnG7Sy8cMGZeZsi/eItGMYHfD+7QcCd0KrW7JlzNMzn67xefPNBT/87DPcUcivfvGW4UhwdbUlDEJaEoSUZIXzH2PG/I9y/FZF9X/9F//DHxb1hrS6p2lKmkbQVgZ3t3fMDiI+XL9CNybDQUiWVqRFheE0IBSbVYHSNaYSzKc+TZljCkXXtiwWAzqt0drGFObDer+hbgo6ZdCqBsNyEZbC9R3ipMb1AkzpUDUlptDE2466rVgcjkjKhkaWmAKkGZFUOQP3EFVp2rY3cFd6C1aFUgLXtXGkpOkU46MFy/WKNEs4PDlC656l6wQe3cbkyfkBN+sdk5MRRWxyd7MhbQvuN7ecTs/xfYFdBzSGS15tqaqCIIS8yZlOjwFI4gzfC3GlizQUnjXE0CVJnDGYRuzSNbt1gmnb5FnCYnqEE4wpGnj60RTZWZweHRPvM56++AzHG9GJBsvoKPOC5e47FqdPyfKWJ88eUasL6qrk0F1we5eQGSVJdYOhK9Ibm1GwYHowxpvYDKI+xLZc3XN2NiaMBlzf5Fxcvmc2GZLsFW0lyLMKgfdwEWnJ0obry3tM4TM7sHn0+LCvdRFmbwSv4e5mTxBExNsCKRzqqmO93mKaEmlD4HtMJjMsR6OUwPdDjk5CtpsM0+lTuKrre1mLYoPtaLIspWoSDs8cNCmjkcenP/iEpu4ospLQD1gcHjKeRKi6wjBNLClQysC2XCaTkJvlPfs0J8sbDKNjeXfHyeGc4dDBtvuvf7Q4RumOmr53sSs1YRBwv7mmUx2uEaKwSPIYJQyk9PB8j6JM2e+3CMNDSkGW9WGiMu2gculqk6puWW22JGlCWRjoRpLuK/67syMA/vnqBteDY2fK0A4xLQfV0A9pWmEYgroo0Rpsx8GSEm/g04wkuCZNUzM/mPH4yZzZgUc09HB9C8MQdOqh3kr2PZXGA0+yqhtMKb9XsaTsk/aW1a9jTVNiGOZDV21/f9s0LV3bIYRJNIhYLOZEgxBLSizLxvMdbMekrgtU12BJA9OU/eq+KOlaTVW2oPphFy0o84p4n1BXDUVRsNvuyNICgcCSfU2eaYLr9udWN72FwXF7qlcQOnz2w+f88IuPeq65JYnjmLwoaLoGBBiWRFo2tiOxHBNDWHStpq5r2tYArAf/rImU9veqseO4hEFfwWWaBoZ0EKZFq1rarqHrFF0rqLuWoilpqoqu7gf8pmkwhYkpDFAKyxDoTlNlDV0taBuT7TYjL2uUNkmzkiLvoNPYdosbCNzARRoSSwjcoLevWNLDHwxxHJPhyOb4ZMooGjAdTwgHAb7t8Q/+4D/jv/6n/5hHH094/NFz5otDpuMZp4tToijCdl0MEWAZPgYgRa+ab1Y5bdNR5YI06dDapKzbB3XdxrcWHE8XDMKQWpcIF+5XMctlzHZ3h2E2GJbPZpsSxwlVobl4e8P7Nzfcvt7yODwiyRLW2xiND6ZEqJ7oA32myjAMtGrp2h4q47gmT548QtmKdLjlNv6SyzfvSfN78qwi1EPauINSMl88Zhdoiixmqoc4tmR5V1BWIAwLeof3977o/meq7200AOagZRW85st337JOrwksG9P0SeqYvF0z8KZ8/rD6v/nJczbxJfe3a8ajARdv3nFzvcNxfaoyQ3QBj89/SOgtKPMUw3BpVcy7txfYrub+/grPifDcOZbbcHz8A9rK4+LyNdHIwDWP6SqPfbzB86b4A5+2bvn22w9c3S55+vE5na7RhqJuK5SWWPYEdMf7725xLAulLD5++ZSygq+//hVVVnIwPUCaBk8eP+bi4gOePeTkdIypQnbpKyxpgbIRQuE69MjtVUroGUgz5PjknHcfXrFL13jBiBcvnqI6jeoE52dHHJ0ecnX5BikDijZlH695/uRTPv/iKU0bM5vNUORs7hyefDTl1Xdf9YpzdcXtdcJ47DOdROzjW5IkZTwZUNcp282GcGBgSRvp1EipGQRzXr//95ydPaFuMla3FePRAR9uv6LMcoq8ZTiOCByDbXxLvheEgUmXD8nLDUWREw0nfPjwLccnpyhVc3d3x3K1pCxMppMxwmy5vVlR1zv++q/e4gYtm23GLklZLF5wejbg5qaHJkzCR0TBEYZVsVqtmM/7vtbDkyNuV5ecnj3j4CDg5jLDczyiyMUQDaptUKXD6dkJdZYR+g5dF2Poltv3W1xvwnTxCETagyzuvqMocoLI53b9a/LM4uB4RJ7kpMkSyxnwydNnDKI9y+Was9NHVLrh7n5DWSqk09F0OQfzMUmSkKY7Li+v+IPf/6d/JxTV3zqo/k//7H/8Q9Xa5JmmawWz6JDNMsZ1bVApg8GMZL/hyaMz6sqm1gUnT0LifU2VG0SuiWP4HIwHBGZI5M2wTQfpFNQ1KBq8wEUaBnVdY7mCVtU0oiQYDqjqjjQpcPySrE7oWokWW1Rnsb7PGIVj2qZGGZImk7iNx8njI/Imp9tWdEqxS64xXYXpSJpG4jkRdbFHt4qiMEjbDXVTIWrwAgdDu6SbhqYF0TTotiVONd7YY7lZ404NposJ1B2TE0leB+RpTOd55NU9Td0iTUlRdKAygshCqRZh5hjUNHXHyfwZbmCSFBXXN9/heiGWeYDRQpG1uDObycEIW8bc3dzjGDb7JCOyQn76+7/Dr375N2T7e9yRgww8nEF/F11mW4Z2xGZ5yycvf5fZ7Ad88/o7wlGKpU1m0VOOTj/jkx+dYzMm3sagS3ZxS1YLXMdHd0OKoiZd31PX/XqsrirQLnVuYEmTIitZ31aMohmWMSFNYlb3Bav7HNNuSdIdy+USgUkQWNRNhikiDAG2LTg46qt8prMBli1QuiEaGMxmQ8Ko763L8z7TIC0ToVvGEwfb6TAtiyCSNGKJ77oEgYM0bKbTMY5rMJ5EmLLu10Keg0YhDQ/bdBgOPJLdisfnh3i2w8HMInIdRv6Y0bBHA1pOb9fIih3T2YKWhrpJ+y7L0zNMS5LmCXUR0+Gwje9puhJDCja7NZ2qabqMNClxHIlhGiT7gmzXkO1rtBJUZUUYhcRpjOv2IAcqyX97OgPgLwKPo+MBK53xanfBnVUwsAYYHVRdRasb8iRFtZrRcIRhG7yWW9rQ5Oj4iP1+S1lm3w+b0rKwHRvDNHC9Hk/r+T6u52NIEw1Ytv2gXvHgHzWRUiIMA9u2MaWFaZjfh7rQBp7n4Do2oe/1nrrIZzod8tOf/YSz83OGw4DROGAQ+gShTxD4WJaFRtO2HZalWSwijo5mLI4mHJ/MODm
ZMp2GjEY+x0dzDuYzfN/B8y3Cgct8MeT07JDzR0e8+OSEjz4+w7JMdtuEtjXIspbtJuPmcsMv/+Y1r1/fEsc1Wd6gAMeze8VVqB7GqU20MumU6qu/DLNnwesGpWuarqZtWzQdXds+eGQLlNJo0aF1iRQ2UjiARpsaYSjsh6G01tB0fXis6Wo63SJMA4QkLxrSLKNta0Dj2BZoRVO2mLq/Yeg6qHJNXUua1kRoi/F4ymI+65ny6RrDMhgGY4Zhr/h7jsl8Puf8/IDFgUteJPz5v/0rfvnrS/IKHj+d80/+yU/5L/7xz3jy0QhhGvihRRAldFWN0bnoGpJ9zS6p2KYlVZtjWwahP0DVLoEccnI0IW8T3r1/TatKmqrF1JpBFOK5IUVS05VW3+hitMwXY0aTAH/gcDSac+odonQHWJS12XesadVTo4RACOMBy1vTNR1KaSwbDg8npG3Mn3/3c9KdxYsXLzFszatXF7x9f8F6k7GrGgpZUicNSZXSBHuCfcTtdUbTCAxTIHhoAED8Jr/Vh6l+E7MyBIOFQzzY8ParNQfzA1pjy4fXH9ivrnj19TWePOb3thkA/9rbsr3TDMMhVxdL9sU70C5PnnzGfHrMzd1rPlxeMY6mrO9X+GHIwcGU+bHL8i5HdYq8qAkHPrYVsd2mRFHE1fWKwLdJ8wtc16ZpU9KCngY1OcCwc95cvMYPBhTtjqKuGYRTttsV2+SOfGsS2D6uV/P0o49YrjOqvOXs9JgwjBCGYBN/4MP1BdNZf55lPuSzz3/CLnuPwYTnL8558+Y9VRvz9u01TR2xOF4QuueYckxa3XC/jHn5yWdIOyHeKI6PnxIGDpdXFfODxywWIVp77FYt88MBYRRQZwcoZVAWLXGcEvhDbq42mIbD1fs1w2hE1zWUZUyebfHcgGgQ9qS21kRon+FggTBKVDfg6dNT3r17j+M4DEdD0qIXUwwpKLKc+eEAx1XE64YiN/j4+ce8efUGzwlZHDxFSINOdyRpRpxc8OFiyfHJE4LAw7I78nLbe3/zPfP5EY9OT6ialNvVjvGhzWqVoUSMa9t41pD9tmM40VxcXCBkw5v3b3n05FP22T3r/T0Sn5P5Sz56Mefu5g7HDjk/f8TBeMZ4YjMLfJaXW9592BD4I/IGQmtOUeqeatduiDcGXVshzIK6zvDtT1gcTliutgSDGaNJyCbOMDFpyobVuiLOb1klioE3Z3pUcPH+iiTZUFUlR4en2NIlCCN+/KP/H5Cp/tn/8t//YVnuKYsWS/RoPATM5xZ+aDAaD7i9ShhEJuPpgPNHj1mt3/Fo8YRPnhz2FBVp0VYBo7GHtDu00GD1iX3HMcnLhH2eIKXBaDhnt71mMjwjSwvSbcJ4YqFFhx94GFZCspFISxMOWra7jvGoJzxZhuLqOuPwSNKVK04Xj0mSLfs4JziQJHmNwKerTHRbk8Qxvh1iBTamshiYFrfX98RVTMee0JwRnUy4uH3P0fkB+XaHtBwGgxlWWSM9SbwXpPsY04OmjLFah3E0piwy8rxgYIT44wF1oyiThpGUlNqkTHIsu6E19gh3iG4dgtDGscCTHnUnuLi5YBwGDMMRphVyt7kgOAh5/dUVw5GmVC1tpylUynadMx5aWAFs9zVeOGZ4qLm5XSKdHC1SVquW2eKIXf6Wokx49+4G202JhgZu6HF7v6OrJEK3rLeXZDuNbbtUTYxqDTbrLY7jMBo7xHFC3QgGYUia7yj3JrXeoRuTShc4VsTqXvHsxYJxNEHYDeHYocokrW45/3hINK0whUWcFECDH0gOFhHIHG0UjMZRjxtdCMKJICt3xMUeU4LtddiWR1vOmEwitps1F2/eU6VNX6XT1FS5IstTPL9kNJxjmhLLrVmcDGlVRlnA2XnE4eIptmuS5RleGFK3fape2g6RN+Lm3R1jb0LgTLlb3rNar+m0QtlOH94wWoqqpmpqyiohzwxsx2U+P0c3Bl3uIhuLYDDicH6A49kcHY6whIs0LUbRgPEw4OmTY/7Lh6vlv8w7mtxA1SbhcITn+WxFy6tsyZv8hm/iKzwlcU0D05G8c1JWbcZuv+Pu9oogsHn2/BHj8ZSybmlraBtBXWmaBlrVJ9+rWqGUoFP0IAFtoDR9EEyYDwGu/rFSmqruVUMQmKaJ7wWEQc+lt22DtmvYbGIcZ4DrDPnm62+5uPjQI5GFxHZcBtGAxWLG4eGU0/M5p6dzDo9nnJ0eMp9POD1d8PjxKbPpiMFwQDgICCOfKArxfA/DMLCkhW3baKUJ/AhDmKyWa+q6xJAGWV6SpCXClJi2xJAGlmcjbZuu3/ziuyFhMMCxnd4WwUP1Fz3x6zdNAwITgYHW0LZ9SEQ82COKoqYqW5qmoapLuq7FFD2vHkMgpIHjWfiBgxvYOK6N7di0nSIrepXYsW0MadFpQdUoWiVoNZRtR912aAGGlFhSYjzYPPI8Y79asbxfU9Zgy4BGaZSp8Ac+0vlNgCvj5mrN3XJDlmXcXn9gfXsDrSKOt/zff/xv+L/+zR+x3nyH52UMvYCBHeIqA5XlTAces4Mpi+kMafo0rcLUikeHcwZmx36zYrnvSFJN4IwRQrPZJOjaZrdJ8aIRniep6xrdwnZVUhUC17MIHQ83dqjKjqo1H6qpTHqSaf9/6G8YFF3T0jb1Q4eqSVUmXK0ueZ+uePPVkusPW1RtEfgmg7HN7GSBP3HJqxzdDjjwjojflsw45G6ZUncdhgFos68W6Cfj7697fTuWQKGwQoNi1PDy5eeE3oCry+9YrxR/78f/EM8TpFXMP4h7+fVPJjGnj094c/GB5e6Ow6PHnJwOuV+tsO0RWaoRosbzJR89/wgt1rx5d01R1GDF6FwwGHlc3f6K7T7n/HTOxc0FdVswmHQMwjlpvsbzhzh+zW5/TVdaBN4QupzdakM4sbhfCmyjY2j7WIyYLyy8Ucerd19TNS6zyYRvvn7FJBrhBx7x3T0n51OECdISnEY9gVFYgrtVwWDo8uabt3z+8hHx7hbfn/Di5Ue47pQ02zEYH7KIxsyjKfV6y9XFG67ub8iyjKHvcb9eIaRDVWviZMluv+Hw6JyLd2+4ud7x5OkxTVMRDjxmBxPOFi85Pz2hSDOUKFC1z/OPHrPd7eh0wnpdk6eKu/sbTuc/pFbvMIhwfbi90Pz+3/8JX371F2zWO1zPZBtfEToHgCZJ79CqZpNsiIZztquEl89+yvNPnmD5NpYekJYbtCEY+B6O4zKfnXOwsPnbv/kVp8dPca1B3xvueUyGcxrhIZ2Oumwo1Q5bJkhcWhFweBJy8eFrqjZHy4zJ5Jz9fo1tDqFr6YSmTCwC32Y8OsC0W96+e8XQO2UYjPCiZ3z88ad4ssdx32xeMZy+IJpq7m7fYzkj0nqNECGHR4dURcnnn/w+QRiyXu6YTw8xvT273ZIWyNoKw7LQdsHXX73DtA3SbMd6lfPs6Qs8z0e3HWEkubl9x3/yO383FNXf6lFNtxl+IJlELvG+pEhqTk9P2e3WTA5s9smGxeGcqsw5PonYpDGDgcluU2OpCWUtcFyXdZ
4SaJ9BKHHDiO3WJN3cYk5NUOBaLpqCzfoG15pTpClpvGMQzDCkpi5rsqpBWoJB5FDlIJ2O2Swgy2N8LbFtxfPnM376ezP+6P9MCIJD1vcXRDONaBxc08DzXG7e3zLwJZ4zxAvAaG3SZE/jSfCG5FVB3dSYToaxN/BEwPWHLZZlAga7XYyqcmzbw7csHNfldnWJF1hI2yYpYmzbYBi5dBo2uzW26ZEJk2WSMxgvQNsss4TBYA5tRtlkGCX4lsd6s8Yau/gDn87QFCrGs13Onh6zXm0xVEXRtezzDV1loGg5WxxSF4rhbMjRszHX71dcv07whyFRN+b4aMG31Ya7+1tMpYmv9ozHAZP5nOvlkk716epGJHz9zQ2jMCAajljua+pKIGqXpsqRI4e6CVgczjieG3y4Szg9GxDvSs6fBKzvJU1uIUc1ftCx3e5J7Irh0KerNdFUcRhGXL9fMxxHGCLHtg1G0ZSDhYsfeNzeJYAijCo8xyMYdAhK2qbDtR3GE58qsahzxW53w37lE+8y5gcRT548ZrPL+yqVWuFgk+1LfKdjcejRtB1109C18OjJDDAQsmQ8DXj05AV1U/Llt7/C9Tvq3GSz3WFaJm1rsttkVG2BbRHwHgAAIABJREFUQFJVOeFghB04NJ2myR08C5A+zkhS54Jf/Plb6jpnPrcRjWI6nOMGFsPIoWkdhJlzPpnT1i1KSNbLAhYBAO7wACnt3s/c9rx7DEGa7knyjP1+x1dhh+96dNkNza7DMEzCQcBwcoAlDfZxhSkVpmFiOT0lCiEp6wqJwLbdvoqKfgDrOkVdN33QSfdr8K7rHuwBBhrdDxJCoJSiaRriOCF9SOwLQe+hrRtub/+UumpI4hjXdXEdr1+nmsZvKjL70FWjAONBwTXo2gatFVKaqAeLQ6d6UpkQveqpVd8W0A+SHZ1WmNLG9wKcIKJpWyyzL4gvqp6EZVoSx3UxH34Xw+lJR0Ve9I8N43s7w2+qs4QwkIaJafYffZCsRRvGg11C45kSU1p9r6tjIwSUZUnTNICmazu0Ulieg+c5eL6LbfX1U3VVUVc1bdNRNy1VXfXWgU59fz6GEJiGQoiKqikfPmfieQFVozEdl5FnY0kXwzTI0pQsTfvasaJCmpLJ+IQDz2U8Ddntt1xf3XD5bs+f/z8/p6sLZoMB08UBuR6hvCHRYEIUtUzPUmaTCXdXe969/47nx3M0gmgQcnYy5d2bV1xuS8JHExZORCADrm52ZFVNqxR1XVHe3mJ0NmXRYluS4chlH9cYK8ngaIpr22RthhAmphT8pqusj09plBYIIR/CTn2lkDT6PuEsqdmUGdHcpW47NrsKLzdpak0R7wgixT5W5Oot39S/hl2LHfog+psxQxgooTG+j/z/B2/sb7yyaBhFUz77vY/ZpXe8ev1X7Fc+T54ec3H5nuHYo9XZ99dL0Z3w1VfvcMKaoXRZrhPo+tfO692X2NLgkxc/Jksa1ps9SdyAshi4I6KDBX/63V/wPJTUWYoyNd++ueVgHCCUicpHHJ2/xNIfyIo7NuuEYBDRODnXuxXB0CYp1tS5pNllTI5Oubi9xvF8dBuQlQXlTmDNA8r9ksePpyAy5sMzHAGbeMfh5CdcvP9XzMOOk9EXRG7Ni5/+gM3NDV/8oz/g6dkZIjcobYd4/x5VD/j4aEHn5Nxcf8suHfGf/uwPWK7/But6z9HZE7767hWfvXxMnq/omgikRSsruqbg8fyIm6tfU5Y5aPj05Sf8+tdf4dhL1m8rbG/AQRjw6PwpV3dfITuL9bbiePERtguHkzlKbrl6f41rCYbDEa35lrZ9jOUKshjOhsdc3/0ZKIdkX/CjL35IHO85O37EZnvLapMTOg77rMWLcupGEQYjhLTQdcNHzx4hhMHP//pP6RqPuoJ9fM3T85e0Kufd3QeEaSDKHNmUjAKLPJminJbFecbt3Z603NA0ChuXQn4ALbGcEK3Akg5Fc82r14Inj78gLdf40ZRffPvHePopk8U9Zb5BGoqrD0uG4xfcr75FdTNG05C8KJktQoaDc5J0yWh0yB/98b/k8OQRQRAhLYvL+w1lPuRgPGE0Mvn13/6a2TTkYB7SqA2qaTiYj9hucz55ec7t9QfevPuGuvqt4+H/p8dvVVT/j3/xP/8hjYNhNLx4cYZt20RDgyCwsYwBF283fPaDj/ClT7zbIGixxYRxOOJHP3pGvMtYbi4YjSXT8RFFWvHuzSvibcYgsKnaGtPsGeqqLcl3krbO0a1FGBhE05A8rwkHLp9//imr+4RwYKBUh2laRGOL2XBBlVmsl1vmiylV6VGUOw7PoSwNLNtkv24YDhyyeEPg9T6yquiwDYMk3iItl6QtKNsST/ugDO6bDZ4WBJYkrxvyMqFtC6q2wrBAaImqWjSwTXd0KKq6RRoGdB0Df0ijwbAgMnzKrn8DNg2LpKhRWtFV/WrY1C1FVjH0D5iMpuyKPa5vo+uKIk2YjscczKZURU1gB6S7AsOoMWWAa4/RVcZ4OKEoDPzBgGRbMIrOiFc5k5nHZLYgLe/RqqDNIo4Wh6RVTpltWO63xKkiTxqu11f43pgsb7lJ15h2S10XzCZj2tri7PEZIHn/+gpHhL3Fom5AQlsKcmLa2kSbOW5QEAxCtJ3Qli7bmz1KKFxf4dsSbVY4botp1ex2GzzXpmrifhWrLeL8CssyaYsIAxvVdbi2gW97jAdPGQ/HWLYiSbeMJwOef/yMuiuJkwxhWEg3Z7fLETwkzhsDy3JxHB/dWgT+mLyK0aJBtZKybHh78SWtSlFdxXq5IxzbjMYjbL+nJo3GU4ShaBuTpipJ04y2bdCqxBYWgTPGFwHZNmU+HfKzH77k9GjK86enOKVmaB3hGQHnC4/PT4bcXa5oWonjD5FOyH8le2b4/1ZJpO3iuQMQFtLxMCxJq1qapg8UgsL1HCzbZTSdEE1G2I6L1gZtB2hJ22k6DXlWkuVlv9Z+SPe3XUfTtfy/1L1Zr2Vpnt71e9+13jXveThjnJgjMyszO7Mmt+nB7W7LGHEBSFyA5C/THwCBZO4agwQWEsJCiEsk27IMTbfpqqzKrMrKrIzxxIkz7XnvNY8vF2vHyfZN38BFs6UthU5E7GGdPTzr+T//39PQVlXWTQN7N9D1PCzHbq+2DbJdKDIthZByHyWwMQwDy7RaZ0ob7Xuy02EwHDAa9zk+OmQw6NLpePjdFkNVN+XdkkzLbRX7cbvRuqVKtXlYw0BZJrZj4zgOtmvjuC5+4KNshaEMTOViWh6mYVFrKOqaumnrd2vdbvgj21739xlEuefEat0SFJqmuds011pjmmbbu65UKxbl95voUrYIMiElSlmtq0sruquyoKorHMem3+8TdAIcx8YyFU3dRno2q5j1IiYOM8q8RkiB7Sk838V1HWzbxnOdff5WItDUpabIq9Z5zKGoIclSyqbE8ew2ZpQUJEnVsl3rFq2jLIWyFMKUJGnKcrWlrjWT0ZT7Z/d48vSMh4/u4QU+WVmxXO+4mS3ZhiFKtY617XhsdhF1VRH4FoYQxFHGxfWMx
TalPxoipAGOy6On93j76g1pmjIYdhl0pty+WxDuClw3YHJgAQ3htmI86uM00M0CHNun1hZ5vj8BEc0eF/Xe2dbUVUJZFEhh4LkmQUeibYPu2SFn9wYoU+O7Nqv1nO0uZBOn7PIN0jHJq4RS7Ih0xPn2mml1DFq2uaK7BS5xd5/cNWHtL1bDb9df8+2vX1CmmuPjAVVZk2YRu9UOW4z4w7BtlfvLUU1eRRjKpC5NkjhluYxZrVf791zBm9dzPvvsE2bLF5R5SyzJ6gx0iOv5bNaaMK/peGNOp484OTnkq198w+mZx/nr1+RRzPSgg0GHQb9Pll6yW71kNY9wu5JHJ5/QNzqUSYFSBqt4Q1YKysrHch0CZ066/YKDw5Tpg4LhxMazcsqqR1FmrFbf8uS+z8275+yuXvDty19Qrtec3HtKUSf8/Gd/ge2PCLOYOMu4XJ2T1TVmWYCdsYm3PDh5hlVrPv/RQ/qWye71b7CrhFRKlskWU1SYKDIp0JVGWTl1aRInc9bzgjiU9PoOl1fvGI+CdlI0u2G52NJUBp3uEMMQpFnILkoIvBOU6fHp7zxiuT1vha/I2KxTPvv0RyRbBdrCUhW6MegNPC4v32E77STFsQIGE5ur2zlSuMzXz4mikqODQwwchDZJ0oj+wGazSVDKwLIcXrx8xcA/wB/DNroicLqcnX4ARoTZQLaoqc2WIuC4YFs9LFezmido3ZAmMQZ9jo8O2j0I2fDm3RtO7t3j9PQ+htkwPLB58+4lp488hBHw+Oljnj5+zHYXMRgdEoUVnjNAGpqiTFGyw/G9Lkk6p9PpExU3JOGSPHI5mI7YbbY8eviALEvwXBtleRRljMBiejDky1/+GiEV0bbi4HDMjz//21Gh+jcK1f/mn/7Xf5pXW/xA0QnGRPEWZSk++ujHvHz9Ff7ARWtFGG4YHmY4zpTusB2bnl+ck5Ypli8xbYvZYkcjFFlZtuxTaSB1gyFMdtEGyxwhsRkMfBxPkeYJedVg24Kz0/uUWUGZ15zdPyDPNcr0mB728VyfBw9OiOOMoCsZHfh8+u8V/OqrF4RRiWnWRNuKZ48/pkgVltXBD1ro+OMHz1htK6Qt6E8MfMehKwJMBaHeYFWQF21YUtkCZEO316OqCookw0Cx3cXUusa2LKpUYzQGXb9DnbdfirIpGPl9qqjGtbz2C99QuNKmyis2mzVKN/S7fR4cP0JXcHn7jslkiKsGHE0fMei7pFlBtxsQrlOG/WOEcMjLkqIIcU3B4eER2yRCmhau0yErQqStaQzJb1+/YB2l9HoDbFvSGAW323dkFXgjk7IxqStFFmUooYm2NSf3TugEE/JEIkVDfyhpqoLVfM0f/t7n/KP/4Kd8+bPX+I5iethlOS/BS+gMTAzXQ5ptg1DT1MS7mF5nQGCPse2K0eCE0XBKmmdIs8FzutSlTRjt2qUT5ZFXFZbZZ7NOMcyU0XDM8iaiLCt812c48kmSkLOze3i+R15UvLtakJcF22hG3YBlBKxWMXm5YbNdkESC3S6nKDNM2yMKLdarrEWeXJ+T5SHD/iGGoRgMJZZjsEtukHaNaduUVU0SQ7/jYWiNoyyaQpInEbpwKbOMJAy5f3bI2b1DTsc9JkGf45PHBN0en33yA35w7LCZv+JnX35D5+CUZ5/8kMFkiuu5/EG8AOBfyoAkStlsI4pKkhcNRZmjLEW/P2QymXBweEDQ7eAFAb7vo2wLx7FxPa91+ZR1t4zi+T5+p9MKN8FeHAoMw6TR7Vjf8/x9CUArzlrdKlDqe7FWFAVN0/w77mOWF63IRdLolvEqDUGvP+D45IBeP8DxLAbDLo5rYRiidRYthVJm68Qq2YpS28L1XAzVPj7TbEXrXV5RSsq6Is1yGt0gDGhESdUUSAMMQyBEKzaVqfYtWOZeUDYgNNW+WMBS7TGSUt65plK27m5VVfvmqVaslmVJVVV3//a9q9zsBY2pTCzbAjRlVZKkCbvdjnxPSbBshe22X5CDcYf+MMDxFFq0v9swTNt64Tih0SWdjstwHNDtBfQHPfp9H9+3sGyDRjR71qcgTXLiKCVNE/IyJctjijKjaTS27eL7XRzHaetIDYFGkCQt1mY8Osa0fCopMWwXv9Pl+PiATtejyHOuLq64uV7i9/p88IPHdDp9sryiPxhzdPKQ4WRMmiWs1rcsFysWb3bkywhfCWxT8OTxGb2eQxpH1GW1x4qZdLs+/b6k2hWcqntYtk+UlDT7DfxWMErQEila57wuc6qy2DvNBut6w2/lG7zAZL2bY1kwPhzQG/pIS9Mf2xweTakqKIuCSedDqlzy8OgEP+yS5w1StC5te/neSdW6Fcii3bMirTKuymtc12azSDDwKPKM2c0lhoAign9fWwD876JisywwZMBmHXP1LkRZLkHfoCw1tgvnb39LuNNUtSZK5hS5JBhNOL9+xXqb4Bkeftdj2Jvw4sVzmrp9zdmW12arq5rx4CFBr+H5q19x8XbBQE5Ba3QZ4JR9apnwfP412/gNdn0MjoG0S+yOz5Fxg45eMVRjdoUki31+89tbwsTg+OSQRbLDV0Ncv+J6vcAwbVAW377bsn79Nf/X//HnzBoLMKCUeIHF8HDI49MzlquYJBaMuw5DP+dXX/6SXXLN1188x3SHxEJiOQ3J7JYsFIyODwl8izhKqUqQ2ubd+RWnZydUpaKoQta7NXlhoGyfpFzTCMnRWZ8o0qzDK+Kw4P7ZI7qdgDDe8vbtSzx7zGy+ZtgfYZoWliUYjlyQJXkqKVOHMvcwVIzbsYnTmCSp+M13P6c7sAmCIUm6QmrFYNQly3dsNhFRtiCNBZ9/9kPeXr9gu00pa4eDA4urF3M8zyXcaTpej7rZEGc5RZ2iawdD2lSlYLvb4NoTOkEH0cCgP+DXX3+H7fQ5OPG4Xr4g2oVQdPBck7QIidIts2WKkDV1CVlicnDic3V7Q10X9PpdymbN7W0EaOq6IS8123WBrjaYWFRFTFOleK4PosBUbf49jTX3758RhlvSNMdxYLeteHj2AXmW8tMf/8d/+4Xq//A//Rd/WjUpwiqpdElDxnja5+LdJYZs2K626FJyMj2i3w0whUEY3rJcvCVPSqTIoQi4fReTxDtc36HODfKkIk9AVCVSVnj2gKrJsWyFaVvsog1+Z9BWximbxWxLluwoc4jDGNu2qSoXL9BUlUuWFwzG0B02LOY5v/irt8wXKeHOIE8KPv/8AV5QYbkpuzAhyxIePTqjqXNKnSJEiik0yrB4dzlHmT66rinTmmDQJYsz8rxCGw6beUSR5jiOT6/bJ01TPEdhSYkjHZqqYTIco2yPsspJ44QwzRioLlmSoi1JP+hTFw1ex0SaHlWaI80W3L7ZRDRGe4YfFrc0jQKjZrVJOT69h+1I5psbwnxLkpQ4Xsr9k6esVymz8BItTW6uZnQHFhfrqxahI5ZEec53L+eYZsPh+AFpGXNxETIa9JgvNwgpydc1dRUzmBxjO31+9YuvKaoEIQ36Q8XqasNnP/iQ3/27H/Ev/uW/wnEkh/cGrOYLHn50zOnRE26X
a3p+QFVa7NYZRqW4/2CK42t0oTBUhjAbsihFuDuqKsdyWgG12SVISxAlMboZtRgiKrKoxNCHGMLBsqAoYLtpmM9nRMkG0wh4dzUnTUv8oEdDjmEElIWHsiyKsuDe2QlCNOR5g+V0WK7WGEpS5AXrVbgvrijJixDHGmCbfeoayiKlTgRW4+KbPj13TJps20peFGlU4CiPg8khgecxGvtUlcAyAupaUOsM5XsYhsPV9Zxff/OSTd1j9OQnnD55inI9hDTwXZcfzi8A+GJ4HyltDGVj2y7KsjBMiZSCphEkcUaeJ5RVfTemL4ucpq7uxvJFmbd4JMPEch1MZWJabVRFGK14lYaBZVkEQYCQsl2s0q2gkYbRuquuR6O5G91LQ+7rVduLshSdwMeyWjesrCryvCSJc9ar9sMPJIZhYVkurusjhCLLK+IkIcvzNvvaNNRNQ1GWe8i/aJdrhAEI8qIizUuKqkbIdnzbCsqGRre55Lbtav/AGjCN1pFt86wtg9X3fWxlUZUt71WKvx5/aB1WaN3V9wL1vZB9//fvRa0Qfz0yUCMNie959Ho9BoMBruuigTStCMOCLNOUZQuvNy2F69l4vk2v18UPAlw3QJl+W4calWw3KbswpiwqBBLHcfADl27Xo9d1CQKF2zGxXRcpFRpJVbdRiSxN2yatXURRVKAFRVGC0ChbsQlDrm9uiMIEtIHn9dEo6kbg+h2GkwO6gy62K/B9n8m0j+UaFGVLcNhur6nrBEsM8YTGFCmTs2OeffwRA1fx9V/9ksuLNTSqJTrIgsm0y25TkIQN2SphpEckSUpZGxjKak903m/dQ+t8NxVlnqCr9rXc6XoYnZq5scBQguPjE/xOQFruuLld7SuDFVoYuI5BuknY7TYMRx5KW+i5QjZq79yKdqLA98z/u/sXLYUip2TpLsAo2G4kSRZyc7UhDhsMVZBnmv/I6gHwZ5fnxEVMGFWYymS+fo02csJtynK9xHEcyswnSTPidIkwXOJmzdt371jMZyR1Qcd3sUyLDx99wHcXvwHRLufU2RBLac4vXuCaR3i+xeuXL1nMdxwMf8C9D8f8xV/9FSePu6TWjijf0TUPSOoc3wl4dnQIySXG+lvGvTFPf+cn/J/f7kjLgHfPb7B9m02c47gjepOA1IzZrQuEHrAjQOqEn/3sL2maAfcejHH8gEDDkemivSEYml9+9SW2DLh4/Yr1astyU3G9rjj98Ie8Xl2y2cYMuxPW65LR6JSnzw6ZL7bMbufsNhmPH/4OjUjJy5jhxMc0bZTVYbspOHtwRKNzPK8tQlgtGk5Op+i6wfa3iGbE/XtPubo6Zzx6xGR4uF9qTZjdbsmLmrJoTy4vLl8jhcHx8QnfvfkNadpgKgMv8MjrkPntjo4TYCuLsq5Y7V6zi7Z0gyGW0yEtdux2LSdXWCV9OaXX7VCaBvef3aOIJYdHD1luE8bjAavFhjyrqGuB6/To+kPyNKVpCoqiZtAb4g+v2axjbDHA0j1Oxo8ZjSR//hf/mn73PsJK2rrfRtLvOySpYJetibY5tt0jK9qK6MG4T5rFbLcRD+7do85MOmrC0YHParFhub5kF19zfX1BGKaYqv3MR1QYUnF4cMhg2MELXKJtzU9//B/+rRCqf2MIQdol4SbDH/Rw/JrtpuDy+hWB3ydNJJbb5exxn+OjETcXBa8vvsTzhliupn9wxLvLixZbFeeUTQGLXdv601G4XY+h7DOY9pktQ1a7V2S5zdOnD8myLpfX7xgPDpEqpQw1ZhC0KIbSZjQ6Ybe5Jo373N5e8fSDE0yliMOSJw8+4OjwIRfXvyZMljiWzcFpyc1VyfX1lqDvYNBldrOmXFcExzWd7pCrFxHSLukfaGwrx6QDpSAqcyzTYrdaobRJXWhc18E0LMo6bWHyWUIv6FDpBlcpPM8h38PSiwpqGtjNsT2b7S4kK2osoajtGtPqYw8nVCImKlIqISlFRZRrDDvl5cvfYJuf4HoOv/zitzx7OkbIjJP7I86fz9tQ+uWMxc2Gex8cUJUpUmWcXyzIyoxJT7DZZijZoBONaxwzm++Yz1oI8tB9xqz6iiiPqHKLkydHJLpksZ5zdCpx/TFCe9zcvmTYO+LifM7Pf/4/UzYNk9M+cRVxeL9HIxLy1MaxIIsX7BYjgr7F9NBmF294+vQJRacmyzVZVdP1Leaxg2P5pEmxH/XaJGlKoyV5HiNlge86mEJxu3gHpcDzJMqu8D2NF/hsw2uq2kVKTX9kkaXhfnM/xvZq6lojlMB0IS0anI5BxQbDVOx2WwSSwbDDm/MZvnDJipw8mfPw4RnROuXmbc7BsIPEIssj8uoGJ+ihBh6r5ZbJJGA8HpAXgt26IssK+t0hGIJgMKAzCAiCDl3VoTjUFJ8+QRuSJq7YbRPyokAI2o3v/cVyHKxa0AhacQGYhkBa7r5Nym65rIhWtDX1vhO9ZZU6jr13h95nQsW/M2qvqhYx5Lo+TdOQZhl1Vd2NyJVSd85iGIZkWYbaO5B/3X3K0gyAoiqRBghDYOoWY5VmBcvVGnSDYQqqqkDK9rbrum0ZMpWJYSlko9GNpqiq/XMAKQVStPk+IQTSVLieQ11X1E2FZZhtM0/RoIW+wwnRNG2n7P7xl3V1h9yq65qqTLCUSV03e/esunNLLat1xlo0lb4TqS0HtvkeQC/kXXyhzcpCrXXLKly3dZpKWa2ballYnkJa7e0mecw2qttjKdrnKGUrfD3XIfA9OqaHpiEvcqqypCkb8rwVuxrIswQpavyOh+3aGBJc38ByJLpRWMrBMBRZkpHGbbVvW3Zgo6nbBS/PB92g6wIpC8IwbRcOlUEUpftK2waB5sXrS4q8wpAu3aDLyfGAXdSwWcPZkxGysWhysD2bzWKLWdv0D0/4+OFDPnx6xnI9ZzbfIKTm0WOY30SMRI/7akxZwmydUdPs4f4mCN1u32uNlA1at9v5hlS4noWwNUWYYxhHmMrldn5OlG6x1YgsTdG6wpYjFAbhZsn9p0/J65jX8wXzxRv+bvATlGzz/e2yHN+L1feoKk1bSmEItKmIU420U9abEtNqGPTvUdYF5zfX8PQMgPlmTmccUKc1r85vODwZoitJEofUdc3r5zNsyyHoGswutzRVwvCkYL2IUCLAaiqWTkRZurx8e4lFQJy0Zs18fsHD+6fcP/mcOF0gZn0eP/iYe6Mt2o+5mkf4o4Yvzv8FuhTc8z/k0JsQJV9z82bFOBjh9/rk4nfxph7/5pcFg+ERjSwx+xpL2pRRwjIpCOOcm5cXnI7O6J49oV4ZnD6esi1tDqePMc2Qt7+9oQokqzcXKF0Q2F3SomEZLlGWheM2hFVNxzmg23U4iE/pDB1WmxXeoA8O/Pqrt6RZSic4wBoolpsFRaHodSdUdcZ6ExKHBePRBCkaPv3w73P+5oJ4u6XbscmSmG5nSMc3+Oart/iBw+HRCdfXzzFVQKfnYEiHXnfM23ff4thdylLwR3//j0nTjCy2mY7PMJRLmVb79Mchf/L7/5Ak+oqXL19Q6ZLVOqc7MOl0A5rawXE1vf4
H/PbbVxwd9XkbvsXyuhyMp2zfzgjXOxxnSBXVPPrsI9ApX3/1nMn0jKPpGZoSiaaqcwwpsS2DJHHQ5JSpw9OHD+h2KtK05uMnf0KpK6LcBJUhpWK5XvHqzYzPf/wTbDFjNpvR6UkcX7Nc70CWdDsjTKUYBkOSMGN9G3Pv4CNKc86Lt1/T7Q4whMNkOiRcw8HxIW8vniPqDpNpwKtXr/C93v87dfn/4eVvdFT/+f/2z/50OOoiDY937+aYwkHXDpPhFNc1afKa08MnbLZzouQW3Rj0hyU0JuvlnLevFzRlies0dJ0xvtvn2dMTDF1wNPF58ugZUprM1m+YL3b4rotj2y342284nEwINxndHjx4dMD11TW+3+XDDx8TbiOW8xX3H0+wHYMstRj2PS7eXmCbPllsYNsS14blLCXJSixrzM1VSH8gcEwDUUsMV6FFRZ40SNNkPPW5vr6h1jamytu2LRwaCaNhH1PCJt1gYGIbFk0jqaoCw5AIbdDpdMmLhDCMiNMU1+2ghSRLIoJhD0MLVkmMVArTEMRpxXa1JE5jXHdEmqVoq0YZFslWoEvZYm6KnN1uixSC599dEyYNebZmuQ5RluTwcIiyLWyzwVGKNGk4Gx/wm5ev0aaFLw06ymSbLSm0iWM52IOA3a7BUZIirhiNptzM3pGnOyxp0NQuwhAkyZYsrrAtmzCOCAZD0iLF6ZkIfHa7iCp3WK5vqeuCOE0xTcVmVZAXmqqC7apgE13TGdrIZsBiecV2s0WZeyeoEhRNiaksmsagrHK0FnSCA6qyZjlbYEgXRI1UgpvrHW4gsaw+rt2lJsUPLPpDn6LaIYW1v+2MokrI0prtNmpH3E4fIdredss28d2AzWaLpcbkmUGvO2S5muErnz/+wz9C1+SsAAAgAElEQVTh5PCY8fCQ43sHdAcdXN8FDBy3Hetato0QPv3BhE7Q5+DwkNPT+xxM7+P7/badC0FRFqRRQbyICbcJUkiCIMCyHXr9IR9dfAPA+nf/iN4goNf1ODgY0uv6eI6HUu2GumHuuaZ7l9Uw2sykYSik2TJChZQtAmpfjdqg925njhACz/Oo6/oO7F8Uxb5d6nuR9l6c2XbrStq2tWeItuxVuV9qgjY+oLXAC3yUsmiaCstqxaWpTGzHxfO9NpNtGtiOgzQVGgPTsDBNGykVtu3h2N4+T+yglIUwVOvyynahUWvRFi2odiGszYya+wUkuXdJ2yhCVbdi07L2Y37RXp33t71nxrYxAAMpxZ2oLctWrL1fsAL2xQf53c+bpkYYAmka+wpYhW25mKa1b97aZ4q13D9PA9MU2PYeHWYoLKtlwlZ1RRjtWCyXbHdtW5nAwLYVna7HweGYBw/vcf/hGaPxiKqq9/W0BXmuKatW3JmmpNv1OTk94PBwyKMnRzx+ckrQUdiOwDQETaXJspqi+L7MocjzlkecpiRxhGFISg11Y6Kc1rVN0pyb2zllKel0fJJow2xXstztyLKKOK0I0wotTG5nC54/vyJOUizH5upmzfmbGwSKrupwZA0pyoYoqdGiPamSQqCpEbR/buqaMs+QtFMXKSDOS+ZFTl1DGCVUpclqfUscr0ijitlVxG6zISsWlNSUOiTLl0hpstiWvFzseNKbtK8dxN1rWNBWqN45ukApanZ+i5pzvYped8rp/ROCbgdhNNTmlv/MPgDgX/VrqspAyAx0hWUMsQ1JmqyxDJt+0Gc07FGXYEqbuoogk1TawXA62Fpx83YGuuGLX/+S9aZgOj5gtV4zGrs4rqYfTLHtpmV09o9REl5cfMWbF2+odz73D37C+lwyUA/QFnT7E4SZkeqUJ48+Z5FmXN2YOGOburA5mkxYzkNyS7DcXBKuLvHFALfTJc9NTMfjxatzfvP8gj/4ez8lz9dczjfce3aIKVxS5eANHHRkUBo73l5+ix8MqUTGZrvm9Pghnllx/uo12hSMp2OuLq6Yjob0un2yrGC1WTHsP+Lh00OqpuH8zQxlV2zW7ULtsw8ecXs7Q2gLUyl+8cUv0E1OlqZkRUkSCX78dz4hz1KWmxscJwAMikJyenKGYWlevX5O4E8oqghld+kPBrx5c8l0GuAHU1wzIIlvGHQecm/0jNXllpMTk9+8fInQXku7WZUcTu8TxyGr9ZLhsItldyizDFXmqN0LHg2WfPzDPn/+f79lMp6SpCt63QADH0vZDCc+3b7L/HbFbhMzmQwoa40hfQxpMBmfcv9hl9UqJskyuiOXrF6QhJJHT0+JY831/BVVqairiu06xHIqLMtis92QFBviJKFIDJo6bklLusSzA16+/i2TgyN6/UkbYdKS7TZkt4XpYcDsdtHm8EUH33e5un7DH/3+f/63wlH9G4Xqf/c//pd/2jQpwrCoqzagfHg0wVYCKSzCcIcwHKraQqiIOCrxuy5l0ZCXEUHvEFManJxOeHD/KadnR8TxltVNTVU0CF2TFDcUhUYakqPJE3o9j8FAYdsVaIc8NpFWTN0UnJ48ZjQ44OmTx5yfX9EbQrhVrLcbPL+mzDKktljdJgx6A1xnQpGF5JlFVsY4nYayjsm2NV3PofEEVWlThjmdrqB3bDBf5iwvYRCA3TOJNyHTwTGNNJBN3cLGBx2yXU6VNjQNlE1NQ0NZtx90juNQFjWWaRLFJY5SmJakOwjomB6bIkUYIPISiY2t2i/VMhe4jkWhE2zTopIGyhast3OE8kjKiO0up9Alm2SDbdisNlvyPKUqK9arGwK7jy66hMmKgisOTjokmxpdGhiWx2y75naxwbUVw+GQ3Triw8eHbBcxt9E5wbCLpWyKRqFsG2U1WIbHer2hNxK4wQjX7VAVktF4gmxMoiQlT0tMz6OqMtKmdW9sU5KVgo4/RuuGXb4mCAKKNCNvDMoYHKtHFGaUlYHEp9PrYKoGtEDZHQwT4mROf9DFcwO0Nqm1JCsTHNtnOh0RBAbnb18QJxs0Ja7v0RsYLGcLHHPM02dPKcqYTk8xGQ0IvBFZuaFucnrdDrPrEFPZWLaH42l2yRVKKfrOkLP7p8RpTtJUeIMObjBgNpvhWB1G42OUcvG8Hv3+hLLUOI5PWTRsVglRkrBYRqznS3bbHUVaUJTQaI20bUwhMAxQtgNS8vHFtwCcf/g7KKNhMBzQCVwc18FxfEyjzXQKoxVSeVVQa01da8qyQmtoamjqhqqsUIbCNBV1UyMQlEVJnhWYhklZVsRRi1F773QKIe4cxPdZVaXUXXa1XUJqq1y11vusZitSDaFQpk1V18RJhDQknu9iKoVpmJhK7YWgiRAGlrIx1J6TSbulL4WmaSqgARoaGqqmBtG2W0kp2lH/nkygdQ2ipqxyiiqjqiuKskBA62TaFva+GKCq2gpWQ7bd8W0UQN591mndFhi8px0YhoHrOPu8btMip4Byj6eq630NrIaqbpdp6kbvXdL9OK295XbZqir29bINhrTQ2qSpGooyRQiNZZl38QXTVGi9F8t5TpGX7LYxt7dr3l7MuJ2vWofUVKDbE5C6fh9RUFRlw2Kx4e2bG25nGzbriCJvsJTN8fEBjx
8f8+jRIY8enXL//iGdroVSYu90ty1ldVNTViWmEAhdY2hBv28zGfv0+x2m0xHKbhcB+50Bk8GU4WCKE3SwPZcg6OD6NrbrUdaCi7czpKG4f/8MZZmYVcVY96hqSZI1Lc4LcZcPFcJoX89NTplnLT3BkgS+i1AS3fM4GE2Zv7lkdb0hjyXbcEOY7miaAYbrML1vUtU1njFk3OsjRMX1PKSIFY+7Y2Qt+evRWLEnSTTNvr5VQo3Guzfmo4+Pef38EsfXKLONwOyiGbuN5h93Wv7xP08W5HmKKQwGfQNR55hS8/DBAcowcW2T7WbJer2hzBs6nSmu60ORYKkUU7jkpUvgeWyjkKYWrJc7bq+XyMajShXj/hFPnzzg4vIbtrs5g16PwTAgyVM0Npah+P2f/gM++lGPV1dv2cxu0FKhBUS7HNcyUVKwXl3z3Zdbnj56yrffXeEGNtv1ktH0DF1XpEXG6f0hy1nIvd4JvYOa6zdXPJze59mnH3N+/h1F6fHs4ROKeI2uTJ58+hE9z+agf0geZfS6NoPuhPks5vhswu1NzMMnj3h58Wvq2sFyCs5fz6iqgm285eT0jJvZNc8+mrDbFQwGFpYa4fk2UZQR9EuidEkUh1R1RVVLHj58SpKFBN4h46nLepOBUJyePSTJY2gqbNVntZ5RNQVe4NALTknLJVeXCwadAYPRkPOXFxRpzO//4Q/5p3/2X/F7H/9jjk4Vf/XlG374+Ufc3J5jij5CaFxXMp/NMIXPqN/nhGuay+f85If3+OGfCP7tv31DGj5BqyWbbYSobYajUdsmVRQs5xvGwwnr9YZOd8Jye46pDJJEY9nt0ub5xYJKC7bRjuurSywrQEpBKRbs1hGHhwMWiwW97hDf7xCnK7I8Raoa1+nQ7/nUZcVwPGG7XjGdDLD8LW/ezhDSZXowwLY65FnN4fGQLIuZzWaMJx1st+L583OUEvy93/v/gVD9J3/2T/5UFxHrJKLTGeLbHkWTY6khYbJgPB1heRWX7xaEYUaUrsnyCsvuc3XZMOp196Okhk284vz6NUm24eRkymeff8DVYsXReMhw8BGlWXN66rNdxBSi4pNPf0Sc2gwCzeV6Q6AqRu6AZZySbSNcZSC8tlWmF4zoBwPyrG3YQcU0skBaEHR8LBWQo1nEG9IqwhIOfV9xs4g5mB7x6Q8+R0tQdsz9szEj/xGuW7JKI2zT53Q8IV6WiLJhEV0QUuMbAdl2R3fQbuomcdw2tGQFnU4P1xuSFRopJHkR0vEnWI7keh5iKYsi24Kxwgq6FJlJ33cwHWhsi1pn5HVDGq2oZYOjPOL1nEIL1nHLHS2b9kPnYHjGZpeQZTHS9HizWPF6+Q7XdJGGzZvzSxpDUTUp0qhYr2M6A5vByCaOLPKy5sXFW7bFisFwSlNClRecTA6pm5wkt6jMmqys6bqH5FmCqRSYNbtwjm0bVLVgW0YkRQ7SwjJMMExMHw46Y+Zvb+gN/dZh3WZQw3q3odEtUD5OUrK4piwLkvQWYeacHj8h26Wswx1RWWKYmixu0CiS6gZL9drN9TwnDQ2kobB9F12BY1rE8ZoPnz6iN/BxA4cwXHL/7LR14lcrlNHDNWzS7ZrReEIhJbVIuL55Q4XGC6b0ulPevb3g7ZuXDDpdok3IyxfP8bwu/eGEuNxgOgLLdvCdLt1OB6UESbrDtFrEzmq1bEdYUUoYx6y2a8I0Yb0LmS22bLYJq+WG+WzNHxdzAP7b65KXrxe8fHXN+cWMt+9uuLi+YbZas95FZElFkqQt67Rpl5rej6lB0zQ1juPAHgNlmqqdhjcaZVp3QtN1PQI/wDAUtmMjDUmjW1EmpMDaw/nLveOW5wV11exzoALTMO+21aUhMVSbfe33BwSdHoZqeaKW2xIEpGm0KBdl0shW5CGM1hE1LQylMJV1d9X7aIOlLAyzdTsty8Jx3DvxrHUrMKSQWErtt/LbTGmzF59lWaKbGilakLtptktkcZJSlhXVe2bpHYJL7kVSQ1VXmHvxrrW+y/VK06ChXaQyzH1+ta7Rjb7LuTZ11S4S7R9fm2lVd7WgoKFu0woNbVTDMBSdoIPn+e0Sm2Hsub4WpqWQUkNdkWUJuzAkz1sUlpR7N7LRSCkIAo9u18NzbaqiIopyVouIN6+u+fabC55/d8XtzZosKej1O0wPOhwe9Xny7IR7Z1NOTqb0ukG71Y8kDFNWy4T5LGS53LFabdmsQ7abHVEckeUpZZUBFdIALWqUaWFZBobR4Pom3W4Xz+tQFhpVGHRrhzxvqOs2k6/fx0ruHM6Gui4piwwpoD/o8OTJKcLW6G5NoDLOHg0xDIM4bDgePeLm/BbXlGzmETdvUmQhyJKI2buIdKsJw5pFmrCqIx55RyD2zVR76H978tGehEhtkOuGV+lbXjx/TX/konXFelVQ5gV5XOI4Bv+p3Qfgv58tcBzFwXTQRmrSmH7QoRENeS7Is4TtwmA6OiLomETJljjOKcoSiYslfTy7IQwj8iTFUibDwZTRICDcVqAarq5vGQ2nPH/xgl2iWcxStruUzTakLBS7KMQNFK/ffo1rd9nEgjBZEYcR12+3dDoBaRpiSIHjwvNXb5CyIs9C0tTA93yefzcjcC16fZNwneEPHY6P+qw2M95cRAhpoUXDd68ucQ5Nrm5uMB2Tr3/zDfcePMA0AmbzGYv1gl2YE2dv2a0ETx5/yhdf/ZrDozPe3XzL61dLptND7t0/o9EVWRFhOQ6bTcn08AgtE7YLQbcvWC2vWC7nuL7NZqU5OBhiSJPeaEwYpoTbjNVuxfUsY70LKUuLYT8giRp+8Okn3NxeAw2mdHCdHp5vksQZq1XMarnBdz3SZM35iwtevnrB4w/vMT34AfPVFYbccv3ukk63x3xZ0yB5/s0lP/rkczIZ8+bFluORycuLGxSCd79KoXfMYNIlywqy2OLp00d4Xsqr325xHZP+sI/fMahKTZHV1JXGcTTLxTW7TYHjZ2zXMUm0xXcrfNvH6xqsVguU7BD4Ju8u32G5BdGqy5MPR5R5RLhZ0HMP8J2AvJCsFjNsd0BetUuWhlQoy0ILRZZpeoHBJluwS2+J4oKanDeXN3z6O08wKsFPfvSf/K0Qqn9jRjVJV4zGY4pyRbiLKJC4fUnhLJHa4MXL5xxOH1JUIRgVdeUxX6zYbA38fodlseH+YZ86NpjflEwOYtI6JipekRcOvm1y/u4Cw5pzfCzwbI+izii3ipu3FrYNcbjjycMTsk3I68tXDCen2LpkE22Y53O6vR40gnfv3mA7EO0KDg4G9DpjXl8syIsU0MT1FtsKqKipihpduwS2Yj275Yv5hk4fMAuSZMZ02sexPmT19c+wvYaaHOkqmqZESZMkS6gqhT/t0B11iLcFk14fUyq64y6r3Q12MGSxmLdAfdckzm4oUXQGkiQVxFGKcarQRsRmtmJ49pDDB6f8/OuvcKyGwOsRVhXT/oiqFJRexXa1YuCNMHSN2QkwtMcqucW0KgJnxHKzARMCyyArQ7zKJ8mgKCNcYRAcB3iBomd3mM9DhI4RmMRJi
JIGu02I5/g4jsd8MUOaEsoUw6gY9CWb/Jpef0AlaqStsROHtAhZRbcYfkAY1vhe2wKyC8u2cSlQHJ0M6HaHzJdrlGUQRwXS9Jld78BcMD7wsSwfag/X76N1wvxyTWOANAWqcckrwIiwlGRzUzE91iRFSrmN8FRBSYhqBI7pUuUG6D7LlWCXXFE10OQGq2WGVAGDnsPA7bEJF0RJwc3tgqxoELbAtYaIQpHHEdfpSwajEXR8/vXXX4JhYuiG3390xvnb77D8jNHojHH/ANtSbNZLHNvl2bMPsC2LXbhFGjWGsCnzhqIoyLKMIskxpIlEU2QlSQNFWUMbkWS1WyIkWIZDkVV3DmLrtpnU0sC0DKQ0EHukkuM4VFV1J6jei9c0TRFCtON7RyGFxDddlPp+bP9+y13sndWiKCiKAkyFRKDrBjQ4dssBZT8u1/t86/sowfdj8hajBa1TWVTf50ANw8QwNY02KCmBut3mNoy2drFpW7GEbIsTWnetzdV+L8ahLGuqoryLJtxt4u9FYrUf1b8X71Ia0GiauqauS4Ro/5/WLZ5L094vWqPrlmwgBNS1wfsFcdOyQLfLUFKa+Lazf2zl3XFvGap7DBaCqmnjA0opJPIuA9weixbgX5QFVZPthbZFlGxbwbkXt+1x3Mcx6mafa6U9qTDAcR0sZWEaJk2j97/3mDTJEI3A9S2UBb2BTRB091ljQZGXLBZL3l1ekMQlWVbj2DaeZzI+CBiOupycThgOe61A22zZrCOWi5DNKiTLijYzXINSbttyRDstkgZYlklVVNRNjuc7JFHI5bslIwI+9s+odEPZaLRoT5De56nfy9V2mQpoQAuDomiYL7bM64RfvD6HPMVwTHq9gL/zh58xGXf4vT94yvnrC2azDUcnRxRNxKvXb7hdRQTTEUK0Qr5qSoTUCGHeLdo1uj3REPuGLE1NRUElU4KeQAtNmsWYpoU0Cg4OxyA05O37tt8NcAObMA65ulphGiZdVzO7aWun/Y7N2RPJdr0h25RghHQ7h7jOgDgqcN122lCWFdbIwPNHDPo9JCGGrHGCkrcvrnn+/IiiMoiyOWG0pduz25iRN+J2cc4XX39JpQsmB+3x+/kvv2HYH+NbQ37963ccTsZ0hxWbzZKsUHjdiO2yx+HxkNn1hiRZ4jpnzC4NtpuaMHxFkT3j009+wq+++YLr63PKJmYy8bDrCgsL2WhkkfCLP/+O3tAjCDrkWYRhDRn3j/jLf/MLer0HSCRhGFOkLo4jWW7eEscJaVITpxmdgcnipuLBgxGvXl5wfRPz4PHvcXu7aEkxCdiewWqdYFsKz5vi2BsWsznNVuF3ugwcm+36Cs854HJ2wcX/esmjx/fY7G6oCw/TNNmsQ6YHY7J4R7g1sZSJ5IDPfvghVpCz2F2hfZ9f/uJLfvSjn/Lxp/+IxggRUmFVO/7BZ0/55HhNap9ye5FxLUZsq1v+2f8Scnx6xur/oe7NfizLFvSu31p7Hs8YJ6aMnCors6Zb19W3TbttNz2YB4RB9gNmkIwlxD/An3AfeEF+Q0ICgbDUQqAGGVsYEMZyW8aA2w10u6u6b92qysqMHGI889nzsNbmYZ+Ie1tCfgGJJqRQpkIZJyJOnhPn29/6vt+XXDHYPUGaT5BdQRQHfP07KQezGbajubmeMxwFDIYerjNku92S5ueUVUGnMjp1wOxQkqYmXetjmArT8LCMAbZTs1klRFGMJYY8ehHSNeDZMx4cjzg6fMDrd79Lpx20MGm7jPNv59h2xePHjxDCw5AOpe7I9Zz371Y8enyAMcyZTh7gGmtefvuSzaX+fyQu/998E3eQ4/+7t9/41087X054c7PBDzosanQ3YDxtECpidbNGa4Xqegh1h2AwGjAaDbmZv2f0cEh5u8VvXMajEdt6w6q4RDU5zx4+RXQWu7KkKSVFdsvDpyOSbUTXdVzevEfLlmdHP6SzMzzb5P27a4bhgJGlKMWQ7y6+p20sPNvFdlOKvEF2Dq5TYRszFmlDWa/RQmG5ElMO2G0TAsfHdSTTg0NOZgd8+/U5/qAiLzVVU6FkyiCeEHoxi8Wcxc2aqhEI4VDplLrLmdhHOAOH9XrNaOiSLDtc08aLNUnR9u5i0aFFSxQf9MeYKqXTNuv1EtvyIJRotWNkD7HtmJyGLM+ZxiFl1VFQIjV0KsKwcwLTRBcWFRlKGmxWKUmVEbtDTqZHvLr8Dt1pTGz8wEE0JZVSCFNgNAajoY8/sEkLqPKUPE/wQptOSyzTxzJtsm1GFA5o6pJCKUyzwXckyTbHcCxcN8K3xliWZr6+xZQOZZ3TyJZGN8R+/yTIS8UkHiJLSRQrMEO2+ZquKSjShqToODg86mcqqy1h4BEFE4Qw2O2WuJ5Do0GrmjRr0Rg01ZyJP8SxpzRWySZd40qHk8lj0nyJ7w/YrTaEnktRl2R1hmlJZFczHAzJC40fhniuxfZ2h7YFmepZsKYO8cOIXXaDazk9HaAzGcRD5psljap7lmmjiFyfaRwzng37FrVh4dkhD05PsSyT5Tzh9qqgEwZlVeB5Hp7rkOcpcRhT5iWr+Rqleuj467evePjkCf9pmALw79pPqeqCsizoOt1PmBq90+e6PVmibVvkXpjcPYfv5k/7bKm4//gdQN405Z4KUPdFpU7ct9ibprmfXG3bBq3a+1xo13W0TYvj9vnUOxHc7lvwP//etu19yegO56SUvi90SSn2x+stptkzTZum6d1KYdz/PHcRBCGgrPrVJ4HoYwxC9v+2U/c5xjtO6t1Igd6vS/URhl5M9oJVolQfX/h57NRd9EFKuWefKpq2pRPcxxzu+KqI/uso1Ze2xJ7BWdc1PZtT9pnWruuXlvrwJezxR3d52X4S1MGyLXTXi+heLNN//aafVr2jDkRRhOPYqKahyHOKoqYsGhAaz7fxHJvhKGY6HeF6JkVRkCY569WOzTqjqjRto1C6wfNs4jggCj3C0MMwoK7qnrmblSRJ35je7XLoOoLQ4+h4yuHxCM+173m0WndsNwmbTUKe1RR5SVU3NG2NwMCUDpYtsW0Tx3HxPJ+otfjCfYBpmyx3GW3bt+97gUjPTxUS0FTZliJN+hW3YYR5YPGNfcPpyQNc02SX1khDYdsCqWF5s2YYRXi+ydXVO1RbogEpPMqm5Hf/6UvmOzgZDPm1wQssw0Tc8VPZjzrshavoYN0m/KH1mrPHMVmW4IegdI3jatoqoK0tfuvoDIC/tnlNq2qyXYcXhCTpAt0IxqMZfiQp6xzMFil9DKHxXYfNNuHx2adMD0KSZMvV1RzPh3dvc0Zjj6K6ZeAf4/g262XN4fQhaXJNrTIaqRmNJ4hOkKU1u22J47kIL+P99Q3TgyNEuWE+X/Dx0x9iCJvZ9JBkkxENOi7e7miMGt2laDUmGtjkRcl8+R7T8FB0fPbxB9zebPn8By/odMtoOuTi6iVV3dJ1IR89n/LlH36LYzmoAlpd4I8mpOuaBydDXl/dUGcdn3z4CZ75iIvVH3CzPicOHnP80OD9q2vqQjOZzDDMlqxcMp6MaCub7e6CTz7+
gjwt0a1gtdoQjz0Mq+b9mw1Pnx1TtzsuzhVB1GK5LZuVZDo+5XbxDWm644NnzymrHNO0yYs1ovP56PkLfvJHL5mMPfK0YDR4TKu2hMMK33rM1fxrVBvyF//yF/ydv/V32Mx9nj57yvTY4PZ6wZOTp/yFP/MrlMXf5fz7a0r3L3L+9jUv3/wOjjnF6DoW2w2hKIkOz3AcF8OsiKMJi/X3LG778nAQx9jSRemGOA749ttXzA5mlM2cKDji+PiE9fYdxUoymnhcLN7TGRrHhPlVRjg4YTYeYDk1P/36FfFIUBYKz5mh5QWrdUoUH5ElLb4TIM0V79+uOZp9wnAG3718SxC4RNEQR1oY7DClTVnnuM4BjhXyb/9bf/1nk23/H779M4/+/7t/9J/8eBqPuV4tCf0hw2iIP/BJk5Qs3WGZDnXd8OFHx/j+IVqmqFay3iQI06GsN6xudvhmwC6Zk2YtihQvMHGsI4ZDk9v8CteByHiEG9V4/pA8mXN6fExgh9imw83ihiKfc3j8EFU6bOcXDMZTLtcVXggXVxuK2kI6OYYdYHuPqPSast7QmTmWFSEwKbItke+iGou0yhGG5PrmhqZWDCcK0+pf0E7Ojkl3Nq7vYhgm2VZjW/0vNMscYUqXm/kVjmUymoy43ayppQGOQYcm9EPKXYpjhQhD4jgutmOyWdUEkUGRK8bjgLrrcH1wXZfhcMhmMefBaMbJyREvl+/wrI62LUmajGgQ0LU5jZkTz0Lmb69pVEsYOwwHUe+eGAJNi+85lE2DqgSWKUB27LYVgRuzTlLKRhF4HqoJSZIcx+4dzdVq1QvFpEAKybZdIwyJZXtIbTEdTrG0jWhs1jcJwnagddiuUqqiwrUkE/+UbCmxvYK8qpBmQ60U15s5piMwTIeyqTHdENcyyMsNlS5QQrJNM+q24PjogIEfcrNZMT2ckJcNSgtWqy3QYTmSrMxxfJtslyIQtKokz2tkpymqFCktkiQhva2ILB83sGmlxc16ybZa4wx8kiqnaQRIiyjyaHKFxqASOZE7IvRDLOEwCYecTqbYCAZeiG4UaZGyWu7205qKtm1ZLJaoVvPm7TmtzvEim6xMSbMU1YFp2Lx5/YrdekngWdAqgqDjs8+f4Pk2v171QvW/1fL+yD4Igh4RpXsR1ewpAF0nKBq5IZ0AACAASURBVIryvuijlO7dyr3jaNsOvu/dN897jFVzD+PpekWwR14pHMcmCHwk4DkOgzgm8DwC3ycMAiyzn7dUSvWjAfsMZbdf9LkTc3fFpV7Qqn2L/w47BHuiOsC+sNU7n1LKvgQm7sRzT0Ho/27evzu2uy+OGf3qlmVhOzaWbWFaZv9ziR6bZZpWz8vcZ0lt27lv69+5mn1JzLkX6YZhUDc1jWoRP8dN7WAvNoG9w6uVgu4ut9rHBZTS+9ECY3+k3GfObNvF3pfSesyVwrQsOnqcGHssUtPUaN1imsY9vQF+5ipLKfE8j+lkwmgc4nodhqlp2oYsK1nON7x7e8PFuyWrRUJZKDpt9OtP+/UvaUDdFKw3CTdXW64ul1xdLlktNtRNQxC4PHx0zIcvHvLZ5x/y7Pkpw5FHUZS8eX3JH/3hS77//h3nr99zc32LlBBGNscnY54+e8DTpw84OjrgwdkML4BWVWw3CcvllvntClkppsKhrCpaLUGYe53YRxh6GD9AS1NmdKrFMEwGgwHRbMQuECRZRVUqtAJpWDRtw1df/ZS/9/f/N37/y69ZbPrSTFPWbHc58SCi0QWbLGe9q2mEwU2x5Wk4Rgr2jxO5f7yqfTxDkKuan67fs0vX7DYp24XGNgaE3pDdtsa2BP9q2B/9/4ffvSTLS5SGB48P8UIbVVt4vkXZzLm4usHxBizWazw7JNvtqKqGODygqQy+/uanIEoG8ZQwiBGywnU8hPZo2hLdKV48+xxh7CiKCssOgI7379/hBQ6G9Pnkk8+5XZzjuyZ1WZLuSvwgpK4sqsLCDw3mq0veX71lvUsIAodODbAcA6UaVqslrc4wXAPLtlne5HuGs8+333/DanPJ+4trshwM4fHm3S11k/L+4hUCn4PjiPcXC2YHzzk6jnlz/hLXkUR+RJW7rJKXqC5mOLFZLK8J3DG207Be7lCkJEnKarViPJgym45J8gLTainSmo8/e0GeZxRlzfffX6FbRatLOlpMw+Ty3Y5/7hf/HLbVP79oY7K8wrBNRpOQ9e4G3UJTK96+O8eWBwSxomkTooHPYDhmfBBze3vBapkynR1jGgXffb3go08e8t23r8nKhLIU/ODzX8L2H9DIBf/9P/jb+PYDnn7yjG8vf4qqDMKhIFOSIqlouSXPKuJowDff/RG2dcDnXzyirDTDeMQunWM7FkINMUyFYwscR/DTn3yHUh3jUT9HPZqF/O7v/R8YIqJSJVlVMolHaGUhjBqlGlTTc5p3u5w0rShKaNWGKHLQrUGR14QDGyV2KF2RJAuWywVtZRLHFk3hYboWN9dbnn/0lOdPf+VPxNH/P1Oo/r2/+z/9mFpzcGzj2hmR4zMcDcg3LZNRxC//uR9xc7WjkwZFkTOZxiyXOxTLnpW5TDmYRBhOh+pyHjxxaZoOP7SQZoOUJa/ez5keOUjV8xmzpGQ0iLg4LzmMB4wPJ4wnMb4ZsN3mXK++wbNnJFvN7LAhWe4ok4bI97Asied67LYpq0VKUiyZHA0ZDQ+xTZembHFdj22SkhULLF8h7Za2tXHtmCiasN3eYpodCAfdwvnL1zx7/gnrzYKi1TSVxDYNPN9F0rJJK243K7Sladqag+kRyarAQNBowWDo0DSKYJRT6wrPGxDHJp2oGTgCx5dkjcIfDXBcm7PZA7759itKuSIyh9RbB9eOqLRiuVnjD0dslwq1K7BjiUVAXXc4gU1bF4SBQ0ODMEzqtAGvL0O5hoMdBhSNpipysqLCcwKisU2ab8jzgul0iO0aqK6iajKicEzbSDr6FwLDsPBCm3n6jkT0qyC6WzEe+VhyhGN6OGZDkqwwpEGWJ0hDUFcGra6wLA9LeJRlQxD5aLlF4pFvJRYmx7MxaVrTdT43iy1FkeLHLlWl6JoWx/AQhovt98W5q5s5s+EMs+tnVA0ZMIxjRmOfFsUyyXlwesIHH3zMTVKQtDmh6zOwhzRFQbpJ8G0TQ9aosuDs8CFRFJGmC0ZhRJHldEpQVYrb+ZKmFZR1i+mYCFljmBFZ1vRHr54gr3bYToDqTKqmJx4YpkVSplR1RlZuWSxviKOA6XhE12qOjye8e3/O+Zsr/srABeC3khSJRkhJGEYYwsS2+hUi19231TGwHXPfdu/h+YZxB+cH6J3Su0b3ncgxTQtDGv2R6t4NNE0Tz/Nw7Z6UYFkW5t5lLcuSNE32t6H2QH/Z8/fgj5Wv3H356E60Oq7du6awP1rVPVZrn8/sAf/+fd7UsZ37/Omdi2jb9r2YvLt96I/Ef+a67kVzp++/l/727H32UN47uT9PM7DsHrclhdxjsxR1Xd87rAjRZ2r3E7J38Yu7MQAp5B/jrvZMVUHTNP17Xe8nX3smbd3UFEWJZe0
vHnT/oqlbTacETdWjqCxhYkoD1bbUbXNPHACoqpqyKNhuNqzXO4qiRyxJYSL2g6BS9uKwKFLSNCHLc/K0IMuKfbTCousMHMfA8SCMXI6OpgyHA5TSLBdbzl9f8vK797z+/or1qkJKl+l0zMFswLPnZ/ziL/6Qx08e4fsx223G5cWCVy8vefP6kpubJUVe4Tg2Dx+d8Omnz/n0Bx/w/MUZD84OOIhijo0xXWfQ6P2kqexpDd0eT9br85a6KNBtn5uOBhFKdly1O6S0QWnaqkXXmjzdcHV1wYuPXvDxJy/wfQfHdrFdGz8estmm6E6wXOUkSY1hdASG5FEw21MiOnTXYNk2SoPWLUIq0qZkQcPhdIojTPJVjalqRv6IbF2SLUv+6ukBAP/Bl296yodX8fb8itvLhjgccHo2YrUoePz4GU7ULxKdnT5gEA2ZTGPmiwuaysGyC7KsZr3e4tgeYIOwkUKzXN+w3LxDmiWONaZUG7QApUoq1SAtl/ky5d3la1zbI99V2LIjcCb4TsjBNKbrDHbpAk2L64UcnRxQlil5VTIZPUWTobWgVglpofD9QxxHEwVnWLbk+zdfUdeQpB1KGXSy5WpxgyVtZtNj5usVnh+QbK5A9eYLtSCKAt69fU1eJKhOYNgmrUjJihbbLtmu+/xkWaUcHz/AsSKKfEeWJnz38g0fv/hTdEqwWN+w3aWsN9c4jsFwFNPWGsvpH2u27dNUDZapuXj/lqPjA7579ROksaXIO3ZrQZYucFwT5Ia6XqIRxMOY7VLi+orvvn1Nkm56FFsU8vXX3/P0+TGD8ZRdUjMYBmy2t1xe/YSTg4+YTL/gkw8/4s33/wO/84//Z7KsQlclqqk5OJ7gOw5llVIW4Hqauu0wpIvp1BS5ZrNOePLkCeev33JzvWQ08YkHPlWh8f2QAR5Xr78kMieMokNCZ8jJ8JS2XjGbTHj5/Sva1ubkLObi8oLbxRolb0kTSTDwkGaGZfRxEtUIHp49QemC95cXeM4Qz1XotsO0bOabJbWSBH6E7Uiubl/x53/p/wdlqv/ib/97P94utiw2GUoVBO6Iqm2o64bVcsflzXvG4wOqdMLR8RGBe0yVe0xGU9brDfkqx7QEujMw3JLpZEC5c3lzfs3h8QDPa7m+SBkMAhRzrq93WGZLss3pOsXx4QFpWXBxccEgDrm4PqfWLbu1S1EsGA49RoMeTj07ipkvU+aLCtMpWG0WhAMP0UV8+/UlnuuRZhnz1fu+Dez4jGcjdtuEVkkalVFXJXlWIWXvYM1vrvF9j02+I61LytZAtwV1smR2dMqrt+/6Y7RIMgyPsKQNXU5Ztqw2O0bjmHAgefLhA5q2YDAccH25YXoCpmVQlDmVgkEwgDbl5PCYy4sN682a4WzAcltzEA1IrhPaas14ElImHVIoKq3pzIrbdwWD+JhOVOw2c0bDGAxJEHoIoYljj9UiZzqZUtQpuzwjCHyqomW7TRkMYwQmqnHIc1gvd7StxrY9DKWQQu9LEprIGzO/2SBtqFWH5YDoDEzLYbGa93lSV+GFAsN1EKp3iuqmJY58HNOiKjJM4eA6HXUKUegShDaVgk56OJaHFFCVCU8eP+D1m+9pW8Hzp2ecnR6zWCS0TYljSBAOozhANRWHhxHTScggtinLgu02x3JsLLel7hp2lUJRM/RcRlZMIF02aQ7SYRwfQGWStyVhHJKsWmrVEXgDhOgvwoqqRNHt83f90XY8ChlOIvK8JElaikKT5il1W5DnJbvdlqJJ2CY3JNmcLNsihElZSOY3KVdX7ynqDcLSRHHIv2T0TtI/CDqqKkF3oFpNWVZkWUbb1mjdcDdF3k+Stv0O+n61R0pxPw6QpmnPo4R95vKPY5ekENi2heu62KaFans0Udu0NPVdS11RFiVt07uxSu+ztlVFXZf77Oj+8/Yi8O6Y3Pdc4kFMHEf4gY9t9w5x4HnYlolhmvuVJXqMFoJ636q37TsMVncP2++P1u/iDd29G2tbdj9oYPaO8l3koQf693fWzwvQOzGLEL3oNo19FKL7Y+tTgj5OobS6F839hUIvalWr6HTvFJumtc+HKkBgWTae62EYPdHjLqcrRC+Ym6bGsSSDgc94FBKFDsNhyGQSYRjq3kEDo59W9X1su3dkDWkgBZimg2ohL0rKstpHIgQdGq1rLNPG92OkITAMje0KHI8+N21q2rqirRSd7nm9VVUwnsQ8+eCE2WHEeBzw4fOn2E4vIt69veL6asf8tuD6cktdKfzA4vhkxo9+9Dm/8KMf8OjxA44OZ0hpsFwkfPXlS/7p73/Dt1+/ZbVMcZ2Ak+GMJ9EMy3HYZfU+xrGPsIj+/6CPP9Q0RR9TQQg816WWLe+rJUmy609M6l6IKyU5PHrIwcEhYegzGEQgOjpp0mAQRDGD4ZjLqyXrbYrrOOzKhKsq55E7xsBESgtQ2E6HkB3X24TfvviK0wdj/uV/5c/w6GHI82fPODmOqWvJclWw3dX8O097ofrXf++iRyjJDstgf9yqWS0THNdkt1Mk6RzXsUh3muubK6qqplMmrtdhmjZR7NOoCi1KTMvDdl2qsr9Q0F2HakwOJhM0Fde3W45PXtBqHyyBkhsMu2a9yBmGB5wcPKCpMiwZ8fjpjO1u0a8YlQ2xP6YqaxbLW4Yjh/eXtyhuCUKDoqhotI0wOkwtQCjSrEDpDi0EhuHSGVDUCUmSs1msOTwYAy4CG9mCVBrXN0mTBVXh8OKTZ4wnI7wwxosLdskNN7dbQicmyVa49gDXiajqnFblFBuPZx885JuXv0eZ2Tw8O+T89Sss28dyGharWwzp47qC2/lbtG4ZDMa8fnlOVRZ9LwHN0dGE8zdvCAKPrjMYTSeUeYvruCzmNVpUWCZokfVxJ13RacXRic/b8yWD4QOa7h3bueaDp1PWizVHB2d89dVP+Zt/6x8SjkyG42N+9MO/xPi4YZ3tCMYh4dBlcys4OZmxWjQE7oi8XNK2/UVRp0wcK+bmZo4h+1iNZWt2m4aTs4C3by9ZzGvKvOYPv/k/+cOfvETaEyazKcKuma9uGY3PKPKMg6OQ68sNR2cjtNySFwlYHaaticMhjnlEss05Pp5CZ3D+9pzJQUyabRmPRuy2JbbfMTk4RcuazTrFdWsMy+DP/uK/9idfqP5n//nf+HHX1iT1CtUEbDf9bnIQdlT6FlMesFhcovUGrXMW82tcryTLFLBD2A1ptqHMSwK/ZjXf0qqE07OYq7cps9GIcegyn9+yTrdsF4oXHw9Z3LZ88OwZy21FK1/T1DXCsgl8h2dnX1C1FeGgocnWVIVB1wYYdkearBgOPSxL49sjtknFdrMiCjySdIWUTn8EaLnYtqAqSuLohNn0CedvvySOfKoc8kyR5w1lW2E7kuVuS6cdRgdDOpUz9Q7YlCll2XL4wMfCZ355y4PDI9q6ZDCSID3cyCCKDVq9o60N1quawViTVwmOHzONplRK4AUOSXmDkgaN8ondGKu02DYCuoRnZ8+ZDAasV2uUhk7m5Nol8AKCUOD6CkNa7NYpTZETBha1rqjrjC
or8YOYuqlQdYHqOvKqQbQ1s9kJXVdT1yVNqUCZeJ6DYSq6VjAZHGOZknSbMwxjbGHhWSGT4ZDIjWham8gf4Ro2jao5mJ6wXScUbc46z7ANi+FBjNrPWrZND5bv2Zo9wL5uapLNjoPRkMAB1zFZrZfYniRXFXVT8fDRQzbJgkoXVE3NZDKhEzbSgcCTnB0+pGsFUTAiS3pG6a/+87+BjyT0Aso6py5z6qymbhuEDetkQ1G3OL7A9xyKSmC6AmEk/QZ8ZxO6McPhAGlCGAcUZUY0iui0xg8GNHXF9dUVvh+idEvdlKTpts86GuC6FlVZkGYZqhUoZeI4HoalwazAEKzTHUVTssvW/JuDEQC/uV6jtUFW5pRVjmVZBIEPKKq66JehRH+EbprGPexfdwqtW+q6pCwLpOxnRbtO7xel2h5FtXcazX3etK7rvshjGLiui2Ea+zUs8z46IERf/OnoMCxjv2LVI55U25JnGezzl8C+da/vi0ZBEDAYDPpZXdU7moY0+2a4ZfdTqftsqpSSsiz/WN70frDg5+IFfdbzZwxMady5t30e1thnc+8+x/O8e3h/13X7XKjuj/DhPjt6t1Jl2zau49zD++/c1LvblLLP8JqmdT8G4Pv+vZBtmwbT6Bv7iJ9ldg1D9m42vTtcFDVVWdG2GlP2wH4pTTotUW2/pqNUPyCSZ1mPrKqbnufaaaQEyzLRWpPnOaYh8X0P13Uoy5KiqCiKlqrqUK3AshzCIGA8GXJ6eszx0TG2aQGSzSrju2/e8+7NhjztM5uTyZSPPnrBJ58+40998SEffHiM43bkRcLFxS1vzq/4+iev+f7lBet1AsDxyQEvPn7EDz7/gB98/pxnHz5iEIes11s2VyvEvGG7K+iEuY+F9IjCu3yzEAKtGpqy6vmqpoEXBFSd4m21petswEYbGsvtCRFaQFaX5FWD7iwMaaM7iRf51G1FkfUM2G2SIwwLSd/qvykSHodjBC1Nraiq3pkvZMFXq3dg2jiuzbdfv0IXJgfDMbvNisdPxvzGb/x5fmG7AiD9tc+IJha38wWeb/Hk8RlFvmIwmOI4MW27w7NCDDVjPJ6hZUXdrLDtAMvy+1W21qRuIc1yyjpFSIvr6zWu79DhcXR0yMtvXiIMiWlOGE1d3py/QmoDU3Ska8V4NGB24JNXG7KiJC0UeVOhpUdRt/hBRNtWbHdzhO4YjGPev7/GkDFlUTMdPURRoeUKsx2TFw2YCUWp6Qyo9DVZXvWnilLi+T6XVwvQLoORw/XNnGgYs1wltDqhViarzRZh2KzTN7x7O+/jCfYEKX08x+L2Ju1fN9sJptmxWa2Ig2P8IGJxvcUPLOo6RXUF22SF5xtYhs168wqlW6rCYDKc4gYJRd4wnZzy/vKcOBhzeHhGmmVIw+LJB4cUxRrPnaHZImjJth2uFbHbVrhun5Nvy4iDw4iy2DIZnrJdSjAqdmuwPQN30LFrbviHv/0PKPIEa7Dk0eMf8PDRGfNFT5ZJVisOxk/wA4NNesNwNCT0Z2gyZkdBf4Fraa4ubzicHWLagrKseH/xms12xW6XMJ6d8NkPXhDEHtEo5quffIkpG85OT9BaMhwNOJgFzK9r6kYRRj4CGy/sY0iR/xwhW6pS8+DkCd98+xXBQCDMFNM2WCZXoGw6VfLk6QtMEWIBqku4usn5F3/9r/3JF6q/+V/++z+u8yW2eYxJyOmptRd6LZPhlMHwiKK+YXKg8b2A+e0G29G0ake6rRGuT1uYOMLAMTSu4xH4YzqxRnYuOhdQwMXVkoPT54yCM6p6SVO7VHXCD/7slMW5y8Mnh5ghuJZkOjjBsDwsL8GPB6RVySabsyt2RKMh2ySh1f3qSJaVjMY2ndJ9w1b0L3wSDwNNlpScPjilbXsHy2IE2mW1vUTpGm1rdusGz/PQtebm+obZZIJjuCzyFMuqCN0hN1dLRoMAKXq8TX/0tsOLBNI0Wc63NHrJdGqQpi6HZwFxPOXByZRdlXE7v+LhyZS80OSZxjVMxuEIx+0olMsqWWA7kOmatK5oKxM/EGyTLdHQJd8V2EbAZpfgCB9VCsqmxhIdputR5jtix0dYFllVoSuYTH2i4AjVVmT5mqooGA4mHBzEmFZHU2uksDEMzWR4RFmVdGbN7XrOerNDa4Fjax7Opqxu1ziexrYaOg1V01BqTadtsFpaISnLlk62+MEEP3bYrEuyLCMMBV988QnpLiVJdoBkNjlCaINttmYYDhAqZ5Os+u+hyfn4wxd8+vGf5rvXf8BkbPPw+BFhOKSpW2hbzo4f8uD0Idc3K84vl1RNwywKqdc5TSuxHBPfMxGmSbXLKbOKg5MBbZVgKIvAD4mDgK5TrDYL0jJHSQtt2mihiOMASwqqsiGMQgyjdyFcxycexOx2tzRqC0oShwM8x0dIizCM0J0GUVPUWzq7wA4ciqrG8Rz+jbBfAvkbiw3Skj2uxd6vCMmOJN3QtDWuZ/cunzT3+dhebNJ1987r3RCAEHciRu3zqHpfTOpbzuY+LiDoC1N3LM+27bFOvfu3d1arGmkauL6HafS5QikFjuMSR/F+GKDnlva5z/5oHgRFVpJnBU1dU1VV7yR1IIS8d3jvRGlVVX2RqeuP5e9ytz/f6tf7XO5dRldI7gXPndi1bQvL7B3jwWCA67r3gjcIgn6oQQoM+bOJVOBeBN/dD/ruz33xTAiBahXGXqQahoG/jzC0bU87sKweyN3UNXqPurprtd/lYW3b6adgm5YOSV03JGlGmuYURUlV1f3PrBRlXqD2Wdi+nNZfnNR1Rdv2NAnVtljWzwT/cBTz4OyIZ88eMjscEA98TFOSZQXrZcZyvmW33VLkOa5n8fDRjM8+/4DnLx5y8mBMNHBpmobbmwVfffktf/TVK15/f8tyniCFxfHxEZ//8AU/+OFzPvr4MUdHIzpalosdL7+54svff8V3P73k8mJNmTf4gc+Ds2M+fvSYR+GMViuysukvUrhzUu/uJ2jbmqYq95ljg9nxlM6HubFBixZDgCVsqmwHWlGWGUWdQyepypo0S3q3lr4Yp5VgvtiS5AVCmiChEw2VrrgqtjwOjugveTSX6ZrfXn3N9NjCIOaHH72A/CVT+5wD+5ZI35Jc/oTN25f8hdEpAP+ovSF0Ag5nUybD4/5+kopOVMThY6DF7CzQkqJQeGFfcLSNEVfXl9zOb9GiYhidEUQxtVqz3mzxAgn0JZemLsi2msl0RFokWMLjcGZTbkuOpocMnBNODw+5uV5Q5CWu5dHoDWWdk2xrlrcrrs7XbBcbHGmhaoOsXOH6Flo1VOWWtNhSVSVVYZDsLvADn2gQsFzNUdoiHIZ0suTi3Q7P8zFcE8u2ubg4p+tMVvmGbVbS1oIsFayTS+pWsk6v6DBoa4lUh8QDA8uK2Ww3DAcjrq7WKC2QWHz84RcoVhjGAbNDD9exqJqEm+VrBtEptiNZL3cYOib0TnjywSNWiwTddhRJyOc//BjD0myWKwQ+QWQzGLicv3nLy5evCKxnPP/oA1ZXG2bTU7755ifMDsa90NNDtEz651+d8
ubtFcePD3h9cYkZSKYnLslKczQ54Nd//V9gdTPn1cuO/+Uf/32KW4vAl7w5v2A0VqzXOUFkgfYoiw46g9HwAGl0XLxNGI4clttv6NqAzbKh5QaQFOWSxeoKe2BwOD2laTpuV+8YjXxGscdsGmOaDrar+Sf/6/dEQ3OfO+2jI6E7YxC73G6+R+kM1UhOz2ZUTUPbNsTDiO06xB8OKPMdk/iE92/PSTYLmmqB50yRtsuv/fJf+ZMvVP+r//o//rEfu9Rt09v8xZzJ4ZDh1GF+WzCIbDzbRVUllxdXnJ6+YLHYYFoSywwwqYlGil1RI40h2+uGcieYjGZkhcIPbSYPZgyPGyZDh1H4hDgc4TLicPyQXfKeVvmEQ49ku2UwnLHJFpye9RmWrhvTyYqk6gtRujMZjj0uL66ZHhzQ6AZhhmRlR4dAtR27TYHnS/K8ZVeWmLZJXS+RStOWNVmVMDocUHUVuoTQP6TcbXEGNm2jEHXL9WaFH0W0hkZrEwxBFNl4tsX1dYKwPKq8wdEdC5UhlMkPPvkCUxtMJgaFbGmWHT99/QrTlPihxTg84O1NghcLTuKI3FSsthscI6YuS7LqFluOUGVDWWZ4VkBdwep2zWl8SJMLLGfLLukoqKCRiMZiHxHsuY+qwTZNmqpkMh2zWic4vs22qbBaD9vuJ7DzuuLB+JhCp+yWW7Imo2gKylYSdoKqbVjWW06GE4ysYbtNqTqF1hKlKixp0wiN47voCpoqBanp2pDN8pZx4FAmMJoaqKrD8X1mByfoTmGYMUKkgObh4yFltSYvSyxrgCk98qpmne7Iqi0VJT4d61XCKkuIRhpHdjiOy2qzBrOkLRV11WCFFkbUousWVIUfjWjymlWyYDg64vgowOh8nChA2IphPKQuMqQrqbOCZluya3b4wwEDP2CzusV1BphWR9toqkxgCou6TGibFMccIETAdrcl2aWYZofrQlUU2I5FPBxim5oHJ4c8e/oxTdnwl/du5P/YSdo2wfUcHDtgfntLki4oq13/pJU2dVMjRQ/Kp1PQdjRVhWFoTFv0TNoyA9HerxW1TYPoDCxzn1naPx5MKem6Hv3Uqoa2Lfpyi75ztySdBsOw+8UlBaIT96tKe5Ip0jD2rL7+GF2rXqg1eYnQGnRLWzewjx1Iw6DZH4nfuaeGaRJGMZbVZwyjMGIymvDg6JhRHDOMQgLXwaAj8FyGUUzo+djSxOhA7pOandbUTX0vbHtxXKGUoihLsiKnrPoJWMf1cJx+Y92xbEzTAimRpoFlWximcf87sdMdousLko7jIYSxjxxIyqpCSEEYBbieg+3YuKGP6/XNfsu6c7BbDMNGA3VbY+3zt67nYdn9x4XsM8CWKTGkQLeKbk97KKvq/gJFqZa2UXt+qtyXgKBpFavllov3t/ut8QrfcTk+mvH0yRmff/6CDz54hOu6/3gjAwAAIABJREFUbHc7rq+WfPvNW/7oq9dcvFtSFh2O7XF0NOHZs8d8/MkTHj2ecXQ8QqBYrVacv77gD37/JS9fvufmekWeV0ynE548PeX5Rw/59PMnzI4G2IakqhrevLng5TfnLN7OcTJJ2UgQBl0n7x9Daj//IIC2zmnKFin6i4nUaPiD9gLf84giF9PqsOyux66ZJlKa2KaDavqBBRBUVUuWFXvMG/16T7rDMsU+D2v2Yyui5bpc8MQ7xpSCQtd8s7zENHwmR4KRb6I359h6x3Klefku5Wpt8u625q9+8ASA3zy/pG5tqrYlHAWkVcpwMqHIoaoTmralyAWjadzTVDybvFI4noHnd5gm1IUEo+H4eEBVtFh2x2QyQSnBIHZ59OgR0srJ85Inp59RlWt0a9FKAULz5MVHnL/6KUVVMIiGFPshkCZrcIVNme44nvXCertRtI3k9MEZXVdR5g22HVHVNU0lOZo+oKxrClGSbSo+evScLFtQZYLl1ZLpcITlWJiFYp5vaLwaUSqOJ2cMDqaobt3zqu2IJF1QNDuqzqCo1+jKwTEMynqFqMf86q/8CoETsZi/Zjp2WC0KDFOyzddUZcJ2YfDB06ck6YKD8YxBFJBtMqYHUz768Id0ymWxuWWbpHz66a+CkfN7//s/oZGK2fEDbuZzGlVTtxsi95gXnz5GyYKb1xmVviIOZyTJFs83aZocxxTINuJ2tSIeBgSuj4nN8vYG727CWrtoLUHCj37xYyzpYdjgmCFNqTk+fMz11YKuDfnlX/6cr3/6LcNhSOgFPD57hurmIByytKKpYDIzKfIMrRTHh0/55V/6ZW7ffo8lD7H8DVIEfPL8MUIZHEwn2JbJ9WLNi48/oKgSDqcPKXJJFJhIc0O9i0iyJePxiPHwiO+++56hf0DX5qyWO6qyJN+VrFcJp0ef8vyzA7I05WD2jKOjR/zoT085m/36n3yh+t/8zf/ox2Vi4jkuUegTRSZtWRN5UY86Csbc3l7SVIIwOmK1mTM5jAmjAEGDMlt2eYnrRWzXGXXeITsbx7Y4OpwgDMmXX31PVScsbjN0V+F6cHo6oSpbbjdvePzBmKbSPDz9iF3ylrYYgTIp876Je319yXhwRqtaBuOILGlBaDabFZ3QpFmJVgJFRlOCaQwxLEVZ5YhO0TY7Yj8gTQpMUxAHMePoiJuLOZ2pEFqBKWhLgSUEjm0xjo+x3QG7JEOXJaNxzCavSKotrn+ME7VUVYNvxqR1xsnklKpasNssEG1AVhcEjofvGLRZzenBMVfXO3a7ObEbIE1BXSTkSULo2riWRZbk1KVgcjCh05LVZo1d1wwHEem6ohI7mi7EcwVV1jBxB4yDI4qspalzXN9H6Ro/DNjtJFUFVblGGgY0NUM7IDdaiibHcw08N+DDj0e0VU1T50jXQxgGdlxQZhUCG1d1vF83hOMeb/Ppi8/wmoirxQ2NKWirAtsy6ESNEA7L2y0fP/kElMmDpyG7tKITLq7p8P5ihREqyiqlUpoyy1GlQ1mUCKmIwxDXCfHdgCzZsN3eEAQmNi6djggGMclmxdHoIXmes87O0a0JRsbLt9/TWAWm77C4LZjMDri8XqLYMjsdoIViMh4wnUxYrOaku4LtaoG0HQwEg9khTtDgW30zu1znxHFAVqxIszVRFOJ4Rr88UpW4nktZVPiBRKkaxxkQhaP98wIMKbAsA88esZinlHk/R/qXrP6q4reyAhC0rcS2PWzbRHcVvu+hFRRlRtOkCK1o6py2TSnqJZ0s+3lX0bFJboGyd0yFjWrkHrDf5y0d28GQArV3Lh3HpqxTVNsipUVVVpRVhmUbWJbEdgwcz8BxzP5qRmhcr3cT+7xkL6hMAxzXwHX7vKhrOYwHQwZxTBxGPUnBtJFGzx11LBPPcbD2x+l96cvAMk26TtHRAA22Izg8GnEwGzAYujz54JQXHz/l4aMjDmZD/MDDcd2e74pEGv0ylZCCtmn27iv71SmJuV/ccmz3/uheGn3Gt27qvWg2cBx7nw+19xEAG9fz8Hy/j13IOzw9/fyz3Q8qNFXTl81Ms8/fyr5kZVomnu/2wP+uw3NdbNumaRqqqrpn7eZ5L6Qr
1YDsv5euT5/2jvh+JQx6V9uQxr0LDvTFNMfGsnrc1W6bcnuz4t3bS87P3/Hu3SWr5QopO45PDvj0sw/5hR99xuMnJ9i2ZLtb8fr1G7779j1ffvkdr15esVxkNG3H8UnPm/ziFz76v5h7k1/b0vRO6/m+1be7O2ef/rZx743IzMjIxs5y2S7b5RJWIZWEKaAYMGCCxJT/AM8QA4ZI1IAJRSEaCSGBYFRF53Jh48x0NpEZGRG3Off0u1179e23GKwd4WKAJyDkPbq6g6O79zm6513v+/s9Dy8+vGA0cTENnbyouLlZ8PbNLV9+ec3793ekacxoNGJ6MGEy8zg6nnJ+OMdvLIqyRYjhIaAfjBT07NFqqqMqMlTbgRTYjk04n7EkJy8Kirwiy2q6xqRrJfRD3EHTBeHIwQ8sHNcckGlNOxTXqpbFw5qy7BG9hcCi6xtEr4aYTl/wNrrh0A/54+2vsAwDDY0sSZhMTYxRT84hcfsE9+QlpTnCmD/hH4wGQsXPXzzBDDyqrmK7S5FCx/dDxqPZfqh26ZTCtn0m0wm3t3cD1D7Kkf3ANlZ9galblIXCcYNhAaNqinJDtK2wrQDXsRmFI7K0oG5zsrIijlIM2+OzX/6Yoq6YjCcYhsV0eoxUIY4jsF0d29HQtQGx5QYGJ2cHVGUHQiPNC6RuQm9y8fgMaaSs01sMT3B8cIFp17y+fMfhfE5WJFw8P6HNBJbms9puODw4QooG3XSJ05TVYkcWp5TViqoETfoUZUKVGaTpHbapc3RyiBcYvHtzyUffeszd7Q3xysByO84vDpmGM6JozdSf0nZ3OO4heSp4dHLGdhfx7Y+/T1Y+8Od/9gu+/d0zHu43hMGM95fv+ehbh3SqoiobTPOQH/zGR/zil7/AMU4xrZzNdoE31RF9yLNnR6haJ3ACsuoSdJsorXAMQRCe0eoZVqADHV1pEIbWkNlVFaIfFNRC9Dh2QDgak2QRXSeZHB5xMDugVR27JOXJixlFmbLdFmiWwy5aDFcQNHSzJC+39Kpnvd4ymYR8+PLbfPHlF/TKoW1rhNSouiVp4vPyGxf87NOfgJB4zpRdsmC9SPDcCW8+vycIB6Z4HGc83BY8e3KKEDWu56PZkkZpfPTye/wb/+q/y6//4BP+jz/5IaEf4ngdafme+9uYX//kX/vrP6j+4//6H/5Rkgpsz0PqCb3W4HkhlVqTZRXX75cIWWJ7Bm3XE2c7dMNms4mZHrj0oiHLcqTWohsdL189xbQUm/gBw62wHRfbawhCg80uISsfyNOG07MLiqpgejRnm1zRVhoCk15VHB8f0aqKoqhI0i3SrDH0EH/cc3sdc3LmEqclrjsmz2vqLqVpKjQxQgqdjz56QZLmCD1nbM0xDRtds4aBTTaMA5cqz/E9h7LrKXcVk6MAXWjUmaSj5ex4xnob08iYo/GIssiZzk8wDJPjQ4dqt+FbL5+wXK159Oyc1dU902MwrQZT+EhbMpkG3L1JePT0lOu7a0DDcFuSYodCIHoPw/GxbIPtNuZgdkTX1Gi9GoYn18LsLYqsoq5bhpqJou9r+lLx3W8+ZzI9pKga8rIgikum0ylB4JLsOoo8xQwMRG3Q9YKsqPAtl77vkIZGdB3z+NFTPvvVNZY5ordqXCMgyUvG3jHTaUihanqpI/oW2bdYmkledqyy5bDdalu6suFgdgqyJLQ1XGHy8uVT3l/fs4mX+J7L0XTEZlsS5yvQhqKObG2mwZjr60u6tiX0RmhSJ41jaAUn8wts3UNTgqqtMV2gSZl6AWmRDv7pvuXZkxd8/PFv0vSC1eoB29LoGkmtSsKRRBcObdVhGwGhf8T1zR2LxR1FVfIkEDwPBYsyZp1VpJsSoevMAo+0KJGmpKwa0rTB9S0sV0eIHsPUCcMROjbzgwNMqyOKlzT1MDRNxi6mnFBkCk0HY0/F+ENjaP3/N9UGzezp1LAFMnRzX5TqaJoCoToCLwBVUTcZVZ3TtCVlXaIZGnVXkpVbTCnoazFsMJFoUuJY2oDzamrKqsAwBs4pdES7xb5h71FWGZqmo7oBGl6VNVma0+//PaIX0Evqeu+xlwaOPTRlNSEp84K+azH0wacu+h7VdnuffIPowTQ1DFPbx2X2BZq+p+0GbNVQ0qrZ7VKibcJysWW7zagbSVUKNpuYxWLNehNTlg1SG1idCmi65uuWftsp5L55b+wHzmGAHcQEuqFjWhaWMZz8vxpibcdGCkHbdqiux9jjwr4iG8AesdUP0C/VtsNgpXoMTdsrQfflr07hWM7Xg6ymGbiOQ9/3lHtpglKDaEDf54VNy8C09D0eS9GpQY5gGQaaFLiOg+O4yK8Yr6aJJof8a1kUgMK0wDQkB4cjnj0/49kHjzg+nqNJSJKEh/s1b15f8fbNDW/f3LBeRZi2wdF8xouXT/nGxy94/uKMIDRI84jFYskXn1/zs59e8unP37NcJJhWwHQ64+BgwstXT/n2Jy85fzQnGHk0jeD66oHPfvmGt+/uWS4S6qhkjIfQBuXu8Fn2X1uhxF7bWuX5kK+WEtOyMEYeK61iPJ4ShCNsx8E0B3FCWRYURTmYgIqGuu6hlwSByexwzHw+Y3Yw4eHhlu12jesa1E2JkHJ40JI6hm7RiZrbNsPQLXRTYocd4egQ3Q6pdA17esT84imW7/KNb33Cs2cTfrAdMqr/6G7F4j4izxoMaeF5NpvtkrptyPKW2fSMsmrZbNdousK2Qxo1sHLTOCMcBUCHqU8wdQ/dqMmSFM/1kLIl3iiKrMY0TC7fvWa5XFA3HeHERoia5XbLwdSnlyZVnlI1CZvtPbtNi+12SAGrhwbD8AmnDlXZEicF4VgnLzpsX6OqK9pOMJ2ZXL2/ZexNuDg/YzyeEFULyk7gWUe0eYdhaNSVIipW1E3Gyw8e8/bNJcmuJY3WaPUgzDg9nVBkFb0acIu96qnzZm9JsthuCsLQY7Vac3p8wbMXF0itJolr0iIlUTd4ls0XX7yn1l0Oz2wmwYzzF4/4p//jH3N9c0mabZhM5jx5/AiE5OSRzes3b1nfN/jhhBevnnN9vUS1BkJm+H5A22bc30VMRhbxJuPl8wssq6NvfKQw0K0SU4Qs1ncs1yum4SmqShCNTt32g3RFU2hCZ73eECcJs8mc1WpJrzosy0f0GZqZkcYFgd9zf3fF7VVNLzKKtGY6G7NaRYxHU9J0A70OKITs2UUNXaVTtQnHpwe0DRiGR9c3rDYRWZWxXiVkecx2E9HTs9q8ZRQGTKchZW7y9OIjPGeM62pMp1OkdJkdzekoaOs5v/1rf49Hj074+ad/xs3VhvHEIileI2RDmif81vf/7b8Wg+pfaaba7FZIoyRKAhzXYurrFGWL1Awm4RFdEdPhYzDBcm2SWCOKcnzPoixa4kWG6MC0HdJuy2J3g2W4XHz4nKatublOkbpkexVzeBpSVgm6a/HF5ZdIxhxONFq5I4tNHj+12G4UqpdEuw1OEJDXE4qdhj+xqZqKF89HtH1M6Hjc3xRIo8KzXNZxjTFSaJrNn/+fv2Ay1dFMm6LNoRnwTIY
Jp/NDtts1vh/ieCNk8cDB8QShJJ7nc3/7K8ZuwDbOyIuCRFdMFejSp+oSdKfl9v4amga6MV23gkLno+ffo+4fsNyMA/eM213Em7dXvHj5AabncXn/E373117wsHXYVu8pdjVilKD3PrsoZjrxqeucXlQoHMqmJQxHHM48qrplubijKjXafmjinp+f8ukXn2F6LrrpYwYWoXVEvNvhWDZjz8a3W5ShYYVHFGVM0ccoA0zN4X7xQNcGXK0WzJ7oJJsSz+4xm4xN1KIOC3oz4OHNht/93nfYJh3L7S2/uH5HbwumU5+R8Gmdirv7DVJYGNqMzthgj1re313ysF7y6oMJ2bJDEwYjVzC1piRlxnxsMhmNeLhNOTt+StU0iN5jF62xrB7PcdGljh9MKNMVt3d3QwbUCLl9iBjNQooyo2s7PHuAn4/tHe3E4Xj2AUncoo/W1JHAd86AjvU6YjaZcjiboxkVaZLg1hFqmeD7IZHdUAUFvTmiNXV006TIG1opWa3uMFxwOp8i7xB9iWmWWIzQpUHf1fh+T9tkSN1D9g7bZcT04ATDqWhVhqGH7J82GI9CNEMCOvf3C3rywUoldYQC3/cxdEnd1gjVQCfpe3Pwj+9W9HRsFhHrRjIZTbCdZJ+L9ClrHYWkKHMmkylV3aFLk7rJ0fQOpSriZINuCCzDRKBjGCZ1XaNpkrZuaOoOqWl07XA6dxwXaQrSJKbMJbZtcn56yvHxmOPjAUg+qE4hz3OyPOdh8cD93Zq6Utiuh2u6KDUYpcpqKL017eCzD4IAKYbT+moTs42yr2UF9ArbMnE9G02XKDrarsUwBaIZPhddN77Gc1mWNdAAOoll2l+jo4QQdE1H17WDEUnsz/xCw9AFmlRfF8++Kmd9ZZz6ihPb1hVtU2OaBroGogNNMzAti75XA6ngqwzmXkFr7t9f27aDSnNPJmjblq7t0DVzaP/vy1h0PVVRkOcZnVK0tHhhCL2g7zoEUOQZmmEgBTSlolIdu23E9eUO0zTwA4fpNOTVqwNc16RtWu4flhRZTVW1vPnyhjwvEUJimhYH8ynz+YQPXrwg8G10YyiEFUXN7c2SN29+RZG31PWwnTctg+lkwmwWcjwf8+HLx/Ry8JaLVc1p5FE2arDBiYGqMLzUIKTqFaptB+2tHJi/XduQxDGRiqiq6uvCnKnr0Gt4Xjg8fLRf5Xg78qwmizWKIgLR4wcBQpgo1ZPnGVKTtH2FUoIsV0zCKS9fnrPdpLx99xrb03BdhaNbJKmDbDXS+J7CK3EMna7cEvh/GQux/ALTd1gsI+qyZrkrsC2PuovJchPXqzk8fMYu+SFZkRFvS5xQ4pgB3pHFLloz8k+wjIBt/IDMag4PDtF1l919Rhg6XDy6YLl4wDT3W+wyY71IKPKCoukoXRvLdNlEMYapo+stlpdQFR664/PBqzEPDwvW24gkqVHdmEkXkFX3qCbDcmyKsub2NseyR3S0UPncvF9QdjB2AmwkR/MTnFmIpW958/NfMQ3G3L6+BukQHNgkyxbf9xAq4/Z9zXZT8eTpcx6fn6PpsF6n6FLn8vKGILTZxT6nJyFC6yjrLdvonpvLkvOLKbI94Mmrj3BtE+H7HJ+M+PGf/pTTR+c8fTXnzWXJ2HxKVt2T3zY8e/qSg8MxqvO4924ZH5j8+MevCcMpn3z3u2yiX3L97pYsKzg+HL7++LDibn1N6M559vKCd5e3LK87js8Nns5O+dGfvcY6dzk6esybN1f4wSMePZ2z2jxQ5TVtAxcXz7A9g8WqIgwPWSzusXULTTPpZYPGsFwYzxSalbG8STk8tinzHudoTDhW/Mk/+yFhGPDig+csFzvuNq8Z+QNVoqoKjo+n/OQnKxCQxiPOz89pKsk6/jmW/oz55AV3D295cv6c3tFx/JpGCR7Np2xWFQiJ6gvapsczQ8bThuXyCwLfZDw6IM9TTk6+yY9+9L/y6oNf//922vx/8forzVR/9w+f9ZYNUhisFjmPn00GYwc6vbKZzaZ4fsCnv/wCoUnKKmG3izEsB9PSaYsYL7RIc428TlB9jeuN6elZPFSMJbz6+AT6kOvbW3QnQ/UGk7lNo9YcndjcvWuwTYOxd8Bk6jMNX6GZNZ99/iumRxKagHEwpS4rlssFr1+/YbNOOT09xnbG/OLTWw6OJhR5CX2PpndUdc30yGEXlSTLksD2mJ/O6NuGrozxxj6vb5aEI4cka5nPAuI4pal7ptMZad1SVRmFXuJmNiezQ66TBzptC1VIODJJk4bf+psv8fQjTKGD1ZIVS+ginOCA24eOrqxQ/ZovbnacHx3Q4/HF5eecjCSdNiXJc8qs5+z0mN1uSZbm2NaYqm7oqTEdHdnaRKs1zx4/4vLNEisAzx3z7v17ZvMJhi3JywqlwDZM6lJhGi67eIU/H2MZB2yWV6ByDsYhWVzy6OyUsvL54s1PmT3qyMoRbdny/MUYUsnj8zk//bMFk4OAx4/PeVisuHp/SSV7hN0g647OsDAshaoFjuvRoWhUjm1AVWRMDia4OmRphTsNaNsCUzPxnROaskZIlyKKEVKh6NGkSZ4XqL4CGqSAlo6p71GV4I2muIZFWzU4noZtSVRvowuBpVlk9Yb58SM8zyCKlqRljaNPWD2sODgY8/jJc9ou5s2b91RtSlWXvH97y8nslFQJFukKyFG5y9PTC3RL8eXDF9jOiCSqqKucwA2xdZumSpiEB3img+NqRLsczdSomxQ6h9nolDRJ6FGso3sePzqnKFv+E2PIqP57Zk8U7YjiCCl6XM/d25x6LNNEdT2WYUMvqOoay3bokXRqUJI2dU+RKXRDw7Qlmi4pygzTsgcjmlAURY4hTULvAKV6imqL6jscO8TQbXRDInodIXR0zaCtu31JyqEsK/q+x/N9NH141s2znK5tGIUhR/M50+kE2zYQe7yU49r4nguCPfaqJUkqttuMXZwSJ8nQai9K6rbFMA2E1AdVatsybNw6tH3bfmCm7lWgdY1h6FjWsC1t9wPlcBkX/zcs1yAsGCIW/2I5SimF4CsNa7fPPA5Eg6/UtUopynIYznVdp64bQOD7Hrom0CT4jo1tW/thVcM0dMqqJk7SQdDQ9XSqH3KATUPTDCQFtd+KfoWwknLIFDdNg6Lfiw0senqKLEeT2tfq2K7rKMtB/mDoOromkL2gLnN0TeB6zpBTrGuaRlHkFUmaU1cNumbg+TaOa+L5FtPpsJTo+468qCjznjQp2Kx3pGlO23W4jsNoFBKOXMYTdwDxBx6IAc+1i1IWixXr1Y44qmiaGse1CScBF94hH2vHJHlNmjcgOupmsIbB3jDW9zR5ShbvYJ+7DQIfeeDxM7kcoiu6MWy6235fxOtpuoaua5D7cpyua7jeEK2gF1RVTZImZGnGbjdgfJQsiaOGKq/RtBbXN5iNnvD7v/+3sF3FenHP1c0XuP6Ybb6EJqIvNUzbJm0i+lbjv3/8DQD+nfYtVT7EbNarCMfTmE0nWI7N/X2MpjmEY4td+jlNriHaA2ZHDpPxjE9/8XME4Ngu52eD6apTHZNJSB
IX5HmF5SrG4xlRtCFNC+pSUtYr2lphui5tnWM4AV2V4QYhSZTij0f0jUme5rTdjtksZLvWsf0eYWyJdj1BMEMzFNt4h6HLAYPXSkxb4bkHTIND8u2GolIcXoRIBe/vVuy6guvLBYanczp1MDqHsrOp6jXH/gHHRyZSh8en36Zpc16/uSSrc6rK5oOPDrm8ests4rFeL5H42K6gr00O52O6RtG2EWfnh7z+LOVvfPcPyNrP0D2Nvhnz6MkhY9/j83e/4v1lyu//wXe4fLsgWt3j2g6/+du/R5pvefvuSz79/McE3ksOzwyaKiL0ZixuV9DraLbG8n5FGHh47hCTMQyH0WTC3d094cxgeZmjtSbPHh9h+jq6HnAwP2G5vOHq5oYw9FGdTtf2jCc+aZpQVR273Rpp6IwOjsjKe0auThzHNMoniu8Zey5tWVI3NePgMbODkMv375gfTSjzHBCYtsndzTWzgzHbZU3f2bz4cEy8tXjy6JxO1mTFhs8++wWzyTlt1yC0HEs84YNXU3766S+xdJu6SdGEixJrvGDKw03L7/723+PlNzzevrnGcmz+l//5Z+yKn5FEgufPjrh5X/Af/Pv/xV8LM9VfuVH1JxNurjaMxga6o/P2/RrD0AjHgmfn3+P84hE//OE/Q7eqQWNnK3plk2U6adtiWYJNvEUTc/KkZjwbgYKmjvFNB8dXPNznmIakqiowhpZc2RZEm4bb9xUvPzjn42/8Br/82VvKouWn7/8CJUoePznHNsc4Yc9Pf/iWut4R+JJPvvUdfvnpFQ83d0zmit/67W8SJyVv3rzHdhySKGE8PSDd1CTxmtOjAyzpQ6uxWd/w9OyMd7cPaKYBfUjR7ijbGN1u6Rqf95cLCFom4XCa68uOTfRA6DuDhtQLEEbJb37/ET/60Xu0OuXFRxaSCNV5FJGBoTuk0SWpSjmwLIxe8uXigUcHj5nbT2j6Owypc7fccjh5xC5paVsdXbewLRf6iKYtKEsLU8FoOiUpYoQOuqkhZIfp6GidRbJZM55NMG2LNM5IkoIPXj0lb2qKvKHKlrTxDmtiUhcFeV1jhSNGcsLNg4kqaoLQxTiO6NwdLy+e8PP/7XOMXPIv/97f5X/6J3/K/FTnxbee89nn77CQEEjSKsF3Qnohub+65+LFMUmu4Zo+jt7hGBJR2Lh+T1/XaISMnAl6BZah7z3UBbZrkqUZWabQpI0QirrOCQIPIQKSuGQ6PmC12JKZFp7nkEcl+abh2dPvM57NqFlStxsWyZoxIwxjQrl6IFKXHBzO6YXiF599xnQScjA5ZBMVSGFy/tGcuoYjfUZWrNmmHbqAu3VMVt1i+LBZp9ANJ+M8T1C6wvdshJRUbU2xy9F1G00zaDPByfwI27TQ9Y68WvLh8TmqM9CEBe2QL8ySNZNwymQ8Gdr9uk5ZDrixJFntN1eKpm0BiWjlANcXGknc4BguwYEkr1KU7IjTBsu0B/6rbCnKGIWiFy1FHSGFRtWkOLaDkB2qz1mtEnR9GGxty8HWHfquQ5caR0eHtF0LQgyiAakNNrSioigK7h8euLm5oSq7gdHbDRUZx3WGzKuuEfjDYPNVy71uWlzXQ2gSur12VHUM4oKvtKSg7zmoTdNQtw3avsU/sFUFUppIpYbGfVMB/deD6Fc0gqZpUF032Lak/FqU0O8VsE0LXxm0vsJtfUU/+GrIbZpmD+63oFd0HXj2eX4zAAAgAElEQVSOh+06eK7DZDzBcQwELWVZEQQuZd2gSR0hhv/veob3FMUxcZJQ71WyMJAHNF2jl0OswLLN/cYYQnuCpunQ9bRFTUfPZDLb46hymqpEl5K6qthFOdtNju/7HBzOCEMX2zbQ9EFNu17tWK8itruMXVzx9u2Kqi7xPIdR6HF4OOHs4oiT0+lAjegV2+2O9WrD5dWCy/c6kqH45Xse43FIEDocHs548eIRUu/YrmOubh7ohSDLE1aVTt0D0kbsvz/s4xP0A/e2Ux0IuceiDYiuKs+JiiWmbVFbFtpXZAdzEECIWkdJnbpuKaoOfX91MIwB42bZOl4QcHA0o65zNpstutBpu5zF4g5DtzCMIR/945/8KZ43YuSOMYwRtu+iIoVQGtLUUELDDw7I0+uvf1/eXxX0jcZ0OuPi5AB/pCi6BdtdjTRqPGfCx9++QAmXH/3Z52S7mtubHbo06drh4fTFixkIKIsKzw24urpmPB6x3m6ZGzPubzOKJkM3FJ1QIC2CkU1WV7RNTzA+oKgATHoJ4aynSgRCB9EdYls+prOi7Tps65C8fIfQBTYOpuVQ7xGCZ6envP7yDb77BM2XGBik64jVTuPm6hJd8xB+zeFBh9Bt6lzDDzx28RLbGDE6CPnpr94h65rf/LW/z8mjjg8+nPDZL+/5/IsFy4fdEKPRJXWlI2WHoTS6xufDV99ms33N3cMGJwz5nb/zEXUWEYx8HhZXdG2DH5m8/fwNF88OMWzJ/eKW8YGPJg4Z+6fsklt+8hdvmcx1Hp1/kyTfomsu1zcZZ9/5Bua5Q1lG3G5u8bwRp8eHyK5hs11RtRk3RY0/NsiyltFsjC5L/uL1p5w9nWOZNaavEW0TqrJl2+ZAy/urd3z46tt4nkOtdoxnE1arBXH6gKbpfP7ZzxHGA0I/pS3HWF5IODVZPTQUWcnWSGhrja52qaqIyWTC7PgM3dSoEnjxocbmQSNd21hGy93NHZ1ec3Jyykevfg3brfmTP/lzvvPJr7Fc3fDP//k1509C4miDadislxGPnjwhjlN+8OsvMK0r/uk/ueL163tcz2WxXuH4gmdPP6Rtt5jG//MS8//v11+ZUf2H//l/+EeGI4mThqptCGcWqneo6pLx7AjV1fS4xLsWTWgsHm7RTR3H97hb3NJ2gtPzx5R5h0QDJegbQRC40LZUlckm2tLWg17S9yfMxhdUGdRVwdHkjHEw4v7qBpTJ5ds7svyBx+dPidYtV+9/SV1mbBdLumZoIoPN5HDGdpvw9PkJN+/B8mr8oKfIBE3XYNkGdVPTqYaR5xLFWyrZUdGw3aTo9ghneog/cljvFvR0HITHSKBD0dY6y4c1vQiYHY5ZxyVKq3Etn6oUhGOHKq6ZHYR89wcjdvmK3dqmKluEpiizBonBZD7l3RdXaGGO5R5QximKmPulYreqMDVFtt1xNDugyjuKMsWxJyRpQdE2uIZPV7YYjkmeVgitpO56dK3DnQS0RYVj28MA0mnUZUndlmi6RS9bekui2yazUcjx2Tmlajk6fcTIcvji6gsYK0zN5uLY4fCwxhMTmm3LbukyPh3RGx1S17i+e8NkMqVNhkxZ2mWItseWYOmS2Ww2lNxUhVQ5lhbysLoirxSN6IesmWMS73ZURUtetHRdRZqVlGVLmtaoTgM6yjoGWpTqqYuativJqpqyahFKYdmCNI8Ak9l0zotnr1itXnO3XFK1Q7M9z3K8CUMRyjlgs91iui5x2rGLY4q65G4dke8MNNMj2W3Isg7HGzGdGWRpxDTwsbQxWVRhmT26LDian1PmPb5nDSzTqkVKC8uxMPRmEAgol
66r0PSeOIlZPKwRQmM8GfO3y+H2/z+oGsex0YXNcvVAVadE2w03N/fkeYHneYNnXWtoupbdLsHQDXwvQApB3dbs0oi2azEtc8h+7tmheZ6S5wWB71NXJff3t+wnQ5q6IY4jiqIgLzJ0Q3J0dIgpdZI4o6qaITvaNaxWS+5ur4YTqhSUZYnUNFzXoyhrsrxEN8yhxS3BtEwUA3e1bhruHh6I4h1lVQ5DYKdo9sNv23UUVTFsUDXo+/05/ytQ/0BNpao6lBLQQ121FHlOVZV7HmWL2MPjLcv6mp86gPrb/cZNp21q2q5DqUFl2rYtuqZ9jfTqVU+7/3vTsvZYqb+0ZVmmQV3XZFlGmqaUxbD5TpKM5WrL7e0Dy1VEVQ+MzvV6y3a7o6oasnwQSbT7bS+wFyi0X3NmPTfAMV3aqqXM6oFiUTQUaUVXtTiWhet5+yzuMHwHgYfrOEzGIcfHUw4ORxg2JHnM7e09l5f33F5tWC4SWgWO63J2fsLJ2RzPt5keTNEskzQr2WxSlqsdt3dr7hYbtlFC3fT4wYjxdEo4GuH4DpZt0nYNm2jLcrXh5nbF28s77h9i6gYmsxknF8c8P3vEpDXIyhIhtaGYh9hLLIY/90pR5SmqG7bcAvBcl7ETEPQm7/IVSZKSZTlJEhNFEXG8Q6nhs7MtazAV2QaWK9B0huhAmrFdbdmsdqwXCW2jcGwdVQ2RGanH5FmF503xfIeiXnCzeM/N6kvuFpdkaY5hefRSJ00KZqNDLNPmD00fgP8ySxjNRmRlwe3DkqRIqKse1ZlkSUKelNzfRIguQAqLg4MJhmZxc7Pm6dMzxlOfMgfTtFGqpGladvGWqmrZRhGeP6OqBjax5Qk0o0OaPU3bcXp2QZrl+L5JrYZoiON6FFVLUTQ0TUtd6XjOmLLuEBpoBghZU+SCNIto2h11mVDlirrQaLqM++0NvVREy5ZpOObm+oZwcsjdwy1SSsYjl77rcN2A0cimylL6wsQyFY1q2MUr1usU1xmR5UvmB3P+pT/4de5ulyTpDjDwzGd8+M1z3r19YBzM+eS7Twc74HhCHkXEkeKjF4+4u7nGto/53icviO4eaKqWo9kFr75xQZqXLO4TdHy++fE5n372I45PnvPmzVuytGY68+naGs/0qLIKKQpWdzXnx0+YjhwcQ1IkDY4x5WGxHC4/TU2yg5PTgHA8ZrNqmJ95PNzf8+aLd7iOC73G08cXlFVMGLrc36+wLIuyLmiakix7w3K94/HFM+qtR+BMKAuP84Nv4lk5ee4SjHvCcMZ6uaVTFapTVCUU5Ya79T1j7wLP6/jVL675zne/xeTQ4Y//+H/n5uaGs0dn3N09cHIWsN1UWPqYk5MTwknPzd0VvjtDNYKPv/kh1zfXLG46fM9D1xo00XN99wbD6vj88zecnj/GdXxm02NGQUhavuZv/Y1/669FRvWv5qj+V//RH6F0prMRaZJhmj15EmFqGrtoPSgj7THv3n/B/NhjNpqzXG3pRM14NqcuGg7GM5Joha5BWbTomk+8KxG6+rrNKtHR9JT1fY3oOw4PjplMHfTO4NOf/BxDN5iEI3y/5+b9mtXinixumR+5PFwXrBcZXtARLU2E0VOWivnpGMmMulHoVolhaojexPUs0nyNose2Zrx9fYUVWhRGwTbNGI1HYMIySUjjAscOaVRFvSug0rBDC9fUMfUAl462bECrMXQfz9bo2xalclzLwTA9bq5uCUcnlGVB2W7Q3Qil1qjGpIzW9LImqgR9VOBpNp2lIUsH2h7NHGG7BqMg5OEugj5HEzpt2VLEKY6jYQqLqslpq6EhWzQCJQt2OViWhuMGhKHDbpMM7FBZYQgXaPBNMKoWWwjGB1NEFnMyO+L2/pJlfMX5yQWG1BHGkmD6Idv3PckiZuwb7KoWtas5fzVnlfdoSqLrNpsiYu6HTJxTDGtEqyXUomKbRyitwvds8rql1X3SqkEKG3dk0HYZ94sVwcSlLXOiaIluuqRpim3Ze+5mj+c5tK2iacA1LVRv4gWH5HnMbvOA77j0UiPNMvIiJk8iJmGAaltQNbIP0XWXh/V7DidnlElD26a4gc/t+o7b5Xt2mSBpFZ0SrHc5y3iBbtiorkTXJU0zFFamgcP8YIzvmVRlQpYmBKFO39cUmcHBwSFS79hFW7paw5AOm+2Cri9Q3fB15kfHSE1xe3fD37eHX3j/XVVRlBF5kWIakjhOkXKwLzXtcOqXwibJiz2s30IKbQjgx2uUBCkDdM3eZxzLwRDX9fS9ZOTPoBvQVp5nIoQkzwb7lJACKXU8z8O2JXkeI2XPZDxGiJ62GwpcRRXTdAVCdmTZEGNQXYOUAsfeg/WFQtHStA1pntNLiWFZNG1Dvkc5tUpRlBWGaSL3PE1DN/ZZRR2JiVJy76rX92zTYWDWdOhpKYqETpVIrUeTAqkJpDZsRP/FIV3tN6ZSDKh91HB2/4qfqksd0TO8l777mhIwZDXNgd265722bbu3UvVfSwB0w6SpOrI0p67aQRpS9zStZLcr2G4TyqqlKhvyfGDJxvGONM/p9vzYXimCIMBzPBxLJ3R1Hp3O+dZHL3h0Omfs2rimjiEURZqSZjnRNiLP80EfIAR1WVCVJUm8I01T/CDg8ZMnPH/+jOfPn/Lk6WOOTg7paciLlDTLSLOcLKtAauiGRt1UWLaJHwRoho7r+biuD0KjqluKsqZTiixPqdsGhcAwbTzXw/Fc/MDHtAZUU121xHHJ7d2G9dWGoNRhD+Nn/xmiesT+QabvFVWZD1owBuuapg+lu12V8a5YI3q5DxIPZjIhBE1TkRcZu11EWZWoVtE1YrDnaTqGlLiezXw+ZjSy0bWOusjI8y0vXlyQZTt224JwZJHly8Gy57iYuoNt2NRdRme01D2oTpBsV0hp8g/8EIB/HN/iOIqmyaHVaKqSphvO6HQOhpUCPb/67B139w9EccVsOiUMHTx3RJ7nPH1+Qp413N9tUKQ0laTMJY6vsVjdkpU7mqbDsmx2ux11p8irnL7paLGpy5S86qmKHGkC6AhMNKHRNCnL9WK4Blg9adIhpYbnBhhGgCYVvjtmvWgJXIeu6jiYGJiuiWOPmR06bLIHhBoxcnRcbELvCJVbjEY+49mIImnw7J5W6ZgTRRY3bKMll2/vUF1NvF2zWGyZH9l0quazzy6ZHkx58vSC7faeXo9Itjq+P5BHjo4uwOm5fn+NbRqERwcYTkyy3tIi0NqGzWbHzXWMZYWYVsPNzRV1MWN2ZFKWCeOJzcFhyPJ2yXQ0xTF9yjLCMBy2y4SmqVBMELZkk97j+yfMj+YUKdRVRZ6m9GXAR8+/gVQpvuOQJT3f/t63kMriYXlJT0nfQxB4JEmKbliUVYyhn/Dq45e8f7fg7OAZ22jJJtlgmA0nh8dstwl5ESM1gRIRdd1ycf6YL1//EtsRPKxyxiOTNM64eHRKrVb89C/e8du/8wO8oCdNG+4f7oi2OZouub1ZM5oErJZbDG1KEApE7ePYkrzYMD2YMT0MSHaC737ybTTT4hc/XTI/cemFYDKdEEUL
4uSOIhX83m/9m3/9B9V/9J/+x3+kaxLD0NCkJIm3BOYMIRW6YVBVGfP5MevNFsMcMnDrVYxnTjE1ySwMydId221ErynqvqWoC1qV4Tpj6qpE9QMqxmgs4lVF3mVIT1HkHZvdjvAopOp03t/cIVRNYJlM/HOauqGuTALP5OL4jM1S7RFRJdG24OrdGyxPILWcJO5IY/DGLdsoRXUmhq5xcRxiuibStjGlTWi67OqUXHRYQscsHYS2Q5QaeVLjhS6y11C0dHZPkuUUbYUmBWW5xMKnzEocWyMtaq7e3fHo8QWbKGO5XWHoLtQ+BiM29zWO1XP/sKV46LG9MXlUIpWNJVIcK6Bqc4RhUTYdRRnTCUktuwG43HTkWUUnFI5lkSQdk8khZdbheD5xvMVyAnRL0aQFbd5iOB6Ngni3Qdc0xqFPUeXYfsDVuytKFZPVOY2ocX2QrkIzFaqAn/zkc/Juxycfn2E4Dr/3O3+Hh2jFzdUVppbz8oPHRHGC61i8/PA5x2dz4mWErglKNgRjk/nhCZ7rIzH5/re/j6wK8mKFlgssK0DXNQLTIesTVNfT1vb+LDhYlTTTQ5oQbRO03qDRh8xWm9f0bQt6T4NEoSEN0LWOh+iB++0du909opWUVcnsIKBJBFkaUxQJQtfYJAVREYOhsX5ImLo+vmVTNjlGp9NWWwzHxNFmzMZTur5F6z3yBpZxRFNLPD0kDEOqtqDuSopyRbPPlgrpUyuTXjdAd9GswVGvGRq3t9c4lsu/YjoA/LdNTJYW7KIY23SpMg06iWt7+PYUXZq4loWl+5iGjuMMp+0sbZiM5hzN5+hai5RDJrNrhm22aepYpkPdFmRFhtAkph7StQodRVdJVC2RoqeuG9K0oshL2nZQbKq+Jsu3qL6grlLWmxvulzfouk5V1sRJgh8M3FAEFFWOZkqKIqNqSwxDUNU5cbImSbdE0QrVt/iBPcD61ZCHh6FAo0mJkBLVDapS3egHMUELQipk36GaFtGLfaN+oHcYuoEmDZQC1f/lVk7Qo9qWqi7plQIh6AVD3ECIQVmrFHXdDGIT3cI0bXRD35d3hmyqrusYhvH1yVpKuRcMmLi+i+VYSF0OZjBDovqGTjVDS1iXaIaG0ASKgUYwKGCHSINj2wMn1HMYj0KmkzGu4+B7NucXR7x89Zi/+Zvf5Qe/8THf+vgDvvHhU7754XPOT45wTAvVKMqsJs9bqhLKqmezTXn79pr3l3dso5jdLkFqktnhjNnRAeEoxLAMul4hpEa3f6CBQfeoaUPOWchBOWsYA8ZLN0w8L0DTjQHtpWvYng1SUDUVUteGrbro0XWB7MHsew71KVJa0HfsKV2AQGgS1beopqIpyuF7IwUIQRCEOLZNJVrWMh82sHRDY0122I7BaBzieR6+52PbBro5/JyUVUbX1XRNy2a5QaqBM1wUJW+//JK3b99TN6DrHmg1RZ3SiZ5tGkNvs0tSdMPEcTwczcMQNVkWY5oenuvzr7seAP/ZZoXqezRd4nkuR0enTLwZ08mI2eGIh4eIUTjngw9e0rXg2i5SE+RZS98pVouU1fqObXyPJnUcfQb9YOSaTEakecIonIMsyTNFWbZ09KiuYRfXaFqP1ED2Gp7r4JkOVVWg2kEQsIrWtKonmLgYpkuSLsjrCtMy8T0bU7cYTcY0tGRFzsUHY9oLSRlbHBhzwvERl/fvCC0DVZmEYxd3JMlVw8HplKvohqTO+cYHR2SbBN3xsHTJ/GjKcnVLuqtYxzuioqNqodztoDXZxgo/dDmdn3H5xTs6JUF2tJnLtz7+hE6teHf9K6KtybOzC3yO6XKTx6cXPH7yDKMJOR4/5Xd+9zd5/uQVzy6e8fz8JZ7f/F/UvVmPZVl6nvestfa895ljysg5s4auoavZZJOUwEEgAdGWAV2QkmALsGECNvw72v/BMCAI8AQYlmXBBkhLhi4E02OLpEk1e6juqq6sHCIjYzrznvdegy92ZFK+4Y190c6bBLKiEidOVEV8+/ve93l49sUFSRzy/T/7itVNy8HBKbvdDVbWvLl6Q9vVvH614sHdlJubho8+ep+2ahgdpcTKEFgfEU+YHUVcPD+DSPDjZy/Jpin0CdXuBuMEu3JNUe6ZjhZ88P7HbHdnKDnn/v2H6Ebz/ItzkkTxnV/5Nl3/htdnX7G8htE0oeo2GAT74hW6lSgZUO8sDx9+ROwHuG7KaJKx3W+pqh7Ps1yca+7d/YQgrVmvDEW5YbetePzkIUEQ0vc+Xbdle2P46KOn+KpBS8dme0mUHfL08V1+9MOforXg27/4bW7WL7hZXxJnEavdG66vS2aHE379O7/38z+o/lf/5B9+1/ccgRpxcXnO00/moEY4k5DvVsRJQFUXJKnFGMurZ2tm84Ddfsv1VUMc6qG9KxoIAq42O4I4JUtG1PWSzjYoFRNFNWVeIYXEI0JiqYoVVs2odgW6LOjyjsibEIYZZVmyvik5OMloW4GMJV6kqKolkgDlD6xy6TJ0r/D9mt3uEiVGOD24spu2pawCDk+PkN4Yz0wR1g3f3ByczBfQ9xgnOVgsWMymLFeX2LamrwzG9Lhe43qBMHPauqUudzgjwCls76OUY72+QRiP6zdXxH5A1zZMJwlffvUzFifHuEBSd454dIKQBi8xdEYh/AzPl7R5QaoUnnVI3yOME+o85zgZIaV/a6vRBFJgTcfD+w9wzuAJhXYVRW7xXEJbW86vVwSxIkkcxhn224YsS2naNZ6X4NAYHdN2HV0r2K43uK7FGsfByV0i59Hu9nS2p9hviGeSdDzh5OQxnufQdkmWBFycn+H5Bb1qmB3O0K3hZH5Ivrwh81MWs0MOjwf7jB8JnHG3MOyatqqxOCbZFN0JfC9gv6sJwpDAD7m+eU0WZygRoKylbwzSN6jY4HkJxkDb5IReSpgkpPGUrsiZjKcgQ3ZFwepmh0TS01L1PdpJei0wvUa3PVGQ4vkD1qnWPda0A3B7coTvJ2zWe5xxTKcJb65e8ebqDKUMYRBwvbphudoQBhO0cUjl48mI6+trrO2YTDKcMNws11RVju57EAKhFL97O6j+5zcvCYJkGMJ7RxRJLA1CmeG6sW/oOst4mtF1DZv1+hYqr+j7jrIqyMsddVPhbjN/g+99aJMrTxEEPqEf0jYVzkBbO/KiRvkBo/GMOBusRGEYsd2uhqHXlbRdTlN3jLJDsmTApykPpBLvNoyb7Zbt9pquL9ju1uRFTuB7aN2xWl9SVEuK6oauM4zHhwRejO/Fg2zANChP4PtD7tTYFiEdnhcikNgBBosUHgI5oLtUcKtyNQjrcHbYpCo1RPCdcTjtcAYYQF0DjYFhkMUxGLBuMVlvizhKyUFNa807BeugcZXvPk7ets/fbmzf/g6D0tW5ATkVRRG+77+zZkVRdGu18t4NulEUDV4kM0QOrHEYDXXVsbzZ8OrlBS+ev+bzH3/FxfkKawRRFDGbTXn//Sd8+ukH/OIvfcxnv/CUew9nJCMPozVRNBiQjDWUZUXfaaqq4+zsnNVyNVAW+h4/CG5f1/C5v9XJwqD
EfZuffavVBd79mVLD1tLoQfXqKX/Qy/oezjqM1pyojG/6pzgGOL+1Zrh0SIlwDtxQitJtS9+2gx73Flc1Ho2I44hGGPJ0iAt5fkCWTciyMW2r2e8K6qrHugHboJRH4Pskcch8NiEOFXEkEbLH2JqiWtO6lmzmsc8LbB+QJZNbE5qk7x1N2RKGlrIYuKiBH9N1hlBlt8Viy789Hoxy/6gosK4jjlI8mdJ3PUdHGbODCW8uzul7Tdu2VE3BaJzg+TCbTZHS0TQaoSq0bfA8H0/6tLVhNE0pq2ukUnR9T5T4JHFKEk04PZ0TRIL5bIYxLcYM9rQoVBhtCKOQ9WpJEk3o2oa63jIeTfADSd91JMkIRYx1jtk8Zpwq8vo1h48KHjwdcfmyId+3jMKEMJH0XcF2n9OrBjpHmBo26x0ukFR1Sb1fo2tJuTOMJjNEYFEupQkE44czSrvmxdeOxfEIF2ouN3sOD+Z8/N7HSFlR1i37siONptw9fsjF8oJRMuXrZ18zid/j3/v7/z4fPvmQo/mC3/r1X+bB3UOO79/jo09+jScfPCGSJRfPdrz68iVf/uBP+V/+2R9wsdkTRXc5upexvlmShDFZFnJ5vUWEFiUSwmzMKB3zfPkjtq8L8nZD9MGENz+8oXY5WM29Rx/w9cXnvPj8nMenT24f9gueLy8RvsfZ6xtmhwGbbYM1Ke89fZ/XZzlVu6Ms9zx57wFJnPHk6RFf/OQVTx9/Rjr2SDOP3rT84EffZzq6w9XFHt16jOcel2/2hKFjuqgoNzOsKNmsNN/+9l+j7c5oaosKEhABo2nI6ekR6/WG/T7nm996wtmrNfcenJLXe1wy4uWLF+zLlny1ptcBZdPxxZd/ytnXW775rQ/QbYvrDfVO0zc1o2TMr//qz8eg+leXqQIfJ33qusa0El0YlO5pKokVDdtVwqffntDpPZdnDnTC8npJGI3pTU5VWWw3I/Qt+23JzD8m8Bo6u8aLMgLhobuG7WZw3vamoy9zstkxVRVizSumkzscHn04PHm2JT97fUYWB4wfjXl9dUOcJGwvB0i7H5YI1+PMIYuDCa5X3Cxf0pY96cijbffU9aDznExSeit4dfmSptoyj4/paojTkNPDQ66utmSJ4vosJ41gtSwRfcjj930WMwt+yGpbMZo3vHi25uzrCC/McTocHOD+HmdShFKstwUqiNnsKw4XY/ZFxem9Y7768QVRphj5Ccp0bLcbzL4n8lKIapq6Gpiu8xkKR7nf0WjDwWhEVzfgSXwZEqWCpmyRWDb7Fb3t6YymbjTJKGSzeTP8oB17dFoj8LCixqIoqx3bzZ7pwiObHFKWWwIVIrSP8BPwGhLl43eCF9uK7PEjvNBxXZVM4hF1d46XRPzoRzcovyeZWJqw58fnX3Lv7vvsa4Xyx+R5zjidsV7tWa5z3lxbpIjxQwV47HZ7PC+mNxbTOPReoPyAotgShIqmrTBOkIZjxqMxXdOiEPS6pW4k1aZiMp4SBAKloqGksqxAbknThOV+hbACgY9MBL2UrK5WSCUJIgCLtYIir+lcRSRCDoI5znlUnWY6XtD0UJTX6M6gdMu17EAo7t95jDNQdxIvmjAJEzwhQSTESTbkktqWol6y3DoWiwOOj45oqwJPKaqq5/ziDbw3A0D4iq7XxEmPF4UIY/EDR5wMp+8oyggDD6zGdI4smtA3DdkkAHyarsfY4QeV54X0TYt1UNcNURSjraGqhtOqLwPCMGAynvDJp6fUTUtRbXECxqOQrnYczE8Io2Fz6HsR40lG3yqaxhKHycBu7QzaNvSmBiT7fIe29WDHsj7GTsEJtOlxDNm72SShbYfXEccJUkn6rkf2csBPGYuSjiDybk/4Dms1ZVmhlE8UhQNu5Vat6vuDaMBTg1q26zt8IbHC0ZsOh8APAvA8uC1fKekhlALEOyJAFIWAw1pHFIWo24HTOIPuB6YqcKt4HUqxbwc6GIpQnveXKtO3PNd3jLA4gBEAACAASURBVNBbbaxSiiiK6Lpbg9bt0GedwwlJGIVkWUIURbdZ2Q6jB63t2esLPv/8K0zvaNuOMAwYjTM832M8yQgCjzSZ8mu/9ggQbLdbmqYnvyUsNG1HFPloM+R/Oz1spvWt9rXr+3eqV601dV2TJAlGa25ubm41tBlSqWFgE7xDRr3994SQlGU5EBqCAZBuHLeFNDd8vnJ4kEIMulznhgymdRbpJM4OG11jhq+ntsNVbtj0eoMZzPc5OrozvHfWvtOvdk2HtCmu17w5e0Pfl4SBZDJN6U1JEBnSeUjTlhxlEaMoZTZZcHNzzTbf4QUBVV/iexHJLKbrW4p8hR9Igigacs2dffd1D4OIquooC4egpii2YBRBVIKUzA7Td4gzKxXSSa6vb0iTjPn0gLzSFFc1oRcRR4cIz0PKiiSLadqKw6MJm3XBYrEgTX10a2j3Au1pbK+QJiBKJGGY4kUjwrhnNk/Zb/dkacA4GSNcT7XtiTJJVym+9dkn3Oxf49qQDx4/4HJn2PICpxoC3+doMeX58zfMH99HBS3jWcbNVY6rthzef0zcO756s+ajDx8T2Zp4fkDRdeigZRQn7OsdZy83PPjwDo+ffINp8oooyjFAOg3xM0h9n+PTx/zLn36PTdHynU8f05oXqMAjjHzoZ/ztv/O7nJwGdNWe9dUF+1FAEnb85F/9lLP1P+YnP3nG+qrDizyMJ/ECy/d+9AOevWn4nb9xn7/+2wlOXFI0B9x9csSkPGYy8Xj++pz99pL9bMz9w4/ZX67wRYy73nNZbJjNYmIv4C9++MdclyUNHc/fnFHT0oqazM7paZkHmmefP+Pe/W+zL874w3/65yTjjLCPmc99zi/OePr4ff7P7/05TddwfV3yrV98zBc/PsP1U95/7zNurs6QIuXgOMQYjXF7fDXl6kwTh5rJzGe30vStx8H8Q95cPKPWEYvFgsurnNnBiH1R43sRn3/+hqfvf4DwG66ulujdBCN84kxw8eyGg/ln/MqvfsgvfXKAtMecX32BqxXnN2eMsvd48t59iu7y/9Vw+f/lr79yo/ov/ugPvnv18pyP349YXTQYfJKJx/XZjk+efETRdgSB5dXzJW0V01USazV379/ByRwvjnFix9FhTBLHbFZbBhGJT1VZunaP7Ty0VpS6RfoexycjpJfyZn1FNp0QZ4fkheXs5Uti2fHg8ASjNXl7iVYbxvOWaucxmYxIswc4kdJ0a2zrsau2RAmMs5jZ/JTttsbPDFXRMT3wgIqLi5JZOgezJUgMq+s1u+uCi+slkR8wW8yZRCmLLOI7v3TMZFzhY+l1AeTce+jx6uuO/bZESR+lJGXRESQBdWnRFm72G0Tsga/Iy4q+dew3Fa+vWoz0We8L0rFgv8vJokPu351T1wVN1aEij171VLqlazWB8ulcTzCJicOI/X5PloW03VBySbMReVngMEjXEwQSnGY2j+nYU5YlgUwpiwahNF4oycYzqspSNpdo3dM1PVkW4QuPTb5k17ZEXsCT6AS1bNjvt1TGsD5fcjgZ02xb6l1OoDqK7Q7PjU
AL+lKzvVnRVQVlUSOlpe9qnPahV+yLmuVyS9MJAm+GEgl9L5iMpjS9oetKut7grCKKEkDTdQPmpypbOtMMp+Wqodj3oCWjNMEPJZ0zpGFM3dQgNLYfWJ7W0zRWDxYWJxAioGl6DND2Lb3uQcAkHRGrmB4Hcsgsrjc3+P6wmZJOE0YTAi+l3NdI4dFbgx96pHGK66CuS+I4wwD7bovwQXohrh+0rNY5jBPkVUlnG35/dgTAf/L6ayT+7aA0bAqLXPPo4QdMp3OsrTG95f69h3zjw495/foNdV3j+cOJNgwjrIWqbijyEikVCImnvKHl7inatsPzfbTt0aalbnPC0Kduc/q+xvdjhBvO7kpEFLthWImihLZx5Pka5VmSOMUYiKIAIR2rzRW7fAViEAwEvs98NmEyTimqLZ1uKaty2CzVDdJzeL7h5uYNfd8T+NFt8cnQtC0IcWuOUkNRqhsiDENRChAO3/fI0oTZdMRiMWMxn3J0OGc+nTAepcShTxT6hKGHc4M6FhzK83BuAPrr25zq2yFKyrdIqw5jB2qBFGp4LxnqXMFtuUuIYbMZBME7KsDbjevbkpZSQ0NdyuHvGCxS+t2Qba1lv9/TNM0wmDpH3VZ0XXULYe9ADGdlP/DxfI8wDojSkHSc4qQlLwrqqmG33bNcbjg/v+TN6zfsdnscEAQ+SRKSJCHjUUIcR4Ci7/t3+tq+6969/qaub+kJPUVRYO0A30+ShCzLUJ66fXiwaG0AMSh0bx8UjLnlzkrBMLp1tL7k0GVDUc3xzrAlcEgcpu/p6hqcG877QiCFJM0y9r7mc7Eceg2+D1JgnRm+Ps6QFzm97vA8yXicIKQlDBzZWCJURxhB0xQgHOvtZlBB50u6xuC0IvRS2qqn7zSjSUpZ7RDSEvgRfT/kVZWMENLSdwJsShAo/p3xCID/9PkLwihCimCwnwVQlDVeoNjvd2gjsFaiNTg7oOW8QCFdeMulrZEipCp71suG6XSE9G/tZy5Adx3T8ZSL80vatiLf57SFYz455OBgRt1WxHHMdrsijIattzYtAOtVg698POWDiTg48djs1uzrCk+G/NIv/BKm96n7GtMrjkaf8MH7DxmlEk963DmM8FIIIst7h+9x7/EhKjUs4im9NThXEYmErrGgDM9fnOF7Ld/46FNGieA4uctBHHJ1VjI/HFPvp5gqY3wIu3PLb/7a3+CHX34f6YV8+N4T6qIe+OYi5Hd+59c4Ph0WTRLDbn/Jf/8//AH/8x/9KX/6xZ/zvT/7KSf3PyQ3S8gkxDHTO3PSw4yT4xatbmj7OX/9V3+LyK8ptjV9rxmNHuD6lsks5OXX32c+esQHH54y83/Azc8+p8exKTJm85CuzDl7dYWLLeNZxE/+4hnnN+eEWQSNT6A6OpfhhSkPH8asrwtEYFA+jEeHSAHX1xtW63Mcit1+SxzPCEODsQ2T+YjV6pKjg1OePP6EthU8uPcR7z9+n9dn1/zNv/XrfO+P/1cO5g+RQcGLZ1vSDKyTRHHE9eWe2B9TVdccTT7j3hNJVW9JszHPnn3Nlz/8IY2puXt4h323I5CGzVVNQ4pxYxZHD/n400/5zi9/k/HoeMj6I/nOt/7Wz8VG9a8cVP/L//o//m6TGx4/GnN4cA+kj7Etx4sHbFfXTE5LqqpCqQlp5lOVG45OEk7vpcRxyNdf16TjMctNw+VmjUoD5osjotDDk4auc0wnM07ujSjLiihSFKueYp+TjUM866Noabslut7z8PQeb15dIqWHcz6jeQqupSwEUZygO0FT94SRwJiOvG1JEsGTx59RNFu21Y5dUeGHEoTFNS1K9oySlN12wH50dYeUHlq12N7y8aenzDLNYtyQ5y8RlAiRY2RPvvX5/Aeaz/9CkyQZaXSHMMiIsttvrPs9fhyDgiQZsVxu2G0q+m44wx4vHhD4irIumC8SdnkJIubxN57w46++IA0ytHQYDzqt8YTHfrPHj3xcLNHbPcIqjO7Yr6uhxOL56K6nKHbEkWO/awYF7ihiND4ki0ZING3Xg9LgPPzIJx177HYVViu07qjbmpKCnoZt2ZMkY8q2ow0F2XFC2eUEE5+icby53hKNPfBr6rZlu6uG5qqzgKKqe7rWQ/gjRgcLtmWLFoq2d4RRjPIFeblHKIeKHEiH9CRtYxhPJjhhKKqSummpih6jNX4gsVbR9gZkTxKHXJ5taTtD0+d43nAiVtInJKTJexprsVlMnE4oNxuEH+FHEXlVYIUmr3L6rkO2Paq0SE8hfcV6uUbbGmyH6Tx8LwatuNlcM5mmRF5A1VTIUKJNTyADZuMpfuzRm46i3mAMxOkUKTy6WtNUFVIGQ657c02rK/6DxSkA/11bEYYRi+mcNIrxREwSzVgs7rDb7QgCxXQ6ZZfnjCYJbVdSliVd31K3BUE0tMU95XN4cIAUAxC9bgZ/tjGWpmmZHxzS6Zq6rZjOjsn3+yF75yVkyZiq6IazuhRoUzOdpwR+iLECbQfItWSE1oJ9mWOdQsiQXmtOjh9y784TPBkiRYB1jt1+h1TQ6YbetgRBQBz5ZGmEkrDdrtG6Q/mCutnT63ooRkmFMS3W9hhrUcq/Be3fclBvrVZvm/pSKuIkJUlSsiRlPB4xnU5J0gQhhweN4Tw/OOatHVBfQgjiOH43rL496Wtj6DuN1uZdeert0PYW/u+co7/FS73Nr76NAggBSg2aRa01vh+glBoMVVIRRuG7jGsYhnied8tGbbFO0naGzTZnnxfstjm7fU7TdOz3FWXV0/cWbRxeEBBEIUEU4oVvRQOS3hjqqmG721FV1UBu6HucswSBfxtL8NCmJ4pioijGUwPKS6rhdb19X/4f7NnbYpuUgjAMhpyxc3/JgdUaP/CGQUsIgsAnFjGTPsRZc5tLNrzdrlrd09Ylfd8OG1rHMKgqRRVZvt++4fLqirwoSNLk3cba2uGkPryGQTG832/RuuPq4poXr16Ql7vhwUMbqrJhud6yLyuyUUKoMjzh09UNNzeXqMBiXItT4IQYMGi+T991pKPxbYGvw/MCdF/y784XAPxzFVI1O3pdkOc5YRAT+AFIRxAGt0QHizGOoixpu5amKegay2Ixoe8tbV8TRiFxInGi4fJqTVNDEAz/TVSF4+OPvsU3P/mIg9kD7hwf03RLbq42HJ8cUjc7jJak2YSiKFndGDwvJhkZHjy6w3Q059HTOTfXEE0028Ln6f33GWUeF9dnTKbHlGvNflXx5ZdfslpvOZjfY3+TU5chXQ6ykiQTw83mklHoI+zw/4BgzkcffpPFfMx8vEBqn6+enfHbv/2rLN+8YrcqScYRLy+u8aTHb/7qL9CwZZJF3L93RNNWHB8vePL4Lm0zYCuNtswOfVbLC55//YY///7n/MH/+EcU9Y4gmfD001NefP2C6WSOUwXPv1zx9P4J6JLl5Q7n3SVJPWQzwjQp6+I5vfXAVcTmiMlizPmLS0TS4VrN3YNDjieWb78vWK+vONuN6M0Nu2tNnd9wND/GVA2xjanXJet9wcPDE4rNGuMc48mUs+c5XVdhpWa5u
kK6DCkS7t095Hr5nLKQZOMYKRX77Z5V/hrTZ4xnIYfzR3z2rQ95/fqcB4/us10uSVKfps+xVAh7h3/rb/81envF5UXB8dEBwipmcx+BZZTOkDKgLG/YLMEPQvreMT9IKZqapinoe3h0/xFf/Oxrpodz/uTP/oxX519wdX3G6srw5PGHxLEiCjLef/IrP/+D6v/0h//Nd42TXCyvOLyfkU4adNfjh4owDdFdiHES4UmM3DGeWqYzyX5XYbXl8J5Pvi25utiSJgPnMVQJgoIohLr0uPcgYXmzoS3A1YJRPOLunRmRigkU2FaQxB6LOxMqa6lly7a5pO87mnVEXijwWqqy5eH9FE9YblYFXlZhrOFwfsh2XfDy+Zq8qZnOhwanoqBaCdp2C7LECkHbegg8wiTEiwN0Z1i+ESjto7w1iID9Lma3geevOs6e+1xcKA5PxvS6Bb8ijDL29TnS85GewBmBLz0W2WIYvIVPHI+IEm/4oes5hK/wZIzzoaz3/OjL5yg/YTadMh1l5Jsdo9GIvutoqpr5bMp6syILI8rK4seKtpOEcYRQ0BlLoxtkH+IHw/Dq4bNdV2yWO3TXESeKOIsp8558V+LkHtuGtF3HOD3E9hF23eJ6hdAWZXuOHx2RHIzZrXZc32yZTw9omprtbsf1zZ5OS1rT0pqSpq2QVjGKTmj3Hbq1COtTlS19b9ls9szmGcloTFN3aK3oeofnQ1FtsTTMZ1O0lpyfX6N7QxIr6qpACEHXWTbLkiBQ9F2HCgLicUw8ihFO0JY1zoDnBRT7hnQEQdwhRMg4GZPFCZ7nE4cRvpKgLJ3ROCuwbUe9aZgcnZAmY0IvpmgH/I1nYzw/ZDqZ0tgOZxzz6THWCgyapq4QzlG1e3qgagrqek+gRkySBYvZGGMNwhua1FEU0zRDtu33ZycA/OOmZpTEfPbpNzk/W9PWHcpT1E1B2RSURUdZ7kAZbtaXKGWQEk7uHDEaxQgBd45P2G73CCyhP7RGx+MRfdei+0HhqXyP3W5DFGYIEhw+XdOSxAlRmOArSRKPmUwTDo5DVps1eVEjPIe2w7lZ2xbp98igxzo9KHiloe1LrNEYbel6zXy2oO8MwknSNEUIsEZjjaMs2qGV3NXEqUJ5hl2+pmkLsixFSsFqc4mQlihIESh6bVHKQ3k+fWepyobtdhjghpZ5RVHWtF2P0UM5rOl6hPJuYwMJcRyjnUW8y4x674ZT4HYoBd8P8Tz/3Wmbf+2fvxUJ/OuD69shVWv9Luf6Fj9VlhVt2+H7/u0Abmjb5t1W8+3wFccxSZwikLRNBwyZS263k8YOD1Je4KECBYqhYe40Bo1xPZYhTtD33RBrEAPyarhOqHcb3SiOSNL0XSShbVqEkLfWLnWboR1yw9PplCRJ3pEPxO2ADcPpH4bNrecpYNiodp1GqmHAXzUtW6s5FiFYN5Ta3HCxwBp0N3A83/6y1rE0BV94G+I4YjRKybIU02ucdQRhTJZlw4OHgJPjIyaTEaMkJQwDqqZAKof0LNo2WHq8yIHUg+AiGCGExdiWo+NDRpMI6bd0tsPYEIfHaOwzGo9vecY1zpmBBiF72tbw+0eDOeifKx8pLePRiCRJ0b3GGIFzmropafuKZBQTRwHz+QylFGlyiDGOyWhO23YU5Ybjk1OEV1C3Facn92i6G5I0JUumOFdx9uKaN6/XfPXVFwjZIQREcUQc+7SNoSoG8UZVGuYHMZ3bIFVCEo2ZT06wvU/gK+KRYXp4xKO7KTjDTX5FVXdkQcRsJpgv7nJ85y5VVTKbTXl4MmO7zJkv7nPneMrLszPWN5r3Hj8i32l+47f+TbKs59kXX2A7xacf/jLjOCTJQl6dXXDn+BF+qLBGMcvGhFJzevoN4rijbx1Hi1NMXzCfH1FXJYGnOT6JWV42tFXA8+cvcX5B03Uk4zmj2RQvnrCYPeT1xTmaEXcf3aPrfJbFOcv6CnTILm+ZHwcsjqDYOjbLHccHd7kpf0Q8fo9ZFHNVnmEaeHV+iWenNOcv+P6zCFKD7hTxKMVTitAmlEXPrm6ZTxZMoohmvyXfVMwO51TtimJb8fj9U87Pt3z4wTe4vrpgudwOJj/hiMIJXV+jTU9R1Gw2O6r+guWFJU0Fu3xNnjcof8T65pp4pDB05NuQB09SfvAXPxmIDV7L5Zuc6eSIxSIhTDQ3lw1hUrC8FEwXijAcsdzs2DnDeJRivIp+l3JxUzK/k5L4JQeLmLpd0nUlTdfxJ//X/8Yf//EP+JN/+SV/9/f+f4Cn+kf/7T/47mhsOZhNCIKM5f4aL9XcuaNoq5zVpsWiad0GZE/dDErSF58H3Dk8JYgs4yTi2598k2bXcDS7y6sXl8zHcw4WY9JEcLO8QsmEthpYd7/8mw/5+uwF1suRBz34krsnJ7x5dc5+2ZCoGIVHtdVEsaAqG9q+Q1hIwhGeF3G1foXF8cl738JVM16fPeM3/uYpjb5hfePR7AsmcUTdwsOTb1F3ApValFQsRsf02hH5IbFKkZ4h31e01Zirjc/z55qvnzVUjaQRDUaUAzlA+ghpmB0B0iNIJU0Fzvr0bUMSx5wcnrDPd+Rlyba4Yel6amfQfjSE0fvqFqEyIopCRtMJxS4ni1KkkKw3K6azMVhLGicURcNkeogILW1n2eRLgsjHCyK8MER0Eicdtpc0Vc/iaIRzLYvJAVWtWS/XjNKY2XjBbtkShoZA+fStom8tnSl4evoY4UcEYszEm3F+eU3iR8yjAE9k3FzeIJVAKIYzuwsZTw7Iy4I0HeDV6/0aFQJBTdM3BF7G/fszhAnYFy19WdFpQddpurokSifUZcsoyljebKjbgjAM8ITEIyEMIzxP0HaOKPCRLsYLUk6fjJGhI98bZpMJThvKfYGXOFQkOZqP+YUnj7l6seRqu6IqctqmQUiPVneUVYsxQ3kuyqbI0Ofi9QUSSWl6lPIZxRmNboZWqgwZj+bMp3OCVGApSGKfNB6xq3I2uwolfYT1SOMRRVmwz7c0TUkYhMNZzgmOj4+Q0vH3wgFP9c+UAWvJ0ild33H2+muqpsULQsJYoALJm4tr9tUOP5BDG7rrmIwnhF5M27RcXl4Q+P6Qk0PgqRDlCeLEv82aerRdQxD4NE1HW2tmkwXW9ghpSZIJDkOchbRdT9U0CCXx/AghfaI0AAtlleOkAyEpykHr6exQrvJ8RW/14FK/PRN7yqcuavrO3JaTQkxnwMJ0OsHzJNfXl+T5nr5raeqKzeaGtimGbakSIPSAc7GGrmuGTWiaINQwKDkEZVVRVjVFWZHnOVVTU79ltmoNDI1/Yw2eP7TW+74fWtu3m0Mh5MBtFQKphra/lAIpxLuzv5QS5xxd1w0s2dth9e3wKsSArnpbohqGuiFW8JYL693avQbblveX+CtnMUYjpRiUurcaVxgA9lIJYBiIhxP7gPGSwicIYgbG1/B+eL5HEPggBGVRUdfNO7pBWVVUVYU2+rZND9aaQWTQd3RdMzxcvT3T326I3/7urBkILmZg0HL7+Q++8wDleSjpMMZirCJTEUcmHOKpt6ra
YXnq0F33blAVwEWz4Xu7n3KwmIEcPiaKApIkxg+CofjZ1RjXs9mtWC2v0X2N1g2XF6+pmg1p5hMlHvl+j5SS3bbCoajbIY/b9C0CQV1bhIhxEoRSBEGCF3r0uma329O1Gl+G6N5itaJrDH0v+Q/vHADwD16/Jt9XKDmUOqXUVO0a5cF+V+DwydI5bdexurmhriqSeM7de8d0TcM+3xOnKavNDUJZyqImDDw838OYCmcts+mc1WpL1/ekyRjjugETh4fvh8N7YR3bVU6cKDqds1jM0EYThyOODuacHD7k6volXihRwqMpKpJowk9+csl222A1XL/Z8ck3v0Ft3rC5tCRpyH5XstptUL5HSMSrs1d04pg4bjl784q20fzydx6zXuYYE/Abv/kZtrXMDsd4geXOwVPQHr40PH404eP3Phn4o/ERUha8ePEzlGypywo/EqjAoJuULItAVsNW3g+Zzg7Y5xV+VGJ1zHuPP6PqbhjFC9abNWm2wJBzcb5lMj9iPGm4M3/AfrOnqB33n95jtVnT1gU/+On/ASriKFxwvWm42u8J6fiTv9jQBDPKfs9uZQjHhlevrjm/vGJd9+z6ig7H0b27NKpEh5a61zStYDoZI+TwvWm92XL/3hP6/oY3Z3ukslQlxKlheV1wMD/GWEmaeTSNYTI+xqo1r16teP/999lsVmjtGM0kL79qiCc96+UGzIhsFINfo/sSpWJ8LyRNFmzXHUmacXQn4uz8jMvVhsqUPLx/j75OKDZbFscZfS8IUp/ldsvxvQU3S0OUJpRVSRwFHJ3E/PZv/t2f/0H1f/8X//C7QexIpiHXqxvoO/q8oqpXRJniwdMJ48l9hFeTZhESx8nJFF0GnNx1lCuPuoCqv2B6POJnL1+yOJZEgeL865aH9yfs9jVhEnB4p+Mb34n5/MVXbPsKPzVISuZ3LaVboa2PaQSRF7K82NLVHulM8tE3HtGWmmrbszg8wKoCi+L05IDUm/D1Vz/kcDHhyy/PeO+TBUeHGaFLkdpneicljS1PPnrKs9cXmMbgukERG3qKpuxA9gil6UxIJyGIPa6v9vSmJZ4rnFAk6YQoHFPdAscdY4IkxhdjiqIhTBVOWfImB0/Qast4GlEvW2IXM06nBKLECkj8BTLpQRrKsmO/2kJvaPoeJHhKEEUhQiqyIGW13HNwkLG8WWFMTeAriqIbYOZdi7Y980XKdJRRlj1926F1hx8k+J7HbHJIVdZoneO5AyaziDA0aG249/gJedei/J470wn7fEvZtygvZd2UlMUapKVsWrAevu/hK5+DbIEyjipvMb0kHUU436IdRH7I+/cfMJY+q92W5W47oKDGx2x3OQeTMR0OTwpCqVivco6P79BWHYEHkR8R+Iqu06gwQgmF0orT42OM7Fjt96TZmM7UeJ7Ho/uHKOcznt1Htxa3qRjFEZuqoXEtPZbrTU5d10ySOZ4NmE5TrJU0LieNfapyS2MdoZ/QVjlOCsq2ws9gv79hubxkk68wNDhjaLoeFwgCGXF69ID93lD1OV40+OsFg30rySLaph+wSsLwu14EwD+pdzgL680WbWriJCEIIjrT09meq5sLPD/EuB6tLVGYonvDarnBGsXDB0+pygIpwfMlvgoIg4Ret7RdAQ6WqzXCSfzAYYzm9PgBjhbpaQ6OTmmqnqJcc3OzxFnFdrelaRrCMGW7r3FO0PUdcRIShilNDUKClNA2Dl/FtJ0lz0s8X+F5AiE0Sg6DRpokNG2DsxqJZTweM86mrNc5Dsl4OmE+PcD3Qxw9SRreDlU7tBlkBA47ZP90h0OjdUvTlnS6GzZMaTwwObsWsNR1yb7cD1eHrqWqK9y77ad5d7aXUg7te2vRZtg+i1t8lXuLoxKgu4HYoJTC933SJCEIg3cfE8cRnlKYW9GAuyUW+L5PGEQ4B8a8BfwP+CvfH4ba7nYDG/g+Qsoht2scfa/xvQCcRN9uFR3gez5xFKOkxLmh3OV5PohBJzoM+APJom1rhHRIpTDaYuygYNVGY8ywFW+aiqoqCfyBBDDKskHGcJtZLcsSaw1C/KVJ620kwtwqcp3TOGNwTiKlxWhBWfUkXsCpypC3hTXrLBaLNZq2rjFGD0IL59h2e143S3zfw/Q9DkfbtrRtQ57vsGiatqDTJV1fYU1NVe5p2wrrGuIItGkwpkWI4X3Oiz1xEhElIdL3QBjSqUdRrtFaU1YteVmhb1FmAo8wjAgCQRj4SNkQh4ooTEmzMX9/kgDwX1ytCUOPSq/+zwAAIABJREFU6+s1OIU2hiJvmE4WKOVjrB66AwLmi9lt7MLHmILN/pIwDuiNYbO7ADdwhdE+B/O7FMUWQcR2m6N7aOqeutJkyZyu9ths9jz/+g1FUWO0JQwy+r6jKiuQAScnd8Fq6iJnPh/z4NGCvFgRRSFl3nN1cwOy4/6DexyfnPLo8SlFccHzZw337jxls3oJ8ZRXlxc8WnxAmkq++PIZct7w/IsrTu+NuHtyzOH4lLIpePrRKZvtmntPTtG2Zb8uOBg9YHroKNqWwztDpnY2mjMaD5GZrkl4/HiOlJKfPXuOUzMend7l/Pw5mBEHi+mQ+w8nQ5xElrTrwTYmZctqec5suqALr2mx3Dl+jyT0aEpzqwtOePX6Z6TJHeLMgi8wVYauNlxdr7l/7wMeHB3xr158Tp5oTg6OWO19posEmo7takXnByA9RqHi9OQO+faM5XqDUiOmkxmYGCkDdB8xmYzY5OesriwHBzH5foduFHEcMT/IeP36Gq0tTd3ieQsODyecvVzSdDlRaqiqnDjy0a7g88/PiUd2MCFKR5Ym5PucxcGCy8srrLXEKfzw+y/ZFjds8muSZMKuesPB4V36siYZBxyk95kuYjzT8vD0AcaWCKas8zOuVjsePL2Ln0ps1PL8/Dm/92/8Rz//g+o//cP/7LuTbEI6blkchlhV0+uGdjciUzG58zm7XlE0axJPMu6P2L7oMXbL3cePaE3LZOLRdHsOZycsJjGRN0YASTZiW65Z3Bvhzda4JOf5yz1CWB6dvodtAjRjPJlgu5Sysshxw/2nY/bLnMD6hPMxxmimoxRre6LI4fuOJPBIvQk36yv2eUlnepoGIm/OYp7hhZbpYYpsY3ZNw/Xugv0yYrdsiGOL6Xw6s2c8iZBdiicSjk6O6Q3EiwgRd2SzFH8WocKUIPWo+pq6NSTpnOVyh+4b7h0dUuwb/NCS59XgoJaa4+kxXbkj9FLSOCBQhrrrWZyckrftsM1st/Qbh0AhPIhCn7Zq6LVGC0Pb92ShzzhLaWtHNlLUbUNTRzgBbVMRjQKMCBEqoCjXCK8DP2a367GtGQoMymO93TJb3CUZhxR9jfUV4TSiKrb4nqHa1khPYSyEIkB3BT6Kigpay3h2PGBOup44G9M3A+omVSF+lGBURFdpdFMQhAJjHD/98TNUJGhMz9Z1CF+xGCe4zuJ0g4oCRJrhRQFdAIu7h0TWkMWSbVtS1A0+4GcCI/9v9t4c1rJ2z8963rXeNa+15zPXqapvviPdTdtukIxskJAFGQIxOiI
jA36FKjq4xpthx3LbfbByZ3IC0L5iFjf/rA9m5FWaVIOVPXiuOz4Y9/P/Dpp7fU9ZrbuwXnc0/fGW5vr+ajNIv0Q4KUAuMvFEWGnWG5XHL3sOHp8QN9GPnm+z9S1hltP2HtlY+apJpEaaSXJMnVgGjCQD8Zcr1gWZaoRiCEoKxSvvvuHdbNnHYXTpePeDeRphXeOYa2IwaJUsXV0jUOECVpDUN3YOqgyHK0SljflFhz5aHn6RolNVIKnLOUWc7z45ngI2kuOZ8vZHWNcddSan+cSKJke5tz6SyL+xXj5cL51PO0e2YyniQtCAGi63h5+4q33z/hKZiwdDzRToHLeMJ5EPLE/rzHYEc9sAAAIABJREFUe01rLqgi8vx+D56rStcKhE65vVtx6ff4kDCOHikj1lhiUIgokXlC17a0l45mc0teFDw9PpNkKX/zH/zNP/yN6j//53/729/849+wahbIJIVQ07UtX/z8NcOp5Itf1DT1BmtSPnm94i//6nNWyw3Hg2W2Einu2D313D80VwvM5Yl6uSIv7xlmifdnqqpiOJfs3p9486ZAhIrd+Ui5Ctzf/pzL/MzbH0aqRUmpS56//8Dnn77ATRlZA3n2Em8gGM/x8j2jMfzx+0fqm4IVa1brz5jsGa0u1HnB+Ry5faP5w9+/Zbf/ju2LNVEpzp3A+kiMmr5rmaxlu7lDixwhZzabGpVq+rGjH2ZmM9AfZx7f9ew+TlzOF6pS05161uWGT24/5fbuOt3t2if+5Oef8PjuzIsXW968+CmLYoUxgahqxHLiqz8c6c6CvjuQuIZSRIYgEDrheb/D2Inb+1uQAkREKMmHx29JZM6nbz7HDDMSRULBarVBaMP+6cLz056szFBpxYcfLpxPhlcvbrH+TIgJWi2IMVJXG6xJ8GGiHw1SNHib8fkXDxy6A60dccIRjCcYh7eR0RiUDszTRFnUbDYLhmFimq/XWs/PZ7Jc8vmbP+O4P3Pp35HnNYGUIA1ESa4rsAl4SRIT7GiJgM5zpFcUWUKqM7w1FFl1LQflAmMESsLQdrTnM59/9gYpC9I0vWY+RY5UOQmCZVGTkHC6dAiVMk8OM8ystyuM9Tw/nwghsqgb0lQjE8HNtiErBed+YmwTmmbBpZ9w3rBcLanWOYtlSSASRUTlkWn0VM0GoS2ns2PRlIyzJdET3ckxdhm3d9f84N32BoJlv9sxtBa8JRET02ywQlDfrlksJIuqAFNQZzV5mtOUNW6MLMqC2VjwCcvlCplDUeWoOmf2hkqVzK1FyRLnNJBdrWLrFca0iMQShGCxvoK359HjbCRRitmP6FxR1jku9Nh54vbmnqauaRYKIS54G+jHiShGEJbJeITOGSdHIgpWzZZmVeHtTBwjBElZVqwXFVoHdJGjRc40jJzanrSo0HUgJOBnxUKXLMsH0rRivSwIvSbzK27KO+6WWxQjadqxKiXzaQK3BqG59HuGwYIWOCUASTCBuTP40COlv7bUVaCqKubRYmyLzANZrlHB010uHI7P+DCiZCALkTQKdKaRmYV0xEfBPCZEqZjngVW9ptA1z7sDKktYruV1umVLhM9Is4hQA/M8Yo2nKSqq5QInDf1lD16g0gIXuV5fl9fDk8XhhSDIgI8B7w1uHnEDxFARlSJVkiwtuL2547Bved49EpVhnBwxCE77I5dTiwgOO1rcPKAShwySRVHhpgEzzJwPA85cUWTzeJ3GtP2J1WLDT3/6OYmSzEMkesU0DITo2NxsEYnAiYFuOhGEwnrH8+7MbBLKJmO5WqB0yv54YnaWc3umblJefXLH+XihUjlNXRKVJcvENZccDCER5DHhcngkWsh0gQjXW4YpDLzfPzKMTyRpoG5y5okrXmm1YLY9dr5i0paLhug80WbEkLFoltSNZvZXVWeaaJq65nB8Jk9TyirD0TLNkWAVUR44n07kmeRqnk2IYWJqr7GcsR+5XE4M48BsAsZKyqZkMi1D7+j7DilTmmWDSh3zeOUtN3VJ1WQYK6+/303KeZhpu4ksT0i1xkvweWQ2hmkaSUKKHQbm+QKDZrNdMbUeIQLVomFsI+umoe93jN3MMAakDEQMmV7ycP+Stn9iGjzPHztclKg8JSszns9/oF6lfNwdOI89URbs9VfMswQTWdUFt3dL2tOZNy9/QpkUFKtId7E0ywJkT6ZKymzN4XnH3faGNE+ZTUTnlvP5Wtb0/sL93afcPVT88bu3bG6WZFnCMIzc3b5kNgO73R5rNFb0rJa39O1MmaxJYsSMFwQlWi5QZCg03gYmF3g8nni9veNhe8Nje+K436ES6C6WcRzQsub29soKLvMlqfK4OZJmilP3nmFsmdoe7xVWdazKjPXylvUmwVlHlgvadubm5g3DtMcYixk9dh64vd0w9IJ5tghtIIorMk1nXLoL7z4cefnqBVEeiHHF27df0s8Zs/OUZcY4v2ceFNMMReP5/uM7mqxgGAf2lxPWJ/zs119wOfU0ecOr9c/5zW/+mh/e/Q5EQnAFi3pN3TR83J/xPkFKx2U4EGPK7tGTqSWJEAQfSNMMZCQqS992KJUitcZYw9C3tNOJ//w//odRpvr/3aj+b//if/htlD1ffvmew/PEenXD/njifH5G6ZTj3vP5Tz+nrBRfvPlz/NBwOXZcLp7vvw388ZvvQPR4a2nPkW+//cjt9iW//93/zX73NdKvWFRf8OHxHX/yF5/w1TfX1ur7p69598M7slzS9wNuSNks7zkdjwxTy3rzCc5v+NnnP2f2E99980fO3QdCsqa+W4HQ/Pzlr5ks4APzZeD4bCkXGf/6dzu+f7/n1J1YLgoSpfn43tMNB7RaUOYVIrlgJsPldEJlkC4U1XLDuR84XD6Ql5IYNdE77t6syEvQ6YJ6s+CThzWrYoFLZ8aTpcgKRFR8/90jeZ1z/+KBcZr53Zdfky6vKr/dhx1u1NxuGwpVk4SMvEy5v19gxxk7G37+858C/mrmCJ7oHXmzZr1acj7vCVZTVhVFHXF25vtv3qPSFGk1y2XG8XDAHHp+9dMXpKkAm5JkKYfdBHjsLGmakq/+8MfrqXjhkLHCuwStW5g0m7ogmiswP4iI95ZpFvhY4YLF2Y7ueOH1Jy/Jisih7ciSBW6cGaaevFmj6hxLz9RfKEKByjNmRjItWeYVm9WWrMyxcUblgbRcYKNnUVeUusC6K+ewyRSBhKhSdJFxOc2UhaXIZvA1nTujMk0/nWiaFUFoLu6Msz1VWlCuF6SpgmipaoUzHpWkiFhQN+XVo25G7OjIMouznhAiWZ7inMWZDuFBiQLlC+w48fLFHYtqBSFhvVJ4YYluwvoRmcDDi3tk0cF44k9/8Rkfnk8czgfKWlI2BbOH0TpUmuGsI0hPFgRi1ozmgLuSHUnCyHKzQabXrJ7OJFIoMlWj9TWLJIKk2dZQzMzThaauWN9oVOJwocTiMQzM8YCMkpvqjiyvycqaGCxCJ8wm53Z1S5IGptni54EgDNOYsNncYt1EVdes7m5RuUIHQ47j7m5Juazx8wUfHZO0KODz2zfXgpusaLIV
IYB3BhEnVCqJ6spUdPOE6WcylRCTmePFYY3AqxM3LyTeCN79sIdcEMWCeZIEMTP5lve7Fi9zpITz6Uw/TVdcUZqwaZb4Efpz+PEKLFytciagtWKeDONgr+8l12AD7eGMCx6EZh57RtNjvMZ5TXe88lnTomBT3KAQnMYduvFM88T55IjBoKsJj2Olt6yLNcYJfFDMtqObO5TQVHlDkmjmuSfPNVle0g0D/diRxBTlM1RQPyp/1VVVGw1unrEukIkC2zmCC+g6BR0RUmLMQN9aUr3g5qEhSwLBSeYIx0uPQGLnnvM4YMSPBrPWs17ecrvd4t1ElUmm6czXX/5A1xtUBkkCTbMgz0q+ffqWdj7Tdy0uTAQpqJoVm/WCuZuY+4l+ONKbHf00UucVn27XLFXC6RgQOsNGcz1o5zW9cRymE816jZsd3k44AzKFoCUhSbm0I8Za8jqlTAsIGuOACO1+z9TNKF2i5NXUVtYVznd4ZylXGqkUdrRXSHqaY6KnG1q0Lhn6mefH9xgzYn1PJgWTnfBDjjfJtbQVEtphZrlZoDNHmkbWzQrvBXmZcT5d4wT7dk9ZXikJUijmYSTL1mzuKkyc+PD0RLNccn9/x9z1tKcLeZYiUsGuf8aqCXNJ0XpBvoD98YISkIqKIBNkNuGERRdwOVxYLtakublGkkTNYrHEekuIgLTMfodOag7PMzpVtMN1kjnZlrpZEEPB/nBhvS1pGs20m3lxuyZ4hZkNq9UKlUaS3HHsZoS0FHVBvSyZzUjftQRzxXctVin780cCMEwji2bBYlnS9gemaSSVG6oqQ4qRoR2JpmJZrXn88D19e0FEgWe6ljSrDe2pZbtZMnTTVQIjLbZrsc4Qk4lpkAhmLsOJY9vhJsXqvsG+HWmWG1rXcbtdkODJWHKyR3RSUlSap6dH1nVNmk5EMpxJuVmsSLKAt455CIQ5Z9EUBNmzqO6J3jOPz9R1g9YpfefYbm+YTc9sYLIe5pTz2TLanqxM8dJyPAgyLRnbATMn3L++5f7mDfiEalEw9Z6b5g4Xr6+bRtCpIBNLNnVNv+u4WRR0J2g2W55PLfM4UaQpQ+vJsyVFLvA+cp5bQijw1pEWPT44QtAsm4JC17SmoylTju2BRDUUQtHOe/JVQ+wF/9l/8s/+4W9U/6f/5W9/q+U9p8sHfvKzFxxPhndvj2xvHvjTv7znD3//CCQ4O/H49A3zdGJ/bDkc97z9+K/Zrrc8vFwRBQSvSVTg0h5RWvHpT/6Ew/PIV19/TZqmaJXx/Ljj7duvGQdDkixYLAt2TxP1ssHME0WV8P67icenjySpJUyWy9ORGBV/9evf8OU3B/ZRsVi9YpQF9uj56pvfM5kzWVkSlOCrt+/44fkt7XTmfrPGzJqn5yOCAmN7hOzBFTg7cX9fMwyeokzo+8DlMrBY5XirWC02iNjx5vMXFLmkbnK6/hk/dbhppl6m6HKJCZ6HT14QE4FPIk/7Z/anM8/7PR+PHeehR2cFRbmgbZ/oupkkBZEEtqt7Lpc9zbKi6ybGweC8oWoKZjMjo+C465ExkiiHtZbL2aF1wWqbEmVOVgmcCBwvM69ebFltF5z7a5ZIKc9lb1lvFBD5/d9/w939C4Yxsj/M3L4ocGFEiBSpZ867gV/84gsIEusOaJmzez6RKIjeI9EUZQESTqcTY2+53W6QylEuJC5MPD0/03cOGUqy1DLaEecU29UrMi2xXDj2R7rB48KMEpFCa2KICKnoLgNjPyETzXqz4un5EaEVZaUI00CZ13w8PpGWFQHHZCM6KVDCcbnsEEGxWmxZLwveP7+nn3qsNTgfGVp7XTDFwDzN7HeW7c0G26ZUuSTTiq7tkWkgL5ckKqOsNsh0YpwH9scWh8c5RdEU6Lhg8kdGrzh1IDJPf+moSsUwXXj7wwesj5RVxWxGhIQiK1BK491EJjVNvcJJyzh1RKfIspSqLimXJdZZ7h+WGC/46o/PpKUiZoHd8UiZJiyzhnl2nLoBmRp0dd2IoQOBHusd06Ao9Yrlsr4qYM3AMPec257x0lOqhOgjwV897EjF60++ICqPFJI6K9CJRoQUO3iW6wpdSdq+o1YVbvQMrWG8BIKbWG0kZQnnw4Wn0zPt3JPIlLIqCNYRestPXn4OMuPdh684XnaMpqcsA0miadsTUnSUjWMaJ5z1pGXHNJ05fgDlS1QyMnQjua7QSUKMmkDEzJpUrnn98p5VnRGE43Qcac8DqoyE4MmSilRr+r7Du4x68YqyrBDBINNIsaxZ5LcokxCDRWcJeRLJ05R2MlxGA86B8dysbtClRkTHbbFEakVnL5RpBuKKe1tWC8o0B3Gd6GpVkCTqR0rIiEBeqSJe4sWE0hb3I+C+qmqcC+RZSqo1WSa4u7vFOg9ElFBMw9UQt9lqVCK4HGbyImP2lhAS6kVJ9J7Hj0dyueAvf/VX/PVf/9t88upTNssNd9sHvPd8+LBDikhVC7JcEkNg9gOTOTMZR9+N1NmSMisYuoHhYvBmIIrI8uYlUzRc5gu6TMg0nB5Hvvn7M2iYfMv5dOXXnrqB4CZmYwnWI40lrwuKYkEk0A0jx9MZ7SUvmzX3yy39OGBFRKUlmWyQPqcuaoQwBGEQiSBTNSoBncEwGQ77FutbTJzZPbV055a8Kgk6pTMz/WC4v30JMtB2J+rqFavVisvwjPCCT159ynZzg5kNSpVkac3xuOfSDrigmUeHs4b7h5dYOyKCoEgLmqZktdxyOB2wc2TsHYmaCVwVmqnK0GlKCNf4BZNi7g9IAufDxKKsqDOoCknV5Fy6C8Fl1NWG06Fl9/xIVTbMY2R2B45ti5CC9V3FMM4IJQkx5+5lTduf0UnG3RtHVefEkPLhwzN1nYOcOZ08Ml1gwkyST4gg6E4daZJSpBnn0zN5VqG0ZzY968UGOxm2t0tkEnh+PlCUFcELNpsVXTsRfIJSCjNFzCTJ04xp8PS9IS8Ui0XD6dQzDIZPPnkNwaCVRgaY+oEq12w2a1KdkWcpzSLlcjpiJ0sic6yHvBYooYhu4tweSGNFWpY/4vciXX8kFRXDNIIIWO9Z1FtSXZJlGSpR3N9v0OmE8wKtK1brV8TEszu8I6Uh8Rrnn6jqDe3FsbnV7J/317WsVOyfWjbrBS9f33Fzc4fzHpk4TscjpV6QaYeMNUWRcHe35fuvv2OzqKnrJXPv6LsT9apkGu1V8ZuUfPy44+O7PYtFQaIU7Xng+7dfE0aP8JKqzqmbFbunM9N0Ias1phsx04gQGXXVYIwnRohiBjlxOXcQZ5xXxBAodYbxkBeKuXP8s7/5NwD4/9/993/728fHR375q5/y3bctOi8oKodzsH8+8sUXX/B//Z9/R7NIQVi++sMTy9Wa3u9QheCLz7c8fveW1zf3jIOl2lScuuuiViD49v07hHJMZmK/+0CkY7Y9f/5nf0VVNnQ97I5HEhU5HM8MbeA3/+jPiQiMnXh+/gErAmGaeff4nh92Bz6+/5ZDd+Dx7ddXH3bMmaaJ7bah746YLnBTa37++hPO7UTwJT5eQ9EudFgz4V1Cnmn
6biDLawKW77/7SJZWxHhtcMY44iawU2R7+8D53DP0Iz/75BeUek1WFBz7lsPpQDeNjGbiD19/xTCMOO/4yeef8833j6hy5HBsqcqU1TJjGgKb9YJc5+yfj4xzR15oqrohBMFyvaLrW2L0NGVDmefsnzvKsgQCSiuyIqXtDemPP2SlNbmu8a7nux/eE0hRGoILzK2mLBXzJMjTJVmlaYcBoQ2Xy560CAxTz9gFtjdbzvsTq1XK6dihpCWJWxZNwWwNN5stq3XJN1+9R6UJy6YiTRV1U9O1A2ma0pQlqUoIMXB7XzHNgaa6QUXPOHf01hGkQsiAlBIpwnVqZK8++M1yhRQJKI1OUoqspBtHPCNFVDy93+GyHisE82lkuVjhZ0uRaMoiJy0KpE4RUhJmT3SKpw977JCgkkDTQFZodOpZlCVhjhR5gveRfj6jVIpAsGpuQE7kack4DFhzbb8n2tF114hAe9ldNYXtiPCwymrkuObSTpyHEZVpZCp/5A0aEiEQCAgRES1VrkEKDsOJutQsq5IpzAymJ88EPniGyRDQLDZ3hMTycfcOkQiid8zHC4fLBcrA7A2nc8tqvUbnKafjGe8VwQq8vU5XnHG0/YEgA1JmvLy9p6k1x/MBKRN0nrK6uaEuC2bXE6VlnC8436OLwGKTsdquqNY3QErsYff+zDw7tndLiipjHkrK9I6hP2HdiLsIatmwWRbkWpLLipfbl5SlZH+4oPOa1fIGawxm6unPlvPe0l0MTbpEy4yndscQHNv7FVWqsJ2nUDUvt3fgPNM0kWU1QmX0s+XpuOcy7xjMhc1mwc9+8RmOyNBP1MUSJXLm3lMXOetNgU9h0pa01HiTUTUriuZqgKnWKWmRYS6W+XydWKAli9UteZlTpJq7+gbvLE/HM13fM457vHfcrG9ICEzjSFUtqcsaLQXgEFIiRUEqF1RZRbUMiNSQJgXCCmIQhCAo8gyBwDmDjxYhwJuAmfyPbW9Ps8xJULjZUTcZDuj6GRtmpBZ4b9jcbPjVT17Tnk788e07vvryS96/e8/peODU7gjCX/8vHPPsMCZyHi7MsiUm4Xp4KjR9N1FkJUJ06CSyWbzg0p85Dx8w1jCeIsIkRCvZbB7oQ0tMBM1ihTEGkgQ3O/KiIknBeEcMkUQXtG5mdp5FWbNqFohMMgXPyVqSoiZLJLkQ/PKnv8I5wflyJMtTpjEg5ARyYBwFJgiyQjNPhtPHlnk0vHp9h7fQnztWyxRrR9JCYqaJSzvTnx13L+8pCsdpf6SubiiyhH6cOV+OlFVOkZYkSX7l3mpBmRd0/YjzI844pmm+6kOHifPlRKZKlk3Cfv/E8WBwVjDPLafDGQIgJgqVoZWh78+s1mvm+UxVNEih6ccLm81LkkQwjT0//dlL3nx+R9/PzLNDZiNRG5rlgnO3Zw5X1FPbtVz6E4vVLZ9/cQsi43y+GsiahUAmYJ3kZrvAyxHjOp4/GNIsUJaC4KDvBOPoWCyX5IXmdDwRbULTLCmrnLbtGKcIUVDWOUIbHj/uMJO8DnmCwntDIg15rmkWDYnM2O32fPb5C+4flly6A+PQ471FInn16iVFpTkeToyTZbFcEKzHjglluuTYfqRcXLFQUwsJhhgSXAxkZcqiKvjwuEerFBEUOpN4ERFB0Q89VZ0zjTNSZgyjQ0SNDoJ5NAQ5s1jnFMWG9+93vHi443y0jJNnuQ6ApS4XuMnSVA8873c0m8jt/Q27fU+II8FHFosaES196/DOUdcpQzvQFCVZmvL27cerNcoPnNsLl4sHcdULVzX88pe/ZLWVvP+wI0QNVYYxPSJNmEXCl19/T5UrYux49/1H8iJlsuNVQVwoPAEhM5JEMRnPps5xXnA8nKjzhqEbsdFRlgva04n/4p/+l//wN6r/zX/7t79VaWB/nAhobrYrsixjt3skEZrz5cJitaWfjkgNX/z0z3n34Ynd4UiMNWYuKcuaRJecOknvIs/HA+fLM8YMDCahqm8YukBZVRjrePXwS9pOXcPtuefrb766PnijpevO3NwrPrx/5Jtv/0BR5Tx9fM/oDe0wcDyd8cYjLxOrpMCJFoLBB8P+2HI6D2TFgnJR8vbtRF5UOBtp+9O1LZsohi6glGE2nqousdZxOQceXt4zuwtZrrBm4HQYaKqKV68+4ePjE1JHgvfkqkJKzenSEq294q26nh/ef6CuasqyJljH5XRGyiXentmubygzTX++8OrhJW33A3/xF78mAmVZs9luGaeRYRqZRs/YTyglGU9w6j6yWFSkumIcA10/oAuu7dZoyPKCx4+P1+JSkhG8Q2USGRO8kXRDz6tX94zjSKI9ea559/0zZaF5+fCSsfcUecamuacuwY05L17cEJzi6WPPm8/WOBcgBmbjePx44uFlTpEvuL9b0Q8t1kaqeoVOMsDSLHKyUnI4XPNkRaF4fP9I3awY7czhtCfVCmMTnB3RiSZERXs+EuJMTKBeV4zTiTK96mmVkvgOIjk+h2kM1NstaVkQA6goGLuRMs1JkJhuJLiW8/EEQXK3rXh4sQBXMtsJmU0E666qvSQyGEuWLUhzgZ2hKXO68xlvDUM78bB9ye3mltP+gjEdzvTExFOvanRISYLidrO8eqedoVipa2lFBMoiY+ivU5yqqimrjCS5yg3adsSOijzVeOm4WIN3niRK2sHQ9TPL5Yrn3XtcMIgfFaypzlhVCzozEbRBUaNESaCgHSambsD5wHpZUJUACd5ds1omAOFqIur9hXJZXzFFMbDe3LEqSqa+pbUT1kXm3lLlFaXK6Y49tp1ohIQwo6vA3eslN68qOtMxTB3OXh/eRZLx8HDH5vWC89gzHCxVTDl9/MDz85GYJCRKYo2iLhaUeYYZEu5uXvHi/p4yL5jMxCwSJntVNzdVSqqWLNcr0kUGeY6LgdF0JEoilcSFkbRMKKsaRcowjhhvWDRrzGQYhjNNWaKTAudnZOpJiopxhm66ENRIk+coH3k6P/E0tESZsl5vSDLF5Dw+JszzxDqvKeSSH56f6c4jRV5RrDJCTKiyBYmOSA0+SMwIZupxbiQvljgHmRLkWjNNjiQr8NZQKn0tSVpPphOCtyRKkZclddmQiATrZqL8UceapRS6oi5yYqKZjGV905BlGiWvHEsXJOfHjsenI+fu/6XuTXZlW9f0rOevRj1GVLNcxd55TtZOITItuAM6NGjRpGMh4YZlbBogd88NgBCW3AHRMeAOXAc9QEjOAp3Ms/fZe625ZhXFqMf4KxpxLiJ9BaEIRfHF973v83Ssy3LlSGIZXYcuPCKCXSHLM/ppol8XVJayLFdN5bieUBqEMrho6ceF8/nCsLaImFElNWFZuWk+IIQmJiN5VdDs9pwuJ6ZpRpirxajaFtgQ0ElGkBIpEoK1pIlh9jNrdEzLjI0KVYBbJiqpEX7h7Xjhy7dv6MQyzy1VcYNb41WdW+4oy5RpmSjykioxNLsK52eWaUQGEE5yPB7JMsVwHjifFu5udngsh/2GzFxziLlOeXr9gsojWS75+PgB7yNSw2J7rB2Yg0epSFXVPD
x85Hi6XLXXwOP9B+bhws8/9TSbgihn5sFf7UxEdKyZ55nLeUEnBS56hFox5o5xvFIScpOw2pZN9RHvItZOKKmJciJEyePDd+j0aq1L80g/BpJcg4l4aZnmGRdGtruaeRIc31usX5AyI0kLnH+luwTqugAxscyWuv4FN481bTeRF+ZqOxxmyionhMA0WaI0RBGY1x6dwNevv6Wpb9jUW9rTiYfbe0SIlGWNkQWXS0uSSYQUOCsgKhCQFYZL3+F8JEQBShCVBClpthVPvz1CKLi5e+RwWyCNoEwrpqGjqARZmqJMCl5fvx/2FXe3DUu3gExJ05JpXNnscqb5yLSM3D/cMa9XhmvTFOxvH7l0Z7p25vW5pawM/fhG1TRE2YGwXE6BqtgR5cKlP/HdLz/RTz3PbxekWejbBe8997cl7cWSpQ03NzV9d31umVHgHW+nnqqp8WFlDTBOPc5p8BnaBKbRcjpduFwcs+u5rC1VUeG8oF9HsqzE9xYZLA8fHri8tpTZlmWaqKuMsXN8frxnml6wU7h2OchwfmJTlSSpAS2wq2SZFv7Ff/kv/v4Pqv/63/wPv7qcJ6Sq2N7mrNbz9PWNP/2z73h7PaPzSNdfeH+bgIzZLvw8k9qSAAAgAElEQVT00xNS16SlYv5muXQeqzSr60kYOf105P5wD5ngx5+/kKYZn7/7zDh0tF1HUe55PR5pp3emwaHVliRJeH8bOdzWKBK+fHulKPZcThM//fAzWV7SjwvWO+p6S6oSsjQjMzXWCoxOKYoNdXlASkB0KBMxqmQYuisKpLmeborsQF5JlgWiF5gMoowcz9/QSpEnByQKuzimueXLl6+sfsGp5TpIb255P77SDj3YSFNv+OHHn/j++1/Q9zN9N5GY9KoLXCJapzhriR7K7I7z5R2dS1yItENPYgqev72yWkvEc3x7QwnDoT7wfjkhtaSsNphU4KxnU9/gnMWGmSozvB5bAp6bbUVdN+RNw7S0eB+JJBS5out79rsKYwKpuSJrIjNpTDDScLO95/OHR95fTtzePHBqX1GJuZqqqoTnbwuHu5Lt3pAlBd99d48g0vcdQkTqTcE0zQRvCdEyDBPeJSil6fuRl+cjmUnJS41MIj4Ewio5X1pu7vb4EK6MPCWY3ERWFySp5v10ok5KohOMvadIal7bI1Y47g8bSAw//M3PhNFRNyXOWZrmhmFcOPen64+ISbm5aRAyYx5XxqkjzTOMaVitIiqF1gmJUoTF49dIXmQII5jHwNh3yGj48OGWbjgRQkSwEpzCVCnR/C7fJDWmKVG15/CppCpr1PWwS5pmCDTWRjbbLYfDDeO48n55I5XxyhhMErq1I+BoT2ec89igSbMCRSRRAmUUWZaTmpSqLkEqLJKl1yQ+Z7MRIAM2LKRR8MvvP5Ebg3eOKAISjyAyTSsKh2Wkm0amOeCDw6SaJKmxw8rx+MyyWIZLR13kKGOYrWMMliGsuCi4OXzAu4hKJafzyDh4mkaTFwZJQqYzssLQDx1vzy0mSbCJ5WW6MK0erXKWZWJdzmglaaqKpkr4B3/2+zx+2PPl2xMqyajyjCwYtumO+/0nkmzDqe9wITKPK31/1dgiIu+nd8qyYrPZUqYpc7/QtQtllZOlmmlc6NoBJQWZyuj7lW/Hdy6XE1lqyTScf+7RY0ahM4iebVlxU9bYbmbtZx62B+osI1USGQP9OrMsgl2zod7mLHMgyypkknPpBoalJRDx65VCYYxkGCd0Iri/2bOMM9MyE5Esk0ULiffhmplWErdahNQkeUUMGiWvXnXvNQHBsjqaSlNXJe0Q2OwO3N03SBnZN7sra9XAMggmO9LcF6zzitKCrNAc+3fWuLCukTRLqTcFNjqc8HgrqMsNxgicX0AI5tEyLw4fLCrVRJESMOR5iTGaKDwyB6+uut3Vrkgd8UFg6gaUJSqHnQSMjkQqpJcsU89hv+X2bs88TthhoIiKTMCH3UdkUJzbN2Y30Y09eZZzf//73Ox3zMuZtjuTJDX9MBD8FUcVhKM9LSSqYrPLuIwtMaZ8eHxE+EBmCqQWPN4eiHJFRoVdF1zsr5IQe+Lt2DHNC852XC7vICR5muPCyCIsJtVonfHTj78l0Vd+8W67ZVvv+fLzC9XmOtS17UKRV8QQCO5KJ0l0SV4W1+2m92wPJYGJcX29YqxcoCxz5iEyTQPW9zjfE1go0ttrEdFNDOMAgF01i7W48DsEn1wZ5sCXr2+YPPLx00eUSPn2+hWVaB4fPpCnOcoIxr5jHjLsOvLh0x37G421M9OyggCTRpSR+CAIBNJKk+Yp8+KuEoZkg5JgZCSukqEdyPOMKBwfPj4wryvjtPDHf/wnvL6+MY0LgZl5nCirBqRhWVe0Nte4SxZpCkG5EyyhY12vaC4RJIebDePokVKRJEC0BO04dxObombuO1Y/U2aCzaZmtQPb3ZbgFSF4RBo49wM2FXSLYFpmlHSU+YofI8cn2OwDaR5wS06zLXDeoVRKuTM4aa95bpXT9UdMkqOVQBuDCAYfAsIsDN1MkiQkOvD0/JW0qjncHfA+cOknvHM4t7JtKvq2JwQHBJI0Y5wWmkKz0QWntxMiUQQruLye+Id/8e+RVTVo+Pr8xuHhkSgkL99OWLdwc19hQ+Bue0dQkR//7pllXNltbxAyZXUtN4ct/+Qf/bO//4Pq//y//ctfbXY7unFini2XyxlrJ56+fGFdFopqzw8//EgkcO4uLGuL0J6m2fLX//ZvKTcz97+446VtGSeBkI4oI3ef7liiYOkiu21Df+nxFoRKGNeZYepYV4sRBbd3BwKWaR54+vLC07c3srwkzVLOg2W0C5s8p5smhHbYccJKS2877g9b2q7nsP9AkecUWhNdAA9VXjMHx7k9kmcVP/32CZVAxOO9Ii8KhBDX03S1pSwLkCvn9xfWObCpN3TjcOVy7necp5bMlLg1MEwdMV55ldZ5qqohBJhmh5CKYZ6o6w3rMnE8zggpCEHibEE39YyL473tkUi6rsNZT9fNhBBomgItJN37TLlPuH/8yDCtjENP8BYhJEJGlBHsNls6O7Ld35AaybhOkCQgBMZotJIYIxi7mWm6UJiadbZ8/nDDblOSkvDh/oYiL/nptz9TFBU6sSy+RyeKSxfYHUoePpQIkWJtwKSB0+mM1hHnJD44nHV471nW5Yp9SgxSpZyPrwRhWELg/mHLuX3l+fUNrVKMUuQ52LAQYqBKcgSCyXkOt/fYacEtiibbI4Sh7xbSXLF5KEEGhssLp28tol/QMaJSw/3jJ94u77Rrz+AdxWZHvinICocdNLc3CR8+CdrWMk6eJElRKpLmirzKkcpBSNntG7rLQDe0zNYzTIGsyjl379TbgrKpWGxkdBPRSVIJm7rg9XVhGCcECeMwIcJCcILoFUVesNtuSJOcr1+feT++UuV7Cp3hmPjpuceT0qQG7UBlFWVTkOqERECzybj0E8ELjAq4YFkd1NWG8XQhEYosd6jUIkSkSbdsyi14yTiMWDuzrBMuzgjlSE1BanLwAaklWmuGbuLl7cjb8Y3ZW
ZIyZ7NpGKcW1IpSEY0iiRlyAYJEJ9fPzdh16KjY1jsyXRLWhcVOXBbL4DR5UnCoNGN3xHlBWVZ07YhboKn3rA6Ch3lU/PrXT7y8fCFPc8bWcX55RzjLttnQdgvL6rm73XO/OdAkJWVaM8+aYZ7ZbLekpmG8dPTnIwRPkmbYNeAXyzpOaJlRpiXRjwQ5EYzEioS5jYhZsb8tKbcNd59/wd0v7qnrilJVLMtCdshxxvB2escIh040vZ1wi6WqMhY7/G4TCIv1ZEWNtQFnLfv9AamhH3ryLMMYWCbHMnqUuZ72ozUUaUHTFKSJwdpAkmVMy8g4XBm9w3jGR48PEZNWaG0wErxf6dcLTbVDyxTHwrbZ0g4tSZ3SbA1aQ1akFIWh3GbIZCavc5AZTb1Dmshl6HE2EoMnCZ77/ZZpaBmGgURXrFNAa0FVFMz9ShSQZAopNfO0YBKLkhG3RKx1WC+umxwZiDojywXzukBQlKpAScVgZ2bjWFlYppEi3zC7iaxOcE7QHj1/+Ms/Y157JjeQlyXVpmFdZ37zt3/LPEdMWhBj4GZf03Y9T1+/gc+4v7vBOiiyiiIr6dueh/sbRFTY4KmqnHlq0VKSJyWHmx3j2DOvMyoRRFWxrhCjI/sd6me/u6dtJ0yhrwP0sQNWjDR8uL+nyDP++q/+mmZbsKye43kkS3O89WRZAkT2+w2JGFmWjn44I33K+W25yl2kQDjJw/0tUhjS3LG/KXl7e8X5meBgmWc2ZcX5fUKJlPNlIs8ThILESJRSfHt7YbUBpQVhFZxPb2z3kBWS07lncQtZFWjbmblL+f7zFiUCIliefn4mzQpCDAgBaZozLwu3t3ccT2eE1GjRcD6+oZW60kCqPUVWkeqc3bYiLSCEkUvf0tS3bLc3KB0YpwtZWhCcJTUGETXjsGCtJU1SEikZLicux4W8qcnqikNzS1MYuvczgYXd/hZwpEZjZMFpvJBXCbnyRAJ53mCUv0YBlKY9Tuw3D/z6737gdO5oNg/IxFFWKUUucGPGrronSR02RtKyYhpWdCrwcaWfjtigcE7i4oLzgcxkKJlTVSnOzSyzYvULShjG6UJ7GpntzOPjntO5JSiDCysKhbWBNE2Ifma3L9hsGh4/blitw62WdY244JAysjiPdXB6bdlUOUWu+Ju//pHoPbc3G7CCOHuSGLjdbLGzRXjD43d3XKYOLRNkSCirhqrJmaaWzbbiH/9n//TfgUH13/zLXyEC2gjm2WKMwa0BQUY/PfH22vPd519gDIgoqMpHpvEMYmRb37LdfqI/BW6bPQ/3JTKbQBZ8/doyXhz3+z1G5Hz96WfyNAF53XhYu1KVOz5/+AVd33M6Dby+/x1leUueHXh5uTD7DmcspZZc5hEVc2QEIUvkLJm8x6+WP/qTX3DuFi7tiJMTTo18eX/mOCgSbZnniSgkzaYhyypMAtMY6af2qkZrKubFImTk+P5GlW9pqgbrL+x3D5RpwuhW1rhS51u68wUXZ1JdMM0rZVUyjiNKSsZxREZFalKWaSbLSspyD+FqstHK4EJPFJBmiibf0p47xq5nnK9lm6IqWJaFPMm5uf/A8/Eriz1S5hXb6o40yYj6ipwwQTKzAAYZPP3Yc2pXvA3YaUYBWXI9HTal4nZ3x6ba8eW3J+piT5bDjz+8kBX66sxOVvphoB8HTCbIqpJ5GBBR0LUnbFxY3YIyhmGcUMrQd+3vNgTXk+Opf2e1E8u4kJqKbGPQFZwuLcEq0ixjXjpMKskzRXdqqYuGdVmYF4cSKalJOB5PYFeapkb+Dtnl5Yp3K9iUZQhU5YZNXXD7sAWZ8PTTC8t8RhvHNmkQIvL+teX9x4X/+D/6c/7wj44EfsJkM0kGmSy4nM9Y73g7PzNOjiSDVBXEAFFHgoDFLuSlpqhKxsnz9csLSaaIUaJCxjz22DFys91ys79hOEeW8MIyjQhhkNKQGIMQ4pphtlcSg1aCRF1B7dX+Hp0otLQcqh0q1SgZEVGTJIas1gx+pR86MiVY14AmQ8tAWawc9hV5usHIhHXypCphGVesX7DrjDE5SVaRFRlKwvFtwKgaHSRSRbwXLGPAx/WKrfKB/fYGIytsSBBZRr3bobOabp4JasYxIxJJkmp00OQ6JUSYVseyBJqmQUtBKgQfDzdkSQrGYLISIwsOmzt224ysGdjcJ6RZRrPJSOuJLG9IZcXQnfAEnEwRacFlPOHcTPSOr8/PXMaVblgRwqNEwtx5/DRTasPtzT3KVIzDCiGSqYwqL5mXlkQp7u8fGbvI8f1Cv75TlwWH2z3FfYNOMqR1XNozbT8hvGRcRwY5scbANi1QAvp1wChNXecMc0cIkbzJ8QJCFPho8asjTRICK+M0kKUFddmgJFzaM+sSMbrALgtZqq9/VrXkeDrRjZa0LFAmUmY1TZOjjcI7yTj1mOTapFbhOgyrVNAUOd1x4NvpnWldGAZLmtY83N/BKslNxu6+YgkLJgetJUoVFGV+tcUFQZlVVFnN7W7P3DuGIZKUFdok7Dc7siRHkGISKEpBIitutrfY5UQE/KoBgZQJQmRoI5mmCzYKlmmkazsebu4p0pLT2nNZLoSwUG6ur926aPIi5zj19ONAWiacujNaJ5R5dR3UpbjygwvL5rDj9nFPUQhO7yND57l7bDjkG769/UDUnrad6S8TShjabsKJwGv7ehVAFBo7LRRZzrSuzLPDholhtGwPd+jUYLQkTxOOxzPOQgwChUKGBI0iTa9DlpQa7z0CBUrSjTOHwwMfPh3Y7/VVuuBTDocd83RmmD1aFsjoybMMnXj6/pU8F3z9+sa6QFNXdOeJ9rRgVEaalhxuNmRJxeXcokzk8fGWt9eRKt/y6fGRn3/8RlCCiKWqI2FNyc21dBZcQl3n185DGnArfHjcomVNlVWsdqDID4x9R1akSKVwLuI9aHV9fpdjzybfUxYJdl3Z17dsNhv6/sLjwyNRrbTtQlXfsT/cMi5HovCMPWTJBh8sRVEzjCObzYYkz9GJJEvz34k6Bqr6gXGcWGfLZpNip4WpczzefWDuFoIV9NORvmvBBDb1juPbG9068nt3H5Fix/HSs9sdEEj6y5m7mw1plpAkJeP7meEsWOxAmq8QNHlRE41ncC11sqNoBG9vlrTIyXLNNFgkgbv9PR/v7pinidNbjzIWO0sSnSCkR4mMpirJCs22zrlcVkSaIFTEDo6yTHh5+QKhvLb5M4O1M2/fLMsCn7/bMLiF1WqsBWsHjJbs61uapuRyPvLDjxeKoiRNMoo85S/+/E95O77wfHzm8x/8Eauz/PD0he3GcLPZUe9qVndmGR3TaPnn//jfgdb/v/4//rtfjZMjTa82lmEYUcqyLB3zaPjw+Imhm3l6+kJZGIZhRMQUN3vqsqLtL+RVRlpoXk9npBR0rcdLQ7ErWS5HhqOnSAs2e0UQC/00kKYF87TycHjg7374v/FiJM8biiInEjHFSp7vYGmZh56Hwx1pviPLE8IUuH+8Ic8gKSrO/RvWOtIU5uFM+3bBL4p5XLChZ7ff0jQ1zk/YJUHrBB8t
RZFS1imzbVmmq7WoKBL8mvD68kSe5HhneP7yRJAKYQx2CLjFIU0kkQ3Be4osYZ4GZBQMl55EGcqsINMpZaWoq4LtZkNTSTIjSY0iS69g9nUMXI49xiRsdzUYibWBaZg5HCreT+88PX0jKzKiV0ipiMLiiazRYmaLSBPsGjAhkOUlWboll4a5mymyiqbM8YvEzZF5Xki0QsuS1b5R1bdYH/j8e9dTxDS1dP35GrWYPMM0sk6WZiNQKkWqmiSXHC8XQFDWGVJYNmVJJOHl/UTRlBR5Q5PfkiSRIcwMbkYoSVVWRC9oNgVSz4z9yL66R3jBZrdhtYJ1dBSZxkfHtqiZ54W30zvdfOL2bouWitPLBSUbNnVGDBapBeM4UiY59aYgREtOQh5LmDr+0//k32ffPPH//b+/4fJaYvJIoGTuE/oOqnxHtAmpMtRlxfHyDomkbkqWxZGnFWWtmOcTREmqCy6nmUQtaCXorCeva6JqyUrJ2L0zLh1JWl3ByzHiHRhpsGtP0+QQPGmZ0GQVm+qeUzvRrW8UZUmZVHRtxzpbrF0pioz34Uhxm/D4cEv3NoCX7JsUFSO52ZKXFc7D+fWEIGBMBdFT1po8K7FzICrFsiq608S0jGy3N+zrW4ahJ0kMN7s9eZZQlRUKwa7YMFzO+DiQ1ArrV5LUUDbNNQawDsxhYVoCmkB7PvJ+uWClQpuEiGKaA6tbWZm4jCPzDAbFx9s90zRwPk/oJGOYJV234uPMue1wzlPUGbo0rGIB7ejGC+fLmXnx9BN007U9XpcZiRa0lwsBRVIUROPpl46X12e8h7pokN5RZiVuNQh7RWc9H9+J2pAXJbk0JCEnLhXzMPP89hMvbx3RRaI/cTp3dK0jSyJawXs/Y62jyVJCiFi/IhLJagPOexAaoz11JTEa+r4nSbfsdncQIuNwhXJroxFiIM/M73zl4ncRjUhWlVTbitQkGL1AiEQ0yzyjjUabjGhXbD9e7UXGgPXXU2iaEGRKWSiEGFgnjxGeqgpEbZCmRAqFiJGnb68I4YFrOTVLHUZVtN1E33ecx1cWIqtbEcpSlRUrA1OcSZOUuDrKvGCYFiYbmWPEK0GmatLEMM0tYEjLFCMEN7sd0+A5the2TUWWqqsVSEB7vKBEzeP9HbfZtcA1s3CeXxEE/GghCIqmxNJT1w11XdOO70yTRQVDs2nY3dXUVcEwXJvfvT3jkhkLzM6RlJaQaFJdXqUwJuJsxAfFvHSgJW6NBGlodgVj/8o6Lnh7FcNM04m1F9zuP/Dx8YEsXznsD4zDjJIGHzxeBvKyYLPLOB7fcYsm+JU0iVfu8G5LFDnTIPjw+MjH7z5xmY7MUyRPa5R2lHlDDJrj+wufvt+x2h6tclKzw3pPmu2IMbL6d9YF8tzhpsjb80zaZBiTkoqGpmhItGMZHYfDHfNyQXtDkW1Zl54QLEIEIpqsyPFhIksUUkZMqjid3zFKkaYarTXH4yvbKgcvSFRxtRb2P/HwoblqsfMKaweKskIIcb2++cDpeMGHmXqT8fPXM2lhGNeJduxIi4zX93cUCYfDgbTIKKpIIlM2dckyweePD7x8+xnclRSyf9hyOvU0u5r3pxeQAVNlbFXN6HpOlyesmxEiUmSaJBUIBcuyUOQpSa6pdwWLW2iPK26RaG2ZhoFpspzPLYuz1PUOt8jrd+92Q5lJnr5+ZR4tMYAQAq0z8kJzfBn58PiAVB4hPaf3E9MIUUve3p75o1/8EUYFlknj3ICQHmMMh5sN/XAmeE/wgZv8BhXh+4+f6M4XhFdsthWolarYUec5eZbz/efPvH39ytJHlBRYRlaXcRl+i6khUw3n54l2sFRlTow9Pnj+2X/x3/z9H1T/1f/43/8qhMDx9IyQiu224u31G4mqqPIKFzQ//fbX3N5s0TJFEckyiXcWgWRuLeiIvtnx3jlKoH99Zxp7jEzACbLckJQS56EdLFleYN1EolM2Tc5vvz7hZSQrMrSULMtEZjYU1ZZztzBcItFZ0sQhRcFlOrL6Fm8t33/3mU1d0V1aLucjXbfifMLDwy1lnpIlBX6F06lDhJqbmw3TMkBIESrQDx4jDVP3SpEphFR8e3ni5rAnSRP648z9hw37rcYPijQJ1OUNRalw4Vo0eLuc2d8drsrLz79H3ihkuiKSBKkU03whzzNCkFgxs157SVQVVwtOfQ+J5u6mYe4nhCyY3IrUmmAmjNKkOme/vaFfRlr7jlQr48Vy29wQ3IlEgXU5iZboaNEx8OnzFhVTmrrh9qbm/uGGutzgneXTxw2Pdx84nY8U+bXw9tuvv2HyA2X1gae3H0Eo1OJ4eLhjWGcm6yhqxen4TJHkHLYlU7cQ18jcC7xfqMo9Yz9Q1xlFVjDNFm9nFAt+EcRgyXJBP4yc+5koSqpao5WnyXcc30b+4I8/834+XQfPYkc/tuQbBVFwf/NIe+5ZreX2/p5tc+D5eWByjsUtJFnNPC58952iqTb0s+c//IsUN/0V/89fPjOOgo/f3yFVzv/1f/YsznL/+MAaO0Z/5HD7wKUfiGKhSEu6dbluMxqNAqZREQIMQ0/XD6hHhVsCn5qGZltxnq7ljERavLPc7z9ihIYA3323uypOZU7wjtVb0piQl3vc79SkSk/kFGhvWJ1k8StposkyRSoTQgfHpwt+Ddf31hAwGob+jXWdabYl09KT54a6qBFI1mW9ol0MRLWwqQqshUSl5JmiuSvQqWYZAjpTrN6CtKhcEYTEE/Cs2NUzXSJhgPPzkaE/oqUgLBEIyFQjsoyquUULTVUbslwwdAN2FqRJSZoqTAK7ww6ZJryf3ql3GXmSsPQtPnpWp66PGQVEEA4cCePqOB47UBoZwa2O86nDzZYqSwirpSpSilIw2zPTMjNNljLNuN3useMAMWJXT1kKkgzO7bXgoWUgrA6CompqhArMziGigGAJ0TFNjmqz5/Zuz7SMtO89iUhJsuRaVnKerutxM/gYcG5ERkdqUqQ3TBfHYgVZnXFTF1Qy4/x+Yl0XpnlACEOSXAUDdb4lrAuj7REqEteV1XqcBpkpLv3A6Ga8ASsCMmr8Ch7BsPSEuLLEgCkTFndBygEVBYkuQMHp8krXnlFBkemUaZzo1xmbRaKceNjur/GVZSVVKX/wp59IbkuqNCWycu57zs8tUVkq01DmN6yx5zL2oCBJA+vkqfKcKq8ZhwkvPTExqKgIU0TEhKTUpFrS1AemccFISJNrmVGbAl1oOrfStxaT5OQ6x9kFqzyLFsjorllUo7nZ3PL89Y3Fe6qbEpUqhqnj+dSRSE2SLpAKmmJP6jzCRZQumexInhVooTk0B7SSRG3wyjJfZkSe0M9n2mnCz46PD595//kZJ2Zu7u/ICoXYSgYxsbSKn3/zlefXDpdK5DZjs9kTiQztQqJSitygheb7z79PnmT8/PWZ/WbPf/Dn/xCpDOfzwOtLS1Nf2Z3jCjqFy9QzrpHUVBQmoe0HnAIlF+blQgwr59cRHAz9iE4ESa5QuUA7gVrgfl+AnAn0tP2FprlB5TCNC48P96y9Ii88w/BGYiR
CRNwq8ItEEbDrhM7CVXLQTew3e+Z14LDbkycFby9PpNrw8jYSpGCwJ3TcYITBpCtvzy0+eNJMkyqFm1ackOCv2tnJr/TzzN3NI5EFoTPQC0M3EX1A+oxEedI88PWpQ+cVVS25jG/kRYOdu6uJr9JUqeHl5YQoS/I6I+B4ezlRVjWni8PqCV0Y5BKY58jNveb5C1RJxv2mZlg6OuGRMgc0zaYhyoAWmjrbEG2kHUdyYxhnR31ISVNDO3cIkfFHn35JXjqm3vFwd+B4fCPJNdO8sDscKOqVX//1hWm8kNdAVrD7sGFcJK+vJ5RUfPfxO5wY+OntlXlZiCFFN4qhtWTCMvSapNjx+L3hx990pEnNuT9h8mvH4+35iTRT5CplaGcimk8PW4gTIkrA80/+8//67/+g+j/9L//tr8DjnSBNJWXZIGOFIDAOM1mZcD51NNviCsR2lig8adagtGKzr6/sufPIOJwRLjAuK0LmFElOEAJ04HR5xyQ502xxYebT5wfOp5lxsiCuWa6+m7jd3zP2A9v9hnYYkGLi+++2lMmGn3+8ME8Dv/zlH9K2I4+POePY8cPftjRNQ2Jq9oeKD59LhiHSDy2LnyjKirquGMYT/dgBkbSyxBiYp5FpnFFSUdcZYz9xd/cL5tGxLhN3t/f8+PNv2B22/O2vX0mylSACy+KJWIwy+NmjgKos6LsOIkzTyNOXJ0yWI8jZbGqkjiidMy4dzl1oLxMmJqRlztPTT7y/viPQvL+/cnO7J1GCZZTc7O9w68o8D2idMPQdTd0QvWFoW+ZFoMx1WBg7QwySzTZBqowia1jcidmNdP0FlXqqOkeI/NrCtR6RSJ7f3piXHuctUl1ViI/3DzTbApWdOOxu+Xh/x1//5b/l4e7DNcM1RKwUTu8AACAASURBVF5fWmbnmML1fXF3uKVIIo8PB5Z15bc//ky91UxdytAOZLKirhKyrOL0PrCrNT54+tnz9PpMWnmKMifqlYfvfp/29YRU4FDU+y2ny4Q2Cq00+22JNhaZCHSmEMJiPNw0kQ8PBq0XPt6tLO+KAk9+W+DWjPeL5+uLJaqSsqoIYsBkK1pmpPpAkSXstiWvLyNGJ2w3GVmuObcd0zoyzD1Jari9z3AuElSCKBXKRN5fnwkKRJaQmJKquCFw3bSluQAJIVqkjix2wjlJP/ZEVpS8Yqyq/JpjHZeeYZiwdgXhafuW07m96nGzkiJP0UohRUaZ37Dd7Thf3jBJQZZUV+SPlkThcatlmiLeS6ZpoqirK6sQxbouSCJ1keAVrNaTR0iiINiVRENZV1z6CWlSlIloHciKhMhEmucYU6CkYZrGa6O+SRDSskzX097d/YE0VazWMi+Wfhzppg6PIy8rpFSsfsIGi5SC7XZHmiq8dRzPHa/HL8zTgBGCTCYonzB3E01dkJoEP2se7z5j1+u2v+3f6bqWJCmpig0xroQws9td27ZDN+IXzbnr0XmKVCkmTamqgve3E10/oaREIJEqx/uIc5YoNcM0E/DURUVR1ugMvL8OBsZopLhu0hAr2huGDubFsqszqk3ObBeM1wxdx+vlQkSgjCQi8HHBhYU8aSBqRjeiTUatNyTSYeeFZfZYu+KDY+oG9BqoEoV2Ar9apAiUuUHEq25zmgfCConMiHECuSJUDqJAKIExKUIIBjvSTwsqSoQTDKOF9IpA2mwP7JoD58uR4Bdq9pSm5uX8RtNUTHOHkCsIh1AepSTWRpI0QWaWVZ+JQuNdILiFok6Y7Il1GSm3NZPt6aYjUQjOrWWcoN42nLtn7BDQSmDXAestWkC1qdDC0PY9UUukVzR1xc/PT2w3W0qdk0jJ5TLSvrZsqwzvHFop7g83KC9QSl6zl2RIbzhsa+zisH5g5MS4zCxz4P7DFqUTtNYIb7BLIMsVzaYiy1O2hwPD2PLy9MRm3yC0ZrPbEZTA5JLLe8flNLLb70nM9bdhWVbuH25ou3eens94L+iXF/7qb/6SclMwzt2VgRzeGe0Jk6RczisPHx7wtkVHjxSRNBX4KOnblhAEj48f8G5hv/3A/e0vyPKEGDQ3m4rUpGRFwTDOrNajEBhytNCIOGKAsTvBeqW5nE8vV1FGemX8NtuE1UqIkWVa2G3urrEy26NVQpGWHI9njLlGU0JYMOaKfro7fKS/TJjkmoeXUiJQvL09EyVURQ5CUWwylnnFL4oqu2IB87Th7uaB8+mJaWzpLhPdxbPbPZDmBs/MOEuybCaTI0l8RBYJ/XpG2B1IQ5VJ/HrBiJwff3rGpIYkTa+4QB9JSsN6HphfJ3reae0IwdKPjmAzjJY46zhfjgzTiapQROFpW4fXCpVoohTgIURFnmR83O54f3tFaOjHF4S8Ir+qTYJbA3ZMKfMUGRRlfYO3EWEHXv7uhVyWSJXgteC3f9tyuH9gnQNFHVlDy9w6vvx44eX9zPff3/ObX7/x9v6VZV3Z3WrGacDaQNVci2dK5OhYk6UJLnRgBDYo1ij4p//ov/r7P6j+r//7v/rVsniKImMeJubRokgQQiP0zLJMJKZAqGuDMkaIOM7nCaUcSZHRnibGy8S8HplGR16VeH+F90ZdUVSG+4cDQS4MY4sxKcu88OWHkWpjeH55JVEFm/IOv0J/OVE1OefuDaxiWzUM7RlYuLu/IQTLbrPFLpGnpyNSWS6nCSEdiOuW1NGT5TWHmwqtBUJpkJ7d7sDPX75djS9WkmUpVbFhs9nw8vztd+BvQ5ok/Mkf/iHt+E5T7xnnnk1z4OZhR5JqsjwhyyuGoSdGQ9XU2Liyv7ul7QbGaeHh4z1pqqibmh9//OGqZFwcMa5M48K++YDRgdX3hBh4ePyERPD4+EhVNHSXjjBeN1Jagw8RERVlXuCC49JfyNMNeR1xLiXVW87tE7t9hfcp3XgmCse6SqZ5IS0kqwu8vswoU7E6xxxX0kKjtWR/aKjrLZ8+3bAsE85a6s2VO7evbxGx40/+4I/5vU9/TDt8YxwtdVNy9+GWok4wiaLIa6o6pe+vXNTNbs80j9hJcrjNeXz8gFSCYVz5/Hu3SB2ZFouzMyok3G5vOL+1FJkBOaBQFNWeYRyZx4CWAaMzYljox9O1wKBX+mnESMGHXcmf/4Pv8bMmTXOanUKrGUfJKBTfLgWni6X3MNgLUnkIKcugWEbLsp7ZbUumaQE8MTou3ZHj6czu9oYoFbP1xBjIs5pDfUO7tLx1ZzIt+cNP311bqFnOrtkTGEH8/9S9Sa9ty5qe9YyIGDHqWa9q77PPuefcMsnSdiIhI1oI0aeN3XDHooNNh/btISEhEEjQsJAs6NKkgftIdBJhY5yQmfeee4pdrjXrOcooaYyt/A2ZP2FqrTnjiy/e93mugKUoSqydM17Ow2p9R1nlWAxjO1GIgrxISdKAD56P7/dEp3h6uCMrFKfzATNZUpWzWjZYO1GWGp3lpGnB6fyMjwPTOHE6nxmmgX64MoyGcZwd6Cp3ZHnKYrlBScHZ3Pj+fMCRkMiJW7xQbnKitxwvPTKdWDQrLpeIC1DUKdaMSJUTkoCNnlSXxKAYhwGipW
kypEoYJ0Oe57PqfZoI/nMBLAzoQn3GcykmO2DsgDGBsl6wWa85nk5cLzdk0CSJBSfwY8SOBjfMxY6i0kTHnHFuNGWZ0/cj3kfKskarEmMn2uHMNI1Er7jdLJfLjcvpghl7siqnNSPH0xkSSd9PdG1PXZVIJbjdjpAEVustQgna4UKUI4k0iM8b32HqGDrLOAascVhjSBJBnpboWDBNnqLO2KxqJjMb0JRXc+46BR88xk8E79A6I00bzORQKiILTUgMmRiww0A/RoxTqCQnuEgMgWKRYWUgbzKicAzOYIhMyUBveppiTVMuGYcbkThjqm6zjSvLFmhZA4pP+z0yVchEYcyIzBImO1KmEkbD/vmIDY40OGIMxNqTpgmJl0gZOZ1bxnFAJBIpK/J8xqRFLZBiljJMZqDKckKfcjlOCCXoLnuOhz3Ri1nd2nUsm4KmyMiVIPqI9xOlLrieJ6bBkFeaVCaEJOA8FKpknFqGyTL0AyoIzDQiRMp2U1GWOUoWbFdbxm7EmIl6JXn37nu0XLAoSqSAw/ETqBv9ZLFWobUkKxZEl2OGAWsFmc5ZbBXTNNCUGw6XjvES8Eai6xQTEybfQ5CIRJOrAp0JpJpIiCRkZJUgJhM//vCC0gki95zHF5arOxZ1w/7lB6RSRCmoi4bnDx3eOc7HM9iMuijo+ysxarJ0wTBFfPAkeJpmy6JekCro2yvDrSURA8/PnyCRPNw/Ye3IrZ3LTJOXBCaGMdD1lqzI8EHggaxaQ5hIhKDrzninmcaJvm/JVIZWkjIvCU5yuZ7ZbJYQQCqLFIZogURzOLzl+eOZ5WJBVZQcjh/IdM7D/SsOhyN5qjBmIlGwXG4YbwN1VXDrrqQxpVl5urYlkRnLTcrx2HI6WP7gj3a8/fAj37UfMVXGy8fAqs7Yv+wxQ0JwI9U64+X9C9PNE5xkua25+0LTXTyuU/TtldEk7OoFddrw6XymNYZf/uo13jgS5u7GONzQeclu+YQ3M43l1lvSNEcpic5zLvsj1WJJqlP6ywHSjMVyST+8IEXNw+M93XDjdAjoDIpM06wzFsWeJuz5cnniZ0+SX3yV8Muve0o/oBJJlliO7z/ysN4SXU5VlvzJn/whf/THfxelOk6HjizPyYuU1bqibee+0ThO2CDwUdL2A4nyuGCwQ5jFDGngP/mHfwsG1f/un/0Xvz4+X7nb3XE+XrmcrrPr93aem3l9j1KaN1++wXpH349oXZEAbX/kerXgUvIsmwHLJnK4HPB+mhu7vmMcRn737TuIlrJokFT0XctmuaJu1uxfTqyXa1LpMP2ZzWbHNBi6dmCZV8RJcbtegMDYRe4f7jicfsSOhqLcoHXg1Rc7utuA9wpBwhevXnPcH7lcOqwbCSHldu3RecpqveN2GRBCUJdrRMK8xVlsOB3PJMJzd7diNANlXTBNltVyTZpKLucDi8UWH6/gNQJBqiTP+z1lXTKOI8Y4siKjLmtUmnA679FZSVFU9P0NKRJeP3zF5dRhvWG5WrDe7FAyZbOtGW4dOBCJI/EOneXsdnfcLi3OuDkHlwZ0VVDXgmE8k8oKZx1Nk5IqNf9dEgnJyK27Yt1I1xluV4f1s7FisVphnaFpMtr2QpZLjPF0wwuXywWpEpI08suf/SGpSFBSMA4Jf/Zn/xer9Y6XT1ectfS3Fi0FOs04Xm/E3GKjx/sBWfUIlZFXiq9+8sil/x3IiSyvuXUdt84zXHue1hvuV0vOxwt5I6nKFe35RtApngQ3eZZlyna9gRhwBJxwbKpXOCuJPrBoUspU8vbHHwhR8vbjle/fnvl//rVFFCV2kbB7pdFrj7lZMrWiKmucHbHG8+rVG1It0XnF27ef6E2PUBqBxo6KVGgmcyMhEKOnrnYMpidJLd5H6nIGmp+ezyyqklQJbhePD56iyki1QkiHjR2r7YZ+7DnfbiTSIwLUaYZzgajATo6mKHi83zJNF7p2QCaKPK/YbV4hRSDBk+UZw3hjHDsSAX3fc75cmEyPEII8r3A+UlUZqImymv3Z50vHp5cfidmEDYFtuaXWkjRVDF3gsh+ROkWqlCpbQfBYM9LdevrOcm1HOjNAknI5tpjJsd1sqMqKJJFMk+Ny65mmEesmJjOi0wzvLEQ3b8nNDODvpzOpSgleIqUiUyUxJig1czUTD8EnyCRjUTYsliWpVnRjz/Vs2K4WfPnVhmk0dP0NqYAoGfoBZwx11XC3vaMoSsapny/dxRIlJCpThCTBhxkfJ5Fs1kvKMsWYCaIi1w3TONLerqhU4rzDTpY81Vg3cjmfMYMH5rLcYlkBCc4INqsNRSUoSoWQmsuxxQ4OIcAHR5I48hLyvKHIanxwZLmmKhTj2PP+wyeyTFFVK/ZnQ+c8ulHo1GH6ia4bSbNImlV0bU8SBK+e7liv1/S94XLqcFOgKXOSKAn+s8tegLMDIjrSJOVyPTEMA0WRMbSGMiuw3lHpJVko8E5zv3ggmhEh1Gy/SWC1KmnyBu8tfedQSgMJZT7nUr233IYW1zuijyQipc5yds2CVAui7DnsT8SYofMViZR89fUDSgbaW0sqUsarxVlDXWWsmyVjcPzqm5/R9SNd27GsGuqqpu9ahr6HJGF7/8DoDMFF6maJC+B8j9YwTh6ZKywOHzIWTY1IRm7tjadXO8ZxxI45JIIkGEZ/I8aWXCSM3hKTgWqRI13B/vkK2lMVBVWtOHUHJmspCsnYd2gq7DBSVgbvJuq6ITJiTcLLhw7rBxarlOt0Q+gGbyWLNGdZZpwuZ4pihblNlGXB3f2Ch/sH6qIhkZbJjpxOE7vNlvPlwGJT8vHjERk1bXuinS5IqVnVC7K8RmaaoTUIBN7PQodmu8ENe6K1tCfPYpGTaoGPBmMCMY5Ym9C1V07nCzotaJqKabJEbxAxR8sFu7t7rB0xbiIrE/rhSKYVZgwYI9hsNcv6HoIgEQGdK7qu4/HxC+52T7w8vyNNBeNoUUKRpwlNrVk0DXk+cTtHVqsFPlqmUVBWiqLyCJlSVwvKZcPtdKXKNlQbIMtQasmizPExkKWa9bqmrBPaNnDaG6TMKOqJb//yHbv1Gzo3YRNNVSQsmxydl/SXjqqoiVayWi0gCuxksaNFpynLXUoIKdM0UBU1y0U5x9ouHb/66Rtu/YAPliQuKRYZ3dByvThePX2JkCPjMFJUCf4qeGw0p48XTsdAFJHDcQSfYH1H1155/XqD1s3Mxt7uEGlkf+gYbo7rpeXxiwqIfP/9kWYlsEbiw8D5NoGMJFLM2VVj5+8vkaJS/OP/+J/8zR9U/4d/9l/++uFhw/sfD4zDxP1uRYyGNI9czhPLpiErUi7XjsvpgA8Jzip0HvDBkMQcoTRRzm05KSVJhLrKmVxCIiztZeB+e0+qBF07MXQTq0XN6bgn1YH1ckviodCCqii4nQecdfziF1/y4Yf3pEoSvcJMknGaaLsWKRSbTcXpfKC9OBZVzeXYk6USkZg547Ooeff+xMvzGZ1qjIFPn95RVxl28kzmwtPDF7y8vCBEwma1JkRPWWQcDi9YO0EUVFWGkhnPnz6x3
lREB5fjmQRPMiaUhWazWLCsVgjg66++ZGx7unOLsQYpGhbNisn1WDuRpxlaZyglWG3v6UdDnldY33K9vqBJwYKSjjQLZEXNy3HP/dOO5bLkcj1QrkqyumbsDWZSJDKy2xYcnkfGaQQ5YewEPqKUwJkEkozRtMQwF3tu7YlMl/TDDZkopFQY4+bWtS6w1oMv0Znj+eUjh73HRUe9FHgjWFcNZZVSlwXjMKB0gSoqjO8YJ4tzPX7SmGnk8aHh+GL59PGFprpnv2/pzoHM57x6bOi6FpElGJlQ7da8XA5MTtEPLUoqcIFUBiAgZDJvg8eRstDobEJERxIDqoicWmhNwrl1fP9dx5unNdVPII6adkhZLwpqWZDJgr6FxaJBSg1RY52lHS+MdiRKQbOaywzb5h7hJO3tBCGSyhylFc/9mdFdIQomk7A/nMnLgqauCNGy3TU0i5Rx7ElixWQCLy8njocrxji88+hsVqE+bHb048htuLBarCmyHIKfW7ZKURQZZuyo65zbtafQK3xIuFz3hGCwxtF3nhgkZVkhk2xuSC9qxsFxeBnI9BozOS63MzER1PWKdR0opcP3OWHYsP90ousntFSUZTGjowaLdxN5qSmrnCRROB8Yx45Mf7aHnXuGz3gZHxypkui8pK5rpISh73GTI5Wa9toh4a+Hme3qnlTpmUcoNc2iwcUJkXi0yAhJQMpAtJZESdpzy9AGfv7zX1Dkmu+/fU/fWXSe4uPMlJRpJM9zplEwDpZuuJBmcd7wmkhW5PgwkpCQBEEMlqrU5JlmmuzcPq7AuiveWEIURDw6S7hbPSBDSd/1VEXK3W4DwjFOA1VZs1qvqesKoRI8htul5XKe85tFOWfzC5WSJookDSAs3hlkEuY4g9TkWcF2syFGCCLFR8VoB6BDmGQu05Qp3oGPA3mqqZI13anncDjgTKDMG1CRvuuQQuKdn9E/Ksc5R3TgzTzEkjgyXbCsGr542hHxFJWm73ueHt9wd7fBO4NILMFOjJeINxHnBsCz3dwxjSOplggShr5ltSqoMkmmE4T3+NZTpyU6ExwOe8Z2QpKRJAqpIlkmGPsRnVQoUmR0uDEitYYkcH9/j8gytNA8f9qTFYphGBmGKzJNKcoMrefBociK2XrnHTotORyfUSInuPQzpg0mZ0kzIATSdM7wFumah7s7JD3DDVRVkMSW2ylSV1vqBvoxsn97pc7rOU6StgTVM7UBN0aqYkmhFLXSlLmkqjKcNwyDQ8QCJRJ8GNlu7tAi5fjRsKzuWFQ5xvQMkyerCq7dga7tEdpR6BWm92y2Kf14wdq5+GndkWaTMvSO9fqRssgo64bTtWW5WxBUj5sSNqsKZ0Z8HCGNGDsj73bLL8jSOzZ3JT4xjJPBRYW1Auc1wQayPOPrr79kGi1Vo0mVYLvZkOsGHyLPh3dkZTFrRHVA55q6XGNNZLFqGHvD/eMjQswlQZmUTK7lfD3SdwNNOUeKrIcs1RSpZOxHpFYoNJfrhRhTjvuR5brkeP5E3xqEkBwuFy7v92RuIEs0puuRIWeWhk9cThdcPJAXS65XT3eLbHcaG64EnyPijjdf1ZzsgZgnLKqE86eesU3JtGMYDSKUXC7PlFVFU2UIJswkMM7hMhAqZbKOzVLjI2yX97xaLTmdzlz6jr4PROFJlGTsLQkFzTLncDrirOR531EsFXnzQKrXJEqjinuS/DVtHymrHc1yhzEOJSZux4EPb0/83k/fIMuJdjjx/PFC30piSPnqJ3f88MNvZ+FMcFRFxuHlihQF1k+kpUBmgnYc+Kf/6D//mz+o/q//4n/69elwYhotj49PxOjmNl654stXv6Jt57Bvf7OczmecDdzOIypziESxqJccrgeSxDJ0jt12h+0tu+0d/ZBwev7Eptlwt17w7W9+S10sEYkjS1PGbsT7QKUrUulYNApjehIkZbbg5dNh9hTfbzlfLpxPF6pm1lu2refl5USSCIKR7J8nnLtya5/pO49IAmVRsFzU3K4ji0aD8CybLdPU4azlfrMjSSZO+wvr7Y7T9ZlUCpJEsWiWTLZHpYrt+jV//v/+K5bLBu+ZocsmEglMvSdRgbyaN1dFkfGyf+F8OlPXC4QULFYNx/OJ0Y7c3++IIeV6HXHB8HJ9z9ANhOgxvkMJQWJz2qvBBE+iPCEK9i8fEdLgmVhtFgyToW0NVVpzPncsFxsmeya6grLIyHXB+XRhtVry6d1I1zl04VguS4LLSeJcirDGk2V6Hlj7ARIgKopizsh0vcNMLanK0TrlcvuE94773YY3rx8wduK7d78hyQuqZkk0DqxBeEGW52zqFaYfKMWWN0/fsNvVmHGkKWYCQo0iyxKKtebD8Tar/YaBMk2IUyCTksQDOJASLyyXS0t7u1GpAms83kBwhjSzJKnEhhwves7HkVz2/Mm/G/ju+xtKLfn6m59wOBv+4v98T1HvsH5PiJ716oH96RmRBg7Hjjy/R6mcoT8y3K7cr+8INnJ4vtHePMNoUDJhioFcS9b1gixmKCERAtIkwX+GoveDBaAsS8w0IKVg0azh83CklWLVPHDcn5nsSJYL3ORp23njLuWsw23qhvVmg5Bu1ovW6/kpOrEIIZim+cJDTBn6+dKWZRlv33/ifLywaJbsdjvayxFEZLW5Y1EtGK43RmMJOqc1F7ywqCxnsVJkKuHTjy84O7F7bKhX+cwOHjwER11nvHp6pCkrEiJlIXl63LBaLUmiRiaKGObCoiQhBrhdb7RtS5GV89P5OD97J8xP2VpFYmJwrsMnjjRXpIWaRQa9Z+gCucy5W23JakHb9XiTsl4vSDPBOEFMUso6nxW0/cg0zk90QmlUmqMyBWqiyDWJm9nKdV1R5DkJgURExmlgciNKBsbuymT6OS/sU6bWMw0dm+0CfOB8vCCkZrPZYe3EMM14o+v5yvnWMwwBrRVetKSlwo4jbhwoihzrDYO5oBJPqircqHGTpWzmzHLf+/miE27UWUl/9CTGoYuU3nu29yuyXHI5XxjHjropKBY1ac5n9F4Pfr4UWDNRFBVFlhNdSnd1KOFROiEvMnI9q1j7dsDZSNcPLJuCuyrnL77713z3wzvSNMyGIhMQKiP4BBCcTyfGaSS4yGa5JfqEqbeoqNFJRncZqYqS6BJePp5p2xEpSnSqaGqBnTriqAgDVLpAkdNUCxLhEFKjpOLd854YmFvhSaRc5nz34T1aJWw396hCg488LhbcDhfyQrNcbAFLUSUoFVgtMzKtud0sJhrSUtDoLdYaMt1wt9kwTheu/ZWsyEjLHGc8w9VSlBlFlRP8wHalWdU1ww0G3zJNgcsnw8O2QiLBWbS0ZHnCjz+eyPMNWSbRUjF24PyISK/s34282rzmaVviYuDPv/8tkxRk2QpretJG4UKkbyeaokAJT1UssJNjUWdEEfjhxyPGz5e+TEuMnZjshBOGQ3vm/DwQuolEQLFqeLlcKfMVdVnQjRNPX3zF41crms0Dq6eM0+2Ci5Gum+HGWhUo7TAmYOzA8bQnyxZ4b1F5oGw0icgQIuV42qN1xfVsCQEm
O7LZ3hOTia6b0LrmeNwz2D1lsUXnkbbtZ8W0SuaXlynO5rYwIWOJSC3D2DOMnryIdP2NzeIB53qyIqXMCsbOEWJKnqXEwYL3c+45RLI8p+0s58uJ4KEqVkBgGK9IBUXtuZ0GorKcuxOKBa6f0KrAx5Tn50/cPyw5HXvOpyuvHte87F8YzI2YWIabo65yhv5ERLFabXn5+J7bOJBIxWBfuFzns+vurmLoJX3XoYuMxdJzOo4cj55x7Hj+dOb/+zfdzEZWmn/5f/8l/bXneOw5HJ857ydkatndvUbrwPvniWma0Lrh4anm9VeSb3/zHYu14naZ8BHSXJEXJQmauq4IwX4+6wP/6d+GQfV/+xf/y68PLy3H4zO7uzuMm1hvVlwOI/v9nqLUmMkSrOaPfv/vcLfdzkiiNEGImv52Y323pmsHtFD4EMh1Qc6cTVvWijf339C1Z5IkJ5XFnNtsGmIc0WkKiWW72RKi4Pn5hJBizoOkKYfDnuVqx1/+1beM0wRR0t5mTSMIjJ2d1pmeN5+vnt5Ql0u+fPMlHz6+p++vPD09st2ukfJzOQLJN19+zd3uNc8ffoAkpawWCAlSJCwXKyASE8cXb5743W+fWa4WCGkwUyBicdZhJ0FQkrwukalG5yXHyx7jHCotMH7iuD+R5pGgQKYZ/ThyeLmSphVeWKLoWFRLzqcjeVlSZis+fTiwudvRO4uPI0NnqKuCZqHxIeHa9pBIijzHTGeyNKXtbySix4850XnOp448W7JcJbgA9VKz3TaMo+XLN6/Z3tcE7xht/znnFUh1QlFK3r3f48KAsYaHLyoKXeFHTxIO3N/d8+ruV9jRMA2WfoRmVRNQtF1LU+VcTgeSEHm8v+flhwvvvvvE0EeG8crb929JQolMEtZLSVVVtINF5JJxdATbk9iRZbaFIUXnmjSVFNkMwPZBUdUZIiaY3pLnNfUiw7ievNJMxnE9jgw3R6ZT/vD3UxLdc7vBT1/9Hi71/MW//B4bE8axYxwCIhHc+gtgSLMCrUpWdUXXfiIO8ObxF/Ttlf3xI4nMSJSgWWmEYGbbNjVZBN8H2suVqtRkOmNoR2QW6TpDVVbkuuJwOmGNJ03VXF4sFjgHMhHoTGKiI3hIZY5XHpnPRSYSgfcSvGIyjuV2hU8mjB3ROsPZwDD2eDdv92YDpwAAHUFJREFUxVKpEMCtNWRlzpdfbrl/aBjaif5qKRclo71xfb7iuogDqrqhWSqqXCBJcG7i+DKSpRlZPjKZibEXSDKqLGO1yNms7tBKE4OhLjNef/FIjPOA1reGPM/I0hw7RqbRUFYFi9WSZrmiWa3I8hxjJ4SAMp9h4eu64XJ64Xq5oNP0M9alB68o9JJlsyDPcopSEtREVTUsFhUh9igtkGnBYByH8xXroaoq6qZCiARrPTKV9OMZIWG3ecIahwuGppkLJ5lOMXacc8oio7vCeBMsmg1lsWIyHiUCy1XJ5A2Xc0eW1mhdoJUmeDdHiJAIL1Eqp14tCMLRjh3GGYa2hxCpakU7XEiSnO3qEWKGm6BZKG7dlcN+QKYFh8OR0/OV0XRUy5T1smQYekbXkyYOd1JkZGx3GTALRKoqZxpmP/lqseB2HlmtdqxXC6yBGDQySdE649pdiVFgpp7btWW/P6FTQa5zhtZjJpBaoMuMziXI2JAKgSwkKuScTxe8DzOYPm3QMkcguF568qzi9PGM94r1/YaqyiikoFwkUDh6M1/SgxekyZJEQMAwTJ5bN1HkkrIsefv+e1SuiH4EGdG6RKeauqkw/cRoLCLV2NEgfGS9WnHuLmRlSlFoQgz4RJLnJVPf0w5XvA4QoNYFREkUCXmZcGkHphCZ6BFCgUkpspTEB8r8jiZruJ1vhEnSrDVtOxFsyv39htevd0Q/oqTHup72OiBkwdc//5ppNFyOPZGJspSMY+Tpi2/Y7BpCMqGrJYP31MuMUlc0RTULHBYrUmnZNjlVseJ8mwUBUmWQ3/HVN3+P3/uDP8HagXdvv+Px9YbVboEJYCZLU+ZkRUbRaKSWnA43nh42+HHgh9/+hh9/eMv/8b//Fc+fTnStpco0p+crWjmESoikxMQyjp6QeNI843obmGyPcT3OSabBoTPF9XpFyZymWZGmBUJEdJ7Sjjeul8B6vUOoK84HDvuB0U48PbxBKjjsL5S6QMuU5XLNy+WZ4AxSgbETr56eGIeRjx8/sds9IoTlfDijgkYkGarwZIsSQ6Qd5pZ8DJ5M51gzkekanaUUxXxBkklKWc24Q2sCZuzwtqFoHmg2Ea0zOnumqiPWGvKiIZUlfeu5v7+jrJeY7sLlZSDNwHuHI+Xl9IlMCj7sDzzeLRjGiWnsqYqG1SrleHjhi9dfobVEmBTTB+4fMw7HKzYZeP2LB2Kp+M3zWxJRzK8CS8XASNcLOnGmTwQfX67000S9yFitBFLo2QppS5LYkEjDy7GjWpazvfL+jmAttvdM7chmWfOP/+F/9jd/UP2v/5v/6td921NVa8pGUpQaQck0dqyWFXVd0N4m7CiITkLiKWoJQpHpFbfbFZUp0pCTCset79muVzAmFEVgu9rwu786UpQJ9w9LXDAYZ7ieDI+PK374/iNlKUmSwIcPZ07nK3kuaOqKqQ8URc3LywnvI4lSBAJCzc/ZZZXx8fnC49MS5yeO+5ZUFiTC8le/+R23iyWVBXWtUbLheDhzOHykLjbIRPDy6RmlMpTKSGRKWTQcX16o6hQXJ7qxJ0kk/TBRljv66UJVKfK8RApNU+2QWUIq5i/pd9/+httlT1OuKbKKLEu4u/sJwxQYY0s3tKhkicokMp9mK8gwYbrZUV03a5wVM1stnlFLjYjw5tVX/PQnP5k3kcs7is/FiFQFErcgCsNoRvI8wUwBMw4UeUHVjFy6K3me4Z3iw6cDVV3Sd4bLqSfEEVECIuBjwLiBsk4gaooqYu1EZy2ms7T7K093G7TM+PThyocPn3DOoLOSSIsbPEoGuvGzi9gl/PjbT5zOB+pmRVYk2BAhhfN1ZOgiy8WSk5nIC4EfIsP1QlO+IhELxuAY/Y1+GOnbge56I2Xm0aap4nw6oLMckQRUJrm0N5QC0ycUSqKlJy87vn5IuHSGdal5un/Fjz9e2P/mympdY4YLSmnatofPZp3gHFpJvJ1QiWdXr8hTTTu+0GwKRBFpdjl5VnE5tqhixlbZcUCkFfVqR5J8VkuWGZPtyVRDxGOdoSpz6qqmyHOElEyTIUZJWS2RWqNLyWa5pNA5aIUXFcM0D6A6i/TjCR/mS8Hl1jH0jnGwBCeBFCU1MSQzDsgJVps1dw8bvHWYIeVysOQZiNwyjuAnjwuRNM9ZFDuOzx2X/UT0KZexJy3mmIx0KZocO83P085ajAu0reF6vqBkZLFYAZq+nxBCsl4vccYz9ZbNZkVRzcY0FwQmRNrphplalBIkQWMmQ51XjJdAdxupiwbpclZ1w2bxQPSCqpakOtKZGzY6EIF2OPPux/eY0VGUOYM9MbgriSrIyzm65NxEXdZU9fIzh/VE3SxoryN9P1BUNcbMh8nsXu+wzoO
ORCFY1Q1CWXRa8Pf//n/Aq6cHvvvhW1yEh/tHtIa2PSCFZBomQghkmQQ/IaIgqyRdNEyjIkskWaZZLJeMw8C17RAqo70Ybm0H0VGVOYicRKbsTy9Mk+P1m1c8/eyRdFFguojwmjTLiGNK6QvydC53xSBQqsFYgzEj0WX018jT/QOLZcWHDydiTJHpjIrru4njeQ8ioNMCJSV53lDlS2SWcLM9gzGUqw1CJUyDY7NesVksEM59/v2ICJGS5zl3myfqpuTWvmDGkbwqEJkhYJlGhxSRoq44nS1lUZKmKVoU6CRjUZRE5zGj56c/+ynbuxwwCJEwJZAkGgSUpcIRicaSScFyeUfdlPRtzzRNtP2AyCDLGhya82Xiw+HA1YyYSVKkFTGNdDYhReD6C7f2TLPa0PUjvfGMYqKbPLkq8dZz97DEjYbJTEQbmMbI3d2a58tbmDSr1ZLVrsKZChE1ISS07cDQW1QZmKzh/ccDZSmwZqSpHlgsH3h3esun04XV01e8HPcE35MKT5pYMq3pukiWZfjRI9ysYlWV5nJz/J0//vf54z/993j64jUPD19QVRVaTuRp5OPzhWGAzGlC6jh0A8PUkQXNQm+QycinTwdi0FTNhtEZ9MrTj5aEitWqZL1pOJ1OPD28YtGs8YQZzXYxrNY1Wa7xNkIo0TrD+o5mUdHebpzPJ4bRIlXg3B65XgM6K7H2QlEn3M4SoTy5vqfrevqpRaoCbyNlEbHBogqB9yPjAPEzp0AkGdZEbteOGCfOxysh80xOkQNZn5JR8OHjM1kFQqSk5GTS03eRhy82lFXKx7cHpMnnfHpfELzGeY8OKX3o6ITn/HH/WTqdsP80IbIJlQaG1nAbDvSTQ2tJICMkI/ebB07XkRA6Fiqbi2ZhpL3Nr17bXYkdR/Isp5ve014DiUlY3K9ISjh8Mvz+T3/Fj99+S+8kSblimAaqqmbyjq5zFNlM1Agxpykji12FEJZ/9WfvePP6a6KLfPvdtxgj+eLNl+g04l2EkBKDpeuPTJMjKxe8fvMl/+A/+kd/8wfVf/4///e/ltnA/eOcSarLe96+fY+PhsWm4NvfvOPV45csFjnNMkfpgiSZt1zH/QfKCl5+nFDS0NQLxi7jen3m9VdbnCkpdcWH/Qt390vMAG8//sh2/UQwI6qYzUT325/zV7/5LYmY9YFZnlEWC6K3yNxwPp94eNghokQwUeYNaVrwu28/kheBGAzLuy1lnjMyH5qXa0BrECLy8Ljh1va8vDxTFBUvH/d0/ZV/+0//mO5iyLWmLARaaDKdcj23qHRB25/RSs78teP3bJdPbNZLDi8HbrcOqT3P748kaUKIknSR8fjmCWEFaalY3y0xU08/DhAVq6oiUSNjr7hdT7zeNkwukouSotYkqeL9Dx8QMuHx8SekQRLCSLNckKSCdmhn/7dlLkkYj8oTcq0RwVHIBaVakWU5UlqSmCGFIoTI8j5jW2VopchTiVIKHSGQQEwZr7DJJT/85nnGixjBx+MVORaoxFLVJZvNE2/ffcSYkbouQKYs65zz4Dgz4nXJ/nDCfhrohpbeTSxXS6qFZrVZQARnPNt1BYxstw9kac7L+xOT7XBR/bU33ouR43NHqTNW65o8r2iWDX3XQYiMU0eZ1Tg/EMVAKlOkWjBNE9/8/A1OpKzygkUGp0PPbvuA9ZK//PP3FE2BigIfU+w4YcZAd7vws2/+ABFTTscDl+uJPG0wfqK305xhdYpgJYfnE9HM4X5nB67jGa1zvB/RQmKCQQpFITNMb8lSSfQTCSMxgRADSklUookmsqxrQvA8v3ykvbVEBM/PB/woCN2F4CJKpiTOYcaE0Q5M/cB4GxExsKiWFPnM0C3zlCRY8iyjqjMUnkoXBCcYpoFiMccnzGRRiQQluNvcUaQNn95/wEwDgx/AO9r9iDlHvn71hs2mpqgCD/c7IME5hZ0kv/jlK5bLiuvFURQFIjqmIcxmoaFjbK8IqUik5nSd2abOO0IIKKnJdIFWGUWRslrUSCJZLlmtCyKWyRq6YeT55R1dd/3chlcsFw0xeC63K4GC1fqRellyOHd0N4HzkUwr7ldLvHVYO82RAtdiaZFpzjJbYLqJfhgQKsHZBDsFzsMFE0HrCjM5VtWWLM95Pu9plhUCx2n/kaJQlJmAseM6dLhckpfFjOkaB56fj39tNHPO0Z3O1Cpht6oRPuIn6Mf58/W3CSETtuslq3U9FzPbkWEcSJVm96ohLRNM2zN1F5omRxcClTiqQlKW2WwXY0KolKKSRBHwMCPMMLgw0A8dZmxZLlKaei5mXC5X6lpT6Aacpm4ylot8lnJEgUoF51uPihJvZrZvd7uyqpZolaNFRVXW5CIl1/OLw8v+A/04ki0KrBkRaYFI8zmvmZeYKSB0QHwup0mZ89Ovf0KRC2IeUImAMBFcxDjB82HPNBmKYi6cTZMlSxNO545+kixXK+zoufUX0qxg0+zQiWC9XDKdB0Y/M20VkgKJNZ72NqLirJ3tTYJuKtIqReQD58uVaegQNmFR1TipKLOKRbpgscywMZIkCWa6sFne8/C0xAbPu3cXmnLBYJ85305MbeTLN1+iM0F7vrLdrLAuoS4XXNs9I5aIoalrlnnO1B3JqwytEnAdIUCWLbDTlRhGensiK1Z4X/B3/95/yB//6b/DDx9/QDnBcrPi+eMHnh5e8/1vf6B3hsH0LNY143BBhYgdE1xMsDIwhIRmu6KbOhIpeXpzhw0SJRJy4YnW8O7tlbLZkceRb394x2jm/9Vcl0z9jTzfcjpemMyZrMxw3tJeB+p6h84UX335Ba0ZiEGjssjDei6cVnpNby8ELzm8fORht8DaBF3DtbuSFzWX25kQHdEGnA0s6xXjeMP7ie1mi1IZ1go2j4/oKJEu4eHxidPYonRGPx0JMSGahARYLLfoVHG7XElFzsuHF8q6oj16do8KnZf8wS9/RbVJuR2OyKujlRfKquR63lNU8+drqopffvNTbucON0mKosTbgTyF9x8OpIng3/q9X9DtL1gdGXqPVjk+OpyKeCHouxt2jAQv+Nkvv8H6T/zlv3khCsPTVysGm/Du+zMqtuzuV7Rm4DZcKJOC3dMbFtWSui4J08D++UzTLJDRky0KFvWS9/sX7u8avBNkKiNKy261Zf/hxP3Tlv468OaLDfvT4W9H6/9//Of/7a9v/YWH+9cM443T8fpZpxfxwfHq8UuU1KRK0rYt1jh2ux3X24lUKV72L7x6euTpcTd74JVApB6CwJnAOI0Y7/GuZxxG0ixlu9ny8f0HBtOzXi14/nhBKmgWBXYSrNYlSipe9s8UlaZpVrx+9QVCpBjTMxn3GR+TUjU5Ra7nBuhtJK8VT3eP/Pa37/j6yzd8/fXPqZqMH797x2qzRusZv9LUJTF6ZCIRCkgikYidJo6nI1lWUpTzAZOQcnd/R39zhDDHD5p6w4ePH9jd3VE0BevVDl2mZLmk0TWeyLff/hYzTmw2W6yx6FSSZxm3q6UqZuXpFCBxCUWVcb7dyFROkSo+fvpAmeWEGJBCcTjs0V
rP9htryfIUISPb7YphaHl6uqPvRu62D8TosW5ECIEuCupmSdEUmP7/b+9ediVH8jqOf8MOh+95z3Op6u6pZlghIcQzseYdeAseAIlXYTMIsWrQoJ7paarqXDJPZvpux42Fzw72U0j+5AukZGf6H+F//H+aa9WRJBnGwjj0jMYiZUxdd2zKlNXmQBRJRBAQJDHjNO+6eAfb3YFpGtjuD3RjTz9qTqdnOt0SxH5+DeVC3v50oRtH7h+PrMoVq1WBsZa6adhutxg9kWUpSZLy+nLBOzvP+/Mh49TjvAfhUGHMNA7zLL5xIstz6qZi0iPH447rtebL5wufP1/YbT6y2TxgRkfXjoQiI8Ex9l9ZbUq2xzv+8PMLwyCJVcrr8wu7wx5rNGmck2RqHo4/TTirKYt8XoyEUK437ylAMdZ5jDWM/citqeYM9VRSpCVlsUJFMaGS7xG2mjhSxHFMmsR4PM6IOfhhmLDGMU0TIHh5fqZtGw6HHc5Z4jhhXW5I4oTL7YaMFNv1CmsdWZGSxDGrVcH9/R3Hu3u890QqoCgyyrJEypBAhuzvDyDDeYJFnmKtYxhG+qEnzhO2+w1ZlvD58xdkGJAkEpVGrMqSx8cH7u7XrNcx06Q5v7ac3wZeni84LB9/cyCUAdPgGCdL1zfcrjdut57RjERRyPcfvmOcJi5VhRDMc129x3uBMZooDPE2wGqBdxHX15HXp4axB0HOfrflx08PrMuCNMlIkpQgDLnearSZQw8IAlblGjNN3G4VaRwTygApAwIxv/o01pIlMUJA1VSkWUEk5msuI4X182KgrRu6qWW73SBDiTYWaxyXy3U+hBBKqlvD7XqhHRoGrQmCCCEVzdBze7vS1e0cuhAr0jhGa4P3AWUxn063RvP6csLoebGTxBEyjCiKEqUUMgo5n888f30lThM+fPxAoDxdXVMmBUpJhm4+MeysxXlPka3IixwrLJOxaDsxaoP1kMQxZTH3EI/WoGKFs5Y4yUmyhCAQOKeJYsV6u8H6+cCqf/+IQBCGETII5mumQtqmpkgLAgS36oZKIvIiJ0ljej0x6YFRT9RNxfVyY9QTWZJS5hl6nGjfd/cmp3FC4CyMfUtdVfOJ/MkSyJBbXc/xmXp+E6fijHKV0fc94OfC1Qqcs1ijMX4ikgoVxESB53KriZMVWswpK3Xfk6kE4yy74w6pAi7nE6GQpKnEeUuAY+gdMgwR1iMjxWgcWEMeJSRZxNPzC2bSRFIgZYT3hrdbxTg50jjl7fqV3eFAfemRMkSEHiUVBCFxkrFfb+i6GmSEtgPaGHI17+qrJKNve6Ig4HSqIQg47ncU2RoZJjzcf6Is99w9fAAhaOsLZRLz65df+Lff/Qs///wTSQRWOIauY9IaFQmKrOR6rUnSlKatscahJ831ciUUEZfziSAIKdKUwHmevn4hCFIOhz319ZVBO5I0oa5rynJFrAJO5xsyEqzW5XtRrTBGEEcxaaZ4fnqiG1twgkiFhCKEMATnkUrgPex3R9IkpukHJtegJ8thvwccgx4os5yqajns90x2nOffWsfp9Mo0DbydL+RRRCwTur5mHEYCBErNkeZKRVwvZ5yzrNY5Ss3TOZy3fHj4wMcPR1arjPPlRqQcz6eaSCnGrmF1f6BcS65vb+AyIpky9prm0lCsIqwwCCHZ7/dYO9J2Ginn/vip6ni6nrDGEccpTdcQvM9Lnroe4Ry7/R1vpyfadiLNSvr2hpksWZKyKrYcjlu0s3TdSF7mbNMtb9ea33z/kaqp+fBwBAHdWCNcwKAn7vYHTpczkQTnPdMwUm4zqmvN/d0jTX/FGsv33z1yrev/H4XqP/3zP/7DD5++53Jp0XpgtdowTj0qDun7njIvwUOWJWRZSpqW/Ofv/4OyTHi4f2B33DONLSqS6AlkNJ8aL4sDTX2jbdo5eWObE6uUclvSdyNSBIjQ46ym7wxZpjCmJxAJkYLL5UIQCNI0oalH6qrl118/k+UKrS2Pj480bc/94x6Bp2pa1tmabqqJCLk7fmK9KlBS8evnPyDE/Lo4DGC7Oc4DfI3GexCBp+sqbtcaKUPCMCBLCtr+ipncPKbjPe6wql+RocLakGHQqCSmm1q09tT9DT10+N7N82a94fHhkbYbeLx/IE+Suek5KslThe47it0OoeeUKydAuABnNB8eHijyFD14oijk5emJ+tZR5iU/fvqe6+UVby2THgnDkPr2xm69xVmomytpFnK8v+fr8xnrPJObqM4tz28V6/Wayfq5xzIv6dqBIPS8PJ9oOk1SCF6fTkxOICMBQuO84PT6hgeMdUzW4DxMgyGQCT6QDL1lt8r54e6OD58e2N1vkUGIFx5rLXoyXK81m/WKu7t7LqcrgYzmVCBvyPINZZkgI8WtviGcQwBJpihXKzwOazXb7QalIvRkkTJCRgGRVBg3Ut2ekZEFPNJf2JSCbLVhGDW//HLCi4JIJoSBQEYRTd1gDdw97N/H7owEwjPpAa0t+/2Oa1XNi41Rc6sbgjAgTTKyPGO735AWGaEPsNZiBosXnlAE9E1LkRc4K4jk/HsKpSJLC6ZpwBrLZrMjyzKstdzdHZBhgNbzfYkIeHu5QSBomgHhgMAjI4GzHiE8SRzT9wNfvn4hTRVJ+p7J7SxJloIUDHpAxYpxHDm/VcRpTlKk+NAhgvkBf/9wz2pdst2t2WxX87D7YC58w1DQd/MBwu12z2q9Js4UKon4+vXMOBhkFCBVSBqXCJnMsZFxgp0Mt+p9RukwH4gQAoZ+vnenyc5FQSDJs5TjYcdf/9Vf8jd/+1v+4rdH7g4lSSQo8pgoUgzDRNN0tF2H9QKVKIIooLrWtHVNkqWkccI4jIAnS2OMtYggYFXk9F3LtbkRygjTG5q6pR8N/TCgpxGBYHvcUqxzxn5iGDTeegIhCCKBUopRG9quIU4VUZrwdrnxdq7f+98FiICiTNmWBVKE6Mkj5bxgSeKEaRzx3vPxux9IkogoCtjtdgxDz/n1lSSOOd7dkWfF3FPmNK/nJ6IgJItS+q4hy3L2+wNtUxHIkCwpMFZzrW+Met6JdoEgTmMC53DWcrlembQmS3MCMUc1d31DVV/RRs+JPnGE1nPLU5Io2q7h7XplHDWZUsSRom5vWGsQDrqupRs7AhnSvC/c3qobxk7zvZpE7DZ7inKFw8+zXI2fH+xY3qoLXgTz/zAOGUqe3l5JVEKWJ1RVRaISjNFIFTFOdj7kNk4EYcA0WcpyTd3csGaOUh5GjRkMd/sNt6rh89cLm+OOyUwEUUjXtOwOO15PL7yeXnjcH8FYAumYtMFNhvNzS6QkWaQwztBNE2WasU4LROSZJsd2s0V4R5rm3Korg9asVmussTjRY40jT9ZIGZIVCU3dsd8fmSbNfrPBmhEhIyY7kGUF9a3FOocPJFEYI5wjy7YQOby1bFdHjvuP7I8/kBcb/uvnPxLJkKGp+P1PP/Gv//478COrIsQOLafLiTzL8SLg7fxMU3UIL5EyfI+yhvPrCQhx2s/tMUUJ3tDeKuqqJgxL7u4PeN0zGo8Tbp50kRfEc
YD3kjgJMUbPz5Ku53qp2O8OqHgOsrhW57kv2AxgQkatwUBWxuChyHKutzNV3YI0mGnenKnrmravsZPBE5JmCXESYYzm6fmFu4c7Vuuc/eZIdT5jJs049ux2G1QUkaQKFWcYq+fe+FVB0zXIRM6LHinBe4oi4pc//olQSc7n/+brU8uqLNhsE54uDZfXC0W+Yhh6NtsEPTXgPNp4msbjsXz4+IB3GuvmAriuK5gMRjoSNU/R6fqeQAVU15ZYSuJIcr00qBgub5YfP32cN8emkHFouFwqVBzR63Eew1UWVK83tLEoJdnudzT1BS88w9SiwgSCkI8PD/z69Vf2u4woUvTtQJQGWO14O1/ZHkqc8Yx9TdMN/P3ffRsRqsJ7/+f+DovFYrFYLBaLxf8S/Lm/wGKxWCwWi8Vi8X9ZCtXFYrFYLBaLxTdpKVQXi8VisVgsFt+kpVBdLBaLxWKxWHyTlkJ1sVgsFovFYvFNWgrVxWKxWCwWi8U36X8AQIz+svsBKM4AAAAASUVORK5CYII=\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light", + "tags": [] + }, + "output_type": "display_data" + } + ], + "source": [ + "# Let's plot the result\n", + "show_result_pyplot(model, img, result, score_thr=0.3)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "7GrWIJywLV-V" + }, + "source": [ + "## Train a detector on customized dataset\n", + "\n", + "To train a new detector, there are usually three things to do:\n", + "1. Support a new dataset\n", + "2. Modify the config\n", + "3. Train a new detector\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "E73y5Lru-wBx" + }, + "source": [ + "### Support a new dataset\n", + "\n", + "There are three ways to support a new dataset in MMDetection: \n", + " 1. reorganize the dataset into COCO format.\n", + " 2. reorganize the dataset into a middle format.\n", + " 3. implement a new dataset.\n", + "\n", + "Usually we recommend to use the first two methods which are usually easier than the third.\n", + "\n", + "In this tutorial, we gives an example that converting the data into the format of existing datasets like COCO, VOC, etc. Other methods and more advanced usages can be found in the [doc](https://mmdetection.readthedocs.io/en/latest/tutorials/new_dataset.html#).\n", + "\n", + "Firstly, let's download a tiny dataset obtained from [KITTI](http://www.cvlibs.net/datasets/kitti/eval_object.php?obj_benchmark=3d). We select the first 75 images and their annotations from the 3D object detection dataset (it is the same dataset as the 2D object detection dataset but has 3D annotations). We convert the original images from PNG to JPEG format with 80% quality to reduce the size of dataset." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "rHnw5Q_nARXq", + "outputId": "a61e0685-6441-4ff2-994a-15da68e507fe" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "--2021-02-20 03:04:04-- https://download.openmmlab.com/mmdetection/data/kitti_tiny.zip\n", + "Resolving download.openmmlab.com (download.openmmlab.com)... 47.252.96.35\n", + "Connecting to download.openmmlab.com (download.openmmlab.com)|47.252.96.35|:443... connected.\n", + "HTTP request sent, awaiting response... 
200 OK\n", + "Length: 6918271 (6.6M) [application/zip]\n", + "Saving to: ‘kitti_tiny.zip’\n", + "\n", + "kitti_tiny.zip 100%[===================>] 6.60M 8.44MB/s in 0.8s \n", + "\n", + "2021-02-20 03:04:06 (8.44 MB/s) - ‘kitti_tiny.zip’ saved [6918271/6918271]\n", + "\n" + ] + } + ], + "source": [ + "# download, decompress the data\n", + "!wget https://download.openmmlab.com/mmdetection/data/kitti_tiny.zip\n", + "!unzip kitti_tiny.zip > /dev/null" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "Wuwxw1oZRtVZ", + "outputId": "7f88e82a-0825-4c9e-e584-bd43589feeaf" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Reading package lists...\n", + "Building dependency tree...\n", + "Reading state information...\n", + "The following NEW packages will be installed:\n", + " tree\n", + "0 upgraded, 1 newly installed, 0 to remove and 10 not upgraded.\n", + "Need to get 40.7 kB of archives.\n", + "After this operation, 105 kB of additional disk space will be used.\n", + "Get:1 http://archive.ubuntu.com/ubuntu bionic/universe amd64 tree amd64 1.7.0-5 [40.7 kB]\n", + "Fetched 40.7 kB in 0s (165 kB/s)\n", + "Selecting previously unselected package tree.\n", + "(Reading database ... 146442 files and directories currently installed.)\n", + "Preparing to unpack .../tree_1.7.0-5_amd64.deb ...\n", + "Unpacking tree (1.7.0-5) ...\n", + "Setting up tree (1.7.0-5) ...\n", + "Processing triggers for man-db (2.8.3-2ubuntu0.1) ...\n", + "kitti_tiny\n", + "├── training\n", + "│   ├── image_2\n", + "│   │   ├── 000000.jpeg\n", + "│   │   ├── 000001.jpeg\n", + "│   │   ├── 000002.jpeg\n", + "│   │   ├── 000003.jpeg\n", + "│   │   ├── 000004.jpeg\n", + "│   │   ├── 000005.jpeg\n", + "│   │   ├── 000006.jpeg\n", + "│   │   ├── 000007.jpeg\n", + "│   │   ├── 000008.jpeg\n", + "│   │   ├── 000009.jpeg\n", + "│   │   ├── 000010.jpeg\n", + "│   │   ├── 000011.jpeg\n", + "│   │   ├── 000012.jpeg\n", + "│   │   ├── 000013.jpeg\n", + "│   │   ├── 000014.jpeg\n", + "│   │   ├── 000015.jpeg\n", + "│   │   ├── 000016.jpeg\n", + "│   │   ├── 000017.jpeg\n", + "│   │   ├── 000018.jpeg\n", + "│   │   ├── 000019.jpeg\n", + "│   │   ├── 000020.jpeg\n", + "│   │   ├── 000021.jpeg\n", + "│   │   ├── 000022.jpeg\n", + "│   │   ├── 000023.jpeg\n", + "│   │   ├── 000024.jpeg\n", + "│   │   ├── 000025.jpeg\n", + "│   │   ├── 000026.jpeg\n", + "│   │   ├── 000027.jpeg\n", + "│   │   ├── 000028.jpeg\n", + "│   │   ├── 000029.jpeg\n", + "│   │   ├── 000030.jpeg\n", + "│   │   ├── 000031.jpeg\n", + "│   │   ├── 000032.jpeg\n", + "│   │   ├── 000033.jpeg\n", + "│   │   ├── 000034.jpeg\n", + "│   │   ├── 000035.jpeg\n", + "│   │   ├── 000036.jpeg\n", + "│   │   ├── 000037.jpeg\n", + "│   │   ├── 000038.jpeg\n", + "│   │   ├── 000039.jpeg\n", + "│   │   ├── 000040.jpeg\n", + "│   │   ├── 000041.jpeg\n", + "│   │   ├── 000042.jpeg\n", + "│   │   ├── 000043.jpeg\n", + "│   │   ├── 000044.jpeg\n", + "│   │   ├── 000045.jpeg\n", + "│   │   ├── 000046.jpeg\n", + "│   │   ├── 000047.jpeg\n", + "│   │   ├── 000048.jpeg\n", + "│   │   ├── 000049.jpeg\n", + "│   │   ├── 000050.jpeg\n", + "│   │   ├── 000051.jpeg\n", + "│   │   ├── 000052.jpeg\n", + "│   │   ├── 000053.jpeg\n", + "│   │   ├── 000054.jpeg\n", + "│   │   ├── 000055.jpeg\n", + "│   │   ├── 000056.jpeg\n", + "│   │   ├── 000057.jpeg\n", + "│   │   ├── 000058.jpeg\n", + "│   │   ├── 000059.jpeg\n", + "│   │   ├── 000060.jpeg\n", + "│   │   ├── 000061.jpeg\n", + "│   │ 
  ├── 000062.jpeg\n", + "│   │   ├── 000063.jpeg\n", + "│   │   ├── 000064.jpeg\n", + "│   │   ├── 000065.jpeg\n", + "│   │   ├── 000066.jpeg\n", + "│   │   ├── 000067.jpeg\n", + "│   │   ├── 000068.jpeg\n", + "│   │   ├── 000069.jpeg\n", + "│   │   ├── 000070.jpeg\n", + "│   │   ├── 000071.jpeg\n", + "│   │   ├── 000072.jpeg\n", + "│   │   ├── 000073.jpeg\n", + "│   │   └── 000074.jpeg\n", + "│   └── label_2\n", + "│   ├── 000000.txt\n", + "│   ├── 000001.txt\n", + "│   ├── 000002.txt\n", + "│   ├── 000003.txt\n", + "│   ├── 000004.txt\n", + "│   ├── 000005.txt\n", + "│   ├── 000006.txt\n", + "│   ├── 000007.txt\n", + "│   ├── 000008.txt\n", + "│   ├── 000009.txt\n", + "│   ├── 000010.txt\n", + "│   ├── 000011.txt\n", + "│   ├── 000012.txt\n", + "│   ├── 000013.txt\n", + "│   ├── 000014.txt\n", + "│   ├── 000015.txt\n", + "│   ├── 000016.txt\n", + "│   ├── 000017.txt\n", + "│   ├── 000018.txt\n", + "│   ├── 000019.txt\n", + "│   ├── 000020.txt\n", + "│   ├── 000021.txt\n", + "│   ├── 000022.txt\n", + "│   ├── 000023.txt\n", + "│   ├── 000024.txt\n", + "│   ├── 000025.txt\n", + "│   ├── 000026.txt\n", + "│   ├── 000027.txt\n", + "│   ├── 000028.txt\n", + "│   ├── 000029.txt\n", + "│   ├── 000030.txt\n", + "│   ├── 000031.txt\n", + "│   ├── 000032.txt\n", + "│   ├── 000033.txt\n", + "│   ├── 000034.txt\n", + "│   ├── 000035.txt\n", + "│   ├── 000036.txt\n", + "│   ├── 000037.txt\n", + "│   ├── 000038.txt\n", + "│   ├── 000039.txt\n", + "│   ├── 000040.txt\n", + "│   ├── 000041.txt\n", + "│   ├── 000042.txt\n", + "│   ├── 000043.txt\n", + "│   ├── 000044.txt\n", + "│   ├── 000045.txt\n", + "│   ├── 000046.txt\n", + "│   ├── 000047.txt\n", + "│   ├── 000048.txt\n", + "│   ├── 000049.txt\n", + "│   ├── 000050.txt\n", + "│   ├── 000051.txt\n", + "│   ├── 000052.txt\n", + "│   ├── 000053.txt\n", + "│   ├── 000054.txt\n", + "│   ├── 000055.txt\n", + "│   ├── 000056.txt\n", + "│   ├── 000057.txt\n", + "│   ├── 000058.txt\n", + "│   ├── 000059.txt\n", + "│   ├── 000060.txt\n", + "│   ├── 000061.txt\n", + "│   ├── 000062.txt\n", + "│   ├── 000063.txt\n", + "│   ├── 000064.txt\n", + "│   ├── 000065.txt\n", + "│   ├── 000066.txt\n", + "│   ├── 000067.txt\n", + "│   ├── 000068.txt\n", + "│   ├── 000069.txt\n", + "│   ├── 000070.txt\n", + "│   ├── 000071.txt\n", + "│   ├── 000072.txt\n", + "│   ├── 000073.txt\n", + "│   └── 000074.txt\n", + "├── train.txt\n", + "└── val.txt\n", + "\n", + "3 directories, 152 files\n" + ] + } + ], + "source": [ + "# Check the directory structure of the tiny data\n", + "\n", + "# Install tree first\n", + "!apt-get -q install tree\n", + "!tree kitti_tiny" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 304 + }, + "id": "YnQQqzOWzE91", + "outputId": "455b3e61-0463-4dc5-e21f-17dd204938fb" + }, + "outputs": [ + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA20AAAEfCAYAAADShy4pAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy92bMtyXXe91uZWcMez3zOnadGz0ADDRAgQIIiKZkUHWGbCioomn5zOEIRkv0kv/hP8Itf/aCQ7AjLYVEkLTIsBi2SYnAEAWJGg81u9Dzde8989jlnDzXk4Ies2mff7tskGEQDDXp/Ed333r1rV2VlZWWub61vrZQQAkssscQSSyyxxBJLLLHEEkt8OKF+0A1YYoklllhiiSWWWGKJJZZY4v2xJG1LLLHEEkssscQSSyyxxBIfYixJ2xJLLLHEEkssscQSSyyxxIcYS9K2xBJLLLHEEkssscQSSyzxIcaStC2xxBJLLLHEEkssscQSS3yIsSRtSyyxxBJLLLHEEkssscQSH2J8YKRNRH5ORL4jIq+IyP/0QV1niSWWWGKJJZZYYoklllji7zLkg9inTUQ08BLwM8A7wFeAXw4h/OX3/GJLLLHEEkssscQSSyyxxBJ/h/FBRdo+A7wSQngthFABvwL8/Ad0rSWWWGKJJZZYYoklllhiib+zMB/Qea8Cby/8+x3gRxcPEJF/CvxTAG3Mp4Yrw/hFEEQApDkyPPCnzD9vjhFog4VhfuziMQuRxBCIJw8LX4V3HRq/D8HhnCV4h7SfBxBRiNagFAGJ1wwe8OAD3nlA0FqjtEaU4L0HwGiFKIUA3jsQUCIEH3DOEwjxd0rjnMM6T5IkaKUX7tPHuxN5sNkLfTPvuRDmhy3GU8P79FAI8Tcx+hrmH4YQ8N7hnIfgMaJIs4yAUFtLIKAEtNZorUmSDJ0koDSi1LyftVIX53OeqqoJIcR+knhL4SGNjI9NLu6JxXu/aGsIIAS89zhb46wDCQhgjCExCVrreC4B7wM+eKx1VFVF8G5hqMR2KhFEFEopRKR5Lg4fPN7H55XnOcYYRAQRicc240jm7ZN2CC3c2uJT4GIgizxwjxcjtnmesnCih0Euzj8/pO0/3z7f+Pt2jHh/8W4o70hSEy+hNEGn6CzjYHcPX5YoCQQB2zxDrQz9/oBur4NJNMFbXF1RlyXeB6wPZKJIs5R8MCBNOxACSkNNjS1rlKRMpyXT6RhjNFmWobWmritCCHQ7HdIsRSnV9E8cB9ZDUU6p65qqqHBlSZZqVlZWUSYjKFBaIah3zSsXY+piwMX/O+85Pzvj9OiAoigZ9nJWBz2UNu0bTxCJHq/5Aw14ArPScnI6wbnA1vomeW4wRuFCoLQBpxKKyYSBCuTdBAf4IIzPT+mnOb1ej6AE7y3T6YRpUZOmGW42pagslXWE4BAJRJ+bYJ3HNXObb+cqedfYQsWGSiBNU3rdHsOVVXrdLqiLqbE96xJ/NxHe9bf5PNSMY+88o+MzRqcn1HVJsDX9TofhoEdZV5S2onYWH+KcEdctmjlFyLOM9fUNer0+k+mENMvodrvxrZHFWSlAaN5iCfi6wtma2lqSrItJ03fNjs376R3WWkRAiUJpDaJ4z1za/KSuK5RSnJ6ecX4+xugUbQy9Xk5iBLBYH5hOZ5STMbPK40NA49EizfmB5u9KKbQxZFlOt9sjTbM4Jz3Qzoe3ZaHDASimjuPDA5yb4UKcU0UFtARSUaxvXSLJu3+j5yo+UM8KxtMx1locnjRPyUyCK2umxRSvNcPhBr1uh8a4ma9Xf/0MEAiLN+Hh/GxMOZtgvcWpQGIUqTE4G+dDgkXwiE4Yrm/RyzOkWXuUekhffSB4cLEMczvzwWPe+/TacRog1OCg9oZaFJULFLNAXZWkuaKXp9RFSVkEbEhI8wRtPMGXZKmCSjOZWmolJLlmvQepCLNScTzxkMGwJwx0fB6t9fBhmY8fZm58v57e/9/wta997TCEsPWw7z4o0vbXIoTwL4F/CbCxuRl+7r/8LwhBIKhIjNoJXgKIJwQXDb1m5PjQmMQhoHSCJxpbc4NURaMshEgWIvGweOtQwnyyCD4SrhACwQviFcHXWDtmfHpIMT5FB1BegVVknT5ZfwWbZ1SisOIIdobxJX5aMDufoSWhP1wlGwzweJSCXq9Hv5uhlBBwzGYTgq8xWlNMC8ppRa/XZ7i6gklSjkenjE7P6fYHrG9skuUdnHWIr0kSCChQgkoM3gfq2iIIGoUWhQTwwSIS5otqaBYfHwLS3j/RYKtswM8JmsVWFd57gnNYW1HMZkwmE9x0ykaScPXmHYablzidzdjdv89sek6ihTTLEJ2xeekq1+48xmB1g36vT2oUeaIjgQ2glOHsbMzhySknJydYVzMY9Oh0ujhnIyFKDNZalNZobQDB1hblBYXgJeDx+OAIweO9Q0sgAWxZMZ2es7v7Nrau8LWjk+fsbG6xublJfziMBnrwFLbm9OSEk6MDRifHWOvw3jOdTChmM7Iko9frk2c5Sim8gtPphOPjE6bTKUmScunSJW7euMXa2hrD4QpKKYJ3pNqj8CBCUAobAi4ERBRaKSQIznq8qwneNuRVo1WKUmbBIHAEHCJE8usjOWzJ5SKpBebGBUrhQkvWItEMkXvjrKUsa1SAwXCF/uYGxfkp5b23GGx1CUlKpfrIzg3WH32c//V//l84e+lFOkmNSxTHZzPeeeOAbrbGZ3/8x3n2089w+eoaxWyf0d477L76CuW0ZO+05FZvyO3bt3j8p36SmzeeQtcOs+o4DAfsvb7LQK7w7W+/zp9/84tsbq7zyK2brA6HnBwdMptNeOKpJ7h95zbD1ZVIkGxJYhJGBbz05osc7+9hRwVvf+ubTI7v8fFnP8UzP/bTDK5ukuVdUpNhtJmT9rbPHiBuErDiOZ3N+IPf/R1+69/8K1558WV+5lNP8PM/+WmylTVSbamD4HRKJhZdKzSaoB0TLM+/tce/+3++yNGh51/8s3/O5z75EdKs4HAy5s//8m3c5qPcff4FPteZ8dFPXuegqpl6xRf/5Pf4zI0n+dHPfI5Caqw955vf+ia//cdf5/qVOzy9kvCf/vQbvH10ztn5Pkli8eQgOedFzeH5mDIonE7wohAVnQhx9AiBDE9FnkEnN/zY5z7Pf//P/0ceefQOQQlBAk4iIUwlmTtK5L2WzRI/hIh+s2iUz52bAtKsuaEOFJOK3//tP+bX/93/zVvvvMT48D4ff+QWn//Ex3D1hO+8+RIjO+OkKhidT5lOa0JQVMFTlBWPfuRxfukXf4mnn/4EV65cZVaWrG5v0Ov3CSrgm6lMY5HgCEFjxaCDpTh4m7PDe9zdP2bn0U+wc+U6yXzsBcCDeOpixmx8SqebY7IE0QbIiKaMLHq4qKua57/9PNtbW7zwwnf44pf+nJ//R7/MlZuXSDqGhCmqmrL31iH/9l/977z857/HO3Q4Fs/l1YyhyhCEpJsxXF9lY2eblfUNnvrYx3nmmU+ysb4DQeN9wGjVENOmye/2QC
5avBJJw2vPnfNr/+Zfc3zyHGdViesliKlY0RU3+33+yX/3L9i89fQDpPBh76MHakAH0NOavede4E+/9Wccnx4yDSVXnrrNje4qey+9xguvv0y1OuRn/rP/lk996kmUrhE0EjQiFpEq9qfXBAEvjWsoxGcQJOBRBBTKQyg9f/Q7X+DFb32J4+KA08yzvd3h1vYlTg5rXnhtD18ckakp3c0r/Oe//M/4+J3bqMrhfSBNo2M7BFB/C3ay6LxtWPYFoQgBcA1B9YSgAb3wSx//9D6uqQ0t9d7iZzXWC6fjM8zsiOHwCkduwJ+8OuHrz+0yPRFKZjzyyRU+++gl3N4Jb7xc8+K9Dt0rXVY2pty5CVeGivO/1Hzhm3u8bio+8RN3+OVnO6x5y1f+IvAbX52QP93hZz/Z4TM9T+ISrAZHJHY/sFl4YegG4nhoia3A3B5/oH3LJeNvDRF58/2++6BI213g+sK/rzWfvS8E1bwq0pA3mocvRBeUJhqureOrJXXqwlAVIbSTmlxE3oJvPXtt5KS5aAwtvc8gk0jsmhdGKUVojRhpjGAVLvxTTdQGIUaXBNrhniQJaZpGQhR8E8Vz1FXNzM+wlaXb6TEYDsiyDOvi5OG9x9Y1SmJbghK0NojyhACegGoiJUYbQkNafQjRSyiKgI+fExCJE6S0BK7tAwGjm7lNFHiDFbDWUQMqaLRJEBUnOiNCphPyvIvurdJf32Z8fsLB3j3ORieIFFSV5Wj/hOFwjWtXr3Ll6iXy9VWM0dR4ZrZCdw3b/W066z1Gh8cU4xlFcUqeZ2SdHKUkEg+grmsCkJgEFSKpVyJxwhUf+7Mucc6ilSbp5PSN4mY3o5hO2d/bYzqecHhywvlkwmAwZGNzg7WNNfqmT6+TsbmxwuloxGg04nR0itZCmhhsbZlMz5lOxnR7XTr9PqsrQ4bDIaPRKXt7e7z++mucjs64efMmjz/+ON1uL0aiPJhEo0THsSQB5aM3N/gYLTKqjchGQh3HdXQszIezasd5/C1NlA/AORdJmtYXTgvAO0fDkJsx5xeivgnGpGitmJyfc3p2gu7nZHmK72TYEFCicD5gp1Mm43PS1FCUM/qdHNe8s0qaSKqLEWmlW5oQ31HVtBGlo2PFOVywBBzUNh4fAtbPMNoBHkIbAbWISOPRd3EcO4/H4xyIVszqKYGUlbXLTOp9PvLkE7z03IxvPPc8ob/Cp1Z+gjztxSiCj/ffRk1p3usLoyhGOEUMIQg+CNb7GGEWEPGo4DBEhxHBN84hBTr6Rn2whOCwdYkPlvW1FXxw6GnJ89/4Amf5q6zplFqb+M5hCUowOiAGxGiM1rhQsbq+zc7lm+wenPD5O0/x+FNP8fYXvkzlfFwtNVjvUVmCrhKqaYlODElq8OhoozTTmxJPsJbgPJOzMfffeZO6OIMQEOsRHfB+SmI0kDxsQlzihxgX0a0HDVuIH83Kkt///T/kN3/rN3nr/quMj/b56CO3+MTTT3IyOuatu68xCxXntuDobExZeZxTlIXFJ4GPPPoov/RLv8Stm7c5H59T2Zq1jQ26vR6ouEY5F9C6ndDi2qqad04ZQ5qmJEnjWFHvjlnFSLHSCZ3+CiZLo0M3LHy9YLSHEDCJ4drVq3T7PbIs49KVy2zf2cYMMhQB7VISelzZ2eTq5qO8k/wJV9ZWYTYmUQrRKcOVVYbrq2zubPHkxz7KE08/zfbOVTqdXnTySnNPId5TSx1iNL5pTYjvX2xZa+PELkhTTWo0SUhIOjk2sgV0ksc197twmsj8v7gu4GJkK86llkQJcVr2ZFlCSDRJ2jyC+RN4cEz8dddbNJuMAmOEJFEYFRBJ0aaP1hXidWSVypMmgUTZGENqbBpZGA7fL0RBiZ/H1Vo9SnRkB1xdM56c4+oaO3HoLCfv9RkO1pnWOd/6zj7f+saLdIoEbI+ZDkhiGKwkbJgNkqnjrcMTjk/eorNiuHP9FsnknOPZmGk5orPeZXUtoBOFnWmmxTmKCf00oatBOQMWtIAoT6uo+EGhtXGFuV5j/vkS3398UKTtK8CjInKbSNb+a+C/ef/Dm+EQQusMnJOlecA6hBjSbl8waYmbmsvYAIK6IHE+RAMw0EYZPNJ6kIkELGghBPeAsdvCN7LAgGpkUy5+tnBsCGEe6o9GOPMolvcOkyZR4qgVUR1osbYhbXUdJSgeVFdhGoJSliXT6RSlhDzPSZIEaciZqOY8DRG9uG6UjYUQe8wHj9ZC62t3xOhaSwZ98Bftlzjx+lY6J42UcyF6o7SDxgBPlMHX0VOa5AMQoWdShqvrnJ8ccnK0z/n5mKPd+xSnZ7jpOSeH99na3uTS9av0hgNMapjVFbXz5IOMq51rlOcVp6NTzsdnFGVFmqakeYYxqjHeo8EujWSxlQr6Rg6mlI6kvCHNWhRohdIpV5Ocyfics9GI0dmYWVFR1RU+WFbX1kiNQasOWZayurrCwf4B9+7dIzWGoiiw1lJVNdPZBBsc+WCAMQnD4ZAkSTg6OmI6nfKd73yHuq65efMmKyur+BBwQWHSaEQ37gWEgLMe52uMKIyOBEead0AEQvOMWvnSonRVoRb+fYGWgIQQiY5ztvGWXkSuQ4iLgaBIswRCl6qynIyO6ecZaWJASyPZCZyenvDqV/+cYjbGJIairnGqiVYRvbDSnJuGGDnv8RCjwToOMFEKvMMHG0lE8BiJ8iNnp5jEo7REH42o5rUPkZCKQjVjQLRgTEZQhjRPSNM+u4f3mU0mrPe6XL7+CN/+9jd59bWXePKTP8LWdjLvl0haA1qbef+1RC4EGseMgaAJXnBesD6gjG5krz6+K2Kj0UFASQDVLLISMEZQCupyhjaCctAxglRnvPziG1xb3SKsPo73Ub6ENoDHBY8NoNMM6oT+cJPHnnyGP/7DL4BJeepjH+PLL7zM6eQYV48RCdjgkEZOasoa6yw6JE1EVqNV4zAgGmh1XaDxvPnGa/zBf/odLl+6xHB9DQkerXzjIIuR4HYcxXGzXKJ/2BEjCgsO0hCl+XXt+JM/+jN+9Vd/hbtvv8Xx8T4fu3mFJ29d5/TkgIP9e0yqGXUinM8qitJRlAFvFd4rbt++yc/+w5/l1u3b1LXn2tXrUfXSSO9aR6JR6r0Sq8YibGXlupEgtt/FCGFrNiqUSVAmAaI0U6nGGYbngnRcXGV9YwPRQlVXHBwe8db9XR7p3SRVLs49QaE7glpd47A+R9uEnbxHXxLyrUv019a5cv0qjz/1BM88+wlW19Yb2ZqOjlQPzgeMuqA9DV26aIVEB6sQ4ztz6bwIidYoLWQqxacZIoEERdodoJP0Pe/doqri3d0oAQQX5YjBRYVHcAiepCFVIXgSozFJIE6xF0RN5mb5AuHkge584HqtaWaUoJWKCqbgAY3oHqIMiaQEUYgKJMZhpJyTth8c4liRQDNGY5StriomkzEheIqioN/rM9jcQCWR6NqR8PVvn/LN14/Y2VzjM1tXef7VGefjY6x2iKrIcZiioC5H0J2wur7FSseQVBlGTbG+YHNrk831BBGFC8K0rAhMyHSHh
B540wygqNJpbbgfGFqCLu/6Owtke8nkvm/4QEhbCMGKyP8A/A5xnvrfQgjP/1W/EdEX6WbzPCKItM3F6BuLOuToCW8nv/kk4D3eOdpg3VwGRWsqX3gbW7HI+99IQ44WAmdVXSHWgkkacuRQ3qPidDk/rwuBhIC1NdYqfEgJQSiriqKYMpvNKIsZdV0hCBMzJYSYC+ca8rQyHDIcDpAmuiKicN42M7SKoekQCZqzLsrtaAhOY4xKI4dpIyyxrwUVZJ6HEPvNRsFAQ3pdY3x773AEgkRJQQiCKwvqqiKgSNIeNYFOmhPqKWtaGPT7lMWE89GYk6MRxydHnJydcH//PvcPdtnY2WJ1c4PuYIDJUjQWPPS6PQb9AdPplNPTEePpmGI6I80STGIiGQ4elJ4/X8KF/FNrgzEa7x22toCgkxylA1nWpdPr0+n0OB0dU0wn7B8eMDo9YWtrk52dHTq9GN1Ls5zNrS36/T4nJyccHR5g64q6rjk/O2c6mzEra7rdLv3BgPWVFQa9PsdHR0wmU1575RXu373LRz/6MS5fuYbJMpQyMQ+uyUeSNlqoHsyNaqXBsph7SZgHhdvxrJTCOTd/ngvv3gMSySCN/CP4JpLafKch+BidNEZQOqEUiYQozSAVplWFU5a6trz62mtMJyNWVgfU9TRKPFtnio/OERWicyRGnZkT6QAxN04MGkFUJDxKKTKdIdrg65rMNDKVEKOIJjEoHQlJS0JFBKM1qAQnAuKxdeD+/SN233qZRy9vY7I+/dUNTKLBSVwATRuF9ijRiGra3RD/1tkhQUfpbRBciIuqDdFIQ6I3W6mAgsaoiqQVBVoLWZ6gdfRvhFATVADrSbXhydvXuHcw5mzvLvtHmzinEZ2TpBq8UFpwKsrD894qZlyTdqE7XKMGKlvx+JNPcD4+YXRUgig6SSSvHqHKK85nJaEoiOabihFOpQnaIFqhVIJScHY24Vd//dcoq4qPPv00/X5OwHLt5m0u3XiUJMub6dS/K2dniR9GzMVjwlzR4h2UU8uXv/Q1/v2v/hr333qD85NDbl3e4PGbVzk/2mfvcI+QCOdVwdmk4qyoKGuwVsDH6NhTT3+MG7dvc3B8zPbWJXauXEJEk+TZPE1BHuIUnTeMaPxprdHGRPk3D4pt4gfNe9h+K4oL339LkxaiEgGkiYLdunWLF59/iTe+8BxD69i8tU7Z6ZKmBm/h2E5YXevR21hnY+UW2hr05W2efvYTPP7EY1y+eoU0ywDmDuJ5uxWAm9NF3fYvi41vVuIQvxEEk0CWa5JUYbzg0gSdGPqJYri6iUny7/rhqqYteA/eIsGTaCE3BpMI/X6H1ZU+WgLGKNKkRs+J7kIz30UQ5H3+tfh5ItCIkhDvCF7w0iGgEK/AOTCexDhSNUFhEUloc4zlvRf6YNEGBxr1RlkWFGVBmqbYqqLX67G2uoYoAz7B4vECZRF46e6Ywc42P/3sGrcq4Y3XRpR2Qm+4waCXs+HhKLEYcayvZVy9tEpHa7KQgBOSTsL2VpftfoIOseMmRYXWNZ0MtJKLkJa6sE8/LFxobpU8GAZf4vuIDyynLYTw28Bvf7fHiyh0492PHrRACG3xANVECKQhdAuTcmtwNVZtaBJ2fBPVUI0EIzTSBa00cxGDj97+IJHUBB/JTGwQC5EthQTFXJrWvFi+NVaJUr15tK81XpsoWFEWUUYhMCumqGYh8z6QZx16vR4gTGZTbG3pDfoMhwPyTpdOJ2tyy2LBDudbiWhss9ZxiWglX74hkBeE9YKktsStPbbtO+9d9L2K4JtoonMW56LuPFrgjUQM0FpFCZfzWO8Rk1K7ijzr0O1muHJCWeSsrm6wurXF7u4uZ+dnnE0nzN4pOT47xbzxBpcu73D9xnWGgxWU6mFMJOWDfpdOZjifdpgVBWU5oywLANI0bcZLjGSKRHlhZR3Ou+j9DCBao0SRGI2rLd45VJLRX11jMBwyGh1xsLdLVZUcHZ8wnkxYXVtldX2Nbien0+3Egg29HsNhn+PjI46PjxmI0HEwnRUUxYyqqsiynH6/z9bWFll2zt7eLqPRCS+++ALOCx957Ak63T6T6Zi6qgBPmiTR3AghSkBD1NArI/MoR2vRxPHejvMLMtca04vvQBs5ap+vKIk5jrTywDg2nbXNb2MEVhOND+/rxgRSKJMSPZI1o6M99u69w9XeICYf+ABBUCgIHvHRk64JceEJoSlEI9GREqJjBhflrEpHb6IrJRKjeoLSDhfcXFoUaD3wF+9WNEoUtavBGBKl6HX6GJ3yzr37pHiublynO1wh7xiMSlAoyrKIEesmetdKL9vIMwJKaWzTtzoxoA0WjRODFYXoJIr6pel78YRGguy8A00jqdV4H2VAyjtAYcTw5J1HOD31fOPseV588RWe/cynkO1NKlujMARSPAbro5RZmS6OApXkiFacjU65c+cWb77+EuVkhEhG3h2gTULtPYNOh0lZ4VzAuhDlzVVNWVvOq5K01yXJuoTgURgOT075td/49/zH3/1dbt68QbeT88yzz/Lzv3iFtTRbGHvv7+Ff4ocHDsE15EEB1nq+/JWv86v/17/l1Rdf4Hj3Ltd2Nnn69g3Ojg8Y7e9Riccq4XQ2ZeYDs7LGuQSjM9a3LmGShE9/+kdZXV2nKCrybhedJpHgtNF4iUVD/F9l6TVreZIk7+MkaNfl1ohtbYN3k8FFFUwzfgVu3LzOz/79f8DXfucr/Pb/+ZusPrXGY598hkfvPIMPCWflGZ/5/CfZvvk0a70nePuVXdafvM7n/t7n6HRzCO36GBU4c5kngJ8RpGB0cESWd+kMVlGSoMQ080PrMm6oW5NGkSZCp5OR54baCb6Tk3RTNgY9rt+4TdbtfdfPVhEiMZaAiG/m9Sa6p4XBoE+x0o88N3i0qgELpPPIX3um+UL/UCwUyQogDlId701JQHuHSEKQHBssuCjXDN7hXYXUYyQ0x4QPdj5pnZzRsd/eYjQMXVVRldU8tSDNcvJOh+5gOHdGNh45lFZMS8f93YJRUfHER65wbUchr82oixF5TzEcZnSNRllPNS6YVQVXh+tc2RjQEYXxGYGETq/H1kqfvhYSL9Q1UbFkBKMCidF4D6KhDA4lgfQD7aV399lfHcRoIW3OZjsU3s39l/jA8AMrRPJetC63+JL5pkDIPDGDqEF2/sEiInHwXMgwQojRh0ixpJnjW99ikwcVYp5XnGikWUyay0tj9NMYvdJ6Oxp5oTEYYwhao1wUSkhznagO8wRxeGIhDaXj9ay1WO8wxqC1wtoZBOh0uqytreN94PT0FNtMInmekRjTkNCmypJ3sdiG0lhrmc1m6CYSocSAitW/fDPRRMPUR6LaEDpr7dxQDd6jtCYxOubOqVh9riWhMRTehng8wdV4W2FVgmiFtRXUFSbNSJIM8RVFVaHQpJ0BXgKdLOX6oE9RFIxOTzgfnbG/f0ivk1NNZpzc3+fK5ctsXbrJYDVWtlMqygX7nZyVfo/aWiazaeyfssJrSLNYtSu46GE1oqjxsZCnVvOKnIkxMQ+wITai
o+dvc+sSg8GQ0fExJ6MTjkdnlNZycnrO6sqQzc118ixDKc3q6jqdTof+YMDR4SHnkyk6MUwmE2bTgqoqKcuCPO/S7XbY2NjAOc/Z6SkvvPACs7Lk1u3bDFeGmMTgrJ0Xg9HKUNW2kXo2Epc5IZ2PygcWuChvvChE0pLxxeqVc5ms8zgcah7BUw3n8hdShyYa2zoelI4RJiUGCYqUQCinnBzuMrCWtNvBOkh00hRFaaqHKsEo1eQaxnbG/ErmUUHlY4EcrzTaabw32GCwrkaUwzqLC428snGQzO8PLsatKEKowXkSJXSznDzvMaksZAlJr48yoIiFAmJl1hid1LqVSl4Q3ZYYqsasNdogJsFJghVDZ7BG6QOpyajqGT4BvEd8jKq7YAlJlHgXsxm2KrG2xlUVxhi6nVVuXbtOOYXTd0442rvPvXsn9LsruMSTJR2CpNggpNh7HtcAACAASURBVEpwFkRSTNJDmQxtDAfH+3SGm1y+tMPx3l1cLZgQyLRipd9hZ2MdbQxKJ9QuYCtLUVQUteW1owMORmdYL5gkw2tD5WacjmeUNtA/KzDZGsqsLDgNWBK1vzMQaoTd0YivfOWrKC8Up1P+39/4D7z90kvMjo+4trXJxx97FG0tByfHTOqSSsG4rhlXNbPK4kOU3P7Ip3+U2488zhtvvclnPvdZ0ixhOpuxtrZBkqegGuHd3FoOD6hRFprVBM2iysQ01ZOb6fA9xz5YOTh+KIT3HAcxyiYhEBXImjuP3WElrPD6Wy/zrTe+ym/86/+Dzc0bTMl58/Vv8RO/8FH08DrPPP5TDDfeIrvUIevlhNBIvAONFDoqPry3BAnMzvd59fkv8dxz3+bTn/0JHv/EZ6IjS6Rx9i406mLSJUmFvGPIOxmTwjJYXeXOk4/xicce4+pg/YHKke369T6P9qJDGuWANI4lJRIrJ+cxiiciaK3QugIshIYSSKM6aNv7sM5f6PXWpgo+kDbqB6MUxvsoPU9yUCVKHJmOuemagLIFgpsrRz5oxOEX+87ZGusqfJPbmyQpWZ6jjIl9tkhW2wVZRfuunAUOT2esbQ/Z3pSooqos59NzkrWM1Z6mg4DXOG8IWc7W2gpbnZTUg68V00qhJGEtzRgqhXFQ1FCWnkQCXa1JVbRHg5qLOD/4TmqwOL4ekMVLXMdbG3rJzH6w+JCQtoBzNa1xOi8UIGHBUxKHb1OqIEbJGmMuyilaiUTrFVZR5jSPOizIGtqoHBdGiTSedkHNJxOldVwSmnC6Uqop4x/zx2JumaAu5uEoG2sMQWtrUh1z2pIkQYWYszOdTiiKAqU0WRZz1oqixDbRj1bXH4KjmJU452IxkzxWn6xtzWxWMp1MyJMMIwqd6iYn5yIC05Iwz0XOn4jMCV9wjizNUHkWP28KPrR3YxKNuEggbOWj9IGAV0JRl5TjEd28SydRaKPB14gPKJ2CNigDSZriipI8ydju9llfr1hb2+Bwb4+jgyNmp2eU4wmHByOG61tsbG6wvbVN3umQqJhb1836bK6tM15ZZX//gMOTU8bjMYN+n16niwA1Ho3C6KRJLoqEXxqZnszHiCI0pNVkXdY2DZ3egLPTEScnh8ymFVVlGU8mDHs9tne26HY6ceFLMrqdHkfHx5yPJ/PnNJvOKIqCoiio6z6dTpfLly81ZabPeO65b3JwuMejjz3K5vYWK6uriA9UZRGjpy7M88PaBaaNCLU+iRi5lcbD/GC+0QNv0mKeW2jyOVvS10aOm5POiZq6KC08H7s+zGVN/U7G7etXeOfFFzk9OWE9S9EmYzKeEUIg0Rpva4KzMa+hJY6impy9ZksBr0jFQIj5W/iE4Ax1MNTeAzZKfX0kb+14ja+7x7tYiAQTMDpuuyDWYYKlm2vyvEfayTF5Rt7rkyqQIGiBpNNBBMqyjo6P5tl5315HzautimpInDZ40UyqQNpbRSea8uweSdLFScD6EnGe4BUuWLzT+BAoijLKQl1sc+0DHk2eJty+cpnjp5/k5No1di5dZ6pSkDJWEfXRGWGSQFk7QlBonYIorLMcjw45v7fL1e0dnvnoRzk/m3F0dIJONCv9LqLVnLQJsZiKiEZMQrbaQ7/2JgdH52gjaJORJilpbkjTjNppbMjY3L5Otzd4wEhs8wqX+GGHcHo+5Vd+5de599Y7dLTm5P593HjMpdUhz3z0KZLg2d+9z9TWTAmclQWzEMviR7kb/PRP/RQ//wu/yKR2dNb6bF++hFKB1aBI0rTZdqLF4rYnsQ0PaRbKmKaQEvN86wV+0/5zHmlb/CxaBnrhGjx4TSUxupMqtp7YZO32GjeevsZj3/gGL7/4Cr/3pS/Q2YTty5cI6zcYm8ClR2+iuk3V3zbw0l7T2sbBVPGdF/6Cv3zui7z24lcRMfyoALZGkvyBiFT7l9A4gaW5b2M0SaLJUDz22GN88jM/xlZvQN7mqTc//xvlljbzfLstUaw7HLckCY1zTSgAyzxYKYs5gQuNfiAaJg/+LRCjdviYc4zCANoYgjYxjz5YEqNilc8gaGfjGvPX38X3BCHEKskhVNGJ5m1DYtOmyJnM7yM0zoLFew04CML5cc3u4THbt6+xMQRqOD0tOJ6W5Jd6DBLBBKCE89JSp4aN1SErWjAWphbOa0eSp2x2E3pKoRxYF5hNJwxSw2avRw4oiVFQhW/ykQ0PfW++5311McbmCgvABh+3qgrzOOxSefEDxIeCtAWi9z16eRarCsWJN+BjDgpRjiWiLhTsrQSQdvJuJ8YYMfPeN8UpGo9BbVE6ErK4X1jcL21RcjkP+rUSwkZGKY2e3i9sLeB9JGkKuUiibqIVFyeLUUMRoapqZtMC5xyDXp8sy6it5Xwy5nwyptPpxGIPRRH3DmuidN570iylOxjgBGazGYKQGIN3cV8pRNBpSprl1K5mPD6nLGcx/J8kdPIO3U4nFkApymYPOEVZVvjGgyiAa7ZGsNZS1zXW1lRlibMVIbhY4rkuOHz7dcLBIVtXrrGyMqTbycmSHBs0SsUKXcF78qxL8FGuZUwHo3M2NnY4Oz7g+GCPWVUyO7jP0ckB+7sD9jc3uXTpClvbO2g6VNbibU1uEq5s77C2vsnB0RHT8ZjR7ITEGLI0i5EfnWC9ixGiVBP1B/7iGQsoSSKZV5osyTBph053wOrqKocHB0wmE6aTKZPxjOmsYGN9jdXVIb1enyzL6XS6jCcT9vf243NXmqqqKIuKyfgcW9d0uz0219dZWVlh//CQ3ft3OT074er16zz2+ONsbW3RHwwYj8fUzqJFkTTFMB70UDP/Mzou9EW0OPh5CfvFypGLZE4p0xQAuSCG0SCP7o+5YiQElGm3i2gK9aCItQQdt69d5uyjT/GXX/k2+3v79IdrsUw0QnCu2RfPzh0pF3vheUx0WCJBMI0FFCufaoQMS0LlY8QsShgvomvtu7Yo+VQiaLGIs6jKY3xJpgNaGZAEKxZJE1wlUQJjLRL0fCuHoohSyTy/yBm5WLBipE10NDSCStg7PuNr336RH/+Jz1PPTqlsgVMeTR3b2P7nLiRZdVV
zPp5QVTWdjsajyDLNxqDDIzevMUVx6eoNXpkc4nAEX1KUU7RRaFyzNYmh2+mjVUKWp2xurPHam9/i5pUdbt+8Qe0Ck7JiNDomSxO2t7dJs5yqtkzGMyaTgrpyoIRPPvUIlzfW+Oo3XqCwwuUbN+kMe0gilJXj/KxG6Q6ra1uk6YdiWVjiewwNlKcV48NzJicjahWoyjGp8Tz98SdQibB3/4DTcsZJWXJelRTBxYqGQdHNcz77qc/wT/7xP2br6hWS4ZBrj97EJLHaqFZtwa4mp1MuMs4kXNCr9yAw3z/SmLjX58OElO0eifMftWeMbK49aCGE4xd+7Ill38Fkhp0b19ncucInP3vGcOcKf/yH/wF/MOPlt1/gylMr3Lh8nSyN+ygaFs4ZArau2Nt9ky9/6Qt887mvs3v/TUJdYJKcaSmIdMBH4tK2SR6wBQLtbqNJqtFGMF64fuMGg94qdXDkoltq912hrWMdyaCKhZ+MijaVinOZSjN0kuKdxboJMdLWnsFdRNkW+vc9TyG0z7PpE+8wBIwYlBhSrcjzFJ0aHBYlsSiKq6PjVz9QoaXJLWtVPR8AB4g5hwpRSSz0Jh6RmPsf2rEjQqxO/iBBDQS8KKhheqSYWcXtyymrWnATzxuHM45KYS3p0BdFGiBYz7lzqJUul7dTekJMAwgwU55uP2MliZE6EahtQFGz2euwrjVdKgwjcFOMpCBDkOz7Et16WNEbL2DnDv+AJpLTJV/7weHDsTo3OWBxHKjGIIU56ZG2VHlMrIcLQnXhD2i9bu301UYmFi/TGH9NpaP22q08N0SWdxGtaHXNDfmTIEgbJQjtshKJnxc/z79p97KwrkbZ2DLVFBgoZpGMJUlCr9dDa01RFJRl2cgic3wITKZTCJ40Tel0OsxmM8bnY3xDzKy16CDYOhKqsqhJshQdAtZ6Sls2UrDYX5PJhGJWxIiFtfPonfOesiw5n81IsoQ8y6jrirIo8M7irMU1pM2WxbzinSSK3dfv8ubuASvrm1y7cYOrV65y6fJ1usMMX4MRyBrSELQ0xRkCtUrA12xducrqxhrj0xEnR7ucjY6pZhNsXTAenXLvnXe4ceM2OztXUCYluChbSxAube9QDAaMjk6YTaYUtiDrdBrPaCurCyQiTV5BQ+ZF4ubowTbFWgJKolFslJClOaenI0ajY6qq5PxswmQ8YTbbYHsrGrRpmrGiNVmasr6+xu7uHgcHB+RZJMTT6ZTR6IR+r0+n22Fna53JbMbJ2Rmvv/4qZ+Nzbt+6zUceeYTaOcqqitX/YCG/SuaE5aJapp7LE9p911qp63zTcHggKhILgGh8E6NuScV8q4wQ940LIW4OH8TjrMIyz4VGgqebJDx25w5prfj2y69wcnrGWn+Dbt4hVHER987i6hrn7PwaEB0rSiJpk2aD9rivohB8Qu01tQsYomQ05lS6B3L0vPMLhDN6cMWXaJeQKE9qBBUU2iSgY56g1DFX0HmLxLo0iAjT6ZR+v09ZRsdFC60VWsVtEpQGlZhYJAXNG+/c59rdfR69cpnR8S6CQ4UqHifS7C0YJafOBWazgqPDI958801uP3IDrw258nQ6hsuXtzgNiiCevYP7HI3vUk/O2Nj0WFehkyYXsMnxydKUbp5x5/YNXnjpZfCO/b37jKYFVoSNzTVcXbF7uMudO4+wurlKlnYQDKPTc46ODnHVhJvbq2TPPkPhE3Zu3KSz0mNczri3e0A5O0CCoSptO5HOISILZuZ7zWYWDn+YB325vn+wWHwi8tAvmlWyhHsv3yWxiq5OydJAttLj+pUdVrdWuf/m25wXY87KGaOqZGIt3giVdQzyHp969kf4hf/qH7G2MsTbml6/y/rVraZacqQlrlF7hL+BiirmfntMkrC2tk7a6TwYUFtw4Lbkp13r21lqTglbbjW/eR+9KVrinzIjVApRayhj6G+t8/f+4c+xe/w2L796l+cPZ7y1d07vJz/PtVuP00bwQsNW6umEL33hD/nTL/wRz7/wDYwRzs7HlGXg8pUN0s4mmD5t4bQwJ2xtCf6FaKAIRiuM0mgDZ+MzCl8xUDl1WUFwmCydy93bvnpYhKPlHvHEClGaQNyjM/4XcE2BKCHgXUUkag/273f5xLiIUMW5PTGRFBknJInGpAZtFEbHed/T7C0qSVyjH+yKvxXCu/6cd0MbIVUS+0Sa6rjNPnPNgkK7xZQ09kF7soDEWGQF4xPIOn02hgk5MDmreHVvwsTnXEp75GiUJ6p0bAm9nGG/KRbnY/S4VJa0o8kEDI4QDCaBO49cZqXb5fIwJauO8Wdv4KoRyfAqknUgiev0/O1ugxThwX9/V29c2+nzQ98dSb3Y9zWEwKwsObUlAVjp9RGJEVTdrO1/k0v/cOD9BuXfxIHS/uLdqoB3n+Nhbpn3iL3fgw8HaWsQ2sktxMRlUY1xSXyxovQwlkBt9yQLIk3RkQC+7YQLiZiIv3gxG6MwhJioHw9oZBVNnltMEm433AZBNbp4wbvQVF1sCpM0EUEXHDKPXEQj0zlHrqNe3NpoCFXOU8wKQoAkSdFJgvW+KbZRkiUpiUmwtcU7T5IkkXgkhk6nh/dgqxrvPLYomZUVhY6eyX5/SK/Xo/aeWTGjdpZev0eWx724RqMRs9mMqq7xzlHZGjdxFNMZ1jmyXjfmOtUWX1h0gDzvYIOjKGfUrm68VnE1dt6hEoXHs7u/y8nxCa+/9BobWztcu3mHS9dusrGyAlowiSHJckQprLfxeTQy08Qk9JOU4doK5fiMo8Mjjo6OOT45pdftMxqdsrnxDjs7l9jZ2SHvdDE6xSMMul3Wh6tMphP29w+YlQXBKdJOHvMBvUPpWE1SNwtoQHC1a7y6Cd4FvHUEEUzWQbRhI81YW9/g9PSE48MDprMZe/uHnJ6eMxj02NraIOvk5N0uaZaTphnDfp+D/QOKIpZxLqsqVgitSrIsZWUwIO/kHI9O2L/3DuPRCWcnR9y8eYvBYBj3z3GxYIf3YR6lbCPFWgtKfJMbHQjB0hYmcd4hVqG0arajaDfQDfOoGd4RfENG0M3my/FdESD4aGSo1pPqo0wwhEBwHuNjJbInHvsIq1ubfOu5F3jnzXv0syG9YR/ny0gArQcnMQrXSiSJ73LQNRaLkIDT8V0LU7SvmLlAkAQLWOew3s/fe2/B2QChzVV0UTjkNUF5El3QTS1GKfAJvo5E0RMo7Qxra8TFSpqn4zNef+M1rly6xMb6erTlEFzwZCpDh4Ailr9XGpTydIYbrF36CH/x3AtsDn+E3somk9P7cb5p2hRckycbojIgzRLcrOSVl1+hO0zZ2NmmbPZfHGz0kKAopiX33nqVL3zpD9gKwqUrT1M7yJOMLHOE8wpchUhJbVKuX7vN9uUb0BngR2dMzkeUCGenZ2ijAMtbu/dJOkO2eh1uXLnK5Rs32L78OJOjIw4OTlDJAJUP2bh6jdWtTd7eu8f+0TFCRVWe8OYbL3I8ukKWJ4BCSdyjMUnSC0fXPIIhCzbfxfKzqKhqoyMPUrslvmcIF3/E97nZfypEQiMhEp
rgPS/8xUt8+Yt/Rr+TM+tkXLm8wdbGgPW1Pod7+4REUSo4txWTsmJaV1ijUabDnSee4vN//yfJBh3EKLJughigWZ9jgKvdHzQ2KEZ92vapOeWCNorUFOdQGpV1UHlKVwzBaFxzrodDaN0Iiyb7g/ZoACy2jtJjTUYxmXG49xreGa7dGqKSaKxnq33S7R1evf9t7u1N+dTVLtXJKeamR0ssJtZUUuJsdMx//K3f4o03X6aqC85txWg8AUm5cfMRdi5fI4RmuxIig70o0hAu8pS8R1wV89TR+HrK3Tff4tEnnmRvdJeD1+/xyCOPs769tRDkawnbu8y6cLGlS1DgtUFEk2iFwYCYeQVEhTTKB00kL+25FitxLjy4dw21NsD2IEkUtBFEaWprcKJQ4tDao4whoQ8kJLqP6G587i1R/B6FbFpKP+efi+2V+Tcg0fGOxK14VNNzF3NWoCYWHdaAD0IVYFKWDDqGoRZMDbu757x5MGHmA52OIc1i381czdRbcmPIdbRNxYOfWvJqxtpqSj+xaEkRFVhbDXz22VUy7VGz+0y//kUmr3wD3xU2P/VZkmtDhBxCJ7ZOLtb1xbuMUcMwv8f41cPM//Cu79TFx40z2Nqaophx7/47vHX3LndHJwzX1/jEM59ge22T0BA2s3j6uX74h3WODw/5e9tXf11qQLiY54LM341AdHi3zs25ArD5lW5shYWNzN7TkofhQ0HafAiU1pIo3VS6a27NRVIWJFa98t6Tqihf8j4gSsWy/42EkRBzmOJ+SR6h2YS2mTwRFYt1hGgMhmb2EcBo3WxkGEuyexdQSjchaktbyjw4BU5FYzJALR6v4rWCrXGupq4tM6VJOynaJBilCd5TTAusdeTdLr1+H2lIG9qglY6kTZsofRSF9+Csw7kSbRICGu0hEaGoa+piBkmCzjK0jpGO4D1YSydLSZOk2QssNIYpFFVFVZTMxhMS0XTSjMFwgDdRsxxmJaqs475iKlb2c0lcSAOgRZOYmAvkCdTeEyShrAK+HjM9G3O8e5f9t1/myrUbbGxdYm1tnU6vi0kSRBmMEXzQUbZhorwQm9FN++jeGt31c85OjhmdHHO6e5f9w/vsH7zD/v4229s7bG5dJ+/+f+y995MlWXbf97km7TPlXbvp7tnx67ALgICAJUUsAMFGKEL6EwWJgoBQUASChEhYAgsQJLGLHdvT3pd/9fzLzGv0w735qnrczmKXoQG4N6amqsvky5d5895zzvd7vt8eKEnVzMnKjGs3r7KoawbDIaPxGGNrkkSDysK1tBaBINUJVliMD6IlUipEqrA+qGYqnRJ/wOraBmXZYTA44fT4mNl0zqKqWNQN61sbrK6skGSaVdWnSBMyrTg+PmFR1aG3S8nYHzcnNYZ+v8ulnS3G4wmj0Yh7t28xPhtw8+bLXL58lazI0UmCMQ3ONnhiI3n0b8MbWu0uT4NUQagmSOAH8ZAQBUWbB++QviHxAA4pfNykgo2GtRYhoz+g9AgX5rjxoX9RCA9CoqSiqwtE11HXNTc6JRv9Pt8rv8/9e4+YVQbLBrPphKZeBaswDRgLVihqmWKlw6o5tWqQZHibkiSepj5jNbU8sYo67eCUwvnQVxmooBpQWBuFI6PwjPQpUhfYZICfn1EmFXkqSVVJITo4VWE7ioWdRQSsVXv0vHfrHc4Gh/zsN76JBPKySy0AnSAFaG/IlEcrR+IW1C5F9V6iEI/4zn/9gH/+C29RZhJXBWXKxjVoKck8dLOEpAg0nKS2uMbywYfv82UN62vhdYSEUoExFWvKkc8co0YwnzYo2QkIpJiihKNIHElaMU+6rKqcrLfLNCnJ5HMy6ZkvDNXMM28MQoyp8g5mnpIsKjreUJYpIrtCZ32HYmWPO0+eYXVOJRzpSg95loH0aF1TLZ7xJ3/8u9x99p9YXVsjSzoUeY9Le1f5H37+F+j3epGKHotaSJyQqI9sai1dVOBR/mJq9+ML0n4yPmFz9x4vgk+XxyOcwrsE7yT37z7lX/3u7/H9D9+m3y1ZXe/xla++SbdIefTgHouq4eRsyGg+gyQBr9BSszCelbVdrn7pTUSv4MxOuX7pdda3N5jWnqaRZBkgWpvmj55SVG4WUXDWA0TmACxpk6Td8IjGY+iPzRPBcg5d+E5Y50xA9X3ouTPGMBkdobRlPhmTl326/Q7S99jceRmts1Bw1AbpXDDylppRI5D5Nj/zC7/G9Utd8iT4wflYPBbOY5oG39RIA6nImdceJVLSUuPcnKaegTMgdLAsajOCFplAxOTWoe0YJT2NSxC2ws0XPL/9Pvv33qWZplzau4IQOy+820+6+y3C6IQDDU6naCVJAWE8zisWTYWUUKQpzjZgCyCBJQE0XRar22v9SWTWkASJc6KREIE+pywq01j6qKxEC4ukxosU4Xtoq+mmm+h8C4QONgniBwXDP8wIFgvSx5CP80QscCdCHJgioJmDaHBJgqQLLljA4AMdw+KZI8hRWARzCZUY00PTNaGo+uRoQoNGScNqT5MlCoNj5B2V82zqlAwwyiMT0IuKV3sJ23spZbnAoVHeoJshnelj5oN7LN57m+SD26xPKuqtHuLmNlzbIiTXezhktH6yKPT5rcKBkDH8J6KEPqKgAgg2WIGZFtdgEeHwyCZrGsNweMbp6QmPHz7g6OiA2XxMYywLJN7BgwePgw5Ap4sgeg4u88R/CpCbv/DBhc/t2vNpf3Pxc/zKtzOy7ZmQ4BUWYlOFIMcjnQne0g6E1MHu6wfskV+IpG0+n3Pv9h3WVtfod7rkaR7U26QMSJqI5tBRPEGo6NWkNE1jox9ZCMAD7m7PudIQr6J4sQQce8xaMYYINxBk/cNjjg+BbzCDDPQpIc/l00MFJ/yetQ2uafA29L5572maiizT1DFRwnvyMqfslCRJio1cZ6k0KklpoqSycYLZdIKQkrIo8B4W80WgU+SaytYYHJ1+F6kU80WFG53R7a0gpAw/b0CZZOnXVtU1TZQ5dxFlUkJinWW+mJMkBZJAz3LOkCUZXilsHXqCTGNxNgRhQTwrSPhaa7AIJBoTzZzrasp4PODho4esrW9z5eo1tnd22NjepNdfJclStI73F9BphpMK6T1pktMpe2xubDGbjNh/9oyTkyNOz8aMx1OOTwas7p+ws3uF3b0dsqIk9CooiqIgy3M6nQ7jyZj5fM64mZDoJPQ1+tZTLFQcW1PulkfhncMSRCiUTBCJJs1Ssjyl1+syPDvj7GzA9OCI8XzOZG3C5toK3bIgSRJ2d3fp91c4Oj5BDQZM5nO0tujG0DQ1o9GQLEvpdTuURc5gcMbzZ8+YTebM5zV7Vy6zvrGG0jokbl6QKB1Pz0VRlfAhlaIxBlBoJdvCepjukcLrcQgnsC6I/LR9ne1CvRQleWEEERGiOiVCgBRBWr8sSZKEpmnYWF3l53/uZ9lYW+fv/u4dHjy4T5Zn5J2ctbVeVHkMC5mL9BDnLS4iAV60dNWAnBsKHHnowUNEn8BQnGmtHAQhIFOEar61Hq/AexMSssi5UdG/yRJ6TYk00JZKa
pqGv/nr77C7vsGNG68AkqTbpVXcbIs5LfHKesFgNGd7dZVH9+5y7/5DXtkJbADrPEImwaKkWiClYHt7k/1HY8bDM+aLTaoR3L97j/SVV+l0c5QOQVu3LLl59So3r73EvTuPefL4MXVd0VHB+9B7g/SWRHlQmiTNyJKURkqUgE6n5Nor1ymzNUbzmsHJfd5+ts/DJ/tsb/e4vNIjTTNG4ymn1ZS1tU3WtzYYVg3zesbjJw+p6wU6kVgbhAlGpwPe/ZtjQJCokl53jRs3XuWnX/06Iu0i9HkAioxqevjzNVYQFVBbtTG7nFcf9YD6yfjRx3JJiHWJ0PetEMZjZRD2OH5ywO/+X7/PX/2Xv2YwOeHN118l62QsmorT0yNOzkYMBkPGswWV9dSNRWUpdt5Qdrq8+dbX+ObP/DzXb2wim5qiu4LIUlLhqC4y7D7HebZL0AsJiI979zIv8ed/8bEDi4/96yL6ZBrL44cPefTgFm99+RU6/T5Z0UcoRVoq0AFhCihkmJvWOYRQPH+yT775Bt31dfJOAiLEAReTl7ZHKtFBpEhri/Kestvl+HTIaDxh99K5FcvF998mQuHJcCjvQq+v0CQ6IVEZTV2Hgp3IQqxxHld/jhF7t1shsljut5FVlKUZvW6PuZiiVbRzac2lRRuYnqPonzjEhbsTMwQvIU0UmZOIOgSVWjqEN3ghKHurXLn8JV796uusb18Cof6bxPYffljX+AAAIABJREFUD61bGa5I83fBnobBPubkLou1Dp2db4DotikfGEmqLFo0KBv8YOsFuBoqIajxHJzUyLzL1csFTWXYUn26NjARKhOUwXtZQ5I4vG5ohKS87Hjz+jWSdc9CDUn8HHV2jH/vXfyd77E4+BDRjNCzEIvZhUEMniEWT/CFBZHj6UdkxgD6woP30STbf+y5kEqE/TgCHNYqrDOMJ2POhkccHDzh4cP7jMYT6oUhTUqKrIfMJEIYvPVMp9Ogf9BpX+Wf2vikQtHnGRcSNheKyuEptyDcEoBCKJTQCBHSLtt4BKHtZzGrOTo95cPb9zg8PPrMV/tCJG3WNDx++Ijj5wd0yw7dbpciLynKkrIs0VkWEziPU+eJq1IKqQTOxAdTyhh8E6sQ8lMh4rCetX105/5VIuj2gwgSsc4FtSMZ/ay0BiVD0CkhBLdtw68gCiWE6rJpKiYTGwNAuexh88JTR/EGJyFRmrzsMRmPmQ5HKCGpG4MxBu8IiJH3FEUBUlBbG5CxVKN0gnCGSb1ANTlpntPgGI3OmM6noboiJV4EpSoI8HeWJXSKgmZRM5qMKLQl1RrnDbWpMFOHdhnWBNESUzV4E/uKcFhTUdcLmmaB9QZJMA9XeJSWNN5RTyfM5xVHR4d0Oh2uXL3KlWtX2b10mW6vT1rkIEAJHaT5CT16HkmS5PRXNJ1On529y5weH3N8fMT+8Rmj6YLD40P2D7a5fPkKO7uX6K+sIlQIotf6K/R6XSbTKaPxmMV8zmJRo6RGpGopGCNEq7AFoHBe4p3hvMcyJA1eSrr9PmW3Q94pOT46YTpdsJjuc3R4xO7WBrvbm2RJQl4WrG2s0e11OR4MwusvKmZzS1U1ceGr6HZ77Ozs0usuGA4nvPP2uxyfnnL5ymWuXbtKlmbM6xmLpiLVSVD8EkEcpK0baq0iHC9AOKTUMT+JMiJCxmTnggrZ0mQ9Piuw9BvEh41etJVx5+Kmp17w+AvegJ5Op+SrX/0aWmd897vf571336aqp7zx1uvkhUYpAvrdbipChHhfOYTyCC0QTiOSDsgaTxKSZXFhAxI+ChOEZ7JtJrcuJICNsCyqOoRWUgazexnM4b3xYCGROvhEOUeqNVcuXeb2e+/x13/1HVZ766xvaUgakiTDqrDFKylRnKvJPn72jH4lWN3cZDqrqBaaLJGkZcl0tghrQwZJbdBKILxhMh5wePicnc4Ox4cDeuU+V69dJk0VDY5EJ7x880vMf77BVN/hcP85p8cHrG5cQ8W+Wy09mQ5ritaSVAeRB6lCYjs8G2I6GisS0jxnc3ObuwdPGI1nLBaWIl+hzjMePrzNaDYn6fR4enyCT3J60YuxWsyZjMZ4UwehlsaiVUqSJui8JOt00WUGWuBkDIGEDNnZkrsVbxcXEjki26Gdb/+Yi7BfwLEsbrf5jRcIr0F4HA7hJQf7h/xv/+fv8P/+6b9nthizd2mHb33rF/nzP/1jbt+9S6pC3+NoPGM8ngOSNOvRNGO8lmzt7fDaG2/wpS+9yo2XtpFNQ94JiGuaCnSLmH3aOcbPEk9r3xNi/fYnF/ps27fj4t6sPm+SHzsuBehUsb23S7ebsbG3EX07ZbhGMpxRS1iSXod/OUev7PHazVc59CVWeyyfXGLwPlRzlNZImaCSoJjsRIet3eusbe6EBMhHImQMqts1MJAe4g1z4Tp45zHWoZKURWWoY+IcWjfaG/xiEvgJl2CZ47bKkW1Ar5QOSokqIU1zrDAhpggH/cxDhiN8fIgLX0il0EqQCE+uBYWWZEqwstJj86e+xhs3Xmbvyi69jT5euU842o8+lonZR07YE6ad8iFp82cjZv/5PzN78JfIr9xErN7E5znBkDxFOBHUMFUNLg97pnHMpWOA43unjtt3D0lMQeNynJGsdnISIXA1jEcNWZ6xupbjhMCiUNKhVw1r/gxbzfHmAHN0n+a928jvv4c8PWRVz0HVeFeDaBBujqiPoHmCy+cgu4QJo+NcimZuLzx97TyJPAfvggeviPMwUvVm8ylP95/z8OFjnjx9xtnZGVmW0e10EKJAp56sKEizFIfHWYF1nrppgHN7+39ay7n4lK8/6d8Xh1/+2EPQECBKvPnoYUtEOkQogjsPjXHUkzEnz5/x/PkBH354h/dv3eHWnXtMxtPPPNMvRNKGB4xhXhuq6YzTkxO0TsjSjCzP6XZ69FdX6K/0yHolWuvQXGvtEva+uPgEHn+LrH0c8lzyTeMFdzbAx0Gzw2GtAWuAYPItCZVjb4n4TFAnDIlh9LuKLxHWS4XSCmMbvHBkaYaSwRDY4XGNQEYYVChJ42wwFE9SdAymEyHRiQuiGQRvFSFCVVBGFKQ2BkfYQDKpqI2BpkFoRZqnmLoO/VFRClwrjfDB/Ni5oA4ZyuKwmM+wqUZaH5Qnx3P8NCR7SilSpamExFsT+tqw4cPZKFkP3gcDVetDcC6lAG+p5lMW8ymz6ZgnTx6xvbPD7t4lrlx7iZXVFcqiRPkUoYOxqncqqG9KTZJK8rxHt7fO1t41BqcnnB495fjokNHojMHZKc+fP2dza5udnUtsbu/EJEaxurpK0e3Q1A1HB4cMT4c4YymKgHQGJElGTzAXXlOopQm1jabQOs1jtaRhbWOLXn+dk+NTjg72mU4qnplDRsMhW5sb9HtdiqIIlgV5TtntMBwOUaNAQ6zrYO0wm80o8g6dTo8sKzkdDHn27Dmng1POBmdcvnyZbtkhSxMEkqY2WAFpkqB18B7TSoXZGPve2nkul8IlIGMC57wPi/bF4FqeS/u2CLJwvCAAEvzNotJq/J7WGpQI52Qt
r7/xGp1elw8//BDvGo6P9tna2VjSM9ptpU3apASnHD6YuyFUgRMN3kq8k0tT2PaRDo3zLX1VRbpvqPB6IWmsR6qEJE2RKjTCYwR1E3AHZ4lJqSNVCZsbG1y9dJn79+/xnb/8K/75v/w2KzrD5WFxlUKRqkBVxhN89LRmOB2zs3WNsiuQskZqRX99C6umzBfjWPTR4CypFqyulMxmY06OFDurlzk6OKVTdNja3kCpcF4ba5t8+a23qCvDs6cHZElI+IRUKAWZcnR08L9DQK4FMtWYLCPNUhZ1hfMTKisYDU+oREKnu4r2c+oGRuOKlbVNnjw/YP7gMbvXXuL+4yeknRW+VJQURUGv06Xb7TE6PUHKhCwrQWhmxtJXms76OqqT4bRodXCX9yf0C7Uz6pOX9mWg/pPxYx6hHcBHpEQ4EQSBlAAlGByc8vu/9/v8yXf+gtlixGq/w2/8T7+CIBQv9vf3wUK1qPBWIETGm6+9Rd00vH/7PZoK+qtrrK2skyYFvW6XhDZZDEqMcrmvfoaj1DKu8fhIDoIgRuFcsPFQOgnn4T1ZXsR97/OO2JkkQj5UlB063TK2O/glstO23Sx7zFxr7yNRaG5ee5n5cIpMgnhREjb+F9+KCwGxSjRCJegkQXmNUD0uX3uF/tomxrkA6NH2c8oLbJ+YDPqghK0QKCHJspw0zZnOKxon0VmBuiCS9APHRy5+a/djI9XfC0nVNDSmQedqKVAWznIZEH3i+OiPXvg1KdBpQpYnqIUhlZ5UeNa6Xa594xJb/Q2KJIvgvCM0QsqPHuVHGoLze3sxfXHxZgtBVGMR2JMJ5u5j8gcPkD2FePMYka+FeK/xoKOYGR4vPcIJjIOh89w5HvD33znF14avX3+V2eKMLG9IOhU+tahGsret+KUr18kvl+R4lBGIyQnc/y7V6TPM9Aw33sdPnpCfDZGLCT5b0PgZWgmUq8DOEN0eYm2OF09xLJBcQtAJdE40F+PZ0CmpYhHNhtahdl8HGtMwHJ5xcnzM8fExB4fPeX74lMZZpEjZ2tkjz7pMJnOSLCcVofe9cnVQLhfJUshGKx2us3/xDn5Wgv+FHJ8KE37KPvaxb/tP+DrMGxNBIyUkWoY1w1vHZDph/+iYx0+f8fjJU+6+/11uv/f3nJ4OGIzG1I3FxALOZ40vTNImWml+Ed6krWsq62iqmmo2p1oEw1qltuiv5CgpMM7SelmF6l1s0BQXKlu+Terc+YtxnrBBkDX1Ini7eecwTY1rgry9EATqU2v2iw39SpE5JlzbAC1w1sZEUuBNjXcWa5ulnLHSCVqniEUVKJE6jdVEEYRRfDAA9sYuF8o2gDYmyqpjcCI8mHiBS7PoLxdgb+MrrHdR0UljbRRsaQxNY2JiGRqQZ7Np+LcKNMm6qs83fS8DPc36oCpobHhohUCnaViok4Q0UYG5WweqnkdgYv+Y9hJr64D6aEVdzVksZpydnvL08SMePXzApUuXuXz5Misra3R7K+RZQaIThAzKVx6Bl56k6IIuyMsu29t9To/2OT4+5tmTJxwfHjMcjjDGUpQlRacb6TYClYRg/kp6hV63y9HzA87OBuR5TlF0SLIMKUJfZW1MQDikjGbmEhl9w6QIoil1XYMxbG7tsbGxxcnRPk+fPuRseMZkFrzjdnd2orhIxkayTqfTod/vMxyeMZlMmM/nWOtYLCrq2tEpe2xtblLMp5wNh3x460P2nz3n+vXrXNrbo8hztFTBNwxJY0FIRWVcpO1G83RrISLCLyj9SR90uPHLhN+2XUfL5vYQRITYR7BErkWYgyr21TVNE5MvQV5kCNHQNJbr169y+fIuzlvqeoGxFYmS1Jw/OzFvChVADNaD8R7rNdZmWOupK0MaBQLCoxtRTyWC2IYI5+WEoPYWocLXoqVXCY9SYeFz1uIjpTwgU4HunCYJN2/cpJnOuXPrQ/r9Nf7Zt/4FeadLliZIqUlVQioV0gdKZm0t2WpJVnZQ2mBcg6sNjVf0NneZH1iqeoYFsjQB2/DyjWt0d0s+uH+XfrJOIjRPHz9nfW2NIisCOicl/W6PN159le3NLbqdDLzFG0fTLPCmIsGRqAQvBZmCTpYwLTIYB7EgQYK3NiStToNokCKl11tnURnM6ZjT4YwHjx+jO2sMxwvWsxVm0wrvJErl7O5eZXNtG6JPY2Mcz58fkTjBatkjFTpSuVjOLE9AZNsekvb7Lb7mUXghL2r7/WT8OIeA5dV2mrZOKZzn5GjE7/0/f8Af/oc/4uRon9WNPr/5W79BKuD+7dsI48lUxngywdWh0NlJC1KZkZclWdFB1mN2L11ib2+PPM1RsecsgKcCRKBAi/PqyicGQ+f3PcwO8Ji65vTkjMFgQLfTY3NjO1DwnSNJsiBE9rnythd7r2SMZ4MioPoIXOZxy5VPEGg7gsl4wnS6YKPXo+sFqLBOftQTLrxcWD91ohFJiqoqhJMU5Spp3ouBgccJ24rKxxFLG54YAYooZNR+BAXG2tY4Ea2NZOvHeaGA9UlCJC+8w/b32r8XiGgfYKMCoZSx+/CHlG9sk6LlX4XgCJ0kJGnoUyuShBQotOTS7g7K6+CtKQRIFTthz4/34xqflEC0gikSEYrsViBqTzITJMOUZt/AaIjondEcDzFTSPeuIfsFjoyFgtSGwqd0K0wGDYPZMV995RKFmbOePuXltyxFfo/GOFTSZe+yxWNwTYY6rGExg4MP8Lf+EjncJ6stLBZoMQFX42hYJDVGW7y14GtEV6Ff6yPe6DLvTZEkKBaheEnojTqf2CEmvZjCITzGBurjYDDg0aNHPHpwn7Ozs6AanqYkeYdEaYoip7uSU9UzKjdCCIVzClcLPAqlSoytsMJhjYkAhvgEddf/HsdHEzePijZJCIetFwwGx5wc7vP86VNu37nD2+++x4NHjzk8OmS+mGBshXOe2liSNKOxQYPis8YXImkTAhKplkGiFC2FTQUjRJ2RSAXO09Q2BKdRBlcrcY4e+DiZvFwuLi2qFib5eaDaBqftMuQJSSA2JC+L6RgtPGmiwTkaa0NPjjUs7Qm8wHkROOPRRLexoZ+ushahw5HrKiaXKggrKKVjpUvGvq7gB9ImC01dh4XcBqRCSdmK1IYFwTWEgp9C6xqlE2Sk/CklcN5inWNZ4PN+2V8kRehjci4kVq0tgHcG42zwaxOhAumiapaMSaf1HgMURReRpWidUBR5UM+0LqI6wUPPWIejJiEmQHVAD4Kao2E8HDIejdl/+pSH9zbZu3yFl67dZHNzi6LTIcvz4C2jNIH6JkBYvFckusPVl15ifWOT/WcHDAZnIUB3nrqpSUzDeDZEpwlJnqGSBCUEW+vrdLOc4dmQo+NjDg/36fR6lN0OSkkSqYK5uFKhZ8FZnHVoEeh5xnmQiiRTeBPmzPbOLkUn5+joOePRkMHgGZPJjI31DTY3N8mLnDT1rK2tUZYFg8GAcaRMNrVlsWiYziZkaUanLCmLguFoxPDsjNu3bnF2esq1a9fY2tpCer28l4kMj67zHhF9kUQ0o/dxj8S3ML1vO8nQSgU
E0UbELD4LUrZ+hCyTsranzTgb545Aa720FBCCoHqJo6kNeZ5QFD3m8ymjkQnJfqQa413sPw2JnxMOmcpYYFEkSRlkqL0MCajzMSZogycfk71gUB/QUEtj7FIUyPpA27WuxkX0uq7Cuc/nM1DhmGvrqzx9AFevXOX2B3f53nf/no2dy3x9ZQ1XRjRaROqXszSmZmFqnCpA65gYJszNlPfu3GH7yssUq5uMZyOUSpHeoYRjpdflZ3/uZ2i04PndQ7wN5/7k8ROuXbtCp1viPRRFzt6lHXorPfJUh95YocM9szVFpMGiFKkSZMKySDQOyLMMfIqyTVBATXMQFUla0F9dZ+/SVR6PRjRWkWZ9rFMImSFlRpZ20Tqj29Vk1wq0kJRlRlNXHBwfczqaMGtqptUcg6MJdaKoghXDbx8+JyKusAIa70JC7hxZrOiny4U4Fs1+IkjyYxlLpknsyWmMZz5e8Id/+O/4gz/8Aw6PDtjb2uCXf/3blFnKrbffY3A6ZDGaYKYLlCUkTevbXL50BSUVT548pW48K6sbfPuXvs3OxgZFliwVCgMwE6r6IQHRy8LM0pjXh+Lr0n7Et4VTh7OG4eCUuqrZ2tyiu7JKmuZkbTIj5A81P4S4EAPEavfyz5cwkUMKG1EJG3psbQESRmcjRsMxt56fMO0nYCp8JpYqkOdIYXitvMhJkgSZZzCdUnRK9i5d5pVXXwlncDGz+aRsIn7tXSjEemfxLqgBd/o9qkmK8AlJkoWCkzx/7U+fCOcn2RiL1glCSJI0wwFpljOynnndUBSB0nghOPqMi/viebfBerisgZlAojBAVpSI0ZRESPplJyiJSpZUTb884EdP/ce/JnzsbQWJcWxjGNQJplpHztbZbipSO2B+sM9sVLGxUSD8Dk4U1AhS4egngi/v9Dh6NOL1zV2+dbPEDt5B7x2gM4HdL1HlDURnBcwpp7f+jubkkLXTBoZHJM1D5OIhWs5AdvFFjqkbtHXIxpKroKptUlis5qTXV9FvbDDtNFTC0GMNgcb5IBjjvAzotBR41+BFhRcJdWOYTEYcHOzz6NEjDg+PmM3mAb3OMja3tiN7yGOco9dfYWNzjcHZgKPjI4QIDCchQiHU2rCvSpXgXY2OcetFRHM5xCdP+S/qWKrNn1dDwvcvFCCXQ4Se/DDdA736ot6jjc+u857RpOHZ82OePn7Mk0cPeef73+Xh/Xucnp4wn8+ZLhbUTRAMTFFIUYR103uaWsS4/rOv4BckaRPkeXZuTC0lUrZJW0Kig1dRmiRhDYl+UkFwJMrH+payE0rBrVxwq6Lj25qL8LE/J1QkfKQ4CqHBm9AP5AXeOhpnlokfkcroYuXfeY8XEufjh4WmcQHQUCHFaupABfE60s0aA94gfFCrTFSCs47aNZCFAHrZl2d9TNZEFNGI66cN/i0ChRAaIz1CmCBfj4vvBYR0GG9DZSsiFm3QtHS388FE2BpL4hUi0TTCs7ALIFT+hHVBVdAHTrNRnsp4yk6OQyFlQllkyExiG0dTNzTGhOq6B+MMwoXXk8ovVcGEUHhrmYwm1FXN0dEJ9+7cY3Nrm+s3b3L5ylU63S5ZHtAwH9U8QSJUhkDS6WkuX80oyx5aabIsRQrJoqq4d/8eJ4MTXrp2lct7l+iUJUpIOkVOlmrKTs7JyYDBeEQzrMjynKIoSJMkICu1JU1TkiTFWRcFV2xAo7xHJhKvGqyDstNlS+7R768wGY2YTcY8e3bAeDRjfWOdbq9Dnqd0Ol3yPGdtbY3j42AhIKXCGMd8McUvPFmes7bSp1PkDE5PefToIaenp1y5coWXrr3E1tYWaZIGNFO0dNxgwbCMU5Z9a4RnQWu0kEvvM+dd8CRTMiZVwV8wFDTaHjdi3184qo3qU+3XSiuUEqAgQyFwWNtQVw5ragQ+9mGpSI0JiZg1YAw4aclSQaolMvZ1kim8E/how4C1gUoZ7TRazRjngziJ0hKh0kDLdQKtE2rb4KQJFEspsF7SGIuMaKHWCVIGlc9MKDrdLsPxnOFwjHfBbiBRAiVAx0KJtQ0yVZBKahfsCKz3yDRhPl7wd+9+wDe/8TV6a5sMqymJkmRa4U3DxsYmX/+pn2J28jc8339Op1dy/+FDjDW8/PINdBIoVkWvR1YWCK1w1sQSk0BgCWmxwAlBmkhKJRkpjcqC7YQSJU42GFNQyQykIM1T0jxjdW2dqihJ0hJjBmR5l7VVRZ6XZGlBUZQYbWh0TpkXbG6vczYb8HRyykw7Kr/gwekzPnj2mP5qH+tC8aZFAYqsYLPXRwixdLSaVzVHg9MYfys2VldRWYb+gRHiT8Y/aHiPFcFHtJrP+cM/+iN+/1//Ho/v3+XGl17iV37zlzGLGd/9T3/LbDhjOplzNhhijWNne48vf/mrXL92nW//8rdZWenz53/yH6n/4N9w+8mHrK+s8rW3XiHLVFQ4a4uhIiYzn35P2znSBsyosH7U1YKiLFld30IlKbS6k6Klzf2wYZ9Y/slSgMy3eUxMaP0CRGgHkLaiqSWoPHiJZhk6zbj34TPEWkpiaoRLlv3OoUAbjh/806DodJg2hpX1dW68+lV++7f/V17+0mV4AUkS5+d34VP4uhU3E0GN2UqKsqDT7zId5FiTxIS33S9/iGsBKB2LuFqHoqXOsAQqPSjUBS/PGD597rE08ZCRDuYdKiswk4Sm9qx0VlnprraGDqFcGNFC1SqrfH51lc99VhffD1xAJT2gJVjP2XzGO+MFE9th+GDM1+/d5ad2eyzGA+ZnU1x1hHbrSFeSJg6pa3Qn47XrikJukqlTNtP3OKn/A9V8H8kOa9s/Rz/fAt3B+xFF0UO757jhiHQ2RLgznJ/j9YIGhTECqUB5gbASmgqRSbKrG4gbfeSNVdgrkdKTIpFo8Cn4DEQaevSi6N5sNmY0OeXodMjTp884OT5mOBrRNDVl2WF1dRWldfD/dZ7JdIL0nrXVDlvb6wwGI44OB+AKnJdYI5eFUanBOYMxEpkkJGkWkDbvL8xtv/z/i5I9/wiGFOfIzkcqKm0hIQwRxQpbrYP4OwSQYDQacXR0yLOnz3jv/du8884tHj18wHBwymQyxsdWpFB0DvMxaHgHNlk4l/hJn7esfNr4wiRtiVaBKhipTlKGhSVk96EpXwiBEuq8aVleeCgJZWDhW4GJYLLcrg8Xq7zxVcGHIFUGVROUj9z3rMDXNYvZOBhkx6A2eMa1ktYimlYKLJLaeMbzBdW8wvoq7FPRlynLsiVNU0sdpFJ18EtJkwyZFYhS4bxjPl8EL7a2lzied0ATXLBEEBIhNFJohEjCBwIlwbgG7wNC5H2o4JlonByWULGckEJKTGWRqNBsWhlqLXBJCBNdZRHWkulwP5wwVNYxnNb41LCoHcZ6TGNJhCDTCZlOqI1h0SywTiCJ1E5rMdZgTBA5UFpEtU+JNR7vK4ZDw3gyYv/gGbt7l7h0+Qp7ly6zvr6OVIokzUISoCXOhV40IQPNtFX2RAAKJvMJDx7eZzo45undu1y5fJkrV66wurKKlopsbYVer2RtssrZeM
RoNGF4VpHnOXlRkCZpaBB3JqI6QeVQJwqpBTiPs+CNQ5GQuow8z+j3Vzk7HTAdjphOF4wnT9ja3mRjY41utyDPC5RSZFlGWQZ60GJekySK6XTCYj7B24a8KNjYWKcsC0ajCXfu3GY4POPmzZe5/tINirKLaRoas4j0xSggEhPsttnd4nC+IUkicujOTeDbEvKS8hbRZxmFa1pEWkoZVBujJ2C4FiEpkkIEOwjvEI1D4lGifTRdTN5NfAGFdxpBgpbgTI3EkKicRAUFJUVQEfWurW75iJoZjAs9dD4eW8sEK4HYmyJlQMY8Jji7y/BsNs6RSYmNQkOLumL/8IDN3jobG5ukRRVESLxbRmhKCvJEk2rJ2t42X/mpL2MOP0QkwXMuTRK8dmzu7fL01hPeuXWXX3zrJunpQdi6bEM9X6Ck4srVK3ztG1/hz//0z3j6/AlXrlzi6OSYTq8kzTO6aQqJQgpHY2p0ltHMah49fUxXVGghkCql9qHClyrwUmKBRbUAJ5jNTfB0zJLgiRcFQKQWlJ2SPM9wzpJnKf2VVbxSSAU6USitcd6xMA3HZwOmzZCFqZAJoDz/9Xt/y7OjZ5TdHrUxyEST5wXOWb7+la/ym7/0bbKiDPPEW2azIf/xL/6UB4+fIITiZ77503zrZ3+WlbKgNW39b1Fd/+9ttLU4EMHCpLH8yZ/8Mf/63/w+z/YfcfnSFr/6K/+S/YNn3HnvfarRnGpcM5nMUCpldX2NX/21X+MXv/UtLl+6RKffQTjPr/7WL3Pn5ITDf3vM6PSUIpHkuUSo1sih3Uni/5eBz/nn0N8pl+uQ957FaIqXNVnZiQVadf634rzK3Y7POzWWfyd8RPXDoibbi4TDiwbXjGmqBdXwlFmTUO6t0FUJTV0zqyvm3tOra5jPSFZ6L55LPFaWpmRZxnQxo9df4X/89V/nK1//JhurmzH2i32GPqw/sVx5nkgKoohZQB6l8GGft54iz+iWJWmSUFn5D8o9xohHAAAgAElEQVRpggVCYIFYH3odpdLBNinSWtt1/AfRI9uw6aOCLBKx3B+cs6gkIe+scG2lz/VXUq7eeJk0LYO2QAuetjRQWruAi9T8H23ER4BW+bKdk9ILrGifE48QnmxjjY1vfgN37QaT2QC9vsbg+IB5PedkNKA43mdn7xWkgNKHdhIyx8qu563tBYgBonpAf7vB2xzyHN1r8OoOtVCI7ozyzRJ/6Rr+0jZi0MMdzVkMgt2TcAJnHVI1zDBkhYKsQF5dRb1xGbFbQF+ANBTe4pbCIwLpW69Dh7EVzx8/5u/f/i88fPqA+QIQkjJP6a+sUBRFVEsORbbZvKJpGoz1dLodtjbXOdzfZ//gBHyC1hnWBwst4S2tlUYAKQSucdSNCboJsCzKtyn8P6zY8v/fWK5b8AIaH96Bi3548ftOEIQGfWyfCvYIjx8/5MNbH/LBrQ+49eEtnj99xmQyCa1CMfEwVYX37gKLkIhKCqQMBXOx9KRun6t/BD1tgkC7utifIpVa8rJb9EzQCizIZW+Opw1SLxwt0tmC8IKLi4uPPxMExO1crMEuZf0FSjhkVqCdQ3rPYhoEBogmpSHXC8cI3QQS42A6rxhNFtRVhbEe60A6HQRI6iAkorVEK8jzlJVOl9Vuj5euXOG1r77K1TeuUTWG9959jwf3H1LXgQIpXBQeXyafMYBtPJCQJgVpVrCytkpR5BhXUzcL5vMxZjGlmk2YTiZU80XglgN5npPlGePZlMYaFtOa3JV0VzdINnpMXc3x2VGARJqas+GA+WKGiwjE3cZSHJ9xNpziXKBzeoIZsxaKRClElmOcoTE10ktEpPOF/jyHtR4pg+KhUlFgxRkSEiaTIXduj3j08D6bm5u8dP06W1tbrK2v0+v3w0KZpEid4GxNmpdkSYJOAsoglaAoc/I85fTwgOHBAcOjQw6fPmHv0iUuXblKb6WPEJY806zpPnmWMZ9WWGOp5zW2DvRBrTVSKJxwtKqGYZ+xIARpnqFs7K90jlRJNta3WO9vMBlNODo55GwwYjKesLa+wvb2RjQ8V2xubsSkbMzp6QlCltR1TVVVTCYNaZLR7wdhk+FwyP7RAePplMFwzEvXbrKxuU6n26OpF9T1jCRRsbgR5qiINCOHj3YMgVqstV6ibt57EqXx+CCGI9QyaXPOYY1bbtA+VkeDmmP8+/h6UokgouNaPzii4bTFYWISIZEiJVUliAbcAoTF1QbpNJkKXk9aqVhJDEEBBJqniwhXoGgG5K4xQY5YCo11FusdxhuEUjgpMd6DkshUh3uoPEmeohNNbQxl0adAk2RpVF0LyJq3DUp4hDcYU/Hml99g/gwmC0Njg2LqwlTIYpXLL93g9p3b3L4nyVKNbRps3USBI48uNG9++XVmszHf+c53OD07QSWSJ8+eUnQK0iIj90lQqo1qn95K7t5/wNnjW7xy5WXWLUzmFU1do4tu6G2QKpixW8dsPmM2GSHSMhptW6QOSVniBUWm0Rqaes765hpZt4uXlspMg3diHvtRzQI5ndEzHn96hsoycut4/t4H0SJDsrW9jeyvUosa9eUvsVIqtJuHddx5erlAuDknh08wxnG/3+Pnvv5VfJH/JEn7MQ8nQjFCOfiLv/0Ov/N//+/cufUeuzs7/Pb/8ttIKTh++ITB4Izx2RS3CEJZG9tbfOObP82v/8+/xaUrO7ja4rHIVKC1IM9LXn/ldW5euUoqgny7cA4v86WPkBCKi41nF++tkHIZDHnrWIzGHJ8esr67jlRJ/AG09hEvpA8/0hRpX5RlTOBtTTU95vjgLicnp7izU0Sxyd72q3TShF63pNvv89Y3vsH1lZTNsvOp6JO1lm6nQ+0ayvUN3njzTTbWV1DUEdlTCC6IbVxIKH17UEkw1Y4F1uDlZJG4oFqr1LKQdvFxOS90fMLbjllla2kSKvugZEKis9A+oTVaJ8t9bXlyP+h6fwSIOGdVCmSasLm7y8rmJmW/g0yilcBHDx0LbRgHUuE5Z3H86GtC9CXFo+I1F/G7SzwoFvH61y9zXUnc/Qcw7yPVgjvv36OZCE4XM8pFw7Z0CNGAmYFK8XKGl/t4ewchnoB8gO6McX6OFfex8hAXXPEwucc5hd4u8Os7yCpHDXZJDjOS0RFyZHFTh/NnUIDcW0dudWG7jygVKEMQeBMIF/ZzVIPA4EUTfiYM1aLi3v0PefLkMVqnrK9voBNFqoN2ghQKY11oU3EuqpGHoq0fL7g/PcI0liLtsaimVM0AlehgBq5ynFU4lyBkitKA8nS7Pcq8WOpXywvz4h8lh+JjfM520YgetQSWjzWwmBsO9/c5PDzk4cMHvPvuOzx6HGx6zs4GTKbTUMCWGoeG1mdZBoV027ZHSbUUjcu0QYkmtF1FYMBFJtRnjS9G0iaCmAIqPshKhkUGcR4ot6CkDwuT0hopghHiR6u3eB/gXaVwXkS/J5arsFvCuyF7liJMQ+ktwimUTJBphssLqvkMb0OwGpo8VRQ9CPCmFwFtmlc1TWMRSqOVRFqBq8CYMAkSHShgSmkSlZBIR
SYVG70+L1+7yutvvY4D6vmUxWzKfFZHzyodKpaROuIBYw117RAkpGlJUXa5cuUK/dU+HsOimjAeneGrKdPBKSeHR4xHIyAo2uR5TlbkDEZnVE2DFikd2+fVN77CN37pF7C55N/++3/H/rPHlFrz+OF97j+6x3A25vR0xOxsCvIYp8PkDApNAtMYrDdByEjHzSfNsSqaNpsAE6tE411UI7Sh9yhJZaAgOrPctCpvePpkysnxPiurq2xvb/PS9RsUGzt0V9bp5gXCQ9MEWwUT1R4TpVnf3OTmyzeZ7e8zODpiNhnxZD7l9PSY5/vP2LtyhfWtTbJuBzBMpyNmw4pur0+Z5RhjWMznIUFUGi31sgrirEX40KvlXKCYZnmGNw5TNyiVkKYpeRpM1IejAWdnp+zvHzCdjtncWmdlpY8gVFfLMqfTKTg6OmQ2m6O1wphg+TCfm4C6bW2QTjLG4wm3br3P2WDC9RsvcfPGNcoiwfuEQBeO9F8vlghxW9iQSkbUyi6DKgFtj3xIktoFo0Xa8Ch1vkw4Z1EqzknC+3cRwWoTKSH8Ui1MLOu14J3EWQFORmkvi07BNjNODoecDmqapoKiXIqreO8xtsFdEB2iXRO8AxtN0iPFKvRzWrRSgboTEz3rLF6GKpnznqLbJfEJOk1C76hSJGkwVhfC452hXsxpmorHTx7yZ3/xZ/yz13YDuojC+gbrLQ8fPsSmW3T7q5wMRuxulUgCI6BpDHVjSFWOzj1vfvkNzkZnvPPOOwgVEv2Hjx+TlyV7eR6a+rUEG4oZSM3t23cofMr1X0zw0pEkKUmSEPSBwrqkdSvpHW3TnQlX3lsaU2GlAgx5JmmaKffu38IqSZYXqDTFuFBkSpI0eEeZBbPpFDScjQZBqTRJmS8qvJAMhwmNqbGy5v23/5a/6CWslx0SqfHeczQY8OG732N0vE+SFpzsP2U4OGWt3w/04uW6/5FgrV2+W9DmU/aLT9xrP+UPPuNHP3B8NBBpWQ8fP+AnHd3/gJ//w87kRR3OsIvVjeXtt/+e/+Nf/Q63P3yX1c0+v/Zbv0yZK97+/rssTAXOo3XKjIbLly7xq7/xm3zrX/xz1nc3qZxHaVASPEGROE0zdrf2uHH9OjL6IIYPyzKKXyJsF2SPYrDTov0BdbYoIdhY36TodgnIW0DiwjMdEQRx/l7b/f6Tr5x/8asXGtjaj8C0aaZznj++xbNH32f/4C7VomIN6O8oKh8q6kprym6Xmy+/ztrsFLlooOOje/D5kdtXyfIMPU9ojGEynbDjLdYtECKJIh8Rn/LRVl60aETbgdgGh0GlL9EK5xVSeJQMCsEujerGfD406mNzte2RJ7COrHM0xmK9R0V163ByP2y4fR6pC0AoRd7vkcf7ZxuHUpE0ew51BsNxEwqb6BDj/bhHuKqtJH2MF53DtsV6AUkqKMqUlAW5XiAmDYtxw6hyDATszxs2Z2P6pSSTg5CscYjgGO/2EXIBymCFxpMifIOoz0hJgBIvSrxyCE5pZIUtpyTllGQ3R9htxELgFwKSLdAWcgUarJ/gVSiKITRKJEFUDwhCI00odFKDN1i3oN8vuXRpj3lt8WIVpTzOzqirilam2QHWOJwLvY7zxQIpala7JYmSVI0h0QWJLENbS/DkiUJ7Fi9rrJMURUm320VrHVPkj8JUn0WU/vz37/zOfeSbF//xsW1DvPArn34m4uOHWy4dodc2UCANdT1jeHbG0dEBz58dcffWQz54/xYHBweMRkMm0xF13VDXVYhNjMXgsb5BRo9p4jPoCc+0VCqyYEL9wlkd85MQk1kXem39D+AqfyGSNh/jONnCBBFNI6qPWSRCBHd6cMFHrUWe4u8G/w8f/2uRtRicErioOBAy9N8QaRN4HzxLpAjVDSPOpcjj+YQNyIUqDjHYUC7SuAyNWQS0yREqWDIY+/rEQm0xTTBGthpMImlcUItMkXS8ZCPLKRRUWiGyBHSCVjEglU0wnhYS7dUS3VAyCDgopfFCgkoQSRbVhXJ8lmEE+HSG8IbZYozPOqRph2rW0GlGFAoKlfLyS69y/cpNBqOa2fiE1155lW987VX+cngExvPaq6+xs9vF43n/nTscPzvlrKpYNDXO1CQWEulxicK297FuwrXTIlb8FGkaEFJnLTaKU3gbqhlNY0nTyL1u5d2jKuZiuqCpjhkcj9h/csTulWtcvX4DeWmPsiyCkbQRCBK0LgFNmpesbe2yXq6ys/cSo+GQ/efPOBtPmdU1+0dH/x9zb/pk2XVd+f3OcMc35pxZA1CFiSAFECBFWqIkSgq1ojW4/aHDYUc47L/M/tQd0Q63/wBHuyWqZXZLFCeQIAEQQAE1Tzm+ebjDOccfzrnvvSwMJNhqBm9EApWVWffd4Qx7r732WrQ6LY6uHrJ/sMd2p0s36qKjjN52H6kU0/mM49MTJpMhOhJkrdR73QnnExArMHWFqH0/iZKRVx7ToRqsFJlSqDij3ekzHJ4zmQx5/PCUyWjO9nafTrtNGidk2zlpkjIajxgOR8znC5bLJfP5nKquiRJvGp5lOdPJjKdPHzIYnjIcnPLCzefZ299GK025LDB1EPEQjQDNmtCkpac3uqbPs4F0YUURMNaGYEOEiravXvvmfUJSFsKP5lxhl9Y6xtR1mDse/1RWIZzCSTBUWLtA4TBWYGpBqmF2/D4/+sHb3Lh+iEsNtagCnVFiSv9+TRUq6tbfl5KglKWWGZVxCF0iSwtVhJIJ0pR+ERQEcRaJNYqyEgidU9VgVAzKK0Ta0uBihQiggZbOVx2o+el3v0t//gbXbz6PVI6yglrEPHh8zGA55vnr18k6bQSOOEmwkebRbMLp+YTrWz2cq+h2unztq28wG425e/c+qU4ZiAGPWk/odrdIshglKpSOQMdcObzK9s417jy64MajJ+wcdVgKkKnGCEesNHESMS8spXNULiJyjpQlqUhJrIAaau09JxOtcU5w66M7PD49IctTkjQF51hWFUrFJHGKcuDqikgJWmnmJYilYuZK5vMZy8WCKFIoCad37/DDv/3PJFpha+vXpEhS2xKnNFUd4aqc09MRzz3nAmC2ufh/2obwbJXjEyEpl/KnZvPd2PXdM/8m8Cw+dx+69Pnh3zQ0Gdmcw7kwrlcTZvNDVxchVmrFzd//kkz0mWtdn3VdMvJN6j7BCbsdxho+vPsx//u/+b/4wY9+xFY34ff+4L/DGMNPf/ADBhcDTgdL5jNDUcPRzRv86//pf+Sv/+ov6XRbDXS5AkhxnqkgpAcvRBRhIy+G5emMMtSQGrwdWNHcHM56lFrgQg+bAAu6nXsGBp5uJTb20mZ12nw4z75Swuc0SdD6DYFX04NGjMVbvnia4uDRKd//znf5+P5PWdQTDg8O0K0uk/OSrXmFyYL1iRTITkYc76PznlcbRAT7IFZrmQ+kFUYpSlPx8O5d5oMBdx7c5ZWvvMmXX3k1tO6Fytoq6zM4NnrkkB63CnhsGmXE2qCFIVIaEzuEqoCgynxpQnzKCBG+yqesJpaaRGZMVY6KBLGQSOcBFe//qJE2
0O6cZdWjJprRsPkZn17WW9VHGwGU8KUSuS5YuLD/OK88qHArAPqfv+LuWRorsZxwQXKjkttUCFMVk84qthB0sxZPFpLhcsLALSmevMveKKKdb2PFYyp3DyOOSfC2FK7KKYt9hhN4enJCOR9ytJVzdHCAzq4jCgMh2dPxHCtHVGKCVSmxk6gkDfTHpqZTI4lw+D59KzTGCSoEWkRI9hC8hBCHWJf637GCYrmkLJZoqYgjTVlZXG1QgIx820BR+4qrNQ5Te0EtoRxZKyHLFRJHkuUYq6lq8A433krK24hYamtxWiMjD0SrEC6sUu5LY+YLHBtDuBnal+f8xj5hN9ZOsY7z/eF90Uz4dxpwxgXbIBOWCem/ACcFVvrfR/rCb21gsXScno04Ozvm5MlDzo4f8vDuxzy8f5vB2YDpZMFyUWBsTVX7Nc7YGussVV1T4/UolBC+OKMVQmsQXjSttjWuLqmtWQnBSRH5fESAlKEw4DYzyU8/fiuSNvDrbiO7L4SvMCEkTiosCiM1Tobqmwsy4ha/MUg8VUO5UC3wSZZHvNcvXKjQ69GsT+EHUlp8cb0G5bwSnQUrpfdKMcabRgvpS5xKIKRDYqmLOfP5GFNXxIF6YK3FCQPaG3NT+usyVmKEvxfjvFphYiz9WBMrx1xZCiUow2bhSWgWG1QTpbGr1cfhe36avj8h5LrXTgpqKVmqhKTV5fqVI04n59weDNDacqXVoScswhQ4B8eP7rOYjZgXjseDu5yPP+bxyQmJgrIWZGlKnu6yv7PFdtzl4vqMmRTcP33K3Y9uU16MPSVDWgoFiYjIglz6MnCgm41d4G0NlPBWCxaPLDgriWVEJCMQPhFYNdxqb2sQ6xRRa+x4gV6U2MWcgpqiWKJVRlWx6plSOvHV0kqjVYud9jZZf5fx8Jzh4JTBxRmz+YS6mDM+O2F394jDgxdoZYmPjpSiv9WjouLR4ynj+QSD71vTKkHpFGkktgasIFYxxtS+QiK8bYIHDhSR1ERxTJzktNp9ZpMRs8mU5fyUrV5Bv9clb+X0un2yPKfV6jAcDhkOhyilKZYly+WSqqrIsoztnT5RHDEajrn10QecnT7lK1/+MjdvPk8UxQjnq5aNsuRKwKOx1ZANPdgDEqbpg3NeHckDA37xsEiEM80s9b1z0pvNW1uvAjDnvBKpk6ECHc5hjUFZH+w5CU7VCLEApzyPHkcaSZ7fjXhbDyhFxtGN5zBFiYwEy6IgjlIk2ifGwqs7+uDRIYXBqhbLeoFQBbGMkXVGpGNUOcW4EmMqMBGgEVZhag265dcDnSEjDywI56BWVNRY6ei0M/IswbqaXZ1QDWo4KCE3QIIkJk63MfMFUsYgK5wwJEmC0ZpH8xn3Hp6S5zn7h32UUmz3t/jm17/JeDDh7OkZkU45Pb4gyx5w88WbxMIghEXHOV9++VUmx+e88/MPePz4Cf2jPgWCha18nGoNWZZgsoQtneKUxEWOxM2RFaROEQuNFd5g3JuFx+T5FjvbiiiS4CpMXVEZQV1WUGuyuO0ZOnWFkgIlImonSaMMLORJRLeVEssIJbVHlrWnvmIEkarRccVkMSfJD7F0OTufUVvH2h7KBVT0kxvUZkj+SfLcZu12o1DwKXvdJh68rgd9saM5x2VfomDD4rzKqBBqAxXfvIv1Nf96h7v05anOUPrchPnScOv2R/z7//v/5Hv/9H2yVs43vvUNsiTn7ocfMZ9cMLoYMx2ASjp86ZXn+Vf/+n/gj7/9h6uEDTy6LkNlyAuCgHAlKgIjBDJPQtIiQ9uYCA9e+nXCCazxCLXvezPUpkIIiZK+X1OE8yOasDqEZxuPRjzzbj/tafhnv6qjXHpOAgPWA4dOWDQSWQoWgxnV3FDrHBV3uXtaMK7PeWVa4nZCb7QSFMIxj3KIU3++YMrtLVkdUlhcVVMuDYvaYLTgzge3+C937nFuLbsvvEktNcoZEAZL5ANJ54AaKxLW6aqnr1nhVXAjoVGiwtmlF/OyJYYliCTco2blObbKisL1IagFaKsQVhNJhXIRRS2otUAZSywTwLedxDLz8ZUT4IyPmy4lzf78/lpV83LCj9wl1T0nw9tshAPsGsizIUBN0hSlo1DtasaA+PWnxTPHuiq7loBZ/VcIVMPACj3fQkGmI+qJxbQkpbXMqyEqnfLcc4fs9h9i7IcYOUYwJ6JEVgI3VoweT/jw/VPe+3DE45MRe/sddv/8G4ijF6juO4aDc8bFOaI1Z/9mStbbQpL7GE6AT6usb64REkkCLkWQg0sQZChinFMIEePEC8BrWNfFEeMsDC5GPL7/lNFwgjdnByFrlHXYymCUw8QKIy0iEoAJvVVeVj5tpagIEiURMqYq/XypCs+EklJhQouPRWIk1M6itfIxMG5jHMpL7+DXPTZXugbGIcQkrpErVs1bDfM9gGRNLB/wXB/DCIuta5/jicgXbBR4FXBDYS3j6ZIPPrrLR7fvc3w24P6DRzx59JDx+QluPsMtppSzMXVd4bQvu7uwETgERghPFY90APNBunWxydZ2xRJqBAyd86qfkZagfGIshUCH/tcGXP6847ciaVsFkBC82jZS7ybDCn+vtUY3pUZ8o60JDV+bm6qQXpWuqSrIwBW3QVnvMp/a0yed8HRJoRQYiREglPJN/6FnSTmvouikw0mBKUuq4BQfJwlZ6ql1lAXOCozzXlFNr1AjuS9kCGCURSpHHHv+soq8IIvnDMvVhgE+mG7UJMFhQkIURVHwkPN9fkoqL+CyKNlu9Xj1led47fe+zt/99Bf8/d/9gKcXj1noBWlH0N3ZYTaf8vRiRhQ5iicFH9+9y/ZBl/3DmwwHM2aTc7qtLtVScPVoj5dvvEqhFF+3Fe+9/z7v/fxd7t27z2K5wNaeriPwvi1WWKRSqx4qKRqcMVgZCIG1wtMsgoKjExZjTFAR1SAjlIyI44wsycjTjFaWkSYJpampqgqtMqyxzKYzEpFiraWqSu8rFylMbWl12qSJZntnm+l4n8HFGRfn55ycXXB8OuL4dMju7h57h0fsHOyxmBnK5QyspZW06WQdrDUs5yVSmeCdpjCmYrFY+LElpeeUo7xiFY3MNMR5TJxt0enlTMdDppMxZ8MBk9mYXr/Pzm6fdrtNu9Wm1WrRanlj7vF4TDSfU5Yly/mUYj6n1WrTOtxjMplwfn7KP31vxMnxCTdu3ODw8JA0b1GWJdZWCOyqGbau61BB84nbZm9bI0Iiw5hzYU6tG8Y3w6nLQakQDU0Zmr7TsId7hcvNeU0j5++vJ9Kal168ybf/4Pf5+x/8gHtpyl//xV9z8eQcFStkpSjqisoY7xNXQxIleO65AKF8k32AUq1dA8BNX6sIlQRjDFXtabSJjr03SlFSVzV1VROnsQ82tSKKvKiHrWqkECRxEhRk64AWO9IkJY4tkY68P1y1REmJUN7MdjAcMhr12dlvE+cpYgaHh/v8yR//EX/zt9/h5OQpebvF3Tt3SdKEK9f6KBGjIsmVq4f8/re+QbfVJb92k7OTY+rKIEXEcmEwTlJbR1EumM0q5sslsYw
QgR9vnF85AtmAujQUy4osa6MTr06Gqz0Sbr0ZeVU5iqJEaC+YpJXDUzATZJx5tUtqIqVIdIRD4VTlZZCF8lYdbkmSKJ/URBlZe4887/iwUAiPwH+OCIFgo3r2KSH86ue/9Ghoe78cvVwd7tlv17WuFYu4IWAF0OmynVgzTzb//891bJzLwYcf3uHf/tt/xz/+43+ilS/5l3/2V7STLg8+vkU9P2E2njEZ1eiox8HVq/zJn36bP/+zb5OnGc5atFS4RrhrlU74B6CkQgvVdGZtfPaz/w/JtwjBRmjWL5aFt+1JolV/yOVHsZkgfOodbvxMfOK/TQJ4+Xft6jeaf6l1RJa3SVttVKzY6m9zdnIfnSREWq+qL0ma45RmMquwUqDkpyThzve0lWVJWRlU7lkFk8kEm7VWZthNL9VnDVFfKPRwgJAgtfKgsJJoFfqY3GY99fOOtXGRCAraSoFSPr6II0kriUhVRKQiL9+uE5CxT8JltAKBL79nPuW78Hcb83atsuewoZdOBq9TEfqfEWK17z9bAf/nOi5d++ZFh+tbxR4SynROdVCwkAtU7JjWZ0TRiDd/94ibr/Zop1MUFcpEmDojsh14PKV65ynq4ZRrZ0v20XC9xda1A5L5lOJnH5Luvcjucy/BsM+jR3dIu3voqAXRDB3PgDEwRYoyjNQIQQ/BFthdhNgD9oA+hCQOUnzCLnFoprMZjx+cMhjOmRVgnKKwDqEjwIK0VKamLGrP9rKWxXTBcrZA6ggVKaRTJFFCO0vQOmVZWNx0iXU1UhrmyzKA5n6MqkgTaRXiNUI183Lf1bOrwq93fEp8IbydYs1aJkShPBzivK8dMjBuEVRCY4SgRiJ0Ru28/VZpFhTDMcPTBzy9/S6nx/e5/fA+t27f5fHJOcvKsFx4pfPIKWRp6UYJvX6bSipmUrEoQ5sGPhnz9gv+z3VdU9WVT86soayKkHsEGrSUoV81UKglwbYs5DVSbazDn3/8ViRt/hABKZErb5dmY2+yemvtWrbcetqTsRYnG85y2Hac7+fw+Zlbne/TEjYhvBeFa2gazmfPTiof7SiFU77hH+kX7rooKOsCoyVRHLPd7zMxYCsbZOIjjABTCqysccIQ7DJJtApIdUKrkxFlCiJ8udw353hqGQQ5dtboVCilWmt99i/WNgmbipB+gAgyqVgMR9wtxvRuHHD16hVevvE8L/V22W0r3rn3DvdGJ+BSopGm25YkbU1R1pw9XaDkOcaUDM7G6DqnFWvyTHLt6gEf3b7HfMePlmEAACAASURBVD7hxvPXmNVLzospk0dPSKzALCtKW+KwyEj6knWjpBL89RB+EDfvQEmvqJhlGRaP+AjZ3HOMCDx8IdfqonmeE0vHfFkFoReNEIKqKpjMxlwMzuhHOVGSo5VAa4VRKVWp0Dt7tDt92p1tTs9OKZYLjk+ecHZxwvH5Y/YvDjm6do2yLNFS0Gtvs7dziEQxng8ZjE+Zz2fE2m+GTsNiNtvYuAM1QzuEWiORAkkcJfTjbaI0ZjIeMZ/OeHJ6wmQ24ejwkO3tPkmUsL21TSvLydOUwXDIZDKmKAqssRTLOUII2u2MJN5nOBzz4Ue3ODs/54UXX+TFF16k0+kgiCiLGYtFudpEN6X8V89fqRB8haS6qbiFeePnyarugFSCZvnGrU3YG+NrJ4JQT4DP1si4X8Sc9ZQNjCXSMdtbO7x04yaVqfnxj97i+//wXf74j/6U+/ceUs5riKX3EEQQae2VR2XohxEi9DM6L0lvrUdUBQjhvema+a+0RmqFMRajVpndSqGShhqtPU3SCCiNwQpPOxFS+gRFSWxVYerai+g0dGrrn1Nd1xyfHPPe+7/g6mGXyuwiS4dxNU44nr95g9e/+jo/fftnDC7O6e/scu/eXVqtF8iPDrxokDPs7/SpX36O48ryve/8f2yLitf2j3BGo+PcK6rWFfPFkOHogq7s+i3VOio8ki+F8Im7NfT6Pfafv+kRVFOjJSjpLUqcU1ghmS+mCFugXIUUntGwqC3LomI0GjC+OEUJQ5ImRFGMkRVWSNK4QyvtomWJjmvysuT0wnHzxot86ZVXiLReJ9C/wvYuVgj5Zv1Fhjgs1GVWid1mpaD5+af9+Vc7NvDcVYWH8L21UIfCkXqWGhQqH+LZv38mAfniR0gerQeLJvOSt3/yNm/98MfEIuabv/s1UtHm1jsfU83PMMsZg4sFKu7Q2m2xd+05trZ3aMUgRYUUfp3yFezmI1xzA2ilgtKx3xP9sw7PWYTgWzRtBmJ1e56eXaCkJNbxuiJjvdrs5ccgVnf2ybqkw5OY/DqzkiG8FPO7lVgkQXjs0vN2IJUmzVvEaYpMIg4ODhlezKmiHpGWSOk/O05SnNTMqwKjgsrbpbfl/+RVmVlVk6I4IkszRJYRaf9MPTDx+WICq1MG8Erie4KjyPfE68CQ+FWPVSVYgI4lcSJQqg5j1D80rWLanR36OwfIJPeJm9AeNP0UcP/zErYG6FuB4qz3CRVFq5hrdTTA+n+DXrZfdmzKtwsBKgO5Y1iMhiiRYkSFFDGZ3qGbHBJxgRBDYIFaZLh3Tqk/eog6n9NeVrSpqAVU1ZL5nTGLuymit83ZqxXbndfYfenL9K6+jhCZBzBljWSKsyc4ewGiRooESBAiB9oI0QHaQBvnMnARDe1XEMq9QpDqlL2tAybjOTq21MZQV160rioNykBpHbUUtLo9tBTMBjOE1aQqIxYJwioEETiNc37fl6ECXiwL6rIkir0wl8DhlCKLk0Bv3nieG3Pki8NTm2t6c55PB7uEWNtouI3f9CJIcrVSGCwwI8agqgpXWcrRlFsf3uad929x+ugpy6fHmLNTJsMpDwrBqKxxsoVwSzLhSBLHjWt7XN3b4mirT6Yk90/nfPh0yflgwmKxoAqCLsvlEuus70ULPfc4r/IppVx52jb5i1LP5jbKJ6LOBSX49a7zecdvTdImRGAkr1aqIEKyamy2QUnOreheQkpskCdt+m8CSOVv3TXy5j5RaKo362SwEToB8AmREwJhBdJaZBTjlMYKCQqiOCJKMiaLguFkQimg1WqRphl0DKaoSZKEytSoWqNqh5G+R06i/GIsJVvdNjeu7nPjaJdev41MJU45P2FM5elcQfXSe9IJkGvZ/0Ylr+m588bSNtDGQiDuQPlslMI6fvre+zy8GKGtYbfT5a//4k/5dvTn/M1P/4Hv/9OPGZ5fYEYVqZHIKCOhzdMnj9Da4ExKVZUsF5bpZEq1MEwGE/YO9/ndP/sT+i9c5fbFUx6fHJPUCqfARIKiLrHWUhQFEDanYLjq8MHgOmmQXrwjTalttUrCnfCefUpFPuCONDqJQzUSEJIoilZiGVmWIRKoR4bT02NmFra6fXrdPn6RVCRphjEWHTkO8x793SOm4wFnxw9YLGecD885H53x6OkD9veO2N85ZHdriwgJVtJrdclyxXwxZ3gxZDwee4N14RM1G+hCwtmQ94fE2ljvOWYFTijSdpc4a9HqzBkOB0yGQ2bT2/T7Pfb39uh020RtRRRp2p0Wo2HO2d
kZ47Gnozrnq61xknB0dMhwNGEwHPLWWz/h9PScL33pVa5ePaDT6ZKmKbPZbC1UAit65NoQ160342Z5FF7QB0JfTwBRfKM7q7kncD7EEqzGrRf8D4IljZJrqAJ5oClsSlISRzHtJOHFK1eIvlbzgx//hHK55Ft/9MfMihlWGr8oC4ExFmdqVOxw2q8VzonQABxoDCGwsnWFsMZvgE6htSbSMVL53lMhJTqKvFmuFE33kKfRBwK/Uz6Rc8r/vLbWSx8L2N7aZlSN/f2aQL3GP7P5YsGdu3c5PX+JypScPHpKp9UhDfSrN772BlIrfvKTt/27MIYHd+/TTnN6Wz1wFTqCq1f3mB9PGA7OqBZzFrOC+cxQOslssWQ2nlMtFyglybI0qJ36vlojaiQCawucq9jZ7tM/ukZhHHXoW5MIv3nL0I/rCjAFwlbgamrjKI2gqgyDiw6PXI2sl3RbLd/srjSVdZRlRTG6oJUoUueoakMctzk8OmR7O/dtw9ashJw+cy/4zIqu//4T1Zhm0d84A5/53a93NChvyMt9oOBWsIUPMy4F+s2nroEKnqnJ/WrH+uqttYhAXZpP5xzsbvOn3/4XRNL32t754B6jiwJhSqajJVHUobPXpW4t2Lq6xc/fe4s//OZN9ncPA0AZ+YSsKac5fCIm1kmb34+bHz57NAGG/53R+QWD0Tl7B7u02o1kvoAGe3z2tQpCxrWZfK/uduOXNn7smhM5v0daA1I/M5zcSgNDKuV7gdsdyFO63R55mqC7PdI0xq9NDisElRVUBsrwefqZ54+TFIWXgfd+j/7tGxtikNX6cenSP/2Vhj46n4f6vjLnvHKgDcj95TH9OYcLJDUbYiPtkLpGSl8Rj5VAq4ijo+vsbB2ydbCNTju4kAj8qgkbXE6ALiVtWqHjQOXcYFSsmYnyWUzlN3Y8Wxk0laWcVVRFxayC3b3n2Tu8zsHOEVqmSHcb5xa4+UPKH5whfnSCm05QYbwZlVFELWy+g27v0j66Adu7jHc6iHgf3BZR3AHVwVkVmA4VyOsIMQkX0gOhQShWonhNFt+AIrJCWAVO+/FUG6SxdFottrf3mFeW6WRCkub0ul1SIaGuGM4muFjQ2+pRF0uGx+cszRxpBWZZYuKUKrYsrBfeKMqKxbJkWZRenVgK0jiiv91j/+CA7u4eeZqw2++ihWAtSf9f8RKfma/imbVk9TvOIVyBxoDQOPzzEKsCC1QCptYyXlbIeUm6mDG/f5f5k8cY64iE5NWdQ7723JfZSbp054Jb797l//jOjxktT6jrEVnbcuW5NklW0c4s03LGex/fYjkdc3pRcb7MmM4WFKXvk0eEcSUAIVDKCyM2FHMZErQGSGmYRXJzHrh1r6q/1YYZ8/nP9bckaQs3KuUlhGY91xq/NY/2NV4HQgmk9YlMI7AoXLjpwEf351n37zTfy40qT6PS5FxA84VEJr73xxo/pJRzZEmGkhG1kCwx2KpkMpkwMZYYSR5nRFFEbY0vgyqHlA4rnZfVVpJMa/a3tnjlhZu89Pw+eSzIejk60VDwiTEb7h5n8cGqrTHCet+0kIiGX1ofQagBCd3tHZ67ccj1jqT/8W0++IefcO/WLf5LGnHzm1/hjdfeRIuY+9k9pKk5HTzi+OSYhYaspUnTiEjBfHHhk8Z6iSvh5edeoL21RRJHfOn1L7P93T1kJJHGB+M6jjxQ5CxlWa6e+2bSYLGrd6CVXlNY8ck1odImVYQMypsqVD2b72tXo6SilWUksd+ERWgEXZYLHnx8l53eFteuXafd6ZJl3rBbqshL1ztBIiK0ium2O0xnQ84GJ4wnF4wGA+plyfDsgunFmIO9q2z19kjTmEilpIkmCQ26s+mc4XiCkBop/bmV1CgvAhh6x4xPLrRAC4mnpVk6cYs4bVO0OwzPzzk5PWcymbC3t8vOTp80S0mzhDxNSeOIYZ4xHI4piorFsmC5XOKcoNvpkCQpF+dDbn18m+F4ypeGN3n9Ky+ztdWnqirKslwlZs27aOacQAS6akjqGlBgs9LmPCJcGwPWeEpOoLk28HQTptqmau2aAKtJ3Dy4oKT3d7PGgHMkcUQvTdnf2uYbX3uDf/zhD3EK3vzaN6CCqgq0DeGNwau6wCmHdypVyCimDs+06aGUAl9Bc9GlqrrSUeg/DKiY8malxtRgTKh6C6/KWBIaiL1ZvfdUgTROeHHviDo5x1VLhPQ0Bx17ylGapuStnIZ+dHx6zGQ65frV67SyFu045o03vsrZ2Tl3bt8hSxPOjk950unRbh+iMglliRKSTivlxZvP88E/fo8H9x4iZURRzDCTGfPxzBuRC3DWeBVTV7MwhX9GgHCe0hvFAmNLisr3MlXWoYXvi3UC34ukmnsUPrBzfmwoPPXXOd+r2NADvVWBlxG3TlDXNbNpwaI2TOaWyXSBqQUicStE1ycgzwRyYfA0nkoEuu3KV2K1zj2DRl6KbcXmEvprH6vaT8jGXNiDrPDJ2mRZM1ks6UUJ7VgjtQiKa/4KLoPRBtcIkzTo+eclrc/kn2uynb8zpWB/b4u//Is/4Q9+71u8/dZt3nvvDl/96iEfffBjhOvz2D1k92CP/rVtop2Mb/3pt/n+d/4fhKt8YmYczpm1ybJrPst/uBTSe6KyoePoNp4LjpXRrIXZZMLgYkhvZ5ssa3kxACGwpvYjR4eEdePBbAb0jcBLcyn+8Myb9V+E6h41UOHMElMViKiH1MkqB1zl8CFQUlHkPz+oHe/t7nH91d+h32sh8L06BIBJKN8Daj+Rp4Yg0UqMgdoYEiHDHuKrSFEUfX6ytr4L/yYbL1l8T53vDzSYqqRaelW6UDD8xL999pD4x+ewGAy19VXyONYk2oOau/uHSBkh4lB7E8/MlC9YLrnE2GAdY63jLBneczO4/P9/k7Yfn6gMGsdiWPHg3XMuzpe8/vUvc+0br9Lq7wdBmhG4KUwfMn7vMeW79+jPHLiSAoOJWhTRDsnR6+Sv/SFi/xrq8ACXROzRRogUZIQTAZiSAXR3GkEHyP2zEEkATlYyQ4Gq6zz8GUqnQjbzxPjfsQYhDXv7e+g853kBnVZOu+UFpEaTKQ9PHiNiSdZKOXv6lK2tNibPkCgWywINlMsSUxmcg+WyZLEscUKQpQm7u9u88sorHF7Z9+0qyu/vHgjxLTpfqAy8cVw2rW7ekV/P3Wptb8bS5iT0lXRB8Be0YEpHWVuIFZPFjHuPHlNPNFtWc6BucO2Fm8S9HN1PII0QKkYsHdXdC5bv3mNYTRlVU5yuWVQlxfGCshphqwXVfEY1XyCNwxio3NQnYg2FWfnij9xgLhnnWS2N+M1mYWh1/+E/jWZDM+F8LG+pKi928nnHb0nSBkI0nE65nvxhoouQpHgzSrlaJK0xSCVRTVWORu1OegTOiWAwaFcVhWerbEAISmRICJ3fV6VEI+n0t0izDIxBqwhhHd1IIdKYpCoZnJ8zGJ4zK2ui3f1V1UdKE6ho3nxSeIV4Uqno5zkHe9tce+4Kaa4gVcyrgqKuMHhpfE2Tyq77ijz60vT/BeraRuAtA
griF3sv3lxFinhrhys3D7j2wk3SScHbf/MP/Ie//TvEW//E1rUtOlnM3uEOb775dQ4O97h77xY//PF3efutWwyHkjS9oJMnOLxHR21qdDBUfve9d1lstfxzV+seLuv8RHcIksQjcMaYVR+eY43YNNe+KidbgdRqjb+EsSC1QkVRoK/6Xsa1WbSnqiolsRi/AUrB8fFjHt25zcnxU/YPjtjdPaC3tUve6pJmbW+rIBRRnCFURN7qsrW9x3w5YjIZcHpyzHg4ZnQ+4OG9e+zvHnD03FW2D7ZptVpYY1G72xwcHPD06SmPHz2hLrx1gJQSLWNqUwdl0RhBU+n1SbgNIh9xqomUJE0z8nGH87NTHj15ymg8Znd3m73dHVqdHmmW0+n2SVtnTMZTJpMZRVH4PouyJo5TDg4OmExmzMYT3vn5z5hPLnjzzTfIc+8D19AhNxM38JRIFSg+1tqQVPn+UOvLosFPBG9rEQJwZ6z/AhDSj38VgVQUZe2pA9aghOd7y6bfzfhmeqUkUhi00ORJyu7WNkIpvvm1r/Pz9z+gWJT89V/99yRpQm1q0ijBGp9I2cj3UM6WJXVonLau6Y8FrMXUFdgEGZQkhfD+bab01UpnLFopv54IiXGOKCQnKoqoqrmnoRhf7SvrCkRCXVbc+eADdp57heVsjKiXKxSuUZFN09SPyygGobl7/wH7B0dExpJGCa1Om2996/ewdcXTxw85PDrg1se3iaM+1184QCsf3PV7Xd746ms8fffnnJweM497WK3ptrscdXYZL+Y8HQ8xziGNRUSSoq4o6wXGWkw1JY4cVTnFLlJG8xKERAvod7vBnEEhJJi6pFxOEdYE/0wvP74sS+omyZd+LJwOLljaOdZJhMhRLoK6oNXWdHd2WRrh1XMD5NgsuZ+gT20cvnoiV8Fe4wvmrBf+8Oudr1yvFX79Jrm5Fv7aRwgsrHMYAfP5gmK5RFhNt5MjlOLp0zOOz8/Y7/S4urNNr5/T9GWDw9mQPAoHVJhqgXUpUdK6BB7+kgvZ+LNYIbDOeXkIrSU7Owl/8Gcv8Lt/dJ3lsubf/ZtTXn3xKzx6+JDnnr9KZ/cmb7074s6thL3tl8izFpgAljUN/p96Gb6hfvPzm+t2ziKko64LD9ZYQVlVHF45Im1lNNU6AKkDmhqea5P8Xgr8XFNp2wzSNpO15u8NsARRYIsRo+FThsNz+oev0e1dpTH8XbUy4Ne3NE3odNpUScT+4RW+8uob6O4ehZQYU2BNSbEsSJxEyVDVEBsPpvmjBVsbhJAr31cV9kEpZVCE27jkX/Juo8irxQklsEFGrPFtK8sFy+WCTnvjnM2jeubcYvMaA73bSeH3G+fBZCU0xIlfw/HxwyZJWXxiHHz6DTw7Zi/rD3z276znxm/u2EwQjPHx2Gg44v5Hx2h7yEsvf5nnX3mdbLeLoQKmqGoKF1OqDy8o3y7IlxmyDQvZZZn16Nx4k9bLfwD9lxG969g4p9aSpbGYBeSxRivhNQ+c9X3DEIZwBHhg2YkmMF8DMl5dVGHdGkStAIRBywpBhYgseSch1xk7+9tYYylnM6anxzw+fsK7t28zLmZcv/kcr7/xGlf3X8O98graCWbTOdPF0vd0G0lR1hRFCXi2Ur/fY3d/hzRNiFPtQcCN99bUAqVobC2a6//8wf5soray7rk032EzcVt10whwIqW0EQ5JMV9iFgu6WUY1W1AsZnT3t7je1hx96TlAExmFMAJhYW5qHtYLTocDzs8GXLz3IRc/+jG3f/ELTucn2KSgtAW2rphcVB5YrwWilgjbQjjlYxNdEcd61aNJyDOamMg5R6yTsEZ/MsdYK2yHvvqqxlR1sCOznkFnKqq69ID45xy/FUmbD7qDmbZYr3qf1olgrUchZJDudC70kwSK1grpcW4lftAkbp+1mUvhVfI85iHwfTIOIkmkY2IdY6rKJ5DOEglIBaAVRZaziCcUVSOjDihPfdBaU2sL1iBxaCmJpCTXmm6esbXdJ+lokm6CjSKM82bBdqNK4IIwhHFga7+wW+F7VnzlxiPWTa+SW21cXp2xlpLJvEQOxgi7oN3LefHLryDSDg+HZ5yf3efw2hFZO+f+ySlpr8vNV19m51qKFS3G5xWL5Qlnp/eZLsb0Wtu4asjP338fmcQsEsWFtJw8eIyrvEKSD7JCYOXWdFQXaChRFHlKVkOVkw2SsxZSsYGiJwKFbVX1wfPiZajMSZp3D4RqljEOJwleNI7pZMLHs1s8fvSYvb1DDo+us7WzR39rl26nS5zkaOWbfb3PWEy7vUPe6tDp9BicnTMeDDk+e8JwdMb5+IT98yMOjo7o9frEUYTDKwCdnjxlMVvw8osvEQuoXI2IvDCMDobxznmJeSUVMtKUZYmUChknxLGnfyZZynjkxUruPXjEdLbg6tER7U6L7tYOcZ4zm84ZDAaMhiOm0zllUVGVJTKRHB7skcTX0ApP5ZzPL1Wcm2S3SZQbA/qmZ7RBSBuJdk+T9Mjpikpp18pNIsDcASfD92BGCBUFFLaZ1b5RXuBW3iRKRWgd0Wq1qbVC6AgnFUJppI54591f8J3/9z/yv/4v/xsCqOoapSJwktpaahsWRKGCDG+gZ9rgveLsClRuEkshFErJYAkCpq78HA/32tynJGw6LqD2wn/55M7w+NEjHg+WXD3ao9fS2MqtAJYm0ZUqQoiI2bxkNJ5x/8Ejrl65iuopEqXZ2dnhtd/5CtPRiJPTE66lObfv3CHvaQ4P+hDEIjrtNts7fabzMfePLyjzlNHFGW0Dra0O2JrJcMxiMsPIxDdJV0uM8BRRrSTD4ZDzpxdMSr8+9DotokiRZ60gGCMxxZIP33uPxXzKzZsvsnd4zfeWSg+kWOeCCI8Ogj+Gsi59BdRZlK1IrU/elV6Pm2ZVXy/8fDKQEyAaX8Cwngb18NW62IjcgK+cfoJ65f8xq7O7zUH6qx9SCkxtGY7GDC4GXD+4ipYSqQU3ru9z9cousROkWnm5bGEC0OgbzYV1OFNTFSNuffgOu0evsHuQrcbYZ4uxrK/dNQ9lA5F1wfKmEQ3SKkJlMbfuvM/946ck+T5f+9qbvP7VFyiN5OcfDjH1nG9+/U3SrEMjf+0T4uCltrqMEGQgAiX0k9cnhMDZGow3sZeRorfVX79n59Z92eGUjTiHl9sVFKVhMZ/T7nSD0fN6MHiBsfW1NJVHh8OUSxbTE85P7vDk8ccsyjkv5Fdo9a4hxbovtyHRxpGi3cpJpjGFM6Aikv4WVnq1t0hLnLFEQqHDPFMeiWKdtG4g5U5gjK/Y+J6foEgo1nTH1U1/7uErMF6Nzq1ZCM6ghaCdpb6H6FcYs5vhsxMCpzVCJzgUkggpg5n5Kgh3l97spf+vTvbrgR6fSNg+52e/iaOZYypQ/Lu9Hl9643Xcqw6ZKFQ7osBLhaVOYUcLzMdPsB8f01+AinvMRYrZvk7vK39E9OIb0LuClTmIxNMfDUSlxIkCI50fh37T5NIYFmJjbGieFUiyrrHqWZOuK2dxwqKEQagKoQSidhT1kkVhmQ3GTM+PKUdn
zMuSl166QdbrkHVzkliTpzEoyXIyw5iKTien2+0TJ+2wX4fkXXo7Jhmqg34Y+3WoWQckn/YOv/g7XSUyzV3aMOdpEjW5ijuK0rJYFshIIYRjNBhhlwVtvUfWycnaiZ+mUqEx2HqMWc4pxjPOTy94+6M7/PjufT46fsqTx0+onj4lvRggiooyjnGuRlKDtOBqZLAZ0pFGRTJ87/c6P8f9GiGkXK1RK8ZAiFMdNvgQBzDbEYB7H6cY4/1iTR1Wd+nHqWkKVb9knvxWJG1weTCs0KTw1fSrefNeT8uRDbXAuVCxESuAbJ3Zr8v1m1TIprLQfK6SGunAWD95ZXghxvi+FalivKyxxdYFQngkPZZeDj+KIgoWHvkVwqs06RppAsgoG/l6iJVCOTBVRRRFdHe3SPoZlYRkXqJ07L3XbBEASBE40b6/zzPLfHJS1T5rF+BLtyGQJGzsQkgwglTntKMWZ6cn5O2UN//k99l/4SvcffyYn/6n/8iNXovo2g0+eHDM8sOPGI7vMpuOeHp6ztXDm3zl8FV+9rP/zE9/+jYPH16wlXU47Y1Iu20qrTmfTZg8OkVZcEpC5L30lNRBPKJ5rw6lvCqflr6y0agZqpB4evNG27D8g3qkV2l0wqsa+kpK+AposbUOW3uj7kZuvvFYioI/1WI+5/7dOzx5/ITe1g5Xjq5xeHREf2uHdqtNlra8caqO/WcZSaebkWd9trbGnJ8+ZTIacnZ+xtnggrv37rK7t8fh4RFb2ztcnD/l3u0POT8542i7x9W9LWZ1yWJeEscJ7X6fSEfUtaUU3ptOKW+KLIA6DHwrJN3+Ft1en/lsyunJCSdnQ2azgn6vR3+7R6eXs7Wbk7XbdLo9Ls4uWMwWVFVNnubs7uzQbXdI8xih/BwxxqstrpU5ZQiYoKqqlcJXMy+E8JYZVfP7ai3oE9hrgcXhJ6p1jtpYamNXksGmsRxgYy6HhcmGnkvCOZROMMZ5A9TUATIoRWlu377DWz/8IX/1l/+KuvZ9nUJ5up7SFUJrhNI4Z/ziLwhUTE8bXHtAhi/pcUMpguFlYxDu+dX+eoLSa0PfDPHVKsmta4OUiuF4TK+TYRKFotmXRTDLdCAUVSWZzgre+8UthIioSkPyckLc7aIlXLt6xGtfeZXv/ej7PD09wZqE23fu0ckiWu0eIEjznG6vw2j0iJPBgGLWIt7aYzwa8/jxfZaxwKLIdIIWitl8zsXFKbiKWEVYA0VpODsbM8dvjJGKcGik8qqZWMt8POL4wQOm0wl723v0tkusSlYbijEWTO3XJ6mpa8NkOsPZGuVieu00JPkiVNtZ5yGfsx+tVgrr/1RbH7QIqRrI1cNqDm/lItSqiGOtQ8mG9r75gc1XoNr9SkGGWCWMsZZcPdzn6v4+URi7xkCiFZFSaEA5T3HDGVa0nmYPkyAjQSvX5Hn6CfS1mWvPfvynPRcbRFyGoxFKSHq9Hq6GSEpKKygWlq29Pq/97pd4+eWb5C1NbB1/+S9v8OT4nNe/fBUlK1ZVNCeCH+YGohJa/o11mPX0XM/z87C2XAAAIABJREFUsJ+OBxcsZgN2Dw+RMghMKAHWehXmJuxb5YPhGQVRrvHwnMlkRt5qr29yI1H71LKMhUe37/GjH/w9D++/TxxXHF7dZ74sME4E+4XLw0wKQZ7G5GnEshZkrR6IyM9/QegxtuQ6RtUCjSKyoBqvlGf6EJ1rgkxPKVZKhX0qGOn+knJSA6qun4g3+I6SyO/rdYWU0Gm3PN3yMx/GM+e1hHVMUKOwMqHV2aPf36LV6oNs/KyaEDyIyzSR8irY2vzELyDg85vPx36lYxMc8XQ2SdzJcF2wlNRihkOjXIKoc1huszyX6BqErim6RyQv/iHRjd+Dg9+BuEUlBUIYpKu8vQMxiYZIx9hAgxZu/ZRZQU/eCsI/rAgRRDSaSrNqPOWaMSYEWkpqV2PqElPPmAwGzErHuIqorCZTEbv7e+x86QZCR0yWJY+On/Dw/gNOTo853N3jpevPs93dYu+gzaagnZTKU0IF68QjXK7D4r0mBatdLSScYvWLG8/5C76XZtj57Ti42Bu/NhTLmpOzAUnSIm/lvhhgS/JWRvvqnm/LsD4+ny1LxqMFZ9M5J8MBH/3kewwe3mZy/IjZ4IynF2ecL5ZMjKMSCmVhUi/QCFyBVxeXCggq5VoSxxFpEpNEEUp6X7xFWXvVeOf7ZIWQq52lri117cVIaltT1eUKGG8qhqv2LBfskayX+1crw22HVhGRjJGR4vjJk898dr81Sdvlsn8Y8E20JzZ6LLiMLMFataihSDaVl2f5zFLKVQ9Yk8BJ4UU7sHivGghBJCihsMYiLJ4LjkVLkFbibI2ra7RSnkZmHUVRkNa19whTEii9XwQWRfgs6506YqVJ8owkz0DpsKH5jF7ryDehGt/g3ASaUgucWSOwxhpMSFSaEqtbLbdgi5JyOefRrXtE0nLlYJ9E1Oikw6ktqfKMK1ev0SkLxiaiVgl3Hj/i3Xd/xC9+9hF1tSRPf0g330WIGYPBjOVozoW5gCSCNPL+KxbqqiKRihoQWiAK58U2N7gdm4mz1hobEogGCWvUdaSSyOBTJ4T0an7hiwaJaJ6J9EIlMvQmKSVxmgbwhaBS6PDNosZY6nLJ+clTRudnPHpwh4PDI/b29+lv9+j1tknSDknWRsqEOgiMtLsJWdZmNhsyHg0ZDgeMhl6E5MmTJ2z1t1CAwhAJR4TlaKdPHTmeXJwyHo+Yjkq2t3fp9zoUpWAxX+Kcr8BWtcE6jQgqorX1qodJ3uHwKKHbm3N+dsbDRycMxhMOruyys7PN1s4u29u7tLIOp8cnuNqQZxmtPEertdVoQ0eT0gu+1PWaN73qcROsLQBESLJDJa6Zo+uX+czkXU1M/7waqnMTVTcKUMIFG466xgoRegA9kKKjmNp6f6XcefqWKWqu7OzRyzvs7ex4awXhPcOM9SbeQvmeFOPWwQbCd6msEWzfO2ewq/kCoXIb1g3dCKOEjUkFlVKfsPlzWROot8bX5pUQpHFCrHS4Hh88KSWpjFf38sIbmu3tA7K8xy8++AitIl64+YIHKawhT1O+9MrLzKo53//JO2jVQaWCB3cSXnwxg1bOfLHg0ZMH/PE3v0b7fMp/+Me36Hf7PL9/hSVzxsqyXBrEdk2CTxoX8xmRsNhaslw4bB3RyrdQQlPWFcYoRqM55dJ7+GSRJEJyuLvHst0hi1NiFbG0BKN2Vgl3Vdfey8cKqjpEGkFAqVmL/Nz2OdfnbeyXhlZYM6zx/XDzsuJiMCSJI1564TqREBRlye3bd9k/PKLdbqM3VOl8MrQpmd6c/Qso14UEoNlHhGjEHjz4JvHUSbnyZ3NAHRJbie+9AOEsypXs72+RtltsUjh/tYfhr9vHS/7ftFs5cgWVOEIuRJrEGEriVkWnq8FZtKt4bi9iN9snlRLnvAKqC7KuHruwrPZc1+wteOR3Nen9zrKYzaiWc4Sz7Ozu+n4wKXzLiWuqTwE1F6FXPNyQbJ6RdbTyhF5/K9Ds18HiOnELCElTmXACZwR
PHp3xi59/zPDimOeu73JxMufhg1Ou3nREWq4qBIL1eZT0yXcryZBRghPKJ3oxJNLDf7HSRAhacYIKAOsn34sPuIRYM1uEDG0ZwisgX35vnzKsCEhX8zpFEFOoKsCSJDGtPKPGryFN2rQKcD9jyAjlViNdpTkHV27QynOyLCdptTyoGRLRFXThSwDe0iSOPQVi9Q5+8yqP/62OTdC+2dssDkGJdoV/HjZFmAjiA9TWC1ycv8vOczdJX/8XyCvfwiVHINrgPCZNk/QKi5MW19DynQprRvO+VqE9UAHVxuhUbKqyNvQ5gZ+fZel7rSazAZlY0o6sFyLJO3Q6+xiZkkhBMRrw8P59Tk4u+NkHH/Ojt9/ibDJge2eLP/z93+fa3lX0TkaS5iDWBu9CbABHG9DKpXEWCiLr+fTFs/PPYhRY57CVQwfQulgULJcVadYmz1pEcUScCLKkjZQO4ypmixHlck6xWPD08Qk//vkHvHPnIR89OWUwW1BfTHCTEW55TmRmKCqsMz6xkpIqinBxBEqRW0WsY6I4Q0W+hUEpTaQ1cRyYODiqusROp35dNMbHmMig4m5YLkuKovSxkzCrYsGaItnYjfmh4NXtfT9clESrGEsqhY5jlNZ89O4vPvN5/lclbUKIu8AETzavnXPfEEJsA/8euAHcBf5n59zgl5+M1Q02NMkVQhsqbasXb1fpK0IE1BVPT5B4qK0p9W5W1TaRzeZc1nrKYxOqCUJwEs4phUAqP80M3l2eUCZVIWGIo4j/n703eZIsu878fnd4g48x51yVmTWgqlBAASBBNqRuqUm1mYaFxI1EM620kFn/C+q1Vv0vqHfaaCGTqdUtazMZ1RJBGkgjCQIoVKGGrKzMyiky5tHD3d9w7z1a3PvcI6oKBNkwtdpIOVCWGZHhL56/O53zne98nwImkwlYw1CvpKndcdQjndOYmFzkNmPYH9AfjaAsCDjA4EOIlScSTdCSyrYpIEp9d11SKennffef96C6Kogi04YyGDIHk/0jXrn9KlvXNjl3ij/5+fu0jee9a5usTStkvMU1YyhGirfl73Pnxjs8e/Y5Ozsv2T85RGqDc/Ggcg4aHwiNQ1eOrI4N7SbTVKGNht8pSP9l7NwvN2h2tMhuDuhEXxFiVUR3VDYVzVB9KjV3VQ2TJNclLdDOV8+3Aa1sosolw0Wt0Urw4jk52uf87IQXzx+xeX2Nreu3WF29yer6Tfr9NYqih7VF9OIIhqJcYTMvo4fa+Rl7+/uc7+7imgqjNE09Z3NjhWvrq+BblBU2NkeUpebs9JzTs32EhuFwTG5LLi5miDhC08QKFVG5TQg4EZQ22KJgZDL6/SHz6Zzj00O2d3Y4ODpia32dV27fZmNjneBbmtmczFgyo1B4gksUxrQWQoqOFKT+tOXXneqjTo223nt86uXq6D9CojIn+kIcgEjTjWhhxOU664nLVAetIn3XuRQkpJNt0eeiLUFlKANl2ceiUS4wKEouZhVrK6v44MnLItIqdTwoEUlzxhJkjpfkUxaWIifdkUSiIEiIYgJdAueTZK/q9haiAqTROn4vrbWmaenR4l3ktltrUS5W3iN1u6sa2yi97Nq0Z1iG403+/n/wu/zv//J/4/NHj/nBb3+fk5MT+pklt5bReMR7773H8WTO55+9RBvhKYHc9rn21goSAtPZFNGBb777Jp9uH/D222/x3rUbmB6c2cD+wQn13gTlhUrNuX7tGsM8Y/+VFlPeYLB2g2GxylQUR0f7hHaO94rWQRBHqDwyr3n9tTcB4aKquZjOaLHMm4bpdBopuN7TuhbvfKTVumgY6xfiFomZQNSJUepy1SVt+FdeS7JQ6EAza2Iv0vkFz7e3scbw2t07eKW4mE754IMP+c28T1H0k23BVWVJodvbwyUUp/v1qYy0SBDSPV0KZES65J9YgZUYaC/i2ksiagohJKrhItlDQAeCOGaTM/KRS89iMRuXe+ClW7taV1n+bMSgArYoFiACSiMesjxQzWfMJzW+mdOGU3LTwyoFbs64PyTRSGLwoBSk6nL8HfrK74sOHjExJSHxbVWzv7vL6njEyvo60KaKtYBWC1QZuDLWXVgY9xePtpbeaABkX1XGlzg2KnEt4gPvrqQQZymyMYP+FuPxLebzGefnTQQAJRVkuXTTEoVygm+wWY+qrnn64S+YB8drb7/LqBcRfkM0XF4v85jDall8gC6fRKV4oTufwtJ6qKOXLd+xrLFcHslur1NEOnoHKGkNnTBMUeTo0AFtvzpQ7hLeIFHFcXPrxmJKK0jtCpHR4iX2y4uPexqho44n5PrfMDD/d/XVgfXdSymFlghOaPpED7QujxZkYPFb18ny36F4dQW19jrYa2AMDXE8tUhK3LL0uOKzM12V7SvPUBN72fK0PmSxiYjyiDgktHE/bVu8dzRVTV01CAW9fMjacIAyDaYcMm+EBw8f89mzXZ4/e8beFw84fvaI6aSmDpYGz9vf/ja//1//V3zr7bcYFAVWZ8uEAS6Nd3d/S7A/TtAYdy5FoRJwewUEW7yDL8+ZK0YpXSxPnHfexWr/rA5MpxeMhgP6vQyUYTgsMDYjyzNCCBwenXF4NuNkPuPZ80c8fPA++ztfcH64y9nRMadnF8y8Yi6GJkDe1lhpURpqA8oXGMkoyz5DY6IncqnQ1jPoZWRZibYlSmVoUyzXvPYEaRHfEvBYm/pd0VE0SGLSLeJwbUtd1zEmyVT0wlM6xQ9msUdHi6IYP0GG0mDyJL6oYsKW5SX/Nsy1f1dEDi99/U+A/0tE/qlS6p+kr/+7v+oCqQMkmVurhWx3nDQRxQtBYrCIAdFdRTkiuIk6Exff8u9iFbF61QmbxEzZpmZCL8lRRy03VK3UYoJCvA0lauHDEFJEEc2hFXmRY3sF5Jbp2QQ1m1H0hthMY5WKquEebICgDLXWWFswsL2oxKairH0QRW40JjiM9QTjcSEiubnXSTuo2/AV1hhyG4hAZ2wtjkGCwStFKyDaEDLDnW/c5M5r1ygKwVRCnuX0PZz4GrOyRlEabo48J0HT6JvcrAve+/73aPTv8JcPHvCv//Uf4U4q2rbi4uKEdjIhtxlONFYbciM0dYVGyHCxz8zmuEXyuxxlScm2tiY2XHZSoF2iqi2t95HOIRBSEIyKyJZOwZ8ToRXBK8GpQKsEn5IHcS2qrTESyPKcSseKKSFgVKQFdQeqAKFpadqK6cU5x/unrKwecP3mCRtbN1gZr5EXOVlmMFahtBCUQeV9+kPFhleccIDJchSKufccHOzwB3/4Q97/6GNkWOJ7+SJphxj8D4YDyqIfN3FRsV/RuXiPBoQQ6V5pDlmt0SHS+UYDzWTuOT3e5fx4j7Y659Xbr2CUw4cKJ4qWGhQUGoo2oJL/oQDBLRFh1+qEgQiODGV7eN9ilSWIR/kQpaSJ67IlMJvOGBV9Cq3xBKzWoAXnGpTW+CAgLfgWLYGgNR6FE0nMtxTUKJNM4UFbi3eevMzwLTHYVtAjGvWu9QcMR0MUHnF1rGhZg1YWozw6eJTL8AFEQ+3b5LHo8aGNXj
bW4EObArlIZ1BJ/VAjGCVYFQhKE2zBNM/QhcXgUD4qJGo0WabQtkEFyKzCeFK1tIcyDZkVyn5Ofdag2xZDQ/ANrWT01l7h1qtvUx09R7dzzo7OmeUlm5s3MWQMyiG//d57uPOKl3sv2RjkPH76BeM7t1krS1bWrvHDP/8ZxfgJG2XON+7dZG08hkwxKC23bt6indT4xvFi7wXXNq9jleL27Zp8ZUo1WGNS3qOqDX3nYPKMzVFO0dtCmdgDZ1c2eeONN8jznM8efMpkOsEkMROcj/uxBwmK0HpUq+iZAqMzcptBcIjPcU5RO09DjdewhHC+SlPsQK6IL8T9XQOl1dxcH/Hb3/4m0+kM7T1ZbjHa8O1vvcutm1v0egmxluXFBI13gbaq0cFHv8ge4HOUcSA1kKMkUdBipB53UbVMYxbs7kWQxSIzyXRiPqhUTQo5OiHtomtqqck0hNYzOz5l5WYKi5VK6qosvOhUOueEuL81qKRgKMkqISoaBqJQTkMUCtIBdBC0KE5mnvOjC3YePeOVu69hVzRWBVRWLJ9z2o9joL4M1r48HkFCUn6NH9e7wHQ2Y2V1jZW1VS5XwXyQRTIbz9J4/iY9GcTFB6qURVESa+mRLiYmR1CokIBRAaMDqqORpYRNFIiJ4iv9vGTeG6A3rtOvHf3hClala6QqqKjY4iBF9FUMPsqYI4anz5/jlfDmW9/CkzH3Aa0ca1nGqEhVgZDUj1XcUKKeRMAZkMxgxJOH1B+cWQRLoU0KfyNLQKdkPj5as+iaR0saa8hNTk8V5LkmUzm+9TS+xWQlRheg4j1frbIpLtM2O8pl15u4UBwMId6/UvHcpEmWQekMVBqdRUVNlUDqv0X5GsCVClv6zqVF3SVTAdGLjJ/85i3KO1tQWkT1QGWLuQ2denk3Bp1cWqRHg0KURbqkXQJKPEpFeqp0BQaJ7QNn51OevXiG947MKDbXxoxGQ0yoqSfn9Ne28D3Nw90dnn/+gN0XL9g/POXjz59wMplR1zXiKpRvaFsgA68Dq5tv8sr9b1EMyvhRJc4FxWUQZAGTXf1SpWejEtNJkt+qAolSeTGZQ6d/N/gQ13pAET99fB6iLLUoqnmMRZhWuMkRxXBErzeAQR8pMmqrCWTU02Oqg0Pm0wueP9/lL37yCx48fsnB2YTzsyNm8zPadkZbz6ED4kVhdEaJwqqYYGV5js0Liqwk0wWZzqKNiVUUpUEbjzYeER0fGhmB2HakdNz0vEu2VMFhpCWjR9CeNmtwBPKgyRsovVA5RZV5dL/E2DLGbSZbFCIiaJWec9Jy6NZ0BM89PkSD7l/V+/n/Bj3y94DfSX//H4Ef8iuSNqVAlCyUyeL60ZfyeUE6pouH4AUrHStbFhUC1UlDB2IvVdr3leq2UjAmZrodJczY+FBDooVZZRZ5Rtdg3NE7jFIom+GlRdro15IVlnJQYgcF7nzCbDanX87p93sgsZwtLqCDxPKsMSiTU6h8sZBDWvZllpEpjxKHp01qlTFYV4oY+KSKiVeRDpLbWM8yyfdFrCK0sRDfSENTzXn/s/c5cXfYXBlTtMK1zev8R99/j4N2hq5awvkE4w/piWaqxsxQNO2M3qDke+9+G+ctw6IgVxlffP6Qh59+wPHRGfOZJ3hoqorWxNK/1elAsBmQlCzVJQQ2jaju5L4vTVClOtRBL/rSun6W+KMq9SfFpD5KMyucEhyOJrioHBladAgUWpEXBXlRYoyNMuWJC+1DpK12KJCKzC7Oj06ZnEw42NulPxhy59VXWVldY219jeFwGM2zrcUHjQ+aQTnEjxryXs6snjP3jqOLC5r6BScnE7yxCa0WtA6pt6xFG5XUGWNARBBKpfBNhRcXqbAS+xusismbRacYQkFho9S/Dzz5sGDQi+p1rWujUIuNc0vbDKNiCd4kKpKOkoRJ4jg+b2MtWepbqto5NrOIdxTGUJqMzY1r9McrTKqGR8+e0dcZb9y9x+bWKjaTGIjoqPQqeBCPxmFUoAmB2nta51M12YNy5Mam568IWiFGkNbRyb3rIqMwA0yRx7WWZ1itUSEaVDeuJYSMYT/n5ERhJK4FpaJUf3Wpgo5EE+mgDXkWRWHaTrbe+4jGBxcTjmBptWVuTQrQUg8XJAXDgMKD97TNHAkFWinaUCIyoSw0/WGfC9eiXaC0ASWOVue0us/m9bucN+cMrGEym/J0/wjbW2PUH5Dbgju3bvHv/+B7/PCPJ7zc2ebO3YxPPvgp65ub/Kf/6D/jT/70z3ix/ZzbmxsUqkZywRYZIUQ13WxccHpaMRoN6OdDxqvrKDNmeLTDp2fC9umM86ZH5gOFO8OGdbTS2CwmuS5fx26+wmDQozg85bR2aGko8wzp95j0erhaURZ9xoOaPC/o91pC8BS5pcgz+v0BeT5A22pJGVnQ8C7t/csdfgk8oZYHXfqp6+ur+PGQzGiURJnrV1+5Tb9fpJ8I6SoGv8i6LMaUSDulbs7RNmBMhgSP0g1RDKC7icCigyclK0qW9SdJNDZJUtxRnLADpRLoGAyEqFwc8hbRGsFhaBkaQbU1Ku8cEGNl0IfEFAlCaD0XZ+c8e77HXOc0csrqas7ZyZzD/YC2fWo1ZS5nzILQtCPm5y1Fovw++vwRmYLdL55ydNqyvmJBOboUVLpEYjEEX64ILNDLZQ9r+ojW6rj/abPoB+ueVae4uiDzpRhWI4Qm0M5arLXY0iCqTMFri8LjU2XN4lKCoQlykfpel/1uouK+N14ZsDLqMzE1rI7puYyVtQ2MUtiQ6H0kOpwG0ZEar3VGWfSwWYHNc1wzR4vCSwQ5wZG1DtsTMAUxIE99SCgICsHTEnAqoIOjQINWtEqRKYvVi04xlDKRDrpI8pe0NJGYsQWETFkyon1NLyspix55MUg9pjmKxPX/8jhd/mt3qHZIgIn976KjiIWEeCYGCYBB2RySgFcEQ7963b8NuZu6Eluk5D99LUoW1u2kREanZK4Yjxbv62ydlboaLF+NqztacQSbRCl8EvuxJNReIpsq1f3R4plMLjg4OsbmfVYGYwZlhg6Oo91D/vSP/m9+/Gc/4kIrwuqY+WzO5OiEZjJDmhRn0X0mwalE11Y1ToHOSkxWRMAChSiPwoFYlLLpfR0dVlKcfPkzJXEvCahkwC0qCryhDIGYAIYQY5PY1hE/nBJwdYvCIaXhZFZzfDZjazjg2kgjRY6SmtYJp+cVTx5PeL6/y/HOF+x+9pccP/2U8+MDzi8aTqce7w3OC613tCHgUnXb2owsK+jnOb28JLMZNjORVpll0ZM1z9FK4UPc35Ui0ZoNEirwktaFS/3n0TIjgroJTBNBQgt+hM4yyAKiPbqBwlqUKrgQQzkuGa4NycnQOuoz6ISgRMZf0lvwPilVL5M272P/u3iH+wr94Orr103aBPgDFTkW/4OI/DPguoh0XXS7wPWve6NS6h8D/xhiMKZimr7sr7lyeC+59cYuVRIj/cYvGnC1il1pkqoyMYdb9lFdufEv82w7vvNl/nP6ucWP6CiMEBO8+L08zxgMh4zHYy5OL6imFZPJO
RBQJlLLJAQk6GQOHheZ9yHKBweL6NgwnZksSlr7OIH0JTVN0SrKjLK8T23MQo5ddXTQxXYbMJnn/PyETz7Y5id//gHvvvUmr9xZZ/98n+tvvMKrmxt8+JMHOJWxsTpmZe6YN54dWp7NGsr9XW6u3uTe5jWy61tcH6/xrddf4+NXNnn+fAebD/nk04d8+tkDtLa0bROrbBJR7g68u/yMv/4VP1uSr0uspQ4R7gQhdKxuKp1YdeqrZ1n3pXS9WwW9ssT1G1wTqy1R7lpiH16IgidIVEl00lX1iDL6bcvFbEa/32dza4ubt26xubGBKXpUrcNVFb3MktsorKJEcHWDeMf61grfeuNt1osezcUF3rcoHQNLYzVeXDpBYjDVNa2ihDa0BBxGp0b5lGx2tA7nA5Vy+IEj5YM41yJBcAL4gHeO1ntmumCqQbQQfEXTtEltNc7HtnWRcmUNZfBkoSXoSB1t6xpX12RYbl6/RTkcQ56ze3SMm1UcvtzhjW+8xltv36fs5TQuiut0qq3dMHYmk0p31ICYKHnvo8l1ok0t11mXaAnaJpndNKbBL5MymxlmTYMh0Cst1fwMFRq0SOqJ1IRW4ZoQhSpksffQIaSdgmnrWpo28tJDMMv76O6dpNjqfHqPR7xLKFmiNau4Kds8Iy9iAhxSYC+JFlEWPfqDIVVe0PrAydkFXzzbhnyFe7fucOP6Gp6G9du3eOe73+VHP/oTDo+O8D6QGcO9u/e4/Xu/x9HBIRdnp/SLHjp9tiAKLUIIDlfPWRsPMSpg+yW3t24wnmwyfXrMg/P5IskJQXASYmUz+eqhFaJjD6nJbeyX8UKWW1xukHSweXxSvPVcTGf44BB6ZJnFSaRGZpnF2svUuy9vCpfW7SIClSvfgUg/jfOgOwcs/eGAjunXRWMd4zEASkPeyyDvYX2LVjMkRBaAkCo8KjaEq045WBKQplItY7n9pzvr7l8W8Y4EF6kU1kX6UdDUlASlsXKG7jVk9gI3OaDojYlN76prF4mAnGs42nvJZ598xCcf/RxtWlbWetQb6/ziw4fUdc7W9VfYOz5i2kxpRDGvFVFUA7Ttk6EoS8Xjh5/ynckFihsRDRedhA2uPNqvHYfuNej1GfZrrLnEajHmEm39UsIaEqVf6WTDoEGDd57jw0OKPCfvjxcsrJgjxwjP6PQcVQ1S4VzN/Ow5w5Xr2GJwZYLEM0+RZYa8sBRlwc3RFndu30q2PyxobnHAhOAanPilx5K12CzD+OZKkbFpaqpqzniUL5PENOqXR7xj24TgFy0bIQWEWnUiOF3q1l3/0lzuPj8RFUhWkYgSbFbQOmiDZdRfRalseYO/6qW666S4JUS/txCimJpJok1KGVDLgP//C0XHfxdeC8yBq3HEYqQWZ0X3jkvj+bVXSoBTeoNW3byRKM4jsRQVlVdBKU+/n7G2NuDk9Jz3f/Zjvnj0OXs7L6gvztl9/gVtNWVuNafbgChyNPklS6yQhK6CIsWGAeU9VlusOcWoCkWRqsQRuFHxUgRZ6D3RIVfq8jNIi0mJgHMkbW/wNvrkCigjqTjpCKHi/GQOVY/1YR83r9BFwPZKxsMC27OUOnB4+pzz7c84Pz7mixfH/PjBcx68OOTg+Iz6/Bh9cQQXpxCEuTPMvUVTY21AJaG6MivJeyVFUZBlOWWWUxYxaYOlKNDluL+rZnXxvTapbUQrgheCb1PRICoGO+/SnhL71yTrEYwlyw1ianJrKEWhQsCEkl6Wg3GR+WNMqupFE3AJIQkWRa/a2Muv05kbYxCX4qEk1/RXvn7dpO0fiMi2Uuoa8H8qpT69/I+YWjUmAAAgAElEQVQiIimh+8orJXj/DKDX7wlCVFJRNjVJR7pKpMqoRf+SSZtvt1ZSt0xcZypufp2ClwRJ/FPpfueVJK7LcDs5WKVi9SZurjEp6oK6pUplTKasMbhUZu6XJVvrG7h5w2E4Yl7NaJqKLDc03pN7i2i7SGKW5Lx00JHUY9CxsV1iEud9QCuzqEp5wgKBjlz6pa0Bsgx3OhA4hEDTOoIPVJMpk7MLLm6M+eDnP8X9+Ee8ef8tXjw/ZG3zOu++eYeetYx6CtvPOW3njCYzirJiKx/w9OSC3WfbvHNtnXt3bnLv7qsMV7a4dvsWvY0hk+kpL7efcbS/T6iiyWBHhVwgn1xCvC73F8LCe60LGhWx4hqD5ojhKol8cqOWlNZO0Q86MRNL4+qY9GpNr99DWkebNbRVDCraVtN6h3KONvhF1c0HofVcWvSGtm2p53Mm5+fs7+6ysbHJ2uY18l4P7T2h38NaRU6WbAMEfKDILGsrY5RA21oEjY56GejcpOpOQOkod+3aFhFH2cvRydTPmphYKYmtjSqA1VksapH687qgIsJCeElGrcTES+mA93WcQ0JSJ0vmkCEmdyhFluWg+3hVUgcfN8U848P3P+D08JTf/M53ufXKXc7rhk8+/5xeXnDn5g1W1oaLa+dZfikO6mZiEshJ1fCOFmCUjoqVwWN1gQQIzi3EQLrNTJJojdEm+cjZ6M1oFG3tKe2Qk/YlP/mLH/Lo00d84413sKYmNwV16KODjeqw1iYxIx3V8pKaZrQEWCo9LZRMlVn8Tro9QyWkLqFnC8U4Dz4EtIobrg9RQTMQKditD6AtolqMTh53WmOyknkrPH72kkmt6RcDVjfGiBUaY3jz3Xc5nVzw85/+jMJmnB8fM19dY9gfslqU3Lz/Gtoaghdc0y4kx89OTpC2wZQFWgkmVUCHq6usXyi0eopQA5GGHLTB4dGiEQ1taGmSpHTrG7w4xDcIDpc4/tE0NqrfeSRVUhsyZzF1hVeQ1xUKA6rbO/864gZfn9R96fCAtIf7NMuMWu5+ybs9ATItVnsMHpgS/AWhWUNnm7G/U1VxT1/UiRKoQKL6XQrsL6eTHf0pBt8BUQ0tc6yqEDXE+biORab446dMD56SyV2KcWQhiAHxKoIpeEzekpcTvHvK+mCPjbzl1tYteoMGe6cgL1fwoUKOTxhfW6dpPTu7O1QCre1z0c45PptxvP0E/Iy6rqJ0PRqC+hofrr/6Vdd1BBUXz7ujh339mEXaeaJJBhf77RC0Fcphjs7jmusO6mZeIc2UYpyjFMzP9jg+eMSL4x1o9nn3vX/IsLh96Z4FJNKZy9IytD3W1ke8/cabrKxuXfqxpQ1J97WkTKnzEs2spU37AbBQpM6yzoeJBYDGlUt1wjpxv5MEJAmkPUlfSQS//klBtw6UCthMYXKFygwmL+gN1rh+Y8xgsI6xxddf5OtewSNaEVpH0zaJnqWwNkN3SkBXErW/68naX/f1y5K1r3ulHlMCXjpZOw8hAqoShNpDVc85O3zB/t4eP/v5L/jwo0/Y2dunrirm0wsGRY7yDUYJTdPGGCGdN8FH4ZjOZ9WRKvdBkeOgjWdMVT3CuX2QmzG+FI1SUaE0SMu8VUwuWuqmBgJ5kdHrFbGi2InCoYjm81US0eihlcE1PgHgLd4d8ezJT8myGRtbNwh+HbJNstxTVTWPPv2Uj54d8+Jgys72Li8ffsrR80e08wOq
dsLEZTSqh2sV1HNKFynwoHE6J+sPKXPPsNT0Bn3yssTmOTbLYx8pOmkUJD2Ly8OmJAKJIssYhEjrDJLiI6UIEggCVglRSKYrDgRCaGhQNKaHLvtkWSDLCmxmGGaGXMZMvKU5UzRygcKhk6+01lEMSgCll/HPIh+5VJDxCG3wV70ef8nr10raRGQ7/bmvlPrnwG8De0qpmyKyo5S6Cez/da/XqXR9adddoFcRDE4IlbbJbiZN2hRwdQFkqu0ulCW76lQncX45ces2dZWoHp0RnvlStU2p2H+mlcLGKBMfIDMZo8EQt7UFojg5OqWaV7SuRVQ8JHKTY7JIzcpzG024bTR2NioFtkkC3WiNGL0wP5WUl3aIZ/e5An7RaBsWyaVe3q+UaFVirKYsPEUWqCRwcFGz+/AZ7YHjvJry6ZOHHL+8z8bGKnW/5GzaMs5y5vMaJ45CFGWtOJ5W7JzscWdcElRge+c52SDjG996i83rGzx8+IA//eEfs/PFC6zWiL80dosEbvk8Lw38ohohKkGGC/EAtUAnO/W2rkdcqWV3jARJb4nIsPepJ6qXo1xJazWNiop0xmpMC1oLOIHEXfahsyDQ8bDTASUKY2NVt60bqosZBwfHjFdWWV9dxQL9fhnnXRCsVmRWo0Wo51Ma1ZCtZgxGY4rCJqVtwROoG8fZ+YTpdAa5Zm2wwsrqWuzBJCZtzjn2d/cI3tHPB8waz8rKmH6/wCgSqOCi+IeCTkLaBYdIiD5ETYNI7JGLviGRC67TWAiS/Kd61E5xOp1hNOn39JhmU/r9Aaurq/QE8n4fJcLqeESea3xoo4iFi4h26JDFFNnETSsFwxIwxqKsjRXlkOg7aSwRWYAocgkp00lJraPjB6/QGIKbMSx7vPbqKzx++IDJ6Q7v3L8LjYo0RrE0TUSxgnP4dNgtENAEN3am4dHOoPPLUYtxRUiVwkThTftLt1VJQjwFhfOB1ntcshCRpB5V13NMPohjIkLrhdv3XmPt4Qt+8fFnvP7qfb7xzuuxZ8vkaBHefucdTo+OefnsBb0s5+mTJ9y98wpZlkWUVaKyrguxb6KZzZjOp+TWUjVz8p5FWRZUY6PAtxeIN0hoEGJPkmvneGJvYlA1IdRUTcu8mlBVUzQtEto4r9I8q9ua2rWIUdgipw2exgf6WiemVojoPh1T4qp8+uU9/ksbAl8Oq7o9+urZoNL/4zxLIXhkI6SQQyuQtsFXFxg9pQ0zRAoKq5IoT5sAhh6IXmgxLLBG1dVbLlUxYBkgJIAFcRg8hjOODvaYNzfZuKUweg6ZYuPaBn6sIL+IiZrkzBqJPZFWQTBQK9ykJpvPuTFwDJt9CHNujTS9YcYvPvqcd1+9wY0bN/joww8Zrmt2JnMOvTA9qdnZPmB+dkxwFbN5dfXR/nVz5vTkXdPEqvYlNHBhiivdteI4aaWiOB7ggmNyckJv0Kfs99jY2oiBbJdUK4VgaJuGMK/Iep5qPuXHf/zHPPnifaZqxmt3+gTXXL2hlIZrA0WRYZXGWsPqykqS2o99Qt06jb1jCp1FWfPO0gMEaw1ZZpPFSdz/BoM+w2Efpds4/tIl5kv6oIRA0zSLM/eyWrXRJqHp6We/Uqm58mHoKjPaKpSNQVtQhsFwncGoH+dDhxT8dTKMVBYOCcaweb6gt8YocJlQSgqY/s5W2RaVp1/1ugyDd++5uv/Ipb+rJO7Rsb4U4J1DXMtkMuHJ0xd89NkjXr58zs7TBxzs7zOrauo20LokcGMM86YhU9CmZIJUySWd2111N54t3fEUFj3o4gPHp485vdhmZeU6VkVjahegahom1TH7hxV7e2dMLs5p24ayzBmvDCiKjPF4xHDYoyxLctWymp+gQqCuS44OGz75+Am7ewesr/XZ2Cx4+PADxquat23G5Gyfn/7FDidHR+wcnPDh5y95sH3G4bmnmnvyEDCuAneGcxUtOdiAUZaegTJRHfN+D+kNkKJPqQM9A3lRYLM8VsdSxTuOTaQyhgQedeKDy4pkWHxvYQclASceIaBCBHO9j9oHXa4QcFGQxJSEYsxosMbGyPLO23d47d5drvXH9JshH//smH/1Rx9RzR5h7ASlprHIFJazY+Ef3c0WpS8llNGjzfsYj/yVysL8GkmbUmoAaBGZpL//x8B/D/xL4L8B/mn681/8ymulkHzRnNeJRcRYPj4AidurNl3f01J1TpnISV7WryKNogsUYYmSLRK19LW1Ngl8LKW/Q6qqqfClipDWi9KOEhPlnDUE5cmsZTweRwpU2WM2nVI3NQFFGTKKECkxXa9AVzpdJGUQKWLSPY/kOSSXeupspLmIdJMgpOpjDERZ0DtBicaQcfPabdbLgg0jrF3b5EVTM2lgdXyL126/weOdz3ix/4SPf9HSlp4X80Pqec7b1++zUQo31lYwbcNK/wbXv/UelRwwP9xldW2LW6vrhONTJqFm7fomN+bnDMaD2E+oiap7V8aZK+PxNZMqUeP0soJ0iSKazu0YjMXJh+hLVDyWybhOcu15niFFpJ1qIp/YWkVjY2VOuQbjFLrVCzqqQkUpa6WwOi6RSF/NMVrj2wYlQq/IMUpRVxVmahaTNbSO4BrEt5gcxuMRRZnHcpkGj8cqw1l1waeffsoXT59RFn1euf4q9+/eo2kbTs9OGI9GXL9xg6fPdrg4u+Dm5k0O9g+599o9hhvjuPn42M+4tblOXdVUTRMPa2Po9YbMPVRtAyKMBkOssbi6IvhArygoinzRu2lCw9gohs7x7OUO23u7eKWifIRRiI7jsba5jvIOixAk0oQkxMp29HmAziNNpPMwiTS6+WzGfDajPxqSWxOpHYlqVGSWKMAYN1vVBUZpnXbrEGJSbrTBaDAIt29s8YPvf4fPP3vMz37857z9+jcpM0NTNUiI+0NXScuMTp5e0PH5XeK2e+cS0ndpzoosE7cFbNIpx6kFioeKCm6S7A58EDwKk+coo2nmM/q9hjLTqSqnqFp44+1v8eTZAcfHp0jjF/fo2paN9Q1+43u/weTklL2DfZx4Wt+wtr5BbzQmL/tpr1MURc5kNsUjFKMe3ntqcbS+woYSEYvyDppzmqkjzE8J8zknJ8c0tk7zQMhWK05PtsisZm/vBdvbT1ChxSrBSvKWKQqUiclZ3bQ0zuMEWh8r1yZ4nGshy9K+Fjc6WYQ53b66/PviiX9dPPV1gLcCJYKWDuAJsbLnPM43ZLlFJUW3ULe8fPGI5/uPeO/7/yVF6hNeKhTamLRJEjySSD3uVFHjbSqW1LIU8Kd+PYNF+4bJzgMOnh5x/+3/hJwZvj2gnsx48fSAG99tsGaK0oYghv4ww4hCe2hOKx6//xnbHz9jQMPqzTHnB3uY/pTByg1c3ZChuXv3Gr49ZZhPuPP6G0x/8ZQHT054sX1C3Wp8UJydT7mYV7TE3m6tomFo54f0tXvvpbUFEbyI67hLThPwsiw1LmxBlO9WheJk/5jzszNeHa9GqmRmY3ClNVp5fFvRiiW3BlWWGJPh68AXD5/z/NlL+teGaCnw7dedDzFps5mGNs6dtnUo35A
tqlKJaZNiA7SKNKREZ1ZIUmaOAmXde8qyJMtIlfe8+8hXp2AIVFWFd36xN4nEhoTOJ7W73iLB/fKrC2YgXsNY0BrRhqIcLERCjF7GBou7vBSLfOmiqU9TYXOLzTMWGXrHwpG4d19eSL/c3P1v+0stxiB+udzTu8ezjDnk0p7VJWZdMhBBiAVbiLhfzOcN59M5+3s7PHn0OTvPn/LixXMeffGcw7MJiGDDLLKpur4wY3EejM5ofIi9knXaklKfdtBxDpqk9xAWIFacb7Gv1OJFczY94vB0n5s3BKWFZu44P6vYO9xm7+gJ+wdzTo6nzOcz5vMpzrf0+zkrKyNGowFFL4FtbopMvqCXZ5TFBnWT8fDhCx49eoYxiuGg5Pnzz5jPzphP/xdmk5ZmPqWpK6q2xemcWmWILSLwHRp6GTGWylfoFUPKvE+vsIx7Gb3MY6xHCo3LMmqtKLHkIY5QbBeJgoJeklDKpfpp7E3v6gA6xiGLn0vAdmJWidZR8VEliw8f8F6l4z4guiXPFTfu3OH2W/+A7919k1c2+ty/s8nKeA0tGjXJYb7HTz8ObB9c4KygcEQhpWipIkrh/eW2rwjZoHQ6uzWZDzG29/7LBf6vvH6dStt14J+nyW2B/0lE/g+l1I+B/1kp9d8CT4Hf/9WXUpfWkKTY1y/QrLBAw9MHV8v30TVkqvTvyUx3qZJ1tVetbdtlRa67SkcxhNSDEjfUuAhYbNAAAb8IQiEGDbEyZmJfUFkSxkKWZzQpmMka0E1C4lWgms9jcO/9gg4ZAK0tRVEk1ZpY3YmKUOrK4oxUDENmo7F3ZrOFb0zs8Yg9f4SAtQUbN+/w7buvsHn7Br3dbSbKYI6n/Mb3vsf6zoiTnzSE85JZUVNPttn54ilnnz5na3PE7u4L7gxvsrV+j5ujb8Jqi6krsumE3Z09to9O0YMek8mE/b19LmbTZbXza86D7llfQT46FCKVFFVXbUtVF32JvASRGig+2hyQRapqN57OO1DJQF06Lz5NZgy6KGirmtSHG3+P6ZLxNqaCEgUr8jyaDXdWBEZFJNVmGVnRYzgY0CtK8jwnECJakvqyQnCRlqaANiN3Q9Rcoa1GW01hY0BfBo2f5OhZn3F/i3425vmzXXZ2X4IS5vMpr732OtW04vR4wvpwi6ODE8Yr63zw+WecT87JjEEk8L3vfIdnT59yenZMUZYoY3jtjfvsbO8yn0YK2K1bNxn0+rzcfoE1mrfe/Ab379+L1V6jCcoTrGDygtHGGvsHRzSA0wpdFrQSohiOOEznCSjJ/yeNlfcuypE7h3dtCnqTMqgxTC+mPHr8OTdv32Jjc5OiVy5MqI2Oqms+JQZLE2sXqZRaL8ZNS6q8h5zMzumXmq2NLfTrhu3nz/jFhz/l9tYbvPn6ffLCoExUqlQLBCj2cXV9pi7J03cFnWzRa7ecp9baZCHAAiGzWYab1ngJselcm0jhzKMpfBBi3x6QWY3BYaWlzLOoAugVonPWN69hTYb2gSyYCC4EQdBcu7bFb/29v8cP/+iHHJ6ecHR+wtknHyEYev0BZVkyGkVfG0FYXVuJ1htGs56vxeepIThPPTljfrrHfFJRHe/RHr/k5fZLpBzhm5rMGsq1TarzY0ajPs+efMHLF8/xTY1BWFsZEXwaWx+o5jVV66HzmNHQuPhMz05OIauZnsUehRC6Hohun+9CnUv7+Ze+c2nj+Or3JIIWyfyPSEXRKC8cvNxnNB4yHsaKg9Ul62u3eL73PEoUAioExIASu+j7EicoG1ChwifD+RACypoYYGNTLaNIn8HhpcVXFfXxc+qdHQb1lKz5jNlxjZMJF6fn/PinT7lu73GPG4wGPayuKY2QiyILLXnesDES7mzmNKdQzTJWV69Trva4qOEv/uwDbDFCE+iNwZQtjx8/5MnTE16+nOJag9ElKnMMhyOM1ThiRV+kxdh84R/01ccoi3Ny0csNse8OEpBG2ttCqmTFXjbQBBeYT6qE1q+yubkVhZCUENr47IJElP/48AVel9wYD7HJpDZ4RZGPyLNV+uUm/d51lO6zPECW920uGag1jeOTTx6wMr7Bvft3Y4Sb6GnxrTF5UwR6RYZNx6g2Ghv0Jf/JaMlT1y1leem5sMhP0+Wi16exNjI0iLYh2iyrbMsp3QX4X57HQCeqpg3oDJsP0FmGMgXG5IQQPZ2imLZcyS++7qVUAq6v/BJIqhF0IcTi3/4u5mm/5PXlRyuXkv5YRIjMi6iPEC1yujMjSAKZRTG5mDA52eHo5Jifvv8R73/4MTsvt5mendBW81glU4aW2MJhVRZ7qQDBQIiqs20bEIkJnDIlIg6tfGTFhGjn5BMVVgA8lxgpFpVlOAnMmsCDx0/Z2DjEoDnenXKwf8be0UsOjp4ymzRU05a6rggShbhOjx17Owrnai4uzjk+OWI2u0BpxXe/811+9x++x2vv3GXzlW9w85XHPPjoEx59+hnHezXnRzPOLuZMHXivUCpHRGMyg7KClorcBMpeYNCz2CxHmxXKckBhMkprKTNNZjxiBG/S5+uELbvqV9PE2lpqbbJZrCJ246akY8FEQKXzne0YMl1LVAw1LSIq+rgFD04hIbareAJBOfpFwW/84Lf43n/4n/PNrTVGBExwIAZ8QKymLFcwakw/HzM3x0BOCA0htIl6HU+Mbm9YsOKQRMNMlXodaZv6VyzQf+OkTUQeA9/5mu8fAf/ob3StryydkPrWJCr9eci6RnSdNilNV4uJ717QlWLUJSSea6JaXR68S/e6SNJEcSXDlS5g03oh/hFCiBUHUr9c4il3jaYaMDpWabI8J4giKI/1AWUiepJlFufaiEL7kILEKFmtjCEvSrIiR/n4XR1idh68j6XyrvSeduGOwtUdtpFuFhHzIHNmkxl+2kPurzIY3eG7W9dZvbbK9mcP6G2UvHfte+Sbt/jo813KVfjm9DX+xcn/ysGLM04mMy4+eYx93fLGu99hvDJk4k558XKPjY3AytYtDic1VePYf/GSj37+AUd7+yiVDjO+fD4sk+PFd7rkukO/pFMtSjMhxDGV6O3AwrQ2eFSa/CF0gTSx4pOOWa01mbF4rXHE/owFrUUn37dUtdVKR7lYYnIRkzaF0Xbxs9pEWmuWFWSZjQpFJsOrEANmL8kEPKopaQ0NDa2ZxX830X6XJNtN3rJ5c4Wz2THHZ/so8WQ2QxrHvdfu8fTJYybHJxCEwli0BFTwhLpCzWpWTM7W1jWePXnKdPeE6f4Za6Mxvd6AJ8+eclyM2Nvew+Y5Sit2t3cZjUY8e/qcza0NTGYJxB4rHzw6YkRRVlxrvDYEYwnGEM1DuwAvljwv94p2TfoR/kvjKoHgXUKiDcYa+oM+EgK7OzvMqznrm+sMhkOUin19EAM5kxLmumkWIEsnla6NQUvi98sECZp+MWJzvcIqyHXOcXHMiyefYrTn7lt3sDbDq0iJi0WZTlUtLPaBDoghVeS6/9nUWNx1bvvkpfjleS0pIIz9cwZ0VDJ0LladCqsYFpYm12jx8eBSmtHKOkXZI1pmKFSI6/3ocJ
/heESeZbz+5hvsHh3wlz/9CVme47UmeGEymzKt5hwcHNC6FmsNRa9AW4OXwMrqCndefZVvv/c9ttauMTnY4+Xnv+Bw1tCenaGrC4LRuPwYWo9F4Xa2+eLjn1MUGW3b4pxLFeU558c9QGiblqaqIp1cF2AzlNI4qzk7nkb7hLxETMHmxg2ODg4Z3rm1ALkjZe3rqhF/s4jSGA2+RdpoWltPz/EefOsJXmLFxgmokuHmPX7rB6tMJhWTk116o2GkcYsmuLjfn59OWV21uPqU548/Z3pxzqv37zPaXKf1CjE5mR2giAeyqGmsJLZzwmyP9fEWL/cPePrxH1D1r3F0ccDN1Ws8fnLEv/roD+ltPcQqYZhr3rizyQ/ee52sPWFzJMxOHvD6XXgpni+eHPLud++QFTm0sL5xi6qFp9vbbNxQnNUzjs8MT3dPaGVIbzCidoJWNevjTQZlRnQP9SijklJeIo2qq33e3Z+Xv98Gj2idzrv0ulSZ0Sq2CiCKunFcTGb0+yV5fkliXOJ6aFrH8ek5x8e7rI4tKysrWAuEFlTA5jnD0Tqra7dYv/YKm9duUxTrX5oLYbFum8YRvGI+q5m1NYOBdHHaMl9Jh4jWitxaCmOSumMEDiLdOi527x1tKwuEfjENE5VqARZqzWAwoMhz5lXsbVuII6Gu+LT9klb+GLNoUE4RgmG0co2VrZuMNtYZjdYQkjy/WgpDXGH7/NLK2OX08v/Pyv6q14Jq/RWQKMkz+cigMN2z1iysOSDu594HdvZ2+fTBZ7zY3ubpk8fsPXvE8ekJ57OGxsc1YsQjzqU+ZoMmng0+scokdJUdHy1UOnoey2KgToGUpHsXlc4vWAjQ6dS2EwFr4eTI8+EHj6inf4jBMDmumE5qptWceX2GnzeEtmVezZheTJjNp1TVnNbVeB/BVms1RWbJQs3O57/gT5my9+YbbGxucu3VNVau/XvcuPcGH77/mEcPt7nYfU6YPMHVDVpFplcIAd3E+yx7fVZ7I0bDHhQQtEOpC5RE5kHVGmaVQhlLMDmxE1nAOryOdGkly/0rSMCJRnRSXlQKkxS2LxcHuvXT+c5KPIAIwSHS4mkxCLhO2yKLSZv15EPL6rU1eqPeQp84luuSroaGoA3G9shtjjeWQBbjPFmKn4gEOl/GEASfWofEx/XtXcC5SH81/xZ82n7tVwzKoudFzI4DWttomKwuGyezyFBj42FE1iChUkgUcUj4mkBSg7vKcb0iBQuLBXJly1MJYewG38dKmVJ64bXRgfadeIFLFbFOdW5RIdM6LjyjI9IcfBQ5yDJQKvYgCbQ+4ELktnofUF7SJI0HhlWReiWp90Z3/kDpEAKW5WCirOx0MuW8mrF7MiPbO0EPPIVAb1TAyHJ76y7jzVeZ9L4AP6FfDfn89fu8Xz3i/KzFqJJH28/h5z/im7lnOr/gox//BGsLvvnt3yTvDZDgaEKLrxpcVeG9RH+KpHa4FIC5OuaXvkIrg1UGI11zaSR4LNBQOtAlLOilnYpo9MzTCxQ2Vikl+oXFq8R+HqNQeURinHd0IuOd7o0ORFTIRMN0FuNpFrTW+J+KVhGJQomOdESts6ReJ3jf0oZo+uq8x4qNQg8JXZHgUa0wzPuMeyPcRcNIWdb7Q16cnXD67DmrxnJrbZWLs3PmUjMSzzA4+hIolWc4HHJzdcw+gZ5SjLMMP6toWk9fZazkAzLn0apifXWV0XiIsYq+he+8/SZ3b13DqBaNSqIlmoDBK0NQFlTsuYy0KkACWhRGkudIAjwWifYVepXH+Za2bRK3WxbVsl6vhyiYXlxQ1XM2NtZZWV0jy4vY8BwkKUsasizDL8y9dfK3SX0bBsRrlBKsySiLAeOhIpOC0mQM+wXPn23z8w9/ym+t/IDBeGURzJnFYRcTM5G47haqlZdpLyJ8//vf585rrzOdHiAiFEXOzM/oKkbKROsQHyQJkBDnhLWIUrimwrcVrr5A+QZxDQohiCLLi7jfaRP7LFVAxLO3t0vV1mxuXcMWBe+8/Q77h4d88smDtC9aMpsxKHrR17FtKcsyGqOLEAy4KnCwe8jP6r/g3q37uIs5PRryMCW3QlBDMJEAACAASURBVN7r44wgvZxcWXTjqXxNCPHIzG1MVCRRPZt5jXMuqo+1ERV1rsG3PgrxeINrWjDg3Rxs4OMPP+BPf/TnvPL7/wUhaKz95RS9v0nAGUVfWowSdJHRTKZMJudkxZCV9Q3WNlZS8tBCr4+bT9nbeckXTx9w//VXqZsRKyu3KHo9MIrKeQ4OX3C4e0buzqnOzzja3+HiZJd3f/v7eGspRqtgLRJdfQj1hNDOscqxvfeEm6PbrN5/nY8//xEnF4bt3efUaxdcv9bn8GzEy4Nd1ocZbjbls72f8GrxHU5ePiKXip6Fd958nX5/hTALHB6d06qcvFzl+uYKpxc1vbLg+OSE43PHk51zpk4wRcZFPUMCFFJh2hrra4w4tEqo8CXFwK7P+8qz/H/Ye7MnSa7szO93F3ePPfet9kJhB7qBZjfZJEcSZySzMZnMpPlH9SCZ5mEeNKJJQ7LJJhvN3rAVaq/Kyso9MlZ3v4sezvWIqAKaTdFMEsYkNysgMyIywsP9Lud85zvfl+x2ZI9Eeqy0xCYxbZJNf1tMVaDx1RBrM4pWj91rm5hMi2VG45VpLATDyxcvOB+OuH37GuvrbYLOcfNLrPagZQy1+106gw2667t0Nq6hTPtbYyHGSFU7YlB4B0XeZ3N9nzxrNcUtEq65BHJjINOKwogwhEqMAINZvF5r6TvPMtlXGuN1iQeW8IzSWnrhEmqfZbmI4TTqzQ3D5PWlcHn+LJdMTE53fZfWYJ2syFB5JtLs0UOUtc57sctpenyba/DtxE19x7+Vy/cHKnX/XzyaS9IkSChhWSmlUNYsgDwVPJrAbDZnNBpz9OqYr766z/OXL3n48BFPn71gNB5TV3MKVTMvS1TWIqhMPL8MGJ0KDibD+SSSR6oSqEaRtGmmbVhhicoal55erJxzw1IQbGQZHcVkLzAf5zx/fIyf/SO5LnClpy4Ds7ljMplST69w5ZTpdMpsPiVGEeTLMptk7FNfNwFrNJdX5/zmd5d88+QLik7Bwf517tx8j43ta3z643tsbw94+qTg5VHk5csjxsMJ0UWcE0Cjv95nY3OddqeDzgzRlig1R4XkyaszfDRUQeI1sQyRIVy7Cqfm6XzMQqU1BEDXCdBXCRAMqYjyOii12t8matYGF8TSQ8Ua6W3LAIu2gj276HCqBisWW2Kf4ASs0qmqpxVlVAQMSnmxe1mZh4rUWZHicikypPsWBTQmCguG5uc/wI/8XiRtsLyo0Axin6htGUpZdGowjLHRC1tJnFLNTS+oBOmGsWxClCBn6c/WbFCoFIC+ts41MrApoCNtXAllkeZ/jVHSByE82OY9GlW1uEgEmwSwUVSMIVBXVRp1IgnaNJUaa9DWYIzHBDAhpZ8NhRDZPKy1qCTB3GwiekVNRylN9Ja27WDWC2zPErJAnFd0bYesNWCiNOejS4rYpn+ww+TY03Mz/
us/+xNu33uX337+kovTK8ZXz/ns83/gV/e/IfqMGM6oyppf/Oor8qzN2vqA9fUeVxdnKB/QUXx9fN2Y5arXJs8qdbX52WiNRWiIjSdZWNBCRaDEkxasRcUuLpLy5r2aQHtRBo+SaBstm3FMvZMxCI/Z6IjRgagNSon/h1Yak6prsVGSTIqLNjNp7Mh99UmxMWqoa0/tvagRIdQKbXJiFCXQmM4/s4ZMada6GdWGQ3vDu7fucW1zg24r59b5HmcXJ/T7HTY3Nwh1zXQ0YWOwxtZGm3a/x/btDZxz7Gxto9VHbG1tkxeaw5dHXI1GtNoZ+wc7bG0POD95hdKwu7uNzS3KlVzf26IwkUgQVTXEJ1GRQ7TEaKQXLGh00JioxRvRN15scTFnSADKUiUypEUqeZMgMuBa6wXd0BgtBu3O8fLlS4bDIdvbu6yt72CspXZODLGbSni6dk1Vz1OjokaZHGs8kZx20SfXLay6oMgMa4Meaxs92oNemrvigaKUIs8sc0VCJ0n01rj4nNU1QCvNeDzmxo0bvHgxw9eXTKdlqtYJPN8Eu6LMaaSfLYTlgh0cKtSEeo4KFd6V+LomhEY4CXJr8NGjrSZ4x9nlOa/OTtHasLO3z8ZgnT/59CeUoznnlxfS5O4j04shdVWhMcSZA61pdTs0ZvWb3Q1U9HzzxecMR3PyWNFWNcpCpjQh03S311lrDygvx5ydH6EyS4yIQq4yoCxWGelLnFdJfj9A9Ewqaea2SXK5XRS0MyuV6byLi5EvP/+S2fS/o9MtJCkIMVkBNALH/5LqQCRoQwg1Ojii1ZwML9GmYvfadfEt0gqChRAIFLS6O+Ttrzg6/iVunvGDD/4tRbYBJlAUhv5GwV//h78iXB4TqpJrNw44f3nMZ38zpre3z61336e905YKrNa4eUU9PyR2KsZuzuPTQ7Z2r/HNmSK0HXlnm7fu3OTd3V1anzvcr++z2Yrc2MpphxbvHeRc2S2mV1M21na5OHWMr2oGrZzoPRfHF9T+nLX1Gyg34vwVHJ4fc3mlORuWBAOBOXmuKSdTei3HR+/c4vpGh5bymBgBI1Su33OJZ7MZFxcXDAYD+v1+2rOEAtkocaoFkimxZjWdMR9NWNvZRGcClKpYg3bgHU8efs36xhbr+9fY2Vpjc3uTXr8r9wy9AEQjNSYPtAY55sJiOgVrOzsU3d53nmvwAsQ5B0YXtIoeJi9EbKOJH0igYBIV0iiypl9ZiYKkIfmzJhRcgKDvaJuQYbb8Pe0LxiYxMZNUbZPi3h88lFTbFApT9AThT3GLSoCzdzNUVJS1BLRFUbyWuH338R30yN/z6/9/yBGI1MkaR2tFnuc4YlKYrrm6GlKNLnn26AG//vVvePLkGWcXlxyfnjGbi1AT2uCdF7aKj2idEaJOdH/pmzTRIxY7LOwiPFVaP5diWGKnIT1oCpMqcdJ6sZiEcdnq05jR6BATo0WBFusZynVmFxVn8SkmZkyvrpiMZsxmUFcG5adoJf3vnUyEryTurrHaSj6pxS5kXgWizolOMbycEc6HnB9f8vLJS3Y2dun3Num1Onz03i63rrV59myXo5enjK7GjC+H+LqkVRh8LBmVjizr0rEFudKY6KDRAFbivaaViASZEFFReqUdVuD3FB82UIpvcoGmJWhFRViOZfK20IGICB3ekxR2JenVQO2kRzlmEI0iGI9XHo0wjMQ0XC+sZaKCOkhiJuyLepErLP+tKtA3ca/EpTotytYYFHlSMv/PIWmLDdKXKlkLpEGCfa1ErUvQ7/Q44IKXaopK1ASzfD9SdUorRdB6MdC/hVQ1yQQskrTm56Ui4zJh0lqhMNKLFkhJXEhVt2WjYVMhNJEkjy0mp1ZL35vJEm3TlRwPT7maT5h7z7wqxewvSK+cKOopkY9GLURTSNVGlb6DViqJMkgTs0JhTIfuwFAUmryo2D8YoKpANa0psinzoHn68hlbpg3dTZxSzOYV2/0+Wwe3uH3vJ5ycj7k8/YYXh19z/GrC+fmEk9NjMUquAV9z9OIVp0dHQIUKoti2RHOXmavIh6QgLd0DuVV66a/T0E4XNIBIbCaqWiZrmqY/qfG68GnxkwncSF2LZYIFpP/FpQQfbWRx1CK7r7VQWGMyobbWiGBA2qi1aaptdqWKmoABgQxQWiZ/g6hCRFnP8fAlZ+enlGVJt9Ph2sE18VRzjvW1Td7eu8t8XqEM1FbTG+wQtjMuh+f01zPW+ht0vaeczim9obvV4VpvF+c8nbxFb2uAzQt8Brpf4CJ02l22b+yz1htQT9+GKPLsWW7ZuX6DvMhwVgQy5nVFnlmo02aBlqpjVBBEDEejZdmMok7VCMDIt1wuMjFEYhQqamasbISzatHfFhOtctmfJmNlOLxkMpmxM6vY3dkTA3AkAVRJkdInlEy84ESIRpTGM2zUFHmkDJFudw0Vr9A60Onu0O5v0u308ETpNTSpEuicJNPp9KXnNKZKflMNljXjyy9+x9bOTTY227RbGeBxocT5RvU1NUlLOI9zcWGYGUIg+JrcRNomUOIpZyKjnFlLFaXnzlhF5Uts1sFXgeFswrOnz+gNeqxtbFNkOTcPrvFf/umfcX5+gc1EZn0yGlNO50JPdJ7xeCI+g84xq2pOXh2zsdNnfW2Nuo6sdwquZiNMJr5N3ig67Ta9XocwmtCyOtkHiLCFC9IGlhuLNXkC0gw+lCgVCDqiTE5WtPBVSSjlxgrFGG5cu85P/+RPaLUyUTBVQlvzPmJtWiNIVfFmKL0WaMY3/s9yDyAIEyPA46cP+Z/+/f8CKue//e//HR+uf4BdVPg92gS29vZpDT5mMvod54enfPHbn/PpTzfJizbKRtY3OvzxH3/C48/+nqdPn3H08pCDm/vc/+ZLLr74NX/iSgZ/so5SLZF+VnB1dcLDB7/l2dGU0XjC/MsHjMuC927scmNjm4Mtw+T4t3S04+zRF3T3Ovzkv/pvuHfQo7w6Y1hfcXZ2RqZbPHjwkhgi9UbOxwf3KCvL/ftf0u90KKdn/OwXv2Hicjauvc3O9nVaW4YXV5ecX1ywNWjzo7du8NNP3+Xu3iY21qlSbIhRpfWpgSMUwXtmsxlXV1dkNqPIC0H8094Vo4AmceXaj69GaDStImd7bw+VGXyoIDpELd+hTMRmEVSNio5ur03UBlZoP9pkEEpirFHG0x8U9NZa5O2cotNOcvyv328wGG2xxrLWX6PV6tEqutJOwOtHAxaSBIeE4mwTuJtEp2g+Q4Jh6SFTi+G4mP8NaoQoA+fZ0obHGIu1GVZrGiciRROc8a0jJHnSpfujnEPwPrE/HPV8SmYMed5Fmey1Xt7vPv7vysrevP7f9Xjz0HckivH3v+RfdMbNQr1yHX7fGS6fW75CpRMIccHiT5VW6T8fjy559OQxz14c8ur4mPv3v+Hs8Bmz4ZDZbMa8qpjNK2onbTMu2dRoLb3cOopCb4xJ6VBrlHfE6PFe9t/gIegI0ae2DtkrF5S42ADdCdBXr4PbzVhtaHQmgRWxsawJDm0CvtSMz8eUkyGhjLhqRqg9Wvdp
2w1sXqQ2GgHMYxDwOzfCZolJvEdpQ1fnuNrjXSDGjKLoUE0dr6YTLs8OybITWkWLrbU+g1bBwc46W+vrzGYV56dnXF2eUtYzJtVcNAdoE8jJaJPZQAxzfKxQVnzSlAoYVWKCRzlRRdaqaDTDUuKmyZQSkDxNT5+Ez5q5vyjMsEzaGnpkDEthQpH0lvlvtElrhiWzOSrLiKYpBsRkcJMJ2y4KndYR0UYqkiamgonwJqUDKiVrTcImQDDY5AsXUpEqhkgiAPyTx/cjaSOiohNkXzX/jCgFqiRqoCLYAvGcSYusCmgjKjAoLcGzUmiVSZIXlwNaqcRRTpS6gF8uzFGkQlWqepGqLSqpy6xSeTTNBW56YaQ1zUVQ2pJlSfIbt/AxqoX4QC9qsiBeSOM4J6g5RM3ly4e8fPGKeb/DdH6FdR7t0udYI0aNEQiSIGpt5bsEMTAN3mFswOianEgwCqtgrEvG0xn9SSS+GBAP9unubqII9K4itVGULqIqz2ZeMrWKYXC0x1N2uvvsbm9iu21u7ubcu30HYwOdtuXxg2/4h88+5/6DQ8YTR1lWODeXngFZORZN7NprYpCETSMCC0pJv2JQiqAtMWgc4KPDR0dQIclrxySsAqTqoU7SzmJqm2gDMcpmbguMziDWOA8+mQXrVLmJaKKyYsruU7IRAzoEgpXm7+gjUYv6qLY2+YbpZXKJAQpCtDg/w8QW0bQIQWNihVWBnALtC6wH6oqnjx7x6OETlLJsbmxSDStG4yGn56fcunOX3Z1rHB+fEfBkuSXLLSF67n9zn+2tlwzW19ja2mY6nvHll19z7cYNNnY2KXpdtBvSa3cZ+iF5UAyu3+Tw4hxjc14MhwzrkjwixtHB0+13cTFwdXpB3mrT7fUF+Jg6WiGCG9NeWwevha4QIdMFTmm8FQPxkAlnqklIlmCFlmDMSyLceCVGPMaCjxXzckqr18ZqMY5V2tBudamrmuAcJy9eQF2ztbtDdzAAxE+lUfWLzqe+NitBwEI4MFGUrEEFS9HpojILETrdtVRprYmxJDpPlqnUc2rxxuKsojaOaZhRhECMFUZZtC6IwFrHYMszpseRndt7QE4VhtTeEXwkN5qynBB8gNhnY+MmT+IhEY8OiiwGcl2iQ0Brj+lsoU2XfrvD3MEoBrSWLcBVFRZNd/sG5ZMnnLx6xunNj9hcs/Q6kfVNTZavgTHMZxN67QG9/nVUkVE5h3GBrJSE8cpXjELNlw+eMRmf0W63KYymrS1OZ5RaE2zGeB7wcUpFZI4mE5lTYpS+4NoFglEEFHmRY5UhKE0ZS6wKoA020+RZh7kOmMyCzVE6p7+xyQ9/+BGZ0aCqhFZmAuIEQHlQDoE37TLga4q5CsDJa5DKEdFggAyPjRl4Tb/d59/8xZ8TlGJ7cw18ECQ1zBif3ef05X0O3vqEbmsXPbvk4fP/yMuJxf1ji4/v/Cltd041aHHj3h2uHaxz/emQ8fyUe3d3efI/HzE/vOQf/uY/MT2bMJ5EWpsdtlTJZgtePCm5/+Al3V7Gwc4mu5nmRgi8v7nGV1/+nOPjJzz8xrFZGK7vbvL5k6dUYY0P37uDG054/tUzNI/Z6mpMscbY1fz2mydcu3XAnY9/iJ9NeXT0ENMKXGuvk3c79Ab7XDrNaVlycB0+2LvOf/H+W0Q/wk8vQa0Rkyqm11DVnrYRWWulFRcnF3jv2dneEbp3UlOMCnIc17bXKLRG4ZlXU1RUBBVotdtyfwHvK5xX5HmLGBzjy0uiqxj0BuSttlTAlKEJjEAA2Rgs0eUoU5ABg6JFL89p2zaGRI18g0KrkiCV1o62zShsm8LkdPMC3VgaKKljNXu/sgYKg5mDVYLqZ7qAaAScgWQcrzGZSz5tK2CjSlGUBoLC6kBhoFd0sNbQ7ve5efMmP/r4A3KlpLK5SFhUc+Ir8UNzjeMKyCx9dRJEZmS9NRqfUF5Tx/t/8hAAUq7DCj0z8RzUaxVymccqRmIDkGux/Fltb4jwmhrmP/9bNZ+VEuxFOqaXNZUE4CYSrFB4TVz0squoRO0vgEcxcjWunDG6vODxN7/h4uQZz5894sHjJxydXFIHw7yGaloSvBN6uPcC0i1iD4VRTYIV8cosfHVVkKRMK4lPIwGCE6Vb12xaS3k1iTEFDG6YHs13lvOHRiBFoVLMm0D8FB+5ENJ4zTBqSnTpfANoVWCLRkF2hou1gIwRVEyq7SS2iIKmcuW8p3YJjNSSMMU6LlS1Q+WpXMCVNeV0wolRFEWLdqdL0S7Yub7B+m6XqiqZlTPKyuEI6IZ5koyRtbJCiUziRAJ8aLwKNG0aDcYRY6QOTgDfxpIlpmaXVDxZFSBcFSJsxM0iHmJJHXMULaDCU1Jrh/Weom5jYgcfOgxL0U8ootAzg9IYYVZiVKR2Y7JsRqCNCy2UngGB0OhoqAx0KgLFuOiNj0YRkrl31AqVGTKVYVaq/d91fE+StmYqpkU6LqswUmWSyWFQjUiYVFpS35P43nicj+hoRKVNa6nYxOX7A0lhMSxhn2Z9TghjTKXoqJs+rDfofDEpB6U/Fm8x3dRvaAQZpOdMPs9bkUQ3GDIlyjkUGcoqdG64c/sWO+tbfH5xwvOjY1SEQmvKCBgZwDlaZOuDR2GwVhR6bJZhrSFGJ2o1i8UhEnXNrBpRn414NJ5ycXGM2e7T6XUZbGzQ2dim6A5YDy1Cbji2hgtX4SdX7OucdqvHRsugpzWjK09/zfLe21u8c/M6+7u3ePzinNJb7t//hvPTY4ZnL7m4PGFWTQkhMVaUeDX5SvjNQiMR9FuulWzq0jemUCaig15UEJvluVENlb1QJSS2mcVpsCREQ8cmSZRkXhmDjpFGyF2QluQJ11TNtBIBEZuqaol+apL5dUShlCGzGegCbaRqoI1Qx4wyOF8h2aAGryFEXOXoFV1y3SbPO3SLdTQ5J8fnTOdzxlczjo+/4vz0kiw31K6m1crZ3d9jNnYcVec8f37KYHDG+voW0ymMhjVHh18TrEa7QK9oMzKedZXT6Xb53ZMHbOUdplXJnVvX6VrLk2dP0FazubXJ1WTM1WSKsQXXbt7E1ZGjoyN6RhF8xcc//gnXd2/gvGcynZEFuXYuXQnXBNTN/VHLpl/vY2q4ZWVOeMpyksyGA5PJCJsZMfROabDNc4KWfsOry3Nm5ZTtvT36a2uAVNJFF0hDCFSuxFgrNGHNSp9JtqBAaZsRY8TYXKiEVjZRo5NXk82xtkVdicmUNoa8XYAW5NsmemEIAR0r7l7bkCpyWRJsJkpiUfjpVVkRI1jbxmZr2OwSjFT18AED0j+oQ5r3XaxtJyQ1UqVVRSEokHKwtr7Ltes3MdpxfjEi1FPae5ZOC3xlKFMAZYwEKN540EKx7qZ+wK1BG9dps7l/k0cPHvHw/td0Whl3btzg4eEJ01mJrwOlEzqvr0uq6YxcCYU3+kgVAk4pvAKlDBYrZBLtCcpJwBYcIcqCbG3qL01+OKPhiKvLK+KNXVSsZK7pDBaiQc2K+gY0n+J2Udlu5jsyv2jWXJ3Ec2Dz4A7b18SQ2WiDpqQ
6fUo9fUZRXNFuHTM++iv0fAM3POFaL+PZxRmPDp/y8knJvX7JBz/9cyp9AeoV9z5+l6DeZzSd0tq9xVudnKdfPeSzv/5bjs7nXNUXXOso/t2//Td0uz0GG21++INPuLG9Q3V+znonw1+95GCtxavn8PL5Q9q55607Nxi6mt99+ZhXr045vxihW23aHcvdrU28bnEVDZfDkouZ4/rOdZ5dPGTr1vvs3rZUkzkvj16xsbPNk5enGBPZ3lmnWygeP7xPf6PDB7mhSXBjs7eqhi2i8bVHB9jY3sIY+60IWqvIjes79HsFdTWjrufkWYveoLMIsEjMGGMt08mM+eSC6eiSrU3p+zMma15E0yvaJEpLCwKLCiWhdijvybTQoxZjYWVIKFGEIM+UKM5lLfIsI9Pii9lUYDTN+gQYgykydCaVsEUyFhU6pr5IH8jbirxYZhTBC3C3jBMkHsgyTZFbpjNREn7vg/f56IefsLW+S25t2reajYbFObEYvbIGumQr0qxbNrOvVdLkx/+3Erbff6zWuCSBaM5RiSCPNKLLJWiEWSL/wq/xHRW9N85kNYXzOLFdSCrUtRORCBToGJiMzhlfnfBqOOHvf/eI3/3qF1ydvmB8cUQ9GRG9x0eDjzl1zKiDoXKNx1ezp6UELC1MUhlKZ6SyREmXPmvS34WGERQiWrJbfLK2gRRj6iZWTRTeJNRESqZWBduatp/XhMBStqV1M47mCcxcUvAkRApAKd9ncXviouosgmTL6x5TktG03jS31LAiaJRU2yvvmOMZjkeYy/Nko5HR6XQoOoXYQQWhnrqqpi7nlGWJq9NcMJpQBQH9SfG2kjUkePEyfs0ZT6ce0HRoZD0Kq5XYNxK3xXdSouoYiMkI3RLMDKysljYYqA21y5jVChNBTLqEtQCsiBaXaEpilNYSmeNe7nk6s2Vt3S/uhQ9Bqv7WkCdqtVZ6KYDze47vTdLWLOyrAJU83AxiUd+RYS/VthiSOAjSm5Ib4amqxE/3YeUmkeZZUoMMzaRXShT/UK99ZvN3q/YAkrTFxc8hCLKjUxVI/mjxdaQuo5QklVrQHrEs0oAFlaGipbO2Rau7xpoO2GeH6MwQZm6x4McoFSdRBqoXEqVKi/WATwlNQCijLiS4xDu0q4jljNPxjMOrY17Nrii941/9xV9wy37Euu1gi5ysiHRbHeo6MB2fcjMEppMSnzvaGXhvGU8cv/7N7zgY7NNrdbh3d8DBrbd4//13GF9dMJ8M+bu/+1u+uH+f2ntGowtgLhL4mSU6ybgXtM7QqGdK7VgmEEI3XblfxCbRSombyH+l3r1lH1JIXjzEgFUk8+nXK6Wri93injaKkkonpEwQM6kIkh7XSRzHSHgZPAGb/N8kQZe+EbPsbzDSnzjY2mX3Ws35+RUPnz/ng7X3ydpdukWLO2+9y+Mnzzk+uWJ/+xrTyZiyLoGcPO9ycH2f0WjCxcWQdmeNiKLX7qAnEw4Pj+jlBVMmjHzJxsYOKiiy8Zzb927x6OlTqrMhaxvrzE4u2drehNGc8dEJg/4avnRcPHxOnrdRozntzQHPT484fPmC/a29tECHtBjKdSCkBmmaeSALTYOAsVgg5UmtNVarZLbuefH8GfO65K17b7O/fw1rDFVVUWQ5eSZJllUwmU45fPGCtdmMjc1N2p2O0AdUasz3woNf0B1iSInd0l6gmcc2M8nfxeMJuNrhK085nbLW6eFnc7T3qFpSSLUS3GqjkAbjknI6ZH9vm43NAcfnR7QzeOv2DZ4ejaVXQWmqEKm8Z1bVhBAENYsL3akknhETpTrgXcCQpQ1AEWoPWSCUgcLkXD+4ib88ppxVHF4OKUJByzpcaZk7R/SBPG8TEbqhbuVQSXWfELHKkLVa3LreZ29zk16ueXV8wqPnr5gMh4zHJU5n0rsURPrZ1RU2ajIlipnzqsIpRbRiOt/JWiJaYjxRB3TyloqqFGQUT62l+q11wdXJKx49eMAHP3hL6CfpaIoSsmym4E+tJBiS0UICTBYgTVr/A+DT689HY6ZTx/7uBplShLrE+kPOXvw19cVDyBxPzo7JT8cMjzVeaW68nTPQbX7+698wHD9i+vYGb/3gh7QGmlfHx5SHFc58yO7dd/jxn/4PzI5/xYe3P+bpsxOuja743S9/QTWfcnhxQm0nfPLju3zy6ceEaaCOjmdf/yPnp0/Icsvlq0tu7ffY2thh0C0Y6C7/+OgRD3/7AGM0n370PtcO1rn71gHlbMpXL8+xvQxdtDm9rHh6FdhZP2BzvcejR1+xu9nFMKTbLbmxOcCXIw6fH1O229C7SVWXtH2ia6mIjTCeTil6HVnHMs36zoasvWqZ8CzyDWW4vJpSVp4sy+hmo7iGLAAAIABJREFUfYip7yZCJFBNp2RFTl3XHB69pN/J2L12jaKwlJMxOs++9b6LdRdh0yggJvozILR0pZb3f+VoglUaAS6kXUGb1fLNaqKFsHS8p6pr0LJmhJU9IaT/W5sRQsRFR5HbpYfbMisAJNCyWYaxGf1ej92dXdqdPioImLRIWFjGEb/veHMfev255df5Ph3NXiv9TpIcLUB2hfSeL5KNuPSu+9b3+Od+sSYlS4lh+tvFqIoC30jcaKhjLcl4NMxKz9MXpzx6+pjDF9/w9OGvePX8C0blnOHM4euAxaJqhaYgBo1SOT5If7oPDZ0/LM6m2RvEvuR1UN9anYQyWKxjjbp1TAmcVktLjTevQDMexO5GLRgs6juu1apdRfP/1de9qQr75mNaL5OuN0WhvqURoJd76tKGK6zsvc1gFW6UfO+Ic2NijFxeXmKtpdVqCdMjL2i32nRbbZxzlGXJfD7HuWQ27Zc6FKvfcyFK+EZcLNWsVEULy2ram9fr9divyTdSn5uKaEwSDVSp4CAKr941I64ZfYn5phLVNiml68U6uswRpHq5KBewQJMW8zvROBf3kMVa+PuO703S1iA2cWXgNYOw6TQTiXZL1EaClJAEH4Io03nnCbUnzzK63cECdVBNTX5l/VCsoGph2Qy6uniuBn7NIY2kSyWY5u+MEUGSxQDXmhBdojUmNEKJcpAxOSgLRV8kC6nAKLJeF1XkRJN8J4xUimJqVvWJjimJS5KsN1JJilrLhhG0ePMoMDrQMYq802JdZ8xtZBxLZudTHj96zJkr2eluMBtso693mZctjl8eMT5+wfXzC+oiIx+ALWqGoxmbRYv5dEbbX6FNC2sUdTVhc6PN+iDD1wOM1Xz4yY+pg+b5i/t8+fnf8fibxxAztEWSarXA5lCp2TRF+YJIKCE5RIWgJikjbUQMA0p4zLqpdCa0KEQxUFUBQ+pLWum9+s7KaQMKJHqtUjJxF5CSvHDxukhi/Cjp81HO4aoabQNVjNIYm54PCWCY1yVX0xkmb7G912VtY4M6VDx58oRnz18wGGzQLi45PrnEGE273RXaYLdLtzvA+8hsXomyU5GzvjZgoOH84pSNtQG+qhldTblx/QCbZXzz5CFXV5cUmSz+Pnj6gy5/9Ec/IoTA2dkZsXZcu36dVtHh1fEZnaxgf2+P8/
EpVVUSnCO3RugfUZSNgndJmen1DSf6xrNNkhSfKpdRKQEanCfUQn+8HF3x+Zdf8OL5cz795FNu3byFVuIppRZqbJ5WUYDRnJ4cM5vP2dzcpNPpiFdaELVQlMK5egW1lE1V1vklpz3LmkBQQ9QYndHvdJkMH1HP53RswXw2JienntdkHUtDidVaFLRyE7GqZHr1ksxcoesxmVK0Cnj7rdtMJzMuxlOCksqP0tIbGFwtf29l02uer0MUVSxfY9sZjRthcAGCIiTfsH53jcl0Qjmd8/WXX+MnPe7d3GI804zKOe12G9sRNFIoM5ZoAiaXeRS0PJdbTa/fwRrFi+fP+eqrb7i8uKD0Fp/ySo341/gg9CJBqOU95dpKgOZcjTWQaUEQtbEJYZQ5YnOD9xUhOJRXzCdDfvnZP/Dn//pP2drp0vTkqsXuI6HJopC2aEy36fflHrHE18FEaMUIesIguyIrCqyGykdUrFDGo7qRvt1jeH7Ofn8fLjyz9iZXgy5qv2ZXPWEnDOlubNPeO+DB+QvG3zzgwddfoe3bqN4eH/hdNvsDWtzm4Hafdz+tefro58Tzh9j+W9z88H1+/bufMTx7xV//h/+RXjS8c2Of63stRhfQam3gw5xeXvPe2zd4+ugphw8fU/mcn/zgp5yfHNFXmhA008mEqprx6mrCWeUYlIqzl69o7+3y3u27fH3/V7x8dcg7uzfY2l/n7laH3z49Zavb4pOf/ID17R2+fPSSy6tLNm5keGWFXjytmI+GdHKFLgoazF4Zw6Jslq5tDFC5yNcPnvCjn3zM1mY3rZECykjLgefy7IS17S20bbN/sE+nZTEmovDYokjecEt63Gs7qVaohnaXAp+FNHYMSUDlTZqQ2OlYYxIdmuQD1Yyd18pyxAjVbEbtHMpIRVDW5pV9IIL3kclkymQ8pt/tyCkFEU+S94qL9zTWkucFWmsGg7WlxcFqnLBS9fuuQymFtfY1GtcfSvC+T4dSdrFfx6SgFlVkHiPlbM7VdEynXbDe6cnMXsmpm7D3n3VE+Y8kZGLXslwLmjqkgEQRQ4yGcgrPH7/gy6++4tHTRzx4+oDHzx8ymwxRtUfVNagSr0qUzvFBE0OGsS2RXHclPpZAjXgFF6DsSiCtFgmPTgyPxd6TKjgCgEkPfWPBJGteSFYOjVp6+poxLgS6lqJtaXyqZRzcvPafumAxLl+zSgd8829jc22/87mVv09g9mrc1CRsq8lFM5dCjAsVVZ+SL63B1RXzWcV4NCXLLJ1Wi267vUjmWq0WIQSm0+nCl7X5v9g+NUDsElRdViqX96EBFd6cW6/3BZLi6gaYiSlp0lIICp6IQ5mcSAbeYJOE7iLlTgtas65lKGxaIyWRbCiwanGt34SumnEkPe/peoblWPh9x/ciaVsGxOq1xW5JjpNAMSweWXltFFl/V9dE56jLGrynXbRRTbCmGy5wet+mjJOuuFZ60Zj6JoKxOJeUpNEo5KV/4Y3fFxl0CjFUkC5+FaL45XgniFAdwSsiqZyqFDWRMhkn+hiISqf8waCQcqsPUcAtWclApcQl/S5IvvhRqBgoMsWg32ZQdDCxplNO6LRaTCYTzh4+5sHkPg+yFvagxUwN+Plf/xWj499xdFTQXrvBzrU17tzZ4PhkQm/9Bndvv0PXa0bjOWVd8flvf0nlSu7euU01r8jznGvX9+gMtrn33i3a3TnHx8dMr0rpgWp8u1QjeZEGaVwGbio1aMY0NlKhAp2QOx8CLorm0GukKhUheMSMMqCjNOvGlSStWcReS9i0JmiF0halA0Zn4rOV+hoFedGgZeKJ6qRQL0JEJM+dxSnhy0uVtREoUdy7c4e9nT2qOlAUBd1em2sHW9y5eZ0YFLntob1hMp8RCOzt7bC9u4lzJTYzeH+A84EiLxiPJ2xubKDDDu2dAYNeH1fX3JiOOdg/QGvNj1sKX1VsK0WWZ3TaLXzXkm/26fW6fOg/4ejomLkKbO6ss9frUFWOjZ017vjb9Le3pY8SMaCWvg+Pd7Xcr5hK+WmcS29ZAiZiSCpZKl0/MY2M3mG0YmtjnbW1Ac+ePmF0dcW7b7/Nu/feYWNjnbquKIqCrChwaRHrdtoMLy4YDS/Z2dlhc3OToi3Jm/fLxW1VFXb1seXmGtHWgoOiyLh76w4nh+d88Ztfc33vGq0sJ5RSOY9pTVGKpCIamE0uiW5GbnLGly8xJmKzgl6uOJ4MyTPp33ExYIuC9Y0N2p0WdSU9EVp2FqK28k+J4qyr5+hWl0wr2kULrQyV91hTJNCmhdMZaxsbFL01nr58QTurefzsEqcNm2trTPoTOhtrxF6LcjQh8+BMm1aeY2uPcQGUpzAWkxXMy4rRZIq2OVpZfBChHx0tqECW57RsTqYMKoAL0n6tCwmWfTlHa0XRytFWYY30CElPY0ApR0AndU2Dc4onzx9zeTVkc3uABFkS+Gu9DEzkboZFp8oSkW5W/dSfktB9FUAF2TL7WtFbm4MaY7XlV198xu5Owd7Bv+Ls6RFfH/2Ovb09PvpXb3FLbVH2BpyePWSndcmfdRUb737A188e8LO/+z8Yvfx73PyMH376Fo5Djp+MOW7X/OTHP2VQDDDxGDuZQjalnI/55d+8wgXL2Ax48eIJa12FbSl6JqfT26GeVWy1LaOzEc8fP2Wj1+Pa+3e5qnI2drbYbFu2u2CzDs8Pz9FZ4P7JBWdXkVu9gp4Gzp9Rv2zRmlbc3t3m9jsHXMxr/urv/55b997mj+++jzY1//Cbz3jwYsLWjTu89aGhChBCybMvvmBezljb+JicjKihKueoWpO3OymOXiZYQWlmdVJSW4TIKZOPUu3KMlHkzXKLyUyifjliBJ3lsFA8XEqcL9Ss1XKvj87hvPjeLXvIvx3YKyQAtNZifFqXdWyKsasvhCDvO51OCUSMNYvCrTFmxWhcdurh8IrMGjqtYnl+6Wo0ZxMJYBS7+3vcuHOTm/fuCrV05WNJyd6iIvWtZGwZaCrV5Hff/X2/j0fDhmqAu+BhMqkZzqYcD0+YVDNqV3Gwv0O/3V0wUZT8Mc33/+cd365JNY/oxTkkAAiPrwOvDi/49S8e8/Of3ef47FQq8JUFuiJK5B3RT0FZ6ghBSRFg7uukc1ChYgmUxFBjkzesYln5UQkg0EqtaOuoZbCtSMF/SsSSoFxMfZ0Sw6+MjSYxa0CrGBeVJlgCkm/S/Faf0yviaKvJ1O97fQOcrY7R76q8BV7HrxtxvtWq1fLvNTZVqEOy6cnz1JpAM148ZQi4smIyGmGMqKO2222KoqDXE9VY5xxVVTFPFlJNkljX1eIzpf80LM5LKZXaM163NHnzWi4ZQUpAXoUw96IWADoESdpURogZ+AyT7k+Mi49MxROwWlFohZESb3rvRhBlddSuHEqqkQFROI/Jn08Snf8Mkjb5TrKKKVTyy1g+2ZRBQ/DJfDDdtCRWggq0WwVGtXC5owEEffBL+cy0SC4aEWNTcF9yiGGZtL1ZolyUrrVelGRBkGgfhaYILJK4BaUzIf+Nqa+8raAX0YPCEvF4Ih69UMOUw
kVYLgTpO2mtxSjVaJSxaCM9C977RZWnkZaNeKpywsnJkEmrTWdjwKDflffodRmNR5ycnjKOCjUKlLXi5MVTLi4nvHr1Ga3iG1qdjP0b25RlxtwN2V77If1OdxGMnp28YjQZcufmPt1um7L0TCYzLsfHYIdkLYsykZj0ZaJcpEQdFZPhpi8oRHAhpsSzESNpEBJBqlTqkfCxEYpN9Eat8EGUmmJ0qCSnu4oKrfr1rSIvcTGBTRKeFCquIK3JzFwvP0clXnrUFhM1RWYoCsPEZQRlUUQyq4nKYA3oULOz3kdnuVSxYkk7h8HeJleXEy5Pz7mzt8MMx8XokswG+t2MTqdHiIE8F+PiuqrZ392QczbQ21lPKm+wn4Q4Ygh8uP0xIUastdSuQlvNdX8HFQOZsdz78D3e+VheY7IW87IGNIPCcP3WLk4b/KRCR4dpqkQqCtc6jcFGqtgYnRA26e1sVB5DFKEZ10gaR6iqisH6Gu+8/TZ7e3scvjjks88+48WzZ7z/3nu88957FK0cH6THrMhzQoys93sMxyOuhkOqqqI3GLC9vUPT4yL0zGTOncbKYj9UEKNLyFpshhH9bo8P33+PR/oh33z1BT/6wae081YSshGjS2NE0lvFQIwOaz0qllgqcqPJraKy0DZwOTzFlQ5fg4qeViuj3W4Jfdl5QdKAOioJpLVs49FX0pUVvfTn2YyL6Zy2zfDaUCnLBMON7R3u/fATvvnNGS9PT/jy4TNavTXOL0ZkaDqba1SFpiLQUZbdvMf2+ib7+W18jKz1BphOzqTyXExnkLUYbA7IQsa8Eip3FqF2JTG3tFsdsqiILnlHEgk64mONAzKjKTpt0JEsa6F1TgwBYxXaeEKsxdqkjpSjOScXxzx78Yw7d28l5b5IVPVr4FNa9CQQi8mLJ0rY7OopRS5ItjwvmyzRMC+HTIePyfIZvc0uqqp593ZGu7NFbja4/t5N6G3zxdd/S2Wek69bnj46xNUVmzs7fPLhHU4mkT/+wYfc26z5m3//JVezIaG6YGd3m1Yn5/GrV1T1HN3dJo4H2Pom84niajLl7OwVnU6PrrXcu77B6ckzHj16hp+1wRkODx/R74KOOe1iwEa/z531dU7GNZflmPn8jHHQ3Lz2FpMLw7yeEOyAg70e/nLCndt32O21+Pl/+l+pVMXtj96jv7HF//Yf/5KLq3PesZ4nD79k7sY8OZ/w6sIkFWK4GF5ydPyIrKx494MPKdotoo7EuuZqeEa32ydHKm9NgBwClLUnxGWgA4KcG6XwdYXSsLa5LsquUeagBCp6QRMXpY1ly4MIiKmVj0rrb/DS3lDkFEWOXo0SV4MEJQlbXmT0TIssM2Jaj/n2yxUoYyi6HZTrMPOWqHxCwFfAnSh7ZVVWPH3ylF67YK3fXqD4zXvFCEorbt65zbVrydsvF1aAMSJ45Bta2Xec/Xee4GJ9ejOY/r4fqTIRFFfDkkePjjiZXHBVj4k6YFuG/rykDoHMCnV6NWj9v5K2LT5xmWO/VlSNsFCFbGWGt+/ucH17h7v77/CPv/yGh8+e8ejpQ4bDl/h6iFUlMQj4g4FaQRkdPukiKBXBKUzIyUyb4DUBJ+C+FrXGRb8Zy4SmsRGKvPFdVwHjRaVKhGeWj4vy9GrS9FriEb9dCfuuFp6IxJ9vJl//nErumwkirPRbGvtaRbF5z9UYeZHMhriImwhBLGzUEsRoaKXSphBQqqaqamazOVpriiKnKFqJStmh3W4TvDDpqqoSdopzorZa1zSSkk1F0PvXwds3k8vVa+NDIFoFSvxnYxBgXqyfHCFGnDfo2E6GA+lmKIlXY+oWMRoKpdCxaZNp7ldYrDcLeqVaxiYNg67JRbReaRH7J47vRdLWYCWvo1ApqNcK5WOiKdGUyUTKvynfmiQ6ESN5UWDQoA1eLas1y09iUcqlubkrN3NV4n/VG2VJp1hOCnnH5eR4s0q3UItp3jvKpuGcmAxHL4IoQiExWFtI4oDCZjYFv4JuNuij1oa69hDtUrY4VamMUsQFpVN4y9pqxvMJRycvsSctDIa86NDutmlby+baGv0spzZTjp4/Q9dzgm+Jkl09ZnYReTSboozi9PSQh5//mrd21lnfWKe/tcVweMHF5Rnz6YzTV6e8OhnR37rBzHnm7nxxXUIIqafPLxZgaegVNUkxrTaLQdwYlkqyp1MCBtLAnvoGYqpnKdWATBgjsudKibBCc8+1FrNS59xr9+/1Q6WSuVqMCUVS8FosaqCVyLtiMmII5EY8tkIQ6pt3DqNEJKoACgMq1KgAMcyZT6+oqxmdosVseMX0YkSYlrR31tjfWcO5mljPGV2OmIxHaA15Vohwh1JUlSNmFoXG5BmT6QRqT6vXJcssFk00ilnaHHwM5JnF1yL/brTm9q27VLUjqIosb9HpdjCxIu90KGPElV7UQIMTaVpiatxPC5aUoxK6LImJWeklC8hCezUaU48nKJCGY6XpdHp0uj263Q5nJ6ecHZ/w2We/4OT8jI9+8AP29g7I85wY3MLGoVO0sJnl8vKSq9EVMUb6/QF5ngswE8EHR2YylJHKc3OLfXCYaAguAobgod1u0e8V3Ll7nVCX3L//Fft7OwwOPsJqmVuZzRIwkqW5KAlDnmu0CuBKRhcndHsHxLbhfHjOdDgl1hXlbJyWabVAK7TOCGSUfk5UVuZnqDHRoaKjrkpq77n/5AkbgwNcq0e0bWYqY+pqQtbCdHuMrs44HU8YZD288lSTCX40ZqI9pasYZDl+cx8dNGvTPc4uznkej7l+cEDMWmSdAbvXM7zpEHSHGC0WRa6gqkto52Q6o5rMiU4MuOvgmVQzfKiZj6+oZ1OKTkbQgTxrY0yexBuC9LpF8apzs5oqTonVjM+/+oIfffpHDHptgp8B4imnVQ4xodlNbc2naZd2h2o+JtYzbJxBNUNbKzS/THP87Lf86mf/O7cPBtx6q4fSCtu5R9bdAjUHAjvXAt31Nrl7xXQy5vzwJbvr27S328zCU8bHl7jTM7784m9Yz3I2Nm9xdnjG8KLiYP8u71y/xXZPocwlVVScj1p0si1mpqDT6fLu3etMLp6QtQb86cd/zOHRkJ999TWTMjKazunODPtFl7OzCVdnx1z/9AZVPeLp00OqeYnvrDH+5jEnhyf01zrc2r3OeqfFODvn4PpNrh49ZeNgn/aaod/t8eLlK3w1Y6Obc35yxHxm6G906fe7FFPHZDrFRQFQDvaus3t9gG21iKFcrG0ms6kYtooER7xXzMta7E2UJGGoSFXWVPMR7bxYGEIT48LrsOnr1iYnhGVQ+nogktbRlfVXZ5aiJWh7p91eQadfP2ROS++qMQrnKubzGd1e6/XXkcBYreiuDTB+iJvnRGrZsWOSEdcKfDLbNhlnZyepeqK+9flRyT6UtVpkRfq8GJJfoezBvpZ93eTftiBYfPvvCJ5XH/t+0SS/nV5JvLN8WhuDzXJizEC3iDpQu5qr8ZTxfE67Jx6Z8Tvu5x86GquFJtKSisibr9ILppFKsWO7B3/0030+/HiHy4s/4i//8gu+
/Opzjo8ecHXxnOk0UAWDjp5IRUx9uDKMNVp30NEQncJTEk0tsdZKQi6Jm8Rmr8eXcfHYaqLTWGmotJd+a3ylWLQpKoTg0yVWi94xkPHRGESvUgAhAd2rMe7iXL+j/+07qnjNsfqcMeJPq1jGx9+lC5B+ApJyJ8JcWv08CbvT+SH94k3/exOz1bWjrj1VVad5acis2HwUvUIKEVUlAibOCcvJe+qUBIeEEK2yqJpzWL1ejZl5DAGFI3glRQ+d7LViJZV/Z8lMl0w3wk5L0nZIlbYiM7S1KIYK1pN6+1IvpE6q6jEJ5JHGsUclf0jEuqE5nz+wBHwvkjZAktAFh1wtNoDmd5Cpr1KwGDwoI0i/D5LlaxQqaozSSYluOfiW2e3Sg2mx9ugmyJMybHOsNmQuTjO8UW5eGSCrE1V67pA+NESaVHxyvFC11JIWqJUIrGS6ILM5OilFhggyA1JG0iSpiFcWJnF9taHIcjKbQ/AYVUrFw9VkwYPzjKdj4mRMVitaRYdJDAxnJXXtsf0B27t9is0NmDhMu8ZVE1quJtaRYVXj1BTvOjwZTTh+9CUmM5iigwuevMj57c5v+fDDj9nbbTEua7x3xOAILmC0wWoj8vs6Y8kMjkJTUSE5PaQevkXfVLp/yUFeIYiKpOji1eEjBC9lc++dqDCpQFSeED117V5b9FYbUhukSJpsrQT1KqbEOfmVpcZ4uXNyz3ItXiYuKrIYpb/Hanxl8EmHXhGkodk5qfw1G5CvINZsrvU5Pznl2aNHTC8rbt16i34ro7feR2uYTsc8uv81k8mI3Z1dcpvTare5uLhkOpnR6w3YXt9gsLvNq8MjJicXbFzbo9ft0un2mQXHyfEx8+kcWxTs7+/SNobq1RlrvT6tuaOT5ZQuMBtOUCrDtsSOQhNECSx4kSgOIS06KTlyAaUjOvnNxCB9pT5KH6ZUKQ0BhYsRH+S+m6zAmJxcieloXhR0u33W1jc5fnXEV19/xYujQz7++AfcvfMWm5sbqYcNtDV0um2CgpOTEx48+Ia9vQNu3LiBMQbv/UI5stnYlvM0EGIjKd8EmIFev4X6P7l7sy+5rivN73eGO8WYIxKZmAiCIEVRJEsSJVW5qqtcQ6/2S9n90H+C/y6/2Mt+9LJX92qXXV2lllSaJYqUCGFGAjnPkZEx3Omc44dzbkQAoqr7UdWXiwAyMqZ77xn2t79vf9uVvHP/NsI5Do/3eb98BxE32U/tAwcpqYwlL2tsJw1xjAVriLUjnw7Qpua9O+u0VMnR4T7PHj2mKktEkmKMl4vWRkAUU6NBBVm0KXAmRwnvz2kw7B8dc3pRs/H2O1gR45KW7xcTC2oZkeiY9c0bpL0NimmBaqdEmWZ3/yWHJ4e40Zitby+hpcIWFdW04NXxBUcnF5TTEU5K4jRBpS103CHWKf0kJZFwNb7CZBm2hqmOEWiyThenJbmpMLbi6vKcy7NThKxRiW/Cbq0gimOsrZDaJ6Dq2jAtLNOyopUkfParX/FX/+qvad29xU9/9ANGoxPee/cr3L77PlJFnlmzJUokLNYz+X3YMr66YHDwGF1OaClIM0e6YVlNBX90f4O11RZPH/wWoh7X3roJakLWXcOInFcvP6OnjjAXEFUpN8qMcnuH6kaHH//6c462z/mwu8nb7YTLVsJ4JLBfjKjTHHHnOi8uHpC9miBdye72gKP9CwaTUzQ5f/HtD1Eu4Qe/3WHn1ZjbX/0Gev0lmx9cMb4cIbcFF3tTSq7Y2T3l9vWM53uateurxKkA3ea8MOx98QVatunXBT17yf5YIHTGq7N99naf8vxkn699+B7tOOKDD29zdPKSyZMrjg8OsVcVq5NlVm6+jXYTmlrojbU1tHOo2jO7QsW+RlVF1GWNS1mgLnzAifDKjTyvMLaxiPF1pFEUeTmlAueqhb15/md4dli//WNyhgmb4CTIDhtHfeld7oy1GFO9ZlgzP3zQFScRpqxBOJIs+R3aZkbiWV+zl7Zb2KJLWY6RoSehE9Hs+VJqptOcSEekaUqz2fjU09w4ws0ulZvFAsb4PUNJjQ7rz38bx+uJ7tePJnEtyDJFu90musqRtQTpVQ11CZNJAZ0uzZVzbv62C//8Zw8XzGoIO7Boim1DL1bjBLUD5wyxLFHCIaQgakl0Iuistvh3m58wnf4Ru9u7/MP/812ev9jlfDDidHCEtFfEuqByOZYCV/vEkQgBedP6SamGZWsawzusnctcpVbeX0E0NWCvkwGVWfBBaFReC+BIyMVYNQAc97rz4WsskTEzYxBrre85asO4XYhDv2w8zmMg+zuPLwLNGeCx3lXyS03cFkCfs3bGJrIAcBehv11gBv3rXo/FwCtyTJBXelWPb1SeJAlpGqOUIss8G25MNWfhqgpjQpuCqpoRL4sA902G3QjnxwwaiZ4ptawzGGcxNkKLjFgonG+93mSPvJxfQhJpsqY1Fza4Wfv50eCK+X0XM0WUcAv19+EKNcqSf+74wwFti8h/AWA5zzkHJs5bO0hrEVaEvhGCxq6nuUgCPKpdGEA0uCeANodYaJ4+L+BcHIhvDsqmTuzLdcKLNGcY+FIRgLVnkqRARzK0JMD3x5g15PVyTy2Ut/w2LjC//r/ZZGyyTcYH0AqJdBLpsQ9PKt3nAAAgAElEQVRK+AbeCm+pnDhFbr29sTSOzAqWoxQRp9STgsHVmMvJmOHZK9zlkOGwwsVtVpeXWZe+yfJxMWU4keSVYDopyN0UjILSNzdX4ynf+8cf8PA3D0nbPVau34EkQ8Y5pi5RMkaJmjK0K3ChbsgF5qbB1yLIEGl6qDXX3hP/NB3klZL+HKVv/isIjkPO+DGhvKOYpZ41TF0M5N+k+Z1rip2DVS/zRRdm2H824TwImDOGWkKsvUmOFd4WV0kBSmGFw9E0WcQ/Jn1D6xfbL9l+sU0/XSONMyIkg+MTlJIcHx/w4tET+r0u6bUIKsfp2TF7u/v0en100kblFRmSxHn2rSUkZjTB1IKDkwN2Xr5ifWWNpC/R04okVZjLMYPhhHzlGsYJWv0lUqGQxpsZm9CsHFeDq728V4R+NDSsMYjwuLAhwSKD3GAhqaHjiFa7Q106dJL6zL3S1HWFED4AjBLH8oqk3WnT6ffY29/lpz/9CceHh3z04Udc39wkjhMvLzSGdrtFUfaY5gWXlwOqqmJjY4Ner4e1liiKXssGercpP89niQAhfC1WIuivtFHScefuFv3eKjqYt8Rx+L5Cg4ypncISURtBgSVWvlZDS4iFQVEgDEQuJ5UldTGmKKaYdh/rFMIpjA16Ch0jpKIscx/42gJrisAsGnZe7fHrL37If/fXf4PsdpmMJryzeRepPOhdXlomPrGM8oqd3SOWOh3+7Dt/ztOTY1bWr3N0+RScQDmBMo5qMuUXP/0Z3V6XdhYxOD9mUlRYdU67vcSNjS2uL/dRVUE1LhiWjqPDM/Jxycb6DZQUmKBmcFIRpyk6iamqGmmDNNxplFQ4W1OVFU2Nm5YxVWmx1vH82TN++IMfcfDqJX/3H/9Prq4O+Z/+x3/LzZv3vez
O1gxH53Q7q+ioRVn78aaUIM3aZMkaLV2gRAHDSwbbj3j52yfc2LzNklwiVW+BMXz2xS7f6F4n6WwhtObBF4/Y+fwx39xM6HYT9kcPiFstVtIW57/dpd454v31jLPtn3DClBM3wlYZebWMw/L5f/z3bNy+weGLA4yE08srzg/OiQcTXCy4dsfx3ts9/vTP3+fRox1+8Pf/QCkEZ5MpxtaMyor+9SXGg0s6sWTz1k0mJIwLeO+9D3nwYo9xWVFHNaWtWFtaYWu9Td3OORlOeLL9BKNqlm6vsH7/OnmR8/D5Doen57TaHXIryK7HaKEphiNW2zHr1zYQKkh2rC8jsDikVSF4NpjaUJUV6XwxBL9sEWlJPh1TFPUs4IqThDjJ/FND7fUs4HAhSGv2Qp9y9rUifkdeNE3zMiksKN/7sd1uUZmKOPaS/y9jeYTw7Syk8I5sSRyTNjVos3PwL3PNC6RAxDFRElPXXoK1uraGWwh9jLG0Wi3u3L5Bu9OZncObhxNBfmbs7Js5GfYtIYKpyxtf+F/64X73PviiHh8Iaw1ZGqN1gqy8kkFphRA+uRduhI+xIJQ8/NcBNg+b/b4jnUHYCu9IJgBFbTVGeUVVZWpsfUqiErTrAZpg6UfSq4m7jv7qHa5v/lvOTkt+/vNnfPrZZ5xeHHJ0tkdeDnEix8oCRI2hRAhLpATeyr8BbHOwJYPEvQEDKrhM+zjClwx4QGWC7X8w51oYW02sKJWv4/Is2oK80S0olRY+dxG8zVm410ftXNZsXiMW5iSGWHitWNgz5y6R3pHRzcpM3vz815hE54I0ugGzgVXidRAqZsAySNyZSwaV8g7F/pwNde2TwrWrqMuSqoyItF8jtNYoJdBJShonWGcpKjOrh2vAX+NKOb9nfqnTSoOrMdYgjE+1OwlGGBxeJeeIiXSKFm+AKeHXUxv2p1gI31xdiBmR4z9nLndt8MHi/Wn+k0LMrs1/ad34gwFtAZosXBY3m9lNNkMAkfRBsi+lEczSN2IubbNuTok22YkwRuefJYSnxYXwAX/zPRbB48KxWAf35mMmIHtfr2RnskgrPIVqrJdkWBss6p231hYzKw2fmVSeP/Lduk2YvM3Juxm+9Gumdb73Ex6QOWOxtS8cl056iZyTxDW0nKKbZKjakZmaZRWj4xSSkiQz1NIyKsa4EupRwYm55PJYIDo92lpQ6ZLl3hJJtkRRF9TlkOFoQl4J6mmFM3B+OmBwdkFpDC7+HN3qsLrRIdYVk6scZzwL6ntzBf01wbpZujBBvNyCAI4FC5Q+zW0Oslkxz+OKAOSU9u9bm4pJPuHk7JS9V/vEKqbT6fgC9IVF6k1QLgJoE41kdqG2wtGAZhGae4dMC3O2yeIbNoeyOJyUvnm4SjCARPom1UhKB6NpSVE7Li5H/OTHv6C7krK81uPtu2/hpjXlKOdoMEHkgvv33iUlY3h8xWQwYToZoW/dpjPpM5lO2Hm5zavzI5aXl3n/9tsMhpfs7++Rj6f0RyMyrZnqiNOTEy4vLzk+O+et+/d5d2UZnHfjtCFJ0lD7AueziwHxOGNwIQPuGpkwIRMvxEzLPbcDBqUiXBR7x1fhnQZVFKO0Ioo0SmusSWg5S6vdor/UY293j4cPH7K3s8sf/8mf8N57XyFrt70lvfEW/pnIiHSMtZaTk2PyfMry8gre9r/J3HkAPnNztWqWzFFaIrVDamh1Y67JVdKki3Vm5qI4nkwZjiZUNRingQgnvFTSKR84VmWOFTWxqH27ieKKVNSsL3eYnAw4Hwz47PMv+PDd2zinKCuLd3jUvh5RBMeyOse6mroo2Vi7RnEvIRKKR4+esvtqn34lKbNlysGAjTVwtUNkGZ2160Rac+vefZKf/BCmgihK/PW3llYcEQvJ2dERhwd7fPCVe2gMrhpjSr9BqbqNGQkiV7PR15weX7Lz8jmX5yNiFdNbWQ6bna8hk3GE0BpbGrDQijNarR5JnGFtzen5MXVRonVCO+kgjGR8NSJOU/7v/+s/IG2JKY9ZWhJMx8OQPBPUteXoaB+hU2I0qLhJ16B1inIlFREXL1+ylGjayQr1ZcJhec6Ne5tY2efeh3+FXTrli0fb7O4dcPdmj+ff+weq0122z5Q3tuGSt96/R2t5idj2MFLwT5/+E+PjPe5+/VsI2uwfDzktTujYZZ4/P+Tg+ILl9dusXrvD2cmAyAxYW0+o0oT//OlPuBje4eN775CmmifPn0PcZe/kzBuzmClZX5EupXz9g3u8dX2F49MBO/tDslbNzsGIk9xLQONWxv7wnHJ0TrcdE8WKzBmSaxn95YTD0ycMS8fw0jEUGTUGKS2VMHz4lQ8YnQ5ZijX37t2fsebCKSa5wdgysMue3up0l4gTHZilEDyFtSuJo5AAJDjIzjdQ5xfqcG9s06xhtpO//udCJOlmK2lI2lm/FwcHuVZdIeLoTajG/A1E6P8HTcJ80Xjotf26+Q5h/5RKIiONVJJOu40Tevauxvi6m+vXr6P1rOP1LEh7zdFa4IMPJEiBXuxFNvvwecLxX/bxZefQXA2fRAVB1opopS0GeWjkDDhrKQqDsaAXg6//esQWxtcCSyssmNLXSsmY4WjM7vkVe+djXrx8THH1ir/963/DvduriEqAdDhhQHlZmtCCa3eWWLsJt969xl/+D5+w8+qQf/rBpzx9+pST4wOG4zNqNwI5BlGitPVS+NmX8uOiAR2zkS6EL5kIpRvNHliHPm9SKaTU/vcuKFiYB/N1Hfp3ORfMNJrGzHL2/s3fi+U3r5MHcxDZvNf8a79JNPzuY/PfzWMj59zM2fC1sp/fY0Qi34yfmyTKAnDxgGxet7cIEBeZMV/7Z4mV8ol5a73hYJCHVmXp477QBsDi0KEOPYqimSNl40I5NzMJjFxlMaqaGeU4EXlzM1dghHeasE4Sa+9KPL+Ys8gPBCRRRBp5LwPr6iC3bQg5XxbVjBUpZEig2RlptCg5Fk28/88cfxCgbc5gNMAkLJYhkLbW+QDYzalHKb3j4+yEG72o9NkOLzMPHd/F3HbT1sZn94QLrID1zZcXaOHFAdbUtc2sU8P/MwTt/OQsy4qiKDyJH/pyCCGRznpE7ry8zlhDXZXYqoTZDfa0aSRAh8FXq5AoWtj0bCPX8CdMOOPwnXxndSNDawDReH5JkihmKW1TlyWqFsRZjKsq7GiEygt0qjCRgiSjFRWkdoSrBZPRmFpUXIkJg7MhaWtAt9+i046Qy8uotMfkckx1NeSizrHBpAUnyK+u2J2cEWlLXYIg8vVVCBLiGQClAeuzRUrMABkQGjiHaxBAuQDvVGgaZodZqwUbrlJlaq5GY55tb6OcpNfr0ul0abda3k6+ef9mkcEzdjMgiAeZTVG8bdga68GYt+z1pjmFsUSmBluD8dkyG5ipsjLkBi//0grnLKX1kovl9S1Oj8YUVwarNVGWcX3rJr2lFarKcGPrNqY2CCdJW13W17scH58zyq9YurbC8uY6UTujvdIjWe5hMs3GrRus39riUtWcnp9hygqVaVY21zBlTbrUYWJLlq6vcu
PuDVSqKIoKof3YFDJcc2uDW2fjcFRTmxKBBhTOhPti7UKyxHrXSPCOWULOagmbwzlHFMe+kaZzvkm51rjakErFqvKgPI0T9vcO+P73v8+L7Zd89PHHvPvV94m0b3HpjVCM/1k4hpcXFPmEa9euoXWEVjpsDP5ezovHHU0dqtKeHbIWImfQUeTrS53AmZrRZMLFcMikMhSVpa5KJClCxZQmB+dQ2oEwSFshrEE5hxKQJhHGWaSIODg+5dn2Dmp9jTr2606qvKFCMYaqLImEIYsVWay5trzEW7e/xrU7d7l5fsWPfvRTer0lfrN7zPBgh/TSUOQRtiXRWYel5R5Ii4pjdJUR68jbDwfnT2EdiVZIY1lb7nHn5iqnZ8dcja5YX1vjq/dvcq3T5vLwgNOrESttyf07G5y2UrrtGCUJMjmfaXZaoaRF2BpRg04cWeLZQ6RExyllWSGEptPp0E5TJvWUSEXUpkQXBS4vqHVELLsIoT1Izgs+f/yMB08e0e8tcf/+V2m1l8gLWF2KQQ45OX/Bk8e/4uvvfpNe9w4bNwoKJuxeHnFrYw3VSkm7NVrvo9yU8W7JR1sVp2rK1v2bPP70gEk+ZVfvMrmuWL3TY9JOOI1TPv7239DtrrH9Yp+9kxMm5op8Iuh31inlmMPRAaWKiOsJrXaO6Y24cfct8scxFwP47j8+wgqL6PbotFKWcsnp+QhXl5yXY1pLHQaXp2yXEx49P6IfaTauX6N2GacXJwg7YSmLeL67w7ERXFtfpZcKvnp/i1yNmQ6vWN/c4uzwjGFZsjcs+NNv/Sln249xcsrgaoKWgrt379DqroCIQuEFJJnE2GSeZXeSdrfv26gsJgaFX1vzyYi6HPv6jhAw+doMv1p6abt/H2Y7d/Oz/d2G6G+IfpzDs84hcKnLiroqSGUDyF5DemFNNsSxJEo0iZNYU30JHxb2Bzd/FULjZIZKpf93AxqZf/bhwT4bayvEcpk06TCT7PA6bGzYO6/GaH7bPMnNn/MHeLgv/emNRPR854WFJPX8/JvaHK90wkKWSpbbba7Ghok1IH0tflk5agOJ9tcyeEaEesbGC7ZJ4Taf1XyiRWDQzoFxGOuojODiquD0cJ/x4T4vnjzl8eEF9eoWUXeZa607OLVEhUOpMvTIlFjnE4bWOSIvACDrQ9aJub51i3fe2WD7xZjnT3f49NOf82z7AaPJOVV9hVRThMz9eA+S3kbS14CFpi7LhoSutY7K+NIMECitUFqjo9hfS+t7loJb2JOUB2qzHp5zdNvEo4vyyEVTteY5zc+LYGzx34uSycVehc0hg/x55uXgGhD55dLIN1/rY6bXgeWbxxwM2gBmvLOmCklMpRTW+F6q1tiZiQnCt9uI9PyaN+fRnIvF11TPEuzSs3ZxHAdgPO/ZWFcGU1sqZ7CmpqorjPN9SQU1tYSKkIQIiSsfjTcpKj9eI6CdeCYfgunawrWxzf0KMa0N99va8MbOGyzO2cl/IaDN4aiDy0xDsHlWJEAT6U1HjPDOibWzoa+WC3b4jZ42MHDW+mC36VbfAKhw9aX09L0Ltpsu6KBlYGKcMUEK4XxtD/NJ0UwWi7fCBjGTZDlnZs42zjqUFcROIkqDqx11BCKKcbWhBVBOcEJRUhMR03I1kaupI6gSSWIksvKN+4gkCJ/1QIGxJVIYIi1IdIQSCUq1MMpgoooyAmF9TZaLY+IR5NZyYK8Y5pJuPeLi4IBqmmO0JFcltgZrprRHE6xUlNYgY42aOrSb4oop5VBiDJx0e9j2mBUpWJVTOpstLuMOYqrJTyfIeoxEUFceOFosaPzCJx3WKZSMiKVPoEnrgnRVzDIpdrG3XWBqTBMweA9NkBqE8kDX1li8Fhk0rXaPpZVVhoMhF8MrrsZTwKGVppW16HU6vu7HWuIoQuAbYWrnUM6GujmfUW6CFSEVDk1dKVqVwQETmRCJiK4raZmKkbHkFjqxZljB7uEZ/X6PzKSewRMp40nB0vpNPvhklXxSEWlFbylDxZonh4fsbG97kJUkrCyvILstbCvlK9/+I4q6IMk0Mo4YVTWrG1ukf9ajsoalfp8iiljfvMknrT5VMaLTSdFZTNTK+OCTbzCZXtHuZsRdybQ4wxlJZCOEkmitqccFk8kYYQ3S1eSywskSpyqsqJFOU1eSAogjBVJQ2wpU2AxwYPGGHg4iBUrURJGXEnircBFkUE2RtvbullpiDcRxSqvd4/j0jBevdnixs8dHO/t88u1vk2QJxlgiqYgVmLpCuJrp5RVPT47Y2LrJxuZNEP5e4ULNoy1RKqXMS7+YRz3AEaUlVpQIpalEjLLO15pF3gCkVAmF1fQU3qhHKQwKKS1SV95VtHJETlK5NjqDOBswsYZbq2vc//Ajels3eby3S3u5Rewi4qrACkvdWqY2lq7OSbBo57h3+zoTGyOEpN27zo23P+Ab3/kqa4OC7/37C+rqiHExZfP6Gu/f+Qpr3RZxMkaKFKwkDYmfOFHh/kQo68GwKXKOLw/IJ+dk2vL+rbdIzQkPfvUzqGoOzwbIzir3ri3x1sYNZNrDaoswoKzEOkMrEuilhNPCUk0rEmlxVN7ARkbkUlMnGQK4sdZn8+Yaz57vYCqHinOupZqWWEZHq/TbH+KiNpWw0O3z1id/zrPv/6/svPhHLp9/lw+++h16Sx9ydH7Oq7O/Z2U9Z/MjSXvrDkzfZu8nP8amI5ZurbH76hG2esT2ix3u37vL2vomsbD0I83m6SYn4wFuq+KjO7c5uxzyaqq4kW5wY+0Or16+Ynhec3DwOaPhhPzoCCUESq/y7gfvM8h3+OWDn2LrMXfu3qSMWzx6esL60S7f+uBPWUtus/vrn/H86hjRyrBXl2y0WiTdFV5dnKD0hLjOOTs45qhSjHPB0FW0rqWgahIBaWuds70LnFNES0ugOgxGVzx4eszqep+j41M2q5y8iHj67Bm99U12955zdXJIJ0nYG19wbXOd4e4+NyeCrV7mnXg1REjvsThDNBJJ0qQwZokvCEybKqiLE4p8iGM1ZIht8wYh5LY4Ec2xCiIUZTS16TCXSwYGICR1TCnQIgVqqCpMUYAtcHaKczVS6tcCV/93iU4MsqVIpaKtnCd/5Rv83ozRA4SGqI/qJURagsywzpuHCAHKerWILXLGZ1dUnRi6KUIoUN7k4jUurynC+7LjNenbHy5w81ACVLiHzHRGHtH4IpTg/WnnNfVejxRYRmbwmDgWrLU0eRRzYgwTbVBpRtLtobQLrT1qwOGsBlPgdEIhIhyOxNYoWwdgFfkI2UzJJ8dMhmeM98/4yed7/PJc8LBKSPMr/k0/51/fSPibd95he/MO/+nJNi+ff8Hx1Q3WbZ+2iFG1AOdLSmrt67C92ZHw/LAGtGDpRsLHWzEff2eZf/XXd/nhf/4xn//yFzx98lsu8hFW+FotJ4Ir94IscF4nJSgqy0wKLBVRIoOZRnhM1B7zK9/6Ze5s7YLDclB9WR/XgsCamjrUk80AmrXzURZA0JuNvmEhVl0AfLPfzz3aZoYfSoU61DAc5mDtdcC2WGbyG
mj0w2RWm2qtDf0I32iWLRy1q3DKM1NaxT5mcBZbezl3JuOZ8VzlCipRo/AAfuadJBtAFFRbziGbfo+ErIwUXiau5Owc00SBFZgKsJK6klS1pLIJyoE2BmMKcu1wtibJHJEqkCTe8FD6evbEKZhCXBsKGUCok9QiAgxC+RZC1mkcMhi42eBG6mNUKaNwryF0ePN9J/+Z4w8CtDWHC+xRk7mAZkIEuWMzMJVs+F2MscFbYHHAuqA9nvv0mPAv39thoW+MBG/N6ZF4c7xWtAizgdBkIWZaXgfGQBRpnItnrzHGYSvjs0phQjrhjSnKosCUVWApLFZC5QxFVXhzEimwUuDq5nN89sW70jjvcKNkkIsFUGldKJx0M9cwhUCmKaLbJa0rxoVgNDrl8uSILDe4yymmrLGRZFiMSaQmrhVrLmGCwElN6iL0BISpIJVEiYTK4i7G5OcTrqRFJBWX2lB1r9NON+n0Ei5HFZN86gf5a/m6+f8NQyoDbTxnDZuMg5vJ2aRQPivhQqmg8kGGcI3WPNxX17xGEUUpS0t9kihGSUk7y7gcXHJxfs7Z6IrB2SlJkhDHMVmS+t5Uep5pEuG6Nj0BGytb54cM0lqMcFipQGiU8w6LSEVuLFJJ2kmLyeUlg8E5V0Pf6NmYOiQHQGhBu5cRxQmFddSFxIoWcbrCqBxhiBFRj0mpyW2NtRIVdaiMoJiYUDMHQnTQEUxyyWScI6wgSZfJWisIIZgUof5Pteksb+BExSi3uNqiZcLllUJGDjfJGQ2v0KpNt9cFcUDlpj5THbWpnaM2zmfGpMSY2hvN6IjaVAi8s6IKem0XLtasv4szCDwwo9lwpAyZTBA6QicxqdYsKY1LEsZVxfbLV/zdd7/Lg+fPefve27z/lffZvHYNZVyQP2pEXVGUY/Z2d0BELK9uEEexr6GTPnsnpQ4SSotW81oF5wClMIFJFziiKPKbDl6mHMcarRWFkwgVYQntDaxBGodyEmQasvkOi6Xd7XDz5k2ydovffPEFV+MzPrz/PrFNsc6Ql5Yoib3MyjnSLKPnHEwlSOUX9VBL0em1uffOO2zJdSZfbHNrfYtvfPwJxWiAtM+JZMKwLKidpdKSOpFMtMW2YpSOwUoEMcPBhHw6QZDz6OET6umYq4sB6ysrVHXF6cE2ldBs3rxHv9OmrAdI0UI7y1I/o9dJOKKiuqho91bIeksMqwoT1mNbFUhn0EqRRpIbm6scnR9xOR2wGq3TtZKvf3CfoYtIljOKMJdjKbl9fY2tP/0Gk5OUH//ilxyOX3Eyjdnff45IT6lMjhCO3Vd/x9byN7n/yW1sdYVxjp39PWyc8ZtHD7l97z6KGKdKxqll2475zd4uX//Wt4mKkntbq5w/fMkPfv6f+M32Y/rLfZZsh75wXJ0coqcROvYGSmdnZxRiSjtpMxnmjAYlMlJUoy4iXqMoNrj14V9QHpxymp+BtJR1hrr1Ca3iAnl0TF1LtJV885tf5+HTHZytGExzHjx5zmU+RGiY2oqJc6RxSmkc40mOqw1VNULphFt33qNwlpOjYw4PT4i7y7zYfkFUVUinUUmLYVlxun/E4ckFGxt3Zs1em8Rl2LwWN13PEljrpYeANYYskiSyQolQ09aQZn4bCrCPMDeaerWwf4cWH6g52SYCc9MEitbMv4CIYtrtNiKuEYmeSbdmn+vE7P1rWyPjmMQK3yvThehggfCa7TGiIdwUMm7NP6+hfELM0e6kvP32HdZWV1F6gRJiDkzmgQH/heMPE6wtHv6WuDcemf8sw20qhcBKRQwo41AonDS+dYIw4TJplNWkXYG+Bm7g65Q6ccRalhAbEAYvj48kFb5eElsQieDybCc4U2FdzPF5zcODMx7v7vH0yUPK41Pu6Sve3ezwl/fe5sPb38akq3yYWNLLbV5+8V3+6Wc/4j/85OdUFdx+9y3u3rtPEsVo8CZmsQjhsPWPhfGE9MF+U8ddVQWGAbfvdjjcVzx6fEKRX2CUTzA2+4FaSDY28jsHKB0jAjCQQgRZTsOILYzNwK4YY1+T7S0qvOalNzXGzWvJ3gRfi74Nb7JfizVwiwCrkc6DB2xaa7TWs9q55js1r30TpC2yeXO5ow2fI2eu5w6oTXDrbgikUFLhS1D8a5Ik8VL+rMU3Pv4md27d5cnD5zz4zUNGowm1ctSACfutDHt041rb9EujGdduTrI4gc9SyGC04ovzfZziNBqFjlOiJMLQInKgjaIyBWWcYTNNFBmEKMDFIdkT2DTnXY4xLpgG+iXO1MEpPrQYEQtLn5stiKEFhPOs/fymuZAc+/3HHwRo85TznK6fmX68oee1Zk4bz+rVmislxML7hL4x8nVTkWZw2QW5lgeBizag8GbG4rVJZM0bNqK89v5zoOfDtlkmkwVNcNAze6DhXQqH05y9wxMmkxIlIxy1v+FBkmKDM5BEeC291rPCaCklKkw6GbJCUgoiHRN1NIqIzrVV+rJG7fQQZcVk94SRPaMwNVUkyYqEJVJ6pBTWcaYcBZYlFaNpoTHIpQSXai5PzpG1JG336CaKYXHCxfCEyfEZdSTIkg6VtFRKEJvXXYQc89HtQm87B4H9nEvYmu1ytlBInwHyls+hl4atQ48829juYG1jlKHQKqIdJejY0m63uXnzJnlR8OLZM46OjhhPJuT5lMlkxFlp6KRtVlZWvAuYkkgVslrCg8AmmMc6v+CEMMW5xg7YgweE5nww5Pn2DrqdMDVFkJIYtPYyCClVaGmhsE6QX41RUYpAorVg6+ZWsKMVJEmCFY68zKmsQVYyjHmHjr0Mz/cw9A2sY63RQpDn1mfpkEidARo/8g1CGe+0aR1a1AiReyZKV2ihWFvqgnHklcWiKYxgWlpKZymrmkRZ+p0OOBOcrMSsjszRSAQaWYeXpjRGJcZYalv9x3UAACAASURBVOOvq9Y6FG/7bLwR3jhAWO+QWgvB0fCSYV1RW8fF0+c8Pzzh+HLCxx98jfv379JrpZR1QY0izto4Kzg+PmJwOWRzY4tWNyWNY6qyoihylPIub34dkDNprXM+syywSOGII02k1ayYWSlFHKdUWKz089FaQ+0MkRMINM4KTG2xVY2rDf12h82NdbJE080Sfv7zL7jYP+Ab731CXfkkUVn7ZtQqynxvP60QwltnCwyx8L36lDEo4VlhrGXvxQ7O/RxhCv7o4x4rK+uYKuZyR3NVFYxMwWUxYVBOcSo4uLoYqboYV+KM5vn2OcvdFnfe/gBsRWdFUx4fMRiNKKdDXj35Ncsrm6yubNDttLi5qZkMD3i6/XPe2bgNKmVSjUlri9YptdBM7NTXaJQGMxmy1m2z1FZcXI4oihXGGlxPcWNrnVZPIRwo19RH5ohKURU9ltc/ptTX+d4//n+44pS//dcfkckJnz/YZv94mx8Md/mf/92fs6z7jE+P2dq4xSSRvP21+zzffcTVZYu1610O9w6w3R5f+6u/JL8cwmjEYOeQqh7wYHubJwdHvJO+x61uj3ogads+PTkl6yYMJwVnZwfkzkslbZmy/eCYNNHUecLB4AIVXVKo
be5kkvdvrTBRhl/kAw72xyTVkK21a0xHY4rijId7+5wXJVPrsDricnTJcHyJE4YsbXH3rbcZDoZcXY0wVYSwJe1UUzvBaFoyqUsOj05ROuFg/4hup00/zdg7vqBUkvGrIcv9da6ufJsNGVZTayzqtcKM+eFrgr3iIM9zpuMx690OuxhsPkG5EFsQEFIAbgh89ntxIw9/m9p6eRo+SHYyvKAJHJ2bsTkEcxtiQx1551DnRFNmt5DzEyidkKadID1uJIpvHs0eEnad4MDsrf59gO6cZ41rIzGupLvURmcKI2uPMsSXuVf+yz8EbiFE9Hzb7Eo5v+t6sZ4PRO3itXTOuzeKyr82lKEgQfQgTTJ60RrpuGK9m7DUkj7Zq7xTp98XCjA5FsXJpOZ8VHN6uMvJ499QXRxycXbG4aSiXt7izjsf8s537nI7HnBnaYTop+xWJT9++ID/5YefcfTgU45++0Om40PGGqKtO6AjcmcwWFA+2WaEo/AZcBQSV02xVUFe1ewfHvH5b75gZ2eHnZ1dHj98wOn+AdOrIdbUiDhGtzJardYCMAnx6CKY8ZbkWFtTFKEOrK78HGwYrBA/+q1bzABfA/7eBGI2gB8h+R2J5KK52u/rSfamLHL2niHObdi5Jt5ujkXTEX/b3ezvxff9Mt+HCuOdzcM1sQFcCSFAhvES5LVa+bozh6OqDUsra7x17z6D80te7u8zKgsqAZUNzKiz1MYhnUW44FQp7Cyh5HG4j8kb5m9WlzpT0zXAU6F8GgghTFDyeddaHWmEtrSzGNXTtFMHVPNJ0kydZm2S3vRPSu9AKYwI7RdqYJGlDBJIQrxjmwTx7LevYYrfd/xBgLZZcI547QRfe04zSGca4AB68HJG0VD4dn7Gb+pqF/W9i3b+nm52v/O8N3XBvtcYr02ahhVqHKSa7wpghQ2UZ/P+wUp21v08TBChmVZwcTUhLw3OgKlrIiFCw8bGlWaezQxJi9lkmGVCccHEw1HXjsvRlKvzc9rtiFa3hVOatX6PVZtxpjIm1Ey0QBQlcSFwY4sSknYnIYskKzKiLB351YBpPsUaQSvNMIVlrd9jpdfm5KJiMJ1gLVgMqPmEh7n4YvF+NxPLOuslroEBtQsLgqfa5e+81memDN5qWRAt9PLQSqNUjNQJWsd0sgxZ1khjsWVJohTtVotupxNciCR5UXBZDMgnE8pOx4MQY6lkI7FV89HY1EEKhxYKoTw9j/WCI+e8O+YkL3n05Bm5zbFBGlLXNWkSBR22A+NrNqXw7RBQ/ty1lKRxilIaZ7weGqGQkcYJ4eWDOmyE0vfjc6HVOM4SSRncDfHXVWsMEdZInPQF2UIbgpsukYy93a0pUdKiheLk8BBpBaPxlLqu2D86xMWaGsujJ09YanW5s3WDTjv10tJQlN4Y8TQrmk88BKmzDe5SAWM3WHvmuhTGcZRIhHVIY6mE8KYtVY0VCqk0rnIcDyc82zuCOOXu7S16rQQnK/IqJ4sTtJaU0zFHh7ssFX1W19dCNlH7YuOQorPGN3pv5pRvcWBw1vep01rNmLm6NkRJQqolla2gKnzLiRAEOuEBW1VWOOPrybIkZm15iTQyXF9f4b137vLqyQse//YL9ve+Qra6io4SlE5ROvFMqlIoYZnUNVJWYAqK6ZRIaGI1751jpOQ3T55STa64fe9jBuMJeW2I4oQ08UYm48tLnj96wuDinETHXAyuQLRodSKODnfRAtbWlpnWyjPPrYR7d++hlKLVafPgi4cU5/tkvZQlJSiOL3j75hLtb32FvNR88fSQs/MRWX+FlnJcjgvE5JJEhprC6QgcZNahS8FkCuctwY+ePeajbsw3WylZqLtCgSJCqA2uXV/DZiN+/fQlK6sr3Ox3UfmUXqS53b/H5spHqHST1Tt3KS62yTba5NUp/SXBn3znXX75s5/x4PEXRLsxrdUurx4+48ZbNe/e3uLl7jEUhs+e/JaTizHrGzfZvPUW9+6+xc/+/gfsnx0RSXjvnTvsnJyye3rE5WBIXWta6QqmqLi21GZ8OWWpv8TNm9e4ttnnOn3evvWX/ODhZ2SjIcXRAZfFAf0tTXlecJlPOdp+ThJ3iVWLKI4Ynoyp8pLr11Zoxxlp5SisoFA+KZcXJVorLsc5J5evyK0hL2parQ6X4zHd7jJ5Lck6y4wnI67GBXCF0l62OHO+/dKG1fP9DqAoCqqqIk1i7myu8fhzi81HNCDICu8wS1ibm6bGrpnrDdMWVCsqil/bV5sF3DXrBHa+Gfhi9WAJIGcJ29deLSRKp2iVIETFDJIK8eYzaVQXDbgg7KHOVlhTBwmgxbgEg0FFCicgSlMfXC7sN/+tHM2lnp1dUy9EE1uAwAZbfUfsLLGzOFV66TvejEm4YGYT1Eom8qqCpUTRWu5AG9opJFEBVNTWYl2MMTVXozOmZ4fs7J/x/Yf7/Gr3ChWlvNVRvN9t8ed/tMzNu7dQW/coXY+OaTGc9vneg5/y8OkP+dVP/4nDnUNe7Q4YjCyRUsQ2o5jmRJc5T5684C/+LKeMU2JAC0leG87GI64uJwyPjtl5/Cv2Xv2Ww6MTdvYP2dk7YjQuyacVwkmEhSRqISNwkUQoOWOf3rTFhzkoKsrSJyOsZeZd0uBd29Rf+aTBm2zVosxwsV9b89ibsWvTD3Xxe/w+E73mNc17Nc9pWLbm+xtjZqxf81yfrJxb5i8CzDdr13w8F+JcAqMVTN0IbJtznmVTIvaNqI1n+6rKsrt3wP/2v/8fOAvTacVkXFAbi0w0IhJo4XsR22a3bjJH2Fmd5IyeFwtpG0Eop7LBcykEy0rgjPC1tRJQ1qvVAt0iKYllgZJToFpgpxc+y3nfQOt8OmTRAd0GA0K/9ix4YmBAWJxtCI2F++VkQ87+3uMPArQh5qANGlC0CLIWqFnpjRCMCbbqIgSGotFieJtZIQVNu/oma/AmrTybOAFMNbR3M3DfnAS+IbZXhC9Sxc3Pi646Hs9ZXCNpDOyStzGtfJY/aJlrJZi6CHQbrTOsAelU6K4emj5K33zWGU+ZV3VNpP07+0aMQWrpvG24CAtHJSQj4agRlHnJ0dE5xFNaRiGyDKGhvdzh9s3btGRCfTFleDVClTl5VSCBYnxFnlaITJFIgR5XRJljIivcaEA9rWnblEtXMHQ5pfVF4sr4nN088xlsngNL5K+9Lzlumlc2mYbFa+8aVi483gBhIZxvWG1qnPG1f02vt2CwSSQV7SSmqisO93apjaEoS++KprwbWaQlWkk0c3Dm8FkxreZTxH8P6107I0kUKWSQthJML6I45s7de3z43ldIdczTl084OT9iOh1jg011VRZIqcgnU8ajCdNpgbGOJGthrKMsS9Ioodfro1QUztb48awEQmiWOn36vR61LWm3Mzq9NsPhgOHgjLKYMi0LTFVhiXC0SLKUNO2StNpEUcy4GHJ6foKtDUv9DFPBdDxAiylZlFJcTanziljFtDp9dl9c8OrlgFJYTs5OcOWIV9ee8c69t3n77h3a7cxnvuw8uydVWMRESK7MMwueoURgLd5ZM/R
VKpSX9zuvZA/3BCDhWEZJcSAyb1z4nMNwl4EgsCAautKMymvtLo4NGHT/jOn/4pv/+7/4HT40dEV5EbaNZLLpeat599QogNIXo6VeC8xURFMZ1w/fYtbl+7y5ff+Bm+/ku/yO71G7QELhdrzo4fcbmuODo75slHD3n87ClPjo44OTnj/PKCs8UZarVGr1asafC1QsWMTrUEsyYmw2Zl2+SNmCXUXWGKHKd7yw3pJVfKo7UiV2BVBjHgnR+sXgSslr0+xr5ylgBP1Uvyy/P7KIGr0MlTgDGAiwLoBS/nSogdpPNKR4mERZzNEL0EpD0QSj8niG9kJDGStEIZWSdKx8TOUMKIQYJd02fqKdZx3gEKm2fyJa2wxmA1GBWxWoSSIFXylCJakVA3kwm2zIgWPCK8kuea6Dq885ggYxGIIs9eFMxyoWLqEDl/fsK6rvG+IxLQRtO2HREtnqiKIRP2PctKS9XLoBOFcaNkEAEVDEYbscbRhoBUB0Oi90WJXtBakVmFKmxqz9CEaIYKFCrxoGJPg9SbpE2Rety3qlVh027TX0xfiZWiZx+vKvqE34cN0wvDAHYq+kRVPhojFE7v45Cs9c8/PKcg5sMY9eD35wmy9Pe7FqW49H6RHkElZQ/lRenUZApSS4RRSYNAibev5G298FwAHSkyjTb980vSp53AplolVl4CAXQErwRExEks5RD0I/gALWSpDy/kGmeELZW1nkIZnAp4A44IqiHS4tyEtiuIUUM6a4hGZsp4sDltgNYLIAJxoEgGPDZmRJV0JhBxQgFCRLhP9CfSRhoiuIhv/y7QI6OkuD3VO30xIU/pWAjJPyR9RZN6nlQQTwYtdL2hgJka+z77UlsUxiT6YbRIZm/9FLDxoRjQxxhTdi0Vg+i99JEk+dJUw0glZbkhtIqiiCPvgtp1VCHwfLHGN458OmOvnHFweMDYGrqLC9b+CBXWiNIAMDDHVQrgHREJylXURGXJigJjDW0X8D7iXcRHobtobdIB2rMPpeLX5QadZxgXmWYlxd4u+toelDkBcFHoUNKxEjFANqA5KlUdOxQdMXZoHQlKpyB7MxZgGBxAkil13PJt67nBKkpNWdMHGL0KWYBkxqgAqy1aZ6mx2qPaAA6cg7UPNCGivKeIIjHbz4tsPClrUSkxj70qZNr8tpAt1SNZfQNzn4Snnw5agkqUIbM5o3JMPplgJmPy2ZhsXEDr0SaDpkFboR6GnrcN8hr9cMaUJCs9bNDDjq2QqjGpoVgbob/Y5MniW7quAhwg3PS2a2mVo+paXKKumZia3unfclqnRoL1LLM0rRFKlTWiFtkEWQNJYUtpLSh62jB9lHG7Wl6xWFyxd7gr1TBlBAEzFqLCBU/nHRabhIMi66airTsu/SUoGE/H7EymXN/dxezsEghcrBbo3NC0DUWmUypv8I0X0ZTxCL9coH2gVVEOd+dxmRhNj3RHc3FCyC1qVNDGSB0N0/k+wSt823AwK7j10h1CNDw4vuT05Bh3sWDnGmTFAePxjKY+53vvPeBkeY2ffX2XsDrmW3/2Nj/39w75xje/zrPlW5ycHzEZz/GxQStNZoyMVQzkFnRp0UVBoTRzU3Dj5svMX7rD6uoTdqYTvvrGz9DONB/+4Z/y+JNHjLuGPHievX/Ea7d3OT0+4fJsSaM01kSM7jj1olRldJTgykugqJVhNt/B2pzpwT7FzoyTi0uyckK1XBNDTIdYoOl8os2J2XvnGoosJysLnHPo6Knrji4GTJaLalf0FLlhXOaU5UR8Db2n6TzGZMQYaNqaqq7IbCAzBd4UFLsjmtWC1rVk+YixKcjzEWU5pQ0GJVKmw2EQldD6Sm2GAFnuk4gOQvnNDXzl1Zd56dqOqOetVqyu3uX/+O1/w9XTh3zy3odcv/VlHsVnVCdPmY1v8Zvf/A3uf/whR48fMMuWqHjKn/3ldzlbH/H4wce89Sf/jqzzZC4nCyW+CzTJLHZSTlhWF1R1ZHG2oMk0s+kIbSzLqzVd1+KJcv8ohVMKT5s8knK6RuGbgG8aMuXZ3dGMJmK70QbP0cUVmY500XB+viR6z3Sck2eK49MrytmYg+t3KdrIUeuBU+puSbdqUMFx/9GEunPMkP3N43Fdg1YWbcvhnF3XFS4ECkwCJ0US7Uc//h5feO0Vbt+5Awn8kHNXDftwH092Ss5jk85yTb/Hp2QhIgGgkz1NGdmjhKppaDpH0zacXVzxznsf8d47D/net/+IuHxEu7hkHGq6phKFRZcsFMoJrZ4T8pzReMbt6ze4desWr7x6j9dee407d24zn884r1p+9J03+eThxzx+9ITHj094+PQZi8UKt6xYVWtcVdEuV8SR4UTX7KMZL+rEkgnEuExVwECrAFr522lqLVUETUSpdgiYBwp/qNlOa30IkpBsgYMxCUr050Kkp0ImvD9NVq9q7dmIlW3o/v1LpNgnSoJGTBXxIP1Evcx6f6ZIMK+TaXXPaElJfOoJk6/3Z5FURXpJ+v71c5slPzlRdy7KUoLvF5gc0s7hg5PYJlVefBBGUuhaqq7BEQSIDAHnRXkbF4QCirA8Wh+kwu8dmkCWWWyeUYxGNC2sqxZCkKqilj4rZfsEAFTQQ/8U6RwmBIlLElgv8YImeKHJEb1oJnifkj4G0TNSnKiNVGui2o454lBF6hUegxel7aA3IG0aqC2qJhBElGcDj5Cub0tlfWjpeZEtNLCIwqbvrS8AxBRbETcJ4AsAdUrY+p/t5xA2Vb7P9LRFUYz1MSW+6Vtapcg8rYG+XcT2FVpAK2kBkuGIeCXxl44SZwUr51kMokjr25agDUolfzktoJBLMaVNRQxtDCoTMSiCS607Dq8UbQxiF9BKjN4YJ3FP9Bgl+UbXaZq6Z/oI0OSDqPlG3YGGJkAbg/QHRr/pLYyw7UWspKKR8lk1TOemMpkq2e7vQKWtRyI0cmT0bxg2ZdeN2bBQAnt4cJtjq1JQ/akK8/A8w/PFjYdb6Bf0pzK8Tel5w13uFWG2n8OH8ALiMCASWhHd5g7pN0ObuL3Be4wWydlHHz5g+TTj1v4+Iw1Z5/Bti0283KgUw1uWmD5dUxySU+nBkqAtFZXoguPk8pTjkyNGN3bICysNOcqjcUzHGQfXZoymGcpritkEOyoIiFx43XkyWzLKJFXrOePDCaH6JKrvbIjDxxCd9NZ8zlykAUEwpc1GJJOYJrKnrcqAyusGQQu9k0RZx5QMayWVqtTf5rz426jPPvtn6Af/fx5Dsq4ETbFW1KQiIn9vjRibe+9ZVxWr5RKjMmxWEFLfnbEG3/nPH5ZhAPqHHj7vk8bNprqhVxhrsDZjPB5z7do1tBlR5pnQCtJiCV48YUxaM33lsU+bFQg90mQY26KSEpjWRrjw/XpPCmC95PL2/dZ2HYvlEiLJuNvSp8wKGExgtSYvSorRBLShcw0qyOHiFwtcVbFSllFeMBqN2NmZYYucqBVdXbG4uKRd1TjrBaVVQr/0TmrQQQk6HDQ4AnmuKQqLLzI6KxTbtm0ZH1xjbHIuT8/Yme+T25LGB64dXCPPLHZd
sKpqro4fMjaae9dmgOKTp09RnPGlL99jd3/OZCfj2o0pZ92EplFARsSgdIExCI1UeYz15FFhTEekJoQ1RREYlRBjjdIdxaQQFDAonh6d8MreiMP9PR4+e4JbnRDqimgMRVGmA8BidI7RUXqnkKZyFcWjbTTapekcp+dXPLv/ERf1mo5ehRVW1ZroIkEpdGZkjWnp5YtA5wQJjG26n9N95p2TINLDYlFhtCWzCuegLMconaOUwXeRet1QWgcTg1Kend05F7HDm4YGxUXdMloXFF7TtZF1LX1hICpouTGybmN/3yQxKFSiKecolZOrCfuTQ/TdQ5T2ENZ84xe+wdOnY77/4/d4evKAq+PnvDy5zn/3r/97ysMb/Pbv/huePPkev/2//6/oP79NefAKnap49viKl+68QXN5glt31KtI3TUEq/E+YqPQhC9Xl7jgKExJ07aUhZW9SSezXm2p20Z6fBLl0zno2ohSDbs7Jffu3KbMMzBjzhYtRjdAYL6zQ9c6qtWKk9NLprOS8WTEwf4+P//zX+WVV7/Cw2cXvP/h+7hOjGAzIyI0y9UisUs2wYNRht5uxWgRR7BagDTF5q9zLV9+/VV29m8wms6G/WHA2IZdAxGOIomG9MFpTIExwtDwSn5OWY/WHSFaQixZrRpOz4/5wY9+zE9+8j4fPXjIydkJo5ElcorN1hQjh9eR3Ja4VoHPqCko9m9x/dY9Xv3yz3L31owv3rvG/v4BEDk+PubP/+xPOT4+5uj5M46OnnF1dclquUJhaJxjNB5jvCejYTq2PD1bEmxBR4MZ7ZBbBe2aDagoqtZCLeyD1f7MY1OBjJEsS7TwLYAYGPbtzEqlwTlH6HuNEwhKGmc9JDwvUiG3e/K3PxozZG1SBQ8GrUOKa3RSoU1S+2yCyr7W2bemREI6fyPRB5zzg9daWZZD4tUrDA+qkYmJMgiYKKG2KWM2AiTp/PDBo1JlROKbRNftPF3rRCFQ2y1tgDR2um9XiPTlML/Vj9WrVBZFgc00dVPRhU3FsR+/qKS1Y3Peft5pvJW8RKmiiEx7qpj0UvgJe+7nNk3AMBfbFMZN/1gcPPOU3vSL9d7DiUGZ1tbGUmD70bcH9SrUij4h+/zIQhIjlRLHjcjbCz/zKSDg8763nbAN18wmvlbxU8D39rhsgjBAicBXig1CVGkfEljIJ29eg8IrhdcGbTMB2BMQr1NiaVREGbWhYaLQHowSdVDRlekJj308rQaxwg340OcdZtjDgoO2alP7Qa910QOKacyjI0ZHUF4ov0FE2wjCkhoCci2xR1CA0cOOy5a4jAi3fe4UDo+fiqStX1A9OkE/0WETtsrgKHpuMUMQyzDgITiZlPAiX1iecoMS9JvOkHzFCN5v/YygIds3yzZX+4Vr30ZJ2EIhhqRKDUhMXy5WIMaaq4oOxeLslGfVipPZlLs3Drm5P2diM5RyOJWaNpUWOl5fdUnBskq7RoxOqHV4tIkoFah8w/1P7nO6XPJ0ccIXX3+ZO9dmZNaTWcXIaiaTjJgFQqahMJwuLukefMT58pKLyyWlGfHSzdvcu3ObosxF2Ub3yZBcg0n0DgiC16qeQhPYppZuxqhPbuWfzcYmAhpSgt6sjSHJi3GouG76zvr1osQLyVqUBhecUDVU/xrwOZfytz42qNP2ZcgbkETN0EESDZEf8M5R1zXVuqLIwTpH6BzB+WGDH0CJrcsannv7dT/nWnpz0uH3o9ApFKIuqM0o8cVTnyMRksqU6YVzIFUq+8RNJ90bqU5ZawcaaW92H33EdaKw1q/rAdFTUkFcLVeIKMwWkhwc0pcq1y2sxkDUBrSlI/G+dcDojma9omkdlTbkZUG+LMkmI6JSjIsxN67fhBBpGpEPXtcrbG7wbUsMou5oTEJHrSLXGTs7M9RkRK2hcp5nT5+xOLtiVpa0rWN3Z4+yGKdkp8QaTcigOltj80jbdjw+PaM6OaMcK9YVPL84x7Ujfvzemzi9ptipcN7SuYgPkc5FutbQaYOxuahWkcRVEvVVh0CGStYgaVPXmt2DG5TjXbQtWdQ1Hzx4gLu+w43dHRywuDonsyXBuWTiCjE4jJE9czIeY4sCY0fkKqJtxIVzLq5W2HGJtjlKBVwjlaOsyMlHBY0LLNe1iMZERVaO8J3cc5PJlCzPWNcV3bpCJVp413rW6zopmWlmtkQrjXeRq6slF6cX+AIOikOKvYwiz8hHBTGLnFYNK7dgx1oOdcFPfvQBb33rTf7Vf/lbYtaeG8kuVAIukMBJNlWzibmUBAzKKPYO9kAHYiz55W/+Im+/teJHjz9mNJvx5PEF71c1/9O/+/c0bc3Hj36AyluOn77H6HyBvZxydn7Obmb5L379N1iefsJffvvP0Zkl4mlDS1bOqF2Ja1Z4DHW7kOR8VDCbzQjB0XUtddOAChg0eZbj2jXONbSdA2WYji13b+7z2qs3KDKLi5bmwRGdC4RoqaoaFQ3WjonRQbRYUzIa5xxc2+XmzV0urtbMxmMuLySxsIUkYgGHUr2gFWgs2iJgWHQYlRGBu3euM8pN8sySSLhaLFicnnDn9h2MFsZGCCHRyV/cRDUwxg8JBzriJD3FEdEqkkeFDdC6luerZ3z00RkffXDM++/d5+Gj+6zXK+oqUo6m3Dq8SeuPuLxa41tLXhxixwWj0Q7Xb91lsrPLq699kZt374hh8cUZZ8fP+N733uTJk6ccHx1xcXHGOtmA9D5leSZU8vmoYDTbZ74zZ3V2xifPrsh0Rgw1TR3YvXnIb/2zf8nDb/8NZz95G4VL+7+W89j3570gqLJni+jMsCdvBbX9//t4ROtIOcopixFd11FVldixuIAXbX9hloQo63ur73j7HHjxtfrq2SYe2QhtqQGk6+lxejjLUveSElVh0QZIiWOIWGsS+yJjMpmwt7e38TvbeoQt5cH+LNdaU5gMqzRtiLikLulDwPVnUaqsxAheR6KTaqDY4IrgRDKvQpolgqgQEpJjUEpC0vy0bcd6vSLEgqLMKMuS0EhVU2s9yPRnJkPZTCo5Sio4RmdYm7zRtLTcRCXxJl6qXz1lr098vBNpe4MhBIYEVxJDhvc7nO9sn+9JRToFsUoJaBrpE4wwxBv9vifTvkmeZP7lmfsQszd27iu99LGSFjsDEsAfw+dnBp/XrzbEV+rF197+GZXGSgys5fbQeiM+0idIyqfkFSXgYOwBf5XmQ2KOkBCigMYrQ1Carm976OczBEAYeiaJ2TmtRCMgaTGoBFOEvuVGqU2rjzEDy06sNCQOMipilSGgCZ2lWjWo4NFREq2EbqAQyxux1loRaEVgx7skjicU44jE7cpootFErYlGibG2EnZgjGoj1Pa3PH4qkrYU/dHLWMjm0S/U1Kemki8EBqUMPcc1qp5esK2vt11jkcd2pa3fRAWR2iBhn3dZ8OIi/dseQ7/cgDiqIYFXQHCepqpZuSVHlwuitUysYWQtXb1mvbjEjTNCkZFlUn0Uf9M+4BXETypqMQl4dCkxTOMQPS44YnBU6zXPjn7Cjz/8kNe++DK/8rUv86Uv3CE
clOQYxkVOUVqay45n58d8+6++g6Pl5OKCq/Mlucr54r0v8I9/9df48huvYyd56iEEYsSFFrde4123SXyTKejnJWxplPqM7VPz1Gfhus/S6UVNhuSdOCTuMUacC0TnxWg6oYCCaKWKQ0od6Q/azcT+f3qoreRfJ9EbnTaY4DwxWkF30ibuWxH9IETKoqAsRpi+VI9QBEWefdMbstnUYxoaWf8y7YrNZq03G3AvKJAoJM55ORiFwwTERDVAeODIAWiVxvW9R8N0fB4YkgINazb9mCHQObdJuIn0Pmw9pbOuqwHV7cdPGmwjzokgQ+ccSmnK0Zjd/WsUox2ii6wvz8iITHZLQt1Rr5Y0dUXrO3ZHBadn53x8tWRnOmcyHrG7s0tR5pRlwcHOHLsXeH51QVhWSfhBJPC11Qk8C4zHE0Lb8e6773J1csHBbIfrBwfszne59+oe+ajkqmqkvy8vgTXTyYjJtUNyE3h+eUY5y7hYn3H87hMUlrKoeff+O9x62dJVGbzqJZGy0CrwQeODJbQKVYNxCotYRSivsN5iKQBRZdTWcv36HaIqWNeCDK/qhmXdsRcU2WjEznwMQTj5bdsSY0dwcH51SdfW3Dg0WFtwcnZBVdfosgQysmyMsRnrak1TNagQGZUFVVOJWEKQudQmYzyZcOfWbVbLFWcnp0KdjNA0LV3XkYlGMlJ5tVR1RTQRm1lBOPE09Yrl1ZIsg6uZ5fp8DlhMkVH5irYLPDk9Yadpmd0+5vs/fMSTJ0+5+8XXWYTAl7/4CpM8w5oe3YxDD1aAQapeOBoSuIaQVONC4PGTC0x2yPWDLxGXgdfuaR7f/4AHH7xJVhaM7YRMKYK1xBA5f/yE6XTOf/uv/iv2RlOe6XNevrvP0fEVmZny6PmZ9PkZEfboXMBaQ9u1jEc5N25c5+TkmMVCDFad8/gQKUYlvmtwrsX7QJYprIlE33HzxnVOjp5x995LBAzv3P+YdSV2C/PpLioaTk6OOTjYwWYG5zqOjh5x/fCAxeUxO5MJ66sM79Y0zYrcRhH+gQQOROIA3/e0b9kmdqal7AGJekcMuKZGuQarA9HVWFMKlSv4XkyXhKEO87CBNzW9z5FBAu+np2c8uv8JH3z4Dj+8/zc8ebRkce5RUTEaaazJuHG4myq0NaenlzT2GvuvvM5XXn+dL79ym1mhUa7m9OQJ58/f5W++94ccn59zfH7GxcWSalmJkFcnvSLT8YRcazCG2WzCdDyhzAsKq1EW8BVGOdarcxZdFD9TZdid7vCfcuL+pQAAIABJREFU/7Pf4g+PGv7yJ98fgDTZ4iXA789g6TvqQecXAdz+Y1+Z6oNWoifTitm4JFJiVBTAg9TPnBB831P7opx5/dEox9cW4jfs4aQzU6dgOfEb4mZepTLfq9nFIa5SmiQM0Sd6IoqRZ/kAbvdVsq7rhnhpGxTvq1h9PGWtpciLIcHpuk6+T3ICVAqL9Fh7Iq2WRCa4dJbZkPCGmJgDwh7YRHlSNfFBGt18iEPi07YtTVNhMs1oMiLPC8rRaHgPVvdh74sghELGJURJDiMRvJPhI+ATZY7UDxicVEUCcfAvc2l8+lRtG1SWYktqBRkE0BgEx9JApmsZUpkXigf9GuiNv4cYSWuiCkM01f8uw++T1lJap4nEnKLs4aXDp9cUQqzqq6efBhC2Y+Mei96OW+LWeyKB7H1y71XPwUkXhhQb+vtIaYUJAa0yjO697ZJIiZfetYgnM5KAoYL03yPiJVYbUco2Co8fijo+XVOIMk9CfUwgdJJzUVEUwn2raVYNeJ9qgANCiEJUort6RdNe0rqKEDpI9xCk+wZPNFF8/oxJIk8C+IvK9xb1/HPG+NOPn46kDbkZQ1INHDacfumpDfe6T+K0Nmx6vVJwKneFJHVsVHm2S9Tb3OoXEji2S71p3Hkx2RvocX2ZO33+Qlm5R7D6dRo3IhoqCj3Sd47MWm4cHtKhaFYLrqoV1+YT7LggWFlkVmcDEqES4iKVGhmjzUEgSNumuic3QdOJMzxRU9cVDx8/5vruhNs3rzPf22WkJ9goXiqLasn9Tz7iydkj1vWC1brBNYGcnOcPnzExlv1Jyc0v3hMz4pQYtV1Du1wRnIMY0F7ok+lShvnt//28/Fe+LjQO2eUZ+reU3B+bhA1JjpyT1xIuPqnsLF5T3osPjOmzlq1HX517AQ1le6NJaFL63guKkoMXxwa5M17GzyhJzFyiPigFZVFSWEvnJEAxSuGjiKm0Sf3yxRGSDVQNG8Nms+5HoL8HSFLBSgs10mYireto5M5JBuvBddILErasNAbkeOs1ojxXb8gdScakmZWKlYKoIs5LYGBsEi4ZDhyZO+d6+4VEnzGWpm3QHtqmkeQeObSiA2MzdnenGAxXyqM7OJjOaRZLjqtF4o5HduYznh0dcXx8zPJyiVGKnemYl1++y/RgTuYd87xkevsWqu7onOd5veJZlzZ1LZYZretYr9c8PzpmfbkkVB3rqyVGWbJ8jJmUnF9dErxjOh6RWUtRjpjN5vjouDktuffaHR49+IiLx+f4rubq8gRtc9xHLbPpPsGvILYo1UBsCbGg7RrcuiI2kVCvUaEV9b/oyYLHOPEZVBYa14DOyIspNi9Q1mCLAmUKos4pJoZyfw9Lzr27r1BVFcpAkWX85Xe+zccffkjrO6KOuK5hubqCzrFqG5pWpI7rrsMYQ55bptMJl8srqmpN1BaTjSiLEUoZ8nLMeDzn/GLJ1Up6q9pOKEcxShBXlAVZbggqo3OeEB1i4+Hx3RrXrljWnrPTjN1bNzEqJ88t61Ugtxleay5XLR988DGnJ1eU812u3X2Vy7rjj//irzmYjPiVX/g6mc2GgNUDXqnUXxoRuZJOVqHW1LWjLMbc+8LPUYyu81//7K8zPTjkzW/9Cf/n//Y/c3b6gPLaLl/7yn/Gr3z15xnla/7ozT/mj/7iHd544za/+Y+/yrf+8P+iae7z2msFysJ07xpOnfLxx+c0nWdcWorC4FqN6xr29vY4PDzko4/u07YteV6A0jRVhWo7FBnRu+EOb5uWs7OGH//wXb76s1/hxz96B6cM167tc7VYslp6RuMxhZ3QdZ6imBBCTQiO87NjFotTTo+f0iwXGCweTduJQmHrAj4FvcI9NoTWIwr+BoXYbvhEr9JqA6RppZmNCohO9gPvUm/0i0Fuv285Ug9Jr6rrA+vVmnc/fMAP33uXd959n2dPnrFcLcEaMp2zszsleWfQ1g0X1RMmkzGnZ88xmeIf/NN/zvjwFZbnJ/z5W29x/ugD1hfPqS+OWa8u6XxL6zta52hijiMTGh+wu7PDwd4ermu5aGpRTnQBR4dykI0zQhep6pqmc/guELRG5yWLxtGh2b95G2MzaNuhl6zv6+pZG4L4G7RO53PKkbaV8SS+MOl7YvuSZwbnGqEoe+lF1QmIBogh0HabeEjmRA3793Ys03+uEni2UY/WWJuC3aFaJyvPJuGGGLdjozCA2BGP93qj/Jfioj5p6z0/t4NLpTbUyE3MJP31IQRc19G00tPZpeuIWqWKhv
TtSa91D9JKIuK8I8XUaCOJhojTafGhU1s+tVFe01ixzmnbFhcdo3GkHE2weRxonRu2y4Z22rZdX89LKp+iPq6jaBSE1NcYkTkejKWjemFOejbVZ6iE/XkbGeLTIakjqWLH+EJ8opT+1C33qaQpSFVO9/GA2lRUg96wcqSVxw/Fjz417JO72L/u51V6tr7Wr7FPv9+h1YheTqVn2QRc3Pi/ua6jCS7tO2n8VP+u4gtvT2upnpVEbGs2tGOPVNkSu4vCCjCrIsH27VSS0G+qa/Sh5fBXpR5wlcRQhDquht6z6DVdHWnXlegt0FsBpCnxitgGuvqStj2ncWsx047bsV1/PVEKAMaitJxa4unXgz5J8CXRNf9Tj5+KpC0VpZIYyEb6FhIWEjZvHHhhs9hsPH21LDVoDjcln/kd+b34AmKkXvjaJtvvb+pBmhUGX7DPW7gqKccMXmF9CK6QioPzlFnO7o0bjPYO8FqzrJaEds3uuGBnlKNDg49OenO0kmbMEFPgb0QJKUqZ2fQbbSot93YFRDBZQZGPGOsRe7tjbrx6h1e/8mVMMSc0Fr+C1emKtmqp2xrXtphVxEVHVztcGyhUwfNwxPff/j4v3ThkcrhDfm1GIbUbtNKMbYHVBq+0+H8YI/S3lILIuMphFBIFhHSgqbQAYr8d+5g247QuglAQskRnkDHoDxGhSah0UwbEVk1uhq1eAhhkkD/Ny5aNKnHw1dYhpT5brZWfj7jOyVjLahVj0SIfNsssyxkVJXWMOOfoGodrW5FFV71RuPz2RuxGIQfWdv9Dfz+EARPrAQFBhxLaFhWuC2Q2p1EpWE5qbDpAZlRqtE6KnFqEMfpkVfotU9OkkgRMaY1Kh79QT+XwEqUzhVYWpXpKjgQwKEPn5FDPcumJCyl5hL5xndQXKhWK2XSMd9L/WRY5eT5mb2dKaxXLy1N8iEz2dzk8POCd995HKYPr5HdNhLHN0K6jqdes4oKzasXBZMZsvsfLN29x+/AGphMVv5ju4bOzM+q6kUPEeVzraOqGru1YNq1Q3YjEAEVmKbQYFD88es7BtSkxRPJyyt3bu1ycPRVqBgAV68VTLs8+ZnlxjKZiNldMishkBpYlE2vIuCTkK4y6hO6YzF8ytjVKrWj9FagxeV4SoyUERV4W2CLHBUVUOS5WXK0ucU3g9p1bnC/OGI/H3Lh1HVNYogWdKVCBvLR434Ivhv2s6xyjckRuDc1aRDPm8xnteUvddXQukpkcYyzL5Zr5fJcsL3HLFaDJywld1+JcIw3eOrCqlmR5RlSBuqsphoO8xfua1sFiVXP0/DkuCyxdS+NbjM7wURTaqtUKHSOvfOE1RtM5e3tztDF8/PEDrk7/mK+98RXu3rsj92wSexiCDgo0iV7kI0UxJkSY719jujtHFTlVCIx3NW/8wj3efOtDYt7yC7/yDX7pl/4Rl8/e4SvNE5pizKywXJ6+z40DePboCS/fvc6jx1ecnVTkNvC1n3mDx58sOD56hLKW5XJBbg1npye8/8H77O3tY4xhva5QRjMajxELSovCMZ+NCa4j+JrlVcN7735MbsecXl6hixFeKeq6xXuo6pr9Wzexec5yccpyfYExjsWF5aMPPuDhxw9p6harLa6DYlxI9VGPiORyP0dPDJqLi0vKUjOe79IfjkoPhfl0ZiryPCOzKbBQst/GAfEZIK7hU9Pj0xGaquOtv/4e3/3e9/n+j37EoqqxNmNSjNmdTIl2RFF4bhxOGBeW6WjKs8ePef+9H7FcXNA1NU1V8sPv/jnn63/P2dm5nE+rCqsMk8ySMQYHJZoSw2WIeC0Bmusci6srMXD2HrwEtF3biXpiYQmd+FVdrSrQGV3scFhqFyiNIVpNXhbodI6H6Ieka2AWSOQwrD75fjpHtBqSNtmz/QBWxkBiWji6psN1LSpGMpsPla6gwLuA12rT48/mvFTpjJIgVqWeM7nfeiNra1OVLCVCfcXMd50keFEhVkEe54S6aGyipg3soxdjnJ76519oJUnJhdH4kL4XpadZqjMS4BtjhnRMKVK/mpwxMVn4GFIc15d8gtzLWolac9N1oBUdyUbJtWTa0IaGoSeJ/jrdEDM2dU3TNkIXNwaf+b5YkpJlvZH+VwxUQrX1p+/f7xMq75JwBhqjNFptVMn7efK9WVcfY5otZeohi5Dk3PXMlF6AZkC345BcbgMB2/HIp3Kd4fUkjNkk87hNgtbbRGzHsZ+OaeU/pAIJL369v7qt39EpxgopWSP2sfWnCiXpantPtE0lWd5f30MZY8R5T3TCl4pRYwuDMpK2ExUhOhGbS9XN6LW07yihfIcYcZ6kki2sq5CKQNKPadHKDxh5WqGoCNErusbR1GsI3bDWe1VXAoS6pasu8W6BwrEtGdOL9RAh6IDBCOtKGeEh9EWfmIovPgqI83dB8h/E10XK0/J57+ehlAxij+XHKJvoUKamD3jNJsMPekAYtlGB7eQLtiRR0022XZXblIRfRLS0UqA3nm99s+ym8paeXKJTREAlJlNhCepjauTXQJbnRDPHxwnlSPp6XOVQztEFjwo+BdNIk2cgqbyl5R4kofM+ghfaRgiCXLVtS1dX5EFhY87b7/yQ//AHv8uoDhzu7rGMjvtPHnL05AjVpOBeA3h8Bypago6s1hXvf/ABf/XmHnsvXeP1+VcpclEg09qQRVmIVlmIMSG8vapSz6uH/gbd2o76wUqJCAOPvE+IgveS1AVRJzNakCKrDb5XB0q9WJLapNL0UIxSPdN2mMN+4+0f/dc+7/HiRrbpldRmy+MtOahba/ExkGcZ49GYiyjVpbpupRk7qXjhJNAJW8jYgM4J7sIAC8mWRw9JSRVYKoySUBmatuP84gJfr9DKJIPeSNdUZJkiS2pNQtvoqY6JepM41T23v1el7G/EHrntE+Smc7RtR1kUqF6JK4EFMUJd19R1TV6MsJklL0RMRLtIlok6ZZ5ZXOjootDDgouUtiAfl9A1nJ8eUxjDzevXRDRgPqcscozSeBfIs369aXamU8ZZTpnn3JjN+fivv8sOluNVTVhNyYuSSZ4n5LUjasPR0XNJtH2UAM5G1qsV9boim07ZnRfkVpFNdnn++BOuzo6pvWU232N/Z8rFyQl1VXMw2adewuhwSp5ZLq8WEGqeP/kJJ0dnTHLDz339y0xnGaU2zMZLYsj4+s9eI94z3CoNKjzhS/fG3N39IrcOMy6uPmGxGNO2IrpjM4vJxPg8YlmuO6a7e3zzN3+Z5cUCHQxPnz+hauGDBx/QBYcyQsNtXUuRaJpBBYyG4B1ZYSkyS9c2jMuS+WxCNIqZm9KenaM0rFcLQoxcLZZkxYRiPENdLiFG8lFOZE1QMC41eZHROodrYzJVF1uHal2J36SKKJtzWXeE58e0oeKqWuGNY2//OlrNmYxyRpnlxv6Ys+fP+KvvfJtf+dVfZjIZc/3GdT5+5yeYGKmqNVerFbXvyIqcMlMYBa6TvsnMZChtQXfY0vHSvTmZrXl6ckJoCp7/4C8Zdy2sI+ujM77zO79DPD7iK6/fZF45/sHXf
p4PH3zMH/3Ot/j1v/9NvvyFr/M3P3yb0hzy5PSSthkzUrvM8zntyHNy8Zibh9cJriPPMp48fkJZFkIHS5QubTJ825Epy3Q8oShyKHOiz1hfeNbryE/eeYgpcvauz+iix7mIc54sC4n6DeW4oG5F5bFtOu6/9xHPn50QVcl4soPNI9GsiRqu3bhHUe4lObtIcJG6alEYxvP+fIrDjrxVM2HTaL/9tZ7snoClrXNa94iZ7Lw8Oj3lw6dP0OMx16Y7zGzB9dkuewd3yA9u8tLLU3bninGhKU3J9998i3r5lM5XPHsa+eTBJednp0RzhYkO1daMXUumNSMmODQrH6l9RhsswaeKdhLgaaqatmqIMTApx9RtRwxtOnM8DZ6qrgmto/OG4CMhdjjvsSaiVEfXrTBGYa1JwWVSvhgSMVLfJ0QSfS+NyTYQ90Jyg7Q3NJ2wIZwPuNSLZhCANgQJVFFQZDlKG0Z5RuccmVaD8IfNMmwSizJGwLK2azFaABCI4lMaRKDIKA1KY/I8xS1edChjAj6TJ9bA4LGavCiGQLsHM7dpctvv0XWO1nWyerROFNK+jYLhLBH/swTnapVAA0l+dN9TF8T2Jx3UqKg4OTnh7PiYvWv7jOeTIVkJPqCiobf2+XSiSQS9de0u0Rj7gDqkuVRqm+qZ1nyqgA2xS/pcp1hlSOhS1WyYb8ngh/8PbK6+zztEEcPoS4hx09efMNhhbLXW6d76bPVlaB3qWzj0Vh9Zn+Cl31PDW+ppi+muH4oTfbihtl9gWOv9l7crh5vxkt8Xi65NQtlXf9XWtQ+xVwKIZIziZ56rnwep1nkMGbnRkkyFjTCa0kq8IJNlUUyKeRuZDwGsvQsE5L4yWouCdbqudPWyClXqgUx5R+cCdXVJ6y8IHKDIBUghQudxyxWuugK/REWR/Edt7KR8WnPamqSGqTBRY6NUvE3/N0re4o2W3tb/xOOnImnbTOQm6ZE8JVERQpTJiDFl3H74+U1S1S/+zULcRiS2H4PUf8/X7j1Thk1WfWpxbtCn3hhZ0CwrVQPnthZ+3HqdVN3wqbFYSmZYY8mMoWsaqtbRBFF6cyribMR06X1vjUdfeZEiohfp0yg3qdFWNp0oiARJIGA8KljpQGkNd75wh+r8GX/y3b/g7L2HFB10MeBU3xRssUbkvyGioqXMZwQTcXgulwve/P7bXPoVX/z4Pq8c3GAyH9POC2IbJOBPCIWLajA5V8kLpAdyoU+ESLuj3szhNiqk5GZzCblTSRZZNkOhpYRkwN4XifogCTYqljKbapiZT286w2YZ4sYAmn7MeWFT6ytpw59+TpL6T/QerPgxVXXFer3CN5K0ZlaUQvuaTP/a2x8FpNhUjPvets/Zr4XDraV/zztR3fKdwxCwNqeu1rz//nuUO2PauhafmULhW0/rnTytOH4LmJfOSEna0npKG29MyCjo1GwtPQvG9kqUOgUcCtd11HXNfGeETmOTmQyCF3RLK4rc0naOWNVcnp2wXqyZj+cUFkYmiKJhOsBXqzUXywUql0qRNQZrMqFqRk9WWL75K7+EWyzI2pbrB3v8zBtf4uRqwbrIWK6lunPz5k2y2ZRF5zg6OsYYiyMkXrwYt66rFTvTCbNRwXxW4uyYdVHw/PKCB0/O2b/3GoczaRpeXl5x1S3JzZrJpMD7NYVaYIuF0CCrSBEtB/MR+dhxcfycsytPm08YWYszFaFznDxeocqM3bGiay55/PgD/OQQpW7JmiJK5URFkWtXOatVxff/5m26qhPZ8bZld/+A6WzGhx9+BOnIklWU0EMVCK5Fq4h3LXXlya1lb3dOnmVcLi6FBZBLha2uPV3TsK7WVI+f4nykbgPjUYmxJSaHzGqsCRidkVmx4DC2pCgmaG0JvqYoRrRZR9fCed2yPDnFuzV1U5ONNUtdMJ9NKDOLb9dYYKcY8ae//ztcnT7mG7/486jgePnebQo0b373Ozx7+ozzxRWz+YRcVRilaBsHISOzpdgR6Jp7rx/QuX2cPsPmJdFNqfU5l1fn3Lr2KpcTw/ff/T6Xlw9Znv89vvPW25zUii+8+lX+/jf+Cd/+izcpRo6nTzs+ebTifGEoJrvsTF/GesfObMK9eIt1fcHl+TltVxNjZLFYYPMMF6QFXdHR+o7gHJPxiBgd0/mEcT7nHEW7bqkbmM+mVE7hokQfITjarmVdL8mswuaa6zeucXl+hsKyv7cPMeP4/AJjMub7N+lYcn51yf7hPcrRPhENOExWcHjrBlpHQbMig9E2ad/eJGxavCWVko9I3aHfE+MWtSsQhRGixDOzzC035zu8vHeNJnrKsuSNL36JV+7eZjrfx4wm7O7lBN8yKSzKa559/DE3D65xvjjBdU8JYYkOntwF6UNxkagsTeeowxqHoo2aLkolyuqI8iRJcSuqh15AqappaU9Oh7PbuZpOR7rOMytn5HZMF1co16Jti441zfo5obvAmkiWWbFLCUm9+oX+93R3KYYKnFIK3dsc0AtC9ECwpMhN24nRMSqBb4bWd5gsoyhHGJtobFmB1oY8z4lVRZ7njMfjIQHp2TzGGsbjEbruYwSFHwBc/yLNUSmapiVFUiL4oMDmGWVRkBe52L40DTbZGfSxUh8vAQObaNM6EDb9e31SB0N/v0JtwE4VCUqUYLWRpCODJJAhvUfiUWoobc7F6Rknz55zfnpCjI6TM0XlWoyxjGzJfDyVoNz3lU4xqPAx4Lwji7mcmSlm62MVUAnwFjXqvsrF5q7YgLaaF+Zda0VyBmcANNTWnpvGrJfR75M2IOkwbNbGtq6CD6LQC1JxMrqnzb4YBHwmcdr6Z4hnQtwSJpHv94WF7SR1OwH/7Gsk6imb73/ez/YXMSR4WwnsC2bwvYL08Hx95S0OyWsSAx+KJBKDBLrQEb0j9nNsBVAOmcUnATpwqf+xH5Bkn5R2MIUk+l1M6sdK/AYV8f9h7s1ibcvz+67Pf1pr7eHMd6xb99Zc3e6u7nbajhO77RBwTAIKQ4SEEEg8EJEXEC88wRNSJJ4IvIQgBYkkCCFeg4jlmAzG84RbTrfdXV1dXcOdzzn3jHtYa/0nHn7/tfa+VW0bwktWqerU2WdPa63/8Bu+wxbMuTDMVCYFz/X1KWfXD1nt3KZu9ss7CYLIr5Z0y0tCe03wa2IUpFGMYYQzGyAX64rsA7kLJOuJ6BEqmWImh0SOPSH2n7+2W8e/EEmbUoxVCLYnSslZ8jAa8hCMD92bckPHQHtIvAYM+edN6sYg/aXgfMvTIudRSEEe2ySG0uJnhFl9li83fuft75/T2GHJZTHNBSa5Xq1Z9Z6uz1S6gYnFzixzq3HaoLOoGW3bFqksdak8ThCNsW48Z5WlVa+1xVaBt7/yLm+9ep90d4/v/OYTXrQXLGNLXEoQra0q8I+K7KwYEaaNcqY2Vt7fwpPnz/jDT79P+qV/wm03Y/dwB/vKPk8X56wXK3QElQRLPXKziv/GUK3aLD5DosJWRSdL4lmSdEUxQw+ywAs8MpFjkk0pKkJMEDfVmzxUkV4aQ5tUethMRpjCeL/S58YE
SpKi7bGTikrksFkZrYUPoZVAJhojnKnjEy7OLyCAczVVMRRVCKQ0FlPF7fGohtrQsMDl8YNfqoBtMOSCk26aCfv7e6wuI6vFuQQBfUtKMiacMagMMYQR106Zc2hJQlIuvAKGql3hR1jp8qWSLIvstUCZhTRMmQsFxpKFSzBes5SohgB2mFspiuefhtS3XL44obu6whKZuMRO05BTpluvCKGnjbBcLhj8jJxzOKto5hO60PP89Jj+8pxXdvc5unWIrQ2RhFeRdfQYV+TnYxYJ9a4jxFiCkCRqTymxWi45uCmcif3ZjBdtZl5XVDpzdXZKdDNeu9nw9r23uH37Jmb9KVcvFvTtNZqAM1dMmiX0E27svsr0zRvQex4df8zZsYgOebdkZQ1+9YLLVaT1GjNtyP0lqAnmqOH2O1+m73dQGmLwouJW1g6jHH3uCmRwwc7kgIMbN2gmc3Z2Dui6RM4WsCjsqLprNFgL82nNyneEIJ2g2PfCB/WRFGRN8H0nXf2cSDHSrlf4pAgJsnb4pAnZ0NgJfbem64MYCpNp25ZuLYbKfddBUDTVnKvQk3XEKhHpsUqhQqS9vuZwlrh1tE+7eoGmY64DeybwW//0Fzg//oh33n6Lt197gLU1RzsTLp5HJjmwkxPKRykI+CAy/zGANtQ7NeGq47vf/JAunzE7mHHzlbv82J//Gt/8J2JfcPF8xYIzPjl+yMO//wFXSZOrGbZ5wkefPqcNmuuLFR8/fEoX5kx2DpjvHWAnmoN6wjd+8uucnz3j7/29v8N6vSTEntlshrMWHyNJyfzxSTyZtMlkepppzcXlCdPbd7n/2gNOn53R9ZFkHOsQmc0n2GhJcVm4Nmt29w4gwePHj5i6mqODm+zvHND1nk+fPWbVOqgUyWR8ckznt7DVTIJfAikHqrqCAosWRb5xFR7XRPLA5x06CAJF2pSSNkUwgTUpokoYpcR3K2Tu3zzi8M//DHs3DrFNxeHNfZy1RErxNWmMadBB1rYvvPsFnn76CX/4vmdnNuP23TXtYk2/0LRtJmsHWsxvAwOMLWNSwCAFzBiKsIBKZZ0pgVfpXg3wPqMzzmmcq5i6CV0CHTIqeGqboTvn7Ml3Sf0p3q+IyZOSJybPhoKRx6KW8Hp1KebK1RzRPOrlvWXw4fQxEkPCOScCqHqTUGljx24FbCCJ27HGsI6OgX6IwrHa6qJIvKLGxG7oMqGUeEmVe0eJn5SR7pqtHFVVSXe3dJW2i9+fDfCH7llk4N2pUehGEuViIzLufQqtc4FKMlJZrFZkqxEkVRCV6gRnp+c8/OgTFheX5Jg4PT7hulvhU6CqGnaaOc0dK8b1W3FYTMVawKpx76yqirquC2SvQNdClE5rUdp8CZqopOAxxp9D10htiriDYNgQtwz3ZOiWjckLwz6ZSgyxSfg311X+31VVSaqEFpJC6f6Uzq8xA9JLYLQpbfhk430qfLaNaqkau6lKf0Zw5Icc2xBgVZSoP/+3ze8yJhCQ05aWgCS828/P41qzmTFjRrn5/uUKVc4yndc0FlBGikhaeGGGU6QxAAAgAElEQVS58NQ7BW0MAsvF4zKIYGGh3ZTChFKSPIUUyVEK0MPnpRgIMWGVAcw4/lIKXC8ueHb8MU/MbfYOYbozk05pSnSrBX59DbFFpcGSJpepJcbyLptSuIF+1RILhLuua5m3GYiivG4V5Bj+2HvzL0TSBttJE+MAGHhGOg18mJINq6HLJq8bPIeGjUYgXwW6kLd4a0qNCcmw8EkFSYZPjBsfKmNsef1mcVRKFlz5nlvB9lBtUptJn5OodKXEGBAP57ZertBVxWzf0TgnBPE2CfdEK8xEFSXQAQ6Xx05SiiVANmVYZ8hxSD42PCxNxuTE0f6cB6/f4sUk0cc1XewlqRqMemJEpYRy0jYeA3rEfNIYJXu3URhTEdeJ1dU1T9srzq5ecPFoTas1ed1RJ3ndGA0MiZqsauPEkQu4lYSUTUYrLQGDHmsv4zWldHNUccccLCJGnlRZKNQ4/1NRgSqZ/9aCtr1e5fGfl4Zj+YrC5xiuid4qt43chgGeWpI5rRQ5RlSMNEa8zFKKpK5HlapjShGUHvHPI1F8+DczTv6cB+rzZnGnVGgHPzq0wVR1KXxoUobKNrzyyj16As+ele1o2FSUIUepKomCcR7Uk8t9yqPIiDFiCK502MCC01A00SMEVWFBi7JaCJ5xOS5BYM4UnLts2M5qGhyNM1Q6UxuFU4YUPYvlgqm1WOdQMZKLKawuBqISCGRizrTe80u//MtUKfDF+6/S7Ew4W1xDNcHHSOt7ps4U2K34s6kkmHutLSlEfAiQBVals9hxaGvxcU0ionNCp8h8MmFvvsOsqbEzOFsc4/0x66s1TiuMXZLjFRrNwe4Ot3YOuVp9l37VUpsZVTOh6xPrlYdgqYKiqWbYesq68/hgWFwumCwuWV4/J0cPCBxKhrgYNRttqKoJ3l9R7TVlszcslx3O1PK8pElROiVaGSpr2G92CMsFXY50MRJ7z+X1NV3bkhE/tL5rIXhJ+IDlcompZjhX0XZh462jLdZqYh8JMaJVNfIoUizzLFlCH6mqKbP9Haa7UxobWJwsWVye0a6uIGmmjeXdN17j+qqhMhOObt7j5s0bXLRXPDp5xDd/77fR/ZK7Bze4uXPEjcMD+rbDmYa9nVvE0JNzj7GWEESEQ7uavq2I7YRqehPVRxbnLeaq5fzFOZfXlpmbUeXIk6cPuXN4ky/ef4fjdklSK84Wn/Dxow9Y+8dMDicsX7Ts7UE973CzS/Z29kjumGxbfOhpV2t251MO93YwjePk7AWYCldPObtcEHzCOMukabh565B82nO9XEBwYBxu4lh7j6lqgbp1LbZyOKXxoWW5umJ3PhGRo0qzbjuiv+Thoyf03tP7FcunLdl45nsH3Dg8EsVNhECfxv6YrDGZolZYAoxx4VPDimwoWKNxLg/Lw/DUXAJWRwlWlILa8OAr7wjf2mpyChJM+hZrhVOLmo1KbErB3uEN7r/yBp9+8pT5bAdTdZw4xaVfsehbjMokv8YqICZyEM5KDJCyJatYOjMKXRIy6yq0ESGvQc0wJ49K6+K15gTI0gcxoibiSKzPj/n4/W/hX5wRfFsSviK4NBSD2SBFJCYtIgJDEW5Y04VjsbWtbPjdxkpXTTzMZCterNYY049BvWLNgOQAKVCu2valbpfO0PkVrGT/sUa8Q22xbdneNwaOVZQJWrjNElQrrYg50fsebYrFQ0kmB4rJAJPcFiLRI0VBeHVGmy0p/U3yidoA9eRzh/2OUiAsKsWImJjsfZH14pKri1Pwnvm0QTuF0ok+StJWG01tNanLI69PKUnQlBEIvyl+qsL3Kzy/YStXqqhnDjoEJeYat+PyvXVJ1kriNio8q01Xdbvbmou64NC5U2MnSZJEkmaAJg8JnNRQHZPpFGcdGUVT1+V1GxsqrSkqnll8NMOgVDh07WKB7r7cnHhp/pafn/s7m8eH2xdDZEC1jWN5K+aVY7vBwVbSVgq7QwFju5NZbv82PUW0jpTECVrGsWs
mKBOJGVLS+BSlOOgDSYloTFYZYxQTnalVxiqHUZakFEFByIkYAypFTMwoDKse4ZWmjf6CXBrxY8zKSDetb7m4POORfsQ6Wu64u+xMpqJ43a0JscVamE5qLIGKDCnjjCiLpyiqlSEXUZYQyCHg21W5TCJS11iHrutRofePOv4FSdo2BM4hgVOojeTrcJMBnROKhFJiZOx9FNU7oxjEFHJKomC2NZEGDCxsVcIoWXbe+MwMPiRSRSleF2XjCtFTqEAlmSrwkPIdC2MIpcTYT4k4FiEpgtJSjcwSDs1dzeF8RpcTuVvSdx6SYrH2aGWoXWSihSiZlcAVpdMEZF0mpSdnqQQGH7DaiuN76Im+pfYZg6azK6IOxOsldVvj45yoWrTxBT9eeCDKiuBE6eTF7PFpDcaJtGnoyaFnN4F1u2S7RnNBnXbpoyErR1QZq7zwy0oSF7Ncm0jpLlGgN1nJdc8eYzXaWpmvfqgGD/mW+HTEnLA545UE3S5q6iyKO8qKzLGNoLx4dUWtiSQ0FkryK4pcZWxpgdvlHEs9mVI1LDllTkJyLUlU0nlr8Q4kMj5nuhjoQxDYR+jZtY4b9Q77+5lGZ3LyXK1bji+vCRmUdXhpQKKzQiWPSqoEvzJ+VcrolFADZFNLh84AVhlU0oTsyTmgMbSpSPmbzDoE9nJFCBldZVaLC168eI7BQpSF0CgjnZsQhUeZOyZ2ig49OgdUDIgnXiLkTNRSwEAZUhDzbeXMWG0cgoCkIn1siTlgqxpdFFBhs8grIPuATol5XbE/neCw+L5l5YNsYabiaH6IqhP+aonTE3TSOGVRJBpbU+uakDKVnRL9iuvUcndvl+Bn5P6ISVZM7BJnIsomUvJiTt111NqikiDZY1aitNoFjFcsOs15rlmbNcFAUFIYSa1n1Wa6vuXi6gc8ffJ72JxozJw6VNIRthqvAg+P3+fFpyu+9qOv8Ma9G8RgYQZhmTDLCfiaeVzjK01vYHZ4h9V6ydHOAfOdI5zbwXBBZQ64vlyjfCY3hnUfaPamtMqwWj3i/PwUU1l86FksFjI+UqZC+J99EkGDddfhzBzMBF3XXF0cU9eOhY+SJDfCkamNJuoGWzWEZPBZEnHftzidUTmik4UQ8UWprXKGpqloJk3xapNApl0brkKPz5FbD97i1v3XmVaahx98n8X3e4g9pIQOC27tTLH9DHSFuXGPw4PX+al3XufDR9/iV3/11/FBY5Lm5LxnqRrWsyP07BbVq6+DjUwmNSlWGGpcamkmlsoaTPTMG8+suuCj7/0Of//nfwVnLXuHRxztTnjw5lucnVXYMOX+nfvcOMn8pX/7Z7m+fsS3fvMfc3zd8fgioFWDCS1p5TnYm7NYRZ4/hh88O0PNdtiLlrmBsLpiujdn70BsHZZXHf6sBzuh3rmBm0z4+NNnOGfY23mVPtUEc05OL7h55ybX12v8uiPEnt29PSbTCavFgtOTa2y+ybSacXZ2yXqe6cI552tPjA3BZrQK+NBxsD/jX/mJ97B5XfYuQ9SS+JtSQFOI8uYYMiot/Onh98EiIAdRWMtSDB1SuJwVHikkGmXwoae2dQloZW9NKYgc9vKCqxfHLLsFR7fucPPOm0BFtmWBTyIOcHF+xvJ8zSrA8irSdz1kiCnT94EuFWPilMveB5T9W2sJ/rQxTGcz7rzyCikl2ranbVtijHRdIAZN9BGjwE0snff0eDotnYp5qPhnv/VtVBtxzmCcou1WkEMRbBHIZCoq+iF4jKoE3jSE4EY6k6rEEkY2Uwbke9Zy/brOA0NSBL7vaGNfEiWDthalMglJaHxS5KgLB00XmFbxvMoKZywCfS2xVC4+qSVwttYRY0KXgDwrkdtXShFioKod3veoHArU0OKqhkFCJBWLA20scdhHGewJ5NqXjLXA/OQ6hBgKd06em6IGVZONptciLIEVqFjwHq1k7FgyDVc0+ZyYPDO3x3y+w65rADWqb3J9QmMdVon4lrEOjCLpSCIIxJZYitQlsTSMiuTGGYk/khIYW4noRFG5JKwlcScnEUfRuiARPNY60DIGa2eLN1se+nZorXAWlE54H8kpkKJBG40xUlBJRmKerEGZJIJSWSE9F12QLhajZa4oCyqloppd1MTF1g9rxa82hp5R9bTETClJ8io+cKoUlTcd5E03mTGhSJLLIlSIoWAwiKDJ9xsKGhSFzSFo0nbo7KWx+L5BQL1cQAc1QomVAqMtIcLJxQqVw8baKIZSJB84iWp4OUlFtJK4TQ/0mSGOzJI/DII4MUoSnrUia4fOYLTDq0Ryhk6JcFwdFLmNXLcr/JMnJK+w9+4TouVRuOJp94yFCiQTBMZONXYKNyKBilldk7UiDmJwJFwpehttaOqa6aQRRMQfc/yJSZtS6n8C/jJwnHN+rzz2XwH/MXBSnvZf5px/vvztvwD+KlLi+89yzv/wT/qM4dhGgeWC/8vol/lGRY9clZuulCQFMEg4SEfGIN22bdjjZztjw+CQro0EZgMsbbsPJK1qqSRRVBNTSoXHJBMv54wKQxWu3KgyYSndqoh0f6xWOJXRObI7ndBUE64vOqJv8WGJT4l5M8E6LTzylCUxjRkvqhuliyRBtUiWsqnIJMH16lg6hg7W3Qq/6jDeoFNFwpOVHwUoBny1SsXoL+fCqWjxoSPn4rWy7rixt8cbb32FV9/b49e+9Qt88vEKlRyp+L9ocgHU6LG+O95jpchFGFaqiYmikfXZ0QClMpmH1ylF1hIo5BQgKVSUrpXck0yOCV26m32MW2+rPvfu48N5GDvynFgSda1ksdTDdygviCqOC7MkK0aIpmaAEWSSD4RVRzWt0c7Rx4izljYE+f6FgzjUdoZkv5QexNetWF/koVJFKgtUEjNqq9GhdLysRelMTIKVdqamazuWq0u6vqPvOs5Pn6OzQaNxxfck9h3OatpujV5afE7EroMQRCq7XKihSw2QgpD2TeVKFXPgOmRCDKxWS/q+L/DKCoUWL5skG14uqmIhBNbLFb73TJoaU08xk4ZmMsV7z+nlisX1khASRzdy4chJ8pj7Dt1pZrqmD1BPdnnw7lvcvn/E5XKPh48c1jrm7jlxfSEwmZxFxTNJVynESEIRUqb3kb4PMn7slGufaWOg8x19lPudQiThBJLqFHgvBrK9YnFyhU8L5vdqQm257npOF9esU8X+/AiVMpeVnCvOYbXDeU+ygWwVipqZg1xZmnqOrXZxVrxojKnQWZOyoguB3HZcxpaMIviekALaaJw1pFTI0EMlFYgKGlczmc2ZVZAjzPuEtdDoCN5C6LHWcufuPc4ur+n6yI3Dm5ycXdP2aSR/j9FqlgLXtK6wRW55gD2NHA2VMM7Qh0zVNJiqIeRItgZTVzRhyv6s5uhwV7zlbtzi9HrJIgQm85vcvH2Pu28ecnh0h5PvvU9lKzwzLq7PWWI5O1vycf+ci+UJBwcH3L79FpW1hKypQs/UWKZkTk9PuDr5Q559+jGJVzi6MeELbx/xnW/9Aa/fq3j93pt03Yz59HV+5mf/JZpZ4Ld+7Rd4cXzK0sxZKo
2lp7tYMMlTXpzWHLxyn8Pb7/Jk9ZDDu8c8f/Ftphr6vsX0NXdefZ24iJw+/IiJtUx2d9nbOeLg5gEYxf58QmCHTx4+ZTqBxlZ8/etf4/3vfMD5yQX1wQGrtuXi6pr1asn+fIZ1NXfu3uPyesWzFy8ISuNcQ50bdmcOcstysWKmOuaziFJryOIHptVG5Gt7JVQv/VYCLVfgejGjdAYihkROHrRBKUtM0KfM9WrJ4uqClBK1rWiaCUcH+6gskOZnjz7m+uI5J8ePubw8572vfI2j26+Ujoesb8+Pn/KP/8kv8r3vf4ezszO6lLler4CCXMjybwgb7s8IhUuy56Uke3JVVeKZ1YswSdcuWS6WdF1H14r/okGhrSnFWimIxbEwAxenC6IPTCYTfAz0PaA11sq10oV7pY0hJVWg7iIW5KPItotXoXRatgU5hMMkwW9KSdTrdOHpJlFE/qyaoTaKrGwpTlvZp4s8+IA4MggP2TpDbY2YovtigaGGOEbutyn7dNZl71C5wN0VrnLURhMIIuozjo8NLHL4OfDkInlTFE8lodnqXmx3fJRSWOVIg5ecUQIRMxrjHBaF0warFe3VJXW65sFRg9ZTrK1QucOvz2lXLetVKwJle3s0e4cI119goD4kQvZoK6bwMRX4ZELk+zOQIyoOYnIl0dVSsFCZkfO54WJ9RqBnzFpSKeQm0GIoPxQphcsoBTRrjSSVaFI0hcoh3zlGjy30FKUlGREYrR+RC8aUPcz3ImZE4cTlAX3G4ARU4tpBqboUYrYaGHJ/1KbTNcynLQiwdM40KFcKO3KuSufxeg2mKyK2J2lU0klUQUuBaPufkReXXkYQDQvTCL1WYpDd9Z62jQzm8Z+lsMgtkAApk4jGbPi3CfSg9FheprdERgakUy6Jn0hOBDkXrVFa0Go6RFKXRBG9bTl+ekLbQlJ7PL845bK/pMsBnwI6ShNF5eEcRMkUrTBJo53FGSfzzBkaa7FWkktrDVVVY+3nDey3j/83nba/C/xN4H/+zOP/Xc75v9l+QCn1JeDfA74MvAL8I6XUu/mHkcs+c7y0deTCiBraryVgV6XCNLSwh0PrgfQaJSJhaN+W6ttWiz8U7PMwUFJKooLHwLvatGw37V8JzkUYoHRetpK+lDYJ4agqmSNJeZKOoAMJj4+ZoETBLQZPu14SVcS4GTdvH2A0rJcXhO6KrHLZDGTBMAaGtvFL10kxKkYprUllI1Ra43MgKkXCcHp6wfnZufBMsvBABskvNUAlxpb2y5NpuHZaa5JWGK147cF9vvHnfpRny+/y5NH7JZmUxUuVzX9IvD93DI9ntu7xD3taWUzU8B3K3RkrJ8NCIIl3SpGUrMBd1ctjZFjIRi7bDx2EG4iLKuc/TO6hQEBJpLZb+5TrLd5R0IbA5WrJ1fkLri7AVoqgDX3KZKXH7uzAKlHbC+TwuaXaKuWa4YKUxU4J1yiXlv9gXOq9x3uPbWbMZlPMtAISb779LstFy//9W79LbD2994RBJjknamvpVivariNpVUxCezHDDmGEu6SSDOSYSoFgcx03XkCJ1XLNarUmI9AUYzdePwMcRJJ5gzIVtpmhXEMIgXrasLO3z3K54PzyKeeXVwKzUbnAR8CYjLYJZaWYkbPi3v173Hv9TaaHM9Rkj+nBXU7OTrl4AiaYUtnStH0n3XOlhMdRNrCYxKen6z07VV0Mq4udgxbYbl98gZQyzCd77Ogj1Kqlv/Bcna5QVWbma1Q1JWrH8fqSb31ywY0dzSRH3EFFjFNCFKL/TbOHNZFOW3yomJRgWOmaFC0oB9oyme2ICmAxEQ0psmpX+OCZzfeZz2csV0s0G2lvGZaFd4gCLR56Slcc7B0y2TvEmIQKa9ZXLwjdmna55NMnz5nPd6mbhnXbg9bsHezTv7gQzH5RCtVG/AHrpsIVLooo9oqKat97rElMp1PC9YqT509wkynT2nH+4pjl4or9ac2rDx7w4I038DmjKsfu4QEvQg/9kt3GsH/ziPjWm3TPHuMXS67bBat2TeUaJtWU6ODq9II2r0nTPTq1pJ4kcrdmHRJZG1L3gqfPnvO9D5/x41/6c7zzuqVbfMBEAWuDnla0neH7D5/x8dlv8dWvvMbZdeDozn2uz8/Z37XMKsNBEFju+dU58zuv8Y0//6/xwv8Kv/2bv890b4arAueXLTrscXrmYNnhKsdkYqmmionRzCYN7t59pnYXYzKLy2OOj4+pbx7w6aPHNLVlf3efVlmWqyUherRyTHeOWPaRV169y61Fy/rhpxjg1buvcGf3ENW2PH38Cac93Mi1EBi1FM0iYEuhc4Aq/fD1T34Y65gfHICzm/VbRekcRE+MCtyUy+WKX/qVX+fhD94XXqE2vPH6W/yb//pfoioS7+KdZmnchJVZYCwoIhlfIGqW87MTPn74ISfnJyzXK5RzEpnEDSrGmtKF2ULJDPut1qYkCPLY9fUlIXhSDHRdN3JslVKELLCl2jrhdcYC9WOQSBcCeUqwXK5K4CnjOmcl6rd1RQyRlCBGUflEyZxwRfQjp0zTNCQfRDAleVmz9SbxGb5/SqIGOsD3RGSk7GumCCKkQAx94b5BipJYKF0ENFIiqWKtk/VoYLzNR6N01z6/7Qn6QmtFXVVURt7POSdJzhjQ5xGBBMI3V0qS1qqqxB+t+HQOsP+BPJW39nqVM7oE4ahhdwVrMoRI7j1dXPHs0WP602NU9DjnaCrFznyH/YNDlm4BShF9QFtNYoi78ihGqHLhULO5BoPq88CPphSYNoqfpXhbrpdCFWP5rVhJlRhQbwgTWqkiGFX2tjx8Zkm4rClw2Syb2JaezZAAx+IHmEbvxJI4yrYzcrBVKHYCORVq0CaWGhJpow3ZUWy0GOPXIZ7dTqCG77mdlMMQ0yZJhlFCo1GF+lMWkmE6Dj5wulBSRAtmiA82RfGSF0uyNI7Pzftt4m5JIENIkniprY7aVkF/cwwl9RLjFT7SIM+/KY+XfxVgNgBVXeIxpzU5FnP0JJ1ZFQJd5/G9QFFDt2TZZlp/yeWzY9arjr4PrP2azIoY+yKIRykoSSGn6iuqaUM1adBGE7SiI9HHocFj6EN8aY37YcefmLTlnH9ZKfX6n/S8cvxbwP+Wc+6Aj5RS3wd+AviNP/GVn00Y2EyQUYbWWIxxI3wvR2kPy0TZ7lYMVQU5+Y1nmhzbE3DA3MqgYCSp5iyvUwNWPWVBHpSAaLPgCg9ONoCNFGnKEZ89kUjMgZAjOiaydQy9Jp0T7XLJsr/CTWbcf+Uue/tzri9WokKTKMIbkWwdxrhN50OyohKcqdIlG6qOwndSKtLlxNWy58MfPOT0+JTkWzEPLHBUjSwO0qrfQEO3F/thIocQ6LqWbu3wfYs1cLC/y3Ras7pajRWP0Q279KRHc+yt9/rMzf9MxXcYEgOc7uXETskKIB81JjhqfJfhvmutt+T+1SZx+6OSts3FHX/LY9NiWMjkgo1Jckbgn0p4gklnPIll6LlsW9YpYXqDriqCM2L0KPXXcu9k7KmhKlU+b1hghwRRHi+dvbL5GG2K7
PP2ucuY6fqOqjY084Zbd+/x2tWS7/7B+1x1Z2jAGmRRUrKJpAJhSYgKZvKe2HuxpsiIIStSBRa/OcU2R1Q2C4XWlqaZMJvNaLuVKCklqZSNmwFIQmErdD0F2+OxrH3g+rLDp6V0tnBoW1NZw3w+xzkjvBTd0JnIapJIO477tx/wha/+CFFrPvjgOe3imolLpNiyYybkKmCyLMt9qcCLCavARHKQXTGmyKprMZUj50ztKlZKYZ1FWScwUaUIPlK5Bhtr/GqBDoJnjzbiWw8ukE3Daec5+c6HHE0cD/b2eU3fQtuax8eXvHjyhHtv3mVv94jvPXvGs+NjfvTBa7zz5bdRR3f59GlE6Rpwws1wDmUctnJE7fGhJ6WAKtAY7z11VeOj+DsO3WIJ3AVKtDi/YKFqDmeHzOa7tP0KTaaPBlvNONo74vzsAl84JcEHmsmMRdvhQ+EUKCVQZSDEKMltkoCXAhdzzhX+DEynUzofWVyd8a1v/hbEQPYtKgd6D82kYbq3A5XjdHHNxfUStdOT1x02tVS6EvsFq9G14+DOA64ef8zZsiWnBSqdU6WnLE/XfPfFQxZXPcb1BH+FChl8xpk1VZWxKE6ePePBK4dcLxdcrxLfef+SL3/9XVxzwM07Rzy/fM7f/4e/w/mzTzi8f4ezVc+k9TTKskwXXLXX6MmMjz58n8fHD/npb3yDf/TzP09qLMcXJyxjw7/6jb/Chx98yIvjf8aDV28DHeiKeTPBWc3k4A6NPeTH7x/yjTfv8jvf/R4nyvBi+QTXrlhfBa5yzeGNe6TkWS0vQTlW7Yr1umd/d4/+4CZaKQ4aQ1odM1lFZn3g6MYD/uy7P4FRc8g1WRkSCTN09X9YcWwT1gLFFseaEjWCX69YXL+gWy1YLZYkN+HWa1+kD5nnL65YLVeEvmW1WjOf7MjeYgxV1cjvXcfaTqhsRdUMHDQ/ft7bP/I23/iZn+Lp8ydctUt88PgQ0YUXPuzL2/Y9w6ELBA9F6Vpp2rbFd62IQ/m+iCSBrSqigkHMLMYg0L3iAyucGfkZkyj52apiWju8bwnBo61lMp0RgwSUsqZ4FAZbCa9zUk9o+w6FqCprbci2VN4H6B0DXL+si1nRNA1KKbquw1hB8DSTIlbgPc4VI++tcx4C3AFilYEQxLBbacnShoLsWLRkiF82RdFhHwU+c403Mdj2vjzur0PymRMqqpdiLcXWni0vlMJ36fIQs3TtEdgtIUIIJO9RKROiIjQ32JnPmc9nOOuo65oqZ5q+Y9a2dOuWmAovedG9tOfnjKztelMMHaJLUxKozOfjkoJv2ey5wznnreePxQxJELQSf0Py5tqmARWmhrixdDbRo6pjKt2yTBEASqJumFMa9/OUQokzQxEiGToxQwMjjzGDcw7nnFjFKIMriTVDrKGHmPflGO/lxEl8VceC9GdiwWGMvNTUKBd8gDfmNKRHmx/DtdUoso6jwuMwNiX+SeNnDMcmiUzjd5LxV77v1gcoSviZh6YBo73DdgwKlBhDem1GgUGjUxx5uCqLHH+lFCRNCBl8IvlAbNdcLdcsLhf0vafvAuu2JbMmpn6Mr8dz0hoT9CZRVYIx8zEXFEvC2VxEoP74Htf/H07bf6qU+g+B3wX+85zzOXAP+M2t5zwqj/2xh6K0JmGjcl+kZWR5RWRSh1LRFs9oeIFSeVS5Kd1puZnDzSvHQKSVCt0gThJRI/RMKltD5WKoOKRU8OGfhfttdRxeTnCiVL5K1SkX1UeFFTGRmGnclL2dGQdGEQxUtSL0PX2/xqpMVVfkGPFl46mcLqy5XERDBv0d+Q6azYKacih2DzUAACAASURBVCKZzGW7JDzJ/MG3v8+zpycQIyaDVapA5FSp1r18TlBUsbZMEVPhFMQgfIPQ9zijqSvB3mcG/t/WUR7ayrM+f/9/2IPj3zYLSh7OCwqWucgJZ8Ee20KcjiWYjFsk8M39+aM/cKi0jXK4Y7VkfIKMkbxJnoRhSYFuKpKGoBWx/ExJQYzoGDHOkI2RsZy2r5VUsEAgBsNGKl9a5kcqi5VWWuT7C+FYrqskXeRclDWjcBIArQ3Re6qqQRvL8nqJpRT8lGLAvEsiLJ09kykqTUWSVmeSgRw8OYvQymC+qtRGTlqVRLOqKoGydK3wPkpXUKatwG/RGltZXD0lmxURB05BhM5TCgAWYyqM1Uwmk2ImC86KhcKOrrkzn/AXf/pP8/Wf/pdhMuV7H/6A42eXVG7G2q9Z5l1OPv2Aq2fP6btYSPTl3GQXK+NWFvF1120SYudGniNakvOM8GhBo5wlZY+PLamy9DlAG9EWsrJQw2odmMUJbajRaYamofULLteeW4evcPvuHX71k6ccXyzIr1pev/8269k+j54/J2PwIo5KQtHHICpt1jBpJvTWUTnHrds3efDmW2jt+PB73xcoKrlQDfWmsKWkuymelob1ymNVwke4Xq7os0G5CTpFjHXorPnK177O737z9/FnV4zuNzkTg6cnUBtHXU3GINM5O66J3ks13lmL7TtM7kgpoFUgE4kRutATcmbRdTx+8oQffPIxqzbw4PZ9Pnj3Pj9x+8dwRhFjYHF9xVd/4k0m9w75B//n/0WlHTWKe4c3ae7PaFvHsyfHxHiBUZlZvcPl+ZrrxQkHswbrErk/4wcfX9HHnkt2CK2i/c6Kd7/4KvPdhmY245vPXvDi6XMu4hVn14KQmHn4N37qp/m13/gV3I2bvPb1H+eNL7zBxN3gK196l197+k1Sttx/8B7/wb//V/lf/u7f5fl3fwUd4fbNI/o04fzykp36Ln02HNy6wV7boS9WvLN/yIuupbL7XJ+/oAuBW/ffoGom9KsFlsTTh4843Nvl+cOHTIxl6gM2Qlx7lII3X3+TP/O1H+X2rRu88c47OL0GehRWOL2pLMDbEdS48H321zIjlCKFwMnjR3zn27/H+ekTutBz940vMr99n5DnmGpKU0/J2pC9qL3lXJR0c6ayFdNmRt/MWTVzmmaOKLs54dwaWZfffOstpvM562fPCWgRXPFxq1C32VvT0AXY+r5D0W9IPozK5BgxSuGcgLjq2tJl8Wwa6ouQ0YUDNES2irLuKTsWbFOSpNDZihjE87FpplJIVStC6Xj0vXhyxhBoV2vSoPw4wDrHT9lceEGxSLI2HNYY6sZxsL8HiNLdbDrFh4gypvDDh5hH+GZd32M16MqOBdnxKPueQAAHwbJN3DKoJ4YQBNqpi4ctLyfM22ii4XV98GILU5SdBzVNYzQxiKXKcM6D2rfA8EoRNClSTLRtoGpret8zbxpeefAa6ClKWwlsU6LNEpFHPSFbT7ZrfNfh10u0q9Cml1wwJGIKwl0rkdeQIGk2sM1hX1dZkqVRnas8nvJGVCWmtOlWD+eDUFW01mhnUFv3NCWJ83LhV4nVgMRlkvDnIstfkraMwES13L8UArEoR6ISPgRyNjSlOJCLYnPCjAXzYS/WWoNh9KjLvIyMGSvSpcg7zqYBTqiG+GBTUJfkfzsDK3/JkuTq8lguMcsQe2+HW0P4pdEFRvrS2422COPY
QJolL4/ml4sLA0xyGNEKxo7ygCLTagumO9xeJWqzYhWiqbSlBkxlqGoHlYHGMW8mGG3JSTrwISS60HN97Ud6h6i8CjxWYMKbpG1I3OLQ1S3xacoyRyj2XTkm8PFlsc0fcvzzJm3/A/DXyzf668DfAP6j/y9voJT6a8BfA1H6YSsDHQdYVqMQSAhRjJZLy1Xa0UJijaoQH/PA6SoqVUgg+cOqdMMmYKwZW6mwUbnbPHUY7Jsbvp3UpLSJrjcQseE5hhFOOVS+s0NRoVVFU8+4e/c+Ozd2uGgvWV5dcH15Tde3IsqRRZWw95EqG6IVIik5j0Hz8F3MODn1uLn57Dk9P+fxxQkff/SUxeUSV5QgXbYis6u8SISkJOqGnzk2AjHyGU1VU7uaytrSbYEYg3DMGCCEZaiqrfxoO7FFISTpTVVkvIplIYg5j49tqnMliVNbC9CIrS4KVBn6EOlDIsS8mb3jIvAyjnuc8GWlUmWBTmpInooKVgl6lR53e0mwTC51BEUcEji0dCSMkPczuSREn1k8BwPRcVC+fK02zxsWx80iJH+PKEXBRG+qVV3X0bZrbBADX2MdzXTKZDLBGI1JWRaVLJAGH4UTlXIUci6Qc9gIoshNwBqL9wLDDMFTq6mIv1iLc1VJmAOLxYrj41Pq2lHXjXSHeuEHiuxyg++7ws2wKO0IRbDHx4SJlKRJnptiLwaVWmEMNHWFo2JXT7hVad59ZR8TOmp7i69+6cfgiwFleiLwoov8+j8NLE5OyUq++yCFPEBUpBgg97r3fVn4MzZniOKtpwz45OV2ZIVWjmQndEoRLORJQ+UqGt2DttCLgWsKCpIjUhGTIYviPUobmnpS7B9y4aoochQZ574LpCS4/pTFiNdqSbDrqmau5qyc5Qtf+AJ/4ed+jgfvvMPx8Sm/8au/RjLFYl6NDAEimmY6Q5sZ63XPo+MXxNizN29IyqJMzbL1kKJA6UKmamb0faRd96XkWxQBc5LidcHt5JwFdq1gsErZSFuDs4adaUO4XLJYX6GzorKOvb1dDm/eZLq7y6dPn5KU4s133+Hq9JTnH3/I3/nbf4ur8O9wePsNur5ntrPDbD5hr85UlaFdrPF9RzVTPHj1Der6kNfvXfPi+PvcOqr54jvv8fjJFb/37d9jXkXuziacXZ6wzj2TyS163bNcafpc8dEnlxzdOKddPeT0+QuuLxNWefKqY64zb9w+4ie/8VVOrx7y3p/9c3z1J/8Ce5Mj1teeGzsV+IBVO0yqI37wve/zu7/+S7z7+i771TU7Vea606gpLPuW+3de5yv37nDj6RPe/JGvYJ58yqPzE37/w8dc9QHdZGZHu+xOdnj+8SU3dndQy2tU33Fy/Jw7e3vcqufc2NnhlTu3ufXKLb7yo19jsVrwq7/923zro4f83GyfB1+5hZ1mTA6kIg9rPlN0LCvjVjFN9sKYB8U4CXgrY3A6c7G45PjZI95q1+RqF2OnuHqK0preR5yrUEqECnQ2VHVNDlMm0xlu0eCqOUpVRCzeB7QyWNtw9+59XnvwJn/4wces2l4UHWNfCq+Zqq65efMGIUWOT07Gsaa0xigzJhIhFji3VlBgfsMZx+jBGExlRQBCSDhALgU+6XbEEgvEwk8rMRh1LUIebXtN1/X0fRS1RhTOVYJwCT3Bh6L4WpQI4zayp4THw1quVNlToO87jLU0dU3d1NSVZjabEGNmuVqPPDeBf0nPTpUKt0JsZIyRhFP2gs2eOSSLwsPL4zUSaslmL5TOTmJUR85DPMMYcVtrX0JY5CDrYgxhjI8Gr7gN1UWugyqiJSELN9YoUClTuQplRfToanFJtoa6nqJTwBaxkqSKp67RRCWiIYqANZmgZG3MyN4REAn4wSNsUEGmUGnI0n0dSqdiRVOIC0nUI/PYGaB0zSQuTCXhGTqJcu2Ex6URflLOSThVYwSu5X1LFy7EKIluKRxSVE61knjHlnNIKY0+qSlF4TFSrJC2Gg+SEOpN93Ts9m04oHno/Gnx7eQziesw17YRPmXrH1LRlxKlIamTHSaPfMCRqwZFnVo6kyPlKSPFBTXE2nlMuow2QlfImwR5gzYadua09Zh80vB3KUNv5ta4wZdj2BGHhwZfQ2MMtXPMrKXSClc7stWkSlMZMybglo0KvcRCYfREHMbCMAeGhDXnVJSvpQDkQyBmaPuE1qIwThIYqLOmqP7+0cc/V9KWc34+XgSl/kfg/yi/Pgbubz311fLYD3uPvw38bYDpdJZzTIVYq8eUWSk5SRSEJJnqmCCIBFNJ6ga4mQTBWmVUVgUjHF9qt6atxGS8sGWxHioKaawQbCZ8Kkapw4L7mXN5qQIl18VIBh0hJwNY8T5KmpQK90xpJpOGvb0dVB0grHkRPb7vMSmR6oaMJhRCuB1Mq8fK2HCdtqqQZVMQtZ7A8dkZJ+dXnJ8u8WuPHjl/RZnSCBcwl8k+LgIl0R3+fzvJMcZAygQvnR4hy0ZUNjAMzgw6F1ZsmZQ5ZbIq/KiSrI0JSvnOGoF45CSwhk11VQ6ttbT+jR7x2lrUKWQBSIVjyNamXHLyDddnGwoxjuRNpUeNqxSD4MwIn8iMi8awYJCKFxBSEUMLl8vaiuRLQrzVpdB66JzlUvB62RJiqIgOKqpDtVMawHIfjBZD75A8hXgg1S6t8G0n8CC1XwQIFLWrqK2lqSpUiBhVxkEaRExk44gh4EPA+zh2pL2PxJJASFJXpHidY/BAqqpaoHBdy/n5Bd73fPFHvsDdu6+wuDzl0w8/FOGP4jukShLAsLFkReUcMXTkGKWrNWxUKdCu1wwVUasNRldkDCr3ONvz8fe+xZ139jk4PCTHnqtn72OaCUndILYB78WjqA8ehmBDqQLrEXah0oqu74ihp8k1KgUIPZqIUpmQAilFnNbUVUO9d59+1TE7yHRmQsiO9vQpd/YPub7y6LxgqhtMaMi5dMnz0CFNoBNZSyIUYyahBZrlQwmKJMkf7oMpAaICuq5DkXn+7Cne9+zszDm/uAJbRHlKQEIuCpkorLbs7O5zHTXXV0+p6wqjLLLbmrEgYQFtLArN+dkZWmv2d/e5uLpE5WJ4HINw7MrG7SpXquemzJuM2ZLArpzBkkQRrmmYTefs7x0QY+ZyuSJpESOY7e2w11huVhM++P4H/K3//m+yf/sNXr1xmx/70pdBKbzvWSyvML0qVWKFqQ3VxHLT7RJby6xJ1DawtzfhtTdeY+IM05x5fv0Yoxru3H6Di8s/RBztGvr1ivOLE3IOOHOAMZk+BHac5kD3vHvvJt/95Nu8/d5b3No9wC3g2Q+eM50a/tR77/Drv3iDk6fP+MF3P+Fv/Nd/g8XZB/wn/+7Psjz9iNn0VT54Evn00SNu7814+/CQO6trdm/sM3vvSxytr/iR1YL3Vwl0RXAdJ6dPyLN9Th9+ippOmfie6Dt25zs0UfHK3gF/5r0f5Yt/6qu8uHzGP/61f8qHHz/mn33wEXffeBv3D36F23/wkJ/5y3+R3ZtH9Eo8iPTWPvfyoV76KXtsAqWpbMPufI/lRU3ft4TQ09Q
NbcrobLGmRmuF6wJ58H8rAb5xVnitzoESPcCMwH61tqMf4MmzEx5+8hirKpxGhDRST2WdGCQ7y5e//CWenRxzdn42WrmIWIfesu8RwQ5SQCmoqylVZSElfPBMJlOca9BJsV51+BhGSN/g5zT4tMaQETM58VnqvYj8pChh6XK5xFqHdRW+D1hjSpHYljXKEknEkKTAzFBc3OynuexRQxKkS3KtxoJnxhhNCJ71aoUyDhVTEbMqPCogJy/xx5C8leJ3ypsOny57xnAeJQsDctlLhVfjfU9OidppnKnKfI7lHDYxj1IK5xyrrpUOWtpwolK53gP6JZdirviFSXBPUmSibAMpSECsofM9dRDlUBc7lLHjtXDOopNGRU/C04YVKgT6diEdziiJq/cCIbdZyTo7xhkDX11jFGQlcWXXdcJfLvBV8bcfYJIF4pbHk3+5AYAU5aNfirKy0luxYOnzleKwqB/KPjga16dRbxEQCyvx1jQFMqfLODBUBf7onJMkmU0yA4y+ftGI0qJwL1/2+NMltiBLsXlsPGSB3hqzif2G+HgwAR+K58MYVmNyWwoByozF+qFhMsAujdGYaEoMBCqFsZiA3rKI0AqTtyGt5VlKsY10k1hpQEVtREWG2G245sMDqjw2Xqscx2uQ03B/ZG51MZKMIkdD23u5ljHKZ4zJ8NY6qqSRFEtSrEZ4afnsYp9kCrzVOoexE7QebDkyzmjsH5+vAf+cSZtS6m7O+Wn59a8A3y7//78D/6tS6r9FhEjeAX77T3y/8lP8M3IRDVBjW1ha+qCUyHZTqjRpCNA3KXl5r6H6IINtWJRfxvDKEUOQKonazpDH85RWpdJQFkPxEvssefVlrO94s6LA1aTSJkqOzlUCG0iBSACTiblntbjixclz1sslSimscZAVWju0cYSYCakoSeWB+JlGDLFsXoVjVL57zJGzs0sePz7j6qIj+QzRy+KThDita4P3HYRN8jd07ZxzY1UtBBG8UD4Q+ojvxYzXKr3hXxUS9zABch5RrmN3a7hGA4xgnLVZNsON5OsGYy+Jkxor+JI+R0IUoQitvcAwoiTralST/Hzn8HNjbuu/P+zIpQKV00bxkXKuavAw2xpPIQRygrqqaeqaLnmRHc4Uid5ybioXo3SFYPqHpG8bz5xL8j0sBMO1kkqoNQaPjLGxoJeG65iED5IyMQai96QgMv7EMG6weuBBFChRVbx9crJjVThlEe8RWKChqitc5cbOWgiBvuuYTqbMZnO6bsV63RJD5ujwJuc3b/Hok48xSuP7juglCOuTiBtohcg+x8y0EihMDJ7aKqIWOX7vfVnspWOJs6x1IgC+vebifMWhv+L6wnH20bd4/5u/yIO332Z+/88SV52oe+ZB1RCss+Qo3kRZp5Fb0vsW362xzEjdGh17jErkHNBaYIEqJiZuzo/9+F9mV6/pU8t3n5/z6eMLjj/peO/d9zgPL+hWz5mwQxU1jZuQjCalKD5o0bMOa+rQiqcVmqAEYquRIEBvcR0HP0npUmdUTLSrNcfHzzGlkKFskdo2Gu0cyhaIkzakrFmue67iBb1yaCw6b3GDo9hxkHJRy+xRpmfd9SgFd+/cJCdfKsGKft3jsyKEipQsOcD19RXWGiaTSQme7Dh/m8oyn06ojGVnvkuMmXXb0vvAxcUVbezZv3FIyJEXTx/z2uFtvvLV9/jmx3/A+eUlcd3z9oPXuLhacnpxxnq1YEJFxHO1aLlenWFcTV4vyemS8+NTfvvZpyz6KbFuWM0OuUgVCzIXT0958vCSlJ7h1AGKTLQSQFTTm+S0YG8y4ezqmpUxzPeOmN58wPd+9zt87b33uH7+gif1Qx5d/4Cf/saP8fa7bzNxc0yuCX3Lk4+/zZ/+6iE//We+/v8w92a/ll35fd9nDXvvM92pRrKK89Dsprol2epIaVmyJMcQrFiAXwLHipEAeQjykIf8BUGe/Gf4JQgQIAHiAEESJFDbmiy13VJLLbFHsklWsapYt+58xr33mvLwW2ufc4vsVgPJQx+AYPHy1r3n7L32Wr/fd/rx9MMZ//qPvocZv8b+3pR3Xr7HV24coJ49wX7pDb75vb/g9Vde5K2nx7wx3mexXqBnY3Ad7dkp+ymiL6/YH4/oo+bnvvAl7t19kXdeeQPVev7P//X/4qNnT/nrpx/R+p6T9SWPvn/B+ScP6P5Q8eGzh/zz//K/YvTCHZIuIf9/2ytBzHsFNU09ZTI5ZDw7YG//iFdfeZXZZMb5JbKH6UbGzVS9gBMF81KiZKmbimbUUDVjMCMSVf41SvbrqDnYP0SFQGMt8+UaU2mqLANPWVL04MEDLhdzfPA7YFeCndCNuqrQWuGd+NmEORMPUecTadWijacxtex/xpK8DJse+CgdIYrawzsZ7aJ0IrjdGa3CL7iQcLHHB890MpWxLJlNGDUNCvDODa6OGIv3fHsWoqCqKjn3rcFaLYEntZXhylENhb88/+K3kW4P0CrjX+V9MQB/kgdRANnSbG2Xwa73X84ZqQt2o9eff5Xaxxh51lftBh8l0MVoOQ+t1jInLrNKzkkTFpOMIIEkexwy2sYqRWMNVkHoe5bzuQDfvSI6AeRtDoExVhQ1xmhaB20XWG46fJJ5fMMeCQIm+zTIVE2+NrCt8+SMDNumLd8rCWqWhNEta7PTrO0yKinRdx7XdQJO5+ZB5cZsW8tkL6GWs7mAzMUbr01k1PeEpkEp+Yw+JqpKUholql7eawylBmZo2MuaCt5ncOHHPO+p2I2SAIjyicQOQJlPC+jtmiXbYyTNNa8lLQ3H4CzLYyZUzuIuyilys2xKiFUudWJ+dgcWLYMXahgpUD7B7kLMjZeCQSqZA1J2BjYMnFrmfbbqqfzSaFDyPmOCzonHTcUELuAtgGXdC5sWclZFiJmAKImkee/I7cq1/qEsmVQUgvm5qCorY5MwCOOsqIyhslr6m5/w+mki//8n4DeBW0qpR8B/D/ymUuoXkef8Y+C/ljeXvqOU+p+B7wIe+G/ST5EcCVvMR/4shfEQdV7YEiVpNWJJyRLIvHFLkk/p1gVF0EbSfdJO41Y6/22hnXb+yRc87fydvOiGGGvi9Rs/IA5bhGnQsEdPCjl+H0EEY3IEOvqgcWlN6644Pr7kB9/7Lo8fPWLSjNEJvK7oei8zR5TFeWmYqiRSkDhsGDu/Tyt5CPIF7bqO09MLzk7mhE5hkiHGkLFw4Yu0FdqeqK59pq1P6XozmmIiebKMK1BmtyQyc6YUJaK5pP0MaU47l7s8kNsLTm40CsKiP3ejhPx9gDWWurJUxmCUmEaNscPBET8vKqu8jx3qfBeN+cz37pxag4xIqYGVGICdwpzlsQNGGZE46EBIIqssDWwZICmmZInQLshUUkXOkDfLLYi2fR95EyoeQaWyDCMJ4iUoVhLEdONxsafdrPCuz4dQlkZEiBpciKz7nkopRqXQz141lKKuyxz2noDOh7BjlLbm/xACq9WKpmmYzmZIQIakNW66jslkgvEho3Tyfq0CkyKNUfjes1ouMbmJgxyVrCTRSRrOhFYWrS
2VhimKvTAlXTqSW+DjM9774cd859/+G+7MHPt3b1DdHFPNxJgt6aJZkqqy1j1HLPsQcpJcwnUb8D1hs0YFT21A60jVWAnESAkTLS+98AVeODCct5c8Vsdsnn3M3E1J9hBdtXglQTUVFpu0JMcmKVBqDJ1zLPueEIBo6JKWKOxcNGtyQpj3QBwOVEIiOodRisVcilhj5V7ZugYth97gS9GGqplw/96rtNRsvObg4BaL+TlWZ0lORjIFENe07Ybl+gqMyJS6VrT7WieZJWOlYNzKsmXdFq+wGObt9tBOkcpammrE0dFNNpuO+WqFrRpu3b7L07NjYWOIrDcbDvb3MZuW//gf/y5Xrubq5IwX7t2nGY9JF4m6MtBDijU+JPqNpbWKKlhstFS6wfWKsIks1j39/JymabhYrugc6Lbn6GjEKy++ynvfeUCsPHcntzi7WrJ/MGU+P+Wkn2M3mlvVEX/yh99hxRUfnf4ph+M9fus/avjt/+Sfsnd4wPmnMN2/hdaPCX5OXa351V/+LeYnGz7+8JyPHp7zbLPg9t0bvLM/4+xH7/HVL3+Jf/XNP+LPH37If/vP/nP296e8c/sGjz94SrvWrPs1hopbdsztyYTXX32Vl958jZfeeYvTqzl/9jff5Xvf/g4fP/iQ5mDCPDqulmscDWfnC5hdgdX8+be/zW+fP+Hte4dZCWE/u6F83r6XZM2BwtgR1WgPU005unmbN956m7qqsSlglaEvcz51Re9jmcoj2iirsaMK21aYykqTlH9HGVOjVORof8q9Ozf50Uc/Yn8qjJ4COueoRxKD/f4H7+diLuYU1ShjLnzCaoXNgTwplmAh6FYt0GKsRRsjc119QteG2tbCDJTte4fvQG2TKYuXpwRADHH8sqyl0CvnlRJ5G4jPWue0VXIx6fMYmjKIvvydwhCJmkEPQKn3AVKefyb0D0XSFVIkBURiamRwji7+nXImyJMNStQfBawrkkmFnGVVVTGdTNibTjGk7FkPpMBnaoBS81hrhwTJYm0pe4CLEevcUJNZKyNmUkrC0is5Dy3ZdekjtI7er5mbU4LRhKhBjfFerrUkEVtp/GJkMp7k+xxYbzzRiwytqIZ2bizFVqByMzLUJEoGG+/wN9uim8LkpqH5Kl8flCLleUFhTYMTbnX4WTHXTLoc2qVWyiyQRPbL2zSqXCNh37TJrGzokSUtYG9MOgdAFVZY2gbFtm4r92K3ftl9SVN53QYkt7DcQ5FH77JpW7auAOKFnQWZzwbRJ6Iu5IJ8MKVVVtRktnA8YdIYtO/xThKqS4gfbH9HYa9i9v2hBCRiWI9bzUB5PLxK17IMJE20mKXytaYkSgoIL6C1gD+BbMXR4JIiGdi4SO88ru8hdkSnCDG3qXkunYSkBGIUW8n2wu4sptxTeOdBaXR0kAIpBlRKRK+J1pQn9se+fpr0yN/7nC//y5/w/f8C+Bd/28+99nfISIFS0s2DfKh843VePCkHNqTc7caSnpMLAxn+mVGA3NA9H5+5q8kdPHRDgpO8m4IiQJGDlZuSBtPp7sOw9RPtNDdJ9NZGBYKKhORJyGbjvAy7XW7GfPy44+nTpzz56LEQiAeHEBI6abSqUC7KAFDv8fSMxw2GrTm4NA4SlqCGBVli0l3n6FqPigaDFiljVKgo8jetKrQVE3mRaOw2tc9LJaU50xhd4V2k7xzRB/CwHXKYh6NTBpenYaPTSm2btYJ0Sb1I0Txrra8zbmbLuGGkWQzBI3lw2020NGuCbFxHVa69hvVS0JrP9IXX7u3zm7VcWz2YSodQkZg9Aj5IdLEP4h1D5Iwmz1/RZAluLArrnPCX4nAQFJRN7rGYuVU+fQtaU9ZcMVWrvC7MTqx0DI7ge5J3qIzolAMnlIZVG5JMG5WZc5nJ80E2OG0N0SVSllSsu5b5akk9nVJnUzSAc46+l7EDk+k4H3SapDX7R0fExUqerxAkxS3J/K/Y9/Rtx3p+iSZQWUNVVTgSlVFUk7GgmlF2cmMs1cRm2ekeRu/j4mNOVz9k7TXz+Rk3qn361nFx8YDF6izP7vEkhTQ42R5rjCFlqUmI4hHsuo1IN1yHSYG60lgDdW1pC1bI4gAAIABJREFUqopRVVFpzcFkn+nRiKo55Guv/xzp4AM+/c736I3CW0+oO3yQZlxHhc9rqUqGWmkc0MZADKCDIWDBGDQxo3iBGBxdt6HrOyajlFPPIl3b0rYb/uFv/Rqvv/4azWhEPWoI+YDqvSckScWKKeKTHEzrruNq1eNipO87GmOGQepFMpMrRxmkrSzjpmbTtuzvTbBaMZtOuCISfBjWoFaKu3fv4r3LCZ2JGB1d27PZtFRmhDUVKjfd43FFUJaUJBW4oLdFRpKA6WxGUoqf/4Vf5Nvf/BbHJ6d8KQaq2lDVmvXVCqUb6rFltVwwv1hzd9YwqzS4js1qw/rSsoiGTivqxrBZt2izzzo54iYxXQei9XTdOYuniccf/YioFKdXS8gpgU9OTxnVYG5pJtMRr7/xBfp1z97sEK3H7O3f5d2v/BJ/9u//ivFYvIftZs38Em7dfpdNfMC8XfIbN16ievyI+6/cofniPZbf/waq1sSDKZN33+T2k4f8/OwWn0a4atb0ZxfcnRzxO7/6G9y8fZsz3/GNv/gb/vz9H3CxWHO5WjMfj6h0gKhpZkfEjedgXLH36iu40LLwLU8vjnk7vI5SI2mu/taeLYk0OAWIoMwIW8/Q1QRtx+ztH8qulWTeI1iSivio2PQ9fUg0dd4vNSirUEbmlJYzBAR4lbfiuPvSHb761a/wp9/4Y2aTKWkyY71cSUNR17Su58aNI3rvWa3XgqzvsEHaiPRTxp74oZiXJsOS0Oio0VaLOkQbYTcGGVuJoJcmklLAqjJOJ2GtGpopUdnIfwtDIvut0YaqrvBdj+t7OadSyoi/1BZ653wtYVro7dlXvu5cxDthrLUyEiiTVMH1iUHGApmEhFyloswoyYVy1qbccI4nY6KP+HYbeFIkmUVaaqsKS8I7l8Hzzx6M5T1em4EbpYkbj0bZz4dI8uuaMJ3hncc7x3K5hDzgWyeFLqnaPhJ6TzKO3lTELKnUts8SS4MKDp20SPidJHPbqoIQIPRSCpVrrGXPRevMGm3BeqW1ZI0kGbSckihkyCqegZ3aaaqH5q88IQX8u/YypKxeUECIvlzlQc5b1lhKklcg9X0BuBMhRPq+Z7PJc93qiqqur/kDpbFjAHohDjYiY6Q20GjCT0ghTDuNzfP3tsx6K2D2MNJq4K+Gn5IbT3IDmISFAhkPkLZWDmMMVmuSFolnU1lhVZUmaCO1T74+MUSRLkL2fIfBcy7sWPGKloYyl5OaPB9tEEpKTkPplzOoXWwqBOkpJOhL7Ak5N46gEw5pQjdB0rhFGQIEQzK1gMDBy2zc6PMswDiA+NtaehvoJ/uHE/tHKjaviEoBZzTOan7ylLb/b+mR/7++lC4hD3mvIeu9gS2Nk7JBdrv+k1YyLBM9mE6l0dOQESHn3EDn76bIlGZL0KbcdWevXBkwXZi18iBr0lC0h/Q8Bb09S
GSliKxiWMBR4rhjrFFGmqWu7zg5O+dqtSGFxGrVMmnG9H2kqdYDc6eMYTrbozYarB4SgWIOkygztEIMhCS63BAive/pug3BO4gJoyxNUxN9TRiuqcjsSHmQdGmmlNr6DLUYQGMMMnTQSDSs957gc5Oah6/KIS4H4WCcZqdRZntbd27w8P91bqTDcM+yfzFvNDK0VFAmHz2WgNIxR0kDUaSSKezyUTz37x2mLJWtWjFIdkpDVJq2vGkUNKqgTXGn4U8RlJFiwHkncdNeZvOQ5SwpCRCQ5AdI+mSUe1gruW7CTCRi2CZdlRRSuU5yqMcgnkJjJa0rkr2fXvxXELLkO+Z0ySCNUoaxEgweSEL+mUbljSev4bzhxQw66Fz0GaWGpnJLaIonbr1a0dQVSml8iMwXCy4vr7DeM5JZA3JIZflf6yShT1mLazticigtsuLJeMrBwQGT6RhTiS8iomTeUaNZ6IBrKl5/603SrEanmrffeJWXRnschBrbRyY5Zpmk0Eq8hilKI1yCiGKWv8YQ8DkuXJsalDDdMQaRW3lPpaAiYGmJVy0XixPaO/e4c/8m05sTVGPwNhJUSzSJqOt8+ytIHp0Pu2AqvBLZi0FCa0JMEmYQWkB8bMZUVEYDDpUco3rKzRu3aEzkF7/6H3B44wZibk8EjBTmcTv7BfTgAzu6dQ9My6NPnwqjOJ4O61xnqYjP+25ViZ9IEZiOsieyqdjfm9F1GzTizy3F240bN1hvNlycn6OUDAruui7LZ3sUUNWGqKSQtMbQrVs+ffiI3ndwtJcZAUUfOok8rypOjo/FoK4009mU+srQLZZcPjvGHNzE9x3NiQFvaNqaarbChhVVlRiPLM8uL5l3GyZNTd919O2Cq6sN9TRysXrA3dt7/OqXf55xssxPL/nODz9glQxeK0xjOesXvLh/yKv37jA/fcp73/8uv/QrvwlqBMlSjyf88td+mX/1v/9vRLPBUPGv/+23+Dtf/ho3b97HjvZ48/Amb914Aff0U27+4pdwyfIbX/01fv3vRu7cfYE4HnEwMXzx7k30+Yo3Xn2B/Xcamk0kNhXf/N73+eDpU9Z7Ix4v1nQR2nrE1Voz1oaxJsu8Ovb2Rtx8+SYnJ8cs2xU//PAjvvbLXxN2cnfj/dzmLUvvCkqeGZqqGTEaj9HL8hyJxF+KKdl7XBREWkDH3P9n/1I+SiF7lmVXVSgVSalnfvIJp8eP6GOgn18JWp4kDKR3DtvU7O3tgdas1mtiiJnRkqYjZp+XMhqiBDKoHEyVlPjCIVApnX3cGfzN/jU577aIPVrOGR8CMWkqZfLMr3Jp5OKJXSINxbJ4fRu6TU/bi2xSVDCyD8u+WuwfUqhLz6axtqKyMjrFaJ3fl3iMjDF5j5dapcTDgxSqw4BoKgqwJzxIGhiLyhp8LC1fhlWV1EoCyjIodowxBJN2GJDt0hmqnZQoCcZGa8ajEUdHR9RNI5LQKCBkPRpRN4ngZB+IIVApJTHryWCUJDO6GEhI0nJRjzTKobW8B0kbNbmq9oypsAT60NOFVvzAIcq5n6+JNvkAy/VkTJEQwKcCgl5nmMpjsduPFRmuuHDyfSsLofwdpSjJvEPYRVTb5n/4PnISugADsubUUFNIjSUJ0FqLvFXmxom03gePCw6jbP47ov5SKjPYaaeW5TpwXc7pwsxtP2QcnsmYAjqVOmf7+bdDt8vXdzaP/POGbxHWQJpeLT7tlBNZEzIcvE+e6AvL5gd2rPRuAwCRm9EhJVWloSZLaed9lJpeS/02KNHKdc33MKa4BU9i9oxTGGSFT1I/+whd0BBFieRCoHMduleYaDHJkFxPHyMuKVTyqNLE5jm2Kr/XSCT2XkITtUJ7L3VykuAaCfqMxKCl/3iOaHr+9TPTtGE0ymwZDbkpIoNAgdjjDYqKlIzEnGaTq07SuAl7kMMntM4+DWmyzC6Nn3aaK7UtHiFv5uw8aEPRnldxgpTj1Y2QK4LS5zCORAAVCUT6BD4CQWNSwiaDSRUh1iQmrBaaiR7z4v0vMTncsF4uOf30CceXl5xfzBkpjUW0rvuzA2bTPUZa4ZSEevvgsAQwipDll8lo+hTog8fH0mwtCX6NSoGUakKqMFbhfUtDjaXBq5aoczHZSyy6IbMDGoJJRB1QJqBMj7WJuoKm0sROvhe9kQYl3CTaDSiHijqnNKUsDRM5nfeSWJgUJKNliCTiHXN5xlORo5R7aDNaobPZW+mKpDVOOdq4YtUZlNe0qyWqhSY2BMqw4RxKmzfJsnaMUkPkbszMpfgs8oaqZRg2qsz6UPhY0q8CJonMTyVwCZzSeK3pvRilI4GUtMwDUuKByyJbaQw0RB1BBxReBjOSze6JAh+JvDI34lGJ5cREjbEanzwmJnyQdFGjDF3q6WmJvYRwrBdX4DxEkdN4Vfi7hAqJKuao2ZTlf2TFbAE1Elit6VzPXnXAdDKiqhQY8EnYMaUQ9MwFsYXEiK0rRuMZl1cLDhrxSAYiSsu8r01MpNEUM7LsTY5QoceoNSm2RJ9wrWLTB5JRRN2TjAdlmIQxM+tw4wvOu2Nuh5vMP/Ys3RWTvYqbswn1Vc/pg6eE07UEBlhDChZClRPJvPhLA2hb46P4a1JKRG0IakYbZkQlz1i3WbG6mhO6PVSsuDz5mPnxgscPH/Ht9pLLN+6y2Fzg1AtEs4/2lfgO6ohqDNo36LAmpTWhhg01tWkgBkwIqNiLHEpbVOVw2hNVQww1Kjoq1UKsQE25c+9lDu7e4aPjc/7iL7/N/ddf52zeswkVkYaxNdQpoJQlKtHKNzbyxqt3OL644sOPvwsouljTR0VMBuVlj4tKE7WMBKl0pMqFjQ6OlKRY98lB0IzshL4X38/HDx6JoRyFDykf4rI3bBY9prakKtJrLwBd56hjoH12yXh/zI3xlC5twCR81aOMYn804q03XmV5csmq9XT9iqNRjVpcsDx5jFpDqiKnn57SLz03ZxPeffc2s6lG6TFtMnRxAXVDmypGzYh+cYmJK9pFYGE77r5+j1//3d9jVs348Gnk/ZPIZH1Gd/IpzlSsU8U87jObvIY9qlHWcfPNe2gLxBVKbfjiO7e5ee9V1v2CzXqOmd7m5V//R7x9/zb/3duvYVPiw6//GWeXS/746+8z/vMrYtsyGyU+/IvvM5lBkzrefvcOf+8Xfp79114h9Jb/5V/+D/yPf/XHLHpNu4nsHcx40gdqF6jQmKTAexyJmALOeKz20G6IraDDXecJyiBv+CcUBKk0dZYY5byFiKoT1jhGFuoUqVR2rGhF1BJwQyW7nes7RglshKAiPo/PqG3FhBEmWhIehZawgRRAb/j+e9/gz/7w65yvwLkeqzymtlIsA5VS7O8dsFqvB0kjShIEo0r4KGl85AJrIEuUhGZlrgtvdU5NjhLX3+XwBDGc4ULAVDXOSVy80YaYgsxINJUUzj5SW0Pf98IMJ5FpAoSYaF0AU+GigGIhBaw2jGxFnxTBZ9dPBqpDHjQdQ8THSG0tofN4En2IVEnGsBA9WAE9MBaTBJXXmVVUOTRLwstUBtZA
xSCy8+iIfYvVUNuKNhedWhkIMY8RUoPEWWuFi3mmpREPN0bOyz44aNfCLoSAiuKz9c5hqorWOy7ncwgJqywqRPZGU4yqUHpFbUUWqZISxkNJdedSJAaX2VlDMIaoEF+uCpLKnDQYQzQyLzIZg7YW40NOQUb4k1LXKTK7FYeaL+nsZfIOX1JIjchQhXvJzJoSIH/LwOWGYQeqL3ViiJGUE8PLzFNSCTLLEtncREUl9a7RIkUMQfxSxhiUAVtX1E0jY24w+BRwQgmKPy+JhzBFL2oaLWRAyE2JQmpgZU0OB5KsgZjEsimfaVtzgahrtCpzkfMnzM2MFB1ZhVbAAq1Qub7ynmwLUTlKP+B1wGuFV5qkK9AVKToSEjQmLLAmoHCxNFViuVBGD3kBor4Q/30BO+R3y/svcmQXPD3Zx1nsTFm5F0PAO5dD1lyWf2YlWJ51qnTI9A+obMGR+6rwQRMrYW3pDSYoRiFwkSq6eoLiCkMSK4xSW+IpM3uSEmrQKo9fyfsNShIpG2uwWuVn/drK+szrZ6ZpK5RiYavKf4oePA4MWgpFl1rQDiBGymz6lC3FEmm7jfkfdNzPXZBreu24/TeDp0oNdGx5p0UznbL0bQukXEc15NHfvu8QI1iFUuItcC4xv1yxCp6182w2vRibfaBvO0xTM51OOdo/4HD/gIP9PeyoIYaeMq+toBCpyBxyOo0ygnDEFJBo+BzokhR952lGirq2AoiYCq17ghd63WhJjouZ4UwpDClUJjdYLgRciChtqaoxqOUOQ7N7gZ+7z2ngcLbIZr4PRucNuqT07FDMMpfkum9Ga5EgROc5PTnmgx/9gIvzCy6v5lxcXKFjomkaUhCpScgzU5QRyr33QaS11pLodtZhfuASBJ1TvdSOHxIoccnlYChImc+Rrr3r6Z0TmUpO2VNZd6AQpmpAs0nb+NuBzt8GwlyDsMr1TOSvD1RYNpBvmcPetfjgSDGxXq2GBMbBJDocQnmTKrHD5RnMO3QamEfxjtZ1TV1VGAT5EmY00rsNJgmim7IhWcy1FVVV473D54H1tqppQxZwGIvSI8aTESZ6SBaiJbnEKji8C/jeDddJ0lwb2lCzTJYfPHzK33zrfRaLnle+8jZHk4rQrTg+OePx8YpVXJLGGZVXBq2FOTMGtJHrpqM05mV9ydqv5B8MKiXatuXi/JzLixHt3Zr58gp3coZNMLOK9z78Llenj6m+8AVMDNQpYrMMIqWADnJgSp6NZtU6Rr2nIO8gBnCZeSSSiyKfgUQIPcYYTk5POTs9YdOtOX/2kOMnP+KVt95meuvlLKOR9WC1wtQNylhQAWs1rtuQfIdWMix7i/5vEcsQBSxrqop+0+K6nqqqwOb9mViIZ2IqEiCYLxagFLO9vWFvgpJgp2Xv7h1177B5GPtkNGJvusdoNqK2hqvFWnxHusaYis1iw/HjJ2il2N+bMZk0hM6wN7HsTTXjqeF8eYVLkYurFYv5FR1X3L494d79N5hvIi4YuugZNxXGWJq6os1shWpqlG2IZkw0NWZ8yM3X3uTdW+/yoz/5Ez589Aldo9iEFhcd2nmayjCazOiVJASaGBnvzbj1wqt8+OCHNLNb7N16Bd/MCPWYe299kbOTY76tAs8mI0Y01FdLmr6jOb1CtSe8du+At998jYObNxgf3mB5/Iz2ZM2Dp5/ynU8eEOyMid1Ddz299zRKS6GtGPwyMYlnqpmMqbQl+kjbChAQlMQNmM/blD+zWWevSMEt87lSVVWW6lkpeHLIhA+CzidEZhaDSOITcZDHhyDJviqzuYNXN4eI7R0c8OV3v8wf/OBPIEJU4uVMKWGsZTyb8uJL9/n+976P9wLGGqMpYQrl7abMiJR6Qmk1FHDaZNbKGowSu4DW6vnsp+HzK+lLB2ld8e7C81JGKQqBoRFLJHyMOSwiMBnPaEY1a9USg8rsmHwOa6wU95lx08Zim0Z+3gBcivdUaU1AfGFGa7SVhisZaSxKzaJ1GQMT81Ehqa9kVjBEAc5MTnve+pdklJJc93LPDXi/BfDy50yIVcFoYYGcc8yv5tiulSAkpfApkoLHJkXbdfkz57WQA+ciEvjiUkSNavEt5roqKZHsG1sRvd/WDyrv19qIhLeEmww82Zbl0oOiimvruhRupRHY1oZbpc21p+MaY7ZVbAGZKcqyx+fDNdJWJpeSZJDHKCnGxaKjd+6fMYbRaETTNHJ9QqkTuDY391p9VOpbRK6oc8NVwvJiURMN9YEa1rnYi3LwSC6A0nAklL8zlAPbR2X3e0s9nrkNpVUOeJffG1IGlVPAKoWysnaV0pg0opYikLpu5LrkMQ+FAZb5ceoaAyfM99YKYoLBZlJ7aNqyrDQU9mrn2oUQ8/oSVt1qg85gU4hbL3YMMb93sYrYaDHeoGMkJBDtTCwU5lArbYkh8fiBzF+sm1r85hkoaayhqSy1ltl1KW7P1M97/cw0bbC7CLZN2zYKV76+G1+q8jcVv1v+2xk9uO7RKgu8vJ73upG795SS6MuHAnm7gcco5uKk0xBbLkxfCc/Yyv9KwO8gvUJidbU1TGYTbt++zdHNQ5pJgzeGRduyXs15lDyXrqOezTgYNxzO9rh1dIODvX2srWVjU1IAyfXJVC8MmxMIUxO8oIIxpUwUKrQ1+F5kncZUBB+xjWjGo8ohAqbMl8tN8o7MURlLUJr5esOz80uuFi3rTuRtkpolT75OW8Zyaxe9dqN3NrbMOhk50EOWhgwNN9vNteiS224FqsL7nicPH/PokwdcXFzQblqU0tT1iFobDm7epKkb1usN5xdXuN5l75Y8qGg1UPK799BIl1aGPIgOvvj6YrnugvJYI4ebxB4XA7nHe0fyZPOsRpkkAxRzIVBiY1P2xW09dtvPPUgxnltfRRpbtt/hwEky+PXs7IJVWBJ0x2az4fjRJ6zXa0g54VLn5MCyKe/cj5h2PZyyE8v7lcQqkcR6gsuMrGHYtKxSpNhjTaJpDCG0WKOIXoaLA/QuQJ1wEUH/bUXw0IfEtB5B6uhbCSKorSH0LaFz2KjQHkyEzivWQbFoK07OWq4+OWM5b5nducPhK7e4XK8I7RozHcN6RRkrIIdi9gEk2SS3M4lyox6lKRFgQJ4NmUck4T5t30PVUN9+iTrd4MZ0yr07E+bv/3uevfcRKEvvIuhahqxnDynRo1LIt1hxdbWk2a+lKcsjOGpbEZFZU0WOI2eipGa6EOn6npXruTx9gt1U3N5TrC6PZG7cZgnEQbpmrMUnhBE2mmfHn/Lk5Ix2s6JuJuhUcEBJQAMpcpu64YUX7vLpkye0y7WkbeaCcPDw5mTSIm13rqdqGiR7qszSyY2eAoqXKIKLHhMV49mMW3fu0EwbmnHD8tMl1ozZLD2x1rz80iE/9+6XuVh+i1XXEWNHomd/v2E8MVTjQJ1gNj1g2UWC11z0Hn+xoNlrWa4sWo9RPqCSouscnY/0MRKNINCbTUe76bGpZrnp2PSeZKbcvHGE1oEP51c0o8it/RHT2V1oNPVkxgZNo2q0Gkl1b8d8/OiY8aRhfON1YjU
BWru56KYJUVN1LdSuZSHJi1DV03o+hC23WYtmUXEqUYrGrxmwte+edYdYdI4mq7Y7MO+DFUinRhGEdszowxolOm84m+N5j6GlPJKJ0YxxW73SV5WIHSEuicZEhimdzjEju/5dVlZrUz+BwZY8IWxVE7I8w0D1dnHDS3mC2OePrqiqsYWb52h0Ihhw1aK2bzOdo1mKaj7xecXV5xcbmi7TqU1jTOUiVGFdmf0H5dkVQ5H/Y/oytFxkdx+7QNZImQmN7l+WLBrdPbMnjI1yYlkynDNAgpGSIG62TVCTvPNmzZDFtBk7tA0yb8NkARp66ZscxoaFyDLob5vBdL81IoCMpom0Tbtww+slqtBSFMinEIWGfJuQBJ6C06o80NSEQZNBptGsKw4/mzZ6RY6BdHXGxGHr+4wKhE1za4VihMTdboAKuLFUOX6GYLQh5prcZY8KEQy5aSg+gTcqTQYaxYZRfjOOg6WUeNJSuF7RpiTmyHAZQYPcWY+Na773B6fMDd1+6ijExqXdvK790GNBajFQ/f+4Dl3LKc9wztDqMcTT/SzmZiWnTDAAsNV1GxiUK97bs5bduhskG1Co/CbwbCesOjB8+wpqHp53TW0DQd7fyA1knx0PdVd9yA6xoOZg2zzkoAsjZoZ5lpTdu3dF3LuNtx77U7XG02PH51Qd5JeK49OkKlnpyFpeBToWksqrU4mZCyXl2x222IwaObBqUlhL1rG1J1/2tToY2gchDEqjTY1tEoOOwsL+MONawEbW9amq6h7Tq0AAekmjlp257br53wnfffw6Qsg8F6uUw6brQmlojKWpz09HWR/L1D3Jta91KKDA+ysEOEPpWAQkyFYgy6aUm50Mw6Aj1bb8jKgjZE3THowM5HbJPJtqX4TMaA7vBBMrkW8wOatiVlTcwdY8pkM+e1T71JiAOX56+wTuHHgaEg1GGrsK5BNY2s0yiM1USdCQQZMJo5MQoVzwNDiISwJZbEwcGSjCNnGfilNJLTgKvMGucaMi1Kd+imk7gNZyEpVHZYGsaww6Dp+55usUSbVjLctpcyazSmGjYYyEEo4dsL7HyOUoIUGTLEiCta1udGM5sJZTz5nWRZKs2YLCk4smooJhBSojOajkJbCjoHLHVYpYqs1Vbxahw5W29ZdD3z+QFqdkhxPRZLYz3EglJC6XSNrdm3sso2rcV1DqUyjTMwOqlpcpamWWu0UWBhfrTEWoPfjQwbQaVjlsiArAyu62krjTImj0oeTZLYpX0tBYVMygpRKiiGmImpZT7T4jiPonVWGB9ZkG2tNLHSvrVxon9TwpigQCmSIZxyqSHVFmWs3D9GoYhWjX5i0qkpQpZcPlWL4IiVdsLUga3WBABthQmevaDRKeOSpYyJ1I+syZyVJRrHXIn20qLxqkAKPHj3HcrVmuNmTlmNKILkzOqC0dA2hl2K6JwEcLhxDafKHJMl2mKzIQ+Jq3hBioqYG4ptMLYwU4GZLljbYmkYksbbnqALRbXoACbW90E3lKLQjCg1klU1yYsjA1H07FiaZIhZCWulRAxZaJo+osgYp1CWvVwr5ULK0tQKDdOCEgdU+eOKMzh6kg796cf/K/fI/7+Oia13/dU1t3x6BdN8f1podU2cnzRoGmnq9IR2TaEzH3ugikrccJWMKQksXU+GyaF/b0pSP1fVeVELoRc1IWhTUZ+kkNJoyYhQYV94TUicqhfJPmMLql5JssdEqyJF1cQdziWhSkJPyEsq8lgpCSe7PjehHMsUR3ImVG0OwGpN4xSNdiIMLmKUYY0T3VxFUVIJhOwFCUpUSpsRBC1HpkDkgtA3Ra8nDkwoVx2eEjklTM30EHol7MpWbFEVkj+SMyVr2r7nYLGk6cUFc9Y0dCh0jmxDEf550JC2VcS9xfkZEU2vDF0/R2sHxQBWggqtZtku0LMtu90Oqy1RCX3AGUODYmYaYtezaxvGcccRmqPjW6yHNeerS1DiWCjZd1nQv5y5urhkdbWSaVmuDTJUpEmEuNK/JZwuhDQZSsg5eRP9LbkiyrlQUsRjcM7ROkfTSl7LhFBiNLk2Gs5aoUuKDY2I40MmlkRKgZQ8mqaic4hd8jTVu7EkTOj0dN0J81wExFRKr1V1MoniY6uJ2vcT19dwET1bztUaukwa1MSk80zI3wkl2Ux+5zFo5v2cWbcg5EzW0qw2tkU3ho3f4XPBZIWzPW0Ds24jWXtZkbBstolcDDHKZFyVgg8jaFOduWQSPhkClVKwTpNy4OmjBxwsZxzMGq62g0wLlSbHJEYxKRLiyBhGygC7ILSPPHqsNQzO0fUdyjhoHF9460cYxsLzF+d082OePn1ISju6fobqC9Z19P2S7Rh48vQ5OWeWR0d1WCA6hBtLISB04VKKNJx5Qvel4e+6jju370DJ7IZdFUzrSjfUkhmWkpwrk6PtZHJkZA0lR0DhjBGN4O6MzWaF9zuUEm2DbQ3Hxx0bk8RFLkV6q+i1w2RpEru+xTQNYcgEr0E3FAc4GHYDPnkmywhbi2GlDFCqZqBQokx+5XklGUSESNPPOb88x3YdZ1cX3L53j+fPHjBuVgyXVzRWfGZKzrQFsm1ICdIQRbhPkCiKknCdQeFROmOVQuOwylKMwtuWUBrGcctqXPP/MPdmTZJk2X3f767usWXW1jM9A1AkSMIAYpEEPYEymklv4keniWY0GgURyxAiBpjpnq6uJZeIcPe7HT2c6x5ZPQPwdcIsrbOrsjLTPa7fe875byUvpHal5sz7774h+Ej0EWs8y+WJZbAg73QPNI4lNxDDGAYkNyRlnr//DGnkMO5ZStWhwqXSLmecHwiDOt5hLYsUzq1gx8C8THx8/wE/7HVNbudSo7VCKalTzJ9YA9UPhwPv3r1jv99DXRjDyC/ev+c//N//gU8f3/Nf/+L/4W/+7M/4/T/8Y5a0dDTJMB52xOjZ7UfGuzvu3v2Y56cz33/4nvP5QsqQcqGZQhwUjXbe0srEx48fef/+PdfrVdfz6cTd3R0pZe7v79VlUYQyXbg+fEJKIgfL1Qqv7o/dNdnhWyHUrIVh9bi04K1gvKM5Q0IjFeIw8vbdK/Y7zzJ3XaPoYBDTJRNGh62tiwZu+Ze/rmFb64L1w3mP7bVGroXcjUhCGNUy3jiMtxQCnx4Tpe4xRo0LHi6ZqRgenp9JpUGDNCW8reQkIJ6UJ3JNHIejNkvjnnAYeJ5m/uHDBT8a3HjCDhHvJi7PD+SWGaPFBIeJQZEdY0jSwFY0082D0ZiPhkM84DzL3DClcHAOTOxnuJDLxLxc8DFinSXujvjxiFjVhBUjiPWkKXO5ZjzgrVImfVDdsVQDtdBawVqvodylMj8/sywJ6z3Xp0pNE8fTiWDU6l9a1eFTMwxhhDjwNF2Zzo9MAscQOexf48QzxqPmg5ULtrVOWRO8A2eaNm9WiDbQPLy+35MuVx6vZ+z9O6rzShOVXqtV6WHTjRA8h8NILhdlQLzaEXceGyxxLuS5UqaFPM+qeUdYcmIvjd1xz+l4oC6Fj+8/cb7O2LogCEuuXK4T7GBwDm8FbxrVCmHcUVOG
olrQZoQiFWk6HI7ed4mAI5eiZ3HfL0WaasJwavhV1OdALHgsAdVAN0U7dLCNNnBzygw5v4gVUoqgA2UKdYM0KbJNDv1+jzXgUISwNl1TYRh1QJvU8EZSxYjDBLBNKd+P9ohzkb2DwWjsVCLjWkWmK/V6Zs4VSRlxmbC3RHEa+5JNd0FfQwa6Xq83uatW1eGxzSKpUdqCND1HSzPMJVNtppZGzJUSI0YcF/Tcnqsy9gZviMaxZEstgpXSaywH4rFSKU7I3uJEaepOTCeEqa6+5oUia11uSVawxuOD68ZiVl3CjdBQOnGTgkGd2ysgxuN+YIT4w9dvRdP2j71Ww4aNpdjhSP1UWI0a1hyX9VI1JBc1bmCFn/X1Uie0GTMYyz/mfdlWWuT2K3xJmWyt9dwPNeFoKyrIzd6zNfmy4IUvKubVQnadkKyhhRu9YyW5dg5srTdHRiMrkni/MWh6AAAgAElEQVSjY25Tw37vrHVgVQTZqsK5rZme4WaxrWq2UFo0OLNT/NZOWarmhNRa1ajCqVip0IMa+0HXBJxV8eUQInEY8MEjUsgts6QZehGPUWQRU3h+/oT3DVvvCccjwTl2UR2YLmmmlIVSFWVLOZNzxUdHLYbrNbEbNXtLgFar5lUFz/FwwHvbNyC08DRr89I0C8Ub0rwwzxdev33DcX9AWqXWirVs6KUAz8/PfPj4gXmatjdvXX/69avLqNnopGuzLi8GAes6vK2/TpfNSYW9zivNZVmIMehDXivNriY73ZznRVzDape+ZuGJaMZd6TlU/V/dqJBmRXAU813XOr0QWL/3ek2bYQm3pWxefI7czCRcp25SNHPpJeUYePFc9I/auttkJXrLw/m5F6IWFy2n0wkxghPwVThfE6UaajXMy5ov6InBcDqcuPaoRNVwqt7JWKWmzCmRc9b3AENeEt/+8hv2v/fPOYw7ni5zp7917Wp/z0otiihndTzT7MPWNWaGGJW+5L3j88NHzudHvBVKnnl8/IxI0pBjUWrosixcrxOXy4XhdAKzahy+HDT9puJyKzp16sNxv+fduzfkZebbb79lWjSrUGpVfK6pFsZgN1qt9Aab6jCublNVabAsV85Pj8zLhLEaMSGtdZOFrJq30gjBEaJFTNXtzFrGUQc1tmshxSjdy/SVFoPbrtV7rwM4e4t42dZX3wdpqvVNudOuRhhi5P379/zRH/0RP/2f/gU/++u/YioTuYkKy8NAGCOMwnWp5AZYx924A9OY04SLEZFCyWmjvteaqLkypUwWKDWxLDPGNMZhB3HQ4UP/nbt0XZ18a+1W/6t9syIZpekA63f/2T9jdxxxXpGvKkLoeVfGVWzKuBDxMRCt0FLhMO6IPjJPE65CqapVNkZNFKB1ZzvbA491rT49PfPp0yfu7++5v7/nRz/6EcuSeHh4YF4SKWWcV+tqZy3eKDWZWilNmRLee1wt7PcjP/3J10zTwuV64fPDI9dp7qZQ+kzM1zPffvsdz+dL11lG9vsjzkeOw45xd1BKV218/vzI+199g2lVMxlN5bvvIjF4TqcTKRVc6JpWqxbYaZ5pziKlMLx+xWHcUVOiNSFYpwwYVo3Nbf/btOStKSW8FkTapnVfh2Ivn6svBrpdjlDQ/cRY1Ynu90fG44GnecE4R4wDrVnioLlnrTq8GzkdrRonFG1uS8pM04xQKDXxfH4kDgHnFAHbH7+ifvsrLunM4Dw4y/lyIS8Tz09PSE28uj9yPI4YEXJOKg8RqBhcE0Jr5KRmWoIFZxFbqFJo3fQH22hkSkuksjClK0G0qTWdVofRSCFrVaeUcmWeE9Fpth/WseTCUgtVHKaqAYv3PZqpNq7XiYYQjGG+XskpMcbIcLzHhYCEgPGe0JHLuVSu52emp2dySTSvg+DqLZ7G0KN3aFX3XedVs2cFI5WdUVlHsIb7caRYw4epYvKCKTNW1GFZWtmKSj2LtGHZ7XaM48AQ1ZQNC7tdJHooLjJhqdMEreJ80HuLNkTWWfbHPYWGrYpWFWmkZVEji2FQcYBzhBgYdjuKU8qiauKUUSVdM7bb7djFQUOzU2aVyDR19MI733XZyojKuVCt6h7bqtfr9aN1VpHStc5o6jxemz4xarKhZ4nxHmOEUm5u1HEcCM4SnT5lOvT3+KhNG4snT9DSQi5NaeJZyHPicp25LIlcWieIaJB8E7okR2hOn0nvPdlkrDEEp1m/a2zEKheyDYq0zcdhq1v6h+m6b9EiFzFau84lI9bQnOoFlzTxGBLPkrnWhdwWUp0QiVjxHfgBkUgjdvDldv8a0v0muot7r/NX6UnKFecqzgv07E6xQV151hpY1iqqT8D/RxBbf/1WN23SN+Mu4Vj/9NY4vdDU8AK9ku5a99KFct3E189fZrFp7tbaxN0KNf0ZX27q689dX20tSK3BeAe563maFnRmPRTMi6JVpFsi235IrDov3YBbk47wQIfnELG3cGd7uyErcpBSAvtlU1pKIXVnoJYaOelCrwaqMZji8VkRPpxQpXQbWsHZsN5uRXi6OYuIGp1Yq9MBsR1tMkrd9MERnGcYBmKMWO+oNRPmK2v+jUhBUJ3QdW58937i8dMveTzueffqNW/u7tl3nUNujeuSEckY20glcb7MWHcgeM3sUZmdYRgixkLKGhS85FkfjgYildoyKS+IszRTsUEIo+H6nLhcHthfBxwQvSdXpfhl0eLg+XrlV+/f83y54ozZzBzWg/5Gs7mtUTXS0KneirbeXq1/XWe7NTUByTmpg5c0vLPc3d/hYgChH5aqSVvXo+0FMN5tdFn9ndTEonU63IuHalvbao6yPmcdxu/o7Kq7TDn3rLf1OVuRY8MWVvfigHC2h4P2ZvZlHpKxtg9e9KBRO30NsV2WhWWeSMvEh+++5c3r1zg5cn1+4vIps9TCYbfj7fFeN3MXGXd7rB8prXA6veJyOVPCwizQclYBNRqVgVVq6XWemJd5y61rTZivEy0XWipdYxj6s6gUF7o7WCkVsYloB9XWjn5D+mtr5JRZUmJJGR8M3kcu1wdyWbBGncSkZKrX5jzE2BsYv/HrrbVdWP7lDm5evLfrmgvjwDBE9uOgCFkvvGop+DAQvFpDOwyHYWAYB10rKBIXY9h0r8551ew9PXF+nAjGMB6P+KDhxbVWPj88klLC+0CzjVdv7tgdBnKZaaLZZNM8UeqaNRRZ82eWeaakuTvZGujPpTXqdtdEEQlBu7fWGQfS8yq918MuLaqdOpxekXLid3/vX/P//f0/kC8XmqilsgSHaYbUM4aygZoyzhSchWmppDbjHKy+y46uXwiBvR80sqNlLHcYow1d6L/DusdO16mjxlbpSdZu+VRNKjnn7Rn7yU++xsTA58dHVj2zNAHnEWtpRmg1UZYC1vDm/p5/9+f/FusjwWpzK7UpimQMpRVazUhr+Ob7WabGLWCY50Rrj0zTwsPDU8/iU33fqzdv+b3f+1e8fvMWFwa1bC+VljMlLdRSeDpfEHnqDBCvFPwaeff2NTlXnp6emL3jF7/4Bb/4+7/n/fuP1ALORlprPD6e+Ye
//4b9fk8Iz1yvV6QVyqR0dCOFqSasFYaYsMbwfF30PHYOX2GISs1zxhOd0kxbTjQMqTXCOPLqdOTzw5nc1j23zxr7OV6rFoGK6rQNvH6Z1boOj1x3zqUvi9P9PcMwKNum09BKbRzu7tmd7in2wrA/cjq+QgqM+wPjTunB85w4nQ46NAuwGw58/v4DT+dHKDoIeHp6IqVEjAdyMYiJnK9nihSG/ZHduOfzx4+kSTPdjOgQ4PlyRaSxzHMfigRaDVhxRKlIThSpasNeodCtyF2EjsCkspBKotLAWYxzGpvRGi1lPbfFKLJvPC6OhGGPJZPzwnVawHtsHDHGad5YMxi86lqb3jPnHPSGzjRR9kJtFFep1hCPe21AQuTx8ZF0veKBYMBQEZmRoqZyvpaN8VERLrUgBo6nE8f9HjsnSpk1vLk07sKAGKtDujwzeC2qjenDR9Emao3dqLVxvl4x0XDaDVivdZczFofbBjoIxGEkDjucDX3vcAxj5sCIz46ny1mD2MeRIQal8GGITjW/WEMLnrYklUnUAj7gurZwtx8YrEeq6hmdd924qjCGoEOTlLchdm0N8FsNK7bXxP3sMtb269T/X5umQtsyxxBlbA3Bq7OtsxyPB+wY8c4w+g6SiKUSEBt0t4kjpiqalUvGGzAVypR5vs48zTNLlzZoNJSl9kEt1iidftF9fgVADAZvLGIdWaSb8fVAk6aNjurJZBMZbb1Bb2atNezjoPmqrdIWuNRCuV74+PF7fnEwnJ0lR0f1BtMqwRhMA2ccBUeVgSZe9eW9TtrqfmM7k0c25LJKj+USITeHbxbaOixvnZKrAzftVzr11BiscRgUlfunXr+1TdtaSOqZtyIX9te/Zm3YYPvvmkAv9jY5e2lA8tIZsn+nGwryA1RAZwsvIwhuqJY+e6ank3GbbkAXcOpUdf1WL7/Hy9ea2VBqxXcHHj1gOp2T9XCp2hS8mMa//N6NX0cvlmXR4qFINyKxVBGK3K7botkaSg293aP1v18gJUYT82x3EGpVi3fv1dVLahfU9yYPbmYn3vutIQBFxIyp2OjYDZbBFshnlnOmlmfcuKP6QMlqWTz0KVgpicenB6x1vHn3Fc5BSlecE4YxEIIl50pJk04LsdSQKa1oFpI1iK24aLCucbl+4tOnKx8fP2CCo+RFJ/yizelSM58fHnh+fu5FWDf9YG1g7Hbgq9OmojR2M5O5rdeXyNiv3eNauhMqzEvjcnE4Zznc3+Fj1GHEyzX7ct29QPI27ROKPNZaOi/+tla27UduA55V6wlokSWaWTbP8w/QMp0M9aeNVQXa1ilezzZbXVK3Z6r/zHVoMi/zZmUdYwBpzJdnWp7J88TnDx94XmaSNTRrsGK5ukQ1lnF3wniNO2ilUUvmerlyfjpTc6E6KLkSYi/EWtcDzhNLSkB3hTJq/3y9XFiWWZ/37sC5BmcqkN+paaV1ZEq1paU1rvOyBZbHIXI+n/n+++87Ve2EtMxwOOFEhw5NlEYcoqKTOWeG/VF1Cp3SuO0xP9izvPc45zTAd6fPQwye6CzOGQ6HPZfnZ+LOc3+643KdMLURnSFYcGZFesFKRUohi7DUyvl81sJpWZDSKEmYEHVkc5acEjTBRYNpiqYb05jniw5PxPB8vnI47HE+stvvCSHQRJjniTxdN/rZei1rA2rQIeS6vr4Yb3SUz9WG9SMfP3xgvz/y3/7mr/mzP/9z/tXv/z7/5frMdH7C0mA2+NCoYjDOEbx+/9waVSCOB1ywOKdT91ayNofrM2HMRtty3mG27ER9vpZlZp4n5mmh1so0zzw+PzHsD2DVLU71cKrxXPLMr777FTaOHI53WmhbsxVbFrX+FhFaLbRacFL5+quvqcbycL4gThHsZm7IuLUOFwLj0HPCujGRtbY3ubpuPn78iLTK09OZUhuPT8/8p//8n/nxT34XN4zsxn2n2Ss6K1VptoK6qF4uFzUOKoXLddJ4GKdhyjlnvv32O6ae22ccmFoR40ilYZbMNKumtnVn32F/JE0XUlpoS2JZdCB1nbIGTnuPd5bRO7WeHwcG7xQRxHTjIsdhGDl1swwoW7HX4RSQpuYykhWRNIB1vUa4DTdfDnC3Zw3D/nji7u7EGAeGMKiu0FoaDhMCu9MdWIeIGmQF7zYEOdnMS5t2Hzy73cg4DhTxGLPDmKbnVJn4/Pi3GDvw+quvQAyX85nRDzoMqgVvlMKaaiUar/KQfuaIag6oVanwIgVs00FVqVRrGcd7duMeMTsqmSKWigen+jUT9lgqgqc1izXdYMtAM5Zhd2B3OJKuT8yLNgrDMHI8nqjiScaRpnkbPmsINTjbGQRV4yTmaWYnilTgLXaItN6M0hoBo2uriWZZ7hypVKY5YTtrpApccyHVxG63J75+RdztWD48UNKESQXjtC4LTnXvBjU6EtOwtg8f+7Mex4H96cicZ5Z8YZpn4hIY3agW7tarp4DXJMQmulkZ65XB03St+aC0uWYanPU8GGJkGAbNX2uGVfduDBjnyMNIzYWWoFnBdwMX17PvbF9Xpg+GnVXdYc6JVLIapq3smL639gvb1iKdAulD0MEsfXi/shmMajiN0+w5awwheEz/HqajgFrT9NrB6GBErBp+4SLVzDRRKqfJhnxJPF4vTNJoVnNGW6uUKgTRGihEvd42Z4zrujvrMZKhI7atdmRtPRN7PpqacZsOdKhmttSK08JHFTNiCT5wjEfG+x0DkQ/LxF/917/gq5/+iOvjmceUqcb27FptqPr4UONYOspZO8qJs9SlboZgClhqjWucYI1a/ltvthw3MX1farU3eGWtxjpDYC2OvmTV/KbXb0XTthaPN7qg+eHfbqiC6VkJa0OwNm164K0uff1fys1B8mUB9LKR0w9umrYXP/tlc7cV2u3mjqcFci9W++f6/pmtmFy/tpZ6E5q/KNxtn3I73+22rSqLhE65Uzu7rRFoTWjNbJTNFa1bLXdrR8VaF5Ia67SoNmC8Tlms6XSPdfPAbK6BW1PaIfZW29aIgea6ne7uCEPsiKEeGk1EaYZpVhdKxZMJGDBtKySkf19rDTiF0mOE0zHyehw4joExOIwXmlWov5WZ2haMseSa1LTBFM7XT7QPmbu7ew6HAyKQ86ITlp3D+QPn80WhRRqlJKrRJqqahdAEw8wyP/Dtt9+w/NJy9/o1YefVAMQA1pBT5rlbHDun12C2Rr9P6L3faHemP4GtTxvXQHXpidbm9kVsTypCyQmkF+25Ml2vWKtBpuNuVL1XU1ciTJ84dlS3lPKiADbqRCWJlNK26a2vrXl6URqrU5rSTkSEh4cHPn/+rN+7/1y+eC5fNH79tf3s2ra1usZOrM/T+tyllJinmVor+8MdIQSm6UpJM84KeZkA4Xg6UbxDgmc3HmigERXHO5Yl0ZpO97UxtaR5ITgdDtRSaba/Z6Y7aZamGoH1ALCGkguPDw+kJel19b9LuVBLN+RBUXCa0JIiHHN/blIpHZHSBjnnBYCnJ0VV3rx5xZIadOxPN3/DsqjhTCpFtSHG4r15gdq2X2vWxnFUFH
sYcMov63ugajmHIaquzKo4fVkSQ4yKaC6zZhc5R5FlW6spLZzPF56fn1iWhWCj7lnS+qEoTNer0rE6QnE47DjsBpxpGOu3pu1wPDDu96SsU2ZrHfOSKFnv2UplCSEQQ+iOcn7bH6xTIxhFSyq1I7256GFfaqXlRJondoc9Vgr/6//yJ7z/5uf8Yj5rcVgWQnB4q3oOYwTnoWTDnBKjG7A4Sq2atYTBWKcTTxE1VKqCNMip0LomNKWJlGbOl2emHmbufGCaE9988y3Hu3sOxyPGO4KLzGnq02vhr//bz9gfX/GHf/wn5JJx1uFid8k1GgFijDr+BmuYHy787Gc/w4aIHQbERmyI4F1n0rSuF9b9qXYUSUOudc0qimsVIe4ITWnw+HzhP/7H/8Th1X9ndzhyPJ447g/cH08cxoExRmKM1FaRtGzDnBACzqWe32mJMfL111/zb//t/840z1wuF6brlXmedS+wVp+PnHHeU5zvTWGmGbXKl9qY86IUNFF7dZML0TsIjlqyfs9lYTcOjONIjANxGJmvE9YoUqxbi9mGQt3oX5tgZGvatJldKd9m2zvXffSlS+s8XRVdbeBsYOccw7gnDCPDfsdr9P2qFXKqfZZlcN5QquH5fO6DTEs1hiYTw+ChDGrChahZU0ebl7zw8Pkzr999xT6OhDBwd3eHszBfzpTWeqi35kP5YVQ9tBhaBet1eIUthOiUDlYF60ZiOGHsjpIixVlKjT0QubGUACWwFIcRta0XY5DmqDTmJTOIo4lhmhemeQE0jy3GAetHovWcH59Qoyo193FOcybHccSaotTcJTNfZy3YrSUej9q05ErwgbvdnjKfERq70ZHzlZorJjdsj45ZciZLozjDbrcnOcv3z2dkytgCS64cjntyM8xVqEPAuEBzTpEQZ3FV0ZFU8qaR1Kw9Sy6VZckMu1FrI2Sr2VoTUsrMS+JYKz4GZQt0N0PrDF4s4y4ixvb1GnuAeNPw6z5UsMaqqYy1neYqWKumKsYqhb2KMC+VXBatO6wOFGUdiK61qe2N3VaPKjJMPzvojp7G3RwKS6s6gF8N+ZzrjJG0IdHTdME1i3cGnO86f4MY1eGZNd7IOCoWrKfVDHNherjw3cdPfJovLE3DqIvQ5Tlta8Ca6L5eRGirKsegLIb1uTam0+0Nxq21s8F2r4nWVHetxlWqka4lk66ZS/O4WKleGF4d+Mm71+yHyMdL4XyuPE5CNVF1Z7Xq0EkyzTVaR8bM2nD1pq4BqRZyb5iHcSQ6R2sJqEBVQy2rweRb/Sedqt00e1mQjkobBWpEcxv/qddvRdN2e5mbHsjYvhmtsCdgen5YFz/o4OzL3LXN8a+/kZhbsWhf3Iy1adLpmoBpdBAFabI1ZdHpLVqbv7pqxnoT2frhQOeyrtMORe3Wn22+KJo35MHepiPWepzVjbGJgOt6PrRY1PWrlL2VX6+IyQrNtj5BNKxW/YgheJ1cRucJxjKnRG3li85WTVtu16iNW7t9/y/QNosLvh/qjcHpJCyltInkkb4ZXhqhF7S1Nqz1QM+wk9Vgw2CdEINR5yDTiF4wHqrTh6PVRUX3krFkJJ3BaBRCzldyPnO97tUWeYiEoBOjJWdSabRsaEUwndLhRov1SiNxZL5+eyJNR779fKGz1Fgj1jQwVm3kod97YUNBtQldm29HzunmXrqiXn0ggNGNVPqwQYvBdUGulFjZUMpasgZnlqT0HlmtaO0tw+xFI1SrTpdcn7KbTo+UqjEN0sf0q02tiEBHeW/urOuy6O+lubl/2k4hcc5v9K5mWh8A2M12W5s3Lby9DyjaylYUrc1IiIGUb0MVg9BawVvLGAJYQ3SOn/7O7yAhMqdEuixY65TyaR3LMhE8HHY7nh/0/jlryYuG/voQuL9/xZTPLCnTmrqdlqJUiBUxb61rk3pUgukFYKlla2ZL1WmeugRBKx1VxRCqUxpxd0uLMTCMA7u9NliX61kRFavaliaNN6/eMaXM9XzRwZOBeZk1U6lP/Y3Rw3YcR06nE/v9XosgrwWI0gg7UtQL0GEYOB5OnCfVkJ7uTnz9469x3lFb6Y286j51wFNx3hCGoGh7W2mvBuM88zyDqGvoMI6M48h+v9t6+BD0PZYGwxDUjdSoKcHavNeq+8JtUOW+WGc5Z1JKOO+pmE3PRh/yGNBspdpY5onvvvuW0/2Jn//t3/B//J//jj/+N3/Aw4fveH561EZ0uvQCQ6fNPu5o3uGcNm6uU4+thZITpk/6c6fP1rJGtTSa6D1qTZ/d3W7P4XAABNMc3vrefCn65W3Qa7W6m6YlUVslt8pluhJj0JzGCrk3QNbCmlmZcmK5nPn53/2cw909X/30dzBBMLVivMN5r3rcBgWl86z3svXoltIDelsVUko6Ka5Ni6sKYhzTvDDnyoePnxnjyH4YGbznuN9xf3/HMMb+PqmJQc4aHJxL7c8LnE53DMOeeZ45n89cr2r+tBZlpRSGjga0qtmEH777FY+Pz4zRKbJM00Y3KMXUWM2hCkOPqGiVyzSxLAunKlynmd2+4sKw7S/bmU4vftGmP6dFKai14NwqtfjNSLbep3UAXCmXC8uSVK8aHhmGHcO4Z388cDgdGXY7rHd454lBKWw2Oozz7A93fPXuhDWWnDJO4OHBU+cH/P2RcRz59P1HHh4edG91ldF7pqVANQxhx5vXb9VYKicuT4/QVJNeamHOmRjV0n69jmg9Zcq0pIM+aUL0OzA7UnJclkwIQjzaziqAViNPZ8EvmSlljCvEQfChcbq7JwZLKo1lmqm5gNBzIw1pnim58Oq4o7nAh46cG6NSha3RwWijuzvi47C53EqZyaUxzTPOeJyxvL47kXzjXJ95Pj9ohqMb2A0ByaqjL8aSnSFRWYDcQCrEuEcESl4oRFrwzEvD+oFsAqWp419djdxMR6e7Hr/0Bjh4r0Pw2mhOo3LsigoFj1xhmWdy1uEQVWsaNVVSKcRutwPj8E4bGkTvmdg1kqpBE+J4wE8zZF2fYRjxcVRTkNpIeebp3AdElk7j1ULVOoepDR8VRWsv7j9Gz+qUM0vOYLQmG/p5h7U9q6wjh1WbCR3EaxNl+tDH4iDYzhxyqpHzsTtEN0qvE3T4ZBTkzUI6T3z/8JnHPCPdkK5ZUZMUWfMV9TpkZZuJ0Kw2adIbPLEdTdMdF+PU/6EZg6N1Scc63IwELEFUSlK9Rso4H0gpkx8+c/zqDcvlzIcLSLZUIpWAdSO50eMYtCmtUigtgeh6kVopFAxekUvv2B0OhBGyzMzLmbRcqVJxBjVq6etMBKRp7rHt12FoWKtf56zFIZi2onC/+fVb1rStyIXdGjCwN1hU9FBcp3grwlbXZkNkK7bWLnY1VIAvm7Ybj91uAkKM0lS232adWMiXNEHoBiXIhrJhTefabsOBF9/nRXZVh7hXQ4mt2ezXYaVbdYsG5/ZPt+u/fXSUq+eJtdZ6rlLpYcZ6T0rWg3q330Np1GXWxtN02iXqgoh7gSpyo7HdGuBbcd1KVdTT6UTOOT1wZ7dQu6ZmRZNyq4SiNril32+Fo
FR3ZYzazy7zQjEGEwPWGqJXLcpcK61ooKd1UKWQ5qVPqyzRRVKeWNKe3bgjeLVTxgilNnBRJ51R8F6UHpZBWiKnCzJ/xLeZN6cdH8+zFlpGOhyvHO28LFCbBmH3N2QFnqQjltbqVO18rp3Gqpt4azcI+ddpuV9qKxB144ROu+0BzGU1gVl1jxsCczMiebnGEKVZePHb+3mby/U1ub7J25P35d+/fAZun7/48zX2wvRBie33ZPvm+pMVGV7vwwvE+sWzWltlGAZOxwNOEhcM0QWmJXE5n/mJc4zHPflzYpmv3L1+x6XN1CbdLbaxXGfm65m8zIRx3FAdsUpZkiTkJWGM64MdvWDNkTNY3y2wWwXcjb6WMimplsKilIiVfirthlWqdlAHEc47nAvshwPDsGeaO/rsLdZ7WqnkXInDwDjuCEveDkuaUhjXPSHGyPF4ZL/f8/btW+7uFJWsrXGezuS8IFnvoVKShN1uYDzsCOPIMI4c707E3V6b7kWNEGxQzcZ5uvJ0uVBq04apCVU0mkH5ipbe8xPiSByiOmUadFpsPXQnLEX91UwpDo5h3Ol77rThdaa/3yJ9oFJUk+lXTZYjAsYFVi2YRWmB1ljGwZGLUBrktPDh/Xfc3R347ptf8ad//Cd8fP+ev/nL/xdrDKUkYgx9ECbsdyOpO5EZdC0s06K61bywTFd8z6EsJW+ojLWh77/qYrk3+0576fqYBmo6sg70vuLXs+EAACAASURBVGRnrGwMMYI6GBZq689Bcxi0WEqLNm8SI7ZmCo0wDvhxUIMAqwhtKw2KFmG6T3oGP/SiQDYETHWo+oox8vz8pAiJ9RpW/fjEeDgRhpElZc7Pk+rG6BlDQ1Da2H7P4XBkGHc6cMHg4oCUQst5G7r5oOYKWF3jL9Grlx+maYP++PiZ6zJjEYboMd53FNGAtajqWQu3EJQeOV/OfPfxk6Ii18TznBnGveryTNqoRSuC9vnzZ7xVelRJi8oAqhbWL5u1l+Yj60tzl4pO1EUoKTFNE8Y+4j56XFB2jAtamDvnGHYj+8OB/emolDjvu4OnPuv3x4FP+4H9eOCnX/8uo/uGecrEaNkfB9796B0/+9ufU3IipcJ8XbC2UXN3FfQBpJBTwjjPMDigDwu8lrRhiIi1uDgQxFKKZUoCOGXIhIGEGuyE4PHDEaxlLhXrAzFq0L06F6paKIbAcl2oOWl8UWfrtFJIy5VpumgGWkc/nHc4bxl3Az4MTFNiWqq67YknL5kQPHEIPF+vnOeJXRg5n888zVeMzF3n2rVnnT2UUkXcgETIUnr4s8MSiN5jU+WyaO6nhEzxhsVqnMzShKfLhNSFljI+F4LzbCiO0QEVVvctZ31nDFXU889q3JJXd7/Sh0zeGZy0zSL/Ok2UogOFEEeciyAasm1sgL5PWPr03Qs2jthc8NHjhx3VqtOlEShiqEXNLkxQA6x1KH3TousQzIb1DGuI6wPnnFQnLw1vG2MpxBY6RdtjnNbXtehwcq1nNz269dQipAYt2y2/1Xmt/3SHbmp00mty7wNRDOl84cPTA805mjebF4Ozq7OxAiZiDM06ijFkoFiDBI0TcDUgLeu+vTJQthq/Z+NKp5iL4OPAYBx747AeslP9Y6vgjaHmxOfvvsFET20H8q8Wzk9nlqWxNEMWjUPxFMRkbfj6mje9tFFqplHKuvfaEBZY0sI8LaSsxjKhGwE524O0tRfcaiHl4AvWGQZvGYbAbhc2Q65/7PVb1rT94GVWzzuzLdKVXod09K21jRpiN/eOW8Dh1gC+XIhya0LWz5VO+CWypGjHzfRhRR7ErE0hW3G/IR7StsVvNtTw9qHfjC+K99oapVRSLgQVOGxNjyLpWgwoWLcphdZLBbpdcW8ArFXxLKIOkStEXlrWPC8RFR+jFu9G6I5RN71dq9xciORWcCO9ERYtXESU6mSdw02zcniNaoVcb7A3CkrTQGmNRCgIet3X65VnY7gLalUs7cb7RrQYldqoVGpN6n4lFYPt2jwt6iyWYpVLH7wiNcYpX3sYdcKHdUCllYXaZlyaCJLYBYPxhmbadlipecSimq5acWLZcn9X9PeL5Wp6wZR7ttutMNjeTpEvWIbrxqsNrBaHOilTul3OmZIVRcGtCIiuAts3wJs+rtN9asWJdNT19m9+/fl60V/1P1jR1RtKvD53t2t++Xw06TmGTRV3tjsBvHR5bf0aX16z/p56XSEEhmEkxMg47qA0JFfSkrAYdfirhegcaZlVl9jpCtTCNF8ZB8thHJjGCAi5KJrcUOShpUJJuVMd+mAINXZYcuJ8vZBq7flzFdeauly1uunMfFAtiXFaRKesdEpnXN+ltOCvpeLsQK3C85Oiwj7ooIbaUbtSuU4zqdSt2MdYdWbsh5NzTouoTh/UYveR1hrzPDPNV0Vvm3pySVUUTdAD5PWbN7x69xZxhp//8huV+fR5Za2Zab7y9PTIPE9do+S06RCD6jahpIrgcEGdD4chYK1DakHdKId1lWvT5oLuULZyPB71955umj/r9B7afsCvAzgd/ETiELHOK+ps0GDS1m7U9/4A1pz5/OkjT09f8Rd/8Zf8+3//f/EHf/Bv+OUvvuHzp4+KlOTaz4nGdJ2xLpCy6q2C9Tw9fGbY7fBBER8AHwO7cWA/aNFaqtKhUsq0jjStqGxrqNbFdHdWYzHmRmlVvVTRQYBdf/es2YfG4Gx4QaFvmrlHgZqJ+5F//Ud/CNZzWdRkqrabLb2awxSmUkiSNkR2GIYvzptSykZDMtgvCtImMC8ZjIaag+rFpFaWUpDrBJ8f8Z3K6kJkt9tzujt1fYoyB4xT9sXAqOeA96SUetO20rf7Pt4ab969ZZ7O/OqbvyfGgf1hwKyDstawXdddU8FII7pCeH1P2B8p14mKVTrxvHD35ivG3R6eLmBqH0bpzrvMMxlI1uowxRilXHE7f1+ewy/pyLbvg87oOWCMnpVrw2JaoaZMTUI2ShF7fnpSYwVn1cThsFdkylp2w44YgjY5lyvzlHj8pCHmx9Me5xrD4Bh2joeHKwe5w1nD54+fmKeJMQ7deEY3bWcdu3GHGEduRSnEtRLFULNqH+PhwBAtmcQYd7y+u8P5yDVV8q50IyI965TC6rrLbu31gJpteet4XmY+f/qIbQtSJhqJivD8+MA8ZyyWZZlwzREHjT0a9jtO968ZpsKwNJo4rNWiNE0z3u+UTTGOWHFK6Z9nkCu7vbImpmnCtcIQT1gfqcYh0WJtxIuaueUpYaaEvS6QlJWSS6bFqLr0HlnSSsU2dbe2K8tCOjLrXWdEPCnaFkecE1rRhkeao1XpjBfda2tO1GAQUdaGdbpnSKuUVHBO8H5lT61ghKE0MKbimsEYh3UD1mdsDODVPKWKmmG4YAhjps2rlrSfpF3jVWqP0zFgQ3fslbat7S/q1LXpEdX2YhU1W3NokdXAQ41p1vrXOtcjJAo08KHdtIpCp/opMuWiJxrwBFgyz9czUgdqTYgUjLeE4IhWHapFTKc8WvBqqGZEqcOmOkxVwyonq3EYvXlDDeFWedCK
mFpLEMPYGiKFaiq5NMpUwHgEocxXPInf+fHXPC+W/MlwvjYqjkLE0ofgkhEJSLu5H+u9ls2DItXCtHRqeNaG3drQg0bUtMViyVX9Bah6T1uTzXUSHNZB9IZ9UE+Gf+r1W9W0rYX6rUhcm5wb3Wx1nbPbpPsFNeIlYrEWlS825R/SI1c92/pa6WxffH1HBNZmr7JCwf1peFmBG7aQ401zB2xGKmuR0n8W27TEfIEm9nNHp4LSUY0qOmVFHY3WCe764182pivlS3FKux3epRY9UJQPp01qd9pZr8t2HUItt+uw2O4CFvR3q1U3V6cPibGO6zQxp0WdJfsDtB6ItVZKWU00bG9C0WwdGrUqKiZKlFGKWkdKW9P7p0X9nloh1UUnvdI6KqGcfoMneOWh+174ieu+QlIpaSE3wfqGNQveZGxN2JrUdtibnm1yazHSsrBMM63qTG+jIv2G+369XoEVNVVes6zBQb3h0SnvrXnTRq1vlKYX1dI/7/zy0jVYlrVl79N88+X6s9ZuGR+tF4p+tUn+J17tB9/qpVXObfosv4bMffE/L/6tWqC3H3yR2ZDxVfy/uusNcVDtTc74YcBMM9NlwnvXueJKFRljYAiep8cHjDiWaeZyfmQcPePgmaJjHCMpqUBb19L6HKnRQilJaT56FzHGcF1mtXtvDUx3V+xGO65P0TeNjqHbZlsGB8Z4HX0ImlfETE4LzkXm+co8XzmeTrjgkdqf7z7ImOdF3/tVJ8Ct+R7Hkd1O0Y21AM45b8iADpka1tCL4UJJi0YI7EZCDBjn1HXOeVLVZykOEe8dy/mZKVXEefy4Q6RRaNtAyjqnznO14OKIx/bGbp1YacGB+O3AV71xp7dZtULHQHk+Y3oXJkZ1ZopI9X24SqcgJtWu9euyHfVpRYt/a51SfVAziFKFX3zzHfdvfswvf/WRH/30n3O8f8P7D58oxWCaRgwMww6k4oxwvDvovUBRkpQKMUa++upHWKNBxqbOWKP3Vxvc2vMvUTqxYq04F3TMt1Lm+/PSVmqRd9szXVuhkaltQYON6e+d5tQF5/S574M5HyNvf/QVS22c33+gdWpyKbkP7GSj1NRce3MmWyM1DBHn/KYZPBwOtJJo5V+wOx7ZHe9ZWuPh+bmLNXVfahqMxApFiwjLNPN8vtJEbbl3+wMhqMvdOA7qNodaiwejaIKxX0bTuJIpRZ1c92bHm6/e8unTdxwOI3d3x9uAqhR9BntRGL3HGWjWEcbA29MdOSs9cMmF87wQum7IVHW8s6JFFb1Z1WGh2c5y+XJj+uK17XetYX1nokjtZ7ggVfObrNGcM42AMJQsqu1x/Xu0xvPnZ1K3uX9qnzHG4IMn18rj4yMlQauJzw+Jy/Ujf/fLv+mmBwceHz8RneN6vjBfrvzo3Vt2Y+Tp+UHphN7zL//l7/Pm7Tsu88ScF67PF2SpPH16YEmZUgzn5UIzhmEPJlwR03BeHS6d90jPv4re4oKhdiq+osnqwBeDx1tHzQXTMq1mrAMxjbLMLHPutYRS1oUR4wzDbiTud/jRczCRJoFWwOWFy+WBeZ6ZlwVrbN8LlYZorOV0t8O5xuUhcX1OlJYoBFpUSppzYNHBUXq+YC4Th9rwVvWrS0uU6pmbpS0TRhp3+x2+ebI0cl1RG9nW7tqcXC4Xjsc91jtSrUhN0BwlCTUnpBWcHboGtuFouM4aGsYdTRzznMi5obMgAzjN6RKhYbXxk4a3XlkPQbWNDUtldaXWGfNut6c1jVXxPmp2JmsDpsu1dgqy1oT9zDeaN+i9p1XZ9GitAx0NdYuUlS1k1N17Nfxr6/3p+7VIpRhF0G2tnUYIVjQH0dr+M1rFi6GkhU+PnwnmHl8TxjTiEBh3A1F6CHV3GufO4OOADXHTeOfuRpmrIm1rvS5V9zg1a2FbM8ZasrHYUglpIeeqTpK9Vm7VYKzgpdGWZ66Pv2AXdrx7e8Dv3lGfA/li9EwvWh87c8JIBFH9urTaI27cVmOrj8RqHKj0WzGWXgJug9ia1eRJSv+vNHVsbwLNYUXf/c1x/h95/XY0bb1QWe3vpWcewFpAKu1Imu2N2ko50X++NWyr3++LP3uJCqx//rJ5+wJF+g1/1prg1opWbvLPmzEJqutZF87Lyd1KD1l/vrn9u/XnGHQSF0LYCveVNrkWNdZ1q3SBWnTas8LYpjc3pWhuTs5KD8idHmO9TsZTNwKIMXZaI9AqranoVDYaqU7y1mmNd47oI/ev7tnv9izTxHS56OHqLLnrgHKtVBF2u10XvgotFxVvZlg9zxRmdv3QQ21OpTdeooVJLjo5TMD1ulBqw8eR4+G+O2Ql5mVimQtNHCVXdegbGvd3B8ZhxFnV/Ezt0nUeSnXRNaI8YmcgzzNmuuJ2J6L3fRLb9WaCOhH2MMouW9HJEOtSW9HftUgpsGWzvej+XjR568u+WC/SkbGmsWO43siXpllrrVaM9Z2OaHuDZVbYT58Uq8VeHOLNafX2i/76Y/eb//jXv062K71dCLde7QWA3JGI1hHedaTyZZn0pTlPI+fEPKnpCjXx6eMnnh4euTvdYYwhLcsmUD4d9zx8/MTl6crT0xkXDKfTPZfpWTVhOXfMS3qotrwIl2Y7/FQ/Y27OgVZ1Ab4Axm+OfFJFdbRGm7x1lqTPuleKCVaXtzXMM0qhMRZvPYOPipQa2wNhu51wRx+gW/53ZGuIA4dxZD/u8CFsGl8dQqjBjBgU9aXRaqZmLaRqt9p/+/Ydp/tXm2GCtYEw7nXDNJrvdJkm5pT7mu3IEKYfzlBywaC6AYsK7V13qDSt0pzaE+uBb7dGpokSZkIIisJ7r+ixXQdv+rFeb6nr3qkmLU3WfVenkJoN7LBiO5PC4EPEh4HcDCkVrkviL/7yr/jTP/0T/uh//t94ukx8+vA9UisxeIYYOOwGWrqSsmomc3cZC95xeT4rKu0cl8szJk94MsY4WtOMqpIbTbTophtfNNFCvjY9hG0/I9YCY16uzMu8TcNzmUjpCtiNat1qw/uhD9YSWId38Hg+c/67/451EY2R1ZB0a/0W7ZFb2VzZnFHka0lZG/2sxdVKrd2NA6/v7jnsAnF/pBh1QHzz9i3TnDg/PpGXpEhzLZu2WmTVwOg+nWqjXC7AFeuUTTEGpTvFIRJDuO0tnfqjtKlbLIFYy3F/YDfuOF/OGKMBx5tphXOIdWq44R3R6WDQBofxjsEHxsORzw8P1KquwtY7LSKVnqH3lo5uNNW5rhb0/1jT9kOapLNyc9xFnRBlPXObFqp6pa5HmxlWq/tmBO88g4tIFaQK3hk1CjF6XqdUcVYoRddKbolxd+Rw9MyzRlss10nXTi7E6BnHncZqlMz5fNbGyzhqNZRscAQwYTuDELi7P/Djn7zBSEVawMyOacqkNFFa0WgLK4SqmnPnLM42pBWk9iy/Th/XPVCt16Vx08NXvRazktdENtOM0teQcQHXpQ+tZVJLWBvYjTuKzLTjkeZVIx1jYFku1FI2t9xUoCCI1SmjxUE1ureXBmWmmQLB9gH/grGRmmekZaKzmvuWFf2
2Rq9t4y4ZQ4xjjwzqNERUOw/dnbvXbKFHpazfw3tHrZ0mOzhas6xsBesczgQdTItSSGtTjZuu676fmabTjq4Hrx0mi8NAa43p+tzXIN2Ea2WBWejeDq6jToquaTPqvaeKhsMbTKfxt60ZK6Wuygutq/vQ3zpL9F5pqbVRpGGtDojIC9IHmmqqokijd562qKFGWhLLU2U/RmJVOqux3VEU2+fAeo1mRSCcxYqaQjkniDOYajeqtbQ1102bNulMH12bdtt/a1Odoe3PpEGZYaCZfunyBAdL9JZxMPz0/iccfvoTHicLi2F5+J6H83fMxlMWwzJX5rngQqa6hCvdjVoEcU4/8IQI3mqcQpbaayPXfy86SNKnWqyD7bWG7N4D/4Mp+29H07aiXrltjQorOsX6OX1arsWxOFF41/b8MhHlixqj7jTSurdIXxToFNE5txXkpiMeanfRm7C18jR0rZrtNKmOE3WY9mbt3icO0mi1QFVKR5MVNdOE+VQapQmrsN4bi3Grn9wNiVvzuqzX5rK2trn8iOsNlTEUg1qEGxRu1+H3Nh0x1lItGnjitLnsVavyxVEd0hqUa/BoZoRFbEMzcJU6GbxljI7goXiLCZ4pLZSlC1fVoo0QI0OI3elObfan6UIrBWt1I7c9oBlpCD3Y0hpt3HLCEghe4eUlN87XTEqq25AWCC7olMuNOLewzAnEUKl8fvrEUjKn04nT6cTxdGLHK5Zl7g/x3ItwQ3CWVrQp9Kkw7IT9aLnUpMLXjkAu04QpjdA30pX0+AUCRdsaQ31Ib4XwirXSH1hsLw6MNgQ0RUOlKv0xxh05JapogGZtnYZmHK4XbJqV2Sg1Mww7zbXpIuRK7dk9RtfqSj3oa6vKDcldf/sXD+IPHkuzbYjr5G/9smLUmdF28SwdpWmoOYmYRskLdVkoJdNM7YQBg3eOVjIlzzjr8GQePnzDMATKsvD543cY45UuOYzU1N0bW0HqDMyk/EAMjbDfcS0XUqvMOTGhYa65FaxpalwjjYpa9yol0nR74X44NkMRz37c45esTogo3doPeriY3p2al/emu0Ld/r7SypXDbiR6sM0gSfPb0nzlzf0r7BpwW4BquDu8wtX/n7o3eZYkW877fn6GiMi8Q1V195vAAQIJ4IlYUGY0STSI4JIbGbf6f2USKeOGoiiChJEgwEfg4fVUw50yYziTFu4nMm+9BrR9TLPqqq66N29kxDl+3D///PsyRRw3NwemGJiCZ4yR6APR2wxFLqSSjKrTE6UNmppcFy+4GvjxT35GjCO1QkmVtCaONyMUleN+fnrm+fmZZVVVwGYzkHp/GyFERf5EKWXB6f730hApti/UdNcbAKKNey24vFOK8/2bO50DSzpvOAQHrdj8I3aIWXdChFazedyoeE2zQzlE/fxKu220dTUALxG8J+Vn3n/3X3n37uf8xS//nN/53d/jw+PP+Vf/1weWdaOkjfNL4zwEhKRsA0vCctUZiMl7nj89EcaRhmdJ2snyFE2SqPjQ8Iiyq52zYXpwvoDLpK1wXmbt5qfEtq7apV83Si5E35CW+Oabv8CJZ4gDN8dbjsc7vHV/xDUqhVyE8/OZX/ziv/LlV1/xt3/77+JaIQj4IFZQOZ2vLKKgooZ2pYQ7rwplIpyXhVwr67oSXp4Zi2f0whYjLTSm6cjNOPLl7S3rsvL49MzT8zPL0mfEvHWrdO7S+y5+VclFE8HtBK12ywHtxg3DwP39rYIuST3uHCpS8PD4gb/8s1+Q5wUPZjg9qkqdFAKBMB1xQfdALRulrCADwxC4OYwgjfVUkXLi9u6Gw5t7/JY0Qc2ZXUh7p4ayjwE0Cu0qrl0DZ6125T2b+RErXvvArsjeffC+G+WKNui796DT/eC6mFdr5pulrJVaKkKh1IVoXqbDdMfoPOIi3k3AxpIzW9My6dPzIy/LM64V1m0hl8x/+JN/p16oPuj3eU8Mg4KM6IhAKysHf8+P7t8ZoDhwX0ZOLyfmZaYrEeacqDVZtzVTUiAdMn6KPJ1mnuaZGiNbSfhwSyqbgdUKtIusCIlSoNZESkrnlm3j5VyYbj3RFxCvHeDjG3ytTDdf0HJhTg+8eSfUxePaxHGCsiyEQYhTpLnKWLQTSBVcsVzIB9bmKcyUtilA4CLj8Y4lrcRWiL4weg25zR1Y2hONTJBAa55KIGdHbQMpG+XbBRoe7w4go55dQ0HGBMuG9wPBT8rsEdHuGOaL6bRg16GdQM2oahC6RmpqBryICm+I05m3KogSZBTQNICg5EpugvhBn33Srn8xMSDn0fsqly5an4MVGRiHG52ncw3fvFo/ZfTziwqCpZooKIio9G2Hd1Fn70reP09pjVRWQg34qvOECpJGRCDgKFEo5sV5yAMhZWYpnAbPwXumMSBFO1K5CaEI+aznkBOHryrSpLdNkGZq2da8UduBC8MJFDQodTVhNFVsbD4ipTI6T/KZ1gpQkZSopxNueIP3haeHDzw8J86j4+arN3zxoxuOP7nn/DKwtpHx3TvmfCTMkVjAs1DnM6dPTzwtCxxvES/8+N0t//Dv/5gf38Bf/vIb/uQ//Rc+vryQlg0fVGyk1apsjebYijUtREg1kaoju7gLLf51r9+Mog2uNv+l63VdsKmsuleZ6BAheIoVLlHcpWIVzDftQifrKo3ARXZ//7mi81m9b9KT2dZ2GfXLL1skhuh1ZKPTvYoNX2uFr0o4zRCES3+w/2C9Nmfv29CBTLHF2YoKE/TvqLTdQb41R7AuQm2GjqAsl76Q1fFeaQDblqCAM3pNaUU9UmgqsCGOIm5PZnZXeXuvdV14enximkYQx3Q8kKp6sfVXq6rsJrCb0OYr5bWL9o+qX9WWjWYFpeiN90DwQgg6pJtN7WwYJ5wbyIVd9CQMIzGOxLBpx0IEmloCvP/4PR8fPnJze8fd/Tucd5Ssw83NCUE8Q3HkZeN0XhhKwufMYRzY0qbKSU5I2YaaTUykYVTCHUy4dJk6UiLyejaiixRcblR//FeCFk1nwi4NYLk00LgkE4rg9n9r+z3ts20NpQKlmnB1AGcdv94O60WHoN1aW/C1aqBz1z+US2G6e801U3RqzdZeNUKrvWdTc8kqRjophVrUt0VNwB1SL/YRNJ1xvL098vbNHSE4HjaVci9F7/UwTVrw5oK4SoyOOHi8bxzGkRY8J0P9XtbVxDM6EovSBnNmy5nzsuwUrN5R0qRMSKWxbnpodbGja+S9cYlLnb79qucoqvbpvcrui6h5tptn1vnF/n3Fh0grEKPj/v6Wu3df8nA+UYLncHuLl8bBUFxpetY7EcLoGYi6/rzXopxqAFEjV+XKl6p6tsMwKT1pGJRiOg48Pz/zVBPewRC0EPRBjZtrUZAAFOTJORtgE4jBYy0E67y0Szy0Lvket53gRWcUX16eyTXZc9ApbEV3u2qv0Xi5GLDr7dREuYjsVMNXSGSraqTeKn4cyPnM8XZienzD23dv+Pu//3v86Z/+Z37x8J/IRf3p6pwZRkPNbb8Mg6einbvSVpbzzHC8YTrcMs/P+gyDrYPamA
4H23XOmAmOZXkmpbzP7JamwMwwqT/TbcFURSsS2Dun3uk9GoYJ76NRffS869PGOZnZO6ocl00lGJzNBvbOe7WkpeiMnbvQ43Mp5HlmOZ+R99/yPD9z+PItb/7O30VqIZ3OeDdwM90SQ+B4VOnzed7IOZk/W4LcyEUtTbwTUqdqSsFlTdxENLafzjPOCafziRi9UaCUlltLVWuY05myJsJo/pbWIXA4yJWaMiGM3B1vCb6RlhfytnB62kjLmdu7W4KHtC3E4xvu3r5T9VFTzAVMDl/pqmp7UrQT5Nqeb3TJ8Z3ZQj+DxTrhKK265R2s6+JRrxg8FitEbMZVoNS8x5DaLom6VKyTINaJFG5u7pmOtyxLohTzmxOj2DbBB1WOVmqYzQSnzTovFfENCNr5bcUAYFHbi5R4eXxWMDXoZwtSmKKCMzpPPajyqY9a/DeHn0aqCPdv7xmHwJdfvKNsK+usnpbrNrOmjbStSiereqqVKjgCpXpKcaSc8ampQu2g3REfvcUFeHr5xPsPD5TthcjK6DIlQ2uDecU5FXIwsJyyKpNhCPiAzsKlDSk6RjKFkeYn+/9N1XVN53sr2h/1TthyAhvN2JIJnUikUlgTTIcB5wbO5xPrmlWg5XCHzJm1OF7OmTUn8xX0uCakVc/geVZhDxe6DZB23nr3uuflIXj63Hetlfll1l6msav0kHb7HKyIJ4RB/604o+tZU6GPxvQ12TNgUXEQpKpojsBic9gKFmle4MREu6w7pfZG6GhKDNZpzITg1VJGdKAFvFGDNZ9MpVG86OhKguYyWyskJ4zeMYQBiVqYilf/xShKg/at4oMW47iGl6xzdc1TpKifmt2rPq97PXqEzRSXPm+LCoE0r50vbCZOSmUcNA+kZk5PT3yfz5RPf8HdXeDtOBHliJ/eUWJgenPEi2MaRqKLKh5SINTMT3/6I5aSeHx54Otvv+b98p4PH194en7ifF6RGPBDwPqHNlOoCpRQLF9H82WgXjEBoopc1QAAIABJREFUf+j1G1O0AZ8D/dYlu3TBgEtAlo6+GvLc2t4l09/sdytkeqK4y/H3H2lI+aufexWMr38XVCHp+mt0VqyY2MKrN7m8T7t0Z1xfYHI5HDThvRzofQPjZW9z74l7g05dvL4vrd8H+4xSbGi6CSUXcm54Kw4aSr/T9zKPEYEuSf/q3jide1vXRPCR6ThR90KlH3aX6+gvpb2lfd6j1nqhmlwVEP0XcpGeF3Hk2kgpawI6jsRxQkQ7jy2rjGsME/44mFqmHsRKU6jM88rz84k1FcZxwFF4c39LnEY9nB1sNnAvJgRwHG9YGzynFcSzzIvOBlQtHhs6I2nsjL1we4X4tNdS0mpO+cOvz+/Z/j5Xf93nqdjXqa4Bhxo0r+vKtkYdbi2V6pReUm3N55xt8PUznnQDRFHWy1+0y/O/6sRdC+hcX+v11+1rvPQZoGLGn5l1WbTLLQ5ncr0NHaL2Tr2qUius66YdZAfneaG9PHP75i23b+7xoXt31X0OzAWPHyJ+Gvjw/Kzv1WyG0WjUOWeenp6Y15mT0Xp7kdwaei3aUKeUwjAdVFJd5FWc6M/rOh7EIdrf6/5LOYPz3L/7khACy3lmq8JWKqXMvP+08dXbL/n57/2c//gf/5Sbu4lxcty4EX+YGI5HpMEhDEq/MerNLujTlC5bjDrQclIZ9ZyRlHT/eKPLxUiMAw31Vvr46SOPj488Pj7t+3HbkkmmgxOz8bhSow3uYtIsXEChvhgs7Oq1CEobvFovIXiWeVXfIardp8s9vbY1uN4L4jRu9eRUD2QtprHEJUaVzfdDJISBx4+fuH/zJV//6hv+53/8h/zu7/4e5MSvfvkrphDY5jOlNoaoQ+616YyjGPX15njDll+YT2cONxPH460pxRZKWYDKsq4qDd6Mqt5AWobm8WHk4AfkYB0VcTbQDjUrEtyVSkW6ZYfDu7Ab6V5Ee/R8a02LoJQyzcmOyurPbTav0Yvptt+vlC73eAceaDw+v3D+7mvG+cwXPhJu74jTDfe3b5Bl3gHEcVJBk+PxBhH4+PETHx8+sq7q7bQtG01giKPGFvv5pRnAMAScE+Z1YV6qJqDPMI0jhzipSERTOfLmGof7A9NBlUZ9RVUpMblsKodh4G56R80bz8+PnGeV8V82BSUDnsPxlhD1+zpI1Jqqy3a6ts6erNDBMuxs7mFv39v6n2Cm6jp3eC1opp26Dtzugkx2nvfOXAcI+pmrnWgNGM7EX7otSy6F0d4758oQI6UKeK+JpuiMZuvPtLX9evt+7BSyZPY7XVH04eFB54WDGhcPw43qAniIg1dPQx8YxtGUIyMiHmJAhsD9/RHHV7SUoKgqcs6JZVuNSrjBspLnE+u2kUomNyg5ssyVsgnJVZxkpAbcICBN/Wl95JCOxGnifH5kSRuLJJ7J6klXPUXUKLtog4gqlWEK3N5NDGMgbSvJj+RWoQXceMOcG6k5ttJ0rsiKFPWo1Tla7wK1eU7nmW+//2BnysC6zbz/9MJ5TYQhcDrPrMvG8XhEvKOFI1WEcxYoGSg4SXgCJVcr2IVCwDUtnJyI7nGnbC0VuSiUzaww0rbHWedMCKehwHjNtJKAcqEC7oUZ+6iMsxxV592KiTGtrNuiIKZXEFMXYQdi7Qh3qo45DAMiQf15q+YYKRcKsp/d3kda1fUmRpHuLLWtbLycX3AtchhvzQqh7nNorWGzwQYomQy3C54Aar2i/GMFlo2x5E0F+vos1liXLiq1rc+xq9KvCb/TmvmrtZ7HWyOoQRRlLkUveFsnNRXO25maN1KbSd9+g/zi3/PVj3/Cj37yW/zo3Y84hIl6SsxPJ15OL3x8euTT6RG/fWTIz2zVcV4buTltQhRhGqLVHwrgkRZolVrVF3RNldO8MZa/uSz7zSja+s2U1wWTiOsRdFfFada5ca0LchgVrelm7A+kx7SeClyrRV2jZHuC/VkhAuxBb/9FMw7KZei8qzG2WgnO08xkuSfvuzDIKyS557qXxPc6f+9IgYjKt2abFxMTteg8a2cSqliC3pUHQwiGujoTMVGpaLMlB0TpUyYx3mgQ6n7DxFzvAVUJayrosOVK6BvVONRaDF/EL1I/HMTQ9P2DNeBK6bAXfHQFOUPXa6U0pU2mXJiXSjiKddtG86mxeZfgCWEkRkVFVebacTwcOUy3PJ9eWFIin14YokPkjuAjpWYruBtbVv+jddsIh1uVLUb9k07nmZSzzUbIxdWeCyL7OeJ6vV4+L8o+f/V/v1Y4/bwT7IP6gfT7rXW0Kf2VwrYsbOughNe6S1sqfSmnKxU3HeKtiFF99L164nEpodnX+us5UGhVkzonV1979XX6vfVSLBYtGKWawuF1gS9K28y18O3HD3z/9MB5PpknnTCEETdMPJ6e8UPkeJjwAaPZKko354W2bFTvtCPRdF6helXe7PtiSxvOOQ6HA71L1kUSOhoZo0qc+xB3L8YuENMLl+v9+dmTBFTSt3R1sOY43r9FHl+o8giuqudSy6SabBak8vT8AHFQ2tNyphZY2kyftNDYY
E/HBEhqayq40Pd+rbvCrPoXauftvKzaXdwSy/yyJ5vFhvBDCOaxpgdpzpktKQXT2wyb7uuqohdeaR3O6RqkNjoU0Pr1WdfkNL8wDQOnl6yHlLsoH+7d8hBegT77vnFh3xNa7CnLIgbPOAwMVizXWljWla0Jj58e+OLdB05L5o//3f/Lf//7f59f/tXXjNP3LPNMkca2JBBR42aamhW7xrLMhOHAYRpg3tRkXSA4Mfn1Fe9QBDyYt5gN9NNMYKUXT5jaoFOz3dYAb1RSzF5i/3x9v1gst/isbIS6KxD73cNMabslaYLSmtqvWM/i0tW57vR3BLoJxXly0CK3OKU9nh4feX564WY8cH93RxgCy7YR48C8nDSpPoz8KP5I/dda43Q+8fz8TKsQXCT7/vO0Q1DMPkdBIV0bKj+eWM/a7XZBQRfxjulwIAwD67JQks5C+7YgweEl4bnh9qjqsu++/JKHhydOy8qyFYbpSK5iohrdH7Xts2yCJsm+n/dlVNquZXROTDjJYqemG8YYiT2XeN1d6/H0r4vvn8d/7Qh0Bovf68O+H0rR3GedFwNmCz4MpCWjBaZ+Q4hqRVKKdhh2P0UwBd+LCqA0VVgFZSM92p9rAZFAs7nuEHV9DIMKywzjgZvDHdM0IeOIP4wcDhPBKfVVgsNJYJoGJnenapkihAxtU4bKmlXBttTG82lmWTO5alxczytpSVQytaHAklJoKOJYcmUMgSFGJDRoidY2alVKMBKQCmE4MB3ecJwOlCGzultW94JzAy6ObObTeRgG4hSJYTBw3xJ+lWeEvczRfNKFEVcb67awPi8q5CSe5ieW6ggu4kcVcdEOpdtBrpR13s57TxgcwUD4dg2Giv5Eb7oBNWtMVnNuVVLWwqnt1jDL8ws1b9CM4te0sClY/DWrEu2W2bx9KzY3ObOuM1CJY0CYNJfhYmvklfhta1fnSkEF60ptLCnTfXULhVyFVhqSlearlH/d7yHqCEKp3dO07OAVTagZ9ZD0kIyZsKXMsiycW2JOmbYmllrYfGOsGk9yu3gg9zj3mgX3WfOl5zewnzkFzRFqZ4WVBmTKOtPKpiBe01nJrmxZUmYrmbyeaXnmZoz4N28ZZGQtifPzA99+/TWnZaY4jXuHtz8itsDy8IKUxlIyak81EnzUdHlpSjEzJgkN1q0hsrFs/y34tO0dFv3f62Sxe1EZKGadC+Pu2nYI0pfcBfWqtSmN+BrBFaX9dAGIvWgzyebrhPvV5fVulm2+vcMiOjfUE+0+KEkvInuQt2RJ3CXRaVXFP64/a//s7UrNa78PsHfa6FdjyFwpBe8vc3dYMlDN42xXqxFFuDCqDaipN2Ysq4mkUjg6KqLdPEfDkbN2sOKoikRdXtr7uCusbdtGa41xVO8g74IWm/uQuDN00Ypsb+a3IYBgdgmoCtWVmlE32w1hIAyjipXYfQ4+EIYILrNtK+SsBsA3N5TzC3nTJDSnxLaseJNud1EDfGOjVPBFKZ9qiLoyz+oFhg3763rrn+P1LMSlk3VZP10p9PNXX2OXQqwnrb2L3AO8WKfF9sBVXOqIamqVnA+WYCgfPadEk6CIs3VyL7XzBYAwQPlVEX25yMvX73uyXSck/ToteRXZudiXok0LpxgiXtTYubZMCYYClsq8bTytMzixDlKDKng3k5twvLtn2haGKULT2T8dyp5ZtxUXRyQGztvKy3nWNW+CRM6bVK+IyfVrIBZEKbA4C+6Xw6CDLZ93f66Blx4/upw5ogd/srnVl/PC4ei5e/slEiIigXE88nf+1t/Ci+c//Ml/5DAdeXh8IUwT0+gJftBOlUNnCa4OoX4VncqYzTYg2JyQ9kvdReESTRTP5zPraj49JobRD7z+GVJKtg69gR6vP2PDaLuGV+gBqHTB0qp115xegeghf3c87jExJRU6cqKdMp139fuv0g8tLrFu72SgyHDfG1U5U2o4nxLrupovZcaNR3719Qf+wc/f8a//7/+H//Wf/3P+8H/6H/jflxO/+PP/bDTMxnld2TadwtBZW6Ulil+0WLR5umXZyFYshjhCU4EGnZ9vO4KsZEZvlDtDiFslbZZkdBrS3jnccbYd4HCuWrGtHUWhWaGr92zvttv5QTPFUYRiZ0MvloP3YHu/P0d9OQ6399wdJtzNET8dcMMIdUPwnM6qdKqqo47j8QbvA9u28vz8QIgDw6Bm1tN44DDdULJ2x+f1vKsme6+2KblWkw2vOxOlJTWZ7sqQpVVaAR+1kFxFRTqaPW+hkLeN55I4P1ti6BzPLydO88rh5pYxHlhSpXmVr9/jsNjYgcW2PT33XtX+OqrbtFPhPwNZMSqu2CylGOCoQg/1Kkb0M/vSSb4GfHrs7GBqz0GwPet9BHHqIVVUEfR8OpnwVCSEqGBXKwayOJbzTGuF1gzaqeh9s5/TweYet65BEee7+FKlSKUkLRIX53jxXgvxMKg9TvAc7m6YDofdlmWaDlZkR52lE1MddCNCYBonWogM40CtjXdffIV5WVOr7WPZyC0xzyvbqpY2OBgPA+fTM9t6prWNUje8aI1SslCro0oA8YhMtDbR2gHvYDyMhHBDw4MLDG88bhBSXjjPLzQT/tEOtUZV9T/T2b/7+3u2rIVO8x4/HXZgOIYIWEHmuiflJQfYi/poLBh3eQZKV7/YSu35pWguG5z6/JWi4m0iRpt1XnO0WtRsuWRq1a5e5SI73xBaFRxNqZI2MtGVDpXpciaVjWMbqe1Gi9SqYiyNpv7CvQNsKpqC0/NWGtNwsXVxDqLTwqx1iiKZGAeFjppSX4PRoUUagw9Ig3XeeH6e+fT4QpwCw2Fg2RKPzy98/OZbnvLKUjKSK1kaKQhjUdXGIm2/352R1mNoT1NqLQqINh0vImtxWW0kI9VNqcNBc5S0rjQ3IC0TPYyoabsTj1Qt2HKutFDwvnKYJr786it+8pOfcDfesQwn1peZj+Mn/BRJVFpbmO7v8MMBd1wNtJjxEcbDkZ/8+CcMbuKX//VXPH74hnVR8TqTO6CJ0oH/ptdvRtEGV0lg///O/TcDSSuC+Cx51UPOaQfMirWikZhGL/ZeF2HXARXgGgiB10XjKyolGCLS9v/vX38dJEvOZEsqtm0jl3z5ObBLlPYis6P/1/dCUUA9qMSp8lxOqtSos3XXlMnLN/cDI+dMzRnfYPCeiIqe0FEHEfY2eBFq3fYunH6mfthgB0HQwWtRWpTOKAQOhwPeR06nkyVRWrgBxNjV5S5dTv3dXboHogImIUZFkasmvs45hnFiLBUfI1tK1FoYQmU83uB8IG8b5/PCMNQr9U1HShveO+IwcORIGweGcD2jpPSFMEyE8UBOLzqZlbMJtcBm5r9NZKfaNLlaC/t6vayVWrtMyWeF/isQQi5fa+jR5VDt60xerT1nB4DFLSvgdXatpETabhmiaKfJCibxSkvq/mB7gtHpCn3tyXXRpvMy/bq1uLN1ZR2aqiDZboh8vWf06Kh7F+vyeS7XvH9W2OdPUy4cbm8YYiBXpSLlLfPp+cHWqzAOQTshtegAcy2GvH+Fi4Fv
379nmVcydRcY4iqhpcieeIk4PKpU6m0f7oqWV4lP/1y/Ri21Z5k7XQndz0pNDIzTUela48Th5p7T6Ynb8cBv/dZvM58Xvv76A4fjPS/njZ++/RHTeMM03OCHYf/8pT8nu4fBOq4iKhjiXVcn609IJbdVcEcNjHVP6mxSsL1Riu4VXYO6jntXolNH/aBCQvrzRcWYSp/30+SyI8RiNNXS0A5Uq/z4xz/m8eETW1q1KHNiXfhqAJNXkYKrNX4d+ztlG3ROcC8UW+V8Ou17r88QK/lh5uu//Avubu443t3x7//tv+WP/ukf8fu//x2lbPyXP/tzRCy+lMRhCIQQ1SA3RF1XVUEwF0amODIvMyJiin0nA+MUuGmmmBZCP58UcCilqcJvLzoF83urRB/oTGdnynrVunMq5tSsuL0wD/oho90pM2CXpp07EUvclZUgtSJRu3hb3ux5XbpP91+842dffcncKl8/fKSmwjgecGVHaLTrlBIy9yI7cjwecd6TUmZbtQv3xdu3HA5Hcs4seeHldLIZjvO+Z5yp63WvzmYzlyLdWsURognfOFX0zPNGS5qcbmVlGgdVY/WmsBwipQnNReJ0i/iR2lV6tR1tHXTUcuJq/0pvcV1B8rt31dW56ZoVxq2Dp22fj+uJcjXES/OPK1TMwCvXvcCa0QCDFny5JAO3PD7orFF0A9u24URBrpw2Ss4cbiK1ZIIT1m3GSaZkvyf/1AvjZweMd0sZlVPfzy77O0EQ89WS3idvTY3Eq+73Uhs1Z2otnJ8fdnq+OO0wifPEcWI6aAF3mI6M45EYouYDQTi2SYHOKiagZdR47yniOcTIm7s7VYvlQpOuNbMsZ7a0UFJifTkxzzMv88KybpznlW49oSqJBXFNKeTDLctWCWHi7u0bxiny6fF7lm29AEdoIdRzp2qFeRyUFrqkhBRv8vuVaKJlzvz3eoe8s4z2/K3aZqfnBhrHWruMKnRxs4uKoVEh0U64qjvq2vROIDiSmD9Za9BFtOR1A0Gu8lwVxWuImDqdFXG0ggpxqMWIisB1lpq/NE68x/lIcEG7sl479OI1b3PGBFCmgc6Gl5zVbgPtFteGKjGHYE1Ald4vKbOuG1uDsQ5IDOTaWNbEsm7MeWUpCVcaRWAtjbTpPiw9T+m5hlxy572jXTKtK7k3wVVPySoEltKmQisOPI7WBmpuyjCrRe1QSlYBlaAsieg8NzeRw5d3vPvpO7766c/46c/+LtPNG5yMZFbwo3ZnpXB7GMl1Zq2VSQK3dwdcnDjmI+MUub+94ac//inH6Z4YDvxSKu+/z6zrWc9vPekp7b+Rok3PvHbVdbumnTVb/NVUjsykudPvOj2oNZpTEYv2Az+jv2+fuflc+v+aTvKa8nXJPhUl06CZ0oV+VkphMyPm08sL5/OZ87IotSiGV++zp1lXharbP7MF4tqorl6ERn7teq6Ktat7VYw66JwjBs/97ZGjwNjUWaJ5KM6Rm7BuhafHF0pVM2zlSvZggCVOesEdlXENmgTzAepo+a8jT+opdU0NlVfXqY9aE+gQImGINAqpFFLKeC0NNDFVbhWCsJXE9vxCHEZAn+U8z+ScjbOvyMzp/ELYAse7O4Z4IDpV89IN4fAxcnfzFXV54P3ySCrqJYQ00paZ55WUiwZerlQ9neUG0rstXWnMEpN2SfD1oND1+Lqj9v9PnezoLajtgnZ0FJEHLc5ca+RNAYJSTUGpKL3Lhwkwyuh+aF8BBHJJCC8/87I69x3UzHeP2svRvYOo96TuYIr3SvWhI4vWiWp9fcOlWOwFqdN1J0521VMauBBorbAsM/M6U6qi+t6Sklob0ehCOWUGU5prWWNByYVWtTuSUoZSrQsh7IIYpWAj/3uR11/9QO7PuD+vC4quncEuQ98pq3Yc73v47du3pOUFKTOn00ZOjRCP3Nx/wdt3XyHOU7JjmTOyQUNFD6rdd2/Fy5oSbt30WdLUN6ldH2C69mopSodcZs7nmXVd9f56LbheF6EKxtRa2da8F1i7xYEV8cWEHARHCGJxSw2Ea09grcM3jqOp7+m8gThVALsGqH4oxu50SFEqdo8nvQsfY9Cu1y6QVFiWBbdtrKAgXV35q2++43cPBz5+/x1/8Ze/4h//j/+I77//lu+/e8/Dh494A0eWNTEEzxCDXYfO8Cmd9oz4SPCOnDYWKjEOKiH+Kk6arQlCn2Pzooc+TYwiqQP9zkFLZ024ayM6TeRLVQ83sUSuF2KpZPMN1CJAbQWwYkForeyiV94G8LUoNyrelf1N7V/jnIq7ILgw2Dr2Oi/TxMQRdC1tOdNagnlmWVamaSJGRc+dCMt85uX5WRPZ6DgcJ+7f3PH8bGffyeabUrKCn727Jl6s+DFzYCAMAz4OzHVBqpr35pJNzU/3WEDnGafDLWESDocjPg6EqudZa71D7PYC7ArT0D9bcilWVIl4u/fKAhliT2BRkZ+dNnntgXrFrGnYjLNcndMXYMp7b4rfxoqpOrMszu/fM40TKWVi0OeHUYJj8DrfNgRa0YQ+p42dot6ugOQmuyUB6DxduypIL6yQaiBan4FvWvgYAOn0g6uYGkotldpM8CaRcwI827xwfnjCOY/zZg0yBIZB9+owqm3K8XjkeDgyDBPDMDJNE9U7Qgk2g2wiGThKa/jgOdzcMrUDNM/wxYHWFBSrVOZ1YcsLrSpdkLaR0sL5vPJ83mgeGoF1q+S6UqswjTcMYaR1ULNkE8gSmmm3CpqnHXwkFY3jCHgp6v3mva19va8pJ5waEICgs+TlInin4muNJga+SdFizWsnOaGgmHei+6Hq/Q1DsOISnBdycCQnCt43MWVmTOqhc740JjijOTYbnhRT63UGHOn5kGg16Jkn7BTJVvSMVismjREpJ803vN+ZFGLWNUrJF0IYqc6jM3RVWUmi+ZUy4vQcdoZvK2BeKU2ooudH1iCJa54g4NB75bwK0/W6oBmw3PbiWN9fpOclsltXSROCC0irZMlXe/Pyey2oxVPVrn6IjhoVqCIVmmu4APdfvOF3fv4HfPGTnzEOt9AGSvEQJpobwA8ImfF44DYeETKlOnJuBO8RCdweJ6Yh8OnjB96nD7w8nu35Oz2DmzV86mUs4a97/cYUbSKvC6fXlfTrzoaIyiM36RzuyzB27w7o4pJXQeuHirGdc7zXPxda1OfUHcCQ8EZOmeenJx4eHvb3fHl6uuo46SyU9x4fLkhSs27MJWm166InU3XvrpUucGLVwk4hbb0Tdino+n3qJry0xs000t7e0aaBUAqtJFIrJCCJo/lMnKN2QKqiRaqAqMjnBQVXxMX515TA3rUIQXaq3+d8Y30LLZRoju7T1GmSTlSmeBgGQt10lq0Uqm+k2kipMh48YRjoqkbLkpCiAiNTGNm2jVQ2CorKO68zWKlsLMuMkwm7OYqKeOXlD9PI7Zt3PD/ekdZnKMW8vlZezme2XLgKi3rYtR4EX69TXn3VVXDYAdjLfdPn7n5tbXbflWqIaLtej4YMlppJAuu2MbpBD4yqQbeWqvTIrKpztSa1D7ii9unPYle
A/KFO9PVL95d2BL277M+ewOwUpH0fX+Y+uqJeN6PG1nlp1dRQy54UKU3ZwOPWGMIAg86mlZzY0so333xQ2VyjVtZ5wT0+gXPM51m7P70g6UABupeaFVmKrtrzqeDlUsAo1fCCXl8jetfPTP/dpL/RoWfnvInBqNpf9I71/Awl8fbNPdKO5NrITS06li1zPN7x/fsPmviEpB1mj9K1ncMZjbd3/vTzmQeVFbmqqKZUMJrO+j0+PrDOZ41X26LfK46Uis2w9X0q+3PaNqWNKxjjEKfPPPpIznk3Mt3vheiMZxO93nEcSbnw7ss3rOvCZrNgJSW1GLnqVl4XxP09r+OGN5qbFhsGUDhhGKLNFza2Tdd0biAtk5ujuoHl03f8wjn+3u/9Ln/8r/8Vv/e7/xv/4A/+gPffv+fp8YmSMsF5cloZTZSkq1Lq0Lsqo6VtIY4DPjpeTi9Gv/Ko9pWpALaKd/UqficN1ehMrPejrpOWTeyp0pp2M0utbEnp3BqadAY050TOjccn9Zdct42Hx0dqzTslzbtoZ0jv5hScKIiRUsI5r7YPV6JANEjLxnffvyc7UdouwrZutC3hm1CoFPUg0fd0auOQzme1DPABqnZ9O0gl3kEQxmnk7u6em2ni7nhku79X6uQ88/j4yLquF4ZLLUpP994kxJUedHNzR54T5/VZix2HmiBviRiqdXkqh5s7Gh5EixURtT3Qbr8WRZ3Sq3Yn7HPntXVwsasmWnz1rynQOv37et32fw/x0pVTAbDaJZBfJYbX67qDxHEY2NKqBvSiHo9DnIh+1fdCY+owBJuN1BxhGgdqSUBTK5AYlbpVTW3P+f36rnOoTlO+7DfNd5rFup6L7MJo/ReG6zX9jt4d1YK3x0+Nt6VkqoNchbQpAGJYFrVUpce7gA+RYRiJxyPTYeIwHTlMB47TkWk6MowDLcS9e++dQE3EOOJCUCAreg4ctXBFvfRaLeSSWHOmNS3YlmVjKyvLeSCnI8M46Xw17ABGbVBKYk1nluVFKb4Fuys919L4pRL4cgU+6ll7rW8geBONUpC7Fi1YmhVwfT4X8Wop0ITqDCKvSlnNa7ZnYaBiK/joqK37CxroYvmUoAVKq6b+2+yZe+h+mM6YALbylUXR05o9R2u7oInSCRPLsiHO6zyvd0CnpgqtjxaEhkcB1laagsXuYoehP/1yzxSX0tymNMjN5uxFTClF83pxKhDiPCrAs3eQL0B5zz8vuYqzBau0yGtw+NdeTajF7U0ecU3tZY5Hoh+oeaGw6v72hqcbAAAgAElEQVQIQpsm/HRDGG9wJSJJwC0oW62xppmweIY44p3O2K/nWUVgqDxsT6zTyHreOJ82Ti8blJWaN8srDKwWbw2Lv/71G1G0GVHu8v8/0IVQE8Mrfr90lLIqAvfqqztqyauk9DpxvX7w3Qke2P/+hyiVWtCIIWY6zHw6n3l6eKDkrPNc1ZzRP0vWdzpcD6jXn9WuZ/cwMlSje6z3GTjNb5WG1j9na5eDxtt8mAZrlQSmFryDYImCExuUV+1TdWQvBd/ASdgliJUV5fZg3RrUlq3g6smBu9Am0YKv1uuB+MumuX4+rQcQ6UmaonMuZ0VU9ucdaCQ7RPQE80EYxn5PixpADn3WJ7Gui1Irh0iIfk+GlItXmaIWi1sqakQbB8bDDed8Nn+/xrKtzOu6F000K9ZsTX2+Ol+tlc8Ktd6h68/nGhS4Xh/9zz3Q9G5pX+P2tHej3rxthKEA3hIzRbB6QpVTglj3LtI1QqXv9fp3/rrgRkeJmwEE9nz6nB1XA/BWQAp9+N7mOEV06NvQ/2YHREqJtCWyNLZNaUOlNJx4trLqYLb32u1YZp5fnhl8R/BUgrw8PVGBeVnVZL014hiZhlGNbFt5bRzf0WREvZP21anXWXLZ59tau1AGPy+6sYPHidKN1VS4sZxVier2OHI+nWh5JbimaoclkXIiToHv33/HMI04F5nCQQ1Hvc4tic0VNH8tCOQMCXUa0qs2dKooNUiA08szp+cnak7KPkDVErtATbCZOO2k6T28FN9GwQyezTpkXhxeLpYmWCfDu6hJlHUUxKtA0jhN/OxnP+PP/+xP+fTxIzUngr3vsiw7CNapkdfCGT12qbmyroNeeF9UwvIuuKD7Wrs0zeikUjNZRuaXZ8q6EI5H/uX/8S/4X/7JH/Lv//hP+PabD5xfXiCrn9OyrozBMca4U0p9gMM4ElyjtEyMI/d3t+bvphRUqaoiLE2gZJyYLyUKuDjRgltNxVX0oaaiHmvVkc1KZFnmvZjJOe2xvJTGuiWdyS2VDx8+EIKKa4cYmaYD43DQOZIYKMWSnKD01z5P06lcoEbu3/zqr/j0/jsOd/f89s9/nzhOlFTwEtS3smrZ5rzXM8CKoBC8mqoDuSTr9ui2HqYR1wIlZZ0Raeqz173SDuNE/CKwrhvrtoI0nj991G6sbbtcGojjeHOgrIV0zlSSxt2eYIuqZgYDHGpTKXlpDh8H7aDSixMw5Rj9HLrITFDKvepAXsffa6okVFOsveQMGBUyeGfdwWizQ9UKmLLvp2qoajH1u2DKcSlvSNE50lZVQa+WxuFw4OXlmXWeba5a54cCChT4IXI+LWQbPWhG99vjO6+BuX7WfP7Zms2B9zjeuxSvYltroFNSu11MAz0fgdYKXQhNaNBsDrPnJ/YDGkrzqzmrxPm2sS0r6ekR8Y7oPMGpIXMw4HacDgzTxHRzYBwj9zcjcYi4MOH8SBxviXFCJEJ1KoiBEHxGwoK4yO3tSMVRyob3PyOnGScNKdBMbTcnmyt0ws3NiMi95jiVi1p3k53KCF2ZsRdzymrYb1+Dki0WOwVhWsn794LS8PQBOTY7MLv0f63WITOBGEFzuJtxxL97q2edwXS1d/ot0ZeiD6aGih8GvBsMfILDOJGWDaThXCMn7Yhd52V7p81QAhFvE0nm2VbMKxZH8wrAd4uLZnRMjMnhqifXjW3LSBSG6I1xY7oJtk4byhowLTwr2sSkDzrwfQEfrkuCa4bcNajqva1Z2MGIH+bcAdg55gfr4ppHZrYct1UKhVIzmURF2EqBlIhFoHjTi0hQNpaXJ7b0zHKORC/UAvN5JaUV5yoxAKtnOW+8PK9sK9SWCMHm7asJn5RKSeWvuWZ9/UYUbY0rYQN7OJ0eVjtH1Tk6ktWqDQl72f2MkKuZH0u0+/PqD/a6UINfH9J9jbR9do2tm25e+PrRVP1yzmx2+KrcugZtH/yecOQrywGhC5TUXfXRLoxeiJVaEH8pZ1/RCvd7ZL86bcMuXZOzrN4a0SMUVc9yKtpSRWmOzgs+Kp9bctVt6fqwvSFFwF74m0hEa25XnuuzMXo/L8pa++F5hT5y9Tl6cab3xBCTXszuKL515QwNKrXzwXWzl5RxpdO1OgKoCd153jSW+BEnwmTDsNuWiRK0hmtRD4nDDc/P73GiSYlSMPKuAAc2p9swzrjbD6X9MOzrZw/s/SBk/zcR2Ysq9s9+6Sr0P79at1WFRVLOar5bFQFOKREkQ21q5Dvq1xL095Qy3lc+LzH79W
CF+DWo0QPlD7/62rtQAV8J+rQG5fOirXchbDs6UVeJqyRB0OtdzmdyrXinNMeMmpUzqkVDrYX7+zukOp0VTRvStFuxbJvZPpjJeDO1TSol132f9LUoV4cB/Dpd8/NDodOAr6nVtRSqKJ0qBB3AD97x/PhALYng1Kfm5jAiruIDe1H69os3LMvG/f0t03QEhBgGjoej0qZcxAWvSZLDYs2A0AhovKulsKSVXPU+L/OZZT4zn0/UWizJNrq1OO2sONmfS7EZtf7oYxyIMWoh6i8UWBGVgW6Bq0q/C+NogdkanJeFH//kJ4zTuNPiaMWMwfMrz8vPi+BrxdScMzWVC6Xvaj3WHvc72BMCa9oucbAVkEpaV07Pz/z2f/f3+NWvfsnDx0f+6T/5I7795oN2/qJHGKlpZfTCcZp07iwr1dF54XgcKFW9+8IwUFpgXs60nAlOvQKHOBC8EKPOZiG9e9xIubKlSmtOZ/la34uXWZhxODIOR+t0bldFW+X25p2pdWoBEaJj2Vac0bRENElxzmlB6UzUxfV4Uwg7zUaBxpwSp5cX3BDxCDUXaioECcb4FEA7EKOPRk9M2p3wg8316GwPNBV2Wmd8G5nnmWWeEee4u70DRGnJVsTd393R5A4E1vmEqrJGmutAlNL8bm7vWF8Wnk+PBooYqo+JCDkVxLoWA2rbxt7aEZ239T2XsN+Dj9TQkOaQS4j9LDZqbpH7nKQ9rwtAxau1uxc8rercUS/YbA30blyPK506OY6qIrdtiZIr87Jwf3dkWWfWhxXvVCbcB4dHwZpxiCynC4uhlooPKlSi4B77fsViqg8BHyPV5vCLFSwp5ct8sbvsycvLuoNgVh76YStNfSJdjy0WE0ypWtCZTx/0z6U1StKcyYlSDkspEMSsSgqlbaQm+70R52nOGXDlOIyqnhqGA8N4w3RzzzgdOR5vubm5sbjrCcHhgsamKgbm1MaWNwWpRQvLvUNqc6kexzh4YrghhqC9xNoVwXWea5+n2oFb9cRsrRjzxvKT3FlT1eiXxYpZ67ZZ0QJwaApUXJcZgnrOOVuzrWjs1jnDjBrD9zzU8pGi61mqsEmmRkeQqPRAH3hz/5ab45FSNlJZCKMwjhNv3rxlCBNbyaRaSLUg1q1d1xWnAvyWm/dfl66zNz/U2q/dg2uOulZqTTobPmjnsV3Fbd0ulsuKxu1qXTSwfN41Y5v0evjX2TzXf97Fg1wfAdAUzeGsmPwhuqEgMhCCmtM3GqkWzucTTgIRoGWqdeF0RtAopA5iE/N7SwQKvm2keWUuwuJAjdarWhhY4yBlR14zLRVcVYZEbZcOOQ2y7c2/6fUbUbTtL6uoL2i22zth4qxwE8U72IFYCyACdPoWHdvo7WjAFrvbH/ilqttnbzr68BmlQfZNL+SWFb0r6pGVtu0VLcH17ooVZXuVJWiR05oq4LVKLY2EkLbEVlTOtauEdXoc6OUrHak342X/r1gQocGWFtZlZdt01i7lRKoFVzR5wEVw1uGozWa22h5ISm2kksAoGq0H9aqcajWC0p/cOxBd9vb61edwevKwP156YdVvfRdUkf0eNXp7XtHhYvNO4oVaVGmqD3cr8mvFfTVRgnaRY1ZvHc/5/EJyjsEPHMOg9yVveF/wR3BROweNRNoyaU20XDWB7qpvmELwvnraJfhYh/FCzeWCyHm3d0g/VxW7Luxqrfu8SLXv79V5qYW0aXdAnMPH3k63Wc6SaS3inXWhSiGOip73Nd7Rp8u912dx1Rjk+k+vnpkT9m2EiQOJPveLuEf/Du3o6ZByudCB++d1tpearunjYWRsjSVtlC3pvvUqHS6lkrfEd998y6f3H1Rm2YZ0z6dnYvTU2nh5Oe2qifH2zig7hW5t4JwzAOBCmXBOTHL+tShM8AOgCY5woYJdC8b05zYdDgwxEoNnCJFtbQRxrPPM+eWZnFZujkfCECitEMdo4SDw5u07hjixbSqnjDPz4mrd+Fp3s03nHWlNlKwB3ds1p7JSpbIuC48PnzifTkqv2xLjEInDwDyvKsjg1BZjHCeSFUWC06TcCSGqAmLJGR8cTpRqmUnE2ONtXwHW7UIBqmE80KiMw4H37z9xc3tP2lQOOeXCGIUYVcDAWxenyzDvktfu4vEmrd/31+eBE0ccB6J1A0stKoJSCjFMqPkNVAl8+PjAN1//FV9+9WP+3R//Cf/sn/0z/ugP/5B/+S/+T14ePzGfn4khUEtiWReGOKjYRk7kVHFFQLwK7li24URnp2pp0FR0RpzG0S1lEC1AUs7koqi1Fi9qFaBzt44YR5p1Pr33lFqZrIjWxDqpWa9038ZmXa2DrvPWqKaKJqUaPaqZyIWjtg0RlcDXjpYmQKUXgN5rxzIXK4wxynK1tV+Z59nEbwZy2tSDMHijMDdD/JX+1GNyoZDzyrasqOql7p1lmZVNESIILOeFNW9kURXMYRiUJjcccGGizgvrdmLNhVpUL69311NKNGam6ZbgHFvOSq/EKF9W/IUYjGIpexc3eIsfP5DD9S5UQVRZskcte4b9jLvMtV3io7NzEulxIuC8nkvDoCqE27pqt7hpHuDEU6tS119Oz5SykdPGvCyIeZiVkhGURjpNB7bjLTEObOtGLieQQCVQa3tFqOr5zGJdOewsylnnuYp9Pi2tOgCzI96XY0AxWr0XcjlDqs24eXGdjUafI4Jmv6P8N4SubNuNhSmmw2zUVtVPaXaeCJQO5lSez1r0Nnem+k/gA2EYGEe1/gheiEHn526Odxxv3jAMN/uc/BDUbD7XghehlkzaFtJ21rhnDIZaVU1W7Ip1TQjZaQe9z6o7lIpercMK7MVHqaL7qFZazdDqngu0Xd/cyrSaEWc6DG0noVCKgusCxlTR4nfvi/WcwRoE/a/1e5x1bBpr1jlVNa8eiNOAT55GZggjX315w3abya2ylkI20atcK2nVeb8uLNVMoK0UzGXCU5KynMS7vSuvgNeIR/AycHuI3IQG66bzdm5CZMOzYpmDetaZIqZyUwvSiqFwtpZbo+2Lsmf3P0BFbtikVLXhX8cFWbis6f5OWUzB1XmamtgRnbc5bXtv6wCWUnH79mi0XBhjUIaMgJdmypWOZoqX0jT/EssrWs3aSWx2oSh7LmcDglrbTdT/ptdvTNHWKNDnnfqmt2FuEaUSlFqVA1tsrgjRpLhpCGrUKyRfD5FeHPTCrVnw36mKzWg/7WpDFTuQRBDjLvekglbZysb5dOK799/y9PLAvJyhVhx6UAXnUf2rbna66cKQqgaG3kOxjsSW2NLGvMyk2rnIWtTRLsPOelCjKnhBecLOCUMMuKDoRLUAJE1wEsjNcc6NPBfKWZO5VCvnvLGkRSW0lVcH9Yq6J3oA9UDRGkhp+3XgKympP9s+A4civyoUk/eFd6FYavKuRV5XWbRWPEq1qC0hIZBrUjUr1CNFxc20XS2uUVOitWTXpodjpxD2zkjfyDlvpK3hPdzd3pHMj2McRvXvCZHheCB7ITTHMq+kl5mxis4MdDTJhoGd85rotLZTphBUTADlyxcTuOgJn+7Hf
hBoJ9C5PueodM0QIs7MrvUg9Bro7KApeVW55qQGpeenG+pYSWlmOlSG4S216uxAawXnL/QxMc2n2jTJ0k6MeVWVglQLoOJ72qYJs+sFZdMObetWGoHgR3SuRA8qEZ2D6WhcSdo5qFnpZJSKR3bPOSeNwesMlYjgqya+ToTRC0G8msvXRno+keVyYPU9nRbrnLWm84oiDN5pB25dAE2QmoiFANmT1AsVtXdutIh1PVzsM3iWQOS8F4DjODINkduDdtcc6vU0TAM308DDx/dITdzf35O2FR89uSmSGcIAqEEpbqbL4C9rQdwC9iw04bz8fAx5rrUrpQqtbeR14eHjR+Z52fdciJHSYE0JHwfdP6WBi6ypQhEg2uGm9OLgHds2qxpitU5DLw6MzZDWhArADHrImcrnGEeCC0zTkWVN+HjEhzOtvTAOjlaSFkS1gnhiCFZUowblrXdAsf2i1DC1KFDT724xMlpnYUsb66p0saUsSM7qXSaO3DLPOfH1t9/yW7/zO3z3/j3/5t/+G/7RP/wD/uI//xn/4cMHqI0iGtfXlBEfGOPIEAbtMJWkRZloYZu2bPRxxzIv5FrI6J51XGY09VZr9zEMA+uqYIvzOhTfmibPFRRAK2aKLrrWq2BeWFqkQqfsi5rT45UOCbgaoDlcUzr0NA34qIVnI5OrdvjCMFEW8wd0QmmZbT1xPN4qXS+O+OCZV31O0QklC8n8w8Iw6TkI4CNbUc8vVX8Dqd338GJ02zuBwYpqgON4w9BG3t6+4ad/+7dIbeNwODKGkS/vvmB0kfPpiRhA/Z90plgsK77uXPljxXkhpQ0XBrZc8DbLV0pW4R6b79WuXsCJ7j8tKK/VhC/qfvqyuNvKDgZ143Dx7kKJ3LtuRjsUA8k6UOQUkPQIx9sjo4m71LKxLIvtVU9rjufzC/P5THOBEANLqviadV7TeeRNYBpveVyeyKWSSiCEEYk3eA9jS9RNC8NmgieXGSi7dzYj76WDcNpVQoylIYK4pg/UcijXuyBF9mJGxUNURKRaXoD0ToflY5Zod0N4PauLdas0AUasfpYOlFvyLwpESFWrgyZCAe3c1mpF2w3eeZ4fHlnPD6qOy1/hvSrNDjESY+DmZuJ4PDBEFU9bzmfef/rAsq3c3t4yDINK2NdKyypg1Vv3IQaaa1Qpu2hN74jAZU6yUym7aFxXVFTg76Kl0LuveuZ0QPUyKtOsG29LyjpM1dLhXqRdRJAuqb1latVp9xxhy5oTjeOooydVKLkRnHbDxFWaUd2H4NRI3ivN3N3ZtdUKNYP52urn1c9d+mfJmbUrjxLAHdlwDF740W3gqymwbIWz025fyRDKRqyF6IRAI7VEkURZz/tnckBEQMLewGjGdNOb03O+ss9dqoG33nMvnm5LU3bQwejtzdMcrC7R0qpAThzwDaRo4VqC4NwAXijNYpiACkAJFU+tnsaowInzjOPI4XhAWiDlokqntXG4uUek8vLwyLpVXUds1n0t1HbpENbWrlbED79+Q4q2Xg33xFb/3DsQSNmRLmeeWT041Fyp0vbKWIxD/v8x9yaxtm1ZetY3i7XW3vucc++rIl68KNLpIsJ2pJ1yUlhGRknLIBA4RQ8agITBNECAoIVbSJZ7YEQLyYgOEoWQoAEmbYRl7JSVmHTazjoznGk7MhzVe+++e+85ZxdrrVkMGmPMufa58TIyLITk/fR0zz13V2uuWYzxj3/8v5UIVIVGR9sWVKMRSkecWpJYG9/WntvQ0VyyIZiFeb1wnk883t/z+tUnnM8X67UAPWo2MQlpfQG9/G8LugJVCHiiV+PUcVKBjK1hul4dgIpWKdqNoaytMmi/dI1mqLthrZV5WXj16jWnV0dkLqo0WCurKdR5B6PDSv6timmUtf59r5WnDPE2NaVxHPtz6pNDbHvuE9qFzUXvmx/RdggULTYY8tQ2eRONEIhO0ZDmvaWfaZLLYSByrZIlGhgV6b13zgVLRFSGOhiKHMaRkHba3zGfSCkrkq79+H1mPr2Ip3P1TWpdqxw4G89WDf60RxNeoHUwekU9vduuNa0LFylG/6jUEHjlIjE60nrmfHzJ22+9y35/x/sffLEHITHqtYqzlnORnoDWRk9oKKkhfc7u6zConHLOiZQqFPXSUXlg849z3pK1hhzpxtb6G3JO5LT9D1tfn4IQhWWeVTZ8XpRa43U+tgCXPn/a1m3j77BKqhh7x2g+VmVrFDQN+AejYG1y/tdCMNf3UGrz17JeLtnmdUs41Rhag+N1qbRTNqVEGAdu/TOG3R7xGmDXogGUetMlpGaCr4y7gLNqn5E2dW0oztrXojN5ZXHaH5CS9i2t85n7159oYuAULFBPP98rug3JxjUDd9Q0tWjVbrCKRM7ZAmStoWj/mTdJe+kgTqe1iZDLhWH0lHpiOjzn+Vu3nL/7CeOww7uRIdwQfaDmWXvvhmzqoFuwW0t5sn6Cc+Sr+3Hd+5azqoACrKsmbct8IaeE80GVz0JgGHdICLx6+YJPPv6Iu2d3/K2f/wV+5A/8CH/wn/gxvv6tr/P4nQfGYOpjOGoqzPmsgSjCUlab11g1VsGPYRg5HA6cTmfOJ2EctUIpWG8WWhkVp/TA1n+NAUTUrNW1xlQIWn3Q3kBdkE3YtVHYmsKnCIQwKI37im6q1VitZFZR6nSwXsbgvPnbqVJktbM1Bu3Dy0WpYkVQGtqgHm+VxkoRBY3MtkapmUaJt+B9Pi+9l1ok2/pSoQvtM1brm+Wy4MqIG+H5W2/x3gfv8v5n3+f+xT0ff+sjCJVpv+Pu7bcYPvmIYkrR7UxtyV+MkZRWJj9pz5RTCp1DgyqpgjQRL0Ovs2SQzPmsFcTRbGYata1ab47GFg7QeX7dZ/xmG0XfN0STG56cFQ3QbMmI3SM0KR/HiVJuyVl7Nff7Hbe3t52+XKv2THmn5/6yJm7unjGviSVlxmniM++/zxe+9EOIZPxyYoqR73zn27x+uKckpciDVX/aWeO97S26lWooZIkb1pNJqyzr3q72BZtwi1YdtProWpxgKeJ23m+MBJ78XmhWRhtTRUEaBXBbPKY9ZNqn5JTuJtKZB00lNLiId7FXLkou5LWyXk54D/cvhRD0TB2GSM2Z18cjNFE1ETLKTnn14hPmy4zYPFN2V8EFwfvYzwxdk4Pdo+16Sz+ftnnS4pc2zK1g4FoRQbY/OyZpSW97To/xLN7rcVbvt9PnqfegAsZFdATjrOsyBhVS2U8D0+QpqbLmCiEiLoKLyjhqoCbaA4d3pDqBCwZQK3g0Oa+WN1XYe41bc14VoCuF4DLjmDjsICRhGCuEhHMLnovFocLqHCkXjimTClpEEIe26Wkrj7N4uV2/pcZ6SrZ1JzpfFFmyn4EijoxX4Rc8Uj2ZwFohe407S6kUHPv9HknCWjNurwn/6jJjHK3dQu+RD54gWqDRwrHu47vDDc+f3+DxLGtWu4SsokJI5XDYMw2BmhOXs1DXRpvf7vk2J37rxz8iSVsLfrV64502tm99QpviXZO0d9IcI+y/NqkbyuOgFZWv0ZFe0u5JBYaS
OUN57U80s9fNzRnXd+bDjz9kXs69mTwnPVhCk4C/GvEQgkrmupYsaFNutspDDFGRtdYfRkBEN1qlnTUqnQZ0vYQq1eiDmXVNKo7vGiLbBhTWZeXh4ZHT/QNDVaqfyqprRcg7RWu886aqxZOxefNxzX3vKpWw8cTfeN2Tg63RBN74XaNJxXGillET6RDxPlqZHqQUHSujAsY4mJKi9g9cHwiqvmZ9guK0YT9G4mDBdlBkM6XM/cORIhG8MEw3zJcjp2VlrUWtAWxIDBRsuUKfY+3av+cQvx6Dq7H7tPFtSbazg6z3ZqKBX/tsrY2oEMsQI2WeqU59V16/OHH/4jU+7tjvn/P8nfeM+pdZc8FKhFQaNQaT3vYMYSR4bQr3cSSMilA6J8zLRaXHRX3RCk559a5tmsZ1l6LJZM0GkkAVT15XpBTWde1zp5k5Xxtwz2aNoUNhzdZtPGsndW6ghCUhXI3p1uOyJSsteXO9qrn1p1z/fB2IjdNoIKOupeA846hN8v6qH1GL0QE36N5SRQU+dsH3ZNG1irp3RGvsVvplIMSRGHfbEVTrRgHHDve6KbS22RRjJOfM8fjI5fSgh7RUkwwfuo/kMIw9SYuDXn8qRhvNSj1uvSBtbH0YkZps7myJcCmFnFakCjEMHYgQIjl5hIF3Ds95+ck98zzz3nu3+FAIMeNCRtxiVhBWrW2UrL6nWJORJeq+VaursNg+1/rwhgbYtaqLq1ZJVUDCixC9UowD8Pd+49f50R/7QwzO85f+wl/mJ/74P8ff+ZWv8OqTF8zno0pzSyUUqwI4zxiVDUBtNgeREAbWZWWZL4zjxN3tnmVemJeiMt0m7azS/Naj7bV/SUE97U313rHb72FZ+jzagrq2f5hku+0+b/ZftvslUrXXNURC9FpJsWROK/56PSllxCpRzoUegNfGUDATXufUrHtdE87OYb1NmhiWUsgWSEcDNM/LQvRKZW1rSNedXkfrjxzHkRBHUjb6Zs34UnFLwuei3nDTnqUmZIgwqFpemyftvGmV5pRSX5dUo4Lamn3SM3zltygIMTpKTZwvK34xu5kQ+vnknKBkQ/ckVmgJ4zU9Gtr+X7rIRIs1vG++hoHW6637jdK/vClQOO8JRIZxQGTPJg6UcS6wLCtSKqflYgqKlXk+895nPsOP/oGvsqSVjz56RTre8/z2lh/64R/Gf+ubfPTxx9yMo5me6/6tCYNSskD6HrudQ2g/VwPEHAriWE+T9y1h2ZRk3+yHu36/64R3e9h+voVk9rrr7Vz6XLcwluZHaC33lLxSq1kk1KoCOs5YE55O0fStx9MEvGou/f1zzpxPJz3XRdQP7uGxU+NxEAaIg86TYGbi+r5yda9Dv5/X61jbZZ5eZ7tG35h7LUhvqoe1aS90+Bxam00fi09L2gDjd9GUHZ36ApaqirlD9MSoqtrn04XLZaYaY0II+Bh1PjpPDJ4hamw2TLcK1uoV6Nh6z9jOKxNOYfSEODKLQ2Th9tnA7bORsp7BJ4QV8RkXCsFnBlZCXZmPGr/keaVmAzhB7Q2oyoKzpD9ggTYAACAASURBVK3NCOdR4RL7e0XvoQ6JToLG0OvtGV6T0QaIzUsiBwX9MWZNiCDzSi2FwzRxe7hldzgYFVhZPa6f2rqe9P5s93W/j+wOA2FwHI8Xcj6TloQXYT+NSAzUtLKkTKkWf9jd/X66Gu3xj1jStlXJGhLbetTac/QHDIGzalm8kvJv79FvME+TuavEpgWFPXBmK3lXtFze6dmlMC8zyzLz+vVrcsrqD3a1mV8bmWLoTbumdg212k2Xq8AUmwKWJDpLZsQ8N/Qt9ZB2XvssNnWoLVloiVAIXpHHhvLVaspOm6lp6xfR7+57MnK9CQjbxnw9ZjnrtTeKR/t9q3R96iZ+lbBtIhX0hJYQKT5Sc2KqjjhO3BA4p4WStJI5xkiqKttcXTUksCVuhrQVlWFWmkRkYFC035KDkhwQGaeJwUeqOIYwEac9c6mc1mVrwLaqEdLULp0pbzXx4zc2aL9Rbraqm1FNpI31pyOQInSaYQtgqar+d7M/MA0BDC0ODsiJlGZCEDP8hSXNXM5n8prw0ZFlNXN3K+17U34SpQ2GQfuehhgZQwQfEe/xQdF1Seal4qA6r7Z56Ng7KQSnUvfilA7mMWPWmilZxTHSMivib4bKy7IwDINJVue+SW1JV1bGbtH53g42nVf+auwcwqaQ9iYCvonk6JzY+k2crcOnzb49OMzbmo0xquy7CZH4GJ/cO5NcVcnrLLg4MI6D9csN9hQNAb0IMYxM0w7vB8Iw4QiGtGkgPE6RcZiIFhz072trqlko3D8+MM8zOWW8h2VJGlTEyOV8QYVLfA80vaHVoAIm1eTJ4zD2IG4YrKpbTUCijXGb76JoItZHpDDCjioTN7u3eOutz/P13/w6z99+xnx55Pj4CfP8ijEKo8eCrh2tBzIEbw36WyDj2JLX9ot23aAHbelbuoEm0u6B2pZ4qTw+PhhK61hz4pMPP+R4f8+v/fwv8+P/1D/Jj//Tf4RXL1/wjW9+g4eHe1JJZAy8E6VvRcz3qmr/ie7MhRBgmU8w7cCZbH8VxtExjSMuCGmdySmx309qA2HJmjNgbV2WrqTJ1XTq50RrktErtXugc1JVJjV5lyrkmhlHNT0uJfW5X4xp4BxEZzYoBlB6fD/7YlCF3SpaCVL1Nm/+Qib+Ve1ciZow57z2frymTJrS2r9Xq3Jj19JpiM7BGPG1MCIcXOCAY8Uz4FiWmeQLS15ZU1YlTNtHOwDjXAd+cs7s9nul+9VtL/Y+qNC4U0rbE3Es7zpQocnYSimu09xDDfhirQKhVdq28ddA3F318WhK63qFW66eGyyq0KTjyV6fiyWl9F5lve+l98A4F6h4hhg5H4988uoly+WC89rT+7f/5t/ABwWHa1rJ68J5mVlWlWoXzBA6BoJI77eiJQX9+2x/Sj+nTHShxwgbcND3QItznHNQr+Itno7XmyBlA+gaINWA6TcfglpnKECkvwgCu2Hg+e0NUuDyeDQgt8VHJkxn30b3Dk06cxbyupKWFQmetK74YVTAx0AhYTsLvJmL5azUX+8b68mhlp/t+nQPDWMrNlyv5w0YFKEnzgoOtMD/zXtQO1VXE4X69N/b6LhtXKFVRBtN04GLSncMWEXUUevAmh0iA7Vmcm6xYwbfFBOM8eEUxJjiaz3TzIpB2RYjwzjinSNrGVfPmRLwccA5mOLAfnfgEi8Ep0UJT4Dq8TkzlZlxvZDWBT8vMGf1ADBlc1xBE1GuKm0WHtmU6YqmloiH1nyJ0VntFXpLFRgPRIIXqousRUgVsikCNy/TZb6wzjN3txM3+z373Y7dOGpfbHZghQGh2SFF1blIC2EUfIjEyXMXD9TsSHNSFU+TzAx+wHulP3fQ0sGbFkqf9vhtkzbn3JeA/xZ438brz4nIf+mc+0+Bfxv42J76p0TkJ+01/wnwJ9C0/98Xkf/jB/gcW7z+yaJWiV5Dzjqyo0kI3hOCI10Fa03FW7P1NvG5OgSfbg+q1qcbtndXjbdYq3W
Binary files differ (base64-encoded file contents omitted)
6+LHuxXLSDvr7mH+ltSgopSknCAXUUuUl30QSF3dljmTarRFRjjiLdv3mI6jpiXi3CIAQxThHNCp9tyQkCEp4BpnHA8HHE5L0g5wyZdyLMSdS2rPMWoc2dKQSkerI5oTglz2kCFMJEMKhYEQlCEnLPcYxdak3NW2o8lr/0zvF0TvXPu783tugEkkJ+0x0d69gYgAh999BE4RJRKKPlL5CKy5+iqXbd9TFYtI2iPY0ddMyqZJRnWA1cLkLfckg5yeyN3Xz+S73NbVgS0eXwWKMv6ZxTIUGS5R3vDcjX6mXNKI4ZWuW7vy/7a76+seyahRUnfCmuhosI56wmo2tTLoMZ+/BAhXtcVy7JgnmfM8wXTNGKIURxgYZSURFVSE4mSJanmek2vsaBGBFtiS176Jn7Zb9e2hohUwS+I4y1C1whhp7reBkVGEbR5lrvN2Z14E1nyHpozNQQakGBonCbUWnG5zPAh4ng64e7uHp998QvknPH61SuQXsN8viCtCwIVFCSxwS5ere1belDbH84BKnnunSGdDHBRSp0KjWgRSRKwDOKKnFbMF0lcuFadNZixpa3NEDJ0mJk1gFPBGwpgFiTBqy2w3j8CYRhGSexUMMb2dc4bUBLyVvD0/mtwOOHOn8Au4u13voMvv7jHz37yE3z8Jz+GP7zGw9vv4YvHR0H2wwB2Dsu2wiutmCtkvTJAXmYp1i0Lsqr+jJiVWgjUzNg47cEfkfTA6bDjLWWh0gxHjNMJNRdcHjO4JsQYcTmfARDCOGJNGeenM4aBcJykd+L5+UmVTktTbX2/PGOeVwzDCO+zStUT5nlGCLH1aMYYcb48y9+GEVQLximCUZDmDTF61KoojaIGzAUUBsTxBN42VA5YliyUSkTkKvspBKHKH6cJhSJySoIuVbEpOamQDmRES64ZzhMO4wi+q9i2rOsnY4weXHWWG1UQy+w0Hzy4Ot3nB6SSUKsMD99W6bfiUsA1tV4ru24JzOtVcYZZKPxwBMekSPvcaJ3jOCKYEi6TUBmdUF0doaMUMkIQwZFSGMF75Cr9sqXKmBpBgBKCc8jbhsv8CJ5n3N1dkNMF5zFg2RI4zSDyYJb+W15XBMcYQsBStiuUTa4BjdIuQaupGXdokiYePaoWgtD5e9TtFjkxu2A+soLhfWyfab6qp1kLNZxQWe4Tc4VqsWpMuI9VEf8h9PV6Qy80xDPnDYGAITh4mzPXxglU5Jwa4mhBj1D5d1/RF6PMLjcaPBGGIQIInf29ZntdJTps/kn6pMx2sxbV7N6L2bxWnjZWSP+5ZiOsKCcVdoCq/W2XMDCjqlZB1cSwSAapxWxS1oqc63Q6tWdlPu42ia1VaINVC2DXftZ1sWPVuYJSCO7jpn0/fejv+ntqz+Ol5M27KPehFGHWsjHQlLqboC0Io8xUqyw0ZG1PGVRojXXsAsGjoghNuzuOFQ5srbTn+y35Vh93SA950CKirOmc5ZxNUO+bXv+/1COZ+R0R/d8A/imA10QUWNC2HwH4XN/2OYA/BPArIgoAXkEESW4/618C+JcAcDicuDnxl/NUcbgKE1uzqX7QzcboNkrVZnvsm8U5h8q5m7PUHadLPvoHdFsBAq7pP9cbcq94WyJ5m5h9w71tm885J6qFuZkpaFlGK2JOmyeLNlzuML8jkt+VClaqBro7y8zaP6DDe50OG9QEiRzJnDhIIFFtY8vFf5Bs2mf7buPt10goKcEFh5SBd+/fq7MSIxLDhBCcJJvO4/Xr13AOeHx8rxXKDUxGBZFjL8sCXz1mv+BwOGiSK9X6tnK0Uu103RAZZaCiMLDkBC5AASFxRYXT4ZdStS1VFCPhBGkDGa2y6IcbenN9vbfPl3Atnfttr91gifNcthXzOsIxYSnS6/CD738fy7zi6fksQ3mbamC4Uum6qsoRAbRLz9fKKhMsDsEQGXEaAdGQB004nVbESr8+PcFLqe7F9Rzi0ERShOpqBQO05Mz2jw/+qrfC+k1NLKM5JOz70HsPhgTATumRQGc/yLUEpWowvBvZ6z1ZSsGyLBrEVBwOB6XSyvNNKaFqUFG5ypBbDV5ijKhcZL8QIZek17M34lu13lB3SZL3WY323C2RKGVTRETnzSly2L/fXt4HOK1f3dpPS+KcU5nukttzBu/FHEe7MMfDwwOGcQSD8fmvfoWPPnqLMUZsmxa3NHgqNcFRhY8RVdUJb4tWt5XZUgu41IaMV6EG6GBeB0dKQ82iaEfe68gSQlpX5G1B8AE5yaDmYYgYYoTXCuqgKDKR0FFtjAT0jjjvtZ9ZihjbmlBqUbVRVeXNgqAUMAoqqP5/zL3LkixZdp73rX1x94jIzHNOnepqEAJAE0TSKMlEowbiG+gNJDPN9DJ6DQ5kmsg0k0xjaaiZZCJBEGiSYAMEwK6uy7lkZkS475sGa213zzhZaMhkMuswO5VZkXHxy76s9a9//X8xUYjEzEC4+4owHjmdjrx588B3f/1L/vzPf8kf/mf/REVKHr7i8cP3TKc3tFrIabbx0ag5QwiUqoFzzhroay+SJthqxKzn3HuMaCBRBTNUztyCUVF7ivP5mVOcOJ2OSM0sz4+UlPESeX4+E2tlOh5xwVPKlUYjRE3Ol2Xm0+NnpmFiGCfGcVS0X+CaFnxDwaph4DBNa7AeB6X4DeOorQp5IaVqvmhv8CGQjVJMU98z5xo5LVQGVS5FuFwXHMIY1c4hGUjmfWA6HDhf1atsmEaoVf0BS7L5uY3fWhvDOHJ357lc5nUPr6ip+hDv1vUKIrU4mgRaqaS2MKdZASQfcC4CjuucaFmrgohaBahnWyCGkVwqNVfECdPxQEmelBO5VGKIDEPUZAn111SVRk+aFSzu0V23LXFeewlTzjivcz6XQuvG3M6EIVqjJgNVrxc+fPctn9JMGiLfDwFCgMPEdTog071Wjp2jZkFyojlVzyymdtjXsRjDtjaXvr/1KjAW0GolsCcq85yoJTEOleCjViTqxrLQPa+oYNiy4LznME1aee8m11YtE0s4EBDrzy05U5uzMaFxm0gXa4q0pnO6CJbg1pWGrxcXFdDCaduH9dw1VHxJ6kYlTClrEM0mlqFrWCRoAZJm3oq1VYtFt3Xedxp7q2ucpJU860E3P8mXvlzb3q+AkcUKmCgGXfTkZaJy2//Xf25xTvcKVND8RVxiG0CxNppmz9XW1j3ex2i5qlayuzF4X9P3zJKepAMqWreLEffH15/fKmt1nQP7GPo1pct97L3fU14rsNRdXBZCwFtV+Hx+Yp5n7d0PzixzNrVX+wRUAyIgXtePar1/2rLw8rj0OsgKiq9xV90fk4LX/W3Oe2KIhCHgvLIKSs0gDR+cVfj/PyZtIvIzIFnCdgD+S1Rc5H8H/itUQfK/Bf5ne8v/Yv//f9jf/7f2U5nKeqmanalylKl94NtZ9z9XQdrukNfsWtZgRENw9eQotaP+LxGPniErxapSeYmC3wZIe5RIaV1aZdAB9NKXQh+KdIgJFew/Z9+gq6fwUuEN+5RgQcu+ebXTF3Rio1LMQHXKRRZMshpdZDW50IfbTaZa
qvKUvcc5cLUZbdEELsTb4qc0wEpb+41KrS8sCLTwKaS6u4aGdPkmBOdJuXDNF8bDRBwHzsuV6/nKYUw8HI5E74mjJ3NlPs98/PSRWhvjQY1bL5eLGqEKzNcrrgbGWHBxQMzHR6h4hKwKznq+SRdfP0Xr18jMJu1aawVxugl3/rYoylpaU7SjNvN5q9SaNFj1FR9QifEeOBgiGH3Ei6wbn1hwUXHah+hUKhsRUmnEXiW17+ty5Nc58XxZuLuH6ByuBJwURu/5w//wD/jlv/t3/Oq77xGvMu+tVVJJWmXyYguwSsoGpz0vzkeGwa80CJyjmoHnlsDp3V4r1IaE4UV1PPam77WuZqV93qi0fffLa3jfK7a62Wqj85bgOq9qoE1Mdc0JHr9+bmsbzabPP2fghQDS54VXsZlWtbJHN1WuivI711QYgpf9rzlnzufzWkEopajUNpqASRMGH6giZipeVdGyFIZxWCuEAszLFUcjiKOkhd6zsQqwBA3qyq7/r1al1KhSZDPk3TEEpeT09aGZepqIWyWVh2heZj3QEQ1gtJJWd31QKq3uaoOqa0gGoh8ZDwdciMxLxvvINB1wwK+//Usuz4/83W/+QPtZa6aVREPV8gKhk6hwsVdJN3NvEVmrkD3oFRoSBJxVXl8AZPqvN/2LNEpVWtqSFqbDqFRn7ximO1t3VKnMGbh0Oh3X4xhcJOWsDIqqiUAqFSeRJWcWk5Ff0Wuv96YHm1UUZ1DgyqnJ8eUj8/OvOI3f4Gvg9/7u7/Grb/+af/Ov/4i/9w//AXdx5Gfvf5enj1dwC/GQkfMj8/NnJqeKltRGcOCkMURPasXmrd5LnCPVLvXtqYsqFnoL3jBKu4gwGIW+lCuXp+/46s077o6RD2fBx6jG5FSyNPwYmI4D1+vCZZnpind4ZRZ8en7iTmA8HgijJpTzUjgejjRgPE3kpCImx9MdOM94GsmlsaSZssxM48AQRrxTkYm0ZPKcGcbIEAMpzQQXSLnbdQzgCk0aSRpuiqRSSangGpTnR1wtRCqkpp5KzrHM2vd0PB44TINWuJrnulQu15kqjcEHmhNN/AsgWlWO3nOZgVxVQt+AFq3SCzivwb9VuZs7aPzlG5a5UHGIBFJWVeOA18QswrIk8rIwTPcMQ2AYPPOSyKXhKoQw4MbA5TIzLwtxGMg4UvWkXInBMw5Re9xaIzSdP8uycG2ZECNxnGAYqEsizQmuF2Q+s8wNj1OQ8XTH8PsnLq5QypWhecIYKEF9FmVetO+niVLx28JlVj+sbg0yjgOuFlxNq4ARrRH8wDgdaA2OU6dbNz59emJZNrBE12uhVcfhcMc0bYF2bmcuy0UTH6/XryJqV6CGH0pzHAK+eUKsNAoKs1pEUhZbPyDPC9l5vFOPv9x0DxuC9tJGr59TU6JmjWdyq+S8ULL2IoZhNMsIoTYoWRkPwS+M0VvvvYHlVd/vMTshE1qSBtJMybKaXVPTz+xJaamZmpWY57yqW+Od9rNYdUtaQ6rZfjitxr1WLNjHkhoTyuq75k0Je1M67MC7JTX2nBid0eFwtZpo1eax6Zz2q/bvWPekNbFqa9KitPtdu5ElohovdEaEtqzUKi/2Ytiqlz0x7X3e/XHLJnqtp0+kqRgNlVL0b+PhCM5xdWeWtJCuus9Gt4Gd4gRXO2VVRUfo8U+nq6KdblqJqxYDYji+4OkVOt3vNG1RBke1fca5iB8OTIcjLSeag1YWXeed9s2G8DcD/H+bStvfAf57UcKtA/6n1tr/KiJ/DPyPIvLfAf8X8E/t9f8U+B9E5F8DPwL/zd/iO+xmGTosdiXaVp1fU6LWej1xfe/+5remqUa96VWTfgPAXN/Nh8N5qmia9xK1+DK73yhdWwn/lprWH3sUYF/luz3nPRLRn6ut4XoiB+b5ohNZcWB9rvdElILKBbe2Di7VYdgmazV0tsPt/XqKiMlSxzWZrYZYNOm1263a9ioI8Arq0c/JwmvEeYbpwHQ8EqcDrf2oapGl4BxcLs+kVohh4HA4crlcuV6TaaFEnNNFM95NjHHiMB5oNK7LhfEw0qgqL1/NZNEC9yEOKlPvnPbfNPCiyGa1ynmXfNWA0vqqXOdG9+bhTViiSwH3xU0pBoVqPQ2aoPWBu1VJpCccNwBBF6Do62qpleuSWRarpnUT6wb3dyfePNzz8fFJA4KsHHUxaqJec21sFunqkNqvsFZxd9W1DgLUqjZ19Pso2/jYKqq7RXtFtNYhQGttVWDdKof6olWZCpSaYGbhuN5rKevrvGw89z1t+MX164utiNpbFD3o0nTS9PmuyJYgTSnF3quB9MV8DW/XjqfPz6owKhCiXjtv0uDaf9mIQ6cEqsDO4+MTtVatVNjYd07Zq96uea5Gv2v9EstG23Visspb5f52PpVSVkpvn1etbVTlru7a1wCbgErlsv4x5wLNOVwVQhiIw8QwHPj0+Xt+9vUdMUTu7k782b/6Ew7TxDhMpnrY+0dM0t173bysiFpb1aQWVjS5NlXf8kHHQHCyVU4NSU8p23k6U3X11ofhaBRKLlYJGBjH0SoYg1b2va5/eV5IOXE+z5SSV4pX/6nfs6hFQxjX9bUfa7MbIuLVF1F0Yxa7foiAE7yHdH2i5HuCPzBMBx6++ppvv/trvv2rf8t/9Pf+EZenwnS44/n5B0KIHE4n3ZhLD6gtkTI6cr82tW70ptZ6D0sjDFFBnOt1nbfee2MPGOLeKnW58PGHxGE8cX9/x/n5TE6FMHiu88zy48L9mzuGGLjktPYl+RAYRs/zc+Xx6Yna1HvMO8dB7pjnjLeYsqTC4XhiOpyYl0xNmTBo1cQPJ3KtfH6aefvuhJcRygUfwUevtMHQQDykZKwZVQsWwapc6tXXclLYtVVczSp2RCbXQqpaBTiYofDnz4/E6BlHVTuNrTEvidrElPIaqQhU8K1p33NznA53jNNEqY3lolUg7xziHSXNxGGk1UBaKuPoCBFC9AouVUGKEMKBEDuo+Iw4ZRhEuz/LPKsgiLOev5Kp4qwH1nFd1EtOPIRhBKeempfrjHMzUxwUVEb7uHNVVeO8CI7AhPULt6YVNAHBEaqjZqWKi6gKJ7N+rkTPdLrDi/rfzfOslVcXGYdRQYNclFXSGt4JnoBaXimw0Cnw9KSgbSJKfUy2pmyOXunuGgQ9QA7DkZAcOZvnWi0Mo7fqmaxUNGlKg9Vev0qr2uubk9JKY4gIwrXOtm+rkE2InsM42BrUAbqCQ60zCmqJ5D2roFXfd9sKvlsBoanVRCnWbiBWMbNiQIwDTmyfK8UAxW4FoABksz25iYGbJSNVE+EiqmDeRd6kr00VXGsEYOjMHjaK6j7GeqnJsMUaahWzWTB0T9Pt0wzwtjXYh0DonoFr3GZJ5LarrN/ZHy/2UDaRlD019paJ1j/nNta+jbtfxE499mgvGW6637oX46+/tj+8DwzTgSZqN5Sy0mMR3Uu2fsl169ySNjqHpVsPabxUS6Vfrf5PqlNbhVYplmy7ZsA/KGvhcELEaT/2krVGhceJVQp/w+Nvox75z4D//JXn/wz
4J688fwX+69/4zbtHDw5fBGf6YTcv3NQVb77zRfJz+8/eun2++Tz19xIjuW39GNvXvRwsgE2ElxzcTWnodXXI25LxTx3z/vm6uxb9+pRqSKupPDlLcNe+FgsyWod49lNth2joZ2jFRUQRyWbBbh+cyj3fJ2s7XvbNeewn2n6ilNaQVlffqSGq8GjwntPhiAMGH8mXmWvKSKrEmAghcrp70EpCVnrMMKjS2fv3X3E8Hnh+fuS7737N3cM943Hku+++Y/6ctPKA0RvW67tVXPV8erVDWN3nBajd6mBLspXzr5U0lVeXNZjb37NSK0IG7wluU6bay/X2a/RaY6336s/VTB5cRHjz8MDpdOCHDz+QWmUcBpZlYRz0esxzV75TlEYDereO0RACwQechPV7+2Pfg/hy0eSLx7YQblYQ3WB8j4ztX98/sxQ1ZO4P7w2JdTrmtNeqV9H0ejnX1cv0OvTm5O0YZZ3HANkoOU46cguNqpV5EWouUK2Zvlau16tRvQohRFRUQZO6WjJVNFGzEW5JwEIuefWx0/suPD+faa1xOBxssd421n6cpVaVk7fEsifP3c9NG53rev37e29Bnj0ddr9p7efgPmHev8aJqcA2pWhMR/XcOl8uNPT4H+7vmeeF77//wDc//4a70z3n65lSMnenE4dBBRCcXadGZckLOSVLbgPN6KKtZJpAtyJYAzERZQhUgaaWDNqPIuSsdw5UBW8cJ/WvCYOxjhwpZRNO8OSUaEXvZw8OXu2hMaSzlbQGj+t67o2yJB3B0U23VqXuiqhk/dKEDx8+MUx3PLwdGIcj33zzu/z44Tv+zb/6BX/3D/4BQxROp4HzRdfp6XhHTgvz45kCdJkHHSPFPOXcWn3dU/P3zIi9aNAtOOiaiphI06rX8fTAXXxD+fTI9XrRKldO5EviOE68/+o9nx8fmeeZh/sHSi3c3d2TloXLWasfYtSxcTjw/PhI8I7T4QFKpVWtxogBqyGOSNBe4fk687yYiM+g1LLqra9VhBgib+7vtjHbFg2MgmO+LCCe0+Fee0Yvj2ra4aP2JosnGKB2GEecNC7PAIXWFfu8g6iquh3ddy4wxIEgWrVNOSvLoANPomCxDx5ntOY0n83vMTBOB2JERW8uiVocwxAIYSJGQVzRymJa1CrDCa0kzpdHROB0umMcIksqhBjN90w4Hg8K6Jk/Z4yRmgtXs5qhNIaoa1wIHgmR3FQqnlYQA+KCD+AD0gwIkC0Qr81ERorggmfOlcePHzkYHTIOg6l3LjxftN1gGKIG9U2VF6UoM0WcVXvEUZvQ2Qy2KuMcDENY+8NSWqwvutN9teqlYlvaI45X9Vi8jvnrRfsAxXlbGyN51s8DNayPMTINgWWZmZd5XeOHOHBNxVgmyn4RUQl4Gtor6B0ta4XNWduEVliNmUEXs9qvD34FuxUgU3CtVFVIDjER4wCtEr1b2zHEiQJRPqzV8dagNqcoSFUVdGnNKm6sjCrbXtf4pcnLhO12fevrQmfN9GPv7JeuiqmgoO1q1fbwF0meUT5b2wpzoID0K4lQf7yWPPV1qx/r/rWbCvWXVcP9/rX3+9t/z+1et1879/HVbRGm2z3EGCkl45vWwuA2b2AXRxnddAWs9bo6aS/Gf48ZpWm8sSyJVJa1DaACx9OJh/sT3ns+f35kmS/QdtXCvjf9hsTt/1VP2/+fjxcBpE2S9Sf9BlVas4rSbvC8FnxqhWH/vv2F2G6Qdw6JvdF5e81tBaxvpt30dB9Q7b049o9bxPw20HotafviJ1sgR7NJhk7qHoj1xEOEVeXPseZtrx5HD+rWyewczvpT1qbLm3PZfKJ+4r69ci5SNWh1RfnmKhWuZp2tdHVMq6iAJSyeMY74EElifR9uYIhH7u/e4ULlzt2RSmJerszzlePlzGWeSXnWwAhRXxq7iv0cgw2vvsB15OdLfEDfk9JGJ+pJkTfzxtfud62V5tpaHe2T+bX73RetXp1yUgiiaE+wHoJxGPj5Nz8DDz98+pHn5cI4jlu/jrxEnGBTZVpVu+q2iO/H5e2Y1SDnS7XTl6/bknjZURz2i/Z+00gpreqVIp3il+mUJWA1Q22ymb/3MfpFpe2Le4RZYPQNNwOb4AkoXYJaKDlzuVw0yK+9v0qpxdXmsPfOFLdenn8InmFUfnw3DK1Vfz8cDisIoBuN9jF0n8ZaGnVtwu1mtI4QHYj2vLhddbAv3vv7ddtDm3NmD5/sr/lroJVD14ZctPfocDzig+fz0yNv377lcJy4u7/jX/zzP6JUOJ0e1v5ZL0IYIklMfS2nVUafkilpQRrE4M3I3tGqI7gNBEgmf+6d9gHoJqqBZ+fwa98qDINQ65bo773/Ukprz1ophcGHVeGvj/me7O/HEKICC31LqVVRzdqq9v+uyXJFqtKBgyUmS87k0qjN8fnzE4fjO8LgeXj7Ne/evuMv/+KX/Oqv/4Lf/b2/z9s3Rz49RtJ8RXwgjhPzeSanTMt6n50JktTS158tCOv3a19pXmmm9nhB1wdaUpphRcDDNB14nq+EllnmxBQiNRUeP37m7s0dx/HI41J4fjybVUUj+gE3eT5/fGScJobxpOvwMHG9XriwMI4jNVdiUOP182VmaiN4hwsBAvz6wye8c+qT5jQorjUby6Ey4GzuKH1bnKgX5GDCT3OyPm1PbtUARdvnxNGqmt6PMTANqoRZilZlxauNAM7ZnuhIrbGkikS1EABHroXPT0+ICMksGUSKUcrgfD4jRJyfGMOI95qUeqNMOTzRDwYEqfjB0jJOBsYhUpYrIcy0ltf7WWpiOadV+ENCoJRmFatISoUwKshWsgqNNKPFbnt+hWIUOuXxo55rgda0/7F1yxujK3vvIVdSLvhx5M14Qmy9W4wi7JxWphqV2ZQCh2kkjpOqMJp9Sxyi2dI4q9Z0mprJz7vGGBVQWpaFXBZq2mhuwxDpvV3BB4KH6OPaAlRr5bokVSKtiVoGpEWa9ZOnlpnnSvC6w3vvYefnp36WQiqZ+Zp0fQmRmrMa0+eqyRL9mun/57xQOlgrbl1Xxas3bHNWSWQTyZAGrRRyrUrTF2i593xbMt4KQdoOoOxxiFbhEHQO3O5trTOhbA/bxXD7/fl2veiVte1jFKjei0PdbKHrmryuJ9IrSrsEafcd+0rWbfIEu2Rvd6x7MKr3du9VrG97zV+AUrb23cYqtwnaPunrn/Wit78z63wX9wqQE13lta/DOfdWJeiG7r1a2l8DGIi4v5g9FhGab8q0Mu9JRIHWGCIpLVyuV1K66p7PrgBQNxbI3/T4rUnaYEcf7EEarJtpTxrcKxH2PgDuN6vubn5rL1/HftBZ4lZuRvM+QOqf02ls+36b16pre0TgNtu/Rcd/6p/r720vg+aesK1ovO/B8Jbtg+EHN8hEP8bXUHxdyFTetNMWV3okL5NY98rn3QbU++uRS15pCq4oBcE7x3VetI8FaK1Q/rhxyAAAIABJREFUbJNblgYOojUFl6JVwc+fPzHPM3ECHFyuF1Ka8V6YTkeG52fOl0WRQLSaqKpfqhzpnBjtsS
HSJ+o2GVv7UohGq3NYJSxSykBOxegRu2vSES9b2FyvdOzoALfJzW1vY0oZfONyufDh4we+/fW3fPwYuXs48u79W+7vTtyHe1wM/Or7H9f3d3n5fYWl/9Pv3L5j/337XsrbxXL/uE2aWmv4ENbNZL+Y72V8+3wLbgCrMJSiSluNXiHWZD33a2IU9n4f0hpYbOpiOlot+GXbWGqtKiXfqYJ0vz6lNi7LvKq+9c/p6pnr5xv6lkuB0s+/WOVNQNQeIaXEMAxM06S9iFbJVTqhXsd5XuzaaFVJDdRNijl6oCtF1vXzO/J5u4b06tJrSXV/7G0f9uNNRKjitM/Me+0tHQaez1dCCLx//577+3u8F3793a95++5r3r59b72vlshbJarVYn2Zbe09HMdRaaBg/Omi/ZqmEumCIs9KO1a0t2SlvoUw2Xzc/LVU/l+Y54XL5co0TSa0Y+IWuFXMaF5UHU8EotOA0vlIlwttRrXTob4FVc4CsIpWyZ3IKlwzhGBiTHoNow84H8kiXM5XllRw14y4yP39G77993/Bn/7Lf8Hv/8Ef8tX7e378dOJjuSC1MUwn4iEx12ebKz058zbuNlS6J5mtKcDV15bbv6/PN+1dcU5YaqHkxKfPHxlLZrpTM+qEkK4LQQTwfPr0yLt3b3n37j0fP37kel3MiFp94JzzXM5XQpwIIhwOIzUnLpcztWSmw5HBgtDDNHCdL/ghMMaIiIoK5bzw+PSo/nrTYHuYmrtHUXXH3u+6pITPwmE8kAs8PZ/X+9E9jLCeR6XE6jyqRWl8wQnRe6pvZm+gzx/HgelwQJwKtqTW1Ch4GqFkLucL1Mo4RYIPDEOklMLT0xO1qPXMMAR8EGqbtcfVNaZ4MLNzHTfzrJX2cThqUG8V4XEcWWYouRAHtfNQj76CmAJoykr3ozNbSrcJcFbRQoU60H63GAKpQLN2iCoe0HtRxVFFKL0SZsDrMEa8d1zOKuLSijAgxGEgmqJlKYVidOS+lwjWY4o3L6+Gb6I9UCjFrpqvmNIPEyllsymJBr54nBtWkEk934w2iPoPBh9wMay0PS9KqRUTg1K/RfX5Cz7gnK7nOamyr3cq2hPigJkgsOS0WqocwgGckHIlmV9X9M5Ejto6j6jFKHNdfMwZSGAMiaa7VGttVe7u/bu6z1dSTgrKVI1XalWfPXHeet907cF5nGjoXdlRo+tGjlt34l4Z28UNt8D4Btg1OiCGHXJre2B3r+/QE7j9+7uVVo+LLLbphyJbP/v+8RL8fwky7tew/thTaW+Tsv2xdIbBbczUP/+n4s79ce1ZL3uKeWvqvVYqtj/ruFxDoKZskEqD0hDfQeAd++zmOtiVQ1sJwIVANABZrWUq5/PzCpQ472g3n/E37e/98VuVtPXHGjy+ckM0od+SM9iSuheZ+k3W/uLmykY5XJv3+bJqsSZ57AfmTSVpF8i+Vgl4GTx/iSTcJj3r4F2P+SV6YZGy8cr1/9dx1rnj1f7uRGVPxbjJ0lX4dotC/63VtRrQB46KHNxW0ozr/cV9+bJKWe14nFNELC0LaVl2AbgeTJNGlUxOix5NE67pmXGcGMcTQTwihXm58unzj+AbpfUNxhtCeGVJiwZetqgq88FZhVA3GYr25+ghKhrbF6Ym+8ZeXeBCCBZo6mQvpTD7jMiygQH0xEf/78Wi8gqtaf/3/UIkogFtl/w9TBPjFPn44wd+/PF7CMLx/kheFtLc5cC90U/H3X3aI0JC52bvwYf9WN0Wwi/R/s6FV68ltz7fN5N+XfaL8C1VAYTcN9jQVca0wbpl8z9xKrHbYP2c/dzbH6ciyTbWMS9Eq550pas+DmottFpIi5qEe+8J3prFc7IqjSiKbM32vV9VX+/Wc9DgQ8dOjAMxDDbWVO3x+flsx64o7rZhREWfjBoZo/pW5Zzs2opRC1n9zG7HTFcI3SOLpb6cc/1fH8P7ao2uAZ5hnDielKJ2Pj/z8PDA4TBxdzqtG8rbr98RYuRyflSVLadKiCUt1JIp5j3WAbDgNSjp9wQ236Mt4a8rYq4Ji4q1hCjab+a9Vks6qGKVhrX3UfWbQbxWzWCVQffmqdfY1tls93oFCLxKvyPgTb59yYnLVc3q995GinpqwN3oAGJFmmM243UfCz44vnr3NcfpyF/95V/w8dP3vP/Z73D/cMfnp8+UpRKGiel4pKQFlmp9fX2O97HVg9qsyW0tVj3e1Gf777VtrASd39oHE53j2irz9UwVeHjzjoeHBz4uP9JCIIpnTrOew3Xh4eGBd2/e8eHjB65GjSul8PbNO56enpgvT4Q7TUSmQ8D5Qa/V3DhMBwv61Yh5yTPzWY//MASy86ryiyd0YaMGNSfOJTHEwHGaNDAUVUy9zta7ePCkBetDjtBUfCKlRE4zg3luOYSUmhnQK20uN7XnwId1be/rYl4WLumKeFl7iFopODeuFXFdw1Q06XQYTSWzcZ0XarnQmiOGEejWKZXrdQEcp8MbghNoWcemAQnjOCCWWHrneD5fyKWogpz3irznC8fDHSK6Z0prpCXjh1F9ISUjRu0NogU2EaU8NudITYPCIh6p6jOYiiL8OWdqVluV0as1UE51Ff3x3jEdDjofiyq8StCeNNccOduUQ5MkcZVVpAGIUQVAGtOuj1TPXQHYmRgjx+PBKO+FtBSl7OZMGxqncSSOowFBqjaqfb6NelDT62W5mi/jzDQNDONgiYVSpi9LIg6DgXQKZrRSScti/Y2ahLnQ51xZK1nBOwbvmIZIRchN1xtxniqNVrNR3HUv1NjK1DT7vtrayhTYg8Pq8VmUjug6Jb6te7LOYTNp112m80PWqultYP9anKmVq7Ym0Z0Nsqlg9n3Z9oZs1hZNj1F6jNiPoMcybHvpazHufv9BtMD0U+ybfcvDfo/qr9nH2/u/3cbl/bm+v/Zzu63Y7T+3v2f/WT5GpJr6sVVa9bUdKLJ83ixLeizcC0g96dWfG8jf23BWQT4TLMvZ1IBFFHlZTdX1/SJK6/1Nj9+qpK31ZMR+tx3rJqPeKgfr63iZ/HTkXVq/6S+/p+fMPbgrNzLpt5+9//xaC/Ocv3jta9Ws299fe+1PVjf2733x9i340z4gnWj6//Ze4yo36aV4sU1/j8i8PNaOKrd2c017ibttdMK/6Tq9+B2UL27fuczLulh1zyRnq78imp0n7Ey4YqZeC7nO1nskNAqtWLm69X6jmafnJy6XiyHUxqW3Q9EmYq2SqOWgIR2GzK8I0w4x6pe61KTqkKuMsVI7ssuUrjbXegV4ox+8VuG8XaheHSei8ekQA2/fvWUaItPgEQefnj9zPV/4fH420YDDF8ibfndPrm3stC0J2yNO+/ulC2v3uNkSvBCsEXyXnDnnSKXsNv4dbWR3Pv17SmlQqnkKBrsHmEJp3SUynlzLasj6U1Q/YFUzRdaRpk356w2AXArLov0mtRSrzllSbZYJ0b4rWlLUGhqUe2fJ8GDnkK1Sl22TNIS7qDx8a4lOuc1ZzYi1R0F00UeFJ9RkGFKaNYkAanX2uW4VGOr3pl/TfcK2T/77c
93qYd8H8CV6qdLocRp4fr6qmfbbN3qeY+SP/vm/JsbAw8ODBkpJJc8dxTYcDcZCsCS0VzwKFgyxzqfaWKk/JRfmdEVNjrUi7HwjRK+2Hrbm5LIQJDJMRws8KsM46oZsY7pUFegZBqVC4VQltPWkxprAc1GE24vYe5Uy3azi21DT82TeZ5tHoN5P3w1v0QSr5oVGI0vj/PTIeLgHEe7v3vDw8BX//ttf8W///N/w/nd+ztt3b/nw8TOPc6a1gg/q26eA1QZwYBXGYj1ZtRVqVQBCCQAqwy2igjo9GZWqMu3qfafBmYp0VlyD69OjUpIlcrq/41zPXM4XlflPVz59+AQVpmni7cNbnp6eeH5Uf8I0J4agSr3X+TPeBYZhYpwCpQWggBRoiRA9d3cDl7P1AHoo6YprhYfDwHRQYZ7r9UpJiegd42HEB1WUXdKi5yO6Ns2pkIsWaxuBWrUqrsmQGpULAlXtJryfWNLMfL0qpU280jSd53w5U56fVI1RBB8cOUFJiRAmjuPEpV54sorgYD1U43hgnq/UVsjlypIS1+WJkmdEPLWqKJZS8mZKuVAKtOMbWoOSEx4FKA7TyDgOCiaWQie9lVJpS7c2OGqg3VRKXm0gPGHwSFCaWs2ZlhcMfTBpesHHgcObt7RDJLiF6AND8fjjHU/VgC2pBFStsDRlrTgzJ+9AQMr6ncfpsAJ3NWeaeMRFxmlCaLoetEaar0arBWfrWbG+qU4V3zMa9vTlGCPBRYrZO5RaWOZZ1xaHechq73tKBTGz6hgC3mnyiGDMBhiGyBQHclZfR+1zbQxRWRDd67WvgcWUs71rUJXq3XLCOxiGaRUSy4j2mopWTFyD5nTOOtG1LZekVXhbY4rNaT1e61cWUZ+4kkjXwnVN3FRYzTuVee9g467QQ1+AttjzpQhJT4T6ftuTjL73dhB/ize2Ko53ahdTa1l902res8ZYk4muhLxnfvTv3P8EKPJlHJ12Pe37Pel2L9vHjntWSf/O2+rdbdVvD1rue7r316qfg+teob0H1GF7lV0rJ6g+cjO6aFex7LHMPjbpIL9VKJvaBUjtgGy0GFjf72VV1Fiv9WuFm596/HYkbfuAEzZfCP3j7mUvK0G3N/rFo708+dsbWNumeFPqZhr4my7Y7WtuKxN7pPvWaHZPifupwdsTKKSrg9Fnj53XlsS6XtWx5BQbQHS+rezf9jKA+8kKX/vy9dtr7P9vKrg/hb5sL9gOZUVtdtQrpSx5QgxroNtsIuS8sOQZTVS9mfBqn8OChljJqG8UmxCi0s1xF/hD53u79bOc0Xw64tRRKUXMdMPUzcdZUOKhdWVKoZt40np1z7847ZeVhr+5KtuvTamZ5nTBv7+74/444b5+y7uv3vLDpx/44cOPuO++51cfntRYYjd2XgMetMdoG3/7CtgtGqWy69vn9OrOa/RHRS9fcsZvz68vnKlksGbrtd8Ip70JwdOFU5wTMH59TwR7IrK/pv1+0nqi2/rgNNl8AyFKIs1XpSTJS5Stn496U22bgqK6agKvgUlPigoplVVIRcd9W+kdKfWeIz22LWlVCpO4yDQNNDKlLpSSVjNt9ULVbH1vrLlHIV/bnPp93SOOryVt3YhcvGM6HqkVzpcrh9OJ0+nEw8MDwXs+ffzA+/dfMR0OXM7PtFaITrRXrQ1qbF0UcbcRBy6oeAs2Zxqq/pcVme7VaRX20LHY19AYvVpdRL8CDZtpbVuVK/f9mbV2QMKv1caUyhoQOmeV2ia4EFWwoWnimLNWeUqFfE2kZKqFzltAriaqlWYKpJpy08pqYeBxLNdnpdDFAZrn/Ve/w69//JFf/OIX/L1/+J9w//CGr9//jMvjhbycdayHQBZBmrd50ZFw1rmG6Bgcx3GN2vbrR0eq9/MWAwQcTY0YGoQmzE9n7k5vmMaJ+NWIiwNPnz8RgxBC5PHxiU7jaw2zv6icTifGccI5oTw/syyL0mDDwDiqEm/Jhc+fPjKOAZpoEN40uUiXM9NxInohz2cQ9dmrpTJM94TgEA/X64WUFlJZEOc1ecmVXFSsqNSs83lJzLUQveM4Raia3Jea9d6FyDDqTMh1NyeKmhZnSxa9wBgDKSnNL4RAGyrXudm8Hhh8ZBg9ucCnz89MU8b7xjB4vB+Yr4uCeM5znRWEUAugQq2zUZATc54pdUGILMu8sj3yklCRr4HgB3yMtFRYSsIZbTgEpwrwTvDRawtGSni0OufjiLgBmoPgefjZe8b4NQ9vJt4MEycZWYDlw/d8KAtLWqwPTvuFvQRCnKilmldtAyrFd/aRVZutwlJqxotXKX8/mmqjY56vnM8z4zBwd3fCDZ60A7/V+y1qH6TN4f6I02DaABrQp5ypTdfez0/mBRgD3kdGN+BNyMz7yHQYQITZbDtKqaRUSXlBWuU4jmvVv6YF77yCA0uy4mtvkakM3hN0CtNqYVkuVITmAs1HfBisv7Kt2KeY4qImOYVCVsGRpuyeXCrzfKGdL6oUOmilNS3FvDI1pghDsVhFE7fBb3RJROMfOvDcweFXKIf755pVx17re9vebzGqgbP9ffu/9b1tqwZue/vL/eflQ2RL4vePWzC377W3MdFrQPd+/ds/3x/7dpDbfvrbpHK/bjrnzXe7rXuuiKhIDPvv62yH3fcrqX63z764CiqkMzhiGNb70RoEVxCpL+7Hi2ta2+2Hvfr47Uja7LFm8j/x/GtVnv73fWDZy5fb46ZHBNakri9U6ytfCaZvB9ZrlZLb43n9+P8W596aoU09e+pob/9/m4Tb/rTSubqEq5bTRXm4zgrejbXs3TP7VcDEPhejAvWeNa0Yv+zv6kHp7fl/cc7230Z74RJvIa0GgCg6JU6Q3JWPwDndMGZT0iqdZindKDsa0lkR2toHRa1IU/pb9FvC8doi1qsTfTI6F9ZzVTU55SYr8siqjldBee97MGGXDH1xn5sGuK9Vt75ImKUoj7pWLpcznx8/0cxYPA7qjfX2zRueTeq/rEIGHRmT3VjuCao2F99SDXtlZj8vSq98rf1bvaevrol6o6qnnyWo+3N5GUyyCzDNtLwosumxRMBrdcbZdaq1EuKwUsX2/YWvXbf9xlVrAaMqLrOaZZesDffOVMz264TI1pPQj/VyuVBrXimntdbVdFsNXrV/q1P/NqCnH4tWENbrWgVxqg42jAMhelJeoFU10nRAFVQ2yGv0QK92vkz49xtdvxbi3Hofh2H4Al1cKS3OIzFyPN0zjhNPlysNuL+/ZxgH3r57wx//s//bzEdHQJPfWjKtZZZrYrleKCWrAXU1FTvR19HsOaM/TeOogbdVFTTom7QS19THTvtZhBhGq2I7Yoh2Da1vywW86yCYIsk+RJy3KlNrlJxUOAkNxmlq24F4alPVV+1rK0rRDeC66qnzuKZId+81qVkrntrbUvGtj82+4VbS8sxyeWKcDlzmwsO7b/jq6x/41Xe/4s9/+ef8o3/8X/D27Xu+nz7w+XrFoWMth0he0irsgPR1QYELrbQZOGIKobWa35FTkCKO1iNUCy2jwIeBYtKEaJjw
kgt5njk/PXO4f4M/HDjUgisq5OKcMF+u5CXx1bt3LMcTnz9/1sSiNUQWhjghrnA5L/hQGcaJWmVtwn96PBO853AYAO1lCoP1rwo4H0x4xquFy5wQf9Bj9RNDGFmenrheZ3wQnB/UugYh5wV1FEwqSiFasR+HgZS1CptItOpWm5c5LyxLJgyDVpSjJy9Xak6A7jOuVebLmRK0Uqum0Nq39HyerVriicMRWmO+XpBFe9/uTvcqjIGKXwhwnI7EoVDrovtZXkjLFS8oGt+2ntSUK+N0RxxOSAjUCjk9c73MTIeRcQi4OJLSTEoXaqe0eU9LV3JuuBDBYZX8QhwHwmliGT1pnAjTg+7lz59xpTGFQFwS+Xwhu4qbTrZm6B6hPfqBvMwrI8VJM8GUSMuVy3yBq64pwXs9Tuc5HI54caSlUIwO3q0/9PjaSv3Wio3eN+pMTottUyYGV4Xq4O54t6kLlooLOqfTsrCIVvqzrcfVFBBL0eS8piuUjBenxzkdtP9fIDghtUpwHhe9UhabSu8rI0v9fXOuLGTcAD6OBOcNTND10IlT25NxpAUVjelWMt5FYmzEMChQaeufVJ3j6urTY0Gj0JVMrk0BHVAGkXkMdnVk1zbq4Gt96FtRQMGv3geOUe72kv89MiulruCzIW0aGzq0L79tLThyU2x4jfGhx/HyuG5fcwuc3sZEryVu+8dt0eY2UduDW3uBk33yt6/glaaFgUpdr5cTrbLtA2zB0domzoITRFRIR59T4EwLLN0fNeJcoKRKTlkZYM4hooDB/tjWa9bB59/w+O1I2vYJVeviAXbz6n5gsP5++/OL3y3H2wc8PejZzAA12nWyKXS9ltnffsd+0Oz/fvva16hre25wv2G379+/p1cN1r9hQcb62bvj6IlDp6uh2fsXaAZidMcNkY/bR6xBaE+T95WJrf/q9UrdF7f2xbHvin+7yVsreByOkSbqnUfV5uZlLqyZdS9hp6QLs3PqtYFVWZSjoOqNTfnBuVW6DK4e45bU63n6dTK3pv0ZpTS8b6upZV9kMYuAGAIpBKVv7O7Pa+Ph5d27uTa7MaX3ejOPvl6vfPr0GXIi5YWPn34kt8Lx4cTlfFkX5t4bqP0xfzOo8TLJeeln0hf1zbdFn+/0kj6hlDOfaLiV6tVfezvWe2LjQlw/Q0RUUr1V9kwKHa8CUgyA0D4xYP3//aKvPT927Kv6Y6WhAdIyLwiqaFgsQe8VH+dkNYHdEteustXoKlxaedu80brUfv+svl5sn7HJLNM2CrOanDZyXsw2QoVHvPPWR+kRNLh1aACY816yvn6xOd2uaXvrkf3c1PXAMR4mHh4eqLVyfn7mcLrjdDry8HBHrYU/+7M/43Q6cThOfPz0kWW+cH7+iNSFfD2rwXZr0DwNE4gwX03vAw1HLqpo2SQgPuLQXg3EqTJWaVYt175YYQTiCip1MRABnFdj+b7m9HGr579tdqrEqeNrFcBpL+dCT2aD+TV651cvKt1vKiFGBteVTUWPrxV807USVL47N0+umeenjxxP93g5EIcjD2/f81e//hV//Cd/wn/8n/5j3jy84f1XX3N5/ETNV638m6x7rX2f4cW4aei5LikRGis4oHNl6xkV2QR6Go7gwSP4Br72Hj7h6fGJKoEaovY7haDUSaPaPj+feXh4YFkSIo67u3s+fPjA9XplHOB4GnHDxLycma+VZZkRFsZhYIgnAkrTy1W4MyGbYbnw+PSZ8/OF0+mEBKX5hdExz4ly1kr4OA3oBq1gw5IcvjmcaDJDbQRfOIyBFoVWta9SgyqHGzylCtd5QSocxgNhGGlORTBSSlyvz3hpjMHRSqZmFb5IS6LgTDBDmRkVNGmMwvmqgkWjd9BUNENawBERN1CrENxILtoPezqMauaeErmoMtx0Oulcl0oxReZhGHEuqP1HLcypi3fEVSyr1kzOqm5b00IM6nfmglGNUQGuhlawiYESPf54JPlIGUZSLjwvC5c8M3hHaJlaFu2nTJGlOHxQgZLaKnnO0Iqti30dySafr8wTH4KBuxioVgjWM9p7qbJRzvs6r0rBBec2UQkRYZlnA4QEFxQMFekVaE8LBqiIVmlTWpiXCzn3qq9Ro8Xjg6pSTmMkcMALtKJxT80J77V38Bg81yUxlwwFWs1IzQyuET1WARWaqwqcilLim4udxLbuEzl3k+9myaUJQNm5hOg1sUwF8UIcFLR0TRVve0uAF0eVqj1zRqfXLMvhWkW8xnoNi4leeewBzNuYtQPzt8CnSKdl9n56jYGriUw1i6fYxQuvMcS279hrJXyZKO3jnFsp/9sY6BaQ3f9+u+ftk76+/remgMNgIjv72KF/Z0/onIRdscL6y2iwFid25/iiBWTTPaBV1iqc2DjBqJANBQGWvNIhxd6Xs1oINWG1d3Gq4vKTsXR//FYkbT0xkH7S9UU0Zz+s0tPs4r3Gb93LJrfOILdvsPcgptJTrY/JXNv3R9Ov2b5C0yz4dyJbYLtHE/YBVa1245TTqo7qlijYMewHXA/UrRa2nu9+sq0tioKq3vTkC/Nm2SWBzc49W/J6G1D3hlQNTDWRKV28RLriYVPd091E0QtV1+uznh/bpNrdMn2InpM01grndq36YqNIBiZ2MUQ1xrwuKtO7esTBior3q9WPQUwqvzdpi9fm61Sz3TtZgzHlcG901Z6UNXa9JnZewXxWcm7glR4XQiTEiJvTmhi31qyy/mWStgVnLytF29iysWr0niqQSiUVtVJXpTW4Pl84z1e+/fgRJ0LwXlUcd+OuX+Jt0X25CPTfewKyBcSF2jZzyZUyYNdhS/R0keqI4R7A2NMYYVugywp2gHMRqV10pSMrWnGrTZVGexKUUrZ+H1nNb2VdFDcjz2aVtpwT80UrQrSm987oS6VuyJvfVWFLKep/aH6CXeWw2Ebl/NZr0BOijpw3WE3p52WxNWGbb96rcqkzGwCwyrDTqmk1jrvY/Wqt9gKmjqddIraNpa1HteQCRi0tJaOMsnXx0tniVQ1tmA64IfL0+MSSE18dDozDiHeBP/2Xv6BW4e7ugfm68Pj4iVYL5/Mzg1e0OFq/n65lHsSvG3SfIyrOElYBEA3GROXuq1aCEDX27kIcmsjp8es56zq+NuE3vX99HGerUvngCT4qfQ+lkzXfx2pZE/QuZ92vaU5N771RgVJWkYHBAJDWGmKN45pcFfVXE1GPp5bJFc5Pn8nvzrhpJLfC3d2RN/cHfvWXf86HH3/g93/v7/Pw7i3h34/MWSmyaiCdbT3U8VtqUQS+6ZozxMjVAlVnib0GwY69olsMGlSWUm2uA4gh5roWDIPnfH6kRsfx4S3BH3iIIx8+/MD1euF4PHK5PHO5nPnmm2843d0Bjm+//RbBEQet8B/GE61dWFIy0/VIcI7x7sj5fObpeqW2yWKdSoiO53NSElFThbY4jjQcz4/PawIrAsfDiVoqz89npXZGZUmM0xFpQlqu1Fo4TBOgtN4mDgkOFwZOp1ET4VIoWRNfpcwt2okSBO8GTVKsX/p8vqpBsw8W4E08nc8sy5X
D8Y7D8cjxACT1ZWuowqOIBuVOwI2Oes3klBnHSByi9q9dPcs8c10Sh+mIj6Ptz57rUrhcZ6YpEK2irSIXjVaTVXsbrml/1jgetQLZQAjkXJhLoTpvt1r7L3OtDAjNeYjBaJAQmko7eedx04Gl9D7spr2QFLwTYnRG0baxZBWaRiMVBcXEe4agAkotq8CXDbzUAAAgAElEQVTHsuS1Nx0RfAwErx5Yl8uFagI3cYj4oOtkyYllvtBqxvuowGq2ir0461V2UCtVhGGISG7kojYiPnqGcWAYRut3DZRcNUGTgDcxjZK1bWJerlznGQleaZZmhF3wKL2/krKyasQJ85KZS8MPQoyTrXFK7XYSLbYzwNsscnrFqlazLUGrXn71X2u295knZNO1UJwjeu3XLVKQZnsxKoJE1sSZKozDYD3R5htnW2dfx2idObTtvb1q1GPVsgWXtplY4oHdcy/41in9m6BKKrpndYuE22rYRvNT0aG+R+0Fz7qo0/6xZ/r0dW0fc2+vKyvwrsf1EsTb4hux9pkNpO6tDD1+uWUcOedwRHtOgXoFCvcxmrMUfV+s0Fi0tv3rZBVra6Wt/psvE+tKK04N172CctlYQoom/OZq229F0iZAcD1pQ1ecjmw4rSKIBSG0pvQWq2HogN3OVazagmXB9AStFJXzrm01Oc1F0bGSE7VmvFeT116Recm7tcUO1sBsfdxkx5sSjgYr9cXfNjQLO35tc9SApolof4BVgdQPyxI0EapUqhhS49bUheq0J4nW2ydVyafQB+g2SPv31t77hlvRIkRVclb6kZXKVQxAB1iwz+s9LNAnkMBuEHfERjnkKO1mhyDTwDtNPlockK7GBOSlUFHxCtkN5IbmIt3hDRzFlH4QrwktIMGTqt5XcV5fU8XuoVYJSquUYjSNpht6VwvUKorgnTaRKuvDKIRGGaxocBh8NPn5irhOG7Sx0xquaVOwyjtvFcuOrILDi8dXIbhAa445QWrK/59iZJgiDJ6nOdE+q8eQd9BM+QmxxOa1xdRrT8J23+sXr6m1K/bZ76u5ZBeWaNRmtICwVSf7eNqrP+2r1s6pMlcfL/0Y9oDE+nu/Lt58bRDz6DMgwhQYqVggpSp7zcH1cuXz4+ddVczhpNKaI1dAAmGw94uQrbqRkiYLwasxt4qGNFKtULJW5EwCW3bXrrRKypVq8vO5WDUMXW+C06BApBq4oqisd17VUNF1rQtngKpr5vwSHew/+/f2pm5xjrJPfM0E14sj9/sbBqoLhGEijBOpZD5+/kyMA8fpgBPP8+OVP/6jX/DmzVtOx/c8PX3Et6rGzD5AK/g4Kv1p7flURLWVTMuZVDZFsFqSothsyocp5c0sWrz6cRWtMHgfaLmt6484VcVzzpm6ZjPxGk3ygkdV7Fql5IXBB66XM8VVpmkEKtIyDkWZHRWas4BxodSCE+17KbXgrMJAVYU6EVmpsbUp+FZQr0kVytFnaGfS5TPh7kSWzLs3E7/z5sDy+QN/8s/+T/7O3/l9ju/e8PDzn/NXv3zCA8NBrSGW82L3vBI8RklqZkquyfqSZ2pNBOfx4qm1cyzQqmXVfZCmcF4ToTlHNpolwdFKI7pKunyghsp0+pprbYQY8PGgQhtlZpoOfP/jr3nz5j0hjLx5+zWPj585XypH16g1c5wC3ieEC+MYOZ1GVQW89/gPVz788Gc8PDwwDBPu4HCcKE3ptM4FnBMOk4fstd8sX1Yq7nU5E8uV03RkGhvSZsQJl0vm+fGCj4HjcaCJJ04D4pX6ep0XQhCG6JjThZIz4lRR9O5hJHinVOnrmeNxIo4jIp53795qJW5JWmUSOB4PNIeJWWRaqUxO1xFksMWykMt5XWNOB62O51yQ4KkCS240f8BPR2Q44MaDUvxz4/PTBZFKHBwDcDoGDmPk+XlmvlyQFhhiIIwjzylzefyB6XiiukhtDpERFzxxiCzzlVYS1WWkDsQCQxRyyzSpvJ0GJM0s10RunjDcERA8MKkwqfYXA0glm2+eC93wvpFKpTJQxTGnSq2ZEKCmZMqhmmziCk0CpWqy5JxjOIzkkjnPV1LJDIPajAzB42VhPidyukKDYTzgomNZErlli48gjANehOFw4HQ3mSWC/nNScemiaUAp1AJLdTq2vVc68RAJwXF/VGXKatX1WipFBO9UqCQlrbqmkhlPd4xiQmam+FlyIi1Jj0sCaVlWJWLntA+5V4/6ugEQooJZWNLeFYXzkmgIxRVcUMZIEVUTDiaw1UrBAdF7mmuq3ps39kYX61KfUWUHBAMDt0ICxmZpth6Gdc/XOKjYGq3JFmuSZ2qPTfv2cIGCJaKDVcmq9vqyT/qAZlYpPQrpQLKTLabvsSw3NMvXYoP+0P5xjVXXalfb4gY93ab9670fWV4yUm574zvtnl4xtRi2xzIboC/kVHCttw91oNohrTNw+ufavuGsKjc4wjCuFbZa66rwWtsWD3WvyVLSi/jstcdvRdIGt4p6t0IjP12h6O/dZ+1rGRNe/INN2rlzprs0Zw+ebjP/LwfPl8/dPt8/w6/H2/v0lNal6jGyIvjW82n5zq5HQZolEcaXLXXNWrzowPfs6Gw90WRHM30lOH6tpN763/UPL/7eA/c+6LgZ/OvP3WXZEkVDJ9wWdJTWzOtGBS28dzSTL+49Ov3n3m9v/c71ELfr2xPpBvigSGQydSrX3Jok9WN1VlHsQXAI2if3siKp6X1fKPvk6onJMAwqVFKN02yIln7Hl+P1xTns7sn6DwFpSNPFp9XKOETuj5HpaBz654tK1Qrr4rH/zC8pCfs+tw3N2tPt9FytHxLtKdTFTuWA10TBErW9imT/3j1165aKuT/f/THcVodra7guXGFCAtbxtvVZmjzyNu4ql+uF8+WMiKyN7yIqTFBz0mTGb5L5fXxp0NLNrs1IOnoulwutNaI9B1ZxLFsfm16zsHoY7SuOIQ4MJuIi0pOy7brr8W+iRppk+hfX53bsfHFNW6O5Dlxhn6kpp643VimO6lflnOf56UJOmW++/oZhGPjqq6/40z/9BZfrzDc/P5A7+6BVclro3kk9wejHWnvPXa0vlC57stl7BfucG4bwYry12imoAGWl8NDnkGCV02xJlNAVTPtav9J+TFrdObdbDbpJu16fvraLrTn6VW39vdaqdhRRvdPm6xnABDlUyVYBrG0tozUul2e4nBkORwR4uH9D9L/mL/7tL/n44QNf/fwP+J3f/Q/49V//JflilZ1xYE5Bzbd7YcyAxb26m9jadBhPlNyrjTYX6HuaJcbFmAHOrRW3Wju9uyGlcnn6hK+RcbjjZz/7msfHT3z89Mz93T3P5wslV4bhTIyNuzcnDsfIx48/4HzjdHcgDg7nTupXJjBOgbu7E6VkvHtPyZm0ZEJoDHGCQ+A6J5wLXK5X5usz79+9ZRoHC7qhlqTKrjkzjgNOIC9XWoNKZSkVFwZKa5yviRg1aMvJAKHWqClRxev8aZVsa5V3nsM4EUPg6q4sKXO5fGKII6fTAyEMxNEq6E1Nz5s4YtR5uFRTtqSLamhg1q
XqRRqDqYL6UjjPKvoxzwmcZ5gOhDBSgRAH1KaircBarQUVDbUeGK/iHuP4wBgmQPDLVSlUzTEeToBXOqhZZEzTRBCzt1kyMnSQVPsyp2HEGSTshpE0L1yXK/enwwoUdaYIohQw7T9TwKQUaK7ofBX1SvPiNEHNCzktiFRc0OQ729rYBYWcOEJQ9cTl+Yn2pDHN3ShMhwOtaS/vMIxgAFatmeBNadcJrWZCOBijoej9rarOqe0Tsxl5N/xwYPAjwzggglWiC+M4WHU9m3WEjtWcygpW+hCQYL2y8v9Q925PkiRXet/PrxGZWVXd1T0zABZYLLFL7Ropk3ZFmckoraQXvej/piSakSJFA3dBLi6zEBbXwUxfqvISEX7Tw3GP8MzuwepxmEBb91RlRkZ4eLif7zvf+Y5aMzsxSFsWVddacX604q5ZhGxaloVSCofDTsBb0ZQgcztlUQWI+kcIIasVaZL9J4daztG1zaHFgyVXZ/hrcKPU5sjZ76cxd/t0pyIS8zepvRalGLRgTWKmDeRdxXMIFEtR1hEh3tr6JPtOc15fv9N8vE7tdl+TeOeamFRKXalK+phhI4g3t+/mkF0aaCuS0Wyg7WPqpltJZfuePr772Os2Xmrv3c5/i2Gu46otvitF0SoGihJToCtg6TI5S2z31RcfPQ3gGwTa+snS2OOt/qtJseTCbwfv9iUD8eGk+dh7dWUopa5iG8D2/mswqWpI9PXn37+3Gel8ELB2k0bVQEU6rsuCoVW3MWeprWjtbrQShlUXyaRp1X4uwCWVvEkRjfrwQekeqPa3sNlmm3Tdz9dz1rL8CxCUk2ugQB76D7OPQNWg2zXYS1lkaFqbyqjVwuUstVINFPWFpP9/Xj1YaIxKn+3Z0u5Nn93qcWrW6MZcpL+vPSnQ/m27gH6Zlw0stbH9cKpdHbO9f703ujIxdZtNMZCnGVcKozGQI+fTkeNlJpZUnay2z38s9b/dv/65ul64rrLJFXTeFvLaToJ5y0L186h/bj6mXb/OXH9kg8iZJQlYVdQm1NVVS9HASl7/O6bENE2cz2cKUjNSR1Nc11LCVhezdj7tmnumdGVMS2Ge5xV89PJEqZXa1iRjLTmItXxzpyxFAiBtVXUy3Noo9MDutkC6AZ62YfSykjY2/RxuZEYrVteliHNpFuBvtBa7buvY7w8cDveEJTKfJ+4P9zw8PHA4HDDG8Itf/APD6Bl2A0sS05+wBMIi3qxSjlZk3OtaQ6lALkuNWg9Ye/Ksz7zeEmxbvZ+5mrtaa3Idg1Zz2DOlt3UKVDOCfmxkQ62mB4h0WrVgpaj1/t/Wh21tFeR8m205StawEGp9bZLn/Ph8xL+M2IMjhJkXLz7h00+O/OK3X/Gf/9N/5n/7k3/Kp68f+fSzT/niVyLzc27E+5mwLOQSKTFVgwNA29rzTlUHxcLpfMJqL4YGre6mEXtayO5WC2Pqsy5Mdq3Zbc8phXk58vjiJdZawjDy+PI1X715g9aGFy9e8vz8zP2DZsQT0xnrIihp03J398D+sCOXxJuv3ohRiBEbfaN3/NF3fsD7pydiyhjtCEtAYdHKkGpfMDEXKnjr0EoCvrIUlhDwOC5pJieZO6GIPE6yOYkYj5KZrfdotxu52+9JObOEaX0mnfP1mS28e39kP46Mg9RtTtNESrAsQbLqqayGKSEELnMQ+Z337MYdy0V6O+oCltYbT44dc0CZUE1vFM46IbmMXOcyL2hqA3Wlq2lWZhh2GG0JIa7NqEVRU+WXNRti3cDeyRzI1QQh523NimER5YpRIgGm5TtkMxESTgx1QsqoIPLt3biroEBMWIw1VXEQSDEyLxHvtLR50JZUm6IL0C4CmFIhBmlGbbTCFS1OiW5zh53nmct0wRrHvpJG0zRzPp95Whb2u4HD7iBmJ3XtN6a6bAbJ1ocFlDbEGNb10GgB57LOidmItQMhJZaYOZ+OnE9Vmm8lrgvLXIn51qBeajFFRi0SxGWZMW6ALDFKCJFYa5y93wijZVnk+VyJ7LLubWLkJKUT3ouaqLWdinGBJKUdxmjM6DExscSFVAG0c26tz9ZalEExRVQx6x5Wuj2ojyE2IlGSHn3LFVXB1lrKUlqvyJskR0fEtpdu5nalgshKgJkaj2vnq+yxrOToLekodXxsc7QCRa0+JK7bHn19XrSZDbT3CPLZiOIWf0h6sY9f+/2pj4X7fbi9Z60jzr2jPC2jsu5bfZ1f28dFnprpnff6Pb6NsTEGjMzlUvcjydyUdX78odc3BrTdBoDtZ/KnBYhwxRDIaH4UMMG1jrVN9I8dvzH3/Y2me8+HYOQ2uC/bT+oDLfJCYWg/huz744pTmV2vR7XsSKoNaVOS2jVtaFJQMdqQv2GteJPjfwQs9q+P6Ya1MZU6STVpzhawtCAyqSp53B6h9TvUllmrP6Cx0jFvbn1Dky/UpFTMqW7oXweHt9fHMhAfe08zkbhdiATAyTFilVr0wXC7JytI7YPk7niSNZFxiVEYuyvJbN09y0fufXvd/k4ktGkFT2lZWC4XltOJOCisEbnJ8XLi6fS8Pgu34Lsfh9vvu/3OPpCWH2yLT3N+7Ov+2r9vAVvPFt1+72oO0d2HjzFxPaBEdSycqrKLLHUHGqlrWuJCqH2enK3On7mq5nOpBjxaLM+dW78vhLBmDvvraNe81FqitnjHjlHtXVClcLu5Y8q8V0qK5JuTowCMUDfMTZ7aj1l7NYLhFlzejs9H/1sOCKpJRwTLeG0rkFXMl5m4JF49vsbZgU8//ZRf/vJXPD8/8e1vf5ucE+fLhTCdCdUpUoa/sYTUhadm9HIFbUpf3ffbdbwBrL59Qtu4+g2wB21K65XRTjHWZzV/cGwJEjQoW+s+hfhqoK3JXayVViHNbbOfg21tu2rlUljX4FJYAylpb1ABsxZDhfPxxP7wEo3isH/g8fVn/MPv3vGzn/yU/+l/PfH4+ILv/vF3efPl7whHkZwO444QF2mE3YIC1HoPlRIxQymJ8+nMYS/XlUute7kh4UolZXJJNOpJ6i9YSRhUpijD27dfsBv3HA4POOe4nBf8MHA+nyV4jRPHY+Thfs/d3SMxiQzZD2LfnlLk29/6NtM88+tf/4bD/p4Sba1udFhTmOeFN2/ecTgc0KNmHLwAvpyYpzPjINJKmszLWualZshMM18RIjMrxPQm18xaDHgrlvRaC/BrZQvLEvB+YHBC3gQVWUIhp4D3jt3ugcv5xPk8MYyDzBWl8M6jlMiocynMc6jlFOKSmhcBZ0ZLiUBGUYqubT7qfhLFOEujOIx79ntpYJ9JzJP0fdNajiGAQQhXrURWOc2LNHcu0h/M1qbySmdiVlymhVykHtRo0M5RsuL56R3Hp/c8vnjFZTrz/DygtWKaZ6TNhmXnDCFLSwattYDlurdrbST4VQZlG3EL8xIwJotCQVGbTNeGzDnhrGfwIwXphZmXS+3DWWOFUgnxXIghopS4FT7c36OU1PBMS6TV8ch6oMSALAWUsygjJP0qaYzSesN7z+A92jiMNmSbsTljYyKEhWVZmCZpM6G0W
kseZH1h3RuMNYxmt661Sx2fUkkqg0JXEA+IrLEUeV/9TFiWKmuTVi4xOcYyVgLBQmrrV5E6Jm1RWgxgjNV47UklY1wjUgXM2Eoi5yQ1i7dk1e0+LGuX1O53/Kss2ev+rSpYESKrvf5g3IAiVoFXyaXl9qUJe1s/O2UOtVavj5+c/zAmF7D8YYutW9Am/860OmXYar0VvWt3i6nT2qbh9rpuMUAvl7wlokspHYATE61+vHqS+mMxXh/jfwyMAqu3gqg+qL2INyXP172+UaBtG7Drn62px1xNEEq7qMbipy4AzOtNvgVtH/tTmsynHrG0mS7/UdPJW3q6WWD0r1u5mdS/rBd2NXn6m91fowTM1d65Bqo5BXkwlKoukllYNKT4VXeT9nYsS5Xw9D+7fU8LEEuRfjjrQt6/t5vMKXeuOqXR7lA6FkKp68UklYK3Ui+krWQGLkEahYo0VcbfaCv2uitjpNfAtw8Ie4bkY3NI7gPd50oN4lJl0/sFZau9sraaXHQPZDve1TilVINwWZBEWmcEuNU5U9ag7x8HmT1wSarUInLJtM2XM8tlYrlYtLdiN+8cc7P0/UeAYQOY/ff0Y9sH/ilJNjdV1rVdW7vu9r7bLFF7Xx/4tuxUb9u/nQ/re2C7TyvTVnecRmDkyrKmGFcCJMSZy/nUZcvMBqyUuDt6Y/HDiLe+Ns9UzPPMPM8opVZnyp5VCyFgnV3rBShbBkxkOVvtQgqL1GpVOUi7A65+HhoYLjWr3P9MGL02Pn0mqa8T7DeZHhi3e64UqwRaGanTlMknen/rJLCKCS6XmWEYefX4moc7qT36yU9+yjAOPLy8RxlpJB9rSwLv3XoOpgZPqE3ukVJCGfDWX82P9plWd9FAb0+GtDHor6UZuoCQUSmEq4bh9QOoIrJzUfiIHLRJf1LqnwO1zg2lFAUxqliWZTXhaTLZ/rxLW/MBstSUZK2JKWGNIhsxZGlry3w+E+eAMppULOP+BYe7F3zxu9/xdz/6G/7Hv/5rHl+/4tPvfItf/ewIxjDs70hFGu5GZHxV2+RhXRtb4JBzxntb18Yk97eCM72y4AlJuEk9UJyDZA6NRppSGwZniPFEjJoUd6RQeLh/5Hg8klPm1atHno7vWMKRw94yz6FKXTVvvnrP+bTgnFsBcU6Gr758D1Qnwjrfl2XGGIUfLcPo2O09Rmuej08ElTBma06vlGIKkfdv3mGM5f7+noKufaqkbnueL6Rc2I8Du6E2y9aIAqU6/A7jHufkOk0N5p2F56dnzqeJZU4ic/QDfpC5nIFlWjgeTyijxf1PW4k3lGTfcq33RkXMbiTHLL3/lCbkVBs1G2lMXFqfQbhMJ+lz6TSXSRh47y3GylrklDSZj0HkdcZID7Yl5vr9QgqdLxdA4/2e0nqMKsn8zlPgyy++5N2brwDY7/cYoxl3A4+Pj9zfPZCLBM3OGVqrgmEYu16jZQXs1vl1vrV1T9XnwZRCaAY/1TzIDAbndjhbmMNCLi370ECFWY2Acs1egUzfw36H94McL4oUW2nNOHrmKZFigKxILNja+NyOw6ooCCExz30dkhLTGavxyqE02zWojXjuCcgC1RlWlBm2GgKVXMl2xMio7Y/buivPnrUWSqlZ1kQuUkO/LBMWVz+7GZk0K/lcMn7wsn+HhZxrrb8xsvapSlIipkjmllTq9vMW1zSSS8ALK8gpBazd4qhNdSKxWttb+r25fxXAaVf3V5HJ9wCo/5wqRRpPq2uCtgeYq9LCKErZVBbtehowbZ+lrnFSFysxmwA2RZOLt/dKhkoL8fE1Sq0eSN2Cqp7c7/c0rYW06D/TSN7bWJ6bmO9j49qOI8C1xVOs13eLL25f3wzQpri50NsCxcb498Duw9qzjzH3LSC7neTbTS1XgeLVJPzIgPc/b4Pf1+e090jQ3ztY1vPsArye2S00uZHInUpOqLzVfimF1IBZj1KGkGotlqsPVC2a77+wz6j1mYwGMNdgvXuPUmp1pCsVVK3j22XR5PP9BG0ZqPaACiNsjTBaINLNJTbZQQV9RqGMWQFD/2Dc9p1qY9YDCRBjgjXT0QEIgGGoLFl9wGwNMtu1bGMgVv59D64Qwgf1W1fjRGOari3GW5boNpgFPljsrudYJR3QFFUIKXKZJ04XS/ZGDFWysLIppfXZ7s/n44vEtSb7NrNQirgh5vqc3coG++N+eOyPa8dvr/ljC9nH/t2eTYUw0ZlSAyNdJU4XlnkGEGChQFGIMa+A2lZpnVayGRtniCmugK1JF9sYtLkh7P92vrZJkaoNPJRVSteWarmX3UIcE9mKHDKEIEYGbNLddr1NntePVb+p9RtKP04rEKbJ4qRmK7Zj1GfQ+5Hd4YCznul4JsbE4+Mrdrs9+8Mdv/3tb/ntb38jzbSHQZrTKjFDyVqRohBZ1nTyGyUETaHUTIe92hxv7/0taL9di9urPc9bP6F+w7wmgdq8bGMjYyXfNY7CmgvpoNZnWcCPWc/xNlMsQGQjHBSQY5D+Ts5ia6CAaoRWq/3V5GWBFNF25Dwt7O4eePn4yNPxyOc//TH/3X//V9zd3/Ht732PL3/9m9rw1+LHPRzfi5xPFUrcsraNuGzzNC6hyq4kSNTrWIsJz0ZiQggL1sr8L0XqnQ47CeZddZ8zxpOT1JsNxrJ4jzKF5+MzIQYeHg589dVbjDF867MXWGt5fn7mzZsvGYcRYxzLvDCOIylG3j3/nnEcGUaRsyoL9/cDJc3c7e7xo9SEjYPmd2GpGUVVa1gt+8OBhyC1p3cPDxUQZXSVmE0Xx+AdL1+8IIWZkiU7czydpHm19dhqgR+WGTNNIp3zI27Y8cLvyTGzhJnL8RnnDM5Lg2djDV55QozEJZB1WpvC7w93dW2cmeYLz89HmsTOe89hvxeSKESWKJb64+BXpYeua9huHJimCWqdMiqRgRgD02VmnhfuDgdQmpTBO6kHnKYTOSecH9eMZAwiPbTy4KMyVVIqGT3qs/rq9Se4cWSaJ2LOiJ2W9NvUSLYsFTHOcs6BkroukZsmtDZ4Z1FZTIRKUeicSSju7vaAIoTI6ST1v36wWN2cciVjm0NEWdlXx3Ek+8I0z9XxEvI8U3JmN3rGwZNiZLqcWUJkWRaGwZMoKFXbzJS4yuUlSaEomeooGzns/PqcWK3IThrBp5zXLJkxeq0pKfU4ckBqZk4klQakZKWu4zFGcqwkoVJrI/GccnXQVNIzUFUiMmmcHfDaUOoanaXBISlFYq7Eu1EYms5ZqlRVkZpOqaUVg5G2F2+ZqtTtPyL1DCHV9XGr2xYCbWudE0KoypOw1nK3deY2hm3g1Fiz9sQFId6Nsp1Cosa0BXJVCxUa2KokZ9rATVv6b5UW7Ttb/LyRe+1YYuiynmPuAVarkb4+7i3R3Mcr/Z78Mcnp9r52/OvkTL9fbX+k1+3H8MnteTTitZ1/zjUzbP9LkEeWDw0L+s1+q39QaKPWjMhqCqA23evq3tJt/P1N6n+2fn2zuq1/WtDd7PDl/+0GXBso9EHnh9dVrjJtSu7QNY4usuCqUl0ii8h+Sk7Y
6nCWFRzu9hwOd6QsDo46y0MjTVeBch243wLZPkDsx6FnnuqtqAFq/ftmshljOvlZA3fVfhyFroYPCoV0JBHnzsLGHq9GMwoawsxFCuZvGRBr7Vpb0gK2/iXnwFXw2P7+OnfDFmi3WpjbB7XPsrV51rMxbXg+BsrWcWN7T78Q3r63/10LxopSROD5cuGXv/uC37/7PS9eP6IOB96+fc98Cd1m8WHavR13A+abnfnHCIj1mVPXQK2/9v7VL263mbT+fG414z3QbZtO/9mcc3UfU5ScCdWwwDlLCoHz+cg8XUSyYxSpLCIhyYWSRfqy2+1qwKqIIeCcZgrL1tfq5p61gu4VxOUNKPSSFGsMhQ7srnMAVK3PtNai6lwsKVdW8zpr1saiZaN6sPCxOXVLBK3jqPrFv7s3SmPcwP7unv3uQAiJp/fPODfy6vVnaOe4f/GCf/vv/i3jOPLq1YG8YwYAACAASURBVCsomTDPpBC4nE+YtIgDpFKdPDKvwrsGUlsyv59rfbYwxnhl/tIHBf18aOt4X7Nhap8mpSqIKkJgSe1Cqhk0Wbvl2G2ctw1WMnCt4e8G+vr53M7ras1QCnIUH9HczJCA7rMFRPodzpzP7xnGgZilwffrV695enrH5z/9Mb/99S/5Z3/1V1zCwsOr1/zmF79gt3PEArv9Pafnd1vLl5XDE8fMJsFWwLxMjKMYo7TepbmClwZOlRJHYO99NfaQjJaztrYH0TVz5olRsYRMLopXr1/w/vk9TyeRNJ7PF96/fcunrz/j/btTlfvuSCHwu7dv2Y07xt3I+SxNuneHHfN8RoXI4+NjBb4jWomJzDKf0WrAWc3Lly85nydCzDw/HylFmrw/vn69kn2n04nT+YIfB8bdyDgOaKWkBYWzMs7UckZtsX7AOo8xGecGceBtgWLJIv2rrs1FOYwBbSCXxLJIQ23vB5yT3nGXy8SyzFDXM+cM3t1XUxDpVxeCkBzOWQyw30lgPM8XzucLu93I4+NLmVNasnXTNEn/t2HEOgcUrDNoM5LILNPmDBtzIoS4ylJzScRJMkdGKQbnwBi8sZSUoWaJlNbirKykV9gSIxCZ5kRYFrzzGC+EK2p7TmKODKaWdWgl5ixZMtrSm9QRKSyXM5fLpdaMSX81rRXT5VJNPwa8H9jv9rCDZYnEIO6LznruDwdiEdI0xiBjuUzM3mNqYP7w+EhakrjVGsUSFmKstWGlyolVbakyOPygyFnWLBAnYFsDajUMa2a4FKlZviwTqWxZOmcdgx/Y7XarQkophdLVHK3UzGOo/Tlj3OqgVVNpJHSVuNUDSNyCgL8Ypb2HNtV1Uy0y9rWG21ld11ikvYq2pJxIIa97UlMIAFwul1UloOo6KWTBpnARdcdGqm8Z1Ixz/qrXq/d+XaubvL+tj81oRdZ+UTI1N2BVs0MNtGGuidhG0NyS76pmlJT6UKnWZ68aENpA37YvKq5jmlI0SpUP3CO3U7kmQHvy/xa89aS24I0P+8Xeyinl+TNAuYo12jW1vW2Lnyr2YAOlTSX/h17fDNDGtWSQmzq1LfjTGFrAVJAeYJWVVtt/i671Wkokn79mEtpEFFv9xG2gpNlYgQYU2zPwwbjeADRgbQDcvq8/9lWwXKhlzps9uNZGmFE34ocR4wxzjBSlGA57jDFM08QySZHt+hDdHP/rAvkPzmH9XSbF+vtOptUmqUHVVPFmJkCRgmetDdpYCluvq9yyR4KE1yBsHbN629WV3PL6Qbr92e3vG1uxgZS0Ar4mS2uvlLaNsT0c/aKxZno6ANJnB9rD1j6b6qLbDfKVo177+2PZpaufIUufWNhrllL48vjMkiYMkZfnI+PDI+/OEyqzOhveZmf675D/lj99ge3HwKbIRcq6wN4eq2e/epDbj1N73WZMeinmLaFwe38HX13CatbZW8OyTISwQEniRGgMpdQ+NpVAMMYzOi/ub0XsXBRwPD5TULhhWIFEu47GPEqgIc040VtWMoSw3ftaE7JulKZq6ldCSRjrQmaeJnGI1Vtz6Dac/Zj1wLaXDcK1QUz7XUoi8SplI5LWuddifgW2FJyTHkbPz0dO5wvf/u73sOPIuNvz/umZn//9z/nk09dYY5gvZ+IyY7UYHTlnMdp1oLXNl1u5JjTms5c1t/ENIazPYcss9pti//62IWqt10J/Ovl3qW1PUgwsi2R5cmlzi6pAqCQTUmu3xJnWf6+ZOLTvaHOwjef1JisgUJcsTYDrpp1rxrXU+apNpOjA8elLyRC5A0sI3D/ccdgPvP3V7/mbv/khf/FXf8nD4yN/9L3v8+aLL0l5ATR+HDk+q8piy71LWYgDsiIpsFYmzjxfKCUxDgJKlNKVIReCT+uNpDLGMHjLUAMxVTaw39rI5Oqst8RILpoQJj55/QmXeeZyCXz6ybcpGd6+fc/d3T37vWEcDpQsMtz9fi+AbT+CDRQST+/fkZL0rBu8w2jF09MT87JI9mxJWDOglfQLM9oQcgKjGYYBpUTabLxlr6sTX62tLCjmOfF8PDIMnv1uh9/t0Whi2dpKSBZBen9JA2dp1yKEiphyxJykoSeaVCJLCNVwJ0FR7HaDtAAoIh0MC+x2I6N3LGGhpEiYJ8gRbw/sDweMVoQlVimxZH+bUqNJsSUzLW1fmkx/HAe0NkwhcDw+iyQ1DRhj2B0OFXwUzpcjISScsdzv79BI+xmtNFYbipa69GY+orRkCkPKnC/PDNay2w0463DGcD5fOF8uIjc0RhpW13ohaXyuhMwJAjRNzrUOy4LaatTb8+OcJVZnzWmacNU0pq2rYYnrmpBrJklr2B9kHpFl3RTgN0ojcmrGOyKNqnXdN5QWkjhExlGIur27g7zIvM6xfmdbv/QqC9Q1u6SK2jJolayz1uKMw9ca6NbDsbU68d7LnrEszNMk12+3MVBKMiVNuWOMpShqD9zaLzTVdc4LkeB0Mx+Jay8zbTTamVoDty6/V6qo/X5PCLIWtvWzgTYBYlsmbJPbF1r5SiO6GhBspHer0++VNqkDhrAR9tKYvgKdun4preva/WHM28cTimaW1JulbOTZddyuV3CTV8KqdDFrW7cFA+hyTXbexl/t1a/7utvHbxNIwGr41I51G9Osf9+Yq/REbX8Pm8qvB9V9xvIPvb4hoE1e/U26DebkJbraQv4g2Pkgi9AFsv2EWGoT3CsAk/OGzrpzuUXq8l4BboIxNhDXvnP9vPxgDSQUrJOpXduWeRI3HqWrXr1mPJz3DPsdylpCSTitGQ979vd3cox375mXhVQke9fklP3Y9RPq6+RYpYid9XodWn0w6YEaoMpQXafTexefzmWRQmlgoLEz9Ty3cWo1YKzBUAvqWlDdruP2/rXradK4LRhsdTDlg/R7Y/Q/yFDkQsjhAwDXMyrQHtztoW5ZglYUSxHxmP7I/LkF1f3PZH4oVNHEoogUvjqdOM3gdWYicxcgYBncSCpbrVDLcPRz/Rrwflhbd5txaBr/Pg+8AvoOUNye962soi1oTXrRv7dfQG0HOtefA9ZoSrGMw0BOkXmeJJDTCr/foVT
V+qtCzq6ygDCOe8ZhxzwvKGUqs7mgtSJlYVj7a25jtjkGth5xsoEty7Lah5dSpHg/9ZtgvfZu3s/zXDXqar32Jtks5bo5d5tLbaPsXRRvwX4p11r/Rhz0i3upTIIxlmEcq6V2YbpMDMPIw+MrslY8fvIp/+f/8a9Aa+7v7zFKsldpCeJIlzKYlmXeNhZj5L+bS5fSoIqoH1ogAFsh+e18b8xvP9b9Gt4IFgnUr8fp43O6rUttL5AGyCXLerSSbSvhEDHGrwFKD+D69UXem5DGu02yoqUmJRkKm4xbK0UmMl3ec5me2fsdSwrc3d3z4uGOt29HfvjD/8D//MX/zvf+yZ/x7T/6I7783e/4+ec/YTd6lovU60ynZ+kBRK4F/lKzZOyWodztdqAK484z+pEUC9b6Kv9Oq5S31HYNxgjppxXElMWau/ZwnZeFEBeyUiwx4Ycdj68eyMXw9t0T9/cvMRrevP+Sw/6OUhLzPPHJJ59wuBu5XM5oXXj5eM/Dy3v84DDO8PnnP+P5/RP60eHdru6BZ5weCXPh/dOZ3U7kmYXE/v6eaZp4fj4yxFCd0wz7w44UI+fTiWma8N6t2amM1F4tIbGEwODF4n05XjjPE95GvK9N7d3I6XQixEjByX5sZJVDSQbWuR0pSTZmmSZiTDh7zzjuyLngva1B9cIlTFIzNjggEUPkcj6SU1glaU3yNk0TfvBobXFOTE9CECfIcXSMowel1gzywe8IaWEOC6lErBlQWXpAtvUohABW7Ox1bfHgrMVoQ6TJ5SSuSKUw7nb4ceB8HlkuZzG+yYFSyQDpEagY9yPaiNR8Xi7MYYEi9u2l/i+mhNGaYb+XbNXpxDRdpNl6JcRszQrLslGIyyJ7j1bsxhGlRFkwTxe5FlUYRy/A1Rj8bmS6zJynhSVpvPVY7dnvLUNOa/3PEhesl3XOWiM9956PDKbgXe8gmiBJ3NH2aT94xv1ILpGYZM1e5pmcEsenJ+mjaa2QASmyAKE+g955iV+0XoG4SB1lLfJePgfizpjzmhfCWo+2rma3ROK6UBi8w1tHofZxq8RYLqKkUdZKvXK5lpfP87ySTj0B2EhJVVmgHlxovdWzw3XNXwNvq/y/i2V1XZsbSEcJkaCrPbr0FZZew7rGxu0arl51zyqIqdCtKu5jpLNaNzp1tR/KeFzHKqWUig0+VK60f/fxRr/f3u65/X7c1Bt9Zu52jNbvYIsP+2vq46L2PTLWste6atAnMtflg+P2r28EaOvh0m1Q2/69Zg1ug5UucOXmOP2GfIuQ1fogV+b8JpP2ta8/8PuPZbVuz+e2Tqtp362SgNVog7ceP4zYwWOHAZxhbx27uwN+PxBVIS4B7WoTyazIpSth7Cbg14/l9fgJl/lhNujqMzcMQCl1gabpfjNb6kxXVrrVj3UmM+28EG5BUWuXOmAVY1zT8j34vJVZ3Y5zY/RTgnmWyd83lpb3V4BdB0zprwe2fQZgu3fVAKKBtpyxWn8w3h8D/re/v/p5lfrFkshWeuXMMfKwcywpM4eEtl4cuZI0he3P65bouAXU7Tp6MNbe31LzPXDuAfStvO/rvrMds79+wRNtdm6fbxbE/QJtlGSZyZnj8Ynz+UTOEWfFpcoaw37cYZ3iMp1QKqONrexoxg+WnArH45nT6Yy1jt1uR1Eiyewt/Tc9f8tIss69Usq6kTVg2INUrTXWDV0/oZotKmWtp2vAUK75+p63DbjfRG/n9e1z2gIF+Z2WXn7yg/V92kgzT5TidBQp04tXnzDu9jy8fOR4PvH3P/97Xrx8yTiMte9UQpVMXGaMlpoLpSTwMB0YkCkv8hNT16p2LY3saOvqx8B8e55asNHe2z8nMUpjbJFPba0X2jGaE+h6zCpxDiHg7IC1juYy5pxHnAXLCprb/W3BjpBAnZpA1fodpVeVgFbVfRJqkCHZNqXA6kIugfP5ifHhdf1s4fWrR968/T3/8MWX/O2P/pbv/9lf8Mmnn/En/+QH/PKXv5DG4UoMceI8STaMTFGpOl2KiQVFjDu0Hpjm4wpox1FkaMZoQtyYZulXZrmczqQQVmt9kexK9nIJM7EEtNVCTOjC5e1CjIqXL18xLQvv3v6O3U5s3I+nZwY/cDoNGKMYRoNSifP5PUUFtHEMu5HHx0/YjXecTmfmc+TucCBFTUmWeZmhWE7HizjrecdSgZrXEmQvy0IqAg7ishCnWTI/zlGyAEdtrTSrTlJXanJGhYzWRoBtgWmemZcJ7zwxBoyVfmEog8qGZkB1uRwpWRwrtVI4b/GDI+XA+RIJU2BeZs7nC1DY73Y8PNxz2O847HfM86Va+heMAj84FAJenDHkEIksBG3JubAfd6AdSlcJOIUYFgGKw479bqzrUg2OnROiKCWMd0IKJQnInXOkIo7SOWVSSeLsqgslJEKM/P6rr4Ss0JDCwhLkHkiTYM3+sBdJqLYczxcZn3VtF5lkyFuWYZ4nUgwMzrPfSzbVWVsleWl9xkoupNQAgOwJ0aVVOjgOQzXWCaQUeT7OgMJ4X/ukGUJKhOWMzllqlAeHs55CJqTWA03kmGM1qEnTmePxyLKICc847hjHUazzq0oiLKGOlZyXZK5r24MpsMwLYV7WMdDe4es+YKupkYC6uk82U56c13tnjJV4Jkmz9pTLJp+v7rDaIPd+CVKjqGU/bDFYKVJbWErEYtef3+6317GvuICWlZjSm5IgtcbikoFGyf7WnDE3e/tNRbMCwCLSzgbEtHEUFMZklDFr7+Nc98E+dmjxRn++8o8t+dKD0Vvjsw20bdeoKkHZIg0BYPKenAroLZb8WIlGH6vcyjL7WHO9jlxoDeevfn4T11H3hHKjFLzFIu3lrKPUBIgfPNaaShj+l1DTBqAq019a9qXd3IrIaRNWXIl0ZW1Sro6L632tEjwtMsP15tW3aGPRulQJQRJgoYQeKC0V3h6Q9Rw6vKLqb6vRS8MypWRJpVWZnzQjrMWmutVVbcx+Y3ABDJJSdt5XPfiBcbfn/uULhv0eN45EYA6BkGdiODNNE3M1pCg5Y9Cb61k7VZqzGBt4qvnX9ZyUgBZrNplQqhpuXQPOgtkyMbk+bDXCL22AbhYTasahtzXN3Vi285S8VP0FZQOcNaO1gUUBlc0tTcazBrwlEUvZpAxy84ih2a2bGpxnSjFI082yNd2uc6Y2qF8f9Fvr9asAUyEL9u1iVEdMqU1y+nUM0hWbVKBUu3KlFd7pNfPqd3vMuCMpCajnOYBtVsam+/4eRFUJgVatlZVkSRrwUqpKafTaHLxJK4QbaZlI+c6WKW0LndYyx1vNj1LVuMN0krbSGZu0zaoysq13n9ZbU87VsXFZeP/+PfN8qQYGO7FPVm0RVoSQSQm0HupaQA0CMqejNEd+eLhHLKP12rx0dY/0tb9MlVKmkqq9fG11ULP9MQRSiLXeUN4nj7/CaDExKjmJAUlKGGtWZrevjSuVbSq5NeDM2Aoc1mxQaVlEuiekOTia1WGxAUhV7y0gvSa1wbsRZz0pwfky4YaRTz79lN3B8+knj/w///7f8/T0xO
uXf4x3nsv5mWWZiSEyTRes0aj6LK21vmoDnaUo5EFpmcS4Mpt9rd4tOSXXtBlANdbROScAqW2cSYJP6emYVlJlW0M11tmquFgXN6mFVAljEtIcXuoQZSNP9fxLBXEioRciSfqfGa3qPJbvU0pBlkDH1A3aWlezKbWFuVKg5TPhcoF4waiRyyXg/T0vXz7ym69+ww//zb/iX/4P/5LHP/oe34qBz378E371889x3pKVRluNmgOWLMoErUBFSlYopO8ZKnF/d6jBH3ivMbagSsKQWealuruJLG+epnrOegXAqji8GUglUZS0CEglYIpjN47kbKtBhybFA7vR8vbNW0oB7/Y8vXvLy5cvePHqFWFZePf8jqfnZ/74u98hz2diiKRl5unNG5Yl8vj4ipQLKUHMBjce+OqLL3HW8PqT1yx5YfQjpd7rOM2kknC7kdE4/P0ohIz3PB2fifPCi90d2ooMX2lDCAun45F5mtBaMXipgRur+ce48zVwFdleVtI0ffAG5yO5moOk2jNNK835Is3Gx/HAOB443EntlbVa+hjGiPcD1u4oJZJLlAblOUl/s2GkKEPIC8RAuRzJFXwMuxHrR7z3YpWuNDFeePv2DbvdfgUIOSWW88zlfBZZ9DDgx0Eya9asMr2QopSGyIJfidCMLglTCqfjiZQjh92O3Xi/khjH85lSFONOJMfzEhgGebagtuIJ0oOypMTgPW64ExCopBYrhSBET6cUiLWlirjoyt4qdVyxtj4oaOMlG2YcubQGzbE6WIrkUBwPC5ejZMSMdwyDtPYYDwNLWLhMM8u8VJBmGJ1FD3s0gZgyx0vE6FoPprRkanPmcjqSS8JZJ/WOyqC1ZRgkc9li0JgqWKFUg5UkdaLa1JIUQ2vGK2qMwDLn6qIpe5xTGmJC6ZqxV62cIzGOI1Eb5ulCTFFiuFrLGnOU57k65DYCdoUqFUykahHf1B/KGpyyEIIoAnLC+QFB7tU0RCtKCvX6qvmdNhWAFzGhUuIg3JyXqTF5WAIQhNCzFuMcShnpGVq2eCCtcYTd9rZeoUReXcoLrOVMm1R8ixDbv9q2uAK5zv2yUCCX9qMPANYtEFs/s/6sA4nVuZOyYY5c96IW7677Wksk3WST1kxefZ+zdo2TQOLbFsvmlMhTWvfYf+z1DQFtCt3V3LSsVytEL6UZdOiGBZAAVVjumGtTu1rH0qZCruCiNYym1pjlnClKvtNqBUmTciSHSFMEKaC0+itFBRMSKEhNV32TqjUO7cGqE6phSGXU1pxQFVJlrLWR9L/3HucHrPYMozi+7e/u5WfeC7OYM+dpZpoDKc0sy4nz8ch8PovjpCia1pveJopuyBJhhErJ9Vz6rIlcxxKlMFYbjVNO+tPkQkzy+wbgUo6rHLMFTVs2RUa/VJBFyfVB3tib1Y2SbWK3CdweBlWDK12Hs9mtNmlmbui+tAc6k+sckFuVaiYiEqNIVbasBxjj1vPZsixNFnUt4+szCCsY0TK7LlN14KpNQWUoVTdXAL2B9n7x6FmfBkRyBWF3h5H93cgwOJTONQOmWWKS3ia6NgNXck9a0NxAWs/UyGjWhZGe26jnUc9xXZCauq2hfapBilZr41+llDR1NXp92lrWqo2zMHjV7l7bapst9sPWCPixRvr15RQxpmCNFK7P04kYJsbBYQ/j1b2b55nT6VKdGUUK09zenHXEcKkg3VdDEigYpiWurQycFelYSsJCGwMlyNiSRIoJkEIiLou4hmlbJ6M8C84Ky6wVTJNIeH21++9BCXV+NtOdlFvrCYu2BhprqDQaI6Y9OQG6mxuSDYgxVcKqtt9Ynxm5k86MDP7A4HacTxNPzye+893vcfdwz8uHA6pEfvbTv+P+7sDD3T3G2Nr7qJIdjYhaC7yhafRzMySpm2UuWgrkazZK5p5b5wMVfDY5pdYKa0VO2cBSk4YsYbPdF8Kt0Go7SgWyKScSaZUAueoM1zZ+Z21tyLtJmsMs7LrUsgjJICAIKKkGJ4pCohXwU0SmkorIalECjEtsDnYb4xtTRKmEMpaiZ+bnJ+4eBpYlosYddw+PfPryFe9+8//ym5/9La/+/E8ZP/2EP/vzf84X//AbtEm4MWAJ5Lhgs4bBc04Bq6TZsEJqsoyxDEPNNMSZmCQbYJUhL5FlDuIum6pzaczMy4wbLNYZWUORtiLKSCsAY8Udc7ffY60nJUNYJqwC7w3vnt+hjeZud2C+XDifLgzO80Y9cb5cMN4Tl8j7N7/nsB94//69EIglcXx+R4gzD4+fYIc9OitKSngvjbVLzlil0aXgvGeaLpASJQb8/oDRmp0XN9CSIM0Rpx1OWZyVvmqFgtKWicL9fscwCmFha8NoozRLLGhdQQGFeZF9a6fk2btME2GZGZzDOo3WlnHYE7QQeyEuDMPA7uDIORKDNKBGOVksVc2uOE+oWaY5Sp85O7RmzQvkxLQE3j0/4/cv2B92zMuF0/GZUiIxBs7HM3eHO5RWsqbEQMkZPw4MNQu1qoRyJuYohhp1jc6VaFUpQJwxJXAYLEvQhFnq0Xa7gZAmspasoHG7SmJLLZc1si6GJVBSZLQW7RxWa8EntRF5c1eMMRNTBidSv+Hl/doLM+fA5TLVc97WkZggZo13BmcHhsELCEzSc8/oiNMFpy3q/k6acs9nzuGZcXTsdgPGWg6HPSlmYpQ9LqhC0grlHV4ZUqhSs5SJSjK4xiq8c8RYs2TU2ndlKUZUEhIjLugI8yJyzgVxlyy5SEYXkT/2BK8qEsvIvlbIOSA11zJvClKPnVJmOl9Web5xbiXzGkctJU+tXKZlrKo6LCZKEdVYa5MUY6AYVWMfUEYkd6oqBlhLPmSLd8MoBGmUrGwpzdxJzICscyvRGWpLBpFu59pfUJFDIoWFQvU0UFraO2kh/oTe78hvGkCqfdcodf3WtY1Gi7t68FMl9K358BXQ25wqi5Kkiblxnu+dhuGaXA8psCmvKsnekh+qyeWp2EP2wuYenLuYdnupihAUqipUaEkjWnKnlWFIXK5VO7faA/FrMnj96xsC2liL1o0xa8PoNoM/HJzt4m5Tr+u/O9R7KzOCTcalFNWCXoDUCphX4NNL3ESzK0HYNnmoGSDV9ZOQflsiI5JsW7NbttJzxNqa2t/h3IixHlcLxwtIH5/5AosixMRlmpinmZQnYrowT9JzKseIq9IdVRN9/bWqzn50y8Op9Y9qf6vGkNd+UqX14ikUldG6FucrmXCZDyfXKoVq41wLcIsu6+8/JmssFZDRQHudB70sr//TpMQfk5C1l9TUZcgSOLbAv0msmjwKNmkAbI0bt++6NoQwRoq+c8nSfLVu1KqBozXDqFCFzRGyXM/HD89bFm1nNIN3jNXBSRv5fUzihNdaDNCBy4/JD64MQ0w/h+VnKSlSPRetdQWBDTCrik8ae1vqLKl1YMagrLCyvV1wA7h9M2vnHLpoQlhq4G4pJHSWexzCjFIC+o7Pz7z56i3v37+X2pb9fgXcTcbRtPzDMNJ6cIEYETRTkcNhA0+pkhXzLEXuh8MBZ8WsJMZal5Rl0
2vFwRL4BlLaFn4qE9fqrloWKlQJX+uTA9va0tdsNYawFSK3+b2BFdUmbt00NnlgL+GIMVanTAnciiqIYQe1KbVHGc3z8xHnPS8fX7Lb73h48cBPf/pTjk9PfP9732e323G5nGjui62NQpvvfRH7dS1gY0uh74nZMoqtHlkymrobk+3zm3Smgra8EEOspg0WqrlUL2tuY9TWhHaefZ3cbTuB/n26Zt1UXZO3THC71rReD50rWbvu1gJku1eqvjehSyLPhud3C94X/GGi6Jm7wwu++63/mh/+8F/zN//x/+IH/8tfc3//kj/9iz/nx3/7I969+Q07v6cQSDGQ5pmYMwaD1dLbbHUVozBNYgozzxOF2ksvFXIQAw1xN90Y6HGUrJoC9rsdqli82aEdaF0wVtYx5yTzsSyFFKU+9+XLl6AVukCYFsISePHiBfM883Q8V3ntwKLhyzdf8vadzFshIT2Hw4GQC5fLGY8GLS6H+8OOkqSpspgyaMIS0Epzd7gnpYQ1Xuqea0A2zRMhRHb7Qw1oC0sQq3kxQSzcPxzY7fZi5FQ05/NJAs4lULJifxhIWVz6rHUYK6TXbr/HGs0yTVwuFwY3Mo47htGwpDNhCoSY8G6PYofRGaVFnh6i1IgNesToodYmSU+/FAOXaRJSI86QpR3A/f09uB1iJOOxznE+XTBG8/BwzziMxJjw1jOdTlxq+4L7+wPKGJYl0AoOcjdHS6lSZiOSb1D47QAAIABJREFU1/fv3knz6HFHzoppDgzsSLng3SCtb7JEACEnpsuF8/HI/f1BmqGPHq131a6+mjYtYvbS4iXnRRYbYyTFyPEipiVayzzwfo+2biV0dVUDnM6ReQlMS6BkJ3tcff4MqhJHknUa7MCwG1EqMc9n5uVEQbJkSlmUknhKtXUkFXLMaGcxlUQrORHmC/M8YaJiGJ00uA/i+hiWmTk00AhKt2x8jXka6VozJy1W9X7A+518v2okZgshtzh0CZKplbWoYLVmt9t1Rk+sMU+uAGEYBjEamSU77JyqvWzF4KWZhbSYVGNI4tW9yrk3tUPdm5UmVkXIkuJK8G77NWsNZSSuCqRUEs3hXHjaJoOU2G0z/Rdn8xajaKXEEd1I6ydV2wSknEiJSsyb9btLjV+vZI0g5AWsZSztXqA22SFli7/WrG+3f/avFdAptcVtbY7Wtb3Bhxa2tb3nlsj/4HUT5/X1hms8piQ7J9/ff6wRsH844/aNAW39jWoo1tRJ2V9sVttG+nXFgMAq72qbc/93C0raMVawuE6ALTWqrwZQQJAYkNSMRmXKlZJ0uLUe6yxGK0znsKi1wRqLMXa98dYNlXF3kmkki71tkl5mrWlsTImQxDq3IH1kmgxBpVKdG6s8qQbmRSiCmmlpC0jNHlYLZJH5CRg1RtyjpOak+q0og7E1W1PWHI0AuhtG4PbBAD6Y1LdsxxV46SZ2e8+aYu50yc31qO/l18azd/1ZH0y72da23znnKmAe17qlPuu1bjBqy641wGaMAS3jaK3FaE2q8lQZFwE3TVqba8axVBZNm63fYJtTNfWF1ko2wkHmRe0gjNIaqxq4rsYXlO485fPteBK8pvWYWrem1dftD3oCo2W9VGWrVM1Kq7yNpVYa25rQWgtGd/Nbr4C41aesvfFUq18T0BWTmMlcLhdSEvOXt2/f8fT0RJiX9R60e5ZzXsFb3ypAa808z2uw3py0WiarTivmJRJjqkSJx1lLDMKC5hSYprmTmOgqn5VnrxEVkknXqzVyoVx9VwOJ/TxfAX+9Rz0I6ud/D7xVzSw1omG7x60/WbXC1hbbwDsKbb2YNVjL8Xhimmc+/da32e/27A8HXr585Ec/+pGYlIwDSteGstaQomy4toJRVQPhenipXzOmumnGdT3Tylxl0WVT2xpCa013vbJ+GGOrecm2Fuhqx26rLKqw9VVs49Sv3WswwlZz2bduaGPfnueWhYa6buVCKbUesBo4SK25FrlWzitIh0bM0N0rec5EKlmQlieR8/k9d/ML9i9eEZeIt4pxb9kddnz+s1/y1T/8iv/2X3yHIX7Gn/6zv+A//OvfY0lkZcmDNBpOc0QXTcbU7yzM00IuUZ4VA8syo5TUb2il0Ji6NhZUNbBSRXpnzfOE0kWcJLES7GpAyb2KVMODGr7oupRYP/DZ6894/+49Xz19hTNC0Lx580bWPvcaraRWpwx3PD+9QykYckKphFIWZzXH4wkbIi8eX6GU1JymKLU+l+lMCAIGnXPs9wfm+gyfjmcoJ169esXh/gHjB3Q1+5jjzPPxGV2D36Eag+QciYnaxLmBXcWyRMbdiLNyDOsMyzwzTWcgV+dmViJP5phj7xzOLMS0kIqAQONlns81u2m0YRyk2XbKqboiaryTWqjBW3KU/nAtM6NyJixRTD+mCyWlWl84E6vaIgSpSfJeMh45tdqfyOk8QS7s/K6T52dE5ifPcN970huPH8A5LzVkT0eGYVdragrOGLy1PF9OnE8ZzYhzUnc4L+lKqVKgazGxERrzIuuptV5MYGosNY6HKqFO8gyWgveOlCHnJNlea6G0ulW5lhAyOkWKjhLk50AhCKlcxIgjxZlSFFZ7USU5qVEzo5DjYVlYplkygN4wXRTLMrPUvofee5xTdWyhlETKkRJzrQcTF01jWJ+vnIvs+ZUMbJl8a704Z69xbC2P0YWxSg9DWFhCIM4i3zTWMPqhXnOpcZcozVQBby1G7ZHar0So66vWCjeIqiGl0hDHqqtqZI8CyeCt8nyFVQrlHDEt69rcQl5Z9w05i8tyyglFrlLGFjfU45WmthIpe9FZapFLZq4ybW0N1ll88hhfn62yKXdafN5IB3EVFVJe1fNSKIzXaLYYvvVxbcdqJlwFyI1UUGLmVKBzWFYbmFQKVZrSAlalHC3ZgdRsq0b6pzWbqJRaDcdamqLtRa0Mor1uM2cbaJOsaPvVFgPUBfgPvL4xoO0q0MllnUwrAJF/1YzAlq25DT43QFBzST0av3m1QeqZlFb31aSZpgMWkmQTNkakhBZqQ0nRInd/a4XRWQIUVD2mSKwE8ChUbYpJnNB6EZ6k5DXAWII0n5Rg3+C0pL1LEc27KoLWS5eNVCsI6JiJUsdI1wASvU7ykmsgQu/ko6t1f3XoUfLwS8JbgN6ayaxsBW1yqu3fNQdwtcDfBt7bpM5rEet2b66zSN1suTrWWhuj9eqI1F7S3+PavKSZIcQYGYah/vHXzHwFIb3hRgP2KChKsj/eOpa0VJKgjk03/o39KaV042vY8GydqAgbNXgx1WhAgSoh0NYKKyXDW4PMrQF0AQH1RTYVW6UXzspcFGcvyQavzo5Fnqm17syqFVCu9WvrvFLr86SUolS77hWIVJBn6rnYajQiQb3UA5WSCTHU+9Ka3Gfevn0n9VQVaHnvhdGsfb56bXg7N2HSFi7TRMkF513tPbPU8Zfs2TwvJJSYhhhpsh2WhaW6SS7zxDRd6hhI37Yt81pZTN1Ys80GOefEXJ1MvfdX86sH+qWIvj/lzSmyJxja+9tLq1ojl67nYU8o
beSFgHdjpOh+txMG//n5HdY7Xr58wf39PX/y/e/z089/wq9/9Wt+8IMfiPlFXIjLRFKZGANK10x8afdc6gtVy/ZIBAAr+1zlyjXL18tQ+ud1y3ptvdisNfXZFVmzZMIE2MUqT82pBURb9r8UAZbtu8oKyLbgqT+PlkkXu/cKbpUVJ76Ya/BZgamt+8p67mXNhkr2x63BqpwLpITIZrWYJ+R0YlmeUPkzcnTgCru7zHe++xmff/4Fn//Hn/CXf/kvGF/u+af/zT/n53/3nzj9/tdo5VDjKHWSecJEAY6plNr3T/YFbSyKzOB9tTXPGO/RxosiJEtNkDW2NqS30rQ4RVTOWKdraUskFcnapRykHjNfWJbEPAvwOaQXaOOIIfLixUsUivdP79FGc7i/5+3br8glc//iJdaN4O5qw2apJVdapNBqmpnOJ8ZBnpHT5YJGZPiqqqufnp8ZhpG7u0PNWhhigsvlzJt370HB3d2d7Jtrlivgh0HqHnPgfF6qucjC8fnE4XDPq1ev18bpuQX+JZGTknYAOWGtZjfuKmFRasNeBUWxLGKeIfLZZa1L0sCoxRgjJ01OiiUuNXgVUrQUxfPzM9PljLeaxxcP0njbjCg7EvNCShpnNMZb7g57Bj/UDJJGo5lzwitqywoJnnPOXC5ncsyMbqh1zbJIpSRSSVv74X3y+jXODyjjKWgul5nz+UxOicPesN/tiSEQQ2AcHGFxQCRnqRMLKZKyOAUOgyg/xNxLJG5SUx8roSSOmK5m1nIRGd80LbQMtnDhGWsdw2hRymMQEByWpdruS9bKWS119rV+LIZALnL/rBPyzDtDDJl5msXR1Bmck357znr5TEpMl0DJARC3ymYhL6UZEkBbq/H+ToxDUlwBc/+qyR1A2juIQZWoOC7TBEaAm2u9upIQEBSx9I9JsqS6AqwUpKF72wOGYdjaDbS+okg2ualKGnjTRUClq7L5ZZaWOKYCMbsCg1L3NHE2dV6yu0KCSOzYAL6sm9Kcu1QQjVKoXEGs0VXarNhq15A4Yv1vKVExRszElstECgGXvICotjY34tqs9U7bfqHWaANQUhJS0w/UuEdrva3VFWRJTL71gG17QZ/NLD0BmLpSHdr+rdcWD7aWBMXaoL31sZXJoNZzhRoftzh8VZV8iDm2JEVNTskFrVjk4/Hu9eubAdrKZoyR6wSRwIc147WlLm8/ep09W00+OtDWZ3auJEv1Z6JFphbf14Gt2Ya+mbai2dYLo6mNBWUkg1Eqz1H0mtGQmyEPT06ZWOLV+SQjN6zVkwj7UAs2c60xqdm6wVbTCQXLLMWLwgZILrBN1KKaHEvBaj0q39MHh6U0TbZ8n7atFqoCmyw1H6BrFrBNppqFbGNHr/+9DtbkIflw8vbB7fYBdRWY98fp73FjvPrfAStoa05E7Tjl5njtXFuA1x7sEOIq8+rrs/o+by2ATlncp6zqmaq8MSgU6bHSHmB1bdfbH7efWyWnes8ypUgQ2+QZAqJYdeZN/9wyaVLovp37Cjq1bIANLKxZNfly0czXcWgWzzlvi14DSCKh1NXZD6YQIafK7lUZbm5ZItmoVCmkJL0FY9p6nq3jmBLv3r0nxsjDw8t1zKdpWgF0CFIk30BLk4VoLZmIEKrjVZWDhNqEda6uXMYY/Ch24Q2QNokLQFziStzcOmhRNtndtgEEYgWe/Zp123esAeMmgUmlz/59CG7aXNBa1Sz7lsHrjyvzodWtSgZJadDaUoriPE1My8Jnn32H/eHAsBsZ/MD//W/+ncixjBxzOh1BFZZ5JoWZkoRlhoLpnLJUrlLwahLTGtcKgZXrHG0yEAkugDWD3b/aGtETIj0Qqk+8gLkkjaONNrLGdcB1lZvoD2XHvRlKO4dVQmlEJhlzocSE9cN639esplKCxuidzPo2IS3jvclkUFI/XHQhp5m4BIzZEeLE/uB58fITYvw9n//o7/jV3/+Kb/9X3+flZ5/w3T/+Y378xW/IGYKGpDQxI3JHJfJuV0lBayzOG1IOOGck+5Miu90epapiQ0mNi9TeRKxWaO24TGdyXIhKJL8xBlKS3mSp1vKFBKVIy4SUCs/vn9FWssoPD3vev3/PeZp58eKenBPPx/d47zifjrjxBePuBSUFUpzRGpwTMPH48IK3z0+8ffOGh/s7tGlWChk/OLQ2vH37DlDc3z+sjmrjbof1jtPxyNPzM/u7O6lJqlmPcbdDacX75yfScuaw94QQSQnGUTJFp9MJUVaYlcwzRqNywlvD4O4keE+JSyVgnBuRHlcBpResG/D+Dq0tYRGiJiwTSoEfLGoQ58gSDafzQsgF6wcGL43rz8eBy/nE8XQmp4wfDF47vDFEowlakTF457i7u6PkTJilF5xSTZq41KbZss87Z1lS4Hh6rqFGJUK1QpdNMRJCJOaM8woqWaG15sXDawHUiHpCpN2aeXZcLjM5G8bRk9EoLSBa196A4lgqz3hzatzvBpwfhMAAQljWPaStCVLvKq+QY5WQRkqsRiHGikLJuG39rQYQ1hohVYsmxkUam6Pxgxa31MFjnZDO83RhmRceHl6w3+2xB0VKgRCm6tS5VHXH5uYogLhK1Usj/2Gexcis3QdZC6pksHM73OSjUt+XU64ErKuKKzDKoEwr09Bo50k5rYqU9sc5tza3bsRWrKoArStxaDU5S7ZaQHbBOIvKkpHSVd7XJIxG69U3QCE15PIECjkh1ybgaa2zM9I2R2aXqWqhWrElH6Oph1RV8qiiMW1trnugqlni/4+5N/m1bcvOvH6zWsXe+5x737vvRuEXYYWLCIdlZ8oYZzpJIVmkQAI68CdAJwWCJgjogIREA4kODYSUHYoWQikhIQStzFSaTGdmOGyc4XA4XKUj0hGO8r53T7H3XmvNisYYc611TpR0kLd0dO89dxdrzzWLMcb3je/LMWlxSPZmQbWN9EZKS732vBmZx9YJBl7AatzZipYri2pfyGxn+I4e2c74PbtljQ3lwPie2HBPe7TWrgyLtVBYN6PzPThgrNX+1bwxbJ4VcfdMsPX3u7i81g1J/GGPvxhJm+F7BqD1kRSVGvVeDi/n7JPnfl/ocf/WZj8o26C3f0vTJGvG3FCYjZrzPLDWiWoUOdG/G1txFfCGGgupVqzNPPVseGpCnGzWKnKlmqwHr3ym9VLp6YJQWqCyxJl5TkzXK2mOkCVwMQ4olUwWJM1ttCkZTyPm1Yg1gEx00wZbkzVNFmjJk5FclF3Su0Mk92O/Rzz3lK72nOcI1n4BrJvhTlmoTdyGxjz/jPa8PTq774lsgWDORZLq3WPfd7Wnw7S50HXdGhzuF/H+s2SzKDxer0zXKzln+k4EM0SJVKD1qoWHFjQ2FKfNs42eoEixfDMNikXx1BrZOKqRCmqjj1k9INv4yHfxegBvEunSyCyBTPtdMwltr2+BrfOWablon0sQNSylyzq9ppoVbbVuRY/2KORzn5eGfrZx3XuVNUrb6XTCOTGLb/elGVvP80wIQaSb62YHcX68EON2yLU54b3ner2uAUrfCxXwej1jrWUYDli
[... base64-encoded PNG image data omitted ...]\n", + "text/plain": [ + "<Figure size 1080x720 with 1 Axes>" + ] + }, + "metadata": { + "needs_background": "light", + "tags": [] + }, + "output_type": "display_data" + } + ], + "source": [ + "# Let's take a look at the dataset image\n", + "import mmcv\n", + "import matplotlib.pyplot as plt\n", + "\n", + "img = mmcv.imread('kitti_tiny/training/image_2/000073.jpeg')\n", + "plt.figure(figsize=(15, 10))\n", + "plt.imshow(mmcv.bgr2rgb(img))\n", + "plt.show()" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "PMZvtSIl71qi" + }, + "source": [ + "After downloading the data, we need to implement a function to convert the KITTI annotation format into the middle format. In this tutorial, we do the conversion in the **`load_annotations`** function of a newly implemented **`KittiTinyDataset`**.\n", + "\n", + "Let's take a look at the annotation txt file.\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "n7rwalnPd6e1", + "outputId": "539d4183-cae3-4485-f894-772c334b613f" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Pedestrian 0.00 0 -0.20 712.40 143.00 810.73 307.92 1.89 0.48 1.20 1.84 1.47 8.41 0.01\n" + ] + } + ], + "source": [ + "# Check the label of a single image\n", + "!cat kitti_tiny/training/label_2/000000.txt" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "QA1pFg-FeO3l" + }, + "source": [ + "According to KITTI's documentation, the first column indicates the class of the object, and the 5th to 8th columns indicate the bounding box. We need to read the annotations of each image and convert them into the middle format that MMDetection accepts, as shown below:\n", + "\n", + "```python\n", + "[\n", + " {\n", + " 'filename': 'a.jpg',\n", + " 'width': 1280,\n", + " 'height': 720,\n", + " 'ann': {\n", + " 'bboxes': (n, 4),\n", + " 'labels': (n, ),\n", + " 'bboxes_ignore': (k, 4), (optional field)\n", + " 'labels_ignore': (k, ) (optional field)\n", + " }\n", + " },\n", + " ...\n", + "]\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "id": "GdSaB2ad0EdX" + }, + "outputs": [], + "source": [ + "import copy\n", + "import os.path as osp\n", + "\n", + "import mmcv\n", + "import numpy as np\n", + "\n", + "from mmdet.datasets.builder import DATASETS\n", + "from mmdet.datasets.custom import CustomDataset\n", + "\n", + "@DATASETS.register_module()\n", + "class KittiTinyDataset(CustomDataset):\n", + "\n", + " CLASSES = ('Car', 'Pedestrian', 'Cyclist')\n", + "\n", + " def load_annotations(self, ann_file):\n", + " cat2label = {k: i for i, k in enumerate(self.CLASSES)}\n", + " # load image list from file\n", + " image_list = mmcv.list_from_file(self.ann_file)\n", + " \n", + " data_infos = []\n", + " # convert annotations to middle format\n", + " for image_id in image_list:\n", + " filename = f'{self.img_prefix}/{image_id}.jpeg'\n", + " image = mmcv.imread(filename)\n", + " height, width = image.shape[:2]\n", + " \n", + " data_info = dict(filename=f'{image_id}.jpeg', width=width, height=height)\n", + " \n", + " # load annotations\n", + " label_prefix = self.img_prefix.replace('image_2', 'label_2')\n", + " lines = mmcv.list_from_file(osp.join(label_prefix, f'{image_id}.txt'))\n", + " \n", + " content = [line.strip().split(' ') for line in lines]\n", + " # column 1 of each label line is the class name and columns 5-8 are the\n", + " # [x1, y1, x2, y2] box corners, as in the label line printed above\n", + " bbox_names = [x[0] for x in content]\n", + " bboxes = [[float(info) for info in x[4:8]] for x in content]\n", + " \n", + " gt_bboxes = []\n", + " gt_labels = []\n", + " gt_bboxes_ignore = []\n", + " gt_labels_ignore = []\n", + " \n", + " # filter 'DontCare' and other classes outside CLASSES\n", + " for bbox_name, bbox in zip(bbox_names, bboxes):\n", + " if bbox_name in cat2label:\n", + " gt_labels.append(cat2label[bbox_name])\n", + " gt_bboxes.append(bbox)\n", + " else:\n", + " gt_labels_ignore.append(-1)\n", + " gt_bboxes_ignore.append(bbox)\n", + "\n", + " data_anno = dict(\n", + " bboxes=np.array(gt_bboxes, dtype=np.float32).reshape(-1, 4),\n", + " labels=np.array(gt_labels, dtype=np.int64),\n", + " bboxes_ignore=np.array(gt_bboxes_ignore,\n", + " dtype=np.float32).reshape(-1, 4),\n", + " labels_ignore=np.array(gt_labels_ignore, dtype=np.int64))\n", + "\n", + " data_info.update(ann=data_anno)\n", + " data_infos.append(data_info)\n", + "\n", + " return data_infos" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "PwqJOpBe-bMj" + }, + "source": [ + "### Modify the config\n", + "\n", + "In the next step, we need to modify the config for the training.\n", + "To accelerate the process, we fine-tune a detector from a pre-trained detector." + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "id": "hamZrlnH-YDD" + }, + "outputs": [], + "source": [ + "from mmcv import Config\n", + "cfg = Config.fromfile('./configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py')" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "HntziLGq-92Z" + }, + "source": [ + "Given a config that trains a Faster R-CNN on the COCO dataset, we need to modify some values to use it for training a Faster R-CNN on the KITTI dataset." + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "pUbwD8uV0PR8", + "outputId": "43e76fd7-c74b-4ac8-c8b5-4d2cc94e610b" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Config:\n", + "model = dict(\n", + " type='FasterRCNN',\n", + " pretrained='open-mmlab://detectron2/resnet50_caffe',\n", + " backbone=dict(\n", + " type='ResNet',\n", + " depth=50,\n", + " num_stages=4,\n", + " out_indices=(0, 1, 2, 3),\n", + " frozen_stages=1,\n", + " norm_cfg=dict(type='BN', requires_grad=False),\n", + " norm_eval=True,\n", + " style='caffe'),\n", + " neck=dict(\n", + " type='FPN',\n", + " in_channels=[256, 512, 1024, 2048],\n", + " out_channels=256,\n", + " num_outs=5),\n", + " rpn_head=dict(\n", + " type='RPNHead',\n", + " in_channels=256,\n", + " feat_channels=256,\n", + " anchor_generator=dict(\n", + " type='AnchorGenerator',\n", + " scales=[8],\n", + " ratios=[0.5, 1.0, 2.0],\n", + " strides=[4, 8, 16, 32, 64]),\n", + " bbox_coder=dict(\n", + " type='DeltaXYWHBBoxCoder',\n", + " target_means=[0.0, 0.0, 0.0, 0.0],\n", + " target_stds=[1.0, 1.0, 1.0, 1.0]),\n", + " loss_cls=dict(\n", + " type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),\n", + " loss_bbox=dict(type='L1Loss', loss_weight=1.0)),\n", + " roi_head=dict(\n", + " type='StandardRoIHead',\n", + " bbox_roi_extractor=dict(\n", + " type='SingleRoIExtractor',\n", + " roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0),\n", + " out_channels=256,\n", + " featmap_strides=[4, 8, 16, 32]),\n", + " bbox_head=dict(\n", + " type='Shared2FCBBoxHead',\n", + " in_channels=256,\n", + " fc_out_channels=1024,\n", + " roi_feat_size=7,\n", + " num_classes=3,\n", + " bbox_coder=dict(\n", + " type='DeltaXYWHBBoxCoder',\n", + " target_means=[0.0, 0.0, 0.0, 0.0],\n", + " target_stds=[0.1, 0.1, 0.2, 0.2]),\n", + " reg_class_agnostic=False,\n", + " loss_cls=dict(\n", + " type='CrossEntropyLoss', use_sigmoid=False, 
loss_weight=1.0),\n", + " loss_bbox=dict(type='L1Loss', loss_weight=1.0))),\n", + " train_cfg=dict(\n", + " rpn=dict(\n", + " assigner=dict(\n", + " type='MaxIoUAssigner',\n", + " pos_iou_thr=0.7,\n", + " neg_iou_thr=0.3,\n", + " min_pos_iou=0.3,\n", + " match_low_quality=True,\n", + " ignore_iof_thr=-1),\n", + " sampler=dict(\n", + " type='RandomSampler',\n", + " num=256,\n", + " pos_fraction=0.5,\n", + " neg_pos_ub=-1,\n", + " add_gt_as_proposals=False),\n", + " allowed_border=-1,\n", + " pos_weight=-1,\n", + " debug=False),\n", + " rpn_proposal=dict(\n", + " nms_across_levels=False,\n", + " nms_pre=2000,\n", + " nms_post=1000,\n", + " max_num=1000,\n", + " nms_thr=0.7,\n", + " min_bbox_size=0),\n", + " rcnn=dict(\n", + " assigner=dict(\n", + " type='MaxIoUAssigner',\n", + " pos_iou_thr=0.5,\n", + " neg_iou_thr=0.5,\n", + " min_pos_iou=0.5,\n", + " match_low_quality=False,\n", + " ignore_iof_thr=-1),\n", + " sampler=dict(\n", + " type='RandomSampler',\n", + " num=512,\n", + " pos_fraction=0.25,\n", + " neg_pos_ub=-1,\n", + " add_gt_as_proposals=True),\n", + " pos_weight=-1,\n", + " debug=False)),\n", + " test_cfg=dict(\n", + " rpn=dict(\n", + " nms_across_levels=False,\n", + " nms_pre=1000,\n", + " nms_post=1000,\n", + " max_num=1000,\n", + " nms_thr=0.7,\n", + " min_bbox_size=0),\n", + " rcnn=dict(\n", + " score_thr=0.05,\n", + " nms=dict(type='nms', iou_threshold=0.5),\n", + " max_per_img=100)))\n", + "dataset_type = 'KittiTinyDataset'\n", + "data_root = 'kitti_tiny/'\n", + "img_norm_cfg = dict(\n", + " mean=[103.53, 116.28, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False)\n", + "train_pipeline = [\n", + " dict(type='LoadImageFromFile'),\n", + " dict(type='LoadAnnotations', with_bbox=True),\n", + " dict(\n", + " type='Resize',\n", + " img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736),\n", + " (1333, 768), (1333, 800)],\n", + " multiscale_mode='value',\n", + " keep_ratio=True),\n", + " dict(type='RandomFlip', flip_ratio=0.5),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[103.53, 116.28, 123.675],\n", + " std=[1.0, 1.0, 1.0],\n", + " to_rgb=False),\n", + " dict(type='Pad', size_divisor=32),\n", + " dict(type='DefaultFormatBundle'),\n", + " dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels'])\n", + "]\n", + "test_pipeline = [\n", + " dict(type='LoadImageFromFile'),\n", + " dict(\n", + " type='MultiScaleFlipAug',\n", + " img_scale=(1333, 800),\n", + " flip=False,\n", + " transforms=[\n", + " dict(type='Resize', keep_ratio=True),\n", + " dict(type='RandomFlip'),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[103.53, 116.28, 123.675],\n", + " std=[1.0, 1.0, 1.0],\n", + " to_rgb=False),\n", + " dict(type='Pad', size_divisor=32),\n", + " dict(type='ImageToTensor', keys=['img']),\n", + " dict(type='Collect', keys=['img'])\n", + " ])\n", + "]\n", + "data = dict(\n", + " samples_per_gpu=2,\n", + " workers_per_gpu=2,\n", + " train=dict(\n", + " type='KittiTinyDataset',\n", + " ann_file='train.txt',\n", + " img_prefix='training/image_2',\n", + " pipeline=[\n", + " dict(type='LoadImageFromFile'),\n", + " dict(type='LoadAnnotations', with_bbox=True),\n", + " dict(\n", + " type='Resize',\n", + " img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736),\n", + " (1333, 768), (1333, 800)],\n", + " multiscale_mode='value',\n", + " keep_ratio=True),\n", + " dict(type='RandomFlip', flip_ratio=0.5),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[103.53, 116.28, 123.675],\n", + " std=[1.0, 1.0, 1.0],\n", + " to_rgb=False),\n", + " dict(type='Pad', 
size_divisor=32),\n", + " dict(type='DefaultFormatBundle'),\n", + " dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels'])\n", + " ],\n", + " data_root='kitti_tiny/'),\n", + " val=dict(\n", + " type='KittiTinyDataset',\n", + " ann_file='val.txt',\n", + " img_prefix='training/image_2',\n", + " pipeline=[\n", + " dict(type='LoadImageFromFile'),\n", + " dict(\n", + " type='MultiScaleFlipAug',\n", + " img_scale=(1333, 800),\n", + " flip=False,\n", + " transforms=[\n", + " dict(type='Resize', keep_ratio=True),\n", + " dict(type='RandomFlip'),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[103.53, 116.28, 123.675],\n", + " std=[1.0, 1.0, 1.0],\n", + " to_rgb=False),\n", + " dict(type='Pad', size_divisor=32),\n", + " dict(type='ImageToTensor', keys=['img']),\n", + " dict(type='Collect', keys=['img'])\n", + " ])\n", + " ],\n", + " data_root='kitti_tiny/'),\n", + " test=dict(\n", + " type='KittiTinyDataset',\n", + " ann_file='train.txt',\n", + " img_prefix='training/image_2',\n", + " pipeline=[\n", + " dict(type='LoadImageFromFile'),\n", + " dict(\n", + " type='MultiScaleFlipAug',\n", + " img_scale=(1333, 800),\n", + " flip=False,\n", + " transforms=[\n", + " dict(type='Resize', keep_ratio=True),\n", + " dict(type='RandomFlip'),\n", + " dict(\n", + " type='Normalize',\n", + " mean=[103.53, 116.28, 123.675],\n", + " std=[1.0, 1.0, 1.0],\n", + " to_rgb=False),\n", + " dict(type='Pad', size_divisor=32),\n", + " dict(type='ImageToTensor', keys=['img']),\n", + " dict(type='Collect', keys=['img'])\n", + " ])\n", + " ],\n", + " data_root='kitti_tiny/'))\n", + "evaluation = dict(interval=12, metric='mAP')\n", + "optimizer = dict(type='SGD', lr=0.0025, momentum=0.9, weight_decay=0.0001)\n", + "optimizer_config = dict(grad_clip=None)\n", + "lr_config = dict(\n", + " policy='step',\n", + " warmup=None,\n", + " warmup_iters=500,\n", + " warmup_ratio=0.001,\n", + " step=[8, 11])\n", + "runner = dict(type='EpochBasedRunner', max_epochs=12)\n", + "checkpoint_config = dict(interval=12)\n", + "log_config = dict(interval=10, hooks=[dict(type='TextLoggerHook')])\n", + "custom_hooks = [dict(type='NumClassCheckHook')]\n", + "dist_params = dict(backend='nccl')\n", + "log_level = 'INFO'\n", + "load_from = 'checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth'\n", + "resume_from = None\n", + "workflow = [('train', 1)]\n", + "work_dir = './tutorial_exps'\n", + "seed = 0\n", + "gpu_ids = range(0, 1)\n", + "\n" + ] + } + ], + "source": [ + "from mmdet.apis import set_random_seed\n", + "\n", + "# Modify dataset type and path\n", + "cfg.dataset_type = 'KittiTinyDataset'\n", + "cfg.data_root = 'kitti_tiny/'\n", + "\n", + "cfg.data.test.type = 'KittiTinyDataset'\n", + "cfg.data.test.data_root = 'kitti_tiny/'\n", + "cfg.data.test.ann_file = 'train.txt'\n", + "cfg.data.test.img_prefix = 'training/image_2'\n", + "\n", + "cfg.data.train.type = 'KittiTinyDataset'\n", + "cfg.data.train.data_root = 'kitti_tiny/'\n", + "cfg.data.train.ann_file = 'train.txt'\n", + "cfg.data.train.img_prefix = 'training/image_2'\n", + "\n", + "cfg.data.val.type = 'KittiTinyDataset'\n", + "cfg.data.val.data_root = 'kitti_tiny/'\n", + "cfg.data.val.ann_file = 'val.txt'\n", + "cfg.data.val.img_prefix = 'training/image_2'\n", + "\n", + "# modify num classes of the model in box head\n", + "cfg.model.roi_head.bbox_head.num_classes = 3\n", + "# We can still use the pre-trained Mask RCNN model though we do not need to\n", + "# use the mask branch\n", + "cfg.load_from = 
'checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth'\n", + "\n", + "# Set up working dir to save files and logs.\n", + "cfg.work_dir = './tutorial_exps'\n", + "\n", + "# The original learning rate (LR) is set for 8-GPU training.\n", + "# We divide it by 8 since we only use one GPU.\n", + "cfg.optimizer.lr = 0.02 / 8\n", + "cfg.lr_config.warmup = None\n", + "cfg.log_config.interval = 10\n", + "\n", + "# Change the evaluation metric since we use a customized dataset.\n", + "cfg.evaluation.metric = 'mAP'\n", + "# We can set the evaluation interval to reduce the number of evaluations\n", + "cfg.evaluation.interval = 12\n", + "# We can set the checkpoint saving interval to reduce the storage cost\n", + "cfg.checkpoint_config.interval = 12\n", + "\n", + "# Set seed so that the results are more reproducible\n", + "cfg.seed = 0\n", + "set_random_seed(0, deterministic=False)\n", + "cfg.gpu_ids = range(1)\n", + "\n", + "\n", + "# We can initialize the logger for training and have a look\n", + "# at the final config used for training\n", + "print(f'Config:\\n{cfg.pretty_text}')\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "111W_oZV_3wa" + }, + "source": [ + "### Train a new detector\n", + "\n", + "Finally, let's initialize the dataset and detector, then train a new detector!" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000, + "referenced_widgets": [ + "c3018c8715924d2b83d817cc6c448a2d", + "aca1c388eeca4c87b5b6306302630303", + "b9b75e2d894e467289cb83070b8bb998", + "767c8f4fbc924027885851365ceb6292", + "1489fe29d91748cab449718d687f4ee1", + "bf1e5d0665a141ac9c2085062ba77801", + "171ea927699a474084c49f8874942ae8", + "7189ce8a6634410a9e633832e8151070" + ] + }, + "id": "7WBWHu010PN3", + "outputId": "fac18cba-16a3-491e-b3ae-1ab99d71c325" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/content/mmdetection/mmdet/datasets/custom.py:155: UserWarning: CustomDataset does not support filtering empty gt images.\n", + " 'CustomDataset does not support filtering empty gt images.')\n", + "2021-02-20 03:04:44,198 - mmdet - INFO - load model from: open-mmlab://detectron2/resnet50_caffe\n", + "Downloading: \"https://download.openmmlab.com/pretrain/third_party/resnet50_msra-5891d200.pth\" to /root/.cache/torch/checkpoints/resnet50_msra-5891d200.pth\n" + ] + }, + { + "data": { + "application/vnd.jupyter.widget-view+json": { + "model_id": "c3018c8715924d2b83d817cc6c448a2d", + "version_major": 2, + "version_minor": 0 + }, + "text/plain": [ + "HBox(children=(FloatProgress(value=0.0, max=94284731.0), HTML(value='')))" + ] + }, + "metadata": { + "tags": [] + }, + "output_type": "display_data" + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-02-20 03:04:57,872 - mmdet - WARNING - The model and loaded state dict do not match exactly\n", + "\n", + "unexpected key in source state_dict: conv1.bias\n", + "\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-02-20 03:04:58,180 - mmdet - INFO - load checkpoint from checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth\n", + "2021-02-20 03:04:58,313 - mmdet - WARNING - The model and loaded state dict do not match exactly\n", + "\n", + "size mismatch for 
roi_head.bbox_head.fc_cls.weight: copying a param with shape torch.Size([81, 1024]) from checkpoint, the shape in current model is torch.Size([4, 1024]).\n", + "size mismatch for roi_head.bbox_head.fc_cls.bias: copying a param with shape torch.Size([81]) from checkpoint, the shape in current model is torch.Size([4]).\n", + "size mismatch for roi_head.bbox_head.fc_reg.weight: copying a param with shape torch.Size([320, 1024]) from checkpoint, the shape in current model is torch.Size([12, 1024]).\n", + "size mismatch for roi_head.bbox_head.fc_reg.bias: copying a param with shape torch.Size([320]) from checkpoint, the shape in current model is torch.Size([12]).\n", + "unexpected key in source state_dict: roi_head.mask_head.convs.0.conv.weight, roi_head.mask_head.convs.0.conv.bias, roi_head.mask_head.convs.1.conv.weight, roi_head.mask_head.convs.1.conv.bias, roi_head.mask_head.convs.2.conv.weight, roi_head.mask_head.convs.2.conv.bias, roi_head.mask_head.convs.3.conv.weight, roi_head.mask_head.convs.3.conv.bias, roi_head.mask_head.upsample.weight, roi_head.mask_head.upsample.bias, roi_head.mask_head.conv_logits.weight, roi_head.mask_head.conv_logits.bias\n", + "\n", + "2021-02-20 03:04:58,316 - mmdet - INFO - Start running, host: root@f0e5be20007b, work_dir: /content/mmdetection/tutorial_exps\n", + "2021-02-20 03:04:58,317 - mmdet - INFO - workflow: [('train', 1)], max: 12 epochs\n", + "2021-02-20 03:05:03,791 - mmdet - INFO - Epoch [1][10/25]\tlr: 2.500e-03, eta: 0:02:34, time: 0.531, data_time: 0.222, memory: 2133, loss_rpn_cls: 0.0286, loss_rpn_bbox: 0.0177, loss_cls: 0.5962, acc: 80.5273, loss_bbox: 0.3859, loss: 1.0284\n", + "2021-02-20 03:05:06,998 - mmdet - INFO - Epoch [1][20/25]\tlr: 2.500e-03, eta: 0:01:59, time: 0.321, data_time: 0.021, memory: 2133, loss_rpn_cls: 0.0214, loss_rpn_bbox: 0.0122, loss_cls: 0.1736, acc: 94.0332, loss_bbox: 0.3017, loss: 0.5089\n", + "2021-02-20 03:05:13,968 - mmdet - INFO - Epoch [2][10/25]\tlr: 2.500e-03, eta: 0:01:44, time: 0.530, data_time: 0.221, memory: 2133, loss_rpn_cls: 0.0183, loss_rpn_bbox: 0.0148, loss_cls: 0.1515, acc: 94.8535, loss_bbox: 0.2882, loss: 0.4728\n", + "2021-02-20 03:05:17,195 - mmdet - INFO - Epoch [2][20/25]\tlr: 2.500e-03, eta: 0:01:36, time: 0.323, data_time: 0.021, memory: 2133, loss_rpn_cls: 0.0115, loss_rpn_bbox: 0.0129, loss_cls: 0.1297, acc: 95.3516, loss_bbox: 0.1971, loss: 0.3512\n", + "2021-02-20 03:05:24,202 - mmdet - INFO - Epoch [3][10/25]\tlr: 2.500e-03, eta: 0:01:29, time: 0.533, data_time: 0.221, memory: 2133, loss_rpn_cls: 0.0075, loss_rpn_bbox: 0.0107, loss_cls: 0.0982, acc: 96.3672, loss_bbox: 0.1558, loss: 0.2722\n", + "2021-02-20 03:05:27,479 - mmdet - INFO - Epoch [3][20/25]\tlr: 2.500e-03, eta: 0:01:24, time: 0.327, data_time: 0.021, memory: 2133, loss_rpn_cls: 0.0071, loss_rpn_bbox: 0.0145, loss_cls: 0.1456, acc: 94.5801, loss_bbox: 0.2525, loss: 0.4197\n", + "2021-02-20 03:05:34,565 - mmdet - INFO - Epoch [4][10/25]\tlr: 2.500e-03, eta: 0:01:18, time: 0.538, data_time: 0.222, memory: 2133, loss_rpn_cls: 0.0082, loss_rpn_bbox: 0.0143, loss_cls: 0.1099, acc: 95.8789, loss_bbox: 0.2154, loss: 0.3477\n", + "2021-02-20 03:05:37,889 - mmdet - INFO - Epoch [4][20/25]\tlr: 2.500e-03, eta: 0:01:14, time: 0.332, data_time: 0.021, memory: 2133, loss_rpn_cls: 0.0056, loss_rpn_bbox: 0.0124, loss_cls: 0.1216, acc: 95.4492, loss_bbox: 0.2074, loss: 0.3470\n", + "2021-02-20 03:05:45,023 - mmdet - INFO - Epoch [5][10/25]\tlr: 2.500e-03, eta: 0:01:08, time: 0.544, data_time: 0.221, memory: 2133, loss_rpn_cls: 0.0034, 
loss_rpn_bbox: 0.0104, loss_cls: 0.1065, acc: 95.8496, loss_bbox: 0.2072, loss: 0.3275\n", + "2021-02-20 03:05:48,367 - mmdet - INFO - Epoch [5][20/25]\tlr: 2.500e-03, eta: 0:01:04, time: 0.334, data_time: 0.021, memory: 2133, loss_rpn_cls: 0.0043, loss_rpn_bbox: 0.0109, loss_cls: 0.0918, acc: 96.7285, loss_bbox: 0.1882, loss: 0.2952\n", + "2021-02-20 03:05:55,575 - mmdet - INFO - Epoch [6][10/25]\tlr: 2.500e-03, eta: 0:00:59, time: 0.548, data_time: 0.222, memory: 2133, loss_rpn_cls: 0.0028, loss_rpn_bbox: 0.0085, loss_cls: 0.0843, acc: 97.1582, loss_bbox: 0.1765, loss: 0.2721\n", + "2021-02-20 03:05:58,963 - mmdet - INFO - Epoch [6][20/25]\tlr: 2.500e-03, eta: 0:00:55, time: 0.339, data_time: 0.022, memory: 2133, loss_rpn_cls: 0.0037, loss_rpn_bbox: 0.0105, loss_cls: 0.0833, acc: 96.8359, loss_bbox: 0.1700, loss: 0.2675\n", + "2021-02-20 03:06:06,144 - mmdet - INFO - Epoch [7][10/25]\tlr: 2.500e-03, eta: 0:00:50, time: 0.545, data_time: 0.221, memory: 2133, loss_rpn_cls: 0.0030, loss_rpn_bbox: 0.0095, loss_cls: 0.0806, acc: 96.9238, loss_bbox: 0.1642, loss: 0.2573\n", + "2021-02-20 03:06:09,550 - mmdet - INFO - Epoch [7][20/25]\tlr: 2.500e-03, eta: 0:00:46, time: 0.340, data_time: 0.022, memory: 2133, loss_rpn_cls: 0.0019, loss_rpn_bbox: 0.0115, loss_cls: 0.0867, acc: 96.6602, loss_bbox: 0.1727, loss: 0.2728\n", + "2021-02-20 03:06:16,846 - mmdet - INFO - Epoch [8][10/25]\tlr: 2.500e-03, eta: 0:00:41, time: 0.553, data_time: 0.223, memory: 2133, loss_rpn_cls: 0.0021, loss_rpn_bbox: 0.0087, loss_cls: 0.0701, acc: 96.9141, loss_bbox: 0.1364, loss: 0.2174\n", + "2021-02-20 03:06:20,318 - mmdet - INFO - Epoch [8][20/25]\tlr: 2.500e-03, eta: 0:00:37, time: 0.347, data_time: 0.022, memory: 2133, loss_rpn_cls: 0.0008, loss_rpn_bbox: 0.0083, loss_cls: 0.0689, acc: 97.3926, loss_bbox: 0.1634, loss: 0.2414\n", + "2021-02-20 03:06:27,654 - mmdet - INFO - Epoch [9][10/25]\tlr: 2.500e-04, eta: 0:00:32, time: 0.555, data_time: 0.221, memory: 2133, loss_rpn_cls: 0.0034, loss_rpn_bbox: 0.0080, loss_cls: 0.0632, acc: 97.5488, loss_bbox: 0.1285, loss: 0.2031\n", + "2021-02-20 03:06:31,136 - mmdet - INFO - Epoch [9][20/25]\tlr: 2.500e-04, eta: 0:00:28, time: 0.348, data_time: 0.022, memory: 2133, loss_rpn_cls: 0.0008, loss_rpn_bbox: 0.0065, loss_cls: 0.0539, acc: 97.9004, loss_bbox: 0.1013, loss: 0.1625\n", + "2021-02-20 03:06:38,476 - mmdet - INFO - Epoch [10][10/25]\tlr: 2.500e-04, eta: 0:00:23, time: 0.554, data_time: 0.221, memory: 2133, loss_rpn_cls: 0.0026, loss_rpn_bbox: 0.0082, loss_cls: 0.0621, acc: 97.6172, loss_bbox: 0.1304, loss: 0.2033\n", + "2021-02-20 03:06:41,997 - mmdet - INFO - Epoch [10][20/25]\tlr: 2.500e-04, eta: 0:00:19, time: 0.352, data_time: 0.022, memory: 2133, loss_rpn_cls: 0.0011, loss_rpn_bbox: 0.0059, loss_cls: 0.0596, acc: 97.8223, loss_bbox: 0.1199, loss: 0.1866\n", + "2021-02-20 03:06:49,368 - mmdet - INFO - Epoch [11][10/25]\tlr: 2.500e-04, eta: 0:00:14, time: 0.557, data_time: 0.221, memory: 2133, loss_rpn_cls: 0.0036, loss_rpn_bbox: 0.0064, loss_cls: 0.0631, acc: 97.5000, loss_bbox: 0.1242, loss: 0.1973\n", + "2021-02-20 03:06:52,881 - mmdet - INFO - Epoch [11][20/25]\tlr: 2.500e-04, eta: 0:00:10, time: 0.351, data_time: 0.022, memory: 2133, loss_rpn_cls: 0.0016, loss_rpn_bbox: 0.0072, loss_cls: 0.0570, acc: 97.9199, loss_bbox: 0.1263, loss: 0.1921\n", + "2021-02-20 03:07:00,207 - mmdet - INFO - Epoch [12][10/25]\tlr: 2.500e-05, eta: 0:00:05, time: 0.554, data_time: 0.222, memory: 2134, loss_rpn_cls: 0.0009, loss_rpn_bbox: 0.0063, loss_cls: 0.0606, acc: 97.6953, 
loss_bbox: 0.1232, loss: 0.1910\n", + "2021-02-20 03:07:03,655 - mmdet - INFO - Epoch [12][20/25]\tlr: 2.500e-05, eta: 0:00:01, time: 0.345, data_time: 0.022, memory: 2134, loss_rpn_cls: 0.0010, loss_rpn_bbox: 0.0056, loss_cls: 0.0486, acc: 98.1641, loss_bbox: 0.0882, loss: 0.1433\n", + "2021-02-20 03:07:05,260 - mmdet - INFO - Saving checkpoint at 12 epochs\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>] 25/25, 11.4 task/s, elapsed: 2s, ETA: 0s\n", + "---------------iou_thr: 0.5---------------\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "2021-02-20 03:07:08,400 - mmdet - INFO - \n", + "+------------+-----+------+--------+-------+\n", + "| class | gts | dets | recall | ap |\n", + "+------------+-----+------+--------+-------+\n", + "| Car | 62 | 131 | 0.968 | 0.879 |\n", + "| Pedestrian | 13 | 58 | 0.846 | 0.747 |\n", + "| Cyclist | 7 | 67 | 0.429 | 0.037 |\n", + "+------------+-----+------+--------+-------+\n", + "| mAP | | | | 0.555 |\n", + "+------------+-----+------+--------+-------+\n", + "2021-02-20 03:07:08,403 - mmdet - INFO - Epoch(val) [12][25]\tAP50: 0.5550, mAP: 0.5545\n" + ] + } + ], + "source": [ + "from mmdet.datasets import build_dataset\n", + "from mmdet.models import build_detector\n", + "from mmdet.apis import train_detector\n", + "\n", + "\n", + "# Build dataset\n", + "datasets = [build_dataset(cfg.data.train)]\n", + "\n", + "# Build the detector\n", + "model = build_detector(\n", + " cfg.model, train_cfg=cfg.get('train_cfg'), test_cfg=cfg.get('test_cfg'))\n", + "# Add an attribute for visualization convenience\n", + "model.CLASSES = datasets[0].CLASSES\n", + "\n", + "# Create work_dir\n", + "mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))\n", + "train_detector(model, datasets, cfg, distributed=False, validate=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "_vYQF5K2NqqI" + }, + "source": [ + "### Understand the log\n", + "From the log, we can have a basic understanding the training process and know how well the detector is trained.\n", + "\n", + "Firstly, the ResNet-50 backbone pre-trained on ImageNet is loaded, this is a common practice since training from scratch is more cost. The log shows that all the weights of the ResNet-50 backbone are loaded except the `conv1.bias`, which has been merged into `conv.weights`.\n", + "\n", + "Second, since the dataset we are using is small, we loaded a Mask R-CNN model and finetune it for detection. Because the detector we actually using is Faster R-CNN, the weights in mask branch, e.g. `roi_head.mask_head`, are `unexpected key in source state_dict` and not loaded.\n", + "The original Mask R-CNN is trained on COCO dataset which contains 80 classes but KITTI Tiny dataset only have 3 classes. Therefore, the last FC layer of the pre-trained Mask R-CNN for classification has different weight shape and is not used.\n", + "\n", + "Third, after training, the detector is evaluated by the default VOC-style evaluation. The results show that the detector achieves 54.1 mAP on the val dataset,\n", + " not bad!" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "MfQ-yspZLuuI" + }, + "source": [ + "## Test the trained detector\n", + "\n", + "After finetuning the detector, let's visualize the prediction results!" 
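+ "\n",
+ "As a rough sketch (assuming the standard `mmdet.apis` inference helpers; the image path below is only an example), running the trained detector on a single image looks like this:\n",
+ "\n",
+ "```python\n",
+ "import mmcv\n",
+ "from mmdet.apis import inference_detector, show_result_pyplot\n",
+ "\n",
+ "# Attach the config so the inference helpers can build the test pipeline.\n",
+ "model.cfg = cfg\n",
+ "\n",
+ "# Run the detector on one image and draw the predicted boxes.\n",
+ "img = mmcv.imread('kitti_tiny/training/image_2/000068.jpeg')  # example path\n",
+ "result = inference_detector(model, img)\n",
+ "show_result_pyplot(model, img, result)\n",
+ "```"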
+ ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 578 + }, + "id": "_MuZurfGLq0p", + "outputId": "b4a77811-d159-4213-d8cb-b73f5b5b6d1c" + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/content/mmdetection/mmdet/datasets/utils.py:66: UserWarning: \"ImageToTensor\" pipeline is replaced by \"DefaultFormatBundle\" for batch inference. It is recommended to manually replace it in the test data pipeline in your config file.\n", + " 'data pipeline in your config file.', UserWarning)\n", + "/content/mmdetection/mmdet/apis/inference.py:205: UserWarning: \"block\" will be deprecated in v2.9.0,Please use \"wait_time\"\n", + " warnings.warn('\"block\" will be deprecated in v2.9.0,'\n", + "/content/mmdetection/mmdet/apis/inference.py:207: UserWarning: \"fig_size\" are deprecated and takes no effect.\n", + " warnings.warn('\"fig_size\" are deprecated and takes no effect.')\n", + "/content/mmdetection/mmdet/core/visualization/image.py:75: UserWarning: \"font_scale\" will be deprecated in v2.9.0,Please use \"font_size\"\n", + " warnings.warn('\"font_scale\" will be deprecated in v2.9.0,'\n" + ] + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAABOgAAAGVCAYAAABEu85RAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+WH4yJAAAgAElEQVR4nOy9WYxl23nf9/vWWns4U51TQ3dXz913IC8HiSIlxxJFRYLiwIgcWwniwEicAA4gGM5rgCBInvKYZz8FSAwEiR8SI4OBOI4jJYpljRwlUry889BjdXVNZz57WEMe1q7q5hV5yQSi7mW4fkChu845dc7ea6+9hv/3/74jIQQSiUQikUgkEolEIpFIJBKJxEeD+qgPIJFIJBKJRCKRSCQSiUQikfhJJgl0iUQikUgkEolEIpFIJBKJxEdIEugSiUQikUgkEolEIpFIJBKJj5Ak0CUSiUQikUgkEolEIpFIJBIfIUmgSyQSiUQikUgkEolEIpFIJD5CkkCXSCQSiUQikUgkEolEIpFIfIQkgS6RSCQSiUTiJwAR+eci8hsf9XEkEolEIpFIJP4sSaBLJBKJRCKR+AlDRP6OiPzeR30ciUQikUgkEolIEugSiUQikUgkPkaIiPmojyGRSCQSiUQi8RdLEugSiUQikUgkPmJE5H0R+U9E5FvASkS+JCJ/ICJTEfmmiPzKc6/9OyLyrogsROQ9Efnb3eP/uYj8w+ded0dEwgcFPxH5FPBfAr8gIksRmf7FnGUikUgkEolE4vuRIrSJRCKRSCQSHw/+HeCvAR74FvDvA/8M+FeA/0lEXgHWwN8H/lII4Q0RuQrs/L/5kBDCayLy94DfCCF86c/zBBKJRCKRSCQS/99IDrpEIpFIJBKJjwd/P4TwAPj3gH8aQvinIQQfQvgt4GvAr3Wv88BnRaQXQjgIIbz6UR1wIpFIJBKJROLPhyTQJRKJRCKRSHw8eND9exv4t7v01mmXgvol4GoIYQX8LeDvAQci8r91zrpEIpFIJBKJxI8xSaBLJBKJRCKR+HgQun8fAP9dCGHy3M8ghPBfAIQQ/o8Qwr8KXAVeB/6r7u9WQP+599v/IT4rkUgkEolEIvExIAl0iUQikUgkEh8v/iHw10Xkr4qIFpFSRH5FRG6IyBUR+XURGQA1sCSmvAL8CfAvi8gtERkD/+mHfMYhcENE8h/pmSQSiUQikUgkfiiSQJdIJBKJRCLxMaKrQ/frwH8GHBEddf8xcd2mgP8IeAycAr8M/Ifd3/0W8D8Qv2Di68A/+ZCP+W3gVeCJiBz/SE4kkUgkEolEIvFDIyGkDIdEIpFIJBKJRCKRSCQSiUTioyI56BKJRCKRSCQSiUQikUgkEomPkCTQJRKJRCKRSCQSiUQikUgkEh8hSaBLJBKJRCKRSCQSiUQikUgkPkKSQJdIJBKJRCKRSCQSiUQikUh8hCSBLpFIJBKJRCKRSCQSiUQikfgIMR/25Oc//7lQFAWgWK/XLJdLqqrC2QacZ1DkFEYzGfbZmYwYFgUKj3iLKKGRQBCPiCACRiuUAqVUfCwEvPM4F/AuELxgraWpWyobmK49bYDWOpxvOf/GWW1ACRSZJtNCv8wZD/oMyx55ZiiznMIIxrUYJQQRvNJsrOPx8TGn0wVoQ28wQpRBRCAErLW4psY7Bx6MLnHOobWm1yvItcF7j/cepQXbtEBABLRIfB9C90P3Owg+Hrv3hOAgBAgBTyAAPoD3AR/AEeLTCAgYYzCFQSkFAg7L+ffuilI452iqitViTVM1aK3Z3tpmsDXAG0cQT/ABWwU2q5rlcs1mWdM0FhFha2vI9s6EopcTQosNDUqBzjMy08M6jxPBK2FTO6aLObZu0WIoVE613mBEsXNpj+HWgHVbczY7Zblec+v2TbbGY0QMh0+OODg4ZFPVeA+ti+eJB4LDO0fwNnZKiX1EZYa2bXEuxN+la39r8d4DoBQURcFw2KdXFmgtGGPIDRQqcOfGVQa9jNVsymY2pd6syHPF3qU99m9cQ5Sw2KxpG0uZlQiKzXLN2gYeTWsenJ1ytprjFnNe6o35qRfuMrm6w3hvQpYbVPBkVcA6y6NqxuPHh+hVy87kEr3LV5DeAEfABY9SCksghIAuc9q2pbIt3ntaZ1lvNmw2G46Ojnh0/xH2tGJnsMXO7oReL0cpS1FoxpMRW5MRWgvKaEzepyz6DEdbFP0hBEXdOkRptFG0bU2gZVPXrNYrpmdz3n77bTbLmkGvz6A/QEug3iyo1nOqzQJXtQyy
LfJej2wnZ2tvQq8/YDGtWBw3lE3Or/38L/ELv/qXuT9c8NVX/5D1O+9TbDxlMUJ0RlWvmc9nrKsWU/RRZY/GgycgmaJpa0JwVOsNm+WG9XLDalahvGKytUVRBIbDku3JhK1BH5xnfnYK3nFlb5tJr0evMOTBkeEZFTlbg4JLe7tcvbFHdrNgWc/59uv3+e3f+xO+8a33GG7t0xvssLezx+1bV8E3rOdnrBYz6nVFXTfUtae2LZevXGI8GWNUhm9bcjQ9Yyi0oldkaCO0rqENjiYENi0sqpZHh6fcf/chvZDzsz//c/z8v/krrJpT6gfvM7GOkTEcHx/ytddeY7pYon1gf3ebT3zyLrev7dHOTjh49z7aj3BS0tsekuXQC5a+AMs1mVGsvKUOgg8Z6B6bKrAWw58ePOTebMasqvFesFWNrS2T8Ta71/ZZupp7Dx5QasNOWfCFuy+xPxyRi2dVLZnZNafTKWFWc/Olz7D1c7/Avdbz2mtvs3j0hHG15PKoJJuUjK5eYbB/k+PTJQdv3ac5OyX3GyaDjMFwRCmeu9sFu4WnCC2LuuXMl7wzbViYEfQGnJ0eszw9ZKgc17fHZAFuX91lUliGPc0aw+Gi4X//na/w1qNjpBxx89ZtPnH3FtpVUM25uz/hpRuX2O4bejqw2TTM1p75vGXSH7N3eZ/X3n+Xe08e4YJF2oareckr128wmWyRjwa8d/yUN95/j2rTsphuOD1b0YQMekNufOIVhpeuUAfP8clTVrNTru2OGfdz5qenVFVLyHu4fMDD0zmnixk6bDCuYdzf5tblfW5s9bh7bZvaLnnn4BF/+J23WbaGrf42O6NtVrMl9x8dsci2aHzgc9fGfOnTdxiNSl57922OZ1O2Ll/i7ic/S9Yf8affeZPXXn8bpQ0//VM/xdagx5tvfpv333uLF25c52/967/Op195hU2WkW/vUvQmhDogTUCUwZcGb0BZjwKmGlZ4tr2m7zUIoIn/JhIfxHvwgYCwmS+Zn54igBFF0zSUgz7jSzuoLAPv8a0F0UiWId1aDiUE362VNATpVk8h4BZTwmpKpgQcoAyYEvoDJM9BFB6wHpyPa4jMKHJjaNc104MTXGvZvTlGRp5WIFCgEDI8OIGQoSRDghBkibjH1CcPcdNj+rZifXLMk5ljle0w2OrRz9ZodUqwxxi/YTTaIhvsQjsEN6LxJYvTA4ws6PX7PHl4yPsPHhI0XLp5k8o61ieH7GiHNJ7hYIfR1g5NtUI2j3HVA2RYovev0U4GjO/coWSH937zm/xf/+R3OJvOyEaKK3cm/OwXP0d/MuI3f/f3eXCyhHILH4ShEm6ORuz2h7jacfTohNlZRa0UG5MxVWNm6hKV6XPrzg3+5t/4VW7vGV776tf59pe/xTuvv8VyM+fuy9f52//Bv8uNF14BswN+C9QQQlyz2gA684BD4YirVg3BgI/r1Yufj4gQwsWa/Xz/oJS6+PnwP4aLxfYH+eA5/Xmf4/na2ALKgani4YSSOCjHfYXIBpwF1QOXgROqxYrvvPZVvv2tr1MUJS+98jN85me/iOl5hAWKBvEav/HMns746p98g6/88Td4970HVIuWUd5ye1/x1//mv8Vnv/g3CLrf7eF+MieC827gP/h4CARnMUqjQjdGdS+2ixVv/Omr/IN/8N/y5ttvMZ/P0T1Df9znszfv8C9dvYtS8PvvfpuTdsOm3lAboellZKIZt4pMNLUWgon7ZaUUWmuyLCPLMoyJ2/eqqi72qeePBSBIwIuglKHISvpFH60yAh6lBMmhlZqiZ7j74h0+/zOf4+7NFyiCQXmFUgZE8ZPq43n+1pfveiDu3yGAPBskAuC8RbQmBHB1TX005eH79/n2G6/x9PQY1VOUox5bezvs7l/CGMOtO7fZG+8iIpigEA/4uLdVutMHyFm2LQdPjjk8POTxvbe5/+ZrvPr1P+Ls4CHToyc0TY0Pik0bWNUBC2xtTajbhkC3d9zZ5ubN27z00qe5duMFvvCFL3L9xl2uX7+Myrp+Lh6hRfAYcuTDZaKPLZ3MEq+dgMjzV8pzcR0DiBPCqub08RFH9x/x4O17vPPwHgebBetgaWpL3TZY659TYMAFiw0e59qodanvHi1CAyIakXgPBekmxG7uaV18r//lv/9vvufg+uEtLx6RToDSglIQgsM5R7AOZzQtsKkaFqs1wToyHdAiaKWocQQJcXBX4L3GGEWmFCIKbTSiPdhAlhEHjxBompaysZispXWOqmmoGkvbxsbAxyZYNRu898wETrOMXl5Q5Dm9Xo9+njHulZR5gSkztMoJOeiigawlBKG1HpG2E+gcKoBRghJNCAEXLC5YvLPkweCV7i6IBUsc5PBxkXneCZ6/rZXqBvHQCUqxLbRWsU07sSYghCA4DzZ4vI/iVd02529D0IogUdwAjyiJ+ycRMp1TFFFU1Eg8dufw3oGBwhQUA02ZlxRZztqsqes2DuoCzWaNUp6yl1NkQ8DjCXjncM7jEYISJMTFt0fAO4I4tBa0EoJrsU2L4pkAu1wuCSEgYliv19+9SMLjfHh2J32/LtiJuyEEfHBY6+LGALr3ihNXCIG2bbEWnHPgBaeE2arC+8B0tmF6PENhGUgPdzqjUQZTFtS2pa5bMl2Tm4LCFAy2hsjsCf1yQCg0TZ6hgmG22bC4/4gnh08Yb08Ybw2Y9MdIv4emxRuNBIv3EIKQGYNRQutdPAcbhWZ1cY2etck5QQnStaEXwboo4Cnv0ZnCdQvO2JDxeocQcN7Rthbp+q/wbIEa8Bd90IgiU5p53WCzvNs4BQqTUWxtkevAvJlhrYWmIVSBpq4RpanrGucCohUowWQZvTyn0Ia1j/eukgadCS54vBKU1lFRd93KniiyF8oQghCyDJ872syhdIMLARscuShEhW4Mim2ijEb5KFxro/DeRqGPwCY4NJbxaEhQAURhtCEzhkwbNus1p9P7ZMWU9XJNXa3YnQwYD0uK7V3qomKzqVltGnRdczI9w+LZHe/QL3uUyqCcI4TYls2mwfqGoISgu91l1+bB+9jLvcK2DttYmqqmdS1BSozS7IwnlP0+/bzH3u6EYjBiuqqg9vTGu2zmEJShFYXzjkwbdJYRckdVb5DMoEXAKwIegkehKLRimGUUpguuqA2nzRln01OsDgwmE25ev8FyOmWzrrj38DG927e4cXmPWbWmbiyCog2Kxgv+uXVIt3bAh9BNOopnuxPfzRkgWoNSOG9j31WaMuvhRDNbQ7VcUecGYwqkmyedC3gUkhm8KILSBG0IVmFdAGVwNjA7PmG13hCamut7YyaloFWG0Rm5ydB4eoVgdMb8+DGrdkq/7LGczVjOlwy2hgz6BYXJUDqjzHsEArkSSmMYbvXYGUxomkc8OVtgcdy//y691Zz+zg51W+MFLMKyapktK0QpMpNhg6duYh8eDgZIq/DB0zQNVSU0TUteFly6dIk8u4evHXmRs7M9oa9zpvMKdB+ynN3dMaPxkMzAerXiyZMn0CvROm7QqiqObQicHJ9yduKZTuc0teXw5JR//Nu
/yX/9j/8R92ZTbn/qM/y1f+3f4Oc++3Nsj0YohFri3FNojQMWmyXLqmJYjsDoJMwlPpQ478R1UNO23H/wgEf3H9DWNVmWcfflF3l5UFIO+nFNYHQU6KDb+AWC9djWEkRhCo0y55tBQSSHfAcxGSCQ5fEpo2grhxdLNshQGgRNCBpFXARnRc6g7PHuW6/y3ttTbn3uDtv7lyjLHAkShQ8rgEK0AB6xlvnBMW9+5Ss8feN1LhcDcgyvPXqfo7Dg1gtDXroz4Mb+DqPBiNmjFScHc/Zf3iI0M9aHBxxO4d479zh5fJ+ldTycz0BZPvOp27z84ovM7YpHR085mB6zNxlzdzRi7eaYgaYYTegZjSk8lELrLM3rBzx8/IA//v03uXe/Yd70GKsJ+njEn/zelL6cMXuzJtSKpmgIgwL2xjxZW14/ecBq1jJ/umZ12uARVFmyMRVHzVNkWPDCdoFeP+bJe8d87Y9+k298+Q2OjzeovGTnZsCiozAq8doRuAgqf8iy7WPH+ToI+PEQmgSCAm9Aq7ipk0AUlc+VIh2ftyrQ2jWFGjI/WfLtr32N2ekDfvpTn+LGzVvsXr0NRdcG0oJrWB3NePM7b/Fbv/UvOJnPIc+4du0q7TowzGquX4HBYBh3tYnvy4VwGQDvujWK5fDBfb761a/wla9/GReEq1ev8Jmf+gyf+0s/w+df/iTDec13Xv1TsoOSXgaSZeSZ0JQZKkBRC0Y0Tjxo9V2istYakRg5i+OvAbr9wnMqohiNUnGtnGVxDHXW4XFkmUGQaDbp3jf+vb/Ys/+kz//ndpvv+yTEPY1WnC9iFQrp/tJay9OnT3ly9JSz2Szu50Wxtg1ra7ESKMuSnd09xsMxOii0QKbLuF+i4eT0IQeHj3nvwQFvvH2PN9+6x8HBAWeHh5wePCJzDWG9grbFmBwkQxB6RQFGUVVL9i5t8/In7vLZn/4sn/z0p/jEy5/k9p1PMBrvA6YbxwXrA857lDk3DnnMDwpifIz5YPcNH3w2ENcCLhAaz2o25/Hjxzx6+IAnx4ecLGZUtDTB0XpL61vc+aQncX2sRCH+/H6JWkV8a42E2IbQDaNKodW5gK66x+2HnsOHCnRRPAqAuxABLp5TQuNiyGBdBwKOtq7IjMIojdYaXehuAIsuuuDBe985orhQ+/EQlEYElNaUWnfumdjJ13XFehPFgdZ5rPcEBE9B3VjatqWxHutqVnVLvmko85xFUdPv98mLkrIfkMzQYBDTi+u9Mgcf8LbGu5YQoMwMeRYjBydnM5z30cWWQVYYxHq8t1EE0rprnxDnzPBMUIoXzOHPJVwFEgJKBNGCFnmuTRWBuGZVXnWOq0CvKAnEhYVrW1pvQUAbhc7MxeSgVGzzoONjxkTHXdO2cTJ3ddzcK8N4q89o0IsCQ1WzrjbUmw3L2QbX9igHfbTWBCGKI+chITEoHS42h945GluD8zQB5vM5VdMQVGDTbGjrmpOnR5zKMc55NnXbTR4K6zxV3aLO3Yu+E5G86zpvbCecje3XhWGDjyIc3n/XuXvvo/vRWYKPzsC6MBRGc//RIb0sw9Yr6k1NmRuk9kzXc+atx/QKGmfZrGu0zhiUA4b9IaOtjBrwKIqspBwZRkETlObo9JjFeg4Ktidjbl6+yqVr+5jBgMl4j6adU4dA5iwqeDTxPoiSTRTrXNNitMaHgA2e4Ox3RXpFwCGdWNWJazp05xqFOJQnIyDa4nKPcx7lXBRvRXXCHBfCXBwQunbr+p8i/l+8Jy8Mo+EImQyZD7ZoVp6gFOQebx3Vas1isaHdCGWW0zqL8w4JoLoVu4QoOnpnqeqaum2xPlAicVHQuWRpPQQHEpDAxT3jCVgfaJxlZHpobdBao4wgPvb9DEVRFORFjvIWgkMHDyo6Fa2PrkSaGi0wHAy4ee0aV/ePePDojPVyzbE6Zrk4Y3PlEuraFUaDHkVZokyBzlrUak2zbGO/XmzYGgy5sr3DIC8IzrOpNogEgkQhUIs6D4rgXRSSrfIx4mI9zjps3dA0NRYQZ7m+f4UqBC7vXuaFl19ClPDGa9/k/fceYlc1g2yboqcxTYM2QmgDobYU1iOi0VkR53ELwSvwlkCDb2ukqRjkfbwLmKJAbe9weHrCwwcPGG82vPTiS/QnitqsOJme8ZprCRIIePK8ZLFc0wbBqW5TLcR7Mg7XtD5gXMDagLW+G+divzMmRnkRT+samtbhXIHpGXrk9L0wGPTAlLQI4gNaqThONBUhFDQ+oLIM0RneeVonZEWfvNcnVEs265qnT47Y7RVI0UcFwTcelwXaYOOI6hpuXNljvax5cvCIpmkweca6rullI4JoNlVD27YImlwZxv0+g7KP0QWbzYaNazmYLlifeQaZYI0QRIE2tAjBOTbOorqp1IkiqNifx5MJoS5oly2bqmFtJAYlrGWxWBCIY7VWijLLUT1HWWhcXlAOt9jb3WX30h5FrhiO+myqNaenJ1TVhmF/CESBvK4a3njzLarNijwTqtry5lvv8PDBfTZNxZO24uHZlMnWJV68+gJbN7aYrVeI0WR5TttF95qmodrUODP4QaG7RKJb48T1QX/Q5+r+Pm1VU63X7Ozucu3mDVSmaW2LNgaTFTTritVsQVH2GAyHF274QMC3EoMcXfDRFANs23D89BSMYu/KZSBQVS3rTUM+yDCdYCQqbihDAC9RUwqm5fT0Ed9561scTN/jE5/7NJ945RXyvHMhqQx8C86BDrh6wcH79/ja7/4+p2+9xctXb7J/eZ/F2VOO6wdsb2+R37pBr9yirdfYsyW5M7i33+Po4JDDucOMrlH4Cr9aUVtLngmjUZ/bV3a5urNNTyuqnUtkV/a5tr9Lr6cJWlFe2oNeDr4inB0Szp6SiWN+/JRv/u5rfOdr99nUBT4fsFyvcAdLTu6tuJxlDFEMsx6u7FHs7VGM+jx4cJ/Th1O2epfZ6Y0YDBrqagMijPKSQZkjfYWaPuXVf/F/Mj16izf/+FXaU8+WGTMYT7h19SZ7u1dB55xvV70PeC900wLfbZH7iO1y34PzNeL5/89//+FEuvPIFF0n++DzP7rNa3RAxY/owtAx/uV8VO66YwkBnAQa2/Ktb36Z73zjNa5Oxvzil77I9o3L3bXLwIGIAyz333uTr/7OH/LOG+8x3NrlF37513jxU5/GmD4nh2csT+5jl+/RK4sf2fn9uNFdiu9y0SkRlNIXgkx043hC8FRNTVYW/Mbf/bvceeEuL730IjuXduhNRmADm1ffYt3WrDYbWhXQmUH3SnShCc4jXWZZFp0rKKWjIUQUSsXPjUaMc9NH3EvFQGcbXTshZhdpnUVRr7MRqW6+98GjtOqC3ed/Hz5ut/BHinzwl/Ds/6HLeNPn194HhLhPCcFzenLG05MzTmZzVpsKS0A8tHWLUxv6q4prV2+Qm4K2saAyXPBM51NOj4+ZzU74+je/wv1H93n9rXd56717HD49JThPLpAHx3p5Rl8JpVFYW7GuVwQRdveusXvlMpO9F7n70i1+9me/wOc//3lu3LpJ3htCyAh2g+gBBB
335kHIM9WZjcKPrXPuec618+8yQwdQITwzP7YONg2zk7PoTjw65GQ1Z2FrahytBBy+mzuiJnuuhQXVuf47m573nBuco0dP4mYgiCB0e/JOuAshnHs6vi8fegWicyk6uqxrcL6NDolzgYkQnUE2ijVVZch0p/Ar6Pd7KBVdc9F+C94L2oNIwIVukBOD9wHXNiilKLKCosgwtHgLuWhKlbHJhcb6TrSAxvrO9h3wIRCCYANY62mDZdPAsgGVNeSrmrIssd5hg0JroewN0AS8NdhKId6SG0Wmuo23ik6YIB6PQzRkOkPr511LFm87V2E3QMe2090ioBNAROJj5+0q0DZt19DnLpQoh52765QyUV/zLa4TSenOFx/FlW7q7gZpjRZFbjJ6eQ9RqlNw4xRvjKLINEbFYzOyFR0f65rZcsFyuWZ6chqjNHlG2e9ddC5EoYJglCbXJrb/usFbS3Ceqq7Q1QqU0HhHYxvGg2GXkurR59Zv76O4GRzP3DfRFei7NvVd2rB0vVcpgzHmQuA9v+bP/3gfnTuhiwiFEBBdsKlbbOPoZQXlwGAU0TKuM1RvRDCaql6zaAKElsVmRW/lKNaOs7phvlpSEBgrTX+0zaBXcFYvado1bXD41QoePmG1qunt7LKxLTZAriDzAdW2WBtdRKJju2cmQ3RsY5Qi2JbQREfdRT8SwXkfXWgh3uhKxcd98FjbooxcOCVD8HgX30NC3DhZ69FGYYO/SMNW3fwbQiDThjIvKLMcbxtMCJQ6us1kCG0e3Qg+ayFXbFpH2zY0jdCKZdPUbNoGWze4xqJ8wHQ2eyegcgO1YL2l9ZYSFcVSUbSuwTVRoMPFNOxO4Y733bkrlZgW7r1HdQOjVxLFJAWZ0ihlyCVQZIoiy8jyHKUV+EDrLG1t2dra4sU7d0ENOTg8Y7PZUFfQVhumx0fs7W5z9co1trd3yYwQxKBLzXK1YHE2p1nXlJJR7ka3oCCIcng8aA1aowNk2pCbDGMyJBgIGiFanL11bNZLNq6lqisenR7zzsNHZFmfXwnCX/6lX+TT/Z9nuar59te+yZPNIcNRzSBMmGwPojvEtuzkBeOtMZVv8c4SVFwYZEqwHoy3aNfiTqaUxZC8zHFGGAz6eK2xm5rXv/0qt6/fYP/SFaZa8+DxQ5b1mst7O1zZ2cG2gaqFjVPYrs9oAaO68Uo0ge4+8/H+Vupc1JdOVDV4FwMSouOYnxnDZNjn6l7BQg85Xjec+RDngACLdUVjHOt2wqYRilIhWQaqRmc5SmX0yj69/oDJZJutrV36pUarjGrdUAt45ellBuVa9sZbLILi+OgEoxV5WTJfb+L41Qbmyw3D/oayn5ObjL3JmGExwOiM+toVNr5lulngSsVoUFBtlgSTRZHcj8iKHqoc0FqLcvF+y7IMqhhMyEShdXx9tamo2xZ8y/HJSbwHO1eQEOLxKU2rhFyDyRQuWLKiz9Xr15jcu8fR8QnvvPsOL+cDRv0Bw16f5WLDwcEh0+kpVy5t0ytzSlOwuH+A8o7tQY/1w2Pe+YOv8u4Ln+XqaIet0YjAs/HWi0KbjCLvgjA/Ru6YxI+I5/vA91pIisR5xzqyLOPGCy9w49YtQtOCMUi/iBuZbs4WoG0bVtUaHwJlv4cqcvIuuON9oK0bfAhx06gNPlM0xAABGRAECYpikJOXGc51rl0lECw+NDFQJNByRm9iuXZji/XyiLYzZzwAACAASURBVMN7r3L7xpB8axsoIBt05xhAwbo54enBfVbzUyaTnBdfucLu3jbVaI+i8rz08i77t/Yx/QGz+6e0dc3uYMTj9+/x6OAx9CeMt8eUgzU3bhpuDIcc2Qq3WdF3Z4SjR6iVp98qysEWRV6S5y2eCr86ozoxtHVOeyQsH64ZDTzjywM+dXePejrHF2O2rl9DDwzXb0y4e2mMWsw4evSU6aKiv3edYvcKj54cY6Y5V65fpSdjcl9gdxuW6zMWdsGSiplztFphnzzk/dO3MG7GJ4c76OEY6wvKyYTP3rrBYGsr7kJcXEcGwDqPEYW6GCbO5YvnOszHbJP/wZTWHyzQxTXphSTzwdf/GXvNn69YJ90hyLkidG6OMFy0bSDQbDbcf/9dvvH1b1KvWn7qU5/lhds32NrpIcrDcgbDbZwTZqcHvPX21/jqH/xz3nv1DQb9Lf7KX/mrfOFLX4JeH98ExoMxs2Hg6b1jVJy0fzwch39BfC9XVfA+ZhNkBgJoY7hx6ya/kBVcunaN4e44vtA6xDlcYzmenXG2WpGVPZwmurCKAsmE1jddhgJok8e9gjKc5+oJAYJCFLEskCi0ik5i72K5pKgZSee0i1lFSimMzlGdO96FNppPjOlcec9d53TN/8yFDhfuqPOMuRioiAh42wlCwuzolIP7j3hyeMTx2ZTFZgMqZrjV3qGkZbWqOTw8YTqdYxB8G1ifzXjz9Tf5zquvcvT0hLffv0frA1Vrcd6DFrR4Ai2IRWtAtWSl4dJ4xN7lXW7deYGXXv4EV2/e4uVPfprReML27jbFcEiXYxfTMbGItNBlPHZhDERi/wrfOyrx44mcJ7WGbraSGMnzEGpPPV0xP5lxcnzG6XLJ3NVsxNPgQOg0Lw3O473DtQ7ro/kmdNqT6+YKcbHNzg1ugoqptj480zjOM+f0h4ugP1AifSaAuOfqfgmiDcE6QmeNDM7StjHlUXVplqt1dG4VRUbZi6JbyHScYxSI7eyh6nyB1tltRaM1KN9CsBgNRa7RWmic75xFwqpuu+MxUcxw0NgWbwN109KqQBsCYRMQWVL0SkQE52ysVaIU/TKnNIasVyA+I9MBg9CGuNlytqGpLF6iSyvLMvI8p+yVWGsRJ3jReGkIzuG9dO1kUaIRFdNDtXQDKlFkkkC0sXb20jg4qii+auleF6d9ZfIocDrdiS3nUZNu8BXf3XbxscJkFFkOJqbkBu+QEDAKpEuFC8GTDwbkaMq8R79fUo9blpuK1XJN1dScnZzG6E2WIZmJCfGiyAtDnmVkQWObthPDQIzGEVDBU5ZR+BERVF/jCWya6CRprCeUJZuqireLkovlzbnIKcLFgkrraNEWNEppbNNcuM3O/yYKXxoRgzGaPM/Jyx7kJTpAkSkyraKAKoGiZ+gPhzgBWo9TTRRcEdqmxbBhZVuapiHPMnSWkQ+HbO1M2O9n9CYjGmtp1hsWZ0vO7j+Cp0dIljPsj9jdLdFFnAydc9Gl4308Dx0dj+dmaHUeqH2+VkqIbjJHwEvsK6FzmZ0LbEYXCHQp0fG1eNfNJB5nPQGFCw6lvtsBKwFs0+DaFilCdIBJTDkK3fOxDmBG3ssx/RxV1WT5mlYcSiTeWxJN3QYh15q8UBRlSSuCUQW1d1T1nLaNTlejOgEQjxOFJ/ZdjSA6pkFpFFmRwbl71FqsaxDraduGoIj1+6pYy84EhyJQGEWWa0weU+VRmmpRc/TkkNUyMB5PuHt3yGCww9OjI46fHjKfzVnNzphPZ6yXNVevtezuXGEy2aWyJb1ej0wyqvmKtrU4azGDHmVesF7Pcd5Bl2rgvRCI9Smzz
KBdhtYZmc7JlEG8o92s2diG1rbgWlarFQ8fvc/JYs3hasknX3mFnesvcGcdeOvb73CyWPOk2nDZ7nBrZxutcxplOFvVtOLwwaMd5DpQZHGhNe6XVJuK0HgKramVotaaYb/EFDnL5ZqzkzMOHx3gqob+1oDt/asEW/Pk5BTXWjIp2NiWVeNpWgc+oCWWOtAm1pHSOusiuwptBKVjhNb7zlEXdbwuBS327+AsogL9PEf1Bsyb+N4iMcW/alqc96zblnUT6NscTAEqQ3RB3XjaxrE9LsmzEqXiRn7QG5JrHcMVzuK8pcg1tt6gcIxGfVZVja9rTFnitAFd4ESoWocJcRLu9XpxDHYN26Me+7tjjmdbqOGE/t6Y9x4fsdlsaFpHMxoz7PXJ8h7WV/igyLQmMxoVotigsrjJWa9rni7XXDkuGY0LjMnROovnHjyCRwNGBwwuuiGDZbGYYwxcvX6Vz3z2U/zR177B4eNHXL9xl+3tbU5nK07PFhid0baW5apiNN7ixbt73HrpFfKm5f3ZlNefHPDwm9/hn+n/kX5W8oVf/CL94RCAloAl0C97FDonNyYJdIkfAolBJ6W6+StAntNFYhEf07PQCgkBZ2M696VLlzA6Q+lOeNNx7aIcGA9eAlpHl5AymvGlCUF5fKcB5T1NLgJiifbhli5VADqXEARGY8/lK4pq1nBFSgZs2Nz7FsW162SjXVyTIXkPLR5WK87efZ3D17/JIFRc2inZ3x/SG+X0lwP2hte5tH2VMgyxTypm7085OziFbUWmM8ZbY443DYeHj8nFsTPJuHzrGrcl4/DBI0ZSwHxFzyl2TMZiWXF0f8Fo4LC+5mxR8fi4JS9vcLUckjUlLmwII83NKzfpfX7E3FrUpMfocp+927tkoxKmjit5w2S6Id8a0Bph2q65WhrGt/fYyscUklHkGrLrnNkZD6ZrHs0C60YwmTDIG27u3eHO/nXElrReGF7eY+czn0Iai/UtTRXIyx5i4vrMB+kCxM/zfLmDj57zvcsH66c9n+76A+vQfZf34vnXSqee/YhcdBdrwvOP1jGbgVjfyraOejPn4fvv8I2v/BGlKfjiF7/IrTsvQj8HqcE1MOxTr1e8/aev8Xt/+Du8/vqXOXn6gFGWc+fGDS7v7yFlFoM1CvJRwbbfRrfX2Ll2/dnZfqD9vtfjPyl88Iy99xdpyAAoyHd3ubu7E93AAqGOGT6mXxI81OKpVSDrldjgsF32iHMBJRpT6lhOSJ1noT0vLqtnabVEZ07MitGIDnhv8MF3AZG2y7ACFbp/lQbiXie68uTiPOLh/9k7+yeO77P+eXZLdlJttx+TAEGbrm5ky2axYj6dc3x6xny1pm49GDA+xHIttWW92nB8dIJ3LtZwPZty8PARb776RiwV4QOVDei8wOiCvBDyzGBtRWi6kjfasDvZ4s6tfV5+8RYvvfQCd168w+3bd9neu0TWG2LyAtFCaFeAIFmByICgNPgGUYooBSm8C527K2az6fzH1EX3/LAt56GWKDnq8+e9JzQeP51z+uQpRwdPOJ2esdisWbqGTbAx+5N4z0XjcnTJOTojUbdHtef79k6bCXI+z4AWj6C5qEt3biyKv3zoaXxo6/vOwNlVsIpFJ/HPRigda8kF73FOcAGcDygC3jqaxpMpR9M6WudpraMocjJn0VpwxkfXV6fehxBQ3gNtdKZJg8JCiLZcozVog0PwQRgMt6jalrqxWBc7ft20NFWsr+YahwgxDcs7XNMgEtNmfQsz1yDjLUy/h5JOWe3SRQXI8gLVOqq6ol1uWK8rtNL0ej0GgwF5nse0VK0xqojtYC3Wxlp5UVB5rseEruZ2NAnFBSpw4WNX58Vf44B7/kUO4WIsEFTMlUV3aXUQBVOnNV75zrkXJQ8j6lmassR05BA8Ljhwlvlsg7MeEHrliMl4i8l4m8VixXQxZzqf4sTjgaZpaUIDxCiMUZoizzDEDln2+4hWVK7BhkCWZ9DYWN9MCYP+gPE4Y1O3zJcL1lUT0wTxcZDTdGmXIPIsVdc5d9Efz+sv+C6tVboI/nkttyjOqYsiqkWWY0RRrzdUdU1/vEVuNLWtIago9AI+SKyzh0YwUUxFsTuYMEdhveXJYha/kIOWUVFwc7LPfLFgaoVVUTPvahEq72G+xngDJmO8s43JMjzRgehcAB+LSkprab3vyrLFxVeMeMXoRazZoy5uaugEOtWlfYrgz2vZuWd97dxGa0MgeLDORvfdhcgev3TDO4+tG9q8IdeduKU1mQFfZPjKo7VQGEOeFVQ+OqAKo8nLgspZmhAFNuUCoXEEF6D0GJPhlUdnGsk02ICzFqdt7LsecqXxEuuUSbdQ0FrjJX5JSax7qbrCm+d3UfflIuLxwccvHLGW1tZoVTJWfYwxF6nRsW6eZbmoWa0cIj0uX96nP9xiZzxhuZiyXs6o1mvu3X/EwZMT9q9cj+6DoaHs5WyNx5gQ+5U6b3d/nm5sCdbjtKMJCmcDEuL9KUFBiNdTd0ImLqCMxwjcvXOXwaV93nrvMe/cf8g/+p//V7b3/pAb+1fYGYzpTfagqjiZHrF5coRvLFeHA2QwwG3W6FwI3pN5GJVCoQ29Xsn2aIRtLUWmUd4wVZ5GBRotKOfJgmaoC+ZnM957+x32795kd3+PIp9Qzxd4By4oVi2s2pjGKsGhnEUF180HMf07+Fh7T7oUX1EB8dIJbuc2boXJFEorbGuxtqKtAuRbnUMgxC+yCR6jNUHrGDFUJZvW07Q1FmFnd5fdS3Nazi68623d4EqhyEtMJmRakNZTNRuyLMf5FlEak2l8DS0eihxvNE4bWgJr31BICT72d1c3hNZhtFBquLqzje8P8BqUa2lWG7xkVKs1a1VQV5bg5VkakvcoQixFoA1exb5abSqmswUY+9zcEAd359ropPUWlQUyrTA61hZ0vmU47POJT7zEfLOhsVEItM7jXYzeF3mB0RmIoij7XN67zM/fuMVW0/LlN19jOZtzOD9j/vAhJw8e0CznDLr0GYwiiKKvNaIM2XMLm0Ti++FczB7Q2nSL/ADWxi+AUPELHPxzznFEYzKFlOqZABHjG9EBpwKq0KgAqFiTMuhAMTJxvS0WH9puwWsRV0O7BldB3gkm9RpcC9qjtCUrl2SrA64WYy5LSfvkLZrmiM32JXx/i8nlK+At9eP7tK/9MVfbJbdvXsKFOdPj+yyqHY6eLjmtDHa2ZDqyqHrNk4cnNOsp6kbOzs4IpUo26wqFsH/lEpf6hrIqadcZ5WobbzXHswOa5QnS36LWJeum4iTUNJs1i+WGRVsw3N2i3oGB9sxO13znzUcMByP2tsfkuUGsoxBPmJ9SPV3Sns0p9QAvhuVsiQ2WLV2wd+Uq/czg2wUER3/YRw8NurLM2oaNL9nJJgx6Q/q6ZX9LMzYGHyz9SxP09REykFhrOGhiCAEyJZhMX+hSz4aIc1vXRb7Px2r8+GCdX/ghxKULhex7baLOBZMPfnXAj0Kwi0H+INB6z3Qx5/jgKcePH3D66D6/+ku/zP71W6BNTDt2G9Axul8d
n/DqV/+E//v3/oC33nkNW58xzIpYdzcTiryrn6UkZnQ4D96ilMLVzXm2VuJDeFavLUSXHIDW+ABNvabs96Ew6GBw1vHk/fd44933OJktsMjFfpag4lpaFFmu0ErRWheNHM8LpLEeTNwPGtO54M8NdjFootDdGPqcS04+eDXDRdaN7fatFymuMU3pY3UPfxy4MLQSnXISwAUf62UTCM6xPplxdnTK7GzOYrWh8SFm2Qj47no761gsNpydzHGuZXp8wtHjJ5wdHXN8fIoWQ2+QYUKLx8Uvd6wbQlEgKtArhEvbO1zeG3P7+mVeeekFbt28ys7uhPHWGAl92ioQdIXOBWV6PLPfxmwxCb6rdRdddBJAaRU1hv9ffjHIc/7XEOv+h7MZRwdHPHr/IY8eHXA6PWPlaio862ApVQwuElwsoe5jGSCR8F0lf4DOm9dpbqHrH6r7nA8chpw75/z3mlue8YPl0W7seV5pJ0SVVWuD0prgBOcN3jmEmFLnAuSdk65tHdB0G9qWrNUXQsp52tO5zTYoRd22tK3DKYdR4SL3t8uTuji04G10JQnoTKJwoIRGgXUqFlgVEwcf78myGH1qpZt0vY91zlwUBIO1BGeQPANjKIoehQ3YFlpb07QWa2uqqqGqGra3ty8KcOosj+2UeaRp4/t6S5Q5u6LqXWNqOjcU52pqN7x2bhzVpcZmWUEI/w97b9ok23He+f1yPUttvd+LuwAEQA451kbZ8jjG4RhHOOaNvoJf+J2/pu1whGNGM1ooaSiIK4C7995dVWfNbV7kqeoLEiTHDskaikhEo4HuvrerzsmTmc//+S+JwefUVRdGkgBjzVd8NMTkSSCnawUTSy/zXx9AGU0uYieD0auLywyK6IKUGsYxomQxofKHnJ6dMEbHpmm5vr+jX68zaEiPkQYtJG50GGOoJt8l7z2ehE2aqijZDGu88ywWC46Ojhm9Yxw7bm9vicGByICHQD5sOtN7U0pNss/MSExS7GWgIQR2qbl5c1FEk0MHYsxFbaENUsB92zD2W85WS5bLObfrwHZoKKj2YGhiSlol5mJj8j6bzWZAokmBbdvw9vyc1lYMy3neAIVA1TWHBwfM5kvGpufu/Ib7t29Rt5c8ef6U09NTiqKYjFp3QR/s5WVM0tOdFFpNFOpdEEMkh4foNDEKZaa0Z5AosJMHC7JcHDHtrZPOPbMNw/7/dxLEuqpQOvvg5TNa9qMo9HTt8ZkNJgUqQZqYelJKjC7YdC1t34Hx+K6j2WwYQiApjaxmdPgs/RNiX8DFEPcLnpIKKXKwgUh5g0gwgT0SYzXW6n1iVSRkLyNBZoqWJYWRhC4wuiwHllqhrM4SV6mY1QvmswXxcuD66oYuWFaHp8zncw6WC64u3vI2jAzDSIgD601DiO9ox5Hjx0uOj1cYoZBaYQqLMYaYIk3TorREJ50X7v3KPAHpShL6DODFnW8MOVClUBqspvWeg9UBv/d7h5QHp/z1T37Kz1+94/L6jqcnj/no7BkHp4/orOTm9oqfv3rDtq4pP/mEo8Uhbb/Jz19KWBWxRS6eykJRlxbTe4xWDAKE85RGszqYM7aOy7eXrGYzRjfSdR2v3rzm7OSUjz/8iMJ5Pv/JS3qfcKhsFUjIjGY/EvyI8xbvIsHnEBwxMeZQApkmIH3qyiurMWVJaRUxJBgibhxzYnb0mZERAykmjDZoY/ExMyCbYWTT9QRV8eTZc0ZRUnz+Cj94Dg8Pqaoqz8sALgyMSU21usANA/NFTTCK7qJjjB6MxAsYJWz8QPQBZKRcVCAiVWGQIaGlZHQjaehZVpaeRNM1aD8i3MB8VuGbjnsPw+Ax1qJKgQgREQNGkA12U56r9WKJsvkAtF5vOL+/ZhgcSlVIIDiPHzqid9hKURUWazVFXaK0wnvHajnne9/9Dk0XKauC129ec3N5Rd+0uDEzgKuqYj5bIGRmM5fLikcfnPBdnfj2dxWffue7/Pf/w/dZzizR90hh9s0JJcAmjXrfGOObA/o341cMOaXVxSmcBikZ+4Fus8GUBdVqmXlt3mO1QUhBdBGR8jq+k5YCu4V/2hvIjF2d2fUhZda/kBEtM2NOxB5/f0V/dwWuozQgZcQNLc4PJBUxteCDs4rDP/4O/vMLbC0pTo/pXQuqYXl6CqbJjIfxmsJv+O/+8HsURzVvzn/C53/3GcPFmrvrxM1G0Yo15QenPD1ecDw7QNaW1azGdw2LuuT7v/8HzI+fQt/w8j/9gHcvP0PrEx49/YSzT79D31/R3a/RSRFGT/CCEATNukVHeHx4CIVls94S48g4liBPgYrtNkIdkUXi1d/9lDG0fPeTT3i6+pjhvqVvBno3IHXi6OSAerEA6QmbhO9bhEw46bhrNtxtGqDk6PCA06NjpL8jjNe8ubokJMWzY4tsrxmvCw7PPkWZeV5Tp/O32tV5+7VBvPf5K9/4Jx+78/Avsr7+yySuu48dU+4fkTH3q4bMjEVSbrZ32y3vXrzkpz/6Mb5r+bf/8//C0cEKURbsQpqSSLi24e0XL/ibP/tzfvCXf83Pv3yB0YLj1QGzUnJ6OOP5k0csZhbwiGTJdjYRvVqy8scMPkuZv5E7Poz3SvyHmbBjz01A566pLkgUszrXwH2PnQzivfNsxo5tyOwlJaZwHKWy1NFn7+0UdwDKL0DE77FCtc717Y6wsPdcFCIrGIhZ8WXKvaQvhGwtkyRTEGCYPLzDe4yeb+75f+nYs6GioL1d8+b1a158/oJXL1/SuVwPJ5HruBhz42l0Ee8a3DiiELguMI4epQyr+YK1j/R9z2bYokuDNonZfM6jsxPqsmA5n/H00QmHywWrRU0xWzEGQd8nqkrgXKIfHYUF7bPCRTARPqSd2OcTC33HJsNOhII8T8V/XUv5P8gQk2ptB0Cv7+64ubzh/Pycy6srNl3LYMDJxEjEkP3ZU2SSp7KvzRU5Qf6re8lXW1bKmL0l2UPmQL6wIn3lqf7a8WsBupCyz9tuuYEJVJjAEedGYsz+SkIKUhS4yShRKksKYW9u6ZzD+QHnNFUsiTanfP6iBj4z6gxaSwg9WktKa7Emy15zGmyafLeyr5ZSDxelVBpKjUiB5HIyawiRfhwZnM9BEGX2O5JKUllDYTUpZe8fHxOt88iYZbxlUZOiZBgUQYe9tHJ0gYvLm/2iKJTCao2tSuZVTVmXhOAIcSD5vPBl+S6I9yj2SmmkUBm8dNlLTEqJ1tk83TlHiDlCW6oCl3KXI4caGmKMOOe+lsovRE43lDqR0ogQmbFYFRpbKAqrcWPA+9xJ8c7TjwFQaGPQUSCMYFFXKK0xtmC93tJ1AyE4mk2HEhItJX4YEUrmsIMU6LpAUS1Zr9eEEDg6OsRoSdP0dF2L9yMETxJxih5WD0lF02PkJv82EKQkp4CRsAeJUkr7aPGc/jsiRAZzFAJrBGHokckhk8f1W7RccLioiWlkbBtkVVBYTVkWGWxIguQTzg+EZY2VNaYdeXb2lIO65vWbF/z4xedU8yrLYwUkn1gtV1TlAcZUtMsF9nDF2G64urnm9v6O1Wr
FydExZVnm+24y8BRjYBhHXPD7Z8B7v7+XSimMtpOPo5jo6RmBSyFiC0tRzjBK76WCEsEwOoTKhrLGZPnb7vq1Tbv/PePoWNYSYy0xZimulYoxeAKRvncUWGazillZUpYlaRPo3UjvHDd3t5zOFhzPl3RlTbdZ0zYtVVFirJ7YtQGdpmRZKZBJ0Hcj6OxPJoVAT0xaJXJqrXcOPcu/T5kCHwJ+HPe+CMPoqAqL11DVFYVRkBzdMICQmOUBiEDfbzg+OeWD3nLfan728pJ37y5YHRxwsFpx8ugxZVVye33F9dUdr9+cc3FzS9CSkR4XHPOyQriAcAErBasqS1zXm2uUFkSZn0UtFTJ6rFEZlIsJoy0CQWEtVmmEkPT9gK0Nfd+TpGGxPOXJ04KXt/dcbRvabuRnn3/JTz/7Gd/6+GM++b3vcfT4jKsXL3n78iXbmx/w5PSEp88+YFbPYRhpR0dtC+43G9qhJwWPjJ6xX1OtFsyMph9HhPOoGDlaLFjfb3j+wROclfTkDqpzjlJpqsWC8e6OIUaqqqC7WxP6hhiGac3VkCQyZkBZkiavwJDtBrynXMyJPuC8p+1a2mQoqwoVoKgVgzUkOnbpYUIKts0WpQwHi5rRO5azgkoKNpuRJArOzs5Ybweuz6/wzmG0pi4UQz+wnNmHIJmUcuq3d0QB9XwGfo0T0PgRlxJaWnzfcXg4h8JQzUsKq9BlZHNxiUJyuFwxXt9yeLQiVXNeffkK0/dUZUAbkD4yOI8ylkprhmFAh0DoepKfIwvN6BwH8xWnx3OOZpJuuGe8fIuUKr8OrVFKMHiHluCCI7iBcegJvsB7ia1K2nFgtVxSVXB92/D29Ssu3r1FqoIYI6vViqIo6bqOGxH5sQ7IDx9xTsOwkBwcnsJRyZ/98C94tr3iex99yvL4CIklixwkqRuIWJQ22XTwm/HN+IXx/rFSqa/KGk1dYapyz77QUiOnM5uASQ4bsxG6nMzud/6jMTF2Pd6NmMISZC5abTFZhUTP0K4J3R3adejkmdUVm9eXfPGTzxDJo4ykHztmBzMef/cJqdTYZYl5eopYGu6b11xdv+PgeMWslEAPYYuhQcwknW9yc3FRszhYES8GlghUrVHBUXOLdh21uGe50ByuBLPjRzRNy/nlG7ptZKFK9FBwYEvsTLPtG754fcvhcoGrntMHh1OCtzcvKWeSR8/OWFrJpoPrbksXK8zsCbPFkkfPFpy/e8H57QtkSpSLmtuxAEoQHyLMh8hqoGrPqVaB2+aCjg2zwxqfHGvgagh0N2sWixX6+GNOjcCzIiR4+e4LHp1Izp5YTLkkmCWvB0cbHM+PH0GxQFBgSgMYdlFXaTcR9ufN92Rx76MY/4RLyPvg3C8Ccr8KoNtb+qSAwOP6lru7aw4Pjynq+h/9Ne9fBxBFJJBrLYMijZ5013JmZjz9k39FOa+YHa4g+CzNSYHkBtrbW/7mL3/A3/zFX/Lyi5fc3t2yWNSsljXHq5pFKagLmM2KbEsBpKlpjwB8AM9U+2RLmK8DOn/Xxm5a/+LMeSCukBnpux+SkkiuS8q6IvnEcHnLj370E97d3uK1wsfc2EiZXpVbZVI9kFD3IRAPtcEOkNNaZxXXe3N5V5+K6XUZbfDe04UOLTRKZFJMJiTk5uSuFjXGTI2T99/x7/iY5vselJ2ANngQ9OvJjHNoWi6vrnj7+pzrq1t8SBRVSd93DCHgYkCFzIiUUmVyRZJstw3bbYsfA2PvJuXc5K1dVBirmc1r5nVFVVrmdcnBcoW1JRFBO3pU02d1j7YosyXGiGkks6FiHmccCIW1kuACKXUonVB2mtFTorog5lCD9PDWf2tnwA6Fm2TgckcqkznpHZ8Yru+4OL/iZ198wfn1FZuuY4wRF6GLDmVNDjAkNy7EjlUaMlbjfVbAZVxHZxfWlD3iU9yRNeIv7TV5Dc1krBjcr30bvyHFVezR4ZR+eVPbg1PZMAw5mbcLodAkemXSTAAAIABJREFUqqKgtpl1klIgJk9KGWDpujEnYkwI/u7v01qjbURHgw8R4QLbMWKVxxpDYUw2oleKNMlplZo83sSuUASSQAuJEIkYsy/b6GXee2Ji8I6ysCxmFYvFgiQFw9AzDMNk6h9QtsQYmQ+XQuyllLvXnL2/wIVIdCODcMhuYGtatFFUlUFpQaEN1mrKidGV286Boe8h5Wvg/I4VNqX1SI1zIz5O7BuV7QZTmCaA/2rqp09ZYb3r8yUhGQc3SZIjRmuUBikTPkZin/9epTI7Mku0wHvH6Fw2ODcSWShMUVIVBmWPmM/n+DG/983NhnFibrhxYAiOdhyyl4WxBOOZV3V+bSnhhwGtNaW1CCJGy/fon2kPOoVJNhvFbjJPrLrpcxSC9z1EMotRYU3+vCt4x7ZBRI8RCW0NWgpECEgBs8qiZxUYQ2p6SqNznPIYMlAXA027ZVZW1AL01CEry5LVco6dFYwuMHhH6/oMiKVJAlrVJAVHhwu6zYaLiwsuLi44n7/j4OCA4+NjTk9PiT5LsI1SjN4xDAN91+XFVVu6OEzeZhGSRoi8GZjJk2/seqLKcnJCRJDl2VkqLTNwGxMh5sUkTZ0yF/ImryZwHJjkotOH2MnaE0Ik8A7f9QxfYS6CtIoheIa2pRCKbz17ikxPeHe75vXlJZSWu/t7htZR2hotM1iljEHHIofOTIupJDeL952GkO/9PjhDStKUAAzgXAY0Y4y4kCi0YjVbcHi4RBnNsN1Q2hqSpizmPHu2oj74kMPHV7x8fcnd/T1fvnjBbGapSsvy6Ahpa4YAd/ctZTVD6p3PYwYNx5Tv0agUShjKsmT0A24cKYzFWssQduy+grGbeiVpKhJSyoB8TMQAm2ZD8AmvZhS24pOPv40sat69esPtxRVCweXlNcOPf8SjJ484Oj5mVdY051e8eHvBKOHDp485XSywIdBP6V9SZXmDiwFBIvoRvEOGQCklui646UbqwhKkwBaGRb1gdbBktZgjRk+UgsaNXN+vOX/7lmZziXAdRnjcGNg2DUaXxJCQCazWOZAkOEKIKGmynFtkJqTWEu89jW/oRwmyIImITz7vC2RpSIwRN3Tc39wyHBaE2qJMgbGJxnu6Ia97Ssv3EpyzRFZrQ2UlSQnCmBmFIUaiiBR1hel6NheX3A4jx4fH+ORZzGsOTk/BSn785Ze8ff2KpSl5fnrG05MTTosKYyr0bEYyJR+dnpJcYt1scJ2jmC1RSSInuZ9RkspajBLYqQGkCok1FmsVVWlAjhhjCKPPxsFK5WS1XXuOTKMfxoG+7RDJI2RuRs2LgtFIts3A2G1Z31wiVMlivuKkPgIJXd9z0W2YxwZrPV+8e83t6Pj8zVvO/6//gygk//J7/5L//X/93/j9oz8h4vL1T6AFMA64fkDOyuyT9834ZvyqIXYdaTHV91N1OllzhJS4urrm+vKK46MjHj86o29G7q5uiN5zcHTEbLnADSPX5xe8e/eOsix49uFzytmckMANHiECxkoKW0IoQUTi3Zbu1Uvi3Q1F42nbNbLUhKHj+n7DEEcOnh
xjvSSsW8x2RJdgzYL7N1cMXeDo+Ajdbbl99Y4XL19TxRkfW4VUkkcnZ3x0WvDi3ZYv3t5xslzx/Kjg4KDEViua20sos/zw7nbDz768IomeQ1NyWig+/fQTWuBn94H7sWd5/IxZDe/OX3Kzvac8O6Oa1xQLzaJU+NuWUmvs/AOef/pHHD/9Htv7jvbP/2+WHxyhlonbtObx4Sl1uaQoj7m/9HS3G8rZHPTI1XZD5T0hrTnf3OJmM+arMx6dHXNwcExRz3niCnyaEYLn/u4NVq9ZLAJ2cQiHH1KKkvtkqQ8/BFEiMOyj6abi7X0MLv+//Oo3/iuq7P7f+qQJIRBJcH+34T/9zV/i/cinn37KkyfP0Foj9P8PvkxiOgdl3AZDboodPHqUJdw7dCAqMIqEo9u2/N1f/wV//R/+I+3Nhg+ffsj/+K//DTc317TDlm67IfQN0jdo2WKtzswtmc1iQ9LZj1EqEhqZo+p+5TX6XRm/Dm9+8IKbvvsLrKME2YeObMczdj1d3+dwRWMwKZ/Tdub1erI5ckRCjDmxVTwEnOxsft5XGL1PztjXqSllO5VAroeM3tdQaaoV9+DT14Guvzu391eP96/L/nqkr1yanQpu6Hquri558/acN+fnXN3ecL9tuI8OLwGZsY0dIzLGmO2AQiL6SJhSeyGfB63N/uVCzFBGU1lDaS2FNrnplbIXmksgYqIPgdZ7zNAhW3BxxIyCQQw0Y89m02FNiVIWNakDtR2oqxkQkKpAqirb8oiJsfvbPgfEDoDMn1QCmeWMjOstm/s1N/f33G83bIaONowMKRCSyF7/Moc7pPhACtoxVFN8Dxvb+0NOisZpHdiRZXav5SHUMgdk7sE78TXP3zR+vQfdewhq4mEB2LGc3k/EyJHgYmLJKLQALWQOiTA2e49Mng7Oj4xjj49uD3h57wlpYgyZgDIaMUlSwaNEwEpHYQyV0WiZDRONEmglkSpvNkJMUlISWmZj8yRAC43VKjPk+hE/RogZrNmHCphcdGfmViCkDPYYpfFFmcGC+KDX9zHhfUBMgNYOTHAup7kMg0JrRV3mgIxoHEZngEVKhSmqCZycqMUpG6WH0TO4iELuF3ehBXLyOYsTyLnzeNolg4SU0JMrexSgjMrs+OgJIREkuDHhcTlxJt9YpNQYXVGUInuHhbz7CxlxaaTvWoYQ8QlQisJMoRVI3DDmjk5K6CHf0z7k63F3d4eR+RqO48gwDNi65PDogG3XsN1u90aKcRfsmrJ/WoD9YrZnk8ldKIbYb0pSZql0Wdp8bXcAnYAUHVokFrMCkqUqNNH1CKOYTwlyI5D8SHADfox4l9NQFQo7eqzOQRGXY8MmWkIYWBjLXBaURzMaPJ/fXnPRNbQvP0cJRalnnH1wihaKoqxYrg6oyhqjNePoOH/7jru7e2azGfVsRllXKJmZlELIfZ3unGccx9yBsxKhMgtNJoEiG3OLmKZ476mbIwRojZw07mnyRVBKkXjY4Hfx6llCm9OYo/A4r3Ahy9R9ynMkhsjQdLQxyzWFNLADhWVmUKmQEEJSlYZDoEuB26Yhjp6h74hjQniZU2+qagKO8xqyW56yLBfkJJn03tP1+bRfVWUGnqYDBlJxfXfH0WLOarXEykg/DFzf3GK0ppxX4BPjmOjHQKJgebDgQ7NA2SXnl+d8+XKgHVvabos1Bm1LVkdHuKQZo6OWhn0ox5TmnJLYh3YYZejHlq7vkbbA6JyG68acbuxdZstKmT3rQgIlBXECGo2uSNKw2TR4lQNojo+OCH1PKSXjtufm7pZ3P/kJl3c3fO+jj3l6eMLq5AypJD/+4hXvrq741qNHfLBaclSWzGYVRTWjH0ZklVAq0fsR37cMTY/VhsXJY6wUSGsYUySKyR4AGLuW5AOq0IwpocsKReTu4g2X775EKYlRdWYEWotMEJ0nTEbtUUyNC0QOtJ78SX1MBJEPhm6aO8potNUoq3LwjFDIaOnbHkmk7zoaK7Fzg9CSruloutzoESKhjUBrkQ8UImWJv4Lgcop0bB1FqQgpcnvfcnd3h5RgS0vbt5TKUgHN2ONRNOPIy4tLQtOzXbfIpHh0eMTBwTFJS1xKHM0qmkVN9FvWzSb7i5iSqCRhHLBVMa3xOXRFKwkGtM7PWz2rkUXEliXu/o4xDfkZd+M+ep0psdhojQ+By8s7/MU7pFacnD5iuTpmNat4fHbKze2G65stVuXAI6FlbpK5nrPjU56dfcD5xTXX1+9oupH78xukkLxwf8/P//aHfPzxJ1QfnE4GuAlhNClEovfZo+Sb8c2Ar69Q9+Acu0pl/3V4kITECNtti1aGw+WKdttwdX1FXZQZmA4R17W07RaIaJMZ8X3TE3xeP0PoqRcWW1nQNTQd8X6gvdoQtz3Wadptgqgo1Jx+23D5o2uadyNPP3jG0dl3EMczWCrKzR2Xn/096xdvKU56DJ7u3UC/jnR9w9Gsg9gyDrdUHyw4fKJpyxWPj1ccVomkRrwVbLeRQpfM5k84+/anHH3vAFuuWL/6CePlT4kzhSkWfPu/+RDx6COKmeLuYs3R4w94Wn+H2fIx2yZipeSgKlltGk66QKgWzJ9+GyfmuHf3nP7+n7A8M1y8/Xt+/vf/EV1XnD06ozp5TqVniC9/DrpncWr56AguXv+UbR949OhjTr/7HcThKagCYo8wEWVrlDkiuYR62zO2EZSH6hAxe0JVP6ISFcgFUPDgXfTezf9K8fbbXsk9jJ0XNgikUtnmpigoigqlbfaSQkwkgDRZd/zjaMHk1KCOKRKTztY3OTWO3NAW4HKwwM3FOT/6+7/msx/+FVeXV3z/9/+Yf/tv/xS9XJHaDdv7S7742U+4ePGSsUnYKdVd68yGDEjCVCtmlYYhBpUL9m/GLwHSv/KH4CsAda5jFKREHDzb+y2b+y1NCESpMIUijSMhZSuYQuX74WMgiIBEE6e6WQiRnVKmuSlUbsIDE4g3hQuKhPNj9hMMZIWbndQ38cFHK4n0S7N270H3zch2miE+AJuwlyXuPyMYB8/t1S1v3rzj1as3vLu44n7bMARHFAk5ESF2QWjjOOJHN4VFPiR6Simzck1IkrV4NCUlQilKI6iMoJCZLBKjZPQBGXL9p4JDupbYjAxhS9Xn8+bCH+Tfr0qMKSiKgrKumc1GynLAh0ACrKmZ1QIps00UySJ+yx/9xC6tdhekIhEB0gi+H+nbnndX11xuNtwOPU0IDNNaq8gS1OgTYiJiQQ4ylAi0ChOpKVtE5XsYIU3Bj2LSAaZdiGCarAqyxydC7evY/88AXZoMX/exwmQ5WhJTR2CKc943zcT0M+Tkmkyfzi8oRokxCmMtBRrvLW27JUwm/0qpnO45FebOB5I3UwJqNl1PUhJdZJQDmkRZGLQQGC0ZVGZpKC32iZRC68mzVk4Its1G/UnlJBUye807tweBrNXUpSUJRdOPOB/x1u7RTz8lcu5SKcfgcU4xjnJPPY4h39BxzEDV2A0ImTBSUFjNvJ5RlQWz2RwpFUknVCzwPjGMnr7v8cNAVZYPaCxikhInZCajP
nQ+pCCRC7N9apoQmMIS3YgbBmIKqCTQQk8sq0DwY07nDQGSQymNUhZjdgtKpBug70bGvqX3AYTK11qanDakNfP5nKLKbCJ7U3Czuc8g5zCiigojJSE62qFlbiRWK1aLBW4YMrDos4daSJlZtNsj/A4h3qHR+1RXtZf1wgPopLVG7jwbJmBEa4nUFjEh103TYCpDXS0JwdMNI+tJlhmTys9OElilIQVUill26UdGF5EyoVJiaBvK0lCUlvlyTtAKExSuGdn098hLmK3mpBT2VPRZWTGOI+1mS7vZcnl5yXK55PDkmNlqSWFtlgCus29fCAE3AXTOKZyMBCMI3oNP+TUisil/yj52pPzfCrDWklIGTUJQJCHQYQqYUApPRE4JNDKCEDkJNsSAC54xeApAJUEcxwzCiHy9Q0ps+5FuGEjBIH2kb3twI8YUfHD2iHR1xWbT0Gw7nPf0fYdQJrNtlZgsJTO4H99j0e0+a5EPiCnmSZA3STEB45HVfEGMcHt7ixWJ0kqMqmjanvPzC5phxnZwbBtPlxJRK5KoODw+olrMma1mXLx7zds3L7m+vyN4GMdIEFmyKqSZFleBVBptC6q6pizLbOIrsnxYK0WIgTSME1Dvid4z9jmF2ijJODEhs4w6H9yKugZT0beOzXbNoDRD12Gk4PHxMb1tQUB3d83VzQ0/7EbWJ7d86/QRZ88+ZJSCq8t3/OTLl3RHR/iTY55UBWZWU/pAuxmRJHSA1XxGO3pc2xDdgAJGPyISzGcly2pGQNCs7xidow+KIAXaapbzksNZyZ3wNNuGWMLxkaa0JUZnGbMfB1x0CCVACpwL2GTxztM0Pb0xLOtJOumy/LQoNaYy2ctPa+ZllomGaqQuDUpKxqEHOxACBD+SAGMmTw2RyImNkZSytD5GkZsoMRBDZl0LqfCjp9s2pBgpCzs1WUa6ELm6vqEsDEkYysUBd80l59f31OYdwcHxyRGFlLihJ/QtlUg8Pz3irVhz03QE55FK4dyAsJKQPEKB9yMhjJklHAMyGayxoLIfZfCeddPn0A5t0BMtPk5pz2VZIkRgfX/H7f3tXhpR1wsKJTk7WrF58pim+Zzt+pam37A8WLE6OkBIw3y2YFEtmVFyIGcsT05JTuCbDn/d8OMf/B1/9Ad/xPOjFVbPsseUlGA1tqi/kbh+M379SOk93G7XKWZvfCwBpOBgtUR99FFm8StFXdc8e/aMsqgoZzUxeLTWPHr0iJPTU2xhMdqyvd+Ch6Iu6YMn9AFMkVOam5b+9p7QO9IQiGPEmBqERihJSCMqlAy3ihfjPbE+4XghideOvokMtwG2jtdvf84wtnRuoBlzM6oNktBlJuoHi1OenjznVBySnKOgJYQtfRhQZ8/xYkl69geYg48x1THEwMFSEm9BHR2APoDZKTz/lKHfslpoFkfLjL5EQ63qLG0TEd02rDYboipQR5roYf7dx5hFgWjPOQgLvr/6ProsqJbHaFnAzQ3L5yXUBhaCRXqKsCPhvkXrgvF6oIxbmHvQLViPUJm9EYJiMDAkRRwEYDBSIqlIswMQBVOs2cM9F7DzYvuVq8M/g2VDCMFivuCPv//H9MPAYr6AKTwuRUgpB7I9vNl/+DednUHT5NGcrXqic2hD9pd2CdeMvPryJf/u3/+ffPbZX+Hilm//i+/wL37/D9BHJ4gkYLGgUA7zKtcYwSeEyb66afJM8xNTLtvNSGIykDLR4puRx6+d71+lk07MmqlnISUpJsZ2YLve0G4bBu8JEgy5dlEpolJCpkSYIriyt3SYFBhqsnLanXuY1GG7n9V7tXn+/oPPuXyvdtr7YAkeGEbT2NeTE4v/d9p7cLo0aX+dMij3Ps4B7KWt11dXXJ1fcHt3x+AdQudApFVd4kl7IlKc2Fh7u6YQctKnUpRlmW1wUsip50nR+ez7aVTEKNAqZM/ikD3rZCGJXpBGgRcpe0r3UBowStMNEqE0VvVIbSmKitlspF86isrQ9k1e6xYrdGEpZTWxZt3EnP7tHDvVpydiUFMglSCFRLzbcH1+yeXlJVe3N9z2Ddvo6EQgkJ8XPXmiE+NeSeq9J7ldMviDzVYKIStEwySDlVM6PZHgAsjJOiuF98BYpmcw/Nqt4zekuEoiOVkmJ4NOZv4klJhAOMFeVrpvuIu8fZfGYlXGjMR02x+68pGqqvLFnKDa4NMe/BpjpmzGnS9rijghJ5m/JKRI9Cmb+k8AntYaY3WW7imBKwRKSZScoquNRGmLLXRmsUyFUKYpZ9qhRCBFTqotbPb9SikRJ5Aoe75pnA9stw3KZ9molIkQ9MODGCIhZglWdLlYH4NnMBI/BrrCEOJDIIKQOkuDraQQEm3DZJicuychRvLSnV9bTCmrmIWY7lN6oFPLqQszxSUnIVBCYbShKCxKTi4iscK5keA8AM4HYmwzICEyGysQKKxC6gUzssegdw8SXyMVRgkKo1FGUtc1rcu+UkVp0TKnce66PXf3N/TjgB8H9JRwIqQgyin9d7oVOUBh6vLsu5rsfep2X9vLMuPOSyHP9iATEQkif4iYcN5nTzQRsL4mSYV3mc5rbYkURQ4nGQNRQAgDAcORNRirodJApPeO9XpNe32OqCukMXzr7BHfOn7GZtvwwy8/5+X1O6qtzewyYLVcMa9q6rpGkWnoXdex2WwYnWPetdSrZQZZyQuwVJKdOTAx5W5mmFhzYUqRiQkZM8ovUga2lJAgNX7s8nyZghnCNI9DyvMpxIgXMUtZpSQpSFoSlcjSyxhyorGYgDMhkTIbnpISnR+4ur3lQAyoJFjNZ/gwsO07upRTWktlmNkShEGZEltYpFFIJaaDSP773qfoawRGSGxRTszbnI4aYsQNnuhHmrbj9PgIqXJIgU8e5yPD4GnbhpACt82WEUDP0UUFtiJEjUySQilOz84oSk0xM7x48YLPP3/J3d2W2fyQ05MTBH3uLqeIFDJ3n8oSaw2QN9q6qrPZbkoMMSClpLQFhTWsQ4uIEa0VSRgEEh8zuImSdL1n8APe53tWaMUgEkPwEBJGJI5XK+Si5Hqzob1b8+WLlwzrlqePHvHB8+fUyxXdzQ3rruHLtxckLTm0pzgpGVTEGM1hdcCjqqaa3fDm8oqxaRBEgh+RQnNgKx4fHJKM4HJ9w+tuzdV6oA8jm82aMPR8+OiYUnzEF69ecXHXcHNzjYolRVlRWsMgzEOQUEo5DCjWxLDzDBUEskw54Akie/dFwIWQO75RUxmN0pblrGB1YPCpp4+erhuyrFkUWKOwWhDDQApFZk6r7NfjUyKKzMIpdYFWGmsrnpyWXIyB68t3xL6nLkv0GKgKTVVahr7n+u6O7XZAqopydshmSLy+WiNMyaldYo2mLg3BFyyPzlDa0n75hpu2gbJAjy1ORQY35A51DKTgEVFBCDmpdaLOC5GTEbfbhtB5joo5R3U1NSYiupgCZXxACYnVJv/ZEEhuJEWFTHmOWCnwJgPwMThCGLEJ7q7u+WITaM5bPjn9mKe/9z3+7K/+gi/+5jNCOxK3Htd5UsweOAJBFJKk9NQQ+R0+
oH8zfuNIMKkoJnhuevYReU1OQuAmX535bE5VFUhJfjbnda4BE6A1Zj7DCJHl+CEgrWZBTXIJWRpMVUNsCesrUrtGiRE9M8wP51AZ2vt76uoAJwVNCMxnc4ya0WwcrQzEgyPC4TExjNRFTfX9iu7mmp/8u/+HL758R08kasny8IDqk48opObLzzpeX/eUseeub7i9uaeUjuVhBUXB0299m8Xxc0T1GGFPQFiIHZyeouobRD2jD0v8KJh3UOgTisfPEAYYbyHcgWnBeBg3jPGKbXeDmZ+xrA7zPrlMDBc/5/aLH2JM5PjZMWgFpmfbXnFz9bcsDzWVUlydXzH4yNHRY1YHxwxv1vjLjvvze4qVRh4HzNMasapBFqiwQg4lw01ibEeMPMKGY/ArhJ/ls8dX8Lldq35X1MuHibAb/4yWDCk1sqhRusgJqfAg8RVT0uHXMUv/AUbGB2KWQaWcAJmMIBlLJCKjZ3N9z9/++x/yV//hz3nz6mcsDgzLxx/w7PmHLA6OGJqWsp5lME9bEArnE93oUXrEeU8gpzZO2aNINApNFtUa+IZB9xvHRK75hS88pDtKkVUfbdvS3m9xYyAJmc+NLiCkQKGQMZBctpuIKmYGU4zv/QI5Ff65Jsz1Tw4jFCKQ0i59M2aXG5HrV6XVvl7KJWVCiB1bNL/Kr5W5fsOmy0AKcY+2CiY2QSLb8TjP+vqWyzcXXF5c0/QdUQmSNdPzm0OS3FRjK5EDCJOGGEeGMVsa7UL7KLIXXIw+hyMFR5psmowGIx/uXYjQDxEo0MmiRokRkUJBpQVGw916jSkMpS3R1qPUQLFumG8bilpRzwrqukRZwyIMRDMAIOQ/g/xmASLlNHk13S+ut7z94gVffP4Fry8vuWka7v3IIBLdVBuXKaGimPznpmeHxOA6xolUtAsvjMll+V/wpJgyPhazZVsSWb0IU1NnWhF2mK9UEH4DL/fXmymI3F1JWb9KRu8fggj0Pkk0m/Dtwp7zVyLeDRRSo7XJrCwjkVpMKGIGr5LItEqhJBiJtZZxdOiQJ6WPAT86kg8En0Megsjy2amVNaWlgpQe7QzGRJRWNCOTsb5ES4+1CWtBIfFJ7dlu+c/KnA45PZAxBaTYAXSg5eR/hsyS2JBZG94rrJOMXhFimjyRfE44DNP6agNhHAneEYNjGLJssmkG2EmGjcHogqIqsbbElha8Y7cYj8HnBSKv2oiYWWe7qN8w6SIzcpwTSePYE5Kf0BVJRNANHpGyX1Jp9SQrVVMYgdoDjKSIG/LDGpg6a0KhlUCpInd1fEaV27bNybsTQy0zIiVVmVNSdwwZnzxN09IN/VdSWNVkgxGT3FO4Y2KSxMW9D9uOBrz72Mlcd9dckKb7pRAKklQIrbKJfcqysZQCUmliAF0UKB+xtkLZGq0qtpuW9bghuYipJcSRTGQVRJ/QpaE6OqCqCsaYiEZyGwLBDQztluQ8RaFZLJcYKQjO0bcdcrOhKkpmdY2dfBmllKy3G9bbLZuuYdY2mDpLrDPQU2GFyqmtavos8odWknEcAYgyIGKamKuZwSq1Rnq1v2Yqmw3meb17bsW0iABpAnbl9HNCCYILOUwFiVIm99JFnI7pEqFkDm9wDhUjIibGYWToR4KSGJ03nRQFylYgbT6YCIFLcb+BRdKUQstkVp/fQ14MJV4InMsBEgiFUjkxebPZcDAvmVUVMjlc37FptvlQqxXBJ5rRkbRiZqGwJTooousYQyCQUEazXB1wfNJyc3vP4ALSSEbvMvg3dRx3jNWYsqxdJgdpxBiJVTpLRUOmUyutMdpkEDwFlBAkbVDGEGRO7pMyBxvctyPOS3RVU1mLmM2g2zKst0QSRklmRUHSklJq+vuW27s1m03D9XbLs2cf8PSjj4nNBtotqqhBW6SG1dkxM2mYFzVWWvrOcXd3T9u1KGOz31wSlEKgnUfbglVVcbVVtENDEoqYIr7riM0dMnq+9ewJ5Wzgyze3xEFhi5Kjx0cU2mC0IqTAGCIpiiwVF5rCamxhCCnS9x19gKQLgowEmSWrPji8GxCxoJCKyhiOVwf0sWO439J1DeOYEEbi3EBMHik12maPO6UEMXqcgxQcgohWFpkk0SWSg7mteHb6mHUcuL6+wYREtTzg+OCQ+82GcH3D6HJB1I05bdL7LcZqytrw+GzFtz58Stv11Ksj0Ja3VzdsRkeMPncjk2OMDhdzUpqYKOa5B5X9IpMMqMk7U8jcuBjGAepq7y+TTYQnw2cBWgmGwdHe7ABSAAAgAElEQVRu1wxdS1ktMUowdB1SJBbLGqE1Q3AMXUs1X3LVdrSbgcYnnhye8of/+n+ievKUnx48I1zd8If/6r/l7KNPkEWNi7mRIYUGlZmqcXcE+LWHhG/G7/IQk1riK+fMlBMIk3xQFRiTzZnTOPXMduxM8SAXysyT/I9KEQqFUCPC3YH0+NRyd/2S2N1zeHRA+eQA5gq33sC4ZtCRVBaYouJbHzxlfTHw+vxnFEczFsdHmMOTXHRubjIx7PaSFD3desPFdouoDXZR4lQkCbhuHVJpbpo7vri8RBvD07MDjh59RH2yYvHhc1isECGBus8yUjyCgN+0dJcbmBvmj57gfEfYOspHp8TtlugC2gqIW5xr6Ls1LmxIxlMYD+0VPlzjxwElAkfPz1C2QMwXJFkQuoEiRJ589G1uzz/j8uYt1AYnC3725hKxbVjGGU/nJ9y8e4NrOuLGU3rJIknsck5VPUYWI017T3u5oShKZvMzKJfTQ/81qMNXbvTXpJq+J+/7bR4Pxv8ZFMlSzwdWUQwhpwz/Y74GmFgPcq9gUmQmRLPZ8OLFC37wV3/L3/3wRzx/dsKf/um/oVxJkrEcHp6h6+Ukp8ohfqawWUUkVVbdSIGPAQt7gE5N/5bCIuVvL4Pmn3TsztQAIhf6MSbWt/ds1w0A2hj8FKoFuzouK1oCEaF35BcgCnJezkRkCflnY8xnnhyk50kpM5QfGDq5Vn0A5+JkVfVV0t/74Nw3EtdfHnvWXJwWtx0413RcXl3x7tVr3r15y/X1NV1wBBIuBXrvUDErkVJiksrmazyOI13XEZzLElWlUFqhRMZOghP4FBAoYnCo5LEyoSV7dCcgp/NqBB8RRGTyWJkJ1YWVSOGwNjCUAqVy+KAuFO3QUdWKulOcnh2D8CQ8EZeDRADEP0Fq9T/YyE9gFFMNFgSpdWyvrjh/+YY3r95yfn/D/dixdQNOkf3nQkSriHIZL5DawOTrmILY4xA77CHFmIN0UiYY7fDzlOJkxSMmlelEqHqvHpDyq7vp143f4Hb6AMr9cpsgb2JKZAArMh3KUppSwQXj2FOqCkGJ0pMcTGdwICTPMDh8jBNJLF9QKSRaW5IUFDrLhYJWBJeZaCnGHC8LE3U4EsmeJS7COIIKEZTAx+4hJVZErA4UxmNUjs4l5AuotEREgzBqYrNNi9mkE86pOBIhsuxKGoFUibko8F7hnMqMpInK6lzAuUgYJjA1JmKVZW/OjbhhxPmBthv2gGcSI1L16K6nrGYUhUELsFMSZ/65HMedUsS
nkH2cyHLXkGJmtKVISBN7Y8eOIi8KznkC03tP0AVPDA4BlKWllAZjFEJYpBD4KQGoGUa6vqNzGVA1psRaC0BwHh+zD18g4N0wMdnk5J1WZUmgMYxhnEIKRsJURO+54CJ3mvbzTIKSmWUSwgNT7n0GXU783W1aGVRMaWeommE1qXQGdogUky+bjzFHUJeJfhhpup4kJYWxuBCIQSLVJC8IASEDXuTOY6lKlkXNylpsPaPXEn9/w/nVNf3NhjDAKOFoteT07JTtes27N2/ZrjdEH1guFqwWS+oye5tYN6L7nj442q5Dp8AwDNOcm5iDIWsid55zaTLmV4g9Ai8maWgKMaPCMbOnZMrzWekMLkklsGWBsiYz9MhMSxc8TB2EOM2pXYctpeyhJcXE6Aw5cbOsaharFctlgbtcs91saPuGqCRVtSBJRWUKgk1IY/EoXMxMOI8HEmIKitkzICdPPSawT4jMcHXJYZVGyQwkK20xuqDrRrqmobaKVV0xK3MAyc3tPavjA/oh0DVbkm2gdIwZ8kVpTRSaMIHZymiqWc189ISgafsGZSUxFfk+xMA4jvR9j+U/s/deTbZk55nes1ya7coe06cdDGE4HHIgUTMT0oVCt7rXT+Uf0I0U0oSG5MyAJAA22h1bvrZJs6wuvty7qhtAgw4EMcSKqDh1yuzKnbns+72morJM0fRFQHArW9txHBkGkSeLd1Jg9D2mShhrSVqTQiSljLaG0I8MY6G1jjAO6JJYtS1jSoxDZBczRhXaqqY6cnjT4DcDwQe+ePMa2zjOj455/sGHHFlNIbIJHT702CzBJTZYxnHAlMLRfMbm6hqlFfPZkkZb5s6Rdz1DGEg2YSqNrQX8MlqRwsjlq1e8ffcJT168z+npC7ajZlwXLt5dohpNe9YeGKw5i7+gUerA8lVqSmzUsmgWrUWGWgp6SihWSpigVglIq1SmndXMU6LZ9AQyPitGP0iCuGWqIEMqSdYKJZtZjSHHLBscEtu7e7r1hnbZYKqKu4tLiJk0jpgCJ0cnPH8aicmwvRvY3m3xFpFNrzeYt4W2UZwcH3FyegLGcb8bWR3N2aRMj6IbeiiJZIrMhTmQoodYDr5y4uEo8vO6qlgulpgswSlojXGVMBpSPGycS86Mfc/19TXjOLJcrHj/wxW1s5ATbe1ISuY1Pw6MvqcYzZ2tsDhSpVnqTH92xMnih3xnLBwH+OGP/i3LFx+gbCXkJxxFCTh3YIr8mg3E79u/4rbvHEVChmTelkJFmYK15rOG+ax5+JXJPgs4VJrlNQpoSXKfEBFUHhju33L76hOKy7RLR1Rb6nnCcIcaRggd0a+5izdc3+9QiwVPv/st6hcrnh41VGZBdIXZcoFSmrHr2b55Q/fmZ9jNNX/0w+/w/PiYn/38CwYbePoH73FyPufqumMMirDRJNXjN7c8+egZH3/nhGffewpHR+B6VLmBugc6Ugr0dyPDy7c0Q2J+9i3K2RmqzcTP/hbfVTRnzygk4nZLajusicRNh6pmYgGzWqGi5v6nf0vGoNqaxelTzMlzmD+j2DPILffXF7z98X+muhn57CevoLrlf/4//nea7/2Am7cD2/uK8/oYNwSeLL5NvVgx6kCcGyr7HMpTVDkih0vu7xXvLm5ZPu150lhSrRj1ZMOAgDb6cKQ/PDy++fC2/9l/+Qe8r7OHHqdyFh6zSeTrJYvvm9b7wstv4JqmjzR9ZjLoeLgC7JipTc33/+0fcfrsOc9fnPDD/+mPifGeu7t7bNFyvlYWEGsSZRy2aTC7Cq2lCJwfgzM8ApWcEzn578Dz+423bzpFP1ogH4+Ox/+WUuh3gct3V9zd3FNSQTWOHCIosYoqJaNzwRgBhAedpmcv+/f9ByBzbJ7m2EdfN1YJIaVoUpqUVnuPdxVRWh0YQSgoj+dffhGo+9cUBPL1tj+WZqazVdn79ck8WLqB+5tbPv/kM169e8vl5SXrzYaoFViNzwmfojDmyBOxZq/AC5P9ifgHKq2w2mK1PhCEjFGQFAZHDAGdR5xJWAWFRFIaMOQsZ7QcR2FrZY9XCVWMkIm0w8fMrsuTx52mbi2peHxSJBzn6hRXO5RhulY55+2Zlr+rTYgfkHNCj4l0v+P28oqbtxfc3dzSh5EuBsZSCFozKJGLlwImgyqiJMvk6XkllNIcwudTAjJ7EzijJNDlkA+QMsYJyUE2SYLHTBcHCE72wLX9xfaNAF1VwEwPKe8rR0UqnCJWmthVlCmFREAiraRCWlktFzAhhSlL8IE2GqdqnKsZJo8tSTBJwJ6l5dj2W7RVVNZRNVboijkfJJbeB2H36SyHtGlFjalQciTkRIV45iWdSUFuXlAaTcYqxZgy/SCASOXBVRLskFUBUw6gm9pXsvRkAmgUFnPgK9qSH9hcNhJsJlpLToqcJmBxYnr5esT7GmMHlBIKZs6FMQT6weNDwhmL1dBUFltVBxaaRh+QWxBtc6IQklTWUoZQMjZPA22STBYEwa+ritY5rLWM3Y5hAO8Hdt1ASFHSfoymMoZ21mAzqDqQO0vqevpxxHtJui1ZYr+11iKdpeBKQU/MxJQSzom23hgDSUCNbuiJsT/0s/3WT5V9Aq9Qio2zgk2lB90+7A/86pDWKvddBkmZomcEHJEAiqinwWZls9FFz+gHVlXF3WbL5fU1BU3bBmKQwdVWltAPKFMzO17RVg78llISwUc5xFpPwVEpzaJpJQnWeHbbHbv7+4lxpjhaLJk3LaoUhmFgt95weXHB6viIDDSzOWYKXMgFfJREn5A8KSusgTFUjE5hjcKnxJgy2lSyfVJaKipFwNcSI1oLi1DASi1gJQWdxVx2759SSiGHJIy2rAjBYa0ipAzKkKPo8rMC0JNMNlNbxdw5jtqWduYItaY4g1UVJhXGYaBplzBJokuO4oU3bf6cccIaSo8lyqLl31coqqqiaRpyTozjgE+RympiUfgggGZbW9q6wRgYQwI8s7ZmfnSKL5ouFHyGyAPYq5Wmbhx1NuQ4kLPHGMPQj/RdTzNbMG8W4Hfyxq0il0KI4imZGzel31qCHwkxUDmHMRBTZBgHfI4koxhzJAw9ViWqAilLImzMRarYKaKnxCY/eEpOIjCxjlxZZrXBWvAl03cBHcEUR5sLVWjZ3K/5+Sc/ofrWxzRPz7i4vOCvvvg5YxhpDTxdzPnw9CkzW3E0X3Hiznl9d0vXbXFKc3RyxunRCqUK67Ej5YBNilobnIb722s2t6eQFDcX97y9uuPFDzTvv/8H3OgNb95ccD/ecPrBGXbhsEbCbKTiFxnTyDgUfDAsmhZnLTYHiteMgxQrSmHqjzIXRhKlBNIwsJjNOD86ovOatrPcdYFLfQ86s+023K0Nuq04XTQcNTXa1VR1TQXYIJXFyjjaWcvu3Rve3FxTnSw4Pj1lvN+yGUb8OHJ8NOP86BjvC7XZcRkL/egpxrBT8OX1JbrKfOdb77NaLKUgoGHRVpzNG277kb7bkVUDTU1U0KWCCQkXICcvXorFo7SmmgD641WiRtM2kE2iOIXJWmSswZO1lFhy0QxjIN
5tubvbcP5kpKkd56dLFosZ625gOw5oU9MHzzjs0DPNYDSldYTVgs4avnj1luuLz/jo+z9k8XyJamS3YZQwrFNW07y1P3vsN/ETcPfoRKIe7zSAybDl8Td/3/47aF8/nz4cRAXITSlP6gCReYBwfmyZPsuJFBLaGEkFTok8CPNeGcX8aAVGQOkQPMpqaqtht2b79md8/rO/IJF48fEHPP3wBbOjJeuLN9y//oyqDOgYaFYRFwaUcpydnKLdHOYL2ueOsb8hhC0uKuL6jnevX/PzP/9zTkzmR//+TzlenfFsveM63NIuKkLxXNzeUrCs2hOiipwsNyxmmuX5MepoCZUFtYO8JqcbxvEtyghbfbt5g99UzBfAeqR/9Qnd5Ruq2Rnc/Ry9XFA/r8ElxttLQt8xn80Z7jtSNzDuBm5fveHphx/SPn0PliuUqcE6EoaE5fTZ+5z+xzP8u59Twkgf3jBvvkOO55w8PeLJRy8gGugHbB/IyjFcXnC/GWjNkhaZf3Jp6UPNpochaYLVRA1BPbjPfXXEy4Hz795+d1kYsuecvIamc4AxRhK1jfmNz3EFyEqjy/5EWATUrgz1yQnfbY741vf+CN2AHwP1XFF5i22XqLqFmKGRhFayRSuN0wWt8lR4NZRkUFRYZKaX/aQiW4ut3ORJ9y+07Semf0GXqB79m4uQkEwp9NstN/fX3Ha3+DxilBTXtFVED6EknNIoKwCARVQFB+uoAkVP9fEoKg5A5tO957iV8L6cM5Q4+bsL6GCUFD61FpueXCIlIQX8ojFZo7OAe+qQ2Gwf3WN5M+Uw89uvLQwPZ82DLPprz+eg1mHqZ3uy3uPXUVDUnkctarffWitQJvXUZCbIRGAl9APbzYY3b95wfXvDttsxpMikLRZLLK0gJmGMC6JDTmJI1VQVlbXsdjvZWSmFNqI2U0BKRtar4tBZT1ZVlqIKuUQhFqSCrjRZFRSRlAOkkaQzIVtsduSUiD4RY0dR0DQ189KgTE0CqsahdE3l5hjdyHucnn/JcWK6/zbn768Ven4ZWP41kHzfNIoaKLGQBs92u+Xi5ppXNxdc3l+xKYlUWQpJCC97//aiJLARGEIgJPGrFum4nbwgCz6kw55XafNARJBBKvLxYgBZR3JKhCzKQVWyEAm0Jqt/IEA3j16QWDfJ2aoKXwLBZ0IO4pGhFHaKjC0AHmLyhChdOw0Fr2BWhElVKsPMiqdXSomZk8NyjBGmxJSqqjCVo9u1xCx0UGGdSDCANhptHc2sPSDS/RjI48gY/AToJJp5RSme4KdYaqWJYe+pp6hdhe8yY0zUdaRyFjsWrBWAx1ZJQhm0kfugDCoJXRxgGHox5iejAEOhcgpT1aAMY9DEADEk8XuICZMsxjnqNlM1IzFK6muMCaUscQI6wxAo2jB2PTnv0E7TtjWzRUvbNrRuTjd2k+G+w9WNpAHmTFUKxmpyiojOVphKOI2ePO1UThgjIQohG0KKdH1P3CaMNVR1zbLOzNsZxTnQA3Xb0C4XDH3g5u5W5LsSFESjDMvlkgqLHhI5e4ouBw1/jJKAuw9MUJhp0isYq7BYtBOGSfRBwFxbk8d4CBFRSjGOo9CC4RGLzoi3ubM0TSUhBpOPV44KbTVjDGzGLShx8uv8yBeXVxgnwQxjhF3fAwatLWNIqFiIrWPIFq0qnG4JfqQHKqsY+4HUD+BHzJBAa5bLJVlbLq6vCLbn6HhJLBXJyP0ZhoGb2zucc7y5vGKMAetqjk5PmC8WjMGje4/WAVySsAEyI4k+BYovuNTQZAEoFdDWLcla+hBpqox17jCtZWSBGENk8IMAyLkw+pEUErW2qCi+jrW20s8zVK5ljJ6UA8k6cA2ljBSlGEpkuL/hW/4FbLeUp2fEhePlZ1cM2y2rxRl1PcOPQbDWyQezbmoCisGPoCRhZ2+Ga42EyOiJYSXXPKCDxMRXTT2FYCSpSCjL4Efp4zTYtqFQGPvImA1Lq9HzltFEbjYdbogcYZnVFaFkRj8y+I6SRMprlKVtWsi3Qhc3iUo74hApuqKyNbFkBu8J2dGNkbYyKCfhNEVrnNIyb4U1XUoM2nCz3TLThnnybAZPTuCaBWvf4VRm5jSf3d3z+vae0+U5R/Ol3ItVTfEKqx1VFFaUcYVcWSiOytVUww4Ve84XM+a1wji47rdc9JFNN2L6jvV8pI+GF8+eExGvweOzM+LbdxAC/f0d90qxPFqhjCV0PanzzLJDjxuUHfFjxJklZ+ff4vN3L/nLP/9vHL+65E/+8N/x7fkLXl1f8ulnn9IsG+a1Y5ESdRlpksLqDhUN1s3ALUlK0w935Aim1iybBZcYMgYP6LbGOkPII62eke7WVIslC9dgFk/waaCq39INV2Qy99sdLZq+L4wriy8S2oEFYzyx29G0NWEb8cry7q6nLYUXH76HcQsuvnxF1w989NTRd4aFseRVSzCnrN9ec7G+xzdzTheGzzc33H8eeP/Z+2xvdly/vuDu7TtenJ/xx9/+kJ+8eslfv33Dhjnq+ITONmjTcKQEHG90gLjDx5akDJWxnLSOeVVYLRUXmztuh0Rrl8x1Yu40OQXa2YITbdn0kV3X4bOwYGtXePFshQ+Jm9vCq8sNUSVcbelCJGzuaJYnDKriszdv+bM/+zM2F1+yWr/E/o/PcKcFjIecUUpCfw7OI9NirkigtATCFQXKCuifM1qVg/jqsFMqe/fbX9w8/b797rSCWImk6fMDm6hM+QZRDlBWK/rR0/cdtbPM5+2DXEtFYIsyUsCk7NNA5ZRjtREj5zGga4u2hso1FDIJT+ivKOMFz1qF72FpZtT6mM3bHdad8PQ73+eLn/xfXH7xY5ZWsdJzwjrzs//zP3NbPqE5fZ9vf+8PcK7A3VtWqoM4cvvqFX/9//w3dPYQHf/hR/+O1fkpn3/5mvu3r3hvdkLfJ4yqGYeBi9svCU3k/Pkf0tgZ4csL8WedFahH9Eyj7RHZ91RF8eTsBbc3HX/zn36Oa3dUy5qX1285Pr7nD59WpHUi01OdrqiXinqxoERPvb3DrAf6ruf4pKU9n6HiBrpAcbeE9UtS84Rm8SHKzWAO1fsnfPy//m8M/o6xWVDrZ+j2HFiANah2DnWg+BH7/D3OdC3ewE4RKSRbUS3Oce6MIdXskrDm5jzAcV9t+698Fbr75nZw43roXF9vh5cqfP2Y9VtrCvQjGeveaB/k67/pqU1jcFP6ICAPRuACubilFRBcQVNXQiKoFoh9aaEYQ1YKXRQ6WlzRpPGOpgpUbUNTH1GpY1QwVE4IDooMxmCOV1OV5oFN+JU2/d1var+Smfi1n3ksGXt8j0sppBSwVpIlvU8oFFVl2NtzK6Q+PTn8yH3Tj/vQP7z/HK5eff0L05fUV69fPFunEIYiBJa6OFLI3F1f8PriNZvSMdhA8J4SR4I2FCOF8wzsMmRfGFMhZsT72sifHsPekkmjdA1ATAmY7k8oqBhlb12gTGCbsw3W1ISkCDHhjMa5Gm0qVAkszQw1FBZmjokSKkBRkr+1f2YWihJ/vIJGFzuZ6D+6YTpPNRsBs
lRG1APiWwOqEKYdwx4GMunRayjBM4qCMBUGZTfxm50Dftl0tAci7VSEUuwXw0LZjrx59Zaff/Ypry8uWA8dXc4oV6EqKxZBKaITLOYVyY8MwRPjpBIqkr+ljGI2ax5UdKUQk1hCFCb/z5ywtSbZmpCC2HsZK/evZGIKpDAA0DiDcRWlJHIobL0nhZ7V8ojKWnzwxBzYDYWsEwvdoNeRUhY4e4pRKwwNqlgKBaV/jcDyN972HeyXPSH91R87gL/l0J1MKeALahe4v7nj01ef87PLl7wJa24Z2fgeo2rqAmkIuKJE3psgFAHUxtBNNjNiO6PdpMJLgRgT1kzezyVOStB8YB5qa3B1xRAkPDCXydf0EC4hSqNvYuh+4xNQ4wBGU6Ijq3zwDVNaY7QkFyojw80ZhbKaCNhkSRR2fseQRvpx4H67mQCUhlkjcb/LeYu1GmctdV1NXmOiyw9DxLkKWxRWG6IVoOYghyuKGMPEpDLMrKFpqwPbCgrb/l466+RRl6cFIE2HjFrXFCDkRA6KmBUmgjEiBWyLFrDOCgqqfKTEhHF2AoYqKBk9lTgMkuKhppmrthVGiy+aiYqc7cQUkonddnZ6T8I2izETYiSGQAyZHJJIUhH5SPKJbtPT7zqKKrRtS6llERXj/4KaEjh1MLiJPVWSTA5DypQUyN7iJ+lsKaJ/x2pBkXOmGI2xjk3fESbj+5Bl+U5JfAFzzgw+T1RgjS2ZmJV4T01R3q4WGawfBDiVSSqy2XZst1sBDicpnNYao6ZJf19xeOR3+PVAiJSSpN1GPQF0Ckol9FOlIEfyOBJGj7aaWCKZIhWrEunCKNRia1HaSXUqB3LZb0oUq+WKqm4JyuBzgWJIKbMeBnIKfPTRR9KPQqKyO3ofDgj5cjZjOZvROMeQ0sQwTRI4YA22rTl2Z+LfFxNdPxKTyC2TF2ReYpintFOjUK4iGcV29KTbW2FUKsUiJU6dhDEoreXQk0aIUGxBO0XdNBRV6IdB5A5KS7oTe287ASaFiSvXUVczYujofWC8v+c6dKx3A5SGuqq4fPmalzNDtmsiO5rVgvl8hi0N/c7j/Ui3HQTgqaWvZ6WFKamkOpYRxmIqkzx+v+qrQtM02LqiRJFOG6WxWuGcpmlqmsoRvGe93tJ1HbOmpqkqVCzcbAdyyFyvd+xGT8h5KkBH9mVJkRbstwsCLqgpvUflgtUOZyqUMuIVktOjEBYJYDHGYJQWZmCWpGhX1Zg6EG0WJkKMVDxEoqupryc8qUgRIGa4vLpne99xtFC0i5pEi0LhtMa6CqfdBIrLxL6cL3DJUuuC7zfEOGd+tGJ15tnla06Pz6k1XKwHLu5/zoun53z03nvUywX66poYI8YY2rbBVg5Doa0bFjHR7BLnqwWz1QnPzp9S+aWczquKlzfvuLu74r/85f/Lxx9/h48/fI67dby+est4H6hNS1nMqXSFUoVcMrsxkLQnK0PXR5JyUrUt031XRjzvtCFrjdICADWVI1nDdn3Hy6vMTZ/Z9gPZgMGAMgwhcXm9pnYVTVNhnWHYbpnrBGkk7DLVvOWPfvSn7Gaf8OOf/iW3u3tOz98nFIWPnjT22FSwWsB+4wzHT86IzrDe3eCHkY8/fI5pW252PX03MPiIyoWVtbxYzQjhlNtuw2Y3cn11Q310SomG1lSouqKuJOihD5kxaUrWGDStzcwahfWG1EdKiagYGPseazLboWPwgdXJGbae4YMkia9WK64u3qGy59nTY4ZxoOsuQFlU9DSuxuXELkW+fPWSm80dbC95li/o1z9CpQ44kj79aN3PIUIOaBunHYJBKYtWdpK/Pz5sTYehAr9H4/77afut8f4jxEjfDaSYscaSYuLq8oKrywuapuKD91+wmM/JWZQHdlqHSpG1pViNpMII7KMaS46J3d0dZZdoFw3z4xnFiFWFViP1USGdWzZv75m1DScLh7YF11h2myuM2vDei2e8WBVuP/85rz55Q7dd8PnlFV/uGo4/vmGsEh9/sKB78xknmwUlwu3lS56cnENOXF7e8dd/81OOVxWz+YwrP6KS4unxKS+/uGDTd7z/8YecfPspH/7Jn8LyGfF+TdndEoZRzp5HT6lWDsIdbO9ReYcqmWXdUDuxTjlatqi85eKT/4+dX5MrOD97ytzNsbMjfC5sX31J4zPzRUtzMkc5D92G0oM3hqE4KqNR6hjQhJAZdgN6fsL86VPMYo6yNZkKcfwoVNPejspIoBkOYwpKJxSZrDVJVRTkI09pkOJ1NoEBh2G91zo8Bun4NcP+MQPi78qm2//OP8988qukfOor4JT6tT//T3pNCNDxmJHFV+bcKZrlEbBZlCYfkhfVAWMxWWFMw7yZs1q2pHbEOEfbzKE4yAZTmJxmsjyl/f77F67ra3SVf4Jb8TgE7vDSk7d4jFH2WnlyxzMCzuUEwyA+TlUlxKUJHkMSdvcX+I8Dd77C3/naW98DcfAYvC0HCSlZQ0zEuw23V9esd2u6ODIUjyoFs3elMkyMHTWRz4p4Zj2625IEL+SN/dFAWZMAACAASURBVJ5VlEuzw9molDTdq31gHlSumZRGE5MnicVLzgLkGSZrqhQP1jgKUc9gJ1BK7dlv8QB/7Eey9MsCKk6IHNN39MOnB/BEHbCIIlvhh4LgLzTxfMx808/8Zpt4q05pnkWet7++58vPv+Dnn33Km8sLUSvkSJzujpqslnIq4q2axJ/MTO99P2aTEpuiQ0glE9irlaSCZgn2U2qvKhOujRwhHqyHQMA+oxRWyevnab7SWIyrp98TYsOeQVkUKG05OX3KcnVGU68wukWpSt7zb+OGf2Pbh6X8ivF8mIv2BUBhhuI9Zb3j+vKKd9eXXO/WbIsnVKAwB2k5k/oTZcglyH3OcQqCmOxm9s+JcnBiGqPgSykJoLwPV5PpQLPrhwNupSZs4nFgS/DhF+a+x+0bAbqYEkUVkU2qTI4JUhLTfTS1VWhVcErLYC8KoyBqCejVVDLJBo8fBzo4MKicMyxmc+rGsVgsWLQNzlXi9zBVH+TwK1LZnK3IQ73Hh0CIWeRUk9k2ynwFvCkkrDuapLCCYJaYyCmSEUR0mJh08hpi3F9VFZUVqdb9MFLVFls1IgtEo6yhyg26Vgc5R5k6dJkmFfm/QhswU4y52J9NDzjLA7JWH0wH90Cd9wHvR7yPxDFRJtQ9T6k9IXn8MBKSZ7tb4+oaY9zkO7df7GRSSZPcscQ40WvTBAxMUd1FPUo/RSjwzqGtPJ88TRbbXT/5oomnwehF6jcMQdiE2hJCOICj+07YVPXEZhMwrZvCIcZxnIxNpSUMldGgsyQeaYWa/tb+/uScD1TQAwJdymHBcs4cBshhzCoIKcprlSisgKwIaWTse+bzpQQroND78ISiUdMhXRvDGAKbzYbcVswqR9POcM4S/cDd+n6SPFiRBeTMMHhKUbSLObOlJNfFnOnHkb7vifuElyIAVEqJMXYMQ8c4DqQypeTGKGAO4vM3jF485bSjD55uHCZ2ZWY2m3F2d8fZ2TnPnj3jCKHJF60E8FZaZnH2mwoZ
h/vndEgSnuaJfX9LyDOqqoq6cbRDxgyR6IW1Op/NqeqaGCNjHCgUrHOYJGzHlKKEP0xUXm0k7loAYVmofJKwk5TSNJ70Hh9lHCUNU1KNMsZocsl4n+h1j9WKpqkx1AQvLI4QPLFuMDkyDDvu11tCLCLByomM+J7lEthXZ/IeGFT7/iOzvTHmYLIbQhAqupavlwlQryuLNuYhVVprqsphnaWoQbzEQkSrTDKKbDWNNihbc5lH+hRobYWuHVfDQNd301zncXNDbRzWaGIGckSVTOUMddPSasXCNsxNIHrPZn2PToXG2YkFWlgdHdMcVezWd7y8uGWzGThbzGlXx/jtBtNUuLaWcVwSThcqIxJVq6CqLMcnKxZlgVae2dIyO6755PPP2d2vefX5Z+A0R8sFvV+xvb6jH0bu1xtmC4d2Mm92YyDjqWdL6mZGKhpbV1iEMT1Ye2Amq1xIqjCESNPOsLYipsTtbsNdUPRpktMlhdeOZDTZaYbgubu7J6WGugKfM85YfCroWc33vv+HxONz3ty+4c3bL2lHT1vV5FwmsF+q4M5WHK1qih6xBe7TFqJnWbdYbdluOozSEm6kDFoblrMF71vL223H6+4Vl/cbPJaNHpnVLTtVyOczfNYMSTMGiFFRlYrGWRb1krbqZNO+DxtKCes0Xd+zG0bOzp5R1zV917HZbjlazIWV3mdWiyUvXmh6H7lZ76hdxeg9u3gP9Qlx8Nz0PTauafOGXTeSMGhdIQCcnoD5hHEGnTOUQIlZJDR6v/VR01oxrWdlP14etX95u7vft79n27On9vvejLDb/TBSrIScqALb9YbdOvPs7EyS3Cf5R8qZUiop5E4Maq0eXrjETLe+p+vuODtbMJsXlN6g4g5URDEQ7j+FfMXzHzzBmjnD7jXp81csP3hB86EDdQRlSf9Tz+c//r/520/f8uIH/4GjD57htzV/9O//hD/+X75LZXve/M0brq9e0vcbxrImV4rNuw2EgD+esXj+Hot+hk8zjt0SP0baSnF2ds5733mf+Yun0J6h7BH2uMUer8AMpO4Gfz1gesSUv11hl1uOZ++oTufYZ8fQak52kXRzQR476maJB8p1IpsI77fUJ0vy6ZbXf/GfuLt7x/kH5zz9+FtsksUtV9RHp9h6jnEzKC2wAKXRZqCeOcxckqyZ1nEzFfUEEBVbh8ppKLJn33NElFJopcSf6kHXPrFGvpHq9q+n/Vbe8tcOyb/6Pw9fe+wbNm2ixFNeis7KWoyrpJBtjCTTaiPFMZBnrn4zb/ebQNDHZwb4Kistpog27iHsIMNmM/L61Vvu7+/5wQ9+gLE1xh6wssOB958KR/1VL6MQy6c9OPcAohb2TPIUE+8urnj99h2bXUdWEFOWUET0ZBn1i39D7od8rpWcIcvk0ZxTmsiNEhiXi0iW80ScKGXPey5oHabnr0UNBijR0FJ0Et8xHSla9meJAbCTnNJRJnk3SGDhtPrLWUrxCD1LQJwYQWLOaDQorQ7S1q863k23SsOU3seeDikvq6e/9Ntpas+c2/cpCnkYeP3uLX/zyc/48uVLdn5giFFsbEqWoEYKOqvp7CZYxN7nUWs9yZSL+MBPnmVfAW3ytAeeFGZKP8I0pkGap7CKnNPBn/5AUpG/dDjXVbadXtdP/u3yE2Zi7z19es5qtaBylVzL9NcfkNXf0ITw92qPyxS/+mIevXsE0cyUvufy6oYvX73my5evuby9ZggSzumcI4xRgO0s+EkuaSKNQCxRMJr9fEKZyFdMrlJi8fV47Al+8WAJl0KaWJMT3pIyqaTJhzCyXq//4QDdV8vq5fBhSkGRqK3DkA7eYUpplAVrKtE7t5YcAzGFwwUBE2CmGHuPtZr7uw1N09A0jRjnT/rstlZgzAGEO6RPAlrFQyfUU0JpjoowTU6ZwvFyRZjM3b33JB+mVDEZJHJz9/TkEWslOre2TioyKtN7TVUF8WZTQnEMITGOUpXQRR6K1gqrFVZr6SCTv1RW+0VTHdhdpchHoxz7EIqcNSkWnFNYW6idIblyAMX2KK2PClUipRj6rieEgDFuSsM1gshPAMw49OQ4ig6egjXCNtsDpM5avPeUYRB2GwqVFDoByVJXjnEC3mAPznnG0Yvn3YFNJD5xMXnB7a0V4Na5qZJecMbSuIpopoTHSZpcikQTFx4t0Mi+MsRwYEw+DobAGPTh2U0ph9Oz20thyVA7YZipUrDKSJKw1YTgqLQVKnfKDGNP7yO5aHRVo61MnCFLQunQZ/rBcDSfMWsqYfxVNW8vLuW9VTXNbE4qihAzsRSsFpqwcRJSMviRGHts5Ygl47db6uYh+EIBOWZSTvhRvLmcEvAwKthutxQSLJZUTqphQwp0Xce627Lb7dhstpSUqWzNcrnENRWd7/FjJKmIT1P/n4CmvXTiMTuxlIwq0p8H76kUtE3DfDljVSlufGLnI2UsPH/+nG9/+9ssnjreXkWu7q+JYaCmxRhL3c6YRYWyQUDuqoJcsBh0pQkpELPoE0op4tuglLAFlaJyDmetmKlmqfrLAUM6yTgMzNqG1WJG8J5ht5366EgcB6gsIfTEqIjRk4t4Bj5uZQK+KfphscQcNnv7PplSFm9NpaFoUkwEAtaKd4QA7NPiq2TzlQqEEMVnzWTKxDKutUOjwUuabV01LBenRNOz3qwJccfFxT3VzHN0fMJqIUxcrRDvAgW1UTTGcDRvWGiPHzbs7u+5GwOb+x4/DPgRKtUwf++I02cL7q4veXd9wWaz4dnxipmrSNYQZZpFG9Api8uRKbK4+IDTmlllaSrDETWuecKs0lxd3vDJp5/zsx9vefbxtzk6OeHkxZx0vebi5pY+95w9WfH07JQxFm7X12DW3A+BaGpO3IwhZ8bg8T5gtCb4SLCKqiR24wBdwWFwruLoyQqrGvzFLbfX16hc2A4duijOzo6ZzeeUnNl0O0lUzIGTWYvOmrtuJN3e0S6WfPjxd/DRUzUzlq7G2prgI8MQ8F7m7NlsTgrQAvN8BqXleL5iVs+JY08uSpKfq4aUMuMw0lSWs8WC06rmzni6fqAbOu5sz7th4IOTluMnR1C1RGp8sjgUFRWNaalUxEyV8qIKprLY2hJLYbPraOc9oOjHkdvbW0iRpmnwfmS33XK0XPLBi/fpxy+kaOIj2Xu0CyxnLUVr1JCowo6iKiQUQrz/SpGAmDEIaL1b3xKHLct5y2J1jNaQsghczGPp02HT9A08/d+338n2eDvsrGE+a6ltRV1ZnDWsFnNWyzm7zZrlcikHv+k3hfeviBNjQCMMgL1EdrPesdltWcwr5scNuADdDfi1+CKmDTZcYWcBKof3nrhZU7kW5SJRFVjfY01D44748L0fktJTjs6/xdPZC77fnvHxj77N4klNvrvFjluuPvspPu6gFLbDPdZZPnr/fd7/7rdYfXjCiUpsb3Z89umX/OSv/gqGLc//4/8AbUHV00m0FkY14wbySDGKkjXaLEFb6LeYoaBPTjAfvY+yI2yu0Bb0+SnkI2ojOq7N55dc/OQVq6sed7LgfnvD66s7So6
I0PgU3XMgw9VzfLvY8YMaJTpCo0i9mcetaQQkL5gFXQlAV12aCUEjpljKxXK3wwZAR59iERNh3eR4bCQg6YDNqWpMqSrUiKirIkhZHGHQvYGAIpeEyMqJDY9htGH+mdpQ8RM7Ek3GQcWVhNYR1aawYvCZzCdBPQQBWOcZ7YLj1JhYlxJuw0bQ1ZaSn8EUaMtlMKY1bSAgCsyhwtSrRJpM4w5ERInom4iUJT1QUgZpQ7GWpICZJIda4ur1GTL6AycsxlXVGXFbZwHMwX72quvXgnpUkWKf5nEZ2TbBCNEZmfUuQpZS8E8bbYUYFTSlNwQMYU7p3rSed3ZyvF3bQnfQu25SgAXd7p3+M04Sn0hIyr6XHQUzLNjvkEikkSPCXmaCPm6mrqjoD4CRgjwItWFqsNOSmSBmMStTPYNJCyMFyGHBljovcjKosv3rxs5DMfB1IY8Sli65LSaUiG49khs7rBKs1q20rYgcqEqfuSxgEfxROw0FbM4lNi23WM40hfVTRVgbMFyhlSrTCmJIbAOE4+Y3VNoRVD19JtN4Sxw9c1RVmyXC4ZYqQoKmbzA7yPtG3HVm8JOQiLZZKNhijgkm0V2kjRHoKwIeuioCwLfPJktWEMgd4ntBNZbMpK0i6j+DokIMQsdPIYp4Vtkk6GQO8Dw6gxpcEYmdSMsSgncvhMnsCPyI4xl6evRmkcmqgNpXPUZUlVOFIIdEOPH0asVZhy6oFpi9KarPXtYpwkJdhNU+Z+AdwtrtMvZQ3GgS0keCAFT8wZH4Ik5PowJQYrrJHwEW30njWZVU+MHutHFkpxMK9oD2sG5em7gc12w7Ad6EbFQXHEvaMzzs4ec07ml5/9msvlDe1yjelHCrR0cifkUGMJowS9ZGvIScJlfByIVjpyZ6cPOC0OmR8d4krLm7dvCGGkqgpc5VBWZLtpb1yrUcZgskGpEUUQtmUOHM0PWC8WXN/0LDct2WcOTxQha2w9Q9sCExQORS4sM5U5USOzsiCOxzijOZg3KGt4fXHFdnOD9yPHF9ccHy548vCMIoEvawyG5ByucmgTqXJiYYFsWMwK3HzOg8UhB2XFsxcveHZ5Tq8NzWKOWxySbMkwJlrdoa0imJLBj6zbLe0Q6f2InnxOdwxjo7SwRLXFuFI+C6CpSk4OG5pCoWKk26zptgNpDITQo3LCMeBUlnk3j9L5zBk/JLarntx6SZqelyKj1wpjNWH0WJVEWp4ynoQtKppZhVMWpRzb7Zbnb17yfHVDrzKL2YI8c3g/EMMoAHoWvw9J9JY1JGdJIrO2xKmSbnvBZn3J0dk9jmcL8jjihwiTIb3I/DXKWJQqIA7UVcWffvoJquv5xb/7BVdvOnLWtC9X/Pt/8wuODhref+8+f/EXP6VrBzbBY8tTUJnoB8Z+i7WKoqrZgQ1Z2Ylh9a73xjv7rLuglvovB3jtvV3uHIMPmW07sFpuefl8xdWlZxw8fuiwJnL/3gGhM6TR8OEPT4TNsD+knXvd7hV34Qr/rQITd8/R7jzIdQLS/c2Ij6m9aw1zB3DNAwwjtDHRuy3148Sf3Psp9B22GDEnNYebB4StIlxt6cvn3D87I6sly+s1y2rOez/+ZxTzx3z71VvG59csCsWJszSz+9A8xJYZ6+TaZQC/cWw3gTffbImt4qg+ZZUzwW+JauDHf/qET04/oiwqLs+vaZdrbq6/JbHko4/v89WwJnae1XrDqA2np4/55J99wg8+eg90iwrXbJeBty+/JsSeqio5f/6WonvJ8XuPoOrJaWC1ecPbmwsevn+MqhqoDLgSnQ+w6gjvFUoXKIuY7uSRrMP08VlytuQk/y9tKzHqSYNIW20NlBNWN8eqOO19ClBiB5FSJIRBvHO12d9bt41UJkb77sTd/Zpu/7mvCzPRewlemuRh2khQzR7ouVtwqFv1xB/BuX/ceHd+3D0oj8o+W+FHz/nz57x9+5aD+YKHTx5zsDiSc3Tn89+BAd8F7P443h1y6d/eBzkntDJEHzl/ec6LF6/o+1H2xU6CuCTMCVIc6doNtFvcoXj3tkPiernk8nJJnTRPHp2xbQMJi3EzcooMQdGPijEZxmh5OVlpFEVFU9eM28DNzQofE86WdL95Q5iUSzEFxjgSvCemiE5ZbEKypkP8hI3RlDpR46mUZfRSQxVW9jwxKEJIxKgoApjVhtBvOH9b0W7f8PmXv+FxWPFXP3rIveOCmgHSG1j2sGhgNdDdLPEhMDs+xzxpcBp4NAf7gJnxxNwRzz3Zw+qm5/Wm42I1cL694nwAe/qY5p6iSxZbH/KDg8eYwzOgBlVCKcqonMRuKWdhLGcyYzfy6uVrnj39ilevXtHmjj6KVdfUi5/4wSJB9SHsGW928qKLE3ikJqaWZsf2F3/3pNKeibVTRuWUJo+4idGV83Tt7OoQaaKy+94syrcUIU+2Ojpr4hQEIbWwFdWSSpPSbQIJgZgiMSWMFn/xOCaSDlQzmYtns4bFQYMyLeQrsk9s1lecv408/fqaL5++5vJqw1fPntIPazbLtyyXL9mu3tJ3S6LvJEE21PhoRDlnHOAYYkL7hEqBSjtylMBO+TwkPNBaQ9IGT0HWGhVvpaG3TVFhae40sDsWnCJNoReQlN7D43nXYPqtRu5O8rpDaQW/2AU2kN5V5+WJhJNz/gMecGoP0iu+yziW/08kC4HhLoB6F4/gdu3bgbp3v/6jADqza4/uDm73YWUxTby6vCGmBl1DNJkxjjjnqHHossThUVmAu6pocE1NQDo6G2sZhxFlxJ9q8CNDu0WlxHo7MJv1NE2DGoXFM0xmf8o4JKJPYw8XuB2tNCXxD+paQgjokGi0k5OdwfvAkhaiAWVQKGzKNNZRVjOcUmQ/0rcdXQxEMn6M4iuQMilsBaBD44wkj9Z1vfc9s0Gkf8pAQmOrkW1a06uED8hFaDV6CpkwxqCsE68IraQTn/K0yZKL1qpIaTKNg75QKG/wUYIlgocYnWy8tEhwlDETe0ekOkLFnUIdfGBUkX4IdMWItZa+8xPg6CbjcfGzE3nbxJBT4+Q/J5RPuE2FFS+Dcc/AU0rtY77hlhW3A950frebKvTOW4BuD9JlNaXmyM2jzWT6iJoScnaJhtNb7dJcpvsnZXkNY61EwGtF1oEY1aTll2N32kFiL6NMMRMJwgRzhnl1IN6IKtMNA+3oiTlhlMWg8FqkoxJZPxlI5iTdp2SoCse8qaRQdpamqQgxs25brm9uaHuPj4MwrXqRzCSmiSklbvoOX1cURUkIUnwXrkAVJc55unbD8eEBB4sFfbehXS2R8AxNJPLm/BVjyhRlTdd1xKho2y0g4DsKiinppyXQtRu22yXr9ZJ7xyfYZMVnUlsaVVLUFWXdYIsB2lG6BNoIS2IChUMCo6Zrb9fBQGTiAtzCarPFB8/hyYKqFgBNZUnZROVbKavWItu0FjsxJ/XUMcopU9oCq7QAeSlTWodVWjpIRpiNIQQmzHpaFCXtWKx/9AQc3qGdTxNtAqLaebiJZ4f4N0ZhS/oRZ8VfsCgLAclTJEePVpmidCijiWnEhpFG
K+a1o6o1uleYqDgIhnQ85+XFmvPzl3xUnfJnP/sTwo/PqL7+Nb/8u1/yNmX8uJZY+STJsH5M6JQIKhFjIk/z8ugH+pgYnCOERGE0x6cn6FKkuChYrW6orNlT5Y3WKGtwpaWsHGVpCbFnGIUFYlSmMoZ7x0dE+4RyXtFdXtJverrzS3TdUN07pT5YcJAsRcq0oadNiUMkZTZFOT+zpuHg+Ii3F5F+o0m2YDkG2usl68EzMwU5QjmfUx4doUuNTp65g5N5jfWeojBi5msKmrOHqJBYdVvWMROtYX56j8XihHFIDESUS0RbE1KgairuHdXoa8XmopvWNS3BINriJwsSpe0kZYgQPaSAweG0EsPalFFZutF9ChibUW5qhGhLXQpwfHwwp9aa1cUl1sJJ46b5GLbdhk27IZWWQpW3/qIhkVUJSu67PEK76bi6uKIjk2PmyFXoopi2CTtbAfktXsCj+OX4SA4K52pIhs1yiy423Js9oCocTpdY7TDaTYwYDYjPpDEli+qYC68Yho5ko/wOiugjv/qPv8TqLZ/+2Yfcf3yfZC2L+/c5OnlMTC3Xl98w9gMHB4cUZTH1FhVKpVsG0B9MmPxDHLg/PL63sJ1G3KXfGruH/goHT57c4+TeKf/8n2dSkLmDDFZn6lLRVAo3dar/+x13qHBqBzxK8bAb2uzOo5fzm0UpEJMnhSjMLm0IOqCN570nh/zJTx/z5uU3xLHEhcQQAh99+BM+OvuYF6+f4erM++8/4OMPHvPv//Xf8sUXT2ke3OeV9/yHL7+gmFX87P37PJ5bZs2Ck8cfMTv7GLM4o71ec/7VS14879hsE93y7xkufs3F4pKfv/8XfPqzR3AW4AfiO0z5mNif8OL/+r/51WdP+fEnZxyfnRE/v+Bm+5zF/Sc8OH7EyT3L0ekZZhzxr78hqyVvv10ydkt++OmPidny6tkF8c1bdPWU+jCRWRPxlFWirAPJ3xCGHkyPcQ8x9gxjqklGNTWF3onNk0JCAVplAd2VNGJTNNhCg6vISaFMJhOAiJYNuswFJsueS09pqrvTqXbm1r8PNPsOq05JxSTqF9lDzGYzmsViYpcIKzbFCEphnLt9dfUuU+CPYN0/zdj5p2pjSDExTufl/v37HB0e0czmYM07378bfzwH/3njtteQ0ZNNw/ZqxcuXr1iuVuIZPXn15hxR2mCNNG1TFtsXHzMxy9f1pmO1bvHZ8ObtDaunS3SpGLQ0r8tR1AxtTpiyYfBR9ngRqXuzIsZMCImc19R1TQielPJUk1WgDNaCU6CiFyJCNhhbUJYW5QcYA6owGK0oVIHPEvY3DIFxzOSo0RiaukJpj9KapEu22bEeFC5UzKlQnYdXb2F5JQCdV7hegg37y0vi14HjtEG7A3h1TTh/S6InFpaLruM6Kt7aOctGSDTHbkZ18gCvC15ebBjKzHsf/Zjm40+gnEGwe5WLUhay2ZO6s89cXF7w7JunvHj1nHW/oVeePopnt0IUMVpLyFHMmTwG8iR3BfbKOAFcpA5VvOtXBreEkjTNbVI3COsNNYU5ZGn+ygWUv3Ndqb2X9F3W3K1MVtbSlG+tfzIaUbtOrP6UsGbXYMxTrasZvefq5orKtHxmt3xj4WYFz1+u+Pr5hi+fXvD06zfcrLYs1yvZP6Ye8oDOHq0SNjvQiqA0QUstp4wRADIJ6cUpQ2ULSSoNkUgkK4gqE1SaJKyWmBRKp6nXmqc6fYJCcyRO99fOuirF23VHsLg9L+7OHfnu/KV2MF7edZQmkFW9mxZ+C9S9K339LoNuh3covdsbvgu6Sa16i1vs3iMyrYNZ9ubqeyC2PwTK3R2/F6AbhoFwlx64R/+MpEOqNCUy5b0ZYgiBPkR09CgV0MmLjlpnXKGxVgCBpIQNZ3dG5+PIsN0Q/IgzltpZUt+jQkBlGNNA5wPRGHRVYcuKoDJOK+rCUlpLMTgKrchjoMiZIia0VniraQms25Yuip+XVZrTZoErSypdUBcOVRgqDHVVMFNw3XuqnPHdgB+GCe3OE+DnCeNAmNJXlBI6s9Ii/7ROMz9saHNgHESalA0krSYDRS2a9yQBC0krlDNoJUBMDhKckY0iF47cVDTaEpJ4Aw59YAxpT9fMgNHCbEsT2qyMJU5a6xij0OFTJg4R7TPL9eU+bMEYszf4retaivVCY63GWUN0doeD7Y2+c0YmCKvxaupLKEXhpMD16Za+q5TCqtuJDW4ZdDtEefecsOWm/oMSw1WlFGa6rsWP7I7h4h2ATvzrBAwsXTW9LqAs2TpilrAL0KgpfMMojdWIeT8ZrUQWfHhwQFFo8S7YrnFZi7+DdRiUpHxO8gKJu0mk4Nk1O8ahZXQWlSU9t2kanCsp65oMVKOkBQ+jlwBeNQUoGIfVinG9YuhGulauWW0NMeZ9OMd6YjumKdlWkntqFIn1ZiMS6Bxo245x9GQk1KBpGuq6niZaQ1VVOANV6dhsl/R9z8XlJaUq0Fj6biThmR0K+0Xu2cDQ9wx9zziOxJik8zQV3ykrjFZ7ADvGICliceRmuWTTbbGlQZsZRosPnPcSSLI3Zt11iHfXS5LH9gvltEHNRgqQoiimnxFPun5X/ORblpzSSkzqs8XkAKh95PbdiVn8HSIR8RsRb0iPdYaYE13XMahMCp7gxWg25CRpzylAjmAiKQWyH7ApkPPIMG5JfkOhNCeHDTYYupipcsHp4ZzTe6fMf/oT6p+9R72Y8Xd//R95OTwlrDakJOEGUQVMHgl7+vu0PCUl50EHcpagG60nL7rFnHre0Pcb3ORXIRvKQvzojIDuewPvFDEaKqsmhmbHdttirOP45AFj0XO5XDEkhR8CYTuiNRwVJbaeU6fAsF3R2IrZvJHPWWmskXnu8PQe667FA9lozlcd2m+YFwW2DGxiog8jReoolOHeoqbMNUFbwgAWRWkdPzg+ZTn2vPIdYVZjZzNs1ZCSNGXIHmpPymLBcHR6j5Qj7c3VHn8wakoTnaTre/kvIkXPUxBE4RzWGPEaVRqfp4JUg8p64kopSudw2tAUDQdlyfriihSTdExTAqvo+45+6EhbTWMXKF3iY2K9aQlKUdQlwxBwSnNwcMBsu+Lm6pK35+dUMVPFe9xbzKeNRpqCKSCnIMEMWaTcORtyAGcqZo0heE3XRg4XM7IuyFmMg2FKWEqKrBRGWU4XD/h8m3j1+iUX29dsU49KmoNyjgoDT3/9OacnNT//nzQHp485++GPsLrE5zWbTcfzb95w//Q9PmoaXFmz8y6702j8LUHob/99Bwz90xeU2lh2gJN0pSEoMaGe1YqqYuqgg9a3fDiELIkSwsR3jlpPZhHC5P7vYyi+R8srAM9e+hMmyc4ktTEGvMy/ZZ0oGjhd/JB5+zP+9befEeh5/N4Txk1gGCI5j6xIaOW4iPD4eMHZD97j+iKw3rQ8657yzeWSIz0jlveIZeTF5pwXLy44yOe8V/4JTVFQ2xvC+gU3bzbEcYPKD0i24fX1b7ghcfbBj1k8+qF4+mSNbzf0lWXx+EOefPqXHJ3UuOoe12+uqKpHPPt2SRo
7Xr14w9XFOYdH99B6xZe/+AJTZJ785AG2OeP48THl/JDmGFST6XvN7GjG4UlBUzt8P4iNBRpcpjmI2PIEbI2iIGFIFDtojt0+xJgIeKSjZFB6Rg6W4Gu0smSz+/6ElAfIzyIsVWHI7VgKsPN9lLsp7efA7x+Gvb/ZxAAJ057DOYcriu9ldO0Au7uF0R/Hf4GhFMqa6dRHrLWcnT3EVpU8N3mn7QDSu6DcznT+D6UI/g89FOyAgh12nkLk/Pyc85evSHH6TKcCXcKdEsY4sb2YLIXGHGn9wM12wypkotIMPvPi1QUhekxd0CuxbaqiIgXo0JgQyHEgBk8KAtJZK1JUq0RKWBvDGAKjH8ljwlpNVVjxflUJcGBLlKmo5gvmdcl2dUUIHoMm5jgB9WYKQJM9f06aNkeu9YDTnjh6FqakODjlMPTYGMnDBjSM7SXlKsHbAnRBZWqqesa46klf3KBursjZ4Z/f0K09i/oEbMUmDqyKhu28wNcG7SpKW5FQrJZL3o49w9HHDD//CduH7wOWbBQqgUNRklFZT03JTH+z5fnzb3j27TMu1pcMBIYsBBuNKHASOxAoEbw0klTOxInIkpjqgInRtmNO3pWlAvt98q5uiHlnPaSn60ZxOw3u6hA1TZF3QLiJbBDiuzshhfh5++iBO+q0GPD+ljSTYmYMouLDB9ohcXVzzrPnv6FRif/j1QuKpLhZDVwsR7pg6L1mjBllLMbJdWLyOIGREqaYYyarhCkgJwFcSZo8XRtZSdheN0rzHZSQp4whW6aE00jOHnSEmCfp8FSr73/dfjagyFlP9o9q2mBlbl3b1H79um0gCgaye9UdUJfSFL50ZwnaM9e4BVp34ZTcWQn3LDsQ1qMyt5ast98k18iOLbfDx3Y/nI21YAAAIABJREFUv1/Fb1fb2/n33WbZ7xu/F6Bbd73ottVEj08RchSZaOU4XpQs5iW4TFJBENaUpthgj9di2B4Gz2YIuK5HWaFIDj6KH1qGqiwE8DOaFMbJkDuzmB2T/EgXI5t+4Krbsup6vFJyDEWF04ZZWdAUFUUW3zGnDIUrqBDwITcFZfKkdknqtkQfcGiGrseFyCZBmOSYSSW0Vbiy5KAoiSh627Ilo2KiKh2FscQYuNmsBeybzKXH6dhB0/e9iKezTABGaawxOGNQTKb8O8NHJclWkroq/lU5J0pn0SkLQFeWNKYAI7RYAenE8DLGyBCmJJcpEzlrQxiiXKNKkyfWX4wSwLC7MFPKbH0/JWmCc46qqijLgqODhl0JZZ0sDEopSeIE6ZhbhzYeNU4hEknov0YbASdgLyXbeYztNgXGTp2KNF3sO2BxN5HuWH0kdik7QhMWAG/fYp4mQZkXJyZLFumeRgovpWXDqJVlJ5clySZGGVBGjMJVLtBWYcuCsqpoaoeNAryQtXhNOAfTBD90/eQD5gl+ENAyFYCiDwLa9uOAT5GyqKkXmoiYlQ6+J2mFa2qcEg+zwQdG30PO6BwJXoI8yrIkK82ma4mbNcZoFvMZZV0RskgurdEcnYicUFvH+u1bgheZaQxBGDlMlOwYaJpmmsgyR0eHVNUDxq7l6uqCbrtFJUPbdnRDS9QDx7EhZRjGTIieru8ZhoFxCHif8TExhkjO0I6Ral6TfSIQCDFThMDgM23bst6sGfpjqrIgmUxOQZLgRs8YR4wpCX4kB1lECYFkNEyS60RmHDsKK6CnUUoMVqNc89YZqqwoDQxmx0CYkmK1kcJFiwfXfsKeKIA5xR08jMnip5CJGOOoypKmnpHCgO9a/Oj3BUxSipgVMSrGpAjZMOZETB3ELXlo8Zs1bDucbSispd9umZeOT//Fz/mzD/8c21Ssrm+IFXzw+Annz77hlfqKcehRwaNIaB1xKuHR+KzxWeGUAm0wNmOtwWZFjmLunnfQ0QREOWew06ZFAgbkevAhYyWaWuRLqccqiw+e569e8+tvv6Q5mvH+2WMW9xaY+QE3bcdN37NcLmnRhJMT5idzbKFoqpJCZ3QKVFVNXdXYsuDb9ZLXby+57jqKZs7Z2SOyMfShR2fN+XJL7S44bu4zJ1FYQ1FUHBiDB7o4UGRQWTFWmuNZhdIlYV5zOKtRCrrRE6PHZDAp0/qRDk2ZNCLEtaQU9oEFWoPOEWugLhxNWWLCiLYaWzuKusQ6I8w0AtqCTgLi9saQg0J7UDoS04gfe7bbloOixpgCYxKltbicJ4uBAj2W+KCIGsqioOw76Fuycdi5RVvL0eExs3sH2FlD+vw/cXl1xbDZEA4O2G3ccpJUL4tGRQVROol1VVNVNb7zKAOHp6fcdB3X6w3HhxrvE6OHGPWdDqVsKIxWrFdbdC44O3vCxeqSGC7xW7FBWA0DQ585f3nF8nLLo8VDXnzzgn/77/4tsyKgwog1h/yrf/m/8+Dhe5w8aNilVcrmKKGyAOv7nueuAZSno1Dvbmb+oeO3X0Ht/8xkYpS1wSqB1PIEHJr9MTCtT3dEq/b7YMPd634/1Pj/59gBNbtN8N3/83eP6PuPMIOaqINZErflm3ePS9hHzpMfqC7QWlhcSqXpM5264GnEuIDqNxzrzJ9//AGLsyOOz37IL//fv+c3v/4MkwJNWZGU4e9+8Rl/P3rsoAgavvrmOW9SwFdz2k3g9fMbVuGGt9dfsDE3pFnD//wvV/zVX/wvvPfnH+LHnjdvX5B15s/+8l/x+LTgb/7N/4nVkbPiUzEadzXjpoUQ+eSnj5jZSHd5zv3ZQ84eH3N0v2CzLjGvM4TAuFpTH88pjx/SxkM2268pkiHlBbo8oHk8ozo6BjqUDTA0NG5OZQP4hMPgjo+gPIbgoNTyfYx7CQ2YCWKb/q0yOY/kJMFVWhfosqBwFml4qP09I2BanpgZevIX9kAnHf0gIKrWHm016JKcHWmXHKtGdgItskZlAa/zVOSoabuVokjnm9kMV5aTeTlyoxizZ6Lsrj52V98d1ux3mSh/HP+wsQdCSSjnKAonp3KS0omUgDtnYdc/vQPW/RZCK/ftpGP4B8xeu9n83aL3t1/pDtPlt57Zx05OR56AAbGM383MBeSpsbR/vZ29gN6rb/bvfudtsvrOO+ep/XCLA+z/4kPCOif1go+ETcvN61dcXb4FFCYh8Hr0DKElWs+oKjziETz0A82Joyxq+t5zcblGKcvc1AydxzlN8okheWIY0dnt10liwqaEI2IrR1U1xAlMssbSzGbklDHzOTnMIItveFOWxDTQDwOHx6dk7fC6pF4scMbyKo5c3lwzehiHKPNIkVDOSO1gHEOE0Hu8BldqqCxHTcWZe0RTbJk1GpVbkdtvPHSJHCNeedxRBYVDtyOFTfD5N2DmuPIIWx0wtIqbIdLqGTfZc8nAlR8ZhpYxLBmGkZwSfnbIB3/+KUcffkCnHIFMncUSS2XE/yhLHZd9Znlzw+vXr7m4uqQde4ISZZMgeLLGqwl0TTHghx25YgLnUiJO9a54fYodFVOTP0ZhS+7k4Wm6Tna+5oLRCAEgT+DdDtDLO4bxXaacAnLaM4/vElVSliMJPk2MPj+RAUbC0O8Buj
4kqafaNWPo8Hlk8C0hDtgYWURNoQtGn+jGhHKiSNI2Y52jDwPkQMpxwneYQvQsSgXxac8Jo4zU/7qQKT9NzZ+sydN6n2Im4YX4YjVJRZTN4sk8yVjTVJsLqUDITBpFzOzXHiYQU0iJ8c4NuZtTdkU/t02hO0DYHlzbkTjyuw2KXWPxXUbx1MhCi8oDYS/GOM0U+d33y4iaJaoIJmFkM4TOAvsY1HTt7EIpbkG63dSrvnMM3zd+L0Cnq4p+SuzIY8DlLE12AiM9Yxbkd15VGGsYYyDljGncBCgoxr6ja3uGEIhXK8YwJUpOC/Osbjg6mDOrK1T25MlnzTqDKg22ajgwmiok7MpRrJb4uPPXsYz9QFx3DK4laoMmEY0hq4bRLWhRVCh04ThghtXiUFa5CmLG9wM3XUfabAT11RkI8iErTdM0FMZCoQlhSvTRGWU0lan3wI11FqdmdENL348YYzE+MjcFwTj6bsSYxHxeUZiaYRwolKRZagw2awGilBYQxWh0FnYglRzn6D0mCdCXUIyFXOBRiUZ8DJIS2gfPmGRf3QdPihmtDc5qCXmYKLNjSHtUP+VMSiL7WbUdablms9mINGJiOtrCUZYFtpD4Z2EgKHQGm4Q9EEKAiHRmlJJUo4lJtUOYw+QpR3YyoVlhsmml9hJDjcJN0j256NJUwRmU2XnVsWfN7W+0OzcDhmnzu9O/3yLqWqlJmmpIRuFJjNkTFZS2IDrRzs8XDcdFRVFalqvNfgINIRKtxjnx/vAenJ2SE9NAytAO0gHRRlJ+ixBoUyTkRLAwEsiIoauyBq0cegS/HRjanjiOuMkBoetaUpLUWAn5UPTB8/ryLVXhBKDKcL3eMJvNsHXNRx98zHKz4e3FBavNlpw9tqjk3JEhRa4uztkuHScnRxwfHVE6w72TE/LhMf3G01cdN9sLXr7+lrfXhvnsCE0jxxLFrH67FBbguuvpMljXMHjDzTqw6SJDkhCOzcUV221Lt22JMfLVb55yfHzAo7N7HB4dENXIOI6EMJCchuiptMaSMCpBjoy+AxWJ2dONG6wF4yRoxVaOzWZD3/e4FLHW0FjPig5XZ2yhKEqLT4mqqAhZkbQhJjBZUWlLYTKtTiQV0DGjxoCyAR3Bh4Sf0lOP5zPUfEHbricfkkzQmmwcIyVjVqjS0HrPdrgh65aZ0RzQkEioWOJ9LUDZuOLrZ59zcO8D+nYLT3s8I4P2AiWZhCsU2hj6fpQdSlGRmoqLYaSyJVXVELqOGCXEJwYIY2TTSYJTziLHzfMZsVtDjuQ04odM9h3brceaGSFaEiKl11mjsqJLim4YefbinHSuef7yip//9KccHR5yVBS4VjPaTBo63l58w82m4P69U7Q2+Cns5bQpuVeVqJQ4Kyxd5UhZ8fZmy9i+5PEP3mNxcky/3bDuWp6+OKdQnvfvHXB6XDMvKiqjqVKkrhO+9wQTmM2hvhlQxtIsaobNklwndLNg0yW22wEfr+iMZdQV3htSLCA5tBXGc0gC4JZlpjQDuo0wJpHqaRi1IpSWbLQkjRcbxtTTdp4QDSufKWpLlwKzQqFNT+hbxmrG2o+opmZWGUwK+NWSnkhRzpjlku0QuLxYcdBE7pUNj2vLhRcbXWUgqcij+/dYLBrU0PH5OLC8uuRV8Bw1Nc1sLkV9sqRRoZP4a4YgieN9u6UymmKW2YRIbwqOTyx6boiXkHRJ1hUkDTmBDigsWWlJMI4wX5zx+OEnbC/+jnF7Sa8CReUYc+Lrr9/w9FdfcVQe8ZNHj1B/9ilvX33LZ7/6e66u3nDQvM9Pf/6XzA5OMaUFJd5/arfpuQNn7QsnNcl1mZLM/oFjZ9m4397tAb+7bVUJONu9/10z4u/WlPufVL/11Dvf9V8Db2gyFmCIIxmNmTbWZmK8KUT9pgGxrthtdBUxCABnnEhbFYWA/bI9IRlPZkCrAh8dbR/ISVNXlsIpMnFvlJwQpn1MCWMKqkXFg5NDigBsB7oY6UPmg+NTSh0gBzbR8Te/+ju+fPk1VJbsFgxxRk/Plb7g+m8/4yBHynLg7ONj/uTTf8FBccxXf/+3fPDwAacfzDn5vOLZb16QdGBx9h6P/vQvGIuIufdzKBcwbHD9Frd6ReUMH34Am6+f0b68oFqUdNpz43tu4hWLsee9xcfM752hy3s8/TKy5ed88MOfsTj5MSwirkgoHKQFpJHx6i3LZy3UiXKb8Wmgvp/RjabvHL4wLB7dB+Wmrv4o+140SUT8MgdgQE9ydiXzeNK3XIHdxZazhiz7MaU0OXn67jU+vGVWLnj99IKXLz7n45/d5+TJY0g1/x9777Ul2ZWf+f22OyZc+nIoAA2gvWM3RSONyDWUlpbEF5i7eQo9jS50ozeZC0lrDVvDYfc026DRAMpnZaULd8y2utjnRGYVCgCHI1Ik15y1oiKzMiJORJxtv/9n8HtIeQgqYMM1TXcxJFUfodVhZkogbtgDUjJdLJhlDGQAPW7WVCLuevItJfTbAZsb4sBN4Mg/tePNzdO4oRpDvowx/7BS0ZgDVtRQzwd2oX1yZHeJjC6JW99tRpvk6xiYAJdyMZphVNUMRcnbIJeMu/CuQEILfQNufelHvQ13jT/LNwbS24Pm8IZSyHPOiP6Si/8xBcIgl4eEThb6Z4TmBZv1FbLaZ37yPZI4ICZ5i9sbhpsGKQmCXSG/yKZl+exS0EHuJ8PXlfywGU75YV4IdAFa1ztnUSMFodkS1lck1xCTQnpPnTyGhrW/IjpFkxZsgsMJxUxoJkXNqU10W8dczUgYggsoqZFK5KwX1zOfTfC9JQTP++++h/SeRaFQyTOZVOzfOaTzltV6QzmdobRhebUhhMS8mnL96pyqMNy/c8Ly+py0VzM72ePsYkm7XWMmktBBv14jhML6BLrKEmki0TmEyuSQoAp0NUHHREyObhKZvn/Ah03iaL9j5s7BK1gHTL/g1dmGPhrWztOfvuL+h5r5gUZvBeh9sAXtJmJFwarXfPzknF89eUGnSl61HZe2QUwq2uhonQet+YOf/Ig//9Of8u7+nAkZuNUi29zoYQ4jaFIUrF+85OnjR1yeX7FtOqxI2OCQKlGIPHFprdClgZjoui7PZQNRRUJmfqchuBCxI4r4AWiLKYLIlkhSy8GeKwfMpZDbV0g+L6MGaaQSkn64psZkywfnHSF4pJLEgRgzOtWONiVjr5FyCA/0Htc3dE3DZr2m2Wzprc0J2oOnuQuWEN0AIkJIik4ZupD39lEKUvBZzSEENnpEGtOdUgbbhneRpARRkpJHiJz6GsNoUwWIHBoYQ/bwQ4nd50gpkplCARkyLjAGrIwhSDEGbobXkfHNIFHlxu4hjvy5oW8LcVMs3H1LOYF3/F0IkCITrkIQu9cU4/csBCEpdpXglAhpHO9vgNabAsHQ1m6x7hIZWDVG7t4nIYfDoPK4LGQOshwZdeO6KxOFMoYxhhV+2fHVHnRlgUqRIkBPRlij91gCXRD0HnoXKftICipTJGWWFCYpMJMpviiZ1J7CZ4mbtjZ/ECGIwZOIbDYb+u0KozOCb3TeR
G8dmKKgLmumU40qM/DnnNvJkjrd0HXdwCLIKKZ1ORraxQ60YVIbykKgRESKhCkNldFZJ11UBOFzmp8AoscGT3AuMySsGy5oHNJxJGU5yab5wSOdw8e0M8k1UhClRoZE0gFEgWs80eao5OQ8Sfjc0FPKKbNJohh9oWJOlEkps4aGFMxSG6TJ0qpCKoQRMJmAzDr4KCAksswuRryAZZN903xvs5eeDXTJY53PNFYfb1DeodEkqSi1IAhJ1/W5YY8MP6sxvUUZhVKK2Wx+I+1MGS2TYvDi27XG3AFvVnHsJu6RHuy5WQzt/OqGRj8mtr6ZxPKmJ8Db/DViiDv565tHFOOCy4EbOq1PQ6pvILiWMvUYFTk4kJRFzXzGoP2HJHrKsiAEg68s3ip8MJnxFQIuQcJjY8R129xWTEFZtUiTk0n39hb4lGidp+0a2t5incv+EmIwKyWihco+WWgkaSdf9Amm0wllPYEY2K7WtF3PrMvegg/u7u8SmczFBcvlir5riN6ji4KqMJAim82a1fKSRykyqWvuHh9x9+Qud+/eQypo7SFXqzM22w3BQ3AeGSMvnj+l0IG6SEzmBusCV5sVLlhaH/FS0vaDZ5qwRBdpmw7bWiAgpKZvOtbLDYKUgbmYTfOVzimzkOWxUlZoo/PsrHIKpikKpvszClOybnIIhySbk/bOMisqSiNRRiCMwJSGejIB63KwiZCU1YTKWqx16GFSTtHnNE0YmAhyYFkZtDRoXaB0ZlTlCUwhYiQGcAMLwqeIjT77JeoEOlIpzdzUWB2IydAGTbSR67Mzfv34Ef/3Jy85fvhd/uAb3+SHP/oO+rCi9x2d7Wlti04JGy06Zt+NxjvqqsYrw7JpCW3HdFaxmFV0rWOr9VDJGVOihnCSEBGD0e2O1zD08RBCjmhvLd46YspjQjIFXYD1esXlcoMHPnz3IYeLKdPSMDNzfK/YbAPOdayWV0zMDKNLlBoWHn1LrQ339hc4qdi8eMV8MqHv4fPPH1PtzXlw7w4H0znu4ozffvaMl69e8fDuCe8+uMP+dIKMFhkDhVYIrXIoiAxMleJkVhOUJghF2665XLU4BFEJXl0v+ezJ55jqCZUumJgsGRdSDN4aCaWgnhgmrsQIjY3ZW7ELjtZZDHlCVQqKSlM5iVBTelPhyKzmIeICkTJLNkmJMEX2gyJhBLi+5/TlK5yeocspVRVzVdi1mNCjU0EIlkDAhYB3HZUSvHNyRGwf8pn3rFZLrq6vOWlahFSEIeFXSo1SJpsHh+yNEmUgSI8oC4qyppiBmkIXPV2fvSMTArwjdGusrpDVFK0TupDUswV37r1Df93wwimuVlf0CVyQPHp6xl//7OdMTcWP/uQn/Nn/+K95/umn/B//2//Ofzj/mL7pePrkKft3Tzi6e2fwXYlDxfINZgNDDebWhk/c+vc/57hda339FV5/rdtEEvGFv379Of7pHgN3TipA5vXUtsN2YVAaBySBV2cvCL7nww++wf7eHiHk+UUrTYo+L5TTMI3uCtcFQmhsCFwsV1xebVG65Phoj4Up0QxeuFEggkChEWoC1RHy4B7dk0e8evGMPVnz4Xc+xLuWeH3FZFpCEDgvmO2fMOstViWCr5lNjqn3DimmBSfTCarZcHb6CaGVnH2+5PMnf0OhGj6dClJXE9p9vvP+R5S+5dnj31LNS+5/+BH1/nuktoemRzgBscvfVb8mdGtYHDG5/w3qQqMXlsePNrRXl3ihqd75kLXao6WhM3Mue43Qd0B2RNERUoGQJSoJjJoxLw+p9wvCDF7+7hek64Y7Dw027jO/N4dUkKuIESF8ll6hhpLcWOk3CNSOBRVvMU5HawYjBzBP5DEmBkffrNhuLhCsmIiCftOyPH/F+qyllg2oI6z3zA+OoBJEPAiHFDIXw+KwZRS3+oYUGaS91RZ2wFEMuL7H+YBQkrKq0Pq2xGjs6HHoc//0ALmvO27LosbbP2jQghjAOTK5RQ52SrvgPXHrgW8+cURvBwJKklmJ+QVXz0RGpEKWSGe3AzXI9MXNZRsf+5WD3m2gbgxXuIUu3n5/w4aV6G8AOjG2+szKBQaWVMCvXnL+/JesNxfs3/0m85OPboHT48Y+An4o2psM0CVQaZDh9V0OcSg0lZQMvNT8sUYCzfC2dQRSLrb4CF4kfO+5PD/j8vIVXbchigLZBfzqFc6e0aYl6u4+1X5FkuTAtOgzQWAcQNNwIzO0YnIk75mUBe+9/wCA1WrD3v4MFTyzFNFCIwsJqSfSI4wnypam22CmBTUFd45PONifo1Ok1ILtOhBlAuFItCTfYNtrXA+ub/I4LiVhBCSSR8gAIgzrhYFR7noqE5jMC47v7PGggUpsENsWtoG49Vwt13x85RCTBVFrlu0VV4/PKC8FDx8eMpnOEWbBJkheXFken13x6emSp1eOl801rRKsfI+wgbWzbF3P/ffe58Of/jHvPXzIUVnnPQ8BM4yTIqWhICFJ65aXz1/w/NkzLpdXdM4SK4MUCrwlBpvzdZwkDpZaOSAAUrqxYEoDA2oXCiAVypTIOFjX+BwymKTI/SNlLzhIg8VI7qw5KLEbmGa5kY5WXimMaZ9ZCSYG1qeAQckyKhBDttLpG1rb024attsN7WZL13X43u7sV4AbmyeZFWRKlLvuv5NcinEPPY4dtzu32pFPxHgTAuJbIKJxHEwxkw2GeeJGfprlxJnLNMh60zgkvbkyuxkRRontrl/fnOqN4ePWXn/AGW7wgZt58vbHe+vaLt0+z5esFkfG2y0wMc+Fw3c0ztRpHMZSBhVlxkVC+iIAd9sP9uuOrwTotMyJhFFHrOhvqkcypwaGlOiGlFVtBraTVMgoEaFjIg0I0EJiijJLH1XWuiulcLbH256u62hsy6SuWMympBjZbC1Od5iqZMYg8YsJjMlSSQTTekKhCoxs8NbuQLvoI52LLK3HO0sXeoKOGJ0ZNEZLgnMoYfAie+oplRF2KaGkHPTdAZLcvW/vPZ3tWZ9f41NObPR+YA2avDFKAy0ypBxyIKVEGzOY5mYpLEKgC7V73fGGHCrZQ2Mry5JgHaIoUHOJLx3JB4wYaao6I/wpElLMCj0ShYAoBGVR0zqP7Tp6N2y8u55e++xlhx+ek+VGubImCENVwBhDYNDeE7G2x9p+B4B5O6D/Q8MUY0P3EEdjSfl6AATcgGtqkES/pu0f2H1CiAwE3e5kQrx2G1/r9v1rP7+xdnrtMSK/rzBQi0epRQg51agjYK8Dzeqa4+NjqskMMURZK2UoqgnEbCIptUGrguBt9n8KAR1z9aOzHq9z6EiIYJ2na3oiibKOKKOphUSbEuUjG2thGJyT9QQh83eY8iCfacUpFx9Jmb3t8/fZ944QAhJFURR89tlnaK0yxXdIYxYJ6rJgsb+XFx/O41P2covB0fjAqXesrlbsLy5ZLOZM9gzT2QxTGKyN+E7hdcC2Pc12y/VqiSjmhBRZLldcb66xMYO8NsTBMFwQfcB2HdE6pIByoul7y8XFFU2zJRHYtk1OYbYeHwPrtqH1HaJQTEVJby2dtSAS
15stsjRo2bFerwkhUJiK0hiUElwsVzRNy3K74dnpKalecKfzCFMMoFRAF5qqqthsNyilISWcc8DgEUka1m2JKHMVKvtI5gSoIAVmUiAShD7Q+3wdWtcTvUcUmdHAaOqqNakq8UHh2g6lJXtHB8yXSx6vVrz85d+i1g1HB1Pu7X+QaS5aEmX2l0OKIZE2jxW60KhCDv5ZkV6kLHkOlpg8MVikUih5k7AMafCK0oQERIE2mrIssrw+hLw5Z/BkM4rSlCilB8/EPs9GIWDv3eHB8SH78xnVwR6Lfo+Li3M26xWuhdJo5rMKU5YYwCjNdDqlaG32PnQ9tss+o65vubw4Q5YlcyPo+sSr6xXSGA5OTriz2EeT6LdrGtcjBBRVzfHJCUkopoXObEGl0CnSbyVr67Hbjn614vrVK3xcUVc1e7VhemePvaIGka+1CwEbIz5lVp0cUp7HlYIcgIoxXGNWV0wXx7S65Gq7Ja43eQEmb4oEQiqEVPjgM4FeaXyC5WrNym2ZLg6ZmpweneebiFLkzS3ZIDpET1VVHJ8cI5WgdxY3eF2+LhfL11mIvIAMJIQxJJV9b4Qx1OWEok4I5bDe0/s+y9xTIvQt2+01tqiZ6gKlEsVEMFkYZLVPaO7TN1uu2hWN66mqkhgS/+k3v6N3nt8+e8rieA+7XtP3nj/64z/m7rsf8PHHvybpxB//qz9l/+AQ7132N/laedt/GQT2Lwds+/sfQkjSICUutKH1PZcXV3z26e8xSvDjH32POyeHFEbS9w4tczBQSuCTwbpsyFHqYY2aBCpK3OBDM1soks5rmWqS0yRjBEJm1gsPIBBOAjPE9F3KD1ri0R2UDBwfH3D3myekqzOESHzyyef8/Ge/4sLA+9//U95955t0q579xSEPPvou9dEBd+/UtKtH/OLn/ye/+tUv+O3HT5hOjpnqnqa7RPhjFpM73Nk/5MhdcPnyOfV7J0zjXbjYgFlAWWCVxa0+xVjPpD5i8tE3YP8D0BXYBi098/IIPd/QbDs++dnPORf7lA+/y3/7lz9gXh1CKSFNSWmKxSNpUWmDFD1mViEXC8qjGXe8zyK9oiamCjOZA/oNZCBmaC4NTIM0bnkGmakYpTMMGyOPFAGJHVaWWUokpEeqJieMc0CMM/YPS75jFHsHNqdIzjS6nCDh4miFAAAgAElEQVRkAXiM3IcyZIalmEModyDJTkY7FkuJg7Ji/GPegYXksSEXz03S5Hjat+2w/nkf41rxHyNoIUvwR1Azy5GzJxY5FODrRq68OM+yOSJ+UChlA/rhs4xoX7y5lsPqkrd5T371cauxvPb71zx891Hi0M4jImXw2l6v+OzXH/PZxz8n0vHt8g4nt4h/WVI2nHPYiWexfULgUSJkIDCM5BAFRlCQ6JVCYIijX2MiS4SDy+wsZTBSE4BGWE6bK67CFq8Sdrvl+W8eY6/OEPaMXrdU7h5FmuMaS9/bzO0RHiGzaX4kZBasiBmbVxKXPPNJyfHJEcoURPkS1OC3JcAn8K6l3Xb0weJShM6x3rYc7J2AlAidmOzVqBRJtsdMCnSpWewt8AO5pDCGGPoMxEk/gJEZik34PFiLQKInpR6BY6IChesRyw3+fEXqOkQTIRgSBasQeITks8IQpMCHgBWaj+7e5cMffJ93vv0jnj054ze/e8ynT894dHrJk5fXnK3XrLyjFRJrBE3UKF3QeEnSc771zh/wnZ/+BQfH90fUZTSGyuDZQLdPMXF9dcmjJ094dnrKersdLI0Uydu8Dgr5JuJg268EiSzlTTvuVQ6r3IFYt6yYRvnr6ANHjAiR11l5rkwkkV9nlLIG7wl+xBYUKUS6QZoqVSbcCCmyHHQonDvX0zUtTbuhbTusd7RdSz+GV7Ytwd546N/YOr3Rp9JN7x0ZtyM49ub++c3+/frfXh/jbvbPt/beI+48ssSG59wYa4i37815u9Lgxm/u9plefy9fJRF9E9B7+2NeJ/d88Ww3zxGDh//NeDb46stMaBgVf2I4eRrfxPCzj+HmOx9ef2fPlTJr86uOrwTo3sZc2h0yJ6iMYQDJR2ISOCyhc8QE15vNMDCUr4E9ZvAwM8qgSokIEZ8ihclSLRET9D3SgY+WdXvFVmWaJGTAL0hJq2xmtxRlXswYw3Q+x+gi0347h3M93nWE2Octd4rYNrN4pDB0NmJd3vhWkwn1tGY6qZBGs1o2mYIoFaoqic5hO8v1tsVay6ZtdhTXbJpboQcj8aQkQmlEUVLN5kRU9o03OgM95E3eCM5JKYfBIZtGhpC9AcKQilWWJUZpfG9RQlOXJVLrzHhxg0l/yCmjMWVWnZKaUmZ2XyEEE6WYlhU+RHwkS/OCp7eB3jmcj/gQSMFDjOi6zK8V8/URMWTgcWgTq9Uqb1YZWG9DZxBBgIj4FEBm0E8ouZOkjvdGZYAxDQadOWdieLwQN9HNY7e5NXh8Vccbf1ZG3aqyfRHIG39/M6AipYCPgaXraZuWs/MryqqiqGrKqmKxt2A6n6Ol2lVOpBrYK9GTYqQgS16NdtkORCtCCCzXawgdPgSa5YqiLKmnEwpTUCrJxGi2XYdznjZpgh8+l0/4YImDSb1EMJ1OST6wWm4QRLzPsjDvI8SOQmefBIFAS0lVlCRTsJjN2Z8t2Nvbo2m3bJbXtF32ZhRDtbHZbllebpjNauYHBZNFxWw2Y3++h15MSF7gredgb85iscipVSGy7Vsurxtam2htBiZFEvm7ShmEUElgtML5nOy6WS2z15eBvm/xPuB04Gq9wceQJzglcWHC1lqarsfajkldYc8TSogMNPpA314gEMxnM+pSY4Pj8nrF0/Nf8je//Yz947ssDo+Yz+ccHuxxfLhPSgHnA0kohFD4kHDBEwfpuB+MY10M9MHReot2kGxOyyyGZGYtPKK1+OhxzpFiQEadDWk7S99lL0KpFFIodCnAOxAKWZZoa4iNxXcdKYYcGlNVqLJCKL1bSIcYaFuX2VguL0Jm8wmp0nSu43K95qoNyFhjNldU08mwgcseG1IMHhlCElJEAMqUVGWV2RfaUJkCrSI+I+wYo9FFgdYFQmaWc9/1LK9X4AOb1Yo7J8fsHyy4owvqesbVy0tWqzUkh3hwBy0lwedEX+ct06pi1QWcSezPZ4jasFpe8uLVS9x0wmxeIyuDlZqLbUdxsaJSAkJACUVlJEVdsXcosa6HYJFCUhYSVSrM0T7FquWi7ZgbxaJQbNqAcD1bu+GCjsLPWdQl1aQAlQNhOu9xIQeA9H1PCBVSagpToJVGpNzWldZUZUmQJgPQIdJ7R3QZlBYif+suBKT0hIHGoAtNWVcsLy9oQ0IfzplXOdBGa5Hl1oXeySq01hiTb/P5nPv376O0Zu/gkOlshgsRbTqEcySfWb4+Zm2UqiqiiARlQGlMWSGNJyS3KxbllULeeBMD0fUQLWUF04Vh6gtED/PDOYd3j3ixumR9dUUqK+qiot1seHp6ztlmTVQBv93SrxqSfMUnj5+SpEKbxA//4HscHOyh1ABav8HQGOWlg8DkXyxo9o9zZJZGSJ7eOtp
tz3bTE3xmJC+vr5lPKkKI/Pa3H9NuN9y7c4d79+7DUPW2QdA5yWyiBn9dKE2RAf00eKMrjVns7SrU+axZReE3lrDxmEmBqhWIAj094egbhhQfINwScBAbrq88r16c8tmz51z1PT/+83/FH/03/wPvnrzP+vSCaEr2v/V9hFYIecXqfMUfLn7COx/c5xf/4bdoW/D+4ZSPHhzz8ccrPv5knUOJfMf73/6AybffpbeG7eUlupS0fUu7fcnV8wvq4NBhw3J7RSoaJodzZscz7rzzPg8ePuS3zx6zXgfmszkP3v0ITo7Q84KTxRySIySDixLkUNZRifJgQSnfh7qEScn0/Tl+veTly3P2Tx4gqgkjKpGX94PPM5nxSBqJAgM74dZGSzAwnnc9ZvRaGh4kJGU9oSxyGjRMOagV+9zH+zNkcUE9O0IWdwZpkkKJKUplTzyRFKDY+QQxJLKHuNuMpLDbJeUihBKUdY0sdIYUlfx6wtU/g+PNcAX4xwPoYgxYmwtZ+Xx5X3fj2/f150+k7OWLQGiJEiOuldk/KeRQMgSgs/olOzy/cfX+Th/1TXDu9c3nFyG78Rwxr4NCQOiCJLNElaRIPrG5WnP24oL1umFvb0JdzEGNsu9br7xDK3KH0QRMWsLmGpoGrpaQAixqxKyCvQUVhiA0IhVEIZFaDzZ3w/uKPgf/RTg7e86jZ4+4uL7k+vqSq+eXrF6eY7ots8G81NiI9BElNaowBCJBZkZNFBGfV+q5D0uycT6gioKkMgqvjQGR7VeikCSfZZ9CSPro8cSsTDAlEUnrPGeXl+ADGqi0IkiF0YbeOaRUVFWNkgWdGBJNBXhGUCmQ8KTkIY1yTo/AUUtBajq66yXr8yuakJhYScKwDobTYLleHNDKPVbCkIKkro45+cMf8+7/9D8Tt5Injzb8P8+u+f2jc86XHddtolUzUqXwydH6DqckQhvq2YRvfe+7/Pd/8We8++H7AxknXxI9jo3jgBgh9Y7L6yvOLs/ZNC0BASYTP3rrB4Aj+7vnvbbO+zWZBoujmyC524GFZC4XDJ76I4nkxg7iphXHlAM+RrLH6+DPaDkQ8cHig8ckSe99xib6LEt1vaVtG9qmoWm2NE2DdXk/H7hRko3klXHPfVvzPsrSU8rKiZQSSsov7JvfJMu8eYxkmfHn1/3Tbn4e779MxTae683XHP9vJHx92fE2VdybAOLtx70JBN4Gxd7299eut7j5LLdf73WP1Ne/qxgzQ06orKD62iHyNph5i6T0VcdXA3TB51u6QZl3HmmJW/eKpPJLiWEi9ynQrrdoJfFlyMb6g0FsUIqyTCjyBayqCi/ZpVNqIVEJpMtpHM57fAqZYm8yiykCry6ukEVJlNAnhwzQJs+srCiVYWoMwhU4pwmxgJTDK3rncTaw3K6zub3PaZYh5YpFCAGMAlOw3TR021X2DfOerm1xPiKFRpvprsFordG6RBcFhdIkDUEJijKhTIEuS1zndhRWFyOiMLsOmKsDtydEwWq1Irqc5IcpMnA1GJqjoJ6UeO9z+rbNlF0bMwMsCbCdg5Qtq1TKbjJGKryU+JioZUXvHZ0MdEpgrccKT0DiUqJ3lpT1aUjIMc4pyzBSygbpuXoAI4VVpLzkTGkIfRh+HxrKUHUdGuqgKR/taCX5YePjv2wh9DYE/G2LpdGnY0Tzx/vhDLkDKDWEMN2g55CTgqQ0BO9Yri1sLUKuUVozuVgync843D/AGJNZmUqiFCihUXpI+PEeOYA7WeqjmBR19vUQgtb22d+g6/CdRUhJIcCUFcEkeiOIA6jVW4vt+wxwCIFWmlIX9C6bQCupKHQ5DIS5n80Xc3zf0vcOQWJWlejCMJ9PmU0qlEgczBcczWb0fct2s6FpNrjekkJgagqEFGw3WzbNJRfGMKn3mE+OKfWM2WTKyckxh0d7bPsVLoZMm4+RTdOyafrMLhU6+z8Ilfu80nkD2FtSCvS2BSJlWRBFAhQxCZreMtmZ/luMK3J7lIqmtyhdUNUKU5RUdd5Q2NoSXKSoDEcnBwQROb3ecnl6zunZcx6dnlPUE8qy4p37d3nv4UNm0xpnO/rOIkSBQONd9rIMgsHnMeET2BDprUPLRCEZJN2CQMSFgIuBmAJSJBj7ZoSYspwgpICPHusDQhRYt2W5XdN2lhg1Kgk0IjOXSXgJbuzXzhK6noDF9g3dRjFXAr+YI0xNUWqiV7B1RBdoNtdIXQOHiJRTWaUQOWlJCSI6MzGkylVYIVFySCo2uVqfx6eYpdKFyT6UqsCUFfVshtCG88srHn+2ZDF/ynsfvMe9+3c5Or5PKWsuXp7SdVuatqU1Gtl7YopUVUWFYz8KCFti8EyKKfXRIZu+4+zlKa3d4+DkCBslv/38Cf/xb39FpTQPH9zjvXcecHywQKFIUiOEQxColMQkT9+1zPQRYj5BFz1KJx6JyHKzRVcVSgvWVx2pXWFUopxo9u7M0OUEmwLrtmX56hRvK6TwHC9mqMkMo3QuRAy+dcEHHDFLWWTC9T3BZpPjRKLzntY5VJkBUQcYKalnU3S5IpL9VGezKYQhJbjNhu3Ziyf7vvqUE7yESMz2ZuiyoCgryqokdP1uUTYu0FIS2JjYOkeMgW1IeBmph4VwkgpUTuIS0Q/MPcmkLpFIFA6lI1UlmEw1aLB7U47u3+Wk2bKMkeVqiwswNxWnl9cs0pxiYvB94PzVBdfrLb0P7B3s8dE3H7I+f4E/3ENVswF3SLv54Na0MPycoZ7/evx9j2HGC4noI+dnFzx98oK9xQGH+wf8+Ic/YlKXbDZbfv/Jp2glOdg7zOm0Iqe3n51tWG0C7iCx3X6ORrGYPmC7TmhTs388w0yyh1KIkEJCyZirwilvABvRo3Rm4eooqJIGUSJkTVy+ZPPJrzl79YxVs8FgmLYVH937Jj/9kz/l3Y+OgRU6bAmhQ9VXXC+v+PSTn/Hs2X/i+sVLXnzyjO58wwfH91jcfZ+9Cpr2JT/7zV/xZLPHD354wp/94AHCGn79m9/x+dNrVKypg2JfJea65vjBN2hsx+knvyHKCz786bep5gf4OqJnksP9E+b6mHvv/5D6pz/merlldb3GmQXF1JCwhFFOFlvsdkt7vgKt2Ns7AgwuRTZ9S6pmzI+O8hg8UtOSYJT3Mfh+fhnxbNdPEpCGwLGUbt0Y0jvTjSIDA1IgKDH1CabaA2UQsRipFSBygBQESH5ciOUE+sHgO4k8pqWhUDSabQsx9FYlKVS5k1GOJKd/ziDd7Y3s+PO4YR/VHv9Qx7g3YmDRrddLtM5FmjQUzr8uZCOJzO4JMUslFYPixgf6pqddbjnYO2A6mWePQ3IC/G5z+yXt8OsP8drda/+dEqOWNEVPv7zk4uIVUikO7tynnC/yPJAynqbRHB/cYVH/gNlezcHBHUhq0K/mlx0L0SO7NBEo4gaxeUz65Jf405d0yysikeJ4n+qD9xD73yRRoZjhCNio0dKQpCEgKIAiZEVB2Kw4f/w5F8+e0a/XNFeXXDx7QhU0IvjsPS18DmhCEGLC+UAUiSgcUXiSiLlMKg
QSiU+J6C3VtGQ6n+XilIDZfI4SOYhOBU+3iQQBhdEkJ3HeUpsZRV0QbML7yMvzNc16QyFgMa3zvp0VvXtORGJ7jzEFtgts235gGuXiQF47ADKD6jG7MeUsIGvRKVIYQzWbY5ImNpLrbcvjzQX/128+5ZyKz71mKwrq6YS6V2x+9VvOtOFXv/wNv/vd53z22Uu2DdhQ0llBHOxilJIQLGUZKGRgbzHjL//i+/wvf/kDjqsekQwxDWyz26PJAKYsr6+5uL7merOlC44ksx1V8DmwLoqIJM8/Sulc7E/gncMPrLiR0BSHexJIsrWU1CKHCI5rFD92rMxk7dptBu589u4bEz1HRlVIOVgzq1ccwXlssKyXK1ara9ptfn5wHu9zOqsPdkdakcoMku1b8HdKu7CKXAjOBBilBssWcYMkpDeAsS9ToP1dj3EcfBvR5W2PfZuP51e99n/Oe3jbc28Tyr7sPb353jIT8O1A4+uvMaypUsI7v+uvIgSkvvGq27UBIXktCPQNBeHbPsebx1cCdGJXtnsdWbYh0nWWzmikUNnUVpq82B/ekEkJr9coKVA6J1/GyC4t1A8dSA+MCO8CYNFNy6Su0VoT+g7hI2pYyCgtkbrAkxkuTdMhfQCt6bzDhY5N31E3DVNdMHVQRBAyIHVOdjSlRvqI1B4bJboUmDjoibWi84FNt8QFjyonbJqOpmkyI2a4WFJmSexssf8aGpw9gPKG3YdE4/p8odKQrDVIOl0MOQU0CHzIhpEhgpJph+QqKYlSo4xADh4kPgZiitk7Lr0uEZVSYqTByGLXGLaRAUjzWDILjpGd5x1a51RYYRJKaiqt8FFnL60E674jDBTPkDITxMdA9Ln6djsVRe6AtrzWFAikfIM+K1+ftGOIjLRbNaDY4zPGZM2xo30dUn+7occYQcRd+x2NKW+GuBvgT6ms2Y8jeDdsHJOQGF0RlEHpNNCcwYXI9WrLctNweb2iKEqq0lAWBqM1ZkhCKrSmUDnNNbNfsqGwVpp2oDofHxzS91ni3TmbPQcJpJhBnaqYIJVBJPBFSWeKXaqrUoq+bZFCsFjMmU4ndF2HtZa6rplNalQKrF1PjJ5CGSbzCXU9YTKZUFUVfd8jZaQsNJNqwf5sStvMWF1fs95sMbqmLAyirNh2K65XSy7OVxhxRWlmnByf8OGH7zGb79HHFqU1s/mCeh2QjR3GM0EMCZ8CUmZPpCgi3gtKU6CFGlKIezo3pnTlazOdTjBljdYqM8iEwhQV3kek0jgfMeWE2XyOsw5JDrGw1tO2G1rryKICQVHXzPckNgqatufqes1qvWa1WrG3WKBFlnY7l7+r4PMkGFMOVREq999RDuljojKGGCyd88N41GdJ88CeS94SZSIlhSpKVFEShcB6h3UJlywxOlRRUE6n1KrGO4sWCRkjIZErqAOTT8a4A69D7+j7FqMUpdF07RavBWZWUZsZ2gf61Zbt6hJT6uwNOowtKJ0FUTJPIASDD5Gut8iUAZ00gEFCSqRSlGVFURYopQbZdJYNaGVIumTd9Jy+uuT8es17VxvefecBdxd77O3vs7x2SKEoTEUMLSKMCc6BojRUVUHbWUTylKVGL2Y5SqNQmV06rTk7W/Pi7Dyb7EpDExLvWc/x/pyDWUVVGKLdQMqpsUVKpOCohUbUBaaYc29vQrvqERK6ZoNSsLaRZ88SulS8Y+7To+hD5Gq95tXZKwgVs2lFc7cjzCNiMK9WMrMgpch+UYXWmMkEJQXBRaqqwMWwS3BOQhNFHsOt99m7ROZQnFxpzPOElBCCyyEsKnswWmsz4J/iTppvihxy4ZOnsx2tbbHOEnzIIJ0SLDdrwvNTXAostz3eVOyZmr29CV6I7JMpslw6xYiIAmUMpZBZnt9sabdLQrCYsqSoSmZ7C47vP+Cs6XFDuJGRBkKi6SxBRGZFwU9+8hMevPseZxfnaC347jc/oBSJbrOklJogEkplJu1uLAcYPuPNEvOf8/b+/79jhDeVEBSmoJ5MmM/nLOYLyrJEScFsWiOIuLZFScmDB+8wmUzzRjcKSJFgHc3Wcfr8HN/2TMsNlxcdWk356FsfcvLOSZY2+gYlHbNphRYVOKhKha4reinxEmToIWxAbiAsubq+4PSqoYk1B9/6Jh88eMir33zKVhTMDo4531xS6gYXNqROE5ZPOHv2hH//V/+O3/3qZ7Sv1rAS3J/tMbubaJZX/LtPPuNXz68I08ij9Sn+0ZbiFwfcfbnl8fOXPHt+zlSU/OCdb/CdB99gUhUUdw4w/ZYHry54tbnmenXB1ccds/WS/QcnPJz/OdVGoQ+P2axXtKuG48khE12B61Gyo9aBQN4stSlyaT2VmLAnJljrePToMaevzvjWt79NPTkhplyEGq22JDkR77VQlHHFD4zc0gGGuAFOknjzgbu/5bkrC9hi8mg9pLOqgZMiBrCNkS/iSakjJY0UAaH00BfzoUZJD9lr+OZ0gy/ezUKLsTD6L+W4vaFzzu2YH/+QAJ0QmUW32Ww4PT1lvV5zdHREVVVoXXwtQDems5uyQItIZxvOLs+4uLhEK8P+4gAhNJGxTUDeEvqb1vT3Gn6/5kkjykzA9z1PHn3Op59+ynQ24/v1lGq+yO/dW4TQTCYLju88pG81KI8PkmEiHCFlokhkEW9ueaSAiC2sLuDF58Rnz1GbDSI53KoipTX1TCHmh6RpxDAHWSJHgkKM9E2HaTqS8yxfnrJ59Bh5vaHqLLptEU3LZu1plld4uyROHPfu1SyI2AhNn4Nf8scNOfxICohikLll25nSFEyqGm9D9odDZRC1zwnQq7Wnt54yShobaDoLqacsNVeXK0DiOkffdmiRaBtPcB3b7YZElmpa67O9ShA46ymrKu/ThCDGAWQXWeoqk0ZEg6IYrJw0QRuuPHzWtoiLJS+uzvj81Ut+dbZiZQIvUsUy9JRtJIqeT69PuUqCD++/z8FWcd3VyKXFeUXtBdYHXPKE0GNUQmuB0XDnYM4Pv/sR33r3IUoM1ktCMrKKQwhoqUkkbNvz9PSU0/NXLLcbWmfxStIHT9I5MMd7j5Y5EiV7z2UMwjmHc+6m/yS5q3GMIEwkZQZdzBJU7z3RB+JYwADSAJBJKW4sO0JWovmYPYRb27BZN3TNBu89ITjabUvTbAnOZQJUzLFOI+tMKYVWmQyVxqTXlJl0IwCXg5hGyaV87Z6xmDDgLV9mDzWyCEfw7ja7bnzMm6DWVyorbz0P2KXN3mAnX83e+zJA76swgC9Tx+Vp8otEnjeBw3zJ3z5X5fOO72MEc9MOJBVKv75SHb/fW7e3fe9vex9vO74SoBtfbKQn5sacN9yd9ViX0CLQ6UCSHqHFQNdSCC2oqglKZGZcURQgVTa3d7nCEX1u+AyTnbOBTWrwzlFqTetaBJEUc4JhpQzFIIlNfaA2eeOpkNSmzFWQziPchmAMbRJ4KZAqkhwUQVNUucKXhKSczghRoFMGwEKKdK5j07Y0bcvF9VNMUVKWNcYUlGVJWRQ7M3VvPVrnzhljGuKSIRDok6NNHk/KhpYekk+IM
TTldieJgRgHA8nBKypEKMoaI4AoCK7PQRhCkVLAOodbLocXi6BG00K5u051lU00vc8SWq88LmYz4YRAqMx+K5AoXRBDZmCEIeRAVQWOPChaH1DW0vZZ8pd1+G/4ypEZOqMJ4pctXnaVyIFi/SZ9dfzdDwknbw4a4/lG480vSlSHwS4NKXRJ5gryri+Mi5EMMoqUiCJRKIUqNCImrHOZJaMUIpDliinbNXsXsM6zuVhl+bBRaJ1ly1JBoRRaaw4Xe0yqiqqqkcpkFpkp0cFncEwkqtJQGMU01QTn6V2Pcx7rI0rmz5FISBEojaQ0xU4SvdlsKQvNwd6MxWKPtmvp2o7FYs7h4T5GJNarGdvtBimzb6HYlRwjh/v7mTm2bfHBYrSiKguqk2P2ZnPa1jGZVFRzjUsLpssZF+crtqtI33VcXVwDgtlsxqYtkcYgdKb2m6JAa58DLyKkOLa5zBzIRqgRXRk0FX6g1rvgd34PKYmcljsYoXbaEYPDdo4YBTFlMGS7alivN0SXfcOaTcv1+pp6pkELnrx4hcNkHy5tSNKThKBte168OOPq4nLXzrLcQ2F7Sy8jvsiFBSkVUmbfR6k0QqldtHjm8wqUVBlcEwIRY04hHvqOSxmMcd7hQmYaFkpQGIgWGhvYti3ORTQwKQqMzuEgSimU1JnFJhVCFtQiIoJlr55QSknfbHFKEAs5+B5EJAHvWrxroShI3ADiEfBJkLRBKE/vE03bUaoBHSbL8bUyWZJvVC7AKImpKkRKWOeJCMp6xnS+z3rb8fL8ksZ5Li+v+e5777E3LXOilZSYoiDFAG1L1zVY2xGiRsrEpM4gW/ItpRK8895DopHU+wuE1mzbCYuDQxCKLsHjlxdsGsu9430+eu8+h/OaGARCREqtqOsa70W2S5BQlCXv3TlCxQIXBE+fN7nimRLr1Ybf/f5TVm7F/t0H1NN73L13H9GvOZwJZvUEo7I/zdg2pQARE0YrCiHQTlKrmroy2DbR+Z4+hBzSQKK3ltZKbKzRQONcZlHqDFh3rkf4DB4aLYDBo02EnAyGITGAxjHkVGzyAsg7h3N2WHR6nO2xzrLtWprLS7oQsFHhxIpUTrhjFX0ZsCFlya3KsEDfdyjRI4xGJIXrWtaX13Qk6r1JZgerkoP9Ex488FgPL58+53q14WBSIXWkKHL1/cG9u/z0Jz/m6ekL7t054Nvf/IAUe559/nvufkOzd/IuSZjsYS6GEXkcrxki6v/r8V90CARGGpSEe3fucjA/YrPa8vjRI5bLKx7cv8e9O8d0XUdZFENhVHB5sSalxNHJlPneBBkVqRE0ckldaSoREGLC8rzj5YtfI8st073IyeEhoroPfuTE92ixBVkhMWhaEi8Q4hWeK6oPDnjw4ffZdoJSS5rmEjsrOLjzAfN3PiJxjVJXNJB97oSkLiYclgtmvmR9el7+2E4AACAASURBVMWd4j7/3ff/hO/98BscHx3xV//+P/LZLx+zJHK0v8CUBSGWnNz9Ht/76F/jrre0y0sMPW275uLpKdMnMFlogllSvVtz+NH7zO+8z/T4hKQkfXFN+WCOmE2xT15iX7aYj+4RX23oHv2OprwiPABzvIecHLFCsJQKVRwQ4pyL84/55LNf4L0mxT/C2WlOttfZ5VTswLlbQJsERCThyH0i7a7q7m5g2kWX/Y6ULpGmuLn+koHZbSmKEWnTeN8QQkdZDskBJBItMW1w8QrhPaXcBzHLa0vGjY3YMQTE4IuX2BH3CDGDiDcskn/eAN2bG8mvkln9f39kqxXnetpuS0yesioGosPNZvfrDoEkxsRydcHF9TmNbWiaDXU9pZ7OmVYL6sEiaKiQkAaQLh+3zN6+5kxf/Wne8kuCaB3bzYbl1RXeOZrtlsNhHiBlRpCcTanafVq/wvotk5QLpa9j0jHvP0hZFZAibDvi2ZLNixXx6TVVs2WCJ9YburbDhYj54fcRhSaZhEkVdC3hvOHy0QvOn51xeXqOTLDZLFlfXKNWW9LVFe75S2Tb0HWWy25LE1pCgJnw+FKiJwWmqPNqO6Rs1zN4QCvyHJnIXmLRBdptm8P8+hw6lqIkeUHyiqZp6fotZl1gfU/ne1y3JcaG7bZDiQKSQAtDFIF+20PwWCvQRYHQKlvlBJXJIigkJSFkZZsXjjQQKpIISFHlm3QkCoIpuBSSv12uePz0jO2zz7lcPuf8+pzL6warBNcCLjpPKrZ4abn3cJ/5Ox/yb/7t/8rjj5/zs5/9NX/9Nz/n6dNn+GZFTBu6bo0qEtOqIMnI8fEd/vAP/oxvf/SnaHEvsyQpgMwqTikNnm0J33qev3zJ7z/7nNOzV2y6niAlIQp651GDh1wKeS8cQiQltwOlvAsEn6XHYmABv40V1nbbvOa6DdDFmNeVpJ2tE5BDAYLD9j1d12O9wwXPcrNktVxju/YGS2GwiBixNAGv+cGJcU99U4wZx95Mxsl7wCGsdGj9g43krbHptgrtzfHsq8CwN///y1hlXweyvQ3MexuD7MsYeV8GFv7djhzS8vq5htsuOfrt7zM/Vr72d7gZcnbs6aGdydvA6Guf6YtzxxhSksHxL8T2vHZ8rQfdmCiSXzwzWVJ0xJiNtEOA3oacGKccSWW2SDH4GYjB5V8XOf1Qa03fuexFp7KHGikgUw5liN7SdhanPZihCuiH9FWd8kbRKBKRYoiwLXRJUZbEAF3TZJRdKRg2PyE6XHC43tEGR0JhfZad+SDwKcvyfIr0rqNpO/reIpWkrmvquh427xIhxf/L3nv+SJadaX6/464Jl7ZMlmnHbjbJ5QyH5BiOZmYXgrTCSisI0EKA/kl9FvRhdz/sLoYas+Q0OWyaNtXdZbMqbbhrjtOH90ZkVrHY5IAzq12AF0hUVmTEjYh7zz33vM/7GHJM9G03mMmXWCtXiVF524gyyVJVJW30hL6/MpcctMpKiyx26z+X40AtTsQYSMHjYyLbAoPGx8HLyxk0Du87+r4XLfqGeZcF8U+D76szDq0RI0sFLjoqrQh1oup7mr5Hx0w0Gh8SSQvDLmeFzokcAkqJpMtqMV9P7gqsjRL/dzVxDI+bIRxjY6D50gC/NlG8iiJf7x5oPfhjDY9dp+de359+9QLZXGxEVBTTZLJ+6T23Lhyh3x5vY5Sc61FJjomma2k78aJqmo6m60gpo60lK00AjKvQVpG0JsTBIy4ElAKrDW3bUZcVZVFQlAWj8YjpaIw2jqquiQm0UThX4IBUBMpUDN/L0rQ9TedZrxv6riPnJEARhpwNB3tTQAp6YyJWZ4yNGJ23shRrDaPRSLyz0EOMu0xO85MTqrpgOqoI3tD7ltCs0dpQFg6jCpTJBN8RdaSwjtFoTOw97Vo6L1VZUtWVJGoOISpd3w2LdS02HlEWexsD1iStMObzBeMo11bKg3Grtigl7NDgE4vFkhwio/GI5BNdux6AiA4VMqcvXmC0RSVNYQuRfGfN7nSHTrUyH5UVKRtyLzJVV5aU9QjfNgKcrNf4IfxkPNlhNJqQU0+lMn0p/hLaFSil0cqidYFWCh86TM7i
I1g4ijJhmh4f4VL3FFY8vbo+sO4CzRCykohUZUHoW0z2aGNx1ZiRKnAESleQYkRlLfJgJfLgUltqV1AUFZ1WZN/gyISuRwuKKPKq1KEVOKMgS+CDGgAeYYpKBzoODCq0IURPH5R41GiDsQZjhBHhY6APAR/8NujCKk1RlKCthI8YiytKQpdZrnvggpFxvHl0QA4dy+WSxhXYLNT9kCIxBmFwZblPjKoCoyzEiNWJorIURSJkj8pC/U8YXKpwruLF5ZL5ak0XAvdv7rFbGcpZjbKWEHqssWgjhWLUUKnEbl1QjqY4FXny9OkQrhM5Pj5mERe8P5nx1W/eYWfnDo92ayrdsjurmU52sKYQyXnI4ivoe7l/GUMMQZLLC0v2VuZwdZXyt+5a1kaR8wxtzTZYqKgqiqIkpUjyHmMksCPnJA2MgTGnjSblRN/3xJyoawm3sHojMUuQo5zHrqFdr9FKvEgDkI2VEIy+JeZMQg9BGAZnDSpHmnWDYo2rpGlABN8GglIYCnS2aAzT6Q53lKUPmcXZJU4bUrPExx5tPKUzGBVxznB+fsZ6/oLUXHB+cQnFiD8odxjvHqFKaZaZgU2xjdDbbr9jz/02W0zCKFHKUBeG7COPHj3kRz/6AD3I2A/3d/nGN74BOROCjKO94b4SNVBmdFYc3D5gurPHztRgCzGZ//zBM04+XzCuYDKbMjvcwzoNPgAObEKZXtjJXc/p45/D8lMObmTc3ojRaIZKI1Q1xmbD+dk5sd6l3t9HKdFZrZdrzi8uqNWIcWG5+847/Evzz3lvx/GB+TGPfnrG8aPnvPH2bVzZcNp69u++zbvvvsu0yvRnD9nbvcnenTvsHr0Fi4blZx/xg+//Wz784G8Y6cgbN/bYnY35wecfc14XvLPM3L4TKKbnrH2PzUu+8813OX/0jEc/e8rs8F2KyRjIjN48pIw9ftyhbEYT6BXUFkprySET+8i9O3e5e++r7O4dsaEQKRUAD9mhKF8+eUqAhqwk/EFf8dQQ8EIWXjEF1qsFTdNji5LxdEZZjZCAhiEgxIgXoY+ewpRoW2JsFjkjkHMkJ2Eqtv0lhAhuTVFXAz4j4NymntkkH6Yoygpt5Ptopa+UD1wHsf7bBOquF3bX157OucHS5tfyG37LTVEUBYeHhxweHkIe/KzyFQvjV3xy+RlUAY8ffMaPf/J3FLXhD777bb767nuEmKmKHRQWiVKAGAClMFbWbr85wPobztOveZpSirKqqKoKBTTLFd1iSTnZAWMhgCod1c4eNWsqdpgeHJGzJXUBXdqtR9mGT2dSIreRtGq5/Pw5Jx8/o/vFZ1SLBTfHjumNKTZGcvWUVFboJkJ1QbfMzI9XfPKzB/zdX/0dn3zyEJ80dVkzcobD6YypHZPO5oSLC4oYCF1LMhCLEu86lrFn0axZd7K+UkqjswQU6JixiKzU5yznUyvWy9U2+b1rPVY7lDIYXZOixfeB1vcEH0hK1m9+3bNcNlTFhDQEwBVDkGLTdBROURQVbd+hqFBcsf43YycmT2ULclDD3yXYTWtZb2oK+gC2qqCccqocx6uO09MF54sV62WH1jWNV6w1RO2wVUlZVRzcv8tXv/Uddu/cZ7p7l93dN1BqQtd9H//kMWSHcRVtv8S4jI89d2++w5/96b/kjXtf3xIJpGxO0iAFaUxmaNuO58+f8+jRIy6XS/occa4koaAHYhKPPy0hPBsG3Oaa3uAir6tLN2NVIeutDUlp05zdYGAKNRB1PF3b0qyXooZaN3RdRx86lm2zVQ0WRYHWGu+7If1VkzYhAlyxtESxJmt1a6vhe0vNaoauyObz2AEDScPr8tC4Va8Ac68D5zYqvM3zrn/PzXf9dXPc9fnx+mO/Su326/b16mu+7G+vmwNfPodD8Mer+1Mby6vtjn4J/Nuciytw8qqrIApHI0StrF/CIOK145xyBiWAO7weLP2tADqlwTlD2yLghLrSqBtXkLMYjJaFABXZarCGqIRsvLmR9CGSlmusCcL0MhpjDTlotA4YU+Gshhjp2rWACCpiSwlcSCHj246z+RnL1VIM51Mm9gmdMroeM3Ui7cwIw8BYgxlVUFqyyvS+Y92u6IIXyVpVopKlW0sqZNO1XC7nxBQoK4cuHW/cvk3XtrRtI0BgUZCiRyUpfEmBZtUTQybESBomAKUUyRgWOdPmOJh3GkplqayT78q1E5lEHosSRkTbSgjFqCiuaJTOEXtPCJHCGMp6xGQ2xaAoKkdRFPR9x+ViTt/3AHS+G5hJg+WrMqQknc6oQFmNKw02K+gDvvfkQb7qfU9ZVfggOTdaGykak6QgjWxBO3gfbQZfYYwsXowMQGfLaxNi4tWJ7tUO4KuTiTVuG6KxAYs33h+bieO6xBhk8SSAT8CqQfcfB5rqECXttEiu4wDQ5RzFJ8eKXE0bhRqNUCbQLxtCFGBu4y2RMyJX9IMEcnN8TYFVdgAIE4umZdl2GC37dkVBMchdjVbMphPxsHMWZy3WWex1INIojFNUlQJGxJgkibdp8H6NMSJ3nV90tF1JYQtJKIoN62WWyTsNN6ocIF+xZWJMFNaiM8Q+kFKQ9LXSDscv47seY61I+qxDjwx9J954dmI52DukaTv6zrNcrpkvFlxcXBDCxnOyk+CMYsOoELm10hKqQs74GHFaUZYVOWd89PR9z3rdUx3VVEVBPS2pykLGgHakHFHKobJmXI4pXAkZClvQti2FLji6d8QiLDhbL7hY9zw/W4AuKF2F92nwehBPHWMsxdjhbIG2VvwFUmKV1jglBWpRlCItqeV9iQqSFqlJzJiYRSKYFaWxpNCjyIzqEevegymICRbLBTkIcIpv0MnTtJGQE9Y5soWuXUvSNYocpINmsib5ANlQ1xVFWaKMgr5HdR0jV6KsolUBvGd5ueTifCVeT9Yx2pOwnpQV63XDuCpp+57UNIx0Bp25ubfDZGSJvsGsOrRVGCyj0Zi6rtnd3+P52QkhBsaT6bYxYW3BqJ5QlCu6kHFFQVWNKYqKFBNVXW3IHjjr0FYCChbrNW1oKK1IQ5tmjTOynKycYVJbikLRpcysNlxUlstlz2K9oijl2C7bhp9+8gXz+Zz33riDLcqtEa8ls17NwTYkHRkVGnewQz3eYTqqmI0rPv7sM47PTqlnI/Z2d1itVuQU+cbXvkF39ozFySOZNzN0fS/nwhicK8jK0XUdqa6G+UcaLdpAUVhsVVKUjqxGpDahlGexWGCtpCyPRiMwYiStjATONM2atimw00SICVUJsJbbhrZv6XyHMWqQvRqsLZiMRuis6NZrGP6dX5xxlAJFYYm2IKiKvlWslwsWizE3Z3uMJjvYsiQET+o9o3FFTIHLxSVTa6hdzeHuEedtS+wyZTFhf3fKyXJFYXtuHBxyfnjAi08/wZKoK0dOHTFqmm7FfHnJxfycv//b70O3pO97jt56j2r3iP17X2VajhlqQiliVIIYSdkMjPDfAXS/zWYGg+0Y5d5YlwU70yk3D2/wxv27vPfu25RlQV2Vm3qenJTk1ljFRdcRjKz7uqrFFIm+cmT
b07Rzilsw8RWfPnjMT794yL37j/jWN99kZzKBrKk28uW4Qs1PWT74EH/xnOl8FzXtcXdq6imoixc8fbRksei4fectqukuOc2Jywv8ZUdeeXK9xvsVVVmyd+8AxxvMP/2IFw9W/P1HP+LnJw944723GO/u8+0/+VN+/3v/E8vjJ3z///m/OH58zNPHP6EwF5Q+0MWnpJ2OZbmiDxF9krk47XjxLPJFe0pz8fcc7zwhpDEhWt5/03HafsEHT55i3vgub/3+O6jKQ7+AW2CK25jUSTJiu6A6Pmc37nBj10A3Z6eYMNp7n73pLVRuyTTildVnTFEOeJ2HQZgsRv3ChogMYD4RSybFBlKPNRLmksm06YLPHv6CvcMDqukboKaEBCGPsOYApSyQ0NqRKKRoIKCHRtnzZ4/44IP/l3X3iDt3p3z9/T+hKB1DzCcktV0/brYMEoqRpR4AhdmEtioJl9Avl0H/v26btefGLsA59xvJU19lzb1a1P5TbkoxqCdAADO1BUdB/Yr3HwBcpKaIPjG/XGKU5u7RHfZ3Z2JLYSwJRUgKEpghIlO4GJmEpxxOaCa/VLBv1vS/CYPv131BNxnxlffeoyoKjl+8IKZIzgox/krgBGi2+/sc7NdIIEoJGLQryJLhx7CqHYilomTIz+e4Zz3xixXqPBHbwOPnJ9yMgd3ZmPXnLyAU2Lnj5PGP+cu//hGPn5xxsewkCFBZml6R6w5lE+erJb3boVs3FDljUsDqjLMapYywwck4J0QWH4TdWmorwXkhU7sKHzTR9xRFMZAcNP1ijVJZGM8DUaMP4vFqnWVUGGLyxNjJd8yecWGIvhEihlUQGwyJae1ABTKBujIYK3W81opRKb5vxljqeod6VBFypCwtReWoqpLdg112ZlNsEzkxjvOmZa+Y8HzVcvZ8znoVeHGxpnAFKiYwlhZIznCxWnL/9m3+x3/1r/ju9/4QpTWmUNx99zbfu/hznj9aEJYVi9WK0/NTXFjiQ8c3v/U1/vf/9V/zB7/336GjAbuZczJ28DRVQeaV0PY8e/yEF8+OuVwsiDlTj8Q7OMdAVTh8DEQvxJ6YMjH6l8CkNNRoWtvBViRIuvxwfetBJcdAeiElUgiE3tM2kqjadx2hawne07YtXdMO4XBBQJ+BmWeHyyQFqcs1DAkhVww8aZRcySmFnaUwWqxRNiERA99p++92Dri2D0AsS9TLktVXa+jXqdI2+7wOIF3//VVizPXXXX/t5m/hmsR289yXyTIvv+7Vx6+/5+s+65dtWmsi8SVwbzuPbxiK1/bzKtFnQ+bZhH9cP54JIa7ZopSQiM15TIlBhzyQjIZovpzQWWOUYWPfkge275dtXwrQXf9gVwdd2Bc5D1/ICu3alZZsNUkrAoocPdH3dDGAD2LimTV5SOkEqKoKg8I5hTFycIwthKmnIqZQGJNJVr5zD5ATG3PdorSkztP3HefnZ0Am9H4In8jUaowymqKwuFGJG1W0vqcNiZgN3TrQ9J5V29N5T8gZUxQCDGrofSsFl5ZFbuh6/MaQOyWaZvg9RLz3g75cbjBRa7qqpA2R1HmMUoxtCVVNtgZnFN6AdYoiOmK25IEeqa3BqYIYNeeLBuKKqnRMxxNKZ/Bdz7JtqJOjrgqsK4Xdp7QwpUqFsZb5akmObNmBktYj803MCbRI1kgKZTYpTxanIBtF20iIAVmjtBU/wQ1FMGdKK8m8MrgVzliMle76ZvxsBvr1QX8dlX/dImMrgb02kWwml9dd4JvzcX2cZiIh9mh1Dbnmal9d1+GsFu8nLRJVN3SCY4zkKAsQZczgz6EhZZISOTMZshquA8QDQ20mZaTLjBFwOAzStK7roGuGyTXT9B3OWaqixBVStFdlyXgyoS4LfAhozfbaKEtNVRVMJmNyjjQr6dgs2xVqDUUhQG2oKnxfDoCcw1pZjNptFEnGKohDgqgyRmS+KRACQ1KQAEG2cGjnyTZhYpIukBFJzTAM0IOk1wzUXd/3tG2gcIUcryjAtUGOqR4WfGYIJgBEYaE1BovWkURktWowOVGYAl1bNIpgPZ6OFDN937Iwcw73bnB054jSFHz88ccs50v2D/ZQhRZJfczMZjuUkz0aH7i4WIr3AgpJdNIoJUB5DoPPF5rQSwKv94l1s0LP15A1e9NddnZmtKtznBaj3771rLtA2/eEIJ0tjadwFoyjG7R8k8kEHQenl9RL6m/M+CS+JzEF8aSMXsz6lcUi4RoOjc1y7vSQglkqy6QsGReF3AhCxPoE3lO6ip2DIyb7Byw7z2q1xjnHzBXkEAg+QAwUVhEV+JzoY8KHSJfS0MVLGGvY2dnh7t27XC7nNOtWvM6U3OyzzmhrKMqSIkaUtcQk83tV1RRuI+OSmi9lyFpY1SkEmq7B+IiqDFYPbLSkmY1KRtOSkC0qa1I2FGcrLltP07Us1i17OzPGkwmn8xX9Lx7Qd3d54/ZNCjImthitmE7HaJsJsaDrHdV4zM7OhL29Ga5wTI5nBJMZT2ta3/Ef/v2/5S/1X1OrzLQE8oSyKCnLkvVyhQ9yjlS+dvsc2OIK8ZVSRuYlYxUmSLOkCQ0pl9R1zWwv82LthXmeEt4n0oaRt5mzhjkGPaSPaTN0geU6TCmRYsBqRVVatMr4viN5g84BFXsInoQl64xTWkJxUGIuMOxTEiHFPkEbh3MF2hUUhaUsZrhocdUElwJBJ6ZZSeoacOf2TcxqzuLFM9p+TVY9qy7y+edfUM72ef7iBVop6qqkVIrUdTx59JhPPvuCIzNmPBujTcZkLwnSWg9AwO/Aud9+kznamOFYGsVXvnKfGwf71LVjd3e8PcopQrPuePDpQ9arltt37zBPSx5dPmI2m3L65DlnT1/w/lfeYVRHFosXvHhxzhdfnPDppy9oVoF79/dI7Tnf/c43OTzYhdxBTsyPT3jwwY/57KOPcUSST4xdQD19THVjn8uzwM8+eMSdN7/O/vtvc/74AV98/AOefPaIZtFy7/4h996+Td7rSV0PzYpwecloUvO13/8mi1RxFiMv1pccr57wVnWPBz/5jM9+9iHnlw3FYcHx2XMmhxVvHB4yjhV33rnNjScHrI5PUEHxsw9/wdOzFYzGzB8e85XpTW7ffYdPv3jK/Pgpn5efMzq8w9E77zGezqBrif0ZzfoEVVSMd29D7sixQ4cO3Z+j0wWX50ua08DN+99AJQfMUSZAVCg3gZAGvZMoO8DJ+lrKRzIGjUUTUXQY7ZDk24gyGaMSLq3Z36vZ3SuJaU4bego7pWAymOdrMpsQCJAYohpFIMSGi/NzHn72gJDOuH1rD60qlHaAFakVGyetzephY8GkUErAHbUZbkpB3oRxiUwzvVIE5YyklF8r5P5JRv+1AqxtWxYLkW6Px+Ohgfjl25etTf9Lba++20syrZe2fO0nDaBvQimoXMmd27e5f+eOKItUIqdI71tcMSFp8CEzX0QKm9mZWhROWNnDmLlejP/DpWavfFLFNqFYaY2bTLl95w5lPcJWI1xZSvBWilIrKEUSjQkqZxQbduhWTTaMPyW1TILc9P
j5ivnJGU4ZynpMDi3rDM2qo7hYs7KaRye/4OKvf87PPnnMw6dn9Lngoo9cdgHqCp8lOAObCN7TWEXoexrfEL1nZAxNiARTUjgYGQs+IsR6J1LElNAxoFMk9z05aAhRfI1TgBzIKmK1QicPOWKyA5XxqSUlCbLTOlHogDYaa8BZQ1nWlEU92DBZ8XUtFNZpnLOMZjOxYIqJwjomE6khUYqiMKShJqzqAu0G4NlofN/S95HxbMby0tO2mabNrNaZdQtdsmhrKAvL2ke8MVCUHExn/Mlf/Bl//t//BTduHEDXonKFcZpbN/b4yjv36ZqOp8+OaddzQtS8fe8N/ux73+MP/+iPuXHrgKyVqBSygK8byyR0JveJ85NTHn3xkMePn9C2LR7xdd84JyZESqxywhghGGg92IGE8BLo07btFqzfPBajpCd7PxBduo5muWK1knTVbvD6TuFKHZejBIVpXpkjkqznXga/rl3L+ZebAFfdsivwbqheuf5nrtW9wFYx8dI1OgDpr6rVNu93/fXXr+9fumbzL4dDvI7N9uq/r5svrz/n1RTXV5//uhTZ6/t99fVblqO6YgRe/6zbvw3N3+395zXvsXn+RpK6eZpSA8bxyne8PgOzwXKQc7L97rzMYPx195PfGKDTWpO1Hlh0VyfWGAEWitKBVUSlMFmipYVB4cWHKkZiTmLwPkgO/WqN1lB4R4hCG99wTw2KiSqwBpHPZUunRDJpB/11XUj33/eevpMIYzbdAgPLrkEljw0W4wxRQ58STedpu4bLRSdJdCFhnKWqa2xhKUotkymCfmtn6IbI8w0ibIxhPK63Jy4lSXWRAaLwaM67gE+dpPukTX9UklSNEvmuSleDKYRI5zv6IKw8iyMmuYgjhpg1IWvSAGz4lDEpixyzlYHpXEGhZd9VPSIMdNgQIzFm+uCJPhKTsJlIiZSgD146oQoptrWSAj6I+Eh8Op0U10nYHbL0U9vvJQMP0uBhtQHEXkXKNxfG6+id19l2KEVSVxfYdSBus6/rk+rmvYwxInElo4yM3TSwCKy16CyoeMrSnXTa4IyRMIi0MeSMxGxIWVKM5fHhMhzO3eYi3KDgKr+c5LIN5N5c9ABZSXoQkZOLS6w1lM6JtM0axuMJeyERZ7MhtVVuO84Z3HADHQS61KWlbVtC70HlwQsx0bUrmvWSFKEsa+pyJDehNDAbjcUZI9LHpFE6oXIkD6lTmoQxSujiRJGmGgF/q1FNVcrNT2srixBbUFXC6tJaE2Kk73o0pYDBwySaFdcmTo9WIo8MUaR61hQD00oTU2YxX6FSoixqJhOFKxwhOLrhxmKUIgYBYnPwtH1gMZ/TLFe07ZpqPBaWV4a2awlqRUBhjGU8sqS+Exl3lvj0NKDXamDvjOoxs+kuRVnTz+fMF2tSfEHfdDwuDPeObrA7qVB6SIhOPT5GGCTEANo6sAV9yiht2d3ZxSZFv5qjNFRaAkmShhBkeg9JwkKMVtisJZgAPfg7DgsWLUWO1uKdWGqFD1CkzMgYpmWNK8bcuf8WN+7e4/jigvO2I7U9OcVB6hwpU6SoHZV2DIIrorZgCtEeRrnCp9MJ9+/e5fLyguNnx3SNBFRsfCA3c2JZVSg7pHNpkTzFLCmkEZHC2tJRViN29xyq7Lk8XbJuOnCG6GASA5PxPjYndN9jDdROszcdSYL0ZUOfViyWS1KKxPGE0iiaPvPk+AKjsbKloQAAIABJREFUC44OD6idxZmAzy0mBXZmY2BCxtCJ/oT9vV0a72liTzVylErx+OExvr9gVlc0lULbzP7ejJ2ypigqYc/RSHBMHub9nMh5U8SIj1+KHgska1DEoaupca6gqkSuLoE7ihASDE0BYyTlWVtJ4+58D0rYC1VV0/edpMTmjFGZ0WjErcMD+rbl5PSU1DW080vW8zNK4/B6RC4LSlcyHtWMRzXabNLYFahhfWIMylYUBIx1qGjQusKaTE6G9WoJCaqypI49oXbcO7pF1Td81sxZn1+SVaRPgcfPnnHW/g0HN27yr/+X/5kbtWG9WHD77a9x+N53edQEHj19zlvVPUa1JaUIBLQyKKQRsr1n/Oolyu+2X7HJvWZjRyHBT2QY145xtYtSQ6Muyl3q8nJO3wVSCOItmqHahT0VOfvkCz754AGPPn7IB//3X5FCA6pluT7h7Xfu8afvfZWHXzzn4osX/OQiUZ4kvvXtNxntauy45mIe+OnjyFk65O5bM/TdEvIZLR16VvLkky/48Ef/keX5Z1j1kI8ffMQvPvwZcRHILXw2G/H2P3uXb/3FmN3bipPHDzn79CnrbsLh22/y9t2vUN++yeX5MT/8y//E2ePnpMuPeP7scy7THNcVqMdz6uKSuzffoXpzl+bh56wvLnjz6BZ3pju8f+8mfdA8O1ty+nzFXjkmhY65P+Xz+ac8U57v3Hqfu/YO5hxQPdo3qItn+DZCLsE52pMLLs5OMJMZi5NP+fzvP8XY+9x68zvge9ALYu6FRe0c6GJAK2Q9Jcy5TBruRzFbVEZCGbSTZygBTpRyZN8yq/eZTm+g6tFV1z+JH1NhzeC57K6NjAzZAgaSJ/tE33qq0YS7t7/KZHIbKBlajyi1SYbXVwXIZktsgy5UTqKTlA8st8BrBd92vTaAM78NyPObbtcLtRDCwH52/6TA4D/u9jowTl2dhJf+lLmyCsiAQ6OZTSbs7dbMdndQqZfC0lTC2FcCtvYhcHJ6znik2JkdQPaE6LG22soN4Wp9/g+alTckn+FHb+iXSgA1tKPav8HtyS4ZjS4qhi7VsB5URCTVXquMxaDRW09pAYzzIHEdxnTv6VOHuz1ipO9SntaE547mTKMszFctJzny+brj6bLn89M58+zQbspl13LuW0Th0UApftnz1KOyQeVEkqhCymyYZkvtKtRozA03ogyZHBPGWkrncCpjVcIO17kz4nU8GlcCfmeHs5m6LCQlPmsKO8KWllxIQqR1UBYG6wyFc4Ove4XRBc6VFEUlCiCVUQPxQztN0/YD61FSScuqlDCNlDAmopQAV0pFss7EoZpJuafrL2m7juAzBMNUT1DlLstiBeMGVWbm3YKlinSuINuAq0YcvX2PN998m0pJCKEkICr2bhX88Z+8zbvv7bBcXXJ88hUObh+wszfj7ffeY/fegfhm9hHjNJuMwZy3sxah7Th+8pTHjx9xdvoCTCaGgE/ih6y1FhrCEIjYty2DJbvUN9cCXrTWTEejbS2aY0/fBXrf0qw7uq5jtVrRdd1gIyDKthylRtzMbSoPZIPtNSK4yVC6c32ak9rwSiWV8muYbgz15oa5OoSJyevVdk+Zq51viSy8EuRwDSzbfoZXAKjr/25+f3Vufp3a7dXtyySq1x+/Xre/+tzXS01fft/rpJ3r3+0l7IBBPprCwIwbwNu0CS+SxzaP5y1wusEWkIKVq/OV82AlM9Sx0uz4knlwQ6RQrz++G7ugL9t+jQfdLx8ko4x42uSMVeJFtE0cGQpbjSZamBQVYRj03SA1FCmLUAJXqxU6SnHS9z1Ga4weyNw6EZIfwDYzJ
KMIoKIHiVHSGlPVZG3po1yQxopxX58idB2h68nrhDKKbBSd9yxWntW648XZkt5H6nrCZFJhSzssNgI5BQG1MmL+mOV8mcINE2KxpbrndDVAYk6kEOlRNPSErDEBVBLZn7GD3jxDaezgZyEDIPiexWLBfL2g7xPjYsJkPGUymaJy5nyxIMZA7Sx1XVJUBTklluuWFHqcc5TOEn1i3azptQRHGOOoXEnMCeMLlOlRIaCURE3HHAfJ3yDPVfKdCyfsihAFRBD2XEHOHoiEuJlwpDg3drjw00Cx11r8wbYAZNgCbK+bBK5PLDlnlNFf+rzN2Hz9xS6Al7WaGDNd36KzFSN/Y4fzFrYXWx4WyNaJiarLmfVaGJEhyaSeEGBUaUTmrAaD5awgXyH5ajgom8SzjIAqm8Wz3HgkIc/HSAhRAkC0YtV41q3nYrEE77FamEiyqHSCCWaRGaeUhuAS+T4xRrquIQShXGstMvTtRwuZQhvGoxF2NMYMCY5iOjxIYdkADoGUBQjLIUg3wLqtHxZk7BBbbq2lsI7SVbLAGG4UXd+xSaUT9qa+ep8sMtochf6rtAYtBWXKEpzS9h7XOfphbMZhYae0yJ6Pbt5gZzqjKkt8ivi2Y2dvxs7OjMl0TEKxt7vDfO25ePiEy8UJbjSiLCY4Z+jKmqhbUu9lvCoBlYyxGAWdDyzWDcYn2j6hixrtapZNx4vjC0LwHO7PGFeOonBkrbFFgc+RpAzoQDIJrxRxGDulMbhB4luXjjZligacs6yDHIes0rZTc90kVo5Z3JpEhyxMuxg8XegJg4fZTlWh3Ii//cUTnpz/mINnZ7hRTdcF+t6Ln2dhyI0nE0E5klYkrfBZPDmD0nK+B7sjrQ3j8ZjZZMpleUH0Mjc7rWXBujHRjXEI1im3Y1KpoXEBZKXRRrqvUXmiUijrUFak9E3Xo/uOGBMpmeG6SFgytbPsTh3K1bh6RF2OODk54+TFC24e7OPqCeu+52y+5PDwBpO6RtlIG5bEdsWocFid6JqePkTAMKpKdqYTKgJeNfjQURspbH3bcNl6cg7UTlOGRFWWuLKQxVNMQyGdSENecByy5DKZnITpYpHF99QanNW07Yr1uiX4IEEjSW76xkoQiElGqPFZ0/ae9XqFIjOpR4yrCqMVq8VCANucmVQl5c0b6JzQOXDy4ozz58+4PH7GzJT0ZUKpktFUkprHdSXSiazE5xQB6bKx4DRWJbS1rNcdfUpkrem6jtPTF1gMkxsHpCSMt92dKfboFouzF1y0c7rQoJLFJ/Drnn00N27c5O604D8/fMjzH/wQ9XDOxxeJt37/j9i5ecCkngzBkhuR0K+oP3+3/YM2NdyfIGO0Eo/bqDZ9JbS+kqmNRjV1BdPxlIP92xy/eM7PP/o558vnfPHJY54+OKG96Fidzym0YTJyGDfh6NabfPsP/oj7RwtWl5pulTl/ccYP/upDQn7Bzv6Md97/5/z5v/g3UFtmex61/ogvPvz3/PDDHzLvPPrCMxtpTp/8nH/3+O+Y9z3ajXjvvbcZp4LPHnzOxz/+CT4bRjducPzoU5rz53z3D/6CP/4f/jf60ZRsFG/X73Hv6IBPvv8Tfvrjzzh58hF92aFPSmy5x3o38vSDj+jaFxx/8SlHN+7w1Xfe563DG+Rmidkb8cWL5/zNf/o7Fv0p+9UON9+csXhcQD1iND2iZEa+8IS4RIVL8uUZi4sF6DH1dIdHP/2Es+UF737niMv5grOTF9y/c5P1s58SciDpNbPbO5idMaQlpA70CHQlYB0GgSSkPeaGhoxBmrnkDD5C8OAUCgeqkvVIO4AatsA3PVlnMNLUyfm6w6MaGCagsqXQhtpZdsYjDnaPUGoEyUBOwh7SDKhKZPjPsO6SuU+Y+RFykH83+qsBRDRK1hXb4vJXMCv+8cf/1XtUVbVt3pZl+V/k/f9xN1k7vgTObbcNwCs/mU3YjqzV6qoghjmqX7KxOlQ4rHISYjfM/yn39D4NoVey9iRlspJ71FWRnvmHzMwvMUuGT2YYCiolzSuUhcoNn8UClq0CWUHaeJIBKSv0xjODBFmJuFWJNxRKUGNVWiZv36V6Yw/OL+HFbUbnpyyOn/DRo8/56PiYky7DeJ/Rzj6+g1UHqxS5iI2oSbRmlMXT2ceOoD2FVhQWnNPQJSauhHoGZWBiCiotrFfyMMeayGw25s7dQ5zbxZgaV9VMpyOKUonSooBRVYnFTVRUtqaYlATTSmqoDrjBhscaSQw1RuaLFEXBpIwhpSCsu+xRxjB1irIyEkIYPFpLgzYljzaK0Ht638lh1qJws0VJWUZSpYnRUxiLzQqbNC5qiqBxOHxs2NndIcaWqAtmNw75Z+9/jfe/+nUmbibrfSskEKUNeq/gjd97k/vlV0TGH1uUlfuRLixK9TIatZF1IaARCyElA5T52TnHz55xeXY+AMWbRM1ITIlNDnb0CR96uuBhUO0l0raxqYCUAsvFgj50NKs1i9Wc1bKh7daEXohFTdNARM5B3g7Hbc2oQb7b5rFhfb5pREgj9NpV/AoL7FVvdblgNtf6Bsi5mjvlylNbJtbmMRjqX3Xtd3759y977Msev06s+U32d70+f3UfvwqAe91+X8fu+zIW36ub1nrr9f+SCs+al1iT+ZXpTADhKyLQ9mMME5nScs7iJlsgD/fmaz8pJcGu4JfP8Zd85uvblwJ0zllSzKhBypSiDJSNVnfjD2aNwijpiGSjQBsx/yYPEp1hRbhBKAd5VlHWiIl5wuch1ZFhkgUu1g3ZKowa2jw+YjSMy8i4yEzqikpJ+owzYmxdFpJ02MdAgyInofBuEhhz1hhEwtguV6SssZUSmaFWhNQToyeFKJLEPMiCjGNcjbDWoK343YUhQMPnKxNKHzy+6+lSpukk6TD6IP4hXgy3PeI5VRWWFNMQ6WMIKdJ2HfPFktWqZa479maJlGRi7ns/eKoJ+8s5hzMazVUYQ0gR3/Wsm4YmyMRoi1KQWq3IaYP+im9UHwOrdUsMefD2UttUUwGYhlS9FGAAY8tSo3Ukdx6lBIm21g5eDAqGDk3a3IM399KhMshqMwepl2/1myfmq07BdVT8OhAXY3z5ArtGN9Vas0FBY04DCCkSXx+FpSFjQqS9Ir+W15dlCaXBxsTKrykKubi9jwMrb/MemwtMDROm3AJkmMtYt8ZszTtTuqLbKkHZsLaQCT2LGXTOit5Hzs8vubxcYLOkRE4mQcITtMjOiiHJdTyaonQmJ/GQk1Rkvz1WJ6eXEoLhZSynPhJtQVk4yEkYeSmTgnS9td74PgloW5QjXGEJOhKJwlYa0ldjFC+5jS/h5qYHMik6Y/HJY50TKWMMqBRQOovBptLi1wDowRsQZFwkFDEr0sDW9SnThkDuEaaNNbi6ImpFUJkudPSrjhgG30ptWLcrclSowrEzm3F0O7FoRcLZ94mm9RitB57lILk1CmMNG9ltnzKLroc20nlPWdVMikKaEGXH87ML2nZNWci4KcoCV49IxqELi3aRqDJ9SkQlQTTNakXMmiIlZlVJh6Z2g/9c04FW
4huZr4ohUVQrosp4EmEYZ7IUl4WIionkA6YwlM5S2ZJmHfjpo0+wDx5zdP8uprDS2XUGqxWmLFBhk96l8AlyjPgUaLM0PpUSqWUKUQpFI74aWkn4R0yewhQy7xrDuutQwYMqtuD3piDLWeanrve0nefRk6eczVtUtNT1mHI0Bm9pFg1Pnj2nsnsUbowxmjgkH4coCVaTuhTAqiw5fXFCs1oyJ3LzYI+6Fr/GdYjMRgWuHpNbL14kfUPfB6yraDsvDLSyYHcy43z9nPV6yawuOFu34gFnEvPlgs+/iKi248bBAU0WGbAYbF/xFeTiloKGQfKeQkuMidJaZpMSqxSr5ZL14soqoW07GQdaSQjJ0LGLKdH1kYvTMy7PT5lNJ9w/OmJclzhrISfxOkyB2ajG3rpJoaBUmmXTYoNnYrUAis4xKR2zUUlhrz53iGI6rYxF2QKcEpsCq+jTijYHkoV6MmK8M6Zdroi5o3AQo6F2JWU+4OL2ESfNimdnL+g7SWHXtiIpSd+sXMEvPnzAX//oQ9qdO/j9N+knh7z5ta+yN3LUtczFkLceXP+tldD/tW1KIUnkOYEycs3ma8c1Z/q+A6Wo64rzs0sWFw1GV5yeL/gPf/MLTnzL177+Pjd3bvK3//E/s8Twxq27vPH+u1QjRdyZcZ7v8ta3jzjYcyzmkUdfPGN1+YAnvzjm9OFH7I6+zu7dN/FnPcvjY/zJpzz+4EMe/P0PeXhxwcSMeXvnFn7VMtmt+L3v/SHjeze4sz9j3Hbc+EDzwQ8/4sd/8+9Q04qMYVrtEp0jlFDtlxAbVscPePyz77M6f8w33tllf/wOf/Wjn3Dx2Sm71RFL9ZwPP/gBTTimOpoyqW9w/CyTTi452DHsvVGi90pumSPK9YTbN+/yZr7P/k8Vt27e4s2vfAd76z5xmVl9/gx0ZHxwi926wJqO888+4id/9bc8OTvn4wfHuGLGrYMjdm8YOj4kpSmFuU9sx2A9Pp6STESpWxj9JsZalFVoI+wLQ0INQtfsW+LqkrhesDh7xvrynNn+Lrt3bqNiBm3J5QgitCdzGh/Zv3ko7HA0Zlh+b0QAG7ROkyhtYjox1KWRZJAIW8mHioLkbe9FAqKLzEeBjjAkzcrvVwwuhQUlMt3rEtlh6P2Tby81e5USz89h26Yp/lfPpNuuquT3VyvJ7TaAFQygak6S2KkcOnuePn1I11UcvXUkTKneo0ogF8JOy4FVcwEJej8l9XOMdbjCsdFsbNbcm3v5r9x+3blVw3OU3iz4gIwyhoyEsOWhFtDI8DNKWHSKX74vZDWofJTGZAnLwihyXWP276J2KpkDV3P05Rn1J7/A/cBSFBrz5JTYd5imo+wVSx/o8yWNbfDZ4KKjTwmXIj7LmjADxdBQM3UFuqZ3llXX0LWD4sw6qT1yiysyR/cPGe/sUo9vYN2Isqio65KsWnJqsSZhtYIQST7jVIEdFURX0vQrYuiG8LdMipIAj2+wtqJrPQyNfG0zMfek3GMwgGa9bFmtlqTYM5vNqMsSdBCZc7cmpYi1hQD5KFQM6BAxOeB0xllFCj3BQ04RaxW7dU1vM+vcY1OmtJrv/v63+D//zf/BH3/32zhVEnOgUR5KhaNCOYMezbYElaxGxBzoYj8wjcTnPCmkwU1klI3MfzHSL1ecHD/jxbOnrBaX5BRpuzVdiFIXDzNNTJncSzia1ULK6b0n5IBKWdbXPtCFjvPTC1rf0q3XrLs1oQv45AcbMQ3p1bAFqRU33uNxCCLc1J2GDei2kVIOJJ4Nqyvnlxh018kqgusMK5+hjs7D9ZS5xhS7tjq6Ys0NdTEvA2OvylKvf5bXSSyvv+76c163n9dtr5JlflMA7lfJPV9V2b26j+ufdwMeXt+XzmqLU22el5LMEVvmpOz55fcZgIiBhPjL7z+so/I1MI60adRL0ztruV9upmy9JfgMc5tWxBT4su3XAHSOpDPGBCnkN+b+SslARJB8kQgqSUW2wnDDGELopVOPwipQcZD3DZ+4rsZiUp880QcUicKIjj4qTRk9HgbUO5BVIKXIuhcT04ShMREVA74T77kMlNagnEV1gdJabGmpxxVFXdH0HqUuIVtOijnBCz1V5yxx2D6glLiBjOoJWYmP10an7lOi7Tv6rqMPER88fe9FIjrIRoMP+JDoOmFXqZjRRpGNtDFFdgmrxZKUhMVia4dPXjz6jEFZR9t6np+eMV+t2ZtN2dmdMh2NCL5nPhhV7swm7MwmjMqpgJ3RUxSaXetwXSSSiTFJrHSMQhPXVoCpGMlB0nMBnLHYwmGUDNz1ao7R0JNJIRPD4GEX89bDazPor8CxDeNGbzsJcXOx6ivwFa5JH1+DqCulfklffv2513XmwACwmKuuhEYWK4OHYFVJl3mLeiOyUWM2/QjpEIqPW0XIsPYCEnVdN3iZRVk4YIgpXXmvDBfzS12OfNVpIQ1BEte+p936LOUB5BTPCkjEmOg7SUSOMRFTSwyXLFeeUVUwGtVUpaF3hrJ0lIUTBuuQGpljJKTE/sEuXR9o1wIGZJ2HFNACbTYgWkfKYQiC2HRtBh8HBcoarHGAImWNGcIsjMlsZNl92+F7AQiJCZPF90j0+4bCWaJJpE4NN5I0THxyPqy14oNnJChGiIaySEvaENAC0CE+eVornCupRjVd7GlamTucES/MuqyxhWG+WnFxesqyjezOZuwejDg+OePp8kTYtkoWfonrNwqGVaHCuhpdFpAEKFz3nuOLCwogdg1HhztEqzhbLggXFxhn2dnbZzzblQWGkxS1kDNRaUKIrFcrYjY4p5lNHL0yFNZjnCPmFpUTfQz0UVJBN/eNNKxMI+JpaHQeVErCkiVFtBKOou8DjU+4cszujZL9Gzc4unebVbPk/PyFgKreo1Ig+Y4mR8iONgQqW5BsgSkjXbwg5YKc8uDJ4SEPsvyUxVE6JcaTmul0hitL1PPndEHAT2HVCY08cwXUMSzA103Lk+NjYgd7u/vYUcXOpKIYT2j6nvmqZTIZMa4dxmRU7uS8aU3tNDErZpMa4gzftoyLitl4wmw8lqRUrehVxqHAWqwuMK5EK5FXK62JCvqYGI0qlJmSomdSluhwwfH5JSmLjPry8pInMRC8J05GKC3S46u26gZYUtv1lcoJlcS42KhMaQsxLfYylouiYN2tWa1WVNlS1MVmFhzmMIMx4GPixckZpycnhK7j7u3bHO7tiqcjSlKGy8zudMzO+C0Od3c4ny+4efc2t+7cphsdsNZjxvWM2agS8+9hUYnSaCPJ0GgHxoitgwrowoJTZA0Htw8ZlZZnnz8k6My4LHGlxmqDTYrDW0fcaFpO1z3r9SUky3K5oO9bfn53n/2vvcXh3og37h3x0I8IxrFqez5/+Ii9SWZ8b49Z+bIJ/e8Aut9mk9RPrYYjOQzTrdzn/2PvPX8l284zv99KO1Q6qdPtdHOiKCZJI47GNjyAYcCAAf+b/u6PHmgM2xhpRIqkRV2Sl7y8ucPpPqnSDiv6w9q7TnXfQFnj0dgAF9DoqlO1d+2w1trrfd7nfZ4hYWX
9/++QaQKl675lwXn8JgfZPc9ncegnnQY4fYk8FOBmD4u/vFd0tcnc3BDjdPavL7KpQG79mu1xA7hIjoskAUmig1ZWV2j6gBNVRC4OK1sWJwOVohLxqJibIspSQ6kEWZQJidVFQk8Cs4YvBkgWGS34VEGwykSkBEIJRGxmQi6mLAWZdkeZmJ5YjYGBBR7NJQBhBExOuHixTZJw+FkwEvIypCadTu4eFCMgntvbtGvF16eAqRwCkhNFEkFpAxJr9PIzJwFHwaYMifZWJECI/D0vuY6kRCMBlPMIVif/8WQkjWyyVt2+LPHVVVMZ1OMZMxffQob6EVdNloPmTWm84pNsRhYS6hMAg0RkgKoYm9x4cs6SUBFSJ7/MXMogyp9+ZOnKoiw+DxOjovMrA13Ni6LF7p8DYDNl3f46xFxEG3nVl33n3rBks+dCaBVfGa8ekzO1JkANgHjwuZ2ZjBr23XJtA0Zhg2JrDZu5yOo9L2oxiqctngM1dLZJb7pYmq2HnQCZkIrDqbj75yDjKI9Uq1IKTko1eqCzKiBDuvvkH+q1Riz9VVgTEFImi0Uswm4xySIQg+AYwCgSoMRie2nQw++9ellCzb95RVyWg0YrI3Y7tds91uaZqGtu2BJA2eTMcQDKW3xL5HysSMG8ZPLwRRyDyND3kCnwNfTAJ02rYhBM94PKbM1302m6XrvWM03mCVxkCpTbovnKdtWyqlqAuFGY0Z1yOq4+SzcXl1RdM19M5ClNhGE4Kj3pul/o1I12KYDcYsv/eBUpSYosAnGDpf44B3gbJIoSiJLdzTdRYlG4gR7yKmkEm6SszXSTMeTei6lIjpQkAqjZIaEQQi+/jE6PNkyAEeGT0ypFJjDAIfBZ1nd72VSp5zafIxGEFHlDEIPNYF2tAlqW4MFKOK0jmUcvlh4Hf99OaCIT0cszw7kuXoCWiXubjgY5K4hBBRMntHZmn3AJL7SAJb+57oMwNRJNsY2wei1DgL1noKk5iwiOS3KXSF1iabzaqc/Jvk20aLDNoPLbJYXPDsvEG9rGmvNnx07z0mZYlrLNumZzQeYYrEQqmrklXbomLkYDZhs94wHY8oyorzyyXbpqV1DhnSA3XrApfbjqmDw/099g4CW3Ge/GWCJ/QdJnj2q5JJrbhzdJtxVfHZ4+esbI/vSJJ7qYgRRkWN84Gmtwgp8EXEEUl+bQHrPD6A0ZrSlLS9S8UuRCoaxCSjN1JjlMGKiBceoyNGl6lf5XHTuoYQPfPZXir25IVC13WUzjEd73Nrb5+L8xds1mu2iwXYWfI4jD71Q0UOo0ny7rQaSvtntGbtPbWUyc8yg2R3797lnXfe4cmTJ/lZGanHUyaTPW7tlZShp1lvmY1q3r1/F8cLnNtyefUSlOfJ06/44z/6CX/153/KH/7wAU++/oLl1YL53px6Nmaz2dJsG6qqYjQe73wpvR/Sz/7fwMT5f64JgJiZTeLbcMGO6SJECvUScOt4zvxwH+kE0cImRPz4Pv18wtefO85aTzE/4vt/9h5zatpyy//yi9/i7QK8YP/4mPfvv49fXfK3v/0V65NnFDIwrhNTOHoYjSo+vHeX//Dv/pQfP/iIX/7dr7h6+IyXlxfEbc++qLncBBbrwN16yq1pTSVBlmtC4zn5+oL5rSNkWeOiAO5Qlg1nL5esTn/JdDynKGfY1SkXFyuW+wesLq9wpWHvcE4bOr7++Gtmt+/y44OfEO0qsad1RycumLx3yF/8h3/P3he/5M7+nKPZAVHNELXAXrzk/NFXtLZk7/j73HrvmF56Pv/yNwRxwbuTHyBFDRiMSimQ3jm879DaIShTsUgCsgfWtJsVV2dXrLcdsgrce++QstoD4Ql+RQiLxCD1fZrshyRzRZssQ0xzTBcDdakRpHmeJRA9qKhQSEAjRInRARU68Fui21CUgun9A7aNotmcY/o5VEXuQa9SSjJ/BBimyDL5s+6AvBu0lRvzn5j72fVi+MYL/hm2tm358rPP+dlPf8bJyRl//Z/+mh//5IPEmhcipYzCNUi3A+v+sY5pADyvATpE8vMSIs3rRExF8HZjqWQgdi3OWbbblvPzK+z5imh7pDKosqIo8vMKcWOemRbNQQjWm47nJ6fU4xcsFw2PH7/g7PQiPdOAotBUOvkYD+BcYhfBtRya3CUG4DKzC0UWrEZS8WVYC4hkZSAyI1zkuUuz2bBaLCkKzf58jlIir/nS+67TLtM5ErKnlB76gJAFo1uGWRlYX77k4uI59Z6iF5Yr21AZyayoCWi0qqko8SIS+h4hBB2eje/Z4tExsI4B5R0SgxKaUqnk/es9XejxGEQunimlmOwfIUWFFAlErMqSGBWF2UtkAWURRUTpSEDSdSUxr7+3TUPf9zm11tNZiyk19WhE223xIWJDqoxH79m06fqaQmJMWh8I5/ExpoK9SvYRWuv0uTGFNKb03KxOEjL5cwqNlyV9VPQ2Yi28wjqISSyh8nQhEnLBGBARiyVES+86sB2h7ek2TfLE3Vj+5v/8Bb/67Vd8+vAJL86uuFxvITvg9AAAIABJREFUGU2m3Ln7gNsf/AFnL15iu47Nck2zXkIuhgfnWK27DM5lkETkOW4mOEgZkyVdZrwN893XFV+vQiTDPHnol/G18Yodqw6uJZ47P9dvvzKfqjf7vP2+yqrXAao3b+XV90Aq5r8JOHv9dW9rQwjETcbZsCaVObTrd+370N7EmrsJ0L2NTfgmae0/BKvud7HnhtfsznAe36/pDt9uu5E5A3qvPD8jO+LKMOdC/j0AurZJvmnegbVpwj9M2MejEtF19H2L65P5JUYSc0KLqQz7kwpk1uhmUY+UKpmuK4P3AUG6aZROi84EpgSCDxgBIkAQmihEkr8OqGMYbsi487/zUWZABoRPCTrJd20gQUMyuo04n1OAdjdw+tzk6yWQKAp5I15GpgW2zvvofALlohTEKJE+Lyzd9YWW6vqmCgKUTImlhZa7jm+9p7WWtm3po0uVgJjYdEYqrE8hFkIoNk1PCCva3jGqSw5u7TMeT4jes16vsV2fpaHpIVXVBV0G/5SQ6LwQlxFC9LTWEQVY73HRE4RM3ghSIQsYFxUhJP8s531iU8SQvfQCN2/NNPFKgJUnJtD0tZtrmJzt5IxCEggZBHC7rwGok1HumD6vDIIxZpNFCM7T3zDDJMRdRSgEj5ISbRQyypROGEIybxQC2evkGRgSeAkJLINU39vJqDJyPgzM6TDErnL0ppVPHnKuK4lkgHL3nhsDZn67uHFuhIxZSp6Yajuimsi/EwIZA1qkqmlwlma7ReCpTAFK0q7XSJ3YWUTQZH8JmUCZ0hQYrREyYsqCuq6oxy2r1Qq13hCjoKoq6tKAj5igaVBItUGKxMjzPoG4g+mqEMP5T+CNiOAGKXOMlFoxGo0QIoXIeO/Zbrcsl0uapsE6zyCLLcsqPZhiuj626/F9QQwJBEJISmOY1BVagisKqqpiNt/HVCXb7LEhIFXfuI7GVjGmB7jWiZmZJ4jaJCmijz7JK4ZxNkb6zhK8R5Ikq15GmqbL1yux3Mqi
oK5rts7T9D02BEIQKawERaVKSiEotSH2jih7RGcRnQXrcM7QSmhCYqZKbjrLDOOUyEDfUOCIeA8hOJTWKGMg2uQ7l7uflGnyteuvw5Mm5qpzPs8JYE0p0fgh4OVVZqyUAiN1HkcSgzc2LcoUGQxIE5e29wgPtRYYEemcpwgRR9hNXrUEaSSo3Vpjtw0f+rTQjal4MxpXjMY13ckV63bD+nwBjefB8R1McPhoQBgSZiOQ3qJySEkMYGKgVIrRaEpZjhBScXJ6xmZ9xv7+nNFozIuzSy4vlxzszbi9N0UiKLShqEfUhUTFlGqudYEwJQSH7XueXS5ZuYBXGgtsmx4tNV326lRK4YFC5/tCgNQmgXC6RGpNaBNDmHy9CIAnyZXyA70oCqooEpvRaITStNZysTpHXKUJ+cTUjIUm9B1t31NZi9Seuig5vnXApquotUFbkDEQfE8MPSE6iJ4oJOBzOFCacFdViRDsxtSyLGmaFgBtCkxRslqt6dotm7bDhsB4PGJejpA4TFewtbdYtRuevDzl449/Tj2ZMJ3O+OC9e1jbMZ9OOTq6TW0MpppydbHg448/Y7ne8MMf/pB3338fFVNf/Rdg7vdoNxbIaQzjW6SlYTgYirwIECpLy1RiUyoiBwJcNWN18ADKQDya05xs+ekvP+HZyVOW3Rn7oeUPDm/xJ3/+gPe+/x73+AsO7YJnTx4xHpX49pyTp7+l3b6k2wjOS8mnDx/z+OFTPvvNx6xWq1R86Hsebc9Zd4pifIvlds37reR4XnLwwR5Xi4b/7dNfMzucU0wU+8WM+eyAg3cFZ5tf8/jkjHlpqI8OKNoN3zw9YT6fce+9Me88OOZyueHpowuaLrBeblDlCrXnQG5AWHrZ0IpLitoz1gXN5ZblYsOt945Bb6FeIm/3lMEwuV+iJyPc2SX26pyr00ecTt7lzoMPMJMDhJYgO7w7pXNnydeRA7D72Ux5BZxQTlYcGo153rPYbOgXUGlD1IK+W9JcfcVkXFOoGTEkHyxRzkAlsDyI5MEpNbjYE2OPVjqN6ypJ2REa4iFK7AEtyEswL5GVRs8mmOKAShcwmkNZgda5YPXGLvVKG+bXQwpn6maRgSkVb7Incl/85wzOATjnOT+75JtvnrNYLNP8Xyn25rMEHL1pcb0D6f4x2zVAN0hAh6Jv31uePnzB40dPmJbwowd3iIRskTGQFkqSSa/FFEUeA2JSQan0bNKmoCwlbWd58vQl1peslg1nZwu6bvC9VciQFDbEuGMvwXDtHdcMEUFabkaI18vOmMEVGQM5QhxETwyO0AVWPVxcLGm3a148f8Hzp99w9+4d/ujf/BHzgzlC3fB/3n1ggK5DhAVUKzg7Qaw02ra8M15xf9LStx2x0Cxjy8K3dNpkz/FUaC2kpu8UThXEKOiATYxsMr/OBMB7qpjsPmyf2HrOB2xMlmxOeKIIRBEpRiPG9TFK1yhVYcoaaxUxFKgYMKVnNAWpOzbrDfZyS9dtsf2KTXdFyEFwMXqKsqAsDdOwT9f32OBYLDfowuCBzXbLZLbH3ByggsVIsMFjvcPLRDIIpP33MdlF7bRB+T4NIeAs+DjFxQ3WlvSdx/kOwWQ3Z+ziiiKCoEIInVilCLqtYNs5rtrIxeWSk6ePePrwS54//pLnTx7y4sUzrlYLLtdrlr1l1Xd4U3B47y7vf/+Ig9sHKFFTzCYsL9Y49RwH6LJGWIvrNvRNWgOF0KfxhbBbfwRSsF/02btbpDXKsAYdWE8qpQveuKeGH2+whuO3wbdX1rRv+P31z6+CdMP33yVtff33b9re7942O+LJ62y0oQ3zqLcx7d4kMZXiem0sb35u+vBXajPDGvm7tn9TYfj6cf4ucO73UVG8CQD8vd/3Srd4HXBMvvQotfOdv7FKy8v+wfbhGthLIHHCK+JrqsPX23cCdF2XJWgOrI30ziJDpKgUdVlw7+5t6kLgbU/bbwgCOp/8RfCe1eIygyopUlkJhVAapQqkNskjTKuUypgDEnrXY/sA0RFcn+KiUzRM6ixCooTIcsnsqSYhxpT2swMuPUidZGshJJ8Go0vQIVUbcoUxnbA0sZAy+5GJZJCuVV6EDjiKUMmjTOT44BgQA/tKipRelTOjJQLNNcruA0kSqBNACSDlJKWDtU0aMNJTHx8FhIBSBm8twUWkDMlbyLnEBAsBoxT1qOLg4AilFJcX52y3W8gsRKQgeotBMq5KxuMRk6pO2w+RpmmShNP29H1Kd7VZ4tpZT6kqBopn8j7TyJAGwRAj7Xa7Y9EopbIsMBAzE0rl/w++Y+mY034Bu0Rfl035B637LnbZszPrh8wmGyq3+fh9CFny53c3nRSJhq2lolASVZQJ7OgtrUtSYU8K/hBREmN6UO18G0n9K+TUoVTne2WaugN20zwzscZkHPh1cfcQiDcqKzKDdYN33TD4vA6ix1wV0lImJp1SCWzJfTvdDQEjVAKovWW53LJeXVGWJdPpJHkVakP0ji6zJIohqcmYHQslDECqjGhtmO6VlHXNZNZzebEgSoHtO1zf0CPpXQr38D6FKgiZQPngVQJ2XAqNIHC9jfwg1Dnhs8iBHXVd74DVIWhiqMjEGLE2mdYrmVhbAkHMAR/O9pRaMxuPMUrQtIbOWsbjMfP9fXRdIbZb6k2DkZomtCmoJZD83spULHDWYvvE4NNaMpYjjEoLnrZpcC5glEzb7Pp0H+Tr0ZMAlaoqMIXB3aBdO+9ZrdfMm4am67GdAy/QUVFKQ60KZN+jpKewkcInGbpSGnSJLwxBpApFSnIdOkmSTiJDSq6SMVfPVeqvhSFojXVdmnTL7HcmJTLm7yI9JFIhhB37WookzUzJozGlKu/CffIDamAkakWwaTzqvEdKKOo6SX5DSvSMBAiS1nkKEem8p84Afsj+aLoq0FVB1Ik17clWBDEQg6cqNBUFtktg8XRvSrW+ZLNt8UHSND2rzYZJIUBOiULjo2dUGqKzTAtDax1913MwmSLQnC8bBDAZj+l6S9tayrIGoWjbjkWz5ez0lOWtW9RacLw/4/axAGmQIlDqgsJoVKEYSc+8EhS3b3HZWZ6eL9h2lrKoQUDnLBEoqyKNk6bEaAnRY+oWFyNegA0hs3bljiUW4jW7PPrE8jOloUKmlDat0UqzsQ1nl5f0fcuoHHEwmhPLEXIougRwbY8Ebt86xMY9xGTKuU3hPiE4ordE3yOiQ5KCdxQxhbp0Fq0NRmu0SlYEWiWgvW1bmqbbyaaXm4Z12+EBbTSHhxMKA5ydUy0lhwdTNt2Gk6sF23bNkPartaGe7LNeP+SXn33MePQFs/kB1jlu377N4fFxYro6l5/5N6V0/9Le3L59fq7BkXTfayWHLCdU9oIKuxlmICi4c7fiyMGj5x1RFWjR4bdrRLPm3aMDbh/O6V3LeLvgw3HBPJbEyw1/9OEPWHzxGWfPnqB1xMkGFy+p6g7hNC9ePKVZRypV0iwXCNdzfDBncjTncrOEHqIZUx3sswk9nzx6xGgMnhHLreC4dhjX0XRjivgOgp6yhLsf3OPBD/6Uq0XPF4/
OWFxtePL0hA9++CNCXPPxr37Bl18suHP0A85PAy8fP+PgXsl68xg5ljhv+O3/8Tf83d9+hg4F5UxzcqQ5ODqAZg89G3O4/31iNEgtEWGNkluqUrA8v+CzX3zMaP/HHM7ugAt0bcvp5SmX64fUI8/+5D5jHamNwrVPOTv5BZfnDxkV95hWH/HewW2k9LBeI0pNu95y8uQJ8Xifg7kGr8GMibIG10NREkLEejCm2slrJC0Rm1ktFYgyz0qHxM+WaJe07RVKe8TUUB3dhmo/ydy9zRLE+Er/id+uR36rt+1+zquWa+kh10jwK0jxf0O3/idqo9GIH//kJzRNy2Kx5MGDBwnGuKFeemP7JwPpUpJ7ekikedx2ueHzTz/l5z/7O+ZTzQ/f/x8IMXlcN12brBB0gdYGUaTivwsOH3qEiCgZ0ToFRtWTkqZznJxe4HzBZtvTtSHZXAhB7z22tVR1hRtYa/m6RiCGBPpdt4Fl+WoTALloHGPEtRsWlxdcLtZ89vA5v/jlb7g4P6VvWkZlwf7eLBWM5MD+vyltDURrsesF/fYbhH/IuNvAaY/48pw9dc5//+cfcvys4ldPHrFsPDakdVwfBdqDlB4hevAWKTVdhEZENki2pPTStRAQSBZALnkl9zFgkXip8CKFYSTbH4fUBjOdUYznqGKMqab4bWC5stRlyXhu2N+L4Bb0wSOvGmJwnJ6d0GwWRHwOYnP4hWe1XaGM5vbt21ytV7w8OSOIFMzQtC1Hd+7wzv2Wd+5/AMgEzjmPrgqMNkMoLoWU+OzDHmBHPkhXK4KQxCDSfNBFhPeZhwsCTyEkSkTwnmAt682Cp98858svvuHhNyf84tOvuLxacPbyGxYXL+jXC6JtiLEjEOi9Q44rwCZPvZFhfDBFjEq2W08nJNsQkyewKXZKD1RFWUmCswifirkxpv4cYyJ9hOjRDGw6uVurvpH5dRPc4rX57nfMMwZf3Jta7jeBSr8vU+5Nv3sbs+saDGcXlvft97xZWjq0mwy2N73ubfs1rOdf+bwb+wOvgnNvYs8N23/TMf2uY3/b337f134XaPcqmDgc0WvvTy/MeEbqVzHPSQd1XRA3WOO7DxuIRum8+/j3AOikMjifgLngE/uGkICtuq4pCs1kUjIu5yAy+BFJfm4qIkOKeG7bns55+qanaTYEvwGlKYpqJ5EcgDpTJjlNbSQ6y28GTyMpFAFBn423hdQZQAzpoSkknogSKqXK+ZwMqpLhd9/3WMLO9ypGcD4gQ0BLBeS0IDwhSHrfIzI6F6VAKZ/UBcETgqcszQ5cStKwxLYQSiJiSGw77xOWr5KJfJSJeiu1AmkRKhm8ekQymc8ply4KrHW7caPrHUpFKBR971iGNV3bMqpLDm/NOT4+pioLnj9/zmaTYrU72zOZjBjVJT4GNpsNGsG4rjFK4kuDDyHJIEcVqm25WK+xNuyMZ6PPQKZQmTnoEEJTlBrd59RVpa87qhDIAFEkuRwxmfsn/5P0UcPCP70mSWwzGTKBnflGtd6jpUzpZ3kSghQJuMpgXWKm+B1YNjDsnHOYcTpOqfVAFqJ1KZW16Tucj8SYkoLTTXczRVhgnUPIuItJJl5XRd5ayRgG55gORmTTlmupbGI1DcwsQQJHkvR1+NyQcyFSNcpkWVk6rlRdFGRDyhhwztJ2DV3bEYkUFwVlWYIymLKkrkqqqqRQmr7tqMqSoihwWu+ORaoEA4mQHsqmKJnO5gTfoUWH0TXexpz4dkjXneaQDVhutmxcmwzttURqQ99Gur7HaIORGYDL/oA3adMDoKW1BqlwLiVETkYjBILCaHzfs10taduGRosUQhPSwsf2fUr3HE8YiQToLhYL2GzoEdRVzXx/P6UXb1t8SBOVoDxCmyS99SGlUUuVvPqEBSHzpKvFi+sq0yDxFAi6pmUymSClxvYeG+zuNfV4RB8cZ5slLy8vcO4BNQW+CQTlUGGMihG/apiqgok0mKiJURCMoZeKPvgsQ+gYmExATioO1EpTFIIgoLc9hSkIQNd3Cex2HqMlztkkdQiJEWGtw8SIMRplNIoEiqtcGUMk1sPOlzGD5gMjw1pL6HtKoSnqEeOyZO/OEUKllF7XWKLSRCJRpUAdWRZs+x6xXSPLgmJSoeoRTgSkURR1xbZv8SLJQF0MuRDh0GZMkCkUqKoKlCnQZWRSjLE+0NsOM5kSIvS9pSgUXd9TxcT2JkSclDQ+UETYG9Wcr7aUUrM/nbFkw7geM5vNWLhLGreGGDlfrone4gLcvmsZj6ZI4agmU0oFxICOjloG9vb3OS5rtFF88eQ5TbfFK896tUYpTdfvE4Pn6PAWs8kY27esmm1akABCG2Tp6Qn0wedwI5mYjBngFtEnwFQnG4au61Bdep02Bav1hi+/+gq/bPjTn/wR871xYkhkxp4SiqBSyrMc1+hVg3cpLAcCi6tzvO2Y3T4kGmjaDW3TZDltGv+1MWlsiWuIqS8uV2tciOzN59gI602DUwpdSOa3ZkhheX7+ktFIcRAnbLoZF8srXGdRRGzXY3uPiAola05OVnzzzW955713+Iv/7i/58KOPqMfjNE5lae1QkLmeIP9L+29tCWy56SImds/o3f8lbL3g6fOWF1cN5VTx0WzG/qSgONij1mNaBWYWWT495xf/+Wf8+n/9NUfHB9w5mPDi0Sl22/Li+WMur75mvhf4t3/2Ey5frvn0N8/prU8hMmpCIQPHR/c4ujdjvHxJvVyzWG5Znj3EFgohPS+enSH1hLKY8fzR5xxNLHePDgl2xXg24d9++AG2POTKGU5OV3RuD1NK+hY++9VDQjjFXXr+5KN/R1l+n9/8dE2z+pgf/asRx/OOg/v7EAP/Wgruvn+XwozYbp4Rz74irH9EHD8gFrdQo4okI410/YLL1TOW7SXVvObBn3yf/Qc16+YrNhdLXK8wZo93j39CWTcU2iB9i12f8fUnf8snv/wp7xy9xwff+4iLx4rlo4fs3+lYtIHi1keMbo2YTY6ZH9+GaKCeQGOhXcFkDKFHi5R4L2LIfqwRwhVCXJFYbHtAjY+Ozl4S5ArhVnSbE2ZTjRoLRLmCApA9Qt5GUjOwk4b2KksgMkgud7BMDBk0GjpZYmTf4FPc6H1v+GCu3/qGV/2TNikk+/v7/Me//o/EGNMcbDiM30Xg/UcE6a4ZGgPTyWFMiW17Xj59wvNvvqZdXTI5uksIHRFLEB6tFUVdI3yLCwENqEJjKkVjt6ADKM90NkIWFT0doFhuOgILhNDEqPCRHOaVVAhFVeYwK40PyWc2RoGImujy6VK56Oo2FEUKN+ttR1mUQKRre5aLFS+evuCLz77k88++4OnLExbtltlsj3ffucf33v+AB++9yzvvvENZlgwEl0SRSGsAIonREgKr8zNG/oTo1ohlh+wv2bulaJsOoxtEbPGdRTkFWxCVpC4M+J4QGkoCvSlo+8A6WjZC06FpbE8XOtYyclTvMR3PUFJgQkcrJMveEtUosewFCC0JRlDujxgfH/ByueXJyVecXWxx1jCpKvY2grvbguM9iS7S+lUAVV
2zWiUbF2stvrdY2yONYjQxXJxfselbYi7aKaWxMQX/VPUIhEjMOyFQRVoHhRhTcGJitlAag7c9WiSyTd/3SYonPJ4ldWWBJiV1b3qqaZK0Bqfx0bBeL/j6y4d8/Kuf8fDhJ5yevuD50xc8eXnB2banD5HYd0TbIZxFMTB6E/uy33Q4JHo8JbgJm1VNGzVN41muNrTWEUyBqcYoXSGKDq80vkuwrBQx2WWF7NGcxyQp9G4NOhBFXmfevx3kGcasb7e0ZpO7++/69Tdek4Ea79+cHntz22/ahwE4GzzV3/begdQSboyar4Bi8dvA29tkpW/6+RXCzI3fDzLXvr8OAn1TG+ZnbwIth/2/2XZklrxN59zubzc/401Mu5v/vz7//pXz9Pr5uZYof/sa3QTXhoKDj9nCTKQgUpGJKFoN9lYJmE+qJg9Ck9ZumSl44zgifueF+Lb2nQBdWVZImdgxLvpsoO9o28SeWgiLDBUyG0crnSb/whSgQAtH3xd0naXrHW3Zsc2yWUQCmqL3WN8TukCvBLrTaCUwMjIySZYnlEaaAmHAqIKiKglSsliu8T4ShYDBay4kMM755L+mZZK++hgIwiR5li5ApIuvsp9SjNmjTSZ2UogeI8sdc+r1Dn6zQw2LW3HDc4wgIDq0EEij8wXJLC0pU2w5ZnezDIuN4cvbQNd24EgVASL4gHWJHhlj8ifqnWe9afLnC+bzeUqsDJk5KBTGlBjAkACRzWaTZIIqez0YQ1VURFMSdEnRd4DEdz22S8ymVJ1IktLO9oRtAreikkQGr7nryOYk0cpeFEOnz0mUIopsQu5fYdjtbr58PYoihQqoONxgKbFnSF2NMU8ehUxSP1LVJ3iPcLCOawolEdogpKT3PgEIzmVvxbQ1MdBQd2XRNAtLaUYBydBH8t7l70NiFfkQZf5h0JlHbk6mbvQhblQLxHUfkm+d0Q0DR9ono5PfyHhUIwDvHUVpaIt2l0rsfKDt1si2YbNODMdSm+TlVNXpe10jlUIbhdIa6bNkO7Nheh+QMVKYZMjqSoOUBQeVxXuJXqQkKBcEvY/YzDLrbaB3g/TS7YC4m/LloihYr9e7e2kIh/BZSh2FoC4rptMxvuvwfUuwyQ/Eh8B20yBJ/asoK5RWuBDwPp0D7xzLJhlqR59A+/SVGLTW2gS2e08S5ObJnU8+lc45oirSfSDYsRdDSOEiMsv8koRWEGNiIw59XRcaWWpWtuGrx4/5dL7HO1XHfS3ofGB1smC/qDBCMRYFlTQYBERBF2FrE/NKi0Q4vTlODPca+OQP6RKjVeS009SDxe7eS/dO6mcegXWerrMYnR8mpI3E1EnzdkBkKwEGAH3Xe/MDUUQKY6hmMw7mc6yPrH1MzFYZkseSivQx0NiedSeJ0iG9o1eKqhrjRKo898HRR4+Xcsesm+3XAKxXG6Ivkj2CSeC8kAYhdXpI5spVzP6mUitUCAjvKWWFFrDaNPSbDSIqJqMJnY9cvniJDzDf2+f28W2KomC7WBCsxQ6en8DZasPHXz5ktVrzwb1jDicli80CoyL7+zN0VRF1QYuiOdxntVrz6MUZIQici3TtFhFTiJGUAe9tAqkzcBZVShsXXhGloI+etk/J2krJ6/FBXssAQohIqRFKkZJ6U8L4crni642lFJoH799juj9nqhJbNGRPzlQwyWCzBiEj3rU8e/6MF+st97//Ph98+IBKa4SApmkY1QIlJevVCqMMUiQfMWUMuigYVTV7e/u01qPLc8p6zHi2RyBgKsOdO0d4GQlCcLA/4d27xzx9fkqzWnN+esblxRXHR/c4OLrPD//wj/GipBqVFGWVwPv8fBgkKv8CzP0DtDiAdK/WiIewzoDAC2ito7Etfb/Ah5YijJkAQqjEPhYGtEaXBUcfHCKUZ7toOTvd0qzh7PlLFqvH1JPA3Xv3+LO//A/UsuZ/Nv+Zp4+X2D4i5QwhAxfLhsav2NpL6tGE2e09Lq42NLZj//AW47tznnx9xtWLC967pbm7t8+P3v+Ae7duExW0UrEIFWEr8F2gNMne4eT0GcurDfsTyd5sn3fv3WM0/R69XfP0yRl7ozX3j+5TdJ7u0UNuj2s++E9/DLpk++XPCGOPmIwR0zmhntHjECKi3IaXzx7y+IuHHN+acP/WR9SVJMZtYmqrhr35AaPpMdY2XLw45+r8BTo45mPFg/vv8O7BEXpyD63fZbY6od2eMBsZpvfuw/h7XJx8zeLpGftGEANcLR7Ru4rZ4RH7d47xSmLGU6QaE70gWocwgeBbbHsKwdH3Z0ShCTrQxgVOrSh0pBorVFSEZYdtrvBmi69ATfbQcpIs4GMgWRfIHSgCiWENLsudIkS5C3BL1cksW4wDc+p6RhQzO3iQaA798Z8bk04OxVG4nhq+qX2bZPGP3AaLFTBaIUhg2WRUMN+reV4GmmbBanOFUFCWJVEIeueplaaua8qpZTx1mMKkxaJI8kldjFAmsmoDTWOpxhBHEqKmtwHpEyOs0ArbbQnxunAjkWkh2/doUxFixHYOpSSqlBTGQJYiai25WlywWa/55tFTfv3r3/D5b79gdbmmrqccHB7yB/fvce/+PX7w4Ye8e/8+ZV1lWV2g63qKsgAREVGl/pZZdCEIRAti4cE5cBqhCxA9jWvYdlvW2w1d4xFWYXSFdJKAI9ATlCMYybrvWVtYe+iQ2ES7wJOCAcdSpXR671lHS2EdtfV0oqdtW1BzhJFEYdk0C/qLyMXVgvXlFX1jaTewuQhsS6i5xXx0gBYRVSjKqiYGg+0lzSap2CSKEDTRK/omcnV1RRAglKKh0soCAAAgAElEQVSsx+iywAcFsaBpbALLSX0zEndrLSmTdYoAopRJhTSsa3PBXqmktmibLZvFJaHrqZXGN57PP3/EJ7/6BZ98+lOuLs/48ssv+fLzT1ivzylMxChNF6BXZdKqhQAhZLVQkgmDQCEJKDQFIlYEr2maiIiBTePYdknCixCg1fVqrQjoAaSyaf4ePEkJAIntERMwklhwrwJV6TDfQrLYtd93QPq2NPP/DmPuTfsyeP0O73ldCvomZtjNL3ljf14HoW4CWG/b5x3zUIhvbf/1gIe/T3vTcQ3g3XdJXF8H+H4fOfLbmIJD8MTN1Nq4k0+m7SS7sVQgZ+hbw3aHz8k+7IljM4xJMo1TO5Duzcf2evtOgC4du0apzIQYkgjxWNuzXjYouyW0W4oyJaOaqkQUaSE9mRiMVOhKU5jIyNSMR4EYBcpotCoy9XpL3zUkrlk6boWHvkmsARvorEOIniAVUWgQino8QWfNf4RMs84XLKahSURQeREac2Whty6BTCFf4JDAr+A8UafZqsqvT/OOrLnOC96hY7rg80JYoeSuhphOviAzvGTyUxo87/AIIXMiKggVEcohpEL1fUqy1ApnA96lsINgHT7YfFEjRE/QEq0lznmWmw1N0zCdjdmf38K5wGJxCUChE4igAZGpv847XJ+ReZkAOmEtfb4JU4ItmKJEBbCwk4Qm8KfDOQsiM6F2RpRD6mXMg6LILK+YF/7XNVchdkpXg
hgG0HTDpHMvkLpI1ydXIbhx4xmZ5K0hD9mCnCY73OQh0jiHFTJXhAUuJjZoYk/mwT0DcwKxS5sdWqJEi50XXrgxgLzahsoNu36S+scbaMDwynEkcFd+a7AbBsDBwyPGuEs6TWmnGiWT0bQ2JaYwVHWdDOOtwwZP6AQ2h27EEGhy5dNkadxsOqWsKqqqoqjKzGLLII1UbDYdRkZqETDKgKwQymCMYFQWGB2yoazAI9MDcVctT4voGHzqH/kYuq6j6zqKothVSHwETwqUUFnqbq2lJSb2Zwb3gnf4GGj7DrH0FKWhriuE0rjoabuepu/onceFgNEFQhf0KqCUpq5HFGVg06Z9sBubAE8lCU4kirIU4MNOGh+UTtJPcT2GKJUqU8O+Otcltlf2clRKYkqN3XpWXcOXD79mLhTcn3H0zj5eVNgI7bKn1xLXe4SLqCjR0uB1QVAyVaWlREmBluB3NH2yYXDqY9Y7dAZtUlBEBqkBEUKWv/hdFcd5T9O11LVJwLuICSxSyXcjSrEDjsmgfnoA5b5NGttiiEipqMrE2AydvZHoLOitpe8sUTp01NQmUpgEqLXBc3HyAl+OWHeWqBWmLJHGEJTCxZgYcd6hg8GYkigibd9hvcOU1Q6kQ0mCSBNPgUoM25igR2ctUSrGdY1ebrg4OYNygx5N2ZvOWG6aDHZX1FXN4XyP2Hdstw1Xm4YoJZvO8ejFCdumQRlNPbpHNZkiY48RgWlhKOoxbRBs2pb9Ucl6NmFrky/Kuu/YLBcoAiJanOuY789TSnoISNvT2h4XI1EqhFIEkYMeSJOC3TiQr4f3KfDGO08fHc56CIIYJJu24+XLU+Z7E3yM6KLAeYFzDhEdZlgsi5gYpd4Sg6fbrPnm64d0dsN8b4xUFZvVmovLK/b29jAmyYqkEoTe40L20ZOp3zifDK51UeFMxWRvzniqCd5xcHhI5z2L1YZpXXNrOuHi7JLVZs2jh1/zm998glIjRvUet++/z7OTC5zbImWS0b9eafxnto7//2xTAqoycu+wYCQL1uueUgWW0RFKA7JA9B62K4Rq+fCP7vBv/uxHXD5tefzpc6b1mEmx4otPV5wsT5jN3ufwzr9ifXmJcx76FrtRoGYpJbFbsL8XmU0L7hze4533fsjjl+d8/vwJ03fnSGURTxtsE8BVTCd3COYAV9/jyYsn/Ow3P8Oxzw9+9Ff8wftHTMwpG9vw6Fng5fM1vS25dbvmzgd7/OgPP+Kjfz3hk49/Dv1jus2al4sTvvjZ/8Xhrft8/8EP2HjHajTh4J05TPfASPqwxfktkzJi21MWTz5j/eQRH3zvXepS0q1b/CZS7x9Q6gpcj1Tn+L5hvbnk8nxFSYHuNObWhNnBbcRqAVe/ZHygGH84RR7UEMdAYD7fZ2a+hzaO0+fPefb0lNv3PmR+yyD0AmkkQjQQChAl6P/K3nv9SJbld36fY++9YdJWZbmuajvkDGfoZldckguBK2mJfdaD/s59W0iAJEpYAgtyqSG549pOV3d1l08bEdcdp4dzwmR2dQ/NShQEXiC7KyMjbsS9cez39zWCsFoxLhdoOUXS0q6eEfBMDw+oJpI+AWnMdie+hnGKEVNsdQTNEVHMSZREj7Tue4lUNvx5NszgmyyBMgkyUCJFBuVE4LtQq5vLov9X8a2/y3ETkBOwHje3iNz6//L/cZAun1qype9lpnv+XAElI7fu7PNbP3iP6K5IoUUIT11PmEwmWFORtMVoUMYiZAZQlZYYq6iqnP5eEBW0Mhwf30LbhuhEUbzkGyGEQGuL0YpxdFwtrhjcwMROgby/QwSEcNim3J8kcINn1fa8ePGSx48f89FHH/H69Qsuz8/RWvDWg3v80R//N9y/e5f9w9vcuv+Qup5SG13C+/JnS0JuLFvWQOW2nUoQGusawhcCt0poDYvTnq9fPeeXX37JZy9PeXba0XsLvsLoBhkUXRrpaPHS4VCcDZErb1h4RZcoRBBJFBEv4KwfsSmRnGdUnnncJ0mJSnkPM8RIkhKtwPgWu+w57C6Y6RG5b6lvH6BNhVQR20isX7K8GrhYrGiXA4EGmKHUiJQhX7MAkRLKKO7e3c+eN0pSTabYqmJMAW0qDo/2kSJuwYPdEmtMZe9Umm2MWz+xVMgXUSF8g45TxiV8/uFnvP7sFLdyfPLRh3z68c95+vxTfHSMI7ihxxiFlprkDSlkH8cgU/E/ztLotbpKlr2XFoogsx9XHHv65RV+cCy7gXEYEL7ww6REKEFKEWWr/LuWxEETRUKOhUkWfWFfFIKIpLyvzF51UpWiwRt6WNopIrD1/t0FjlJKeV+7ftVOn78JEL0JxNoFot4EcL0JpPq2168Lleu99E0gbu2p/iZw7ub73/wsQhQMBbGprYi09X8n/foU1TcBiW+65l0AELbhG7+O8fhdANfN7+LmdwgU8tHOmvoGQEcqBXJVPnexnFqz5W5OYhn/Kf9Z/0kUXErA9sG/20TxnQBd1xY2jg+lEeYNojGSqjIc7x0wNWA1aJGy4V0MRJeBoIthkUEoMuU5+LyJVMogg2J/f8o4Gmor8bUpCa6ZDq0EhG6FH13ecLuQN/LO048Dw+jh/DzTZwVIU6F1llvaylJbTYzFL0obgpAEFGNM+Dgg/LrTSbIPTq4uyBgQEoI0QEmnFALSGgYqX1RZgwS2kblCUNyiJIKQacQpFkAqt3DJWkiZO5USoKUgGZ1fKbLx5SihmQTcmHCpI47ZQHVN440x4X3EqSzx9VpS+5grW0rSNNkvq9Jr4/ZNJC1SapTMyZQhRcI4EMaRMWYPA1/alhJi41EmoJiDZ79AGyLj6LMcK7EB6kII+JCQG4nc7uZelok0lYKIKonqCinTNwYRXwDU5HO6YV6M5ATSdXVgO+Vkz6bNGkmAlHrz2ULMjL2QcgAHAtIGkBPb/13rN9vH1+BEHjB2Bt10Y8CV60H92weP9aPr6sQ6HXN9zvUAoYsXlVLbeyLlll7sg9v4iimtMdZi6zoDI85h6grnPcH5nLLpPdF5urbDe89yuaSeNEwnU6pJk43ny3mUrehGRxCBQUa0THhT40TKoHrfI9fBISEv3LPPnERqhYygUiS49UCbP/8wDLRtizGGw8NDrq6uuLy8wpWB0lq7SXYN45DFC8U/RZHo+wERIr1SHBzsI7XEh8iq71i2bQbyUvY1PLp1B0KkbzuGrkfbCjvJyZrtakWIkcoYpBCkGHHjkD0yAaMlrfeEmEMhUAZkTidNxI0cN4SACrJ8Rwm8IyAxQiGVQBrB5WrFx7/6FXo5Z5Ye0E01SdT0xpOMousT4xCILm2AsU26KjGnmO00yQw9ZcA4J/gFAln2F0XEpQxwCmFK1dQTYwBNrvwoRUKSBHgiPmXmaVoXGRJlIZP/HUPcTMpi0+fyuWQB9WLIUusMpGamqx8dwQ9I6fEmFUAvX5d3jteLS8R0D6+q7LepDBGB85ExwBACzkUSTZ4zjMZYTWVrep8nCqkUmaybi0cprv1vMhuAmEFMZQxVU5NILJYL0jiiqylKC1LyKCVoKs3B/h5W
Ctq+hxevuGo7+nHAR8npcsVnz56DgQe39pmZ/D0pm1PslAOFpDE1R7OA6hxGJXQIrNoVvm25DIEwOnTWtROLNcIwjnlML8bN62JAKlJfF3IqMElsLAAykO3p3EDXjfgQUdJQW0VTTWkmM0zdZPZcLN6XsAkESBJc8Axjj9CSW8dHHL14xer1az7867/h4fvf59atW/TDyHK5RKmeumqYTmc45xidx1Y1UkqWyyXLbqAfHM1khmtX9GNCN3vofqDWllvHifPzBVpa/Jg4nV/Qrl7z5PFj/vIv/pLTsxUndx9y/+G7jElwdnbOi1evuPfg3pbFws4i+UZF/J+Pf+DxLUSBNWt22iimteTW7DYpHBMaTatgFTJMMxeaJtnsIyw6wsLz6Vdf8Ff/8T/huyU/+t5tfucH/yN/9fOfct4Lnjx+xVeff8LZ+SURj55kC4xhlWntVTWhmVaMQ2JxdkllDIeHewzDFbdvT/mN99/m5XDFgyOLD/DTD7/k61eCi/YVz0/POTne5/7hAZMmYGKFr064+2ifDz/+iiePX1Lv3WL/5AF7Dx8we/sY1Sx58tELVu0lsW9Z2T2CrEkvT0FpIoI7+7cJdoJzjpXrceMVzcEMFQx39+8weSiYmoigZe/gHtWsyfOetVk2KgKh7RijY35wyP17H1ApyeLzX3L25edIf87dh7eoDmri8pzulSNVj5icNKiJQtlD8EsOH7zFdP8OzfEJYs+CO0UQiaMjBoE2c9AWN5wzLj3N0UPQiWkQJBlRE8DCdL3hihJ6iVRzmJwgJvtEqfG+J4SIWSefy0QuW+cCOyKv8fLmIxFLIU4KwwbZSoU9V9Z8pHVjS1tmz04T/P/ssekbaXO93/x7YQmK796w/v2OLViwa1q//ltKosy1iTh2ED12Znn4vbeo9cDV5Sm2Uvgrz+g92jZU80OsyEV6FxLT6V62pnAjbZ89qU+Oj3j4zkOm8xlIw9fPXnN2fgUiUtcTdGVQxcdL4EFqRCEi5I8W8aPHaIHQeXPqB8/p6QUvnr/myydP+fDDT/jiyRNmsxnTyYTf/u13ePvtB7zz7lvcv3dCNW2QwiCLD3YGCRLJBxIRmTdOrMvs14YwqTCmZmaP6Jd7dC96hPA8PfV8/HzB5y97zseKXta0UeJlAqkJUtJGz0XoWDEwjrDqBW2s6EKdGe1kT1+f9RusYsITkBQGmtFZjRI1WijGJInKoqXEBsc0esywwKSASgOnz17Q7B0SdaLXkA6mVPU+e/v7CDFQK8Okrrl3/wFaCayRUKyfpFLY2uSgP8r+W2VVgdQaaw2+71gDqjmoJQPoErVhUElyYm8s6w1RAIYQEq4PNHaGjomPfvYhn/30V7z8+gVtuyCmDlQm1qRkkFKjk6AbAnHMfsKq1pDIvsmihKmlzKDcgPwyO2OG5PB9S7+6wOueYchp94qyfygMD6EkWSmRAZNEQqaRQqHLIFJaq7cg79JL00zXpzuRN2qbv7HDhstXIHf2iTt7vTcAT286vimb/ObzdwHAm++T1/FbUtBGbVeAq42EdFNQvw7GRee/8djue/46FmFe929BQoEoZKRyjjcAiNdev1aTvYHFdvN615/v28C8N7HjdqW33wbe3WTH7ar11oqz3Z83AazrgEpRlCdaa6TRxCSy3dbO95P3k+Ver0NyxHrtvr2u7wJp18d3AnS5HWbKMilLaqy1NFbRVDVHB3vUKmJEJOGzT5tWRKmIJLp2gJAI3jOOHucCMWTZZZZDFpN6sgeONSoz7lQGHdx0hjIRHSNKarSxhJjoup627Tm/vEQlTwKUzANLGEeCW9EVZlIyBqU9SWbzdSmz1A8kQ+eLtFDlmxqyhNcTUcrlyUEqskvTen1R6J6C7BEUs/Y9pmz8lxPwdKkwCVIYsvSNrVxpvWleL2SEBJVjXklJkaIGApOqzlKKYgAbw5rVpZBaZiAiRSptMNYSk2CxajGmxlYNVmQ5aIpbA9BYGoRQAjupSQWw8jEhokBFUAV4GLrMYIyRsnEs51G6SHFzymSMaafN7Pw7rs1bc3110wnXTygDj0p5kF0PMhuAzofcgZzPbMRSwdxQd9esplLBzCbXaZNcI5XO1hQlSCIlcYMJVAaLzUdO2zUkcYuEl1xxITPLTpQ0oE28d0pQgJT1+TMDrtRW05YhuEYBRQFns3yxAG8iAyKyACPaVmTkfd3JC4KYsseeUrkNyIIOC50j1aU2qCovjEIIG4DOjWNmjvUDSEHbZbbZMDjUcoXSCm0MtqkxtkYKS8DTMaJlADXLLDOVFwnBO8ZhoB9djm7XJTFXyS2QXY4svUy4cWCxWADQTGf0ztMX2XEq4HaIgRBGVGVZrVa4IfchrRVt3zN2HXVlUdYyRk8IiWXbsurazKqMicE7hLZopen7nhRTYR5WWOuwVUXf97n9KZUl6imRQvE5FBIlQ04mQxBjRjRSDCSV/eAynpXbg7WWJAKIkkKdPMYI6toQW8HFquPpeeD56pDZ9JDjeoLSPYHEMnjGkiidXMR3+TtKIWzkxmUwLv2rTHoRIplFWgjTrGcAKSVaZjB7PSmtmbtVVUHtEMoQhcwsqpSrwqRQPn8sEu4SFlHadV4q5/cwWmNkHjuz9NpgrUFpRXKeurJIK5moyKw2zOqKxlj6lKXmlDTeJPM42g8DKSYmCFJjqasGqRPjyiPI39m9B/e5GzSLr18jpUZqRZIFhC8BItFBiB7b7FGbmsWy42K5oo+Bo3snND7x5OUrLi9e4UPk4OCQ+XxCUxsWKlHXirqeYY3i2etTXp2eEWLCicTT83POFmd8OjE8Ojnm4b0TTo4OGZOgHx11M2Vvfsjr8xU6RA7riqmSXIp8fb5dsQqOC60yuKU1SprtIrD07WEcCMkTUfjk8cnnLWBJS5ZaI1UulkivcrHIZTZ4KnNlPZkglGTwIzEpjLV5m6cVSWVQMQPcjigFd05u88Gi5fGXn/H1p58xOrjz9vvcunULyOB612bfx5iyWbixFh0TDB4f8iZVm+x/+fmXz9i3gv3ZlMiA0ZG7tx+geMXyquP+8W0Wlx0vzy/58MOf8+z1OScP3uHHCEbhef76JZ999hkfvPcOs9lsw6KLhZF7s3r5z8eN4xsFp+9+WoG213ygIvsPBCJRClStUUHjlSApsDJ7EUmfU7GNCIjU4Zc9+6cveMsMPD57xk9/esXJn/5LfvDbf8hf/c1P+PBv/5bLF8+JIdGKSOtPsaJnagVzIXB9z5Pnlwx7PX/8+z/m3qMH/OrFr5DNkpOTKeF7U57d72mvXuLCK56+fMbTs4+5XDwnxEtOPqjZNwP91RlhsWA2P+HWO2/Rhjmvzge6seHlE8GsesXJo4r5geDOQ8u0njJpPuD4h3/K56+uOPMrHh7l8Vs3h2D3EdESU6DW+yg/h3TA8d1Dps0z+qufY/cUk4cVwlyR1wSK6D2RxHSyz2/8cB9EhYqWeLXCHxzQDa/pY+Bi9ZIJCe9azi56olU8PPkeKmra1RVhbJnMjpg9ugXa4NwZi6sXYFZMZ5LKWohnMArqRlLJCsF
rqDRS1+A9wr0muSXIFokmrQysHCJWMHeQTmEQyFgjxJwk+rLRKMztjTBuDc7lOWMtdUUmkne4VU/Xr9CVpGpqdDXJ65hUNsQ7OFbaaYg3Ial/8uMb4Fy88fjusd6s/tf45G8CA3fPuy7ya1IYGPsWkcbsLzxJ3Hl0xMGVomr2ebXqIWmaZo95JZHJE2JETixIR4iCqp7x3nvv89ZE8879I97/3rvUkylff/UC1/+SxWVLRGJtjdSKEEf8EFA6UNc1s+ksqywAhERXFd45Xn3+jC8ef87nn3/O0+dPeX16ilCS3/rhb/Hf/9v/idEHTk7ucOfkLsZYJJlxLXye251LSBmKRzjZX65YCcUQs4c367XJ+r5pxGSOmt+mH6ecPQ+cL8756uyCLxeBp53lVYw86yJCKqIKGB0QKtDLxCImzkdP5yMpCIYocMkgkBiRR8aochiErC3GVOiY8NJDAdCUC8iQSNLgpSIEYHAoP6DOrojtAuElHxzeY7VYcT6ssPsTZseHRFMRk6cfe0gtkz3B/uEULSDGMYeuCYVSAqk8KIhS4IKnD/0m3GEcFBqDxOQ9iShm9mK7jlsvJ3clelswKxDjgJIDKQjGYWB5ObC69HhnUFZRNxP60DH6UuRlJImIsgKjbF7PI0nCEIXO35IgJ8bjSxBjAZnDSBo7fLcg2YgKIqunZFaxiExoKstcSUqSKD1RaZIySB2QMeY9OooUx7yvJFK4QyRVfK+5Dsas1Wl5fZ3K3u56l9uCQDfBoDcDQ+uZ9dqeeAdIWpMwrn+O6+dakzR2paa7arIYCzj9BoBu/e83f7Y3/757CL5ZAN1ltP06cDL9Hd5v99pvMuaE2Hr8fdt7ra2u1iDcTaBu7WF3E4S7+Zrd812712Xvz5qJunM7UtqGPWzARVH2RwXME9mkDlinuW7oDdvHvy0anV8D0DWTvVIlcZnBJPPmYc0OGoYBZRLabhlXyihUVeVBulybd4lx9EX6KBAxI/dhGIGih5dkMMg5Bhczoi9qfMwbYG0m1NMJWhtms5FhGDm5fQuIKCXQUjCMHYvFgrZdMI4eISUuRtpVSzs6xigyky4KxuDxLickVkpv74TIdFwJxeQvf94oc/gBbLXXUmukj4QYysIiDwzFfIn1YvcmUhpTzMBkiaSW5AUvQqDLeyWlSEZlUKC8PgZZDPVVMavPPmjW1tR1lSs6bY+1sDeZ5BEpJpJPJAVJCFxwhOAI0WdTUZO9nbQ2aCQhgRsjw9CRtEYIhXMFuBwdnoQQgSgFMcoSkEFOHlzfF5mZi2H0m3LFbiPeRDffHDw2e9S1f1lmaImYJYpaZ5+2NSAgStWHApAlKFWidfUgbQC0WPgjsfQN0g5Al7/W/IxS7c1AXZGuirWMlgLIrH0Hy+BQADmxlvqWCOXcQcWG8bfbBvLH+GZlJP9ZsZG4FumoT2XykeC8R47Ze279Ol1ZasA0dd40C4m1OUTFjxlIG/o++7yZnIYoZQvkVNax7wgxV7PUyqCVxVZTdBxJdqBva8Rco+aasQTFCJnZScPo8VXEKE1jLEsNQlyvVqzDIGKM9H2fKcXl+teT1Dh6fGFsxhgZnMsm/eOA1RpEDk6JKVI1Nf0w4GL2BAgxkYTEhUjvHOM4cnp6Tm0MSUjqpmHSTDMwoRRVVZVUrEjwPifLao1OCSVFXhx6CG4khkRKgRQiY5G/phiprEUpidYCYxRSa4SMRa6YAaoUJWGq0U6wioEz5zlNkVnwLMYBH0ZOewhSYeoa0+VrkSHkZKyY/TBjDDsS6926MUhU9gBVBmRCS0lVWWiv+zpImWUp1laI2qONLmnFiSBSZtOmLNP1KbeFFClR9WkTr77uz1upZdx49VS2QglJ7zyTylKbirmGg5nlYD5nNqtQo2MxOtzlitCNYDUp65wzK2z0jJ2AVKOVxZgaCkN2OptxckdyNkhS5zNLNq43iXl+8j778cSU8Ai8FIwp0Y49V/2IaiYc3znm1etTXNsTCSSRGQZ1lU2ipRBo1SDVEXVj6H0EY1i1LWenpzx90XO1vGQ1dHSD42j/EGunTKYNdT2ipaVRiXldo2aaeVVxtVzSDyOBROh7LpYtUmtiTBweHmLrOrfDXm+YbjHHlm36myIzbtfjhlKaqqqpq4au7YvPSwlUiolhGGnbHiE1RoHUMgOaIpEkWR6nMhAzaRoe3r2HDgMvLk5ZdD3Pnj6lme9x584dmqZhGF7Q9x0kqOqKGANudFhrsNLSD0tWXU/bjyxTYtXDg0cP8H3LIr3izu2K1eWKia25c+s2V4uO3kUWi0teL1teLpfUR4ccHByyGnvart36pMS82EkxFjbrPwN033UkuAYkiDevcbdPXld3Nw/kJOUkYg7KEREtsqdbCJ7kEjIahIZki2G3GxFh4M79A77X3cObU3758Sf8z//Lf+DB+++hdcPZ82eoELn/4Bbx9GtefvWEWs54dPdtbqsZp8slXddT1Qfcev8Od3/wiPppTzAdR+8/4tmnF6THH7O6XPLV0yfISvOjD97FNu9wcfWcaT3hwye/5PGnH/Ls2WPe+sEPeST3IWjuHp1wYPbw547XX3zJ3jRwevYRP/vZX7NcrPjBj/5bfu9P/pD+IPDy2a/o5SXGNAhTgaoQQVMlg6r3EHoCKITv6IaXXCwG7p8YkuyJ3fPsP8UMYfYRoiIlh2JECklyAakl8/tvM7t7xNmT/8JXX/2SQ7nH2z/4bWTnuTyN+LrG7k1o7B3GtscHSx33QVhUlWj2jgjCg+1I2kHI7DSRJLFb4C5PMWGGqAHniBdLgmhR88iqW3H22RmqnzFTLXvewYFBSIuZ3CdpRew7lFG5gpj0DkEs5aJ8yj+SUrGKDr/quTy7pO1X2EnFntjPoH0JvUopV4rF7n7lBjh3o0n+0x1i/anW43DaeQyubbjYjtX/VUC6m2+x+1h5OCXo2par83OaOlHXDiFGtOoQM4Oop1gLQmhiEAxDRMZA1TTs7zXcupWYTOacnNziwa0jpsP3mdW52O1Hx7SZUVcNta3xZe829gMxeXQtqesaQQ6mGtxAZSRD2/PFZ1/w9ZdP+OKTT3jx/CQUVQsAACAASURBVBmrdslbjx7yb/+H/4479044ODrk6NYxpqpzoU0YhMiFKqXN5hrNGpAJeS8jZG6Lqex91vsuEJknFfP6B2ORt+9w+If/Ch81T37+M74ennEeJ3Qm0kZNaytcHEky3y+jDTEplLdI7wmuz2QKsjJBASaV91UapxRJC2RlMQIkEmUkUkQskUpmGCym7LOmTENVTzA+EJJkXPUsOs9qdJjJjJOHD6lv3eKry47VYkkiUTWWSnlSDPT9ir5dEf2AEtnjt+tbQnQECd3Y0Q49zXTK8a0TmnqOSCbDAklkK6eU228oa8IQMms5xpi9fcseRIrMFLLWkkIHKQNus2ZGP3UMncOlnqvFOUGDVDYz2oQA7wkjJO/QxmY9WekyBS5Bxny/tFxbUSWkTzCOxL5HSoMRpaC43rkJQZRFB1CcqaISJKlAG0SM6Ji9g4WDKCMxuU2fuQbOrI
kjabd/Xz/EBlT5Nd10y+rIr9sB29Zrl5vMtWvv8wYW2/o5aw+6b2Pibc5142+blNXvAP9+7XWR0Np8I2Dh29hmNw9BURh+x/uu94UbT/k3nO8mwLn772EYNufZ/VzrY/f+v4mhZ4y59vvNfXqK6/tc9rJrFl5KyKQxdicEI5aiVZG+ip25bT0vpLRVyglySOk/mEFndAXJIWUAstQOl9ld3vuSJqlKUkhmXQifwyKE3krxKqOZNRKlDMZYUoBxHLm4OENJRQlwzOd1I27s6Fwiao1LefPeDSvawVEZi1YClRJ+HDBKUFnDtDHIRnPQSIbO4hKMUdP7xNWqJy4WjKsB5wa6IctkExKtBLpuqLQpQQ8ZM5VSEnb02/nm7syVZXVxreMktWFghbKIzS9SsNN4QiyUy8255Ma7zBeWfDZ8FxgtiMFB0sT4TYBOqQwUZrCvSL0ShBDRCFLI1TKRBGhBTB7nHc4NMIDUGaAj6Swjk5rgcxiEtbm6kVJCRwkYtJTEmL2htgPBFtXPlOFyO2IW/IrEJjhh937ugl35tmUZXLgJ7KRtkoxIZRkU48Y8nJhleusF3aYjygShgIZCkkRO7I2prCXXTIx11YT1OX7dQLYd5PO1SIRI5TvcVjc2nqvyeqfftpfrMq0E10w9YwltIAZS9MhUNtUikWKWLaaUjdeVUtiqoZo01FWDtpqmtqWdiOz5JrKHXfRZsjiZzPDe048jgxtz0mrxZggx0LYtKjhUe8lyIRjPPGqvI9WGcdlivKTresRySQpFIi7y4k4pwRgyFT+s1dVrMA6xkdju7e3RNE0G71IeO7LENTKOI0ZLbFWhYNMulFYYWyFNZgqZylLNFHYcWLYd0jkmMS+Qks+y8P3DPR4+ehvd1FxcXHBxeclqteLy8pJ+tczSe73jsSZ3wajtZBhjllioKAnBAQo35n5opc2sInKq1KSyKGnRekbqIq9Pn/BfvviCV6szvrisuXdXImJgeV7jxAHT2YxOJAgam2RmXqaQfeR2Vuu5QgNSZnmntgZdwhJSYRTncJ8xV5mTyD6HWqF0CVHQJgOKShZ/zML828gpc3o2cTv57cDZuY2EsPVtSDl5ePPclAVRVmgmRjKrKmZVzX4zwZhAF8DKJT7lSU8jqU2FiQEitG3H5RVMmFLJGVIaXOcZU8RYy97eAX1ckVIkxOzhKWVhCweP0jCEkX4AJ6CaTxjPz3j89VdgLYcnd6imEzz5ecvVgv3ZBGNh6CNWKRSBw7llMr3NKA1OKF6cvmbZLfEpcLrq6H/1mFevznjn4ds8uPcOCMsYBEpW1I3mYFLT1Pm6F02Di+BSYtl29O0Fq64jxsR8vsf+wSEySYyyKCnKWJ6yR6dKyJgpuTEFfBhxXmKCJs/NKnsukrCiojJ1DhAp5+hXS6LrsTYxOZwg6zpbShiNNmUek5K9+Zzm4SMePLhPV8348nLJ89NTnj59mmXJr18zmcyY7+1TVc1G6qrsBKVzYmwIka4fOX74Lrfuvos1ezRqQiUMrl/Snqxwo0fK19w9uY0Tgs+fv+JyscIvTvnk8cccHR0jXS4osPZxWS9K4brs+5+Pbxy7245dHOTbnr3moa+fF4ERwYjMC9GYJdx5POyxcoWqBDo1rETFz58+4cOf/jnD889Jo2Q+u8voLpjcsfzr29/jL/7Tn/HXf/mUnkNev3jGW3cq/vhH73H/vcD0aMlkepcf/8bvMHGGjz76gvqywhrHxRc/wQyf8PzyJW/91u8gQkMXX7I8HvGiYrioUG7B0Z2e995/ny6+hzMzWgevqoDG4e2M1+dL3OVAvfSsXvyKr16d8nwW+dnfuGwav2gZ+5GJ/wVGHHAppyyC5zktU3HJ7bcWzJoVYbB4ZxDSoFQixJGhX9E7CGNFeN1BPEfUCTHXYBsYFSE0eJlQtkPKxOgjLgmm02MQexy9VzE7/j6q3kNO9qnlEmET9cFdovQwMUymEwg1YPL4ryY080BEE3mGDwuUyGqS0LYMixZ/FRgvEk1dARNinKL3TyBE0tVz3OUZfrkijc+wIVB3U7yyLFKLN4fsHz0EDErpAtKlnEZPJKWADwMxeLSSaGWztVMKVNag9AxZZb/ba8e68MPWO+nb2vA/7fGmjXvasG0gMyi2x9Yb7puP/WM+w5sfFuSUx2dPn/L0q0+4fVjhh4bgLpGMaNEwm9WMQyrr+4rp/JCDac3+0SF3357wW//iNnv33sYYhUUx0QYjSlE7eKQovm+xgE0hW8WgEorsgZ3tGSLGGLzzPH78Bf/nn/0ZX3z2KT/6/gf8u3/3b7h7/z71ZMJsbw89mWRWlForqfJ9Sgh8yBCO1ob1OlsgQKktBpryBsuliLTr4ID1Cr5IgSWIvQr9uw85tI69JuDbU86/Hnh6OXIWexYaPAGhRqZW4dSIHRWz0CC9xkTFhVzRywDJI5LGxKwaCgiSzoAcukA5grwO1JJGSWqlMrkjSZywuMk+8mCOODzCX17iFi2LVnBweMDxySH7J3v0jISLDoViolWec88XvHj+lBfPv2J5eUkMA4qccN/1LV2/wpO9mQfvefDoLX70e7/L/TsVE11l4kjxYUOm9XZnu3/5BviTn5uSIFEh5BQlA9G1DP1I23YE7xE2YqoKJSNDkozBIRNoWWGrCo2A2CFTtlKKRGKygEAkWfZx2dJJJUmUKuddORCjQBuR9zopq1iyeo1MNimkiKQ0yURUNBnAjYmQMrAiQ8SLQEh5dkvXBpu1NLIAWeXCN2Il2Cq62L031/vjd4Ne1//2baEKbyJv7AJNbwLm4g1Q6VphY/e13/Xp/g5rqJtA4C7YtyWR/P3Ovwvu3QTVdsHF9fvd/Nvu77spsjfZeBvF3o2f3dcrpa79vrm/a/yh1IZT8denWHHJG5/52n1OWxwiFfbPdnW1xUu+izm3Pr4ToAu+J/jMuNJSZUmdUoiyKRtdYJSZLhqco3cdUQqEzn4k0zonsBmlMdpgTPZHCzHgo6cxBm0Utq6oKg0y4dxIP7To0XOxcBAEyY+0o6MDKq2YNA11ZSE5fEg4Aj0BJTKwpGRGueb7txhR7DnPQTuw6HpWnWPRtrRtz/NnzzHK5M8nNSIGvAPvHUIHZD1F5BVP0b2vNzKZcutcGezWLDiR5Z4xBoiOMaYs19Vq46Xm/U4DKwCMEBTvsrT2s8zIt8zaZylFkcdS9M8mpyLJtfeV2jBcKlOjlcR7R200MZSBEVEAo8K70YrgPM6NRB9xPuaB1dYoWZKKvCeSDfcRCtPYEqUOjI7QD0ghCyst5/JkVk1x4UuRUJJcpYjIIEpoRPbEW9Nzb1JOc5BDym15nZ4uyLRlkdk9KkaMzMBYLJN2SuuybJ6yjTKZ/k2pEBVLFEIqEr7SyXYHzd0KpVjXUMTmb6lU6RCRKNdgINcmOcHaP4xr6Zei9NzdbinWVdfN7kht2lnwoWxMAynm604qv1+I+TqzrC+nFpOWaC0xxqKsziEK04a9eQbBjLUobYpkPQ+szjvsODCOA2Pw1yoRIWT5knQ1bdtzubiELmHnNdI5lgNUV
4Z4uoeYKIalw7tUqviebHOf21wsIK2pLIIcDtE0OaBCa433nrrOAJdzI971GGnY25uhpWToWvp2RSBgpQYlaGZTKltRTSYobej6EWWXTGOithW3DvY5Pz9jtep4++13+cM/+iNO7t3j9PyUly+f8/VXT/jss0948uUXhH7EuYAIWWuqgsmsyxAJPuWgnMLA1EIitcxhFUIgR4EPAxMmVJXFKEmIkstuIMmK48ND1BwuL59y2XXMfU0vZnx9viK4gdALJo3ENhYz9gjfQyjgaNh6gggCQsYihwajBVYajFGZr5sC0Y2Z7excllalRFKCZNaBEzkII8rsKakQiFiA8ZT7h48lcCIJNkbgKct6k0wbZpdcFwpISF8W7jEitMhjhdRYmQN0lBAomTBCYCVYpbh35y6jmXLRR65Sj4jZGwUyWLjqHadiiRc1s0lFlAapIDDS+yED8EW+oMnWCELG0vdtnlxFAciTRBiFl4LX55d8eXrF/sEhk6ZmajRaAMHRtx1d22Hn+zTNNMum/ch0b8Jk/4CUPGcvn7HoIuMowAX61RlXC8/5InHn7lv0UXDRdtyeWmotmRhJrQ1W1siqwiP5+tUr2rEitNC3A5enl4CmbQc6F/AJnAsoZJE5Z2lKiIkxZll4Ht8DIXnCGIg+2xQkLRFSZxafmWK15LQ95+LslKpW3Da3aOoamQy10lRKImXxWFQS29TY2iKERS47jMlBEG3bslguEVLSTCaQAkZJ6sqUsRDqZsJsP3F2ek7dTAkJfv7hp+zNptzan9COgf1bt7nqVry+OGM2q7ib9rhanrMaJEkKlhcXpHFkKiRD3zP2PcG5DCivh8l/Zs/9w483rZvXAOi6dlZ+MrgiWV0FFs+WGDEiD1YM9hXDcsWwFFzZA/78w4/5iz/73/CvvsIPhr35XX7jziHvHwhW4wVJBV6cvsSrwL2Hd/k3f/Ij/tUfvMUXX/4NBweHHB2/z2++9wFnL57x0Vefsrpc8PL5C57++1/iQqLZP+JfvOo5/uSM5xfPObt6gU0Ng5bMG8ulP+cXX3xC6/e4/f73ufvuI+6eveT0s8+IXc/5y1dYHYn6ioErDurErTszfvHZh5xeOE5uv8PebMqzrz7jsy+fcP/7f8DJBz/kl588JXVPOGkE738woTq6ha5nIBOXZ0958umv0NQcNjOMbHj++HOOrhpmxwF31mGPA2pqUNYQo8tgRmWQtgIvacfAdDJDzSuaGpANJLD1MXqeSFoQYw9YpKxIviJ6gTQggmWkYXAKaydoIRlXS1zfU5kJZjpjefqc109fIJNg1txn/+gOc3sP584IneDo6A59GvBREc0c9k9ISXD19Wsuh5bp/ARlBkhbVhNxs5AihQHvRkS0WYYoFLqumU3mxb+DDGIlud6b5PYlrzfFN3NYCoiQobxvabs7MPQbKaL/WIBsvakqkyM7Mtf1n68FN2we/Mcf22XnBpeS6wfK4ULg6YtTPvnkMavbM9qrCf3qNUIG6ukh7773iFTVHN0+YLa/x28+usP33nnE7Tu3me1HRHUFasogQKdd2w6wTUVV1SQE4ziShMRIiSpzqpKQ/IhIFab4uQQ30jQNv/vj3+fHv/87vPPwPsd3bqNMthUSSkPKa1ukIrHdIAuxm1q5DVjbfK2p0KZk3pMZoQqIRFlX76zlBVl/P9fUv/cBP7h1yCsil3/xn3ny4ZLkRho1Z9mPCGnAqPzNxlwIm6YaL+CKMZdEY0AUqyApcoqjkYp5M8Foi4wlmE0oBAIZA9o7TAFPxuRpo+OSRJKS1mqupOZiXOLUIcu2Zfz0FSL57J2uJX03UsmaF5ctjz/5gk8//oiri1NIjhRHRt9jjGDwPbbSJCWJBA6O5ojoqK0ijG6jTJI6q7pk8b9NYutj5mNCiYTP2uFc3HOevusxKiCkRukaqRuUrnJDrDxD9NkbWCqEExDzWiIFxehHKl088wrrLcfBSYTIbuxrldrGHy/JTSpuUqEUnROkVJQOhVOnBClEUBIZTW68ZR0rfcyhYSIziXPhNt7wLi9A05ohtdPhhJCUb7oU7L/Jert+fPNv+fmZZHNToroLajnntu18s6bZLZVx7e/r167PcI11JgXERCAWIsu3MNx2AL1vk8Kuf1/vCUVMuc+lQmIqAXlvCrG4fqItWHaTqfYmZtxNj7hdiepNltwuAHfzHl2TA+8cN8//phTYXa86KTRr8fYuuLa2RtoFuCU5aFAKSEKhpGJM3zz/9vNSbLm+/fhOgA46MkLi8UEQHLi+I8lElA22yXHcyuQkSbxhcAMu5ejp5fkZk6bG1A3JO4bREbQiJUn0IQ96KKTUCFUTpSRQEWKNEj13dUAEj4wNMoFzI2PXMw4tY7/EThuG4FmsFrjXnkpbJtMJlckpeOM4MoaYO6pVVFikyUChTHC4v08MkbqqmU4mxGFkjAph8yY02bwRF0oilUZKTdEHEUNJGyWToGXxp4gyEImEFEnSMKYEY+4scl0BSJlZJzPGUszePTEJYtiittokovCI4BEpFr+yIvPMznlooTM7JgEkYjGpl1IQdUJqcpKsWiPtBe1KinpS4WMk+YBGosiyqH5sGd2I0DkF1PuQ9ftSI4Qv6ZEJH8sgn7ZUVaEVSoQcjFsqGUpKjDRobUgpZjkb4hpbLqw3miHgvMsgYCiTnVS5nViLNjmBJ8ZInxxKieIBofB+yGEDbiAlgWnmuBiLByCQPCk4BFBpQ0qS0UeCjwihCvsjkAoLcM3SWA/iIopisJqv2WoLhWG4cQFL2QONFNBK5nQnl70QjFZYY9EliSmEEiQiC+CqFFGITYXL5BVDljgSSSLhfcp+gUriXcQ7T1NVHMwOqK3GKkHwA1ftgquxpQ09y77L1coAWlsmkxnTZpKNLquaia2pgsONPd45Ygp5Aog90RvSeERUMHUwxMB4eQ6hRynN1y8WVO0V9fyA5Ef8oBFxgNiByrLQdfVMeEmgTIBJMFc5kXYyaTBGslyuGIaOmDx784Z20TG2A7qpmU+nTBvLMPTE4GmHFrmSDG7AhhFjKkBidYXwgXEYkbXmzsO7fP31C16dnXF+ueDk7n0O9g45OT6kMXD26ksup4pqfw4RLk4vGYeAUDrTs4O8XrURGXrsnCcW30MZImKMWGdzsErySGFwI3gU52mBjR5jBcPQMfRTvJfs7d3n6dlTXr26IPgl0tagLSjD8y9/zi9/MiH0K0LXE+JIFCNIhbUVlQTtIhqIwWOnDaHvqFTiYDYlxZ7nKhFFYJkiC++AGt+PCCVJSuKdQ0hBlbLPZhgT2WvIMvpV9ryMkUoprJJ5DJICYRUDvrBqE2L0VCEXEpYSsBKXFSlYbQj02VJgFAzLnP5sQ6C2NdgpJngUFsKSmDzBeIJJLMbIZNqwEBAIdA5cyEzLpBOekQTUokIOMLoBYWpEhHE5EE3ETBTTvTmhHzC24t6jd4kXHX/7i0847y+4f3TIdK4Iyx41C9A5uqUjhpb53hSUpW4kEytphOPh4YR49xZfDC1npxck06B1zdnS0z55zosu0kxnpKbC
y0TdVMg0Irxjv4Z7j46Z3Trh85fH/Plf/i2LfkRjGfrEs2fntMGh9qaIypJUjSAi0EhRpBsyMxMrU2OURoRIZXIys5QqS8C1zqyaURO6hHADVntW3RVnV47W9zw0NbU6YOIS0SfiaKCa49SA9HmxHny2FpDK4ENk7+CI23fuMp9NkFLQdz1jewkBbLNXTLsFy87hkAgtsI3idXvFYDR3jx9iTaRdXjG/d4/95TnNXDN7EXj5Zc+z7gqo4PwKhoiaToqnTNzZA6dshs26DMN37Id/PXfs/8/HtjjEm9GP7TML/pBAJIQCkkcyIpKEVHP2quPP/ve/5vXLj7l1b+Rq+BUvXj4jihkXoebzVxf0i9eYVYdaCMYXPU8ef8zJ7x1z/3sPUHfPeHfvHpOq4Whe8xu//TaHd++wXLSozrN4EvhyfM7hI8u//NMf8+7K0Y4DQsAXnz/lP//53/Dy//gZD+6e0UwCbz065PDgFq++PuUXXz3jeX/BvXvfw3nBz5/9FXv/1094Xyb+9f07zG69xXj7hHg8chn3+Iuf/BWvQ8fdh7f50ckPIR6wWCn6/pz7d+/SD4af/vJj/tf/+BNOVz135iMf1ImjcZ/DQ009ndKvXnF28ZrPv/iai0vBH/zeHzGvKp5fXXKgI6vTL1kFsPfPCdUvOHrwLvb4HbQ+BjEhyoCpNVrMQGlEshn4GiIYidDbdFQ/LnDdOXZ6nAEvY0hSkpCkMCcOc1IcEMKQVgbXemQ9ZzKbMX94H9GcMnaBYQF6/z5JCdTQMj/ZJ/g5Zm5R4h52fgAVaBW5/5t3uKcbdLVHXLaMiw7bWHxwOD9g6gbV1FgTMUYjRGbyCmXy9WQdyoaJswE/ytp0F3R6kx9dLOu2aq1xfNNR1rybVNX1hu8bXf7vD9Il8lyfvZMzQJOtdyMpeFzXUTUNmJqtpc0OorbTtf7BR8YliCR88TNOLuKWAasU1VTS+ciLs56LRSL2K86eXNF2l0Qr2L8/Zfqoxs+m/MGf/C5GWx7eucXBtEIqiRIJQd4rSbLiRstcWCV4Rgeni3NeXpzi8NTVlBAdzvVUMu+hUvK4doHrW9zQ0jQzHj18yFsPHpJSLPPSzv0XOZxK2Qz8w3VwQJQC+/Zelop2SsWsefu8klWydolan357GFmahqQ6PuD43UfYjz4kaUkVKxaLSBOKjNJrolYIaxAllFBoTWOPGdNIDB5izCCcVkhraJoZE6HBC6KqkLYmqQkRi0FkmewwIHAQF1ycd6yunubgNJmVJEzhtHuNHCQp+hzIioYg0CIRuoDowV0NdOdLfDtgVETgseS9m0q5oB18QFcGowekaBGiRaopStlMuEgJVAYXYyoJqz5gpMFoA8EjAiihCC5gpaJpJM73SD3HjwqXKmQ1YxhavO+yB7AvnsVIRLTIAJD3EDFIosjrgqz8S0AOchBrtQ4QksjjQ0ooEUnCEZIAURELOQaR2ZIxBJIv839pB7EoMSiECyE1iohIMQN56yOWkUflzmWk2kRIpJjbWhIJkiYRchpviYIshlVl2NnaMH3Dt2znJ6ylprnRZpgnZYIH5C6w9skjbQkfong7E68z1m6CXLtqrBS2pJDyjI1tUSYQsbEiEInseR8im0VU6Xp5zEub981IUn4fv6Y+b4ao/IvaATF3r5+0DZlcvyKmsuaMsTBms4PkhqRTgtME2WM/rc9RgMEMkIntcC+23nii7J/zawoAnNZWVAXki9vr2xYEdsahJJEiYyYUC6W4BtVKGM6uIjDGCH4sILNESJMJNSGCzn1t3WYy/lP6QHn+dx3fCdBpmUgyMcpIihl9j+SUTuc9UhvqxrA3raisxnnPqu/oB0eIjonaozE5UCC6keCHzMpBEVM2nRxDpHMe3Y8IWxGFoHeR6D2+XzCRicZOsFrjhaHzDp2yJ4p3Hh89Xe/o/cioIUpDCNnwfHRndH7MyX5SQRJYW2O0omlq1sEwqqChSWtklZN6ooioSm0ojZD3CSkVs8mUE40gokXI7JU0bo3bpcg06CQ3PSaJWOTAOSCDmJltMeV+GMsglSflQJYROJKIJAKiGIRn6bAuJu4gtj0LocBIhTWCMY5UUiFMAbaIJUm3pDOWSpgwpoBOuSqhjeb/Zu9NfiRJ0zO/37fZ4lvskWutXdVV3eyVPeRwRiOOIEgXHfQ/6iRAgC4CBB1EERgOQc2QTfZe3ayqrsyq3DP28M3Mvk2H18wjIiureoCeFgcYGuCZGZ4e7uZm3/q8z4JW+OzJSZOUoPJDww45kSIs1725v8q4WKCtvRp4BuR88IlQupfkObSTFBofWrquY9U0eO/xoRM/v74qGWPqG7wCrfs6pty01N+7SCanIYTD9BvZLEXeLExPhvuMwiYBXFSWqo7Osa+VyD0SqWX/bxlPrnVgfQUY9gw3ISTJRlAGGMlQcNpSAOOdLSb1CGtlAs45ihytsKwWK9rg8VHCBbTtE4dzgpBxWUPyJKNxo5pyVJGNZtU0rNpWvKAwVMWIrdkW07KgtIoY1hQFXEZPo2DdX9/oZUtrzDnWWG4dHDIajRjVNWVZUzqH9y2h85A7SlMRfaJRpSTtFpqx9lJxVA2hWdE1kbhcsW4UrY902ZNth7OZyWRE0wZW65YcRGpNjP1ErWnbluVyTgityFq7Bh88KcliSCOM2K6VjaLSuV9oRGL0rL2mSy3We6xp5C4mQ4zymuPTY8pRxbJZs5yf8nd/9/c8f37E9mzMrcMdjl48xXdLxrWhqhwqWwyaZh3xvfdaSJ4QvPRLlXtQQJh11mYK56idZTyumU0m5BRYXrYs2xalx7RtpL08osyeKidGoxGTsqRQBd1KYfWM6ayiaQO2KKknEwF4/YK//n/+gsKKCbL3nYCXqcOaCaPRSBa/5I3MFA1GG5zWuGG+VYpgIFktHnv9pB2CZ2wMJmWpLPYkAakMV5Rlwpgg7Nq2IzQSed+0DW3oyBoJMUgBnTIm930uSciOz5EUZIEs42wGAiSwqqBUCpMVUqW0WOsobAnBs2pXLNZLxtUhUVm8yqyjp+0iLYmgEllFutQQogZXop2ElBR1wcgYxkATgnigKMVqvcKVFd/53ofsrxIPj5Yszi6I0eBMQa0dlTI4ZTG6IOuC82VDWWqm05qt2Zi6cFw0K8ZGcW9vj5GtODtfcT5vKEaWXEJ7doFZrgjrJVs7E4LKmAzWKCZ1xd3btxjdOuAsdLz//je4cz/x+IuXPH1yTLYFdjIi5ozPuU8QN5KMqGO/WRGQTvdFBKku98btqg9ZQRGyJgZF7oAYpbCC+Dou1ytWywWqHlGkLPchKGKCToufojHQNR2X8zkhRGazGTFFmq7BtZqycKxXc2HtAuM3sQAAIABJREFU2ZIcPMok6Q/1BOtKjNOUY8e7H77Psk28PD9jq64gBsJqwcG9e7x4OMc3l7xxsMN8BQ+eL4mrTL2rqcZjkcoZIwubJMUT2WCkjfdhP91cHa/u5/8rxOj+U77yUAgSfC5vfItVL19UiBm5D4rReMy3vvdtfvWLBb/4+V/w7MmvhCOtVuR6Sgo
GjP9V0nGnTW0Hc9ZOh9JPptNXfYzF5eRIdNNoRA24lD1dC2sSlOv7QQD89TyoiD7XxBjHnjmCiARke7bgkp41wlYKDWxBBJiK6XKi2DIrIr7Z0x9NIO4SpQIsJrlRaThzSIxgroFFOisRa0oi0OqCFFnHOMJ/v0fY9xjslkjMqR4Dum0wlGS+UupiztpcZIRXjdFmFZWwI6MT9x1lLXFU1dU9XC2hOXPpkzpoDE4yzac+iWdd/jQyS0a0LOwugrrS22qgVMyCU4LwBb17V477Fa45wVh7YyvuQcwYcg4r2liJFSZrlakaIEO+QC6EUZqz4ELIhmG0laYbXoM6y6wNPnpxwdHZIyzJcrKjTjZkwXI2sV6XwirntaFel9xMTMJ59/zuMXpyz6jnVOrObXmL7j4uqCzyvHvpWxWB8fcbGY432HMpp5G7hceFZdxtSQtDjTRW3JRphXIpJdYQHnHKu1MEyb0UhA8spQe0VVJfpi0hLTir7XqK7DmiQFkOIwvmVCWkJhaKcEdTNiPNlnb7/DdhFXN6y7ntW6LSxzKQL5lFE+QNvjioYrgDKOrDyZUESNRbS6dpq6ktZ/pVUxxEjkEDl79oLFvCN3R5zsjdibHlBZXc5RyT5Ixhmpo+coXDSVE85s2bt938vnLtqD1tVizKDkmnW+YzG7xp7cQmuHRmNM5uzFY+bzCw5Pjpi3a1q/orKJEFZMphUP3nqP68cPWc0vubi+wq8LAykmgs/EUBHbSmKHbGSv3AAIEtjlf4me1q84/sUYct/wCH7Q2zU0o1pkHURjAbRisV4yn4vj7GQypnYV63WH1jBJIyk+Okc2U0hrdLXk6C3IKvC0fcp//PvPePhkxkFzzKjP1MGDijx8dMrh1HP66DlxOcOcHPLs+VP+5v2f0q4XdN2Stx495X8cH/CD7xvy7DkuX7FK13yyuuQ0RMb79/mdb3+Ppmp48ekpv/zZGUe3vsPk3ojjyZL7zVs8+VTRLfc4ixWrucb+bMm9txreunXC+eya+XLJ9WzNsWn4w9/6fb577ztUa83ikwsWYc7z0yc8+fRDZucvGN26y70ffZ83v33I+MRzdLeichWhd8xXlqAOiPYQtX+XeHKH66ri8+WS//yrj/iue5N674D5vOPs6TlXnz0m6Yo7995mMj4gpp7rsxXL045cedRBx8lknxcvXvDig7/j/Jd/w71bNb//b/4NqAz+Of25o7n3JpU7Jq4Vq8U1Ic8g7hO6zBcfP2G0v+Lw6C2MngjLSWtADGFyKnqp4wqXDclA0oacTCnxKxEbVBXWjdkfHTI9nLI/OUGrWkTdGdjxsiZR8oGSasjvGXRa9W9s7g3TSJXCzqrraPYcdV26LvKWu7flN+wevwFwrqzVxmxZ6KaAHb/xY0gyU2K5XHJ+fi6kiJQ3RcGoCoMr6qGDr8RVu8Va0WKTWyWifztb2QAAIABJREFU/SJlIW2LOUEIXoAabaS4giSawbf4IOeSk4BBTTXCaUdjNOusCOse7cBZLS7iIUOf6FcVue/pVwtcXZPWgeVixS8//AXv//wjHj/5gtn1JWfPHnF9/gV5fUnuZ4TlJSp7lLXU433untzCTG+Rq300msZkrJL4ZLla0XY9IUl3hC77qdNGikWdl+6ULAx02WtF903vMGm0ktZbKXRn0DtOmSmTjTDOtJLcIyrJAVNMRabIoLQlZ4+1VdFoEz3xWIrMmZI754ocKWQHjdUTicljcVcfBnqR6slm+2Mpo5JiFMMsKK20uRBRFLYUrEQeqnSGaCl8Z5WIOor2eak+Gy0upT5nehWggqwMKkPU4p6aBuAth42BheiZR2HUkjfSL1sTBbWRw4qhmF8VkoK05WYBMNDS2RMDqAQhiPEisndX1lLXDU3TEK0jKbOjHbdrAiGxiC5tBRol+UoMJS4ZMPpt/py12TjCCsCyo3FdgJlcAJkB1B7ktbbTVO28f5IcvQDoAs5uOxJyzriicbf7/J3p/lqgKmfJma26+d6vKmrc1O1WXyI06JLfDOc5gPqw7STcuKXu5FlS/Lx5Ti+f4+573GQUlo4CXc5n8BvYATXNJqZMGyLPBsIs52mMEQ26nes+nP83Me7ISXJJNQBvJe4Rr5qiJ48w4mLOkIeuFMjaCNhdzqtEnKQgxnZioJYhJqIKYGIB6QSLSBlsZWWb3TACXwJwv2ab+kqATi5E0Xt4af+TC/2S/sHOABJWibSy+lScXrNgkwmZmzLXpY1TwYYtJY9SOFtjlOjbDK20IMw+Qmm9BLIuukeDGw+FUh26UvETcCjGRMyKru+E/aDLwrsDPOacIYAdqscl8FdDC2WJQbQqYqdZqMhFJUiALS30YZVjQWc1OibsIHQ5DJThhlMqKQoJdUreYZUmqIEtVwKlGyi6LnpwoquwAUoLKDcAleLsGAk5olSAGIghUOtKWpcKsGC0o27GaCvtc84pYtS03pNTwHt5bJeBAOtKtNmUUsKeiJBtIyyWdkXb93TRU2nHVjtPbrCC4riUybpMxLx15rHW4kMoQLDZLCQDg0sHv9G+GlpcbwgubioUWQRorSEaQw4yVgzgjKZyjto5mspgjCsLtwUDt+p90WQrBiQpBgFmUyJ6z9XlFSkGQt8TfcRoy3g8ZTwZYytL9L0AsqXtssuq6MQl+t5jXI01Fdb0RC2ou7g+WsbjhE9OPI1K5dQP2mq+tJr60t9eWh1VLq0KWqOS6IUN5e+UFSom2r5nMV+WIG4frVMJOKTdD60K8A7rviX0nhgDfQZiS6c0fW/ouo7KiD6D94GUE0bbDcNHwFNpExjWkpdp0zedd4WeD2wYamKylL/0+Jdbydq2KwK3HVprafsNAW2szLlhEYzizGpRmExh4Uq7ioAc0tKlgBwDaF20MqVaUtU1TeWwSoImjSaFwHq1ZLmYsVqvaXtxQ3NVzWg0oaoq6sqJsQDStjkeOfb3p0QvenK99/iuwxfAtjKiCyfAZb+pqDvnaJpa2JZVxXyxxFYyZ11dMUKRlOh8RCCmToxD+l7W2bKXpYTou7WtXFeVxUzDR2m3VsJQUmV9zkaMc2JSxFgCRavpgrSsqrIGhgwqiO5cSFtHV60NKilyDOK0nWSDjDGxWnc0414CxCSBY+fFSWxkFX3SxACdysxXLZfxjKu25YvTF8wXK0LowffYmOgUrPqes5zl/K/mzFarImwOSVm8qkhYWp9YdT2r3tOnkWzG1qGMOL8NG/NGxIKBOSH6NxIoSuCjbSUMuDbS+54+D+NGWJvGZLkGMRGTIkSRgKhHY5rxlL5f4pOShNhYYu4JKeMzRAzzbsWLi0uM1ZwcH7O/N5FkIQNaSxHMWmwliYerSvtZ3u5rtXPUtmKVOuaX13z+2Wc8ePMtAd8sOKuAIPffaGzd4LSitgZnbHF8Li3PWTbvjflFL4Bs8EHYgFH0LxeLa0LosPv7UuTwPSvfMUoT2atiEFdxnTk42OPevdvsqcCzh2seP3+GTQZFKeT4SO8T42lN54Vhbofiet7eH1kn2IAK/1LHPxaY+5cC8qzVjMcN0GDrgcUt
lyzGSFU5bt0+Fqa00aSArFemNN1l8K0iZIN1FldljF2yCi1/9+Ej/uQvPmG09w73fvwGRyrC/JQnj8/p2opl1Pz0bx/xxu0R+7dOuP2db/FkveDxR0vm14n5Z+dMfvLXzGdz1qtrpuMpozt73PntH/Ljkz9GOQu1o7JTPhw94we//Q4//sP/hg8e/zn/2//xP3P5i+f4S8VVc0XOieV6ydLt8T88eJM70zHP20ecXj3nzDse/Pbv8+5v/QHj0T559gzrFR/+9Bf85K/+X/rFGf3iHDttuPXFz3n+g1v88F+/x9tv/Cu+9953+Ml//DlXsyVnly1Pnl7R9xUXXc8KxUgreq1J1vHdH/yIu2+8y4vrGU9PX3DrwTtoYzAu4/Oc/VsTRrom2Q5OLFfLU07PH+Px3HvvXd68d4AZKXy35uJ0jlnPOLEaXTkSjgO9TzYaOMHP9nnx/AXPFw8ZjY4ZT/Q2yc9ZElqNBNoUowClN8m6GTAto0BbcDXKVaCcSHJgxazo5WFaYqpX8lPVbrL09SDW17W4yloD69ZzcXlNNdYc05CKdECt7abt9tWHRNff5FxefR5ibCv6rxljpbV2aCXVr2Cv/JOOgbKRIrHvmc9mwgor8gi7EiFfkvW5cb6D07yAGClHMazTYK0R53EkBqUUofuuJRNIuSPljr5VxYG9wlrHul3T+5YYLUo56UjSIneQg2iS9d2aJ49W/I2/5tGHPyWs18zPWhZXnhdPrvn08xecLTqCtvgcMUpzNL3F3Xt3+cGDA24f1Jwc7FGNxnRmzMxrLtZwPVtx/uQJKUTWYc1AEI0xo4IEDClmckyi4Z2ixBYD+29gr6jBhOrmXp5TAZVR2w1DQuDSYSCpujKmtFQbkTWyjmwDOfjisBsKOF1an8u9MMrilJNct1TTd0Eh2b5fPTaF2SOt8NLxNRS2xZU+qkzQuuSnkltJjC3MOa21FHCVkRY7PUz40nZXTkCkT0o7aC7ZacmPCjsFme9qA8QXBKLE3q18Hj1owedN151WJbb0gZgU2orZnt4Uli3KWpIqXXpVTd00jGopIBcoki0AP+QQN4EqtQkChnxixzzh5euqSv49YA6vQUhkLm2BlNcdWzDs5mO+6jlDXspXvP/LjL1dYsOrmHevA892waDd8/q6+OP1rLSbj/kSY/A1YOM3aefcvG4Za8Nzy53flGOALUB8A+QqzOvNuJWvw72WYaolZi7zRvSbh9cs5zmAtHl7DiKdkApIE0XGKkJdNaQBGCUVYD7z8mXYMiq3nyHnXLowX398rUnEZsEZkPOX0b9dNFSrIkKMAFmqtDSFWGQbCpik1OYClum2WZB2b44xRijeWgA66a9PCLEjkymuRJRK4YB9KjkBY4q9NpFBXFYrhbUC7g3gnY9hA8JJ4pCFYqwjlc5oJ9KituiIiexoQBknzrZZnFclwZdFNCpFUrlosSmq7MpNURsm3AD4hZQRCK0shcqUhChhKeguEmoMAzOnjB0MMSiVtZcmlda2/MtoK6BjZSw6R5K15F4WRu/FNCOmhK0dMWZhXAURL+1jxPeRGEVfMGTZKLU17O1NqaqKuFzSd5FqMkaNxqirq01clCnjukwKPQQYA6JfNqmh6pB2qhgDMp+1ImrRbBnA3kRhBJUE1WizeW0Zq9uFc6MttgP2WGOonRHdtRRBa5yr0ApCDFKB1hbnKiprIaeSoMu2fri3B0rOu1119F2ArEje04WOENsiKCmfwdgKV48wVoTucxHeVdZhCvMzI/TzqoFx0SEEAY9CTgTvy70pjNCi9xajVK6EyVmQ/ST3a6hE2NK+0HUds9kMlSJNbWjqCu20aGWlROMsjbO4UY1vO1QXYdPmqDDGUlUV2gsdPKSEDxFlLdZVeNWVBfnL7rqqjIHN98PdyoMZzU7QudtK89LriAh/VRzMYnGkkhmUkrBCXa3lNXJCKQEYjBZNQ60Vse/KGiTag9oYaiOtXD56qmoEiB28MxqnMyl09CGIQHZl8dkTvQAy67aj8/1GX3PultSjhr3JhJPjQ8aTPZbzK5aLNdPplOnePilFur7H1R19X0SAU2LVrqWlum42BKGcRWeubeX6ZqBKiZik2hOCgDKutowZi1tr15NzQFsJ8rJSWCuut31fXJIH4D8FUvTElOnVsM9lkrIkbUhJKlZWG6IzZN8TvCZWVtrWQwKtS+FCTBuMtWAdli0DWEURM48xEFJisVhhraVPiT5ELpcLbExELaxOXa/xxvL06oplSpwv15xeX0khAmFYOaUIWtORwXsZD0sBPW2S1lNMzXivxlYNMcFstuB6pFnt14T9BmWkTVoHgyJudE5DCCy8p4mafb0v6/Oge1k1uGqK1pG4uhJjktJOZG2Ncw5rwdkKlXwBObMAwdqhTUWiJSQF2pG1o8+KmBUrn6itAHcX8yXLxYLL2ZK7d24xqmqC74pLdabtOkZ9Jy6yRb8mpiCsvQx1NeLo4IDcQ9cvuDw74/BgH9+vSalHqYgmEENPCGKyYWwiBzHOSFGYk8Peo5SYy2glRkpDMK6VsO9i7Hj4+Se899YdDg/HYozU1MzPOuLlNdPphKZxrPtAJHO9nHNxWVErxfTwiHa9prtaoJIkPiF5UupAZ6JKZKMkuMnb3EMp0WwXSGKIIv6Fkbr/Px0KtBPAX+IbVVqoEOlAFBorIHyhbRorrOj5bMlivqKyIyb1mLoyqE4xO5vx+PJ9Tlcv+Pu/e8xHf/uM7/74iL3xhLrpOV2t+Pz8HGtvc3Jwi9n1NYdN5s3bDY3q+eITx8OkSX7M1azi0WkmffqCZ2ef8cabd/ij//qP+NFv/Q63ju6Q22swiSdftDR3HXe+/V3C3RVPP3qMrioO9/c4u37G88UVra7osbx/+owfnd/l9354zPFJzZ0395iO7vH9P/g9Dt9+E79cs7z2PH/2lI8fPuR0vqC9WvPm5IQ/+PbbfO9bt2j7BeufPeH8+JCrs+fE1BFj4LOPP+F//1//PZPxhC8+e8LEaR4cHzC1Cqci9964y+Rgj8urC/Ynjnfevs94f4LWAZU7jAJzPCYZw2Va8+jqHDM95jvv/ohJ8uQww6cZ4/0p7q0lKi4J8ZK09FTuLnpc4deK1fkCP2+o8gH15D612WNLb+sBD0a0jmMx+dDImuMpibMFk0qJ2GWoNWY6Ru/vQ9WQsiVr8wpuWn7p39cdvybbTEnHSd+LOP4mQY/C4n5puN88zV936hfQIqVciuYiBzCfXWO0Zv/gYEdi5p/+OXcBt8V8zuXlFW3bbnKTmLYdSruPzzkXo5dtEq0LW0wAOoX3xcFeZ5GfUJmQo4BXUQptMYrMRe89apm4vFpwfnFNHS0+r/F5xbpXrELmejXH07PyKxarltUy0K6WRD9D5yUqrUldwISaUQqk+SlYjd57g/1777H34Dsc33uDtx7c5ntvHPNbbx1zq8lUqifOZjx5dsbDx6d0VzPatsclxTpEQhcIXSCHKI6s0tJF7MX8TQpWg9h/JsUCyCnpjEgpk1PcFIElMS5JegGWyKowrYS1Kb+HqJLoqg8
43iYHHqQVZIyajLDFC7BjACdibMVkgw3oM9y/ja7jK8aqQkkrtzUYLVJKqkg15ZSks6zIxGxfN20YR1oblC86lwWMCC+BvWnn56xFdmn4GnXC5070snY01gqBb8MWHIAHcfItU09LwUCVBwuLq+geKoUzisoZVHGezUpDVVM1jWgMIuxHjCtg4quZZl81n3Z/3gBT8ovhRm7mVN4+eAfjYHOfXga6NvNv8+/L77cLWN18DRhAxq8CCP8pn/NVn/urrsfXAX8vr/GvAwdfx+7LLz3u5X83tTRvntfm9TJbbdFc9EZzFswjb3+f1ZZ5qsvzNuSOgj8YBIw2Sr6KGScbIJqyng6vO+Sjwxqai+SYVgZiKkD45mrIGkTRsEMkBbjx2bb6ejnnr20h/nqArlTjhq83aI0MSLPe0ChVuUBZQUriBKhBnEOHG57k0w8o5iCuP7DEhq0ulirH8GEpgMyApA+0yO0FkMcppVAp4MqiZZUGm1FosjJkJYBf1/pCrxS++NDPnXKm66X6FA1UKlMbwW8toAtWqovLXtayKkUl7MDAAOhZnBKn1toobBJwK8XyfC2soaClVpARxpFACIVBpwOOYhFNQXmL+HqKUSjXG4CudAQhQYUb+veViIHXFYxr0Y9TMbCaryBrQmHGdH0gGYWPkbb3LOeLwr6DXDToBt2BXDYwbQyuaWhyJrtMc7iPGU3YX61ou7CxGt6l/Q621bo4mGJ0oUfHzWQdxpRSW/OApASsaapaGHZeBGqdsRvwbQCApTpVxkSxTFeFCq6UEqBGSWXRWRFBt0ZE1bXWdF2kaztUldGVkwmdQadCLTcGqorKWTGhOFbkKJVd33rW3RofKmL2tK1nsVjThwXaOIxr0MZhmxHt2hMjaCsbXIgRmy3GWpwT1tJ2EdUYI0l/CImq6guzJGx06bz3aG3xxhDaXhYTraiso64qad8sLMDVckmKDkVGa4cN0PUZTYVxBsFUpCWU6CXhV5qYhM6ekYQ8JWEFDuBbzsXppohLD2NFlpN8A6AbFvahUjI8JudcAs1hpdkew/gY9Lpi3OraCcvJ4gqwopS0dlij0VmjkGBLQ7mPos3hfYAUqZwECBFLzNIeYa2hMpoceqLvcMYymYzYm4zEOCFnfPBUFlZtFpOG4Fm0YtqgMrz11pu89eYbPH38iI8/+hXzxYK9vTFVZanqiqquGY3HjChzOMaSlIiGWgj9RkdlMKkIIdC1reiQpSTr5Q6LNMVA34qTtVDNyyZl+tJyXjbHskar4gY2uORmUrnPlmQcOWtiQgC6oCH0xKhI3kAW85CkRG8uJAkerXH0rqaxTkwpCkCvsyZECa7blcc1cmdiTsw7j0mRrJW0wK97snNczsREoEuZuh4LMOx7fEgkkri6aoXWFalyoDQ2JnLfEYNoeKakMFVFVgJqLZcr1u2Irh+JyLORZNYV8IckIsadD7RKM01ZNucSmDvnaEaNAK3OAZZsNU5lUpL1LRfX8RAiXdfT+kCyWph0yoJx4oiaNX1WhKxRtiFqQ5c8KWuSrVj6yPzZC+bLNccH+0xGNbWzwmdRmUQUdrApbGWtCAX4rirLwXQflx19X5PpRWcyJ1LsIHdo5SG1hD6wRmEbi8pRQMDS2pyA2HtyErc50b8DZy2VdeSQSalnuVywuLrksy8ecft4n+OjfSaHh5jLPZZtx95+LW3dbeB6seZXv/gZnzvLvemYd966z4N3v82nP/uA02en+KqiagzNyBJTK+ut0cQs1rmbqjiiZ+uUJBgbmaj/gg8FW705IJf6pdLCdARZBzMUMyJhCcUY8TaiGk/n11x/MeOD93/Kw6e/JI0vqU80s6cvuFcpfv+9B7x3fw8zzdy597vsuVv85X/4Bz771VO+985dKpV4+skjxo3nvXvv8FH1lBkJjeGzTx/xxYuHXHfnzGLi7tunHB4/wJHxeJ4/e86f/+VHfPDhnHvnT3jjB8cc3h7zne//a9anz9ifjvmHX3zIbDnHU/GLTz7iT0dr3rttcEZTTxxqOuLkzgkp9rx4/Ih/+Ms/55cf/DUXi0tC0Jwc3ea/+vGPeGvacP3Fc+btFflwTNefc335nLqBqaqJ7ZonH31EZR39OrAPxMtTzsJzbp0o4uULYjTcPmg4evOQ6X5N1mtQHcQ5Pi7p1g0/++Dv+fvHHzA+vs0f/M5/y2T/ANtek0KFbW5LobE6AGakeI7yLVgPKZHbAJ1hVDsmeydEHTC6kpg6eVA9ymgyLRm3YYQDG0ZKVuV7lcjJU4004we3eK/7Afb2LY7vvyEsaO+pXVXYcq9JJDfslZe//vpHTtIJl4uLaIqZFBOrZUsMmemk2o7x4V8hEuzYr/8aJ1CM5jQlZoqslgsuLs4Z1Q3T6QStC4tud535x6455fEhBHFvPTtlvV5Ly1X5e8pbF9fduGmIcTZMHnWTfWNNJSDtJoEeEsdc4gpxC00pFHKAIwTNchE4z3POzk45vb7gxWnH5XzF+fUFbVixapcs257VMlFZQ+UyWvXksKbSNcfTE751Z4///nf/mKPbJ+Rb78LJtxnd+w7N4T4HU8eRNRyagAtz8uVTrs/OWV5cs768IizW5Lal7zratqVtW9FWTluTh82/XBzjS+vzpp0sI0YoA3PsxrgsCXNph80xblxAh8dLmx4okwsgQAGYBq1pi1KlvZWwMcazWm0KV7YMylwKSJKv78TCm1LSl+eRaFgOj1Ob520OLSCc5N7DGyRyKZRZJQVqHQftd2HUQXGPTIqc9eZ6pVCQN0RbNumIpycpMTHZ7VLKBTSWnHy4urIaaK0xTggBlVEDYgfaEAuW4Kymdk403bPEjArZt1XKknvHjHqJYCTj/+bP8s2r5pW6sRoNTMENGITa/HED3O18fR2A9jLw9iqM7ZVA0/avDHHLoM398vO2H/DVr/tNGG6vetzLj33V83fP+XXx08uA3MsMOQHnXg8+Sqr4ZWbgBsjKCAhdxoQuubv8V143bc0G8/AZCp5UcPRC2kLW8vKaIvGohFkaB1ZcQWBiWWeHkaMKG1cZAfFTiaXSALKJ4rHgLoIFZVWkndSAWr3++LoW3a8E6KRxsmx8myC49OdSziDD7gjVSpJ3lZHefKM3iHiOEFJkoBUPNPvixCxAnc7F/UWYBjmLmP7ugLGbVtbthqURV0ZZwwrNMElL4qCZAbmsEwprNd4oks+im5c1DhHZLxgqfQS0wWVLzra0k8oml1Ui44dRQFAGrQxeKaLS+JLkyUdT1EqTlCRLPoeyaWaSFgOIIrePDC+LyWC0JFy6DLpUQAyVsxgl+BK0KUUmSDuhMlLpiaLBF3QmGNERjhEx+CrXZzQayQaeDW3wJXk3ZGNpkISSKAzBnASe1KXiEGPk4uKCvm/JRomGk6log8elKA6PlcP0felFRxJVLYuuMkAe2ivl7iQlg8EMDrCxTJNyj60SHTQAnQ059cVMQW3GqC7tvSrnjUMVClRxxBT6t2huiKusoXEVkKhdxXTcYLTBOcM4FcdgayEFun6N94GcAtkaYtfTerGV11pTVTVNVd
EYS1VrrNkj5sBq0VLZBcu2IyZNNhUozfVswarrUMZQOweU9tko91SXxClloeaa0gabUsRYEWfXRuaYCVp0KqwB0wtgmQaWohgy1HWNMRmL0OClBW7QbLBbQLRUV3Nxn9SluiCsxSEQiqQo442ciUF0MlIUsE5rS4jpBjgH20V0t2o8OLfGslk5J05dwYdSOYPBgWo3QPW+ALpIS3mSngVJcIxCR2nxcsaiScQQUCSstjhnuHV0i6ZuSDmyWCxo2zVVVTGejqnqmrPzM0L0VAoB2K3GjkYc7O1xfHRY2EsFtMg13jcs24r5csWi9fh5J3NIKY6OjnnnnW9Bjjx6+Bnz+TWz2TXGSgtf3RR6/2hEMx5TNyO0kWuTQsR7h/c9MXhyzgXA60swVDHkJdKK7sX9KxUB46G4oXY2wuJkOwT9KRcWlEShDAxp0fgQp7SEtGlqBBjV1qGQNhNpS9cEIIdA73tUDnR9j29bgqsZGYsrIrwhy7gVp0tNSgrtKpIyJK3R2dAzaLZJ+LbqA8uuJyhFVTdYlcl+ABqTyAErYb0aY6jqihQDPiaMiZAUvuupXEVVOSpbCUCbIil0xL5FU6FUhVVGjDNipDYWayvqSti0KpYW1kEbUymMVYxGNaMReKeJq57lUhU9xUTbd+S2Y65XrNY9ZtwUQFITsoBzSTuaySHHd/bQk7uc7Du0v2Z+fUl2DbaZ0LYr5uue3l9wtDfh6GDKZFwzCIWL+LAY3WzNmkBhGNUjGtOgGGGqTDNuxB1VJ1LqaNsZi9WM3hct2GoiYHYSbRelZD0eEiPfe/puXdZeWU9SiETvWa+XaKfw2fP42VM633N0/w733/4Oq/kCrSw5RFx1QKLh2YtrzpYLlscH3L57n/tvvcHbKRMUdH3HenlFjGtqXaOUp02htOzZbUEmKVx2WBx6QKK2orb/5R5J9kDMzYBbGy2tajFhTQllS442mkzI+5ZgO0w7o/30IV989BFX15k73/ou/vKK227OD373iEk64+Ev/4E73/shnprL65bPP/k56xenvDH9AacefvX+T1A6MD3ch+iYjmA6CljVgVJ0beCLX3zBz6afYNaGT5uf89njT/n4i8d8/vGMxVnmZ+//hDe+e8T3f/t3mC8Dzy5a2lXGmBFHRuPsCIXiVx8/5X/6X/49tw/3aNtIni748z/5P/ll89dcP3zOxx+/z9OLzzk6uYXpFROdOBjBB5+f8Z9/+pTxXsKNnqA/e8rj5yv6MCMmDeuWoANV3WD6QI1iYmsaD3p1zdmjT0i3A/vvvkVzUqHiBegRpBZNz+LZc/72//mAf/jlB7h7U+4/eIP7k0B//vd89sFfsV5e8+Cd73Dr1nsoe0TvV/RpSbN/RGiXdFfXaKOZPriLSpbF1Tnz82c00yl2bwL4LUtIR5KxIkuX865M4wa8RmeUTpB6Yl6zDEtc15BCh2sU6EF15xVJqoygGz9tYbLMb2LSKdiw5GLI0uaXS1uTyjfeRt55R87iN3Aaw0tpPSScYIymaWrqyhXw5+UE65sz6QREKoke0Hc9FxcXnJ2dsW5bktFQVRvmcoyprPGleL15DUoNeluYkLZcyT8HY7mcU4n74uazpRy3e5lW+C5xeb7g0/+Pujf7kuQ8z/x+3xYRuVRWVe8bgAYIUgBFSqKkMWVLFxr925aPL0YzYx9bM7K4AyS2Ru+15xYR3+aL94vIrEYThE3eTOIllRMGAAAgAElEQVT0KVRWZkZkxLc+77N89pSvUuSTX/ySL373jKurzHK15WpzQaTHxw6yQespW9+Ss+bGzQPu332Px+9+n48//Cs+fvwuf/mDB9gbx1AtCK4muErsJHJmpnpsv0LlDTltybkHm8hOEU3Gk+mCp/NRklnLXrCP4vHaBV8KUAWkG0ATlYu9S8Gs3mjDuzUoZaPN7j4MzJ6hAG0LAIYp66tMthntklhW+K6Ue7WQUbLsRWWvOCTo5hE9lnXv0DCH8Kg8FtlK7ClD9OM+JicBBkWNgynjuhIZvS0F9SQprhkhi1grRfaYZB1qis1QIhWliQCTKSVC8e3LKRWySCCoOBbirTEY7YSJPRTVlUEx+FQWiyJrsJXFGc20MvJ5SpNQospKaWQZlo4AUcIj8D0q+uL/tTPu3+/HuVQZRiIA6RtD1D57683n9wG6fVskaSXDd0tyfwY10n7b+cbnyb3Z39N8m6RTnn67HPTa6/a+9D7Q9jZm3NsAwTefG0gMw3v3wcE3z3UAjgYJ5pssud8nZd3/vKFd7HwK9U7Srb/JPnwrsw9GDGT3t28pBQ1javmbHvafpU9Lly/qOzWMlTKnkIvNUnm/UhJIOYwP4yXaA3VzzmN66/gdjPiUZyVqu2EPnPc+dwjH/ENeLN/uQYfCDugZSBVLxeIVpwTpTmJQmnwoDLjiB6BkoHDKYR2QNT6nstkfEh2HlEp2bBk5d8gaXdnxBsYY9ioJO5PDoSFdo0ySIHtMbcrkKgyPnDNJ5TGptcD1ULTDA6qtNaAcGVuMJe14DaxSZWIWQEA8vxRKWdC6iAkkDdCrgEqZCqE6K5KYficBYr0emIbDNF8g16jKpKsxuSDCWQkDpcCbZLk/Q8NVY4VjWIUNiZVy/UKIeDJt9kQlGymnHaky44QlMsOMMpqqbphnIIJPInVNSZODl1S+HNi0LV3oQCuagwOig/VqiQqRq/Wabd/ho6QYaWewtRuNNKFIF40eVckF+xTOVkwYZHIcjPSH8wwpEn0x7B8R8p0UUiszTgDSZNRY3RoGAa2VgDjFHF0aXZQNp1YSqKEciYTOsslWUaFyRGeNzYrpfCqJR70n+A4lFwmNIoaAtgI+TpoGZRyzIAzKkBR9VLw+f0IXeuqmQZWFQFbi5SATbJJJtXj0oQ0xJmHN9b1IZY1GBTW+1wA2GfHlcI5QOu4wOFoDOic0CWNNYRlC5RxNM6GqhucU2ioqazBOoZLDB6H41ln8THIn9fmB6h6DMPlClKNqVfwm9wbtnWxd2kAocfFDeuvQh5Uq/gDDuMBucGb8zN1qfBhLxoVWGsTBwnglJiyZpq45mEyZThqODxdMpw0KxbK2LJfSHo+OFhwdH3G8mLDZrAjtFq1hVlXMJg0H0wnzacN2vZE1UpFCJmDbVcxqw9WmZ9tntlEAOGstg0Shmc4KyCFgWwiB9rIjXVxgjKGeNNT1lNnsgKaeMmkaZrMZqQQ5dO0G73uRGZeqpoTilEkyy6asaWpSzPQ2gJLzQ2u0ttLPh4VWkKQtVQxSpfBR0l61QpkaTEXKYsascsZqkNzTIO3JiIdVJEPvCWVUDVHYZ9kHsrY4dPEqNNSzKa4WiV0foowzSjYaSRvpQ0Va4ZTBo/HFM5Nc0ukCmCh3OUZhjFmdcLXBOEmbNcZIqErIhBhwSrM4mDGpEgczTeM0hkSOPcl3ZOVQuhI6fBLvR5szIYo/T865+PwYchameM4Z6xxNbfGVZh0zm40X0CqJR2Qqqa59H6kbhY/CIOtDQk0aJgdH3Do+on64INU3mFQJ7S+5OH0F2rLtPCFDjoE29FyuN1SVZ
XEwKRsUAdN9aOljR+thmjMZ8c1zVjw2nVG4BrLJ5Owhe7zfcnnxmhcnLwixRt9VVHRcnp9ydb7ETQ/IWWFqi0Gq864wOVUB9FMIpByxRkKIJtMpN+/dxjUTtn3A9YHJ/JDFfIHfREhg6jmTyTGYKZ3fsNkmXp1esbixZHGwYDadcHHxgl9/8hve/eGPeLRY8MWLF6yDZ7o4YDqdMqsmss5QWrjnWf1JCDT/wz/KACgbzrKJKXrggbXhi2G8MRMZk/pE6CN97llXSzb+FB863Lzj3fcecnt5BLXlN09eM51N+cmPP+YqVvzrL3/JZ1++pg/wxb//jOXrzzmwmavTz7h6esnF6Qmbfkv79GsmB473v3eTezOFWp3z6sUZlZuwTA2nn73gdyHw+L3bxAxfPT3l2YtLXOcwmy3PfvGaqxeXRDPDp0S/2eBShc0J5w3T6ZyztuNXX7zgeX3JYX1AtZjwL//8n5mmKepiQ9Ib7Ezmn8bWPH7nBo8/eMhXJ1/xbG2Z6MjV01dcLC/xxqLUgtnBXNgXfosyhmbmWNQV924cUs0iF5fn/Pv/89/5s48U9b2bzNQBpJbU9eT1OZurMz79P37F2afP+eGDh3z4d3/NrQ/eZ3P+mk/+7Z+Z6zPef/99VGp5/dlnvHqxwR0YpvccN6xFpY7tas1sfgO6JeuT57TbNfPpBGMSqGVBs8S3MReht6Yavbt0lnUR5XeArCM5blmdveDXv/53fDXBzW/y+M9uCWi7vxEbp9wBiNt7/hsN70+AiqvdPJ9iIoWEUppJ3eDNjnGv3nYSfwKMUMEoQRySUSezKVXj5BsW6fgf+5BidKLvOy4vL1mv1nuFyTxauogkcl/OuJPMqaxLsENJni/Frb4UsNCBGMHYXIopu42n9z1t19F1W7xvef78OZNpRQyBl0+fc3ayJvqarvf4uEbbSKaTIqz3TKcTHn/4Pv/xn/6Bn/70b3j44AOO5vc5aKY0RqOqStjwKFxhdxiVsXiUiaBSKd5rktH4lFhvN1ytN7L/0iK/zErhc6LzPW3o8VFsV2KhNxQizK6JQukHu3X/PqC7S/5UYzsTc/dAzvra3wRsGPZXA4tOocVEhsToZleel5/yqXLcfSt3wZiuj8Wyqdv/CTEnzPC6ASFQwo43WqA94yRULFP86oIoICKDfFOUCpAIQYgiaUyNpUiDs5gsJUbv6oE0MrTBNFj1aLMDslKU71Y2caP6qVyjwcM9IuGQaEMoa9QcBQxRBRALIUPfk0JAmSzBJTnvea/venzev8nI3k0agB6f2wfp9sGfawDdW8aNHangOvvtbcDYm5+9D/L+PoAuD9X0P/DYcRK/eax9Fu3bzuH68Xbn9OZzb36n/c/bB+W+Kzj3je+tvnnO8vl7oKDagZFvMvNGfGPEtAWsHYixTgshSaVc/Pal1apijxSGcTML5qLLGmgE11KWsWUAmUfrNZFnD4SzQWJbenMBDfV4/kYrlBGfe2PKmKXMCGoMyeeZN675HwPQ5SjcC+kYBUzL8iVSjEWqxGiux35jzmB1MZuMAUbZ5+A4t9MJS/BaHsEWhZy3NoaEhkhBfIcvp8v5lMYwoKalSjBey9IQtC4JHANYkwrFd0e5k0GoDLW6yJfQriD/eqQ7Z5WwCpSS16c8xO9Koh3D99OKkBU6ZZwCErikUFG+a1QiRc0a8TxIcn7DNc5xWGrJP3HGKhTiAtqZMgAOk/W1e7cHVGQ5COQgYGeSuOnkE+tWKmFJGzCK1IOyBmvEAL+M2QVcKN+vdKKqqUY6p3YOXUkqYtKyOfcx4KN4u2kjjDXr7GiUOqYYaZk0UEpi0As4YEpij3mLB9k4uJRmP0wMxpjrAN2AV+rrA6syeoeaZ6Gii69bu5vhkwMtG1Fri218lkWi0wadEUP/WuGNHuVlRmmsrem2LVmJVDuFSC4b/G0XWLd96TMKpQ3WOJTRxYdwB2CmAmjnssFKWQz0vfdo4wqQEmVyHUDIAnwO10RlYVJZrTEDLZYkbUeLhLOuKqqqwjkjvhdKjYxHYxUqWSIalWSClussg2Xf95JiWqq2IURUFHnI4OUhi8uSNpnSCLwO98wWLw3vxV+v73sxpx/8BY0p41AcJ2qtTWnjpcg2gHTsJliVZaC11jCpahazKYvZnNpZSJEUBOiaNjUpNMQsHnwHk4q7xw/o2g3r5SW+b2mc4aBpqK2G3HN4Y743dkAXAoaA32ZaDZWzBGOwlcN7z8nJCcvVCmstrqowcQDFA0PaUcyZzXbLtu05O7/CaEkSrCtLbQ3NpGE2bTg6PMSHEuDhPduuKym/whC1RpOVLNyUMlI8SJmstITulPvBMKbnQq9NSYoZSB+MKpN1RdZO2J+kkuqpsYIIopNCDiHBJJWrRLJJREWP6hIqDAzGIhmuHJWpqF1D0oZ1u2XTdyTNyNywZJwSRqXRBlNVVDFjYqQyFV3ryV1Ei/HnWP3FGaxy5CwJzkbNcSkRu0DvA8eLOTeOj1BpS20jziisAUNExR7oBOzWqhQ3AknJaJyKwfJQaBimH2H5Jaw1JKNK4We3CBLvUrEP6EPApMy29fiUWRzd4sGDW9y7/5Dju3c4dvdo1QHJX2HSnKOjhaSBoXn25Gva1RXGKZppQzWZgDGjf6N2GmMleU2pUplHjeP3jpEd8QUkjsmTsyfFjrZdslpfYpShW8949eI5Z5crpoc9bjpn0tQi+TK6eHM6SIEUJdGclKgrx3TSUE0ch8c3uLU4JvaZV+dLbID7xwuaqYUEfYREBbqmqhd0XeTXn3zJxeUlf//RY949vMHENuh6wicvT/j3y45/+b/+jYvLDf/wtz/lP3z85yxuNjRNVYpuCjR4AgPT4E+wh/4f71E2NQN7Q/bFg8yryKeSME0EbJbQpovTq+JNFdmoF2x5ycn5Gc9+9Yqr55b7swlVCNQ5EVTNf//8Nbm6QQ4TOLmge/WUi1/8KxO/4b2Hj3j/3duEVvPuOw49tXz67Auulic8fnyXv3l4l8nVFat7W5I6xt58h/rOHZb9Cf/5//xf+fWXX3N5ldG5JleG5A3hcsvy/CvqaooyFb73WAPTakatZzgzY2tbVvoCmxr09oCNn6D8jONb92j1C9abc+o60McNvda8alf84ouvePX6BVVa49c9fQRlG5KHbZ+wGOrJEUxmrE0mJPGbnXmPXm54+fwVN062wC2ao9vcU7DdrDh78YrzV19y+uIzDrLlH/7mJ6TFHfx6SbrccnGy4uxsQ3NLk8yETV+zCYbffPmEu+8u+Ms//x5VHenXS/p2S3eVOD/7Ha+fP+Xeo/f5wY//mrjt6FafoZuGyeFNsJaUW3JoqGyNzkZM9Ys/Fkk2NEl5sm5RNlEdNDQ2s1lfkdotKhVB7JiKPCIeu//fRxt+Xxv8I5swCCN6MplijSP0pR0nhe+FYKb2NnjXzuePRenL+jKlRPA9ulibmMF2JBf9K/D/h0mnxmNIUXaz2XJ5eUnbtRKARqbrOgltyknWsiP4MTCIhrVx3t2iwgTJmcL4jgWEESJFSgEfevqu4+JiRbsV
/9uu7/C+YzabcHi0oKom1LN71O2W7QZSWBUWyJaQOoxOKKswdebGnQO+/+eP+eFffp+D6S1QUzSV7C1GoCRDkgKayR6VerlXyZBjRZcqYnIoLJWyVAUIikqLKiln+pBoQ6BPmaQUMUfx/1Y7kaWAX8O2MF1vG8PdSlL8VtpIGKC1GGFmMEjztJKC33BN5VzVsCEqd1mJjHzsC0KmGIC6XAxVZO+Qy9p+RBuknZR5eodClDE6J6AfVWxDm8vj/lThbEVdi+1NzKLGiqUQqWIgJUnwRUvwVwi9jF0olEbWNCO4KaojVdJg5TiZGNW47xmB4eJ8nrNngDPY8+Qb8IJU2PcRYRMVQytSjPgUUVkUPuUCiZrGe4wVRucQETA07lzWNfu/j31pH6wqiAJvAmpqp7ZSgizugLw8PJ/Lcb7dJ25oR4Ps9q2A2VtBs12betu+9vrj9w+i19lv18G3a+fAdUDuuz7eBMv+0PNvA+f2z+NNUHH3xh1w9Sb7T0EBzUSaKnLTMt7r4f4Nr9nt94YbqgoYPA6PA+RT9kkDcCbHZJzy9K4Fyd/ycAxZTF0Dast93sdHYgH7x/eP1+W6Lx/f0rbgDwB00XuhApaLlIvXU588KQYOKitePcqQjSFYQQ1jLpa0uRKZTZTJXiNBEmqc4EoVbK/iLZ1GbkJMkLSAK+AQ6Q5jo7fWCXth32jQ7JDxbddSO0mPSVCQUkH8kxou8m5QSewmXVVeA0pYbzGh1M7RQ5uhkwnQlVSUz9aShprL5BKAkPVYLQEB2JJSGJUKIJiJGkiqTPO2sKfYbbCUlk1fqZ6gSpABpgyEwlkJoSRyxiwbZAXGWSqjUcZirMbkJP4JWdNvW9q+RzsLWeNDIgeNcwa8lyoHCrIWJlISWWQKMhGYShYM1jjctGE6mZGrCh88V2eaYMRPL2eZnI3SuAK4tH0vDdsokS1bi0kSepBSQke5XqmwNPcbuqSpikR4SB9VqvjbKQGiDGoMcxk6UNalDaqSmpoyVmvq2hVgSCZ55xzoRlJioXRmiWXPMSCnYzFGvL2CD2x8xNpIPZmIOXxdIrZDxGoBnJ3SqCqhq4qz7boMAgZb+EhpqFoBdd2MDLMY47g4NiXlk1J5UuOghkjFjQDQfZLrKECuKSb4FFBmJ30Udl15fxR5nLYyQSctElJURuUGqxW1VcxshTcildj6yNbHcfLMSaSBfZcJUaQpwjKSWyEVtsxsPqcyhsZa9MEcYzXr7Yau7WWciX3p5wPrcmDN5hGsl2TfvGdiqwpokmispbaS1Nu4ivlsynw6xRqRL0q/EZZjPanx0dO2WwHjYwQik8bgzAy/1ViVmdWueHdFnCmLrSw+jVoZUmUJkxqvLOYqoL2hMg6TM7Hbokk0k4a273BVRY4R77cozUiVzjGTAmxDIGRP3/WsAKuhaeR7TKdTFos5pqpxJT6+7wKd97R9z3bbYkwu3izFcmDwqWHwr6F4pmg5flJSbU/F001UG7K4jx4fC/iqiqWAQthTJCTCNRZvI41zFmssTXaoKqH6iPIRh2E+XbA4PCJpQz2f0gMXmw1t24uBdRmHVU4SbGNF7rxargg+CuZgsxiFy9xd7kNCk5k6w+F8Qpd7ppOaZtZgYqLbtJgucDCfczCbslmuIQZydtK2lClhRAHwYiEQM3VVczybcut4xnw+YbPuML7HpFzKTmWMyrnYASSiFzCw96kAycJwCL6n227RdUXoE9N6wnvvfcD33n/E8f0FbjqltjP6YEHX9FtLtDU3779DVIYQE8+fePAdk9kcVzX0PnC5XKIIZDWlaqoiizdoJcBsTDKyCBFXpG1Wl+VyhqqacHx0zHJ5wfbyhKvXL6nCMXG7xW/WhLrBNrUUeZIXYC96YrSlSKfEsDxF2TSOBQBhbNrGsb24xHeeg7rGTiTtOwKT2YLj4zscHd2mP33JxdkL/Oacz03k7nzC7MZtOL7Ni4sVX12+AnfAh9//AXduvMfcHlFRoaMWhr+FbCEXye/oHsw4/X7LYwCwhpd+s/D1/+XxXeCBP/zp333B/o135qIK0Pot51IKfJUhRUlldK5mcXQDqxTnl694/uRr7PSKrmv59IvP4XTGj77/txwvDjlffclrlfj8q2es2ldULNi+fMb9meaf/u7HdO2KH/74Y378Vx9z+vJzTl79jhcXzwhPzujaM1J3C5MSt2/e5oP7Nzk9iXz+6ooXFxf0dovGopOhUhCrIq9PFhtrDieW4/mc1kOjK7ahFale7qlVw+3jY5LaYLcKE6aEFiKel5xTqYw7OMDVHRfrFU/Pz/jieeaTT39H3mT8OoHq+cG797n96F1+89Upv31yyjaKpURK0IZIcoaN0TxrW5zpiJXDa8fTV5fkf/s19W8/ZbvccPn8hCdf/pz3353zN//4D7A94+WzZ6TpXV68WPP8bMPrkyVNhLs3A4v7t1Azx/0/e8C9h7ewZkbqtzz54hN+9q+fsr00rM6X+M0K3cGdg/ts/AXr+JrpwYzb995jcmOOchGjZuhkUdFCDGWNrSFYiAblEqoBlWXdmlIvSpiyjrje9tR3a9B/4oe1cPN2Q7YPmc0NjYMcDbmShNVRgjssmqCA0mWvUGwvvtvjDVAtFzlojKQYiD6y6T3bzRpjLAeLBc10unv9teO8zVvom6BdLnOXb3uWyyUXV0s27ZZmOsWHRN97lC0FWwXayPdMSoLplBIOVybvQKqkCDHRRwhKwrOi9/h+gw8runbJer2U+bYTHzqVlSSvG009WXDr/vssbt5FmSmTZ2ecPDslPH+G2gTIkZg2EtIgNGjcxFDPKqgyXm1GmWdA1iDWVmKPo3oMElwiu2UjN9k6krKEaAgeSf4MnpQEyPEpluKWF1k+sp72ye8VHnZD/dAWchz2UXp8egDLQGw9nNYoq3ekjTywvzS6qHh0caDPo+rHyLhaMJixa2RkPZ6L7YISkG6YWwboTvA+WXS/yQUdZx8lCfFqBOxkTywoAmA01jmMqzGVgxTRvZd50EdS8KQcaOoK5yxBK/roSTmV1PeqpFgKIKULLKH0To0VYoIcSnDiAFaJn9cg0EWVPXphd+pBVQaFQCD77zHkOZdAwCR6M2MLMlKeDyGQYw+mEqpvfmMefnMcGsgoyBpajP72+9qODckA2hUBbSrfSemhuF8Ualohq5Pf9yglrxi/SYIagmPMToZ+naG2A9T2Aa3rDDf5OXjMC4go55MLeeL6GCP3ZT+pGBjDEPYfbwJmb2PejecVB2RrH9hkxGDiGx5qI5yqBNNJA2A2+FsPhChKGGQB2RjVP7ko6ootQ0oMjOBdv0U25JlRrTVcI6VUUTkKkGbivpS2fMoACkT5PJXknqnShoQrNCiy0h42BftsV6XyOL4MSgWKxFxGOCnYU3AmuYcliHQ8p29rY38AoDPaYJQSMCJlYb/lzDp5KqOo0Sgf2fQdvteAISi5eJU11EpLLHeSQdpojVOW5CPBR6wumy0lE4PSevwyaE00ovZk+OIpYQtyOrAbVFYoZVFa6MAhBhTiLYYqdNpQJhIrm8eATG5
9lHyrhCVFhQqZqjEYo0lK02+CAFpadP69EhplyBkTssiz0NgsA6/NJW0uBgIKjyZj6VAQFX0CrStEl5XRdLgch1maiCPiJGFVCYAZiPjcE3Bom8QPTcv5WWOl6cZAinKPUogYI5NL0oE+bTHeYdAS+60bAV6K4W5Q4v1kaoepK6wRP7o2dFQl5Sf5AFkzqSxN3RBVoPVF1uQ1qtJcLVco3zE7DpjU4PCoGNApSppRTDTKMLOVUEFzoqoqgkABsomM4mPmnGM2FXle7D1934mUqixC+j7QRy/BGdZgagNWicdOyJCDrM+SwilhLYZe2JVaD0CY+O9EV0nsfO5JOUpHNYZkNNrpYhop473D4iIYhHWQQkQpUz7P0eXMxicqHZloJwBy32EzHDQ1eVLhyZjWo2pNva1pV5kqayY4snN0KpNUJORE2wszJRaTV5N2A4zOmhxjARMNJmlJeS309pR2ibOqTJ4ChlpB+EOk22zIjSPNpuOAKkxUSQ4Tj7GKPni0NkRqUspMSNShR5ks7L16TnN0h9nlOU5FjPLgMrHLtCHS95GcypRYAEWjMg6RzFWTmoPaUdkZSmV8CGz7Dqct3vtx4DWmjMt59zOlvKsQZqHtKwxOK6ZG02hpGoezCUfHR4ToabsNVV0Li62u8TqzXa1Zb9bCWtSKjfeEdU9TayaVo2ocue/pY0CnwKypcXtA6bZtiSSapqIPNc57kqoJKXM0mXNnPufOrSP67oqoM7ppSH0seLuGFMjaiLdadoQ+42xis9nS9R3GWmxV41Pm1fkF/vUJk7oha7DGMptOmUwmONcwU5aqmtL2iUgkdj0+CvAl46wccyAASK5KkYGURYX4pcjzWSGBBikTs8ha+r5DG0POkemkwTrDarUm5sxkWpNyoqqtsEpTSQP1AZ0NTVOha4syjqQNTTNhERNd15Oyx5WCQmi3WK3xviesr6iNQqeED4nttqfvAyTFrJmSorDAYuiYN45bR4ZeNbTRE1BEY+iNJtWWPke5pkqM5HOu2XqDtgpnMyH3bLwv8j8FsUdni8GhI8xyzyGeNiZCF5kvGrZ9lEq/dhgVcApcVZFMRcoWlSbMmsTUJe7ePETXAqwdHd7kvQ++x4MP3sXUkU3sR/ZXtoZUzwhJ4SMsbt3l8fdFRr48fYlxDltVpNjz/MUJXz9pmc0m3Lt7m7t3H6CyJXaeEDzZTqAS/0h0D7nDbz1Ez6QStsPELXh4+yGzXLE8u6KKkftHh9w4XEDdsIqRzfKMlDzKWtnEWCsJeyExn82Bnrb3GKB2Fq1gMp9isBwfzDhfXdFvl2xNoq4q3FSKRhbL8fEB9cyRfntCvz3hF79b8+ToBu8c3ORWfcDq9JxZynzw+Pt89PGPeHTnNjMMRidZ/TtNKmBU9kEkf2avury/0P/Ger9UacctnCz038a++C6PXA53HeK4hiJcP4m3fwLXP+GNE+ctEp3hrXBtk6LKJnFfUDVUfvVwGAOTicEaxckFLDeZL3/3O1588TnLJ5f84O6PmFeRuGm5f/cxN+srvjfZML95h+n8Lk+//orUXvLhO3fZnJ8Q05Zl+5KL9Wt+9ouf8elvf8mrV0/pVi0v3DHPJrfpppnFbM7Lyy2fX5zy8Acf8uDRfcLiJl8vHRfhhPZqST3VNMYBkY1VZJd4+PAB70+P+PVnn3C+OeW9x7f56Hsf8PXT1zx99gTjK2LfUZnExgcu1xfYxnBrMefg5m3Wy3NSk1j5wLMzxVwZbiwsH966yU9//D3M0QPOTjPPzRUHNtLkFnoPdU1rLVpn9LQhpZbt+oocHCm/5rTtqKvE6rWnzod89KN/YnZ0wacvz3jnxgNs8Jx8/iXVkWJhb/LZkw2//PwF6UXLDz66QN2eMD3KVIePsNUjutMv8esly4uv6VYG02XsNnLx2y/4+TKxeDDjwfcOqUKPe/0C9JRwHGnXHVVzn6Y5Iuqe2K3J2hG2NcrfgMkUXRsxk3cKO5tQK54UP7cAACAASURBVEVPJuvEYD0ytrnMtzfZP+Kxz/AY2EoaYQI3E83RtKZSY4kOVGFg6YQs7C05aDG0t7kAQJROKGu/mAJ926KMoqrqUvgfOQ6l2+xt7HOC6MnB8/r5C7786gnvvPMui8WRJMar6o3X739S2TCr3YWTZ2SdJb5HBbRQ4HvPF19+xcuzMzYxyphdCmkEkYFiCuM+Z6KKJAN9CcrJSfZSWhlIirbtueo6NrGnbT3dZUu3uiL4KxQdWgfx6dxmprqmcQ49cahpQ3N0i7y4R7p5HztfSKLy5Altv2LVX5FaTUONSCd7fApkZ6kmU5xtcBgMAUdLxoKxcg1KUjgIg6tU7iF7supJuUdpi3PNODxuu078jX2gbzvCsB5MUlSIPu3AGXbyMV1QM7nHuSwaFag9RrXKKB1RJMgGneMIYicMKcseS0y9C2DmJDQqGU02SsIPrRPrmziMyAJ1pSzEjVCAwFGCO/QjBYPoa18FNZBXhAWYyTmUrxALOKmxVY1zM7KxqKoC41Cpx6DJXSS3HZXRGCPe6UK+k2K7cRVVXeHqWlh1cRdUMUKESkgTKbQMEl72vdVUJKSAcaqQeBIUgDhnJX7zSPiavF4SiUGXQp58R5UTXRfIWkkAnQk4A652BK2KeqHs26Ls3weG38hwG/vtCBmC1ihtUcZCTCT0ACPI7ykXoHgAZcsVuDa9is1QUmm3B9EC2sYUpA9jRnuqGAWITIUkUBCl6+PccH/3rrOcg4Jc1mcqo7UtgLHI6GXtLViBQWGKgk+URalIOoVUoBBg2Jhif1MSj60q6qpi8WW1KanGmYFZyLVWMDDJZJxUavAUT0VJkotnNTtvudLXRp/LEfzUUlQfgMdc4E9lhJSikgT5aWnnKQVMzmJxk/eAweERAhQwbmAwMpC+ysVXWcb83T3N0k6R9ZAoW0z5vXzzkZUo7AThJ6lxXwzC6BZGn/iZhwQxKBnSMgQSWkWUEQJJ1lrC1qCEbSqUSqOC6dse3w7Q2SLriUkq8jmTiWSTJRkxKZSPdNsta+XxGtoUwVgaaziwEjagEHTSWYepajG4NKboiPNYrTBaduBaKzAarymedqpUxGQDbpOVgaAMGFppoZfkRI6ZrCMxG4ypSYgvTiRDElpvjJkQsqQbZotSuZDoBJDpcygJMg2DTCiiCAWZDToXynBJP9QKi+wFbGEtdSi8cgRjIRtyNgQE0TYajApiZJ5FUiaEuUKPTML88jHiM/QJQk5ieBgzQ+/PWTB1pczoRaV1McW3iqgCWEkW3aSIionkZWBKwRd2hSeQIXpMjKjKghJ5nI+dVICMxRSg2BlLPamoTUXbtwL+KEPf9XjfknRCbx3LVUvfbQFNrR0ma0Lv6batgIzOEHLCI1UpdJns9unhWTypxJxUBmAB9gCr6HLG5oSpK0wloO4w2JLA2Urka2rwrxMAOJKJGWptMVrklUYLkJzIAvQGL55qUeLpjQKnoNIKVzvq2mGMEylzgqQtvbGkmPDaFBAtoIJn4hNJZbwOdFk85JI2uKnD9gHrDa4sCoJO0gdIRf
qoMcUzRCVKxUvvvFEYPLDSyJhAxdEnURUzSrXHEg0h4LtOJm8jMtEYAtH3o5xYK4g+kbQmJBmoA4YUEy5lXI7EHGTAsxVuuuAgBmEubWp8Smy2nQDaBdwxpRJptMgjayPpxrXROKdRFppgca1B97okRcn5SlrZdQr0IIuNcZDYl36kFM5o5nUl4Q5GYxX07YYuBJJKmErYWwqRdl1t12y2EhLRhEAfA71v6aMiTybMJxNcM4EYsGVRNtCZxdtSJqmqqai9R9uMsTVKJ6a2ZlFVHE9qXjUWbTTZinTfKIU2lhwNsbCFTbIklaks9KbHq0EG7Ag5kXwgJLhcb0g5CRu18zTrVoJFUgY0SVkRUViDM+ZaZTQluQajb2ORHw8PpZQs+qGMsWlcxHhdlrBGwh2UNcLOrSqc1hwcHFBZ2egTPbHvCWUJqI3FOE3Wmba0t8rMmTQTZpMpZI/KgeQ9VUo0ztEnzbbviAh4C1r8E6PGx8i2XUMKMqaqSGU8tY1MJg1ps2XbFWl/Oc8uBi6vLjlwGm0bMDU+V2wCWKIsNLUktcr8J/LPnGtIGhNa1HaNDglTW7RxYAzZOGJWArxGiCVZ2biajEMDlVPMphWubsjKcnTvPR48eER1cMgmrtiWzVkXZOyPypBdgwoRh2K2OGQ2n9MtLyRwyAsY1vtIu/VsWwkIuVoFptUNqmouw6GCLgZs9lQ6YK0wW3WZF2OvUNEydVPcwSELDDFlLtYbttuWLvRsgqdDEXLEuIaqagg+EHwoHqV5ZCQ7a2U+TmWhphXz6QSQ+/T6/DmzxTHz+RGT6YScJfTleD7nwZ0btKue1+enrFeOWRfxJ+c0OvMf/vLHfP/Pf0I9v0HlLBVZZMmxx8eaoCssMmcIIWIfEPu2xz4Po2z2vsO7vv0T3/bMUHEewIFvIIUwVse/+Qk7lORbgLnxCNeBFbX7X3l6MEhHQnbW25avn5xwdbXi9elrfvnlUy42He8/+iEf/dV9Km9Zt6c0E8P8xgHZJXIVuFi+ZN2t2eQWak199ybTw5qf/7f/wief/ZyXX3/Np7/6FdvlayoNPmh+88svef7kJdomQmro4pSbDx8w+eg95rMJ3eSAlVfEPlDpyHZzhveWo+qQbOFss0K/+pptdcLV+oStCdx49zYf/vgxV90abTVGV2hnyN6jc0/G4KPmah3w/ZYtmWl9yLZb0afIxz/5mH/86x9SXzyne/WcX3zylJdPV/TtitpkQhepe01tanLqSZXCLhqgpjYH+OB4ebbm9GzFgcncWbzH3/zk7/nr//hD8uEp3eUr7i/ucPn0KX33a47vNLT1FPXfOi5erfi6/4rV06/o5oHLmeb+u8/5yUeBWzct7zx+B0NmeZI5SAsOmPHy5QWfv3zOeYgc3/uQ3kcur54yuZxx8y8eMZnPUHkDOqNMh6k7lJ1hdQ39hFwfknSEvKGaTHnv8WPc7DaP3n+fqDWBRIUZrWe+0eT+BGDdvnf0dW+eXLxBFFhQVp5SoeyyTQ+qK+BbA1nGYKUhEwtAp0c2fFbw9MlX/PvP/o1bt27y47/8Cw4OFuUsfo8cVSmUzkTfsby6YLW8IsWEtQ5jG6pKgr2u97sdw4IBnMtCHkBBzsL01bkUThXEELm6WHJ6ek7W0BzMwYihvtZWlvsqgoaYxRPVx0QoMs1hYxr6nu22Y3WxZnm1ZtlesfZXGCx1qNExoOOaxIakPFY5bh/c5p3bD3n08AFHd2/TzxpeBDhRUzZVjZtOULWlupjTHM6IFxP6fkOMVo5vVAmJExkqyuJwQqQgopRl59NmAEPKCZ0tKukCflmUrTC2QuEAR0ajjBHWaudHJUnwAV+CzVJKo88xhSQx/jcoZopOBIqH3CAJzrHcpwRZXBtVWcMZY0mqkvcUlQpZQA8QkCgpJZ5sxgn0mkzZwKvyPkYkTul8rYmMa/Us6yHBo+UeDoyuVCTKPgRpzxkgFd9zg6WW72Es4jcu3udai6pNaSehVpX4YMUCwgy+36YEsUVjRAI9BAaUY+vyWqP0OB8N60SthQyAGrDL4QpLe1aqtEtEkSHgqexbFKa4Lgz9nBJSIWvolGIpkwmYohQj/3DQKg6AzLgXgOtMsBEUBzCFcSbXiDy+4PpcyDA770CkcsnKWwrDqqCsKmt0YQbqsk7MY7uioK7DQd42UA5gLGNIohxM7Z2YGoPzhtdeG2cUggilPIKjFMaaxo6g1lDwUFqNAuvh7YI5XF8b7F+X0RqsHGMYa3QZ10bAdpSqpXKuA1jGuPYYbs7AJS1E05Hht5MX5xLOUPy0985nuBAj+EdZ412/OAVcZWwzY7Ey77/k2++LWJUNs1/ZY6odyCevLuzMrDDZENVwjaUPjIcr90Lt7be+i9z42z3oCutGrnIa9brWWibOca9aMFUZf2Doa8U2RtZdS0gRkzOHGWogJ0l9y70ndj2p0BrDgBxbDVGLqb4zGMT7LUeF1VoqaSqhrRZzam2w2tButqU1CRIuVYphs67JSjZMIQmN2BqDMTLTOxMg9uQoyTtGaSqFBDEkYfiIv5xIC1PMklCIgjLh5ARGlwAINSy59yZsI5u8nEu6IZDGKr5UKFQOAnQqyFk2dD4EAeIARcZoCAMQUBbug/+W0mJQKOmnJVTBikfYuvNkL95AEdCxhBioTA4ea61IkMmk3pNCQHeyidVGoX0ngE2WcadLAU1PUzXYumLSOLSVZFUbWjahFdBHa6yVQIOcNMkqEkWCFEWCSR4mgCztzGh0FMQ6BU8b89hxrRGUOw0ToNYoZYtXQRgb/fgPKZZZWyS9ThJQU1Yic5bZswREmNIuxKdp8BnNKLoo1QwVAyqLlNLHVABWjaen2OQTlcK5CtMYtJHrohRYZ5nYQM6edd+z9p2EKth65xc3TFQlXGU0oS3yw2EgTKF4hCHXrqoq8H2pkMRxopLPTYUwIjIz4yT8IcbiP+I7rMl0XaBrDe3GUmsxk89aJh9lFMYkkhaGa07SJ7SSUIlu24/R9jkJJdkZQ2UkoKRyhspmgpbwAavlb42z1FZxeDCjqRRV5ST1SUHUijYmYobNaiuVlpSFai9mY7J4MJrB4C/nXSiFUvIdrAZXALq6qsga1tsNxjkWi0OmB3OMtWy3LevNlrbrhRmbICQBcDebFr0N+NaTQmIxm9IYRyYLyKpKMhJyW6x1oDTOeaxLGNtjtPjBVU6YY03lRBYdI7YUJijGomhhKcsiSCa3wZ9v8OEDjasq0AofSgJvFKbddr0ZJ7WhepjLxCSfaXbBEIVhOQC4Y5VonE9KZQrGBePwfCrBByCTcygL54G1KT9lAai0XE+dIelAytD2LX7bkXVFPVT4VMY6MSX3XU/XrqkQebIxCh8DfbsVQMrUu0RmI0BTU9dYZalcxlY1tqppplOWvcevtiLrMDXGGHzwdKstdjElzSXARduKTKZPpVhjDK6qsb5U8YxFuwpbNdguSIHKArai63vatpfXWEP0PUpD164J3QZTTWWjEDpIuoDsULmKg4ND6mZK7xM+KzAVscxXIXoSaSC/MiZFNw110
6BjR2n4aGfR3tB3Hadn5yyXLXdvKm7fuT/u4WLKBRA2aB3HRLfgoyQkorDaYicTptbiQ+Sqa+n6jl5psSIoaZ85RLJOpN5jlMFU4pIaiw2A9x0qFGsGI9Lhqm6YG03UgeXFKZsIITv6UuFO2lBPJty59wCb5/S/jby62nLy8gWvzlYcTycs33mX89dnxNMWoy33bh6xmDvQtrQJCDHh1LAAu7Y12vv9TZBrmL0HaO6Pf4yL4bL0V3sLu915vLn8fBs49/bzuc7Iu/6Xt79ngOzKeajdJokMFxeX/PznP+OLL55wenlFqDM/+bt/5O//4vvcmzr6qxWxS5xfbvnf/9N/4sXrz3nw7i2Sq3CHtzm68y5t63l+tubRYsLi8B6vn51zdZaYVLe49c6C27crzk/WfPHZa066M3y/ok8zuhS5vHqF/83/zTvxCr/NJDpse0WdWpLJVG7K3M3RynOgO+7WGZsvcbojqJqfff6cV2drlqenHN6d0XeK0/MOZ5wwckNH9i3Zdyx78FljJlPuHTXcvt3w8Ud3uHHTctXWfN0aPn15ytnVGR5FYIZJNTEbUmvIOpBCx8XzDdZqtLpJyDUpWUybMFlx49BxYx7Itufm4x+Q/CPS2TnzhwfctguieY3LHTfuJdbnim2XuFXd4Gg6x1xcotUr9KNPmXzwDnl6yN3+Mcduxo2bPyDZOeGLX7G+W1FNN7gDxW9//gUHzT0+vv9DjLsrN7XywIYQLunbjtlshtZTUBMpGmSNSlPwFTYgFg61I2JRuO+0ifhTPsYi3F677Hzgq5dXdKsVx73H9RsOpjA5rIg2EzE4s6CeHqHqgdW2t6FPEnbkvcf3odhjGN6y9bt+LmUXq61mNm24c+cWdVOxWq1AO1zlmM6ab7zr2hiTpRiWcgl4UBT5326f4DvPi+fP+frJV6xXl2QDwQtJJHY7X2GihNz5kOl76FpJn99u17SbNZvNms1qTegClXXUzjPtTmiAqZozb2puHE25++ABdx7c4PjwBkeTWxwd3GTSTNDzGZcKwrPXnF60JBPwJlGZCjub0BzMiAczcr8m5lakXFo85vqY6LwEUWCLg1PW5GDIIl2S76AbNHXxcisba1OhJxXTukXTjt+t7z1977laLbm8usT3Is8cwLlc1oGGAvSOOMduH6DyICeV4qcqfpvDelvmRlmrj4b049yhRhLI8BmU/x+41mpYh8oHjM1p9C7L12eTYX02BDkoAoP0bgfMFfJFHuyNksj2hnMcpKRl3ai1JKlqLbYilatwOVPXFlcL26sLfgRTRq/uoV2xB1aUtfyw59ClCJ32ALD9149FXT2Ac6qEJpbXlGJwjKJs0QqxVHlLf5P9cR7/5VzAqQHk2aE8BYSXOVvmsR17UtbjA5C0+2zYs7aiDA97/VWV45fLwABVqRGoHdpZGZ+0fOBwTQdLrCFlusBQe0f/PY8y3g371jyAsUlwB12UhVoNkNqwWmFMb94dq3yT4VoVApAubEGNBHYM2gClpC2/GcIwLlVMOb8kzFFxDMkFlFTEoQD75jwxtAujyrpnd42HKzKwB6+vVn5Pv9l7Td7/uQfWvW3FpMpefvx9Dwx8sxHK51x/cmhPw1yQy8/h/ULQGN7/xror51EVOgByOwbd9f70+x7fCtCJw9mAcu5O2GpDYw23j465Oamobs9hXtHGwNVmxabtyH3L3WZChXTOrutYb1ravsOHQEhR/M60IvSJPgoVOBtJ4USVkIRyPKfFy6myFox4vDltis1mQYqNGjebylg6rwlZ4YNM1iYbhDGX0DkJYy9GYkhClc8ZZVVhyBh8jqhsMAWVzkkIpB0Kk0tk8DX/vGJSKq1vvDmp3NhU/gkQraR4Q+n0SMKtjhkbJVZeaZF0aCUTeyIJcg8Ys0tw1RRKbRnFRLeexlh2lMJajbKWbKSiIh0TlBUz1oE+H5KkPgUfUSmQcmHWxEybe9q+p6l6nJUkRuMMLosFVU5y3toUsNA4ep/YdD2LFHF1zXQ2RRkjNFBD8d8ro68qqaw+kKJHV7V8p9JZheWTyqSZShJmHOOjMxmllch52en08zCaJVBK6OZjNSwXOcEAiBqFMZZsNPNmQswC1lZaS9XA+zLgKTHwBXxS9DkRsiEpDdaitKZ2lkSkiqJX19pQ1zU5K5yred1dsD/cDNp0GbCH6sT1yXPogwNAlxET+wyonNBJqmypJARTgh5EviiHUwoBUENHUOLvl4uXYyKTSvKXtk68QXLEOIsPAe8TMekRNFAghq9di+9acgiFbahpqppQG1LIZDTO1jSuYlpbGqc5Pjqgdlo2ONbglQJn6ZMA2X3ryUFOePCyVIJUYJQilsj2ofqntQYzgFEUYEtTNTUpZ3xOVE3NZDYllw3pcrlks9nKpt5ajBXWWQwiewjBswwbVKl662kjgSJJUk8rV9idSjEmJyvGcWiga2vAaaGd5ySptyRhJ6g0LJyGayqFicgufVfrIfk246wtSct1YUP2eO9Fjl5iwomZ5L0kG0kMFjs2pRqbkxzTjCBOLmbAQztjGJ9K29OF7Tr8bq0d+2BIsmzdbDZMKoezhtoaJlXFpKlp3AFaazofWbeedRdHw+q23dK1G5xBasGFUd00IncRf7dEn6Q/+xgxxjKZzJk1U27dPILYkVOHUVES9rSBnEUik0Erh1ISfuPbjq3VtLOGEIrxstVjolki08zmJF2j+g0YRRdg0wW2PtFFWHc9ykQq47hz9y43bh2jCTx78juePntOt13Rri/wrsIQiEhfu7q6pAqJVMYJlCQhZ6XoQ2a16lC6Kn1aLAyi71EpYjTUlSvJ1hnnTDFa1uRhM5gSWGGqGi0M+N4HYgJT1xiniFnaxnDvGBmRMhYprTGVxTQNbjLBVRWNNWxCxCdJ/tss17TLFfPZgul0RtRSZMpk+tiTvcXHTChFKW0stbVQwVHoeXl6ztnFVzx9/pIuRHRV04YIIbM4POT48JjTizWby3M2/orLlPjn9Zb8v/1XvJpwdHSDf/i7/4mf/u2PWCwmElSiri/mxv/7xlpoXCaO/9Q4i+/een3h+90fw0JUjccaeRx7f9ln0+2f0z6IqPc+7fc83vzTsGL81gXg7k2D15DWmjt3b1FVFWdXK5jN+eHHP+LOvUOqsKZyFco4Xvaf8HL9lOAUj97/M1Z9z7PTC+4+avjwg4+Y0XP+1a8I3YaH926zvbxiPm2YTj3vf3CbLz57ya8/PcFni8fReunb6/WGGDzdZsX6YsuBDhzOK666nqV13Fg84N78EfM6Q36FS+eslmsMAaXmPHtxxetnZ3xw75j/5ad/zauLln/92eecPT9lEiM3ZxXv3b6Na2pevD5l0wbS9oKT5ZJ2afiX7jk/r2qO5nfpQ80SjXKanBRBV1DXeJXwYUszy9x7dI/F0QGrZc/Ll0uW7YrJNHL/0X3+6sGHvPPoFtPjyJdf/oIvll/hjOHFZ5/y6OGU7/3gXVLeMrFz/uf1x9gVLJdLbr1zn5vzBXodyNsWrl6wPkkcfPgui0ePyEe3cc0Dkm64f7NGXSw4
ffkr3Lziwfe+z+LoMc2D90T2phNJdcTUypq7j2SXUX0Pl69Rsx51KKzw7qrnyW++wDw/w83f5fDx/0vbmzVJll1Xet+Z7uAeY0ZkZmXWXCgUBoIg0ECDk0BS6pZZmx5kMr3oQWb6hzKZSQ+SyaQHdosUB2NTBIEGgUKhxqyqHCJj8OEOZ9LDPve6R6KqKJpIhyU8ysPD/Q5n2Hvttdd6iKm/hFn2z/wQ+QopOHvvBUDQRnR9Etxcbfnbv/0Fm6sL3mgsjd9wsrScPzzHHbfcbLc4s+LkHA7P7mPtNG8QEwCrSSny8OHLnJ2d4+qKtl2yP/e/CCjPOUIYsZXh/v17HJ+coE3N548vUEZMtRbL5kswPjWDLpS9TO3F6WrKWXMmBU+3XTNsN5CLMVuU3ENlRxwjYxjphi1dt2W9WbPZrOm2G7puix97chrFTX3oMT7QqJo7S83JySEvn5/z1oOv89rDl7h375Cj84bFUUVVLzB5iaFm2w146zBAe6ERd+9EiBFjLbqusIsFdrHAbBuUryBFjM34BGOIhFTaG9FF91ajdIWaGUsT0lIuWc7gR0Rbe7rqYmgRUxAH+HFgs9mw3W7ncaLMznQulXjgy1a6fSAU2EuUp5hLMRko5cLkiiES1EjE7EC7aVSo3X2VeD3dMkmUDhyBHUQr2e9GVN6BSGQxWhzHcQdyTcesYQKenKuLOYmW2DGLJJC1TmKkAtIZ5cgqkFMl2nI5S85XvnfSH05KMJc5TlBqPpddy+gOoJuOe3qefj8BiS/O4xfBh2le75ecci7OwinIuc771Tz1JI8GlCr3d+oFJyFnML0/zfOqNEgxZ9s5oNQOuJ0RqfIfudz//d11AuP2z2kemxNwWL5h2qn1/J4yjlWJGVL5Od+OB3bP5TprJssNAdAUxcQnMplqmL1YRc+vcitkmBiKE3tLxm3CZNEMNFlWpelZMI1d0TCTRSeOPRYhc2JQwL7pNYlX52uX8wvnN12fRFKGueg4G4LsjZvpf2pai+Pu9qodwJYnZF3t7pFoyU/XGor4slxXhXQJKGHIRkTjrpCyZwLQfNwz0Mat85qvj2KWNJ7IL1O+NxsIa3ZEnzJmXlyb9skO/xhI99UMOhCzAwO59DwTA1lF8Jp+swENDVBZx9IomtSyyQIS3Ts+ojZyEWMMbLuO1XZL3/WMWQTFk1H0IbDabumCtEJBFgfZoS+916og9I5kDFELfbNpFgJ4FYMBrJGLpLScmT4kK7PvKY/oysijslbkHbQAdtkHxtETEWMFU7VoNNYqoTJrRSruNiErnDEkJPGfaJkq7wabVVrYYcKiLkNU+u7zfiyepxuZij1EEA28kIih6DdFScA10i6bskIruX1pbonRhR0jyZqzNbos1JWTagqqCPYj2l1aa7Rz1JWbF9Nh9ATviV6TgiTR0zhNMeB7AY5iitjK0jQtSotra50TISaGMRES+CTMAR8TPkRZKAoIEVMU18aEAGtGUWkBEVOSNrMQBZCbUGgQMdsQ9sC5abMoC7uYEAhAJ4U62Ugz0wIrwVeKmRQs2UbRGkhRGGyjMOIwPTFnamcwiwVNXaGtIYVACoHlckECQlRsg6cfE758v7WW7XaLzQHlRZw9NYWyT8YHoe2nIoJptCFqcUZNajeRdy5duwlttdltgOW7UAqd5XOnpB8jFH+pwpVrrDRaVajaYGhwOnPYNrR1Te0MmmIqQGJ1c0nWStqKa9FTG3wiKyftzFGAJzMFMClj0FTGYBR0JmFVwBU3prqyNFVF2whAt1y21NYIjlLOrcmZphmou5GqqglFWTarycVV1hMBlfKt66Ln4E9WUakSlvZeJW35SmnW6y2XV9fcrG5Yb7Z4H3B1zXKxJKPR/YjWmmW7JAVDGHq2vceZHqcNy7rGaU2OHqUM1lUlBc8MfmTwxb2YBDlK60Oa+LcSmIXRiyFHBoMvzN8SDGUxFgixbHqlKjk9lNY4pFVBaz2P/1SE+XIxVZlEfyXYE/2R6XpIQKNnIf9p75u2WYUq7ri7oGza9HVpOZfg3cwmLqG4+foYqaxF5cSiqjlcLjg6aEnLJW2zEPOI2tCHnpQi49gxDJ3o2tWGysjYMEnamDOZqnK0qSUikjxDyDS1gDR3z+/x5huvUTkYujXPn33O4DuG0ZMTs46MtHFHdFlPxnHkZr3lsq3EyXhRAO8sOj+uXVLVlnEcuFpvxD01BrbdwNW653qEOtao5UBjK9Ay569XW1brDSkODNvnbJThqG6wVpNS4PLyVZQB5QAAIABJREFUObYLtCeHkrwZ2bs2fccnjz7l8dNrTk7ucnp6iDNZmMMFENZToJTivG+kaT0Mwg6xtmK5PBBNQmOJKbLdblmvevKiYVkV7URicVqWKmsu1fMUJXj3SpGMoVq2mLqmSwnf9Wy3A94n0cFUWlr5c4To0SoiTSrSRvzZ549xbkHj5FhsZaito1ksqLcdN9sVIWVcs6Bul4xhhe8HjpcVp4dHvHJ+DnXN+uqGy8trwmZkzA0DNYeHx3z9tTdEozMqss6gKW5xfAGulV98gdvJeVlfZnxrmtX/dIBu/3PKxnT7O2+pmee9r8+337d3XLdO44te+CceZiptXqbM65PTI37ne9/CKFh1iU5rVn3Hu+9+xlsvLVnWDcEnzs7u81//N/8trVlyfvwSzy6f8fXXe15+7U2WywU6XnGWj3iK5vmN500O+enffcwvfvUxnz9+zPYmcLQ45407X2e9ueHi4hqlHK88eIU//u6Peevtt0j9lut7/wl3/YzL0fPXH3zGg4e/xe/96I+5e7fik/f/gr/89/8rN+s1bX2I8zX9qqPKI6987U3+y//s3/LpJvHk6f/C5mYD3TVLV/PNew85e+Mhn4YVv/7wPT781S8xqSd0lg9+OXB6cMr6vCFqzbObNZ13DGMk6AGlPKPyjKZjaCpOTi3doeHRk2uuxxVf+/Ydvv97r/Hdt3+Lb73+Q5ytyHnk848+5Od/+VdcXm/55PkNnz57lbx8mTvO0q8vuFed8m/+6E9Y9U9JyxU3Fx/SHF/T3STe+9sPeTmO6CqwPH2IqjS4jDYtrj3i2L1DtThl6TbE8TGVXaDyDSmuyYMhNfcYR4uzx9THFpUa+ueP6B9fcfDwAfb0PgRhwq6ebugvB978zsgZ9S7p+eoc4v/XY9pfpthz7xfyhGKz8nz26TULq7n7yl0WcUlrFIfLU8yiIWSNVpbaWZGcBCaHIwXCUNeKhT1kcXiEujUf909wL+md/rMYUVTOoE1L1hWLRYOrFxwcLG5PuSl/nH+Wz56T/KLNNb8nZ8Loubm5ZnVzTUoeC3RDz2a9YvSZzSYQfKLvO7bbDZvNim6zIgxbyAGnMk32HC1rXrp7yr2zl7l755iHL51z/94Zp3dOOWqOOG3POWgr6iZiWzEHycqyfj6Sg8UTMaZmaStad4DF45O0TYoEjRFZj9qJda5z0lli5dysqVm0h9S2pUThSOFpgNyA0mSdUGqDIohZmY5gB6AnD9cEc0E01wS1ok8r+rCV62+MdFdNHR5TIVIBeg8
8e2FsTaLvL443JuAUdrrLko3fYrNlssjYAKrohE+F/P3vm/KKVAwXtBJAStjkE0tPvsNaMdCagcIJtJtyMT3J1xQzOy3dE3VlsbaA2DFCke0xpSvMGina5pwYnCOMQzGuimLqV0gM+9chlNfEgK4wtdIOQNln3MUY98DFFy808/smLbKJiWgKfqBKJ5lCz7lsjgGlJQ/ESOyq561wB54UW/HpphbSAvN13Z97uztTQKU9IC1N79NCqlG5EHrYxwYnDcydvt30KamAfWkCmqavLMFzLgDtdIlyDlRqp3H2mwBdgbEyEtOpOVqfr0WxKZnPY7oyt5xJb53BBCqWvHEyQijfpvLehcpp/t0MsuXp/guwmMKUt6iStzOTSDJSSJ8AuYl8N7fWKpiMCPeX1hlMzbvjnbCMKaadWKkTEGoQTEkMNuU80jQGymdkJQSdXMYeSrop5HNUyWsFVIvlekyX/xawOOVCinKOpUSrCnZkhKQwmYFMY0COQV6bx9reY1oD0h4oau1XQnBfDdApMwEBIhBIaR+NweNTZNis6HRCX2qs3wj9OgZEszkyDBuS1TjncM7S6gZMxlWWmBJ126CdZfSBVd8x+BFlZTEmJfIwkH0gDKPoxYXCDkkySXy/FcdP6fMkKhiT6IYFHLY9JysHyqNUZKgMlRMtoBAiYz+QQ8YqTVU5VDL4MRRhWAGabJVQprTUGYdSdtczrZ20IOWET1HaY8kYZbBKkTQCkGhhcMiCkoQpmJNUbkoaLTzAVFozZTHQQVwJoSygShXRf7k/KU2L5rQyTZNcBorVBjJobXCuwjpBw1KUwV3VDoVonqF1aV8SBlpQBuqGYRjwo1SBtJF+/hRLqyPSDpmtMHB0zuSkSD4RfGAYA1FpsjH0o+fZ5SXjOLJcLqgWjVDhlTABUw4Qdy13SmmRu9xDy7WR34WUMNngrMVPrX/TBNkbv9oIOKsKF1jYd0E06GIgIiLmtauorBbx1yTszjFlae2KidALGynUFVZBFsEl6rpGW4cxmlo5lFW0xlI1C6qm5tmzBH4AP5aKlYyJYfCsBi/X1otGovw+ErMsaRMVO0+rB3tCnKXVPJQWR601pgAz0yKbkXErLkyKhLRkGq2obUVlFY2zGCKtM7jaYW1xHK2LFppB5rkf0bpYoAdPiIG+68QhtNDXUwikKNphoe8ZQ0RHg04yn8CgYsAo0fPTqugxWglIQoqFuVS0aLSWltG8V23Yq5poLeNKhdIikCRoAfn8pBXeD1hlGf0412RCTKy3K54+e8am6xkGjw8ZOwo43y1ahuAZwkhdVcK60uK+uVmPOGWpTU3dVhhrqWqLdVaqocNANwx0fY+PqVR5KfTyuLchAhN7KYNSSTQDy+KdckQlccxOU2A0LfBIFSgrEcqdtUKKYYVS0j6rhgEfhBXjnLh4zS0AZQM2ZhJKva2HML0vFQfeuYJa3jK13MbJ3KYEen3f40dPROQAdFb0aSCOI+vra9q64uj4mLY9IBtLSpnB+7n9SGrvArBYY8lemIYhigahs1bmXISsk+i3oIWZ1S64f/eOJC2149Gjj+j6DSFE0SPThmGM+HHEZAoLMrHerHmiIqMfaZetsEaVJmuLCw4/RjZX1/RXF4TtNXns8SEzREWyC1x0XA6JqMC9a9isr4h+SyxabN1mxeWocCdn1CYx5sTVzQo7ZNb+KQeffIxdHGIPFzxZXfD+e7/iydMVb7xhOGwdpgKVo7AvU6b3A0Np9UUlYjTzHJla440xVFWFMQq0FGRWG8/nj59yaQwqHPLq/eVOPzEXgNWPgICuGst29PicydaSjRVntSxFAWUUTos7ssmZYbtmdAasJqgsx5Yi3TCwXm9IVeZgscBYYR4PvqeuG06ODUfHxzhnxY1ZiYh1jInGWe6fHhN0y6ru2aLxXcQtKlx1wOHiiNPjOzT1QvZmPSXHSkDNXNj4e6nUFwN0e4/9oDf/5q+/9O++8JFf+HEOv7/6vfPnS2BJ3iUs/6Tv/EceE4scJWtV0ziqSvazagGjgk9+8hFPP/yIu83bNKdn9F1g0R7xjXfOMbFGR8tieQI6YS3E6ysuP/l7PvqHP+PJ809o751yeFzzyhvnHBzVfProgqAC/+p3f8APv/cHbK49f/UXf81mveIP/vD3+YM/+hOasyP88094vH6CeeOUI1Px/hi5++p93vjeN2mWI5vxlPPXX+Z6yDz+dMO4HWmd5eXTI+6f32V4vubm2YraWb725muMV09Izy6xSvHd732H7712xM9++f+wufg6Rwd3+MVP3+cv//Q/8vTzNZfPt9QnNQOGVZ8IWRPDyNaPRB0ZdWLVJ7Yff0yOI6bXfP3V1/nPf/xtvvm9I6oUGLqnbHJDJqG3HcvNwOXlNa+89BKvfO3bHC1f5uy4IvER/Qef0x4doQ+OuczXtOcND+6dc/3Rluefb+kvVvzqp0956bUbTo97qpMNeXFCyoqDg1dYtC02XqHPLN2zC64++CWLk4A9uC/u8jcbFgvPsnH019d8+sG7dM+e8eaZwtKAdhgVaZxliKkI8Yu0xj9S4P9neaSU5r1oSlhyEjAkR4UfFVYd8srLd/jWt19Br68I2y3NYoFZLKgXS3SlMW4BSmphxkzsIEgxYWxp9et6nHVYZ6Worncg+G+eqjCSu5srNtseYytOz+5zfvcMZRxV7X5zGbiFX0yJorq99Oxl6SEFPvvsEe/9+lc8evQxmcR6veL66pJ+DFzdbMjJiMZnTOQgRZBlZVk0NccHLffuHPOtr7/Fb3/nHd56/QHHxy11a9FVzdBndG6xvkKlkZBu6McVIY4IWaFm9KJdNw6eawzDtoeQyGFqMRSpgmwMSZe9wDkyIzEFTFaorEk+SQ6npL21chaMKvGsxHZJeRRj4foEVNwS+hv69VM+u3nK0+0Vl8Oa67Hnpu/YdB1xcgaf96tCFplYX9xe9aZ4JRXShAbRjUOKo3Lpd1pZwpKWCHEiLQjj0ZZ5sGOXzV0FZbwYY8lJjAykAymis5gQpujn+CoVMf6cjXxmOWJrbWlXldxGT8XScl6iQZYlNq+qWXQ+Z1P+pvwrxegULLp0lsWcpGhXNOG0MSJNMnWZKTXHbjNErdS8L0wdKROAPv9ea3TpIJsIBl/U/lpmENYosrbi/BpLw+MMwOdbn6t06X7KWTSa9WQmIF1Icm3iHtuK3d2ftP0m9MpIi2hmwsqn/W4fSd8dc54/asrv1bQ0zODZhAzmGTqi5JciwRRTFldgJSNK7YFT8zWenmfK1o59htrryMui/qUnoLCcn1z7LB1oWaQ6f+PaTzlDyqgiMaO1kGNiwVDQhU2Z9wHDciAFpcpR9My1MnsA4V4bqtktZ/uRzT4vMSFmdRYZy3m+0Lt7L5dYU5xAbgN67AAvMU6hMNXEsHMC/aZnWYML7DgDaeUDkwSJSgnBIc0u32o+EVUAXBQlHxGIVMZIlHlXul/i9Eal5nkFOxxiak+epRv279ELP3/R46vhu6TEBTDs2siMMSIbqxTLZUNdO1CxOPBlcRW0NdpZNmkkjhHrhe
WjM4x+LLbembzdipsSiiaBNY5F01I3jaDvCqIf6Lcdw+BhjNLznGTx7bqBkCNJGbLV+JxJfcc4dvgY6funKFOhjVT0lQZjy2aSIHqBSduqFUFN5yA3ZKuxRhHGgMkTeyHPAYtSAkzFPA1JNc99yiDTWuEQG+msMl4posrEYjoRsrCNxC1ItJfEnEEGlTD3tOjGFRRXGRHflkWdGcXO8xjJsngbqVxEHwuVe1p3dKFoJ5S1aOfQSlxhQJGCtH8aNNo6bOWEZs8gc8YaYcCR9wCkQoHXEyVZPmccomxayoieV9cTfaBbrzk6OuLg5JDjkxOMNmhjiMmTYyKoKDTfBNlUt6o+MacJuUJbI2LgIaCN2THNykKbgNnSWZfloLBACylWzlqLrlJVWZSuyQibzpNRtmEMgRxDYUdKNUqrjLYKH+SYEzL2fMokNN3Qo9ZGWEyTmK1OGCPOScY4vA2orp8XoQnAi8QCEBQ9QyRQmtYPWXZKa2exwFZKFt8cpT1g2tCVYq74TLphuYBfzhqa2mHR1EYLKw9pi6udpa4cTVsxDB032y1ZK1Tv6UMkREm+tTbEGBi6Ld16Tex7MdTIChUDja3odcAyMScTTmes0eI8azTOaJQRQf3sC4BVWkHVpBkybZiFgzq1vRsL0SvGGMnJo7TDWFV0OGwJLPS88Fpn0dbhEpye3KGqB0KMjD7Q+4EQA90wkoHReyyWg0VLbSwpR7p+xKhMW1VUlcU1leiv+MQYRoYxMHppAYw5Y43CaYXVlOAhCNBO0fLKAroYEspkskqFEeohSxEhZQkuBXyU1oycZDMQJziLNVmCbSVgfDay+8a0EcDfgDUC6KGm4Eg2WtlXbrdSy2viZqv17WBqag0xxmC1gGx1Ldpu2+22vF5KDsXwJUURCnbWslwuOTw6QVlLuF5xfXnNZujl02NAK2nFMigpkoxBnLmtwdYFgEyyrvkQ2G47rq6v+fjjR+ScWLaOWIKWm5s12+0WlC6GR5BDKtUzMd7p+kROnjF4mk0jLAEjotXajQz9wPrygvXzp/j1DcQo5iS2YcyR3AXcugfnyATW6yuODlqWbUNWPV2/ZdgkVFJUJtLW4JzGDonhYuRmm3n0+AlHd+8QXWZ18YTUJ/LYEYeeiMKoBMEzDFturi5Zr27EwMMW9lthBJqiKaqKBmTOiVxc2oYYeH61wnc9OVyj0ykHbSNYf0a0KWPAWYWpnIDFIRALaxxkHLZtS1O1JB+5urhkWG/YVBU+ReqDBcuTQ7LTGKs4cEteunvOay+/jNNWGLpOkXQkdSOJzGilDaPbblg7TVUlnFaM40DerEldB0bz8vkZh+0x3ajxuqWPhuODIxbtgYzliV07rahT0LUXKN5+7BW1psiPF956KwnYf3zZ61/0HVPo+hXv/5JffRGcOL//Vn7xQgbwVR+6/w7F3KqkNVIsKTVQZyQ5evvhfV4/PuD8zjHW1SzsUlqJrbCYyYoKLQGzFlmN7cU1YRU4WbxMTgd8+OsP6HzPt7//Ju989yGrVeQb3/h93nzt+4zrmgdf+x38oHnlzYccvnwAZkuIFenNM3zjicpxd/gWDx6+Q32vpuuucQ/u8q//3X/Fvbcf8+x/+3M+uHmPxEB9dofjN19hVdVc+8/Rrmdp4LA+Zu0GnvZrNnSct4cEv6H3I4e1IzQ1r775OotBc31zyefdJYOO9IwkU6OyJfcK31uCMYxY6rbmpTtn/Kvf+Ro/eustTP+Mn/7Pf85Hv3zG2fLbnN57ievuguHiOVxv2aiRb379db7zzjkndzVuXLFaX7DNEd1UpPYYvzqA6pj65HXePIncPfk5T/2n5Kbm9N59XHUkGrxao9xA1p/jEqgw0BwsMCkSb0b0dUDhyH5F3F6CGfBxzbiRvTubA7Q7Aw5I/QZ/+Rn1QeSlw0OqOqAYEZfSf/nHtO/EGOn7HuccVVVJx0qGlA3WHXF0dE67dPTr4gKZFFpVVE2FctIpMLkDqly0oMprwhI1OFMJK0oWSAkO5Sh+88Byxvc9z58/Y7XeorRleXBEtTgkeDESc1V9a57uktSi3VU+ef9bcsr4MdD3PZ8//pS//pu/4e9++vd88OGHqJxYr28Yui1ojQ+RjEFlkUZJmCI/ULE8OeCVN17loG0JzR0+uhx5Pn6K1iM+bfEpM0YLwWG9xeDRZk02G7IZRAajU9hUkYdE0JaV0jxaeQbfMpoKugN01oQS94lwukWbCpMCeE/wkc8fPeav/uwvuXz0GQfG0liHbVtivaDPNcNoS3fBDd4PpNHAMLK6eEwargjDJU8+e8yzx1uur695dvE53o/kqKXRYI8pNeUZUMCGvf+eWU8TyycXvW4lchchSqHeKiTBLqYEOk1dTYXFlUsuMzmXUrS+CgqglOhZK61RWkwPmQroWjExorQSTW4tlK1ZS3jqABInWWlfnQA6Zvgno1XRVi3faYzkPRHpGlOFzSPdlLkAGAW0LECH0iKLpAtzJ+0x4/bbW/dBlXK487lO1/iWhtfea7+pq1XIPDEI8FayzJ00ipEWXZUxSaOdJRlLMhXOVhhlSUKHL/nQdGyJnOW+KSVyLPstx1OfmuSjutyvqQNE/k2qYpCLrn6aSS97KM38XbeAlXJ+U8uumtgG85iT2HuGYPOXl+QowOYkqyJv1rNMBxlyKGYmCgGvJq09Cnnnxc8vMYtSIp+T8iCdF/IqOpf2c3IhCO3Wp6l5OBVgCiWGmqqclxgPFk36EoDo0rI6M09VAecKRS4hxIkXY5np+kzA1/78miXAdBlHc3wz3ZvCblSqtOTu3Tp2gJzSGptl7BjEHFLLtiHO5UZjzNQCK+u1QUnunctzAcBtmdup6J0bY6RjE4oJSRkfpWNyGmL7Y2a63188X7748ZUAXQyBnBXBB4gZp0T7pnaJWikOjo84qB0sDT0DKYgzqastWEU2Bj8OdN4XByGpYBDS7OZotBHAbCz0YAc2KibRx1isq63S6KowPjBklWnbBVkbTF3h2pakFeu+Y71ZM/iIoSkuOZ7ed/RjJ1ocIeFDpttsSUkSXWU0zoits1bC7FkuFpJIV1P7rBFWYXHO8aG43ShVxLeF5aOQBdrlgFEylbTSjCqXJWaa5IqMIWUrEy0JeFaMvwVYSbFoS0WMErBCdK0Uzrl5YdrRcXf/b4yRNpacC9OpmBAg1N5hHHHOYJ0VwKNMWFcAOmkfFoaLuOAK8KAMGCuAZooiNm5KUm6UIgVhdVVNSzckhnEkE8iuQqNwXUdS0C4WNM6g7E5fbqK+al2Al3ktLbbqU/WsrEqz5wa7RWOasLG08KSycOSp+qOYK7YpSdUvRS8bpFXFVMKgtBG3VsnsSeNA8qL/V9dOGEYpMSbZ/JzWjCkJMy5GnHPYJAFxSpHkPc6Zcv01TdPgvaaKTlgrWss9dqIJOA5htshWpbIyUdJDCPPiYRRlEZsYnBpl9QxIqhyJCFAYlYjFpywadpXKNEbTOoNRIkwvVYyAsYiDZRjAVkLDN8j9Ngpta0BcUMe+xyjNyeEhrc34MLLaRsKYGQdDiEoMG
5xl0da0jaOyCleJQUdCgC2dxIHVOXuLPTjpFMg6LRp0pqpIYSQYcVmuDDRVTds2tE3NstIcHx6QFYyjx1Y12lY07ZLzu/dYdz0oaXW8vL7ianWD956YEl0/8DxdoZWmOWqwVizjt31g0w00TUNVVYQx4v3AGEZx/S2t2dootJbWZlXYm6m0aytKa7Jz2JyxZTOISVoRYhD2T3phoZ+sv6WdXpFjlCCwbB4xSpA5VVSbup6DOrI4YO+3IswtDOo2i05eE927NAN6Zeyx+wzZfBJt21LXNavVimEY0FoLCyNrnNbUzrBsa+6enXJ+9y6ubuhGjzMaP/asu16CAw3joCB5AcSToq6kWLM4WM6tFn3fM4yRpnXCcOh63v/gA66untMuKpSK3FzfcHl5Rbfd4qqGdqmLQGuGKBo3vjC9jEoMgxG7dAai0mhToytP3w+sLq9ZXa4I3RYD1BW4OjEkT9951HakWR6IhACG5cExBwcN1+s1nQ+EIeAD6DxyclTT1JbKCZh+vf6ATz77lNN7d7j36j0aY9CVI/Vr8ihJYc6JcejY3lxxefGM7XpFZRRGi8tX8r7oyO1kAFIZZzlHYpQ9ZUyw2nQ8+mwNfsXrD18iFiOc6Z8urSbRB5I4nJC8L7II0i6ckeKYURrXOKwx+M5L0KhhDCPBD/gUqDScHbVoRDNyCp5j5airGpUNi1ra09ra0dYZ6x0VhqRhHHtu1muWZ6/y1utvcf/Vdzi59yqXqwFb1bzxtVeoWyuRjIIcA+hcYma1Cxh/A+raLS5fFiLl+bdfBtJ99WMuBE+b1IuH8EUfkfee1O3nF4/kVmB6O0r9Jxyl7JsxBRSSbM6E/Ji4d3KEPj3ETiwMjRgtaY0IvyMBaKmkV4tDXvrm96kWB1w9fsznl5c8e7whmoGuj7z1zbdoF3c5On2b+ugO1ckBb917jXGb8MOW7XjN4jBQn1hOl2/w2eozbjY9b/7gW7x6/jX0ssHWR5wu36Z2NRw/5vynH6J+/h53z8/44Q9+yNe+8S36baZZNhwdWh4/fczLpw84blt+9uufsfqf/ke+//vf5enzp7z73gcMP3mXbkj84fd/yI++/h1++Q/v8n/8X3/G4w/fJzlNHwNNfYixFaGPhGxp7SFnyxPuHjScL49xET754HM2W8WRus+JOuBhe8aps1wFxVgvuX9+yNvf+DaVtYzXF6gY0HXFNdc8vf6Ut15/h1ffPOeTX13w9z9f8bU7LadHd7jrHPr+MZef9lh7xfmrJ2wfP+Yqfkp7es7p0R1gRI1eikbdkicff0L/6SXt+SGLO4ZmcYhKisXZkofVQ8KzSHXyBlBz/fwJ733wAfXRAd/8/vc4ePUuWfdlL7F8VYr5z/GYmDzX19c8efKE5XLJS/dfojJVkeLSGNWiVc04RMa+p3IOe3CIqhegR4hbpC1Q4jOFEUZ/ThhriUFiZOuq0gUkheWvnCFZHGC7bSdmDP3I6dk97i+PBFAxU1r74vV5gdWVpYZCzOSQ2KzXPHn6jI8++pA//Q9/yl/85f/Ns2dPCKHHacXm+goFotUbB7QSJn/WFcXLh9pWpGT5+NEF2tboj2/kWumIMgMhbxljZPSOOBjUkDFpwLgV6BVJrSEGzGiokkV7cQ0fqopwdI578A62PiCsV0KIGIbZEAttMLoi20irIpvxhvf+4V1WT59yWDkW1rJwjmwtG20IekFWh2AM2gSMttjU4rKhIpL8ihxuuHz6nNWlZxhHtkNAKymeVZUTqYuciTndSmxzIY682L45tbKC5BJKa+kmQUzzUEVORtAFSfRTkC6W6MWNHdGnm8ZoLuYHWU2sGDMn7BlE71BLccpkTYrCNLPWzuwb5yzGaLzPUnhUFBZnieWNrPI5R9EbrxzWudnUQWI2jdF2ZvFM+70Ub1NpwSwAR1Kze2dSQnLQRbpkHxzYac+Va7cHfMyMxMnAgp0G3ZcBDhPQEn1AWbleKeViRiUAXeUsRiVSsujKkY0lqorKVVhjJU8uRovx1rHEuWnAWjUfc96ToxCcatIhn+7QBPWUgjSKFH2ZsdPnK2YXWMXcrTR1nswkkAn8nWRmiMLeo3gVa1WcXtmTuviNZaKAaPI9WSGGZFoVc5NEDkJ0EcAzFUSrFMCnvHfv83MB82Y+cGGQKoRthyoElbwzgJvCh93zblxYU+oYORbN/Env27ww76a5JB80abXdWh3V1Okhx7UjXuw1NBci1A4o3YFb0/nl/aPda3GfpY4mtm0x45lMeaaWVvGkLGxNbcr10rv3TZ+5lyvZqSCQPKHkXajiLzAhnOzAuh0GkURiczqPvMd61ZoQwpcMjnL9v+qXC1cLCOVkskcv1t5Ji6jvkAKqbolOiQC1rYTlYITa7BrHuBU7aoPCYYUyGYVFYpSIcGoUB1UzX/T1upf2NCttnDEJE0ClHRiGUcIC0xbjqtKapKiqRJOhip6ldlSVQemKIVj6saLzA10f6IaIHwNdH9HOYeuGlCNZJbQT98/GiWNsMpZkteil6V28LYSUPaFADEoLo01nd9B1AAAgAElEQVSliMsZpURPLlorCY0xeBQxK3xMRcRdQ5JgIWd5LeVMKDd233VnGojTz/taBpk4659ZbVCFXDEtqCEEss6zULzRu8/cTQLZlGKMKONwVU3Mma5oa2UlTp2ic5YxtjiOKjFXUEmmUF031FGz7jeAwpf7CDAMA6ZyhBDkb8sGA8w98tYYktay4OQiNBogBM/oR4YkDlIx7S3KWpdFTwBDYZnI5hynzd2I503MiSFGsmrEeTF4tl1PRtyCkzLoujjEkgSwylA5jdEKn6RdVPayXMZeLQCZK5PbGPI4UEUBa0aj2G47+mFgFTKDD/T9wDZAo2tiW6GrMvGZqn8FLCzVguk6SVvwbuNMeWeUYLRQBq21DINHmRIkaDl3pw1WCWvOqCmRT6BM+S6pZkv7r+HwYMl29MQoDLCYYXGwYLOS+1c5xzD2VCZzdnJKWjq23ZZlK9RolTPPb1ZYA3dODzk4WNJUlraxTLR16ywuK2pjqENEsZVq1CQ0oKblWao8xmjqykKqiYOYXbR1TdvWHB0ecnZ6hCt5u3aOykdW6w0qwfHJKXXTcHp2jnEVkLler/j88Wd8+vgzVjc3pJgZQmS97rDKsKgdxlYolRhDohs8Oa+FmWaVVE+tlcU9BJQOhOgZxp4YA7a46DrnGIKXFtg81QQRNoA22MqiTCXrgVIwasZxnFuZpxbWCQgz1sz6fVDaTZGWhBR3LbD7VcBpvZhA6i+q4uSJ6VeqRDGIrp4AP5rohblqrMEZudBt03BdQDTnHDobnBYH26qqpH0pi0RCCCMxeqyVFsSUBFgOIUjiUK6JYmSxWFDVtejE1RU+JlAR0fMTJoK1hsurK1YbRdM4mmYx68cMw8A4Rgm+cqYymtpqFlWFITH0G/p+4PDkDtVyKcPOGOq6oRtGhpAIaLxyZBRO1wTt8IVR4MeAqTxVU9EuD8nKMoRMROOzBlOxDUgQth1xfc+ySdjCFHN1xdCCGQ84f3Cf7aAwYcBGj06Z
7XrFxdMnXF08ZX19hS7rOdbNe4DWhmTy7LptrJ3dlrXWxBAwVYNbHNL554SsSWhpoUG0RWP0ZCWgH4WhGmKkHwbR5HM1dV3TbTq00hwcHFBZhw8eM2q6rqN/OjIS6TZr6uaA2mQBIrKRAlgaySmydJpRGcKi4XBRU5lMZRVVpcCL7id1RXKW3GqcgVdfeZkf/OEfcv7qa6w3Aa0Vx2cLtNUlkQeVs2jaaIN5sSp+67HTmFGlfWZG1KYYcdrY/7+gXLcmz5QOTG056vZnyvawY75/0UfsvW8+dIXIAMzY4g5g/KJC7M68av9zS+Ja5EpQewHudPBlrdU6op2wP4kJVACriMZKG/NMGhFgOqNRdklVvc4ph2Df48r/hAcPX+b+Ky/z9re+ycHJKdoeUjfnKL1EVeDDllX/mGdP3sc+3/Dqqyc4m3j6ySf83d//lJPzE370e69zsBCTClOdYqoWq+HOQ3jptQc01qK7zObRiifmIy6eXPDZ1SOeXX6MaqGve8Zxi7Oa9/7uPS4+fk59fEBHZDOsePOtV/idH32Lt77xDY4e3mPdZ4ZO8/Gzx4QwwsIxmgQNOJdJ+YZuk7ipjnmy2vLWm2/x+o/+DfdfukfaXBGeP+W4XrC+trz62iHUkcWDt7j3Wz/GHN1B6YBOHebBBS/ffZ+PH/+aD67e56h+FZZ3eOO1Nziylrp+g9p1fPz4E3727rvce1hxcfUuq8v30e2S1775fYKzKNOzvXzC5QfXfPLoM553lyQdeXh9lzd5mydPt7j2GHtkOb77JvmwgkXLJnQ820Ryc8758TEnD38bfXJONn0ZeEv+pQG6qeh4cXHBhx9+yIMHD7h/7/48FJ3TYpiWtBTANTPbKGfIQ5C4qcRjMh8UGCvGcKoAK3mn3auN+c19b6/YrRTCPg6Bqq45OTHcqxcsDw7k87QixiAaW18I0u2mbQZS8Dz66DN+8h9/ws//08949OmnPHn+hF+9/z6rzQ1dv0WROFq0vPX1b/D7P/whD48qlnGNGjOZY2Ku2fQRT8a0luwsq97TRUcfHFE7xhzYDtcENli3IAwNjT5m9fSaD379Mz5/9h6YXnKSEDjUjioFKhI+R3rrsdURKgZS36PGgE8duR+Kl4PBGkfUljQmUkjooBg3PZf+gq0GmwIO6XLI2rJY3uH3f/xv+de/+wNcm7BG07oD0hA4rCoWzjBsr/nFz97n3/+ff8fjzy/ZcMSoB3w9MKZhbsU0WphXwhTLEMRMIsRd4XAC80iim7XPFBNTADENEXO0VIzddm6tqMmJszCwCjAjrZyTDI/oVIcYxVSwJOyThpxCUVmLjiXO0qCUKa2iae4CyaRZwzknkaHR5dxSyiidZ0OHmMJ8jEpLbpCiGL8wwRZKY6wVnfQUJD7MUgzc15SbXVz3CrgT61BPQFSe3FT3WHaleDv9/TiOc1vuRHiYc8pc9NZjBm0K4C9xrrNGQDqryTkSsmgLY6VLJHgvbbExEVXeuciW79Gl5RZ2rcxQTNiytPKmcvPkHDVEPUN0Uy6h9FT03q0BAuJkZqWwpIrz86T5J7FWSkmM+gqg6UzRyS5Gi6I/PWm0vQiCyXNMeS/EUKWQX9ailDGomQ0c1Y7xlrNoBVsn0h5mPvwJeM0zoGj2rtVOY67cny8KHL5gBcuqgGdFdCurSd6mrJV5j0VWQLRIxlhH1pLjhMJcnboBk0rEqfOukLdknsfdIUwopNo9T9dAvj/OWtrG2L02cZmPzrnSJbILqKbrIzGOKa7IO/OYNGEKZfynGAmlWJOSuCSIxJIA5VMANutWlrg8K0Pcyc2Vr1fzfZCiOl95D74SoHOlJUrrAhgUtmUCxpzY9B1bX5FGzSYPVHWNstJ+aJxj2/ekkDA42kocHFVWEAU97botidLGZg1aGcbg6f2W3kdskomimVpsZCAIDVQWz5gzvhsI/YAvWnAhJ1T0NFXPZNmhSTQ2Y62jco6mVaw2IyEHXFsLA4+MihWuslijsEqSqKQNQZeBmSNkTY55nlSUttU4UYspYvwEdBaTBIVUXsSlqFQHsoB60gibZnRYmlrTDnFmWrAn55zfTKonhH+68TFp8ijsK13auyZK7AR8jKGHaLG1K2w+PU8elCl6oVrYfIUJllVGBUHztap3iwGUKlRxWS3imFlLkmhTKjoJssE5I6xBozXGCFsP5H7nlKSddpRqkCnnZ62Z2XApKvq4t6lOR5HlOZa2r5gR/UIdiDGX8xTGlU4RuTuI4L6zMzswKSNj85adtIi0Jy9sOuccMSSptinNMPSSx2iLdTJJjREnSmtBWwh+IKZMYxWOAWsDOhY9h3IPKRXe2VFzqk7paYOUjTDEaSErEH7Z2ENKM4tRAovSjiQjRyyy9ypeKSdiAHRgcr4lByqdMbbIi04Lu9KgElpbqkbAlWEY2G42RB0hdig8ldG41hGPUhGej9SLJXfPTjg6OsJZTQoDMXpSnnTpMlYx0/2dcyUoUjIuIkLX1uJqrPJkzJIgRgyZyigaq2nriqbWxCAApTBIpVXZaEVd1xyfnFDVLcoolocHAswi7kTb9QY/ejZdhzUaaGmspnYKZaR66eoa6wQICMkT/Cjaiz6wHQbREyy6hlNAKM5knnH0DJ3HKWhsxrhEdjIGVMoQI9noneV8jLd1QJQSMwgvcy8WHbGUBIQWgBphg+4lI1OwOgW8t9ePF6qqKZWqsZ4DX2lnLwGaleOJBbhLe3+vlMIZS20ddWWKnp/G+5HkR2klzgnnLHVdYTM4rSGJYYYGjHKlUiymMSF4yAJC1ZUBdCmSFLZ1ub45eZRqUFpR2YqYwMckBkDBk7RGVxbT1CyaGmcVIYi2hPeRIWZ07ehHXwC6TMjS1mqsJRrHdutR1qGtQxV28jB6QlRovcWNmuvVhm0/0jZLjK2JQYmhUPIYHzhsNFZnLJ7Ubxg3l4RNje8h6IFnn2WqumKzvuHpk89Z31yTU8AqMTiyurAs9yqHU7A4rct6Qmm0wVQt9dKgR8/y8BhXt9KeAwTv2W63+BAxKVM3C4ZhnMeE1QZrLSFKkkCp8prZxKOCHPApEaOYDGnd061vyONAQmOSAHBZgydgyDiVMVkcww0Ji7T5G11A75w4XLa88tZr/M73fpuHb76KO1jSHBRAzha2eUQYq1qhsEXKYb8WPK2TtxEv+X0q+62W+ETtvS/vvf3W50zzZu/lvHuS/b5oak6Frxf+ZgITBCybVDKlCjglbCJnvGvV0jOyJ/vo1LAzH+YesHcL3Hvx+Pcq2LunvT/IGVSUGEQ50LXo/FrPqBIDGZVHHAmrW3y0+OipjMXYivrknPOl5eBBzdfX79Ac3mNx+jKKlpy1aAMG6McVq+cfs7n5hJvnv8Kqgecck1Pg018/4tkvH/Hg+IyTZok2lRiCJIfVihg3DP0WH0dQiu125Bc//xWbz66olebZ6jFPb55Sn9QoK+7ip/ePuHN4jN9kbj7v6So4PrvDyeEZT6+uqJ58ymsPXuFP/vjHhLXnf/8Pf0a3WbHuB4YhcOhaDpY1yngaHajwnJ5UvP3
xbW9rffiNAgfmhkQpqI5UXVPZIjLh3Bwh1MQMcr2bwiwjnuUb0vxKWkVKAT1DuaKWHkdGJAGL1pSOwBrqJWVzxubyjLRNRDew6I8I3YD4CdwWcqJuR8YxMRVQN9CvDun3jnD9HvgBLQ4tDmJuqFoFjag420NveundBBJeaUDn3qVowTmlF0VTQnNGQqbqxMQlOW0Z/G2WR7eNddHvcRT/nKOhh3HD6sE7HL3xOt1qQGLBhxEkUeszxumKmiqd3MPlY/x5hzsPXH2x5vHwOenpE37xsx/xi09+w+HePd5/Y8n+YU93G/w+HAyvE/M+BMfbR5579yOXT3/Ew4eP+dd/9h3+4M736Kc3+fVm4pNPXnA+nbPYZuRFZuWWUCNpo3zt9i3+4G7kg88+4/HZJesNXJ56vH/A3aNvczklnjz7Dc9fnLKplYCnJ+J9pq6fsZTnfPu99/jiF895+MkLLqZTdHGKu7okXfVMJ5n0xPHo6UMujnv29/cZ15mzZ+fUaeLr7z/g/W+e0z/+gtuf/4ajuz237kckj6xPlRdfPuTZh7/m/oNvshz+G1SXbB4+proL9u5G9g9XuMMFKgNoQK6ugAnZs2cIvySHwEYz2meWtxQ3rYk6IosNEgSkMzbJVYXQgYsUcUzSE8OSV56Yf9HXrpF6pbm/+XvnoOuFbvD4KPRD5M79e6TtfWI/cO/BAy7Wyvn6ku1mCzfgAHQGnlvD/CpIJ3Z21ZpI44jzwrjd8sEHv+EXP/8555cbVge3+a//+jUevPEAEWm2E86a4wZA3ITlX3q22pYRgqXJzo1qniq1d+AhdoG33nmTP/3Xf8Xnj5/xix//lKTOwvZEiRSCV167FXj9YMHF6SGRjrfeeIu33/kBrlvAuGQ66fjlTz6m/M3/ycefKrrekl1iQ2GMQvVKyoksiVwTU81QhT70uOpwZlxMUCUUwRVj7WYdKTUhuZgXplPz8vYOiYFAj4bK7cPb/MWffp//8b/7t/zZn/6AveWSkjIh7tGHA+rU5HPmcWPKnps3wmP7o4542eL9mlq2Bkg3bzQHRB+s3pPr9NF5kh9C2K0p1HwFTd4HFupg4W1mFZJISSx5U6zGCd6ArroLBrCbuGuo5XooIzNAuwuKaA23VmPdY+w96ztmKejLy29e61b/XoMB9vbtHK9adrXXrHSRokhn/aA0VpNzxnR6Vb5Yq7H1lOvabk5a/Spvr1ef9lcHuPbZr6WCr36Pm8oeVUtUf1V6qFgiZpGWYlozuZi/sNOAxGR7LGISyt2Q1gAhsaaz9Tnze7Z7ff0Izl3Pjb5JZ4GtMHPnpbI7B12rhWYc1ZhxusP4Z6BUtD33zsFcj+v1gy83gGUrL37XHtqsPH7Hn+ebn+jGsHG31tqnvglk2725VsuYyqR5ibsGULZ1pgJD1+2Y8UhjmDrZAadVSwPo3G5iUttfrpj0vyJolWuv7RkEpZjKcAZIzfCcmY4qzM9sG7zjmrmwxzlwLti5RnzprLh5ZtyUat9cl/PF9+qo7maYpLS20u5jbMEVc5ja7pSYn29r2nY/z8I0ZAfQzffpldvKDACrawDsV01A+e3n59XX/zNAJ+wKUTBGQmweRrkUxpyR9RqXPTmOzTtIkCEy9o6x90zOWxM0WcphzoVajPYoAp209MRaicCedyyGyOVo5pilpQV13jcDcqB5IG3HLYqyt1oRuoBeXTJenDGtN80E0DbkqSQqxTjVPoJGuhCZQm3m3h7f5IdBzGOhNBNsVAkNWfbNAD20dWbbTUEkEoJH1UIfKI5aA0JbyGDm9rCbgtjltYM+uIJKaUi/a4aUQvKKiloABdABXixFMZdi4GAMxi5yBkxZ8mNiEWF/EVkMPdE5gjgWXWeeGCIMw8Bmu+VyvWadEkkbMCZNcimKtMQgzQlyQUumFku23U5pd0BoiFRn8GNGSeLQbJr4zrm20MRot1RjStbS4qh3EJQ1BzVjptcNoKPuNs2i5uuZdU73dUYprRjoqEJt1HDzwrNDW2rz7KvVvo65cWt+XCHgYzS/gRjwPlBr3iHxHo+EDqRSi4NsUe2dtwYVaX4kLlOq0nU9WjJBFd2ObJIxTiQnk7CpJVPWUpmqpQ+7JhcTb8EotSHxcvMZbiAyGBjiUPOQofmCNJZinn3qMKNbNwPMXAOaxrKyKY2FiZgUMaUJmSbCcqAGT/aByZY6TjA2W9eRF5Uu9uwtltw+PCLIhqcXTxg3W7wqXie0quU1Fpv2mBxO7JlzkIqxOBOTFUnBzPVjH3f+ErRJzuyB6J15TgxdZLXsWQ4dOY3GfPMmgcUHQu9RL1ytt7iwNu+wW7dY7R9hsbrGEpVa6frI0i/o1hZoM4OZ84RNZ++/atfKzVO3diBLK9S17RHRwbLriM6K+bE0+aoPdCHQxUA/RDQJWq5lgwr4anDSOFqqWkrJ5BvOM3uFuOYbiAiltr+fE6UxEmKMxC5SK6QpMzUJypQSWo3Z6Gevn91EuUna54JDPZ4WEkRLa9XKLpbc2/2sWnaFjMe830rO9G4gOAtPWA6R3gspb1mvRzap8Pxyg/pIjJ1N5HI2MN6babLHCmYhmxUGFY95VXgzmGyMOWPvITbLmlJGJ2U7bqjq8T6RpkQeehZDZLHsOVj1HB4c0HlLE9cbIMY4Jc7OLzm+e5+9vX0u140dt0kM/YKh6/HRk9LExYUNIUIQYhdYNfn25cUFm82a4/097t+7w8PHZ0yXl2zzxNV6zeXVmnfe3GfoV4zjIVlHki+cPXvGVYLzbSJrZtpcMcTQ5LZKiDYx3UnByiz7mqvLGb7H9ruSQSteIc5rxtueN5WJVFqYTJP0BN8RYyZ2HS6EJuF23L9/n8P793l08pz1ZkMqivOBEGI7xx2mwTUWvCqUnAnOcf/uHQ6GJaenz3j+YuThF1+QpfL2+99g79Z9pnHNdrzkylWWoSeWiY0kmBK5KtF7fD+w2j9g6jpOnjzls49+w96i5y/+4r/i29/5ISEM7Hzcdh5rtv/tJmxfJUmbv04ztWzI9QrxW7zLiEzAaF+n0nxt5jAG+yVtys/cPOx+VkFYI7qm1pFCBkk4t7T35zL4BDJRQRI8ugAAIABJREFUyxXj5Qs251d00uOWW9xij1o21HxJLhOb9cjVemS9yeA6Do7usH90ybA8JCwOkbDA+c5+fvV236UiwWHmIPa+ZuG77JQADUlo4KLS/HTrhJYNWq+QMhKD4EPhfHrOw+ef8OTJU+6vvsMfvPGX+LCgiGP52tf5xu03yeOGoe/ol0uUiZKf492IkFifv+D05AleIr0TTp+e8vOf/Iwf/f3fcPL4EU8/+IjFIvH0+SecbpTL6gjPE9/87m3e+cOvcfu1Y5bxiNXqNbQkXFyzjO/z2bfeY7z6hHfe/TqH+2/x4snA3Tff4MHZh/z0n/6Zy6stq9IxrA7ZX/WEReEH3/o6faeM2xPcAs4vHY8fjRzeuc83v/dnXD5/wsmXnxIvhCwLHANlVLSOiI5cPn/Eb8YzPvzZp5y+2HDv60ccAI+fXjC4yOuv3eVbb73OxfkJp2en6H
bkMC5x4ZDPXzzhZ7/5mOfbE/aGBfJ3/4lv/cl7/NX/8GeM44Zf/uNv+OyXP4bxKVVG7n39myziXdYPP2OdT0n5kLPtCfce7EHqKVPi8uqcuw9ukzYjF+szDu+/Tti7Q6kTuA37tweYIm4rXH75hE9/9At0v2c/7rGaBhaLAVcilxIZV/c5PFo0H1+uB4DX3UFrSt01SCIGwNjIwlo8PzfQOybb/LfbdxMx/+EbYNrcY2mxv7NcCscHnkW0wfiw6Lh99w5useTeO6+RPz/h0ekVV+OVDetRRC2xU3bdltz88fajK6Qx8/TpY7748iGqlWm74e///h/4hx//I2OBd97/Dn+axAJ15neutaXOt2HpV4azzBWmfXY3X0RRXGeD8CQwFWVdHffeeJNv/9Gf8PTRBS4ubIiaC95tGaKw3Pe4Gjg4OOD23W8geZ/VQQdhD8oR6fY+mzTwkw8+g5NHTOkF43jGppyTvZBjJbX3VRyUItRSrNfBALCqSsm2NYVU0SmT8pasDimGEUgRqs7sEaWWxOH+ij/50+/z7/7nf8ef/+AHHOwt7RIMgiOaA0u7C1X8ziN7R3sWh3lzuqawKIiMUDeIlp33csoF53Oz0oBSElDp+47VasXBwQEInJ1dcH5+RkojIZhPegViNJBnN9Rvlhwird7w3gb0mDdWFuGG6Skzb0pak25+0bO/lUmxc86tB5s/tBizbWb3zOf2jQHnTcsae12DZ8YLm4HkYtYfXujUWPQza+8aJBTrLdXQKJUGzM0AXbU+W6Ex7WTXX4jOK1bbWaDM6bCvAuf2ma+ZXa5dG23ghDablFkOYm7BBrLa0FBJtZo3em1hcYB3Iz4vcDWhM1uJpnlyNiC1a6e7a7fTl778eDdSu2ugXrv+M71B7d9sgFxtuNZKqBlUm6+vrZXZg8zYxRbI0Bx+dQaKaYhZA0pbXfCVHKnZT6+tA+defv+1zoAbSLDPMd+D2atvBqTM7qXgq+AITdZpQG+IHSLWc0kjr7SZ5A3gyfbv3cBeLaxOTcYzvyPDq2je8Y2BKq6FlmiruVTwKEWu748DvDfyRBciDqE2xr+b+zvs2qpU61fnkAYipsC6BlpfXoM3wHJpz1Jj42kVUwO2UBC5+Xfcy6nHM/OPmaDQrtHMNJ0/iw2ya/PJN4m2o1WZ8zKcz8WveN0EGE3R9NtBKzdfvz/Fdd40nMcFMWCiJGI2z5nkhUsyear4LOQYjXqZlZojCSVdbgnuzNhG3hh202ZL2mxY9ZE+eHpnLC+H4kslJyjrREwTrmZqVhBPkI7gvJlHuojgiXuOgpBCB7HD7/V0bkCvNiy8oy0fUmk+xSEQ+xXSrdB6wpQno7AGYyLkasbu5qnUDtZG6Qg7uqSQZ6R8h+o3tpaYgbIxljxSBNGApkLGPIqiF3wUWziu4hp1OdSE1kLUYghzqYioWdJUk6Y615JiSrEpG56oSodjcI4QHBmlqLAAet9bqq0z6ZjGDrxHHayODvBrb74R2u0iy0vKpGlibzGgpTR/BfOsSnkyaVyFPBkzJ6uStplN2rAeJ9ZpZJsV8QPTZiIMA6uDwzatAj9E1AtVlFQSXjq63szzkVnCq4iYrMrSGu2QmQG6UkELSHUEAp3vyb4l5ba2w0ehOm20d4eoM2N4MQagtDVesQfSeY+PlhBseGxBZWsPa2CXMIT3dNFZsEC7/zOLqFOTc4OQS6SqMgnQB7xrKUziiUS6OhJCRwqJCxKhpTxGPNF35EYrRmdwROdL1A4SZ9LGebqWC6XMXl4B5xNFR3z1iCxxlBbK4UAi2zQRgmOIAY+Sxw3T1ZY8bvElsTw6sDAXLKnK10KsBacjtUxEH+l9h8vtARsUgm3anQsEMV8Nb/pNciqICsFb0dMNA0xikk1xhBDpfc+QIHSXJiWtmb6LLRkZuuiQnPFeGboF0SleGrNKFFHz7nIEI166Ht8tkW6NeVx4ulVPDYH1dtvWh6NQWuKwEp3lK+dih8iYJlDzA9uWibLoKTlQpi1J2/zEOZwPqJjkT9TR1czg1dKMfGBbGkOuTLhsIEDtDNTPxeO1mEejioVBNINqVdimgmreTSeh+Ti0zX1OWu18MD80aDR1q4PHlJhy3nk+zEMHY5oaMJyrfRbfGIx7oWcIkc24xYnj4OiAXAqbcdsKUStSSirUXI0lPCXwyn6/ZCEDfRfBF4YucDAEKD1TzGyLUuWcy1JRLCXSdw7RTPXFGFNqrCTXZEXSDlfaZ9LmA+hDbNJak7ToZaFcXpFTRdVRciZNhXE7IQd73N7fZ3//kK63Z8J5Gz5UVUqBlJVUYLV3wGE3cH5xyenZBbkU+s6xWvXm/dECInwMDIuBEAMxBrqhp+Ytl9vCahFZ3rpFf7gglC1ddpxdrfnlhx+z3LvNrYNj1uuEeseoytkmoPGA4q84314yVdjWwmW+4my9ZbUcONhfsRx6nNTGkmZnQwGzoXEGyYjLBDXPmUHFwEudyNozqWAurLNfSWS5OKKKIy4GSudh0eHHAWmed6cvThlT4fDoFovDPbzznJ+eMyysKbvabBpwC9uLcz762U8Ynz/hzvExdw4HurDHevI8f/aYX67XPHjvG3Sy5XBZWZ8/5Lz23Fp1iDM/Qq2FLJGLq8pHv/qU3L/g+WdP+OyXH5OuzrkTBt67d5/j116zQqeCdz3ZQzZ+5vUgRwJahZoaC9SZJFDTGtyIMBHciMoVyhVVr5C6sUJUelCzfhAJ5GySfd9YG6Yj97vCTKWiOuIwH8lpe0qRNd2wh5PQBv4VyoQUpZeI8wPnJ2fk52vC/TeoNbO+uGAqW8YpkSZHkJ5pO5HLGdt14iI/ZP/W66zefh930IFsqeLJGg0gpSJ+Qa4ecbGdryOBRNDUisnQmowmgfZCYktKT0jjF5R8znLZoWPm6ekTHn7xOeenifv7X0f6RJGeqh7nlwz9Cl1V0IksI14m0tkp29OnLA9uka4K6WlhuP8Gy6N7bPMF6+4Fn19+yOnJl+TLfY739wm6oNcLXnz+z2xOA6H+IW88+A79WxGmU9wkNmEvlZIqZaM8e5755RdrPtm7YnPl+Vd//IC/XH+Nsw8/Q90bXPWncBz54b/5Ljo94vEXH/PG/a/zrW//NfL5C4arDRs+Z719xDI85i//6juMjz7kxbPHoFsqwuTA+Y7V0X384j5fPD3l4bMNF2fPWVw4/Kpya/8W53kg7Q+Mrw08HxNPri44mISjvQUb9fi9W0y65uGzDat9OFtf8uSncBJ7zp5f8tHPP2HantIPlc+nL/nk6m+4M9zl5Ncfs388sHf+nE8/e8ib977gG9/8Hhp7nj57xudPP+CTxx8wypa3H/wBt/feIvaOp+efslwc8e5bf8zx3n3C4gFHR3vI8cB2c0EZlGl6wWc//ilnKfLW9/4t/f4d8xsOvQFKDXMI3ppuXyuSGzhXJugd6oSJQGpl88A8YFSQct1Y49rwuzXCNxoYa/RmuxuHJ/PWUeVWtwHnya4wVugO9vD7jtJv2fpLzjanTNObDHEBWmy43KTZRZUoDkqT+AVhWk88/
uQL/vPf/R2/+PhDEOH147ts15HTC5Bhn5NLz8TSwAPAq0JNFoiDNC/fVxssAw2MQd5kY7VAkTYYhOqMeTGKMDphnSAubrF//ADv94gVFk4QCQabu2I9kHcs/YJ06akaENlHGXAxEPsF6wS5DgQ5YHAXZE1symT2G43FXkrFZUu8FibwiSSQVagS8ASTzE/J1CQakBqMaVc9UpWsUwN5Cl1f+fO//B7f/5Pvsre3ap+53zWeIu6l0IH5Eu1mGQpCj+ghokdoGVoitymjVIRUCyknCwxqksKUTdGyv7fizu1jVqslKWUbEE4jJ6cbAyvEAgy9b2qTWhAt1o/WhB8WCJYEL6VQpJLFBpEGnrgbn8V8iaGpA0TJmqlUttOW9bhBXE8qGRcC0S8sFFCvG/ZaCkWx/tKZx3qFFjYGuYF+LnhTj3hnPuPZBqs4b72jQKGaLYxKA8WEoB7Jjb3nZz89Wnic7HzsDTO/BiDmVayqLRjCBuPGirL3FLU0wMT69eB8A25tEOYRXDZLoM2YWv1aSFp3HnAz20xFGgPNpKQFkJDQmlBJVv/WTJVGIJulo3O6pghQd6FStSQjdaDNj1aZ5lCIGYAp7VFSAecpdWqSTbfD8M37THZ+gDMwb1iSNrWMAT7eNzlptc9TaUBms7ryLaSgNnDNzR5uauo0bXV7dtfyx/keOP5vxt6ry5LsuvP7HRcR16WrLNsWDUMHcMAZLHLN0sy7XvSgj6jn+QB60BIlSjQihyIINkGYRruqLpc+r42I4/SwT8S91SQIXaxCdWdn5o0b5py9//tvoDZKSCOHAKzWWOOKl7oeLWfkKEVVY63BVRUiHT2YTBS586DiijEMD+D+YURJcw3ocp+JKk3LumeMeANnCVmIBdiV4xdFivceHwv5Scv3GV0G+oX8w6j8ko59DKEYFTpCmMIIY5ECzA7vhRKAVauM1lYGO0mV8z0AqmoUNYgNDAXXKn0Psjak4qudYzpQow3BJvL8oCjDaCWPsz4YwpS7ROVhRHsgg87l+VdCHhLAvPw+bd8dHH3r9TtTXOVtUwHDo2jE+4iOmVQbmDjcxGF1xhmHUfLh9XTKUkEfI70P4BTOOEF0o5YJESUNMMuJtiS61BLDltZo6lyQazRZGYL1GBvBWrJ2JKUJygnrK2t8kKlZ1A5Mj9IJZxJOa1Iy4otpHdP5DDc7ZrnZsOo6MQwdNOxFKhZKUxjlaYRUWAJyiVA5DVLjgrbKxVZJWCSahFUSv02ClBVkI9HmORGUpBKqHMhZkGOtMraksmoFOy1eXTlB1Em8Q5KkX5IVIecy8ZHjcVqM0LOBHBUVDmdqkcKWqU7nJe2IHOH2mr7dEtodhoRpGqqqwjmNNRVTqzGVnIOAIhlTJEdIwyuu76Ss6ENi27Wsu5bWB3xU3NxuiUjzsZgtaJoaZTR1U1HVlQRRpEgIvcijNcWEsshycjFFZkD/RbooDjaKnEMJPZAzrsvkKaHknMb9pEBYV3JtjBLja6eFNSkTKCtG8YXyKh4smUwY9fSDzjwNvk4loUWaPZmCSDMnMmeQf09ajOidFi1/zDJPaqKirmo2fWKbeglF0RmrLIGMdsK2kE0uFwqxArQsTEbOfSIX1iTFA7HIKIUPURg2vgw55PhDDNhUiUcGkqQae0/2EZsk4GU2nZCNUIx1Ah17qqDpSXQRll3GYLFZo6IwSbOSez0FxG9FabQyGK2x2tJMpiyOj6jqirbvUCaiY4IsnyllJcEeKWOKv5sAkpGcowA1OlNZQ2U1ldU4W4r9wraM0ZOyI2eHSsKmyyWKPiPATjDgdWF95bLWKlmcjVY4pYvscggIGZiUCWcMViu2m9W4COsi+1XGic+JdqS2Bd8TUk1QShhyGXSK2OyxRpioMSvwYLKhchaTZQI1zHxkGFkSLnPxqcwZ5QMgybvGKCprcVaKeCkS8pjqOiSvJsBqg9W2FDVlSpaTyEutpMc6a3Ha0tQNKUWMsxwdH9P3LSF5Uo6E2IFC2HpWbAJykGcjek/vRU4qTD+wRuTJJipSCjjrMDkSixS9kmEaXiehhieDyWLka5QZ9yJQRKXQVYWyIuPLBSSx1jJNE3zvIfWEICCm0ppJU3O0OOL09IyzB2dMGkuORf7rA74NaCsJ4z5G0IbJdIY2wqiNIYn9QlUTfMDWDTYYlEaYbSpjK8tsPsHqKeeP3mOxmDC5WTE9mXNEou08y9WGF69eEaPm/WcfUk9m2KqmjYk+OR4+fZ/Z8Rm3b1/ht0sJXOlbuq0kpEroiGLe1BhT5CSooROGXAoUIqhUmsqMy8W0VyPDGmXEnkBJ0a6zlRTiqsc2Fckqcu3IznJ9f8/druXq8oqz84c8e/qU5XLDcrlku9vRNBPqusZYQ4iSiJz6jquXL2hvL7l/cMLjZ49opg0PjhcYq7i8X/H8i1+xvl9xunBoD75bskqG8/lDuX9DQhnHy9fXfH35f3C/Ddzd3tOu77Fpx/N/+Wd+c37EH/7xH3H09BmYCkxEK2m6FBnfd/S9p7IVVTWVIpmEih4IYCIoD3GDyneEeI/SW7TtILUQMjk5UrAoPUVXU/nZwq6WDbrcmtqArgAj/qe+Z7m84/5+SVU1nJ6e4VwtLIeQibuedr2m3+5QQewk4m5Nf3dJM1swm8yZqKY0pROsXnB/fQmhpfIdOoLZbWG3lgFJBdkplJlhTS1MU5WxGaKSIZcikmJbhjceul6OuZqhTINSGmczhoSKO9p4R9tFbu6u+eLrr7i+WnN28hHTWtOHJZOqQatK9jsAZQgxEfMam+/wl68IVzdMqUg+41sFoaFqJpw/Mzz74JhJ46keOP7Dhx8xa07wWnO7uuAXv/4Z1xdLlheGiidMm4eozRLyihwyr379OZ/+3V/zy09/ye1ty6+fX/PJjybE+ZQuej4+P+fRZMrFBmYPjpg9dbTTjifvn5Lp8OvM44ffZVclzP01y+T5+jef8uuf/Xf+5OMP+c777/HzxZTd7Ru6GOlj5vGzp3zv937E+ePvELav+PCTinZ7yc3yK0zwOL0gZMtnr9/yYveKuFwx2xm6zT1vr+7Fh2dSk1SiW+2Idzu801x8veLL619hWtBbOHnwPnmq+Pryjk5fUD+b8t4nH9DFDc9fvybGyNXbN3zn+3/M4w8/RjVTfv2Ln/Hmm+e4aWZrZrSbG7b9loefnDKZHbO9vSKvNLmtie6U6XzG/OEjTGzprjzT4wVHk4ecnz6UMLHC3BA1QukLSgOi0QcWb1qG0FHRa0Mo0qZ3FTx5/GlA1Cb5IGqhfK8q4JZ0wAqr4cglGh0ATaRm19XkLJoS20yYLI6w1YycahSVDM01FEpLORDF4MOSM2w2O3756S/5v/78/+b53Q3f/e73+B/+4x9w/mDF12+WLL3CzR5h6mNptsmiNCBLAqTW0kIdfsbSyA2e0kGlEnJXmrnCnPPInw5FmzLL9YbluqNPDmVqnJZBf1KV1LkkSfHMUocGL/WRqacicUb2BGk8DUSNyxV1rok5iMpHi8l9SrlIcxUxtMKUUYZQfKgxTmyMUOiSpKqz
wmQJzpKRjgwmsoWm0Tx+fMZ8XpchmmQ878+J2v8lNK3y9eG6iDw/pgk5TsnRFXzgW9wjpSQ5trifJBWJKaB0xjqNc8XLMEZ22wnr9UoMhTIjSKHYM4V0IWI4I+muw6AyH4Bve/nKAIAMeihRnAyMHTVQTQuopbRGWUP2AsYlMc0VkGHw4tZD014Ajm8BZejBI06Nf6zRUNQPZgha0KWuLadUISqMHAuQgDAWlVIYWaAxSktYR0r7Zy9L/SwMoqLmMIXBNxr3q1Hmp5VGpyCrflJjr5VjUY0pCU4ki/IiFrYaWToTSeItyZ6p3B85gZK6BRUlG1HJeR/M9+Xn90oCeZUoBZVQuigLEDWXJKuWkbUaehILOsmzosdLV9arTM5SI8jbHlybA8ZZzuLBF2McpZYDkJUL+JQURCPrjVaaXAZ6gxWRHbwKS3KpLooQYZlmVPQCpI9EDAGmJEhDQKvRH3okYEQZ0mtTPksej30Y5A+kDmNLAJtif1zjlUZsuDgAzoYBJLKOaiVYR7m1C5ClyvEXohIUD/+BkSks6az0O+C3KccgvnQGlLBmVWGiZpChJgXAVMJdy1kVD2XxfEwpEmMQ9lveS2Clh977z+WcUdqOXvairsvjOlMg6uHJkvtIS+Ce1hr0nh35jvmWUuNzON4z3wLh5Pwxro2/7fXvM+hS2ShhlDAlpJkb4nets0wmE6zKJXVR4VzFZDHD+S1tn/B9wpjI1GaqSkywQ63ROZZJV4naVUkklKFj23uUq+UQlaRIxjYQWUtDpi2t92QjtGFjjNDrc5b02eClqa4MygoVOeaMLYXGgDBbawmeIvuS5nxobIeQBkHbpXCggG+DnEaVh1WRRjq/QRp98SyTm0hkYQ6lDIlATALqZUxZqGRBUkRZFBI0xhJUllTHcoONBpdKiQ9GMWzUWhYrZy3JCj3VKIe2VmjVlHOQhGWVkkwqVIryZjnQbXeo4NFNTe1qun5HXUkceh8jCkszqYrfAYCTpEbriAl2YcrCS7x4Vo4QXxEjhYI+5/T0GFtXQjU3mkTEWjtSr9EZjUUVlFmT9ok8w0KkVXlIpfmSRUGNzLICrBd9eB6ntznm4g+hiUqQ7uwKqi7QEjEldBLdfU6JphLGBWXz1HqfnJiQZKkh7nwwrheUXRIdYyibnC7pumVxjEkAarWLAp4CPngUUBewI+YkvnYFHRxo6magxyb9blR6WSbksS0yRS0UX10a8JRVAbMUbfDUuiETxddCg3WOejanUjCxmr7vSTrS6kzIBpOEKeV9oAsBHzLUIlcdFi5X1UTXoTygLQkl/nWZAoSDUgbrKiqSpCIrRVeCYHIo1GoyRguwp/VQxJfEK61lclckZUrr4v8kExdVqMq6+KeZkhY8xHfnUhBpI9CuTTIxNJRUVrU3HDZGixzVaQwVJgemk4ZJXRH6Xoo0iqycATCVDXe73tJ3HTFMCHGICM/FF1HjnCYZBxFMElmOhC2AyqZM2crGnjM6CUtS6n2Rag8lnSqFgS6Fm9aaWLSGKYnnR4ppX1gVNvOwcxgjfoq2dtjKCYU+ZurZFI+sfdpZQg+7viOEwHQ6Hd/PaWFZKicNS/RRJny1JblEnz1tKp4TWvYVWxJ1dRqugzhASBCJHFpRHsh5KJ+TUbYg/jG5gNQ5CxuyqWryfE63u5fCV4uXYDOpRPZLou07KpPFn7NyOGOotGOymFO1nj4G7u7uRp+2lCS9KYbALgTatmU2m5WJfKLrAyF4Uoo0k5pmNkVrw3bXsVytiSkxnU0xLrBabVmtVtxWtxwfnYlJtfd4FPV0xmIGOUbWriIaMdV2WpF6RYie7baltpZJSa0rBh8MPiKSSLef2g4FlgwlNElZuQ457c9vaWB1OVfOOZm0a0NV1XR9x261ZrPZcvpAvEdubm7YtS1H8wV17agrx3QywXedpNt2PVdXF7xJPVdXC9puw/njR5wZzWK+IGnHVy9e8uKbl+zWayZVhTaa7fKOt2/ecqS8SCHdlLZLvH7zmldvb9htttQ2U+ue5198xr8sDKdzTXPSUB2dghqGZxXCe5c9Wc5Q8TtJwjDE9OSwJHYX5HyJqdZYswTVovpA3HVgJhAyu/s1rtE01MIULc2aqgwYAQIyRVaMIfYRlRypd/Rb6Hct0zpQLybgA8l7VA4YFei6Jdvbe9rlCr/d4fs7Hj56iqlrlBHPpBA8ffKs19f0uyXzSUPtJnStxW5uqE41WD0mERrboKIDrVAmkPFAxiqF7wKp3WJMhLhFPPNWoBYYt8BgwU6oJ+dMyWxWV9w9v+Kbn3/JctVRfbAgf7TBnXtUbslRDLIzidX6nudf/grWb3lSR3ZvnlPpzMn7j5ifnVLNJlTTKcpEnDKcmobTnbBsf/zdJ+ijI9bGsftNS6rPWDz4iOMHP6aefAfDGW23Y315iV9fcn/5gnb5hnazodtG3rx4w+OPX7NpL/h//vIrmn7N9cVb1m3kg2dP+f6HTzl7+oxH333Mw+kV13//nEePjskf1Lz86SXOOCqm/NPf/4rz5i+ZTOacnJ9y2d/Rh0DeyYBiu1qxvLnh7HjBn/7pn9CnDb/4+V8TVMvnn1+xW9+y3Xl0qHl28hEPporLb17Q9ltwiX6XicmgzQwfDX2nCKHCpQUP5g/5+Nlj5qeW37z9OfMm8+Mffp/jKqF319SVgskcrY75+c/e8L/9xT/xZ+Y9Hp48psoLnthjPjhveHpywovra66vLzDvzaj9juXLz7hs36DrZ/gm8OhkweOT93E8RD2qeXz0CFudoN0TPBblRMpIltpDco5Lw6YUWpfm21UQMt295+J2RX3aMDsfZKHfrlGGuC5Zut5Rdx1IYZWWIAKjahp3RKVriBU6PaAyBWz25xzPJnzw9IyjkwW1PSIHXYg2Q1HICC6OBSKJvuvZrrdUVc3Z6QPOzx9ydHTCcrUhxUTbBU6dI2stBAXpRkamxiBTe+ejDX/GL5X12SiS1vQaWgUtsAU2KbHa7ri5vubq8oLNZjsEMROzGvf3oeOTYasixUyKSrztBr8pMs5pjErkKL7dqe/AB1RRvaQiP9EqYYwkn2YNxlUkY0muYTpdMHE1vTH4IikcaguNEuZxsUXIWeOMDEpj7MnGYpSVHjFroT59+zyRh9vgnS9JX0IJJJQ6PMXCJtN6BDBQhRVJpi/WHiEF0lDTOUkzVsaU+qwMuGMsAVgJxj5clcF/ATtQBz1zqb0Lm0lehZGUBdDLOaMHmW5OAkgMvtmx9AhlSCpSRI0piisJf8pjirAqvtQUACUUT69U2FWDUf5wPYwW+4kBmB16JFHolZ8pGKPWg6pLSajF8KiNAGABDAqQnEqtbrSCGAUsUgqbi8l2N2VmAAAgAElEQVR/Rj5nAcKGu31gRJkByix+gMU8hZzi6IEWGfjm+/MuO3Qej2O8NlB+z2DP8O10TEotRgGbEKDImNInliGuoshKhzVMnk9d2G8chNuCsB0HaekAHo2y0oEZZswohTyUn2Yl9e7IiCys/UOftDG048Ajdu+Zt5fr5wIyD6ECOR8elz5Iv1b
j13Whfb17/LzjHyhsUEbg7h3/T8DqAXsYnofhOEuvmfaPsyq/Z2DwKS3LpFJyj0eGa6PK+VCjlHe8f3IewWylc+mx9+cpx/DOepBC/84xM6wdSQA6bd2eXZjFS/0dgM7Y/e/OGYUuLF+5V4yRz6uz7CcCnhZJctkXVLmvBJ94F6yTflMxhNcNAKes5d9C7f6N178L0JmCBKuh6SyNwIB8xuDJKQiopjI5CYUzGU2OPRMCRnX0KmAVTE2isaCdJTktOv/gibGAGFa86kLv6FoP9QRnK7RzpKzo2o7oe2k0c5LkDa3ERDx7YgiQIhWgnTysPmVSH4nRk0OEPtImg+0zbduVIAMzTiqMKjdwVgRjBWQodPQRoMtRwAGjoCRe6qKzF622LIJZe1LSwvoySvTKxkLBxHTxd9MkoZKmQI6q+DohZuZI0zkkyw6+Q7IgiaeVVbqACQZbOYKOhAMapTJm1P8Pkx8x8lZMphWNc6gciG0vJuch4LUhJU8MIvsSMFSzS56cFV3fQ0CYNq4GpeizAEkZMUesKvH208ZgK0szmzKZTDBuLwPSWhMpgJxmBKxkCdWFAlykxAWwUIeLVT7cHuT/BHDX4/RFESGIf1guQ09rxKdHoYlpr0eXXyQr/EA0TOVLg6IwRykidBakX2VdigpZNI3WmCyNvaz/4nGgy0ab1OCDsU8wi6UQqeqKqqrYdR0x7QE4ATDVuCArlUZfhH/1OlxwCnofYyKqiDMVMWZ631NVlQS3WQE1JnZCpaBWitpkfLsU7w9tCApySPQxlpAET4hKQk2skYmD1ti6pqobNBFT1WXqISw2pfaTPpTCuVqShZSiigHTd4UqX85zLkbM2uCsoXJOhgAF0A3FV632BgqD1RiDUZVMRpR4aNl3QLoh3QmsdVilcWVRt8qOmytKgDGrNNZomrqi0qByoKlrmtrhpg3WyqQqxETnPW3n2XW9SEr7Xnwis4DjqfiECZipsVkTtRHq/XBeKI2PUsLGKQVKzhR5TpF3ljteKwmSsEbtwdthCqVlVTIpl8lyOW8EMUhVRUKqFK5yTCYN1bRGOUlI6ztPdo5sNb3v2fmOru/IOTOdNJw/OJXEVlMAHyjAjmK92tBvetrUybpoM6oWSWr2IuuztXiKRo/gA4XpxficlH8lM6S0jZ4rSstENgnDSpoEYdmlmEoCrYRTaKNxDpyDmHvuV7d07T3LpmYyEVY3xuLR2KbCNTXr+xXfvHxBfXUJaEKQcrKuRLrjvRfzb6uLZ1IixkBVOW7vbjiaTWmXt8TQ88XXX3F1e0MznSFeO6nc14rQtWxRmMpRz2YcTWt637JZ3dG2G3zwmJyprAVV47uSqE5J+x2YIeWhGWQQsTAfSjlbvm2QSgjcLX40A4ArIK7Ocj8ba0lB7h81yikyxjnatuP58xfcLZecnT3gwfkDNusN7W4HZJqmYjqZFJA4EXzg5uYWbRQ+JroYOXv8mLPzx2y2PV9+8TW317c8PjvjyaMn3ClNe38jHmhtB7pHNVMmVUPtKrbxnl2/4+GTE549ecDpyQynA3QrCGJ/kWOPUlOUmRaWgZN1E/E5DN1OknNtIvkVm7vXpHTFfN5j7QZlPNkr0jqias1m1fL6q1fM5hsePzPYpha5WobcdfShows9aIWra1w9ExZIp7BBc1zPQSusrWW/8J7N/R0qBHSUwWTXbsi5BxXISiyi+66l9Tvul3d4XzFpzog5YZxjs92yilsmXUbPFrgHE5Qx+M6jXMK4GTnXCLktoVVEMsw1Ybfl/uU3qO4eq3do7akXMyYP38MuIEVD2tyhUosJmXizg1XPWTVncTxnhiGtN9DuUOzQOZKNRcLCdsTtHZtvXhHWt2wuv+LJ+yco9QPcwqEtkHoIOwiGymuOc83MJR4cGey5Zbdr2aUeVZ+w6ye8uul5+WrFk/cTy+stf/sXf8vq4pe8/3jCbnPL/WpJjoHbqzf8zV/+OW+u17htzymw6tZMz495eP4+P/zhd/n+n/1H6uMJuAv4/I5vXv+SNyc1z1/+I2+/eYPqNMubzE9/9gU//JPf5+jROf3rr9h4T7KGNgSwht/7gx/w+Ml7PHjvKZvrt8wmhovbK37xy7/Ax8yzRx/wRz/4PX7yB9/DrK75u7/+33l7+5LL5SUxwemDx1TuAZfLFqUrPvrBH/LBe98lr3v623ter+7o2g1uGrl88SV3asPm/pLee67uPX2/4O6uxjWJr1/+Nx7MGybxjt31r1BXNfPvK/AZYxxffPkalOHh0RFuDo+++x6tWZGna3LaotQptn6AaUQymZKYh8dSv9gsotWIG/celDTxIWcCmk0Hby9bXr2+51HOnD+YyBo09jsD4DRw5vIeKBi/LCwYYX6A0orpvGJiTgTQQGPrhgePn5ItWFtxfOSYTucSJKEKvWrf7+4Lw7z/95QSfecBxWJxRIqeDz/4gMVijrGWzXrDatNxHnt88kQmDGKawVvqW6jTv3oNTVoqdV7MmvssypiOzCrD0nsuLy94/eol1xdv2W03xBRJWhhI5hDFGupfJXYtOo/jOVnLQ4dKHTltiWFDClty8OicqArrKpXTY4wMCGsnypG6mZCso9cOM5mJT3ISQCkxDEMFIMsaskHkZBFSiETvCW0H0xqI5NChTf07z5F8LvYARD4EwIamec8SEuwmEXMcU01DivjBAkZD8Tth9FM7aMhTkVkOXxdMqtQdAwhmLAYtYXlJvXOgKlMsE+RvCe2TPzlnkg/0KtNvt6gUUCXkIsdYehfxuRsCq3zwMljTeRx2AsXWJ461/ACIkymSTkFFZN8Wr/QxyIJB5VP65CTDZgv4DBSgMiXKMAmpDdT+Ygzeb7ELB4CJQeeMjkJ40LHIRw8v5Pj+AGkkSgwg6aEaXC5zHk33R5xl7F+Ge2Agy5RTPdaAeybaO3XvAFQqqf3I+xC9/fsLCBlKcJcuSqVh4ZD6W8Iax77hAIA7HIQf9lqHx5CVjPAOQyMEE90zQ4X5xt4f8IDtpnPCAor9vfttoDAVoNAO/Y1JI3g43EuHx2bMuwEfIXoGZt3wvuOxgSgLMwfHtQcQD/+Mn2kA34xBaRnu5PwukeQQMOt7/875oJwLwZKG4fzhsxvHz26MkUHrwXGrg3tm+F3De8nvYDzPcl5+Sw/9zrmQ+zdGUXPGlEWaqqU2zgPiPG4JQ5erx5p8eDQVg/dkfve8/ZbX7wDoxFclFw+BIdFi9BIo1MAQPDEHuXmzxkePDzsMvRjU5ox2GZ1qTE5CrbWWvlAeUzaCuZu6mFI3WOPZxYhyU2bTmaTYTT2h98QkpuR1U6F1KqDejq5vySlgjUZXjjZIY+X7TpDTJPK3jV8S1x33257dtsU4SVZLUZKKjFFkbUV+pjXxYLEXM8CMyZAxGEAr8dZSOYofVhLKtG4qdiHhQyBnCCmSsy401IAzGp01TqAFjBIAz1YGE8Xc3aqE1RZXkhTNgDLHiHLCCjJK7xFsrQtDpSyAI7WMYWYwLvg5REzjmDYNRkMwHb5vqZTI3Kh1mUwpXN2QMvSxJ/hYJj+Z3H
ui78TnwYj/Whc9fbTlQYgiqxqmQTmKkbxSuKoqU5lEyCIpNQXRDilhy1QgIZ8jF/wzGpFCCvAlhv0KAeCiGI+NIIcq4F8KAZUi8lwJ64s0TCEoAEraL4KUlL6CAYofRWmGBePEFtBJZSlSvBe6s9EabSO6rgooI9TuVJ6bgCFkMc1sKkddVfQ+igdekZzFGLFVLc9fWUCSGoDJ4st3sNAdLj6DrHHwBssZgg/orDHKE7U8EzFFrNVMJ9XonzjUsSkKgy0bUMVbSaLq9QhgZi0sJqWNbKCp+PkZTbaKrQ8yXVeOiBqfp4Haz1B4mTJhKNMzCfOI4z07MM5MYYmlshbJc1DYYTnIc2DKdS/g1gC2Da9xWjKY6iPXZAAKtVIjhdlqAYrJuYQdyD3nrAQ9CNAiUgEUxCSebILbi3R9mOwNtPJh4R8ncVqL18oAwg1rfClapLMpE/Iy/RoAX6ULE88O7ztMmIdN3OKQEAkTrDAUfcTHRCaAthIeUGTEMiFSpBDoY6TtA7bv2PQdfbtDGYjBM59OOT8/ZTGfyb0WIjkFXF1zdLKgmkw56Xq6Tc/t3TX37R3r1QZ8ha9qYuvFCNuAKqAjZSghAzhT0mArkV3mJOsqMpRBC4ik0AIeuapMiEXO2G22bFdLVPLCXDWK2imqymAMxBjYpchue4+zSgBlVxO1ZRcyMUf66Fmtt1I8Z1W8+zR13QjrtxTAoyxB5zJxU9zd3dBUjouXL4ih5+b2hl3f0cxELuv7wKSqCf2Ou9tLZtMFk+kUrTLbyrC+uyX2O5rKoWKQcKUYxScjRAkDMmZkylqtZI8u67+c0D0YnpKsp5EEtiRHF+b0uHaUZ04pWwJErATExCxBEoJ0Yl1FyrDebtlsNsxmc3a7Hcv7OyrnOFrMUSlgtHhMfvDRR6gcefP6JZeX10Rg03UsNztiMsynCxbzI/q2Y71c083mzCYzFtayu37L3eaOmLfYUNN2iW67pe92VDbx4PSYZ88e8ejhCZWDtLsjN5BtBXoHtgftUdoVglsen6UY1oTNjmpRY6oSurTp2ezu0WpNU2tUNOxWiaruaO+2rC7W0Fp2zZp6FnDTGl1DTC2+W+NDj64szmZ0MmhX4Xcd2+UF1lqOFkdYmwm7Nf1qSe46rBLm5rSaEZojkp5wt73l4u2SdmvpQ+Z2ecPN7Q3WHfPkcc3x0QJna7rdSgZtesH6pgN7QX3qaGPGK890PmFyPEOpqvjXKlCR3K2Iq9e0d89ZXnxDu75hvmh47+OPmM1nZclx6LBEtXd0m1vC8oYHk4rT730inp5mwcQa/HZFVR+jTC3sdmAxP+b3/uCHbKZz3vzj37G8u+V4kci7LWrV4Tc9WQXstAY1IbqG6uSc86PIw48f09eK7uevWF5d07aBTbfh7c0L/u7v/pwvfvPXrK7ecvnVZzxetFRPZjx8cM6zDz/hovuSqzbRLhOoOZo7ar3Du46kV1SLwPmzKcdnlTCaZhXqfMrty5dc3azZ3P4a1e84P/mYxj3mV9+85lVc4SaW62VHyppmUrP1kYv1Gn084/TxCRWR1cZz9WrHP376gtcXLd/70R/zpz/5T/zeBx/w3vkZpt9yeu744vWv+Ouf/ncuV1s++sMfcHr2iL/5+0/xEWK94u3mn4nrFXF5z/Lumum84en5I8wuodyci8tbLm92dL5iu+0gZaa7NwIg0nJ2ZDB2x9dBcb/LVNMnTN/7hG3lyLNnnD85IWI4fmKY2xlRGaxRxO6esAmYqsFMJvt9KElomiIAEYvYWugsCexKGSya9VXLP31+zW82PdSOZjplpeAIcAdbsKgNyj6Yh9pIj42NgDOxoHYGFFgnA60hcVErTVVVFNWasNpMaeiyyJQG6VZm3zgWOyXQkHzi7vaWF9+84O3bC0JtOVkccbRYMK1rZtMJMxQSspXG45cxfhl2l77sW9jf+CoVjjDrFeyy4jrCVkFQmfu+5eJ2ycsXz3n94jnLmxu6tqX3vZDb32n2hnpJ+g6rK4wqAWWyoJP6LaFbY7VnMpEBdzVZEIigHdq6UQVijRixR9+DkYCzoA27pEjaEqOwXrQ1e4Ds4NMd4BzkGAl9X1QiCuFYDp6c/75Ia/hlqtTGQ1BT0c0wgLhSQ1ECBQJ9DMKuMUVqpoRRJ99cyBDWiA/tcKi5gCXs5/p7T7wyzhpqJ6ULcy0Xj7Jy8QvHS5d6VABTabpDSMSuJyRN6no0UWo1lcdzN9SwIHI7lfI4hLZGrEVSLOFtMRa8S2rQoQOJRcZH+R6dYvHg0mPv4L0n+gDZj328+PkidXqOxIH8kPd3rzCpGPsW+Rmp0YwSKxiVhe1HOmQW7u+H4R+kXcuj/6CA9Kb0LXJdUwLsMGBktOvJB7/u2/DJAEQONbUa8JFvgUYohSpEjFzqRmUMRrmidtO4WpheA/khpwP/eA0pB2zlxAuxSGy1KdJMo8dAgdIpSJ8b03hP+DEYcqizeKcPcCVVOLHvDwaSgFVGlHUZUdkUObPRg0+6xsc4fn8q0ukY92QT8cgb7u39czD8PfysUvvrP17BLGFv6uBnVD4AIZEUXuk5C5gcYvnsAwAu/dzQ7wzHMPTagh+9C7IJ8UaP4SccnJsBqBtAxgGEHJ/b8pwNoGBI7/bIw/3624CxQw/AQfEkxIFU+jl5ZrSW8+8aVUrKwePkgJFKFHJK8TdEK7IqnnbFeiF+W8b/rdfvWD3VCAyOj3CG4bEReWPx3wKck4vQ+sB2u0VnkYFoY8ipJ6Y1u12PMRVKWQFzQsR7YQeZViSzEVmTkges+KfUykooQlZj2mVTVRgtZGevEk4JCb2qLKaZsEmObduRNoUmaypUTrQ+0XuRCoUQShRwIgc1FgzaSFJTOpi0hDJlGW6DHMUnYqD96nJqVDm+qooEIwVHSJkk4ldhG2rxkDOJcQPUAyEs2yIpliwsqzRWiSZb6MXDcRzekKlczoFGqdDWgjZjumeKwjDMKZBij82ZroVWaSonSZsyIXAYZ1AWvBd6cDOfClWz3WJ9pNJWANuwp6EnbfApEDuPj2FvBopsJD702GAKq04RwrBAUORZjJuRHuCanAo1X87ZwJrTSlE5J8m/zo5a/mGj1ciUJ2cBEHzfkbwnWCNsxxypnQWV8T4LcLvZlAso2+liMpPJoTJYWzwfykJgUcLIHMwTMiNAJLJK8euLJSV0SJBNShGJxKxR2RZ/B40xwiId6NxDkZDHTajo+rWW4ulgKjj8c4wZ7z3Be7z3w6mUDafElMeQyFokhH2IRBKm1+xsS06RXgmDLhpoKjE57nPCp0QMvRRGKRZ2nyaWTQetwRmhGVqL1Zrl/UrSZI2iz0K/jglCFMBg8CvIZfPQVkugRaHtO2uLeapQzVORJujCanNOJNbGWpHG5r2MQcIGCiO3+DLEGIlBjj1m2TwzCauGSU+W4s5o2dhSecjLRiNTrdIXFJmtHRqBKIt3ZQ25Fnmg1a0wzAYj+QHiLdfWGBl+aMM7G84Q3JCHynVcf/f3hhQTadwsjdHlXs0Hm0wai51hejP+7kwxiGVv9JvlP
Ifk6bwnZ03rO3rf46Nnt1mjYqCZTXBkaq3RTrNNnt572bRUwpjE8cmc+rxGV4nd5Zrd7Y7trmVmFqgsUhxdppdKK1Qqk0mj0S6TK0PGkbMu672wXVECTGZtkOxWRfAChibf49sd/XZFt9tiTSn6U0ZFLewqp4pkSRjPfS/hGVnv8NkQlRN2rJbWiiwr7JBkm1MUH58s/iHWlHToGIpsFELf08ZA2G4k5MBLUtl2swUUlauYzmtqa4l9R5szwe9Yre5xyxsubm/Z9b0w+3Iap4Eg+1jMma73wuq0Fm0LNzrFUrgW9mTeJ2WHGMc0dAGT494qYGgEFfsC0BgChpgUKIt2NQSRlmgjz51SmuVySd+1qJw4fvwYZzVr39P3PU3T8PDsTLwiq4qryzdsNy13918zu75ltfE8efY+JkvQz931HX614XQx46Onj5mcntOtt/is6LY7VpuOzXaF9x21NVxfX/HZZwGVNxgTyCYyaVf0GZStmC9OqespxtWYugbrAEPfeu5uLum2O076BfOpJW89aSOMTOcs1taEXtOvd2yuV9zdrQktqKmlX/WEzmM2K2ytsHXC2oSbVOimxhjIuyVKVTitmNpADC27ux1Nv6VyFbpvae/u2fpEYxv8tid0kHqFZYJJios3d6x3LUkF8fYKgcvLe2JUnBzNUHrC0eKE8/Nn3C/XLC+uOUoNUWluVze0i8zj5pSqqRhnt7mH7g7dXTIzK/SkZ+sTM6tx3tNfXxCXN7hJQ1Vr0Dt0XLOYKE7Pn5CNEYsGvcBUM2xjStlRFAApYYxmcXrGFFCrW/rVc7TesL28ZT59j8rNyfUcqilQ05malWp4Njc0j85ZrW65v74ldSLT69oNl5cb2v/3M8L2nrhTnE5PmD21rO6XfPDhJ+zMQz678by5vGWyeB9jpnz/O5H/8DDx9z//e175npdv3/D5Z79gfjxlNj/hy198zoXvePjJJ1x++Sm22/H4aMr3vvNHnD7+MT/99c/46a//ilffXJOCzEuS36Fsxer+lvvbt8QnR6xWO37xD5/y03/4nPnpB/xP/+P/zLMfPOF+ecH/+Zf/K3/24x/zk5/8hI/P/hN87miPptjZEZ/84Ps8ePiAH/zZT7jZtOhKsd3ektdrzKrl+Wdf0W46nj18j3az5fa+o+tnxF6hOsXMWmLuUf0Nx9Zz/mDO0dmcmBfsukinZkyOzvj4939Emh+R+zt8Vpw8mIHaoEKLUpHgI2Gzpr2P1JNHzKyWQM0QIHRoByCsrlyV4VeMhF6CgiyOft3y6vVrvrKKo8VjurrGK1GMDdEp7+BNQOlW5d7JBQFRAzi3/0Zp6KMMwlUS01iEYYopgEkGpSxixg3gGDY+aaLNiCopwHvP1fUtVze3GGfBGF6/fs2X8xOev3iOLV6uKXty8daVw1cFWtiDVWNLq/ZfUFBUHwNzSNMBywRLn/Gp5/ruli++es7XX3zO229esL3fEDsIoSvBbIoByOCgFhATeyuD0ywAXS6sYGLL40dHNO99zGZzxGa3oY9RnlljQQlBQLzHMjFUAiJkhQ8DcyWDlsGjh4PPKfVXLsMpSoiezprV3T3b5T0z4wCN1g0y3TX8/3oVpvugAiAXgDAPfU65ZVIutZzUoQOLKKZIHzwmDSb0iMTVi3XMmFTKoMIpYIi1aFXY6Glg8Em4lOyNe1ABpeT3FDbhKCcV42WRfIaE1iJfdVqLjRAU4EOJLHX4uSTAnS7sRqPLtRzZQrkwwPY3WUpRHIlSfnf4rPafLRUQU8LSIskiA+jxeklNLao1uR9SiOONqzLoLF6F1gj9IedUwMW9B5361nNweJ7kORwIDyNMfqD+2P89XNucB3seyAMQpOR5SwfvI8DJQbjFQC4Y3/6Qcbm3hRL5s6NCi1pGyV6PKYCgKhY5CZQyGJVJOTAkrKZBLg2FCCJg6LcH8AOcrrXUPWO9njMx5XePjwKEHXy+kSEmD7YoX/ReJjsw94Z/P/zcwqTLMHq9qbEPHF7vSDqLfHToTwYJ9/BZgvfvgGOKd6XF4v+WDv4cfK+inB+R+lZVNQ4IDvvX4e+BBaiUwhoJjpQ6No5AXhw6ofL+toSXjedzYJey75vfBaB599yXnx1JMCPjbt9DZWKxA0qjH/kYRpIDY4rveH4Tgz3RAI5CWXMO1vT8rev3b71+Z4pr+Qd5iwM6q9zQ0lBWlcMoTV0X+U9OkpqKE/8zbfAJwqYn5Q6tKrStSgOR8akY4NMJkq1FNrSYNJi+R202tG1HjmUqkAT86ZuKyiLGmYiBrDagiyQnZU1QhlTMsFECnOnCvEINdM+yYWkxhNdkUgoUqHAUr1ImJkmJrDGUR1GMIYMsKhRdJJB8J8MBpcQ0WykxCC4sKd8FkpZzIGaUSqaEgQJgaEH65UqjsvhkDX57UB5WEC8AJci/LiCRUG7UwcoJlPQZrRR15VBK0fc9pMJQKs2rD5J81cZigOpj8Y1TxWDTUGlZhLMpaSgadFTi6KY11nqE0CETnzFZpejUvfckhJ00AG8qy3RKG4P4umV8CHJ/KLnhB8DFWkuMkcqJOb4rnhO2MIlyinI+bIYgBVJKieB7+t4Ivb9Ujd4HupJOqTQ448g+YLXFOjcCc1rlMn00ZYEbpjXF5NPokSoefSDlKEaqedhiyqKLInhP33f4INP5mBUhBipb4aq6GMLvI9eV2k8hhE7vvwXQFSCqLJZaCWystaGqGpn4BU/MUphYV4DHqhItPlLMZhnpoZ0q7EdV2H/yDGhjMVVN2gjQFjISrmEMgUzIck1NLXJalJUUSGNJaAk5oHgkpGGRFNZmlRVVZUdgyxbASqmhHJBJmDEaYy3OVVS2wmhFzlEAlOzHTXlgRg4A3VBYUoxbM4Pfxp5WPzyvKpQpSC4JbmUqx1Cgai2Fove0fScpQEq+1nsJUJCiUza1VBqSwUdvkJceXtfhec5GU/DIdyaIY9GiJWFoT18f7gv20gjkWRmMbIeNUA+FluCRci6H2PayntnCmFJEamdY1DMaozE5cjyd0KjI3dtvODpaMKsqpq6mmddEv+X67S3T2ZxHDx7z8GTO6fn38f1TzmrH6WTK5csLblcdut2IZKI87yUUSVLRYpSk5qRQKaKSL54cFCanSIYE8I1Fmt9C6FGpx5DkeDUokzEGHAlCT0yyNkwnldzbxecyBk/QGYXFGUMytjAkBQikFOeUQlIrkRbHIAWbPOOKFCLC9xMZa103qGiLnELRVAKszSYN/bZjt1pKE+kss7ig29zT+0CLJoQk0/UifTdK9q+27ZhPG0xdY4rPRk77ayz9hoDTIUYCkWRKUVs8a4aCE/YFG4piyluaoQy2Etn6ru1R2oh8HMV8vsBaw2p5j9WK1XrFerXE9x2TSU2/m3B0fMJ8NmU6nXB+/oCvn3/N1y+e013dEZOj62SAcnZ0ytXbt1xfXhO3O47rmpnT2GZKDIngZUrrnMU6S9t3vL28IoUVRgVyDlxcX6Eqy6bvqCYNH334IWcnp8wXCxYnp0Rj2Gw7bm6WvHl9QU5wMlswrTRxe4dNW44n
YE+P0HaOyxrNHS+/+Zqvv35ZCsAaaxqOTqaQA37XgunRLjNdNDij0MqR+46wXaLQzBvNdtVz9/aGrppw9la+ZkcAACAASURBVOARVdbUOeKD7A955+lWO9pNT1NNeHC6YNe+AiJnZwuwx1zftFzfvWXb3dP5E6Z1QzOp6foNvl8T1Qq8F+bvbssmZFZvj1ikluroRICNcI+KS5xe0eglrvEsVENTT7AxsL26YNVvUS6zOJ7QOEXfrnDTmsnZlBASftOjqyzT4xowvdC7dak5VGa3WZPbDcdnJ7z/nY/ZLF+xaTOub1D1MUnVZDPB2RnJzdnpCVe7Lfe3ay7fXvPVixs2qx0memq9JPQt29uOOmWOmveYNXPm04rjoyNuty2/ennBi9sdd32NtufMTh7x4cc13z1b88vPfskUzcVFz1/9zT/T5Cmnkxl/9dN/oZ2espjN+fLTG8J1ReMmLKan/Of/8l/45IcfsPtffsE/XH2FrhtizoSuxSYNb2o2v/oZ+Shyd3XD55/+LbdvOj567wf86Hsfcf605pf+Jb/WK1LdoY8s9fFTninN0fd+H7eQkIzjozPc8Tm/+eozQtTMZg9Zthfc3L1mdXfEcrkm9XeovOXy9g1JJT48eYTZwuRozumHD0G3PDqbs+07blY7Hhw9xS/h8v6eoDV3ly/YfrXmxcvP2PzoI/7zf/2vmK4nrO7pwoY2Z9zkFDN9hHGnEh5DZre85uqLn3NiGo7rB6SHZ+weT8R2JHYQeiyqeOn1uGaLSYl+PSVuIhY7rC5DBzHsYuMgfM84GGqjSIpeSHWmDKB1QpVIBfCgPKRO/iBAlcKArsnJFPZ8DWiycsUegf1RZEW79Szv7slK0TQTVm3L1cUVbxZvuLq+YrfbsF6vOLcJdJReJYsX6MDs2zP//q2XAIcWCe3KStNnuN5FLnYb1ttrXr96wVe/+YKXX33N5u0lu/tALCqjUXL2zu8sKZGoYQ4tk1bvyX3PbnVH3y158uSUjz/8iPv7S55/84rNtmXb9oQ02M56AeNyoq4qCD0hZtmLGQY8BoNY/QxDVEaVSamlYkIl6LuOT3/2T5jQ8ezhI56cP+J73/8DzMxCdr/tBJVLPiBnxQKm9Cs5s697gUP+3rC/Da+YRKXU9uX+SBkfwzvfIwqRolo4sI4xgxw2F4uHmArGW5Q7pZZXyH5J6cekJ5P6VSUgKXLMosBSlto4mkoTQ2n+41B7KfZJjwWcKmCk9wKHxrgf6kWZsJOD+F5nEiHtgQSr9Hhe9lJgPVq9GKupBlUHAhJoVUgfRrxmowoSZJHzwYBXkZQWK6CUiO9iceUalP8dAGUDMK20fM49hqDLkDiN1zQPSPZwK5NHe6fhnhj+Hq7+iEgcgExaDzYjw7cfgOdFko0SNq02ZThe+pnO9yUYoNRMaQBODVrBdFKPPcQh42x4b+fcv5KTwh5kS2qvoIE9hDL+9/J7Q95/phG4yoOv//5zqaK6SQfvJUxLNQJ0qgwlvg2AHZ4bYbYlfHsIwQ/g0nBuk6zFh/jBAKeOz60ZQfsYE4N/pPy3TI6pYACDFLf6V599APcGH71BSuycKwDdHgDNUb9znmJkJCkIQ5ZxjRoGI//WSx3cW3sAkhHQHZiOShUEKB+Y7TGEagy/LQmuogYp+nCjCuAJBw/PgNEVyFn/luMbXr+bfzy+2b/ei3wQgEWXaZNzTowRvUHFSrwLXIXWjhQDMfXEkLDGYWxFbR0oTVIaHwNtL0CM+EZBiIG2j0TfyWcrAJ1KQh1eGU1tNVXtMEYhPisKZXfgOnpbse0iofWk6IXJhHiqpSym+MqnUa5ktMKZYfIRMIhsLiHHGBWAKdTnApKoAayUgIecE1kFtBLfO1t81QKKPhXjz5iJShFKNHVI4geVyjRFOQdJo3yUtLgskjwVc4kcNiXNqPgFKTEmVcZgnME6g4mxoLhCsTTGlBAMU0zoPdOqEk03AjrVTtD+lAI+BPqEmMSmTLvakpQw08ShNdEoix02PmOEbITQPytncK7Qw8vDvmeBiUw65ojKeo9WF9DDapG45cKLDikRQhwXncGDoe/9OCnKZfIlkwtVFsyMsxatHKmqCcGjlWLaVDRNjdHQNDVVJY9B30vyX1M7JvWEFOMYVJB8T9d1BN/ji2ci7BfVmGVjHBhvkYyua7TWOJVwRgw3ZSERvwNdIqkHWSpk2taMU8TDzQj2U8CBKXY4udnr3PfTmeH7RAIt5ziECCbhdKGaa4W1FbaqcArxV9QC8iorJYSKMiHI2mBswuaKKmq0DePnTUCIieV6ze52SaNrUhbPqUhks+vIJrNtO3a7DmMN08YJqBiGAAW1T/FFpoUZAeCrYlxvrCX1pWAejPCNwZmqAFQB3wugElWRII/nhBIoItLQavBuzIzmo2W3GacfA7tPfN4kdS4PBVUfUSqPfiFKSVqwJxeQjPH9cx6mMfsNPBd5pyzqal9gDUtuQdwOluCxANMaMft9B5zLpBgIqcTbuIoYk3gGBk+IobDDyj2W0ii5kJVCNhmjFbWxhJDIvmPeOJ6dP+BsPmNWO05mE6LvuLx4w9mDM+qmxmcJlNh2HW2Y8P4HH/Hh0w+p6ormqGK7u2OSIzNt+Ln6OdvPviT7jiiWYOSkyCHjfcsmt/QkVLaopFDF51SNFHNDVgbxuy6JtjlhiVQaGmepK8u8VrIOVQ22mWLchKQVffD4PolfTN+hjcXVFcmIpD0hYTFmuF+GIUceJDgDC1HOlXYGpcQnESUSB6MMVg+SYwlPUlGYbc4YovdUC4NtKnYx0nYtyUdIDSoFiNJEhZjJxuJ1BmvG+2kIfaE0HSgZZgzroDRRYjkQUpIEz7IGDR5+EpqTRo/EwbNmKM6yKtNzNCGKNFobh3UO7z27tuX4aEHTNDSVI4TA8u4WozJNXRNz4vTBOQ/Pz7i+tGitxuf14vKa2+tblsstjx495vj4lJu3V3S7nlWIvHjxkuNFg1aZajYl9AHjPfWkBjWna8E2BjeZ/n+MvVmzJFeWnfedyd1jvGMOGAtAoQtd1SObrSaNZpJJDzKj+Av0N/UL9CCjiVRTbLGnKgCFxJDznW9MPpxJD/t4xE0UqpoBu4bMvBEe7sePn7P32muvxZAM37+44Lff/lDWXcuTp09x2dHebTk5O2G3a7nfbHjx6oLr23vuVxuImlnd8OhoztTCRAfSVNOoIxo3ZbMduL7e8fLNPa/f3jGbzalvt6SsGXxPVWdy3oEeqGqoSNSuhj7g12v69U704JRoLVYx09/ecd9HlvMjFtYSnJb7bhSnizmxyUzPHtFMpmSdmWwaposK7TQxW3bdPT523K1v6IaG7W7H27eXaKU4ObGErqeuLI+WFfe+5+btM7QOVNMIMcDuCswOxQaTtrTbW7b3O84efcj06SPyENi+vmG1WpG6ijCpiLEnxUA9meBDoO0GjFf0vaLKItMANSRhIOzaHS+//x42W47JBAJH5+fM3/8MFu+x9aBcw9zN6JPGzRacffARw+Ybvnv+mhcvLvj2+R0311tyytQ6MG0MU1d
zPp/z6NHnTKcf8NF55OPPjrgLlvRsS68n7LLneDJnkxMX99f0S5EjOZ2eUh0fc3O74e//7tew2fLtqmX++RGvX17w8qs1dTjDuprV+o4vf/OfGeKdaK+FQLZe4qqUYRig3dHfXvHm2Zf89ptnTKzif/2f/xc2XcPN6zecPn3CF3/yR5x/cM7R4kSe0+kpi4/PmdtIVC0xRHZt4ofvnvPDs+/oB8WbH+64/Po1dpfJqcLV73GzukHFNcdHDR9+dMYyWvzFlp//xed8/Df/il27Zrmcs42Zq3XkbPI+u9ct5vmX3Po3vHz+W26eP8fOAq75kM36lpffXvP6u2/ZttdsUos7PUepp0zdZ8yPHjOkgburZ9x8/w88NUf8/PxXnP/Zn8DJDGUVykZsjNguo1Um2QHyLXXrccFRb3sqbzFOkpRDNFOS9gyjEPyobZBzJvie3ndkrTDOoZzDagUE+mFNv7sjxY4ce6LvyICPicEnlKowpmY+P+Ho5DFKN1B6WwpvSorDOdO3PavVitV2y/1qhbaOn330MfPlgrZrub27wVSO45MFk0lVdI3L+RcAg/EyHuzPe6YgwL6lUIrzrY9c3qx5dvWWq8vvePXDt9y+uWB9eUVYrRjWkWwVlVKFufQj7SZk6MYkdF/B84HYtaxurri7ueDR4485f3SEtj2XN9f0IaKCR0WJT0ZyRY6BSNwXBp3WBC2tnT5HIQ7YMSSScUs5PfiRnGS12vAf/6//yK//7r/ywZNz/u2//mvef/yExXRegNI/8BqLQmV8VZkfoyTOAeA5vA5dA+LabksiP0q75JSlWFrmmfAB1KHQO96/McZLqRTt0j6W1krIDQHR8tZ5BAY4nFMuMKxUwwT9LB0zTikqY6XpLUUiUWLEPF6nvEZwwnu/LyLvgQvA55L8x4PGuUdJzrM3kMuY4nwsxgWWpplglcPagDEimi9SJNKeqqyMoTNG2ve0FummAihhSvcYBYcscyAWcountOGOA/Eg/xhrmaaAkeJCqohqHK9SqC6xmxxhlLcpLLpyv2KMJKWkWKwlj9Fa7xl1e+22d0CkMU5mr1UXC5iYUiSqhGTbQTTYigkkBdhSWnhiSh3AHmCv9Qbs75W1dg847efTOIdK/PcwT8ul22z8/d79Vb+rZZdLzr8vKhRESGn9znvHY4wg3XhsH6QF/GGX1WFcBH4e88PDo3gAvvYmCipJ3vSgRfWnclOpXctEeZiTjmjVgcV2OIfxGX6HSVjOJ+pCPviJ5/6hFl8IYQ/ojUSIdwDa37N+7Mf5wb9R5t/Dz4lDci4eBcKMkxbb9A5zYrxUkdopre+/+9X7YsNeC/En3vPw9Yc16IwpLAoR3445IaYHcnStDN57+mFAVZoheKnOF5Q3ZofSjegYZNFGUzqWzUCAEnEZtRLwu47BewH5dKZ2lhQG4tATYyjI9oPNIkFIGpOy6FplQ8yJ6COxa/E2MIRIGHpi3zEKDOasGBIFIHkX8aa0PlVaoVKLRqN0RdLSbidulIaQM85WJDJDYb+lrDFENBabAzp5GWAtzLaxMoQaxV6lnhCVAFtqFDxLcq0jCqyL2EYi4GOk0gmVNCFJWxcobHmwQggEJawSYsJpi02qHDuJBqDV0o6W076VFIRVVdeV3NN2S9aGSdOAUmzaHd4P1KbCagpjRUACbYqtdk4MKdCFQJ88fe8RMw9d6lFCKdZazt06J62gypSJK3oXDy2QR9FWHSUokIoXZALOQdd1B8HQwnMOKVHXNSD6Ds46dKUhS7BQO0NdO2IYyiYl80AVE4JJU1FXDuemaMSSPiVHGCrC4KQSo8H3AW003kd2XUcoIGmMkU3boYcKZx3BZHqy6LcYYXSGrPHKlfawTFM1pQVBFleFRhu3T8Blw86lyiYLWyoL2mFRzXsqsACXcl9DCGIKYZ3EximhlCVn8D7IMScVVV1jVaLKCmMzPnqCyoXxVjZcLQw6Z8WcJaaMsRUZGLwnKYWPEUJPiND7QFIGnxIpBy4uL1EKnjw5Z5hOaCaOmNLeWWvU1hvp2aLJEUn6ILhprMWHgeA9fdfhq4qq0oxOqsY6AReSlrF8EJQ558SYgLGSaDAkohY9SGOE3RpCEAfPlEg674MyCSRlvvV9JwBwYYA9BEpB7okfpN3PWgkm42631zOUIFCc8bRW8jwV6vxBtFa9o1shRy6gcAkErTWokngYI9oredQL5UEQkw+bnLi+HiqZIxVbU4R6UoIYiN5zNGv4+PEZH7/3lKNpw9G0obaa7fYjMokhBEJOuNkUN51RN1MWy2MeHz3i5OQU3cB6c43f3JF2HVNrmDpHbQ1p17FrPSlq8hDxwdMbjyeioiEOgdqMunuR4JNQ5a20X1NWR1dcTU8WExyJRWM4m1UcLyYoV6NsA6Zi0w+0XUfXO9p2h3E12hkR5u49wWe8WHDvq6wqj/dfFUKyFA9SzgIcqmKoFIuzedGukYkwgnLC+NxXJsNA326pjWU6qVBE+hjo2g3J96iU8X0vjAebyCngtTAbjVaEIXF/r2maivlsKkDxMDB0AyDaITEWVqQZnf5UcaUVNrTJh+1fKV3aYSPaSQLQD6kAfzD0QarL5doF21f7hEK0N0coOdO2LVfX11zf3PLRRx/y+MlTKmdpmoaj4xPOL695+eotF1c3vHr5WopYMeJsRYie27t72n6Dqw1Lo8BUJKMIKuNzBuNoQ+Tybkc/BGoLkNBW4yrHdv2auzct88mE+XKOmzasuy1d8BhXsWs7NvcrTE5szo45ndVMTGDbgA4nhKHn9W3Hb5/f8PLyHq8qgnJ0QbHaDWRWnJw0aBVod3c4m5jP5qigiKstYdUSu0S7DkTfM5tOUNGwvd6yCvekRwOL+UIKjmisAqM6MAmGa7A9zdSiWkvnI9O6YrFccrTLvL28oMqW5fExoY/s2p5KW7abnmt1z/nZEYvlhG0fuNvd0J9OIGh2qwvWb5/TOIUaWobVFp0ghZ7N/RuqiQHb4LRBB8fN6w1vui1aZxbLObdv13gS1WTCbAmmkuIPswomc9rNmm51z/36nm59y9JO2K02vHn5NWcnR5x8McPMzlBZ9G0p3QtPnp7xZ3/2K7oLGPyaZ99dcrfSdINj2ljmzQJdaxZVZjFpmE+X6CqzHW4YMHz8+Z/z+d2E//PvvsH4DUGv2bYX3OxaLlfg6p5fffqY2flT/v7X/8SX3z0j7HZssIS3r9hd9AxdwpSeiaTveP76/+afv/mG7+/vSJNTUjSYAEpZOh24jBX/6dkrLvodCzdhtjxnsfyIenHON2++Ql2s+Msv/pLBzbn87i2VViwmRxjnQGt0DthqIIQrju2U9+ePabsdr9a/5ubFP3LqTvm3/9N/4NFnX/D1P/8/PPtv33G+qPjTXzymzpb6T8/44C/+hubJR5zqKYaKI9PwKDtoLcOJ5/EvP+Plxd/x3ddf8ejsKbrZkHXH1etXXD9LfPfsDZt4w1245374in4baTjB1k8IzTHu2JJZExcT/vKPP2b2/jlKDeTYkrKH9Q7/Ysu671nNLWfLOcr16BhZtn
fU/QxrIyl7VGm5UihplTcSaxM8OQdZN6zMhxgCWYOrpQ0NZK/L2WL0HJUaMgGtPUrBrDYM3hNzwjUNzXRGxuODmKiNupw5g7ZKJO5QDP3AZrfl9v6OJ0/e49NPP6Udelrfo7XiaDnj/HTBtLb7GFkBKqt9Ufl3RbjGTVraIXNpfUxa0e56nn3zA99fX/DyxdfcXb6mv7sn3t2jNltUrzAECEH2n+oQm+9fI5sGSq4QQSv8esXN9SXOGnbdBh89UUWCkuKMqyuSF0a91rYMtcH7nqay+JjpugFMRlsx4hpCINlGGJOA0uJYmscij/Dt8UPi9maF36zRYWD181vRJ4peWg/+UBKq1VgTLT/lPmlNpV2RQfLowqhBCXgXvIAP1lrq0kaYorTEpjI2+/axByBBLG72SimsNvtuilEPeGyTSyqRVCSPoA3ChtdKk0MoxTqRe0jaEzLleNIarnOm27X7YlmOgQR7QGd/+QVg+R1NLPWgRS5niVOLSV3tHE1VE0PAZfax8iiJU9c1zKbk2Ms560SKYLTfgxepxNzBeykml+8au29GFDEi5OhUWFBWgRvjTaMg6wPgu5+fBUzaI9e6MIxAGFcFlDeS8+5N0BBwLuVUOqskjk1FkkXnjC652t5M5KcevX0+FA9dAmPca4XQUmmNVoZKOdGaG68hFeaU6ELtW82F3Wn2phcyb0XrUIAo4eiSYTRTEXDnXcDoIftuBNaAvRbZw9+llHC6GAiOlfwhFNKCETmwvame5Cxd1x0IHzkTCpNUAKSiza7fPR/54hLPjrk3aq8pqzUPgLQft+e+qw835qpycGmTHkG9n2ztfXD+I/B2AAPH79cHrb6gHnyXKrI+8m8xxT0AvP/hkPOA3N9Rw24EQcfjPRwTIeCMAHhCj8h6PtwjXc5Zl2c0J2G4jo7QWhXy0Th/9s90KuuHQui3v//1BwE6OeDY/sXhQSzIdDf0bHYttVMMXvTh0IohJlofSHhsnaidQ2XRQDMq44g4NJVKaOXRRLQKaOOpUhATCq2wOhN0Iihx5EMLe0wVRlAqoo2xWHNqI7TUWJLRyWTKRINOiTDsSH50rJH2qPXO0w6h3KCi+VReRkEeWnGAM4mkavmtEt0tRRGIzGNdQ9hQuiSMKcGESrSTotBULYX1YsUdUilhN/goVObDpBJKvnUGHRUmObQV5l4u7cAqQc6FfkzpDB/xvSgLi06yaUUtunOkiNFZKNw6M7QCUNnC1PfBk42SRE1pnGswtkaYIg6nwRhHbSzaRPIQcMUlM+dEDqG0WQkjxhUtMoVoMtR1zXQ6xdROtK2GQQZKq71Yq2ADheaqRUevUgaMpioLf8wCVA3DQNu2WCutFDGJ41AOSItyqWAYI6xCCug5btZ7gfdy/1OMZB9og6fXwtyyRR9MIf0BqtCkjVKYWglYVVpFMRpXNeSc2Q2ebihuv8mT4oDJ0voZlbQv+q4j9AM5KmxjsIVxJ526slnuKxb5wOoaKxoPN9afqpJAWTCjBDSVdUVzQWGtw1jZhHQxG9BaCyMScHashGVUMjKPYiYmcXAdejHFCLpsMCnRewG1ZPOy1JUja09UGuOljfju7r4EI5H6gw+YTOsiyGrF4FJplJaWWKVGl6QDezKkiEHmgK2KWysUR0slAUCRPQkxic5eTj8apySs5LHKpiTUVFpjrGgkKmMwRthmeqxeKQPaFlqypnaN6CQohQ6DbIpaYVOUIHdf/5L5HGNk6Ie9ZuN4r2QffFBWVbI56jS2tJRzRaEKSzhR1kgrDrWKhFIC3lij9gzDsbUipzHI1vuNQao9AryaMges0SIenBWKxMQZlk0FQ8vV6+fcpsCsdixmYiwTY2SIRRvi3lE1U2w94VK/4qo+4vT0lGpZUdcwN1BlcZRrjC0VVtlbQggMfS9sP5fxOeJSJvQeVxkmtWPazMQkwVWgNEOIKG2wxjCtK45mDR88OaVWmaOJ5WxqWTQVQ84kXeOzQt2t0VkxrRu6upFAU2V2bU8YEl3s8F3RpVMWraVVPJWgRfaYQ3U+UmjqKpWgSpLNqBVDcXdWWmHrSoxplOyRWit8P6BtwhqhKaQY6Dctq9WKNnhi1mjtCCkyFFakc7aAzLDdwXY7oXEORmMVJQUFrVSZy+K6GGIk6oRKEvyGGLFjK8C+yjpqC0VSYUONwbYkTQatxyBK2vz3Qr8anBP9SK0UPnkSijeXl/z2m2ccLabMF0um0ymLtsVWDcY1uGrC8xcvaXc7tNZM5zN8u6Mftgy7DuMhGI2pJuz6VtiPKRJjYr3ZsdGSz04bJ+07BJpGo1Lmqtuy0j2TeY9tLEElqtmEeVVTGUhxxep+RWxbhuMZc5e5jlt29zd86jPXneLN3Zq71tNUU8xkScTR9tJKr3TE6Z7kI7pW5C6R73vCtkMHQ82ELhq6dseqHxi6lnY9UDlD2Hq6vEMrsNaw2+14+/aClBLz+RLjplyvIqtdZLZo6IfMZtvTtgNDH8kzjTEN9aIhNwrfdoS0ISWDthOMrdG5w9mEMz27m2dcvPwt7f0158tjJqamMgY9mRGGgRgD3eaeaqrRuqayYguw3ezIKVCrVAqhIrRs60TyW6I2cHoELuHSCq03ZLtDuS2zKtOcWWx/TGWbkiwaajsh24QPLV27YnN3he9uWEwdCz0jR0tdnXD26JjlUUMXBokDaGnXV3z/4iVDzkzsK7atxv7DM75607DbrTDaU5sdIa/4+pvf0LzqOZ8t+PjDM7KrCN2W11dXRBQhOtIPb6nClOlkiYod2/aGF69/g9nAD2+vuekmqOYUGx3D0IJ22Frxw/3A7svnuMWCf/8//jWfPv4FTf0JbTxi1UTskeXlxSVf/tM/cGyXVNOpPEsJ0BVa1ZArtMt8+PGf8fj4Azbrt9Sh5VwNrF/fcXoGf/U/fM75csOpe8FkFnj/s084ffQRtnkPd/whuT7B1McwaDk3bRlMpmoyy+kMfdqzfPSYiZ3x4tX/x839b1jdd6xXNfe3mbfrO67WL0hqYDmvmU+lnTwBjXuPPD9n5yriXGHiCrOKtE3i1f0VN795Rfrqjlg38Mljmqnjg8mER9Mjlm6gji2Sxxe+S4bdes3rH16w3Wxo6ooPP3iKcwbTOLRpMFYxMTPRFrZVAVcQUNM1YI8KW6kk+aWoNDGQCNJh7Qyi+CytuqMKWiaLpmYSMKVpJkyXC6rdluVyyenZGdf3t/RhYNfuUIPD6IDOXhg15RpSKiHGg8T7J18574s7KWR2dy2vv3vFm9srrl9f0G/X6K6HYUD5gInSfplDLBpxDpVFH+6AzD2IE0rmMdytefH117x8/kNZz7NoQQPaWrS1OKuJOkKUwmC32xKixxhhlHf9QNt7djERu45kJ2grsex44WoPtJREWwuTXWEwpqKqVNEFNrhKlUH6FygiHOL+QyI/7qnFpRL20hsJaa0l5/2/6X0ckRDxxFRMwATIEcbhu8ygXHTCxa1Ti2urktwtllgzqkQ2I8JY9OgyY0RW2H7sWX85ZYZhQCUvMy8Nok1c4s4R8BD9VrVv6RtfI
zCxJ4sohbLmwbUK2KScw04mYhxY5pnkgRFKTmC0dBikPBpjHNqCx/+nnGg7L3nsHpg5AB/JKKJoPhEjZJUJCpJRgAFb9PQePAZ5BHXK06clbSerRCpFZLU/BxmDqETaSSstUjjKSH04Hdrf9+zFdx6vvG/pHM0uHjLAYowyX/JB1kmM+DTWZIxOhEgx4ZUkWI+FhALQGa0ezJkyn8pxtDFY5945H8nJSptufpehJsDZu3nayHLzo3ZiPoBCOSV8ibEeMuvGOVQj4Fl+8P0jOWN/TowAFIzGCQ+dicfxG+PZUUMORkaYdOsIwKV4sPj86M/j3w96dgdg8HfBufF84dAC/JAB+FOgpvqd8oURNAAAIABJREFU9fZdsJHMQUbrR7nw4dgPPl3Wmv0z+GB+HZ7LsTgvLGyl0o8+/+457cFj9L6wwYN8fHzPocWWP/j6gwBdytImOe6FKeXilGNkUdMVVTWhamYYLRbTKSeUFqMGHxKkgdAHdE6IJW4iR0XwmiHvCv1VxM1NYXntW+aCKzpEsSTVglCKu0ehmqZMDknAI+swzmJHxkmGaVMzqRzJN/i2FdF+XaGcQ9mOTdfTdwXYySMSDoqEVaJHp1UiESVBRxWbbAgpkCkut8oW/aIs4BmZmpqcpaWRJJPBqkJfN+IS45NCeRhSJJbKXNZCxR86Xx7euGcHMQKUxb1oBE3HRIoCXAqgIomZk11HNJyUTDWnFAOgi3OqUkp0tLq2aC9oOr8lbXZ7kXFyJtgAxlEphQqJjC2sMEVW0hZljMNZg2l7KaBlcVH1w0DXt+gc6P0gi4ORa9H7Eozaa32JNEABR7QVMFRDDqKZoIylnkwZQgRtZC4UoCNmYQ+aArpYa4UyHNOeMaRLi6OzFqMc2VWofUAnFTRrBKgIgyd6LxpDWpPLGuh9T4i5nKthyKm0QYLTxck2JjRQO0td19KyFsHnAWs1oTC7qrou4FYuOlwJpQ5iqOnhogE/CjgOtGqjNckYcZ5UkEnEAhzUdSWOaEaxb6EojJ6hz3stRoulnhiiBhUMOkoFYayaorK0plrZOGPK+BwYfCBlVVwmhY1ljLgOUVxu/SDsyrFVWchzSp4VDm11UnwrVTolLshDUDgEgNBWmHDtMBCiKq6mMjwxZXzMDIVVOjLEhGEYwekDeywdqi3sAV1bxu8g7jrqv6SUGXykqZ2AFA8qIrnclxhCYSoeGJBDYdM9pKOPr7G19dDmqvax+AjQjeK8+2rgg+CW8v7RMKJwnlGE/fXtPye3TzT/lKIyWoJqK58V52NZD6qqotYK3+64vtkSfMu8caymtXxfGSuMIWxhiNcMIZMjzNyC87NzTp4e8+TpMZOjOY2tmNQ1k9JiLmu4gLshRKnOG00qY19pxbyZcHw0Zzad0LYdOYssQEgSoDlrqZxlMqk4PlowMTCzmXmtqA1kn4uYuN4HalUzoapcmQMJayoywmYMPjIMoq2p1SgLXoI8KHqH7NcQW9YwhSXjGPUlhsHTth3eB6qmppk0sukqjaoyrhE5BWWkIuqMkVZk7xn6fu+6ZxTSIhIjfhhIIRK0aAZt1htUSmWfyjij9sY7trQAVVVNpmZQh/aMtu1olEgDjMLYsoXkfaC+n3sckgcxEZLEIitpNRnnpAQio+6pGH5kMqvNmtXqhvmk4fhowXQ64/TsDFM11NMZu37gh+++Q2nNbLlgsIrtNtL7DV0/EDdrtBnofSCjsM6RE7hqijOabCaErCBGdE7YYIhBEciYSokOiTJUVUXlKobes17v6FovQImq8aFi6z2hjQzdBqo7hmpGlzRdUjhXoZupuP96T4gQYsdiqjmeH1GZzPamI9x5fNdjlEObxPX1iovLK3IOJcsJHC1m7HYDIcg8sday3u64udtRNw3H1RxlpwxR2KUBGG7uuL/d0raR+7uOy8s7Xr644f33fsbjRx/h1Jz5Ys506um2mdXdW25ajzs9JseW3d09freiNom6UkymDdQZghLTg/WO9S6hhh6fMsZMeXy+pEmaod1yPD9mfjQjxIGUA3ndoY3GGQdXl3B/i25bVhdvef3mNX3fsqksjdPEdku9fFJMw8RZUA0e5Vfk7RWr11/z8qv/ylK1NI9OmbuGs+MJTz77nMWjBW+urtFGcz7XrK9f8OzZD1zfXHO+7EnqjBAyu9UtJu7wq1tWry1DHti+veVi5sid559+/R0xKS5fXbNtI1RzVJyzu3GErDARrAoYE1mv7ogejqdHhKHi7brHq4S3BpQjpsAEy6OTp3zy8S/4+Z//ax5//AuIR8x9w+znM3x+zfOLf+Sue8HjTz+nOt2RqzvIMzBigKO9RfVHNNQ0iwXTk0fMT4/55P0nvP76n3H2jv7tf+Ozz894+sv/nRg8zWTJ7PhjMKdkJihbgRV95SxbJQqNi4pcT6mrT3l89hTjW1reZ37e8Gj5S84fLZgef8XJN5avvrwiqcgHHzzivZ99TOwarl56djc3BD8nT3reXHzP97/5O+5vblFPTnijAsPFhg+Y8PMPP+Hok0/Y+p43z5/j1Jp8e0mY7KgnT8E5oh/odz33N/dcXLxlu15zfn6Cm36CrrTIaZRiiLOmUFqqIuFTYg5dYp9RBEw2Wwhe3LxJkGOJHYRZptAPmDxqT3QxxbF8s9sKq0xp2q5lvdvw5vKCq9trmtMp5yeLouElu61OucjnHfb73/s6LJ7kIbG73XL78pK7uythr8aACQlixqSDtphomY0f1yM+tC+YJrK4AaqSLw09oe/E0T5BO3gubm65X99weX3DerUjZkVXYjNrLN12Q7vb4rsd6+2GkBLJVHilUdWEZumonGUYY5Ex7izXJNrIog+V923EI6PlkJL8ywDdYaz22rz6XbfH8RDSihpFAjCPhXP5d2HPlbyvAHiKwmLMjD2aD6ZNhiTdJooCLJWcMY9JeUrC1CjgaBE82Z+yxKzsC58Z0ctWGZzKJY6KRRNb74ujDxlCewOxAtiNP3udLqRFVqcsRbgcSUr0wlFSpI1lTycDxSRtBLQkBpd25FRQ0KRAUZzdC6A0iqNpI4w5Vd6blRQhY5mEIpkvJI9sxhj68BqZQkpZNGKapnISfbnD7N1/yGpLkWDHag2uQk8m0kqaC3g2ArF7MOZdIOch+AGHQvx+/ozFaaUwSAHzoVupHKswLvfmFEA+fGf8ERlivH+jbuAIDI4dTeN5HUwHCnCYx+87sLpCCCX+DTwElnLODDnIXFSqdKKNS0osMkdiDJlz+EmihuQeI/CXGVs0x5fRdg/i/TiXHMHnd2C5w607gFsFjFQPQODxLv13Pv3vjMdIZgghSPdHPpzb+PuH3/+7wB37tfnQMTSyNB+Or8zXh8Duw+t7cLCyiIwjsR+Bcp+lCzCjyxJyAMTHbqmHLEP281X9iwP0BwG6sfc85UiQvWAvjq2Npaotk/mc+eIIrTJD38miYSu0MSLuH7xUaIdIzgGVAsFHYo6ETjZK54rDh9UYYAiDUK6TFsZKqVCIbTR7IdMhiJMkxkmrUtVjq1oARJVRNuFMotGgkpg3SNVE
0HphfxkGE8u15iLDKUtybZ10YWgBoMqoyCIJ4saKKlo9EEsgobVAH0PKZLQ44pTAQhYKEYGtC6CHTcWlVwbcI4u/9wIcDjHgc/F50QpV0PvDg4VQsnOhKCsRbCRAMmMViL32ltNCl27qGmuEEq2toe17+t2WhKKqG3RWhEGMDtxojBATobSGJi/VPmIim2L2kcQkIKaE74Qh5ypHZQ0pBPq+xxUQJObi0lR2OV2CoFwq7SmMyL/aL+Ky6ZTW4CgtpbZUWrOSz2VVQLIHQrtKSQCo0siako1vFNR3VnSo3OiylALaKqxTxOBpyXgSzhicEZ0+ozRdEajPKAEyu0GAVmXovSwsKg04DbZywijMGTBMJw2Vc3Jvjd7f01goyg9pvu9sOOV6HpofCFjHAWhCgNFxERuZg8bqUrnxZVMS6rjKQIrkGCXgSoah7+mVok8Z7xM2jquXOlT4SuucYqyCHFpRtdZYFMo4nBX9Pwp4TCpteD6RwqhDVzorky73WIIKce1U4toVMmL8oel9QOuBlBK9EBjLeQnbT2kr7sml6pVSxg8DfhgwtoFSWXxYbRutynMp/2QE7A2FQaiydOgQIrqZMJoGoEoApikutKY4XcmxRED199Pyy03dV5wEJnv3vfu/jRvx4a/7gGME6VAancaNqgRfWjQ0nNHolKmco9JC96+sFb3FcSPLSYw+EKDnaLmkeXSCNZmmMggmp7FVRd1MsHVNiLBpO9abFt9HzpeP+OjDDzn/4IzZwmKGljh4AYCtoWpqrOuJxQU2p4SrapLVEDxWa6azCU/Pzzg+PkIp6HctvZdnzIC0jioFOUIKGKQy7vueLmVMZYRV5HJpzzZoI60SoxuY0YZJU+8D0VSA/qgMqrjSCYNOSxFIjxqJ8nw5Z7BudNdSwuJQir4bIMIqbNhXuUNgCInYe/Ae3w9M6lrWyxTwSYpcWolenDNyfJQiBr23sy95Hyl4dtsN5EhlNKoRvaRkoGkaJrMZVVPjg6VvB4IHpT1KHdgpOWUBR4u7X4oBjd2zP0bQWPQbD8mGJBMWo4W9LkmKImVhuLq6pp5OsHXFsOt5e/mWN29e8fjxEx4/fY8nsznT+ZKr61t+eP4DKmumi7mMpVNsthDalWghhp7BR6x1wjisVDG+EcZD7zNOGWFtm0pYuUaBSXShpW87atWAV3TDwNXVLUM3cLw4pmnmxKTYDjuIC1KXeXXpcScZbINnw64PrHY9oW+xJOLEstm1aDvnvD4ixZ7rm21x8svE0JHYsm53XK1WhL5nUjnmswkRh4+G0CbatsO5CuOmTOdP0cax6Sz3mw3fv9qw84ZTPafdea6u7uh3gW5o8XHAh8xssaOqt1Sm4uT0iGwNu92KYfD0MdJt1qg3nlmTmVRzat2I2ci0xueBbr1jvYm8fL3i9eU9Pll80pyenvH+yRHt/QqVIouUMUrcyYPvGXYtEPEh013fl/aYyLdf/5avvvwtIXlcA4keo+GTT3/F5MNL5se3xNyidMZoT5M9an1Nd/kDm7tL+tdHXF14dsOS23bL6i5zuVox9DvCkzk5R97e3nJ1ecOjs6f80a/+HU/e/xSqL3n1+oKb2zfE1QprHKQZmzahc2K5DhyfLPnwow849eCZEf2CRp1y3DTQXdFuWrRz7GjobcXZ2R9hVcvlxQ90scNMK5Rx9JueTz78gH//v/0H/s1f/SmL5QnKKdRCU8WI7jTrlebk7DG/PPsrjpdzch1RRuQbghaHdUdNt/asb3bMlorF41MW84bpYsL5x++ze/uG9vaKejnn7INfEN2SHGuoTshZHK6zVWQtMgwZ0RPWTtwis7FouyjgRMvZo/eoqk9oZj9jcXrE+aPHfPJhw5Njx2r7mvkHR3zy53/KsTvj1d+/5R//6Ttevr1l9tFjbIo8e3PB9y8u+Pj4KScff8r8PcujPnFyckpSgeuLC77+8mu28wnLPBBNx6OlpV4uUQjwdnx6wmcKNtsNs2mNqTS6NtKqGT0YSe5RFrKVovmoESXQCuImVBKSnEAFhH4P6KpI3xTwA2QdS1GOWfZYYqbbddzerWi7ju1mw7Nvv+Vidc3d+p7l0ZJffP4Zf/zzzziaNqgc0bmUePY6pOxzrHEvHnMHNf5rce/MKeG3Pd3NhrBuMT04Y8hxBHkKeDHq8kER2VflqIfvgiQAXRbA3x0vefLRR5ydnfObV294c3lNl+9Zb294/eKCu/sVKRt8kBzBGkv0A+SARswHlvMFzWKJV5agLUEZNkMP9ZQ9RDcCbop93qOVI+CI0dP7SN8H+kH0buv8h9u3fif8KfGb3o/vIbElS9wYgoeIGIgo6QAgZmEdatmjc0yoWGBDrfcdOfviZ/mcj5Fdu0M7RzI11taFJMJ+v5O7MZ4Dh/texiSXE1dKiADaOCqrqY0w3kiJyjxgKZmDocAhhn6XPaRHHeYRFB4Bo5HBqEfXV8OoxzYCKXmc3w9QlTGORkEquiaxgAbamX0MIHnSQTtsNFjIKh/ysJKf7M+fEbzgcP4lzrKIhMxYds8gOZjWe11Ao0aAThGsBldD7UjOlXMd557kFj9mWY0twPkB4Da2K4NhiANq1MAswOQYj8szJHkHOZcOtcLGLP+WczowGpFcagSPRlDnnbbhH5ElxtceoClzcmx1HsGoH+d54zWOMkVqnGcPfsf+Nw+6797JL3JhxB1Awn3uxgEE3Hdo/c7nyyxXPDi/d8G6fbI0LncPvz0fzu+nmGa/7zWyQYchS3fQWMh/0A67P/4fAunkzQ9+9yOg7PCuw9WWhO0AQirU3tG5XG9Z5dVDxdFxLMpzur9r6nB/xu89nNfvHYL96w8DdOW/pPT+Ac9Kg84SHCgN2qBNJQYEGdHbqRqss9Qp0ncdbc5FRN1isoYUUTES4iBIe91QTadUjSQqqe/xvafd7rAK6kKlheKQlyToit4XAE8Tck/yAd0NmErca1SI9DqiwwDRF40gJVWGwdP3gRjCPqnNubT+lGJdyoKIqkRp8y3+k0qhiDhTEUnCNEwHMn3SmqQdbY4ENEZntBI2iyKhohfLa8BmJYtucf/sM4Qg7CRtLTlrFBGMlkBMjY+bYlQhjDESVSbqeOjhjwmTNeN/RoFGRNZzCuQgi/4oKmqbhqjAxUBEUTUNU1fhh4EMVMWht29bhrbDpCx6cmQRLFXsRTyjj3TDIG21IYCVliNyJPhB/m7E0Vbng1uNVqILMc5bX1xMYxJnR5DPjaKhPgz4mApwJ6wr0AVQiSQjLqQhRVTUjCOnC0Dgh0gMkaHtyIMnaIXXBqJUM0xtsZUhpyjaTjFgnMx9EAaSUZFKO7SrULYS8EZptKvZdQM5Z0z2GBVl82cE2fSePu0HjwsBEwOo4tBTxmL/LO5BtsPrp2jD43sFwJHgq9hMyRNdTEqGriWTsVZJu1eQgD/FsG9ZU0pA50MFRsZ5bOO2ShewXFhytWlo6oZkB1RWVM6hdRRzCa1LnVUqZc5IhSyXdmgp/8vxC2+VVBa6UdxXKNwRYxVOW4YQRNutks0uq1Kpy6qsVVIxNwiDL4b
WTReY/rHUigDw4D6oQrSct2oiyUPEeMVyAoxbiwH601SCp1AQ1Y60glkeJcw0VRZ8Vx0jbLYjHO1yrV6QGzxrLqe9J6za6KNGNUr0mrNbJsfFLptbnOybaGyNlCcgo1NPjoO2VFqkaJwTsVTZdcmJLqbFmjOgbeWkRpZxipLr1Ze8oX7bK6Sddiizr1lUKm1tXqPdLzUtCjucWaukKZmrzr81VZHPWYKhCvbB3qg+xsdZR1VYfPnKj/J/T9hMiXSn0ulXlhWqVGpLIwc8UYmiCvq4uTqDabyboBe1edoczyXtc+I7dzcwtV3jllZvjQA5ngDYNVs5QoGUgKfuXMOI7cPzxwMz8lxlgBRr1vubrGZlGAVgExU1mA+r2pVZpUlKXnKkUeYxagNIuasOQo2m5hIcbE3f2dMmrsFZ1v+hWVlWgtPqg4MqZSuxF1xq2DnVPWdtTOa9XZG5wSuwBlQ80xMU2JlDIpFwXfRNlv3qmDbBcCyTZmseoJUhNrZ7XdVIrRRKxuQs60rywAcTMzcF7BmaWVubVW1/mnDLwaHLXgq5qLtAgoZ3VL64LHWsfh/oDzjjkatt0Vf/xP/gk/+dM/4Re/+zv+/D/8e9796lfEd+8x3YqEfq53qrs5TxO5+GUjL0VbD2NKCgR7p8WZuk4a0+7rWSsFapxiVegR4/V/4iDlgjfVSMgYus7S956hDwxDoPPa4h+sxWUHocOLU11Ip6zlh32hlAoS4gheixqmQBStZC7OyBVYMcZXM5VqMERSAN1Zdd1N2vaqGjJK6bVO3UuzaCudauS18W/JWX12a8nUGMs8J47HB+YYuLke6LpQHctFHfZcIOUTu8p5r8FyzpQS9XlipDAidsb6HhsEM9XiQqGChD3WrqvWZAFzSQiCdz1SEvvdHesu8erlx+xz4c39EecHri468jSxGgbwA/spMW6f0+UdIR/59tu3mDExZ219uL+745s3r/n9737Hf7m84MXzJ2yHnouh49Wrlxz2e7744kvuH27B7ejWa03csz5ztguE1YqVM7h1x2q9Io1HDuORZxcbnr98ju1g/GbkkNREw3iDOG3DKdYwl4hQWy2cV8Bm0pYREwLdak2cj7y9fdB1vHiQPd0QMFbYzzOkgrcD+MrGTobOOmXEUri6WnO58ry5fyBPM4hfAtQFTGv5sNHkJ6kFuf7CqWLSnIU56zooRjcZbVVqwujaxmlr8N72S2sNxjmMUzZBQvXsTE1YtWAl9L7DBCFJZk4GYz1TgV0sTFOs7NrMLIm/+91bdveRq2HDxYsbdvsjr7/9FlPB+7fvJiJbwjrjDjsun11zub3E9Stunn3E/WEkZd0Dur4j5kQSTcRKScoGDgEtsKYFa3ChZ3N5xcVmxdXNNV+/ec3vv/oSZwz3uz3jPGMqoNuJgcOEWNWTnet6IyjzfJ5nuuq63Hltj7bG4K0mxyE4mt7OOE61muEwxWBTwcSZEFa40HE3Hnj35ReYwwHrOko2/Oov/4off/YZn//kYz56+ZQ//4//J8fX3/GkM4y7f8r8cM+3X39FeSMUb9nFxGES5jkuUCst0l7E3d2Jvb+85/yl8fIJymMB8UrRdLBzGcoIeYeUe+L8luP0jof9ka/f/T3389+zS9+RJ4fxBw7zgULis89+zqtPr7h8+jGhN3z161/y9vUbjtMEw8QhTkxp4jAeKeOGktc409MHbfssJJwLGs+Wxj3SdTSmAlNknKOaRZ3pGp3KZTVxkpbQV82hWvQsBeZZ2b+pZFbrAUFdo3vnKV3g/yPtzZ8lua77zs+5S2ZW1Vt7RQMgABIUKZAy5dEscoQdE+EZ/zYe/82aCEuyxmN7FCHSIiWS2Hpf3lpVmXk3/3DuzXogQVIReoiObnS/JSvr5r3nfM93Cbe3bE420DnCPmOhDtBm3Oo+zvU443QIHTKX+2uePv2KcrHlche4uN1xvjnh/rDm6xdf8/e/+jvGtFWf4mJ4cv8JH37nQ97u3vGLz/+e6/07CBPvnT0k9x/wx49/jBOtfXMOxBRQORq1lmv1mSzNeQMff3/rxmI//bs/dP2wQGOm3t9loeiQtdShONSGxUAe2e8umMKOZDPiRG07sqGUwDL0s5YpRN69u+Lt22vevR1Iry/45I/e52j1eFmzhW9/PXdlZgo8NCaXguWmZEzOlSFcaCBGThr61Rh5QlEj9xiRFLE5q6imKOkipajycRS4l3rGtnWVMxXgoDKaQGp99E3ftmWUW9nKicYgaImXlsqrEFHcQOqA3RQkC86KKj2SEKMGAZhiajieoo6Chh6mlFR54L2CcjmRS/2Z0vwOC1Rpf6nSwOb3bUpjSFWwQpr/V8Nz259VQdNEmYtHV+21UvNQlSaWruoMsSxBjO3dFlFgTKxuT0VlmboMtQqVgspBqeylssCIyAJsat9RGriYi6qkKghm2nmKVPxRKphfn6vceq87fYzU56AcPBXbpR0WZX0UKJh+xWc/+Rf8X//h/+DRsGG6OeaXP3vOf/tPf8l/+i//Ga6vuLm+wM4T3nUkU1U2YnG+J83KYFNZagvYVN0cKCAqtJ41L/eARnKxzQ5JtCepxbIstkjV974c5MdSVI2DND/oQ+KpXsvh1S5eiFXF9M0BQCMUKERmF6XQt3+Pu76K7UaK2Grz1UCqurYqqJiMaM+12XB6es65BLa5cDFNpBDJxi3P1rJu5UBSOHgMHuTEi3+eNGblYZ/8Blxan4emMNCh650FQPs0g7HNRsuQsrAfE++utnSv3/DKn/Kdt++4vblmP24Z5z1jicxZ/ZOLOVxX+/2gnvn9LOjfC9ClChZkURCJ1szWNyimpGjnnDASkYQWw97SWbApIRUsMFiskzueT4l52us2n7XIkOrtFXNmwrAfRyaRqkdWCZTZWmwNfJjnefGUylEDFQp6splSvXqKYEULvjllcu8J3tWvVS8WlY7WRW0qECYo6CBlOTwVXWkLksUgvoocUcN5BTsEQbzXyVOqG1qdbCggUmUtJdcGSxeNFCVpmFzqF5bl4WzLrG2yzvm6wSSid5DqAVMU/HFGOO4tp+uOdd/hDXROWHfayHbeqh9aDkz7PTEqeGNw5OKQ6Om6DimoqXhK+F4n5BoAkXF9hxj1KLmdNGGvscKkTl1yUgjSunqnkiZgzfMe4y0lp4Ux1h6aqRpdLycJLBTwtikfPL4OG8/hMK3RzikRgxqtSy6UKHUqpgCgM4J4R5RCEGEGTJfJTqfvC20Xqt8g2PosWNLBzLZuIm3yULKCTUXadEqp4hqpnckxqLypMnJSgRASxupEyjhHNwy4BGLqw1xU/tskkcZIXedQiq0gdvML0DWl5qiVOl2LG4pucMUsvFi9Nykr0BIjuTYrNptlQzSNJpx1i8v1nTiEduT6HikDKC8FoQJGbWNu0+KUM3PUpjbmwjjNzCER60ERUsTUpFpnVW7rRNm4TnRiJKIeYZ139NZVjxFIeLLpNNrddKzWA2PYcXNzTWPCPrh3jjX6HFIquGoMznqs9RSSBt8UXbOlaCqXc3pwqp+GUSAuK/gSojKe
UtHXjKlFqIjGdWeVelrnNJBEF25tOtRbqlRgVSolfwlqodz5Vb9OjHqe1alWzBo3Y7FLwV84FCNt8qovppAiOHdgvJacyTGpTIeI7xy7sYBZcX56xJ//L/+SDz/7MYUVX0jP5Re/4tXbt8xJfa+6bgUhkwjk5MkxkSQTjYZJaEhMlQdLTa+yegBKNSF2xtYpp1ZOVdhPotL3bSFKRoyabFtr8J2l7x2rlTJtvM2IVWntSizZe1wUcIlie4wz7MeBqTOYkjhardhsVvR9R4nj0tS3Yl736gaAKChtlFoMolI4YzJQk1uNqBcmFuN6KEIMmtDqTAEsznWUooVsrkw5Bf1qAYYyN1OspumlSgcwOvSyTkMxYktuFgqJHBMlhxqs4Ch5xlRWng4PlDVQMIQkhGTIeGJuL+cIa5RxNe1v6GVmvTnm9N5DwnbE7OD09BwXI/v5gqFbgx1YeUfanMN8A9MN2+2M7SMRix02zBlu9hO7/Y43b99wdfGGPI1sho6j1Zqr61s9U2sRPc+JMQSm/cjORw1PsAKmeg6iRWMisw0TuzRTXAVEw0zJiSQzOer3NF4DblIsGGPx4jVoovNkiYRSGHMiAWOOpHhLDoXjjeHBo/tMJLbPXxPjxNHxMfZoxS7vmXPEdxvWw0CMe3JRD5JiWjMV2y57AF6krq+szaCm5SnjIObMnBMhF0Ju3qYCxSzNRzPblpKJWcAo6476PBUDIQeaEUSxWtxnozIl4wzZqA2EiCObgSQrokR9Jt3AHCdCgrzLfP50x27vuLeBm5uvKGFke3vJHEZ2Y2C7L3z06Y949MFjknH49YZ3t1ve3syEkLGdZ9wF5pTqhFqHGVaEruvoh560rwPWlLi5uSWJYX16ypOPvsODe2ccrwcu3rzm6uINt1fXRBFM12vSWu8p06RFMSrzb7VcjFG9clDvxWFIlC5TvMM7tbqQWjjbajpunE7/U1LmN9PMYAwr74gl0jm4v15xfP+UT+6fsNtP7O5veHTWs7GJJ2cn/Os/+zHxdsuPv/89Bq+MGVutGiRpavX5yYl6kzUEIwvYKofCUKkQv1Wb15L/8G+/gX4oZqD7VSYijIjMjPtrnn71D7y++JLswAyB7322oesGSJa4F8IeVp3jo++cc3JeuPeg49H9f8Gze+dcPn9NsT3ZD1y9fsX27f/M/Q8/YnP/MffOTnl0vGZz9pSrYNjlV1xvEyXVeigqwzynzG67x4eoFjV14N5kV4vx99LQWETp7kujqGW+rpX9dof1npIi52en3Ds9ZdMP7K+vefPiGavjDXQDdky4/og47nnx7DkfPdooAJASkmF7c80//uM/8OXTr9gVx8UY2U2J73/vE/7Pf/Ov2Dw6YvuLwOdfPWeubOQ3T7bYsyMurm75/Mu3XFy8xZaZ6SF8/OaS97dbhiOvwzWjz6kxWieUMFXjzAZaaKNdSW9/AHz7p3zUNQQoUFfvZ+s1KFAiIUxIznjbQQIxPbBlHN8xx1uy8fUKa6r8MjAo5JDxvsO5I4bVA9abJ7z44pI5sAAq//SrrTC1VACgMtWkVMCqZFVy5FjVWjDNCvpbCiVFYpwoYVblSNEaSGl3Rv2vqzVAqSC2hiA2oKYyWgpkUR1nG27/1rXKt/z/nf5F7SpQILPVdSXV6zJqa5CK2sGIpZOeECPOFPp1z8xMjJFcaiBZHdS2AQ2UKo+UO4DrnZ7ot66vrrNGMPnGazh8nYJEDXBrfRV3V4z+m6jtCHUQmWn16eFeLPVrW3l1bSuTzdbPr72wMaq2KqhvvThMBS4Trc8zy7W2e23MnQVWMqlKmtW7va2jurYqYNXAX/UO/panbGkphWHVc//RPZ58+D6P+w3EM+4fP2AwhTFH+Md/4Is4I+MtvuuIVsMYTdH0TKOmcHXNVVUFB6DJO1UupHIHrJEKWxqt/ZsksxEQFKOrcuzq65aLKgibukv1BSy+hHfBubt/ts2n+87H0kdnWayZXPWCa0DZt4VENNnp4fvcwd7lAGA779T2QwyxRGyxdMPA0dERRymQdyM23ixncuv1G+6iJAW5A3DVVVna/qaryzSQThQELlDB76pEaMq5pTaThVBVF2x9jlUr1rCABIzzzNXFFeXrp3wxOc6/fMaL16+53m2Z0kQ0oJXk4T59G0D6hz7+AIOuglP2t79pAYyzGo3tPNZbRAOLyDVp1dqCSfXmeYPvO1znFvPEsh9VwldQ+al15KxgUJwjRJVIUgSpZoykpFJPEU5WG6BQumo0X5kqVpoj3bFSAAAgAElEQVSRZYIU6aJRxoURpOuhd0wlqyS0Kk1iVmaCchGanLDRMw88WymlilIr7VoO0wD9FH3IYt3407JxtYgCWTYxsaaCLVUy1cCdpVAR1FjULFOKOvNAaCbTOsVf9V5BC2OwOJ0QzHs23rCyQm+yJskWoTeGtYWTTa9y3wRdDoTqpaDVnWHYHNN1PRZhiuqd5TuPqwCdiOD7DnGe/TxzPI5MMdSYaHD+Ene7Zxoj1niM65ThEyNjDMxJpWdd13GyXtHZQZlPQYG9kKNOZX7zNGx/J+3fvn3Bh3nWJNRZJdA7o6zEznuGztciw1Rj24hMgWm7pfOu+oYkNJhiQQgB6sNv6PquAivaYNmqU8/U6c3yRmu4hDiHGEdBJ3khJlJSQM45vT/FqjbUuY6jo2Nd+xWBT6UyeuqULKP+ZD4kYsy4ELUZcZkYI9Okxu+LG+syOWBZgwfqfl1d+ZDU04xbFWWURhOkBW4oK1QTPENMmKwpizHMOjk0anLb5LTUr8MaxFsSRYHemIhFve7mmPW5NUrHLuQ6iVNwv4WTGEFZnLWp6jqPd745YGAlI94zp0ARy7Dq6edEDJnduGe723F0tAbTI6b6rpVc0xhBGa7VVLYeWLYWALbY+v5rwEfO2miknJQJa12VDgda4nMDMRUq1YTjxUuwHm5StAS2HFiPYko1X5VGJFjWe0v63I17nDEqj6igtFki0kUleLHOPcuhdjMCOUdIDaAr2gilyLTf0a9XVUYshCC8evmSm9trHjz6Ph99/BnncyJ/8j3+4q//H56+fEYqhq6z5LKvk85CCUl9TOskTKoUNFXmaQvHac9ym3oZDofrwqwsVVZjC9lmNV52BesVoOt6y9A7VoPDESlU8NoKyRliSfQua9BFafJZj7c9R5sV63VP5w1hUjaNNSpDdmLwzmmCGQZjHZ3vmeeaqFVNZ1ogSvM6ibF6nFmnwGeBkjK2E7zv8P2gEjTnyLP6ogGa2I1l6AcG7+k6U0NFEhktZmNKxKzMxBCVlaFqAR2E9J0jTeBKhykdJVrmsTDuA9MYiVHXVUxqlRCyPoO5SmQlgxAhTnQrS7ca2IfEdkqsNsfcu3ePePkWTMKGG24uX0N/gvhO/xx33N5eUYylWx/hB8fRsOHceHb7PddX1+QY2N/CxeUFz58/x2DpugGxPWI7ssB+juzmwJRhc3wMvaPMe+I8s09zZUoK1/sbnr7OkGYymd57xipJEleHdZ3Dxcw0RaY5EufIIMqWFzy7GMjbWrNZi/eeZAN
iIrmEKg8p+jw5TVTLuXo+lqwNIInLyzfcXs/s9hMFp/KupVjV1Z3QfU5EGcBaYiiwFlFGXSwKrddcHppFQz1QFl8TTfstlXlrq+wkE9JMKlGlMEa9nkKesXh614PJTGEmJkjMdM6SghDwZGtISZBuwA8d+xR49S7x5s1rNl744L0HDMcPuX73lje37yimoz8/48FHH+BXx4yhsN8FchSmmy3ZWoo1mM5hOo9MtfBWaoyeNaY++SJM00S8ueWo6/nO/Yf80Wc/5MP3HxOnkd31JV9/+SV//Zf/kadPnxLqAGWKgSkp86+zVlN9azOesvrZmpiwIdb7lij4bySpWWvxzuG8A3F04rk33ePm8lKHj9X/8JOPPuF/+vN/zQ/+5E958Ph9tvPMu4u3nJ4c8YPvf49H9+7xnX//78gpc9Z1nDqhi0f85LMfYo9WnH/0MdspYvtjjtZHLEtDDvs7tdr7to+l8fld/wALWFCKKj7Ak4rh6nrLs+ev6I4tp4/us3mwousHnHjmrWO6MYRd4PLiS3KeuXfvhEcPv8+94TPypz9Bho592vP62QskJFa//AK7PuL06IwH6w3iBl68e8ebqy3bcVtBuYJYFKQrmZGZlHTwn1Id9S36vVaB63u3ADelLE2eesLqMG+eJrbX11xdXHC6OeK9R4/wYnk57bG9I4owbI443TgkZbZX7/jy8y/50acfYHEaalXZeK+vbvnq1XPe3my53kW61TGn58ewFu5/9Jh7T97jV0+fc3tzC16IQ0d//5wjt+L8/kfYckyet8zzzNvLG66ubzhbrTXExoLzBu9Vmh+r/5OpIMfd3vCfD85BpdzW72ka5lC/twJgMU3sx2t9LlZr9eqSQk63hHBDKmOtgUyV5mt/oHYbUErC+w6xA0WO8f1j/PAW3x+1yeTvv8TWbiy1Tf3ryoCyKBFAmX4Zyfo7SW134jiBgKsAXYpB/cuqJ1XJQnEGW1/DQd4Ya61W96AGoNShXK5rT4vkeg9rX1lyVpuZRekQ9GtUtlJB2GqY3+6BaP1uTME7C7EwjxOd7SlEXFbVQrEZ3/z00NR4jCHWurAV7rUsrXWT9qLWqFLJmQMI9Y3VYA5A3MLm4e6zdgDhcmnPHlXVVq1WjF3OIWOz/n8F7Fp3uzAgaXq7w6Ucfmr73ahXpmZYV3WIqyCmDnHUtkutfJpVT3tPbHvzluvXV9CG0lTQ2yz/NfDxzsqUBuAdas7KrMB1liyJMewIncMZ4eTeis9+9BlBCuVowzxPXI237LcRKaEuYKNqGwzNnqcALaSkBYop46y9rXIAuUS95BdfGjEHdVZ7vaJ9p/ZphhYk1l5LW7NyeKXfeL6WvroBgPXvG1CYpdUWGionFZDF2DuAY1sfFawtqGqNVrsfPuTOM+Oco4jBhIKpgYfqPa2hU+2XkTqMNXrPjBzeN+1d82FPq8SLNuygXrPUXkJVbZXh17AbqwyK0q71N27Vwk7MoMGc+qVzSNxc75ifveTzq8zwxddcPHvJ1X7HXCIBQyyZugl8477f/f0PffxegE4BqsPiadRz9QDTxWG8xXUe13tMp98xe1ubTqXlUlSWVPoeuxo0qY6CNfvlQbVdR+c6Zd1ME2aakdvbBQkHlgYX0QL5wf2HCowgKp+aAjFFnFiMN0SJmqg2ZeZpJhXwp8fMvcfu9oAhTgGssvaUQaRGpgAhl4OOWdRLQLXaevC1e9/ADalgiJIgBJNSLYzU06i5hCi9vU1z6mbbvheN3WSqhNQBQd+FxmmvRUvTl1vqIM7pRMFXbXoIBV8yjoQtNRk3ZvIYNWEtjQjK4MtzVPo9hcbQS0Z/cjHqf0D1Eyg5YRuKXn85a1j1A77r1McoZU6Os2rIy4gYh7M9MUX1DcwJUzK7/Q7vDPfuPcCYgVQKu9tAKcLF7fVSTByorHVTl8OEpy34u58DQgwzqSXFpsQcI0YKsloxOLv4efkqk5OSVaJWsjIJXaExHVLOlaGoHonGGEqcl83NWQudJ4ssPmfWewUam+zNqK+biCeL4XY/MQZNUBSjKaGxsg5dUoakraEfQJ30FULS15OKMrq6mIkx0YXMHALzrECdStQTNfMcHV9VCWiq0d11A7fOakoi6m9nq2+SNbbKtg3Z2DoU1WvLdYNMSUG2UpKyRFNWJkhNmbPWoYEfCk6FUn1NkgJVYjW5UazR8ITG/jOVjWqVMq/YVAO66nN3d5IkshyGiE6U1GciYZzgO0cqaqybUmK3H5URaw3jHNjHyBgi0xw0YbVQJycVHHSV5VVDbtbrFdZaUo7MYSakg4yXkPH7HXZMdXMoy3Ou7MFUDX6bpNNQsh52thY8B7auHibN63D5EPUs2e13+rzmTIspb1/bidf9swjznCvrVwsWbwzkWGW0BgtquN93WCdchYk5zRgzYIzl7bsrfvXrL/njB9/n0+/9kBsxPNxY9jbxV3/zlzz9+jlRHMYZjKs09KzveS41pMC5St5Tf7jWOOc6/MhNnpG0+FLJrCx2AlI9i8WhAJ0pGAvWFZyFzkFnBAukGLBFQcmSUDZmLkDA5oAlYa0w9J5119F7W0MYwBn1YVz3PSKCM0LnOn3Onafre8x+XBpG0QOqKhRV2n048dtIRwsZY53Kkr3HxrrOC1UCQd2DYdU7Bj9gnSETmOJcU8VVTplaAWItYgrWFpwxNWylw2GxZYWXDV7WWBmwBAwBkYiIqfviTCpBS1ojiEvKqOgEqQB5zPDi3SVvbkeG9RFv377CXL1kzS0SZsrtFZ29DzIw3XxJihOmTHTDGX5wjPOeOWeMH3DOcnZ+osX7/TPeDD1f/PrXpAyuGyipMAwbcincbke1R/CG/mRNlzOlRGxReVbfWcYpspu3yC6ychpKNfgeWzLblHCd5fHjxxyfnvPm3SXPnr8i5Ki+dPNU0851f88FTUS2ysN1veizcHPF1X5kmjODXTOOO2IRost4caSYa5r2TFkn1kc966lwFTWxPLfBQ215lOGtZ0deDKKLFtq6oVUWrdqELAl8YpTBWQcVCsVp7aEJadVXJefqgwgiarqdoj77zhRk3anMXmCaRvb7LUZmOrdCpKssYPUpmlKnybW7G0wJrB6e4DYnrDfH+M0D7NGFgm+rNa9uLtmIJ5WObDtyTowpUpylO1qzcSoj8UNHipHdbsd2d0vKEedcHQpr8zDFyM3rN6xOTvnwo4/4ju9578FD5MkT+tWKn/387/n6+YvqylFl3tYR4qz2K0EDxpyt3rKiZ3BMmRB27CgMfU/fd3hn68BVW0Vtdq0mzabCm35NnC7JKWK84+x0ww8++ZAff/wB4jvwa77z6Ih+cLx31rEyI/efnKgMLyXsPGNPNvzRjz5D+h5/fEI0HcX0ytZuPdLSIByasAVQqRX43dK+jWu/rQnLGRJCZw8Ssn615tHj97mdrpjZYYwQ0o4p7HHDmmGtPrNvrnY8++I5pydvuHd+ztnZA4bNBzCcEiVze3vNu+srXl/t+OrFW/x6y3gSuZEb3r55wauXr9htb9XLr9YEJtak9JKJkrRBMsoOLnWfzl
WZIU2xQvUarcClEafvJ5WBXQK5QJxnbq6vsPIdnjx6RA6BV8+1TRdnWZ2eYxJcv3zBdn/N6nxgvx3Z3e65vbzh2bOJp8+echsiWMdcMvtxx+bknK53zCYw5xnpPG7YUG4CxYI/PeXkyfsMR4n7z6+Z3kVmPQpIqBJC61EdNlKSDh2NclwKjdmjAEW1wmrkx3/mx28DZPKNP1XL+ZIoJSAkbbznHfN4Qy4TWSYylpI9pdjqmSR1YCsgqiwIwXC7FXbTitXxBwzrM6hDuT/0cRg8tl6rok5Uv96G4t35xIKWtU0xYfSLFZ65I6lsrDWbBSkWV5QPo9WwDtFLBfOgUF2HaqhYk9Ydrm35vYI51hjd32vtIjkfFBNGyRuYTCwFa4X3njzg+5/+gKP+hLcvr3n51Wu+/vw5JMdq6MgyM497UkmsjgaKddompqSvubJ+vtGPlwNoU7ujBfjUSz6Acm0AuqyFep/v+mHVSotCrWXa6xEW+wSobCojC3OpnTVtbWn1XN0lheoleADn2ltqFgzTaOquAEloZD4NyTGLBUqz19J62hyWWM7QrCJKU6HopSxhGcuiu/sk3Lk/C0Cm4NiURuY0glQ2lCgh8/zeCT/68WfsveXtm1f86t1rwrglTVPdk52W1qYCXo0IYbhj5WJqzVBhUDkw0cS66msca7prBZHac2IWZLYCzG2PrH/fALhv4kPf8uzpIvltKXcDqRdtX32PG9Jxl7lZwTmUydcsW6iv0RQdlDYQd1Fc1b6tXUdjsKtiyuFdhylNxnvAWb7R67cTsN2XBnaWUkG6BkR+s29qz7AVU20yzdJTLk9SUeCzZJWmF7TfECfEWNje7JleXfLMBPyvv2L7+oqrUZiKMl9DCtBSdn/jnn8Tq/jdH78XoDPSHvJqWKxjdcWKyMScCKkQkmVOgonoAZwTRizFOD2EsspUOxKzLXTVD2bMVlHGrJus19tCwqNBE7W4EQ1lUNP0ytyylk3X4zuLE0eMMxMTc5iwWExnicYRrcHUMIdkDP1mw9Q5blPGOUcKCmLElEipkER9CwyOOYOTaqJftcltobWGsroYLMtV6tK2RfXlTd+c5QAyCKIylAqwmiKkaqbojBbgyaAG4LnpyA+HbQOg2uGkKLb+LGtYkn5M53BWDzgn1KAITa0rOXF7dbX4WjVtvD5v+tBud7cgO/Vr8ZqEG6Pef+cdMRXYGmU7cXidIUQ1gs7q+9W8/7xVeXLnnG5+syHnmZX3rPuOrl9jXcfWz9zudlzvzMLmgsPD3HiopS524ZsbS7s/zlkoToEICmnW8JDOeQbvWbmO9cpzvBroO4erD7K3Bueg6y2FqOzMqDR1gM7oBjJNU2U5Cd57fKeWvlNUJpk4yxRmyAqc6fcJpKJg8dX1llgsU0wkUYnyNM7spxlrLKfHx0pbdq1pu/NgthUh2vSrBj9V8Eml1s479QCsU8i2reakckpXDFEKyQjiVEbqiiwSaVNTm5tcNlMPitIms9WPsh3u1VMvp0Sug5aMHpoxNV85qV5XOv2OOas9bt0wU/27nLMyINTQSzf3urFLXQJumQaybMKlslxMBe+o8l79vlGZLkAsWX2QopoKz3MgVH9DNcTVPQPRyY01GrQiUsFMrz5kzjmkVO87qYeoRKQ2nLZKc4sxyjgWW4E+qe9JlRXkogVJfX1WDsCdsaaCbIdCjLqXpKJhFMZooy/WQNI1KU5wxtHXAjKmQEoRgwJQoPuASueEkhLWCCdHa7p1x7tnTwk54Wyhs4aI5x9//ZwHH73mg8cf8PT6huOTFf/7v/13bKcb3l5vmcaRrt4jDb6ptUlqYFyprDlDksaga0Whvv4G0Gn+sspdm8yhUfoRDYoRUxCT6yGdsKXgUD9S3YebxCpjkia3mZJxJdLVNO7eCJ0VBfZEBxnkhDWFzumeZQU6Z9W3MOWFxdl3nr73iBOsgaH3+H5AjCHNOlhJdf9yztXpYfP9nDXpqhaVUg4SZoh4t2cY9DmYQqAwIuLBdPpcieD8gPUCWRO2JBpImbgr+isVwjaRpoJdObwd8DboOU0h5kAukSKRlKOuM5kxTkgpkFIPZiCLIxShWEuRxOXFG+Ttr1ivA76MnLodvR+J1nPaPyM4GFanrE7PceszLrczc7Fsp0AKQZ8fa1ivVsSzM573A9M+qAdrViDFFfBGWfpzicxpIsdASSObfuDk6Jj7D87YhT0v3rxg3G+ZQ+DYOR6d32MbZ+LlBcbCwwfnvPfkA2IMvHiW8Fb9PUOV2ZXW6Qh46TTlMydWg+d4Zdlvr4gJnO/xplfVKtXXc5rxslKZIJnvffqEH3z6Hn/zt7/k3fialB0hz7oOS9bhgXYNiFGfHk1xNAvgWxBMSTU4KRPLNy01qEwQU89FW3fjEqOC85JxtbHR16ZAuMoFZvI8YbwC0E6Ebdwxz1o7aYiEPl/6Iw3gmAOUMHOz3fL1s2eY4ui6Y/CO9XrD9X7k5c//O1P8JTENhKBWDpv1isePH3N+7z5hnumGnqNxJMbIq1evKG/Lco7mFNUvCq2xrm9u+NWvP0eAVy9f8t6De3gjvHr5nDdvL3G+W4YWznlWw0r38nlmssJohaHvVUpk1DI/xcC43xPCRN91HG3WbDZrVv2AccpQrVsxaU7c3OyYdlVB4BzZGV6+es5f/8e/4Jc//SkhJex6YCIgJvPR40fcW6/pj47IYhnE8rBf8fDoCMnw4u1bUr/iox/9KY8+/O5vEl3qR4ElgbPdjQry/EY7XlCAh+ZX3CrFor0qtkCZKXmLc4mHjx6Sy8hXL7/g6y+/JsiOo9OBrbuEGabrzHgdefzwHh9++AEnJxZjZ2WRxkRxwrvLC/72737Gz/7hK37+yy94/8N7rH3Pm9cv+fxXP+P5yy+Jqq2uZ1r9r4ha2nBHzlkOvxqLaQEW6mvUo6FaiYhd7oMOA7XvmOcJa4R7985I84z3ljnObPqe00ePKbuR3ctnlJKxGIZuYNqPfP35U371q0u++uoLbsOefuNwQ8+wilhgv9/z+uKC3W7L1XaLdZ6+X7Fngn5gdXrGuhe8H7h+ewV55OTxmnvn9zk5OsUbV8+1SIozIYzENFEksUR5FoOG5VE9iX9PV/1P+vhDwJjWTdY6VqsNhBmxnjwF9heXXLx5RwwThUAmUFKFwWqNk5Y1CiFkfOmYY88urDk7/QDXH1eA7p/wOlprY6jD4TYgNpCNKoiW2qGqMCowY6zXWomCRPXSUrA3LXfAQFWCaAt717g/kRYrFlP/VBpjCbv4Q0MDEQ4XXTgAyt94v+SgksLkJRxiver4wY8+5f/+9/+B7374x1y+vOGv/uKv+a9/87e8fHrNNI/swyWEPVaE3vdMogoBkxymJHIKNQ1cFi/mAtU+KS8qH0MDO7V3sLU3ax5/7aOxlrkDzlX5g6o3RGsiKE19v4QOxKX2BlNKxYgWOg1SItq163uhb1mDK+rPK/VMpPoh13O41cRiD+zZklWWU6QcesCU2i2HCsaW3NRqLISOBthK/f5yp5+++74VkeW1FLIy59KIWLUIocAcw
ZZI13vee/KE8/N7bDYbLqwnRT3H1N+2dkwiC4C2yICL9ifL2mrIgLRL0XrXFmViiWgvVZqFUdG+Rllzd2HrdlPrTyp/eCf4nR9yuI728W1AXvtovXpu6qB6T+9iUA2ca5ZILVRFREku3qonvK2M0KXx+c1XUZ85WzGBhlU1pOSgwDHLc4C0zytKgKo9VpO1CoAxy47VADoo2EqKMkZ7rJhgezNiXl1g2OFevSPcjGxnT0iZbIwGPZU73+9bwLh/FkAndcEYdJOkMsjQPxFSZI6GaQIkInMmGr04jCH6qt+NhZAjzlnGEOj7Xm9EKcwxVOmOUhytqVrnFDAxIDHUHUDp1SlHNesrHfvrK/LQkZ0nxci82ytAJxaTHdEoG8cGKDkixgOKaIc4E5IyZZJYkq0+e0aZPMZ1RIRiLBhtJqTB8aIbiDLo1F9NFgCECmaCyVlBxwowUKceVPAgoRMXI4lYN80iCq6lYkgS6wJvy25ZRroAF8213iKLSuSkAonOWXIphGnGloxf9cr+sQZDYgqhLm7BWl/XfT2ErEGoJveScVbNx8cwknOhL30NtKimmMbguh6xhhAj4zRxO0emSadyDUd2VvC+Zxg8+8mS8oT3DlIkh4nVsKIMHZr2IsrgK4epujYvLJtAQ/h/c8EbqaEQon/21qqf0zThraV3XlkyxtH7jsHbGjJSjelFZcMUowCV0wPDGsvgPc45pkkBOWsdfd/ruhaYwsw4TyQ0NTRFBScKGmywD4kxZGKGORtkDoxRz8g5JPZTwNnM8d1gkTYRkMP/35W/VJxuAawKRSPMo0p5UontNIScSHFm2geMFVLoMWWD6WqyKUKcBU2zd4QcyVbfi4QhZsd2u9Ofu3jeRUqMhHkiBk3eorMQ9FmZg3qIib3TABllRmlQRmAOgRDmyvq7c9QXZbWWw/LXNVpNYMs3xs56z4yRBVIES0qREGZSBms6xJoaFmHR5Gn1tvHGqlRWdCChrBRlU3mnBWcD7JbAlCRtQFYBJNdY4IucM1c/SWmT1TpJqyMhSk4LQNcmswBiFXxtvi/q/SEKANWp6vLy6z6TKfX16+8agmKhTOSsonvdfyuYiZBTZnt7y9XlBY6sZXkG1/eYWLCS6FcnvHxzy4unz/mjT97Hr9a8vhn57LM/4emLX/Pfv/iaL7/4ipz2rFZryqQSwCxqJzDNgWkOiENlhZU12SS+y4GKaMprVK8mU5Tp34b2UEgxYn3zplFprkn5YCrdCrWsrGgoFaAz+u8pY1MFr2cDQ69+WEbwUgGPokW7TnV1sGFNTbfMkVIU0PTOUozKqgevcupSDJPbA8pylVJwztH3HdYaQjWwnyKk5pkjReXz+gqh7El5Ws5eI23a6jBGz0JXCt56kEJK2tCkkon7PUSY0543L1/y9Xpge/9Mz52iQDUG9ZvUh17DMFJSENd1GGPphw39eoNxg1pZFAVHc9qxtjuOuxkfr1gNe0rZEbLhw/sB4zfc5jWxW2OGDfgBvz7h8nbPfgwMw4oSg8rbc9bU5RBwIZCzYZwmXEGDe0pinPbcbA2SIr1JPL5/zEfvP+GDj97nYn/NnLc8e36NE8PZ8REPzs7I15cM3pKNcPnuLXEOXLx+heSoyYre461lHGf1063FvjWek+MzvBecycRpy35KGNvj/RqXnTIFnK69mDQB2Qpsjjb85M/+jB98+ohffPGGobtijoYco66bWrg29oORouEeRptOtQKwdUCC/rJta9PhQdVKKyBvBN/kVDGSiqZcd05B5daLWWOwnas/P5OnPWMObDZrTtdrfIGreEOJYwWbBRMjlA5SZBwra9HAmzcvubkSTBnw3Sndes39XHh0tEKsY9wG5mCJqTDPamOy2e2xzpFSZIqBkEK1JMm1ttIhbikaRFVKUUXDnPj6q6+5urjg+ddfcXq0IVbbihQmndbHpMnOBTrfYcUqC7sUQojKAs91X60S+3GamcaRaZq1/K+s6JV1WO/1HDSOXYg8f33Bbj8pi7yoHcLz5894+usvtbaUohJ6n4klcG9Yceo7infI0HHqe3748DH/5k//jDwn/uvf/Yz+0RP+7XDKoycfV8PvBk4VBdSWZq6dYdrsQrpT7+hesBh9tfKgHNgvYjKGEeQWuIZ4gWHPpu84Xh0h0XJ5uYUMsbd0eK7fXTNeRd7/4WOOTzecnW+wdgICxlkihe048e5yy/VNBLH88Acf86/+7H/j61+84Or1F7x6nrHOU+gxpijj2DltzJLBWr+8z9o43ZFctaauelG2HbGUw5BO5Wt63jYvrhwjMQYag6Yx1YbNhnsPHhGvb9hvVmBXHK3XUAovnr/g5z/7Gc9fTEzjhPQd0nW8//AB3ceey1eXjLs9t7cz85zY3e4oKTF0jjknMJ6MpfeW3hskTVhJOjzE1n6mXb9CQiqJjLWPOChRlteXtWb650B0S1P4O/5dJZDK1/duhYpELWG34/Xzt7x4+obb230d7qZq9N5qDKHNCowYwhzIxRBzx+3ecu/4HBVq/ubV/I5rqVjJgUFTTdmTQck71n0AACAASURBVI15FOLJpYY8iEUDmHToYGlNuLJzpUDJLcQpwzLYr3ekoINUIxpco9UGtqh9UamgDiLLewKHPoTl/crqI5yaXQlay9SGLNfhsFita/pNz+MP3+NHf/oZHz/+EfsPAw8ffch3PviU/++v/p6f/vTvmC+2rNcb5jKRYgZvsN7TdxZSJEyZUoOpjNUBcawD4pzSwYbH2GrJUWF8ab6W7S2pti3WVnulOkltNCVjMUV9/7RlVSac1kSl4al3wLn2tUILPixZLV7U2LDVdrWOr73jkp4phz72UMdr/bowubIyUHWYbxfgjroXgO4WVFZfqcQTae950d5GdLEhKAGhtL6+PqO6TGqaqkmENDHOt6TVmga7hWnk+vqa7Vbl7CJVKVTAiMU5IaZQQb0Gv1UQWFrerRxAMAQ49LRQg+UqXpERtbzIQi4JY/R8RapV0zLl+M1H7rcBtW8wQet1NVbZXSahqZYZd33nmgR2CZC4w4CDA0j328/44WcuXqPWUqKujaZK6uay+N2p52j16G09wvIc5gZp6vXWgbWu1YNU2Tm1jGpKRipZoVHyWoq03LnOBuySTVUOmsWL3Ro9iWOEcRso6RpHz9XtRJoLU/VxbwB6S0O+C8Qd/B7/8CDm9wJ0ijCaxS+o/YyUVDLZDwPeFbzXxkMkU6oc0xhhcNrwiDeaAlMK5AnmBEYb3q6z0BmmkNXgM476AnKit/XQyvrAOycs+Sc5cnXxBuuUkWWMJcdImGfdcEZLd7JWuVnShyGXzHa75TJFtuOOcRopqWhyZclYU6esbVJiPGI7ktXbJBWhd0ZUPiI6qa5wAxp5LPW+FWyYsDSebruBZTnkTNEUyCRCTIfNwRaYyAy+YyKSrcM7T6pBDVkyxaqxvnO2eqJRQQZNaMs5LYe+SRmbCyGDL7qJOUw1aTdVxthAHz10xbZ0z2oa7S2u6+iAFBLilJk2p4gU9aJbrTfK9vIjQQrb+XZ5uEHNlsUIq95xtFnR
eaGUkRAj87QnJTg6OsZa9Yprm1fbAHL1R5OqI18e9robtY1FRCfxpUrkVK7mEKcsEEvG2Q5bN+aSk24EFHIK6icWCxadwhpjliASrCVUVpgVluTLlIQQasJejOprZ5W9YK0Br5r7OSaKGMTCerUijbNem3dMEco4a6MmytIz9X3QFCBtIHNpU5n6cFfAmArSWWuwxUKgTipS/ZxESgYjhb7rKDlinVH2DcpWcRZWzitgmiMpzNU7w5NC4Ho/sZ+F65trQKVBKSdyjHTWsh56RkaM6NRnnif2+y3b7UQujgcPH/Pw4SPWx2tW67UWSTEuz50y9pq1uZ4zLZp6nidSznROGbZeTH0fFFAtvat7VF0rITCGCXFr9fBKiXEMbNYdLZHYVjlvRjC5UFKVVUgDjMB3bglUIWe8BUqqpvt6X3NlQVWOIiUXQk0+rvUnUL0VY1iANFohroZwmAb45ILzmiab60TM1EAbqXtIWxshJpwzVdKmU6WU1ZNTBFJCm+FcE9BKriwFlcSnkjDWk3Li6vKS3lmys4i19KuOME9Ym8jJcNSfcXVxxfZ2z5QiL9++4fvfvcenP/gTvvfDX7CbJ/bPZuzOkGUi5sLgVVY2TTPTHFj3A7FOpLq+Z7VeMc2Fkswi3cs5Eybdu61OZyhBpZeSlQnnRaWkaQ7MoRCcSgkNyhzMMWkojDE16aym58VEZywm67NEShDrr0pJV9actGE1nbc0WaKxgvcG60Tl6zmimlv1E3NGPU+bSW2rlpyzdF4LqhBGEk7fV6ONhFDoOo/JQkiCsz0pJFISEK/AsniVzdUUWZGoIIcRxNbUxBRBJtQY3jCHCy4uHXO6xthaRNd0tDkEdruR3X5S8No6bFnXMJ0ejKeIA+uJWUipUNIMYU9nMycr4dyv1Pg9RfY54Vyh+EiJsHeWy9tbdnPGJJUGrFYrjo9PdP+cZ6b9iLWOcZywVpO8U8qaypcCzmTCdMu4S9w7PebTR+/z2Scf84PvfcKTD97jZ7/+BZ9/PRDjOSvjWBXL7fU1cZo4Wq1IYpl2e/a3e9IU6Iwh5oitPUlXJ7ZxjkChcz3e9fjOcHP1jt3lFSlmxA8YO2CAk6MVexOY9zd4Z5HgmKfMycmaB/cfcX5+j/V6zXpYkfeJMSs7w1hT5UoZax2u0wRslT5r4zkHDTVQJpl+nav1S4gJg7KjBbC5MT8yVtTfqOYjcHZ6zvHRKSHM7Le37LdbjIG+70hxJu4nYon4wXP/aEDCSEiBYchMIWiTmAPWGI6ONqy7E+b9nu0WJCe8t6yGFcPqmPVqw+nxGZt7Z8SyIecjYsm8u3rNq1cvefn2LRdXl0zjjjDuyVFDTPbbLTHMddCSlYnuPd45imhts72+JU8Tm84Tdjv2tzcYCkPnSTHgnWVztAaEHBPXtW5wxmGMYw6RMs0q+e88CISYmGOqa6EwzoFVTCQEX4ObsnXMc2E7R0LW5zplIZnCFCK3F1fk7YgYIVrIHWST2KXMm1RIBlJnORJLePCI94c1p+tjXnz5Jas5E/cj1Cb80Du3llotTAqVuaGnGtWRsLadh5a7gMpCayeqTU3BMEN+TUlfQ3pO3L9ie33D5bsd28tbPD0m9Fy/nijHAx9+8oQ+Dryd33Fzc8PnXwSKX/H4yYqTkw/JYvVZKB2UgZw7VqsV3/34IX/+5z/iw4fv88u/+8+8fnrED//FT5jNmp/+/Jfcbnfkon2BLYfrbuyGJq+zmnilDKjamInUGqUawrcGTYoOyxpa0HlPDDP7nYJoMSojuB8GCkIIgfPTMx49ecDqySNePHvOVdjz6tUrbq8VCAwx0onlw48+5n/9yb/k4tlbLm5v8KwIMZLnSJr3lBwxFPa3Ey+ev+bMecbdJZt1IYVAjhPzOBLngPR9lfehDNEcsIJKoBZwWkEGZxWNjzVAqlRgcunvlppPWVNNVPHbHweW5W9/tL8zaAiJ0fMrgbhj4uR4+eKSeWqe3Cx1VUUgyOKgqI2KbQw3LNvJMK4sYlURIL/7An/zciFRLTrqzzQWiqUYqy/bewXRhhXKmYtY4ygxMyftvUwNCcoxktIMRW1BSs4QM+L03DZSiBJIIeOMIadCZ2Cwln1hCYywclDu3AUZqnpO16H+5QEUrZ+XS9HBnVV/MNc71qcDm7MNWYR+c8R3f3jCxp/heMSLV2+5mV4zpYiXDL2lGzz9qqMXS5wn4jzp0NU5pOiATZxDUC9HW+pz0Hl20thjBzDFihAqQ9VZX0Evlt7KVCVNoZDDTDGiBNxSpdA5KhnFOKxBWaHlTv9VIFWGmrdaHxsj9ftr6JoRgxh3eO51Zq/gS53CGgNi9Wu1gsqHpS+29jmWkurwtOEvRcA6jC8UY5Xd6Tw+ReL2mvlKB5TESmQxQiSyIFqi771ilIJzEMOOnOc6LNG6ere9Zhx33N5ek2LQ+p+C7zuiFXLWwVBJURmI1bKCIlUpwQG4uwPe3AW6UkqLr3zFSRdgR22YKuGolMOakFDds2ooVOs8vgWku9tbH+7tN6+lFGi2GnpdBTFp2YdKXfuNodY0INTeIldQvA1RDj50GuA2h6LPbAXgnAVy0dBPGmvNHHCERiowTRIstZfRdZ5iwluHdarOXHzmnVMAGx1UUkG61muUBhjTMIW6GPTqFS8pmUhUSWwxTLcRxpHsHZkVQaq3fkx0dRhaOFiG3b3f7fd/FoOuZN1gGkDHnYLAGKHzns4W+r5nvRp0M04joSgL7HjlGQw406khfIzE3JJJE5v1mr7vMdaynyb2+31N6QSThQHI0S6G9Wqkr1O3mBO77Y44zZRoWfWDmvuLJ4VIlFwnMUa9hTBMFK7CzDxPy00y1f+nlLJIBxs7yThPNp4kumFJNezXSY2emkUOKYmGOkkoBSO6MUijuNbP0ne8PShJN0MjhLox5KKHR5aiNNlSSNZWULBK6XKp0r+ygAgK0Onmp0h8IqSsjSaGUIQpauhBiAkvmRAyRrKm3Modg8xcIEWSVQmdzVDGESO2glcRamrjHCOxQC+Ad5CE/VzDIrKyo6Qo3VNli4KVFUPnkGLw1rDfz+zHEddF4ukZYBd/nbsLewkvKIcNpMlZ725Ai9FlAWfVs8SbKrmzCZPBW4szGmLgnKXvOqTEep0FY+uBZRydc3V2rWy8Zk5sRJlHmhqUGMf94ilgnWMeoyZPJpUnZZSdPsXEblZgIIaZnAyus4TKNF80/PX1G6muGeXwmtv7pVTyWgi1sIe6ceWafrwYfOpdVInvqmez7ul6D2ROjjac9D2WwrrvyfNELpGMskNc38Nk2U4Rkep1lrSAocoJ+66DozWdi7r+O8N+Gpnnykw0huPTUx4/ecLRyYZiiqbGBp1yql+dQUL1DbEKHBtb2bbTRAmB5JTteLxa4bzKmEtOpAjK7knKiiMRUkTSTIiZFCKd7zg/P+f4+FiN/63FGHSSnTOxqL+idw4rmthqTZWbJU0RMw6kRHIY1eC26IxX03kVhEuxAmQok6GlDi0AXa262wHXKnBr1cg2xkTn9fnXQAk1Km1wz3JYAzFp+jK
lMhmL+hrmrOmRKRXmOZKV6ro8M7ZNpHJSL09bwYFacMSQSAIpzxQiJUTOjs65ur7lv/z/P6WI5+tnX/P//rfAw8fHfPjd7/Lm4hWvbm+ZpoLphBgnQin46p+nwSKmAobK/lr1K3ZdJE4KNBljkJxroIzKXXWoWPBYNr1O/HMWiJp2OZMJg1/k3Yjg7KCTYKMOoJIiJSnzuxhN2SpJgx3maWbcT2SnDX1KLRUr1zRzVxN6lWEWc6p+MAqUuq6j6wdN9ysa0lKAYpR9XooWp6kCqOp7Rj3AqZ6ECe89tlhS6BCzJoYEWc2idXqvwJy1ykg3LmvxaIRiIsXWYJUukcaRzvf4PhOZCMnS+1Vltgj7KbDdjey2E9vbERHHsOoxrDAipGzZzjNutNh5qEBFondq9j6PkTRDNwz63JWZGEfyODLubglmgmNhniNzyMzzDVMsdIN6Ia6HHieGrutx9lDQUGqDGgIlzZicGJyld8JmcNw7X+Ndpvdw72zD6fHAydGAlBNMgm4unPRr+nzMLgeOTs84Pj7j7ZsLnr94ybSdmIpQoibAl1pEdlalmuthzW67Y3ex5/bmijKN9FaTNkOa6Uth3TmyV06xAor1gMBhXI91vgYzmLo/SJVt6F4jVUraV2aRyv41uTpOM3Gel3uhTbrBFPUXNKJ+oVRmiymFzqmX4tA1H0XhvQeP+O4nf0TnHa9ePOOX//hzpnHL6WqgFEeo/nsuBTonHA/Cfs5IvsWWwOALxnese8fZ8YbOJm5NT2+PGIaOh/c/Yr1+xNV2JPwP9t6kx5LsyvP73dHM3uBTzJFJZmYnk8mxq0lC6EYLakAFrdQNCNJ30b5X+hZa6AtooU1vJDR6VwK7uiRWsZusJJlM5hQRHhE+vcHsjlqca889kqwsEl2AAEEv4RnhHu7P7dkzu/ec//kPY2JzvWd1+oBFt2YKjhgntPMUpbjebtC1Evc7AQenEao0x13XoQhMbTIfYsCGQFIWSsEbQ289TpsGdGh0yeQgCaC+GzhaHaOA/WaHVTJ1dy3oY5omYpQaZ05AHCepT5SWYLJxiowhE1LFFpl0papJQNGWbFogmrZI8dVMuVsye0XCkoqWNS5nRG1SZE0uU6DThu+8/y1evHjNtbbNooEDXiLMgtaktiandQPCoq3qIHBVylCroaAxyjSjdWQY1xpJyaTfMO4/5fWzv6KEz1B1w24zUsuadbfgedixeZXZjDe8stec9fcpU+by1QXPXpxz+uQR7uiYo3tvsZ4rWGUwdFi1oOvX0hznDb0LrNeGRacZrOUbT56S/Ipn56/IObHdjQJOFxk26mIPflby+u8E6uTGiFBtrzuwzhtjo73KUmqrexXeeWqBaZyIITBOgVohxsD5i+dsX5yznvbSbJbC559/jttes70M7HYDMRW2457qBbB9+Ogxf/btH/Li/BXn2y1ffPIFL5+fM25uBFhH8fKLF3z8y19z1ntefPkpOV9TaqRmL+yQrA6yyVniWktCuyrMqyIskVnpq61kf+aSDuelVhoQ92bbOCc5/12P2RNbTuKdRvDQp7cEx5kdVxXKWWrtuLmOlGIppf3mWqk1CSusZEoVhokqWYDrGiXdMGv2QqqErz+8N17ffFxKNVZTpdmFaJTzuE6xSMd0WnP68AHOOHKIlDHhlaZvapm4H7m4eMXlxQVTnLDtflLzOahyD1uroCscD0cYq9hfblApoHOic8JKTtwCWzMwcvfsQiMzaN3UIE04ebDhQGo8Ley/YkA5MN6QlQTDqQL3nzzkvW99hweP3+LVze8IVxu0sXTLBW45YJ3GVsVUC3ulWoK5+LDllER2p6Q38V5Y4dGJIuqu/5aoFHT7VBRKpQ1qZ4Dl4ILPLNuVn5/x2doSScX7XYgxOUuBNlt0WCUWJ50zUIIkpes5RbiBLsqKyi7l9vuUhPjIQbcUbo3ytinf9MHIXyuDcV5qJWVxLfTPe0/nLN5ZfPP4zVUIPCpENufPePHJb3j9PJGQ3jjmsQGsjbmpNaX1gNLLZ3IaUTWhm1y3pMBue812syXFPeJPJwCdsc02KGUJg5l7ANMWsgbKyUDl1kf991lt6tAE6sPdX8G0qIt2jrUW0GoOWZgJK7MtgLqzwXyVrTX3kpXfB/Du3pszcWe2IVLNE27+ea1/Pwl2fq/feN4GNurGkBXLl4LJUvulGElRkUKQ5PT6JvtP6t9yeF0SekIDdEVKWpUAfsoYCZJBLJVsrUKKaucl59r2rVvfT7nEpebKpVBb8KgQWCJFiQUaRlGLoYxgVKK6TIiWPLMZmw+jgJm3oOSbgOgtg/XrHl8L0KWUwShhv0GjjIp80ihFTaWZVlspEhTiB9eKBGudIOPWHRBLcqY2+rDWjqqMeAuU+cIVnfxMU9USnyHeBNZgvCRluvbdMURQ4DvP0PdopUgxMuVEUtD3PWu3QCvFLiamUdNp6HPGt0ADlW9p5ih9uOiUkeMTGqQ+JCNWRJ5alcIq8Rg4fK2IWb7OlcE5NKltjsKQkDeoXWSNMlyp4gvXgDY555qIGGErJTIvVRtjWFaxtqCYWxDwUOhJJ58b+l20FHKhgkqVbKDoCsaKJ0aRJLqu981kORJiEBaIEap23O5lApLrvJ8LvbpkchHAdEwBtHjQTVHo3zFJ3Llqm4o0KmCtZpoEoNrtNtxsNzgXWZ+c4t1we8dwh956izEdFo55KZhBuZwzh7QiBECyRiSKRgnLkjiLQ0X+573DOdNufoNDjk9R8c7Te0/1FVLT12dJUzXa4qxukdEQkngZOefo+p6y2+KbKf40TS1pz7YCbisTxuZ5Nk8rSpvcJVXkXDdTXpkst9fZvAi0VpKQSWMWqnmJkc01t4UV3RL/GlDkrGXRdSyWHd47ckkses/ghEZvbfNEDAWrZ6aXAFdD11EpOGtItbHM2oTSqEpnDX4xYC0tPV2ToiKGStUdXdfju56uX5BKBJWYU1uTqY25lN5Y7+ctRmtN1brJpRXeWlbDwHq9ZOi9TEVLCwWpGqUsnRpIVXO1vSHHzNHpKaenp3jvW7Hfzn2Re8xofUj5NbngGxgXw0QJEaeg0z3Ka9KU0arHaUPSTSLYAjtyrOQYpEhvJrG5TfXnj8OCTWO10WTcLYZcEoSqeFR+ZW2er//SWHG1eR2UUig5Hb5eaiVFCSqptQH6bSXKpa1dTdasrMX1HdZ7Qqnsxxt2qZJDYFEzZ6sFH773DjtTeXl9zrtvP+XRg2O+eHmBWQ/ce/AAZzUVC2ZBtVBqIuuKshbjhR1jmr+YLhVdwCqN1ZZqhB2kGuilsmZKCRcT1SgoYJRlNSxRKrLbB8FHmmeMyK8VoUpja6wHZZpNQaWkid20Y0yRaixRhuqkkFEmsQgZMIRUmVIhVkUolZQyOlVihYQmogjt76kWQluPYlVMsZB1EoN2pSW91jqMUB7Ibf8oWiQLuQqLuWo53mG5pDMG6zpKtWy2e5TS9LonlyL+HKZS445cMipnMUrXElkvRIHKvlQCGqcduwS7l5doc8PRyTGr1VHzAZHrSllLtxjQSva3NE
5UBX5hGZOlixqz23C52VGNYzhekYonxI7dXnNJhZSZsmUTFDEkxgSbvoDLoG1LLtRMOcm1uB8pWZiMMQrr3hkprOeyxDrN9npPSYll53FKY6lYo9jurvni+Wfce3SC0dBZyw5FmCZU1py99ZDV6RnbGFgdHXN6csaX62fEKXFztWVT9pSasUr2PW0tMWWMc4Rp4urmmpv9DussnRkEbMOSiwINu/GGMY5gxB7iIP0q4iMVUybETEwI8NkvyXGCEtukWNP3HYvFAq2thNRMkZTkYjLatDU8N8/Q2u4N04yXGzhRM8ZqjlYL1qseq4uAbcs1jx4/5p1vvsfbbz3l2Re/Yxo3vHj2GctFh8ZRBo9zFqUq436DMxW7tChb6Yc1Fzd7YjacHS+oORCnPatFz/HqPsYYjo6O8X7gxesrXt9sxKPQLrBdZrODXRrJOkodoQ1OKdxqSY2OHQWnNUPfEUNku9kJkz4mYa7YiaIzhsrJaslqsaB3vjGmWz2IAOwUqCk15W8+ePOpVreJebUsmjlEGcTk0sA1zRTFS7kfEyEVeowwUYswqnIVNh1WHVhch2Q94yilNm9VWQcK+uCtKSwchdOW4/sP+fDHP2E/Bj46f433Eu5yoEHV24+D7Gk25ldV2KylkmoWdqDyqCqieNWAYNkwC5SJnK64uvwtz7/8K6abX7EeRhZdpTOG9fF9Qj7ld59mXny2Z8rgHHz80SvC7oLnn39Ot1qwfPAQbeQaKURhQlRFjaCKNN6aQo4btBqp4RpqIMfA9vqae+8+4OmTh2Ixkc6ZgjBsYpRaUtt68GoFATOF/SF7IK2yrpQG1kl9I3XuLdOh73uWyyXed8SQuLm+EUautmyubtjFT9i9fs1RnjCd5bgEzo46VJy4uLhgjCdy6nNit7nh0999xt9+9BHf+GePeOvREzi/4D9cbrl5fUGOW5TylGoxY2J/sWE7WCCzPO7pzEC/OmE1LOmMR+XM/vUlr16cE8OIElNUtFYHEKhSG6teWBpF3ZUy/+mPubJoUEBDzO7UD8znbgZspH8oKbPbZ8apUooFHAqRO5Yix1VqIReFzhrbmOMlT8Q8sU+JKQtoLfX6n/YqbjkrUpOlLMnt2nqG1Qq3XPL2O++wWok/sy+GRddzMixxSvHy+Qt++Yv/yD4mQpGEV+Us2jgqwuipSmM6SZw/XR/hbGXbebYXEyqOWL/CWUX4A4P/r9alWsu9VxtIp9EH7+C7xvi1DQdSjW14LKCsUaC9Yn18zMnZQ4bVErUVeVzXe5arpZzDlMlKN7VUO6624JScBfAwjqHrsTmzbSw4mH24ZJGZQxoEt5OeV7XhuPS2mpRbP66EXYySEjZVha2yj6s2BO37FSbPLP5ZEmnonMN5jVLCtO58j7HCFBcFupzIq5tNWzOF1KOswTqHtY5qe2q3QPkF3ndoI4CgdR1dN9B1HcthhfeeRd/T9x298zgvAyxtLCGJbUgeR171nnB1wcX5l2Lj0VhX8+U/1885S9hd0kBJqBKhBGgmP0ZVpnHH5cUrtvvAOG5JKZCLDLazKuSSUFm1nrdKn1cF6AaZu8zEo1vga7Ynkr1LyEPtHp5TJA8fMKs5KLoFKbRAOYScMvcTcg2033MX7OJ2DT0M/P8gSHeb7HoXsAZhMd4l0NwFn0RVdHvDzDZfSjc/7jZAMymSi3hpj2MlTEGStWebIhGsybWr5+erzFBIbedTwjVEdSTMVQtOMA3rBMD11st7lStD72UPago7ax3GmsNrKVlEHLpWVA0UVYhKU4yS2qQ4dOxQe+nvcR3Wa2wBjWlA6u25+uo5/WMeXw/QZdGd5wqlaczFK0mMWVMMVGupKVOiTFdyYxCgKmMqJK2aKFUm8qUIqokCU7UYD9dCTJVQ50mShBroVnDIGyEAmKS3ybEka4jteGwtWAXWNbmmEgq5o6Bti+otEudrjEY3OdKcjHlg1KEOnkjWSSz9PElQWqYlwuSShr60i0aipYEG1mlV8TRDfVS7L+TmqofJ1y26PcvWaPLHUhUpNc1/aR53MyjVjvdw07U/Z9bVfAuXKuCpsh3aywRYGbDO4gwYVcnBkXPCe8discA6R4wRHSaKKvhOmo80xdsGURt5z6Ik/+UiKYwFoIjk2TjLbrdhNiQ9FFNVZGZWy5TEWYWzAmDGOLLbbai9NPbc/dm26d16zt3SQ7V6UyM/y/8o6dZ4fR5oVQ5y3hgi0SpCMAQLqkZyjmidUUU8DkKIUBrlujRWWkjEFMi24r0Vfy9ozAiNVrKROC/s0FIao6bdQ0ZJKuScODR7JMxXBDQPha/cw6LHlwJVKURCS4t017cThdkrUWmJtDZzPHkRWYRMdDSLxYD3ErDivTBzSsqU0vwjShJJSrtmrVEshw5tFL3fMOZ0oKbrdl9qqzF4nJNis7jC0FemUMgtSv6wYWiZ/hvjQIM1wii1Rh+me3NakzWWoeuE0WEtuia8sfTOMnhH5x2aiq2KbMytn5FzZAydHfHGseh7hr4HRB4Q1a3/1iypclrjrRZ2jylYJDSiqEJnDYM3LLxp90yPdR1jjNzs9+yngJriQfY7e3zVIgVVaeDugS2EFCdVBpPCrtItBVtVirCyUVmatPl7qY1pyZ0S+DDFpL3HVsIxaj7Q9g3NdLktFKpNeDOVWCRtLFWIDeDLOUvjSWXdWz585ynlwQn/4T/+nCeP7/HBu0/46d98jF+d8eE3j/n841+y+3zL6+1LNmmSMBwLEWGdKq2w2hBjhlSoMVFDRuVbevlsblLRhJylSHRWwOjSWMIFDAZlFLoo0JWsLAFDUBbtLFU52zdUZgAAIABJREFUKft0KwyTYSKwywGjteSZNo8InWEqct9NWbGPlZArY1YULJ31VNeTjCfqSlSWZDpy0Uxo8QzVjmwQr7Leo6qVJEvr0dqKL55RxFqISmTExji6qknK0K9WLE/vseo6FiGy3Rcup1fkXHBuIYmzg0ebTNgkMAmsFAHGd2RdSVimUhmxVD9AtyDg2I1bcpqY1MhUffPognE/Mu5HFIrFsKTremj3IaZSdfOyrIn95gbcgDo+AQaut5bPc+aCSJoiRRvGaMiTAH/TOmHtSHErqjEoLWFD2s7FkhEHLSOpqbb5PGrthJWQE7XtuZaKLpXVMPDw0UNO1gPL3pOUNBKd7zFqSwh7QsxsQuZH3/0BZ/fvc3WzwWjDN95+j3feeZ9/+2//HT/96V+y248Y51guBtbHJ+ynideXV1xeXpJKwbqOvl/StxTzjMIacCZRyp6QtuSq6HBo7UmpMo4jYdxDXZGrYkyZUmTin0sm5UhnNcZZmfj3HVTFNNtXNEm8TPKlUDdUqZsa21MpYW8zMxasZb1eslw44nSDUjAsPO+99y5/9qOf8OjRA5brnvNXX/Dk6T3efvqQ5dCxvdkIGzQFXl28wg+W9cmC03tHrE9O+Olf/g2/+e1zHjy8x/5mx/Xlhq4f8N5wc73hNx9/Qq3P2EWIGHZT5eXLHTgIWZObTx9VsVgsWfUdNUc2VxcYpXHOipS1hUgd9jAtTVpVms5mO
tuxXg4M3jHtkwS+WIO3TljspbDd7kg5sR/HxkqScJYYosjQ6+0aWSsY68W6IqtWpFemkNnvE8NChso5F8bdjpACSUlQjjJgi8I2x9+sMokGtDN7LEnwUkHYtkUbYZxudugpcro+4iwmvNNt2n6LkdytAUCCA4QEJfWIUVIraiwKJ0yJCjXLcFjrCCZA3TJun/O7j3/Gl5/+NU8fGI4Xa3LYsbvacfXqOZ98/im/+mTD5euEdj3JZl69nHAaHjx6wsO3H3H/7ac8evyY5WIlTSMyuI/bkbDbE8NOzOCLSP2vXr9gP+1w3jEMHW+/9ZCLzTWvL16idcE6JPBkTKCrAIyqeQu3QRPcyqBS2yul3mv75dyvtkF613fcf/iAe/fv03U9+2ni+npLmGRguru5YboaSbs9zlnGMfC483zj6WPctOXZp5fspiQDkmIZ48jzL1/wF3/xf+KmxPc+/AGP77/FT37wQ84GRWVEGc0uaZQ/4+1vfpPVwrJ95xGubumMwpgVj979IZ2zvH5+wbOf/4Jff/Qrwn4vvUyOzJSEevibVAOpRAHp+M991Dt/1q9eYAKSVOR+miVqtbKfIlMsVGVReDQW4Y1mOPhiO5QCw4SugVD2TGnDPuwZk7q9nv8UfG5u4A/H1zxmm9em95bFasHx2Qmnp/fofMdRv6LGRKcMVmmmEBjWa1zfo0ZHJYPVjfkws38MxilcXzk6XfDw7IjVN9/h4otLXp5PvAqZGgPaCXmk1lud7qGxnllFjUjCDKgrDuENsw1SQSR4pRZC2JNzs3SBFsAga57RHqjkEqEFVDltSCmSQiSFIDY7cy2ZZL8gC4nDNLKAmj3AalMslArYQw81J9Oq9n7XUoQQY0RVIB6P+rBfWyOVoykVYz3dIH60BbCdnAfdJKemMY2t0qJCIsnnxotqoF1zWjuMtXTrM0ntdI6u8/ihox96fN+h7UDxC0y3wPsebSzGOFwD6Kx1ONtJr64bWaAxlqnijW6SBC8WbRiXKxaLJZ3vCdrKgM5ZDn6Pel5rbxVKMYyQJ8gBJe5jWFPJaWJzfcnVTnrWlCYhGlAEmG6DF6+s9EUHkOtN0Hdmw331+rqrHJOecqb41HaNcSDlaKNQymBMadYnHKzJVENODv3z3Wv4zu+Zr4WvgkmzvPuur9wM0N3tyd8Ih7gD9L2RwNp+Zg78u+1dNTR2PFU8sY0SzCbXlp570MvPoweR+VrfHfy5MVoIMCVTtcINA93qlNN7D8RuZDmw8AtimlAYln1H1w84axi6gb4XAokxYolTciUXAVV1CmQyewqhin+8iZZ8kfny42d8/rsvGJXCFoNRsfWRXgjw7bXP7+8fC87B3xcSMb957ZQ0gs6BcVJyoiRhrIV9liavFvEs05qLmxFjFMZIqmtRWujrrYW8CZv5spSpTJWmuiqFUQWnsmz8tUJRqKxRcUb6hQEyUxCvSqYrUfy8CtTaaKf7is1ykYdSGsMlNX8okTKVIkWOGEweDD8wWibUsn7pQ+R0nWmepg0huGWnCFtJWG2xZMxhciWy1xluBCg1ykVX24QUaUZMNRiTMTmJMTr1YJAu9+QtZVt8/QQwPMgfkIsiV3HsK0p87gxzWAVtMRafNG1EalAaTqStodNDAymFfmprlNS9fpD02xy5vrqiKFoiZya2okMbDdq1BMtZDiJjmLnYMkaxWg6kfIRxwozc7UU+mWYGELcU3HkR+er1efc6vft1jW5eo4U5TKAgHiB1XkiQhSemSAhgbRWmDeJPY5QlpYlpjO09MFAzpQUvbLc3OG/wfsBYWSS1aX6Co4RETHEkxdiAjkKIiSkkKahzbpIPxIi+tHW9gZiScHO7QOZSDp4edxfB2423vNHoaCN+HDNzseQiZrulMMuLrBEgUDdDXYXIntHgOmG+xALVWmkJqkZbxbLvyCEfki2dnu8PJUb9bSPSKDrrGPqelG2TVbZSv94WDLW1N8boJj299c5QtclEtcUaYcvoKt4YlCqSdi1MBWXke1tJj9UWbweOlonNvoivX2sAYpuUqdmTzei2MSiskvu/s4rOWkxvcApWXcfRasGi74hhEvm806AMMWpSEjlsSUEAQyXFim2UbtcSYK01ch3AYZMtmCabmgFb8dmhTTFBaPelNA7cLKOAFi8u8kmrRd5gtGaKiaQzugHotW1ypQoD1nuLxoLRAs4ByjmsMxhnUbFgjBcQYXdNmC45XT1iuVqQUuDJo8c8OL9G9QPvvH2fH//on6GuOi4fvubLF7/h1flvyTcXYqas5H1Xbc1SpVKmyLTbE8YJtJGBT5WU3aQ0AUhaY62nGiOG8FkCJ3KWaycha0NQmlGJvMP3AwVHzhJZjzGUaqhjpGKpzuNthy4iW1XOUXxHspbsOmqX8C3mvdZCPwzQr5m0J6hC9gs04snZDUuG5ZIhV/ZjaBNCjztuide6okl4qxl6YS2lUkkIsDZNic1mS+c8Z6cnLHsBc46ocLIgTIHB9zhjWC8XWKvY7c4oWWSQuWpcv2YMhpQ6xklhFxMq7Fl6je8MixQoJdD1bbIdA+N+y028YbPfYrXGL3sWnSZXyDURS6SoHt9Zhs5idZsMo0EPjHXJZSxcp0iMPd1iQXUZyhrnFVPpGHeJbq2p2lKqyHpVlbXSWo9XyP3ehgxWa7SVoJV9ihRtsa6HGtEkSszs9nseP33Iyb1T9DCwOj7lww+/z+OHG64ut4RQODo+4ejBE97/7nclVatUFosFaQqsjs54/4MPiTFRqPhuYHV0xBgmPvviS15dXJArhCqeYx4NuXC1uWGKW+r0GqMix0vPzTSyfyl+RzHCOBb2+53cw9ayS4VaDZ02JKWJVRijylmKFtad8x6XkwD7oUn2tMEYi66S5l5SkXpFzSnU9XC+XO9YrBf0vQa1Z7lw3H9yj2/94EPe//53MNZwGi75zo9+wLI3vPv+e6yPj7h6fs7m4oJKZbvb4HrN6njg6P4x2hqev9rzu8835KLplgvsODKmREiVUDyhCpB1dHyCXx2zOLqPX5xQjKNq8aHKZSKGEacrg7fEcU/cb9mrmV0w4/HqIG+f1RJKaYIWL0+q+GxGK1JH33WsVytCCIRxZB8CMUZSqXK/0yxVqkjl2yrZBhNzCE9j1ABKGaapcHW9w9ieRdbsQ2Lc3hDijlQDBYeqClMKtkodFitEFCjb2OQyrJSvFRkOIJP4Z7/9lI/+/V+yK7mFeNRWp0rzMSsT5mOVUqmFkalMrQmjxZOzNkaFar+mUlE6gN6D2lHrBTW/wruR+/eOOFo5cihcvox8/PE5N5vXfPTbSz75cmI/GhyKEjOvL7d8+4OHfO87T1ie9PjVwNnRmsEPaNVCs6aRq/OXXJ4/Y3vzkpzEy3maEucXV2zHCb9cYLxhs7nk5uYV2+0lKe6FvZgCOUutro26lfvXmRVKk0EJLF5yC8tQpckIZ0WPDCGPT095+xvfZLVcQsrc3Gy4vt6IV17RpDEQwg5VFHpYtvXi2/zkh9/i5stP+cXPP+d8I9depyHlyn438tFHvyHdXKNK4b/587f4
8Y/+jO99/x+hEIuKkC1TsByfnWI95PgBVk9oMmSLXTxgM1U+/+Q5v/lPH/Hi82diQ4L0Ttqag38XCqqWPijVSKpztNE/1EMAurtQ3TzsLa32ohEWEpGssgTWVCNMyZrROlNNBjzFdALq5QmlCrlu2YUrttM1UxKSxNeLt37/0boaZhBDrn1RcWgrQRFdZxl6Sz94vHMUElfXl6TdHlUqV68uuLi+ZIxTS7WX4Ajx0lMC8jhLNZWiIsbBw4dn/BcffJeyqfz8Z5/w019+yvnlTtQjX5Uu3Dmb8zFLT9yg1jYAAOnVNApVFDRD/Gm/JYYJ5YQFrYCaIYwQpkROGWpuTLdCChJeErc7xu2OHAI1FqoSv3WTJclewLgiDPIQDgqN2nyR5XjuNFAt5UO1AYdCo53CKPG2q0gS6bAQAEMZJ8QDKyoItJbBbZF+b5axatMCECpUElXlBsSIbVFuNX/XLemHgbOzM7phYLFc0PVe/J69pxs6TNcT0CjXCX6ASGON8aK+a0qJRsFr3q5NnlragN46vHVQFJ3vWS5XHB8fk6YN+/1GCBsH0HXeCxDZJIo4jcRxT5pGagmohkmksGfc3xCmSC0RYxSd03Sq9QNt6DQgqsHDYL6ppWbflqpbD8ethRPI758H6rccN27/TUugVJnvZT0rzQzGCF5R2nmZ33vdkLKvAkR3e+u/j0EHdxVt+vDvd4Gn+XeVO+wB1b4+rz+yfoM2RsIVFwPDMDDEjHdOBsRNbXBXIlrV7fGJDVkjHinFMAx47xnHkQqsT8747p/9mO//8Eccn5zQd5ZFtySXiDcd3iic7bBG4W2Hd4bO9cwkxDkA0wC6iB/wXhdCreRk8dGy+2zDz7q/JuwML3Zb7P4SVbaoKr1nVIW7qcl3z/kfA9R9LUBnjGh5dZk1yLeTBAUtFtdilG7+KwWrGyPGCfpZC1TVJjTCYT0goZvd7kCZ1FY2ZOm5pcjR3h/e4QoCBlW5iHMziVVatOo1F3b7CWuS0BVVpTMQc2Kz3UBR0uAZd7hIRbKZKdm2qes89BGGXQ77OSCaZvIkk0ylxEvNGkpJpNIAlCopZ1VLOlIUgSpzIaVVumU3KUFoa5uI5nZ+jTJUI02MNRWrM87M5ulIeqSezQ2FiiypRbe7gmqeC9qUBmSK1FRcRRUURckKVcSTzmjNVANpGw/TTDEbb+EazTek8x5TNVknco7kJpvLRcxia2mMupSouoiPVRGfnNI8c+QYRVLbDx7bKY5PT1ger3n1+gasJ6Y75seHgdVd5qE8Zv8E2RjfnELMCYsHk9G2WsiUVp7UdR7jmpRQ0QxSRbOeYxRjyVSa7KiItLJNJ6zzbLZjux4NrrN0XnwPlBLpr3aOFIIkyZn5PZMrqu97Nvsb2UgykgqZFbeC+HoAx2dT0EPggL77Ou8siocmZ6bB0+4/WWzmz2dGl3hBzNHpLSlVI/4cGFSWZEJhz0Iq4uOWpkIMk2xCSgaUSrWsrVooKVByYcpZ5EOpmfRXoePnJJuoQqa2c1lGRUz2G4vucA1QDwu8aZNKVSp919F5jzXCcNG6TeJLpiiF73qM8Sjn6P2AM1tSTMSQ6PsOb23zhsgYU9G2sTa8+FjopPBOsegcvdUsvONoMbDsOqG5q4T3Bm2lWHGmAXtKZl1Oa6zmAPrNnkjWmpZoLa9PGwXVNDZi20SbfOHuf7dMu8L//M+/y3/95PTrlu///Mf9t978PP8c/pf/EYB37nz5v73z928A/2r+5AHw4Aw4e/N56gQd0K3g/gq+840/7njOHv+RB54hb974lMZElEPx7R8Mb7YRkkzMEXDU3fm6BiY57rF9aX33922gbuTbFne+/Id219A+5sf+zvdW4PWnv/8zrv1bAq7+wHMCxOdv/t7jv+P77j6nA448cP/OP9zceW0Rpmv47PM3f/ZXn8mf82n08993soEO/e33bp/LB/DJcMr/evIBOUMMmWQyxsyFijSuqCqAeS3CuvMdZDFkrznz7MsXFF05ffKAx++9g14seXr6kA+/92Nq0kxjJhRNLJWHD+7TH9/jqJP3MocIy8I//Rf/gn/84x9Ta2k+IRpjHalkrrdbrm827KbAmBUhFnrl0KXyq9/+mt999hGX579ivQgcPVny2y8+5zd//YKUPUPvObu3pBs6MaI2GmxHKZasEcC5VkwtrJzF9Z7Fesnx8QmL/UTkBfuUJCDGe6zrmWIihB2hAdzad61eqM13EBKZSGSwnrOHp3z7g3f5Jz/6Cd/7J/+Yxf1TSik8eu+bHD1c0ntYHR9jrWVYLhlvHkCpaG/RvUGbhOk0+/3E4vghmQXnF1sUieubPTUrrPUYs8Qtl/hhwC/XKL8m6p6YtABitkAIhHFDTQFdM1c5omumptx8xhIKqfFUG0gZw8F6wFiNc5r9biLEqQEyMqm0zuKHHowkMYaUhPWrFcoaMe7OSeRn0OqUeph15FwxRtibwqpx5KLY7RP9LmJtFsbKtKekkVwnYdMkMLlicm52JMJIVhgMVva/momIJEcT256nePHsJb/8xS+5/9YT3PGRDA9rYhbrzCz/RmVqrIECpJaIKFJwiQwxVOxhuKtMArWHekWeztlvv+Rm8xnoS/pBmv/Nqy1XrydKGlitV5zeM3x5+YI4jhjd0y87ihk5e3jEB997h914yRgmdleXrO0D+lUWkGwKhJsrwv6CGK+IccfmcseLz1/x7PyC19st1/s9X7x4xkXY8vlnnxLCjq6zxByoJUtT5J0AcVpRZqLEXM+gDuykWkrrIWYmx0wZECXAyekpT99+C0rl4vwl2+2e7W5HKUg9XQu2JBQS+nZ0co/333+Pb733Ls/KxOm9e3x2MYo/XJiocYLOsZ8CX56f85vf/ort9XNOHz5iWN+TOrRqKk78g52j6kruetAZXSO6VjKeadxz/uwl2+stNRWMVuQpkmvEOnOo72QFrGRk0J3KPwSD7vZRD9XU/PlcZxZQmdl/G1vBZpSTIW0uSnz0aqbogDIZsI3BKT9ubCHnLZvtOVe7C0JazL/kT34cwIIqIGaMO5SvWKwMI0zB6opqabipZqrK5BIZtzsurs653lwS84Sy4gs3e79apRp5QAImUo3EHFA18d433+L+0RPW7pTnV4GPrj4mloxS9sCo+kMeXgdCRL21qpklDqoK87sgTFFSYNxuidMevRQF1oy9lCgfVmuGrpP1sxTx2QyRHBOmKrwRz+jQyC6U2/CElBL7kKjjSIzxoFJRdwCVg8yxCgtPaS0p83dAWq00xnu6YUHXL0Vy6jza+IPMtNQqgHfrR2c5s6H1H1UIKKlKkSsseSPq+1rBDthuzYPH7whrru+pqpBzIFVRNNRcCSVIL2AALUQP40SFU1Il5Si9TOO+aMSrXhkrhIwqtXbRsc245y5/HlTPCNbMSBOUTlVZlxadw9RCjQFVEphKiRNhv2O/k+GVsNjBmtafdIZSnVh2FEWNLYaiiq3IV73SZ/BrVlqVKh7oSilyyhgjISiJAtaQyWgMMSV6d4vJ3O39pJNo5KZ5iK/mf7tz/dbKXYB
uvs5n0svcYx9+/g5jbsZu7vagt/dI80FPc7Bje45GHDGNsKCsI3cOe3TEer1mkULzpBWFk+u6tke08EfV+veZ4Vgy3ncMfc/ZvXusVisury6ZpsDR2Snf/v4P+C///M9Z9wu0AmecBH0a24gC0v8aJf2mmb0YSzs/ql0pLe0320pAoYvDRk1yge1nW379yWu2+iUuBYxyzRvYyv7/lYXwq36DX/f4WoBuRlxvC4eZnSRUd2fEoNFbK40pGuUcphvQnQIVqErovard+GqOyEZhcbJ5WovvBZkviAyktheotGnpZgLchRSFAZfFi2Q22UtBtP3OOAbf4ZzCmIitFSZhH2G0FH3W4+uspRb2zIGe2SajmgppkrCCJnGR/UxopFUpZnP8W7Ha3c9E6ljmSecMMlAbkFEpjSauajncdBrTKJGgdcSoekiplPOuOBA9qyxMjWsmx9Am0FYJI0wbaehpaTlK6QPFNOeIdwZnDJVMjEG81YzBm56axehQXrMArvu8l0VIIxK1Ug6UYqvEUyPlRMiZGCMhJDSqmVrPJYdMVIahR1skKajzaNMx5cr1Jszcb6C2qOK7i9mbi0xp/gd3F5CcE8q0iGY1F8szc1GeKwRhdgrTL1O1xtVKqZGcIpqOlAolihQ1VmFbaS0AUimQknTOhYpSrknrZvakpLyWUg/ETGOaO85MB9caXdp7VEVzf5g2vDGNmynFCnSVRM/D+TyMSW5vXqWoZQ6nmD0e1aH2L0WK/zn6XAHzLqdUMzUtktxkUBIVXzJhGhnHFm5RyoGNKECjotZESXIvzlLwObmI8ubCLri3Imct4TGIfFErATMP9PtSmx+l0NiN1ugWge20oXcebVrB2d77FBMGQ9ZAgjBFSsrYTsxUu86TYjgA3iiFdR1oyxQS26GjTBPeIAbs3rBadBytBrzRlBjwy4G+78HIe76fBJwzCrzVLR1Szk2ZjXRbRZYbZUBpYU7MFgIFOU/SBMi6NXvJldreSyr/+i/+E//66xbv/w88/t1//8//3z6E///xD/h4Z3/B+p0zUkpYq/FO7C9M81uUyXumFuF7VmVIGGJW9FoGPGOYCKVy7623eP+732fwA6tuwWJxAtGgsGTbEUtt0pxMqlVSfauwG9dnZyzXR9QsHruqrUUoxUNEGhlSJWGEPaZE3nN8/wjjA8/9FffPKkdPFlxPF7xYO/ruHsvhPu++/xbvfus9umWSRODOY+ipKlGyFemj0QzHax4/fMiDe2cMyyWvX1/x6voadS1Mi6xkAFgrJKWIyB5g27nSqjavHZGRxhqpxnH68IxvffdbfPsHH3L6+D7JSQbZoj9heeZRNbQGWOPXa7QbqCljhx7lDahAKRN1guH4EY/f/pCUAl9+8TEXm8ByEHlzTBDynhqv0GMk1h1TuSIUx1Qi2mZUnoi7GzqrWXhLmvYsOsfZ0eowNKs5yQCwiu8eTupDpRTWGTo8aoOwinIQ/yYl3nypJQhmIDQP3ClGQhG2OtQDOzPGJDWBEsVArQqU+BnVCtp2aOMRPwGLUhZV9gLW5El8Y4uwYUxROGiyI2EzShWq0UVRlKJqTdHt+03F2Z5aFCkKsCfCiYRMDsrBYkjqOtr6L2h+zluULgcZUq4VqkfrDjBt6Bup9Yq4/4yry4+5uf6UMbxkP71m3I70dcm4iez2kZN79+nXx1yEwPpSs9MV2yfO7g8cHXkevrOgP4lsX16TtiNXLyvH/m36QUOTrx0vOh6eLdikLa+C5uXzC372f/0tn3x+zjYWplq43t0wqcg0bekHYd1MKeNsS7W0sx/VHWBOaUTaK+dBGxnGiRVNG102SV5VwvJZLpccH52w3+8ptfk/hkhOUhd2RqO7ThiQ2mK7Thg6FparJeujY4yHlKJY39RCQa7DrOBme0nOWxnQSDUL2gvrqA1CS1JkbcnKYDB4lSlJc3MVuXp9JY2gFtWKDLCbvYlqki9oqpcsEtfy+43dn/5Qf/Dv9fBnG6rqFtynFeiIdhllJSCs5FmdkuQaK6kBk6Cz1NRWQ847rm5ecHXzipDvcyfJ7U87YjV3SZVaE6omrILOyrXgLUAipZGUREJoTUF5zbhNTGFPiHtKDVQktKVqTa3iC1lKG3arilKFMe65uDwnTjtWneNsveKo72WQkBPa2VtV1d3z93uvawbnbs+3qqCr7GtaK1QsTNstcT8eesH5ja9FiA1aKRZ9R1UJVQSwqrngtGWxsNRFzxgT21SYmjpLtbpXUr4LJQTxj6cNs1tfZIxpFkW6JV82ppOuzdvUNIIMLPqBk9P7DEdnYBzWerR1IiFsvUWp4rdtdZP2l4ypYKzGVJGaigFAFdBMG1JMjCFincf2S4ru2IfCZn/NdnvDzfaalALOGvyiY33/lPXxEavlkdTZNaG0EF0kR0YJcEcDqNo9SWmyUiWBMCCDn5gS0zTinefk5An9QrNPe0IIwlqtbS9Shtw51P1T1sOAqrmd50KOke3mmpvrSzbVMu63TONOrr0iXmRSVxzwPgFvG0lhHkTM/c0M0M3Kvlwrplm51ElsucZmP1OdolCoWdN5zXrwh/tGzSDjneefL8i74Nmbnoq3Etc378F2Vas5mKPJxBt+UBoWYlpIyuG+rYjUt+EVB6KJ+sp1aG1TFPVie7Nc0vc93jdimNaH/dBoI6n1ShFLw4asQVtLyhFrLf1iYH16zOnJKUlV9HbD6uSY9ekpZ6f3cUoGdc3pFBEeg3sjVbeRRRRivwDtDRQ8ByUJtroKjmUB4z2ny1OOj+6zDAm/vcRoL+AoWewf/o5z+8c8vhagyw0MmeWTakYvG8MtxEjMkkCmm2+QVuLxJgmZwk4pbSIuemsDRQA6beXr3sNiMDhvyRUxkE2VMFasEqNrZURTHbJiqhCrYMS2WiqVWEUk15uepR3QVqHMBIiMMKYAVWSoThm8sXhjyVrgLQETMqraA69i6DsBJrRMUXLhwGBSWpOynHxnlGioNc30V6Gx1DSna7VkvaqoLeGDKoxATbvhUGTB/+Xcq+btVhO5pvZnJteMquYOewxqVU12Kwad2tjGTEpSQSoxRPbO0nee3muchjQphs6TIhzVAAAgAElEQVTRdx21ZMZxZJoavaNIuAPNx845J7TZlMTA1Dq086QUoUVpa6tJNTNOoxiBN4Yf1MYcq21m1xDIWVOuFN53LBYZkwv7qczf1aZ0spDVg59Cu4e0plTxUrpl/gnoVRK3iUMI83BG8UtJhBRERu2kKC9VUti0msFURUqJnOoBWMmlUFKUtFdrBCCrzUi6JVsVxBMo18K4E48fSkEFYQkUKimLbMd5i+8cUVeq0U39rER2rOf454xSIvcRWnObtFZJCHvzxm9cqzr/3C05uvWfzMWPpFDKc6Jn0LZJoHWToLYoLmO0BAegECaRyCNDTIfnUI12rTHoaoVBrw2JQqlyHKWIJ1stSRhmRmRQqXkBzQcq17U6TLxqvfVtkH1BroVxmlguh2YqK0yKmIUtGmKU3seIbni/2xFDoFvKIpxiIoSIt2JYar1piYqOIUwshp6Ut3
hTcUYxeMvJauDhvWN6q0lhotZC3w9UbSg1c31zAzWjAasNzjvM1HaZA9Iqf81Nuq/aPZLTLatRWAOZedg5y3/azf7GGv03l9u2Sc65TXdw2jqD0a0orQKWzhNhaEVNO+fWGrrO0/cdP/+XP+a/+jd/ya8vbiC1RGmlMN5xcnbGan1EiCIP7b2jUEhZAkhKSKhacJ2RAJjrK9I44mYJ4x2QOMTEGEIzI1aHVDStFf/dX33C48f3xBdNK0LKjCFx/vqSXDTG9hjt2/VSMAaGhWOxEI+zUhylOvHzLJJEXhoImorcK87LWilJZDKR1oDretCelGWNtdZgjT28h6YlOBpriSUTQhY5he/E468qlGm+L6rSWY0z0nTEGFsggUdbT21JZJ3zlBTJIeDcguxPMf0Zw7CiN4ajZcfgCnm6ZLmEriukGsBZTh89xQ8ndMMpXXfMorOsVKarIrVWs3RwDsyxwvYsqRCnSNwFKQorTLtAyoFcIy+ef8xvf/Hv2Vx8ydX1FUV7+tU9pgjX1zd434tfH4rje2eE8ZLd+cecdorrywk33OODD3/A//B//xsA1sfH5FLovGPhDKYmako4191Ob3VFaWE5xQoYi9KVmEb+p++/xz89WcL//n/IR3vs/lD98ge+9qc8dPuYjSm+3T4A+Az4GfxLDDz8Jn+b4X/rj/jg/ac8eXJGiq9xVrMYeny/xqhMWhriynK86vnedz/g2996n6PVis3Nlv040fWO9dEC7QwpiSVG5wyr1QLjnIQqFWEdLoaek6MF64Xj6Kjn3Xee8uTxGe+885Tv/OC7nDx4ILJ10SK0xsW3fV1koBWF6np0b2TAVytaO/bTxMvXN1zvMut7Tzk7XVFU4GYb6LtTUtYMw8DCBa53rxhTYTMFYnYUHAEBn/J+x+7iklXnGO6fUiuM454wOJQWg2vZvzOq7TMaYaQ1596DuqIUSZ+vKEkh1mJgLjIUGdzu93vxAAxRTM+1bunUUqOSMs51DMPi8DxdNxBCwFrf/IVpAK0EZeUcRSFQMwVRIWijcE7RdYaeLOsjsNBgq8j01eCxi4GF63na97w9DDw4O+HBu/+I/uyEuBww3RKUPaTA0oYvIuuciOGa3eYFYbpiMXjW6yVTGLm4uSZVw/HqlPXiqFEAEjW9Jlx/xubVJ0zpJbYbOTp1LHtDvNbslKRd91oYGkfHPY+frjl564TqHG89fcj9E4cyGz754pc4Xbm6vEJPgcdnAVRLsK2pMeAMXWdYHx9h3JqrTcb4NWcPn8rwd7Fg6DuMkjCdnBI5xoOFQwxBEhqbfAgtFh9K39Yxytzx9jpgPsJ8kJpFLHFQwtyUnkUzhcg0Rjrn6DvPMDgyFmU1pUoQxOtXL9FZ9t39tKfkyNB1KKvYGiMBQbVQdcb3Rbz9VA8YJHBAhuetrUMr8aksIooi58rues/25lrS25vszjlD1Z2AxDP40yxGKpFaJijieTX3A82Z8iB4Yw6OO1R37XG3/6swUwK4La8OaxooAXdVufPjCWNFepfnczt//8y8qgKs1yqessYYakzsr6/Zby4peXrzOA5n6LYGuos9qvl/CqqyVFpgW/Ogk2NWUES5sb2+Zj9GUs445HenfeDm8ort5pqapf4ojfGEUdLLFEm8n5UoVhtKSFxfXfP69QUX/UteXVywHfeUlNFW/HmrEHqZ5ay0oQD1jmeXuj3Hd19TlcYZo6CmTNoFyihMYlUt5ATFkuOecXdJHPcio9cGXWC7FRlmb02zFzL4kCj7SVjAM5g9/865Tm7svlJmQME3X0cJYqq14r1vQXcF7RwYyxRF7josF5zef8Dq7KF4ETbmnFYNoKug0KSU8NaJpzalBa0pyImUoqjMahYbJa0JIaKnCec7lqsV0xTZ7Xe8fnXO8/NnXLx+SQgj1ircwvPWe9/g0ZMnPHryFN8PGN3huyrrNVb6FLSEVSojEl2gpkLOhc51xEk8I10noZD7aWJ9tODbH77H2f1jLq5e8fLVOdc318QU0Nqw6gfMagmnR6wXSwF4i0KrTAo7bq5fc/H6nGtlub6+4Gaz4Wa7ZVQJ4xXVWIqRfbwWGUKJHdeseKPV52LTVbUo0VJKUpsWYSXqUsR6qhRpfbNqwW7SCys1g5NahvxtfZxJAfNQZ37M1lEz09Na28C0JjKR2+xw/eoGyOkmX9Zo2QezsH2NoHdCWiKSsuyfQsoSsLZUIaZYozFGoXU9gHreSVCTsQIcq0bacFrjjEbljPEO5SxKgU2yVxgvaci97ukXS9yixy86huWC4aZH5cTZas16WOCVbrWgrIDCAX5jceIO0fBwP9c7a/t8f8/GHIceS4PrlwyrI/zNlai/mli0qHq4H+88bfuzrat/zwDjawG6apwUDK3BLEUMH5U1lBwYY2AXDJ21eDHlwBXIWeMTpGkHJUiBqI0ENWjZvIBD5HotGeP14cauIaHnCy1mUppE/qoVOmdqmMhhAu0oTb5SYhSQxWhCtKhc8F4Tc6GGQszt+HWkKAVjRMWCSpmaC9lmci3YBuD1xjJYi7WNwl4rU2rTXipGFbTKks6jGpygQRhI6sC0IteDz4TRcpFVSjNGtiIZTJL0ao0BLb8rq0wykC1kp8hWiQl4i3LNGvz/Q9ybxtqWpvddv3daaw9nuOdOdeveujVXdXe17U7HsU08BA9J5IAhoAgIiUSioBBZSoJkIgZD5A+ZMEgRAUXIOAiISIyiYCeArcQQdySI7bTdbXfb7eruGrq7qu50zj3TPntYa73Dw4fnXfuccuwO39jSqVP3THuvvdZ63+f5P//BB4q15FRvXlH/M10BLKSqFxdF8y0W77z6n9gCydEETb+VXDSldNDXZlFARbJOoKwJWFvNqosQc8E3oSZmGmUlTVtC5fMNkghBYKMUZ2Mra0u0iIlZWK4GMJecwLadkLoBStKbs7LvkXH6UMMRDDhMlc+OjDO2AA51smpLxolVYC5LTcZS0DMmgw2WEBz7e7u0wWBKYmfaMGk93aZjveqhGf3blBGYjUUyGCPs7MwZU39EdAI29Bu8nTOZTlhddMQKaKs/mIJYQiZJxk2mNCL0XamJUYZilS3hrEVrCmVRaZK12QJ3Y0KOsjsNWfJW4mmcQaKa645NJhX0GkM9UhE2MSFOw2Daat6P9SQx5Ho9GAMUQ+wSuYAzASkZjKt+JZZkDcU3JOeI/UDulTIfETIKnHVDR2H0o0xYEs74ujBWOa4YYi6suo5YhFh0SpbFkKQKEo0WTeqXKCz7HrNc4b3XY5NMzsoaiUmwOSEeco5bE1dEr/EmtJQciUlwQT0dbGNxrQKHjXNMHez4wE4TuHfrgLvP3GBn6olDz2azIYswZEOwBefUj7Pk0XfRbpOKxI7wtF6nk1ZTM6115FLY5A1d3ykgXoHrLFI3PJVnbdOuCypP1D/GJ27u8kOfeIlvvb1PYy2H3cDPPTjmr3/hPZ6se+16tjZMlyES253pyoajbKIt1KcbtBVuTlv+8re8wnfducZQhJ8+3vDX31syDAMimriXh0F9sYLFxMSfvr/P912/x
dwZPn204Ed+5R0Ouzi+C/yJ1+/yb710hxuTwOGm58e+8B7/01sPGYNvmO5wIZ511HOD8ZRmwuSgQXCIjF5MtVExQo8QO6AbVHJftECIBZWiuQqKWV/Zi62mPTn0uq4FUQmNgnmuvk1eCy5Q4D8htWCtKbFByNYxVHa44qBqEeGMoTcwlNpV2gYmFkKDGC1QsvNE4xEfyBLIJmihFAcGOpahYWUtbdT9zA+J2TxQrCcby1FZsrdruXXdc2/ScHMy5SB0tLLCpYTxE4r3dAVOjs/o1pE2TNnfv858NkWalrZAWvV0iw7rPDdu3uDJ00ecboRiZ0gTVVruDC4LvUEBaW+1+LRJ91wfVL7gNK/u9OJsW1OcrxeAIeaANVP2plMmsx2cbyjiSFmwkgg+6PrlCtlmsoPkhP/wU5/7eiXL/6+P9XyO+4f/F3/lrymr6WLZs+4SPqi3Duig6YmzvPvOE7z7Bb3/cybFRMqJfhj4/t/3CdbrDSZZYgdnm8Tj0yW5bIgx40zmzo0dvuUTb/D6yy+wt7fD3Rdf4M5z99iZT7m2v0OYTig2VG9RC2VCKQEjLdZp0Vgw2NBiTECqz5ekgYuTh3zhc2/yy5/9DdbDhOs3p1y/PmE2bxDTMOTCnTu3uHFrwuMjx2IjlBND2kyQFBiaQu861mlgMt1lOm3w7VQZYzkjVj2UUs6EoOfYkBT48S2jR483lmIsFKthAsWTs1BSwtLibav2Hf1A7Dpi1yFDJIgQjN2yX0IIhKbF+Q5rVdqbqwG0876mtntCqGCIDKw3hZj6mtbsKNZTrKM3Ak6YzCe8tHOdVyaBIQ20PnB9b5+J9xRbuPnss9y5/zrt5BbXJnNuNJ79puXafBdjG9bFkmfX6WWCxRLTmvXmFGcLk0lgiGuWp4ccvf8Oj9//ClNvuXNwndRteLo8p0wC9+7co5/uqArBJaS/oD99xGp5SG56/L6hPZhzbW/Co8ePWa5WuGu7DNZyfr4k4Hnm4BZ5Z4bZmfDczevk00NODo9YLQwp9sRl5vln7jGkiOQ1xqzo44rHJ6c8fHzM+WKN9zuY5oAoE7JEmsmcZrpPHw3OCJtV5mK1YbXpKNlgjSOngWKUAa9ezIL1CoCaktXfWkT9z4ym81nLpTE+AIYihr4fuFit6IfIph+gCP0QFahF1//Z7kwl0iSG9RHd+Sl2uIVNLaEYHBvwERtaZq6tvo8J6xqKK6zjOfOyg/W7OlAzWpAIGVME6wKOgqSC8RbBUnJPWp8Qhws2xjAEVWo4Z0i2IYkO/wsRGyCYTOpWNLLGxQsaem0HjSWPHghGw90wdfBbB7gFVwfAQK30DAX1y1FlCxSCqDucFVtr71GdZDBFHf4cniwNSRy4QvGFJCggIgYvBsxAsZnBFvqhMDUt5bgj3VyoqX4dBFdiDZYPB0eIHV9nrqwfqzZCdhcxM7x3GFO03i6w7hLnmw3rozMeHS4UyBNlGTkBUiKu13SrJUPXYVLGSlFySBrAuOrV6DDS0piATxmzzuQWHj4+JbinPFyuOE8RjIa/ZWrowKjiqAoyYw3WB7o06HMYMM4huXrtVjubaHXQEocNu63Hdg43CEY6SAlyQUqgT4/o4mOKRPpuYDZv6NYd3XqljLpS6LtCtkE9rlPGisOVGgAn6u1r0cAv0LCCmAc8zRYZEDEIDmcNTWhwxmgt75Vh2Bf165PG0+7OaBpVgQkZawRnqV7y2gtasoZSGGU8KXiDWh4hWHFIVpsl61SZ1gRLcOrl2S0WnB4f88F7X+Xhg/dYLs9BEtYUshMOjx5x/+UX6bqe23fusrN7DRcanCiwlHKpuIKGSwHkmFXuajXQcDAG8Y7swTUW31pKU2iuTdi5vccyLwhLh+8MXSoE3zBtWqRAv+nJfeHoqKe7bZn5JcPqkM3qmNOTR6xsy3p5QT8UYjYaqJkL0SYkeCYpM5mq7Q7i0DydTLCeIUesVyZnplTyjtbjUgM1KyNByUUjOFQsVjRB2YitQJLbkhvGe82OhAssMtpLiQ6AlNkn1SNegSdbSSJpJMJYg7Nq/eSMV29cqDJZozLgKyGbFnT4a4WcqymPGS2UFGwKAawXsqmBnxIJ2Jq8brAOQmNonKERVL3QBDatR0i0knGmY1K9cpvmWXAN+7caXnnpBW7ObnN37yYhD9x97gVef+Y5XCyEYLd9hwL+IxuESlrQT9VpoW4voz876j9vNagzVNhMLBQP/cSTPCSU8duajDEDMW1QCwF9h/UWLNu1EZQg8fWsDL4uQIfzyuSwYwqJ+nqJt+rNhSGK0EvRZsIWUhZS1ERAVz1HDGBdwRmHtRmDq6wNTaEsMbFZbYidxhRTUJChSiVLURmMC019L5XFpJptPfiYBlLRxMIuDlAKwXvSoJKOjJBLxg4ZnKPfbJB+gEE17Doh1UewjokNtFZBQ1/TEzGQrYBR77bQ2C09XU+YgiwIiORq8KjhBKPXnFL4HcVBTqXSuyr7qEpntdlRI2lwkDwElX+Qi06/ndUEWRFSKSOLV/+mqITW4QnGK0CTCzkWSkwMZByZkiKDpcYIC3lImFwUGLNCybFOJS3SKyqeck09jJZWJiolKKn64KghYqrsHyooISOokoWUC8t1x9OTBU1lkFnvSNWsfdN3dH2PoQYDoO4I3lidrNZN2QCxpqDqUEuBLC3evE6LMHjjalCHBVOUFOZstZ7SsIomeBqngHFrAvMwxWUDqd5GImS9bLYLp4gQfCCmiDIEq+deLOQm0VhomgayVeq6vUyqw1mKc2RjlGWVkioI0BlqzkUTlCsgt/Wc41Kqaoyr1+EI16ufhHO2Rl+numHLlmo8+pgVlEm06voaCR7RpEs1gi24Oh1VWXIpkDeDbj6igJFU0MyImmQn64h4hliIXcKarJLyJmCcgtcqi0e/lxN9HBBnyUXXjCLqqxMLYD1iPIVMLILLhQA01oLzdepuiEVYDZFQBOs9xnrE6fl2ImrQWhmYoH6Lmr66g3OeYegBITQBGwKu8dhgKCZjUiK4hmvzOTcP5rx8/y43bu3ReCGtlyxXwnLTIV3GkAhu9AxUFnBK6pVJNWy92li0TUsTGmV6ZTDJELtIzpVDIQqiJtFCqFxh0l2d9Hz3s9f5W7//k/yNNz/gP/mnX+Kwi9yeBv7Iq3f53c9c4+999cmWZr996Kh1/LTdQEQEV1O86mWv4KmBv/btr7OKmU/+5C/wzO4Of+u7P84iCj/+lU5TRp2yLXNJNG3Lv3//gI/MAn/iN55yerHkz714jf/uOz7Gv/aPPkcR+J5nD/jTbzzPH/3U5/inj0745I1d/s7v/118Zdnx/xwutMXwExaD0SKiTrPUV6S+nzleWhMY9f8ABdpyLlt7AmoDUzBIEmw2GO+xLpAI2KQM6SI1IdIaXLGkEmuKFFg7fMgUt1Q29vi+beXohiq3d5SRDWRGnsN4TyugeXVYpam9Y0DIOHV7ihhPdlPEa4Jx8IbWFYLVBDJdpizWHnFt3vDC7WuEV+5w5/4B4s5xoaOJmSSOITQsY+HR1x7z+IOnBL/DnXsv8uwL
LzLfuQY9LA7PeP+9D4hWWKwOePfdr/Hw6Tk3bkxpdw1pvVL7BAudVU+1JjiiUW8g7x3JeDwNhojgWHf99tI7W5wiQOM8knYJRhNM+5h0Gtn6uqZHvDE0pqh8E0PTNuMyyfLgFtPZdOshZq7cFeZDV/uHOA2/6au/ieYBfOp33uIP//oxT4bKm6vyBoOhpEi3uiCXgaYJ9MPAernhVr1GvNe9DIBiCD6wM2uZTKeEtiLqkrdguGC21wYTPefrzZpXXnqOzWpFUzz9MrEpE6aPjvnyV99nA7Q+szcLvHD3Jq+/eA9nA7f2bvLcnZcI7VTlGUb3xJIHrHF4M6UOu7WoJqlsxaiPVsW3oVgaZphkWK7W2J0d7ty/ic0f0LZCMo7WB8Lc4WcF20boivqndhYzaNJsFzYIhWsHe+zPJrTeIMnruo7KEJFMKwZjA8JArm+5BndlJtMdjAT1cpEAxRP7gX5TmE5g9ETOcaCkQVm7yheFDKbYup9mfKN7Q6mDoCIqKUqp1g+jysEIm76jbLRuW+eEjosd2Th6QIwOf3euTdjbnzL0a6wpXD8w7MwCTWN56dXbvPbGR2nm9yjFMvEeK47TsyXnZx3LZWS3GCa3zhniOYfHX+Vr73+RUjYcHOwiJXNx9JRHb3+Fx1/9GrYfuNnOaKXQ+0I3M9y+cYM964m5p9jM/qRhPwQG6Vm7gVXTMbs15RtfegWiYdP3rAw0NrGXp0h0NMyxk2vkacP58Yonn3+HlJfs3r/G+cWGGzvXadp9EgZcBtPTl55HJ2d88PiExXJN0zZ8zT9lsUmcnp2yOD9lterJMbFCOD25YLneMKQC1tfzW3CNJ+Vhy9JQIlydylZzbVdrGuvAFDXtliwUowbpqbKwNxv13VqtN5SYNQCq7sNdyrTOABGfBzbLI0yK7E8PWF0s8dkS6Ekm0hWhbTxeIo3R0CoXLMUlPf66eBSrChgtbXRgTxE8CcQyZFivFnSrI4a0ZG1g8I4sUdPdvabLOzFbZqaxCZOX2GEBq1MF6IzDmKZKbpX/4ShQekpEwRGnLEy9ffRqrrsP1CY9oWAKpJqWOA7hRmDP6tqEw4t6aGXjwESK1VpP70PNL8YORJeJLpOjYZpmyGmkrFcYiRUi0lJ7XFtGJuRW0osCpqpsaSqsOEeY6F5rQZwnGaFEYbHoeLpYYI4uVAbvHEaE1nv1eht6St9DjpUgUUMHi6hkHU31cxgaPDZ5ctczBOGDx6dIOOGwG7hQyg/GmOr9nNVvrdbdY29iXN0XkMq00x5nZCdlyThvwBtKl7CmYXPe0y82wAbBkjYDcXD08SHr+JhUOlLKpJhZXlxQRH/PlMIwRBJRmZ0JcEqscHXNE1TJJAgxJ1JJVRovOqMBctYheBEILpINKjUXDePKAtE4khFMULDFZFV72GKxRr3qcikqva2AEFl7+bE2Ah1QSq4WNUYqm7mqrSSThw2rixXHjx7x9P0HXDw9IsYNzqq8erCZ880FxQrz2S57e/vs7V1DRL3jrRcFBo0qGqy1GoSXK5PP+xoEBQRHMgVjCzYYBhsZfKI3kWQSxiSgEKYNBwfXud7scrpYsDo9Y3Uw58HDBUd3VtxqCsuzR3SrY1aLE1ZmSr/ZELNBqmVXwZCnnubGnHvNnAM3ZX3esTpdc3F0wepipYOJMRXQgDFl2zNb8VAcqpSqQXHYmuDtqnxX8QKDqzeXLqCGUVparRaqtFMtArS2Veaurg2mMrfVPsOA9dhSamiqkoZCCLhKJNCmQN9zxRNHC6rx5zUQzKDYjQZe2RpqK/gg2r7ZMRAzojd69er2hhAsrXc0GHxRZr1Mg/pI5sTERZ65d4NXvuE1bt34NoptuH7X8vLzz3HD3IKzDVOTmB3cYL5/B1sMrghiSl17aqFeyTtXq0AxynxTkHtU+xik1lSuStYpCt5FYI0oThUTxIzJGWOrUkxTRbfqtXED2Val/xzJ69dn0NVmUVFZwNraIFX0vDYZYipEaiv6niJDTEy3+nRl2xQcVqQW1bC7d61OzAwpDfRR0UXVuytQNUZWj7I/nGXqHO1kwqbriTlXtFYN/YYYKZ0CdFTfJ982GGtIKWv6Y5A6fdJjU3me20biYtVMtAkNgiY9lrr4uSpbKpW2lZHtxWmq10FKSdlnRbDWE8wlc6kUaoNmiXHABUtoghLesvpnZTJY1cHb7KvEsabD2uotICrBtJg6WFMQRkl8Sqsdk4BMReEliZqRYrAUxfuGnnVfPRHEbrHdlCPWKzU1Z2Hdr3V6LcocwVqGkvBBPQL7OLDu1zoEsxaxDSkXilFqbMESs9D3PYfHJ6y6vurQdQGJRVuvQuFiveZ8sd5K/pRNphvj2JCPoNxIHx2/vv1cjyPXSxNLXbAYuxQFbQXWXU+0BV8Ki+WSFIc6wTPKmst5q6Efr39EGJIasVrnqgG1MgSHOCDLlUYtlyqtTQVTPY8c0GDZdFEnrrHgi8UXQyjKQtVhsgLTIXhKKcRRLluTQEuUy1VFuJRBUs+BjIvA1aWBrfTHqks2FGXHpZQxORGNhq5ooqxSlmNWf4tuSPTdQByi3k85b9lduah0MBcFZVMWlVIX9ZnLObPZDKyXHSGswVoN0ihC10ViUVaHOvx4vJ+QkwJAkdp0+lraiShY0QY1cq0goPPqXRd7aP2EFPV1bgEu9P5zXiXbk+kEZ13dGAKhbWjagHMqe26cZX9nhzs3b3Dz1k1mB1OMiXibcV6LAOMi09mKWVeYzmb0ucemtGXG5Kz/n/OHr9dUMo1pCcHjQzXftRoeE6MODLJUduy49V4J6gH4L779Y/zku0/4C595h7okcbgZ+K9+7Wtg4A++eJs/8/EXeH5nyipl/sF7h/z5T3+ZdVSg4DP/xnfyv7z9iO949oDfcWOP//gz7/CzRxcfeo77Oy2/5841vvP/+AyLPrJMF/z424f84Gu3+fF3MqmCGZKyBqIAv+/mDX7kzcccrdUn6r/58iN+7vd+I7/zxi6fPb7g+fmEL56v+JWn+ly/dHTOm6cXfPz6rgJ0Iqw3HaX6A44SvFxGya6ijqUUXZ/lig12XQeMtYS2pZ3s4IIji05cZYgqfbEZzFAnZTqkqfRYnDXqeSWJsYAxVwC6q5H045rDuFfWYUuqDWXdRPX82NGLdTSv35IYdUpnxvJKcCWDQLQt2bXgG5yFxmS8LfgqSynW473jxAv9fmB6coud82eINy13b80xoMMZH1gvVpj1OXM30A9nPHzwZQYz8MzdF5gzo5eEbT39esnR0aF6rSzPwK65fbCDC14HDqJs7GwcpUpcgnNYcVgTMCVADpAC3l4mZ3invqvBqf9ryhtS7oAB5yIhqH+llIgrnjZrU4NodnEAACAASURBVBGMZTabbqWsjXN8bNbwx57b4xvmDcHCSSx8etHzE49XnKRLMPxy5bv89ziy+K1AuvFRgP1g+XPP7/O7dgNDEf73RxP+67cfMYhhKI5sJ5BXAJyuCgc7E/7sK7f4nptT5s7yK+cdf+39Jce1t/+2vSn/5jNzXp5pmvRXusTfeLDgcxe
DppeLAbePbwLX969x8OoB1289z90vvc/Zyc/yZHjC/edu8fFPvsH911+l3d9ndbZmdb4hroRmMgVnyCZRpK/+tcrK1bVDoOS6l1UZ4dXwL+vZPbjJ6x99gztffsDaTLh55wabkwnTqWVIEde2zHYs7axw/brn5rUd9n3hcbesYQ09xHNaenZCy8yCE91zirSkjSV2rQ6wzC4FS3ETsttQrCORSZKYlKC/xxSDV+ZSSRR6Chuy8URZ0ZUVXe402dY1UNN3yZace/phjfORlNWSwtsO0GFjb3q1oaBsfeE09Erle1kKkmOVGylLpoiw3iQePTzh8LHWWn23IYRH7OzMmM+mPHy44atfvSDM9+n6yKSZ0bZTlosVR0dHDF3H3WfvcPjoPqvVGY+fvM+7777JZr1gvtPgHGxWicXxhrjqYd0zjZlrbYu7NuWINU14l30f6OMGguXujevc2b9GXyLHcUEXVtx5fo+ZNOxkz/Ubtzk+OeZ8dU420K8yqyjMzZS4yawfP+bigwt29lr8smW1cJw9esLixPEN33SD51/UerLvOp4+fcrp6QnrTQcG1qtTdo5m9P2G2HeUnHG7c4oIXbciDkMFRgsl18FhKWr1MHpp1WAoDUqiprZ7RM1062I5rrEqPSzINiFcPYwaNsNK9wPrEWNJYljFSEor2jJg3DM0zQTvJ0he0w+ZTdexlgGJkVgCeB02GKpn7azVOq/vKabBBFOHNGh/mVQlIUbouw2LdWR5fs5qeU7Xb4g5E5OqMFJKSA2ry1H7hJwTJmlgwdCt2CwvsGSsdhjbNcuMa1lJ6OZyubpttxq2jtSM4woFzCpsJleHc1d+0gBj+J6Mw8ZSPUENIrUxF10dxWaKE6xRIscwdHRxTTFZGYYVvDSXr/pDr/XymCrAIJAy6qNWB6oKPBooUskfKiI2zlU5npCtVo0iWuPrcYzHrMftxidTCpCueeLIsbBcbnh8dELxc05WhWU3VMFB/UMjeHzl/deAvryV6o2N/1V5qaIOunC4YEg5slp2bNYbtFNOXJwds+kK680JXX9Gyh2TaUMpWRPTK+BmrCMEtZDpO2WiGgfbIp/agxbtU3LRwYTd9p0FG9TTHdTSY4ggtYdz3pGc08BDp8MjUw/a1kTT7a4oY091uX+Y8SyP8ldTgQn9h4YqukqyyKUyB6Hvey4uLjTURQQ3ElRIqpqTxMXFORcXZ4hkQtBBcCmFJhhyUnstW+2lTBGsrfJvMw6Z7TbsVdCgFjtpme/MuXf/LtPGcoTKintruH3jFrsyoe8jZbXk4nzFowfHvHvtA9aN5+jBBxyfPGWxOGdtBzZdp7JUY7fnYba3y/Mv3+d7P/aN3A47nB0teP/th3z519/mYR9rDeT1PqHiEOMFtmX9aL/puFzzTPWxtFZtj6x1SqCySkAZz9N2jTTm6jJRcTzFPLbns35dfYC17xWrdaz6xAXFeup1ZEyVsdZ7TsEQ2f4dqDVwsaRq6aU4Rb0nLFg32maNYTgqk9VgKH3OEIJGIQXPZG8HFwp7fmBmHXfvPcMbH/sor77y7YTJPtP9jv3ZhD05wN4quNJDaEBafVElY2yp6q9CZeroWihX1jaj0wSj0j0FmWvaNWRKtRbYDtYTSAe2s4Q4wQ0zUjdFgkesBdMrIeXKOlvnFLo+bUkbv/Xj6wJ0IwMny+UfvYr4BR+26Szea7yv2IKQScmCa9T3xmtRLoWKpCuDwExmtG2Lw7Jeq3kmCMUrYNXsKJNHp55SJWFWF2fnELshrzfkHHEVDByGgSGneuEUnHO0oUGsAZNwtQm3IbDqIi6Lgn8+VABSUesRcBCjC4lOy6wu7s5rhoF1iEQtOJzFWV9PpkWkU7Tepq222lQvP2tzTaUxyp4zmjYrosCecYK3FolWG55iMdkoszDpBFdsro0elR46Tm4MVAltShnjKysDUamrGDxqcDhrAjlGUlQZqnUWMYZYPdpm0x28c6SUiatEjLmCpLoh9alnurPPpJ2w6VasN2oI6rxDXCDRIYpikLD0WVj1ieVmwdHJoi4U6guRSlLJWLCkLHR9xNAg4j7U/AJbIO7qv3+rR6RUD55SWUQa9BBLxmaVhfYpsew7pt4ybwJdSgyxZzabEnyDRG2arNXVM5Wk76tVz4MQGkxjMU6jzcfrM8eMdx6nhEaVIaNFP9aq/8UmUlLB9HptBTG0Yok4Qk3+tPaqrFVvZofbhmLo8dePIlf267oYbkfTVxcIBaDbtsU3DlPlsbkUyhCxRSjDgJVCsLpRSw14iDGy6Ts23YaULb5chkHknPVcZgWHo4AMmZgSm66nZGG97ri4WIFt2N3bxVih6zoWZ+eshwHjHX2M9OtO5e+MPgumpt5drktWIJiCk0juE8ZC66Z1mqZFb0yFIesalgViLvQpEVKqIJ+rkeS6jXgXFEQQmLUTZtOW+XTKbDbFN0317PPYyVTDKzOEpBvnuHYk0fdAaiEspWylOSVrEm7f92x6lVw1Qe+BLOpFqWyuGgoxfm08pVzKUZ/bmfDK/pz/6Be/tC1KGTdCo6D9xZD5wf/7C7x5suD5nSl/+/d+kv/gEy/xF3757e198kdfv8sf/9Tn+cLZivlkcinlFH3/3ziYcz4k3lttACGlyGefHHPvm+7hho5lKrjek2sqduo7kGeJQ89yuYZS2PN69X10b8Znni74mfef8odeeIZvvrnHLzw65ttuH/Dy3oyfe3iyBeCXF+d1GmsuQTRka1BsjdtKzEfw02BqUWvIGEISjJ0w8VOc92AdRbwyMKTo+55H2dQ2SlAbFdQjtXaFXPV0vExru7L+mFFi7lBG/GgErL/vjH7dSGVVS/UfzKWC++ZyClqEkCNIIRJIpqFYlf1EyTiS+rsYSzJO2WQTSx8nrKeR893CtclN3isD3cU5Hsuzz91jZ9LQ3r7Gi/fukN2E1VDw0xk7E0vjA9PphPZgTtetMRIxnPP4yRd5evqE/bll1jRIUmmZ8QHEKpvBgkEBOoNHkkEGiCkxrC9jaycuEBpNJPe2YE2iDcKkFTA9Q9RC2/sGFw2+CC0Oj2WnuQT6vu1gzo++cYv/9fGSv/qVM45i4YY3/MCtGR+fWv7R000tyC/3iMtG1G7X8/F7FmHE9Pp+oBtUnvujL99kXYQf+NVjrrvCX339gLN0m//hAwU5snNbgG5TPH/+lZt8ZKflj3zmKash8p9+7Ab/+evX+ZO/cQoi7HjHTx2u+eyiY5OFf/n2nB999Tr/9uef8N5qIA4ZE27RmAG/M+f+xz7CR177BJP5u/zqZ77I4nTBGx//Jr7/D/6rvPLyPWzJPPnaY3yeINKCNKhg1JDJBKvhXYzyaqXrKhuo6L1lnVO2e9H3K0yn7N+8zXR3h6PjcxYrlbK7MnAwd/RSsHnJ/s4erz3/Gq/e+whPvrLmFz/1Bd56+zGSepLpaG1ib+KZmIGSlVGIbek2WaWLdgcpc5IISQypnpviBTGJbJq6bzty0SFeMQnjM+IiUTq6vGY9rFgPHSkZvG1xboLBK3OlmoKbocY4i2UtG4ypnsZdV3
2pRjZ+qY2ODnusBV8EbzxeHAF9TUNfWJxtGLqOpmno+0TOA22b8X7Nl946YTJ7Cxe0CA9+xqSZKPO0O8PQ8eT9Pb74uVbrgwhnp2d06yXCQEwdXfIkmRPshBCF83VkGQwmGg7LGjHCbtPggsFPHRszcHhxTi+RLkSu3Z6ws3+TjOOiT9AENsvE6nzg5HTN2fGKbCdc6z1mZ8rFkwV2FZjtHbBZtpw+FQ4/OOT0UHj2mSW5N9hpIKXM0G+4tjfn9jM3MJWROZkGSp4Sh56SEzeu7ROMIeeei+WKUteKGAuL1ZpNF2msDqD1SlTWiJWsDVWxGFGf4ZI1/bekvF13vfO4tiHHiPeOO888Q+oGHn7wAauLZQVYLeICQ4EhF2aTKXfuPseNO3exkzniLogFNjGxTonc9yQpNFPPxDcYlOVaUrU/GAayjfj2MsfMjNBNUTnk8mLJYj2QhsjQ9aRB2aFjqBal2kyI7vsAVFahNY7YR1YXS/05q82hlVEcVX/RjCDcVcuKEUQaIbHLXu23H0UohKXqHi1Wbf0wdaNVsK76QpoKqNnqhVX9ArMk+tzRpRWDbMgjgw/hw0npv/1DBFL1Lhv35C2oVwsclT0XrCm1Aa5DshESqwCBoPWeDtTN9nWX+reket2KsWz6yNOTUwYJLAenHnTl8l0bgSdM9QeWq4F18iHEcWuEb13tHYUh99iSSUZqb6XXcIkDJ6dPOTw84+jpI9abC4pEJpOW5Wqtiq/KXBILLnioCocsmmhtvKkEpPo6ar8w9nKpFGxlwDnn8a3gQhiXfgU164AzFfVxH49p22dVwO3qIPJD15bIFvj5UPCArUO8nBBT1AcU7YOsvyKvRuoeoM9byghYKeiUsw5NdIjuKgir6qdxKG1sUUa4E8QLtoB1gjOeJHnrizZpZ+zOdyhBmZveWPb39pg//yJ3bt3hPPbkPjGcbMipkGLm4nzJBx88ZtcZjoxw/uSQ9x881mumSaz7DkfA1F4iGw2feeGF+3zv93wHz073OXl0xq/Of53F0RknT450kG2d3j81kGAMWqgU1y3gO9byYxqviN2+xxqoMAZtlS2SbCpIluTyHI1+8NZe8VDjw+fMmnEIbRhDQjEV/KVa3lw9x1eG1OP1P37NA6kkvd9E64uctFe0qATZ2qI2WlJf8+gj7zQEwlohOwjTCbefOeD+3es0ZcmdZ+9x49ot7t6+Qzs/AH+BlaQM32lQlDkL2FAvVYuUpPuKGxGysda3lwMLwImoZ6RKORVIM6OKpzAy/kgQBnDLAbdMNIMn5AklTRisJwYQk0fXvu2jcvC4FLr+9o+vn+LKKN+5lBFST47dmsPXYALrsRak+pQJBVwDocU1Tb3BVW6WDIh1DNQUD+/IqTBEXZRtZRzNp82HFsNUVP/sjcqWsJ4xRluTH9W4W1lmDlM0GcwalYNq82uxXoETaz3G6Qak+2uhjxG7WaGeGHp+vWuUHSgojd2gEd7Viy2ngimZ4I1Kc+vWlE2nBpnjDeVGDzptBU1Qj4OcM0OKxByJkvWJsiN1MGwiscvkISOpgr/GIBmsHycb6oFGUVNXTRcbi01Fw6npeKaALQZrCiSjnjAystJ0A4yictUuJ1ylz2ejPhaX4ao1YbIyyNQjSldLY9U7ptTfs+jGFsUQxZJTJsWI8+ojEXPSaZSrU2qsUvpzqUWBreh+3TwQnYq4MelTWQLGsP1sUJZjMmPfq0VNAlJN6nLAkDPdMBDsRDcuYOgTIRcm04bGgMs6pclZiFmBTuc91jiayRTvPS4ETWhT5FYTQ4PHpqzzA1EfhnGRqwhtnVaO975OgCRlik+MwQ8f3hi5XBAriPMh5mAFy8oWrBttdq+UcEojJKWCzdWLxKrDAa7e017p/9ZUY3+vvn/OZwWWhe3yst1YpDK+KvvAB4fYTMw9KXfkIqy7C45Pn3C+PGVnsYtxnuVmzdHTEzZDj2/1nr9Yd6xWa4xx+Omc1ltsifVIauBGHMjRkk0ip4h3lpaWtvEEGzg+7eh6jaTPYhTMEJXFpKLyGaFKOusK6pyuF1KEyaRl0k5o2kDwGp3NEPUiFQOTKY1xtCVgQkMUYciJWEo1VlUg1prxPFfwrQKdOV8GOKg8v2yHTLnItuhSkPISYJUKWl1rVDb3eKOeL2Y0iWWc0Ag/9/B4K4v+ysWa//7N9/nDr93FmLe35+9//tIDfu34AmMtmxhxdbMt1dR67i0XMbH1lMmZ46VymfzQsdkM9fWNRQD8nw+P+ZMv3eSXH32Jdcr82W9+mVIBChHhuI/8wwfH/MT3foJxmPef/dJbfPF0pQCbCEOn66e5AlzJeICADY0y9Uui5LJN9zOoqXHMMCShjwVfoHUB17SIXGHTpayYRS3xq+mCNirbQlWfdCxC4YqklYqH1rXHYausXgcZWWQbDDTuT4hOtZ1zFAvJZA1PuYSQcAhNbcKSFAZGA/SMl4iTiCQ9hiFDk3ewTJiIZyqJkBNlEBZFWHWFs9MjOmt45ZUXOLixp/H1zQwRD9KSkqE4wYSWcG3OHvu0uWd/J3Jy+i6/8OlDhmHDNARK3WtxykBIucrm63CSYkgpYuixJLy5jHBoQ6JpVIjYOIeVTMkRKSpNyUnwje7vyWZSyIjoPhLdJdPrh16+zs88WvCXfu3x9j55SuGLj09ACn/g2T3+1Cu3eW7WsM6Fn3uy4C9/4SGrpIOdf/x9H+WnHpzzrddnfMP+hB/+/AN++sE5cJ/TswVPu8jdSeBfuPYc3/2pdzjMnr475ifKBf/OR57j737uLXb58OPFcsK/9Mwr/Je/8jbxySkN8Dd/5ZC/8/3fzMfXT/j88YKf/QCkaZFr1zDO8b8dFf743R0+OvO8d67DiBfuvYKkxDKecpEzA4bpbJed6Q5kw+7uAXfvv8juzRukYWC2dtA5ipuSiiMXQ7KQS8SQ8dv3TYFnUKaobtdSBzuXzAdjHDZMmMx3sacrlouO7iKxYxtevvMMJ13Po6MnmLuB1156lY/eucfzsoLXNnRP1wznF2o34JT5n9BMs2nrcSaw6jbE0jPzE0JTkBTxbsARMTgap8BzcJUdEUzdtgwuNHgrmBrg0keIxRLF1LzAQjFZ113ncMUjRddO53S9j0OszWTNUK2DBbV1UOaUtQ6bdDAUTIM3HpdbXGn0WdJA6g3DUNfzSmiSWLC5kLuIrDI+GLyfqCyJNdPWIdLhXYdzjrVTya6TCbFviP2MnGGImWgaxE8J7R7zxpHziiElujVsfEM2QjcUbdK7npNVYd70uNmE2c19bu3O2Dt4hoPbtzn66iOePDji5HCJpIYHHzzl6HAFfs5BmtPsZi6ebmijoZl5ZgSCucWtGxNm0x36jefRgxP2DlpiP3DjxnW++3t+D6+89jKTSaBIxHtL33UsFudcnJ1ryJoUXnjpeU7PFnSdmojHKDx8+ISvfO19Npse40IF03Vo4kqGYqqi43LPG2seVWhUf1fvaz2ZcNYznWmyurNW/bRK0WGH0YGC8YFmtoPxTSXlOU0kjoUhFtJQ1I6nAcTQNg03rh0w4
vm+adXcv+6xo7heUweFnDKr9YbVKmJjVd+kap8jdY+wtnpG14HSKFEsYIwn9Ymzk1PWyyXT3RZjlDWGCCIZJOrUV1QUaj9U3ZlLf6XfEpKrxeb2R3Q9MCODYkweLJpoa6oCZGzqtwDdmDpvCjH35NKTjSXKhk1ckNgQtnF7v81jrG1q7Ugd8o4S91IqOGGU/GCr7FSk+j4bleAp+5065NJeQP2kTFW2UJMapRIopJKUDFhHLInFxZpBzuhKw1DPlxTRXtxQa/bLureMljrj0LDuQVeBUj3ECmY4R596Vpuek7Mzct9RxHJ8fMzb77zLgw8eEYcNIgnBsenWxDQgpZrni8GIMgb7vld/x6A2Noh6mFmjYQnGjaQMVbmNPboLAZdkS0QRUe9NER2sZ8A4s7UQGu+1MZxlBOBGgEbBoitBGbD9fyUQVHZh0TCgXHvUEYixzhImDbPdHSbzGav1klgK1ni8s1Wlhtbh06n6dW0DJ/Q6cF5BS2PqYMUKYsd+T9f2Ugu0EBrm8znz+Q6drEhD5P33HtAg7PqWvd19Sr/h6OIxZycnrFdrunVPMXD05CltGng89KyOT3j46JDz5ZrYOoZSmLoWiwYqJgPGWfb2d7nzzDWe3bnOrm95/NUH7M6nNMFXtqfyKM0WbB+BMwGT2ZJtKltrHBJvP9evjSQOlYWb2p/p+klRz/MP4armyrkybovpjCSOrS0VaguWymUSrtTn5OrruHINXCpIFCD2Fa8gVw90qWGbgj630TAXJbCg9XG9dsQaTOvIwTK/ts/HvvEb+N3PHjBhTXNtxt37r7Iz3cV5tVcCEKO/j/iqiLSVnAXgt5YzhvE9rO/Xh4hsetMbRDEXo/eRqltSBecEE4GLHnN+DOfH+O6CIB3GRgYb2UhHcLK9f0YG+Ah4ViT16z7+OQBdHbKWEc29BOiMdQwx0ltL41QS55xQzGjCnzA+MmRLU09SzuoNITUOddkn2tUG4yxd19WYdKlGnDDrvC6stZjCaAMdfMZ7z2bTse57hn7AxIyzti7wOh32RRiy+tcUq2BMYy0MiZirJC9ryiRWPdlyHlh3PYvVBV3uadvAZDLDePUvKqbgXcAGp1K0MhBjRoxGTjuvJuGl9DjbU2SoC7QluAasAnvq02arf1yun4vS4PuBFIXVWWToEv1mUP+BmFT3HxjnOtXbod55v4nhkamLrphqnl+g6CTMW7ud5IEGLgxDv007zMayXq6wfsAaQ9/3yqDL6u0ACn6uNp0ypDZr+r4jBC3KS1CPwlRRaluNfo0POtG3frvAiws6GatSZgG80emhQTdHQbbHIVkoFPqOKplQQ1SlAqvHBPWeK3ZsnnVhkSsf1nmSQB8TrU10Q9IZrrGkAquuRyeVmiI3xLT1b8hZsLbgctFr3ljKOMnCUKqUrVghGUuydjtTVMs4IRtN3kmmEIt6Ccas0keTdVG+GkKxXYRRivpvDc6VLVOLeh2M8d51WVKAOKnPnS/jdNJqs2aULz5OFxVEynVqpxeXrcWxVFbYGBeeM2p2HjPZFmLSzbLb9HRDhwhcrBYs10v6vq9grjLmFsslMSvDVRDWXcL4GbPZHnZaTYNjqsdHjXHPOp0rtgKJCkSXFMnJaEJoNogzZKtNX65FmW8a2tBgKujsvCE06gnXthOm0yluo7T7ro8slitOTk5pArQTT9sG7KTBeIMJei43MbOOib4UTZ+sm8AoAHDWEmyD935LHxfUj2kEDUvOSDEkyepxNrK06n+uArVngzJf78xa3l5025p7PO8A33XngB/6ppd4dX9Ga9VT4WkXYbtSwPvLzUi6q+BgBegqg285JHaDR8plIMuu12c6Xa0Yhrylvo8A5A///G/wI9/yOn//+74Raww/9uZ7fN/d6xxvenIu/JmPPc8P3L/JH/gHn+E3Ti54bW/O3/zeb6Ivwt9+5zEAzhlc9XoYp10jgCkCzlXvi6JTdf0ZszUVT9ZSjGdIBddHjEv4JuhQp1TfOqvXRzHa2pgRlDNgt88pWxBurHBUdvLh99pg9Xe2UmZlfZrx+zquZtRbG6cupbaaRUMFSOqIor5SMJBrc2DIeBmwRESgsXr9mByQBI3dZT7x7M2m3L51l8n1a2zKhk//0s/z7gcPuPfCPW7MJ6TUsz67oCQDKTBET3P9DiGEmiZeQDqu7QReeO42n/u8Z71eMm2mKhF3gTDxOJMu1xZnceI08XfoSWmNeJi2N7bXrKvG5xZD6yc0rmEwLc7OkNwy9J6cPTE5NiXTGalBPZpCCcocfX4W+OHPHnG2uNiObC7RW+HwouFP/ZO3eGvZ8+LOhP/xO1/l33v5Fn/x8+9vB1J/6N4+f+yfvMWvnaxoHXR13Lxcr1ksB75j94DzIfHm4zN25ju8c/yU//bklJ/52iFvna245HdcPv6Vn/40j9c9y6iWDe8u1vyLP/XzHK57Tuv9Ckvk+AxjLa21/Ovvv8dXNpG+alH+0l/5i6SUWHcrsIb5dI5k4enRMcvlkoc/9h4/+ff+Ls0YkLU1MLfbAaAwDjJk+4XLRuryJho92Yy18PQR//j7PwHABPh3x4P6+5/VzxZ4/4uXB/vpL8Knv8i63hXfCnzrDrADcMl2vPzlQT/2XP1+Vz8urw59jIk2Fdh9dnLldY+/q//+RTvjB98TilgwgthCYah7clvXXZXHOqup9CXrsftRPm/AiO5f2rnXtUCsGnJXgMAUR0kVys8AhpQzQ+pr46jrClYDqIwPNNMZu3vXCW6Gtx7JiTSscGbATTQoqFucUwZLjk4LFjw2TAnWYcKM6wc32TGBRX9El1bkkjFmgphMnwdltG0i3g5Mw4DvErvesXc9cHq64av5kPPjC0LY5eX7z7A4j/z65x5yeNpjnGeVzmmnmeViicmRxcYwnzcEm0EGnM0cHv4SX/ryIXfvvYjzDV3f88lv/hjf/l3fzmTWImiowuLklKOjI548ekLOiSKJ6e4uFxcrLhZruk0kJSFnw4MHT1iXvl6jdYgtY1Kgrqcw7ktXgYHKkE6J0sFkljg5PiEOAydPj4mbXs9zKWRxxKLMueAbutWCR4dHPHj4iLu7d1ivO7o+EWNhGDJpiHhn8L3BzOe8+PyLvPHGG+zu7OnraxpMaXTwIqIyv6IyUAWa1NqjHxJm0DVayqWYXoGusSYreD+Gbunw0GQhdgOHTw558ugRe80M36htAIjKpUaA/TclnV7pCi6bwu0bOD7GxaD+v05UrnwvIXnQYIWRPVXraVUyKLg1bsQiCtAhA8YESt4w9OfkvMS4KdDy/+kxLlr1xY5AAZi63/jt8DQL5JJrYa8exhRNwzRStl9HLDnr/WpNvbWqHxtWwYIEpAKbLjKwJpqia0m1c/pnVnhz2WePQtbLQ1D5Zk6ZZCJFBlyrr91albgO2fDBwyd85Z2vAp633n6bN998k6dHZ5ScMEbrWqFgnaUbIoMPOCKSsvp79h1Ddrg4YFLC5YxtPE3TMmkCpKTAlTE4r0CY8656U492HlrrW6cqBFNM9ez1FO/V8xtV9lwdxI/9xxZ8G8EZ
+TBIN/YqKVXLJjOqh/TnlD4iTHd2uHH7NutNp33AWeV/OwFf2N1tuXX7Djdu3sT7pvqJWnwd9FprtxLL0UMU3MKYpQAAIABJREFUW6oSV3sp7VeCqp6sZ3e2y429A1576VXmOy2527B48pRHDx9xslmxOD1neXTGet2x2XRkCouzFUcp4VZruvNzFhcdQ4EuJvVc8wo2OathBjEpK1tKD2ywDBgi1qhfc3YoC7LYrbfkhxl0VOBO7+ntHbot1MdrTlleZus9PuIApt47FhGn74/Rv20MjHjz9uYz43031gRF+8DxWrk8ufpp/F4FqaUq2rZMvArQNVUyaxM6tKsBeFTbriLqA5tS3sqE9UMBsUgBb5ju73H/5Zf5HW+8xK6PZG+Zze6BtOpmZuvfllDVNQZ80KC37bHWurbKtLWoH1/PlTVoZMployGVUOvwDESt3weDDI58/Ihy/j5m/QgbVzi3wDcraCKxrHEYzW+40rddgqu/eXH5Zx9fPySiLnzV/nF7knRaAcMQGayld45sdZqj4m+9+dbdgE2FZohKiUabP6kndR0jLNRvSGWOlYpbmScXK8WOlb5p8bYi7/XmH/qelAY1giy9Mnycr4uSNjk5JVIpmDpBCKHFuEBOQ/XMKjpJcyqB6IZI7COZzNlyQdM2TNppDa1Q0Es95JS2mUtUo2ErOBc05Ra9IS4lUrV5MUppLVmfV0QlcWMjj4EYC13XEYeB9fmKNKikEjTZM9S46lKCItEVLNHS9v+l7s2DbUvP8r7fN6y19nCmO/UdepS6Wz1ICDWDkAREuOJghB0IMQ5gAoWTFFSFBGOGimMCMcakSoADCYMdhthFsDCT5QIcWwwGhCwJsNSaWj133759p3PuGfe41vqm/PF+a+19WyMppypeVbfvubvP2Xudtb71fu/7vM/7PDqXmLIIfQp9sgnkgFxQVAOGhckbrzg7LdqaerFg6ZcCXqqID5Aa0bnrXHui97lIVwwqw3QmYtC+aQnRU1UFaAHMyAEcBba0xFCilktS7mZoJUHWpFV8T93P6EQml6JiFtJNWZ9EZ1K+MyKin3QOZqAyXZ8MLsTUsWKEpK+1zpu96JuQ8mgyisWypjIaq20ejVwK6GoEEHPeiytpTNl8RNG6kK2qxY1PrK0FaIyulXXtnIwu56QoegG76hCoQ2QeglC8MbQq4Q3ijEggZXcenZkNqCzr+UlYdT14kDJNHC1OlorMMpSRHQGCfU405NlTKUHwhLqFIqHymPhtI4BaE3zHuIj44Hp9NR8CbQy0bYtrWrRRLH0EYwnOEENJStDUqtediEmCekRG4XyMhOQI3lM7z3DTUOoa39biXuyFvdYqi01JxoQRdpxC0wQI85qUIo2H2hl8Ev2EZCw+irtb3ThaL2zGLr7Z0mDrFr0UkV5bVDTTGUfTOTFFDo8POZmdUFo4tbPJqTM7DMZDXExMli2HsyUni4Zp7Zi3gUUjmn3Bx3z9JJEpjBExeW3EHKcWcKxp2143RJy2pPBOMRHX9o/1yH51VvP8yZyvedUF3nPzRJ6fDr1SilIp/smXvZ6/94HneMdz11mGwH/10F18+2vvzQ+b/CXFhozyB3kIJc5HyUSfOJqxXVruHlVcnizQSvE5pze5Ml0yadzauId0AFNKnNSR73rPE5IkKsXDp8b83c9/kH974xDvA4/ujHnXy7d4+ngOKJ6eLPhXV27x5Xed5R0v7CIW7JW4enc6c2t7R0rQ1o2A5UkcGVEGrQtsOcBUI8pySNQFSltMUcpaianfoxKyP1ujSDpLLatuFFD3+3Q/at1d+75AUWvXnNUOGSIiHycpvkbl6cKQG12yiXrf6erJtadLbHNK1vqalDxOFTgVkN5zLprJ+jcZKPepYek8Lm7RhIbJck61scHZi3fSRsfm5pNcu3aNxe4J6ew5FPDii89zdHjMzsZZNrfPMxgPqQYllQkUWqOWE5Kbsj0cMqxK9o+neWqgoqo2GJoBfrbEtQ0peWxhxSii0Lg6FxpeYdZYFINizKAaIyxjRestKm1R2bOocIvlLAqLt7AEAy46IIpUZk5cO+bo9cWSkBwpM4LXM57fvzGhG715cVrziy/s89fuPYMxBp9nWX/phVt85GAGJGrP6shMlbFVTFyAKIK/3bMCoMtS9oMQKfN51RFO2sCpquDEie7lhYEUqNpaYch41z/CVikuVZZDF2hjYvebvowvfOf7MYXFRWlyxhQZlAMKW4reZW50xhBXxXinjdifP/0enZKYIaQopllaq7x3xNz8kYRcW8vzN3Z56zvfy39ox9VpnfOsFcDdfU3q4pjExLvvuCTFEFlFJl800UiWkbQUyEMp4oSutAZTgQmi1dpNHGhPwuXxK0NhRbA7BVl3xWjA+TvPc+fd97C5sYU1FZOjOct5TXSB6fGU45MJJ7OEUR6jk7j6YenGaCqt2BoO2DQlS6NoEIaejeAyC7IqSiotQvLReWYnM1o82yPPtW3LnvaoqHnwVRfZLHY4PthnMgvsHs4l/h0nrJmKBIeBycJREiA0JER6xHuohpe589ILnDpzhu2dLdCaajREFRrvItP5kr2DE06mDbNGdFS11UQ7wo5KxmqMrVq8CwxHR4SocC5ihOKUGx7dpEuXy8gN6gARlWR/icHTpkAKnkHTsH/rFjduXGd6fMLWxqZo2+V6QmnY2dnh9KkR9eEem1ubVMMhtijF6dVYqqJi0Tb4pqVJooG5tTHm/Lk7OHvmHGU1yJQXARp0Wos4+Rxjkvw9BLBFBd1Ya1pNeJBjPyY3iJTGR49RklcSEqENHN465MqLL7FTDjl15jzlhhUWWUqrAjJl7cS0kntZcQ7VKiCstqpPcghrTr5FmOMptMTQEoPrf5wuJ2HVFI5BPtuQIAW0b1DtHO0WFHGJNWLH8FkfWZReGKx5Iievhc6R0+amllUaaztChDB9Us9QUVkPTLMqyruxy/zHiAadCFkYXFC0tcdrT0hFHr2MAhSkFRjXXdKuGUl+P1Sns5UZuV7Gdcl1YlSJRIkyA5557mXa5vdwi5onn3iKl1+6SlNL/ida6SWifWVE09tKfVI3Yrzighe2bgzYDNBqpbCFpRoMwDuqQUWxLCDrkwmTTudlkcdPy1Jy36KiNBqMxWFx2XChW0vrJIAVc667Drd/zzqIB8LiToi+p7KqB/Uylk05rNg5c5oQFUlbDm4NWC5nYkYxMFy47xJ33HmBS3fezcbGNsaWGFVgTSlMOmTapRP313QAqtxZawcCJCnJxZOHrY1N7rrvEp/3uY9Rlpr5yRHPzhquvHSFk+kx05MZ05NjFvOGoC2ujTS1Y64VZt7SNgEXNUlZXIyowmSttyDa3lrhnadeLiA1JDdjMTthMZ8QXUNH6AqZqNSt08QaS1WEzrKcUfc9a3u8IuMA3RjsChTt9j95FLL5mIKUQs5J5f/Kj3Xjqp1GbSZqdGD52v3t/l6/vykD4x1AF2PsR2ONMTJtmOWaxCSxk2cSplv0idZ3Wt0RyAy4PObqTUAX4uRqqpJqPGBQDghJoSmkrk8JjaU
XAehcbU1AK9H1VLHBuyUxLHKci6QgWI6ImaZVbCbkRrqCWOTXaqCB6EUoc5mgLpjfuIabvAzuZVJsQc2w5YKiShjnpQGe6dMpB45eigfRZP10x6cF6ESXI2W9LWkdhCiaSDGCKS22qMTCXucgbZWwXayMyUWlZNzASkDtnG9AtHfqVgpio8DGSOvabMpABlc0IYreQ0yAC8Qobq8KJGBbCK0wv6qqZDQaURpLrBtcKxR6OyixVcVwY0OEx+cLOJlAdpDSRkYyknM0vqFuWw6OjrOLlABvJLIm1Kq7k1Ls6aCCpJABMQOxRCvbBzFJhoXqG/NN6n6+NyFISboOviW6Bu8bUoxYaynLgmE1oEiRoIAkToW6sJjS0rE/QvQEDzqWaGtlEwlRDAFMgbYVpqyI0WNLcWUz0RHKGc30hOV8TuM8SovdfIqZcafEgjlFmf9vXKRpZWOUGsXQOrCtwlpFYQqMaSkLy3BQkXwgBYdvHNYWkswmGa/Jtr59CJLkJ+VCIm/AsRs/k+utNZgI3cy+yskKKY+rIN0/rfLGrw2xKLFJUZUFVmkGhWVnc4PSaFw9J0WNLUqcbzBlgS4q0RcLDRj5vXyqqZdzrLG03mGtiHaGIKOLWmvsoGLROtoogJMCrBWwLcRIGwIeRUNiiXztY6JOgYaAVtL9AXF/7YVoOxbR2tEFZomLkfXEliRBVJok3QYpgNjJZELtS0JoxR47JXTwVEWFMXJ/Qr68xggzRumEzSON3S3rtLm6AB5zZ8iaIcpWqCQi2855AbWDIUbba+sBWG3RSuj7MUCpDTomgmtoFgt08AIaFjqbrkR0UdAETWxj3lQSqZFNKESFC5CMpdIWZSIuRqbzOQdHh8TMoEjeZwadoagsxX5B3dYc3DpETWdMg2c6X2B15GgygeTY2dngzLnTbO/sEJRm3nqu7R2zP1kyWTrmTWA+d7ROBKx9NoyIUdYsSRGCxFLfNKKvFISF61Mi+vxMSRDME8krAELcP+W6fe/7nuQd/8lj7NeOX3jyZfaWLWcHJX/9wUtcX9SURnHiHG2CR05v8d88cnefrK8X9H0XLK2h5chHvjyvefeNQ77vDa/mb73vSU6VFd/+unv5xWeurtZgbjB0QOTdG0OaENmrHQ/sjPjxtzzKrzx7jWdP5iil+NO9I/6L+y/yi89c4/npkge3xrztnnP8s+dvytalEEH2zEZLaQ2wzGY1PohjltIy0mqLinI0ZrSxQTXaIJqKoGwXGehYFeSEpB9rA7RK3aDRqljsgMpetHctIc3PYl/8KFBZBL2ThSB1zlw6J+4ZyEyr55bM/pNNW8vXuckSkifgxSU5F6w6J6EKEUNOJuEIaDzRGqgMC99y7dZNXnW4z5k770YZQ2FKZgczdp+/ys7WFrO45OPPPM+1Gze4/577ee3OWSI19XIf4gRfB/zJPhbHuCw5vX2KvcMZk1kNsaaqthgMhhStR/lapBIIJCW87ca3+ABt7Tk8mPXrJDgNgxLvIvu7h7SLFhU1J8ct3ldoW65YEUqBbwixRllPyqy0jjl6fmh48kjYJ7L/6v7a/kfnt/mu197Jg1sDSiMOaAdtoDCGGOR9rs7rvMd0+0cPwaIVzL1nq5DOeEryM51T8OtOb/A3X3sPX3jHNpXW7C0bfuflfX7yoy9Rh8SDW8Ly2q8dm9mg5Db9XqW5e1Awz89I9yyGEJku5njv2VSJv//YfXzpHTu0KfHrLw34gfedyLrKbFbJKQU+/tpzA772jiGnrPg1PjP3/PTLU55cLElRcgUStG3LV50b8rcfOs9PPrvHP3rulmjw5mNx7i5CjMzrmp3llEF0/Ps4/uhr3sLXvesD3Fw0n/mb/xzHo5uDz/xN3Tl85cP/Xj/7szriMUyOV/8e5793QOiGG5/Fmzi4dAG48Of77L31r68CErO/5dGH4NGH/nzv1R9HUB/Bb/1jFr/1jwEIDz7Ky//x13D92j5KWYLahDKhCk2zWBBNYLRTsQm41nHj5gnKVIRMb1BaGMuZwChNRS17nSKBsZLDdm7eMeQcUFPXS+bzLI0QI9GLZAo5ko6qiu3NMXecO4XeGvLaRx/hnrvvZmNjzGI84tzZM9xx7iyBQH1yjG8aiA2T44KXLr/Es89ucfbUA2xt3SGxwgdUabFrGJhC0dYNN67fYPf4mGK0zUDLlI1WosVlVeyZbUZLwzVGIRHYjLt1Wm/1vObm9RuMleKe+zzn7qqohhvC4oiO5Guc8wz6hjS5d7R2UrDGCuEVL65/Hda+Fh3sGFtidL1J1Wokl2xqIYW5jjDUhhBA1Qt0s2AUPMPk0bElK6p/8mP9fBUonY26bCFsIFTOXSWnsMZQFQXaFEK2KEth+cdAS56WSrlAz5dENLM6aSSpBbRRaIM0522BSLwUuJBwMeJ11iGPQtqQXz2tAMq8stAqaxVlUCrrz5o8PaOTolk4qqE41DqfOJ40fOBDH+ejH3qSZrFgejShmS8x2jAej8VnN4pOewgN2hQC9npP0zp80+BintgJLtcTgdi2GNMwLIQ5Z4uCqqqEPVcIu87YQqZiYtbEUhpjFKNRRTkYEJVh3oqcEqzXC6nP02/Xvl6BNsGvvqdjcomBns5mk1L7qgz6GGsxtqS0Q6rBBpvbpzl38SIHt25wfHyI0bCxM2b73Ck2Tm1x+uy5HqBLyRCj6EWT9KrmYGWI0i3xzpwxZIKH1QZ0IblaG4QFHxJVUbK9ucnUtRweHNG2raxDtLjkRohRSDaogqQsQdqkGCPgnA/CvtVKE0OgrZdEX9MuA8fHB0xODqnrRc/gb73UM0Khy+7jmWLTpQpiyAgqd+m7phMdXhAjnSadxFF9Wzf/9qdv9ayt7tUaoE8H1KU8Kav7z4FVs6G759IoEVLKOjibUupMYjOzXwgURpmeJdg7sIeY3dR935gRxqxB2wJTJVRpCFrjiSgr56SVBSy53MVgCWQyg4aEIaYlhiUpzgn1Me30kNBMMMljoiN5mUgkgQoJJdRcVPAQvDTRXdY+dQuSW4Jz0EZoILmKyUHL4uQY53dpQ6Rpl8QwR1NhU5Tx7jUMrjPP7kHUPMb/qY7PYBKRu7SR3gWlG3uKIJumtdiypCyMsIQ0lGWBrQoZH1IJpQy2MJRFhSlNZgKJYHxSEaUcIYlWl9fizNIGR2EKkliJkjLlOGSR/uAdZVEKyq8tXX6psx5YaQtq59FFidKWohow3t5kON7EpYRPmnIwpAngmq570asJEGOkaVpQGq1dXrgdOr9CrVXuovTgXQ+gaJTyPUCXUqevsBpN1NbkELK6QynRF3gxxCwuK+wLFTWVQtB6ZNPyXQ8obz5yEQR1L424+xVKCw1aa4KyNFGJeUFUEuhUQbIaXSV044ltIIQWnQwx3a7RFbFEQp4plwiQkghRay3dqBAMuJQt0A2DwjIoLLGwWKD1Pm/I+YeTyuCbjJ52BXq/JSYB3roR65QvlNZkA4+UxwGDMNu6M9UKoqXQAwnMEWzSWFtRGYNRisoUaIwkR0HROkdoAy60DDY0LtWQ5PUuqLVtS+08Rd
4cGi/0ZJ/ZUsYYdIJ5aPF0AvAi1kruYcTMVlr6QOMjkUDTevmc7ndl1bmy1hIR8BaydkQH2Kmuo/WKQJm14IyyfUDsgI6mbQnTwGKpiSpQGM3AWCpraF1AZ8FTpSxkC/CUsqaLgqIsKH3CGnEVisET9Uo8NCaPKUtQDm0D1UDLCFAImBQZbXShZ7XhhxCoykSMVjYsLY4+BtEpIkW8S7QqYLKpxbzptNGSULLJnc8IxEBUjtIH2iSswaap4fp1Dg8ORavEe0wSZ1ytFaqQmOWbhsoFCiLz2lCYxNIL/f5o0XDjZMZ4dIgdDHBJc2N/wv6xiEPPasdi3lKmSnTGQupBOhfFEAUFtih7Zua6+G4nK9CF8STt3L4jtl7o/9H1I/7K//0BvvtzX8UfftUXURrF3rLl964e8M+eu8H/8P6n+f7Pe4Afe9PDfOhgxjsv7/H190uR90qqdZeErjQT6JOe/+7fPsnb3/gaHv+rX0wbIr/83A1+OjvFAvzoFz/KXRtDvu5dHyABD5/a4O1veYQzg5LD2vFrz1/nf/3wi/3n/MzHrrBRWH7lL76B01XBUev5zct7/OQTL9PpvElMFjjKO3HBk7F8LV3pmBNlYynLIYONLUab21TDEaoo8THT5ckM4Lz+icKU6zT2ulUjWquh7xx3U7Mxg3avPPpGTXfd8r+7okERZY11hU0Ggvpwr5J0uNU686EDjGK2eye7T6sM0mamY5LERqlS9matQBvK4YBqPCKlwMHL17hebnBQT3ny40/x4tXrlLZgzzuuHd5kb3+XejbHH3l2im1ONy2TtmZsKkbRYJZLCmOpTp3izgt38+LVfSaTBYYh42GLMXV/zZxv8MHi2pppPcP7ljbAoomczBrI+/PB8YTaR5q64fjkGLdsCa3nYLLL0k/Y3DiFssjYF1r0EINHx9jXeldnNZfnNV9911n+8PrR2hKW+1Bo+Cdf+iA/9KEr/PILt6hD4lsfucS3vua86LXkWNg5/3b6jt0ossmF4pPTlu3Scu/GgFt5/6uM5g1nt3j7Wx7l5558me9931PsL5bcMSz5ugfv5s0XTvHOF3f7Z2loNBdHMPeh70YXWnP3sGTqAzeWDZaV01pMkcWsIabIP3zjq5mHwFe853nOb27wYw+dZve1d/MLz+0KuBxEKN37gA+Rdx8kfvegZhGgUPCfnRvwIw/u8Lb3TSBFmqbBO8cp5fmGOy/w1Mkys59jHp2To24dj4wLvvXhu3hsq6TMAOTvX93npz76InvLlenHpztqHz/htScPZ1yZLQE4Oyj58S95lC+78yxNiPzSM1f5u3/6DJ/4pMnxra+9h2977b2cHUhh/vj+hO//k6d44lAA4DdfOMXb3/ww92yIXtHlyYIfffwFfuvybv8eOz/3rz/hfQdWf8Jrn+3x0M5nA67J8Udf85b/15/z//fDPPtxdj/vbVy7tSAGzaVL91CWFW1qmZwEmsWCyngqY4kOZnUkUqBMKbpgSlhNQUW8E1F8W1jK0hKDImWDsk4XO2hkDE9rYpC1PRqPGA1HEn+D5CnWagalxiDGB1ujARfuOMuwFE3ZrZ0d7r3rTi7evMbx5JDKGoJv0Ckxm0548qmnOHeu4HMevsDWxllpcvsOBEuEFLBK9ufgPNPplOWyRg+2cNkt2RYlbZDGvDBYU268RWIUFl3K2sYq71XBRSbHJ1xua5xP6MEmF+8aILS8QKznuEa0rzEdLJHzQCkihCiQ6PXX+lUe5PomEsm3KB2yu2ck+RbnakJwKCPML4+MDGpkqqVr4qkEFtWbNVUhMAoBf3jAYneX7Yv3IhX0pz5ur03TbZIuqqurojRgC2sYVhXloKIoBqIpqSQf19GjYkFQihSEedwRCYR9p6UBl8QMBiTua2tRuYHX5Qo+M5t6ZnwGPGLqxgN1HqldvY+cfaKwlo1BxUBr6pho6poGL5p4SXEyqTk8nEDjSK7FJhiVFaUtGRRD2hRpli26ADCEkGibFh1C78bato6gwQyGxBRkpFVb2rbhZBIwXtjbIqNSUhR5aiOP8wlwJqOv2hqG4w2qwYBl64i5WdQxqtYBuXXArqtTYfX3el4ao+QnArgoGd9OmaWcQFNiBwXj0QaFHVCVI1JKnL94B/P5FK0S1ahClZZyOGA02sDaCpE40KRo8C7rinZAkQoSE5TNWviSP2sjuqyFsQKe+cRiOufqS1cZDCyL6TFH+4e0dYtvPUSoygqjLBENWW9cJZ3BuZKEkckdKyYeUYkUiWghJ6KLzKdzDg8OqKqSg4NbnBwf4ZzHZ0dnMXHIbPbu+ubmrM7X2liNMhC8EDTECVcALBBiQXePenZrfv5lOiPkBmPMa1Tiw8rU4fZ7DLJuUvd3rjF9rlE6HKN7D9e22aBNNJU7EL/LpXSnwQ09htAx8GIUEw7vV3WryRNGxggwaazBJTHtbL0nZVNNRQHJ5CRciZxdlwVmohNoKbJ9Is1a0rShANFZ9EkMIJc1OJEjwAWZd3eO1DYk58S1u2lJTU1aLkjLJSookjcsWs08DmgXjtovWPicl7tEWiYKVeByY7ev7jp8J0aR84rxlUHwtuPTAnRG676A6ToH/UgRwuISQ2QRkE9I4PIhoaMUHimFPC4gN7UTpY8pslwuaX1L2zhx+4hIwIly01LK2lJJCTuIDHL5QPARrWMfyLv6qW4dStfEIuBaJ7TOEImNpQyJKonuVTkYkpSMyMUEzjv6Qd4McBRlAcp8wgLuAlJIt4tlrncaUq50ReA1b586Yazq9s/cXVoPdh24l+fAdYFJNhdBokkQEf26NrQieu/AhTafI31xG7ukPxqhlhoJBrPW0/oZJxnAsYWM/gYNjffMm5bGgU9GFmJ2UXUZaU4oUtLCQJKnIa88TYoKFQ2tS6TU0sYGFT1ET2wbDDAeDrBAaaWLkXIhLnoh3RWWbkEIXvRckkZlzYHuT3cfvFbYmOfcs3Bo8C53KLUkMEFEMqMXVpLWSv7OV2o5X+IUMirm2izqG2gSss5D96dzLO3GFmQzSz3DjxxkLKppabWAhzFGQszOVEnGrbWS7ktTO1zIFG0ngr/DsmJQVPjWsYgCDI7HY3mgM1uiEwYNIbDqfKySoY4pp7PBRhdkdXcdY8QHCf7KIKLiWkBUnTShEBvsFLwI2GfA0EV5z/FoKCYwuiDFiGsbtO0Ce2eEEXCxBRJFKayktg1YFSmqgpTEfchqk88vEEKRGSzi3KeNRWsrEEpIhJBYNq0wXlWkc7SSbnrM2iIJYqRIAoibtoXCEJOAYccnx0zI3bQQ0TFApqYHHbP4tGGsSywKF5NYuteJsixwDcx8y8HkmGJQoYuSNmiUGVAME2VqgQGVKuUZipLl1U0jJhLOo40km845nF/TLNPZij6bLYjWQwcU5fu6FotQig8fTPmWP/xoDk7kDUyerV965hr/1zPXJEYqeW5/5PHn84aR+Lxf/eM+MVg9ffK1sDItwXsOGs+3vucp0fzLAa4fR1eK73nfU/3XAO96eZ/f+dX39KLYq1OWL3xK/PAHn+eHH38REY+Vn7XW5
M6grHNjyl5XygVhIFgtYyN0gL6yqGJAOdqgHG2iikqMC3rKvmBbKsloPIp+PWdKgIDMSUa9k0qyR5HZi2Z13l187l7pAO+1X10KL7LuTdfByJ09leOlgn7Eu9tfOsp7N76ulMQ+0eAU/axufD+F0Ivd6qTwbaBtWjEPUgZcy1Mf/DBPf/CjXJkccnnvBvVsDqbipnN4nTB2E6Pg5eevoRfv59yr7uKknXHpjnNc2jnDMGhG1QZmtMWFOy6xvfUSs/k+MSi8i9RNTYgeYSw7QvK0bU3tWyn4yoqBHVONt6AVXcHpYkYdGozRjLcHqM2S+WyGGQViURPLBZjcuY0KE1uMBqMNpV3pGf3A41cxdFrvAAAgAElEQVT42Tc/wN7S8X8+c5ObteNsVfANrz7L9UVDqTXHzlPHxGu2hnzzq8+SErc1WvrnK3cfun3FWIO1sOs1796d8gOP3cvf+ch1vNacrgq+6w33884Xd/mhD75AiIEC2Fu2/NiHX6TUiq999Xm+43Pu495NYZH+wbV9vvv9T5NiotKaP/naL+E3XrjJG85u8djZLb7zvU/xL17c7Z/fGBKXhgVfen6bt77rQ5yognbR8HPP7fJtD17kZ5+5TtPU2LIQHVUfcD5yJUBZFBRaCuumddxRGYx3LLxn7lqauuYnvvRhfvRjV/nmBy9ii5LRxgZKa47yNXnTqRE/9frz/NLVCd/zB8J4O7sx5hsfuMgXXTzLv7i8t2KNIM9SiqIhu2IzAl6AOKu1aNHCSpydxM/9hdezCIHX/vIfcaqy/MbbvoCjxvETGch/5fG7V27x68/d4LBxFFrxg298iF//ii/gkXf8IQDPHs/5xt99nJdnomv3lgun+I23fQFvfed7efZ4TgdjrB9dWKpD+sTX1dp3qFfm0fKvKzE/g6orHPL4ilZYWzAcjinKEqXgyrf9z9y6NUOpDY4OWz7wgY8wm885mt5i/+gq8+aY48kBdd2gUPzJmy4B8F++cMS2KpnsH3J8fEK0mmA1U1+zdC2VsQyiAFU+eLyGZJU49SrH6PQm9z/8EDt2xNPv/ygvXL6OG5QskyFSUgRNmTSN8ix0y/ao5IELF7lw4U5SNWJet3ifGAyGbG1tcerMKcYbYx555CEefM2DPPbzPwjAySJxNE8cHkwwo4gdRCbLJfv7S5rZHBUCA6NJruXqzSPmTSCZAqVkBA4r9UbUKY/gFQwGFcEpfN3m6yy6vVpLmIg5Trq2xRWWQhm8E9MZaxSDylKoiKvnzKeKjY0NtkYjxsMKXRaYKjIoCqrCsjEYcO7UDikNGIwKVFmxcfoUo40txlvbkNVkk7XCishFoVaa4D2L+YK2aRHjJ3n+BOxQNG2L81n7tRsvl10CYyxChlKQlNQ+PtHULYfLOa1L2NE2ZVkxHljGcYmvF/hWyyhWt6rT2hrOkglkhoaO4nwq2ieGEIPIjMQjxmPLYDgW0KmpmS/mNE0tjeWcw4a02uNMBx6gxMgtRGg9w0IxTtDuHzC7eZPtC/fQd64+05ExzxAC3ovWsO51zgRUExOmRKGgUIkUvIwbRy9sFa0JRiRP6PbkkMBoTCKDOA0puLxnJakNu1FkI6I6ovEo5yQjaLrPz1Dduaas1ZurxmwGQkxYYym1wSlDoQz10hFUoigLyb+VwacVO3BUjRnYgkExQMfIYt6K+2pR0Lqa5bJBy8ysNHRdS7KKssurVMLkZ2e5XGJDwLUN89kcWyQGw4KyHNGxzbS1oq2Zm/6Nc7gYWbYOFyLGFqIjm0AbjfIrgA46JhVrr6m+xuhAvO77ADGYU6tr572A5bYwDDfGlMWQshhJc21QsbmzLcZfCpK2VMMBVTkUMD6JiV1CZR3wXPspTcg/s5o4Wv1/n2szEngXWMwW3Lh6A/AsJsdM9vY4PDniZDknhURVFDgXerMKucyKFLUYHXTj0VrjY2BoDMYoYvSIOoBnPpmze3MPOxpy69Yhx8cT0ZtO4ljbgU8dsHb7gyJ6ecoIyy3lWtlYi0aYjyozEXVn0JjBbJkz6/CIcFvHvSf/ZLxBqZUszQq7kHpF7qfUmCtgNq2NtSbRxc+gnDYyLVloK3u+gk7tujMz0Ul0DQFSEJmk2I2nI5iTzXqJWishZ4FMVSQkNwcEulpJp/SMvRwNo4KCUv7hDW46oTlSjGxBWZWYoEhBk1rwy5owdySfsEFhvCYuIdaBtPTERYNtHGoZYB7RSbOMnmbuaIeGEBQtlpZE0pYUHak2lKXN4O06hU5q0zzndhuu9MmOT28SobXYe6s8zhZzAaxUT8WtvadsnQS1lEB1lskBq0XLJbyiEBGgL+Gcw+eCNRLlAegADZ9wXoEKaOcyCpvBkez6KfPOGe1nVdQ0TSvjhD6QYsAH0Islc9eysagx5QAfE8u6xfnscNJtOsZQVqX87rZA9bTM7vquWE0dyrxO+V0XDI9RujIms+x6NYQU18YVU9/t6ebD5RppOe+8UGP0pBRyB0eApRBT3jzVWqNqbWwUQwqgbJJOe4rUPoCXBAZkfNJYSzI6MzYSESP6bB23RK+x19LtCYHqOjJ9galofJAxIivdvrZesogBjWVzNGB7NKIsCogr+nSIeXw6daKUkhSIK+OqkFKrT++ptSuKczeKabBarrbJhbmK0oEzqpCCPeWx32z/HBXy++oCY2VEeV6LaYYPHu9iD7B1JxDj7exHoeFbCk0Wh1SkzHSITjpKAtAFlLJy/1ymwaYGAlRoDIYiQrNc0moR0FQKBqORrI0YcU6MBcR4ZeV+5Vx2v+rWUUpiCGE8sbDSVcrPmA8eXSh0VBls9yQn9tSxqtCtgxhyQpPZQFqu78AUmFLjfFyN2OZzi0nuQwiKthWNuxQTzgeaRkDXsiz7UYD8MKBSwqiAVgIqDzo77JwgBWXyugay4UPSK20UGb/OG4VR4D06BQFkjQT+slsvIVCUlWwYEcjgJ9ERY8K5hB8WKKVxnc1oUqIvkzIDNSUmSzFSKYdjhqMt2mCIqWFYaXTQqCisQZRisVyKfki+HtYYuTa58RAzCE5ez7fpyfTlpbotD18/FCu8HEVv1ia/b75Ka4lm515Nfq7IAL+xsi0MqgGDoGlS3ce1jG5JU6H/oNUZdF3nVVKgVshV/jVSbkCo3KnriqzOpa3rYJgcf6NzZDXSnExklqFSdG7eypRgK5KpiLpgxWRWXdsQgQa65HKVnKisP9H9GvLxWcpBKQHt1q9v9ysiDaqEJA4K1bNB0BLr9RrzTk5Z/q3zdRD9GJMB1BwzAEISN1gMKpn+xmoSNoIOMj5CANog0h1VgfLgl47ZfMrulZc5OjrhoF7QakWhC04OZpjBJmfvOs/pM2c4LHfZv3nAR597gdPTY9CeYRs45TXLOhC2A2q4hbYFVTEkxuyCGbMZjw+oGCmskTZcbiwlkwgmgDEs4opxVSdP3To2xmO2tjcxiPvm95wv+Ny77/okq3p427/e+rj8/dKtfb7j3Uu+6aG7+KOvfD2F0hw2Le+9ecS/u7bHj3/4eX7wDffwD77w1Tx9NOPdV2/xtnvv4H7d
QAmWxAUbebDwn/CJdymHS549XfA3P3iVH/6cC7z3Lz3CV/zmCTtlwb2bQ777/U8RY+DSoKDSmqvzJSQYaMWG1bz9g8/ywmTBuCj437/0tfytz7mP/+Xx5zkzFE2mv3b/Rf7H9z3JsydzSm3YKbMLGQnfLnn4nBhUvDRdMhgkVNJ8rI3c84Z7GKjIfD6XIs5YAaMjGGN53dkNfuqxi4yzc+v/8cwu+9M5ikjbNnzDfWeZO89vXz3kmx+82LOp10PK973mDL95fcKPPH2TkzyOurts+JknrnCuNPy3j9zJN73mLi6OKxY+8q6r+3z/v3uOuRO2+Af/8y/inz57gy85v81j57b5jnd/jH915RYAr9kes11azg1K/sJdZ/ni33gPE+c5aR0/8eEX+N7H7udnPvpS7xYfUsLl/ezF6fK2+xRT4tJ4wKnKsvSROgS2S8vrz2ySEpyuCjTw8M4GL+effXBnjFaK/WXL3rLFKHhge8z3ff6DvO3eO/jKf/ln/MneCUop/uKdZ/j2193Lo6fGGKV4frLg55+4wscOJyx85Hrt2Nk5LWNzCv7qndv89btOcbayXF06furFAz4yEzkItOZVlza5dHpA0pb5dM7d5+6ljYm6eRWTwwc4mR6zd2uP3b1djqcndKOHh3VL6z2NjyzITP2YqJOi1YU0HgNYZUlaGlHei3GIMprZPPL889cYOMXh3jE+SMEq+VXXlI4o3eUm8nXSiWQ1xWiE9kmmUYoSTEkT4HjWsHc07e/HwaRh/6Th8rUDTprnGe9ssXBLlssFuJZSRUbGEOqak9mSug0kJUCcMomkAirI/i2jrhFjNASJ+6RcevbxXK32txgJTtgi0QsZwCgx0Sk0JN+ymEwIVcl4NGRzY5PUBI5u3eKZZ55md/cmSsFwKNpxGxsjys0txmfPsnPmDsabW6ALCJ3zO/m+Cxjk25bZbMpsNmPROvRwifXZHTOGPCqazz87u62P14PUDB3bO/iEd8Ie3tvdw+snaRrH+dMbXBoFmqNjvBkibiUx50UrHb/emTBIXmVLqTvwjqRL2mXLSy9doW1ucunuO7hwcYBJgWY5ZzKb0ngnWJ5OPVglzT8BETsmf4wJfEDHRJkMg6QpQsA6L2CgXttYP8Mhj4leNU1y6iA5RSQ5l/WIEbmTmLIGctaT6kf/Vw00oc1HSD6/blHRSeMeAQ5S6pqeK8F4lcX0yRJEfYMun2ufWqQMrHbsp64+VAqjoNCGOrb44EVqqaoolCW2EJKQF6yyotPqI8SExhCSIfgkoFpIRJ8d52N+rpXO7q7clr8VhaUsS+JySb1sKHxEq4QbthhT5CaqFnH+ENEGJtMpIUZUUWCrDYpqSFFVonn6Kerb24+V7M76/+/HMTPCJR+90lkTEkyJKUusLbNGuiZEg+ilaXxUFMWYohyIJE4GGX2MvTmLLSxKFyjfQhL2oNTJWbM7pVwTyfnU8yWT6QEnR/uE0NAsptQnJ8wWM46jYBK2CWICWVhUWZHSgJgk9w6JnnHpYwa7TNYrRaFjxDWOerpk9+YhDCpu3LjF/uFEasoQ82VRdBN7qbuBdP+QKZHM7ZdcD2kepiRj2qojBuX3WDdpWAGlcq9Trt1TB0yT+phKlEaByhMl8kdOpaRYadt16yAKSJd8zAO5dMMgEKR5HDOY5oPUczpAimblppxzfNGBzaBfDJBsbr4YrNGE5HLerbth/vwMGjptffLn3451CSvWpAqSJbnTuPmUJi6wxgtekhQqlLhZy3KSSG1kiKH0mrjUhJmiCAPSLKJrhaoVamFQIeKdYzp3zLYcM1vQYGl1pE0JHyRfU1r2h3T7ifVAKhFM8eltID79iKuSRaA7nTalJLBpsa5dtI0AZS5QWI3J2j0CkqQ8oS0shPVF08/zxxUrqQtwfUCOEHKPqTsX3c9ZymvOeym+VR4dzMV32zph8eQlHxNE71g4z8miBm0JucMVQ6TbgZSS0RZrLTEkBiGi1AqlXWfJdWKJr5zTXy9kU5CHWOcAngeUujJ2rQPdgXOCbndOJ8s2yKy+MSSyY2UKsoBt9wDmIexOd6GvJGXEVaPzA7MSOY8IGGeUxitxHksZNFRKYXTRi02KHfDq/CAHIqVuo712763ILopaflejNM45ZnVDVVSMBgNKWwoz0pr+dnZBNMXYJzNtBjbWA8R6h6ajwrO2lkDWnzUaFSEGj3MtUWsKW3ZyUPJZISLj5qLtJLmSdOuiEhdal5KwXmPnxJIfPK0JTkafu/MzxlLYIq8hg3deNAySAE4Wm++vQhkpulMpLMjWObwCrYUVgZbZ+5iT1PVnvKOPR0QA3PmYRyiFJRdzwdaBtaLXQn+OELOGVxB9FKWFXRYSyeRzzsFfuh8RlYKEQ5P12qylLAcs60YYZ0UhI8VI4l8kTWErvJb1FzrtwCh6fYUZYzJbSqY2Iqgo3UBjpUOkhZ8ruiIqa1tk8V+laLzvdQ66zQcltOrCGumyaoMykrAWVsuIdoz4JICtTgqTdA/QpSCOxgmNTwI4tyGiXUDXAasjVosmpiaxbBu0Now3A8kWLCYzFnWNwpK8xigxlClzriJjBdJZWTaN3HcfMgsmO1xnUfMugq2D0imttLLkBXoMquPBKdWBs/SgdgeM9biW6uzG6d+90wyx1vK/ffwai6R67cGYXdM6R6IUYz9S322aPbjePSd0zBKV129OSPpzzpGw22fWGDkgY9QpKRk7gDymovpNvfushCRjShdSoKKzRmdev3ThMd52LSXpkXUpefna+eVYLb9TfwF7gE7Ovovk3UurZKlL4NcGC3rAdHUfM+tIiEf9exqd2a9RCsEQQfejIRETRVg9+hYdFRUarSw/fl/Jm9NluJE/ZEfBzg5Z7Gp1NLvw/C48n/+9aWHzVD5PA5evyJ9XHPd3X5xch5OnPuH/f8LRmW36g/6lHzp5Rr44opPDAuCt//KzMyd4+nh229e/uTa+uH68b/eIf/ChF2577e/8yeqcH/vVd3/Sn3vrO9/LC5OF6Pww4SQpvvHG1T7ejwvDW9/5Xp46muFC5IrzK2CXyHEDv/Dky7e951/+7T/l9KAghMCNRY2PiX/67DV+88reqrDrvzsRQ8NIB6ZOwPy2aSEkjrJz8kgrbi4X1K4RgDoqYa8rxb85OeENL+9ydmPAf3pph5enS9q2ASKXhgXf+bp7+Su/8yExsAmRtm2ZzRb9Or5rY8B945K//8wBVbHSpdu0hruGBYet57lpzX/9xx/nhcmS1+2M+MkveYTvfN29/L0PPJe1quCbX3OJb3jXB/jIwZSzg5IHt2UxvDBZ8MJkwVe96jyT1nFrbVz2w/sT7t0csVkYDhvRvauyfmDHwHvT+R1+5S99PlsZ0PyZj15m4SNNiNw/ls/47b/8RkaFodCaP9094l9f2cNF+Q2fPp7zyKkNZk5Mm5oQeeTUBmczcNo1+ABODUp+4alr/PGNIzZN4usfuMSPffEjvPmd70OlxD2jkqUPRKX48vObfNt9Z/jvP3yVy0vHV1/c4u2PXuAb/+wFdhuP0oajK89JsWQ
9KS45M3YEYzFqSDx1ihDO0Pp7mc2nzNoF/M67ANg5fyfN7hF1qml00esqNWhaY4k+iNwGmjYFXAJVGGlSJYdrEns3j9BLD3UrEilZA1snGTuT4icis4wCrEQlgKuyAnSiNdqW6KLCh8h82TJfc1c5mbfcOprx0vU99qeBex94NclCNIUUcjEQVIdDyCQLQVOWFUoHWh/6wjEScb6lbjSp9eIO7z1GSRXQjzAplccNA845KXpDFLAGiN5LUyM/v23T4rMbfHN8wnNPPMEHH3+cy/t7mEFBO5/gmhmTeYmdzhk2ngeOL9G4iK00aNMDDt3+YJLKWqkyttaNnYk6Q6fjlFkuqtsv87gfEvVXiI8iBnHmds6jU2I6nTFrr7BY1OydGjPd0ZSz64xPX8gAHXQ0+27vkj0/gQsoAqYsiCEyPzrBxSWH08jTTz/NfHENR40dVgwSHOwfcDiZiLSa7bvbqxxC6XyqCjB53xWmfWlE11nFhI6x3yc/66OvN1Wfy6xlF6jkwefxzmhFAiibmcn4qjDekg/oEEVXqtuzCXgixioskSITO5wWwEMp0bENKYvOawGKY86FTTa06xppMulgZNJBK7QV/TmdjVus0iRrcc5hVaJFcELnRSOxaVpC3UoDTzRNcHWLi5HQepxKuOhRRWagGSN5QWZwed25aHoZO0akGcbDETvDDaYJTg72ULRURYm1FmsthbFYY2idykw2xOyNRFmI62U3AmztSj9dllNH/uDTvia3MjOwYge2J1Cpe2s6zbSYa5HYAUZaQCWdATCTCoytsEUl1z1muZIUSEpqHmNWtU2MayB16sC5mHNhjUqKer7k1tEuMTX4UOPrGWGxYOkb5lqeycLLwknDioJEDOM+N4wpaxrnuokcN1USw0MdE7HxLCcLdm8e4YqCa9f22ds/ZjpvaJynjZ10iZBsuusgZ67piCMpQsqGMlJ3mlWNr7OmcpYJ6urSTjuOnOenrI0cMgBHvtZS7+bJuLgyeVjHMYxWJHkYMktSGtoqwwGmz5mTGJjERGgdWimCSjjEJ8AkQyzFcEHANJkwMVqA7FziYbQw7o2WyZuOqNChP5H+l6dj0HUpuc5IYY8ZIUZkKhqqYpvN4RmKOMCoBnyBUg6ip/RAk4AWG5U8wyqiiZTegytkxCzazLpraRvPskkczJacjGCRPDXQqg6Ryc+EZkVgUquGEl0tlhsCn+r4tABd6A0RoswI6cxY0ZqoNUvX4r3DNS02X2htOlZZZGNUyevWZAvnDjLLQVdJENT6dgAPEjGJXGrnmrZemAowrgSEUbdTNFMOkNL8SNnR1WREV0viHRw+RFx2DgnRZ5ZPEKAhJ2nd1INeayHFmFa6Quuod1dudg+ZEkesREJlurVoIK11ANVqNLNjVsSUXRwTGFGNF/ttnbA5yMl+EzKCHvvP77toWR+i0ImUKaQt3efkIlitzkcQ3ZRZdfRsKKJQh3UOBioDsErn7oeSArJzkkm5+hcmomKxlN+1dQFXNzjj5Z6aJGOoXfDvzonVvY4gAB2sNu2u9bJ2dPc/hpV2RchUd51NTujYjBFc3vyJEgiiVoJ4d+Bv12FQEVOV6CTAM3qli2GMAJiF99lERNaYVmviqEpTJJPvl873Nfbstq4wQ1sZF1vKaERhK7S2uNSiBpBsHgO1tl87612qjg22DtAFvxrFjXlEXajDsknHKKyXjsYckzwrhEjotnslvRFjNFbJvddROt7aGshaj03rV89uXl/WWqpBKZqTRtG0Ptudiyut0hrR5s2ARkyIY213/2W8vW1aYnLiiKkUwmbNAB3ZETpvbCF27EQFPneXgiN4B0YTjKH2Dk8CpfHOM59FCALwCZtL4XK3W2nDYtHK9Q4egkNFh1WR0mgKozBawE+tNfWyJmFYNg21a0lYmlY0GUfjEaPtTQaDAcpqBrMZMUXqeS3AoxJ3ZtEr7IAy22s3q/6/XRND9XpGr/q1P2VYWUajEVVpRT/LyPvhxUG4jTHrbiZclJFoqw0DW4l+TooYpJszGA3Z3N7gY6MN3nTpIYILHB4ekZLExrpe4lwrOnrIXpCUGJy4EITNA2gMlZYRj8IorElAoPWO2nuaEFj4hFeKajRiZ+cUw0GBW85x9SInaIam9YTgiMGjVaTQ2X06P0siPJxkQ8+jkUmpjLl1AF0PmeULmgHQHNeF9SbXWCkj4zESFCV+pxUAmdIq6V9vKHWxv/v+bk+i++QVMkcH9umc+AvzVF43SlgvCUP0eSQprliTnRGOJub7JlIDKSX+9u+/n/9Qj3Xg7f+L4+bf+HK+8Nfe3Y8/fjaH7pPBDiaXqQGQMeteW/0Vx2ZhuTCqqHJjReWfi7mYA3hpupB/K9b2cLmPPngmtWOzsDJdkBkUG9vCJpw6L6zuKLq9KklC3wHGbQvLZsHPT+Z85Ks+j2eOZzxzMuPH3vgAP/HEFW4sWlmx/X60EqXuHHInyVKaVXp4vrKcuMBu7fm960fZFEbxwaM5P//UNb7h/gu8Y6Oi1IpCK/75Czf56IGwq84OSyYZZNdKdPnG1nDceCbOs1Na7hiWXBzJCPOrtkYc3TohIo2xh0+N2V86NkvDwgfe8CvvJqXEdz92Px87mOJjpNSKzdLy1NGMh97xB5Dgq199ntef2erBPRDdu6UPNDHiYmKnKvifvuBBvuXffIQ/+Oo3rQoU4FefvyGPakpc2B7yD5+4wt945G4+9/Qm73p5n0d2RtxdwJ/tT3jrI3fwzy/v8b6XdtFK8bO3Dvn6S9u8dbPip6/tA5qf+ke/QdKewALPjJi8GNyoMaQxZbXJaLzJcFRSjgremM/5C9/4RaSjOVdevMxzL73I8dERbV3TxkCdAkVMVFGRdKJJkTYFTFIoVWB1BcnjG4dqA6UuUMYL49IayYsShAw4qdSxcaTYM4UlakvrvRS/iCtg7Vpm8wXOrR6CqDVmUDLa3kRbSzkeYgeWlDzKO6xvKWNA+ZqyshTWkFyiyGMObc5DZM+NtLVjGZKAiGH1OR0puWNriy5SxHsn+6fQs6FrMmfnibppOTo+YXf3FvfdsySEyO6tAyazJT6JidViWeObhjo4CIlSWY7nNY2PjKssxN03xMgxwTObT5lMTmiaWljVKTtXKpXz27VWWN4zulwuqdUO1bHzfAi4VgwhvI+0swWtu8HxXqTeVpw1M+4en6abGqLbo9bikAqROJNxVaVGeDfnpWef4+i45mBpeObpZwgcUY4NwRjGKA4uX2b34IA2yjQNKUh8kaeXDKUIOU8Jk0gp0XrNck9itBDjK87mMxxrwGJXA6Z+f5eYYo3CqoSJnVySSMJ0zciYZCpE3E2RmOil6ZtAnEFTotSK0oiTZK3FhDBlvT55n5w/A8qanAuv1adKGr5Kr8CJ7gqlKKYfAUUKDp08VaFJhSEajfeO5XKJr5eY4KXhTcIkEaMPIeJbh8NTB4+JCp0sutCUVswRjAm4vN5LBOjoKtTCWDbHY3TTsjXepG0mlNpSaENhLMPBkI3xBq21MvKcPIXVlEZjB0OCUrTe04ZA/ISbdP
vRATqf6jar/KCopPuaK/WuoUrGp2OnwRZyzicoTU8OiQXamLxf0YNY/w9rbx5sW37V931+w977nHPvffPr8b3ulrrVg8ZukLAFWIjBcQwuJzHYgbiIHYtUigAxZkicYAbbqSRExMYBO3ERQQBjI5dNogJjLMwkMYRuCSGp1ep5eN1vHu67955p79+UP9b67XNfS9WpcmV33X7d7717zzl7/37rt9Z3fdf3K2CROrhmMaYSt1gxAKkO5UMfFaTS18WQYma9WDGEOf2wEvH/vmfIgaFRIkYdZrEJ4w0pq9a1fjLRS0flrXTCLCWZLChALyOQ164c0HvHpSt77O4tWa4TQ8oE3a+0KDhX9euNjtKaMa4ZV/EBjSH6vGtzuj6LUU/Omoq76We2fLFkZWQ46g88TC6qezCpQ2sxeWQMGwXmsGryV4xMoBVE31In0rKeKbloLpUtllxtV0QjVMkOwpGQPe6s0QhTc11ZS7lUmL1B3oHRNWzHpyI7eTNCKiOgaxgukpavkNe7DHFOCQdYGzGpF3255QozBHIolD5ilmvMqieuB1gFbBLJr3Vcsxh6bqTIvl1zM3v2c2Cel6yKJ1oPXuJANlX6o9xCQMhFWM2pPr03CJNvCNDFlLlrt28AACAASURBVMRZ3KI+cQLaJQVQjBG2zJAjIUiRIs/NYG1mOptijcXZRhhDCuiUcTHqcrCHmBb12DOKj5Y8buLxc+jiYUQkN5sG48ZCP5RBOnVZWGIJ2UAC2BQG7VCH0JOSzJt3XYfXufX1EMcC+ot1El5/bcA5GXnIJVC19zZvfqzUhLmlri/GySeX0d1CTmCZkYJQ5I0teG9H15WUo1iFHwK1NmFaNlGOaxVrlDuSbBVDlefXNs14INqRdi+byQJDHCTJcRo06md2Ushab3QUxctzyLL4ShKgISq6X8ZtKWh+SnLvvVKQjcnabRSgxBmDAy1U6yM3X3DvK7gJiKmB3bxHZyypCMPS2WrUcSu41TQeTA36FehFD2dhtxjjsb4aelgZCXZe9d+KsHqcw4CssRhVPzHSWKeuUR4jyueYETjTJYwhGUOWFrWwU43o8tkC3lma1kvhFoMGZR0VPzTeKweS/mwtBouuO2MrsLVheuachU1nK7gt772kxFKTlRj18G7EsYySxF0RS7Ye06zoQ6SbbhFCgATr9Yr1Yk5YLzFmV0fYEyFF6QrrvV73c3I94JMcRJIRGC0CLAl18E2bEeYNSCXAeSFBiVAy1hYxojCiizlk0bXLzhAwrMJA8eLSm1MR9kMo6sSlAJ0moNZ5AehLQegGEU9m2ljspKVpZVR61rY0TUPOhZgGZs4wcR2hWGKMLPseFzzFSJfVeotVUfIQAk3bMZlMKBnikFTrUWJhHkO4rnF0/MNuqN1NIxp9hwuAlAvWiHi1tVYOUyOMMW+oDb8RoBpHXSkMw8CNGze4du2G7EXrSap35pxlGHpSjDgve1doGCIE27hGxsidx7lGtCZywZCElYGTCt0YjPUMw5JmOmP7yFHarhMgLie8NcJ4KNDnQAwrUuxFdcJUn7uqPao6pdbincVZKFbiL0rKrM2TmkNo1vgFcbs2CowDcsaZQ4k3lRknQaIo1lK7YSlvAL26SqVoKvpStYLSeFzQ2OTk01SdHedwqrESrCSF2RQdoZUKwiigIk5XVoRsD2loXfVTSo70fSDEjSlR7dJWnY/Ge6wVPaIYIrU1671nNptydHtG1wpjZ7nqeeJrHuH9H/00r83XY2G8ONjn/4/rsKHAo6eO8P2P3c+fuP04rRODgl8/d5Uf//RLXF79uzuA9rGMr/PFDAr+9uPPcnnZs4w1zoFovzr+80fO8IGH7+b0RJhWT9444Pt//6nRoODOWcePfcVbeefJI5zdmfI9v/s5fvrpVylFXuu2aXtLpzSXcst5NGYFBYaY+PS1PY62njPTllfn8nwfOnqCcwcr9tTNtiK/RUe06x5OOZFCxCtYdu9Wx7M393nfHcd5x/Ft/uY776MAR1rHoye3ef+dR/nGjz0DbBxyj5bA/EAAttZaWmu4sJQ/e9+dJ/i+d97LA0emtNo8u9EPnFsMrJNo7FxbSdPWGkOnzxAEcFulzP4QRxZcKoVXDlZMlVFfgNtnHReX/ViQnZo2vLi3ZJU2LNhfeuEiH/mG9/CZ6/tcWKw1F810zhJy4SMvXeY/e/ge/urDZ/kZZTUe7xpZQ1rsffDL38qHnjrHM7tzvf9alHyR9fPQsRknJ40+c/kbD263fPz8IA3CmFks1ppHyNn8wM6U/bU0gn/pD57FmERijWsCOUe8bwiDxdstDMKeb1pLO3V84GEBSy++9ip3dke44/QpulnHerVib2+Pl29c4fzBdcxyIAcBp9RfiWJklNI1LXHImCK5V7GivWMKeNOMzMJihJFgS8JmccizViYCYrFEBORyxko+XMSNNYbDI+KJo8d3uM+c4frNBX1Y0OwcIQ4DrS+0ztIWaJlwZHtK6y2xiEYwJJEcSUUkNpLEq2gS3shZVSUxko4H1TiM1hQiEyHx1vrKWhNH6CqvsZfmnLtwkQf355zsTrFz7CR33/Mm4sEul65fZR0y1nppuPoO205x3QTjG9C8qP4CsudiCuzt3WR39wbL1YLgGkIYhKGVZMw1ZpFHGfkd2hAqbCaKRrkUOUqEQBB6ZEzNMJ8v2VvvkW8OxFng5Jk3k1PeKDHJI9J6Q0bN+t09Ll+/SplPCGnB008+xfmLN1iyw4XLF3Htguac5aAf2DaGg1fOce3GDVKR+7ZpBtW3JjVEApIxVK9cAwzWUDqPmUzIjafWaG90FT0TTc1VFQArKFkhZ5y3YyzzRqoNFccQwmed3FBwzKQsTCbVooOiqXXBloynSB6ktZ1oGZpxHWXVo00ZKH4UvK/6XZhDtZDZ5NQmCztuWMPKBuJ6TohrcI6m6QimEPuBGHswhaZ1+CxeoA6dUiNDjjJGmWQv5iR6hbFIY7KkRC24HFanOSArO49caJuWremMHJZY/cc7j/MdzkluamxSyDVJftq2JONZxkzbTQ595luj4WFiirCCDte3m8sWaZhKhiOvZBB9VHOorq78l9rgt5rbGyugSAVCa21TUQIn7loYo4w68jghlXKgD4mcm41emzkEFGq8ziFCyriiZBQKxaIGmYUh9qToR4kpq7HIaKPYWplwqLpsTrlMJmTCKnDlyj4r33DtxorFMhKSISHEDawmkQZKqtMtWn0XJzBTzro25bXFgVdAq9pIGicU5cZxS6Jx65PTr1u1Ag8/S5D3Lw0/NbxggyGgMbiSeJwRpuOYE7PZw1bzEecsvljVriwq+6KogKm3YJPXGv3/kmV03qqEV06JQCIj8lsymaj59ggUSHEjTzoDK0q6Selfo6zOYZfXoN+FYR9jA4SVOLYG/VpHzLqH5QBDhGFQ3ZaGUAy7ccWVMHA1wTUKN03HnMKKnkBDth14qadDkXrm8L0dJXwOPY03ut4QoMsqYJiyEbF8rGgqKEjnrGUTuxOp6I02DpzVgCIbIgWlsioiKwlSxumo1ThSqa9dD
MooSaP7aSm1eaEsJR3DOyz6LAhy3ewa5IckumgZjBNHllSEbhvVkCLGQE6SUGYjgF4/BLDuCwC6uhhvYf2ZTffHWgfOUKIBqrOJbgyLUlDlUCrJ3PrQisMW6WQ2riG6rI6t4tTqjBp3jCi6Pqu8OeCNMXJApSjJjXdkBS3EQl3HwxqngJxTSqnBG4u3QtFfW6RIdzL+UDQIOmf09zbIsDFiShFjIhbR/TPOQ3E4jxxGzmF8h7WGRpF4U4OLBm2sHUXlnRaV43rUAC1nuiYheXMyVIhiBKFKphSLrQlQZTHUrZGqe+YGnMzoesuRdYyH3p/YP9tcsDFjFKCrlOJcZMw0hEBULR7bKP3fiyNS1jl7cb3R8bYCMQktmOKwdo0phpAGZqZj6lsBQzjEVJSHvGHJ5cqsE5D0MO181O3LammNah4CrmlklIUEScxShhDIQ2S9XrNerfDe0jZOmVbC1IupEIohG3FHPn37HQJKmsxytWZvd5ccet3XupdUR8xNWoxxhLiWEVWlcdeTRmK6MiqtVco9uoeU2Ows3hmaVsZqvYIa3hWsLXgrul7OtKK1Y2ARI2UNxVmatiNnWM+XmjAXqNpfBT0xlDmqgIi1ntbC9qzjxJEtjmxNmU1atmcTWu/p+15G0TTkrZMFv6K/ucDqyNRytSbmyHK1Ggt0a8W1yPuoenwIg+twYmTqiPNm3LteXdeNXTOJr6LdmKOeKzkzpEy2hlycKmsbZcuKQHKJgZISxlt802CsNmKQkVfpmglLLUUxDWnbduz4oaYnxjay500hmYRrGmE0kGgaeY+xwJAy6xgpxjLb2mJre1vArVLY3tqiYUK/WsqoTEnk2EPqcc6rLpswXK0TFydjwSvDWM4HraI0HmwOwtdnkTVOyP95TTzqWisKiFVAbWOKoXiIZpbSPFLNzHoWqT7MuKZGkK4GbPnVIk2OrFWbM3LeZCvZTvLyc1MtYpDxrVJqtzgTnAhk1yuVxCPbLd/+pW/m3aeO0FrDlfXAvz1/nZ/43Mtc7wWM89bRtO0oKZBC1jguGlUYAYMteayzXI33BU60nr/1nnfxVXedZB0zv/Dcef7OJ17Q7uWtd7tTkdRoZdTzQ+9/O3/hzXfwZ//VE/w/l/eow8Bff+9t/MzXPsq/eP4Cf/U3/pjP7x7greVbHzrDV9x5gl89JyOtyW3GLzGbvGF8yvovb0R7Vd68wziPMfBTX/Mu5iHytg9/nONdw7/8M48xD5Gf/OzLNFoMAgxY7rjjDp5xM/78v3qcG31g6j3f96438Ut/9j284xd/B2dg6i2fuLrHzzx9nn/0vrdRm4VTbzit4FzXdjy0I1MF1jp804xwbr2sd9BHXpmv+e0L1/nhL32A/+r3nubEpOU73nqW//PZ8xssDqjMOYzhrzx0F//m1WucXy052Xp+4LE3sU6ZT1zZhVJ49//1B3p/pOD/qfc9whPXFvzj5y4J05jqkNvz752e8tGXRLetMt2HAo1z/Oz738bf/aMX+YXnLrIIkQ88fDff+bZ76HMtXmE/RAXO5Xurjlxd/k/eOOBY13DP9pRn1cDhnSePcO5gxSsHK050DRfZgLHX14FVOjwwLmG6tZb7j864vOxJWZ53vaUpy3t/QMdrnRaDN/uIM/AN997GfUemfNfHP0samyCb9ziuJiOMyZ/66nfyD598hZcPlpzZErbfjjbPfv216/zIu+/nX7x0maduzPnWh+7mzNaEV+a9mI2FgN+G2GdMadjePiks5yJ6NieOH8caWCwOGPo1cRWA0wD88e//LufshOl0ys7J45w+dYo7Tx5lemIKFwp52WNuLFitVxRfaFqPszrOZuVsMllys5C1uYpR6Zra2Jbc15WCNQ0GD8UKi9dUFr4wcVrrmTQtnRcTqXrNF3sUCrOtht2DwHx1gzZabt68xsQaorNsGcO2s0w6hykBUiANAliYJOOipogWp6lC+odiG7XZUPKoCaV1oOr6ZExmFPyPCWI2xCxs9hALr124zIuvvIq9vaWdbnPH2XtZ7W5zZX+fUFQrOoEogxmydRQjsJC4nTLuW3GVhRRlwqLrWnzb0bYtPgsbNqgDZ8oql6DgkBQojMdC/aNR57RASgWMI6bCuh/oF0ua1QGTVc/+/j4hBgETDrH0xmZzTiz39jn/0svcfDWQyppXXnyJS1cPSNNTLOcLkr1JaRK7iyVHGk+4eo1F32Obo9gscX/TJtc9rEV5NpasUi0SBDu2Tp3kxN13MDl2VHKN/88S9NCluXaV3pBaUhliRqHBkjBZmDf1/PZq5BcQbbKskzRO/zFG5HuCEca5I+OKAtjW4IulFCfTIaVgs76elVFyWyc9FKAzxoyMJ+scJWjdpc3TnDNDjIS1sLSKs6Q4IbiGIRRiDjQmiVRQyeQshn/eezyZkgNZp5caJwZpMj4opJVSBET21o1TTwapp0MfGPoeZy3b29ukYYH3Fm89zjpc09K0XqZNTFJwMslkhW8ovsVlQzebUBQQqrnorXrr+shqYnCoPt78HiN7tWK1h6cMNoFWfq3a4/XnWStAmHOVPJFHgFQYdhVokp9VRzhTKqQ8kDN03ZbgERmyFd2ykjMpRHFtDRFbBNCttZKlTldIvpVLJOVALjIqL7rwOlGIMswKkkMX1W0PkX4ZuHZ1j3U7Ye9gzbovFDxYJ+urgeKSMDtrru3AOEfJDnEjHbTmlc8dcx2pZYN71PrUVP1GNqO9h7Iwg46sYw49xy+CrI5/X9d1rZGRGroYHYs1hsY5GV/OClKPhag8cN8qqSVVwR75M6cAntHiNMVIjlGY0lkb0Ehe7HStpBiJOZJtQ40rzrzu/ZeCjOQreUlQRWzTsX30OH7bwtDCugMTICwgKhjXD7AawHswHnyi2ImwmJNhETM3iuFKSlzNhmu5cDMm1si4MjoCnG0mkghZ1mzVWK9OteOKL+Wwz8UXvd7YxdWZmp9QddI2nV8p+DcAmxROTllobSetvGzE7KAcAtqMLqqcRM9JWB4VGZeKS7TrdIRSv0oRFz+hrgPFCOChNGZrvb6XRrXDZQUkU3BFC27npRivBQ7gvABNXeOZdB2mZPoQNdnbmFvIySldm9cHrPFoNEYdSDwmt7IkNTmVoG/GRDGrQHxOwjqrZ7ataDg6j22MJDCmHlpyvyt1t2SQaX451uqIqwFlMIomU8qbcTBrrOpKSaKbjSQb2Pp96qhojRTsqRCU4WKzrI2Ya4CUYCXuNpkcCzEbUm5JOYvBggqcYnoaJz2VGAJ6MzeBdxxnlfs12kNX0O3QjLytFNtD9x4YdQ2NtXpIHKLolw0YU8dGMcr6NEI3jjEIEBHiyJCTwtTdEtjy+PlvZabJSxTWpbqDOgXoRIsta+DI2UhQT7ImrGt0bYtIaldavScKpBYBVpNqb6RclDWnmnNlM9RX36MEAzm8Yowi2KmU9qSabY0VvZ7WWXKIokeQC91kopoASkPOss9SzoRUWPYrtrWT5JyldY6dnW1s6SEEyAXnGkKM7C+XZGOZbm1RMMznK9brhQA4RYoGV5NMPWAE+5GO
vSKPIvzceDpv2Zq1zFrP1qShbQzWJJzJNB4a3+GaGSE7QinsrZZcP9hnyBnrRVjbZ0iN8LGNkVGGqMFWGJMSJ0gRkxKdg6NbU04ePcKxnW2OH91mayoFdxh6+l6YuDEllsmwu8r4phcTFmBvf5/5csHB/IAYko6P69rUBMFZJ8VHOrSuQfemGZsZ9SpZtCUGEimiwrmScDiqno+VJLHqh2Y53G2Gxhjp0sVAiQafEs47kmo55Fj1ZzKtl9HuIUVyMHr+ibS1tYiocM7EPBBLwfhIscICpenwTthaQy6sg4x3dd2MxrWE1DObbnP3XbcxaR1XL19i7+Yuy+Vq7LBZI7IGNht8xTJzoajWqaFgShqB3spL2Bzhh+AQQey1syGQklHdi3qul9qhHL+9/oH89MrkzMZq8QCKSI7i0yOaVyrgx8gWqGOtFtUNQABba4w0pgxEa0hZ3KRlYEC0Fm1J2CIjP9lLV7peX376CP/7lz3Ah567yN/8xItcWqw51Tr+8lvu4L23HeNXXr2KAU6Q6IalfJNDCyokJsclXF+SEGBjqrHkVBqIRcCc/+U9b+HyquexD3+M413DL/6ZL8VQ+OfPX8QgumGLkLixHogVBM2BP3/fbZyaCMDWUOhMZg20zvKjX/4IH3npEt/58Sc51jac2Z7w7M0FP/YpEcz7lgfv4m+8683cvTVhlTK/+dp1vvcPn2UZpdL91De+VwwK7jjOY6d2+J4/eJY/uCgaeA8dmQh7wjm++u6TfMm//H0WqdDFxD9//gJ/7ZF7+JWXr3BlJSw6EJbi0SPbXEtl1EWbh8Tl9cAds47HTh1hFROpFD78/EWurmSk/FjrecfxGcuYuTlEjms+NO5pY6FsbGDqOecqsGwK3/6xz/Fj732Ez/6lL6dPhV944SL/4HPnxvzog3/iYc5uTfhP/u2TFAyPntrhex+9l53GM4+RT13b5y/9+qe5sQ4YW7iwWAGGXKQZ06fM/hC4slhTDaoA/vvPX+EfPnY31991L//bJ5fs9oHjXcN333cHryzUIbePrGLioaMzvu2RMxhjePP2hM4JuFlNL+pIcAU86/XKwYrfeu0aP/SeB/n+33+Ktxzd4vsfu59ffeUyd2114/fVa0iZv/bIWX7t3BUuLnpOdA1//V1vZp0yn7xyE2cMX3vmFN5e58W9JdYavuUtd/HY6aP8D3/0nKw9a9jtpdA61TX86Jc/wn/8a5+ksAEhX/eyANwx7fjH738nv3dxlw8/d56Hj21xdTVwtJU1UUzhwy9d4rZZwz/6yoc5MWn51+eu8rGLu+zHhLUZaxKNvYlrDDl6mgKd9ZiSOHXXMf7UV34pp08f4dKl85x7+RVefnmjY7jdOlwIzPcWXNu9zOXzE7a3t+k7y/HtKTsnjuOPDVy7cZ2rqz0iAio0Vhq1rhQ8oqVacDTtRNgjRZqORfMugxOTJRqMbSSfjKKD3DiPtcIiK1m0hdvGSfNEr9VilyFCsR2nTmxz7PRJto5ts9q/RB56+l4E8Le3tqQpkcXsLQxB8kxQhhRg66hTGf+pbEr5YzvmCZXhVEE6MwJeVhoXxpGCmC4YDBcuXuPzz7xAv5t55cWX2V/2Kt3jsE2nDdIoLAxbVU9FdqCYTU0kdZFh3Ud2D+Zc39sn5QFrHV3OMoQ1Fs21KL5Vb5LKPqmInCJ0BUcplpIsIQWWfaDPgWxlGiX0iWHVE0PC56wN8xpcBCxKuTCfLzh/7jwv718Gn9i9ep1+ANOIJEgIkeVixVCAriOvBkIC27gRmJXekX7isUhBv2QCpDiHnW6xc8cdnDx7hq2Tx9WN+wv30+ZtmhFsqfWNrZp2RkZQZfpFRxZjJkYZ+XMK5DVto1IqDtdEaT7QE1MCHN56MJnSCONPdPU2o6mbRqie21TgQPP68c2aEVCqMhfCks9jvmzNhhFPrkzJwhDWrEMgupaMk3wrRWGPGggp0FiPdY3oTSMGTNbLzzKlAiNyztd5AKmPJZew1lBSYVivWS4WTJ2ncY5p2wkppBTRY9SazmkTupQkTVnEWTZiVV+6xTf+Fj0yuQ1mXLqbXyqAtqmH6r0Twww45C1KRgzRJMetWdrrwSJZb43K8hhkpHVsmEoCBRXYQhvUJY5nqfdWDGeyvn6pBB7V8VM8Av2zEX8YwcMiICFlbMKi9X3VQ86qG13zRZRdnILoDO7tLchTWKyDaFlD9YAQowfvEGML1Ri00lzI2Sk8aDfrLiNAGHmsOTf322K06NdZi0NElY1O5a1oBeN63mi51z0htY+v9fghAFNY5hEomNzIrzWn1fdUMYw+CS4hBtJWtHIRwoqzHlOs4AUh0A8D67YhKAjurWdApHTI1RAmkSz4MQB9YYDZCAY4it+imMIQO0rfYcoEF6eQerByj0syxKFgeocbGmzIMBTKMkKasViumA+B3Wi4uHJc7BuupcyVnFgViykNxltsdpgkgLdywuUZH8r7NmzpDePwja43NonIKuSnm01wsKJdqiIUDWuEPaEv62yD9y1t29E0TgOOdO8MCec2QbFpN5pdYkVrZUkaj7FyYFm8FOb6gRwaJIzB20aop1kWn7MiyO6sMIOiEXtgny1to8BMMeJ+pl0t4wrGNmASnfdMJi3eOrqcWGfRbRpZEcVKMZgVJa/z95okjOQIq0CJ99Lls0rNNbqx9ABYp4DowFQaZBmZhFBIKYwMLvlzfS62HiRlBOg2G0sBvxzpvBN0PiYJkiWPc+FwCMUtUiQ6K1pnvgJKCFtGmHMiSBpClGekVNqs72kkQY1bxJHSmpj0/qmuw8p7Wi+d4xFIK4e2VRFmmXOWlAcJjLl2TTWI6yL3rhlp8PX+jN0tEfMaQcxqaGFVi80ZM3anpPiXFGwMPjlDLljf4EXQQlgsh54DutGMc6qjgrLTZF+kfk1OlqbZIpeIM4mmMazFqow8GHyyTBKS7E4M2TvIDaU0+KbFuhaMOCQWI2BiSdKhK6WaV0iiqL0pqoac4AUy656MuLJVSrhFOhIGmLZTYenlVmjEw4DDMGkatrqJdPhKoJTEeuhZDYn5KhCxtF2HdZ5SBEw+uj3jyEQK77AOON+yCgNu3zNkmG5tkbI40ckbVH28OoabzYb2bTOulWcUQhD6fhHHyK2uofWG2cSzPWuYtI7GFryD1lsa3+CbGatQ6HMmE1msHCWAt57sCjQd2WQxinANxlkBpawkihllPYaBEgZaI0VVaz0T13B0tsNk0mDJBCejwGL+sKJfJRllNYyNhnU/sFyuGHpxBSbL+H2OumdL0XF/jZlVwBV0VNptnE71in0vSUQ20u3MAmQOOhoiwrFe9m8ySpuXhEOmqiUhLU6SwCEESlRDCJcpNinTOItWo/WEJCPkTdNhjMPgcK4hO6+JpCQho/CsEQCiMZ5VMSxCJkbD9nTGVrONjR5vLGfuvZ+3vuvt7M73CLMTxFfPsX8wgN0DK2xm40VbT9tFUjAJMiasVY3X4laM/K3D8giaTBhNxmv3xxrROrJqKW+MJRinBZoZAe5RbqEgoxAjYK8feixkpBBQBJrqelc
OnSeJLJ1rg34GHQc2bsP2SgWTRfRazuJCyYaYjBiQGA/EW5gsP/LOe/nl8zf44FPnCUE0Iq8NkR9/8hy5JP6j+27nr7/jPs7OJixj4t+8eoUfefzZEZR64pv+FP/sufN8xZ0nePTUEb7n957iIy9dAuDF/QWvztfcsz3l3bcd49Ff/B2urgZurAM/8emX+O5H38QPP/5sbaBK7lAYtWSPdw0/9O4H+Q9+9Qk+/c1fpWxKeT6PnTrC2e0p3/nMk+QCN/rAyUnLsa7hkrqJ7vaRD/zmp/ns9TlvOjrjF//0Y3zv28/yw48/R020v/Utd/ItH/0Un7l+wAPHtpgoOPT8vCcX+Lq7xCH1/FKAp9s7y8ev7PPffemEkDJ3b014eX+liSJjV/7LbjvGz3/dY+wo+PS/fuYlnri6T6dZ1jrLyZ0LXFj1fO7mckx8L68GuoloyH3zR/+IZxYChG+AW/khkvyL9MJugA987PNSUOo6c66pEZ7/9okXAYNV1+Xve/wFeOKFseQxRllIIE1MGMdOTIFv/Ohn6m/gG09dQX94c80HPn2Zb7vnCL/1H76X1ll2+8BvX7jBz79wif/m8ef4wS95Mx/8k2/hU9cO+KUXL/NXHryLvZC4sUiEXLg5yFrqkxg4HPkibmXf9luf4Sfe9zY++Rffxzol/smzr/E/fvJ5thpxef37X/lW7tme8kOPP0MBvuT0Uf7rx+7nSOuZh8Tzewu+6V8/waXlQGMNp6YtP/u1j3L7rGNImZcOVvzwHz7Nb752HfUJ4vp6IJfCW0/ucOes45f/nCi91Yj6z/70o/z006/xt58QUO+e7Skf+fp38ysvX+EHH39G/5Zh4ix3GcPvXb45fu9Pfu4cP/m5c0ChMfDEX/gK/sGTrxCHIDqx6xW+rF0+OAAAIABJREFUmYBLhLxgezbl7jNnePjh+3nP+97DnXee4mB/l+eefR5+53dhcQ2AyfY2po90TChhYDUMrG7u0dtC6hxmltn2E3ZOncbGHZbDwDAMTGzDluug7wmLhZxzjbCcxe1SittSJ0SMxJSEIRmHbTvRZrUiSRCzIRQr+mQlQe4pcTE+z6nNbM+mpNJi/Yzt6YzZpGV/q2WZljSmoUkd5EaaSXgSjpAsjfFYF4CAMb0M5TmPc8JQLEbeR1IQqwJ0JiUFSeTMqlIvGcnDTevl7OoTwzqJVuoAVy7f4NLL13nlmZcp1nPszjtonIqiJzE9IBdMDJQhUAYDreTzxhRckQZNHzM3DnqurTP7STRF6SO+H7C+EdAGCQopRCEKZKnQK/hVVAJC8jtpyMZSSNlTUsMQE+vY09sIU0/THGU6gIseF4zqGYlpmC2NgjqGnA2LdeDy1RtcvHqF4jLDEMm+pTWJVCIxGYZBcssBQymNnKs5SrOL2kLyCIdNNbdywKSAi4mUoJiOwbYsimOeC+sYmJBHNuAG0JR825hDRTT1uDT4xmNNI9qsOKzxmvtbkhGNp2wssQhTqPUttFMm0wnEQsiOPhuSTQwZaFtcY0UXyiSSLwyuZa3gXbai2VysTISZlBVAELjGemEIpZIVFNSYnUSGqCRpdJKK5HJJagCMJSQIeHqtI4UQkYWZqJqKxcv4ePINAU8kYZTJ5zAj+Jxzoc/S8O+zJVmrTNOCL4UUkzb/DIv9ueS86yX9wZKmVfmfJKxYAeVlD1uVBDE40dtGGrgNlolvMDmTav/QVDCngkNqWFCrvrEJY3SyWJn9TtZiGemjjpKMTHmkLM0brSet5l4Oi7ctjgaymgEUZbSyYVKHKDIg4vwsr1PzL+tlbYUUKVkN5bzV6byiIIr8IPVCltiCUXBdTEBstvI5jRF9PItMUiXJN41KN2VEWiFaISqlkFivI6Sl5PIeSom4Vl7H+SJGicGJ1lwSXEXYcok61qrlpE7Y1MawHfvFzjkBwZxTaZ4idbkxMkY75qwVZ9iceBt8YdyhiimUES+ggpelQn+oVJjouFoj4LRVcHvUi8dQ+kRureJHhZw9xnTgPca32DKB2DAMiUUI7JVMwJCyEbzFtUTjJDZaxYiKx9Jist+8bbTmMAZ0BBYMxhlKbkgHkK71tHlQVExybJKlrAysoCwLeR4x8wwHhTJPxAjX93ourpdcT4kr68DlIXAjJXYtrF2myQXjGiEuhYQPmXXOONcK21hLgbFfX5RxVwr5dY3L119vPOIaJSAYJ6o/UvzUL8ZxHKvbRjJxQxHJJtpph2vUg8Op244X/bcQgxy+FB2ZjSKMyCEGWVEHD7MxOHBVJwiETlo8JRtAHFcLmaQCi0EdWF0W3SJbJEgUAc0JIZOMuptQhUETE9+Ct4SS6HOR5FY3RkF0DIQBJvpytSgmb9hUpRSwdZRWg7NE0JHCPQzDWPAZs9mEOYuTqTXC4tkYD5jx4ABh4EkXB9WhUYApQ06BbtLJ+6UW/lLMVkvjrIV61uBjrcO6RovugnOF0K9F58KL22eMYm9urSYXutmrs1gumVLn4IuMxDVeXDlzzlgq2ydqV0xMMao7jTCLHK5xpNyLlorO548rXD9/zGoSkOTXGpO0ySndYOGEa0cNJk1DN+lovRMGkbLJXCmUmCgpYrwXfShj6XyDc44QB3E9suC80GetF0vobjJhOhGXoX5Y0fe90Nwnkkh4f4Q+RJzPNDsNN8pKkqFVQ7OCHRKNh/XEEFpPjp4hWrAeTEMujpjFFh0gpEyIScYtNmfieI9K0SRK12tG6z9nwKpbUBIWjkkw8XakvadoiGHAO8fRnR3uOHmSfliwWO4R0gCuUFxmGSJt1wmIWKn2Rdh23oM3hpYG37a4YFnFHpsS3bQhRcNk0tA1jbChsDRemHi5IGPVjSWkgXbSkHOm7w0pyHryzrAza2kby6T1TFtP1zmm3tE1jq71eCc6OjmtMRj5M2uJFrqmJTswEUIJmIQk542l1HEGI9bjKTlSEjMRX8DlojIHWZmHDaVESSZ9gZIgWGLuycjIhdFmQBVU9a7BWU9OgRRFX6TvewEh84Yxqt67Atg5AafM6zQmZARDxkxyUpahdURTtcuQRDtpMjwCAhmcIRYxxzEjm89ikHVNjjgryUzOEHSvFzfFTbbIbiLJu3HgtLBDACdvDSX1GAdDyQwx4K1lNURWIdHalq3pNrNmRhgyJ267ky/5k+/nbe9+O3/84otcTQ53c0EyrxFpyLYdQaucJXEQXR+JyilnccDVoFQZiFllEmrcLMrCdFbc5ihGhH1Vn89qA0CSRUtxql85iHFRZSkn7ZwLm/EQMD72xCpIV41aysiWLFYL45zlM1CVfTSOW2GOYpyAczGKVo0VwCsVMTAqNFBaXAnjGNKZ7Qn3bnX8rU+9pJqS0jqx1ojEA4aDEPmO33+Kpy/f5N6dKT/7dY/xXe98E3/nE8/L+wP+8oNn+Nbf/DRP7S2YOAteGG/ZegKWB08cYW8IvHwgY5DeGj59XVw4txvPQUi88+QOn99dMGhx463h733lW/npz5/jwnLDvKlp+506NnhxuTFyWMXEdGT2wW+fv0avHesX9xb89Odf5Zvfchfw3L
gvfvbp83xGDQomzvCaaqflAlf6yNHGMw+iT3qkMezHwtAnXS/SiDnSeq73g5ytJYP1PH7lJg/9099i2jZ88wN3cnE5sBmFOZSY6jk+ArJ6VXdQEPa25sPj5wdwvqVpyuY3CuPPLOP/b4DhEXk+BNobBeFylhwhRQGs7Xgub+bqckRzg83IcC6F5wf4gRf2eOGpJwFovee+7QnkzIdfvMQ/ef4itsCxRoqSX335khQJ1vLejzzO2ekmtXxt0fPmnem4TjAiceGs4bs//jnu2Jpw7mDFbi/GGFXn73t+9yk6JyOsBvjOjz3J5m6LVuHTu3OMEb3Rn3/6NX7u6dc4v1iTS+H+o1tcX4tLrNMmSa9x4fHLN/mSD3+cVDIzNfX45W/4Mr7jY5/j4xdvAPCWozP+769/N//02Qt88I9fwFtLzIXOWc5ud9zoA1eiwJo73nLnrOP5/SUnu5YfeOzNHAyRDz9/UR0+oUQHXs6YaAdOnz3O1/65r+HsPWdw04br6xW0HaFrWR5qrU+OHmNYrSip0KQJJiWJX8Oag+WSKwc3uO48060tutmU6daM2RZsW8ckZdbtHgfApG0xtlG3VAF25Zlp09gi46TOYZqWZjIVA53iyAhAl5wjWUeK0A89cViN77MjsD09Rt87bu4d8PyrrzHZduzvX8GUwM7xM7R2mzDAus/gOhKeUDyOFmwm5YFETy5eAAjrRV9TdX5lfHFzRrrKaEtFdZylGsrGEnQkskmS5xAEQOn8jOW858XnXuTcy+fouinvOnWMibPYIqOozjlMFghxmM9Z7a84vnVyBJhsjhjjiH3m2u6Sg+hI7Q6tGySeu0aJC3oiZJEQsIgLrNFzyxQxqTMmk0hY5KgeQiYmi8mdsIJcJjaZdmvG8eNHOX0wY2ZmtFleQ+yvVLs5yR7LxbAIgd35kuWiJ9pMtBZsoZSBIQ0MIeMGqT2iN2A6EcZPEafTHgUHtNJPpWizNmBzj4kJQgHXsgqGS3tzXr16nRs3T3Hs9rupNY8ikXXCTgpuBPzbpPQG3zjJP4zDaW6RS6aYBnwH7QSFccmlMA+Z1ka2J4ZiLQvX0LdTnWaJ2NmUZuKxXnIT4wyhnTDYBtd02DZT0oCxDtt4TI5IDiSnsrF61pYs9a51mCwa4KSMTWhyLbVpqiZZbUMolmX0ot+oumElJUgRWxRYcZ5oPMG15CzSEqVYiFHyySz6cDFmYomU4qV2QGRFGmPJSAPR6XtYL1eE+QIbBtKqp6HFzKT2siZjfdZJp4AxXskLRqZ3ZJNRQoKc8N4TozSAQYkYJaNUmZGUIo/VjLVXzllyUy9a5SVKXWJU641iyRlp1GvRlsky/Ya40nvTUrJFsfcNcGrE3VPqYJGukjrUaU9UgSgFf1OOqKi7jDrnzJCigOCa78WcBcDV87DOolsMYtKlQEuRz+hKxoasky1WcnXrKDGRCzRNMwLL6z5gvE5nxEznnTDSLHjnyVEatSYptqKGgnUyRDSgay4pn8MhrsIYg29EJ9hYaWqkrD/cCnPNaAP7ULKgeYJKBFRpFw7lFZVRf1hWqUilP064AWIiswH5DIINCUSUIDsBrk0W9+7swbQU58lYbGzI0RNtZhUHbuZEyJkQMyVB9BWgE5NBnJH4UFqsUOkYg4vuMoHxrGJEBZOgW2T6q2vo52TXE+ycYla4vCIv5pgYYTUw7C6wBwF/EBjmgTmOq4slr63nXMuJGzFzPQRu5syi86xzookR6z0mF9oIk2RYpwLeMZSIqBlabZwK0FP0fuXDbJ8vcr0hQMchdE9omWbcTOMD0ptTgaBcIsMgoo3OCbJtcSLYbxyudRjjgELfCwAjAF1SKmqCInTNFKu1gMxsO+vHsVKLBK2SowCC6n6Kfn8xmeKF2eOydCPqok6lEMmElBlKFIS9iC6Jd4bWOIy3LHImVvDr0Dij0ZniktSlozACeCNEalTzzSILtAiYV7ULTCm0baNAWR6JFxZkbK3ocXsIkZZPvUn6BaDTjZU3IwDiJgohpcqgh5KVOVJUy86Qc5RQb9woLptzhFGQPOOdIPPGSFeh6dqxdhhCFIakd1LYRNEyKLV2SDVOFBrfiPacl2c4mMLQ1+5QBQuVBVQqEy1SrAAipbIBTD3v1dp+FHrSW4+ykZDie2MuIKYYs9mUrdlUGG8psTXp2J5OcEDqB3IYsMbSeUfbSAc050wIEnCdU+1DZPSj7Rq2t7bY2pqBgcViznx+QD8MpDwhJEPMMjaxM/PsnD6CSXOuz+fEtTjhttbQuELxBtM4hmKlgVJHVo2j5CTrHTVMGPWhJKeuLsDlkKhJBXJlrx4a9atsnaKCyqZI4WQ9yRhWqsN1ZGeH207fRohLrl7LzJcHwrhqYNknQo44fZ5Vb86jgJsF14oeS+kDXb8iJ3ECtdbQdi1pyBCla2SQ7rXV92eKsC5NNtpNzKAmEN5ZvLdM2paulefUenGFa7xoNzaNJ0ah/ntj5O80DYFM4zyBTNu0Av7miEVHMZ2sx5BTlaKTMeXssDq+KOshEmLER9E4iTkRcyTkwBAGQgqiRxgjvhG9tgq+NU3DbDZjve4xxqgD7yHn3SyjHAnpiPmmIXuvo5RwmBidbFMjJIki96Zt8SRS7lVfUEcBFDgYR4Zykk5d7Ugai3cN3ndqRmPxXpzZXCxgW4prwHW00x2McSQdT7ba0cs2QgZjhfWQidIAiYWM3BPnHFvTLaZbM2hEB+muN9/LA+94KyfO3skDOzP2+hVXX3iWfgjkXPC+EWDc6J62FtRxl6wAKOouVcxI7S+oPo8x2pjI4orsGkmATNTEQ+6NtUZAjZwU3c/EkNSgRZnDRYFko/tO46r8twKDuSgLPFHUEU0ev7L3ihshPZR1O8oFkMmpEIOMgNmqw1M2+9pbL/cdK0w6PaurC+eFxZowDJQi4+cj7ojhNy7ujg2Wlw9WfOjzr/Itb7mLn3v6NWbe0VjDL798mSvLNQ41cBgTCTn/txvHvrKkalpwYy1A2I4CdDU7qNe/f89pHji6xd//4xd56NjW5ufptdcL2HHnbMIzNxfj9zfWMnUylvnVd53ku971Jh48ui3Oc9Zwsw+88+Q2IQv4em6+4vB1SwpUMsuY2Kki5nodbQU8m4fE1Dsm3kIviWfbSROiXgch8aGnz/Pct7yPZ3bnvLQnBgNjE+wLXlQuSYJ1lFUbEvUzGgw//tQFlvkQeDe+f3MrmPc6gO7wPT78spWlYpIk9+O0Qi7KdJUzwR0C50BAf4yCxHrNQ+KlxcBtE899E2E1x1KYp8J+KlzuE3dMHHfhWefCQSy3fO8rCuTef3TGduMIubAfIvt95PxizV1bHWe3hdG5N0SOdZ6Js9LgPPQBGyufYVCgzSA6dAa4tOq5fdrx4LEtYWCuB66sNgDd6WnL3/vKt/FNv/ZJ5iHRpx5n4Pik5fk9GfW+1g/sDZEzWx0/+O4HuHtrwre//R7+y7ffM97z//mPnucXn7/I5WWP7zpMKew0Hf/H+97G2e0JQy78xvnrfONHP8Wqmp8UIBkxP3BAiQypp5023
H3mbkwp9GHJ/v4er7z6KleuXYVOXu+e++5htVgyPzjgYO8AQsAay9TNmEy3mC9XLFZL9ucH5Pk+vhWB+Ol0irWWI9vbhFhYrgOrPtEv1+M6SlEaNNZbcsoYMm0zo2mkYWd9QymePKi0Ro7i3ukn5KYlmnZ8Nge715j4HYzZol8PXDh/mSEt6SaF48e2GMdEiZjG4TpLtHIGDSXjC2TjKNZTsnyhzcacxCxq06zVAtoKvUeayno+Gjm4cyn0oeeI3ZGcXboaLOYLwpDY3dtlf3HAtoG+X2IRlom1MoXReMfWbELoVyz29zF3aC6uDZUUC/s397hy+SrXb+4R+hX4hPcWMoQcqCNflRlrq4RC0trCMDb4q+EXRY7smESXL8YItuAbx4nTJ7j/TW/inhu7bDXbOFuhpAR4PU9kY+SSmfc9++s189UKWkvyhhQShIFYxN291cmiygKKQCmJnNN4ZshkQ40rh76KjrFbS8wwX67ZvbnHcrk8FFEOR6jX/94Gct/0GuqIqypJabFZAWTrBLhNSA2yzpGyXovkjTM0swkUCHHATFrovApaGYqzwiD1jbDx6wiyriNbmT+5jFI0t7zlIsBwxRRvGbu2Fryj8ZZJ29FOpqxWmZIMpUTNaRWP1H8Z5zCuEekljOZj8ueWIgwxIzkJtuYHRXVi861NeoR4EVNk6AdM7NmeTmgmDU3TqNmCaPTlmpcUg/cixZSRvMYZqfdySpim2Zw3uhbK+Nwkz6nXrSy7auymJna6X8camuqRrqOwJSv4V9EEaaiGqHm4aiHLiLmOxpYs46+VeEKdDquooRFmrU5DSYLKoXxPmLZG40kpAgqVpPJFpZAMmJhHA8TxsyL1cI55ZNVt7opqkakEUUGNEYyklKUIsa3qxnmdIGqcHyftSjHjPRFZIzXTMaIVbZ0hZavGOW6zBMrmfRgdex5zzsPkocPL+tDv3WJaVcoYr2558nUNK65QUMkwTJUbHE0bDCIjFXMkFTWWs5ZcEkPsKf2gpkyFWCLLOBBTT4xhxEvka5DmTRbnYzsC+/Wz1HzTUR2ZDbqd1hm3u4LXrhN3L+HjHn2+Ts5zPD1xtZA4EwvrRY9dZtpVYbGKXLeeS8Oaq3HNdQo3Y+ZmiMytoXeWQddhoweqd57OeZzWI6PBiU7dmMrE0j32xSLj4euNATo1F8hGN291HbHI2GYS5oISn+VBp0JKMAyFfr2vRaHQhK11UmRZYbrJfqmi9VnJefpBSiYF1RYqZhz9soe0wEqqB5voq6HfTzEUWzS5yJgkjphOFfkyCOLtHX2KUuSZIgdQgip6KsZ4dizsJKYayFYcdqibWxNtjee2HrzKWirFUB1RjDLuRNvKYErCKsAnC0x+iC1yX6nAnKmADaP4o2OT8NfRVihYD41vaLyVxMBoVyGnESRrvSclTcyNUXKajs+ZWjxGJq0IYgqFWtg5opGUCUMQof62BSsz/UOU2XSsZX9vxboHZx2TrsN7T9OoyG4WjbdiFfBEAQEVys8I66VYNaQwFlvsCNTlujdVC0B0KRAGlDL5nKnuspmYEo33+qWsucYym3XsbM3wQGwsabB45+iahrZxyCiujOQJQGfHTWcNtF3DZCLOa4ZCnjSUPBWAKGX6YFjGBhcTJ48c4+zZszR5n3ThEnsHS7wDWgMNNI3Qz0KWJCqkxDAEff0ggJhq2jXNBnIzVZ9Qs7OiY7+3bP9DTI+6WEUbRUefbF0rlsY5KgLhvMPYlrZraWML3mGTZTqLLIflWLTUg1U0Fj3OqsOo81iXBFhXFiPW4LwnD8IYi+rMZxpl6cjppuC7JCQVTCy6BzcFqhmNFipLcxgGGUdMhjAEkrqcSocJ+dzKrnTOyV6u5gsGHbevRbD8vuj0mPH9JU2EQpK9GXIkxCBMyxSl+DWHEhJTmcBWtRP9aHwzgkbej6PYBZQebcA5inPjAWgOAXR4PzqpZWMx1kPT4WwhhwRRNURKkXiktT4GmrZlNutoGi9aNMs1KSUcBd+0NG1L17UUaxhCEje/IgnldGuGdQ1JhbpLUaZxUG0RI/EpJQSALSKGbGJk2jYcObpNtz1lsAG2OnZuP4rb8SxK5NSpozz44H089duWFAZMSbrujWj8hSiJoCZClNHqg8pFq2PfsPmqoxkyMl/zTRnBDamATcKcUMRJCrVEyQFKEDDc2E2hQh3V0eilZ1TVtCklQo4yBiEB/FCjxo6uYgIE11FvKbakGB2gZHkNU3SsQMcJnGWjxZqpGmLVhfP2ScPn6rbXk6V2Od93xzG+9x33jS6czhj2hkCfMueXPSEXPnP9gFQKU2dvAVvqdTAkjjQVtJIXOdJJOjEPtbCr8Qh2Os+PvvcRvvPjT/Lk7oJO99LxbgMOvXSw5JWDJd/0wJ389oXroqFlDCFnVimz5S0//bWP8kN/+Aw/98x5Omf5G++6j7/4wF189sacmXc4Y8afDYWDkDit78sauG3S8PzegiOt5+zWhL31wGl1Mz13sMIbQ2OtrIcx1sioy0PHtgHY3znGtJvQOcfb7jxFqy93Lje0bYuzTps2W/TDAEiB/r6v+Tp+4ZSAef/F+m7RqUWZjcbwnLV8xf2SR2062hyKGRvt1VqYpDoqVb7wGVnrWMznvPLSi1y7cpGJM3TOsJzvE4Ze2M1dx/bxE2yfOMn1T/6u3jbZHO6w1mUpLEPktSLx8/XXXirsrarVglyVrYaBvRD5n/7oeZ64ssfekMa6fOJg0UeuKZBWr/ObyUmeUodVEKbc4XNMnFv1/xPMw2FgYHOtU+bcfM03/don5fMAg+Z0z+8tWGc49qGPju/3tWXPf/qbnwE+MwIet1zl1l8vLHu+6pefkDPXKKhaC3r9T3G4F/ZaiXDx/AU+8YdPcPLocc6eOcNsMqOxjrc9/Ailj/DZJwA4c+ZOJt2Eoe85/+prXLxwib2be4TQ002mHDu2w5Gj26z6NQeLBfPlkqsHByyd5/adI3S+5fiJE0yGxMFc9MbyehgN2zJFtb5Eu27aNXhjyGFgMp2SS8uwGkRHWAsy005x3QzTzsZb4n3DdDrDtEc4yYSbi575asnR41OcGzhYrPHFMmktkZ51WlBsEAYQWmTaVoT3s57ZxW2AjRrTy2a9i2mDuFRS5GeIVpWTeK85b9W/TjFy/fo1nBPtpDNnz9A2ov2FkTzL1lFaEhDJOZLSAEjTsOSELZkcM+vlgn69kMb+dMLMBJy3NI2n9GvRucoCONQzqEq1CHChLJZD+sFSz8hkjyuRnBSgs45Tx4/xlvvfxN3HjrHYXYLyM6rIkGFz1qQcmQ8rFqFnGQcxpTGOkCI+BXUGFVM3MYUqWijnMS/LKZGNTFuUDMXpWaQQineOaJTilDLzgwN2ryWW80Mb+N/lUhJIVvdbVHu1mIQxDufltbtJyxAi1gqw4bwAfGS5M8bIeZ+KNFEpwuySl6iyOsp4thu98Y08QNG1Z5RNWiUyKltaCu5sJBYZZ2m6jsnWFrN+xTwMmKLN0FtCl9ZcWHzbMJ1MsBgB9I3k8EI6KSJNUwpVu0uAZCFERG0C
GyPEl9G0T+zsaWZT/NTh2laBkYJJOu6o+rveG532Es1GAT9k6qieOUXPhCrvMYKqdR9Whsn/y96bx2qWp3d9n99yznnf995bt5au7qrqnqV7pscrtrExYwYTJmMbs0gBE1tiSSQURYSQRAomfyAlKAISAfGCAkJIKIpBwTaxokSJDF7AwHgwMhl7PGtv9DLdXXvVrbu9yznnt+WP5/md962edpvFKBLyad2pmlt3ed+z/H7P832+y1RnvNslFY8+IeuZqY4R4Ej6/zo0qp718pt0H8FOVk31dWyHSHWQY6b62CAMMhnFGSgq08ySDOqKpdIYbEEAWrY6CIsh1GuwswlM94cwIyaAT+o67TcjQGJt1kr8kT7B6l5l9T6wrZtAfNnn5R+cN6IOLGCSqCwk2TpiipE1JoofaMlFvc70nBp9rzD1iNP9vgPE1d/7zqP2CyDvpRKQispaa+1cCvJ6VcdetCavh6hUk369yLul30zkEmU/JCqKJuSunINcbyv4hwzaPSkFQhgIYSQ7+VnbhIUd1HXn/dos/2RigfVI++gce/cIMxzThoeUeIYpPYw92RgisOkjacy43nA6RB40LQ9z5ITAqTGcx8wqJYJxJCsKGwkzpKK+Ez4Qc6JojkPZuRJm5xzVe/xXO97bg65pYEJx9eExUCbZjl4lo3836AYmC0COIlG11qrXhSGGujAhwJA2V3pP6X0uTZ5Xw0UQmiJWwTflvKaoyYfG7LAXiqChIGOoJBJJQb8V2FN/LpG1SrPXNgJaWYoyZUQWW7DEEoUZVuT95Cy+btOiPgF0RYER9ME125sHvafQ/s5CHkf5ZNEJt144FULJ+8Y8hq2UUtltW0S7LvYCxBUaB13rmM8a9ZQzsrmljHcidZjNGrquE2+UJKy36sMlRULCmci8azHWEhTpFhBHTHTTGPCdJFYZZ4k5qTdaAeuIY9RG3THrZIrTNE4YSKODtoG20+ZWGCEUo9PFiG8MxYhRZTGSyJQxmCLArskGHHjrcI0V/xAjUreYExYvm1KMbIYNOUa8E5aF90YYGA4oQYEP2eC9KzgnjbG1wkpqvJWNvWRN23HkFChEwrAm5zBNM+edp2s9ORs2A4wD2DFwcbHPjStXWcWG24+OaWyPN5A9JC9MERQkCjkRl0s2681UbDVtw97+gsY30pzF8TK2AAAgAElEQVQpCLcLzj3+wVcswAUEJAaVPNdGoihYIF6EGGF8bfqenHoBJ30DoaYloWlGQnXHiD+HMx5vhHkp1oNZJ+/yEWKeng3rRDpa4jtea9myzd51qy+yKZSiUhFjNZjFkIMk8ALkbBiGQGkk0EQJTNT6omiz6dw2cEVSUDOVIr5DSJzWKnlIhdYestzvIQbGGCRFzBTxUlEPOIz+ODTJNElKrhSWVoFQL6+BWjCCdxlvg86ExBtyKgj06Mw4+U+SEtZ4lYbotNFYEtoUVimAgkoH+/tcunyRbt5yfnZOP4z0Qy9NA2D8nJkXoM74TBmTyvoV8rIGm7dTS0nV0g+rwEaSabQFbIy0wP58xv7+HDcz9Hnk4pVL2Aue3vTsmURrPY3LxHFDDIM8cwhjNuQoTZRvBGgthWLUv1DLq4JRcNRWTYbcS1bsGIqR6bN8ztG0OoTR4spqMIx4G0UsSbzfqIXybnqUzGANUJzuGXWCnKIUG6aS0XWvs0aT1cQYuE6AnYZN5JjUt0r3EgoSDlGfCTEun4JtcsIUuedrCufvf/9VPnnriApMa2lJY+Fv/c6v5y/8yhv8+Itfpk+Z/+abPsQf/sjT3NkBSUIu3NsEnLMcNp4nFOT64F5DiA0vHJ9z2DV84GDOonGcDZHvfOYJbq/66b3urjrffPUCT8w7/ubHv+Gxf/mb//438L+88BZ/7tOvsImZv/yZV/nh3/513F8P/O2XbtI6S+sM3/+Nz3FrtaGxlk1M9DnzzVcP+J7nrk9g4CqIKLxx2w3z1mrkxkJYPs/udXzx0ZLbq55fuHPMn/3Nz/GnfvFl5m3Df/m17+fvv3mPzlnWIU2eeTlnVqs1/+H1C3zmqOVBP3K5a/gvPnSFIRd+5cE5+/VOGHrGMMq1DwETAyYXEvkxsEtPydRST81O3vqtPob/TEM9BZazLGLWVolRmf5t9zAIK7lrG0zJ9H3AtGJwbZoW13jarsMZRxwCn/yejwHwQ1/3ffze3/0Jvv7DN/j2a5Ik+uL5QNvNuHr1KvsHBztDh+0avYMFA/DC539F/tLOKAX+ygu3wHq6WTN94VftNby6jqwq48a886fVhmL797pPfPJ7PsY3/PgnefMdjMl3O2beTl6H/0rHu33DzucmWXPZ/Qf9UychdSBia/qckX1ldbbii5/7AiVFvvqrvoobN65z49pTPP/sh/CpTADdpYsXeP755yFnZm3D8vyMo4cPJPE1DBwcHLJ3cMDBhQMuXDjk+PSE46NHDMs1x/mcC3v7GOuxxnJ4eIifzXl0fMZyvaYQyCHqcDfjG0vnLa2FpmT2G0/xHf2QMaUFu0/MDc4kUugpuZvOxRM3PsT+5ct03T5PPrnP9RvvZxMt5+sT7tx5ldXZEu8hlZE+rujDEqwa+5ukdp2eUjw5ObFLIEIRKxdrnQJx75BdFZiaimz1/CrwBRqqpcw4Y+j7nr09z40b13nqqRv0mzWtbzg5UaBAjZecg4O9ORcP95l1HdXPSS5xJo0956eP6FdLchKz9zGv8Y2nxJGSotYMKBgnIM6WiaP3zQT0OIoRhmHS73FFrktlVF1YzHnqiStctp60uSfNgqke3VV5IK8/x8j5sGYgCdPDiD1LSkWAByNNZMpbC46ci6TNZiFfkDIYGQBJ71cwVhhJlUkjhYUlhJGz0zOOmjXL01MI8ddMKfxVjyLMlJSD9ptJ90AxjRc/byd2rcrYB1FDlKJMuJIJISjDPjLGgGss86ZVQ/96+qVWqB9WwReTKjBV+zupdSYTCwXCxPqiEErGG3BdQ7uY0W3m2OU5NidyErmlqEPctDaUUvBNw97evvQ5KeO8I5dESnHywLVG7t3J0kgtNiStU5lGGLIVYCQqmDgioVk5q1IlZmyxcp/kTEqBbBusl/rdgoKNSS1EZN+ZwviKeWwtZjp/5jGW1uOf069B9yyzXcOdq0GLaslgpJ6KJeJskOA8xG8YBeVy3pW5Mw1Eq+xV6miNdtGQGKwQjey0XpTt2pG3gFaVYBt9hoy1k0RgknHWYX0FwyrQqyBhLsp8LIVNFnWSd6Jqq2QcW+uLpIqdkFVJI6CNBIMVkaPr2c5GlRlZ06HDVn3TOFnfvfbnxUi9b9i+9npd3iu59Z2HNXYbAAEqzWeqqWt9LUtweWzXziaTjIbh2YJYjNf+TLxQvZfwzkJWLEYCb5w3FJsV1JP7PaUgTDyjYPeECefpt5rpY7dLKlASbgi4tQBybhwhjpQ84sPIYA1rYNkPrEIhD4XjPvEoZ2HO2cy5sywzrHIm48jWYhpPsU48DXXwXhGfuuYLIF1xKjsN7Kpf6nsd7wnQFWv14WdyI5D1oTBJNaz4ifjd4sSVCWgxRrz
b1ks92WoCiSCcw6x2Ixp3EdbTtjfnDA6Sv3uHd4xOX1kourSw5ObnHvlddZzGccLeY4rcijdK5tjGG72bIdPOcXl2hjeOdrX2PY9nz6+BHr7VYYpEno7QZdAijpnpUR2WxWlSUqwHpMEihppcU6IOty0BmRBebi66lEwtw4R2ttYf/WgDEL5T+Gnb9EkUTsgrg92nkOlJZOJWkuP5V0mUXtLKQVapKyqhxLMBjl0C6RYM6xdC4OE+hoTGkblCPEUkHUWuQJpbAgBshB9tgU0QQ6a7h945B5azl/8Xy65X5MXF+fs9wErtaJrDu0MyidcE3H3LUYswPXnj77nBQiGsO8PaKxDp8zGyVslJu37nDrdMGlhXHYQhBWStu0gBRXfH+JZ81q3WObGa6Z09ljbt64xeuv3oaz3wHgr/yl77BePeOzRx9gVULjuLzaGeL95jcyNeF/+dKIhFGun/zVX/45r/l/um78mf9FDEGkKlOFXeZLTdRqMJmjImslnZ6twxUPVVUq1vUYl2TASJBdKt0xS5RR96mUJCFVKUmiVLbxGpuInAq0dmgtxa/JFy8V2XiJc7QVea3WWiT6FRRMUnyJKYpULARq9+7K+kop0ljDweKAmHNh4ihO7SkpwXbbs1xv2Ky39NuBFKEzThjwe8Hg3//nX/2Fx/t//fYhgub+GWS0qx/A7/yA7wJ//Vdvwa/+2SS4It39eZLaOt9u/xnf7//lK2pCkF0mREXWFte0XHz+mCfPPme9XZYmVSObcWS7vuCzxx/gskXH0syn2HWoIp/HiEph9J6nT5/QDyOnN25y+/YdlNZcnl/gxx5nhKngowfjuH37NncfvoZrZpzeuMPh4Qk8+tsAvPW1r/Pg3jG//Ku/zttfeZ3hMvKTP/yYJ48e8eTxx3zwwftcX27wfsvGX7FWgZXRqO6IMTmSzvjU45RBj4nVak2M4GyLBlL0+KEnDiMqBjRR9mcFVgnIbp3FkBm2W9abJQmxLZHCi7BMBWyTOb7ZrHHWkVKgcQ1d24j/qbFEEjRGzqGUiCEQC0t92xcQzlj6YcsszJnPD+jmC2bzGd6PxGHL9cVlKVyOZZ8IDFtPP45ks0K5lixBA85aWg1tARudNXSdI6SAVYr2oMOedJxkw+LgiJODY9ZnV5icuH98ikmRuN2w3q4hBJSxdK1BWUMfPdFHVIpYrXHa4oeR87Nz+idnnL1Y8vS5nOuLoxNu373P62+/ymr4ks02cHURIBxh8prRX6J0pFtk4jiy3YxYDlg0c+6c3OBXvvsqyp4TnGN+86s8fPg2r9+9y92bJ1xfPuf1V29wcDLnl/7cdzgycxa3X+f6nV/l0VuPWL/wPL9cMmQBtjZhEC9iLY2/rNZoqzFCWhELlRRwrS0kcYNrW5xzhKCJa48zHaTAZnVNGLY4rfBaMw49zlnWq0SMZ2y2A0pb+hBF6dS0MJXjag5ZAKj6a50nsCSX2FGsMkqxXzZlFMXb2opPmsoFBMvlfEg7sIqaE1cgaSJbMOXeCVClsWBldeUsxIsYRXWilGbYbmVfV4l205CKdDuW2GIMAVc+cfpae0XxUHytyZKnaF3AHOQzY67nltyjYF5ljPKOFSVxUp7Am1qs16bEZuW7pSQecjEUHzjHRGSpiir5HD3Z3NT8VO/bziCsQsr9pJRLsXdnVgETLED1l1V7xZEqa5Wiy04lMf1/71JlPqoKWBYMQQgkJS/VqnhhK7I2ezkbpahTcIa0Y95JLKAkRjRSxs6hPPfy97VIOd3LNBZZYuOiVIGE0sLgd9qRlS1jLmOa0g4Yk7fbyVt3meQOMJwkz3kXoORcdQJ7V967M1UtOopneIl7ppdO+IyAiD56yJrog3iNGtDlPIphJA5B7IFiAdOMeDZXm6kxeAHoil97ZUjWrrKToqvem5ZYz2Ll+URd1nuerNBKylHiuCL91ZqExpQOv4pcZMDFA39aI7vv+XITlZ9//UIA3TT+eS8VqWB/3SzKg9hhSeqlf8vuV+wQuApWF6R3Qpr3P3z/DzvE/OXX/XwUMu+vorz7XfkUMjBWOtbkHTfl7wTC9NoaJRd4gEzG6Tx9hla1y6s0eRCfwTTJnxKC+meKR1FKIquraHGuiZ/cp0EJjTnvQM16RSXVfl1Q/FpxSEpNC0pr2Hqh5huld0ySaiRNYQpWtFxRtPkCGJTeWIIGG3HCI+VysAjAk1LCZIfpgGzQWYCCrEU2mfwSkGCncTMarXCmdNdJmbbtZFNUIpMwxhVqqwB01ho59KbExBRWWO2ElwT4NMWfpXYRqxtwlM3fKEPOmhRKxal4LVgr3X53G3nFn2XxtE1XNnyw1kq1a2+VNV03za3qEVGTJ5UzWYfpsQnDatcFsB6ONWCMUZo85OiLBIudF1VK5WAy02GYkUqWfOc0BZzVb/wlGjFyyKUsiH2t7suYKZFTJwG8QhCJ8qYfGYaRvh9YbTYMoycjgEhSAmiZKhUzhqbthGUQgrDEZgusbXAu0s0WONvSFF89ZywqZ2GNFW1+zCJ5jWSRZ8bAlkGo1SAgdqzVFNn+QjkgpipZ+d4og9EZaW0Rp0KCLmCu1qb4BQnTrZrc5rIGUBTJewHOqhw/CovNWkvTNMVXQzZ5a20xNKe8lzCdlDbMFjPamSX4rQT4SRGTwrZzMJkQineUVqCsBH3GkmmJXqGTVKyHIXG99lwvK+MUGtOgscwWh7zy1df56ltv8dWHr9F1jmg8z598wsd/9AmXL56zvrpkeXXJ6nrDMAS62Qm2O8S1M+z8iO7GKfZgRnu44PTmDY4Xi9KspOHo+EgA1c+fcf7eeyw3PW+/9Q5vvv4ap/OGFphraDUQik+ngn/y3vs8e/YlbeN46803QGXe//BDhrHH6EK9RypqCukoqop8NIZMHX4JfCV4dI0VKXRb/AXLM6wGxSolVNKYZoazxaS/atiV7L8xeDTVAFbONU1tVb93nuTCfpsKFfIzl7VWPewm0KQEdBKdBgGL6ylS13qQoMJYgzN6qjwLIysVqE8OdwkoDIoIMZOjR4eRrlWcLFpODzty3HLx4svpln3SrPvE5XVgM2i6eYuxlhS3jHEkKE+/6gFpujD6NTlqnO4gWXRq0SiCH1FmYLvdcm0T19dLlus1JsOsaenHfvpMazzr5RWdNrz68B6L+RG3Tu9z/84rfPubX4H/UV731/6V3+DRpz/h94aPiT5x88Ytnj/TvPvZU76p/78opYxyUqoqYxEAlr2qM8XuQiNobwyeFHyR1Mi7vGRFkGX/bKzBWIuuXnTl0kYapKiU0KUyPc2NvUjZmIbGlcKGdThbih+Fba5LQiiF/l3nOqWV2A+EIN0ex5FhGBj9gAqgVPVWKieiRvZsrYW1agztrMN1M0zT0TQ9a7cleClS5pQJ48jvX3m+d/zzmYn///WLXb/z+RXjuMY1lhgD73/wU/phxSsPH+DHkeXVNSkH2raldQ3OjCWeYJLyG9fIHlTkdxmm868yOlerJZvthsePHxWmeuTO7Vu89dZXMM6wHnpsO+POnZvcv3sbrVsODg93CTjw
5IsnPP70PVbLc379V7/LkTvl3fd/yrt//A8Zls+43q4YteNyO7DpN8RFw5KMX/WStNpAcoG5ES+mkDLWWEIM9P1GzvcUmbeOuWuZN4a50wXMcviY2cZMMpqucfjcSjFbNVP34r7fMg6BGIX9773GOUm6YvSMY4+1M06OD2k6xxgHYhb/on7IDDGwHQb6YSDExHy+wLg1ylh8yizCiHUNx4dzwu0bjOsNyXspGllR3QQlTeF8TsRxAO2E1TpANAZLJHuNzSOhl6TdGMfsxNIcz2hNSxMVw/WSy+sLbswPeHh0ymZ1zZPtFtNIAWbMic6JHEs3mkW7wGjN6D0OQ0gKNIz9yPL8S9arFQHD4uiEh298hYev3+PO/VOefPaM1dWC1tyjazLbvieqS7LeSoyrE406orOOw87y5sNbnN7uePudt3nl9V/DuiMWzQwdM6tbjndev8fBwSEtFtZw+WxFeL5ikTSvHJ/g0Hy5uuaq7wlK1XqYqJVSIEaFz0loZyS001ijpBiLwpqMMRnvJf7WKjL2ntXVmjBu0CpJcwh3yGKxYL3d0g+By8tLUQ+haWdzjKv5ya7TZEEoKES5KWnYhyQmmE2WXGFmOpyTwq74gkp37X6zEauLUoSHUojRBoq3nMmWxhoaV84fo4sRfid+XUbsGiamnSoFJV2ke8nje9islvRjL7Y5STyNK2BBDVmQglGMEo/rAkJUppxS+zmmIalIlbXuAxD1+jm/KmPzMslHU3zqcyJGUdlklbHOknMkpeKNVqwoJruAXKWu8mm6en3BZHkCu/N395T2QLopchNQJbGjc0luXGK0PZBDPiJNuEB5CwFvlOQnqXA1xNva4pyjKUz92tTPD+N0z7raOukyx5CCripnt5JNasrrXrJFULsxqeMjzZmEKBSiFOC1ET9bYwymNBepZClV5gtaS5fmvYen9M8AWeXv0jRny/NQxU+vjHWdiqoUo3dzqMiVi3d7eUhUb0JTzhXBN4Sc4bxHRTApS8M578XDPIjXqSFjohS0gxfSyeg9IccJ7NwpGdQ09ytjvQKr+/NlN1fqMJd5WFGdSRK7P612MaJC9tis1PTvEkhMV8f8T1sk/KIA3b6+sq5kwcOne1I1L6EixkxI88vfMxekFf6zH3zC9bgLyneAWpmA02fVN3v5bV6+p3/6P/OUU/0splslCBRdcmXVMHX2U0oRqrxJVbpoBeTk3WJtqJBqcpemDmkxFqlImYyJgpgWWY6QMXaShlgR1+KXlop2Xwzny8IpVx/K5I+pbFBlMle2UJHAJ+/F+lIVGeAeiIMSk3KpJkLWGWOdAG85FiBEAjxbqcs5o8lSxdLgx5Gu93RdU7xrkISkbGS9T8QgTAFj3IS+CxAUMcbRthrnmkITtbJZYYgkjC074R5AV/+clZrMk7XZeZsJaFWnaTWcF+q7Hz1+9BilcU5MdGsVoFJsZE7I4dRYi66sPSsdZCfZNuBcaSpSDmhV+bJ7C77+XaoLMqedR8U0oRNiDhylC2yK1K6SqTLlZHKKsbz5U9Z0XXZqjzFHkWJnkTp7Lf4V9YqJUnHIbLcj4ygVtk2/5Xq9ZRyFuaW0Filq26BNQwXp23aObRucbURakBPWOoxxbNYDIWUaNwMHrXW4xmCUBCnaGJGRpyRS959pHjN4LyypgpprZVDWYst/V4PfSR5Uh0FppF9vFicRJeu6NpnQ5bAz1ZCfPT8nZzFa7NEHnxiLHK0CwDGUTcXoab9SSoOV7pu12iSzWCjtImUpcvjCAJPgYwY2o8oa0VYS55Q9SWmSasBonE0krQnDmo3fsPLr6fmFsOVgccDrX33At37tW5zevMn66pKPPnjMp48+4vmXTzk/e8YwbAuAaVC6ATNjPr/Lva+8zfG9V7BHh3Q3jrBHHXruODheYKyibZoy94VJeto95EELn374ET/67H3sgaNd3BcJktBVZS0bmddvv/kaxydHXP3RBa4xPHh4n1W/4tFnX+CjdNhOQbrfKtS0lqd5W55NVagabdCmpWlnNG2HNlIY2UkWRG4tjXfKvhT9BHzJViaH6+4QLvtbkWtPrV5BGnrsBXn7PyefGVX9VqbIDZSAcYo0VTND8AQvZshGaxrryvyjNAyqAUOSwMUoUgoT+JNLQmeS53h2xP1bRxzNLP3qksuLs+mezy7W9BsBdMMIA8IWDjmgyFIkUDvvKNNqctDEkFj3W7xKZG3wcWAclsRxyVkD/eaasd/QtS1GGxq3Cx9W6wtmXcs3v/UOX//G17lz9z45Nnzx2VM+/fTd6XXWBGaNhuhJo2fWzLh5epu/+WLLp4+/4HotHTybtpXnFBPGaLbbLS/OvuQP/+JrAPzyb70v3qdFmkwSZmJKYQo4xDKCKblKKRKiVNbbrqNpGvp+JMTIrVs3+T/Ke/8bnxj+hzdiWdtMHipUsJgSAeTamVUMio2SFwiQH6cAbKrM1mBIyb/33uODL8WWn6k6I8tJ5wzF2kHOMyhaoImtUTFhpTSmyJiVqvYhIp+JlC6qqXrX1nUq+6CxBpttmdsRUkYbSY62/YhzDUpbQkqE7YiLiqbtWCwOUbohZc1mvcEPcsYOQ8+/9+OepMrcK7YEtfg1sZjKOfKjf/mXAPjub/1Ixm4UD7IcJTbyGqKS4ptJCZugi3DkWm7MDzk8PMA2Dc+vznl6+YLYOOzsgGwagveEccutGye88sp9VusNz8/OmR/d4N7DN7h1+x53795jfX3Nn/zhH/Dss0ecLub8+V//Db7yla/w43d/zG/93u8SVObw5ITTG6fcuHEDP4w8/+IJ52cvGPpe5C4KxnJeGi0yFzP5Ju3O5loYnbyJlOLlpIBpL1FWE/NISIG2azAO+n7D2dkZZI0zM1TWJA8qNTjTSQMnrSBGkinSLyMMAIq3kI/ig2WtAACucVwX+evB4pBbN0+5d/cukPjgww/p+4HHH3/E08+fMIyJlAzb3vMf/XlhH37/H3yfGLc8fvQJz58+4407r3H9xSVnF8+4vnjG1dWKRMc6B/quJViLPTqmaQ6wZkYMGw7nN3jl5k22F2s+XA1sg2cIA1klnLFYq1g0M+6eHnH/1ikP7tzgYD7DKMPzFxc8fvYlV9sRFTNRJza9ND4RqZ+YgYstSZpUBNZpyIpxGBj7nhB6Dg8WdJ0BL82iLq+WXF0vxQtWslwpQDYzUtZcXa95cXmNMYaulWLksB0JPhBGT84B12hhd1ktjceUEQBRW/phJIyiUvDBM+ZI6OEaj9HgupZTZXDdAckmNltPZxv+3Ne/wfFsgeo9H3/Sk7Kim4ufwLi5RhvN4AeiHzg6PKDrWry3jINmve3RNqOzJRuLcqrwPbY8/fSnfPb4XebHc4ax4daNb9N1t2RfiYGQBkLYYrOwR/IYcTnTYRivllyH5xx/85vcbg9Az2QPS55OJ04OO9K4ZvnFM67Prvjgxz/hd3/7d/mTP/kB635A2Ya58gSTGK2R4mWGQlGa4us8NeKTwsjYe0KS4kLTdvQDjKMFB04rjAVnG2Zdw/HRAYeHcw4ODliu1lyttqy2A+v
NyGYIYhUUS56bd8d6XZeq7OPVM1QV0CTDLl/JEllapbCt42DesZi1xZ4soUlcGy1ATRAlUaZKJRu0sTjXAIrGapxTaJUn/2psR9M1OFcL9xply+dq8dqejp0kYGDcSn7TOEszm02dwHMp6kQlfnk+SDFalaJ2FeNMQFfKe7H3DqB5GdSovlw7npCq/5tylCn1kiJ7KnBZGW+thVSQUrELKlYmYjMuuV61QKlWQ6n4DmQlAFdGlBeq5oravNxdVU1/ks/Omgql5JJDKk1hSO5ybKrvdGndqgpASBbGXW0mYYyhaRq6rsNZscpSxTfRNU6A2Zz3xkMGxJR4ONWmdDLA0zlS2Vs1BtF771FZlrnEzTlJfpRipSjFQj7ZAWbGaEDL+OWSc+4BsrIC63mVy5uX/LxgFFNH2MKM1EZeb7Io0PbvuaTo05yq4FyMeww3Y0hZGglWS5zaDC4Xb3HBVii5pEarNBUpvRf2aCXupIItUe2u6nzdI67knKGwDqtFVK6WT8U30pR4XPAeGddMIUsVOW4M8SWEbV+S/XNgrZ97/d8CdLUpwV/4u++VCpRsWpXx5mNAmYwxurQuls1aUyq9uSxNJUlWTcJzFv+w39wq7r3xGq9oM7V8zilLF9NcdehpAq1qkpwLqqqN2SU1xJcqGVlJG3jxTY7T6yhMB1Um3xhlsqoCRlUEfALopkEtyefeIAfvJTjOhUFVwJkc0xSg58LsqEBaLGw3tJLmA2VjF/AmgxZKa0C6huxAyl3SuB4E1BREOO/vMUiOKtXvFJn8cCjlEQEOijQMVRZuASiyRWVpaJBUYkSaJaggm5TV4mFjskLFRMwaOyZ0GqV6UAJ9SCQf0Vnotd5n+m3A5MroE+kQKUkCNFU9BBTUete1iPKzykenDQiplBqjpRuM2UfnRW5U54tRWg6obc9Wia9c2za0jZvmBLpUPXJGpdpcIOG0whpQOhOyL2C1mAmrvEtyFQXIrug5RpgSSe38CMqpk1VtVpDLPBoJwZOjFzAhGJKWDSnGODXP3a3rXZOVekl1Yzd3J1YYBbSIGe+rKW1lfkmJx2nFMHpCXJJSZhxHeu/pRzHOtG5G13XM5wtmsw5j3TS288WBdD7N4NoG55ysI+sA6a7kTElMkzROkMS1sEQp8hEkmbXWFImJQ5pN1aYrakq0J8CumKNTNvlQ/SDLPK9gOEhlp+hZJ6NTkaxrauVG1oQDI2bgOY7kmKVaqTXKWFRhnviYsMbKYV/k10o7lBVPO+kLkMlaE5WT9awbMHVeZ7ANJPEhMdairSbkAClIo7SYMc5ikyLrSN5kRkZG1QPC3pwdKB6+cZvXv3qHoDb8w3/8Ex6//xFXT89YXq5KQgabQbHpPcpa5kcndLNb5MPXmd//JnfffBN90MBMoWaZ3ERSC1FnRu1pCw6dcuZw3vDKVx8SrOe9H/6AH3/8Q5ou8vX7D2lKt8rG7Tzimtby4MFt3l5+lc+fP+H5+ZccHB1w595tvnxxLkyEAlprbUAbCXrr3qSqjDqXAMyAcijdYOwMLYtTnmnZ2aufi4TAEHLYHaxaF7BWGLjkyv+Ws0n/zIkZ8rhX5Nn7STUnrqFcWdi7qpCw9EqXwZjEPzCEINXLpinBh8i307R+ZU2GEpD6MBCzp9UOkzM5eVqduHnYcetoRmcyV8srri53AF0fwWfxRJw1ikwkh4ixZd1ZTat33Rd99c8x0tRFvMwy5EiOW67XA0pHkaO3jqa1E5hcr+PTQ7729lv8yq98F+s0J8eWq+uBs4vPuF7u9qqrqysuLq7xg2J9Hfj0oy+Zz095eO9tlkvF1fUjbDXtjT3kgRgy3q/x4yUgIFqMqwLyyLk/dfpOtaSpccZhlS77QBZj3yxd2UMOqKSLpYMuDTnKM/cDNTSqp83UwbDMgQIhS2CnpWmIVqqwnoURK927KPYGEuBOFf+cSTlOjUvqvNqB/ko8U/aKo8I0EBBOGU3IiuwjMSt8TGg1TiC3NZqUIyH6KcaggHsTSJSloUoNjFONTVCF5YewlYsRurYZdIMiiam9DnRtJ8yAtmEYejbbke24JYSRmDyZIAW/LACxnGMlkaGw/eJu7HOUuCAXCfyUHas8SchKqUvYfE5x984dHtx/wPVmxRfnX9L7Ee0MtjAoYgxYozg8mHEwa1gtrwhjjyYz71q0VlxfXrG+vsYqzfF8waJryDGwWq9ZbjfS3U9rtsOW/unAxcUFpEy/3ohHXW3SFSU2M1rTzbrS9VMzFpbizgh7l6DKlMov/VmOuETGl050csbYpiEyslxfMY4Dx0cnzNoZ3ke22x6jFK5t6eYzmsaS80g/bOlLQli7oKM8aENQgdF7vB+YzQS8iNFz88YRt26ccHx0xHazpjGG1WrJl198xjgkctaEoDG2Bd4EYNgMzOcN2/XAn/zxD/mo+YBD3XJx8YLr1ZIhePzYszUaTg8xswV3X3mdu6++Tjtf8PzRR/gvn8EqkjcDVoEIUxRScZPYyoee5SrRGk9nI2GYo43l8vqCy6tzrrYjQ8xs/cjoR1KOxUZAQLgUvSgnrMT7WpU9UTmSF4BntV4S/Ba/XYsdwuAxCax24BowjoTi/GqD1r2AVmFEK+hc6QYfYVuUCApPhzS7MdpimhbbdGTTkDHo1uO3W/I4EnoFKeIaw3FrCH6gWcw5ODhGB7i4eEFKmQc373DUzmiUYTAJ3c4wbsYQA55EaFoObtzktJG45fTokPlMbFn67YZHjx/x4vyCrrVkHRnjBqstXWvxKXKxXnExLjHuhHXzjMurS55++QWb4QJlxXvSZoVTFpUjB53lztEhfrllOL/Ev0jw+oysG1Aj2lmaWcPm+gXbZ085++RjPnv/Ix598piLFx+RuUY7RcoBnSKdUbTzGUE2TkmIsxRacqwdU4UR7/uRfpTnHWNkfqDIqqFpLX7cEHJiPnMczBccHs64cXrC0fEhxhpM1zE7zvQ+crXccnZ5zXbrCYLKTCDC/nqt+ZgU0/YNtOqpIcCVRjzojVG0TsgOKXo5QzRFCik5j7UC1jZNQ9PMsK7FuRalFM6ANWLRkzIikTQNyTTyHuX8sEYakhljSwwsHuhDUaoAACAASURBVIjKSlOgmALeD+QcsUbTukaaHsZMIBGVNHubFDa1kVUZh+k4zFX1tStwT6dlLuctteu8hEYa+b52Ij3YKa9Ruvw9Wvy9qlWQroqziNLStR4ttjoVoEtxr3Ba2OhC4sil4C2WSxixGqqNIuo+u7t1JTYn0zMs1iY1p8xJcAY0SmcwkEIB51SN3+oQvTwe1dZkP0etzRwSAeKuYDxhNwXIQu/igzoD9R4xZQd4/Zz3KHLSXMaRAlAJYMV0DkuObVEUsElLHKWtLQz9AhWVeCcjRJsp72Z3ju8shcA5yaVMEluPn20qsl+PEpZbnIrxiYyx0pG7kpvQRdlXYucQQxHDVi92XVRPTHFNRuZvLLGDfLPaqEnVaVaeerGbeWmNl/GK1V6qzI9cezaXdYxBq1CUFHlqOJehKJd3OcPu+7+cx//s9Qsx6FIUw+ucJAlOWY
FRhAI66ZywZBIWqxFz7DrKqAkOF0euyiaSxFgrTdKBVAwwVRmMaZKJ0kdo2lmM8+vmoKMMooxXkherim9WfzldPIvKAstZpEJ55w9WH4LSVapUAthS3QYBioFJklcnVIXudQHpVMpCuczC4lDalEBrV20BoZAmXemquw22PnsIkC0TQLEnVfYFtU5VElu9k6AgdOWFpiHnilQLkigAXqEto4TZo0puqXa01qQgGchR5Ly27qBKizdHCpjZgqwhkAgpoqJ0YM2l+rTQhpwjfkwoRlTyNFbTOieU7DhSu7tUAE5kOFao1VZ099bsUXzLs8+50K+LZLYWqRWloqaLn1oS48oYI34cUVqYirZ48qS9ca//lw1GDDqdSuiyWaniHVGvGIYy7mUOxV0XSKVq9UJJBSDnKdeSQ0MWu7HF5DPL4ZtTIoSRnCO9H/HBU7sApZqIxpcr7XVTrrTxlzfpUjlJFAZDGacq6S6tfaWbqi/jm1HKMDs4wLiWru3oZh1d2+KaBlsOhgTMZiID9t5jjZ3AOgHoJWHVShIlVYB2Zy22sG9CCiWx9tO9CoMxYN1MWHNZT4dkWYSlo6WeAJxMAfIBZcFgiEEmcrF3K/Rr6UhLyuKDUJ55yuLTSNbkQqdHW6SxbUYVD8HgMjpK9UlwdjMZfmIcyjbStCVJYJYyJGXls7WVCqeRClRWVrpBa4uyDViBzw0tyhrmpsXahoMAYb3iIGXuKGgPD4BHAHz729/l3sP7bPqeP/mHf8iHH3+Ev1pjx4RFs16tWa+XbPqe1WbLGBUnN9c8eL3l8uyMF8/OuH3/VY5ODsAYoYOHQEieg/kMnTNtkmR3TB4FHNqWN195lUM0P/3hD3j3hz/gRBma2zfFBF5Ly3tjLaGs0wcPH9B0lmdnz3DdjNObJ2z8iL9KjHFAmH2GoKTAkRD5dS6B0k7abYgRhjGhbRI/UImjmViUue5pqfgHFTBEtmphUhbQjBxR0yyQ47RWAQF8HHaFn72fFEBRwfR7mfdpd35lYVhVr0gfRW6gC6Dni2Q6FYBuKlRkOdVSTfqygNydtrTzjsNWc/NozmHn0MkzbFZcnJ9PnX190mSjsFbYCqjMEDzJiLwqJVUCa7n6IWAMNNZgdSaFkdAPkBIueVQa0EazmHd08xnauakAUK9vfvsbvPLgPpvhmv5yxXpzgU8ad+A5OZ7DJ+WFNnN+ccHlxTXLy57nT9bcugX3H75GTprtZoObCTiiUkBn8UrTMaH3ihKb5WaXE6m6b1Rr6yJ3rrJxLdVQOZclLkk+o6ym6cSvqJ0103tLpW135ZxE6VzCbpSaQF6F+Gn6mIi16l0KCLHs+0ZlqsdlSjvWu0K/JPuoU2s/a5GOY3ufXeIjVYLDoGReye8lUVRaFcZQIOUApWAq974H0LFf6NmBZvJnI01iImjdTGBh20lX6Tj0bPuREBNaK7p5Q2bOMG5Yr0d87EsyI4BHSMISEaaABMHijbOvzqDEhAiQgezTsSRbMQemESrJVdfN+NrXvs4v/9Iv8ZMP3ucnn3w4NUNyWVj6RoFOiXGzYnlp2C6vxHg+BVLwtNZhlGbRzbl5fEK8viT0I48ePeLJ2RmfPXvCGDxaO2Ft5swwjnJ/exKfWApEzjoWBwfcuHHKydEhOWeurq44Oztju93+U0F5Tf5fPtOLZBppzOXalpgz/ei5ul5x5+aM+/ceYLVlMV/Qb3uur5eMPqCtZT5rWRwtUEbiD+894zgSvMf7kWGriOMwFfaM0TSNYzbrODk54p133uH05AYXZ2c0xvLK3Xt82vf0fY9L0LVzsjO4Zjbd84Nbd3CtZgxbXpxd8iJ/yenJEd1hB2pB0hvmR443Hr7GYOcstxGj5qyvpYnUsPVcXfS8WD8j9Nf4HNCNoZ0vsEqTvHjJ5Rw4e/Gczz+94kcp0DmDsQ1DTHhl6A5OaBaHWGcICYbtyNDHwryRZFoIBXaStyplaGYdzlqky6XC91sYR6xSdMaROyv2Mk6ks0PpQolSYlfgHE5TQJiAjxLrS3IoZIDsI1p5rLJoS/EEFPa8m2misqSUaRScHC145fiI9foaO285ObrBtt/y4stzQgy02fBu/0/omg7tHFfXK16s1pxdXaHmM+48vMeDb3yHh6++wsnRIYeLGaFfE4cecmT+3k/4g+9/n5ASfd+z3Axo1+AOF1jnaMYOHx03ju9w89YJq2Fgtkpsxsg4KIJv2G5HFkajwhXjELm6iHzhFY1Z89nHnzO79Snd0RGqSeiZZrO+5Iv3f4p/ccEBmpMbd+ij4a1sWTz8KpfrgednF/inLwh+pDueS4HNC8ARfRJLEIoHLFr8apF4OwYIYyYnQ9u1NLM5y9WSlCJuZpkddjSzhqwSY/AoEj5nsjVi5xMyzegJypFR8nd7/98vplABnpcS7hKnZ2GNaV0AOgU5RrbbNf1mTQ4BrRVD3xNiIJRmPrbpxHpHK6zddTNNWWTnKqeJFpd1YeokaeqTUaCN7K8lj0Ip+V5dg+s62hzRG83oR/pxoHNt6a4qIYHSxQ5JF8VQPVOUmqSRAEQBiaSGsq9Qy9N/pwKoSrgkhav9/Fry/Z2KQcAmVWIr+eYpF/CrWhvlRMqF8CJZBbUbq6hg9vJylQgIPhGzpvqjUWLJelVWnyAIahfjldNGlY6dOdcGSlVuqyjeKSUPL5LXCl5K1XNijlegThp15ZfAteoXvt+gIUJRr9X8vQBchomBNt3/PjmlzJn62spO09ogXmoUQHEf8isgwDTj5Ge1Ztp52snvp/i2qtdS2t1nOQ93sc0OZ5Gfe3zK6b4rTiHFnzqfJG+uc2SPfVf+nY+RkGJpQApZ1yK7/PsdICjy4BpEpeleynwrcZuRAUMpidvleK9AXs2zy9iUONAowRNSsQTTSpR9RtVmIWV61ACufu/pLv/06xcC6Gr3+Al3y7JgxKhdEpKQRAfsSqcwUsLqXOerDDZMbCCt6gKuDLIsQBNMv1NWTCjJtZV10Q2XRYaSKkou712yH1n0KuNDobYW/5YqExWkuxpOMg38/iUbcQl8K92zgI1KVcCgTMjC+qsAnWyalVYLUxdKtVs4WanJgw52+VJGldvcsaB26Fu9ucIYKW2wK/hWvepUaWYQg5jX5xr8F+lWgZAIU7AszzJUKU35PKMsxpRkRCH+AKUbWo5gLSgj9ytgaMJYWbQ6G7YxEsdEoxXaZCSdtRjjsApc21E3OGstrnE0jXQVkkNIDgTX2CLbKVLNUvV31uwNTfXpKai1VoS6nxRAL6coIJo2WFPMOsk7+mvOu67CMdIYpLJUQLmseUnuHf0gB9ceC4ucC0NLuk2luEvUpNikCqJfNs09QDUlkT2F0RMUrL1nyJLcmcIWTWU9TKBg3q2jODEPdmMxzSlUwcvLBpElec1RfBi1EeDMNdL0w7gW3AzjhJZtnRXKsq6Hv0HUzNLhtmlE8lF94mwjvn8xQVbFFNYYnDUiIzVGqMBaFoYqnXil+CEHpEoCqhl2nnkVTp2YtlNSybRWRMbaoIybAoO6unI5vLMSNqtUMQXYz0YTt
CVhIEaZ80ZhbGHrFQCnVR3GOIZxByqKAameaOUhZtF7l0pVUhplHUY5GmtljmWDThKgK+NQRkultHWYbg6zYzQNdj3Sp0tu2Dk377+GcQn+0X8LwDfe+Q5nl+f88N0P+Mnnn9CPAwtlGTYDq8szzs+/ZLu+KiB3Wesrg7+YE8hcPZ5xfeuY4wNDDo4xeaKBTIBFYNFZXGNxjeHQWXKWUOawnXPy8E382TWP3n+fn7z7Hgff+haL2zeJWmFLg5lMxucg8jSjeHF1yTxEDg4OuXnjJinDGC4YR09OgRRSscFwRXJvUaZ030WVhjuWmGD0xaNL58KyTmWul+eJNJqYDv5UqohK7/w3ClAySTV4eav1YdzNu3IWlONWALpynlWALqUdQCddshNCCirMTiWs2jDWclUWP4q8A95rUBMYyUkAHh0js7bhzvEhp3PL6UHHvNHkccvm+orN9RJuCsiUYmDetNy9fcibr95jHD2PvnjK1WYgjJEYFWnv6I9JC0ifPCmPxDSS8ogOiUZp5jNHM2toFzOUNeL9mHMB2WVv7BZz+nHg8vqce/duc+fBXbqDOa9//RWC38Dvy2dtxy2rzYb1ZuDqaoWm4eLykuv1lg8/eI+ryxcc6QOc09icIShUVDgaWrsDA8KAPHO9Y1BOBYlSCAhpBIRBkHIm5ERSqQRfkUSm7VoWBwu6tpve2+xJd4XQWK0TdkG1qUxYmNgdqQRudR/KKpF0KnIJNe35U6yRq7Rk11RhOsO07As1zqk5w76cypQkJ9SguAR+JDkjlUqYPRw113VQCnoiDyr/jXSuryCxWAvUeb1rjhRCKVRpC9mz7Xus1bRNsS+wkPCk7KU4Y6xMER8mlmOVf6WSuDizNxejsOyMUsJORs5NrTImG0hMNhudsywODrh19w5feestnl9eYJqGtuvwRpJLmxNGZZRKxCAAlSZjNGzWK8I4cvP0lO265/LqijCOpNET+i1fPH7MaBTrsZfC9BDRSawxNKUYm3OxesrE6AWgc64ks1IE1FoztE1hhhYmxF5Ctd9Brya3k+4hKKxt0Mox+oGmbdHKsV5t0cg+2zhhSDWtQdmMtoajGwfcuHMHbVsWBwcYpVleX3H25XNenH1J33uqF3LXNcy6Fms0XdcCmaPDQ/rtlkcfP+Ltt9/mm++8w9XFBedeJP8GUVU0e5tlGkfWg2fTb9j6DfbA4O4ec++V11i/2HD25Alvv/Eq/8yf/0v8+Mef8vt//GMuNue8eHYGDKyXKwwG51r6Ub7H7Xt3+Mqbb6Kz4vNPH7G+uqazHckAecRET86RMXrGmFDO0s5mzI+OQCnaviOlRL/dUtmlxrQ0ztK1LW3TYK1lGHq8lzhabBCkEYEyhnEMrNY9G5+x3QGtW+Bcg240R610mk8pEMYNyf9f1L1Xk23ZdaX3LbfNMZl5fXmYKhRIgg00TYvR6uhmR4cU+gF615N+kvQrFIrQox6kphhqSkEGHUgAhYIrh8J1eW+a4/bey+lhrrXPyQLYDT5qAxl5K/PkOdssM+eYY4w5CPiSEQZn2xTAGFCKqUgHdQSXNa6vvs9FaYPCx0zjJIbxPnPYeabNwGECnCViiT5xc7tHR4PVGymSxkRsLHq9wC4WdBcP8G7JF693PLvec3G+Zn9zxXjY8ejhfVJ3xuLhW9zc3jB6iI1Ctx3BNqQEY3QYLtjdgjKvaNctb779ENsZrq62hNET8g4TB7TZsR8u+fLpDRurWTWO//M//gX/+NlzHrz/dcy9JbSGm9evef7J55xFy9vnD1noll9tLM+nno1z+DNF8kvyvsOMGyJ7oKxlRorrzhSlA1II1dNEUhpjGlCj5Bemw7mWEKaSPziaviXrTMjS8V6HiZxKh0alSnOpao4isUYt4Av56FSqyQlgIs0Fcigsv1Qa8ORcCk2lUKCNFOTKGm+1gLqZjA8iK/VxLMV0hRt9WXtBUTvFikWFdo5sG0y7xBmDtgIoBozEC0qVn2tU9hgSXWvpTINSmdtdIvjAMI60TVPmRfH4zmIvIsCI7Bu6gCgVqDkWLO/mIvJdYjUyxWeMuRFETFIkyxFUOPqTljKsEAdqXq0yISRpCmKkIZLkOF5yBl02MSiYgLyT7NMlTygAT4iBHAQIPQUEK5RQfQ5BHVVpBUfQJTCc5Yslx5bOzLmw7qr5VT2bPFsYCEh/qq7KM3ZQPQqzOfoW1fwtpiQKHPGgEiILpkiLT/GTEk9UcK6OS128A4XJRE2XKhB67BorsXEsCrZY5J1ZIfZGsXozA7rKeO96AVbQ7sgQ16VolYptVi4uf4LWVBGxKjGcKkQORZzfVymEEZsiudh2aF09m48MuZBiWV9LsUurStc84gZK4ggh8WfJXXU9C2YVpKLm4zWmklfock5iqaWKPVI+AoZalyZzFZM4zSLKY6+EU44Aay6eiv/U8dsx6EDAnCxeJsespCQ/MZdgL4i/jZaqH6r6gtVhWyijiKREzW+Vyaow8+YgBUyuAJQkMNWUW5arTIUIcqG65hjl/IosTZuWnKMkfTmUCVHZVprgQ7l5J3A5RySe+VoFNMhRzltop3rmXQiYUMHIJA8CYc4I0qqPwbOMMvmqTSTU8Xe6oLppPodcQd75UCpSqZMVvVcFeJGETzwKYrClRXiV1zBXHhTgUwYVSzIhLKJ5wueMVQGrNY2tgErEDyPTNEhbeqMFoEPaMSeV0NaincJpCUaEzWLQWeOUImZTGEqZvumBQM5imG6dxbqSPOtCXdbl+k8WFF2Sa1cbLlRAV6sjrVQpMdSMMj5zRBb1WXKt8akwK7I0J4hZFvcpRogRm6X6MbtQ6cK6qc8hy/iLmZkCK5TuDCrgp8Kgo4JCFIm0MCq9D2gt1W3pEDoxTlIVC0WKlYxCaydkylJxKjnjvODLwii+KqQwL9xwYrpppCKmqhFpUtjSlMNahy7gXN/3tG2Ltg0+W7RxOCPehGVfQyuL0XXpMBirZ5lmiMJIatuGEELZpAq8pg1ZSVVbWIVpBlyMciKFrWNYaYTDaGbgOZ0CbOSTeSXk+LrpauVQqiG7jqTMPE8USiqZCHMqhiABTKkWojWp+M8pHUlxkG5h9XMEjsRoLYBX48r6UVqX185EWU4nK2lL75yibS3GNBgrMt6YFDkJHTslRdbCIDNW0/Qttu0xrhUPsQmGYHHdBef3F6zPOvg7ufsHBn7w44/4/NMv8OOBGD1Xh1uuX7zg9vVzwnRA50g0WrrUdQtcSgxXl5wvVgwvP+XlzwwLdYtetmz9Dq8CVkUWZz3rsyW6P6M5u8f64QOSaxhKcOlz5Nsffos4BL745HMenl1yf30fGmi1JmsxuPVxwvYtq4cPGbLi1S+e8o23Gh5c3KO51zCME08PzwQUzbJP9EXmoUyLsQKyZiXmsDkZSRGTEulZDmVdFK+UXBjUMUxoE1GmjLUknadjzHgVUKUDm/inijWDUmluVgRi5jtXf6n7UwmgVBXZlMAyH4E8RZZAMidyVJA0BieMS8QbRVtQOpHwmByJUZoLVPZXVMJI7pXhwrU8Xi14sGpY
dxnLHiYlXnObPXEIVOn9+2+c88bDe3zrG2/x7Q+/yedfPOX17Wsub24YhoTHkvRxR8kJoo8QPCqXRjUpYZShbRq6vsU6hy7sxRSj7H/qyCbebg/0/Rr0Ctc9ZLl+k7YXOd7Vq+Pr/uqvfsgXv3jGbh/ZHSaMzuz2z3j16hVfPvsSZWExGlrbE0MkTX5+to07stx0YabUtQCOPoDqBIBKuTCyVcaWtUDY2pbFYkHXL+i6BU1zfG+R8NcxVcHVuobV/byuv+X5azVXYeturY/GvPNaPccAuRQsBdU9+vlw8vuUShxwMv7q2k/hp5X9S6Fm42eQpj+6jsVcPXCZFd1SnFLlHsq9UjmXAlRlOUgQGpOAmlopeSbBS0FNWxIRP0xk70XVgKF1nVQjtQAibatpm8BwGBjGgzRDylKgyEm6Wtej3mtJJjS1MZTVCnIkFc+w1lg6ZdF9zy5FXu423PiBUPfjWGMyadqhc0JbR79ckbRj41+xm0ba5YLHbz7hh//4Q37y0485XF+R/IDKHh8juRQpu9YxhgmnYNW1NK7BjxP74KVQKo9xBv/8dOD2OpOmA9ZohnFEEela2c/kmZrClhH56pzDIc9C/HMl/tG2YZFX0hBHKTabLSlGHj96xMNHj0AptvsDPmeUdTx6620evfUuY7I0TYcFsrtmP1j2u4Q/iBG37hyLhWO57HDOsFwuOTu/R8qWn//853z680948PARH3zrAx698RZXmwLMxFE8jInAGwBcvX4m8Y1WkCCOmf1m4nYzYLoF3/zW7/Foveby+Ws+/+yX3Fy9Znn/Huu+ZbvZMm1vadsFqtPoxnF+f80f/tEf8v4HH3DYjuw3B3a3gzAVujOaZWQaNgKn5IwOiaxbfDb4IHYrxrZSrNOhJJcCvFknQGDbtjhniT4wHvZMh73Ip5oGgyZMnmEM7KdIyMKQMNaiG4dTmqaxxBSY/ESYDoRhh0qB1XLBcnWPwwSXKTAMvqwZAtjHlLBGS7HSNhKD5AzZk3XGqsw0DTzb3HB7s2E3Jppt4uzRE9YP3y/S6YBqFTGOxPHAkzcf8wfvvktEERK4pmPcvObp8+eEEHjyxhtM08Sz50/5QbFOWSyXqMYT1cQhjGynPbspYF0LaYEPsBt3bOPIOq6xfU/bP2Cd1gy7iX28Zdpdiw1IDKi9Zmgarg6GT/7+GWeXigcvRmJjGcJIDpl0GGlHcPEVxMxmd+B2GME12L5n8gti+4CsLNvtjiSBNI0x2NIh0ZW4NRSrEYxjigltBpS1tM0Cow1+OBQyCOzjjvGgWa2k427fLTkMU1HUiLXJZITQkfxEQqGbTpRQs/nFKQxDATqqjU1CFXWRn0Ra7ccRnTOm0UXul3FNQ+sazs7OyDlxfX0Dt1vSYSSExGGIxDCgrTR8MUaLiiQlyYd0QjUK0/X0KtO3LTpPKGVpjIIU0TnSmER2ijgF0nQgjI6m62isptGGmD0hRbEeKhYNSmtRTdW9ImuRExcLiDzLJKMU31OStQtTNyZEESZWR1Zrco7UbuYFuiJGhQmR5APZR5Qv3S6LnHOWqhZ4RCsBQVMM1E6tcr4UD7fa0ELsg1KKczFVYUTmqUpsLhph2VuqSmPGHQqoVqW1xVfZpoRLCHipFMFokrEorLCxhYlQ4pE0X6lKEa3Baosz0iRRUBAhpjhrsFqRrUi7yZkY1cy6CoV4Y8rGL+cmP6tEhsqcT7o0ptBazipJFCMhigZdcldFAcbK+c4sqgzFT69695EUOQUB5BTMHUq1nhWPdd+WPzDkwuY8Jcenso9FAjl7TE44I9cdi02NSqoERUfANYc4h1Ao8cyVqVBIU1lD0sUqRHIEkZImks4oI9eWJBBC6UoUKtJypWdPflXisdlKIx99Fk1VJaQiK/41CK4EiyXHT9WSg6P8u2DGVDmm0oXc8585fjsGXVaEJN3sBOSqfmsFCRVTLTGkjbmKyfFHWLpcfCrJsdyw2fZFVaR5frn4dPnKcJPnJgBhuXlkkqpdViuAVjzuYiKiUKqyHRSVApxznE3yaxOII/3yKHUSRkMQzbq2BfdVhKTQSReDe1WSdzk3AeZkscgacNXv6mhkLpdTPq9Qi6mPMGtSQYEjGWMidTM4Vd3kkpDmWAddBfjEdybGSAxg1EKuuyChWekKoQtMakvTgyygQ61kxyjVZj/uhT5tHbaXLqwZ6aSUcqIpnd3QmsZ1cu5awAY0ZGMwnUyaqbDLbFQMY0RZhZ8izqjiOVZkf1m6aymlpUqvpOpCOKGrlqpJrElMYUhWyqpC5KQ5KZLP0rI7FSZLhCklzORptZEkJ0vH3Kwk4Y8I2OazQmdTvA4KC+/0OajmpNoh711ZMDklJi+Mnjtst/Ks5TWUjSSjMQw+Q6ieQUnAIVM8GWI6gpW5BAQxlQ1T5lUFvWTM1EXheML1ngpjTLNcremXS7p2QXH0LMmrgFRaObRtpCJZFnSDxmlbqO4ihVLGzCCvbRTGOvF3MaNsOul4PujSRdUgkjXFnLTWBU+mkpJusfOcrGDkPCXmgOD4w3mVJaXEAY9XMh9Tirgirza6PCdUoctrVLakJBWnnMWMV5VqS9bih+QquJ8zKXpMraeWOS5rbXFIUgllFbrRuEaJwa/WZAVjiISEMMGyMGBiiKAFyNYpYfYDndsQDontDcRouL9eS2exE737D37+9/z8s59CUriceX35kuvrazbX15jsBbiNiRSVGFb7EcKW6RC4WK1w6cCluqGLT3n89gPOe8MUD6RpR7xMXCW4ePQOD7/9HdZnCtyazjmG6LFGcXax5u133+PLLy/56BdfsFye8+1vvlUeS8IoWQ+0stjVOeuHb3D5iysuP3nO6h3Hw0fnbO7f59nrp4TDAWtsqWSn4pIv1S9tZI1rjCFlWwLAIoEoVbechSFLlq5mKidseVY+eGRjEtNlozXikSDBmixPieL+eZzjIZVhe7J+J9lrauBQYWXKdwGyEzkfJFhOBp2c7Bu5rlAZHTOYAKp4vKlI1pOwLpMG6+iannu65Um/EC+h8ZrduMVvA3pY05gGhoCJxzHxJ995jzcfnvHWu2/y3vvvcLvfCLMpSVdhlQ2nHpZh8jgtsi+DFT81rWgbSWCNFYAnTLHEoIqYPIYjqHVztUOrPdebPZ9/ueH7//AZKU3s97ccdrf8d+V1f/5nf48lsx9hP3l2t6857HccdjuCP+CwxGHCK03wAuDJ/mDp+qNvXtc7ORdKFVaVdUWd7IU0hYEtsvPqReespWlb+r6nKZJ96xyFh4+zDRWgq7YYp3KUygqolfjapCkrLZ3UkTjAaAHJUk6lyCBBbarBNRLnhCTcW6OLROJOsiBrippbGTKDwKnEDnU9TDFRtdM9ZgAAIABJREFU+1rlgsbJdl8LdLmwU0+r5ydAbT6VvJY4TwnbuHrD1Mp2GEeMtTTaMubAcJCGWX23wj3s2O62bHY7cs50TY9dWPp2ZL/fMY6DfFaMxBBnUBGgbXqRfCcpwWKsgFIodBLJutHSeTOj2ObEj3/5BTfJc3l5yUBmDBFjFU6bWSY7es9
2P/B6u5fu3Lah7Tveff+bvPnuO/zjD3/IzW5D8AOGKOu3k73XZMWidbTO0LQN675j0S+YmhFb9tsYq+2GrBM+BKbDhuvhVrohGlPG3Krc88Kc1sLuEQ9TjXOtNOTIMI1b/LABEqOXJl62EU+q9WrN40eP+Jff+x7f+c7v8/T5C/7yb/6OL19cMvnIeLnjdbpmM2oO+5FeGxZWw2Bx9pyLdcKeLbl33mBs5jDumXyk7Xp+9/e+x2dfPOUnP/uU7fUVn3zyKY/ffJt7j98gfvIp+2EU6V4OYgEwj5iDlLCypTWO1vYsQk+XF7z/4e/yzoMn/OP/81f83//X/86kFP1Zx6IzhHHL9vqaRsOyU4Q40riGexf3MUbz6SefcnW54cWLa4ZR4clM44EwJRq3EKkfmWQyMTl2Q2YMA8YZMuKlG5Iw8a2xGCuSrVT2+rbpcGvNTkkyKkyKTAC2EYasCNbJOGxbbNvQL3q0VgyHPYfdhvGwATxORbRNrJcNjx/fYzvAfr8jhoGUw1yfRyVWJtOkgd5KwS+GSGgh2pYYAtvtFeN+T9+f8fjxY2x7wereG7zzta+zvlgR046+j1y//iXb62e88+23+P3f/TZd27Hsz1jblv2LSz7+0Uc8e3lJiiN0LaFf8NNPPyMohfeJYQqECNY09J0Vry6lsW0DOCafycqwHxPrvuONNx/hfeb5s0ucW5HOHuJyojWKlXWc9Qus0ozB0z96wPn9t0WKdnWF1Ra9gDHvCcCi6zm/H+h9YAqekBNNTHSriTBeEZ8e2Lz2+MNAtEhMM4001rJaLEpMoDG2wVokhnINbb8AMsZKXJwtaGtQVuNcj3W9yLOTw08BVewudYioIH7ZjWkIyhBPSA5SQDlK5erP8knjkZQC4xAwRdFC2fNTTIyjx/vIatVycf8BOcFmO5LzgDFqZt4lrOSzMYi0vvSOQ2WmPBC9p9UBFVoaFYgx0NhGiibTRA57VDrgTCZoKfgrP6CswebCICrx0xSCNFzL4gWfkdDLlkIWM/hWDEe0gD1KiTXOqfVMlcnprEAFdMqkWHx8jYCZYvcgr50OI3GYiNZLLmu1uFWVfcZoK2zrUGP8LOzuLExsozQpSxMwoxXOOXyMTFGa22nbYI0wB6NTWO1kLy1pkipFoESkSmBrowfIpXCvcEnRJIXFgrJ4bfHKSI6UI7kyAykNmQogRE6y7uiMUaLYIJWCAhlnGpSVCHKaEgGPIsueqBSxwBdaIYSecpuLTlXIGqH83GiCUSQjAN3oJ5Iq+YjR6GKXUPNIqUyXIp4SH06dq+9gUURaIcwYUy0wyjzh2HW2glpKWWHrlf0tl3ghJ0jCVcKrSMwemwKNhoMFbyRXzwkB8E0iGJGwa2VKXwM1f07tYyCFrgK3VbWjVoUQFkkEshYrjVjwB8kj6nkf5zC1MFrGWNL1OovaUwm5I2YlBcQkRflqWVKLrYK1CBknqlzy12MMBupoWZky+r/Q5P63Y9AVEOEOOkENUAXBnRk7MWO0mL5bN3PM5tefUjLniT2jcnffXxnNcUQeAQ75tyLpQsHNJQhOleIpfx9C7RBbUHwldHKtLKiE1k4WhYJpy/JT3keBNo5KG03ZlPfWgCFVc0wxBgPE/FKVBc6nY0cTGQcJXaRUOhc6bDoJtssPUixSqXo+Fd49ZXX4Kq+VRE8pJTRro1HKCfUShTNOJk31b7Mid81aqga6ACunnUpSki4qObWM+0yaRmJMjNOIchbbWKDFTx7XOJSx0sHRWrIxkqTUKr6OUlHOCRNl48tZjEinlBi8dGdpkIpFlVymHAuDKqNTmn2cZoZBSSLmpOvOuDlq1nMyTKV7XAyRyUd8yGgt1OkGqVSkVAxSs0i1pxDIIbFPY2EVlPerIFA5DvuJ2r5ZuuWlsnhkci4m30Dltd7xh8sZY46zU0ZQbYggIKz4JRbTzCAbYrYlEa1U8xMWB/VbZqZsC3NOKvVohXOWxWJF1y9YrdfYpkWX7nwxMVPdUVI91NpiZ6+tI50bEo1pyljKswdYymKOH2NGq2KqmiARISmiziIhNSKhTgjLsX7PSuZNUgqfmOd7Kt5RR7+9Ok5lXVIYmd9KY7KW4FNbvBLJX45RQAfn0GRiCoRxkqRZzGCKXL6A7iqj8oRWCVs2KFcM/01JXJ1tZo8EpQVEUMWDMZCK9LLKV8SHRpgs8m/XdKX4kYQ9rATs1IWB3GmIE4zjktX6AYt1wz29LACCHF8++6UYwUfN7esrNtdXHPY70GBtIx3cUqYprL1x9BD37Nlzvl6iGGjsyPY1vPtmzzeevI1PjqdfXDO9vuZwGCF4HjxYcnZm6JrHWLMiB0XbLFBp4u3Ha/74X36Lj3/wMR99/BHrZcPbbzxm0Tfk5DHaoZSiX5+Tm4ZoPLeHl3z59MDD/JDl+YL33niLl188YxomdOM4DANTvEapBqWObAfrtPikZS8bqFLzPcsIJV+S+AltZL0OsacxCzIKH0a0Mqz6NSGIp2pWAVQQMLsaFpajUXpmOamyTxkl8iWZqrXiW5jcRQaRiKA8mjh/KTyUcWq0FvkAEhj5yeOsYdGdE/yEbSy6a3DGcJ4U6nDL7X5A2wPaDfQt+FZzdrYQD7W2rzsNDx89oHMJHyZuthsO0wTVViGKTe7coRxwRtE6S2stNsv+JN2rRXIlNbXC1FJVApkZhwEQ0Ox/Hv8anp4sw1t+4/E/6Z/IP+4D9xVwVr7+eceP/vs//mf/zX/+OCn3KnXnN7oAgBWkA07kKyfreonxVEnwqw1N9USs9hhq9rCpQZ2gbUKuKUqB8r9CDJ53nhpU1i2vEvZmH5cSZGYlIHLOzM9Mk8klZjp2UK3XWopbNUY93oySVJ3GWce4zzUNdrEkb7P4k3Udy/MVbdthbcN+HPEhMIwHSAnnOpzrEFArEEO4U0Rqu6VYUSBhVUjH0MeW5j0VEItkXt3ecPMPf8/6F0uUUozBsz47m59LCgmjDVOGm9ut2Dk0LVEZnrzxkHsPHjJ6j7aWew/uMTaGsN8ybEQOa3VTTLJN8fER5cM0jsLANpKMGHuUKccg9gg+CrNKa41FPGChFlKrL2ZAm4ixRa778CH37j/EaM3V5a/45Wc3HA4HDmMgoumXa9brM773ve/xp3/6pzhjeHl5ySeffsqXX/6KT5++wPVrglqwz3sOoWO7GbhYNpzdP6NfK5Lx0EHf3OONx/fQOvPs+TO2+wOtW6NNx2azZxwnnDW8evWKj378Y2Em9T03263EwTpJU49y5BSEdUhkte5Zrs7ougW/87UP+NN/9W/40Q9/xKdPnzKpLPeLKCCfH2mtJlrLlEYmP6Bz5sXz5zx9+UyYsJNiOkTClMgxkuKE1gllFK74vzV9Q0qOGAwxFuYQE94HabikDc42tI08T3LksB8l6io2JlqJf2pGGl3Z1uJMEhaUtri2LX7WEpdZa7g4X8O6xaiI0YkURowx7Pcb9gcpQK9WS4bxwDSNKDJd23C2FLXCcrnAGIefPNNkmKaJ7TTi48S9J/f47nf/gPfe/RZGL+jac+7df0hUie
3umsDA/vYlEcsXT19xc/M3LBdL3nz0JhdtD7sdL16+4tnzSwKK5b37oC0Kw+3NDZvtgcVqwaptOO8d69WC5XJB17XodsEhGmLWDCESkqZfWVbnoHTD2dlDcshYZeldy8I19NaxcC0qZaaYadcXuG5BTom3Hr4LOUujjpRoW4ljQpgIMTAFafIwBZEtR/+QGDYEH5jGS0Y/QQgwBZQDbyeMNoyHCddFbNvhlOSrOkecc4TcSC6lIRerEtf2uLYThYIHiDOz2CpD4xx2ClK0j+UXZf3OpVBMroXfAhopVQrfShrNpkiIEYPkrJOfQEmHXqUpzEnDfj8UuwVJ7I3VdLahbTthhwXprO3HogAzSYA/m1Em4VohN7TtCo3D+xGVRxY99LTYEUYtBQpnLK1z5Mmjs9gEhBBJKKISSwCHxLhWSz4rhRKQ6rPIO2tHcIVCGXXCKtfk0hjvWJQXhZYqxaxEUWqlRA7Fy7mYoNc9DKVQOhVVF8ViyZTCWJ4N98XWR3JIydEhxCTFEiixuDD+Q07S0GEGj8ST1rTi06YwkpfkWrQtpB9Vsr5KXkTO8bQYp+o1oKiroaT+Rwnk8YvjGELiz3nfKESQgujO+z5KeuTU5C4XMCkX0LdCM7VgmSpZKCEdTCkqRpWLYinP5yI3swBuZZ+vrNCMNHSsstLTIrVVwvz2/qQJFcdYQn4mBUeogNoRHNQ5yjpesQ+E/GFUUUUiz/f4mdXeSmTescjbZxn2yfiRe6xmcFDyOWas5HieJ3HUHO6cYFIlFpq93Wt8X0FihMwUUiSWZiGxyNsTFau6m0/U/5ojnq9gXl89fssmEUcpaDUhvAO2le+hDLqcBOiKJ4vXbzpmD5f5XE/AvHLydTAf/7t8VwoVVZFtSHtoVZomiHF7AV++8veibhVktgJyZfWdUdk64I21pELHrR55Ak1IcGULs45CgVSF9pl1xCRR5+fMnNjMi1q5hzHmObiuMj5pDV2Q4cJom7XR5Wi7izIYixyvAHTSocYUcCbTWFk8jZHEXxk70y+rnFX8xOTao/fCsgnSZlyrBZM1DLsdBy+t7ld9jzUdUSnGGDHKoCxY53BtJ5+h1by4KYTmq7xHm4kUJqYU5LlNntZkojK0BpyxBaQrLDNdksIk7NwanFOShVY1x8WJKo1JxBAkgM+a4AWcCzEwTV5MlI1BT4G0PRCRBT3kOAN0PsrioUOam3/kOihPhvI4+Zl1MPvAVV/Dk3Ffk5v6VXX6lZFX59WvzYGUqN1pktal14oBI2zI42ultXSe/SFKM4giBY4p03Ud5+dnXFxcsFiscK4tG37R1KcCQGtdOhEqEp6cAymo2X9Bqj+yQXjtRNpenrXQm5nZpykFSbZyBfnLfSkSd6rUNWZp/jJv6rIoT96XDT2TU5BzqFKCnAhBDPZJoFSR6iqpuIWUyEok7zl4UpIKmzVC0Y4xoqFITBM+CkiWlVRja4ODrFSRtCpckccIswRyzPji+aeMpXGNAJ7KECm+nCWIq2tLKVXJNRornodlvKOq75TILcWE3KL1fd772rd5+51HhT1yXLZfv37NFAJxUgzTyGK5ZLFe0bYN437LCz8ypEGo7ykXsMuhleIwjPiwICbY7Q9cvrpite4Zw54vvnjB/vKWs65n4CXx4++jWs+7fWJpn3DWnhNUYOv39L3h8e+8RRN3/M1ff8Tf/O2Pyd9d8K33zjAoTAPeKJzrMAvH1A4kf0vavya8vOFB+03evniM3RpevHjFLkW8ioRxB2kHuSXYSJ7AthFtAilLgcCYFpOXGNUBCmM86BFjS9dIWixrGt3hmpGQd4zDRBoHGrOmNZrMDpHaa3I2d+bgwtnCjpGfzQwtDakUHE52MGojIBmpBq2Q6qnxqDyVwF5B9RIxLeRiyO8VvWtpbYOzCmsjrfH03tPbjFWJdtFy9mDN2VnH1956lycP3mb8/k/5/k8/oYJlP//iKQ0j6ZPPMT/6OZ//8iVffPmC/X5iGKXCG046Z7aNpXVWWLKSJ1A7aAtQkmbJiLC1ZTwPw56/uNH8m/P/Qhnw/0fH90dzJ15JRd5JWaOrf1BKaW5s9FX/WmCe7wld/Ovqb2QNmJsCJFkHSZWDyRwjSLGwWlgwx+055+M2VAE4jj4/9XTy3VDw+JnzddVwUc2/r6dY3yFzd2+aPfLKvUgp4Zyj67q5U12MkdVqRbdYst3vud1sOBz2kHPxjZWGCrnYQaR0BIsXyxV1o82IVDeGNINjwnyK+GmcO8ttN7fc3ATatqVppJN4LcpV38eUMiGMJMB2CdP17Hd7Pv7xx4yDZ71e893vfo+nn37KZz/7eGZYh5RRZf6nGEElQgJbmrsMw1RsHWTOSFxX4gfEP04pRYiZcVsbmxT/5qzQ2mKswzWNgD/G0HQOMpgi/bm6vmUMiX65xjUtf/hHf8R/+9/8BxTwF//pP/H33/8+r65uULZlveixfc8bjx9x/8n7XN0kLnnJ+UrzzpsXnPcw7SyHG4dKifOLJzx48JD7D77G7XbLGCaG/cg07FgvW9KUGcc9T3/1S5689S5PHr7BcPCMwwGtEsEfO9kb25DJaNOwPl+zvn/O+aN7vP3oES8/+gl/9ed/zueXv2IYRpYYOufY58AwHMqioziEAT8N2JQJaSosRotTLV3fMOSJw3iQeICEDyJJb5qG1fIcrTr2+8Dt5sAwTMRU7FiUwhpJ1MRwX2KK5AM5BYxC5GbWkaL484lPr/hNUubcOEqjjJTE4uXexQXrdY9VLSkOkH0pHka22z2bg8wZax02yn4hTSqcsIK7XrzSfGQYROKodWlw0Go+/PDrfPCNN3l4r2PcHRg2r/jy+Y/Y7QcGH9hOI5c314whsL2a+OnhV4Dm8cNnOJV59sUnjDthRzZ9z1vvZVzbcQiB3f6AtZb1sqfXCk1iQeJ+a1kvO2g1m+TJ1nK93XG73XHYvWIcvyQrgzWOrulAGUKyDJMma82YIU6RKWjGXy3EtiWDm9nHMgtiShwOB0Y/SjdGJUwvYzXr9ZrVsuXBG28yTiPTFPCHHXkSm5moYAoRTWC332L9xCIFkpbxoqNFOZFgx8Kq8ZP4wdnQCUtN6SI0PK67WR+VDsI4j6X/YGb2p8p1gReli6zLqrDPCiQnC4HE+jky+oGQxItMGpMoIOL9UIqNYvXjnOPsrGe9OpMYeFyw3+/JfkPGE9IeHwcW64433nrIoyfvsjsYdG7IURH8wP2Llq+/+wHvPDhnESxf/OwzfvDRR7y6vkYzlm6olmQ0efJiBRCTfE9lR8hZQNIYiDkI+EgplGfJl45glTCHlNJzbpAp9kHIfS4+IpJf69IpNgPaSkfaGm9kCBw7yeoC3mk0maMsUyVhmEuXTlXWVCEy+CTAn5AYZD2OJX/QqnpU6zuFL7nomueVYmuxoPqqp7cw9E82e6VOt1mqHY6AEcWf7CSeqntzzhRlhwBMcp5HNtb83gWwqrYU1ddOzbYIleFvZj80TvbsPA9a+fGx0UL93a8f6jRGOTmfahNWX3PHA+8rRwXY6u9rE8jZ1qEUBO+qE
8wcY3IC0NXYQ77k/QWgqz8rXrAFCJ7B3tOoShX7qzkuyvNadOd5Hm97GSCVEFXy+yyz/KtXfTo+6rVGTuKqO2/+2x2/JYPuRPLzG0A6Tk9WlWtWtWJ4DPBOu2+or4Bz9cLqZ8h9KQ83i66cVENG0TSrmIunlyrVa+lSQjH5VEbYALPq767F5yznON6vY8AqCXPpSIpMIApyXkEhMTwWHzapDmQZNUraA1MCNlUfcq7yG3ng1h0N91VZMETSVdhARbJjijlnPc7vPymAnyqGiGURKM0VhHYZ6FxVLpZJq6WCEbOwxepCU8B+nHF4A2pMwv7qWrJSjH5k8uIf4VJLayyma5lG8UuKSiS/WQsIWLu0aVsqJylSdc+xPEsxdoyEDMkosjHzfayTIelcTL0l1eFkwaAg9BXATQXh9jEQJvGPiUm6vKSQCmPKi0m0MTLRhlE2Eo6a9oig+qSEiqIwtVrYZwpVPFfqvDiOx1luVSkPJ7lOHT+ni1kF9u4sfCfPWKpDRRCbVanklGtNmRyP58FX3v80CXNNw6rruX//Phf37tF2nVxHGIkpoaM9duI5mXuZzGEcCIW5llIS4LR6DZTNMKTSAKPMv1yebYhRNusKSv6GhXz2IqrT83QWqurnVKsk1QmwVjAKvJ4SJKHCR9cIkyqKrBF8kdjWdUjjVQH0U5oZcCEkplJJ1NZhmg7VdGjjRK5NSbyKn6BCADrnGuHezjJjYURIhZHCri0JHpzMfZGeTYd98UEsY0cBWZeSgQR+OTkat6CxlrPVkrP1isXyCIhstxtyarGu5eLinOW6QxtL01gOhy1KRV4+vyROEZ/FbFkZqRLfbG6xTtH1DV3f8KsvX7DbbdE2MR48Bx/ReuT+8oxHjx6wMND6PW3Ykv0BjWOtDdouSSjef/sxX372lI9/+gmP7z/kzdU3eXK/R6MJGRZLy1tff8xPfrhgmDaoKcD2BnP5jDeevMd7D+6jpsynNzekGFBEdBLT+BAzhzBhRi3VVQJN02Ktp3FAI/IlqzJOW2zTEpQC25F8Rwwyjxvd0WDIUQBvkSCKJ6UElJrTYdoqZB2af5gr+sKpREBmp/jSxcoK9RaVNUYHrAkY59G6sIizImcLyZOSZaUN1sCF0zx8dB8QBuG9dcc6aNa6YbFsufdoydc+fBvXavCJs/V92tWS4WQ9+MeffYYKOw7DQMya7T4wTsICl+1CPqsenbMS3BT2tMDDYntQd8PaGa36rOaUCNPA//jX16RsaLsVKMswRXIW2Z7WihRHcgz8+D/cB+D3/49f4TTkFIh+EgmwzHLpvOkcZ2dndF2DJpKjrNlN07BcrflffncNwP/wWcLH4jeidVnDqhelPI8qPagPtHaC06pWio8dweAk4DxZjeZAkrsFlNNK7NHoX88xi3y2/J1gcPk0E5w3hlzAuVJyLz52uuw10oFV/doKeRd0O/53XZtlL6zBL+R5Xb/7d19936++51Hae/o3NQasQJvWmuVyiXOO6+trdrsdTdNwcXHBcn3GcrViv9/hvRf5dq5eQsLGiydjd7FYnQCBksQHHeZOeKefX/+uaRpplKAUwzDMVX1rHVZLV8wY8/wcq1/sbrfji8+/gKxpXUMcBq6upGFN23VklTkEjx9GvPfEJGy8pnU0WfaRcQqM0ygy1lKkrWBd1zR0bUPOiWny+Gmai7LCSJGYUhsB7PaHA6+vXhNTJHjP7fUlkw8sV2tczKzW5/zRH/8x//bf/juuXl/xZ3/2H3n+7CkxCMB0c3uNz5qVsUQ/wLTFxEint+Rp4NmvnvIrf4sfbsne45Rju79i9eKcHBWL9UpAsPEGpUZWq4bp4BnHwDTKPbg4v8ftZseL589prKE9UQGs1hfEmLFNw3K1xBrFsNvyj9//W5795BMud1uMyRx2NxA1qWlhr0q3VkcMwqazUr/Ax4nV+kxkwQH8KI0Cut6So2Icd0SfiFZYNn2/pG2WwIHN9iCAbBIrBmP0LD/0XtI05yz3HzwAItYYzlYruq5jPOzZHw74mBn3gxTwatE3CPtYGnzBOAyM4xmayGF/SwpjkVslMg1DcPhQVCBxAiUSuhAGUspsN3v6vsN7z+3thhgTfb+gcYaFVbjdgU//9m/50XBgs9ux2+252dyijePi/kOUabDtgt4tpGv9qNntPc/9lrOl4fbmmuj3hAQBz253Q4fsW13XslyuaBuRpF+9ukTlxLJvcdYQdUJ1Btd37MeJw+TRzhEC+JgwxjENg0jRUmbhWi4WS/E3CwkfNS9eDwxjQKHo2obVes1quSTGyNXtNbe3tyK/NBrbGJQ1uK7l3r17PHhwn7P1ObrpcIuVkBAmR3AOF6XzePQilw/TyB6x+2m6jjQahhQYksIserBGmraFwBg8PklTvNF7kWkmTUqecfL4EpcK6zjNNkSzEmZexDXVvquyqFQBlGoDoCNQkErjnmILozPej8Q4onXCNoBKOKfoe0e/cISQ2G48w+HA5AeUnkhxwFnNtz54i3/97/492tzn7//hU7Y3YnFgcuLBvQv+5E++x5/+yX/FhbnHT/7uR5j/9X/j//3rvyJFxHs6ewgRW1hmOSaICZWKAUNMpBCEoZYDxQVNdqMssmGqf5f44ZARdQlIHpVzlMJwiZ8lBS/3JSEsNFPA0AJYxZhmJlxS4Mr5ETLZJIySWOW4zxd8rOTlqcSKShlsybOr56PKuWBmBbxTao5r0pyvHXNQkNhepWORnUqGqWDzXDir+2mppimR8h5Z7XdzPvnYo6VEzsxAk2xzBYzT9VzUb9ipyz0teEoF6LQ2sxBkBgtP4pVZhfQVnKjuuSmdkkiOwNgMlFVW3gloWY9TUK++Z0riWXwaNxxfd4yh5F6dKOU47vfH+5NOcl11557NMUOUuZQzQhabVV/HJywx0Wlx/S72MP9UH/GaU4BVBhJ3rqc2UZEvyedjnkf+V2K1er9+w0M9Of7ZAB0g7LVcwKjTD6saZVWACjFsmk+pctVkaeOfCBjlrOVS9GxOGYvGuBb+q0S8gn0SGBU0P9cA3BSAgPJpugTugninzDz45f9mBtK0Em+q2bftdALKGRBCxFpB5HPOIqlN0mLXOoc1dp6Icn/0nQe6WKwwTpIZVRaqrIXFU/10yMLgkfuzA6BdiZwjJmaAjpIgxJwJwUswYdT8ubl0NomkQvuULnypsqJ0Nc8GksgUEmAaR9O1TJNUigfvC/PD0PQiLM9GDCljkvuuEKNmYWUVQNfUKoiwpVSWKkfKkexBylQBp2QpijmRdNHgn8DbdyZPPpDI6FxosqkAf16kFgklLKgIOQapxMeISrk0e5CK6jxOS4VCK7mf0mFHY41DFyNodSLpOGJVx00ZlWfkPpX27+SjROguiHaXQVcXwVOvK9QxSZrFpSmTiEf9+0kV4XQ+NW1D2/SsVitWqxU5JTY3N0zTRC6mqvX9oCyANRHSmSkGaXFfF+jq3VASPpIq/kp1/ssYjsWLUvwr7ia2p0cs9O7Tr/m1OdMYJ3c3A1WAqmV8aAWmzq8ERieMFu+gpDJRebQK5DSR
KUC6PnZHUjERcwBtcEbM8JVtCpuhRZcutrm0qtel+jKvWlnmi7AFYwEKDIkTvybEO4kCWFeadE5i8K4SWAplvSaqnEpgAAAgAElEQVTXlTGZM1YLQOiMZb1ccr5es1z0nBDomMLIsluxaFYsl45+2UoirMG6NfcfPWIYPZvrDTFmlLYoq2m6jsMhst0fuL7e0PUtymR2hx3n5yseP37AatnR2sz7H37AO2+/wfbqJTfmGV3XMm237HYHguk4e/geqrvg4XrNd3/vQ168fM0nn/wDb3Sae803aBYO2xnefLDmX/3Rd/nxD/6Bn7zeEMYBneD26oqltrz58B3eeXKf1zFze3WNipmmSEMTe+IEoNFaOnr57YTSe5zdYZtrYUdqS9/1tMs1arEmZkP0O3TOkCzJa5gyTkdUupWxUYHyfBf0BoiHTQHGCwiXhEmTU5rnIwVoqA1ZUkoQLC6sMLFF6YRxCdcHbOPRVmBeZx1+DPgYWK/OOT8745vvf40/+IN/QdNlLq++wOKZXm7ps6VdtHTnDV977x1c7/j4hz9meHnJYZxQ5ih7/uLymjxs8CGwWJzR9ucEPON2IqVI6/SdwMDqVFglqTCnwZS1Ud/Z9ur8FJbydDhw2GxImMJ61gxj9TMSH8rgxyJvEYBuGvZkI55LIqmoAFKZJ6Ura/2s4/am7pyzrhKcDNW/a+56ylzrInO38q1rnJA5qeQyB5tfrQjX14P6tQCzrlWzhFQfXyfSVV3sI9IcK6UCxNUCg55ZAUoYDFBsBDjef3VXhTCvt+Uz5+ubQTWkqFXOaw6kS9Su9cl7nQSQd39W84y713t6VJCsAmVd17FYLNjtdhwOB7RrME1L0wq7db/fMwyDxGbWSYElhvLMaoxkBKiJws6qjLwQgoBksZh/5+Nc67uWtnGEENjv94zjiNaatgXVyB6ntS5FPPGk0Smz3x74xc9+weXL16SY8Ls9h9treqNpTHEtNqb4yhhpOqCNWKMgliJt12NdI+NAK0zt7EyV32uM0thmQc9JIqJ1iT9FdjdNE5MPvLy85GZzSwyBNA10jeWNt95Cu5Z+seL8/Jwf/PAH/PTHH/H61SXTOPDs2VN8iLhuQdcuaaxh2N3w9Isfs7ndsrl9BWmLsSNKDViTZe6EzJfPfoE1HWTLarni7OIMSGy3N3h/QGXpBKzI3F7foE1H1yykqZNtsSfmyMY4lAHrGg7DSFaKs77jxfOnfHn1nIiwbjtAE9ncvqZfrwhGpGphGGhS5uz+Bbk1+Oh59PARjet4dfmacdyjFKzXPWHyHPbXUlQNgRTTXAyMMeLDRAwea8FW/6Qsa7dzwrhrGmmSlVG4xtEtl/Rdjw+Bw/Ut28PAdn9g8oFQ1vuUBERQJS/wfmKzuSWGiWnYoZV0DaaMh4RhmjwgXX6VFlmsUpI3hBBnUD9GKRamlGlcx0Vj6bYTu5srLrfXDCoSrMIzkbNn729o1YJeOfzultuNZ/Ca/T4Sh8y97oJ1q9mHCaUzy37BvbOebtmz2/aklFmuzlDGMoQ92xEUmqAMOYYCCmW6hRcigWtpdA/REyZpCjNtvMTfk8f0gF4RU2IcDsQM2o+oaZSiaXJEGxizsOaG7QajEsaKt6PrDFiDdpA5sN3dsNnu2O8P2K7jbL2iIeL3G8J+Qw4j1il09W4ra1EOgXG/45AyqVuKauRsTRsmRj/S9D1o6eRtjCUbyeUO08R2u2M/HCR+NZZcO6dmAaaqwqmWcZQSllcqMb6sw+LnhkqkPJX1Npf4XtbzaRrYbGGz3RJTwDnpVNo2jqbTZDyHw8g4DPhRClnKSLOLrjV8+K2v8e//3X/Nl08Hfv6zlxxuB2F7KoUxibN1ywcffJ287/nwg8zX3vs+P/joYzb7AzkbKRwqS0pBus3mXOZQydGzABgCmMiWfCTjpDkGL1kMKiUwmayqjP+Et1WL/yQ5x+JRHpHuqj5GXAxlvywsO/K878ZU7QDKhphVrYui0SVTLYWXnOf8FgzWKPFoyyKNDykQciEVkKn2XdWeiMIg5ORZ1s1QyDjCjDLWziy88sLyWi2seSrz/cj+qnvq3T1WH+9rlms2hdiTa/wyl0mBuTGZHDFL7DZzkE7jhIxIkClSzzuHgGPVT5cSx/xTbDg4xkjH9z/GVf9UrBCCMEPJGTPnqUffeFGtlNw4QdIJnXXBkNRxzvDr8Uctah7PRawv5OoqqURxglaii3XWaQHy9BpOY6EsAJNIuAuoe3zeVQZ8jAEz3L3+3wDOzSDdHD/+2oO5c/x2AF2RZn5V5pNj0ZefBMUCKsktEBlpPLJnToJZ6bRRB38Z3Or4HrlU8VPWhfHCMYfNhSGHJAXyr4KI13fNR6BOYFFhwpTlRD6/mpErI4twsc4Uphxzm2NVF46KgJcbraxBO6FsiidFWSq0OlYKVGUAnQb3srk3bYtx0j0TLQ0iEhkKzdOaCmPeRV6TDoBCOemsklUSyZXKYsaqAkFFbCr3sAAMJKGgFyXw0VutVANnUMpoXDIFFNU0pbvnOBwIwTOgpDW3ttLJZwrynLTFGSsyFq2OEufimSaDUth9uSxjRAUxwBiIIeIU6JSJOeDJxOrBR5oBsPo9BLlulQtbEi1SyShAXS7NDmQ+aJLO85BPSRFTECrqzHw4XW9r1UcaR+TyRicYGD4ezWIlIavzo7S1rsyAfOIXByeL9V2Y+tcTIcXJFJFrKGM7lkSjdixKSRbuyhJRStO6luAT2+2Ww+FQgGShbqccsLaRzpG+AomywWml0FbkASGFufGFPjkfsjBt5h8p2a1VBqsgG43WidlE6QTYrpfsywKmtZrnfwXTMxmVNEf6qzSkQSlMwcw1Ud7WIJ2UpIe2zLUcwGhiUoQadCR57ikb8UtMklS4bkXb91jXifxZW+mIRAELyqJawZj5DHNZwK1DqUxOamanghQxTJXJcNxEUgElzLHx1fz8ZXzIx4TgIRuUjbTO0rctrXM4e/yjtnU4I8GCKQEliN+Otg0PHj5gGifpRqo0Kip5pkSWyyXeT9xutqxWPV0n8jBrOrp+QX+/5b13HrJaLvnLv/wrPv/Jx/zO+9/gXwdP01i2+x2vrg/sNp4n7/0OzXrBu08u+M7vvMkPv/8DPvvk73jYZx698YjeXfB4cc4ffPgv+LvvfI8Xv3rFjX9KDIlpDLx+8ZqVXrG6/5gnD9a82rxmt9kzxAEYyfgybCxGL9C6lSYwKaNUpHai0rpl2V/QnT3k7K0lqYHd9iXj/hYTHXhIwx6jJoySzoopWWIWsFWqg8CTJQBPP/tFmX9HQCamKM0jjlCJjPLixaK1xmSL1QlrepyLNL2iXTqa3mBbsSJYLc7xk6IxSx4/epP33nuP3//93+PDD9+n6xWTf83nP/+In+9+TJccPkc+//wLgvF8/YOv0fdLxoNCKSN+qeV8Xl5vsHFCG8u6O+Ps3iMmf8XkD5ARH8UTkEanSKwNZpQVNvdchVXzuqLq+I3CfvPjAT8eMLYhjntiEh8RMuQQCKE8sVO6Xoklgp+w1e+yzCmtNdbWBjRlais
lMvMC6tUjpkguXZNzhtowoa5Fx4DrFNg7rkECXMU5QPpN87A+73+6ynkXNPw1gE9V0LECbXWxzHNHLyG262KqLUmGmvdNASI5uZYZzORU/nE8//o6Y/Qcs51e/q8VQu4EvvnkW/F8yQExRK5/A7VxRl2rU4rsdommaWhbaZy03++Zrq7oV2spQJZ1NKZUvNuEzZlClCZQhRHhCwveT57gpzsB/QwGZgnaTWV+5yMLzzl3B7RMKdE0rXSgTQHh3gro6oxl9J6XLy8xaFqjaZqWzhlymEp84OgXzTwupdmUsLm11nTdQvykSsymtfgbjaMk11HQahb9gsWil+ZixTNJ6aMP8OFw4Prmhv1hz/6wx1lD5yxd1+Fcy+rsgvXFBZ98+inPnz2jdYbVome3uSGMgzTscOJ19sZbb+LajteXl8T9NYy3WCsMJ9u0SN06EYJHoeiaHmMaUki8ePGS7WYvDUeGkc6VrT0nrq+umYI0mnK2BbQk9eVImWKxovHB03nDva4nLWD8xttcXb6i229RxjCqQLNs0EXlMcaJGCasbVj0HSwbYs6sztbkIHJDazWLtqXvO8IwcHtjyMnPz3maRqZJ4h0/HlAkAedUmq02bONo24a+71BKsdlu0VrL/mgs+8KifP7yJbv9QWACK1LUrusIMcvYnoIU+khst5M0X8qJxkl8bY2h6xaYZk1iR4pRmlPUpDtnkbK2LdY6cs40TYNSAWMsbbtguTqnX50DC4JuGOIW22WwK6aYGaPC4Ehesd97rm5uiRnxcNO3hKR58OQx6WXi/2PvvXYty7IzvW+aZbY5JmxGZUaayKzsKpalp6RWC92CwBYECA30hd5CV3oMvYOeQFLrTs0bQSTQVJNNsousKlZmpYvIzPBx3HbLTKeLMefa+0QWqRJEAQLUKxF5/DZrujH+8f//8F2PbWbMj06YLY+xr66AHTZbOHgvIKLNHa/LGoSEd4G6mTFfWqLz6AS1krN3WTcYFJ4kwDaJvtuxvrgg5fVYN5bROfy4ZXuxY5tN1Kum4ubpiRiya00yYGtN1dYoq3Gu4+XLK2b1jDs3T3nj1g1arVhdvOT8RWDYeup6QTPL69t7+r7Hh0DX92xc4ORElCO3790jqkQ/9mitmNUNlbG084ZB9XSuB9/hhhHfj/iowARMVRFTzI17NKJmUXn/lvNemuIZ0JbivVbqfcEnlBHAxo0OlRmUsGa9XkshI9sQWWtompq2qUgJ/NhD9FiridqC8lgN81nFyfEJR8slbrgkRo8xmeMWI+v1iq8fP2a9WrGIDSFqjG0xtiXEnmEccKOnrmt8n5vLURrzSLCcKD5zKYtLhRSQzdIl/lYZOIoZlsoNEVCi+EGXIlkuTFNk49JkMsRA8CPRO1G8KJOVSgaTgSWygkdFSGR/umQgAzwkSNn3MuXnMiSRJKfC+JcAIcYgYH62jTDG4DPYKrZVGaOQG0BKwm6Woosw9HX2OLt27h/4lUmkWHp87oG1EPcevlqpKa7Sxgq+EQqJ4DC+ECh0yq2mGEJRmj2WeCOGRNRZuZPPPiksZ8+S18/4jCuYkoQcgF3lx4nrKoPXi5Kwj5WKR90h047p5eqpGLqPrbL3fZDPhSe0V2klDiTl5S8K7pT2BVym39mDhzHHKDEW9WR+rRoSGfMhUdh6KV0fzxL7pCTzouSdpVq9j/NyHHqAi5WfCzgoHql/n8T17wNE4dcE6FIKeTJcB+leZ8AUXlyRrIgmOEzI6zfZMofSiTQBWuXroLI8JJXQO09skwPTLEtM0zxMe7APLYFsQp7HqAzsZBnMhI2ULWgPHEwSW20xxgoAp7MnQfZLSaok2ToDkZkNE/PynAZLTYNZuIQJmSMhBJQVI24JYOWnWmswCWUiqHA96AcC2UtFV/L+M3CTkoB0UUuGH8v7jdKFROlEyuaGKJ0TqIRReup2BuIFZ5QmxAEfPZW1MGtRKTB0vSRXWlO39RQ4JxzaVNSVPJ7SUoUrLcrFsF0JmJgqORBy15sUpLohJWXQUUDNPoy4UpE5ACqn+ZErJVppCJmhpxKEJMy5BOgSFClhZ8jOm2m8ea5lhD2lmKVsh3MclI+A+8ZCc750j/zmIhPAt4z2/vGKlPX1TW5id6jC1DAyjmqfQKZcwZscbTPgPP1t1r1XOfHt+5F+HPHjiDGauq4mD5DiDUgMEyirszwaBPBUJZHIUl49HSCCpMUQJ+CQ7Dkhy1p87WSelXWVrq2thHQ1ksOTCXud7oXSGSJVIrElTfmtLhtqjBNrobTDBqQTamWJxhJ0hVcOORBEqqytxRjpTmuqiqqeYesKpaQzpM/S6ZBN/MsYyRc5KCBhckc4Yy1RIdVv73LLdVBe1lsBDKQXJlT5fozjSExBjnSlppKDyiw7QWOl0UplxeC3riqqPVmKqrYiwxkVi0WN1vK9qqpABW42N+j7jvPzc/zg8cGx60Yqo6mX4vc09D2bzZbjkyMWiwXjGHn+7AV33rCsd4YnX37Jz//mFyxMRd0sefH8ghhHXIzoZoHJ1eVuc0kyiR9++CZqfMnDv/2Ejz8ZmC3+gOXNExZY7s9v8x/91u+zuljx1/HP2T55ydgHuiHw/OlLTDXj9knDW3daHl6N9NsdIe6Isc9jUOHxkCq0rvLe6IhKPFHqeompj2m05Z0373Pv/ft0/VN+9pM/49Offk532WGSQ8cthk7211gTYiV7p4q5y/f3AXj15OvpTFIlIE1J5l4OtqQqrCUZqSp0VWNNoGnW3DhR3HnjJjffOOHoxox2UVPPaqq6Zbk4YewCx4tTFrNj3nzzbR48eJ+rqysu1wM3jzWbqzXDbkApxdnVFZ989YhffvkFHz5/wbtvvY3rFZvVJidTcpy7URgeSVesdwGlOxnzqs1NTjSVPQDo8hwzWmeGZZULFjmQ0RJ06lzM8TESvMtsUA9Bo40UjbSRJixTE5tY2trLZY0UnkpDhZgBJK0U1trJR0wh1ewQgzBClaZp9uxlEpTu5ftTNdfd836YEFZN2bYnUEmVYHf6i4NA8zCIzHu7BqPNN87hAhBd7yS+byahdT7MctCv8+tV5Qzy2YfNZNnTQcyplCIYJQ2XDhh0JZjcB5XXPdwOY6jSyVWVvVNdb3KxP1MPbmu5QdfuW8zPp/LfhmuSU4CQJavCTLI4b+lHT7frmC8EiFhm75+u68TnNkYiamIVAvzR92fAjP//XTVw8vf83AOvpJ/Kmwe/98GbwJuv/e6lfLgBsMz//u9cN3+N37nzje/EIAwZafKhGPueR599Bq2lqzW2styZL3nx6iW98lSLmdhBBI/yo/i71gaXAsp5Ru958fwVRhm89yyXwg5MyWOsom0rtpthWnPjODIMHdvtTsCy2qKKv1eW4Wldo7Wa5q/K9i+D87jLK5RSbLZb+tGLgsRqmrpiMZ8zm8+z1FUYdSlLM1OUDrtak5tNRI6OT3njjfv4aBmcxBLtTFh7Xbfl/PyczXaH84FhdOJXFKUr5ma7JSVNdJZhmFOlGcujuxybBbUWiWM3eM6uRgbfsN0mvGpI2uLjFj
PriWrHxTbh9bfYxoaNC6ROsR0VtapRppYxGgbcODB2a3ToaeoZ89oSjaE2lsvVhm3nGMceH3u0MdhaGIgqBVTyWGNAO6w2aHpS2JLClso2nByd8tb9t9ntdnz28HM2mzW20syWC45OlphGzPZdCngCla2YN+Ld5IaRRicakxAugoD5IfjsbaZIWmNUzfJoQV03rNdrzs8vGPsRFxKVqWiqlnk7Q1lN27YA6BSIo88FkYhVitbWHLUzgg9sB2lcoYwwyolC4EjKCEhXiiTKgE5SIIsQQ8JFkUNLfGxBCZA9jALoau0JoRNgzRqmJn1J/qelawGoRN1a6sbiBkPfe6yuaKuW7drx+ScP+fyzR6yuzomxhZBIKbDZDHz15VMeffmcb9+7w7ob2A1SsKuaGdthpB8c82YmTOYgssGYkpARkngrez9C9BB9Bk58Jjkk8QXPZ+VkI5FKx9pCrwkYbbLdg0heU7akIYEKHkvCqoDJ1kHCnhIMQSubx0cyHYPCktApe+LmokzMlgYq5x3kSNqkiB+DSH9jIuVz0hhD09bMw3wCYj0aHTTJZyJCUKhoCNpTGP1yVmXP8hDEP07tU5xMu8gHs87wwoG8NwloiNFTQxGUZAURlX33Sp6kmBhclNwjXT+yVWH4l4N6f36XPVHyrfIK5Ew/bPrA9Nr2wJJ44WVg9SDmOcxXD/OtEou8zqArnxujcqOlQo7JubnSOe/MACAaVCFElfxP8lGt9+9JnrPIgxWvS1Ul75T75uWmyvtGSbeNpHNcZNgXcg8LteW+CUPRpDJWZQhEgaei4vC6RrAJEhNi7bWfv369rk59/fo1AbrrIXAZzMPv59ANSl6ZRDpEChMIqg7+aaWmF1cmoAy6TKKYkdcJlyOj8nlSozQhpgn4TTEDgBxMkonauGezcXAT4wR0cIAgZ+ROKWzdUNU1dVMj3m4en6UVKcXchS8J6ISwEpIyMEmmrgM3k/Y7S2ZH70hGYaooXftUoYpqlE24OKKK59nh2OZOKClrwUOm7OYRIClBtAt6LPTrLJ80ewBUI75NlZbuNSnKxqqNFQ+Q0aO0SCGVNWAqgh5JPuCHkWqxQCfNmJksOE8anVSSTBJZYNmvymJXUjWAihQ8KRgSjhRl87V58zLWkLx0aiwNGIBpDMmbJQgoqSIEFSaQIyJVC7Fqz8CYDIJslDEDwhmBT2X6Thva/qaXr8vc2o+n/K8sZg42siL3LsnW6/Thgtz/qqv8zOgMvOYW1GRzdvLmrbWZgEZjDHVd5+5ksjmsrjYYY5jN59S1hZSmREoq/R6Vjd9NBtRSnt/4RNI6T7UsHZ+YoCZLNSqUThiV2UchId2RRbYVpzWwn9dFyhGjx+ZOQRwEO6WjklYK5yJ7cwi5p6XCJHR0AWWK50LxtZRdwOB1hTMal2qMNdiqxTY1TTujqhuMrrKBbe7gmgprRI5XpSSoUEplRmx+vcjrmM2XNG1NVdUEpIvhruvAjbIf6YA+2GIUapK2QZK29qW6Q64YKWFiBp+y/18QQCWbCofoqNSekbTb7dier2jMnKa+JWyRzLwd+p4YPJv1iqHfkaJDKWEVGC3d5bSuITNT+q6na1v6fmB8uuKrRxv+3Z+uSMFz99YdvvubP+Lmvbt8/vgpr169op3N+P3/+D/h/oN30a3l7PIMtOHO6Q1+48E7nH/1kKcvv+Duk7ssj06Z3XqDo9ry4w++Dcnhtlf8zWoFvcOPkfVmw8tXz7mz/BbvvnFKf7bg2bBm6MEnQ4oJaxQxDcQ4oFQNUaGSdKyu65bjo1Nu3bzB4nTOe+/c5J/+4T9BMfLg3l3+Tf2/8bO//Eu61RUqDphyxuhANALiB6Vy8UWuo1bQUJ3nn1aFNTqd1nKeZelGVVnq2lBr+O67p/zm9/8R733wPnfvv83y5k3qtsXWNXUjcsBus6Xf7Pj8k8/YvnjMK6357PNHuDhydJz46uEj4pjQxnB20eFijRt6fvHxF1y+WLNbD3z28Dmr8yt4cAuQDltEh/eJ1XrLZj2gQuR4vkSnKEyVA79Lk0F1pY00MFGFoZ7nIwLoT7FnEqmYyFwURE8MTpLzkHLHaQm+YhLQulxj3xMzW2AqRiBSQmstVfYSK3uV73u6rqMOMUslyyapc8GgSDQQZsN01ueiV15r+yglB/Aqs8xy8F3Mll+/yrhOwexB3HPNU1epqeCjyqGnkWRD7cMWo9IejNfyWFYrrNZ79mGOrzyBoGD/qg4KfQdnjj44S/Zy17ztlJgHNfmjCaM/5tf7zcAx5cLMYRA//Syla/fi9Y/e+8wsmxFiz67bAbLXLOYzqsqgUGy3G0KCqhLp6F/uPL8z/zsOxP9w/X/++t+fbeh2O6ISWxJbVSRrYNihR8ugwcREoxTtosFj2boBmywqJayPmLahOj1i1EA3MPaOjoGmaWjbltmsZex3dNsNBE/MpvrWCIvWjY5xcHjvsFYYt+M4SKKXE1MQZvo4CjvTVmLDMWSfQVTupoiw640RMG+32+G8l2YD3k3xTEzCVrdVhbUa70ZS8Byf3OBbb97n/HLHyckARI5PjmiahtXqgq7rWK/XeO+pqorlckldNyglnTWHYeCVO2fYgU6eoybSWoeJPa01WFWT/AydaoyL1DYyq7IVx0wxxMBVf4WqblG3JxybOU0zw5gGa1tCSHTdwHy+QGvFfNaSgsNYTd1YtLbY1uB1Im22eD8yjBFtrVgaRJMb0Qmzq7IBxYBSDbOZIriK5fyY9x+8z3sP3uf5i2c8/voRKzcQlSW5ET/2aKuwtTT3qrSmbVqaumE39EQ3cOtkDinhhzVnLzYE1zP2nYBHWguJA1DVjPb4hDFZ0rrDqY4ArC7XPH/yjHEcMXVFUlHO8BTEgy1pVACViRWLpiVGKUZ348AYBhQaXUmzEI+aGoppJZJ3ZaU5hC8dNYOcj8EnKlsJ6JASWlWSZyVNjBqwzNo5xmhCEAn/OAZ22w6AFD0np0cYVbNZ7XBDICQYdobPPn5MSv+W5xcrNusdSi9RwWI1eAevXm74m59+Qn9mefTplzx5+ZKgNaZpUFWFriqSVpjaogaXyS7Z+kdL7Dm6kX67IfkwnR3S3MRglcl5R86lkhZ5aASlBBxTmXmoknS1TzGACuJ1l5B1T0AFh0nSETtFh6QRCaVlXqRs+aBjOUsVOobsYwo6+pz/CjCmlEIFAWH8mH2uAyITVgqdJdVzFngMkRGfNCYYKIBONDREdm4rIHDuIB5izJ1jCw4g+98EkpHBNaXlZvBN2WhMmUGvwIcDaXbxo8vnfIwJXR/S9dSUbxYY0hgrRVGT1X9IHEsB9SbQLk0xyFQ8fO0q+El5npJ3vw7S6RJ7FQDwoIB3DZzThXSS8YjsXby33LhuD1IYimJjIwCdiCnkPotOJdtRpb0yUe55juWTYEgFz5lII3lciq2E3Cc93a1U7m0enzJiKYk/YSwxXY61IonDYRX1tXrt/WR7Ec0Ug8ko7hGFmP4BALp9x68coMbrTLrJA+baz7N+/WAyFJSefHOL6SYZJIkqZYAuA2RWl3cu1
fpU6I8CwTufGXzFg61wT1Wuquu9VKYAc9M9VYdLav916cRK/tzaiqqqMJWVw5rEOA70Y08YpEogUrdU3kbh9eaJIjKWGAuwCQUV9s6BVlS1o1Y12ug82YXqGUIAEwW4OASG8mYpABMI2yDj5AUQAaLPE1zl6qZSVFqouhM4amTSB++lkqR19g4LtNl0fUyR5OR5Z1WFS4ngRvrtlspUVBiGbKLrEtgY0LaG2uSKKtnLzJMIGJ2w1uauaFJBIyJ/qxSVrbE1NATwA8M4TlIKLZogQbdD2oOOMJUCCkCUuQrTnEuxgDfiW5FCELCrKDGRZL8k6P41hmhi79cG0piAgw2sLLvi5ef9OM2BshAPmTvK44UAACAASURBVHO/KsGZxjgJoyWzuCEvepR4D4aY0FrWhHQDq2mahhgjm/UV6/VGklitSSoKGByCJNYKdBTJaGH6Be+FqRb3Uu0UsvFoBsO9C8QU0AZMZUQmlzK0VHwhiqw8g4AFoDRZggni9+F9YmqxjkKo/3vgSYKkwiCxWKtyB8DcWj24POjSeCSmNB2uMQZcSgzGMGqLbmrq+Yzl4oj5YkE9m2NsLR34Ytq/hsxxMzHmLnxBHEeVyqAyUutSGl0ZdDvHzFqqpsWkxBgNaYwia9ARKbamiTmT9+nJ3laZ7MqYEsFnX0AllTpCgCRVzLo2NHWmiANNvafQXV6ec/m8w6Sa6Dre+NYdZrMZMUW22zXPnj7hxfMX9JstyQfqqmY+n6NVxBok4A6QYpAkZRip6xqjaro1jGPk9GSOsYYXly94/OIpX3z+JW+99R7ffvM96uUJqjWMwzm71QtSqmBw3Fjc4Dvf+S5//Cd/wp/+H3+CHyI/+MHvsbx7h/vHRxz/9m8ybM4Yrs54/NNP2HbnaAyvzl9iWsO9+/f44J17hHHD2VliHGpCiDSVJSlHSg6tRkLQqDSnrZe8cfc+i/kcYxMqrVnOR2b1yC//9iHfunmf/+Zf/tfo8Tk//8kLTLAob2Vx6RFlIgkjycMBALqcNVPXqnJJF19Zw8V82GiTZcZG2ImM/LPf+x7/8l/8c27dexvd3gBzBMri8l5sKlDHA1999gsef/QTut3Ixbfe5Wo9sNptqY8t6+2aGTOa5Zwbd9/mze/+BvObDZ9++jOeP/qKZ4+e8fjpOUMfAQHoTBQPuXo2Z350g922J44hg2myr+gD2EdrabJglJH29aWSmYBUvC5lv9CpSEsSwTuqLOkLfhSZXW1p6ib7Z8KhD6VM+5h9nMi+HnuPlrZtsFbM1KMbGbot3WbN6BwLba/vj1G8JqV5kD4AT1U+h7N3ltoHciUILPtr8TeZQLq0/71yVVUle9xrxRXZo2SPK91cQ4jTcxmjcmAm618A/4TRisporNXUthEWXpLipFEWhSL4bFURPSF7fB4Gyod+ed8IBqfvMx2K02uffJIk4DWZbfCNTmk5UIm5iHXIuitxQ1VV03MePm/5uTGG+bwlAsOwo+8sR8sFR4ulSD+A0Y2T/cl/v25gtQcatVKQfJY/vS4jSdP54pzj4vyMoe+k8+NyiTGGYRg4v7ik6waOFnNOjo9pmgofPC/PLrhYbdF1y9HpbU5ObhJjxHU7+s2KcbsiOscYPaZtOTo9YTabobWm73vW6zXOOWEFLpfM5/NpLIo8e7vdcvbqjODkvFgs5ty9e/ea3FvpbGmSry7LWwFhSmuN9yPdricbOnDz5u3cQCVhUkBFT7ddc35xQT96jk5vc3TzFoOL7FYXqGEnzXGaJT4qtv0osZoO+Dgwup6Tk2N+9IMfo1PFRz/7nO3aUZuWrnvBMD5DmYSplyg7wweNUhVH8yWNNWzXl7i+IyVptNB7x+g9PgaMajBWY/sBvKf3gZ3OYX9K1NZQtTVHdUudFDffe5tb33mA6wfi+Rbfe9bdwPnlOXVl6HYd3W5FCg7X92hguVii0LjBE3yP92ECxFFFipfXN+DcIEqYDK6NWebrvTBjlNbChjOW6KVJS4pSfOv7nr2+Lc/3nMwZrZgvlixmM5SCmzdvAZrzs3MAmrZFa03X7RjHUYBFa/FZlmmMIZO78vlsGPqRfrwkxcCmL2qTAC5QY2hNw8nCsly0bIc1TRpZnM5wjclS1JblbMmsOqbre5Q2aBStFaZ3Xdfigactp3fu0BwtGIce3Ujn4dTvsG3LwijW6xUoWBzN0QZ8P+S9MzAOA/OqQSkBYE5Pj1jOZ5we3eL+/bdYr9c8e/aMFAPzdkZU0jBhHDpsJXYPylqsNlhlxdokAM4Bwm6zWmLToeuIUeTCpUh8dHTCnXv3mC+OCPoMfb7C6x2RxOpqw273kOdPnmBy1zzxllbUSlMrg04aqyzaWJQ2+CQ2DkRHCg5rLPNZw2xxhIuKdTfiY6SyTfZy00Tn6IIn9pllaCw6kYkdoqSobEsioo0SILKp5P1r6Hed+P4NnrPxEmLEWMWuX9PWc+lzRovF0m1GHn3xgt3g6GJAVy1aKdrqCGNqvE989dUl/+pf/RHvv/UJq4sVr169YgS2/cCYEradiQ+ytUQnOVBEvL8rY1FGmgO+ev5cJKxK5P9N09DUDZWtBagzMlesshhrqax8lCJbBK+IudBdvHp9EDWSNprkO1y/hVkrBb3osi+jQaVIVWWbphiz2tUJEyqBicJkkwy22P5kqapWRG0IUZh43gcBhUPAeY+LAYyRQr6XrvapeD/l3MNWFYv6iECknc1ommbCGKwVldJAEvXZVLTdq5qUtmUj2ud4lONLPL1jYPJSnc7y/H59CDSU8yLHSlmqTwaItbFoe1hAJFuCSRdzXRvxX6acT8W3NvuKZ697nZlsUYH1URo3Ri/57oH3XFENaPYxR4mnDnNlle+RMgpUiQ/yWOWYyRhD9MXuqcRU5KYcAuxZa3EuiKd+CKQkwLA87TdBximGiZlhpwvmo7LKMLfuynlpOADwBDAL4uuXx1Ibk8FpRWlGxhRLkrHMHEuyV4oWafDgR5LVE26xBwdL0fjvL0r+mgBdZpFMCG152BLk6qxfz4FkyAFyDhRlbNW1To77R5OvJuQ2B9mycMK+KUTK1eNUME9yMKREvlkeSeV2w+X15oWTSjn54Ou6qoXtlytmB8DrVJ2O2Thc6+ynYDPzxEr1dxx7xn4g+IBBXVsoh6bRKinEGl4GtmxG0RtpZe09hVIqd1LLZhuzqf1hkhgF3CoEwZwJCDqekWap2GQ9Pjng1VoqbCnLZkOZjCPBjcJ4NOJxJUbhGST0HkNC1xZLw5AiQwyM3Q5VNWht0SHgo7D5XPJinOwNIQehsRB5M6sgKjBlQCcgDcaY5LHGxGKxRA+5u2KIE3tJpKxkc9BS8d9vgCFmKWhmKsaUik66dPuGJB2HxGMgJyB5jhSwrbBFVQZe97N/vy6k615B1su9ljlWVTVK7wHp4pOzTzb2Xjn7hHGfKE58rYP5Pd0ywPtAO5+xWMyzpHXHer1mt93inGc2m6GUykadKTMSc31FKawSoHJygs96eqUVVluMbQghSeU45A0qb+jiyVwMS+O0iSslQJo5AOdUDopS
Xksic2um6kvZO8rtTZnZqvT+cAKVW1zL+3Yhij8VKvtLyBwpjOGoFVSaumlo2wXL4xOWR0fUTUtSRvYVLYdXLJtU3thUqZBkU1iZG/JaQspUbCwBg0tafJRCoHeB0Ud8tn2ojUWb/f0ujKNyZNgiXYsBEMPelMGQvbdBQZZT3hpT3kfKkAnVfrvpsDoym1lhxMXA46+/5PmzJ/S7AZJIDWeVYbmoUTHRViofgGLmOgwdblwIE9PUJF9T6xMq27DaOD59+CXz5Zy777zJj373t3j//Q9Y3lwS3JaURq7OXvHpL7/kxtGbfPDhd7l7+y2a2ZIvHz/kyy9/ye3ljLfMhxzfvkXSnt/53oe8+vr7/PGzrxguHN1uh3E1r56dcTw74daN27z79sh28wnddoNSGu8ctgqMY08KEWvnVCbRWMP3v/sdAD7//GMWC83poubJw4/5m7/4K378vR/x+7/zHf7mg7f55V//O7quQ4VKzJ/1QFKepCwxZe/BfO02m6nyVTxPcrluWrcqJZqm5ehoKQUOpZjXFe+8eZuj20foRQuqJgSLMhZbKzF+DgFjPDFt2XbP6fuRza7hatNzvtqRdhofI95aKu35wW//Ad/9ze+xGs948vwJz5//hMdfP+HqqsepfTfFOIwcHy949733mB/f4MWLMy5fnaFjZntoTUp7VpvKe55UTAOJ4lMaM/sk7X8e9s1oppqg2gc8WiuOjxYcHZ1QV1X2hhqn57LW7Od12etCEOIeCe9GxqFnHAa67Zqh70FB7Xxu7CNXXTcSXBph/pVCgtT8cjMovOzRJheytDAe9mAcU7ArjGQJyl5nlB3GPt+Qv3K4n0vifii7lWNIgDmjwWYvPWs1daUnxrgU28STRemIigaTDZTL/YVc3S0B6IF3nyngY8avUMJALGeO4JbCjIw52JXXrrLctFhJlLNKzuu9tGSa8lP8UsYbCpM7TEujFGQSCjMIQ2SzWbNcLvN5pem6ntG5/Dr3jSxK8CpnaDXJRPaV/LwnIvvWYfOtw+BfZ3mwyKkPmIwUXyVywiAJaLQSN0q4mqY919rcREhr8W3L46GVorKWtmmuJSYxRvpJKp2LzloKzlVl8+tlAqeZznc5D5XS1JXFKnBDItbit6i0pm2yjD7lpk0hUlUVlbWsN1tevXrJbnQoY3HbDWnopJOsRvxqcZjcJCaMgUpbGjtj6L2YqIeBXX9FrzSknqppqNuGul3io2azHSAF2rZi2Ta4fo2KsqZsbWlo6dzI6AMhRkYn3SC1j+hKo+bit8w4oEJg1rTcuX2HDz/4kP/sv/xD3vnBd/GDw11tqZTl4ZdP+B//p/+FLz7/GO863CCdWVVOkmKMVFY8u/quP/CWznmL1uKzW/YsLfGkrURtoDP7ThvxRkxq33hFK1kxIYis/xCgPmRZJ6QxRmVrbty4xZ07t/DO8+XXjzm/uMB5n58L3DjgnHjCkteJyPhHtBFCAAiLPqlAH3Z53WfGKxpTN0BFUBXWzjg6ucPN9h43dEdzFGmWiVQ5TGpo4k3mzSmb3ZbBe2azhm6zxo8jp6c3SNrIz8bI/PgWjQKTu1BtX75EjYZKzZjNa9w44gbDbN6wXCxJIdBtNvhRsahaatvgR4u3lnl7zMnJHdCWh48+5cnTZ+JrO5+z7beEGKirRs6eKPmRRpoXSFM3yWGG7QqXAs5W6MqidKKyhqpq0Kbi+PiE3/nd3+PNt95lHB1Pn70QduYYUVeX9KsVPniiH4m7wnyRwquKicZUmCTeqVpbktGgs6WRSejaiH+bkvzPqBz3xYQxNSRRmSSl0Msl88oShhHX7RiHQRJ8BFhybhTliNUs5gLQidF8xI+Rvh+lEy+iBEJFLq7OaOqB2M/wTrrPD97jGND1CrusqI3BGE9SAW0M3sPqamC3esmzJ+eMfc8wDPgYJKdWCUxurlH2IXKeicSEygopZuh3VLrCVC3EKB2tdwMlJldKznONprLi7zj5gCK+kUlngo8WGaoy4u2mjGFczOiNZWcNbVhIfpGkYC+NG5R4DScxSyqMSXJTLje6HJJFAVZTnIoeIdt8aGVhCPjNwGazZtvtGMYRpT1jcLjgCTGD8SFCgIQnYVC1zuoJK0VMbfP5ojOolffufA8MxTO3knwcNXUBlrMr5+rlDMnEoskyohS9ktCQvC+xfz6tooCoKipUOvBlzSSCyta5gYUUjJOOqLjvil5kmXuALsssc/xWsJqCo1xDaQ7OWH0QJ6WUQO8LhvtiXX4sLb7c5sDSrDyONNmsBGzUBkUs9Q8KYCav77D4KMQtlfNspYpU9TpISCHrKAVJo/TB1xzGbvuvS1SrUFIAzhYIqgBDOb6UcSvC2IMrB5WHseFh8XICIgsu85pC4fXr15S4HnpeZYBsymlFw79HipmAJpWDYJV/ed8FpNAe97dEgtASMsnjexcmhdt+cu8f39o6/738b6pmK7P3oNGyQNRrH1GgK9k8U5ABSrmLWsrJusmBvVRwhQ4psg8BvyqrCV6Jd0YGeowxJC/gUQghB6qyoKPPh1EOFEPwKD0SnCU6i1YNHEwyY2aQk3UBHHsATGogeUk28v0W+WYeF6XRKWBswOQAu4yLMCISKncijBFUSqLjj4GkRnxm5vkw4r1IWq1OKKvwUeFSJI6DMMySHLSCQnn84NBhJNY1tm2mDTOplDcNta9uIqBNCclDSqLdDiNGRY5Pl8wymKbQkywh5Oq5yV1IDzHoafxgMjUn7X9LQaZhZzCYmOeeyt0cBWyKKTJ1bSqmn+lwuwIyg0JPG4KaNqlpueTva32dYVAkUd9caweMvOnJJGlI2bNHKYNRihs3TlHGEKNns9mw2awZhh5QtG27N7XOm7hKxTOuPKzMC587sGhAW5OrYDUhSPIWIkQU1jZoK515fT4wZX5KOlyC2lQwJZVAR6G/l0BMqeyVp3HOZx+rA3ZkeYFKGjxMybGW+yHjL9UyjJbGD8hhIB3QotDwK0vVttSLBcvlCYtlBucS9MNAN4wZICgsPnlqm5MkmTfiC3FQYsEXY+CkicoSoiKOApKNo1T/ZB8RwHxK7UsSlueGzFPZU1BKDoKkCclPHQsT5iDZNVSVyQyM/YyXxDPhxoGrK09Tw2a1wo+Oi7MXuK5j2A7MZxXvvPUm86Zl7AdMbdFKKlq1ITtFBkIYSVG8YBazE3a7NetVwLaaW/fu8Nu//0M+/M77/KPvfEjTNIzDwOr8gvX5mk9+8Tk//8nHfPggcefGt/jwvfe5ceMNPhp/wbMXD3nYJHZXj3n3hz+kvnWDarjkjSPDB+/eZPvqK85iIPQB10dePl7x/snbvHXvbc5eXdJtd9kvx6GSIYwClmmj8K5neadmuaxomhnPnra4buSjv/4b1Ec/Z7k44t23ltjkMPlvu22HTn1ZyKTccS2R5Qn5csN4PRA5AGNilORYK7U3JA4BW1Xcvn1TgsbNFSxuQWMI+TBTGtSsguRI2qHagNc7enaswjmdivgqcHG1ptIzkm4J4xVX645N7/jy8TM++vhzvvr6KevLK/yowO5f80d
YxSUKmRmiK1jRynSCcvXRpXmd7oxUhVqeJNUdk0pRjQCpwtTRDy51U3EKYDzkm8u2oMzvSE6Njv99xsZg6HA/vDiA9B1lQrBvopRrJSGGVps+ZcOQZl6PuO7bRnF44fWAHWJBpTwLq25aOY0NEwnD+hf/CA9dkF2jrxbVFWZMEFOJ59QOlM0whTdp5HtIJ15zjXik5B3zTklHjy7nt47Rh/9ENW6zX3Hj3kN3/7t/nWtz/gzvkF664n+Jnt9pazviPlmcPNJfvNNSYHnMowz6R5kvTobiRNG3a7vRRCm47NbsccApnEftxys9/y7OqGrZ9pzu4x9Cs2tzt2z5+SpgM6ziID8EV6HKOkWFqDs1IQkBC2SGF2CvgjG2ltXr2XKapI7o7n1zY1wONkI62MysKWW+wdqGBzKbZONutar+YCuAEC4hUwmDLBrNJXCX/PyySRfJR/17FCLlM/bXQBsTIxR2GXTyNXl5dM84i2UjwYZzFWE5ME8Kjy3BgldhHTPHG4uWHa7lBkYdQh0IFPIkUxSrzhbrd78V1tuwIoiZwkISyAyUd0OFoi1FJjt/eAX1jAtdiqvmdS5OTS2Nul8BRJuDnKKAuz6FXQ7nTKmlHEcl4XOY+uht2SIEqmeM68LLMQqaYY4it9DFeo/1YLWaO03BvpyICq94HA7LqOSov0szAM6lpUrgNQJrCSiF4HMVQJaBYJigSfvPxZ64DgJZlHzst9WVeKyo4+leiG4k23+MQUv9XjOZR1U2S1x6+r8hzo+rNa3reSQqH6pBYbAIrROKkUyCdFtSCCcpqykbU8S0Xp08xuyrjcCHDTtHR9Q0w9MSemeWazExC37we0c2JIHQUE0DFJemgZyKWyRghjXu6DHEvYS9Kszu7xhhuY/QTJo8ve5pwTkCwJkJZi8VJSBmsbnG3xORGJAtrbslfHjDVagNeYMZhjs5PqsFBS1622tK7FKkvwM9nkZUCpGvmeTEmlTQGtDI1rSSZJ4FqG/e6AxmC0w9qOO/cf8fqTN3DO8fzZM148ey7pr1qj2pYUArf7PXqc6Ieeb7z1Dt98731ef+01bp5f8f2//j46ddU9vqgBIgmRS6UcBWQtCSAORacVLZqQE7cHX8BuGR4mwDUN1prigyQ1nnEatEGhGYYOayxhiqA1prX4OBGVxjQ9Y040NoCF7//tf+bDH/4dd87g5sVHPPvkZ0z7jQwtTeZAZh8jvpxz68S7VqGW4JE5eLQSu5bONvi0FxVCjigDZ73l4nyg7wcefuM1zHnPD37w92y8sIZ3ux1d36I1HKYd7qDou56Lizs8e3ZFVpaQ4er6OdcvvuT9b3+L8L3fom0dv/c7v8Xrjx/z6U8/4j/+6X/gb//irzEBHg6ZF9OWnQnk/g63puXdd9/nSXeHj3/8//LJ332OzoYp7dFNpskOd8g8/f73+bt//yfsQiT0DQRfbHjEo8ooDWVdswHGcUc6QDqM5DHzfPUJjx69zuOHr/OPH/6YpKR5d7ZBq0zf9zTOEv3MZudpW8t+t5FBVoqS8L1aM/Q9h3HkcLMFL32O1YaUPMnvRcLadIQAjTvHrlqmeSJphTcGHz2DazjrB57cf4BCs7u+AWB1dsZ6dUaMAa0aunbN0K1pXUvjLCp55lahJundtDNkZYFYVAiZmGb2s2ezt5zfXdHbjr5tUBdncJuZDjLsu/zyGfiIRuPaHuXcMsT2YcYoRRwP3Mwjcd9zcXFBu+rlnAeR5iilaaxDZ0UqKehnw4qcE9NhFC/YnIg54WxLe7am66XuaLVlniamNAtAbyzZSDCicYo8jfJnFGUSQtKICsta+vU5FCVIqN7FSi0SVgo7OpfhpUUV/EGBMrLnAHMIDEbq4smPAjbkhDFWWLtZCQCGFkZ/YZ9pU+WeZhn8pAQ1AVuINqBUICcv7ynbKSFE2q4r6dhFiqtq6qvs61ZpcijrhHISbOBDGSqBNUhwTSren1kUWWjLYR5Rqigu8kzMAZ92WG1xyPOSjQQqyp59HAhWm4mx20lISCu2XdFZfElRx0jXE2JEKwEvjVIk5FhPa43TwV9MAaUtVKCz7O2h1JQxK7ERqEBSFtBJwlsiWMXq7hmgWEWpdb33EjwRgnj8e0+MM5vbWyiDqhQ8IU7E2eP1TDKRo2dfLCnjpvSkwiZtuqGEaSHXCGFLztPMPI+kONO0BmUNWcOQevb9gaHd4TSE1BWCxUxUJWAzakzMeB+ZkFAOay1W22JJJrU2SknQVLn163VKClTx0lPZyejVxuIVfSRVZKuKDlhBCU1DK1KQgWeOCZMlVGSRq9fip9gn+ejpbFMk9kLKaTqPcYY0CRFJL76IkBc3ylIjLWzUr3/9cgw6Mb1AKVPor1LQliHrackmG3fO6JyloAkRozN907IaBoa+px1abGMYJ4PSnYQyNA1N29I5SSQx1mAU0jxoMDlBLEWx1cVPLpNp6J2DxhKUYR9mJj8yT4FQfMRUNuTomOPMNEXSFIjzxLofMMoSrEEljfewn2YSkr4a8oRyRhoF5TBOAAKtpZG+3RyI88TNtQI1E9Oeg7/lZr9hN87s445nV885bLbi4RYCYRxRMdI3Jd0mwjzF4sMESlmMFVPTXLwfZn9gGnfL9HrVGLLWvPbwPs7BTe+EkRQ9282Gm3kiophdz1im8bYcgzJWAilIjOPMFDxhmpijNDtKF3+sHBmMYtU4zs8H1mc9rjG0w5ph6Fmf9WJwSiL5yDh7pjkwj0HYTzGxWnWstweafoUahfre9B1dv8Zoy6effgZK44M0mtM0s9nu2O/3xKxRqhXzVWtRRtG0bYnDtmgtMx5yZE4eYol0dg7vGmxjmKaJMIpcoXMNq6HHOUlaS7mwM2IkxuLBlmrTY+ShzDB7kZxKgmnzUgM4+4m+bXG6oYh4cI1Mz30KxJi5vdmx3W4BGAYxXiYjUpk6DUpxQdhVAY+MlujzVJrRmBLZe9mk6hN3wlKosrVFsmIUkw9F/sbSmH2Fgn7yPnDCEIEyfdKLoXrdYMVH4mgoWnX0yhZqujmyOJQSOLQeR/Vb0nUSc/K70+JjtyxfJGXRxmGalqbtsK4tBvyKU7Na+X11ulaAjBOvzAVUW1gieWm8q8l/PfaakJh1ZZzUBluay1hA1QWAVEoSWstnHw8Hds5ACyoa8XzUGlfkyjElmmnmMM1lwiY7nEjc87EpTpJYdzasOB9WONcQZ8V8IltbDeeoJMmv62HAtGte7CPvnT/irQ9+jdX9u3SNJGvGvASRyf2bIcRG1tRCFIuxQwGdzrgMNktatjWWt958i9///X/Ge9/+Fnfv3+Pug/vce3iftpcGzs8j036P3+7Y7bYYJub9DfN+Sw4eokcFj1WZvnGMhw3j/iDDH2XQtmN/GHn2/JKQAiGPbA57dgHWZ/cZ7jxgCorISNOeEbMizYqcD0XSmkkolLZY26IaSnKuorOOrMTJVK6FFIAxC3BtWos2ClSSIqeuBEWq/arPxem9tHxdbujCmBFQ7SWZ5CnlCgpj9oT1VP6rz/uS+lieE11/voDaC0ClLCiRwVOA
+JgivjDnbNMsTCb59XL/d20rrKcQ8NPMPE4ig8scp9jLseZlwh58YL87MAyKppHSo96SqnhsqCzBQa+S3nwsaxBS41ScJuXiX1XOb0oJHY/y0VP5ff2eJZCmFtCvXJ9Xv7YEGhTwqwZ5LNfnZA04ZZqd/nt9fd37nq6nFXTNrxRfp7/jVcnGq/cVxVbh6z7Xq1L9+ufTz3V6b74E3p38+yng+fPO3avH/HWfe5HD5hOZRwnJqNLrRJLhSHlGlj1IyZCoTu4zZRCiE8oIu3r2YxkBU+RqhqHvQCl2uz3BR5Q2dP0gsqAkYUiGKCE7WgtDOXhyyjjjAE0oNh3kjDECu7pWUh91DuQwopBGVGUp3iWdW9b0jJIGLNVwI3t8hmNNAFbLf0pL6JZZ2LVSpFtlcFqaEKtNsR05+hwKQwFERia1uLDUX35mqvxYKbF62dxuefgo8vqTN3jt9Sfc3txwdXnFOI5sNxs+/fRTDvs9fd/zne98h9///d/njTfe4LNPPuNHP/gR29stYfKoJEz2nJWEQpR+JqYyENFl0JcyKkUku66AiaUJKU/U4k8kCbC1Vijgb/GJkjVQwgzavsMpx8EfSFqh25bzO+eM056//uu/4PLyS9bdis31M8K8p7EKZw2msdzuN2StaGxL03UM/Yq+7wnec3t7y83mlkbLHp1yWkAM72f63HG2XnN+54Ku7XnnnXf41V/7Df7TX/0lP/jxj4k5C3kgC6hOzOwOO2E13dO07YquGzgcJpQr9/G45f6dNR/8iz8i+Imf/Ogf+Zs//4989rOfcfvsBc4mSaw1iV7DzhjM0PD4rbf4rX/6e3zy8Zf88PbAmnPG+ZqkFWMYadUZ5+2KMzI2TEw+MqtIwmMUWNtgbbk+RKIX/2bTSYOsUEzjgeur5wzdQN/2PHr0Gre3t2TrML3CWU3XdigVcY2j76uHmngmayXM9dbZUptL/bK93TD0LbrrJfRNZ9bDwH43kTU8fPg6b771LqOf+Pzp59xub+TzNY57d+5w9/yMeU7Cgmocrm2IObM/jDjXkmJmvz2gcsIZmOc92+trbrd7YjIw+zKbzaQkEnCxGYHxsGe3vaW5MHRtI6xuJddyc3NNOszsr2+xysBF2XdTwvtJfBGZZbCnRLr7wj/ndtcyrAe6biBEATlSCOwPO2KINE1hos+TpBcnL0Mkp2n7lmHV0w2iJlJJ0RhH29ZQH1PSNUsAhG2wulhEKEMuYUGxMJ/tEr6ShZFfrBRqurYAbCXAQSuck/BEUXOEo8dnBT9SkcUXCf6xapElLtf5fc6FDS4AiCpghMJIjVVGja4RkM45K+GExYNWFjHZn4WCcPQGr8oXWU3KPl4IAUoJC1n6/uJnmvNx+Fk/ahksxTCDoQzgxBbIZEg5kEMWwgxHWeervZI/jNgiy69BDUutYjQY6QeNkfqscU6wizLUWvbPLGPXlFXZ52QwGCNgjKj7ymfOSmFMIpGJXgZIOQRhCStRNjbGomxh3VnQrqEtns+y7SRSDtzd78mIdVX0M8HPhHmUYUpKzPO4DNFSqfN8FDZgSgWDLoCpSkJiCiHh54z3ElTi/UzyxwHlfPCyZtBJgFEIJBWEmZiL57AXUNGnAE56fuss9mQgqHTxii8NjCgYClCnZK800RZgLZah69F+LedMKsSW4zMle7JWSuSxixUTy9BThmp1wKiBQirxM7qSWGotpZKcX1jAVlQVlC83Iz/v9UumuB49UmTie0xuoTwU0k5nUiyTVqWIQVhBXdNwfnbG2dkZtrXYwg56/OgBdrlpTSleymRTKw6HvfhLxZkcAxrFuutomxVN37M+X4uRpWvQbUPWljF6duPEOHn2BwGFVNaoiKSEHjzTQbyLtrstPpfQsKSEBtkqrNV01tDmjlXX052taFYrmrbHuIboI2GemTY3+DkSt3vG/S2z3xPzRAgju6i4ma7Z7j0GODs7Q6fMNmfCYRLATwvYoJUVELdMKFLWpFA8UwowE+cZWrkajdZElbh7cY5rNXcuVsToydPMYbshHPaEBM/3I8oUCUzwhDgSY2brnxVwSqjGzliavmUYBrq+FQ8xFNNui3GWth9Yn/9/jL3Jk2TZld73u9MbfIiInCqrMDXQhET2gm0000aSyaS1zPifSovWTjJtKJN2YoskmmqhgUY1CoUqFCqHGNz9DXfU4tz33DMyEg0vK8vMCHd/0x3O+c53vu+G/X7DZtuz3+/Y7TeCkkePnzwYj9IzMFNmTyiROWqialCmBzMTi8NnIwLisIikQAAAIABJREFUbcf++jlTKJzu7hkeTiIbHVMVhhYXHFN13bRZBrEsXmUN/CpoVBfiUBSTmgR08VEENoPHGI2z4t5WtGhotV1Tra8jeRaX31IKuoJzKUuCqrVoDbimw9gzi2B7c0NjHCq1xIhU0520WYY5MhyOHA8nvPdst1v67hwkkc9JERSxzlbC1NFar2YWlDPItEzkZaNYNJAuE7PlfuRSVhH4y7anyyT1g8oNfPQ9C3tlvccXuebCELo8vq2tuMJOuWBbXACJS1J9qYG0tGnFdDZgKLXCZ40V1kTb07U9zjWgJPk35jKJVCyLMMgtVmppE1zAE/m/rPWM+vdPr4/nz1LZH+dPkWPVDdRaKmqpOtDlCCVSdg0hmHpfDK4Rs4eUqurHspYWqZTltKyfVdPMGrpdz+7Zns31nqwtMRUmf25x7TfXpJAI05GuvWa3ecm2e8HPfvyX/ODmBudMFTs9A3TL9fYKihFyRAwXiX6Wa1FaJAe8SmQlYrifv3zF55+94osffkHbNISSCMkzzRMPt3cc3t+RZ0+jwdlEjidSkLbZEjzEQGsNjVYVcHU0nSVE8DETUmGYPcYZtrtrplhI00iYA+PxxDBF4jjhlLSXJy+ankInN2jlpKXKtiJannxtu26kdTotrmWaghWn11zQVsAEIYo/DVg/Bkc+OV7W3y+B159+3+NjrK2Ieml3Pbe9LmcmwJkUQliDFdk9UxI5gBACxhpssszVEdpai2vcec7FQPCBFEV7KaW0MsGwFg1n8FrLOJU5HNbzzLmgKwCogEq7ePJeXYJQK7D/CPy8/NnlOrW8//G/4dJoZqnS/ylNOtbPXJ7jJ4G2Tzyrx8H65edE04tVd/Py2pZzfWrtfQqMe+r1eK2+fJ2LMOWD733q/ZeFmuXYl8/ok/p6T5zPp+6NLqzOa2eB1w8+XD8EYtS0uAUmUhbjACleRoKWIlUpBWssQhKS1snj8QCl0HZddaHViIgza1uLc40kw9Q11hkR66/WgEZrWa+NxqpCCQvjTZGC7F1pqcRnYWF57ykotLOSTK3J3OWV1vt68dmUZP8wOq9A5jqGltbP2g4uxlPnZOFJkPTRGCylcHv7nr/99/83v/6HX/Ly1StePH/Os5sbbn7yY5TW/ObZDX/3i19ATqQw8//+3S/4v/6Pf8cfvv0Dx9sDIklcCwUGUBdzY9V/MtV0rZB8IIvzGNZK4T2mRIy5at8unTIdfd9i7FnHb4kNBJzz5JLZbLe0XYtPMw/DgTl6jHNstxt
CmPinL/+JGDy5bTid7pEdrsYZCSjQOmlx7DYb9vsrttvtKso/ngYi0DWNxMhR1r9YRMDKuJ6b6xtev/qcXb/lP/4/v+CXf/8rlG3pGkfXbwgxE0ZP8gVTdZtT0RjX8NnnrzkcTzx78ZxXr19xc33NuzdvGU8nfvvlb/jP/+k/8fvffoXKic46XEH0WK1osBmTscrwL372M376kx/z9VffcLi/E+fGRoNryHlmu9mw3+/QHoQVFEAVeV9l22hlxOk+BUqKKAVt08kcU8LAubu7o223bHfP6fuew/EoxmPKiulMZaD1fY/WhdPpJGMtZ8mbuq6aKggIMByPjPOIttd0fS/FMW0xWjGcZilikbi63vDMXeHTiVQmytEzjwPffvsN83AiJDidTlzt9tzc3DAMAjC/evmStnW0jUNy0rRKmmityD6hVCIbhbFiDGONJiqZu8FPHB7u6NuOvu+xxrDbbIRpFSPD+zvm+wOHlIRIYo10PFWnTe9nural6zuUUQzTyMPpgYHAjdI4syHFzHg6ri7L2jSElBimUXTBslictU3DZtOx3fV0fS9sx0TVX6ss5iyMLB8SWksL7nl9z+KcWep+mFVtK1YVRKoMaWohvAiz0VgpVijMGlMrrTBF4xq3uiwv+6EQCGTZzrmAShfrUq23FEBlFo1WWa6qRrRaYD1pYVzMMJTWooOWRJtTV5O/DwpwFyDJB/tOqX8/70CUShZKWRhMj/dyrVUtCuazdERBOonK2QApw6rb/fi1XN9qmHMRj6AF5FG1ICFFiWb9u1IK5xyXBbFFwztZW1mpquoQFkQQVIDvkiqLv0r8pJQkVwVM44RRSCVuqHpdqoKm9e5oRKarlAQpkpJo3aW8gZqHydhCmNw5EWIixIhPEn+N81zZd5VEEmt+Xg2TFkA1RGFdh5BI8ewgX4qstalIgTbFTE4Qg3TNpeLJOZGVIk6X3QPyd9c0EncaMZWxTtZNyXE0nW7RtQVbKSpIXdlspUJnRYh0WmUUqX6fsFStbN31btVxqyWOURX0t0pYmjFGdNYC7pIopHPyWKQzRpVCrjGQWifMR8Nqff1ZAJ3WUq1TLLoly+xgfdhCPV0Wx5pQ5ETjHNf7Pc+urun6hkiCnMQ8IgWSTmt754LAa6R1SFxuwNo9Viua1tK3Yu3eNg27q6s6FyXQUtpyYyxZegLZdG0NhDLjNHM4nHg4HDgdJ2Y/84V9weRnhmlk9p4pTJgkG83Gal7qns5Kq1GYRkY/MuXAcZgYhoHj/T06F9LsCeORHEMNSizFNrRNx8v+ht2mY9P1hOEEITGVg7QlWhF/t9U5LBZdxeQT8xwq1TdhmkaS2wrQ5RjwyaNaJ0FCs5e2nZyI8xUqeIrSfOEjWBFuHOeA98K0OTycuD88MI0e6xzb7ZYXL5/x4sULNtttrYrVhV3bym5saJsGrZ3o8z143r9/zzR7hmEQ0UofmIIXOm2IvHt/xzCM3L0/4ueMtYomVPRYF2LWpMq0iKnUCSZuTG3TsmgYqrqgp2o2EKOAIH3bCaiFEuvzEJmDWJWDqq3DEtS61lUHpbQiFJdJhTYaW0Q7ymLBWkwpZAy2eYZpdijT0G8tiEoJP/zpz5iOI+ODr4GqpajEPHvmcebw8IBCNvyu62TDzYvTp5JrvtiAZL6ek8Lga3sCS4XnQqwzZzEbqRvWAnhd/t400pZLNSMQQfXFsU5Leydq3TyXOb28PkrcFpBgBdXSmtQum5StVfYPnPkuzukyAV8TOqVqBUOej1IKo52wCJw4KPf9hrbtMbYhFdF1Kwt+WFRNbj5McO2aKJ95DCvIyYcOnet9ryDIshh/ADR+lDCfQVM5pmyaMUXGecLagg9nfStlxJFXdIXCWVNIcT73KlCqlMG1Df1uS7ffYrqWpDU+ZcbpDNDt9y9QWaHzBqM2WLPlL37yX/LjLz7nprcUJQLPoj8JaalyIiBPLhB8Zho8wQeUQjbgYcDpTKOAJJodp8M9p9OBZ8+foV+9JKtCCjOdM7SuYZgn5rv3mAK6MZQUIM9YxFFS244UNCVG5mmS26dNZbZJi+Vpmtld3fDZ56+5utqQvvwN3737ksPpPeZuJBehblhVUMmTgycFX1vzaqslmqy0VPe0QWlDKDCHREZVXUBQpiERSSVjMJJgFrPg/ufp8Ah8ePx67MR0BkPKn9p7PwCnl899eJxSn9XF+K74hjhsiuZVLgVSXKuA8zxyPEDwc9WPE0DDaCOtGNaRcuJ0OhEqw1guRBJxtKoGA6nqbiy/zuvYXNesCniLRs2y5kq1/p9jYS3XfcmMWxOAi4D3EjS6dG1dzuHyOGdwSn203jwO0C/P51MA3FMA1uPrefyZ9d7I4PngHlz+/pJ9/PiYZxbeGRB56vXUZ5dzFb2eDwGbp1hwTz2n8ui4j8f343v30b0+V0PkGItW3ROg5mXCpPRiGqJWc5AF8KLUVvW8tIaI4/jN9TXHYWQ4DTykzBWKtmuxbUspmpwiIaVa7KkurVEYd0YbiSeUgGJVbVyeE4msNbqybc6FsgWgWlxnI0SNKlmSSiNmCCUvc2C5RzUJWhK/IpIWqu7BS8Bf1mdGbSG2FXBUoteVCzmHek8umfEC5uXaZi2mbKLLNxyPfHl/x29Koe86nt88Y7/f471HKxiGkb/7xS8EmLCGxjZ0bSPtpSEQol/1rBdBdqMtrhGwJSHyFDGIDmYuBessXdcxVO1beV6OpjWVmWOkjbyev7UieaEQjbDZe/xU9WW17K/WGvbbDdc30uI4jSeMsjzc3RHDjDEQ5olUMj7M8oyVxihFSVkcfCvrZNGNCjGwMS2lSBsWdQxuNjueXT1jv7/BNR13x4Ff/upLjoeJNGV0gTHOdE1D122hhRhmnHXs+j2vXrxiu9ujrWGz3WCM5v7dO7781T/QtS1v3vyR77/7jtPhgW2/4XA40FlL3zS4rsUEhVOZv/qX/5J/89f/moe7W77+3W/xfuLuYWJ/3dC1jmJbdtstWhem8YgxhcbKWGpsg7MaYTcq0qr1W4uEWthS1K6IaZp59/5W5GWW+OSD9TehdcE5i1IdxwrgacrFOBX2Vtu2tJst11xLDjL52qaWiVGeZSmF3//uS8bxiLZa9LYVAgaqxO3795ASm37LzfUN19fPuLq64nQacE4klLQBnZMYT8SZECSfy1laIBcXT6MKrpJDFGrtqhjHkXEcxJQgKxrn2G562O1oppnDwz3j/T3OCWvQdh3kJJ0S00ihYBqDNQ2xOuQaDVErGg0lZ8ZxZB6FMGGsJkbPPI8y/hEN7r6T7+87yX+01rTGScdTBUBSyoSUMCbjnAB1sRZUM6KLvTgmY6SNXKkKfJdCKSIfInGBgJVN28k6GBIhiK5Z0zRoY6UosC7ZF+2FF//W6gyYKJZ1bNlzWMEQ0UPVVYu0xjblDPZnhPmsYxRg0Og137g4iQ/2jnUfKrXFUy3tlxI3CWahzm2q635dja9qN2AprK34EscI608b81Gx8fGeu/xucfL8oMBnRLdNq0UerMplaBmDTS2WrvlTdcu2lY1njc
M4S2Mc2tW2VpbzRGSOKBKrUTsvgYi0vYp8bdU2VRU4YwGoROICCqqk1aTCyrvrnmyEMJRzLfLnKlUh8ahoaQuInFIUPenakZHShPcHQhyZfSTmSNO3tG1bQXxD07SkCNa1Ql4Ikq9k5N6lMJMJhCy6sbruu9RLs7YR4xIMygnLW9s6BorGa4fBoHWpbcdnqRQQxqbSCqM0Si3trsj7FWyaVgBKQbnkzqlC0QKIozLG1DkhNELOxbkaHyw7ujoDyKIqtcQQn379WQCdtW4NBuSiljGrK5NJFkkJOmQuWWPoOkvfdey3G7rGSWVSFWzb0PaOooTOrRBB+qW637oG2zRo42jbln7Ty8LoxA0q5VqJrAt8KUUMCTI4pXDW4ZQlno4YI5XI3abQb1uef6bJZS+IbYqcTiMP9w3H48DpZPBzRGHYKs3LYrEUBj8zholbP3A3T9yPI6dpYB5nOtvQ6gZdAbltt2G33WHajtg6gXJSJnmhgfddhylF7LSzJFQREYv2XmiSpRQUGWezVDYL+HmEnSASOUVJoGZP1uXMLlNKzATqgqCcAF4oJSLeSvQD/Rw5Ho8YI7bgTdtytb9it9tSgOPxyOE489337/CpEL2IgIuQr6tjoOBDEJ2JcWT0U23DyCuD63A6UbLCj1NtBZzxXgaJM5bZT0xzwDUdnZGKQUqixRBjwGm7LkQLKdoohTKKYgyNq6BULKKFFqO0aVVGjVZK3N/aFpB2i5AiTsvi55dATWts2+JaoRGLXgA0bkvCYdw1xfRo1/LZD54DvwHg6uUrfPyefEoY06BRHI4jd/f3TPOAKoVt38nxtQSJxjlAWimWY5tFs0BVl7mqAxRiWNtBFgAuxkjwAqYs9tbkj4GA80YkAVYIiZSWBKrqDVUdroUdu1Sh6jew6A+sCeoTie6l85/oWekKFEglhQuA7jEzZWWuIFKaqf5ptEFbi7Zitd62XWVbNShjyHnRnFuqGpdJ/HLu5TI/rP8+BxcFhEX1xKZ7ThjrQr8muotWXwUlq3W9WYOUjK5t0kXD6D1eydwOC8tLLRRtmYsZeR5VaFAEYmvbo3EO13fYtgUrUuJzCJzmsw7izbOXdE3LfhMY7hXWdPyLv/w5222/XqmWiSPjJ2fm6o7pY5FK7jByf//ANI3kLO574/GATYkWBSkxDSdu375hHE785Cc/4vPnN9jOEKPH7Lf0rmGrFJPV2AKNRtwTnRVg3jmsUoRp5nA4Mp5O3N7d0zabypyDh+PE3cOJH/74J7x6/QWbTUf69Zfcvr9jDoXNJmCNxRmDsZreQtZwStXEJoWqZ9TQbTZoLVR/axwpZbqYoWp7pCzvm0OgKIVtHJll7TqDsqa2lP9zwMyTAF6BwsefW16XDK5L1tL6nsrqoSB7BblWRsVRTXRk0ipCrbLsjSlGvJ+JMbC6tGqNc9Lq5+eZYRiYhpEYAixgVz2uVpqiZawkqO14am09sc7hugbjpM1PrmGZVaxY/6cYW0/9/aNbVy4Zxh+DOZfV/IV9c/mzx2vUU2DfU6yyp9aBy+Nf/vxT4N3lWvTUMS4LFo+/7zGY+Pg4n7pvj8ff+RzyR/fkqc9fgobS8vTx9z3+9yXA/PT4L2uRxuhzkvUhKLe40JYVtCInFjFlbewqmZCrNmEp0LYWa03dZxSbVrSH5tlzODyg1J7NRhw6I5oYIzEk0JXBtenWc6eyOmT/rU6pSRgmJUZKZdDli70QgzAXYhCtuyJdJTEtDIBALnK8cjEfFuaeuG6LhuESU5sFPNBSsJF7ldcYADKiw10ZMCmve7epLG1jzFp4SimitZXCa9+zaA5SzZuG0wFrLK9ePOfYON6+e0cIkU23l3hbiybb8bg4KVN1bCWuMEaANgUkLffMq0BIwpRQWoy5Qo2zmsbRtmL2sYy1UFuprLVror8+Z0plB2ua1rLb71EatrsNOUfevvkjfh6xxvHm+zekNEOJxDCx3faEmEglk6JoUM2zZ5pmpnGiKIT5WKT9zVVWnlaKeXIYZ9htd+ikyRH6/Q0P/i2nOWBsw9Zt0QVMsbLn24btbsem77juN7x+9pyr6yuSyZymgV/9+h8YjkdSjJyOR372F3/Bq2cvuP3+DRpFigEfPV3XkIzi3emBUwjsX3/OT3/0Yw63d/ztf/jP/OOvfik5lNGMxxPJOVQneot3t+/JfzxBDjgLPsVqOiJ6XWQqw6zO7yLaYvM8UxS0raYg8cDsM7koul72UcXiuCgMkZTkc9M0ce4G0LUtzmNdS9f3bJ9dsb/a8/bte27f3WKdI8yR43GoRYxMijMpDCjdoHIgpUjMYuhxtduy3+24vn5G2/YoY6W91BlevnzOcDySU2QeJ07DET8OlBLRiFMqWkuMVSqIEVM1ahCQuaRcOx+i3JtUiCqjlaZ1Bts3mNhxezox3N/y3ii662tiltZVyEx+wt8HMdlzCtu3NLstbttTZig5VcAi0bbSLRWCxwcva4QqWNfQdQ1t10i7pxW9srZtKmlV14IxwjiKmZQL4zhzGkemyVOqDqSsNaKN3HUtFDEkizFXbWtz0ZIp5nY5Q0kF5QrWyO+ykpZ+2X+qnmxdo1XtADLGURCwe8khlAJlFGoR4F9W91JWkH+JjYRsQf2/LH+hlJoj1E9nxLRg3TvTmYX1wX6mRI+1VPaQZE3pDHZVALlcMOdKkfwyZZH30GVhC+qaD/5pBvqfimlKKZAKqRQS8Qx2P/r8JcHBWGFvKa1F+7RxtLbBtg5nLGiFVVJMEOMU0FliYYNC5wDBE6wF5LutblDOgKrtoci6Gpf4AJm/GinAVEoBKVLNIqStWHAlYQcW6t59QXhRZaFWFiiBeXrA+4HRz8QsZjL9psfaBlDMIYjUizW0XYcvkRzy6kYua1UQALAgsh9Zr3G1D1GkgdAoD8rWdmclG69PCz1C5rRxGmtrflmLqMYYTNNIgQtdjQUjEUgaxBRKs1TRqpygdLgoMYhobJUyqsxxU5nvsRau17KbqnIx1OL2xVh46vVnAXTivLkkwvDBNz5xAK01zjp607LpO5w1lJyw2rK7ueb62TX9tqFpnYB7SqG0OK00TUPTiOjuXNF8ZUQIPiIBSTYKjGHM1WVuEWqmUk5DIuhCaBqZ2jkKq6x4Sp7JMYhwZFGk00A+TaTTRDh5xiGRIgQU8XTium8w+w19Y7lpt+je0vSW7dzQNB2dbbBYYWRExAFGW5JSaKuYxxPzOGG1pu9btk3DSSseQmCaJpK1ZJNqi5EnZaEpL735JQlgGHxiWW5K0Wjj8NmTFeTq+rUIe1MrFFYbVIpSCavU6tY4+t2GbddgbUMqiVKkncOPJyY/c/f+lvvjid9/8wdGn5hOE8dxIkUFtSUvpiJtBlEqnXOcyTnWxbpWSmr1wkcRSB6GwJg9KQapampZfK1eWkPFdtrWxbm1ZgWIcl0oszKUIqxAa2xlzQX8OOEnL73zUZo7MaI5hRLhyFSQPvmUVuOFohTKiqmBTAeZjAUEwEwaRYN1W65fvuTn/+rn8DsB6G5PJw7TREThtGL2kcPxwOl0JOfEf
n9D1/YYa9ZqujWSVATvV6H0NXlUVNe8vL7/8v8lqQteXHlcNRVIIXwA/iyLfUjCgiwXyeC5evAJUOFyalPW9ranWCjL8eC8MS0isHI+HzJe1u+orTF1+Vi17EqRio2pYv7GWKxrcW0nC7oSRymlTb2P5yS4WhB9sDY9Tuofvx5v7h+BAFo0EVb77+We1Q17/Xx9ZuqCLZNLZo4COZYirLQkfdDC5HAymyUQkbbokgtJJaGiIxVQ2/aYpkVpRQKOoxdX5vqyrmG7u2Lbgi2Rly9f8cMfPqvPpSBOGHLePmcOw8D94cgwzgxTwHsvRYrDgXEQR7F5npmHkTKOmJRQKTEPA7fv3jCdThQ/8pc//oKrmz0+TMTjget9T6czn93s0DGTk0eZjHZO9IDq+BWPahFGp8gGfXt/YpgTh+PE/eFEv7/h9uGEMobjaeDh/gEKdNZgu5aN62gaBdaRZk1JEe9HfEigha2gm4YSE9uuF/0opXFNT8yF0zhKuKcNw+zRxtC0LakklFaEGIHpo3GxDK6VjVSrbXWynF+1gibj8gx4V6SP5UOLsP46P1iqxef5JWPh0ffXz5eP/r2cowAzuRTR7LhoUZnnmePhyDCc6luXYy+xs1oLG7kC/0vguJx707Q0fYdtXK0QCyN4uTcL8+upefcUyPU4qH0M5j9mFz8G3JbPXAL/j1tIP3yO52f4pwD6x39//B2XcgGXv19+ni8e/2X7LZzZu3+SGffB+X18D2FhDcsao9SHFf1PrXufYgL8ucDfB+f4xHmvz2UZ65VpqS6+I5fK4tBqvU/yc4m/oOCsJChQ1qJVzud9RH4XmaYZhWK72QIwzZ7D4QAl03cNRourok/C8gfoemGNiLP5WZ9RGPpSANUU9KUxhloq6Mu1q9o2H0VrVOvK2BINNLtKRMhnVkMNqk5rFnBAWP0fzo2UEjpLu45z0sVgTIEiRUzvK1MtXRQT9NkEJGVJxtvG0bUWZzRJFbqmo+tachQNrJQyPke2fQ/Pn/Fwf884DuSY2G42FGvIRFKJUm/L56LDEufL+iGgvtYzwU94H4l5YhhOVadO2HRd3wB51Ti6bKFOWRh4OQvAp7Xl2bN9FQAXpoNtjHSolMT93XtZd3JiON7TWEvOHmtht22JqXAaAnOU4mSGtVAmQKbER5vthv3NDVd7GT9hHgnBs+33NGbD559/QWMcX/3jl9y+eYdSiu1ut4IBm92Gl5+94vMvXnO926GyOP++f7jlfrhnGAf+8de/JofIzdUVjdVs2pYffP45b/7wLePpIAY8zoLVDNEzRc9Pf/5f8F//9/8D4xT53/+3/5Wvvv6O8XjCpETftcyz5/7+Dj1lDtc7/viH7wh/uCeEGZwlxYk5Fayy5KyqJtgZtEcpQvDM3stWoCytdricUTlTsW+sk/i46xytcYQwMR5PPDw84L3HOVf3MmqBz6ON4+r6hr/+r/6NaDn9/a84HQeMdkxZWKR915JUxD7b8/nrl3R9R9to3r1/T0qRrnH0bYufR+7voOvnypihJsxLZ4hnnia894CScdY0ZBT3DyfR7NILg3dpY9ckDDEE2ZazFKtdY6XQH2Z0mmmswWw3JAV3w8C7t9/jxhPtdoM1jt1mI26QFAJ5NWcoVuNToASkoB48SmWsEx0372cxhsgR66TrqmuFVUoRtq3WBj9PIs1TnWitEyABFDFn3r+/k7+HRMyS9+SUBPhKnqg1uubWi274wlzMubDZbETmZ5wBcE46VVIqDNNE27XVnOEsUZPrvbqMXXItJFzy65Y1b2k1rYHoupeVIm2Ncq5JjB0uvrNUd9qs5ed5/dwSulSX1hrbiBlKlY9QglWWJCYdein6L7pvSUyItFLSVbLGTdRijVxLysJMeyo+eLzHXhad1n1w+c4KMlJTFZ3r9eRCUQKc55RISqO0aFOjlQBuTmO1Q1tdgTkxG8KI+aOqf6LBoFFWM2lbtfplH7K2wUTRKqaabaDFlTsrwVVAOh8oIu0E0uUnUliihbOU0lbz04v9USJIMT4SVpnBbffExtDGTkg2WmMaS2MbtNa1kGPYX11TsiLNiXn0zKOHMjGPmhQGQlikgySfTUnGeylilJiq8Y0YHVYh5FJoqoHG0k2wdK9pe86rjTHYpqNte7SWduJUDTXiNNXCmri8a2NEs1ADpmCdolHi4Ckpl8T+WgtILbK5NQe/uFNqGXFL4PaJ15+tQXeuCCO2zggpow7V+i4qSGRorKV1lr5pcdbSWMezZ9f86Kc/4vVnL3G9bLRpaXldqoBVpLMUjes3+BgZ51n6kSkUrYSZUdtwFsFhMuSYKTES8oTXhntrGFOmzAN6GtHzCNNAnk7kaSaFxOkwcvcw8nD0HObI4BUJS6fgL7YNrbO86LfcPN/xatuQG03UAkLEmKU9d4xC4azCiOMYmMcj2ipy8nSN4dn1Ddu+ZzgcGQ4PlJKY/Yyiw2m1VlqVBm2lwtG2ThDkKJTw5XU8DQQNc45gq2OhUzRGQA1tpS2jbzQlBnFYTZEcE2M4rYwjpSZizqJ9VUTXLSTshYfFAAAgAElEQVSp9pRcuL55RpcKYzuR3j/w8DASfTUHSIpS5JnkbEjJCMUYoX5qDfM8C5ASkgAdlsqOElCOLDoYtrqwQqa1EvS1XSMja2mNpAo5lkyoxg1hEpHVaZgJk+gKinMLtfohtOlQg12nxFVOaUG6m8ZVXWMtZhdJKNYpR3IuGDQ+FBzQ7Ts++/yH/OSnP4ffyXP4x6++wqRCS4+fJw53Bx4ehPJ/td+JlkUFVZVdqouT9KqjasVcgLlUchW6Fyq7Qtp3NKrqIlTdvXwGxlIUcdmFWn2ZyC5gnlQJPm6lDSFIO8uFztzjlyD+a/mfJcVav7uex7LQLElyXgXD8/rz5byUqu3IBYo2H2z6y+ZqjENrR9P2tP2Gtt1grJMqVIgoQ3U8zR9slrJBVHC2btaXS99jZs1yTsufl0BiLsvakqt7HuvnFh0DEemum0OpwNMKfuYP2FPKWqwWDZBMYfaBtpHkVNeWQUUh61xp6QpjGpp+R7vdgNYE4P5w5N3t3fq994cHNk4qP7t9z+vXzzBWMY4BFQI+HkVrZ545nAZu7x+4ezgy+8DD/YlhmhjHWVowpqU9PZBjwp9OxHEk+4BKkXk4kfzIzb7j/fff0beGlGZO/oRNPVd9x2bbUebAOIykHLBFnMHi7DmdTkyTB6XZX+2xTc/b23uOp5Hbh4HT6DmOnl9/+VvGEHn+7Ipvv/0DOQbZV8rMF68+4/NXL5jnieA9OU+8vdUoXcikqg+l6yYpzB1jFLvtjucvP+PhODLNM011C3fzTKagrSEXSSJTycB3df2pjJPLQGwZy0sb4yOgRy3UzXIxcChr8LoGqLUKvDBgtT6DJKkCw9KGQdXukg+X+nuVL9qQqOeRKxsgyfdZ51YA3E8zOY+cjkdKOWvxLWCQRhIZ0fzUtEpE8BcGzzJ/rHU0TSuAgK1ix5fgGWqNCD5VH7ych0vCfDk/l7Xkcs1aXkuc8Cnm1uVadDm3L9vxl4LHY8DpU8H3
U5XxpwoXZ12++mz4mBF3edzLa/tg/cm5PvJFY+jjazw7gF0kF3y4jj2+d4+1QR+f9+PzeioB+RTA9/hVaku0MLakyLrUUajFH/LlvJK1cWFb6CzMAAEzVb2XwrSYJk/Ocp6Nc+Rc0LVDQQHzNHEfAnknxkwyZhpKCfgYOQ0DTdOQKcQsjCDZeyO5AmamtrxaI4VjWmn/nIOA+kprtrs9TdOsTDC96AzV9jGgtnSxzhGZznI9MZcqVbHgmWqNdQoLq85UYK+glDCSjck1iUjrHro8x6XFVZFRJbPbbNjv9kzTgJ8D5ETXSuvjPE20bru6+oWuJUYpAN7dTUQf8JUNKPpRkmTLHDLkmoBoLY7L1jrGPDHPM7q28i6MHVvlVtZ2aC3xz2JwlXNajW+UEhAz5SzMbgJTGNm7LU1jaYwm5MRu2zMcRW/TVfbh/mpbAVKF95l5DsLMdxYQmZGsRPqk7TueP3/GzfMb2qaRzg23aBZZXr58zcvPXvIf//1/4Otf/SO7tkO1Drvref7ZSz7/7DUvr294vtmR5pnv/vAHfvP1b/n6j98w+oGXr55ztd+y221ELyonwhy4e/+WlzfX7DdbNl3PcThRFAzeo4zmsx/+iP/x3/5b/pv/9r/jf/6f/oY/fvMtaZ7pG0fwgeAnmrbhOBWGYeR4PHE4HAjHh+qGWchxFoaMtpRiUMqyFofqfPVzBGpLJFIkVFrAb1PO688ZFLdo3QBFmHfLGlhbJlUV9tdas9lueP7sGd+/fcs8z1AU3kdSFIOivuuYy4lWW673Pa9/8JoffPGSr373O775/beQAuMgmleUB7rNjuvrazH5SIF5GvHe4/1MmCcoha5r6VqHouC9sMNF263HNZZ5nmu7c8TnQpTZhvcTKfZ02y0xyZhrUqC1Ao7smj25s9weT5yGAyElNrsd/WZLt+lR2nAKMx7RdSMX/OxJUyROM97P9b4YQpiZg1+BflNdQJtGTOhC8LImKUvIHmcCIQSaZqZte1zT07Rt3YebCoZUcIvq/DzPhOgZjiOlJPq+o2lfcHNzw36/XZmOTdOILMZmxs8RV00aZx8lnHZ2jQGWwsGyX4M+A3YVnNXrHiq6dNoocq45AVXJq1Bl82WdW4hzUiXRlKLJWZ3X/azEyVVLXLcYBFCdXa21snc8Yn6XIj9bbPW0rsDUEguscgICGgoTdOGOybnkEtf46jFI9xTb/aO4YZ1xrAVdjQItenHWGioOLnFlvR+i5y+GJKRCxFNUZtHKVyJQTGtbMGCVBQMGgzJgtcNYAdWda2hci45OACitUVpivGgiVC1ApRRafbjHbztbQbBcZUwKpWghTikx7FRQwXnWuFcpYYCaYqA0yEnJc8vU9+WMMuJBcHW1p21aFIbsM2GOKCJhviOGgXnyIg8RpfPRe8lV5lmKTHNKhBREx7wkYikVUFR13zQrmJdzplQzIDFU1JhYyFmKzyXDNAf8PDEKxQelpMNPaV3bxiGrSNc5+l0vUmBOnNRT7eaLWRzdKap2idWxVmN7ypkh+qnXnwXQLcHDUuVcA7Slcl8KpaQ1ybBWTrZtG/quYbPtuXq254vPX/OTH/6Q5y+uxSGRRKlgTtM0NM5Ji2YRfVdtO2nvTIk5R3wMjMEzzxNzEP20HCPeC4MqTZ7sA8TEZBXf58IxBvzpBMOEmifU5InDQBpnwjAR5sg4BWKCpC3GdWw3HdumxfWa1HXMaFQs2Fjoe0u7EUvlkgp+Dvg2kVNBmY5YDMd54jgcOQ63bLaOrum4vrqixMjx7j3eD+TihdmnC66pToFa+re1AbQiiiCGVDDPrsx8//6IV5FQEAviRrRANs4J1VJpTAlEP0EK0hYZM5MXSroPgtYPg7SextoWJW1CVRPPOqZSKNqIRtXk8SGSIyjdorUVLSylAKFb5wCxJKk864JVmkYZcqWEOkU1YMiolOVcnTAFtZHKEoBxGmfq4NWGlFhZZaTKNoyReZgEUPDpYnEW9ockplKhKSFiG3HP6fteKu9KBCxjqZXCnGp7i9yrkgtRS8W6FEW32fLqs9e0/W59Dn98847r7Q5lW8bTwN39HTkGrm56rq+vhBETodTEO1UqvrQTtOh6jeSyCk8vYCTIQi7VvaVuIWxTZy1aKaIPFxWti+R+0VarbGNZYCp8XjfZEALN0rp+MdE/AhrOu0tdnM//r+2tSpgEa2KalwX+49a99TwoH6wpuQblS+BvjFCeXSNOvrnUCntN/43OZyjuMnu9OGVdkx25RFXb/fT6hhgjlAW4qx+u1y4uQBcin3KDzgtiXesWMdSc1UfvK0p0eUTjoF5/TKKDk6I4DJUFnJG2pyUx1kpjrKPdbGg62YwjcBgG7g4P65He37/FNxaL4ap/SdEzX/3ua3b3bzge3jAcb7l7eBDHugcB54ZpJqMYTiPzLIGZD57oa4IU61aiJFAt80xrLbZkjC6kOBPCRJgHcokolZmmjCUK42LyDKcjSkecbQlz5DSceLg/knPh5vlLNrs9p+k9b9/f8/b9He/uDkwx40Phu7fveXd3h7MwD0dKTmhV2PaWn//sR/zwB5/x/R//yOF4JOY97+52fH93C6M4Qo1zZJ4TJQTCIFoqRilUeUEKoh3aes/ms89wGuYQCdETc8LV4s9Tr8fAxGMwZxnn63sWkI6PwZ/Lz63zbQGw14lBTRQfBYGc2Qp1ZMv8rBXFGD2QaVoJ4GMMqxPkIkPQdl0FhVnBN1MF+nV1yVrkDASUq8LGWqqJWlenNFhO8oPzlP1MrcWg5fUYMLp8LcDZU6ytp9hyl/dteV3ew8dsssv7/vh9T7HgngLRnmLeXbL5HgP9SwviPwdmPT5PVUGd5a2X1//4vjz+2fLnUwzhx+d3edxPMeieAkE/dfw/9R0rOFfBqhU0PV/kmrQKSJVr5TyvewKIkVSMsof1fc9msyVG0YRqnMMoTZg9w2mgYn1sNhvapsVaQwhSLAgh0HXdReIpxbmUUt13ZfyaWmVfzA2MsVxdX6OMpXENhcLd/QM+eHQSR/aFIVYRyno9SVhbFEhqmTYXN68WFezZZClfOJHLz9KaRGpjaPS5pXVlhxRJAoSpXPDzhK8SM7aXroR5nqSNqMg+qBVcX+3ZdB3jNHF4eOB0GjiNx9V9T1cnwWVc5yIyJxQwSnR+UkriIB0CjbV0fQeI7m1BDDvWucHZ6EoWsKVIb6V1rmhZxyvwIM8kM08Td7e3TOPMq5eveQgzfp4wJFqn6PttbUeuLVgpU1RtLa4uuwsw3HUd19c3ONcwzTPTOGCNpusacoqcDg/8w/s7vn/zHT/48RfkDDcvX/Di9WtsI+zB77/7ll+/e8f97R3TcGJOHpIAhsM4MI0nUog4rUne47Th97/7mvF44ng8cTqeSCrXeFTulW07tDF89eU/8Yevf09rDJMS4z2jwM8TXb+lsY45alLIBB+JQVrDVJUO0hQxOynCTl3AuKz1mng71+DaBmMd2jiJPYxBFXk+yihCKJQSaapjr9ZnprJz4qzqXNUtq46uD/f3/M3/8jc8nE6YYrjaPaP
RLSofGMKRaRpoG8u792/45S8fuL39npubG0oMNFZip4f7w1q8GQfpzlkA3XmeySmSYkCpQtM09J04y05DNWVA49yOm5sr+n7DNI6UlJimgRCCGGcU8F50s5cJmYkkVQgktFO0zZbn1ztK3xFvb8V0aTiJcZk2GOuY/Sz6cAUaZXBo/OyZxlHiPQNK1TmSohTgTDUQcOf2S4nBIKpIoxwpZIbK5DHG4ZpW7rW1lKJWTcWl2EedLzEEhtOB2Q/oe03Knu2mZ7fr2fStEEDqnNv0fTWSErMsY5zowmnpPLLWrGDW5VofY9WdRxh5Si8FFlnQlr1vaSWVBWpZ70WqYylGLKxOlMQWMWcZx0/sP4/jpaoQIwBlzRVzKpVVKIVGVQuhi/agElQRVQEXVdcIVUSLcAnA1pj8omj1VCHrMmZY98yn9s4VP5GXXtbsel0LZcKAuLKWQiHVTp36fSqjs2KaBgF1L87xzAyD0yFVE44W27QoU+e4s2gj4J0yUoTSxtTut/OmNKmTgKhLfkrV0MPJPhRrkFobSaVFthbGtaKzHdYrQooUpUjE2moe8RRy8qSmkRyqFKzWYiRqpRDlzJYcp5WEInIumVC17o/DREjSfThWqZs5BHyKlBjJw7Te31JJSDlnYqkFObR0fEXQIUlFsCgx/omJEn1NB2xdF8ralo5KhE6k2vp2Q9N0qALBX5yvXoa6Wp/xOd46T4VPvf5sBt2HmaqE5grRX8pZkOrGWra7Hbvtlm3Xcd1INeP6eserz15xc3OF9xPffPPAHGfaTtB/5wxd364OQAZdmYETumnR1uK0QXcG3ThKzkyz5+3bt8zjxHQa8MOIjhlnLJ210Bh2nWWaHnj/9h337+4YThNhDPghknyAkGTBare01tI5y3bbsel70S8wllFbwjTRkOi9YRgMVgtsdvv2PdM0kwu4bkez21Nci24abGd5vX/J/mpL3/b4aeL9m7e0jWGz6QjziMoIy07VSoEuUuAj43MiTAFQ5BARPqy4RLx9OFWX1EQ2pTp+ZkxBxB5TRudIiTNGS6V1nGahgYfAVLU4xmESh60sLmICYAgw0rQ9unVEFCUrctaVNafJQYLQEELdzwS1lkBN46yq1eNEazSmEbaCIUsvfWcgFdpWRG5dIy2gqZwFOXOOpArepFpZnb2033kvCHXJizCnVGNVko3k1atXvH79mrvDPe9u33E8nfAxCLXY2lU7bF2Qa9JZyMwpkLTg7uM80/dbitJcXT/jJ3/xMw4X4Ih1DZTCcThyOg6gFPvra/qNZQ6B1tb2nFi1rSi0natBrszxFY67AGpKTc4l0P4QYBORVaH366Zhnuc1YAEugnVd9SCLCIcWqVAvtgjWuFVbcgHJVL5M1OpGq1i12lJl/Bi1MGrO7LulhVGqE2uBgIXtd/laNuNCrZ4tjj/aoI24SmlrKUoJABuPFK2xbUfTbcTV6BHIsQDcssGWGjRQN1+1Hnlx1pHNXYJhsXeXNSxVNq+uhh6ltuoumZ6mQAX5FuHr8zVWt6iCJDAxyLVWhpE4cUqVWVUXUqXUeq7WiiD3Apq4tqHpejE9QHCjEALDeFoX/tNwz3zwUtTYR/7+//tb3p++YbvfEA4PxGFimiaGcWScRybvJagqVZcwJnFgiqm60srYywpio5lzRKXIpKDVms9ePufFi2t2217EmVF0fU/fOUqOFAXdriHnlmkOaG24fX/LMAwUNLvdns8+/wF39w989fXvefPuPfeHI+PsCUlJgmIth+NR9ElLwBAgRfbbllcv9jS2sNu1PBxu2Wxbdtdb0YXQhcYZXAgoJ/VaqzNWF1qraK3iZrflxc2O0Qf2m46usYyzrvbxHq0i83TW+JNk/WOQ4ilgR9atM0Ppkr/5UeDGmb26BHYL3X4ZU/LVZU1Ql58XqvN4KdWUpUibXC5YqwnBA4Xd1Yama5mjR1lDSVH2V31m5XxktKDUqvu0gOXLcXOpsstKSJ5rcKoQ9gRnZ1ZhwutVSmC9J3UtWdi9i5vZEmgv69djMO2SHXx5zx7/brl/l4yi5d5eMngWE6RPAYZPAVuXrSuPf/5U0L4w6ICVGbTc98trvGx/vQz0cwVZHrPvlmNe6tg9df6X37V85nGS8ZjdfHlt63p+cZ6PwcrLP5fPrN+jJGv64PyLaNmUy0SgLEj0RcGmAndqMTSqz0yOpzFG9nCpoKfqkCcJp8fTd52A9eMkUgnWok1b731T2V+JcRzQlTGEsuRsJfhf1smL57eAilAdW5UiZKmc9puWLrer6PaizRxrG6Wu8yDGiNVKWP3r3BAAcmGqIVxgVM41xqpjz+nVbdu5hhQTZS0wqWqk1OK9uNajijjiliwsZGMoF/fRaEVIAWMgxwQ5Y42ibx3sNhitmbxnnmcREDeaEBPGWmYfCPkoheosrO8QAn6aKTnTuJZea1ydcynF+pxECzPnJG3ORgzoFn3rheWita2AXcBaRcozfTHCOImR4XjEaLtqScXkOZ4mUmtBR/quE0dvLU6yIee1GFfgg9Y1ay3jOPDmzRtiCPR9i1JXvLp5TqcMf3h4S7N3bLsr+m6DM6Jv/f6bO968fctpGrkfj0xhwmpFbwwmSYdRmEaG8cR+s2OaJ8IojqtjSPzm9jcC4BuN7VumORJKoQEeTif+3b/7P3FF8+Vv/on7w0C/v8JYzf3hgTx7Hh4SXkX6zZaC4v7hiB5EC1prI4Z1ubDb9vg58XCcUNriXAMaQshSsO4amq7F2AawULS0iypNJmF0xzyNhDCjstzbqbaUWmvZbDZ89vlrnGu4fzgwjlI4/+P3fySZhHENr5694vVnP0Rnyxv9PWH2+PFIDgVKZJ5Hvv76xFdfFaztcLaVVrMQCH4WoNwKIOf9jBS8xdRAqSwi7xQmpO11PB2ZpwnX9igyfdez6Xu0UnRdz2J0pJQmhoTqdAWFhU3fdA1pEr3xm5tr+usrstbExhG14vb9A+MwcjQPOGNpmpY4zNJeXECnQtGWME4cjw9QEm3bk4u0xFMlTWrQXwkZBZtl308pElIQx0ikdV4BPmROgwAAsZTagigMIaXFENE6SwNM00DOGVuLG9Mw8u033zKOJ66vr+m6ju12S0TYcGpJRXJGo+i7lmyUgERK9nbravEg11StSBtkqfQpbVQ1iZHjphRRxuKMtA7GIPkcmpUJl6LITRnXoI2AvEoZYhSN+lJK1TxDYvQkWoEgMVQu5VzMKhcEBSW5q7aWWKS7QqlznGJtzVNgSUpYQMVlP7ssGF6ayi373qLX+rh4u+yBucYZf6pA97gIdo4zJJ8+u+5+CPAkqNqk55/FD+KnjDWFaV7abcWQTRnJr7SxGOtE19k5rBVzJvm3aJpGW00uaj65rJmqxo7Wulo8qf/p5f5qSIYphrW1PqZEIq4FnxKFxRzjwtxspChWaixAxkeRMdOuqUV2eXYmRlzKdFdqfW6h5OpwHERbOgfyMEJMpBgIMazsu2mW9asotXYEZmUIsbJQlQZjydHXQnsmZRl7mVz3w8I4RprOEWdPmDwKSDGv+86Y5/WZGy
V6mNTxViir4cWnXn8mQMda8VrGsaIKAipBArWGxlk2m47NtmfTNLTW0XXixjLPI2++/443bwuxBFIKi6Qaxiisk8qZ1gqVaouPdri2x3Ut2jmUs4SSGE4Dh+HIN998K4KfWrNtWm72V1zvr3h+c0236Yg68nq/5YvNntsXR06j5zQHxkkqfK1rMZXuaFOkNZqrztFaQ1aGmZY5K47zSDzckeYRkseWiM2Zh9s7NCI2bvoNdr/H7ra011e0m5ZN6wh+xBpDnD3D8cRc9S3EpWogZY+vTkCpitqK/pRjnosM2LgE0sLe+u3vviWQOU4j2UiwCxWgSwmdEzpn9tsOY1gZUwuqG1IiRJiTDAHtrPSoV20DZR1FWcY5VAaEqiizFjZbnlG5JkMFCQSLVCKMsTSNwVlNYxXOikhjSgmN2Nm3VlrymsZhrGgkyGrvakvDxOQ9KLMGUzFIa2sIER8CKaa1+hu8/F2jaZ3YpW/2O6KCKUZipe2HJLT3y2Rlqc1mRDA/JKHB6tVtCLqNMOJQcHvRXni12zEeDpzuJojQdx3Npse0YEjVdrmKny6hvjozuQpgSh3rNeFesHAFWFXP7yJZWp7lsllctreuDBNB4wTRqYmSvtgkngIW6pohfxbRR5CFslbkylJlOifui+7jCpLJ6GH5atc0HySB5eK9l9eUS6nitVJJtNahjeV4PBLSCW0sbb9l60QzMafFGe+iPazSvqWtSu63MYqq9LYmfmfA48MkVQC6ytKsAgvL+1W+qApWfnZZn+flnXt0Lxddhyw24kXJXJBWEIcSqiGliChqyEXWhhjRNtXE0a6ErlgKk5+Z/QTdsi5nioocT0ce7g788e33bH+7pet7bFTghZ0p6+ySJPrqMigbZJ1klCSGA5RCUhA6S3YKqzStszx/fs1f/auf89f/+q/42U9/hE8eaROIhDBxfbVj0zTMxyM+B2Yf8e9uOZ0GrHVcXV3Rb3bc3x/4xy+/4vfffMe7uwOn0RNiIRVFyIUoMhaknGmdwalC9jMlB7rO8uLVc5rOcZpH/Pt7bNOy3V/RDIEpDDglrGQMbLoNm37D9dUOpxVRw83VjufG8XD7jjl4+s2W1jUcfGCeZllbFgT0UXvrMt4XkP386M/z+4Mq8+MS2cWvdQXa119VQEMCRglkP2Yv1TYMtSh+ZFY35co4KjnTNJbtbotrGx4ODxU4qQCg1lhj1yBQa2FVqFqs+Ai0+9Srriur66XSFdBeAJqL6/rgYx+2zHwMTH5sivAYGNJaf1CMWBKIy4D5spX1U2vfn2KlPW7zfLxeXAKMl+va+UQ/BrQu3/cYhLx8n4wFSXpKWZpgPgQJF2bCeVyo9ZkuN/9PXd9T9+DxeSysjsevy3v8+JrWZ5irJMMFc1kvwJ06s8OWsaJVbYMtwgATYPx8PeeW3gsW43I8FMqqtdWk9D0UGKepgniRbdiw2W0qqJ1JlSWRamuWSIXUsa80Sce1DX0B6KhnkBeSuhEnWGMMytbPVb0bhaz1wkCoOks5oYzDOmHBpxgF9NZSdDKVvXr5DMQ0IV3cg49ZmXJeF3tZBQgvmbOX7z6PH70WRS+fn3OOzUZMrQqsLbwgrWLGmaoBiBgeKU3fdeSNtMvmlBjnkX2zF+04p+Xa1eIib9BIi6vUvs4FyCWJLkhS07UtxrbksrQ5RXzKOCfn3bUd280WP4+EGBgnmbuts+JY7agmbREdDdrZegyJm2IU07Tj4cB2u+Xzzz/nxz/6Aa9fvKRRGmUL3757wzDN/OGbtzw8HPDDVLV/C0VLnGtMg9HgkL12/v8Ze7MmSbIkO++7iy3uHktWVdcyXeiZBghQSIEI8QIRvvCFf5+EcBMCGM4i3TPd1UtVZsbmi5ndRfmges0tIrMGbSUlGYuHubnZXVSPHj2naCH57u6OseuZRegPKgA/X6a1eOe7nlrUyK3bjfjY8/6nD5wej/hUmc9KVEAUeBvGnjwtXKaZ3MPoOvr+QN+NVK86sikpQBi94FEwuAuaIEpz1O1HdmPUemNoRmRRC/KCSq44ZYw4j7X+ujWXcM5pi2XUvKEWsVhXi7oxePa3Kk9yd3vPF+++Ygg7bXvLmefnQEpHLpdKzouORefB3CVTMj3IWqxgBPOkEhXtUNartjGOw0Dur0XnuOkQaWuZapwPpksbCVHIaeEyTZymswKVIbDf3SDDiNwecCGwBG1FlRgoZuI1DAPn4wlfoYs9s2kHTpczl+6obcO5MM8T3piupWrLrogokymqS7NUR1oyUiZbD3XtvCwvSp5w1kJsJlcqlYICgs5AsrbnrnM7sN/vcTQJKdWgfXk5ruN+MdbcGu/gVbsNW4sjqtEdO2uDbEypaydKYyU1wKoVrnHK4lNNfIsz8cZm1UddihBpOrfK6mrxBKKsSNf2IA+h7wglrHJA2BxyzimQJMo4KxbnD8NA5wIFa/sWkJo1PyoFecP4E5xptuv1qhGPvNrH3xa3Pnds98ef+x28lhja5kQ6Zls+9anB3vo3+oef7MENoEtVuxSlYg63AfyCI1Cd4jkhKDgXveqzRaeyWd7Dfq9uws5bsdVh0gQdLjqGQc3ovPcKBNveqUzAQBcGA0EFkWKFkWuM0A+DjlczOAnmNlotHy5SzTRPSTTOGVfJK3RVctK8zzejjY4+DsSxQi28+/qXSFGTiWxmKalk8py0JTZnlpxJc2ZOmZwS85LwS6HESHICJVOK7VsxGlpQEckqWbWo1FRwCm4uARDNsdY0Yn1+rHEEzil55l8Isf8igK61HnrZbObrwNCNvwuB3Thws99xsxsZY6CmZG9RmZdJdfr2i4UAACAASURBVNokaS+1EyqturttOQGKBRou4GOnIqNBEc1MZZ4z0zxZ9dARvSN6FVpvbYNehH3xvLv9in/33V8TxpHFO17mCw+nE8fLmWlOUDPL5cJ0PJJOF+o8c1kW5iQcl5lUoOQFyTOSZnxZ6IswAN9+9Q3jMBKHnkVgDo4aPCXPXM6ZdPHUoi2YaZ5Zppnz8cjjxwdenl9gReSvmmENPKhEzhfVbqG+npQfPj6xuArRo10KpoFVBVcLvhSCCI+PL4YR1GuVwQIyERgP99qa561lqSiQVpIjieXqWLIqDidF2Rfe4wVitPYn5/FBxWSHXrXzui7Qd+o25Z3Tdt4qBB9UK89pJRWuAtCCAhTFnCUVOPPkWkjJkPFlZk6ZUpSa72LHENTpKKXM6XLm93/8A4+nF7ph5OV8ZE6JLuqGlEqlM0BLHVagGm1+TjNzThSB3nsKmSVNHDzc3h7ou0harsHBfDkznU/UAje7Azc3d8TOk+VCkUQHpp2wThugaVHJdbNri6oJhmKOMdXa0cpmbKjD3FVUdbtJtOBanGo81GzJL6/Hz9vE+5oQbxKxhhnWK6iliQEryIi83qycJVYBNeXwTsVGV+DQQEm1986U2lgj6twaYk/oegiBIigYlYS+GxlvPLHvrFWmgQ2N/QfONcChAWpcb3pbHLfVMvvedmy7/yZie6Vy4BpeZ6/XJ1evyY5wpeBbItn4Tt4F5dRJX
s+FARkEWeeliOqGFQo5LVC1jWXoIofdSIx+XdOXRbXXGG2DKplaFnNWK5wuL8wPF/yxow93SPYsixZEQnSo8XFRo5yc1PU6BLrgicZm7mLEdwF3O7K7u+H+9o6v7+/5d7/+Nf/hP/xP/Lv/7t9wc7vnx/d/JuWF4+WImtAI5yOcXo4MwTPsD5Q0c3Pfk3Lh/eMLlz++5/3HR374w595eHrmNGXmVMgCuTqKaJIWo+o5OKfB5TjuGIZBA/Xdnruu53sCx/RbxP+kLUE+aKvWzYFx3HE5z+zGA4fdgS50LNOFNCfGLrI/3JCWmfmSqPMFvLDvAn0YOJ+OtJ21qUS8PRw6dj6Fz65zah0vWwBkM1dpgdjaxgbF1c2cUk1PWYMyZ1eka2XwXh3NzRFaQc1C9I6+j9y/u7fxdw0GgwEqfgParyZLxpxb9eC86hKta0ULyO1SVu25DTjUkoPr9X7m3m2SpvVebQLPt2APfKqd1o4tkLjV4vocGPY5oGl9Fm+Ot3p424D8c9p325+tn+Nnzr09z9vPv70H+u/nGHBtTKqzXgPlFGzdngOuI+7zn/ftfXmbdLwF7z6/b/Dq/myfmwsGHDljxIEFt+sdQrD2Q932lE3vsUi8ATft9a8uTvegUimSlW3YeTrv8GOv+m0x8Pz8zOVy1hgpenb7UbdYrq3iVSquQvDK9gi9o/hgDvZa9ffO4UxXzllhSUwYvpWvvTMNL++N1dPK2qx7jBib3odgpl71kzHQnnAphWVZ7OceR1R2GcV0UjfANpuWL1NJ37bIljdA9crkrPXKnjGQsO97qgjTnCilkk91BdG8D3TdwO3tLYfDQWMMW08Q4XI+8/79ey6XC2kcNEnue322YgxDS9iuY8WAOacmGphDoOrkOoZdpErP6fTCNE+Uoolb9IHD4cDt3R0vz3A+J/Jl0hRqDBzGgX7o1b08JXIpdDFYgUPbM0VENU1zZrfTYmwMgcsys/vyHfv7AzfPj1wensjHIzUv1M4xA8u8MBCIWYhVcyHXBWqnQOBQI7/48iuiD7zgCaJFjGUpDOOO3W5P1w9MaYGUePeLrwhDr/fvdCHkQh9VIL6kGYmq3d3tBqbkKK6ypMKShIqnH3b0/cBCIQSPN11uijb6VdEEVfDEsTfmqO4xytSyZDzr2G5rgnMK1jrnmOZZNeVsHFWT+gEzvsuFYOY1edFYqZiuY+ejEiN8MLdElTgaBnUw9a4HicyLmCuvUGrGVaeAj2lqV3c1PMsp0YVA3wUEzS36YYcPAXGYtlvAuaAxkJktOD8jkpEGDOJWEEL1RhSYigYSnaaFy5LphoHbe48shYefPjAdLyQ/656ImrgtbgJv3U8idH2PXzxVqrm3Ct7BPCe6rqdWZXu1eqn3KjvRH/Y03SyN94VcEyULuWj+oq2LXFlmmGaba4UmBTZyzuR8XotYKWVi6GwxbXu9NxQmIE4oXohDt8ZfwVqgdfneuIOve2RewVsRA/VMr7OdP1jOj2udLt6eTUcIHbgO57V1cKtxjWjBA68dY26zf+KscOeuruC1VpPMaiZ8BlyGK3tSu11WfM7KP7b+WOx+xVN+Pj75uaJXc7nf/mx7vO0YePt1lfLJ+29fUwzs/dz59RkYuaYK4irNhVsJAa3gkshtv3JcWftOOB1VusC1bojWDRQiLsBhf2smn17N4EJQuQMD6IZuT5P5EGzPNP1u1HBVXZZdIMRC33Xadm6xnA/OCA0eT7CihtdOJm9ECyPyCJqjeX8dU4t1JGm+OBA7R0eBnbVCi5CKUFNmSomyFJasBkyOynR6oCzatVeKGBidSGkm51k1L8edmd7puPGtyCTO7vvnY2BnSMBnQuT1+MsAOmlMj5a124Co1dxXI8PQs98N7HcDXRcsCV8opVN3vOrAmT4C4GJESqLlwkirCqrgt4ICUFJimWdrzXMQPEUcUVTHqx1pnnlKicvxxI99zxB6DuONCmkOPf1uYNwP9LuOb3Yjv7y5YdjvCDGSa+Xx+Ykf3z/w4f0HPn58IB3PTJcz8zSTl5mAMMaO/Thy23XsYuR23LPfjcQ+cl4SL3lmonKcZqZ51krfvLCcJ/0MKZHmhemsVd3Y2lSKBuJVNPWqtVIkMScVzKSaa4sdL+ezMlykrAkdUgi1EsQRqgp0xq7DVQUtCIpCxxC1/QGtuqSiDlc5q+lFqso5Csi19cCCaeectvd6pd7WmojBMYSOfoyMQ8/Y93R9T4yO2/s9rcUyZ0Wna9E+chFlykhldUIrtbDMM8fLxPkyM6VEtUFezO1nKcUCZrhMakLRdyOdVZ6XWlheXni5nBHn1Z47BLwfddMCdYvJhWKrc3VCMur8tExU8dAJsXrIF+asgqslzau4JMDDh/d4EcbxZm3PFlcpmP5LUMDAps/qiNoW5dhdGWZb5kcDweZFKz1FGuRznXvee9JGpPft5N8Cvg0IXOezAZRd160AggIGm01iza9fJ3CvEsgNQKc/ZJ3Da9//piIk9rnWSpZYpdUFQt/peLUqXCoZEY+PgWE/cnNzYLcbVC+xCpgWioKIHqhrEue8KK26MRd1110Zb9pqqgtPW9RBRfLFgEXfnFvtcwa799p25qi0Vi0F6OQNFuHt8/qqOhLr/bP74hozywDCWvQitVoMnbXC39yOxK7t0JWcZhs/cR3Ly6xFAM+Ic560ZFIWUj0TuxuG3Q197xn6wG7s2A0dQxDuD3t2Q2TfRw7jwM04cLvb8+7mlvGw48vvv2V/d2Df73i3P/BX33zDF998AzXz+OE9JSVejk+8nF/w3rMsE1IKu27g3bffsutHjs/P5FJ4eP7Ab377Oz58fOTh4ZEiUF1EvLbHl2LPwml7fIwBhyOnM7kW7m/37PcHbcstELodLs48PE/8+f0zp7M6PA2hZ78bGfuR5ZyYpoU0F6o8QRM1xnO4TFrprZXT8YV5Ohm7TN2s4f76vHh9vAVR3h4rcPF2fmyObVC1BTdeAUttTDf0xYLodTWoUChIzVa5bMA/DL0mxudpurZzWiDlmuGMD2v12dnvWvAaQliTlnV9UZHET4Caz92bK6j3+ddt21K3rLB2Lz63rn3uHG2ta/dyC3Jsq9GfsLverGmfA6LeAlDbZ9nOf01GXrfBtjVu+6zfnq8dn/09Wrlv7Thr8cV+/1m9G16Dfldw69PAfnts781bI51XLdtv2Htvwby3bDpnIO4aPDeAV17fz+uVKmiG0wC8gcufAJdisN72c5nzWvGquRNDxDtdQ0opcFaH1vPphFAZx+EVu7JW0fbUKoSgbAAfnTrCO3OIs8/YWIBO6ir/kEsyzLSqhpd3qiXcrrOxPKRSclJzrhAIOEoVnDdtvVJUmsISIWnJVPIEHz9hXGzxV+eu80kLWVe2u9vOW0NCnfMKaoggIZKL6knpS1Q4OHYqh3K6XPQ9vF/d6Lq+Z7fbaXxl7++dw3vh5aXjcjmxpJmuj8S4zQuS3i/aWuHxLhooESxtsUJyUDmI6D1V1FW95MSyFBzaBuddoIuDuk/WTKmJUiun85mSPV1UlkYrhgURY/Rc
292LFTO7riOnxA+//z2Xy4Uvv3ynzHIcw9Dz1Vdf8u1+R3aOp+MLaVnocPhclaVXM5VKlkoy7WcnsN/toQrzZUFq5XB7yzCM3N7fMwyjGag47r/8klLBVc/l+ZnL05MCXDmRqFADsarTfQfU6DinieeXM8f+wBd4XIgsaUZIeNNFLKKMRWeFp9h1DMNIzknHVNCxKa0N0Dm8R6+rVjSN9ub4qaYcV4bx1aislLLul7VWcsrE2Ovcrgp+BBcJXmPPpi/ZD5H9/kDf7YhhT6mBy+XMw8cfkbN2jahrq5Ba2yBRW+moxNgzjCPDOGrxP6hEiIhosQ/rLHGtjdq6c0rFh0g/7ukHbX1NtSA5I14Lg/ubG6Y58XJ85HyeGLuBIY5M9czYj+RppuSs3R9e2V1JKq56qnUONaB9BenblhoCMQyMw55x3BNwawsgoG3viOlUK8iesjLlcxVCjGZa6Qx8Ue1F3fvUlMNbAUCkrGteydWYruvqvYJzLQcXJyxop5vI2FIP2xfaOqFjoDGtRdR8olob6zgOn8QAIkrOqUULNZWmG1eRoCBcDFc5Cto4Q0wn3VaIDcO9okWT7XvpZ1UdOu1SckSTmAheZ8M15dH4W4EpbyCRIOUaAX4uD3pbWNu+7lXst8Zw9u1mjvzs+Zyj5ETbQ67vcT3/6zilFera/HUgiVy96u+FSAg9zgdyLjjr0PDOCixW/HUraUFbRsUpSae6iuDNJM9rrDypgRvOK24QwsqkCz7Sxf21IyNcWds+BFyE6XRRlpzz9P3IMIx0UXVdc63sdiMiKr2lAx3TB9S76L1f5V6026+uYKtzmue3Uru3fVXcVe+v1mr6e5FuHGFtsXW2TX5NSYvONyvWlTST00ItMykthOjou0GZfFXwou64XeyYzDhzHQKWe67/2lj+ueMvAujcitGYxtI6Eit917MfRvb7gf1+sA1fab2uZFKZybnHdbJqPYkTJGcV0m1BGEqTX83qWgBXtAUgep003ikN1pkAq+7jQpbCJBXvz8o4iXuyy5Ts8XkhSOG2c3xxu+Ord7fc3Yzc3N3ixh437tntdnz/y19z/+2vuH8+8vzwxOM//RPnp0eOxyNOUCvs2DMEZYFNqZAuC8vziZfTiZf5zDkvPFyOPB/PXI4n1XdK2YI/pS03B8/b/Y4qzU3OUWzS5SykWvCho+RqwMD1eUynF4o3kDNcWWzRDCI6p9VBF3plKeoUpxVHmn7Q6TKpQYI4nItUg+ZwQYNbl4FswIYQPeoWGz3Bw7vbd3QxsOsHxt2gjr1BdT/EV1wfmNOkgOTlohv7rCCdE2+buS6iKSVS0Q1+WmaWOTGZ+Gmtrf9b1NShqEOeD55SYU4LSzLGmVStMPvAsmRccMZAslYTUbCviqc6DSaqa+w6FcsvVStBY+gRAtN84ocffgfR8+P7318fRE4M48hh3Gu7bh/ohp4SPGWZkMkAaNcqxs3ZUydU2QQ1yopry44GILWkV0nQlZnxuiLeft4Uc9qUb848Danfbhgi5nq0XeBNH6hdhY+B0gTsUIegVzpM9tpgHAEX7HeY7bmva5JWqzahKvDgNeiOmjjErrfqrG2oJemz63aM3cDt7Q2Hw4Gu7ynzjGrr0FI4u16r8DuzjhAxwKKBLHpvmuyHSDUGRSs8yJoUOnFYjy+tig9ifU1it8MYAG0pfLVg1oZpg2yaB8wghXJ95oggWatkqsdX1mrwfhe5OfRakUFBvZKzPic7lqWQF4Ha4d1ICDvirsPt99TdV3z5V3/Dr//mV3z77Vfc3e24P4zc3Y7cDIH7w8jgK50TeicM3rHvO26GkX7suf/yHTioKTO4QDfu8DVzfPjI448/Mk9n5vMFila5j8vC7eGGr77+htiP/OHPP/FPv/knTucL799/4MeffmKeM8fzhdgNDLsdedMWE9ZKq5rWOAe1OJx4qgSWJCxJkBq4XCp/+/f/xP/+f/4X/vN//UdSylALkUAsnnJZSHPi4fjM6Xi2tphrFbeLHdM8kdNigV8m5ZlSbc79j/+Ljqpa1sD01SN+A+p87vdUVn3BdnwOTHkL/rSfOScGGIe1fcViEQAtMFCRoiwFH7xWskuhlMTlfOblfFpbghq44LTXGecae86tjLkG1OFMn7OB2f76r+rd+CuID+u1tfOKJUPrvXhzbz4H8m2BqvV63wCCIq/ND2rVRGP7+rfv+fbrFphtz/lzr9k+m+31bQHA7Xu+Aq/kui68DeDb5/jc0fa8LctxC+S2f1uw+/a8V8DwUzD17T3+3Hu/vRefAylboal9jlUyQq57U9un2lgIVln2m3u7HhVwWlxBMDbYWh+5Dvp2XXK9PkP8zOSpqJaOMd26GLi/vSF2gePpyGU6k2qicss4jnoPY4evyl6eU9Yi5qAxUPCR4F+DoYKshk6uvbfNQSlVwaSStTgl+lrV0FXXeYIWXSmqCVuqFkOztaJWa2n2vv0fVu3CGAzEct4S3O3z2iRANodDjMS+V71ctDh1TRobaw3wkdBxZfVagig4ur6jOVY7p0lILgoYzCkzOBCn4FmIgXE38uWX76hSOJ3PLIsaxvV9VO1Apw/wOgc0ZlC9y2aNpUU+J0JKE2VRTa6aq7YF5cwiM9N5UnFu56wd7g6xzodlWpiWpO1QrrEf9XnonqJggpplCDVnTs8vfOgCOS0cn5757f/3dxwON+xub8F7ivfUJZOWjJsKv7h7Rz929ONA33cMUSUp6pI4ny+8f3zEx8B33/2SeZ75ww9/YlkWBltXK47dzS3fffcdu8MN4PjTn3/EF4evDqSqLEsX6cYeP/TIImBsmIqwZEHOCz/JI+I0LzqWE/3g6MXcNLHnJ1owDyHocy3Q9R3iHEtKZDNccGir2bbdutaqhn3LvK57TUOwrTPBgPEQVR8wpcR+p4ZxzkJMZwCIwytTpQH3Fs11/cj97p56n0nzWbWkquWL0ph9jR1U6Ds1bxnHcW27Zl0zrV1XrvPDeXUmreJM9F4ZdKpDW3ABunGgH0Zub+5JJXN8uTBPWXNQH7XF+DLjnDoZ16It8C5a14irqERJsCJ1ZkkF52wsigLzIUTtFpkWbagqBkIJiBeWsihg5XUehxCJvcN3QjRmlO9U+B8fQAr4gLhKFUdOFed0f4xR29KHQckEiHu1rGpMbDJHTVpA8Spzk7Y1vr4GrLb7Xs6qJ7gsC0WUxamulxXnrH2xViqFasYoYABmSgRZCBJwLYaGdZxpTqT5cLRiRduDmoyL86xrH2B5t44z/b2tjaLvKc5yiFfbYfvgrQX1U7f3z8UL23uyzpntGT8TD30OmNvuuX3f8/bYxgFXYPTT+MR77YILBrj60Glrt9cxHMqG9W85hXmyUiXr2u8j1Uw2vJO1I8wZYJemjJrN6DmcU6xG96mID7OuB2Ern2KxSYBaxPLWQN8P7Pd7um7UGMEJJV1s7+tWRvqqUxqiaqfSUi1nIaqzNU/lD7zFIdWMIWrJKzAsteKjU1ypdbtVFOSm4EIgxoE4qvZ+mhdK6qGq9qXUbB2mHkegirlYgzljbx8cLRk
13XtnsmI/f/zFGnTt8P5KC8U7xrHn5mbPbtcTOxXaT0vicj4i84KQCNGzj0JFKce11DW1rua2gg0DnVuGdqJMpCby61xHq6A6Uacc7zwSBBFvtrbgoz38NOOdLmi9eqJwms4sP5758F6X+UrFjSP3X3/N17/6nsPdPTUUvMvqojMEbvt3+KHDuahaBaeJl/NMmmcupwunp2dezmpEMOfEadE22eiiBeq6Ubism0rXa8ByuSxXAMoGS6EBUhUfTUqsqktWO5xXPYksBS+e4ALROwav9vOdaJKVxFOzVvOuE9valYOjFEW+Y+hN7DHgW0XFuzWZ64JXRs8YGIegulDBcdgNxODpY0cfOwYfLVjVQHdOwvPxiYfHZy7HE2meyako+OEC02XC+0iRBtCJVks2wEgFavWkqotAsc22Iiag2bRsKlIbC61A1mBRgi4uyTaAKloVdN4hRbXWamtxRLViFhPPL36mj4X6/MDf/d1/5bf//FueXz7Af9Rpsx96DuPI3WHPze07xptbXAcv8xMvedbrKWJ40YYxYJiXVprqlelxJefZwr0FzF4Dc1Vet0d9LvFaF2o+Dya8EvSG13pZbzYMBYHjq6pVq1J4ebPREEyPULUNimnoKPtRwSgFY3STDX3PKkLa2l6817aH3YHdYU/ovLLWvAV4wZMMkNXPZv9bEErVgMDL6w3tNXPDvU4i7SgNXSsG9hmqJ1ZVagljlbIBCK/3rdm3r3hnewbmGqWOyXlN6lw1IBMQUSflEBy7fc/NYVCgsVacV+BFNqBtzhD9jn7fMcZ7dvt3fPHdV9z98ju+/df/nm/+5t/yq1/9ki/f3TL0jsPouN1HDp0n1JkgM71kOgqdCNEpe9YBu17bcWoRoquQZ9VuOB9xNZOmCYoyRpaq2o53d/eE2PO7H/7IP//mn/nhhz+wLInTeeI8F2p1+NjjgmpcLilTqwp1hxjNuTGbALrQdx4kMF0Wnp/PnM+Z+VL4/Z/f87/9p/+X//R//Gf+4R9/h3Oe293A13e33HWj0u19x/n8yJ9+UjOfFXCyxwtNpLVoa31NiGiC346U0icA3ecCrc8dztjHP8da2rKtPhdgtcmvhijXFrM2pq6v0MohVdfAVCqXy4kPHz9wNMHuBs557+lCoGkpqrNhoOn/bOe76n5osCu+gXPaIvJqnbbrkXbtrCHu29mxHp+yrbbv+xp0egsSbTU3V32tN8+k/d3be7w9tiBd+5vt1/8tJt8WmNuCZu3raoDG9pm+fc6N0dCOa7EEBazca2fbt/dre/xL43H73p/bI97+bnv+LZj21sji7X17dX9FVMNtcz7nG/i2uX5hZaiJvvhaaNp8pO263YA5XQtt/ONsXW3MdV23h6HXzouayVnd387nM6BdGDEE01EL5KpC1ikpiN/32iqbaySXTLa20XY96gSpc2FlsbbPI6YDV9W1MZekBi4l6Fw14fcqQuiMLbIWX6Thjpp0bFvP3XYc/Mz9N8C86U5qQm5s+s1YtXfi6tx5bafD5ro3rWGxtUJEYybnnIGhUa+jZqoUYnDc3d+QysLL8cXutTJCD4c94zDAqIlnLRoby7pP83rNlLoWplJWYCj6iHeqQ1yrtgrH2Ckw4h3iO8SD8x3vDu8AlXkQ55S5aGDcel+tlbHkwvv3PzFPZ+5u1ewunSdOxzNzqYS+53S+cDmdkVwJ3nM+nlaThXE3cHs4cDOO9D7QxV6TNKeC4TF23NzekHIm2PXWAofbO379r/8tX3zxpRoqvVy43d/icyVI5jIdyVjBRrS4p/cDTnmhOPAFHl+OdNWx1InFZ/aHHZ0IfdeRs2eWzJKEVB01BPo8cnt3C9GrrrMZsHmvGDKwtryCFsBLVqfYbWGizQXnnAFY3uZiIRU1djBM1nzwFCBJqTBdJlwojEHjnHlOpOWZy7kgtRB9YDeOSK0cz2dKzcYEV/Cy63oON3tub2/o+27VRUa4mrw4p3O3aGwloGBF1+NTRp1VhVQqowsMQ89hr1rWeam8//EjD09PxBjZ7XeULFyWk3UElTUO1jhedWFbRGoZDKVUShZiBKLOoVwKIXQsc+KlnEEupr1prprR46LXuWEAorLbVGIIj86FEMHH6/pqY04c65qiChYdXezpYqdgQq3ahdL2JNOcdl6UPYZKAgTfWPWtVfHNerMWS/RTp5TUQKTvyFLxNlaalpuOE0ezrHNOWYMpZyqJihq9CJ5+6F+tX8WcTN/u1eueul6OqH57ZS2mCKwxkD4vlVwSX6hu+6na+cx9+jMiYW/3/k+W3zdxyCvwbfO6uNHeff3e+kGiGQ1+Ev+0v9vcg/Yorj9rxVQjHeCp4kxSZaPDbtrbToxM4B1eguWqDohKEMBd1wN7ZhV1drWeIpVOxOIEn1lS3hRabUPD9h6n7NzqlPwxdz1pmui63lyEYT56Yufpu5EQA9FFGw+9dqxVsVHkjexl3Aq7D94HbYx1mltVwwf0Wag0mxMFkLG9W68/USWz1Ape23VLztYmnnBS8RT6LprWuQeJlOyBop2Bpa5AYW3jUmSVggBeGTx+7viLADqRYvpM3jZ8u+HAbhzY7QZi1MtYllldA89H0mUmBMfhcNDJIAYytMTeaUtgG4CqE6DiuR7QU6qjUwzx+qEN5OisAgiVpEiNavJQCVK5idpS29g7k/NMrSIvwtAFdVo8XUj5SDr+xL6LnF+eeHp84TRVqu+Ihz2hC0xl5uPTE49PL+Sl8vTwwOnlzOXlSM3Fkj2n1F4/krKy4SyjwmUbuCVrW54SaEi1XHXGMHTZO9WJqhW4VoQBBnN4FO91MnmvOnzOEQFfVY+CosBloFVDhILSfYMLhGhGBUEFXZ0LSNBNNsagwGbnGceB/b7jsI8MvWeI4F1ljBrsKSW2UFJmWRLLtHBOCx/nIx9fnnl5PlGzVp+U9OGhFiZDv3OpK0DngrVdUuh9RFBXUBFH4246Gx/TZdbJ469tWS46qKYS5lqLSCFn1moL6yZjmmhoYOfWhdsCi+BJSZjTA7P1oJ9OT/Af/y0Ah35HHyJD33E47OiGnpflheNJLeT71tsqlmQXQ/KLboDr9djPlV7blm952MMHugAAIABJREFU9cx1/W6mFrpId0ErlThzE3KtrVadlKRV1FvS8DYZlmvbUiPKOJubeIxpUNdK0jWZY02U2vO4rhVyBQtEq4j6e61+KUPArYmG4hDKmMqlWiVM6LvIOI70g7ZHTNNkoqbaCjLP2hbdwFmdYXrfWuIWqzcmrl3XG5Bu3cplu60ZYFuFoPEKrYL2CswTqG0CO1Zgrt2PCteqndvoOgC1ZtMXFOIKxLxOmmMM3BxGbg6DmjuQtY5loqntCL4nuo7d7sDd4Vu++PI7vvnVX/H1X/+K//4//M989Ve/4t39LUPniD7Tx0IvCblcOL58pGMhDoHdfuBm3GlgnhZyXqiXk46LUpmnhTSpS9/pfOZyOuOdcNjvmEpBkq4TKRX+8R9/w29/8xt++vEjT08vypAtlctlJmWtNoUsOB90fQpehYARo5AnnR8Vqg94cYhEli
Q8PJ3459//if/nv/wd/9f//bf89rd/5s8/PkAVLvsdo3h+cXvLfjzQ90IVdR4sVQMIHQMNMEAr1lkr9DEa+LRhvZX6GiTfHm8TlO2/Ols9WyOV7bEFdD53KABxbS9qY1/HmV/HaQN22x7TrmlJmZeXF6aUdD/tOsSMdUKIWmyvsgJuLejamkQ0wXatol/3GtX2UQYKtV4BFyxBEDbaK58Hkj4HWL26d29Aqbe/3wLuW0fat0DcNlDegkqlFAUlYnhVPH/7ui1g9fbczSBiZUHXpunSkkR5VUj53D14+7P2t7VeU41tUWS7fr8F+94CbD/3Pj/32d6+dvv9WxC5Xdfb+9u+buBu4dp2UkVMb/Xa3rRidG0+YEkoLV6+ttFtQSkdV6ZFY61vUislQ/KOKmo4pm3rqpe73+8Q1Ggn58zpdKLWyjgMqrvpI94LUgvLos6CY5NeqJYApCtzQZ+TzeGq9ysEZ2Lnnv1uz2G/R4DDOHL/7p6bmxtqyizThZy3yZvGY6q3o0lEtXigmZE0Y5d1P7P/3Hp/rkZSYveFBpzYGGrsESwx0LBZNAFzbmUw6xqoiYeI6oy5AN5pi6vuUeaO65V2UKWwpEwujuAcu92Ou7s7Tqczp9PJADHH4Ub1fB2OJIWmk6daW7pfO1FXS1cyvhlIGBvEdx0hJJIlTMFHYjcw50KRRAyRXX9L9JEvb9/x/HTkcpmuALp9cuc0NvbAbhg47PeqEf3wgJTC8M3X/OK7b3l8eCLnQp0zZVJmtgBJKpenJ/yLW5kZzml8HruosX1K7G8OzLMWY6cl0Q0DY4jsxj3TtDDNM5fLRAwvPD8+0cWO7//qe+Z3d5yPN5wuLxynC7NUZhFq0LY/cU2HWVv3ZJ4hRKIfkKJSGFkK9E5fT9XcresYdjt1cO17Usmas51O5CQMw04dcoFaippDiF/bWKvtoVsZlTaWWi5XcmKeJrKklfXkg6cfR4Y5oy6dibws9CMKhncDU62czxce5jM5LYxdoR+iuo3WisxqouCjjombmxvu7+857A9WEFDQxXGNv+a08PzywjQrYeNymUnFQCOvG2joIrvDnpu7G7o+4oJjWRIf/vSB08sRV4RxN9LHnmmZqVkLsDkX2z+dGjY1cM5rR0tjK0YX8JjcTtcjAr4WZfESCMbg0fZ4m+sRlrpQKSw5Qy74VIzNqnv1/jDQYmtdY2091a3Z4n/NjfrY0cWo7fuAF49fcwrNg4yUj/NQxa3rTFtzN7vINT5oK4WxfWOMVMkKmFoc0VrtWzxUpBEwdJ3R02sO6r0yl3G6ljYwyDu35iFSVENPWsHEGRgTIuJBiu4x3vtrfgXEEBn7njKOmkex4J1KhWjNMoCEzTr7Ol7Z7oXbPfRtHLP+TbjukdeC1KfFwuvRCrDQuKLtOq578PXV1z1X//deY2ktrBgFQfp1bdUOMtb13qOdey1PxUFwDu2kU4kHZ6AzXo1n9LqusYi3zirBgGPR8zVpqtriSeoGoNMxVKvuYNV5NYWZFnw0xlxwqznN0KmeoTeAro9a8MDp3zoCxekYEgxrNvAwrEx0vf8+YOy+xj4slJTA6ft6a+OtoiSfut5b1WaNQXEjzMFdiSCBQsN31Pxkm9YpbNcef0MyxOQTfv74ywC6CiqIXZTV5h3BV7W277yaBXgh5UIuidPlzHyeNQioSnsOPtqoswnpFQipjflkDWZK4dLJEVxQneCakdQQyIp3kS44c/KshvheEeBQPbFCdBqEVOchBvCB6j3JaMRpzkQcvnqOx5nz8xG/zCyXI5dzYvfuaxTTnznPleOS+Pj4wnmacT4yJ2EulYxTsKxWo8tDjKJtSRY0Ree1qmdJVqlaPRHJOKzfvQWdphNXJaHMwsB2cWzJUAydaRkIUiDXSi4VSqaiDDoVXXfEaIsEns6j5g2+BYCFzidi0KRjHDqGfqDfDWr8MPSMQ6TrmzaXAofPL5M6Qi6qgbXMuilfzhOXtHCpiePlTE6qC7MbOrw3SngWogvKnpSs9s9aeFvBSke2xawNG1nvn3fQHfYWKKioY2kLsgHB3qnGS8DatRpgZ0w7DbJ1IgWBzgc6Hwlu1skpTsEUd0Gy0hl7o4sD7IeRKkKeZy7HF55fnnk8PnCaT8TgcBLUEatqi27bNKolc7XpKVTbBOtmAjtAOgN9/Pr0denU4EQTAU2uW9tQ+5w+uJUpqNuxVk10cbTvq1a9dKMVXNCFKYhX44wlU0Tbbxyslas1GROlwle/qVyJVqeRSnEFZ2B+KXUNqJ14JGKgv4lmtw0EoQ9RgQIESmY+n1DJTl1Y1d3XKmlsNzuBJrJvAJ2XxjQStBpvr3fGcGj4OY0W3xKFigQFez9hr+gkZHWrWvvyX2M5sgleqzGDde9qQDGIj/pE2xinw/kBFwbGw45u3yNRW8a9aBO6bGjRFU8NkeHmK77+/tf88vtf892/+p4vv/uG5Tzx/NOPxDQTbkbGnVfQOJ9Zzk+MLrPrPIcQ2XXaxlRLJp8vHM8vuF5L6SVlTi9H5vMEooBO3KtzXnGOx+ORTuBwuOH9h/f87d//Az/9+SfSUpiWzDzPugnjtJ05dBZwRQOD3QoyN+acNwp4zZniKnE44PuB47Rw/Off81/+9u/4+3/4Bx4ePujmKsKyeE7TxPE8EfvBAHAFfBU/VcdTGsBRRTfvGLWyaIlx2W6a69B6E1A1UIMWj3wKJunzrmxmx+aUbS6569K+eYtWjdZnXLUsCFpIQBPYT9xLvDEYDMQrFlx1Fhjnpr2BMzeuTWvrmqxYIGrPxTln7bVNYsLjLJlwUkzgtxUC/HqfnHN2yW4N4l/fvlZNtvvQknRryZJ6rba3/a7dIo3D/Lp+NqAATDQZNq+/Po/m/ObFUUvGx4irm2sTWZ+ltLOsC6/euXUN8O0Z+03w3P62ASFNq0cBC8frFOeTe0KDRDB2+GtAbvtK77UIo2uQXucniQCvq/ufAy7fAnvbr9v31yTNvX694yrK3fYEZ+MLBdqCQ/WHRaCyCjOXXPD27K4JokOVxTROWTMVNjFAu0vOxpVNAycgTnWZJGV8qXR9pI9hZRz3fa/74RQ4nk7kJTGLwxWHGy1OjR05OXVaNFZwjPo+sh2vKHOrd2gxQcq6/4YQ6WLP3d0tCyp0fjgc+MWXX3AYd1xOJ3LKlJztWbf2TkcMKmivTvJaXIshMvS9JRTO9u2WgMvrhK62LlW9KUUFjg3w1FhI53a4rkPblrUm3o6spk9tz/R1cx6nLJ8YlUHgxOHR5Ng5ZZcdbvZ8K9/w408/8fT0xPl8JkZN5kruV6aVtggWLa7LOsM1LkiVPpr7IAEXDHTw2lr0cjxxuL3HB786DI7jjt1hR0D1WU+nI6fjCxXVzVPNvFbIWEjLTAieu7s7vPc8Pz3y9PKiYFvfk+cZyVpEZF7owbplMuNuRz90eOdY0qIEhTzjsrprdrGn73uen595fHwkl0roetWfu7vnfDrzxx/+wPPDM0PfM19m9rsd4zDgqbz78ktu6x1LSUxVTc7ynJiez5wvM4tzzFUZ4FIzsRs0J
nh+4eVlonjB+apZaddxGPbsbu7YHW4IXcfL+ch5OnN+OTFdZi2y9APeCUXUhAoGnHXwKG6r+4FIpbmKiohJHmieWIo6sV6WmdNx4ng6c76cGYYzcz5zWY6cL0dlxYRA5we87wmhEHo1uSsCp+lC19/Q9T39MJBrMYBO96JhUN2qEDrTAFRNO2mMGgfLknh4eECA4CIpZy6XowGOma7v2e8VTL473DDnmfPLkXla+Pjwnt3uwP52ryBASqYDnJV1WCtd0OuhqlSG0gMCXey4ufkW6ChlIZWkcyNqvJ2Lal46X8FVfIx41yHOk50Wl/HXvc97RxcDsesI8WrWIKKmX2IJlHCVEOh3IwEz9+s7NWxw15b5t+u+haWsTDrTq3UtjqpFsxHD8VtBBVrIUJTtFCK73WHDaNQiRrX4XLUisyI+tre0Tq0QPbFTkE47axT0qE7loRCnrLiyKUjRXh9NpgbrOgkWGil4F7uoAHkeEBxZPF4CSbLFIZ7GmGsyFW2fbJI5ury2CGGzt665xXX/DLZHfW7nb+QRXGvbvMpStL/ITQZn839jlm8Zec4rENV1PV3fqdttCDgbBzkX5nkmLSrZ4u255WqxVzFoUH+hjwWH81EZYLbJiDoQ6n6PbDSMwbeOQRqAr3u+NHc+1xIk7bRs5CoH1lZamKXiFvAhKYO0VkJ0zDHaPDCn2ag6llg3lXdqyuNjUK09r+akuebVndYZqcwHT+iuTLxSCilb55JP+jnE6RjNovqF9cq21v1DiwEtHqgCtahRpXbdqGO5azow0rJQ22sblhE+jY23x1/W4loVJQyuElylD5VhHDgc9urY2evCcEmJ5+OZ4+lMnRIhZUoSJDskO4rTZChGhwtqKx4rVMNJQtC+6Vqyuhr6XtvnxCEkxAC+VDNLutB0xa5Vb0VosyQSjmJOVMVVQhBiFAhG9a/gY1DdswwpibqtLpk0eXIOpN+9Z06VjLbrSYgsuagbbck8Pj4aYmpgi1E3fYAsGUolOCEEY820Gd2caVxpcasGnL49jua8Eug61dco6UqFjF0Pzlt7QAvKrouJRwduETFhSGeauNnEGxVgHXoVCR6GjrGLDF1kHAeGsafvenaHe5ZcSTmR0sR5SixL4jJNzMvMdJnUojhlA1/qasFeazEdKYd3HSV7zlXp5I2inlIxyqhW5qRuAnG5Bp46g9dfrInPVSyyjfPGy7dER6oGs0FFMEMDAYrgY9TNwZxSqcLYRcLNDUPnScYYGvd7kMB0/kjOid1ut77n0O1YloXL8aTi0061ZnzJVOe4SKAUc9qs+j4OoCpwUEsxsLvl1zpAQqtEBV0QBa7ub3YfvHNIUTK9942ZqA5PYAt2CEgpJAMfGoW9Fq2BrIE8zuaeMhZLMiv4TWKYs1DIa99+a013bNpvLKlMSzHqbkW8XPGJsmnNzW0TssXdt8Qm4GuiLpVzquB7msswrtNAsSG51nbnQ8CFxopUhoZQoUCwCopD9QKk6sLpPcryw/QdXQPoLOPzGmblmpFSbXPWrEjbBDur3CgQIa4BmFUr/7XSh2gJtD4jcZVUsmoH+tbu3gOOnEQrmt1I7Pb0wy2H+3fMrvIwTwRXeZqfKPNM9c15C3a3N9zefMXX3/01X/3yr7n5+htk2HO8LNy5GXd5wg3QDZmAkJ7POJkZfOFm9OxiYPCONE3MpyM5J6Zp5jJfKMfE8XJkvkxWYFMmRd8PHO7ueffuC46nE4Lnq198xRdf/IKn5zPPp4njNJOWRMna6lc0m7SWHwg+rOuXsyJCY0hU0fEXnFLPfdfhO083jtx+8SW/++EHfveH3/Px40+k5Uh0WujJqfL88shPjyNu6Hk+HlkuR7xkgqhYawy61uacrQ2ugRjXzXOrC9G2zytOJ+2LlXHbEuDt6xTAK9aW8ralbwt4bM65gizGi7Fk2VnUfIX5VJfVe2uVk6J6RKJgQYwahLvqiNWrDosTdraXNLdoZcOoPpEzHR68FZucU61Xp0BJY7wrC9oq+LVV4d3K6mytDBVlxLdKJSS7P96afpSpBGLt7s7WFAU0W+BZbN1rCUW7x947K1zpPM7zst5jD1qZ9MFYmmKvrxZnBMZ+1HNVsSJhXQF/Z2tREWXkt8dWVoaP4KpTZ+RghcphtwbcjbkRcIxR2VQ4R2k6L2+0SVrgT9V7ot0BgcrVQIV1vBgDobR1u3sF4r1OuNjERlcm1du236Zjk7M6QTfGFgZIrWpAG7CygTr1mqV9AtY1fakQlE1U7ftmYoDFLs5fWzcbeKzMLdVJ1bc0huYKEioLY70s06URgSwCubCURKppNTKgFGLwjP2AE8fpdOZyntREpgiHww1dNwARYUGoCojUqoldP+BjYFlmZQbUvLLQq6ssc6KIQ4oj+Miu3/NXv+hVdsU5lmkmXRZlt3tl9mmSE8g2x7rY0/lO2fRAzVXd2HFrp0JjHcTOGHw4QgwKPkUFTJwPONd0j4syrwnrGGputLWW1Wm31EWZQQY2OvG44oh47X4pKp7vvFcNt5LJpqnnqIhkTeSdkLJKA3Rd4LAfmS4npmnicj7Tdx1StDOk67Roi2iB229aurOFLHPW4l4pyt6t4hCfcbFwmiamtLA77DjNZ7rhwN27d8TgOT4+8+HPf6akhc47xAcFyqUV94CayWlRt0fv2e12VBGOL888PD5ytx8ZQsDVQlomfM4M4nCSwWUCaWWtBAd9iOv8q1Xousj9F3eIqFPs89OL5i658rQkzqcz4Dg/P3Jzc8t+d+Djh8zT4yPTdOH+3T1dH9kf9rz74h3v3t3T3UfOhxOn45kiwuly4fFx4kLiLND3AzeHd5yOL+QYeLiAuMQ4jLy7+4IvvvqKeZ756aefOB6PnC8XnHfcjDubx0JeJpUQiR1FwIeId5FSFsQkdGpRA4acMy/Ho+5RXudvqZUQO/p6Q0qOh6cnht2fmPKZ6TRxnB/AF4KL3N7cMQ43ViytqtncFTofiGGkoAyxpp1VSiGKalaVXLmcZ2pxFkNqbiRtnQJC7LRIKELXCXOaSWliWWbSMtP3qvnrSuHh4SMPjw/MlwveO/a3A13n6ZqOdRFybHL6Bd97E7kHFwSXE1C4GW/5xTd/zf/w7/9Xfnr/wo9/+h2PTz9R8hmRhJMFJ8IyXxDUiTV0HRIduEAcRsaho+sKIYgy34xF23mVh0HgdJqNGCP6jHzUfYrMYbdjeLejpIllOjMtM5fpwmUYuN0rg9K5QkuySqlrQT6ioHOVDCXT7EWkVjX4cIHLZbGChDLlcinkLPT9XvMa56FCsLW+6T67oHFzA5uC5UDBmM6VTCkLfd9pDtkY5ECWtpdpC3ApZdU9VJJGVhZyNXZgUU20YdixG5Ux6oKnOG2XDf2Ar4FY0loI1ZyrEpxDigdnzORN8XGrb9328K25xzVWw+In+1LaPqZrbogt57cWUPy612kTnOb9zvZbV18X0JIUBahiJPYd+/2e3WFPHAeC92oEUSvTZaE8vTDniwJxNjdSTupyGztwOvdKKwzaOokVZcWeYStCau6HdUn5tcAjgIue
zqvElkUkr/J1RNu8VxfyVevyGr9INlboIqRlYVNfXI+t/m0IAR9NNilEBci80FpqQ/Dqymztsdp1EQ1cVfCvndNZoduhpkIV7caprWiNFihS0ngpYDFJtZiodc8BiFdpDANvxWsvnHNubXH+ueMvAug0MFcOS3DQdx37caDvPF3UoZhzYVoWplkdLxDY93tiY86BunsEzTQaS6pUTR5C8MROBWRVEwxOp7MxS4xtV80tqCi4MM+LIbX12r5m71VEHWzwDm/tcrudWkV720RP04V5XrhMM9M0m3PktfViPqmob64tGQssuZCNgVU3yZULGrSsrAY0UXrd+mFuKFXbBWJwm1l8PVdzyGnBlCYm18GdtbRl1S2slaNTRx/X2jorsQ8WlKv7agy9is8PPV3viE7Y7Xpu9iO7caAL4CjWals5n564zNoKcrpcmGcF4lIqLCXrYKzmKlS0BTRbYE4tRKcIfhFNtBUYkXUybfVs8sYuut2L0ibvZkF6W93/F4+WOFt/vVRF8is6FZu4eGNhBBujfR+oe1Er8zlzOV/wUhg6x9hfk6vpPDEvCznNBqIJKc/kXBXcKU4ZdGJVbAt+nKWotep8WkE6e+bRqRPaeVpYldgbc8Zp6xBeQQYpjm4LELVErgpLyeuYa59a2VuOpnWmi1cg1apBh2iyUqxSiQuEUnBOGXNFVFeviKxgC+Z0Z0U1DWJqM+xoLbTKKARz5PE6R1ZWXtZ2Fpea6Gdg2A/UItS0aJ1FirmKOSpCMM0RF2zD8w3g1Raf4Dqjy5tBt4i13zm0S8SqwsYoAGULVdcq+QIx0PW2+OMU7CiV6j191+FCXI1X9G+ht+25x61thKHT6yxGf9DqpdeELPR4AsuUkSR8cf8V3//qe7rxHX/88Zk//viBXC88PvyJ3/7mB/A7QIX/v/9Xv6LrbxgOB7IULvOZYT+yd5Hp/EBXAnOYubDHD9C7whCFzglDGJCaOR9nddy2VrScK/MycZkv5gYHBGW+znXhfDryeJn4ze9/YEmJm9t7vv/V33D7xVd0445lyTy/nA1w0LW6tWq9nsfXZL6uLMrGnoVSFgiOEHoONwe++MVX+KHjw+Mjf/zznzieXsh50TXQiRojXC68HE/s9ifmZabkRd3CpVzxBRxhs06/AsneHK80Fz+z7vw316C/4Ghr9r986O/9ui+As5aa7bVYs5q2uDRjIpFXQczqpowGyD42ANyt8hNtzq4i7s6jOhtu/b6J4OpLWzVVXl/xFpikBT+bTyXXNo1ai52r/X1jK+m53t731tourQbQKtjtdV6QDfCDBdbe9FVLNqMW1z4TNHkJHKsMQXtGbZ9g817b9tZXDDa7BrG2/ca+aozRLZjW/vWwkiEbwKatI7y6BtbfXxltb1lw7fVN4+5zentvW3m3jMX1PrZraOt0u8dvrmf7OdrXwdZUt+4MLay3BKeNNVsn7Y7ZeV+3Bn/KDuRVW/GnLEPBGVDEJOzG/dqSXEq1ToFRK+RFhfj7PquJQUtYazOw0RimQ5ndGvxrgC61xXeOGHuCgd3admUsUN+q7/p3ncXvDRB1OBB/Zb+J09jJXWOjlJK2YzsdJE1777q3XsfUyj50bNp7XrcjtylSrWqs6izaiKPE9mpa01GTc0voFJrQR2nLSBuw9hz0f2VdaPFrHEct5hyPTNOFjx8fuL25Y7fbsyyJcVSwpQHcDdzS8W/tZt7jGrNZzL0yBJaUmJeFcacmAdOycDmfqSKcnp+p2ToXRKAk1W1uLCLvoZr+cWPeBq9AZ9+R5swlLcQ4srPzO+s5m5aFqSTiOEIftZ2xmBmc3likCEM/Mo47FgOINPdJzLN+vuBb+57mA8PQ0feRmvfghMs8cZ4qT89PfPjwXkkRY4+aJmQzOvHs+kgdIjkLNSUbf545wzB0um6FjpfzmcuycD6feX5+ZlmWleE59MM63nIpa4E/56yFM2CeZ44vRxyVvu+5XC70fY+PJnngUdA1OKJE6HUven5+JpXM+4cBKZXz5UjsIze7W8ZhxKEtpSlpsc15LZ7G2K0FnLGtZ6VavhN0jNkzVBfySB/CahbhnFO30qqSKNj80XqUrsnLMvPx43vm+bIaD6irZKTvO0vIdZ5G7+m6qE7RMTLPF3Sn8gp2OYGSycvM+XTi8emZIpHdzT1CIbgbQshM5weeHj+Q8wTe0ceRw93I4eYLnO8RF+kiHHYKECmhXlS+RyqkxQCuihc18BOqxsiGDTjnmKYzy3JhupxYlgVv637gOtdaq6qIshad92raFhxV1KUypcQw7NbCv3OBrouov4qsa3ErHDlz0FbGG1aMUb2+sNlvuhgVfLSOrtaA7lYgzuO5Mv0UwNos86gEVvCRghID/n/O3rW5kSTHFjzwR0SQopRZj35Y35kxm/3//2F/xNqa7djsvdN3Zrq6uiqfEsl4uTv2AwB3J6XsLtsoy0qlRAWD/oADBwcHErvI2eOdSGYFEo3jOAYM04RjPqIggpHgiseeqXYgZpZybHYF8MIIvT/7zAiyfRZAz452/pj/02yunX/t89/q291/zSBIYr9+ZjW6/SuHYcB4EIklH4PocBoYxAImuRBxeDgihhHbJpVuy7oqPsMgzvU5QVKwIZyXlnw11nz9+O1Tdo+trzV/sv88ilryzctbdRg6v+m1T9zihf5qSVGZe9q2+vtEAAWZPxtv72JtOuEUuBV/TdesxmbOBeH55VbGWpySWEjwpawEIOekM7WdW0WTd4XT6+e181I/UvkHfv9vAuiCF2NG7BECtBPMqN0xZWGv64Z5Xmoraacsl5KL0vkzwqg6Z2RdQlh8fhZh9m1ecb0uOJ+lnGq7LgDEaREDICLzJYv4Z85FjVErL+kXuXdSVuqdxziNGKcJwzAqBVqM0batQhXftkqflpp5wr5IN5qUuWZvkjodgJIzqQErtojE6ZHXcZ0VCYJcRy925MGO0cqZbp3NygjonCsAktUAqnivOUWi3S2b0ntxBEJwGIaIaRgxThGHccJhjJLZnCbEEDAOAY7kkJsvF1yvM5Z9xa+fP0uzh2VvYum66aW5gjh40olRRMyLAiEoLCKgpQU4NyVLJF23DJi7DxgAybhXJ75z0O3PTSnaG5cFoIAFJFb+offRjiuZpVS6MQxGUCQp1V1nvFxfINm3iEUNAAD88vGDsv9a8FFKUhtDyIVQsgVTAvI6w3CpwJYrQ9gZ8lsFmzrV25rBdngSEEwHSi0ocwDgULQoiHWpOfmAzVk0hoyJzKvR9i5Wx5yVLcKspSLeggAIdV71MaQ+X95NVQVgbeOZoWV2jACPbc+iIVaDXguQvaxT3AZ9rb21A8gj+xG5ONX2QGX0MAnFPSWIACqMUeOpr2BNAAAgAElEQVS0UlW6cxUaAR8RtbzdOlEFb0ZYn8sF1YqQDkdQRyEGIDhhwoUY4OErGCDORQS0TMBp9sZFX58zuqbZOQxR6PveI4TW/QwMjGEAJ+DL568oe8Eff/9H/OFPf0I4HHFZBqQkGornS8DDwx/wz//CwNf/CwDwz//0L2CW/GbZr5g
vjMPI4KmIPWCHtDJmd0XIHuPDiGkccIiEUlaABWDe9lUyhERIOWPdVzBL9tl5j/EwIYYRy7Li55//hr/85b/w5Vn05X783R/xu9/9AcwOH375Bcu86LojWGaw38MGxolOSgM5ct9VCsI8dcqA8iHgcDhgWzf89Nef8Msvv2BeRFfIOw+oTtKadpyvF0wvL8iQMpt7gMKe5T6478GOakMMaPn/AcTdAgYNVHnrZ28BHeavfAsEub93/3UPdErg4itIZyXFrIdHLXWhBtD1YKqByRK4i60z8Kg6QkZAFYtgD4u+VBfoX9cF32xlzR7tnBCQQ5zTDoiA3q4ertBy+M4RBrrycg0YbOzUT8isjaoMLIMwCkGW7LgdW2Oa3QNk5iC+CXBBk43ajIhcB4a+OW9QsWbxZwqxJB2+Mff3TmwV4FZA7a31B+DVuXkPuNn6IdiZ1fw8e23Pwrv/zObkil9/O179s/af6S2mX78v33re+3v0X8tUM1LaRWzexxpUC7NPylucC7hcLto44gLmUkEBR0DOUsZWdjnDfVSNJC+ls+KPSrnOMHg5I7LuE+/gvAR9jrTznNNGNQqaSTfvIuW/OUkTIG5ly7dBoc4HqCajDMjST18/dylQhV3qxqztRbMrlbmopamlEHKGVhVIcGb6PeajNCC8KAsdsK6sRX0dGGgMjxhGPD1GBB/xKTMulytKfkFKjGk6QMpVdQ3ktp6Cj5AOkARWMLN2L9TE2/lywdcvXzCMv8fhcMCyLHh5fkHKCfs843Q8Im0r1rRL0wAWH9C5Nq45ZWRNJg7DIA2GfEAKARgD8jgguYjAwmwNPsAXxlASdmYUJ+Pl2cFHZZ5kBjzh/bt3OD084uueQUyIPghob+X22nmbFGAprOzo4BAHKcfctoJtW3C5nPHy8iwltQr65pwruBlChGlLpdT2rLCMPFJKeH5+lpKufUfOue4JZtaOtrLHQ4yYhgGJHEpJyFnWQIwCjHHmmuwYRiEHbPsGJq6SA7vLWJ5X+CBnbdp3XK8ZJWUkjRPBytxNGbtWmnQRu/jGmVX+xSGGsUat4neJ3VnXFSlL1UeMoe7hcZjqejV2Zp90Ew1FXxvHWGwrTT0kKVVKFjaPA8jrfcdR/JFtqWetuK0S4xZmzMsFf/7f/47Tux8RPGMYgTFGPBwfME8F23bGsl6UzV4weI/DFOHjiKLnai5bbdwC5pbA0TimlLvzQwkmpr9ZiiTyt23Dtm2IrrGEGivdEhJSDcYMpD0JGIhW4XRray02fW37zf6H4MAQXcbClhRoNsgAnxu/zAXVNGvVZO16bect8es8Nf095mpDGdrQiriOyTgMoHIA0yAAZN7hs+wlxalASRKAhYPe69avqhq4pfkn91qxYIb3dPP6um7r+fmt5Kwm9JwmXB1qyTOjJUpijGKPlIm17ztSyVV3+Hg6IW07UFZkH+CiNjIqBTll0ZSrPi7BzhAiB8cKIHIHork2B8zSZbiU0n3/9oy4Txp2hxWAW4Cq3qH7nr/zt27Gt7vuAVQ778qWwGj+DhkOo01VpukIS2rZ93uADmxNDQMQfAfQKfahUizBSQlu2Qv2JA3nrKdArc1R/xP9Or0HOO+u39zFNTiP6ANiJDVeAZKAEmDnuixYVmGbMTkpBZh37FtC2hLSvmPIpCV6ivY71AzkmnZcvs749eMnfP38jGXZpMMKAMcmuClCnPI3ITovLXoLREMI5o7rH3KAdrUrvGBPBc4tUvbK0lUnqX6Z6OFJpqQUApcEsJRpFoYE38VU+NpmtHbUsh5aMAAAKQOtc54uAC+OBWlmmzjXQ8Pqz2swVQ3d7TRJJhiwJS0LVzqUSLviCSESpsMRk9FeVddC6tIBZgE6t13awa/zguv1iuv5gnmesaUNL4uUbRIrMKTZiMzikGybicaKY+bgYdlUMV5WitUOg2hij52D37fLbpsLN+Dct5z033JVpoMyNu137X0Nmbf3MEfw+eszLpcLUkp1DuZ5rve9nGfZ/Cz6iYQ+YGABfPoAlduBKslGQiIBg8RxzO21xAhxAmv2yPScGA2sdV5KWUxrrgfpiB22LG3gTc8LtmZ0XLeSq2Yd4HRsqB54rMbJSjiiIMCSHSc5MGoI4OTzkBehUCaHXBzYBZDqCxIsgCQ5NGWwZMkocGc6Ec5HKQ8WURt4Lx2onA/qtBXJ1AYF/ALpGtMsqwsYDo8gN8DKhb1X57oCFVqa4gNciNKgRB0w74DBF0QnTEHJkokhD3ooUgU0vMxF8FJmELw63oBzASE4tZ1iM4lYy60TSsrw5LHNKx4+fsY4jPinP/0PPD59hwTJjHmfEXzC5fIrDocnvHz9APyfAtD94fe/F10ULSv2boMvV1CW+aJC2OcLluQx8AhMJ1CZkAuQ9g0xOISBhI2gpekOjGGMmI6PGA8HDMOEAmC5zni+XPH5+QV/+9uvWDfpirzvPwEgPJ7+F/7y00+4ni/CAAkKsljnWyh2Xx0WBQNyubEjegzL9+Cw7huu84zz9Yrn64yffvoZ12UROjt0bTPg1P7uJeO6LnCBvinSf3/1TmJvW/4RmHF/71cOyRvv/Y+epb+/c7fOxz0Q9y0wMZWMrF3wzN7eO0cGvlRbbAAY2f2kDK3ZYK4JGLGXaH9gt6f2Ns6+7p+x1DcwIMnWiCXHLJNJJKWtBjIakAYLUgwcAunPUQG4QiTMWEsOgCpoZ8Cl9150bjWoESZsYxeRcwjU9Hx6EKkH7PrvvZpbItEbIQ3enDi7pZsrky3ICs5piwRNYPTzcQvG3YNa9+//lkN7D5L197l/PasOWr8e69lzxyy9/5vsTOLb8bl/zVvXW2f+/b6ywCzG+KbD3nnBYEbrLFhLW8Q/jDGCAVzOZyyLdDw+HI44nU4IMWDfHXzXgAF7hgseMaqmcVFWfCAEF0VzhwV44cBI2GvToRakNVZC1cy9CUAAYdjfrrfqvwCwRjFsYHVlGLTghNEqMcSfdBr4qtZUHV/1URRErN/KkLIk10D46kaAq26kNaiA+gNOgWZm1H3pHHA4HPDdd9/DuYCXlxd8+vQJp9OjsrBGDDHK+V8bTwh4A9IunKl1D40x4uF4RCkFl8sFD5cLhnHAtm24zlf4EDCOI8ZxklL6JEwIbw1xunUle8CWi5xH0gHYI0wHYIjSGbYwCjyyJv5RnAImDO+kQ6eJpLPqIb97eq+gsJwj0yT6XKuW5Pd7ad9FLw8AtnUXcJlFjiOEgFwkbtq2TXwI7zHPs5ZvkoriC+Bk0jGAAG8ppbrXmRnDMCj7TvbZtm21oqSdCVCQr2ArK6KXSiQwY76eJTExDKKFbHYtFZTY1u6+SRWIAJ8RPko3Ug6yDq2SygDX6CKYBaxLKSEUkcKRJIg1OHL1DPJREng5y97IXLBtsh8kiToKcElU11V/xhvQNgyhAm/jOIIISGnXLs3QjuwZWZPdh8OIUk6Y5wv2fZNy75QgOtnCdC0lAbTg/fsBx8OI5887cpoRojQlOR4PuM5RupfmhHk9I84DQtkqUFBUQxIs4JzIA0nSlxnIeYEl3NkAKV
KWrysgGpD2iNWL1t+g++Z0OtX5l0vLyq2KAdJltfBez7lqWzjXxGkfg/ZxnXV8LYWVQaw2xQHUaTsbgCwNBRiRZY69NS0sEJkcNP/C9qmt3dvmRLcyCyblU3JrRmN+D3QPe3iQH+Q+qhlOzvxWqTJisw3GmFe5Hq7npqEP+n87k4hV1o1g3ZDBfXLpdTMs+ZTiWGU7+4H2HgxY5BVjRMkZ8+UiXaGZEWJQ7eGApx9+h2VZkPeCOV91jzOGOCA8EOZlkfctUA1DK1kVSajELJrh+hTmT99fb/o+3dX/7C2f9Vv/tkYO9375b7mXfa8nglRDD5Hf2vf8CqCzElcPFtkpEoKW9x7k5dzPEILBlpLEpl7sLhVLdChARwBbzN1GQx8Oos/3d67fBtAxY/AOh+MoDQTGQctRCfuesOw7lm2tVGImXzcnd46I1AB7wLF0cowel3nD+brg5Tzj66cLvny9YFsKmAbsRSS2PeRAZNXIIUj5RFFxfWPh6Weuz8zaNYQZ2gFWN5hutGXd6sazDVCKNFuA6n1YiWDNEmsphjgKMnxivEsF2IJmR1lrtwXXaDor1QEGSwDhQ2UA1Fpq5wQQwau4CiaSuO8b4jAIm1F/JwQ5RMcpYAiMEEj1Cxh5X7HOO9Z1w7buWK6zlKtuCduyYlsFSJWMYgINMuYEDaLyVjXcclZgqhpCcV7QfT7vSEtCGmsQOnbMXPXqmjNxu8l6xkKb1teB6revW9ZOKhl7VxaTSq4IdoxyiKeUcJmlS+XLyxlJ20RbSe7WMeiYBYCpJWWMepABDs53gbxFsdWJVq9Vq1EkiAs1IGPyyG5A0ZXByoxEF2ibYSFQZWZCgV3A4Tg+1HG/yVJRA01FYFP1aNT4egXjvbajDs7DBQG5RM8PwkyFlG16cgKQ6f6Uvz3gjwAF0bSyz63PABsnM4ok5QkxRIRxQIwRMU6Ak45EsoZFWNr26DhGye6TAIrGkBNNIAHpMksDlhtKjAyo6Hh4B6jQcXFRmXYewTtEVxCDBsOlKKNQ2857LT3XuSzMtdRVPDrVAfJSapF1X0D1KrlkFU+XdXU971hKwMPpO7iHd7hyRMGEdVGhdeyYL0DmA+LwVD/Kj99/h7KLTkgpGwgF0+TwEHccwo7BMUYf8HCY8HTyOB6AEKTkM0bCOMrn2HYpaXHe4ThNCOMRx4f3OJzeoTDhr3/9K/7f//gz/uM//oxffvmAy/WiUgYO25pxvfxPeBckk70lAezVZveZTQPnBJCXyRBHRtYUF9WrKwU5JRVUzpjnBZ8+f8HlOuPXXz+AIcHOum6VxerIazdYYM8JUbNkPcPnW7bjW07AW6/twYL77PH9a771PveveRtAedv2AXb+tITAPXBhpUoUZM+yagNxX+7WjU0Fv9DDaQ0waI8lESiRCGI3e6IsNEf19YKLsQQqepWSYLoflpQxNondh0uB68SjK+OsPkhr5AJAykdZ7ZGOB6ljV0xlT01w7cmhtqhAxLqtwUEF9DrAyzTaLJiQuWnAZj9/N/OkY2ysRQMZe0BMfAoFf1SEH0S1NFGkUW6Bsz4Df5PN7/ZXKU23z56tZ6XZvy1J9VYZbOu+d6tZ16+1tz53vT/RTWOh+7V7v27vz6b797rfU/bM9+NP3VnrapOijOv1inEcEeOgvyPPcTiM4FJwPl+qVlUIHsfjEUMc4LwkU9Zdur96UAX6YpQOjJIkEGFpBqkGHKAUerRybEWCua0h78VvaiCCsls538yNlbcxoBUdyn6psWHvF+hWUb1VK3P1zkC4tqaKJi1Z/TiZAznbSjcvNclngIG+V+1UXZOLem/tZGfagESEh4dHhDDC+4jnZ0mAOucwxBHH4xHTNFXfkGAlusJAD8HX/ceQBOWednw9v+Dl5RlP7p3aHcJhGDCNIzgVbMm68jJiFJ2+xnoS1hwTqj2Sqg7RTl1SBkUpm3TRq/aklEvmUhDGEcSM6Jz4QQqYUimajA+4nK84n68oRfThQhDdWRn7pD4LpOFa2tTWiMzIvtfef3XNy16QfxvoDEgXxJLFPzXmeAi+VqlYXGF2QUo01wogyuulpPXL168I8xV+mBDHAUPwEhexsIK899iWFTOkLDTEiK3qTzNc3MAFiHrvdV3hY8DgBjgijMOIMADRe9FUg+j9oiiDqzgwGwtww7JsACQpG6I2luECJNUMVf1s2RMtASgll4DJKfniqp0LIeB0Oum6G6qtjzFqTLKLDl6QJLPtXSKHaZrgnMO6LpjnK5b5in2XZofOsXTSDIx37wf87vcnPBwPyPtXfPjlinV9RtlWGRPnkZCR045lvkiJ9T4gDqMwI7OAJ1QawFBtuOpmFQVuCznNeytAxgFEEdNh0vM6Y9RKBNF+dLqfJPlmRBIiaehVqCAXL52CSWNXK7FVXS9LNPW222yr/Mzu7+CDVOJUP4P5Vkc9y9/Be3BpGuuAJaSa7bezwzllOWd5X6tMyTmjbAKYihB5OyP6syInrlU3At5rbEaMkiGATP+8pUlaWGKt1K7kHQhEgPPC5COV0vGQe/fnGt2AF/0ZpoA3NQylnYGssaZ8b1m0ctGSMUEAOqSMy+kRDNHDjc5jK4uC3dqcxUv8iowKaJptNyISa3lUw1i46vFJgqvxwO591h7Qe8vHvf/evT/V++5v+SDfrqJrCSoLsolQwTp7D8MdGrkmtfdF0S6rQt5yTkgY5DzYScAiflZUfyBIAzJdIwKYugp8ir8NNGFr8UD/3vWbALrBEaZxxOn4gHGKwgQJAmbsuUib8GUTHTLtFLnnhIcwaNcLAY3evXvEeBpR3I55W1EIeLle8fnLV3z6+IzLece6ZjgSA3VdpL6/gIRTKPsG2ZEYrEq5fx1IERcJGkq3MUl6hCU2BzZIsM+NVUUuIDjAwSMnYflBBSm5SFbfsjO9g+5cQCBBfINTwGQ4ooU7pJnzhKzZlpZtaJkHu69z1OTscKvfFELAOAYcjxMOx1GyIeNYM0TOG2MvI6cVad9webli26Sd+rpu2LeML5+/qgaLGklN1thhJl36UtPwYDMSXj9zyxqI18gWv/VRHoDmWPRf923b5X53Qc+3AfnfdnmqN2FuzLjcMymoZdZSEif+69eveHl5kc423AKP+xJcr9RiUhCC0A4IKVNQ427zqocfF2GCWlZF2C0NuHJadlkghsBBgDkDwKx7XfRDLZUJUVqouzCouCUke9gBdKwPaeN8mA6wTo62Qu3f4mwWzckR4IUpwJAOP4UY0QfRUiQHqKwcOyAQAW5A5ghGuOl+l21SFWCnCqoIqOWdCnkOAeuaVITW1ewTF0YhabKw7dqIAkAtowRakBtkf5uzRuoosJM5IB8AkmYPpQA7S6rPWHwPDxOmKWIYpPU6ZwJzQSIWJoVm9opaXCZoYw0I89QRyIk2H0qBY0jrcxZ2XdFSonVLWFZgmJ7gD++wcETaGDFK1z0mj7QBS2LADYihNSr5/um9NEGgDJQN4A3OF0yRpJmP9zgdJ/zw3RPePZ0wRAeHBCKHd0+P8N5j3VesmzzvdJjw7t13GE7vkIvH5
brgb3/7Ff/2b/+Gf//3f8evv35A2uX3067M47IhaSBh7Grkgm1P4tR3l9gXCzoFjb5n7sjPcs2IByJkZnz5+oxPXz7jy8tXCaJjQMoFOUmjhMIMp4DPvicN8vv3bgyTto/p5tlk6dwCbffAwFugzLeub2X+vnXP2/s3YOytTOE9aNF/v5SCTQWWRYuxMbiDskgsMSWfsdN2qeCdu3sWqiUx4pxrBlmdFSlDag5V67WhWYf2pHeAjNknVHtNKsJ7k/20sSm3trhmzcHtbYiUcSQZbGMHiv8odq4wo2hyKDNrMknOfty/Z3dZkHuf9LBxfzWf3sbxFrCCgaK2xpSFbAxfhiUZ5T72mV+V0nTP1Y9Hv55636J/7m8BbBUo6wKuGnygNW/oLwN5oABphxy9er97R/vt9S/gkUmOdNnXu8/e9rKVY8qaFCkTR9JFvDnjGtiyaByFEPDwcAQAnM/CZHp5eUHJGaeHkySG4iCadJnBRRqcCSAd4IcogVrKWmGBytS3/9pn0/G/CzbNN6gMPyKpCqE219VH+ObVMeYEHVT9OgEbnCsQgo2wl/qAp5bj6DxLcG3JRuha7ZMLag808S6v1bCOWIISFvaKJWZzTiBIIwYZ8wdcr1dcztJAYlkWhND0vYYYMQ5SUglnotw7lnXFqoyvy/WKZV0wDgMeHx/x7ulJgDlA/aKWGO7XtO1T7z2GQfyofZcEdAhSBoeSRCZh3zFnRiKHyE6rDsSW5pREUZglQecZoMy1mmDfNjw/P2NdFgwxVm00QBhaOfdsUbNrUuZpSYUKwhsoBa6+5zBEnE6PiHHA9TLj+fksrC8nIFJKuwbydj7IfFs5a5XbUdsQY0ThgryuorGUdrwb3iOEEVlLJUtKlSFo5ZNEmpjVrszi28lYZXTM0bp+S917xKKXmlLCzhmsBARPAXnLul53BTdQjxNmIOVUGygcjwcMoygA56ysfZZmCuu6Vp0qS16EEPDu3TtM0ySMWL1qXOYcvINWPgTkvSBllY3QBO2PP/6AeT5ivp6xrFdNQEEkUYYBuczYt2eUUSRTvGNsSwIYGIYJ27Zj0+qHbd0Q4gLngBgdJPmZpFmLjR8aAA8ixDDJOZZZmms4SFmkSlHIXjrh9CClfI61NJlLlWa4scUK+pgN88ELCUDtltU32F5KvEPYdz04p2XKOYFcBrOApvJ7av9V182ptq3FnUaAsYRYH/faOu7tpl2l3CbOWiKrSd9YNVvdb6TMVALgAuqnYwZcQSnCHCsSuIvudXGAdwhFqvr2ddXfImQoYKd2gAvBhwgDM+XWzU9gZvVz3vIPPYrOUwEL3kEMKKvdsIF937U761ZtQik75nxGXjf8Vy44PhwxRCmDJ2akfatswhBEWxr1+ZqdAIyEpOOsvlMFuxpiV8+4e3+7P8vfOrt8B4D+Vr+6v09tHNb5FfYcBjKSPXB3vfL1K0Zauv1gGs4Zuei+y60apdTPtSs5RCQubLAKRI6Ju+Hj+zH4ByHEbwLoTocjHh4eME0TgoqdC416w7qumOcV27ZLtkLLSUqGdiiRw3s6HvDD73+Hdz+csPOGr5cXXOYZpRS8vJzx9XxB2h0YQTbGLp1TCxXptAcB11CNhOg09M7aDXjCkpkGa/c11hIoykiWZWbWDoMqTNtNGLGUuHIRFN5pyp3BEuSEgBhHMQIG2BnowgDDaUagZbjFKJRGtVU4tddoEj9eSwrkpwghihOh1+9+/A6TdtEVNp12LXEkh+h6xXXfMZ+fsW0LlnnGvCx6EOzY96K6KV4zF3ZnCQqyss1cLjBLYHvYeckYey+6DYymi9AcOJmXog7Hvu91XO+z/XbdI+jA2xv1Vab871xUBY8IGYCv8676gWh6RyllzOuKdZdy1sfHR1h3w6p74kWfos7D7/+oJbvCfBONjkEzbl0gpIGxMdwsUCMi0alxoYpqNtDOIwxRnD7XlX3ZAURA8FEMkDr2IWippgV0eytxAyxDXNC0cJp96IM3C4LJygNZ42YNmIECRwVQLce9CJtGNAOLMszECWQE9TQFY2cTs3aiBSTpfV9Lzi3TS97BBxlb70SA2KjwgUgp/qrjxxrIKjKsZh5OO1BJVZ3cL4RBOl6SQ2ECKAAugElsj48Dhuko7DLvMLiIAC+aNtuuTox0oyrq7JCXTpUCzvvKwMm6c0oB8p7hQRiclHoQecznK9a0Y9kJbjzh6YcfMDw+YrYmIJkwqn+y7htS1s5S3bL//v0jIhUESuC8oKQrnEsYAmPwwBg8nh6P+OG7E8bokbYVoILpeEAYI/IuXVtTyghhEE0GF7CvG/7zpw/4r7/8gj//x//Gf//0E67ns5Rm+yDBmBNR7JJauSqr3iZzQUrSAfE1kAQFTm+R/HsgvJbZOAGDXl6e8fnzZ8zzrPqK0r3Qhyh2PBXsOYNSQsgJIQdlZ5lj1J5R8JHbMj21GjfPeusg3oIIf08D8x/ZJnvNt5wR+ZmBmLdgzLecnnuAzjnVXgwRLkSxL/o9aED4ymlFB+h4YZaadSdqpStEhGLlIDf36EFF6NnWALrGok5wTpq4CEOVkWs2UfW5SrM/BKostz650jtoxVrX388BN4hEhkilACRMgDUXM/sqvoDci3CbXDI7CTTGTb9GbudIAGHLW5Wuk5cJlKfS6QQCVVSbWfXruhKIt0DJ+yYW/Tz2ya+3rgpYUQuu+mQZuFTgvY5pnUl9DvNZ9HtZS5iJFVRzdDNG/fP289hfvb6PrBMHRisPFhtfKkjUtpCuZ4I2NRCQq+TGCkj7joXl7HTOa6UoIwaP08MRjqBMugWcxWY7AEFL+TgKYyKngpJ3hCCMDwP9pJxOgJnMGRQgs0q349yXeEnZGqvHlAEq9TwzUMqYTnFQiYfCmuT1ol9nWXvtSs/eK0CmyQ5jLLOwZSygvtnzkGRTLectdLefLMBWINCxENCthJx1bkja1wi7vn1+5xg5SadF76V752E6IoZBdKfXtVZThBCwbSuez6X6x7Z+tm3DlkRDjZzDEAO2dcHLy1d89913mKZHXK+zAB563lupVGGuCVpb57JnGME7BC9d30PwOCAjQcYTKUuTCUgpc4zC5l/2XWwFOwgbQDqRD8OIOESs24br5YLgAg4PB7FRRBjHEdu2KkiXZK2QQyDRvJKyzoKBfC1LY1AFO2Q8ZbxDcJimEQCp7EwXE4GFjUzit9mJu6eElEkAqsOAwgk+kTDJnDD7XIjYVPtbGtJJ0LquC/KeYL6W2YxxGqvkESqAw/DkEHxACKJHbIzNQkDKGZ5Rwb+tZMCTlEGyEztCqtet9tkapgGo7HAiYctKzFm0cZiHp7FKaBjwYDY8hIBhGN6s7jEmHfIOT2oFnO2fZnNOpxOmw4TT40FKXTlLybuXgzBxweXyFY4ynAOenp6QD9p0ozA+ffoELr/i68sXbOuOYdwxHSYE5zAEj/gwQJSfbxPpTkGtLRVl7ZYKLticZE4YR2X6BrU7aiMoJwVojE2KGxCjrp9CN3sFd5ppraQUCqg1W8JoZbE1wVMI3nOdi9zZegNxnfNwFKR6
6NW5duujNPtlsXaCuWU+KLBp8bVrv2dgk7BPVZOWi3S7JmnaB/IC3jHkHgbC63ZyJExVTwC8YgRFXiPVNkW6R6uGnRILFwgAACAASURBVOyTuzM6v/YV5eeaADTpLsfa+Vr9C31tUqAdhUWPm1n0HFMGMePzh49YlwWHcZJY43KtVWBEBAzGJr/1L+3rrDa9gnMkZ4LBUKUoyaZLkolvKHsu+tasxd7nrevet34NuPGr178F+vVjC41N5ZnsFbclxf04t/ijonfSxwAMr2xe0ywuTCBlFIOlYSZnh1zlWQjFRU1Uqf/bHsuGCrdP//r6TQDdw4M0FvDBqLVAKozrsmBeV9GeSwlMHsEFcaZIXIw9C4siDhHv3r3D9z+8x7xdsJWMT1++YJ43XFepg45hADhi3520r/WyyKHIL5EioaQumhkPPYhuEF1F5B23YM8OJ9KFVrLUCBsi6knKcoXhAYxDhOiLiaaeo0HQ8zhJq3gWJpMzN0Z13WxxpjUhFdusInTuvTjjvtILep0XaCc86eQZlTo5TcKUs+sPf/g9xlG0stZtxjzPKvoqDLnL5YLlOqOkhLxJV91cSu06IsaSEKKVAWXNmGqwQwzyXFePZTQMbEORDV47JVXWg5plm4/cQi677jdUv/HuWXaVgXb3+rey7W9dxqfqjYNpXvSBi5WDMXNlek7ThLyLI5rVMI7jEcdjYy/967/+qzpqvstIqGAnkUWo8v6q0wFlH0qbE1dFZU38EwpeOQfpIKPBhhhGYXiZcco5yRosRZqXlAKXkxyqpSCSQw6SfSlUkLeMjKxttQmkmo6UAQRCpAAEQdKKZkhlfwlAIltfgDt2GcEHFM7Ie8K8zSL8S4whBIQ4AjwAzj43AGjZt45DzjI+TA6ZpRN0zkVZEJIdtuyuBTNEBAQPilGA9ZpZZuOxacYvY3AMx9oVjDw8ExyTiO7CYd0zyAX4MCHEEcMQMR4OOD6cMB4mIASACtJ6xfV6xXydpeu0hu6krEjTCyFjvpLYmvF4hDDNMrZtBxcSrZ1xgqOAPWXkQqAw4OHxEe9/+B3iFLBnYIgAZcI4AB6MtBWgAKMLiGNbg+9ODwiU4bGCk5TPBucxDoT3jw+YYsTxYcLjwwM4JyzIGGPAw/v3WK5XXC5XXOcVBcAUPfad8Xz5jK/nGf/3//O/8NPPv+LDh0+4Xme1o74mK6wswVFAnKIeXMC6bViWBcWAEBabY3vC2HNEnSNoh6M5CsxSWq22OZeC7XLF16/P2PYkDogC2TFqsJ92pZdLNzHrknifvHnreovJ84+u3pH4rb/XO5b9M70FtvWMh/t7fOv5+8tp6ZE3TSe6K6+gW1bVPUBXWat8z8ACAJMukNfcOsxc/9x/LpMKaCWVHXMPzTmqpTdaGoruPv2z9OBYGyD5QyqHYUkzmMahAnavmHA6DnZGmM5kX/7ZA1lvJZhePQg1cC7r2JMylFlBFUcCCsCZ3oGWHVJ7purDcAsw30pM9s7n/Zq432sGlvUMmn7+mQusU/xbY25z2zvLN4lG87a6tdUD8N9KxtnPrCz0/vP06/fe8W8JQD2viiRg+3LefduASJimCO+t5E2YMofDpE15RO7jcjmDiHAkwAURn085YU+7AhcFHGP153wQ9lvad234ceuFFwVke006vrGLBibIn32XztQCGATkMsB5L0weEsanBLqtIUDKGQGqMtHJixgYLg/Szw9gyQpjrhIJIEHd/NneJMFd1e9VQLnudZsLgqMgncuTnNsxRIATlnWRUuEgZbSmkWylkPMyK3hcQB0gYM2WjsejMGKcllZuG758+YLPv35A9B7f//gjxiEir5voKW8bUsrqy+XGkFKJD2Op2veEXeXBJnHDwn5HLuCk85R3MBUM0QnI7oWtEyhgGgY8Hh7gncPleq1+5cPpQbvFyjh5T0jJC1BXxHew9QhIkyZHje0nyX1NfKVcmejSAMLskaybZVmRS0KMrTqnB8dtLa7rqmXfLdYAgG3fBcAOQQL6XESbjRtICgg4nfYdeRiE/aS+NBGBQoRzASUllJxlT4jKJ0YvpAMPXUcM+JzASTvqEgDy8OzVf7NzyGwPAILuu96mCHNPmod5pC1VUM05B3JtDTBzfVYDlqzKxrSyc2YkTtAaEBjhxM42Kc8epPNusdJPAYUKF8z7hm2e8ZxUyzvbPQKGIeDHH36PeV7wfL5gXzdsS0I5ZjgGoouI46ggTWcrC4OzPI/3KqWTIGQWKsickdj0KkO1z8wSkzGXmpgz4L5qT1b77bScNXf7XjpLS9lwAzLNnxN77er569QPNltWsqu20JrKcdf4Rc57bcLjFJStmrW34JwlBQA0n5tbCaqseX2m+jydIWaoDIuWkmuDHlLZgwAzcqrZqdZSoAP1SwCJE7o9ZfvKmLAl7XCabLMmfURWUIlbJrraYaLOb0GbewGnISw6oK6/m4Sclar7IHuxFMyXK1bVqlyWFdYIycDZ3q8j18aYi8nTdGXVBtKpm2fVdIY6Odd8AWe+gW5Yi2/1wLi53vJhANSE5puXnrN17O0Z73zFWx+ksx826DUpaskn1c0FBGwnhSQJyuSW/df7PoY3WTJJ7IuDZOi47l6bS0sZ/6Ow4TcBdOM4wCiqMUZsKeF8ueDl5Yy1CosGgEK3uMQQ7mpcwzAA5PDl+QX//dN/4j//8t/4+W8/49cPX7DMG6SRlSLppvNU2wt3JYA6EDmnCtYB0Cx7c85EnFgGxEpois6MGaRSdjUUAiZaZw/yAT44BF+kLW+ImpkJSlEUx8L0FUpqKLZkMJSZl4EYIlyMGqRKVBs8VW0IEdB0emCLEyaCub4TMB10cckc5Jzx/DwLILfOuFwumOcr1nWtYq95z0DWwEQPlaLMNqjw5abCuawIOfNec2zitDil95qDIJlIuouJ6maQmwmAQ+os3wV9dt079/a9urld06HpWXd2rz5I6O/fv0faci25M+aclfMamAYAmRNc8HgYh/p66WxFaHQlQds1PyV7YZzEoQWEmcKirUhMrXOpjoyUA0o5hIlNppK1JNLDuGmyzgXILdsiGjddUCvdY9QJN102cspc0CDCykd9QEk7MpzSowFRZfISpBYCU5EMYVGR+MQAy+EGOCTOyFtCRoYnB/KQLI2CzgWMeVnw8fMnXM8XuOjw/vEJp5PX9uZUHXkuRcpTi6sZPxGlRQ1aSTPyPhACMVD22so6BisFArhsCASInqXqYqjRVdI8yAIv1Q0QBoa6WeTgtUHLcRrw+PSEh4dHmVPnwJyxXC6YtwXrssp4k4jG75blH0aQrqEtaYmlMTJzwRxVh448AA/nB6HFlwJhFgY8vHuHx3fv4McBCQ4lAWEgwAMxFIyR4QkY3kdQOmLyAwY0sH6IhLxtIFdwPIwYhwHTSJhGjykGPB4njONYy6+eniYB1plwnXfsmTAcHsEgvJyv+Ov//G/85a8/49PXMz5+OWNeNmz7jqzli1yka7MBKD6KY75nE4aWDF5hUm0wnZ8OfLc9LQA/I5f9DTaaAG3BDxiGCdN4wOcvX/HTT3+
FaMtkUGG1R1Ze1fQ7912crRbs8419sEPYnJO+5OlN0OeN697e2PfqJzAG2I2z0P7+R6ChPMe3AZf+GfrPGKMEP4fjCXEY1LGzMmw5B3vm3Fs2VM4qVnFcd/P9YgmnjnVg5UqNKZU02YTOyb59Zvs4jgiasgbQss3SxAF6Zt+CM/cstpv7GvCre96Cb+crWiYhVnevmsDrzh+npUL92jXWmZXi9WfT/ZnVAkigDyJ70FUa0qiAf3U0xVYXtg6hDRyz6z7YtrG5BzFtzizwtM9r55+t/X79t/XZxuZ+T/T37wG3vmRFGINtn/XPue9NfPxeOqIH+e7B437u3wIo730KCcrD7fiQyLPM87WCE33DqHGMiOEJ67Lh/CLMrm1/wtP79wBRZQiFEKX5BBKCUwBVNFVEqNsTEgvTyNa0JB/abrCkYdT1ZH+GYQBzwcPDEcfjETFGXK8XfPr8CcM44vHpnYq8q6C199iisALN5ko34AwfHGIM9bUASbK2Amo2duIvVzF9OFAhRGVZ7NuKUnbxO4ao3TKD6LFlWe9tvkRzyjuH4tv6k+97TS7qHGmnQ+cJPjiM01BtTIyi5XfjJwI1kU06H1ykQ+nl5RnH44Tj4Qh3OuF6meH9UjuVWndoQBhX0zTAEgOmBSdabgnRAYELOO8Y9OxmV5BJmBkUA2YN7EvRKomRMB0HhOhxOV9wuZxr9ct8nZVFpnprgATKMcCKPZgZKSeQB4YRGOJBGu3lDOaEbVuVQd5YMNfrFdfrImdvttLF1gTtnm3c28x5nrEsSy31Nbv7eDphzxlb2qU6CgyCdI/1nhBj0wiz/VyTYWA5dyAARNr2uuDNzlb7IYGRaEB7Jw2eXmZJ9E4HlWUh8ZW5ICLUeC0MEpfmnAFHtbJJ7BXgXKyNo4RcsEmiVG21aWCbfw3csYrJgYKWJFsnS4tDq6+h40pFNdgTyDoalwKnXZ2LMjRJtR29C9rZGTgeHvF4WnCdn7FtGZeXBWM8YBoYQ1D7bMARSfXMzkXLq2UP+OBhFQJZCSZmc4w1VHJW8MKJXA23MlKb+2XbRUsrUF1H5tf19lvWbsCu2o5E8m+GxCfMQHcUiH8QI0RaoMkN+SAJex+0iRrp+4Lho0NKRcEPK9+X+bSkYQiNERWj6ZaaxrFo1deklkdl6ObMSGVH4V2aYXRUEksmMzvklJqG2N3nB16XWN6fZcwMHwM8EXwpKpNFlRQC34DFGjOSdMzOWUqFiwqc13MxOJjOpZwTjJWBdVlQkvpPThJRe95EP1ztX40jS0LKXuWyRlk/gDKB5VBgKlql1cArQ5QsVgURvAuCKGhiiUiYZxRC7fwNos7n6nAb6sDjat97P7dB0/0ZUH/qb0k8FUhlOWT7pJI8etF5IV3DHlU+QME5HxxccYBWf9izlVIAtXEEdVkV8KtNv/RcAinoD65NwuzJqf7vH1+/CaDzwRxoyUau+4Z1WztGlqCQFog7FoQ5+AAQcL5e8Oc//yc+PX/Elhf89OFn/PrhF1znFfO2S4MlFdotusEcCCgZrDXulT0HAKWA9CDR2dABVEFeBfkcOzB8Zd60hLcsEPJa5keE6KWFrnPCanJUcIhOP7aUS+QCpNLe73w+Q7J8qkcC0emSaWNMY4TzQTrFgcElidigVKvj3eMJ5El0wqLDGCaMxxHHw0EacejhsG0brtczAPn3X/7yX1jXFc8vX5HSrm3KU13YUp4AeJ5kw9n3NfOjKFrtjmPZkGqkalDUgijFmYGacW2eHSsiXlsmM2rZ0FtB5b1D/a3rLQe8/30zTr1R7JFy70WLilm6VgV1fsdhxNPTE5Zl0d+JGlyJk2VjmPaCGEcM2oRD3hsArjoSQjOXP33pDkFERg2AU9HVIiwKoqxln8ZYVKOh40pBNNI4i4agJw8fPPaUpBRk2QASujwOjDGO0s2SJLPkScSVS9rVMnkxElqGISC6BGhMTrP8DFJdCCpS4JV5l5IXRyCWMlTKekiSw7onSMcthnMBYRgRoofThhvjIE0cvOoHsRcAk7mgKFxMnBGcNHmwrJjzAd4TwM1ZErFogJAkaM4Fu805a0maLroCDdLHQZtAaNcdLWl1NMCHiO/efY/pcMTxKIxJp6XO67qgpITr+StSEmaaRxNt9zlLFnPZkJTdI4E0QFF0N4X+LGC/cwHwA4ILGAPBD6J9dzg94fD4iOk4yUENYT04YgQAebsAxDgcA8bJYXQHjJTBW+skPI4emR0OMeD9+wO+e3/E6SFiGiLS9SLBv/egCLBzYOeR1xWX5wuWpWDPjG1f8Hy54ueff8VffvorPnz6hGXbwRR1vhz6g41hwIv8yVYSlbmVWik435wCAybEQPSsVTsYbzc5SeBCpIGmlExfLhfMykpgzuIAFFf3rzUcCN5rOZVlM/ty1VudrWZX3gbd+qsHQH4bwHb7O/293wLH+tfXspC7e90zmFpQ4mppzjiO2lE4agMhFa0nPaOrxum9M2SGW4L5W4adzfktKCTPeMsoYzYWrpXAte9DQQDme+Dx9lnA5SYI1AHo7nM3HjDnTll33TTKOgNQ2redAgl17DVAJmYdGzkvv5UQ6hNKt5/PvrYkVXMa6e49LfuqKTHzeQW8qumEdt9vnZ1vA1xtTfW6PN8Clu9/ZglR2+jee8QhYhoF9AcRrFwt54y0J2z7hn3bb7qlmw2we7/V/Omtf9vZ0evCGFuirkm0sbVEgJV1goqc6WRMXvUVLHGrgVLRQFRAZwn6ChgleAwxYN8TrpcLAGA8HOBCuAEc9m1Hwo5p1K7bzqHkjC3vQEYFL+T1Lcvfg6JSbidVJgCwLN8BYJxOR/zwww/w3uPjx4/49PkjJEDVdV7XrqsMOedMJ60FEzJERW2cf7UGbreXsinYWDV4tZdKKYAlBZVt1/RxSZ9R5WV6kCh4FbwnBcVCC/5gYIqxdloZc/++tscZsu6C93h8fBRG2LLiy6fPcN87nE5PcCQdU5dlRWHrgGuWrrfhYq+g98xph6ci/kkpiC6D8q7+JMDE8AgYo4MfJmQGMjPGKWJ6mECFsO2bABdeO78H0/Hiqq1bbZ4CvCAg5ogQPdKuzHmQ5lcChsGAlb5xjQTG8jrVfdOmAMVkd6jNf7/f+zm1rq/Gpqq6eqGtq2ESZs6u0imm7ZgUcHcd6L5vO/Zl0/kUMkOkQeRJICD5wzRJpc8ugEJaN+SUACfgXi2aqlE6NMeSkRJjTyuOBwGxhcggoJnshYDjQ8T1MquvgZqsX9e1dnYex7GSICzB4kgqU1pZ6VvVPFrpo1rc5Ahe35/BgPPY91XLOBO4OHgEYSUpcy3ngmGY8Hg6AShYtxnrsuN8vogWssaArgLSzU+Ryh3ZgxK7mY8gXxK7mzluwIE2emPWRiGtyy8gAFtwAx4eHlC4YDoccDweQSQEB1s7xj7sgfMOIlF/2+JEPVOcNGzyetAxl5oEq3a7ZBTK4kMDd2fHW2ce1/VBd2Ba1sQxSmrxKQz2uUdNbK2pn0oFpdxKytxfb59bjd3uYqs8ZOYbgI5IWNy2j1u8KzMqMjIEFI
dCd2xyHS8h3rfGL6z+btmTaG2Db+7f+2iAaiX2msQVPyNkrnwzHSaN/fVzMhoDsNkT9attjWgjsX5s+nG7f55XSUa0r+99IANe73+/vyQ5cguYyjh058kdxlB9xToXRuq69c/lJepf2eexdW2fxalsiY5JD2ze3+ut6zcBdOSgE12wagbnfD5jW5NSQ7MACirDoI8u2kEoOC8ztr/9FX/5lbDuVzzPL7guM/Ytg51XPZCIkkkRYFkZOW0A7+IouG4yxDIiKygC/Vsyh6UGjo6iaE7UgMzXQ5Acicg9EaCOjdfFKlltAuetTp6UTEmDAfuI0oJbRZ3JgCtIm3kAKUs7dPIRQ/SIYUQcBkzjpKL+HuQVHAyE6Aa46KSDpnNYrpcalJ7PZ+D/+B8AgF/+9jes6yoLQjNLr5xv78CpCfdapQWzZHPkCzOmpf1dJ1Ad+krp1rXw5qJqoRGYkfV92qZuDtY/WpD9db950b23Ze2+FXRY4Jr2jHE64I9//CMeH0/1Z44Cjg8PN79b0W897Aoy4jjhcHzAMERYeYEBdKzt7HstIjnYrVTTtYMAhFbnDgF4yA7XhoKaFhE5h0iEbd2Ry4YwRGx7wnydsW/SvTftks0uTlvPq9YZVMS47ElZbAHsGB4e7Bqaz0Vo8Q5ORWWtrEWcfYZkEZ2WBUj3XqHxOm+Bn2ReHx4eMU1HxCgZmTF6OCY5bBm13EywOi3xgJaEwsFHK2UFmES8uCTR5xH9PsHaGFIKCwP2nOiaQAMKWQ8eDIfkI3ZjUKhDE4YJ0/EB0+GAx8dH1SARrbLrquXi6wbkDWU5w5UdXrvnEhMKGK5kOJMfyHJYEovmZmSHoFpTpQhQLyxSed5xjJhOD3BxwvhwQhgGYdFraURwJA69yzgE4HQAHg/ANDhMwSNwxn5pnYSHSBgPT/j+3Qk//vCE6TSC3A6kHZfPnzGFiDEMAJGUIi9XvJzlz3Ve8XK+4OPnL/jl4yd8/PgZX59fpBzIBbiopQuFUDJVJ6gUoBSxK6Lpw0i5qHPQysite1+xsWI7aFuTGFn3r8EF570wWZQVkvZdGQlXrMsCr9m5kopS/qXbcNk1KA2k3bWk1EVM4q2T3coOUIEILnTb8PcOsOhtTA96vGWj3vq7v8d9ieT9H9bmRT2Y2ANm9gxVWL4D6EKMSFnWnmkk1ue0z6fAmVOw4qZ00imAheYAWSBtc9gC6JZhFPDB9P5Ikw5NJLoBXFKaJM6Q6ZBZlrxLAhUGa4e01mzhFigzMI4ViLUzisihWIDAFv6jJtMkkZcFaKpYVCuztiYwr4Kc7v37uesd4AoeqbMnjhn1vtnNmJoIsSMVW6fX7/UWmPstkPh+jfVA7z0D7f736toiCRCkIoFBkORPDA7jIB0QHx4eAKCWYp7PZ1yvV6zbjj0X7MmSuHzzDG/5BL0TDgiwJExvkQ5hZsBLd0TpQhdeMexaQkDLqkquP8uMWvolfmHBjgLOASGHytikIAAHIQB8wDwvWNYV54uArcfTqTLVKvumiG+Y0iYSDI6ki7QzHSYLKHTddwGTdwTRThJw8OnphONxApFUrxAJS8xE7UsRMHfbVmHQeyggJ4G8o9eBuXOWoJb15rpurm1udH5IABSQgE5KTkYzBZIcAWd41dtyTptBaUdz68RNQJ07Yfx7lOIlEIeVJNq6Niar0zVzC/LWDq6l2aGg1TGnY4Anh8+fP2O5zvhCn+HIYxiPev5rkulm7wlQM8QIKEPJUZA6Ewe4zMjLjBgChlGaR5WUkfIuFQlc4Ei08hADHt894f3TOwTn8OXXz/j48QM4i4QNF+sALeNK3qkOJdXPDiIJ6IgQ3QTvGd5lhMRipwCUIuxKYe0Ja0uagJgPGsS3VGkPjyYh0DOfjQE8jmM9i1NK2i0VlclTIM8qzLwd0yg+HjEwjROu5zNSzsjcWMW5FCzzjH3b4YuWEeeMfd0xTANClCZS87YI8yZlUC5IBmKTVFhYLGjJvcJSsidVfspQDgFJAd3agTVn7Clh8ITD4QFf92csy9JiAm0UYZ9btKNjHZeiDgsX9R1g+6ZbO/3XFX+i2igM6jsThDxR0g4KIonitVJL2HeEw2EC8xNyydi2Feuy4EpCMBnHUe3tID4+WZCtJfwG6tTNroA50c1aJ6JKFqCbZ5e4noiERYdWIUIgHA4HTJOwjM2Gmb0tmW+Sb+1+gEU1Akro+avncG36Q4SkoHi12yygOHNWNmKrHuobVLWEjUpsuHZu2OcFS8KdtZyt5AQuGUDzu3IW+R8hq9hZzZpEAJh93XtvXSJ/dA/aNHva+26llCpZ0X8G4DVTXMyndVe+ZZkxM4rlH1nY2o4nEICVhdDDXMCpICHVhpn2nM45uCisRR8DXFRtdKt00CZ6NpbcryVzYNSv6gEyIlLboykUumXUf8tPfut7ddxs99352f249f9+/bWst/a7Ni+38/Y68ckKkcjm1qeqRBj7vV39i37/MTf9fcP1uydv/zbf9O9cvwmgKyi1U8yWM66L0KLXTaiiJamFKhlwUuJJBDwvX6XEbfHAs9wnKaGUWcr8pItL0G6iRpeV+3FOqheTtVuyIeWS5S51c8DOPbg6SHogMWrJiw+CFIcQQF6EVRkQgT/dnIbiJwCcdjHUzCgZtbUw6yIfYlSHmlSgUQJyr8HB4zSBnLQyhncYgkMcRhymES4EHMZJqNkg7GVH2TZclwX7krDnjOv1ok04FtV8EIDONOe8D7Cww3TlmJP8XaSpATxJSSGVuihkuXkUbYBRx1CXoDgymmG0LKOuo34h15Ic2QXVELet8Ha2vr/+LiOAXwfIN+uyy6zcX0QiQPv+/Xv807/8M/70pz8hxqDlwDNAhMNxvHEA2VnZjgBPx8OAGIUtYFRYovXmGS1r338G+0jSAcfV/AM7mQtYqbEdmAXaAUjGUnSYs3ZcjEhFNLdKKRjjiGk8iCCvdjLOChKl0gGFzoltIIfiUjW4zFAEmbCnFazrF87JuvVeGLPkMMaIxJAgqx4mYsRLYe1u5RCCOEbiAKluHIp0OCoM5zK8j6Ix2c1VYSDkAnaNVWEHntNOUHI2iACr2E0RpiZIQw6nnYsA1wRtyYNdwGUHCkmjCQFaDxJUnp5wmCYAUiawXK9Y5hnrcsW+beCc4Dhh4oxAUKH2XNm9YAkcg2ZbTccyhiBlDKydppmkxDVEuGHEcJgwnh5wfPeEOB2ELkcAYYcjYPAkQNzgMATC+8OEQ9wxBCBQwegJkQj50BrG/Pj993g8HfH+3RFxCgAl8D4jbQtc8CBt6FAy47pseH6+4vPXZ3x5PuPjx0/48OkLfv3wCefrLDo2rOBmNiZlqVnIxuZpZWmm+bHnLGV5UJUZUtPSORX9fpe5NseHKyBhh3r0DjFIZ2LvHJbLFS/Pz0AuOIwj4jBhTxlzWbBnKb+ueoXVuWrv9ZZ21ls2pwcPANQy+H5/96+//95bX78Fvt1fvYPW/vSZ8zY29loD5G5Zbnp/3c9MrnYFbQCd2Brbc0J+ueugzcpEZZa0oEMNpA1gvR+r+jVB175T5
ke35y3YKnwzJ4BkG72xaEH6/FRlEuTzhuoE3ZR50i3AY2NQOSosPoMW7kkpDfQMUeehOlZ6Dzvv+zns5/4+OXT/vXZ167+bAwGQrV2ROoFOSiuKZquZbvdPv5Z7QPBVBphvtd7u17x9z5JLr89ZASCou6cxXnPOWJYF5/O5aldZAGIlY9Pk4VMBeAMX1N/9llN+D9JZQjhnKcd5K1ln9+t11gTQl/GRro3bzXhYJ0dL7vWl9fd2wpPD4HxlEey5YF1nEDlMh4MEN44Q4wEoBXvacLmeQSTSJsM4ytmjDQVatl78q74c1J6rHx9j+izLUj//OI4gcthqd9YEp3uZLZiopeWyxshxLVOv6witQVZfDKcoowAAIABJREFU4nw7PQaUu5ux6UN8acoh2rIFsr8ra4Wlq7YFq8y5MnBC8FDyuQTEgPjQZCIi3MowzG8BNAHfGNFxiOpjOGns5QifP3/B5XLBuiW8e/8DlmV5017V/UMyHv2eAUuZ/rYuiDQJO2qICFywQUBjJmWPM+Pp4YQff/wdKAOfPnzEpw+fsG8bxhjBnKTrb1abqA0MQL6eqTJhEH1gAog8QiCM8SBNDtgA3YSUNuQca6winUAFqMtZKmFYg9B+r1fgXWMg0Vt+V/fz9XqtbE+vGdEt7RjcIA2YUhK77B0QPB4ejng4HPDlyxe8XGfZi5VxRgqkyr4qpWBbVwz7gGEcK4i3ni9wAAYvTO8QAsZhwF5SHRanDaFLB9rYe5Duq6xN73JmLMsCFwL8NOA6X3E+nzHPs4Bdg5znva0opSULzZ5kTby/dfWAmPz+LUHE9Ks4i1VPuwCpOTLcQZiTIQSVwCE4FwEasW4T5nmURn7bhsuZMYwB02GU2PYusZI1hmCSxHcmSFEOEQgeji2B1j13KYBzNQll+xqAlugWDDHCD2Nt4DDPM7wLOBwedA2RsJCrr9Dict2ltm01hlQ9LpAdxm3fE6EH6a2ay+7lnCX7bs+/era53ka0M1DsmTReYc6Q0epiPRvDkoUoQVLZxOLE6vjoef13KHQ3a6Szr3L/LBV42df3A+Pm+fc91T3a+3BBS5bFroskUSotDqvMyar3aJIgOpdlbfgHNwaYcw5+EH1vO7ubth0JC4KoJnxaYyYb5VL9KaDp0PcAa/+MIUQ9y1HntYK43IEFdn82f675ZDa2hnVUrFBfY5nXGx9I/2/6p29JZfRrqTH7AHsoARz1XGRCW9/tPb2WnBtIR91ntXFisHF+5DVWhelun/mt67cBdNqlInPBmoQWvKei9eE2OmKMqIgjL359wcYA79osgBShJsiBXhlcooXmoTpU0ndbx4FqkNc7+13OQA/4W0SaGaL/wa5uCkYR4VzVsopD1FbiIrq6qSaBZVCmONQOltUgUwAFFZJ1OskEkEqiBwdE70VLYxQAYRyE0RC8MJ0sIMi7aFvt245lm7FcV1yXC9Z5w553ZEAFbtPNRNphsq17CxCsjBDoDIqwE8QuMoyvJeNj99PQgAtIy4x1l4AgLCozbr3xMbBPB8Xerv3F1kL7NbBW5+0uAPrWdZ9x528YB2auWhoxDthDwe/+8Hv84Q9/UA2XK5ZlBXltJ1/EoUxaSunh6++HMGAYTjCtg1wIQNOzAaDlf1SzBXYaSs8HyUpwoVZmpwZPxlYCZxAJyt6Vs0Ed0G3LcORVi0HYcMfTJJ3iHCHtCUwkzDYSfJwZyJ4R1ZFiRxBFBwHEqnF38nvkAHZOOw0SHAOJldBP4qTu+9pKVHzogqfW2twCAAGNpYlMGCJKEo2JYZowjZM4Wmr09n2veobi2LWD3jkH0Wcukn0uGYUVnNASBpCwHrPqTeacJUfgCOwBPxzhhgnT4YjT4yNOpxMO06SAHmO5LljnM+brC/Z1AeUkoqDIQNlhQr9V20OPp0JiVqB7OahjGpyAlKwgI8PDxQPG4xPiwyPi6YTh8RH+OAExgLwHcUFAwcETjgPwMDocI3CIDscBcm9kBCqYXMAQPSh4WJHrP//TPyEOQdrZ5wU5L2DHCNOIp+kIzBtevj7j5XzFsmz48nzGz3/7BX/95Vd8/fqCy3XBsm6iU+g90p6wbqLVEmMUhn1prCJxQlHHI2uJmLDRhHnB5MEQ9hoqWNYAHsBAbD30DNdR53gcRwwxYAoe0zChcMHnL1/w/PVZEyPamc+F2kWW9G7Bi5yAlSj2YJYFCnZJt2Xd3x0L9h6guwcX7e/77OA96NfT+e+dhPvX3r9G7NttKVr/+wbO3d+vOZ8F3g+AMkyc6zTO0MANA4beei5JqPYAoQnIK6RUnxf6b0k+kGpeCstESpHsMuaRyU5UMAitFJZIwW1F6Cs4Sa6Cdr1mTP1M9U56lQamV4dQlx0r69LslbqOCuLJ1xagvAVCfitj++q1+l79e9izZJZA2lHLzNrrigp50917fOucvP9+f07237P1bxpAPah4Wy4iIARBWBL2M2PazPNc90wPGtf14zxIG4b1AdH9Wn1rDNtg2bq43TcGTtr5YzpKtleNDROjSCXc7+2cc9Wf6j97D845PRPhMjIPgAeWdcd1WbHtX5BLxunxEcM41E6ATAUprdjTJiAKZxymR0k0xFBBuJwS2HFnm0hZ9/acUtJopqoCCQpOkXM4DkNlPLV93QNz4vH1pY9i/2z99YH1/XppIKLW6MjYeClPI0IV9rezGv8fe2+3Y0uSrAl9Zu4eEWutzNy7frp7mDkwPxJIA1yAQNzzDkg8BDc8BI/FC3CBENKRgOk53X1O/1RX166qvXfm+okIdzcuzMzDY2VWnxICNBcTqqzMnblWrAgPd3Ozzz77jIM2WRMCcQVzBaRoV0R04GjR5g9MpvXD2oxhW73OzkXXwZjBIsqSIW7josDTptPEzDieDkoyYuD55YYPHz5gzbqutVTTS82qAckTDtMEqdu1qbi7NjYQZtxEkEoGQChM4GFEBJCGAzgwHt5/hXdffgkC4U/ffIMfvvuASBFfvvsCLhS/VgfCI1C9lFAboelUt3VZDbwlQmTSJvOBgLo17yBKINLyYRTCMDooHbAsG9AEbFp/fQmdz8N5nnG5XNp6dAZZmwtVmVpqRL1DZmkMxhQD3r/7AqUU1apdFAyPKWGaJoxpwHrRZhMrBMu64DbPGKYRUiqWdcV6vWp3+xAxJtN/DgEg5S2llFS3Oa9YqwEExvysIihzNnmSiHnOqMSYjg8aexHjT3/8Fh8/flRCQwwoJTY2VmPcZWX/atygMUV2TXGPLeExp/3f1kqVrNIqnU1TX5/AwliXjNtFq87ScEMpGcej6V7GgFo0EZ8S4+HxgFofEZhwvaim+I8//IjDcWxl4g7Q+/4bA5t/r35qtf201qIgMut60YpdZbV600K3LV5mfrlcAA54evcOj0/vcFtmfPuXP+O2zFgXLcVNaURes7IzTeNQDGjZ23AvlTYU3tAJsS6nRSoqVB/TQTqvIiHSta4u/r4s8vU+t4Gj28bhB21xWfsVtbXnLEFQx4DS7KJDNCDc7Uu7zxZ4mX/76G6vExFkCMBud9URZreHtSCkuLuv
6s6yACTa6M+zjN79VkSa91PNBwzJGKzm711BmNcFyfwlkGvkEoZR53kcgtoWNr+dAQaDYwIqIUiAuwQNEMWesHN/3D8vwBCHhqH0a6nd6vYM/HV3bs7mY/pz2UCwdi7/ub33ta/0ll+0e55wf0/nbJNgUDF+OA5tW1/zhXdAohPMAFAVgKv9W20yfFQqLPvw08fPAuhKrVhrwbIK5nXBsnbCzFCtGvNF2w0AaqizZKu7VkaRAGpQqtN0bZM1nI9JqdR5WRFjasFWBUH7Foh2RSEAVYNlFqBC2woXEfsOxDhADF3TRbA5L6gVz+cXc1jJnHG9Xw7BusZ2wqDWqIKZlWUUAmJQECQGMuacIEXGNI5KA2dpgr8pKevFN8acCz7++BHLsuB6m/V366rXYeUSxBFLyRCpCki0iaTfUxp2QY4uInOqoM0PtumLzZh0GQES9vDIwDnoJBNYdsKdpp0Js+t4HQCofbPvwN44Yr9A7oOOtxz39pm0ZQI3vYINbPTg/ng84vHxEcfjCUM6IKUBP/74YzMww6AaYas1NwF0TvlmPQyDMubCCDIAQLPteiv3Oks/FXh7d8NtK0QLNuHzqhvLSFpap1pqagHmebEshzYpERFwZEC0y3GIo+ZKxYRWE28BX3QmpBhgYsL9pv5GIuCo+ghMOlelaglnEZ0reb61EnY2Efhobew167i24L4Z72qbdSXwmFBkUTDFsnIiBWVdjSlntHsAAitFYtkBBvq8rRTXGllomTqjkjagryBkARS3JkRK4DDg4ekdxtMjTo+POB6PiFEzo5eXZ6zLFfP1jHy7IK8XcC1IgRCpQrCi1IxSGauVE4mzVjyLCUGuZoZN/LdaJykmBocECgeMD1/i9MUXGB8fwccTeJyQY0CVgimqeOxIjFMiPA7AKQpOseCYCAMLAlSENQXBEFWXilgaQHf7H/973F6tltfHo339AsB//I+9mICuDwXekF/5f/loFgWarNnWZvvxHYD/8p8C+Kf/X1/M7vi3/91/8//r5/27e2zBsh73k0J3dvyVchBAxbEFr4Gv3Zlsn6qVgLrfI3z/rh7Et9fTtjGiAyor2u81CWFab0ItWabMb/1qDnkHGvcOp9v6Pin0FkAnIs22tevs7rFiA6uCNYggu0Y/T++ztPfVPXvxLWCuL7PtG3ncB+hvPYNtbzZtNghcs8yzzX3JkZd43o9HFYCDavDmXBsg6yBYrxt7fx3qR0iTD+m7UvX7bAsAunI1vw4F7ajt2f56L8O7b/DRP8d2fq0dwThaJ+QwY80Z19uC6/Wi+j1RSye1w2hE4AOWhTDfVIsvhAUjT0hxaz6jPvLGgCSglT7p5++Zsy0hGVWbDKSlYSKA9h9Qlkr/KDV4MTaaBYP6ySY7UApaIr2VBHfPvlbroPwagG7BUUsqaqknQgALgyMwWNJXs4bGsl5nLKt1H4Uxd3nT8d3m3/a1jcF+frQAOOcGKuV1RYgRjw8PGIYBw/iMb7/7iFwqAoAgzrzqrBmzap1KAGcNrgozChEkMkrQqqHzqgBd4AieEkYOGE8n0DAipIjrdcH3H37AD99+AFfgeDpgCBGCrIL+xrRDLShSUbLaolDiFphanEPECKEgm7QDW/Ky1tICZq/YZWMpB47G1vQO6ZqMul7PDZD2tQKgsUhfXl5ao5QeAC910w1UUK4iLwuen59RS0EA4fPnz5hvC56fX1SLqtbGwoopadIyKimhSEGu2sl1XVfzcXWN11xwWzPKmhWMjkETMAQFhVmrNBpbnKixnInUfl+vV8QU8eXXX+EXv/gFnp+f8Yff/wHf/eU7ZdQxN1Zc77e7jp7bJx8bAEhphOS602qVbp/R+bfa81AAG7aexCoo5uuK2+WG8+UMvgqW5YbbbUBMWp4bmHE4HDFNI06nAczvVMOYBC8vFbfrBbks8HLm0+mkov6czCclgKrDAfr5Iii5AjWjhohauzVrz5Sh62ee5xafamwbcZgmMDPmWbsj35YFKY5t3zHNq2abNsDG12xvh4AWW5KWr8P24Z3dt70OpP7txtbdJ2ju95nSV9F1xwayVAUI4Pu3aYGJaEUebxCcx3j2L5Xjqapn/1MHd+SULdLubFW364sBeNt+qZqnfdKpuwMluIi0TuAsaKxJ9xeiN5C0Eu9g55GsdsYBboEmMEJkDGNCjJroZ0tSS7Uu3X4tzKCi+JGIqAxSv0e2e7rbG3owlbXEuPel+u8Ato7PP+GH4O78/d/6vfH+vH6M49jec7/H7/f+fVwvNp+9hBpcNbFir9v8OjK/cgMIHXB076+P9fu9p//5p46fBdCtOWPJK663jOt8w7pUywihiXjCfoaoEDyoYl0WFCu5JApNc0GgkyFG7QpEwsh50ZJLAdAoqUlfC2dwbCjztl9TC54ZtgAb0JbaQvSloh0wBVTJGj7Agn512GOMlg1kIGsXHhiowlCmGkV1fIcYkJIKCUcmpEAK0A0DUlK9qVIycl5xOX/C9TJbx6ULlmXFbOy4shogZw/WjZ3qTDjToFvoVbs/1eoTTefx1mpdmVGq6F/72WSHZWAaOKe/o4rmDAs5Sm/0WHBndvScfWef/gPE+JwbWNv9bRfQ2OR5I07z58bOb0ev5WKhfM4Y0oDDYcIwKrB2PBxxOp0wTQdM4xGXy9al6nA8IMSIatlqZmXShSGqQzdoNkn1WipKXlSot9OsqFIU6UAfsFkWFPocmQMIjCKCUj2rHFup2RbEuO6J3aF/F91sYxyMLUKIg87ltRSgVDWgHADRjGK07GML0EhLuLxNNBA6uS0to6KonbM4RGj7dM24Z1EnfZ1XEAFD3OjQ/fVLKYgcNOiuur4CTKQ4VftMVkCQozIqKlBkRREViFZQL+vGU5VyTiBwqOAUTQtCg90QGBSDOWVk2jL6xeTM1QHHwwOm4xGnxyeMhyPGaQKBcLk84/LygtvtBSWvKMsVyAuiLIhcMQjApWppe60oMiBDSz45RISUgMDGGstmV3RusoGEINWyHMdHjI+/xPD4pYJzhwmSEiQFIBIEGRQ1S34YIh5HwrsIHKngFAjHCCRmMI1KYghqCX0niP/5f4X8t//r64Xz749/f/w7evxvN27+lrL/tAxExNmGG4wlQuZbmzPaAWPu3IttMKXbh3rQZsfCg0ky2OZBgDEkrHTMfQz3E6AegzucbvPumWa9k/gKNDI77DIIDYRxMM6BoAYmOqtxf963gCz/uQ9c/HU9iNe/Z8dw64Lx/t/b9Uvrev2WY6wC5Rr4Vwu0Hfwisi50FdrRucqra3cGn3/+vUwEAKtc6NCb7RX+3wYcduBj7cA6ZZLqvrsa2NGXhisjqdre590HrRsk6f4VpWrXTmu2MlxvuM4zzudnCIrqNB0mS8pFxJogg3a7XpZFkyoY23kDc9Mkc3BMWXPW3c+6LTq46sykNKQWUKrvWAAPnOCNeKibYxtreF1XwPxwwFg1rSOiANaIjUhQK8OXSi3aSXgLivUJeFWFMwi2+SH6uSGCUFuykRlgHhECIxdvmqGyNttzNykkq1qoLWnuzUGcxes
AlFbVhBAguWCtAEtF5Ih0OACIWDPh8/MZ19sVVAuCJKs2USNApJ0enRkYCKghIoeIKglLZiy3G1YrHxumI+IwogggPCBnwQ+fPupnvLyAiuA0nRAoYL7NoJgBrhiGgArT0zL/tVabH0SA+ZAVLoQvLRgMQbB1EyRtqIWNQV2NlcQckJIBskXnPocHLMuyaSV2608ZmDdt+FeKCsVXfR9ZLCVkDbCga+q8Pqv/QQHrmgErw5umCQLgtsxYFu38PsQBAycFHCGIFsPkNZsskIGB0FLdVazzN7TD5loKcq1aiVGV3UikrB9vYMJs1SUhYDwccDo9IK8Z3333AX/6059wPp9RqwIVqqO8dQl3IE5EWgOPUoqVdAYMxwEORziM4/ixAy97kIBQoGAsVy19cz3eWpQ5W2TFda4g0gTC4+MjOFTEBMQQMSTC6TQCeEJMAZ8+/YhSK87ns+oNzgsOhyOGcVKAhSNW0SS9cGoogM5xvZ/IWn7eyvW6PUbnV8DhcAARYckZL+czLh8+4Lvvv8fnl8/W4CS2e/U9tQH7kAZG7Gx4i037pLs9N9GKtlcJErGGPRa3aMKfG8awLyntZJb65wObw1a+L3VrpqBM4NDKi/1ZViMMCWrTyQY8xn3N1t/dpx1s9+zXUQCrvPJD/D+4Ruo8zy3R1e/N2uyPgZybCnwhm4k+38jkBSxBGewZlVyw3GbwupgGqoACI4aAZOWt0Sp4tGELoxSTEbJxhWE5XcpymzsGCBIRXP+/7dmg3esU0N2P2StQr8XR2z7ScBuquzG+94nun8E9SHcP4u3u4c3XbIQn/RV1s1txoA1hIxsjqyoik2BwoNn3RqLuLdu/37q2++NnAXS3sipV93bD7TajZEWVtSskGqDkDRpIbPO2rjbwzJ39pBsAQ3JGZQPN1hkiFSkyYtS2z8U3FLsObyolUFBtiKMBDAGBQ2vrrkY+YJ0XtIx71bbLIqLNKEgwjgd7AJux9YEnaIcTBf3Yus+YYH1UpDqmiCFFjENEjJoRDESgGCBM+PHzZ8zzjPP5gvP5Bbfr0oE9+rlCDBoiRg5NTyHnCqyztkc2an0fFPh4lLplgwA1ysFKvzgQ1vXaxtx/kPp6spO9QDzraX8zEwWdtACaqbDARzbnql0Z7f7VGe4OlNMpDWcIeJbevT8244ayGQyt87aNhrTL2uGk3ZvevXvSLk7WhTcELS+9mC7Gl19+idPphFIKnl+eUWrBOIwQKEg0DiPGaWoCseuaUfKKKgOKBBW4rQV1zVhr6e5PwWEy5hkqoUhFVK4w1rU0Z4wtw13t3otlqL1+Ha2rYgWKOlzT8YA5r8rEiEHHaVXAdhxH9WYNZXbwr1TVCFDtGae8k8nO2ZXYRllFWocZYHMCFQjSMrUQE6ZxQBwHBGg3VCpVvzPrmiSBaKd5bULBhEBRS585gTkixBExJpTCCKxNZSioVSgIEFH9AtV9quAKTEkDIsWNAxADNAsXUMEoCAouhgGRE0IYMB1PeHp4h+PpiCEGxMgACq63M84ff8TzyyegLoiRELCCYkEQnaW6oSqAKRwBHkAIyuQIETGNurF7wwwLUAQBVRLAVlo1nTCd3uPxF/8B0vEdaEgorK9nVMSgVPNjrDgkwfuJ8cVEeIqECcAxEMZArgVsA+ueIQMUcPgf/idzaBa8XD9irRcMo2ActQR5frngh28+4A+/+z1+85vf4dvvvsP1uqBUwZorLrcZueg8hHjJMCOXTZA9iAIkYuBFKRW5OIMJyFWfVxVXntNV4c7vEJLFGQKwgMk7iZq9EVH7OU0Y0oCUImKKyhRhxqfvv8fHHz7gcr3i48dPWC9XxJRQUHGeZ3z8/BkhRAw8YF0KLvOCNWdEqB7gcDyowkGzM+asMCOGLUgls3uq+aFB7P/y3/5LAMB//T//m6bD4Zsr0+sNvm36tneANpY4LNghcCcS3OEO1NvJbR9yAKHLcsAdCKJNoLl32Np9EkCsCS7fR902Bi93K3XntAdVKLc9WvdsskBqy0ib/TaRbmbSJk9BRee3hgCaSCNCF0iafhQ8y96ch8b68GFVgE33BWYVGgfQ6RppCqg59j6G3XbjAYEHJP6JDfSXzfXuc0ZuEkU0qNle0zN69o7VW3+7z9qic/VUB0/nlQN4Kkpuz6kbr25i7Oaa2h495w5Es7l3r9G6d4Td5r+exyJkNtwSg2R6fLXjCOiFerTRtAK10kFUM1XUbvefuzEftrLT/Rhtz6EQTE9LH5CI34MYGLb5Kb7mCGhaDy4K7k1U3BcMzEhpUBaBSaq0xBYsiCQL5EIEQOBcEELBNB0wHm744ccf8fzygvO5IA0BRCNEqtpUAGEYgVxUdH/RPd+ZShwCalV5i03DdgOJszOWarEOqaqDt6wrLterdlMna0gF1rVKaH6l2hlAmY+s58oFUgFmAZMmyrx5EWAyLSYho+W1qluam0yBtPI0DzAqxJodqG+h+lsCkJiupYBqNf+OkZL6zKVELMsG0vn9N9aMrb1axRLU3BakusFePicQGXS/ASOxltcXAaRoJ++vvniPUlYs8wW1rJAUFHg12xrjgJASnNuodmU1m6/7UBFrqhAHDNNBwd7bgvl6w+eXM56fX7RDaYiIzAqM1IpKCnDGQd8fScE0JRgoS61cZ1vXVqEjW8CqiV40TUHqE7ocm63yTspqswkVCoIKBNPhpM2r0gCBMWFIUNaCJS8Yhgm5WhfVddU5ElRLmI3dk/MCQgCkYM1GzAhqd95/8QXGUSVhbvOM/MOPuJZrYz9VrmafNDZiAaTkVhJdYHMqKGunsFZUzUvGvC64rhnTNBoYpCAkk7GwDcxmYjw9POKr919guVzx+29/h2+++TPmy1U7wtrccaCZQK2S6Xw+g1mfz+mkTLrPz2cFM0JSySWdaRonmg9vgS5iSsoedJtsWXBPNMcYMIwRQ05Yc0EpK9Z1QRXtmno6PVgnXOtcXNXXPD2ckMYBtSopZr7NuJ4v2qQkFwxr1qaGcUABWcf2sFV4ARBo+Xjv5zsDMVjH5ePxiFqKkUGA8/mMzy8v+Pz8jM/PzwhTwvF0xHQc0fT2bJ8uRfdyyLbnt32d1PYrMzFAE+m29gBw1TLWplnWMXgBtOSb2zYCWbUQGYin8VDgweL3rfOziO/nVeNdEY19GGY3PeFnACqqNoqQAoLq9hWYVM4de+4Oa9oAy24PBZyeYkx/B7tkY8ORjRmskRdAqKx7Z4UgIKMw4xA8cWLxYXMlxDwxbeCmY8RazRNHSBiBMCt4y6IN0qKWr0ZrUhlMx6j5ZR5/k0CQtXOshPaZTvQAxPRCN39LK5S3BI65/BpT+/rxPUQaz0xtne1l1PmzYsCvU4ZJtrJe/5DG6Hx1uH+jMjtNbscul9yHZ+227sAqGyDncHw1f4XM14XtS+5r+DVogzGNidp4AIBJhOn+us0RtvGh7l5+6vh5AN1txvlyxXJdVIwTLkboND5jFxn84AvKmUB6VLvBzWllZjWapViZNaEWIMM7qKkhaFo8wSnqqq01xmSdNy0AEMFSKuoqEMlY5q
wbLgcweRChmzu7a0tklGI1WK63EpgQISBRQCSkoBoJgzOJgMGyxQBQyop5XbHcVNQ354zr5dZKJHNe0YSxk9LRQ4xWMRQMILDMlgWL63LbApbSPUgC1pIhUB0bD0Zgk8+F3akvW+2jQTscJNP3SV9JYgbFgzcH5syZrrpIQckma+3OXdvkNW9uNwfUATJDsOsA5MwIAMIwOApUNAtTC4DA4EHBojEGfP31L3F6eMDD6YTAjHWeseYVVRgDAqYpKbsxaqetnLM651E1O6bjQa+DGJIrSsmo0C6ha64QykrNX1cd7y7Lr1fszDnAwwQBI1cNVLXtqO4W1cyUd1gLxt4TY3xWAlA8u2ogyHqzrLtqAVAFDtNoRkOQQsAQojr0WTsqa6aEga51dimlCZ6rUTXrIOq0w4GKWsCiTU4CA+PjA0DGmgNvmmO1KFMwkjp3tSANCSElnXuimc41A4EnHKcjEk/IS8WyZJSs4zBf59a8pBIBMYIQ9UYFkLpiXas2WpmOWtJLEQXBKP4RHAcM4xHH4wnH4wPSOCEFXZ9SM54/f8T58hnLfEHJM1BvKHlu1OoUAhAiRCKWVVBKgIjS/QdzeKI5xVUyJGvJ/RiTdaBiEA8gHpGGIw6nJzw8PWGZpiFzAAAgAElEQVQ6vUcOE6oAQXRsQygYiTCSIKHii3HEu0PCu4lwTIQDAyORGmWxjTsARBWoyoBwzQKpwLxWSBRcyhkcZ3BKmJcbzp8X/MPf/QE//Pk7/OHv/x6fP72g1qi2aVnBFJBnBZA1wGLUIihl1s7YNs/XqnICImKNQiwwAFtWXe1oyRVryarracLpypxWkAWiZdtk5QUOEHsXOddYGocR4zjier3iLx8+4McPf0GpBVkKXs7P+JsvvsJ/8q/+JRYS/N0f/wHP5x8RAoGxApIRD4Q8A2XOeByPylSMBj66bYGyckNglYZoa1JMc23vcI2WWQa27G2vd9UDM/1BsASP6i+obTN2Qq3q5LemFuboiIl1OIjHxj5QO3y3kZM7sWj7qptpTybBcqLuxLjjU2EsobQxbkXEQJjNwQ0m3L2VWJm1IzTWrYgg1wzXjPWosXWIBDURdzZJCLHM/r67lt+LOTqoxopSMfZg+pNiJfB2Gc3GuW1zMJEoWMdtauwRL8sEA2VdG8PNYwr92UpgCI3Zhrvn6w0G/OjBpz7Dv4NViRpYrT4JKbsIBtaxdnTvD2/M0pJO/gyBlgxl4lbuqz6S+i7OTtmBieSJJIMKDYQScZZ4sc+y5kYECIqtdbLEpF6HNoZR6QcOyqBe1gIuJu9gCTep2/j0WXLXL2qav9gYDg3AA0xCRBkPDtgU8XtQhp7Ymo5xfz8KuCfkqgLbOu6smqVrQa5ijHmY/6ifkWvGki8YhxGH8QAiXctpgGl6Fbx/ekRg4OV8xvPnz1jXFcfTCcOoGrHrmlFZECNhmS+Y5ytKOeDh4REUkzEMBcQBMbAymW4XEKsNrVKxloIUgulPafBJHBB9DxBYckVlL5pweK2QIkiDdWf3Ki9iY6lnW+PSnoUmAH3WKm8ihohICYNJMdRSEEftLsuBVf5CCjhEpBQgCNCOgz3Yyy0hIA7YcwAhtjl3vl2Q82oyI0MDX90WeqDuQWS1LqXeRETjKwYFvd91XbAsuSWaqBbEANNOrShFwYUqwPl6Q6mCZM0JhqgJwQT1oQSCaXxsa3JeMi6XF+1WfL7itsxaueH7mPncq/vWlJDNp9x8bX1NSAMeh9HYkNVAioIlr+DKSHFEiqkFu0zU9DSDda9flkWJCoMSIarpkG2agwVDmRSQW2dNykC0wgdHzLcZuRas84LL9aJrPSg4CAuotTSYW6VRFa2+mE4P+OLrr1U+hCM+f/6My8sFhDM8GVVqhXC16IHAtYDWFWEgDENSPa5BbX2Wqp2fS0GJBA4jyMB+Ei3j9ZhGhHSOs/r3dVnxlz/+Ceuy4tOnT7icz8imoelgfckrUEYtoa3KXlLTueLl+YyH0yPSOCDngstZQcaHw4TAwAYEsO2xFnibvcq5Amb7A4J1nM2YDhHj9ITjw4jnl0/49OkTSiHEMOFwOOB0fAemiJI1BiCyuJSVpPL1179sDXm8iUfOLwi3GSFFPDw8ASkhkiW5a4VI1qodAg4xgUivK6UEtj0+RsaQIkpe8PKi8/l6W9QO5QIOjNPDEQ/vHzEdDxinAWEw5p0BGQLtNCtWsqYVQUZiMZJEJq0wAhKkAmutgOurUgRRBmRjVPs+JtAEsJNCtLO5AlxKaohafun+kgRUychrbXF2iAEFWg3DHZCW7bMagYiqzSll3aPZUYACq93wvdrmgeUJmqfY2MUdYEWkHjJpNwBI0ZCQiUFRf8dBcY3ofmBjUFdIKSjBqqSqA0Jq88VcsZusIGhHVkD9ucIj6HBCIm0UMh6VvZ3LorZ7OKi9lmJsXWwy8ir8rXZiCJhnZfVqiWyfbPUmKpvUCcfQgKha1EZX33dJEZfaYQ6AShfpD9sYi7gPqRVGVSpQYGCng5UGsL2CTJ2EJSbroMQTS+duvn0V7egr0mwTIJCam99GCBCXaOIAsCAX1U/0WML+szeQVoq1ORyQhTQZ1lBOY4iab57SX4fgfhZAd5lnzOuiD02ksYGkqIMrRqP0MgPYQ2DpEFFR0Gx7AM2eNUe7Lz3wTAA3R9NL8aykAayilkVvWgzdVA0vHbRh1E6NzBpMMLszpkBdztWo9yomHEJCjGyBCUDlZtoOmmHR5gGxLVLveLQsatiuV+tuO89Y1wIWzXBWAMyDnTtswpAcVXi+1o2hkk0fprrunGcX9jXS94cHij7OjR7cDg9c9u/bZ9hfn1O/m7YdYLOx+9km25s0YOl/ELfsDkG3jm/MmumtnqG3zxpYW3tnIYAjhumEhy/f4/2XX+JwOuG2rKAYsWQBY7UAO5pANeFg3Y+8pFVBYQs4LZsJc8rV2Gjd/rquWHMFhWoNCDa9u/5QFJwAUs1CsHZOJQvwYlRQq2UmeN9JMqa0gdUGyCQESLTXRweL1fCxQvXNAPj4xsBb+SpvIpwKrFnwYRujZy+kigJPzOaABBUlNsA6qEgGYM+G+vmAbb7t2UNmsEQ31qAKavreqiCN1j2p3RjTqABoyahQVp0y8gICVbAEpJERYgKlEZUDSqUGzE3HJ8RhxDAeMR0mDMOk4FCtyEvG/PwRt/MnXK/PIBSEWBGpgoIGv+u6oJaImHQtCkdltFIwEAIgWBdBH3dodyUd38EAwgfE8YRhOmE8PGA4nsDjCcUYSZFVEHtIwHEiHEfCEAq+ekh4HBnHREgERPF8nTIYfX2JDon9foXUiFKBkIAVGXFgTOOEdbnht7/+e/z9r7/BH3/3J8y3C87PL8hFg9XbbcE8ZxBlAwpYtbhasCZwYWRDExpY09jRRQNiL9kLxEBSULWUgrws0E5Y2thjSAlpUv1NtZeClCYcj8cGdPXl09frFR8+fMBfvv22aVkWqPM6Dgm/+OILYIz44eUHHI8jctYxC1FBnwzBNCSkYQCNEcWEhhuW1WWRS1b2iDOK3Afvs3I9+
6svGewBh96Gbowu7+YNA5UUPNGNmzrGmOtUSjPAW8mZsmEcgPPrt4QfPDHWrKz97MwpdADYW3vGKx0y20Nh59/2g/69m9TB1vAhN0dMm8WooycIZhJ07tRduUd3dABS28hBzQnqgYf92+T1+Lf5W5VIhT3brZU4iZijb4EvNvabanYZoBz2DLD+Gfe6Xa+AxjeO+3nj57q/nzbSpmN6z3LTse+ArLtx6K+vZ6jt5yle/V6B67CBegJ4y4wqarsdfFHwLLZrqlT1+fr8vmtOcD92b13T/c/69NEEM9iTCcbcFdtz7aLQugNAy1GlbM6+33Otzt4sVgpYd81W9DXVEqsVy1qVjZs0eVvN6R9SwOPDCSDBy+WCjx9n3JYFj09fYJhGFGjgNU4jAMF8W3A+n7EsK6bpgGHQRNu6rsqyUTE5LIv6j5ooYORivnQwKQ1r0sKsukHraokQ7IExZUYDgYI2QFtWAKwaah1gqqVrsCDGF78y1ZgipARtbtbPWXsmtVYseYE3bfKOi15qK7VASm5C5v5cHWQrNp9CiHCwfmPNeadX1z0suzXX+1Qb+9EBbAWTRSqWywV5uSo4QwZCEkGIsawZ3/7lA8ZhQAykjYmGEURWEhdUgzgXwZq1qUheVbJmXdXmxRgMAKooVFT+JlpSNagd1PElgPv1EAxEiNqN19hduWgTFl9fgZMZhwpCRST110LQc03TpECnEDhGxDihQjAvq+nzJWXPjYK0DliWBfM8Y7Ekw+F00nk4rggptZimVvXTBg4oVQkB1Wwxh4A0HTAcJk2ap4jDdAQHxnJTLe3b9apl1YH3ceBaDKRIGIeIh9MBt2VBESBNIzgkzMYUXZYF83mGFE2Sewk6ESMFRo0RZc1Yy4JP84yPIijWxEZEFMgOapM0ttvmsTeDI+j8ut1u+ObPf0aICcuyICXTS68rdDd2kPlO0qC4Bufr+a16bwwOwHQYIHhoiQmPJ0NI5m9ws08+1wFgGAZjWqtffr3puCzzjDrPkMqIhxGHY0SMmhBV2QYFdq7rrLUmHDBEZVbn+dY0KNdVG2TM82yJ9YhhSDgcThgPAyh6bEwt5mj7smygc3u+1WsplAWs+tcKjgi294sZ5Gqxj8bDXSd50WRmiAFNA1U2/VhLpUFEIw1mQoxs9qfAqUNwWyXKZstSmj1x5q7ejViIqvOlfRYZQCabD+L7H3R37uJki2+ghINKsKaXCmoHHysRs88mIyF3Po64lAawdQjW/bT5M7LXAPZmhVUYlUhj4JTwdDphPI6IIWDJs66/qAk8hxRcPov8fqDLBiDQODbtcsd2/Nj5PMSWTOkkF0AImul8td/7V19mvj+vrzqVz5CgYHDukjTaZCftfVvrOaCzjV/3YPCYxp66fh5a4hGVGo5VzXdQUHobI9iTUL6EafxC2t6iZ/ZyT012Vdreq9usVam9gSv0x88C6BRwWrVbDbAJNBtS7RrNu3HoALt+Q+03ap/sXoPdf2k22C5PFLwpVVCbCLVTPvWBeIFV63xGQMnaJVI3FWcuBQRzvA9DALEJrCZGDMmyxBGRK5gGQDZjua4rbrdb0zI4n88NoJvnuXVc3UoNtNzUSyz8q41PXhpYUgxEKtY9ikRa/Xg/fn/t2GUgPLLG3UJ649/3R3tWjN154EFkCyTcqcPdz/6EFODwv1fqlwVAMWAtOg4hDBiHCZEZeSlYlwyOhJQGTMcDpuMR0+MTpscTjqcHxGECBSthrrl15RtSxBAiYiDclgUw8NMZBWylASKCZcnWEWo7ioF1pQhQV91gfmLs+7nsm2xz9AmblgiwM0o9g/SnAhY9h60Pz9pYFoLIWB7VAyt1/sg2BREFNDwIbcCgZdhQyEpg+3InqAPrxp3IRM6pgXMiTthFY0HB7p0RWqZoC6orAlWQVIisxr4zxgSp5oY2myhYpQAsCl6Dde0Mo5XuRghFVNH7SIcJ4/ERD4/vEOJgHZITVPdS12ieb7g8f8RyecG6XjEkQqTBwFkFvhOTlcgq/T7BnUk1rwPDtIKy7bt6v0QBwhFxPCBNDxhP73A4PiFNJ8RhQkwG3KEgJCDFihQFUwp4PA54OhFOI/AwARMDAzRDvtv2xdciFJcLFUQrANV9UTKEIETCmCJulzP++Ns/4v/633+Nv/zxe9zON1zmM3JZzSERUADSGCBVYdM1Kwv3HnDQOaabnJezqj6XOgpiga0K7VsJMKljWJ1lhq1Lqtslb8Li3dLcSR6sI+HHjx/x4cMHPD8/I68rxpRa5mktGZ9fzrjMC969f8T7L7/C+y++wscfP4KKzptigc00qSYlDRGVfaPd1gGbo9mSWrQ5c74f+dEH77B10M7TOZQNbGtjWBzJ3p03hNAAumZHxNgdfGc/yVffHqAhQguGAbGgHe2GHFyhsHd67u1YH+T2dqndo+uR3u8j5O/TcfI9zzskbu/R/YIEja3mR2uK4Ffd/kftd77FvOU/+Gc4CPUWgAYLvkopG1tRpM3RDazdOtz2DQ8gAIs0Vtbr8+/H8h5o6m36/vntgah+zHYgJOk67EGJ+/e/9fn9OP01ELH3F+7ncnfm3ev7+X9/T75u/Hye+PproGSfsLq/tyrSwOn7z3UG9L2G3lv+0ltj5ffjvlzPUtz0qbQDu4wDGAkhbp00h2FAGhIQCGspuM3PeH55QRXCqT5hmiakGHU/GUdIBa7XG67Xm/oWoK07q/nRCtJVzPOt7cMQNg09Z2hadGGlkDFGZYMVn7NWIcKCWldcrhnLsgDQYD+G1Pbc3gdRoNcYB0wAWBtImU6Rl5724yciKGtGpgDmjBBgLERlCUOkBZ/3wGs/zwcTOnfAsgn20wZG+98dPHJg2pOnO20q9OuB7D9q9lS6Oa2dHIFFMq4gXKPq0QWGas6CrbTO5mAAxjjgeDoYsDq2z/LvITBSCiY3A3jDk52/ZfdHRECMCLafxxqRQ7ZnE1RH25h1EJXR8ZLXbU36VqO2LYvO31IKQmeT79f5dr0bKcIbSizLgrKsjWUiojGK6MJs+nXfffcdLucLHk8PrRIppYRlnrEW1xMLbR2jVNRcW+yYUsJtmUHEOB1POD09oYpgnheUXDBfZtyuV7x8fsblcrZke1YJFcD8RSCvq4JMOTdmfkwRMXmXXGrPG3nbs8T2j1IKXp6fwVZdEwIjrxklAp4kEXEbLU3KQdlp6gv3c7vNy5pVesm60zvg7KSP+zXRz+3+GKcRISrgf7vdcJtnLEXLdNm6+MYY1GYAqi9eKz59/AQpi3YxDgRv/layMoHbnsO0Nck7aAI1HpL6iLJdm7K31AcmoDHYXR5A/VVXqNS/V/YyT97ZDi1jrM0V8yQmiBqAJ6gWg9t4+/uBBsRVVID93E5E0Neo5JU038sJGruEgFX36bPwizE/Cxsk1T+T9tPdPuuseFgMqPjIPqmwO08/Hm/FgPbp/WxQ02ExocDiJb93tM7ZiYHTccLxNCENCfMcMV9vqDUDVSuBvJhK/TFNJAup3E0FYRwGBf26Tts6f+/3Wp//rvsHZS7a+L9178B+//dxtEdv
993FuPqO1rSmH6vtvPY602xwhmD7u5XY1nY2/SwnuBAHqyLZn9/PwbThGATPDwq0JaMzvQVaD1m1H4NV2qHtPwYE4x/HdX4WQNcegrjzb8K2hpY2fuT9+3A/sTZtBd/wtfQlNA0t1X7QTWxdncFR2qbvWRSBYBym9kkBaOf1RSmIWn9ttdlDiAgpIoUIigFTGoBASBRAKWIIpk1h4tHL7YpqAJyDcPM8N5DOS13caXCHR4O6gJL3Tn3vaFcIalZQgs0qUK1gyf6oUcp+c/2pZ/NTv+Of+Z7ewej/rg6FLbbOjLp/0YKw/mn3H0ldsAEPvWAAiR6BtZyygrVEhgEKCcNhwPvHJxymA6aHBxxOR/A4qU5FESznM2JK5ngxKGjGLFkmJQbC5XkBByCEQXUZONi4V6zu9JBmYFyfoJVMR6OaY79A7x3//ZeXAus3FVBFM+I+Zj4X1Kl8DeL5OTTLujmowQSS4YbDuxASNqDMmKeVgMv1qsCdz0kzrkI6X2MIDbjGduW6IQsgRdmYcmdICPtg7D6wEj8BFahmXDbAczN6lYBcKnJW2nAFI4aIwIOBPqalwIxqzSZCiIjjhMenL3B6eEQaBmXdSUWeL1iNyXq5XrAuM7AukJqRWCnkOscJqAqePxxP6mAQkM2pgJXxMQFcKxhKxxfodUoAOA0IwxHD8Qnp8Ijh9ITh8IA4HNWhjjqfEgNDqhgHJZkdJsLDAXj3AG0CQUoQ9+Y6+lxtkK2DMhVAxEoIeQbqDGJB5AhUwjxnfPftB/zm17/B7379d/j+mz9jvd4gRZMZVSputxlrLpatDdYB0DZYdLap04cQkSaaXz0BIwRgcz5FGFSzzWvNRpJrBEEdnD4x4R2SPSAbhkEdYyux+P777/Hx40eICE6nEyBWUk2MZSn481++w29+81v8h4FwOJzw5Zdf4eV8wXqZt9LxEDEejghR51GmAjFGRhOGt/1jF9xj2596gK5/jf++Z3rfz313wBt4hnvwxF+/M43t87c11MFXbemZG0v7/cCDBzVi7tlohvkt0Kb/7iwav+6+jNc/Vww8u7/mDeDTUra9w3EHilR1tt1OqvNrrB97HmTZVifzEHGTXXhta/eBfg9utdcwN40bv6fmBGM/hvegUYCWMtQOWOydyHt9t36M+1LNHiB7PW5boNwDFs3Zb9Jbex/AX/8WQOW/68GL+4PaHr7f8/dBfx/U9x3k90CggylvAY/VSlj9evr10p+nZ6buxpHQwHW/Lr+GngV2H+Q0UJboVRDcv+b+d/fXsL1WbWE0+6LMVRV0GccJ7969A4jx+XxRUXqoYLdrcKaUEEwjdTZdZG0OwBhSMoazWBA/NZbN1vRhP7e8NNiBAQCoUpTZlRdl/VSva9UE+6dPn8F0RkqDdbG1sidjGzU2LrmsgSW2PdA2v2TLK7itjDYPsq01tQUEatqt/Tjf2yB//q4R2M87ZZSVLrjWZ9437XrLpvn8Cszg4wmXyw0hZM/lt88YhgHH40HZUkX3iciaEGWS1uEwWkKJraxR97VkwR+jlNU6Fa9wGQCXy0mB702hgVyAQHUGqwhcYJtBSDFu4J35pCZoZ2CFN4HQ2CPEiMgBtQrmfEMFIYZgEjybvXIQOKWNddLbqmzss1KKEg4uV9wuZ3BK4CGp3ETOKCJYlwV5XQEQPn/6jB+HEeMwAkUrUIgU3EVnv9TYA1K1geDlBbhdb3i5nMEcrYNrQhwSpmlCDBF5XHEdR9PsqliWudkCZ0KlmNrvPLDOxvgbpkH9etnYTzFFpGnE6XTC+eUCAFY6mtuY5lysRG4LkZVBix3E6zGQfn+9FxQpLQzy+donK/pEh99Df3gHWl8Dk2llD+OIpWS8PF8VeE/q2ykuUiDQyoh1XbHeLsjrCpJqVTIaczJvScM4JJvnVgIMYJm1soUyGmjLpKQT9208cuj3Vv2N/r1ULWNXVTwtRSXqo55tbNzn1FW17U8iHSlnP8DNZ90Aep1j6lOQgXz3sdvukw00qdtFEVn1xj7J1tusFu/3QFWfdLXva8lA2T/7exv3lh+hr2dsEGGfEHOAEeBiNrDtexllXSBl1bLusqJWlQ4Kak6AXFFRdHsQA4vYdArV8wFgVSam4eYznkhBqRAS3M45AcAlWpTxyYAEqEjAllS59y16u99iw/YaQV9Eu/k3ARTNZ+i0ZQ0RBkEQxIkB22eJPdM+zvV5rMtOE1ztVGgOWD/lTJfUztv7TyKAZKviNLIMxTae+hxDN0H+OjgH/D8A6IB9JnVDnvXBeEYHsBjdxU+74McDgp6d0G/ixRDYbCVIMMFqJrKyV24fILQt5sCEFO38QcsuKACR1QANMSEOESlo98rT4QixunbzBrHkFcs6Y11nfPr4A3LWDORq7cHvdVOAfemijwWgFH1fWH0HITVeClhwYq1xJkEQR2iByoR5Nlr+G0GAP4e/dvTXdA++vYUO7543RHXgxMmbysDq728Pzt0f6vT1m5bj/LDzLXnFMExIcUSuFctcQEg4nZ7w+PCIX3z1pbWDTqpnYZY9mKirl8jqfCKwigUowwfKpFGKttLVa1GR21qkLTDVf7IdwwE5Y1luLbzfHqfXAXqXZXu1Prg5016m4t3K7rWYdL1VsINSVmISxAJd24t22YaihqZvS5/LHTABm4dEkKLaMQpk2xjAsrxW1iqkHq2L6/pYhMDWPWgTqocOPbwcFigqJMoVZOwqgShLiJzWrUaQw6DOfIwIMYENSKpBy5U5JAyjNoA4HhSsTWnQDoJFGXPL7aY6g8uMuq6gUoC6IgYgBRMEh6huXFW2LCNAqjKCq9ZDtW5PkVQYPLKWjlSCAoVpRDo8Yjy9x3h8RJyO4PEESZPqRQTNvlMAUig4DoSHY8I4BhwOAY8HwhA6YM4cWCnZmIZkm96q2RxL8RAb4BkzpK4QCXj5dMNvf/sN/u2/+Qd88/s/Yzk/I9aCUj/hOl+w8oBCCcJVqd5UseaCtWj5vJcjtcytlE201zdLoNueNWAuUsxubuUYvnGTs8AEyDXjkI549+5dCwo8k+xd1T5+/Ihvv/0Wl8sFItJKvXyduX6Us/g+ff6M048/4vDFEw6nE2JKmOtNyyNiREJozBRN1qCBEe0L+2CR+7+96Si9BqTfsq1v2gfHqi3YqgbGqrOgOhRePtEcdzKVTtlscRtUc9b6AOv+evVCsdubezCmv4efZk7hlXPSzIR9joMvfRmwl7pqUwxlz7FdedP4o44FjDvGE21ueIP6fuL5+HX3gFkP3BBFOEvglXPcjZXI5rO0cQ0BXgPbP++3xvB+b71nmvUOaO+Q93OwnzcbwLgBdP299UDq/dzrn7Of6/68ev/bnN72kWrsri3wJAvk+vvflRPefdb9s/S5cQ/O9dfbX/9bh7/GHXxfjw7W3N9b82349Zzpr8+fT38/fq2qMen+aEGujIRon6ti6VJ0/h+mA2oFsgDn8w3LPOPl+RlUDjiOSbXcgga2IUQs86JaSeUGmaD6rR3bnpnM38wopRoIurFA/L7VP66IMWEcE9alGAATkfOKGBO
eng746qtf4N3TOxAxci4t0eyldrqGfUy17CqwloutZVWGXvUyMWkbVwgJ03Ro/k5tkgFiuj1bIqOfN/fPq3+mrQtw0b3In7kzJv059WvL9UDvzwkBKkfEmGyf9bVOBlZFPDyccDweraRLMMaIGIP+mxnLuiANeg4HAkR0/ySCsR8jQtAS7xb0WTILruMKbCyNlui2ip7Ob6TOJleCyvAQmRtQIab9VExqYhiHNqbMhMQEsqYQrTHKG/bxfs16JZCfO8WEmVWble1+1qKfnYt3INb7qDnjkjPWZQaJCq8XceAKVsalvmIkhpSK2/WKy+WiWou5IKSECuDzi+kwWjIv31bkNeN2u2BZZmWC7cB5q46CMvZX7aOKecmoIljzYutGfYKDdXo9PpwQQ8T5fGkabwo+atfkGCPGcTJAyoF5gP1hdvZqG2Ox+d/trZR2dsfnaOjj2Df+fr9GfE77Z4agZISHh4A0TYjRtDfLimzEgAjB48MD1kjIywpmIAW158lKV339cFTNOKKgzX6qNlCgyAACStlkTTbAlXbgPfWgg6GS61osftQ9N5A2vNF9FSDyyhvaQB4BmGPTnPekAdHrMbnfX9kSiRS1AmierSKtbBVq2pDD9xIloWhsZgy0BiBuDbbafm/Pqvj3u66yzUEyP1PD/O0c/TP0edP7YP28UjKF+UitHFXhRDatZGryBmp/12XB7DJbJeP5I1DzCeV4hIhgnWeUvJjPCwgURPcGUHoNDAZQhFDWFRXUEu9AJ2XCwJCCkiyqx9RkchQRIDGAbmNp+1j4/TpbtB/LbXz0M7NrH9v1MauGH9k8bQAd9i6r3sudT2ESGLAYnFi1+HJRggIJTCubQZEa467NeWMvB7vOBvbB/SXPi2lp68aBdCxkzya/K+B7dfwsgH8nNwMAACAASURBVK45Rq32e5tgCtxJu8j+ZrzmnIzFo3T5+Mox8qOnBtcKxDD6qDbHnpmbtg+6TmPeiWeI0QwOYZVZmzsMEw6HEdN0QhoCUhystDUhlwXLvOI2X3C7LrhcnvH8fNF/X67mnJTdQnzTmaY7CrQUMLk4+eZ4EaEtDgoByTrPAvq+BtARYaEtQOiPV4HU3fFznN5//DymKdMmuKPATjf2gBXYLQKBLYLa/QKwyMvOQQAFMAvmpWDNM8bDA37xq/d4eHrCYTyqxkaKWtfOquWl4JkiAczaXp2sIYFi9kqZrmvRLr+HIwRQDZFaIEW1a0JgRLYsovbUtrmrIrsgk7+X7Zm/FSC+NX5ts4CYw7sP5u6N8FsAtTNwxIx7tVJX08jcLfAtGNqAMrJ5c5imrqzCHSV3Uk0HoUCzxQwQuUajgomBg3V+teeH3oEgm89q0GtVnTO9T3WYOagOH0exMYYB4a5TJBBKBh6RGvJqnfM4IaQD0jhimkxQ9+FBu++KCvzO1xvysmCebwrMlRVcC8YgADNuszYASFF1dLyzIaCMuvmmXf4KCYS0RC8mFRsPzEDJEAngMCjrdhxBwxHD4xcYTo9I4wPCMCDEwcTZVbZvGBhTEBwD8HQMePcu4XhgjEmZdExAyYIULZ8iBZK1FDWTALUi5xUcCMMYwJFhVDp9bvOMy3XGr//PP+L/+Nvf4btvn1FzRazAMn/G7fxHDBMj1wesuYI5YZwSlllLIsSeXS4akLu2qHbiU3C4mONQAdOrU0dhYwFbkNJKPRQcDuTJmAAKIw7H466k1YMG15r79OkTbrdbC7p6AII5olbguiygEPGrr7/C17/8JxBiXK43naMxwXUSY0jINs/IwYHA4LqVTDQT1a07daQ7oPnOZvYO96t11637/nDHnpnbJus6Ib52pAPafA0zt2b36vDL5vTTG7t5D3K4rZHuPn/q+u6dRP28zXmstdqYbHwB8oG1Ywv6qO3nYg4uE4Hq5rMSrEGB6BmdJw+7RWpOjfoXRKTsEuzBqS0Q2CdIfO/dzKGXo2+gkDvD4c6pV9/qHjSSZkfvx6s94zcAuM1+S5vz+8+ynzsbfv/dkynaFbPs/v5Tz/P+WfbB3itATPbz5q1z+FyCgwl319hr3/VHz9pjDhiG1N5zP079nncfoPozvb+uvhzVqxj6+dHPCe4+8/5zepDObY6/xs+vwJAzgsxvsA7cyrKMjY2eYsTD8YTACdd5xsvzC2peMHz5BSBAMC3MlJIlRdYmi/L+i/dWKundHD2ggyWEV4jUnQ31MSVS1t0w6D7JzBjHAfM8IwTB+/cn/OpXv8IvfvFLxBhxOV/x7V/+gu8//IB5njFOI6I1eQjRbJZYUgHWtbt6iZqh/p2N9PkNX/sggLwcVl499/vg2p8jETWw0JlcVSpiDY3V5a/rgcp7Dcb+0OSIsmycBcmWMWjgYVCgTruvAkO0Ltewsj+K9jn7a+ivQxvf7OdZrRWoFTWX3d8AaJUGwmZfaNNB7sFnEmiVRCDE5sOFbXxqRYzRJIiy3cukjUVEy4W1cgK7MlYRaWPKNvfXdUUuuT0XbeAwIthkWHMBrdp5diQvMy/WEE99BPUjtnt2gLy63auA98Qu66rNzQD18avgdr2iXG/GZENj4ihDWxPv3rRIh40Ro8aIqvkcAErbPibKVFUwNbf54ntLjBExJKSU8Pj4iJTUl5jnGUSE0+kA5KUlm1rCy2LQ3jY1DeuumZ/uQX3FwQbA+hzq7Y4veiezOOOxilijGJfPABKgQNqBQd7NVQwYFi1TjCQ4Pj5CjspAjEGrlbyiQH190uYqoolQYQaF2KKpIvskHNHWqBHowCn4VtkGyfbxjQWtAJ0x7l2Uardm9+tE53u29WNgsenJw+Mzitq8rRJEMsheB4OGFARhrU5rGISWuqodLc0fFmusQY7SwaMe9KiPfdsDqb5u+jkBoBFJ+vvqtX/v48H7n4sBZ/clna1xiWys0ZKV5TjfzphvK9ay4LZecbu+4HI8ghmQrNqBIQQkY2QWIgSCyj1tbjAg3kTJr7Fse7I129zvp44iBvXjSM/h4+J7Re9Pz/Pc3u8+ZIxxWy/SY0J738v3lns/ZzcP79ykDVz2x0kKEJuX61xF77PQExbsHbs9jVwzeTMPCkAzEMFQnj3fX0azX/cJ9vvjZwF0r51DRy3NkfAHeJ8NB8wVd6qmMQgsG9dnP/W8/n59bwi+LnSZBCJwICRWoz2Og4FdAu92OQTNDCCwlpwl1YlI1jkV0Ilca8H5/APm+YbL5Yrb7Yp5XnC9XnC93qwEwXV+7h1e7CaVA28+NgrQAZUKCMEAkK2kNwZ2mAqRyJzxom3ga0EgBRTyQK1JwT1A+NaDvTcY9wDovVH5KQf99TP3YKWf4F6e2b9uy0JslE57fmSArZppAIzp8IC1AikOeHr3Jb7+6pc4PT2Bqmaxbuulga7eScVL8XLNSEHr+/OibKMQGUNIIFsWVQhrVcZchVjJRfIQUC+FyS7ZmJsiVoH7OvN+f/RlMu0++9HgbeH7mO4CiDv2RR80AJszs4Eim17SluUXy7LYM/OSVbHMcnet27NlNdZF01W6CRDAmxNFVTspq0BreLPbjDP/dD148LwxDG
LS94UYUEx3pBobtohAjC4fLGsnpKLJ4zghjROmpyekYUIaBgwpASFgLRXz9YLL+QXrbVYqd14hVcHaBtSKIEVdm2oOtMxBrBSEamjNKpi0vVKMBI5szSEINSQUiQANQDpgODxhOL3DcHxCmA7gOIBjQAwBkYEUCYfEOEwBxwF4dwh4PDJOB8IwGGi0zR51JmCbkKg+31or8rJgWS+gsODEB0w0QEoG8gyRFZfzDT/++IJvf/89Pv9wA60jynzB9fI98voNDodP+Of/6p/hh+s7/PYPN5wvF6Q0YL5lXOdFO+jOK7bSD+iatQYjpaizxpZBFJEWFBYo21A3J72PyAbEsna7HVLCOI0YDo8I5kCv69oCAS9n/fz5MwA0jU5nKqt9CwrQmBB6BfD1L3+Fv/mb/wjP6wU/Xl9U48YDLdIiinVRIW9KExw8UnC7otTyhv5YtyZ9/+lLFTpnog/m33IM+nWva1C3e9cw866CQqRi7UQaxImx5GwN9CCPA1x6H3v2w2bfN9Dc5xag96HA895x7O0PsDVOuE8iqBmpnXO633O29/u+7WLx+/2jgZ/93tNdLeHOvrZgFCgiXSLAg5ouY87qVzShYysLEUELanyssukTsQVqzX4bgOjjo2xS0+jkPXOuv/ctMPPL3s8FDxIhfaZ1u+e3vvtY9//vbbePeb93/JyjB169C1w/f3V+7xk3ygClnd6bz//70t23WJjU7Yc/BZL1vsr93HQfs3/vfebdx/3+c4gcWN7A2fvxui8B7ucK+36BzVedbzcwBOOYMAwJIUZQsY6oIHBMSMMEOp+R189YbgvOL2cMKVljiAHMSUEHC3SXZcH55QWlrBjHYXf/KSUb2y2oc0CyjbGBR8ui6+P0oMnNZVW90nEIeD5fcLn9oY3P9XoFRQJVwm25aRApEbHGBjg1mRYRbTzFGsjrObTaspaNRZu65mnFKxVQm/RA79/0c8lLkPvn2MAzlX9vpbj9mr1/9rvn7vPKll810EJBFjKuQ9VYIllpJYC8LljXigwNRsmaOwlvSVcOsESeVin0HaL79VRrVf+qqJ+Vi7Ihq3WKbMCFrz3Qxq4mrQCptr8xmfQOEcga2vk8dSabS5L4/rkB7frapnlme1E2tpyvZ3+fshgDhAICjRiSBsvLsoBnKx22ph6q36fNPkquDTCvUFabPlt79pZocIqUJ65yrU1jGQAO0wQOAeuqlUsBWnJcK93BQQAg1qxDWY0hBozJmmZ4U7ig3W21AV/G5XJRTcaPP4KIcJiOqldnZcWnh0NrvNGstflILWnQ2c7dfNPZ38AKEWkdQ5v9tmdTROON+9gX/qkGLAFb9dn9OqkE7f5qum3ByAshCioiAipCzYhhVF3uqHFBLkuzKwDAxr6jUjTZGbW5W65l6+tHRpAgav6WrvXNfurl2z1UJZqklBCKANV8A9riExEFC9nJHrwl1RwgImgyWNl7gNfYa2lrRWXrxOmfTRs2UUWTRBBB7KRWopF4QLp2BMpWRUv+qS+o4VQA1dr8YO+Q3DODiZQ1dr+XCWS7X3Rx3S4W2/sdfgTN6FisqjZBG2rYC7yU3pMg9jxJCqhq9ZDUFeutAlWxhRC4tfgqHCAhYojKWGaybry7EJYQUoD39uqrI/3aPVGw2fTQ1ok+X22481O4g+9xvc3a+RXY8I+cM3KXsNS5aPe/8700xhQ1/q98jt1epC/xkQRI/10AeMMW1E4+JdzLl7y2Su3uHPAkT1Zg/4VNVuSnjp8F0DXjINg9wLYBNAf+9aXqQLrP7YvtLSovADjYZUwFMfIUE0JMSCFiGAJGyyS6rlTsMmIhBM1uMCGMyaioFefzGfM843JRnZDVdOUWF0TttHdEBIEJa9ZFsM8gbIvwfrLtwEnpIFUPAjzY83sWzbgwCDkvqHnFOE2YpgkgRr1lzKU2lPmvHffg3E/9/Wf/rf2uP2/3OsVM8WpStFr5Coh1EWXTSVPVTBAFcIgYDw/4+ukd3r37Emk4oFTBy8sZTBExMabjA7zkogU79tkE1WRbS4XkjBorYjpgnCICMYpUFEpADshcEIAGMixLwbzOqoVl4JIWi+h9Vrt9CqZXdbeo/bh3Cu8zQCLSBIY3poUHvtI5tFutPLCtt8BaguoOHYuy2lJMiJ65hs51f14VLk9pgVZ/7SLYaPhsoNtmfAKZMLyda1kyKAiSMZGClb2KMVw9Ww7RTY6EIe5Wx4Q0BHDSc2ZjhgqCwQes7LMwaMaOtC40jgMejw8YjwfEx0dw9JKXjHxbUNYZy+2C6/mMsswgKdrpB8qwRRWUmlFqRRpHoyR7UKvggYiKd6ZhApFAgpaAEldzxqt2C+IJpSYIItJw0mYQD+8RpiMQIiio1uEQCWMUTLHiODCejsBpYnz1yJhGtU8eLASyLGLS7lAkFU6NlFoVVMgZZZmBsGKdAcoF621GXq5gKrjNV1zPM/JSwBIhRXD9fAHRin/xL/4J/vW//mf4z/6L/xR/++uC8/I7/MPvv7X5BktqRFxvM0oulp3bnmUfoAtIaevWBcjnrTugtdS2PplZ9XuCChmfHh5APCAOAwDC5XLB8/MzzuczXv5v4t5tSZIkuQ48qmZ+iYjMquruGZDC/QF8A2U/H8KXFSGwJLhYEQgIgAsQmMZMd1VmZUS4u5npPqiqmblnVLMJGSFjpjqrIiPcze2il6OqR79+xevrawXmdp3j7KXKGABrx9qUCmIYMJ9OeCsbtpSwrhsIZA1CQjN+LUIstt8dADo6iWQ/K4F3ERTaGwi9IX40pvpMlqPzycxKhn0wxIpb+P5eNfJb4OuRM3ssTepBi/71LX30rWDDceyPdNrx+j045mBJX7rgOlxQ6hwf7/VL47Bfvgv/HF+9E3occynaidUvuctU6J6hOR7tusXA3EAtC8tfPRDloEEPOLjsdnC1BVfe648+K7P/TJXHUXk5ezDqWyDZo7U7Xnf/fgNr+++3tWjfz1bG/qhc8Tiu3mmBYAdgHsf0rfcfnbX+HPT390Y0/Wfr8zBrM+xfsJ08I89fu6AmtaADINhSAt1VzwxDRAwMsNIxxKzZdsxaQpVOG+53lXnn08nKUGcMw4gYNBMkxojb7YbX11fc73d8/PSMy+UMzxgiIgyjdtZcl0az0vY8Q2QzJ6nxfYK0hK0UJdtfF82OYg4q/zt7tt9zOSvpus6x2+3U7e892JZSRtoyhtFpNWKdQxHtyl74PaDWr29PF9NzgOlnUbPdnLbAs9gcqJym6Z0cqLYqE5hzA58tSFs6O82bFJHpYLbDYbEV6xyMOvb+Z1+e5S+fW1X2AAmhpAxaCdqBvZVDOvir89bLd4IYNx0XA+sOcq7PIDmdTpik0ego9Ys1Okj53dicy8yBaAfo/N+ewVKyljyLSJ0nl/ul6HrN82z7U5tG3G63Sj0zDIPdX6CN/oo5zAp8hBhR1rVmCI0h4PuPn/Dhu0/IRXB9/Yrr2xsAVB5wlUPvM0+GYcTT05PdMyEntQHf3t4AqCNfs7whSGZzLPe1zgUAfPj0HT5+/GhZkRkjE3KRSoNxf
D3Sz34PzZwND/d/r0OPOvEI2j2yPzShQINjxcvnjGSMSJuAHSjADRAuFotVqpplWRQE6HjkSxGUvGHLqSYrODijzxpr10vvxNz7L+4HkQAhRmQqcIq31ngK9fsBLasqBA3WZyngUjAZFYqQBzH39+hpl5yDXqQFQ4s9b6lzuV87peRpNliVHSLa4IeoPllvc1Ubz85lML+p78p5VDvHs3sMnPZ7Qp+wyYNqR/kY3z2HBXWYrDS9+RkK3BUdo3/Hbc1Sainxe1uRFMwOmmzjPMKgAi5RbeWqb7nuDZXHACS/yxw76mintPnWawgDinFfbltCshL/djascVNnC+nZM/oeyapIusBO/8eTxNQu97OoFXcEARWzCYpmVLPsewm4fD3qBjIOcdtUupoHX8L9qV96/asy6IgJVFPHNUqk3enaBleQSxFEzzCjOkgdc+gI6puhp1GtGCMgGTESRuNT0D/AwJqOH0hTsccY7JDogm3bhvt6x/3rimVbq+LwkgIH5KoxaYpZRCCVJw676J6/jpu4V9B7h8P+I7phBC2d2Y88Q7AsN0zDiG1dMA0B/+e///f40z/9U/z13/4d/sP/9edYvrzAu9D565fS+g8L12GE752unTHb/b6uYXe89DuNrBOgarhIjS71LlVAgXWjgh5cBYcYz08f8fH7H/D9d78BxQhGQLbMpxD1wBYRMzIT1qSp+mMYq5G2bRtWS8kfBm3ZvjMQYYCQHeBSNIopEO1iyQOkEDK152GL4IhdYxpijTjU63Zr7MahG5U9l9QQo3Hd2XcCWWMZqnwC67LtDEruUowrzwtHjGacRsvm1Pnk6hzWA4Xe4AaI3PA2UHwnC8iiQQHeQp3tj693GENtQFEAFXoiTq2CXBTIHscRgSZNn6aCOMw4TWcMo2DZ7ljWBNiZlaz8BJFGgKMqR9bmLfN8xuXpGU+XC8Z5xhYCMgQ56Xrf395wv31FWu8oKYFy0rJS9qgeAErwlGIitizKgpQ3BDjfo0UM2Usl1KFIqUDj5wDCiPtGoDhjvnzA6eMnnJ8/IY4TMghpu2Mmzex7mghPM+MyEJ5GwscL4XJinCeAWQyQr0tUT5BIBkPHXqIgifLAqKEcwZHx+uUNVK6gnPD19QsYCU/PJ3z69IyPn57x+pLw5effY01XPJ8ZT08jvv/hjDhFcCwQ1kgrRHBbV+QEjCMpr1sRGBuern9Wk0DniZAg8AYAuvd9f6vnFDlo5q85ZNN0weVyUaXLAXEc1dC+vuHLly94eXnB7XbTszVNdZ9VTp1OtsYYESjiy+urZjMz4+eXL/jdjz/iVlZ8+fwZn6+vNbsnhIBQMphJybHtvBYpdc6rg9KBHDlnoBSNlHbZSf56FPUEWrr+twAi1S8qfytIQeqU+GeSAR+tpNUz0tVJIjiZc6gbpzcOnMevB0HUwRr1WtQMpN4wOAJrj4APMgM+BELOqHN8NHL667X7WOBsCDX6eAQLKyDga22lbv4s7uCxdJn63T120etujfpsQHRNe2KX4dODqzr/zXCs4FhXRnd0oL71/A+zHLpn7/fIt97v10mk1PLzPsPMZf9Rf7/X4e21A7po302+ncHmXPT7oUgzbvv18X8f738E/frx+xl8BG76vNQsQVId1Wco9XPclxz6NXzNAEBKxrald4bxca59LvoOonofLfEMkbUEEgX3bUUWbYwzy0nLw+BlgRFxnKy5gJZJ3t40GKGZPoJPH7/DMMxgjljX9iy3m4J5KW14erpgHAeIqM0yTSOmcdKqgvsd25bQywPPQvAzo8+SQGBM44d2rsgD3q00ycEZlb8AQe0BTXRnhMGCc3W/aJZ8KTr/wzBhGgcINJBcSjKZrKW2ORfloRKpe+ZYOfPeB+AKQvR2uAN1fg112rbd2rosCyGgZOAudwWEQsSW1jpP2bIXct6U7gYB4RQ0SJMzIBFbXq3T5N4J7s+BlizvOabsYUBCCOBKERGHCCre5KPJGj0XXUMTJoxhNEBc7ZhsnVNdBogocD8P0yE44gCFcgemuAfyezncA0vHtWDWs7eudyidBeN8PtfzvG3KabYsW23+9PR0xrqmynG4bhuydZQlaZnQyXSug4FqH0fM84zz5Yynp2eEoCXjb6+vSOuGL68vyDlV2e0yUDNYCog0E/LDx6daVrhtGz58+KDZr8u28/9SWTtgRucjpYSXlxcQES6XC0Ig3NcNMTKGgd7pkKM94Ocv54xcNkCAYZh2eu24Do/8t+qfiSg3nP5j719CS1R5iEjQ7s5qvyogwMzGkwUICdacICkr1zkT2LJhPVtw2zaAjerD8B9m796Jw36HnTtGCEOnN0sF8t3n0o6hyj0XQgCyBV9lg6eDCQAxrmci47tLBTFmbH72AJRcKiecz7UYJYBbQN5s0jsf52JUE3BQMu50Rc6b+orVxkDFGh3AYmKMkdtzFlEO5k7eOFinZcGlgok94Obnq1Y+dfZQD9a28yiWMEiWkNIDc6qXGcpXyih2xgBCaeCYaPaqFAJBy1XdXi9FILnUOXF9oEEeLY2XUjSzEn5+WwYkcu4ASS3lLMpYB5jnzIEBeg8y++tbwbEqa0FmN7Nykbu9goZh6DX1ZxbVZ8uyoKRUk24oDAgBO4Btl41tnKcK1OkYvexXqNR0U6+obGfV7Zutjlt9UQ0QacKwB+U1uKe+oCfnbPil168C6CrJfE8I3228Wq7KD4w0GDjfKwG/LhOItUQMRIisJW7jOCJGxnyKemA6I19bJmunTu1SJFi3uxLvZuNxWlbc1w0v1xty9tbtHtVo11qWpXtKGx+hCu7GJ9AMCKL+CfagXP1rF7UnUK0f9wNrWC2WZcWWMyJnzPOMeRjx8+fP+Ju/+Tv88+9+VCDxgQNYR/zgvT/ei1G7EXzjNiFop1pNMdeySaZggCQwzmcsyx3rWjDOjE/ffcLH737Ax4/fYZovGj01cM6JZ1WwqWGyrnfEqNG+ELR0Kedco1+D1dEPIRjPlUW7rSX9uqy1RFgfiSxTyNaTHcB01LsJW1VkAST7KGm/3mqk9KUa3fdjQNqUR6tfv6Mx9cip8p/jPO4ABS6i5Xd2/ipoWyMusIyPplw88lUBuMM9/X51jEQgBIAtOkJAElFATLLOoWgmXQgqNDkMABMykjZeAKEIG8eIpcxzRBFrNy0BEgaAI2IccTpdcDqdcDqdMEwjYgjGFRiwrQvevr5aSesNJW+awi25csV5hb+IIAsAIWSQAol25gmkqoN0b4joGeSofCKa2VeQxaYzjOD4jPH8PS7PnzCcTyjESFIwzhM+jGeMAZhCwWUUfHcZ8NvnCR/PjDkQmIvKN4/8+Qp1IGkghmehaoBBy/AzJyAz8prBYvORrtoIY/uKlK/qCHJGzlfk8oYwCpJs+Id/+BfkkvC737/gb/95wO9//zOubzc18rMq/JQ0u7VGIX1f94YJSc2u09JQA+jIArUEpLzifD7j4/MzpmmElxGFMGA6nZEL8GKg2pcvX+Ck5O5EevbEEcAB1Kh0gZpKwdevr/jdP/8zZggwMbak5NGb8VCyldkq
v6RmLWomaXsdm0FYmAH9ovj7dY26LDn/+T7S2F77iBh9U3YCzbjQaL0DTEUB3RC15PawRv1c9XO3z8Tay5lHxlFvCDan7EibIDYH+y54RFQdkT1wYrMpRjDdleP381JlEDXdqKTwNq8WKKnWshupvwBA+Xt7GaoBBZ+LHZh6WMPj/vP3IPu90cvPfv/6vtiNsb+u6f26v/pr+T3erdN7AGPv1LdAzQ6oPgCpx7npAYb9nO7n2TO5cmnj6sGIR2Pb68psgaI9YOevYwbhu0wUKY/XBNjN9fFnex7WJkiyB1T7ZzmC7N6kIMSoHQXFAUiVhSUXrCXhq2hm4XQ6AdAsG5Wvet1xHHA5X4Ccsa7KEXa7LpjGBSGMFdhJKeF0OoOZlUs1Zdzvi43FADfRgJg32AlBg52+H5uT0kBqpRzouOpY99kR7A0d+OU8bZvxg9G2aYDl7VqbSng2mpZ3DtVhIyPfDp29Ex1cQDsb/f293OxRNmjNEKP2bL0c6TPIKqDRgT7MRq2j3r+dJQ+N+33QnClpoAQEVoUjKJK0eVLZ8x9qAoEDF+9lk98/kgUpOlmuMglI3bntu6q35/cMT93LLof7Z1UevdAybOz5Ag+IUTCCsK0b1o5iogexswW5gXZPuIMdVK/qkd/LEc9oVDoTC1ATEAcNtpzPZ2TrIH+73bCuCyRn5OIZK5pVFa3TLEBY1hWfX16AEHA+XxA54MPzs2aPpg3LouT3Xlata+Ljt0qrEDBYV9N5nsEUDSTWktllWXC93XBblMbodr13++gYgFGQvxQvh3cHuwF7rfzfbee9Lu2DbnAQBp1uoX0AyGWN7v1uT2kGia6unwEACNqBOEMDwexdf+FZY0WrNqKXlwJUrCzaAoSe0MCigI0ycGtWmJSsBnHlxDbQT1wG9zLbK1QU5CDen/dSAFi3Z2IBBYIU3smFXLTDKFCQhRHcvgDtz3wpSKVgjMNOLjj4n1JCygVb1jkfTdZ5xrX61itARTMQe67tur77FMR+3V2WPvo9N26u+nr0Wd8nj4J1O50psL1g627+HKSnaWh2gQc31MnZ23y+r338KSUNkFqSkzf+0EaL/ZgFXrbdrofDq+iYQDZUa0737nPt1Qf4+rmpgbzNskPR6GqIjHKhWw8xPS3kgaNBA1q4WUJPrnuDWWlnxjiY/Ghr5uCfPCXRrwAAIABJREFUZABSwCza+CZpMECfrRgdTmnnmoY2BpitQJaYJWIAu8sQnRsi2nWpf/T61SWuml3U3hN0WQTSNsdOwIgpNArmCLOmCULRxcj6/sABPEQMQTNpxqjE8fM4ALXkSKDet5KTMwNv16/IZcP9esP1dsNqRkTeEtaUkSWY4eTZGQTfByoQeoPAapDFJlAMDMF+4/Tz+Qi0ccfA3xckeFqqyeNakhdCQBHvxgWs6w1/8X//JYj+H7xdrxifP+yMr/716L0//qsrbQZ26w8A2+oC3sdjmR5az4r7siHGGefLCU8fPmg3sU/fYYgjtlSq0yKdIS5UWjo5aRvrYB1btnXVQ1ZIy51DxDAGjJaBISTYSgJyQYbyTpROYZJlbCqhqCCMXv/ePtP+EJBzBXTrNR44GJ4B4vX3fi+mBIRGjN9nDQCP17B3UnpDyiM3akgevicqmOo56dZPz2JX/sHKaULogEtbXIEaXFraoyBdLtYkpQisNrVyCoQwWKdRdajZOp0RRwgRtpwhpPyBQhFCQTMJeUAcZoRhxDjOuFwuOJ2VC0R5uTJSSVjuyjX39eULbtc3SN4UHAKsvbWT2zq/iYOXhAKuoDyJAseeUEN2jS1tYAooCChUkApQEE0unXG+/IDp6Qecnp60DMaaacxjxGVinAfCeWB8Okd8/2HE9xfGOWr3LF2oAoKgGITohcwuLrRkWABhMAs4RkjRlG5kwf1aEDlijAEL3wEkpHTDst5RaMayfUGiV9B4ByeVeX/4ecXL18/4+3+84ef7BT99IdzXBAiBWTlh1jUD0kqufc+UOn8eIWStvk0FJSc9j8xGJs748PyEYdBuWxQipumEwVLc1yXhxz/8AZ+/fLEMkq06Tt4xrjfMjg4aQFiWm2XcZYzTgPP5hGmasFJGKUnBWuiaDIEQRAF6qrPd4Pfe6auGFDFqK12dgOak+ak4nNFHRpS//+49Mq/PWiI098lumXVDcrCyACEIsskjc9ag8tENxtZx9FHZDODNZURIS31FdoBNP8/9+8f50c96pouO2sH+GId33EvukGonNuObDBXxrePss82cm88B4Ao+FY1kagOonlqiBxDNKdo9v5W5+ycsi+B/mG0GqtmTzaHqjb/35Qn9tXz9H4GeJKjZD+Syh3QvRAM3vISU2cqtQbVzcg/CuR2zl9vvjX9/r1/rI4B7BBPbMvXX0kwLb2bS2zv9nukB4/53xWysh3Pue4D3zcKO4+w/7+8fr/Nobeq8dGM+fq9/lh74BIBlXc1Ys0ZJNjeeFVJMl6eUEMcJIEbK1liLtDT5crkgMuP19RXX6w2vry/IuWBdN5xOp+qEhMAoRZ2FXLRkVRvnDJhmJYlXwGuo2bGegaXjzbszCmhWRIzRgsyeGd8IxWG2gAffQ3Gen6L6tLRsxzhExHHQ82pnvNnOPtl2jjS812+outYAauZbL4P6dez/LrLnXu73c5/p6ufbQV637dm4jo9Au487BDaOXEWhmAgsZI2lBgTRDPEttdJiCEN5pSM4vE9I6B1JBlVeU6df0CIxtZFjKVi31XyUguzlqMWrQAg5JcRomfhEEJdT3oDAHHcyZx3SHF6fixAIsZB9FoDkun5t/mEhTM1+gug1p9Ncz4hn+wdmeMmqz0sujcMZvn5gvF1vGE8z1nVB2RJy3oxe6K62sDUQhBDStuH19RUctWvucLlocwwRTOOIyAGLla3RoDZWlbksAEm1sdgpiYqCx94UwG31METc7wtKlprR69dKyfjvAln2EMGb5B1BGw3Miwl682c6h0nY/L4u2EKHvdzb4gK38c10oHbIVJ/t91tBQPbWpzD5GZSzr4LipFyu7tei+gldNqX7rDXoFiy4FmsGLXc6ET7aTi8AvS5q43TARySDSqtS0PJxrutvZi+ARrMjxrXm3G+eHVfthTjUu6kt0esSPc8+rgqkdmPVfW+KuVppHfjTrfUOECpi4GW7vq6H23f6fqB959Z+7Y72g7+3C1rtfL3OxrTL6ZkrDfyH1CzK45q0tWo2wJpXhBLAYnMLrZwrkpXmI7wvQZX95fcv32M2Ez3GctTPbQ0eJyBphWa/3/Usa+ANphM1kzhL01etBBYYnkettjNw3kE6GF3TOJ/erUMuBZKtPDoX5LyBKSEXbUYCKnbcrXyfih0nS0Ahy/AzmgST0FUF6fxJ9ZV/6fWrALo697vDuX9f/3L4RSngyJiGAcMcMYYJCMDAAxCA0zCDIiHyoD9J2xoGBAgLluVqGXNBt00x3ot0RykFLy+fLX15xbatSCnXjSoFlfwS3rCgdhdlABkhDPZ+MJPYulFmOxwU8EuvbxnG3czsf2f/0Wia4lgxjKboMkIMuN1
XlFTAU8R9XdB3Bfrf8+o55fa/cUGu0USLsJiAJWaM84jvvv8B33//A+b5pM+KgPuSsWxruxCRcoBF1qYZpEBqtA4ybgjnpMJ3GmbNsiSPZmr0Nqei7ZKlGW9MHehmUQIpKjTDOBgAd3Cwzejxf3/LYdfMvtClvnecAgSM42RNG3R8aii0UlgHjfeOXZcynTVjqbiBLabYwLuzKLLL0aqrRiGAwFVgHUnmWymJC41OOYHBgVDMaSxWIqORCyMwZSUQzaVAOKgyZM2WAxu4QwBRgEJVA+IQMc5PmE5nnOYzeBgRo57/XAQJCr5ISXj5/Bn39Ya0XEElabMVxW1BYsTEsPsXAaTNIZhRtoxCCoQFM+CYrGORCEqwiWICJAIUEOOA+XzCMD/j9N1vQOMZwzBgHCOmccAYgIEKoiz47nzCd08nfPfEeJ4JIwGUVwUBazyDELQXpUodAvo0aTaFSKwddD0qvSAgL4JAgpIXpPUOSIKUDW9vG/LbFb/7/e9wW6+QsOFebsjbBpYZy9szfvzDFVchlHgB06BgJOve37aMIQZ41B7uVHf7SU1F6rg7PDqtBOnDEPH8/KzyrAgIA2IYIQBeXl/x008/4cd/+Zdanj/Paui7g9YD1b1jX8+CdUdKKUFyxvl0wr/9N/8G/+5Pfoufrp/x5f6i544ITEX5SGXTzrtpg+QJ6ACnIzhHgIHVZhybD/4tu6OXDz7GY4MA/9k7mEeh2etKFSvtHDvlgwMGoVro7WTvHcEjf00HAB0MonfzezAEHwE5blocX2UX9Gpj7D8p0HNZGMbl4lcTuLbVmJuVwkDUYLfub+WBFdjLvH78/dz3zxK6bIX+M0y0+2xPhF2vyaxBjw64OTojvRPcv1fnF+o45sN3+5+PDNPdZMp+3Y8Gbc+d69lznu3zrevv5qID2Pbgh8CK3XcORv+zB9iO93FnSLrf9UDYI7Ds/dygchZ96zuP9nm9VlFOz+P57/dNn8Hgr1K0tLQAII6gDGTWznlEnmVVqsF/OgvGedaS/wr0CsABp9Op8ufcb3crY1XKhvP5XHm6ci7gEBAHtVmXZVEyd9KGOYE9CKhZVCFIdTxUhOuahRgwjB4o7Z1SggIMXta+tzUAzz5JlVeKqGWLqY3jAXkB5daspVh2AJDt7Cq5fCa28vjwcH/4c/r1+7JrraZoNspxjd05a5lU+1J/d1bJonLaBbIvxVcus3meMI2jBnqIwUJWnTFoTgoVxLQixrFlW1hGGzNhHGdkaxRC5EEPmG7NO/+tVIdVQLDSYYmAzbcG7PVCRUTLbbuzrEkSmuQwcESx8lFtHOZ6tDubpFQWzIx5nncyw3VwT53TB6h1kpQHU/0WrSiJrJzCbjOrLtVsL89W3lJCyhlrSkqTMQ24lGI+XMa2LLhev+J+u+N2vQJMmlFYBGXbcFvuuG8rxm20ctg+Exgg3mdTqW2t9/d94M048uZ7ou2jYRwxk4Jr49Boc7T0tXXfJtIs1hA8CO/756gTVdf7Hu35ylOH5x1lVF1TNNnoPyvgHDWzp/KHkeswlzMGYmWlM9FgtO6TQgxd3owsGTkVsGgn4zFE7ei6k5emqzTjAhIsGGtVD2xgAllTJj+n+1cP/IkCsAYYKcWPA3Quh6V+qwcZiQMiD0DZ+0vvwJTUMpzdd/FrRA6IUEAvr5vteb1WjAEhjljXBei+ewTo1A5tpoi/39ZqH4DaZT0CRqOzBza/Za/UjFhunayjn0930vzCdU91vpv7b1wVlX3VMnils2PqJcUa1HV2js3HbpfTPpjf7n/EhHxv679K2SekHM9An/TyzkZFwDBMj+9p5dFx0Exmz7T27O+clIZiGDVbfZ7nndxLJls9Y76f4tiNQ7Ig5Q15SwilNYBhAQSWlVepavYluk6f5RJjB9LXOTlWrexfvwqgO2Y3CPRA1Lp6dwrQTb4IAgFzDDjPI06XE8Y4GvHxCI6aSKuGOwMs5qwW7cyYLY1w01bZKa1YVyMiXVaktNXJ1i5yAFH3OAxsyR0/z5jQLCct3/S0d1PgcA41rpv3uCl+6d+PXu++U3/qomwpI84jkDPSlhCgGYQhCuI04vV2x046/E/c+4/z+uXN0wwSBWNKUeU0T2eM8wm//T/+LZ4+fMI0nTTFfNmUrJIDYpywrIsaSsyIw4AQDH2xSG8gqlHlUhQwjWFQwNeyTojIeN6sS6MR2nPQjq4uML00g4hQvKuKofsMi2wSYHAYAOVboENabw/WaVe2rqTC0v0BOxNgLbWsn4/wspY9KT7VP2pEGtmpM6tqIX0zoEyBvAdvLWPOnBsQTJCY8OawF4jFRIfY89duZ6ZUgvI8IhCoaNfRAuVWAZOm1wPY1BtXbiMKAEcQsw47MIqwRo1pwnS+4Pz0AafzE+b5pMZoKcglGci+IpcNkhNu11dISQgooGDp90UN6WCREzVmgFSacmIeNMrLmrvGROCB65wk60Qah6jluSGCEBB4wDifcXn+gOnyjDCdITFqIwgGzkFwmhjnMWJixr/7fsbHc8BlACJpY3cnByXVGhXjsNGZrNtn4gKhZqJzjBhYO5cNIGC74e3rG67Xn5DLgpQTfv78gi9fb/jDTy94u234utxwT4tyQZYZeYl4u4+4y4bhoh2qtFwgaMmqlUBBLBIopuSrsatjEXOSAml25DAoT8w8j8YFBIzThGk6gSnger3h558/4+efv+Dt7StEAC+z6oE5ZuUS8uj7I/BDpGAY9VylJYEESEmJ0tO6QVJCZAIogo0jh4pl+sUCtqyXykHRAycdIAMDUirgdDA4HprjvUF2AH/2spmMn9PgTv88BzvPzcGFAW4xksmItRKUtw6lQDNCG3eWZzCTsUKT/U7t9DYmB1LcIa6OsLQsoqMBRdT2BLNn5xVsm5fnWpZKaTPlXaLRydU6DiIl7icgpc34tBSc5Oid1gE2Z9NBgN4QPI73UaBMM1Da3/tMv4Z92Z4T1FLbup5VWVO1NnuAqZ+j/rXbD/ZerzMejd/3Tn9NgVTguN+TPch0BLt6J+/R/eq+ZbO96iP6Xt6PX0SQtqyAy2FvH4Gxfhz+dyYGxX1X4l2ZMVqTBn8ej3C7XQE84uZ5dNber4f4OLqsTaAvC2pz0wOcgILLm2UlSxEFCZgQAyuHE8XaJXjbVoAZwzBpyShpANiBGw1O6LlYF62Y0C7aC3IudrZYwUDyUhhGyQm32w3jMIJGRgjt+frMMx3DglK0tGcYtPFO2gpCGLrSHgIQzBRwmZBAiBqQ44BhIDCpzVxKxu1+x31ZsKwJW8o7moC6fmQWNInhR1zdu1K7wVOVPf1eOZaJ+1ppB3o5No5+dwb79/t9puXIjMUykl3+Etrf2RophKinQTs2ktE5oPomMY4mz1wmFXhjr1IE26b7QAGcaKWoBTlL5R2GwLq2w9bCQBxmBCmI2falAYsQtXeEMyBq4wBkdoJnhA3goDRAOryo3exJ6YOYWStV6j0b92ivj4/2bdUPEKSSFKgDW7f6oPIyq2U7T5PKJ6htmLJ2npWs+nkogqBRPBunoJxmTKcJ9/
sd4Uu0Ut9Ss9VKKbjfbmABQlFy+5wSVJm2LGkiIERLDOjOuxgYQWWvN/zZBmhmYCkK8PQA9Zb3MnX0bEg+crD62TcORHYw3Kk21NeUrXHPPQJpjlnrrpN7+oj+lUUq9QUAFGHkkq00FNqYBGR7MZjhKRVo4lK6M+w2aQfMEOk5Biv/p8kNBxKOKu8YBHJbstqRiuSqzc6WZe/+huloLVfNlZO3jor6+1pWbNUDZmMIduMi5z5zv57IAvi5ckG6/eSNY5peMgY1krqHGI5nwG+wW686NnRe3ME2fKS3/Pe93HNwrj1vSyrqCz3aWLrgC1lZLav/BdYmdNLNj1bIUAUVBfqAAqll/CICBLY9oiX+JJ7YBNSupdTOFIDKq94GZ7pWvm3jAO+bNO1sMBHdw7BmdUAFwfxay7YaoK7zuaatUREQw6kAtAKKILS3AVLH8VukaPIO3H7RAGPOG/KWKwemYlSaESoUAMmW7dc6P+u5BrasmbeOKZB7gb6v8cuvXw3Q9S+3Wev7TG5Hou2igvP5hMv5jKfnC86nE0KMyhllhlBrCZ4hqSAjVSPNN8tqhH/LslgXKxWixTZfzjqpmi20j/b6JMEOHRDQ0HjAS4n6Z1JnmSBkhhmwO4z9vx9tuN081Qu3eetfbpSxaJfLJXkJS0a5rwhDqCm0/Rr8rwHoqPvTv9o4dK4VTAhhwDSe8PT0jE+fPuH8/AHThw8ABaSsHYfYM81EwSclpxSEqJEqLRloXARp88iscl7EOCoYBi0L8uYBzYhTB5tJAaFAXBXZOxBATPah4wIKriwDAkS7tKF95wgiHJ0EBSQsAsmtlLZ3SlwQ94amv7+PJsAcs8afVMgjHvozxrEbC6pCc7AnY99MhI2vxO+TtlIjtDolHUBCDup5t8aojWGIQEUNBTauOGRoZh1H5c4KDCEGh9HuN0IoIAwTLk8fcX56xjhpiU+SUks8lnXFutywbQuQF+UWQwGkIJXUZUMQQtD0+5S11j+IqJIKESGOCCHiNA/KvxEsOwnZSsoV6NGy16DPFiaMwwnz6Rmn0ydM8wUlEHgImMeAKTLmoeDDHPDDB82a+3gyPjYR44VwgA4qO6jqMTtHbA5CJxZM8br+FTMcwmRcbcsN99sVRa4IQTDEASUFvF2BXGbc1g3XJVmqP+H2kpDuM0An5PwFshYrvTJbKavz1SK1esZEvBOujlV/CiIxYmTEacA0T5jnWcnQY0QctOwpbRlfv77gp5++4PPnF6yLZhHO86QlLdtaM+ace6kRkz8GOQDBcrtjud+QS0Ikxv16w8vnn/F2f0VOGzSRk4zLFCArKWYCYmBs4dEZNeJdaV1IcTC4+pdzTBzPfAOn+jG3v/cyY3c9opqR0BvlvXxwI1WNx9236zUc+ASaAd3KZNp+87k8AkJ95lNvFLaIeANq9/dvTrCXulpFVPeMMMNKzKFCU35134s6p8ZTxRb19WiuOv5hF/Q7yuHjHB+BqtCRej/SmfU6Xpbbr7MYqNhd/5fu2b+avDd+S9lnSvRjfLQ/3Vmq98t7IMmvdSwv7cGto47xaxM1YfPeWVTd585bzhlb16WsN6T3GSzYXcf/TqTuUv/M7ng+arJyPEN9DP8dgPnAHutfKs+6UjdqdlQP0B3XrIF5rqvVpkBJEANKCisA5dxfORek2w3rljDPGrXX86VAr0bprTMqbljXDW9vb1iWBfOsFA/TZKU6RbO0pmlCzox1u3fnnaB0zWrX+jorEJi1gkQSRAyED4xSAnIqEOT6vePZllIq/64CTKINtEpGWbvudxAAfUMPrT4oosALsWbwsYidWkYOamj12Vn9mWwA47br/jkMA4gJuaRf3CPOAdbbUO5cl0poDmh6kdqSIqXKlXVbsK4jGIQYIpCBZV0QSLm3QlBbTq/dmpARmY+y4+SqqL7ZYQFAUUCcSAPBRfdTCBrE9OYcIs2y1v2ZISkhbUDJCQW+PpZAQAATWTMhXVPNc1BnWmlIGNM0VDDO58rPb99UzWWuyxF1ZtWmEdIqhBIiCmtWYOmaXBRR7l/Pti+iQRlmViBMCtK2IaUNRQqGIeISlXf46ekJ93XBfdGuoTmb3I1adzCEUG0okU5u5oxinYaPANw4jihQ22ZArBmhTc/q3ISgVToOvJVSkPpsPQBcM+P28tZt/Zr1WdsUlA5cy2AeHsrIPvOzB+n8M1UOxX0Glc9vHYfNy14+um2j/MFsJaqRoGdTspU6FVD2IJVUYC+S2g4ZgnvKkEGpUUrRqhWdK/3ezgYq0sBvDnb/FbqzPYtWNNhhjRYKBJK1lDv5vEmxzOvSyEEI3fM1P4s6H17ELRTnTRaslu0/WLboaNmRRQrKtqk9Lpr9JCUjO1bQ6cad39fZf0SE6DQdnW5+hAvQ4Xvf+l39nojBOgY297ag7xOgZiSCNKtbMQUy55btfDhIjE6O26yZPVoBUwICrDQbpEkR4tcwX8G+0e/nCj25mfcrMIp+Xo86QeV2MJuGGupAaPsIqHab6tc9v2ocB3DZ212g/bmdqDX/SAbQ9ePIKaHkAXnILZiUNwPRk1aDZAKKBhfE/ECB+yaD7nF5Px9E1NbvG69f18W14uw2sfbANSJpSr9XxIGB56cnPJ1OeDJ+KbEJhR3E0eqFPSUxl6JR9XXDmjPuN+UcW9OmPG3imW7BQB4XVlT5JnJxLgFg4BGCXOuGGwGipenbgikGJICQkXGSlu/JVhf3aEQD+031yHCsjojNoX+2YZiM+31VwTHN7V6i6fDZaM6PZMnHspr/ta+WgQCoohvGGU+XZ3x4/oQPHz7hcnlCnGa8bRlbXiDQLlohdiUSRDidTgCyRp9YI2Mi2hlGPE2dCJGVnzCGaMgz7TrLSbGAAVl5AQyICFJBVo0WbHYgHMW2QxLMAKM+utuXhujrEUDXgA7jbeiFNPXlVHuerdq9CnuF7PcppYCDHfgOxIGIdjPKWaOn7sZQU2RVyPG+Ax5x2Dks3r1VjNjaDT+tz9fsViFWXW4OJ5kCiATloFNLGKCg6xPUEclSjKh4RpxOGIYJ4+mM0/kDwjgqR51lKSzLHcvtivtyRd7u1ghiw0zaqTVn7QI2hDa3Ii0zVglwAxBGxHFGnCYMYcBpjhgCI0YgDgxGRs6rdfNakFJGASOECcNwxjR/wHx6xjRrJ7Awafeu0xjxNEd8OA/4dAn49ER4sqw5C6yrgRaoCmI3Ft7h2xD7v6+bra80tUkkEBYQLcB4hYSvyPkFaROsi+B6i1iuE5Y7m4Gn/AgJGZkjEg+2JhPuy1eUrLydRaiW2+VcDOhtRp1AqpIlIkzjgBAipmnAOE+YJs2c48j14V5eXvGHnz/jy5dXLHd1Djkq8XPKWZ2LUnZK0TlejuDE8bWs2nVNQWJ3qDasVq4AoRp1jEG5TCOpMVNtiAeXLxbV9XGxy+/DOfylc+/vHz/3re+pw2PZ59KBc4friQjAuqdR3NBHczTr+lggp1jHX5tfdRqldtflQDXIcwRE+oyhHrhwh8TH7U5QI7ndNyNwmbN3ntvcu9xCldeN07PvauscR
m6g9tHtXuf1oOZDmex7mXoeTx1Qv7J1nRlQ/ryOa5Qtu7+7z+47j65zeCnR8mMD3B3CXLJRObA6DbkFPf3VA0p7J7GBzF6mVcHJQ+lanRsxzuDybQehraGeox4A8fH0Y5DDHO3W0G7T67sjMH8MVPXv9yeqt8Oas/0YnAPQSnXkPcfcMWrv46pZmUDt6BaYAQrVKRVRHrHAuqNyLthyBm3aLEsECEZG7lkaMRJOp9ki6W+4Xu9YljtyTirbaK78g1SpDgRF1MFft9V0CyFGejfn46hd6tZtQckZP/zmN5jnC37+6StKEaS0apYqa2dzoGXAqo3BFqwK5rTq/oxDQMlaChRDRPAyV8DsulAz5ur2EaBAGwyEEGtAsV/jBvLH6vj0pZzBAu6eZd1/r98v/d8f/6mLbB5tcyjdNitFACkK6FmmUYgM8uxggQZb3YZH0M7x7M8T25koZMFLVpvUHfcQqhMMoJa+gdSW1aok+xNYuTAI2iAqbwpGMaNkQpKyk+Ni1gSRcw3CmhtkELv88HMKk+c6C94Vt5f/LpcEajcpAYFmzpGtt/GM1OYTnvlHpN3QtYKBzZaEdnO8C0pOoBAwWDD8zBdM9wWnddPnUjcPgZXEfRQgb9ZltAPjREQJy8I+Q3oYBgzjoPbQZt3ZSatNmtzS/SkiiGHoZNVergZmSNoD+TtdwppYoPPnnKypkzfarMPlqJrnVIO4Oq4Dr6h/7iBzxWQRy34OcrLS6OKOgsqtAM0QLdYoJ7AG7lksC41Jg98oyCnZbaXKIBFrDif7AE8/DyJUfaYKckmuwY3K6w49V/4dc6+0iUTRzL9e5xcpIGhQPlDUyhzsX1TH2e/tpg+ycWlqh9uMKNYp2QA6TeLwBliauKMq8b2er+eW9Oz3cixYkxuItPUyU40O1ElHHXsMPLwDmGIEVU+hO7QwOWY+HlWfnBoXuWElZEEVsX/ntsVApIkWxfwNwM36HoBjs5ocfNL9f1C53VWxe07PFPumjbTbU+0RRfS5s/tTnjnnVUm8X5ucue2Dwz7ZzW0NUuv8eYOmLASyuW+AmgBkVAQclSNcBDCe8Oy4Vd6wLSuQSHlESQNeFBgTRWTRRne6tnVktqZ/BIAukJZEFG/dq2Yn4uDGYqmGXIxGZhu0VfU0TxhmbT2fUsJmTkUxQ1055Cw7zsA55fbI2oGnV+5OCm9OXxwm3YDGLcXEoCGagNUOkJqWrqnHtsIKwLUd0jaKqHNDwM6ArRvmF5zJX3r5FnNjETAHeBpwvV4hYkSlOWnKdVDl9sj4bAP6FTfuHMBH16lCzT4LdwIrstCdmHe10oTT+Yw4jDhfnvHxwyd8eP6IaT6jCPD1egXiCLG2y0TF2mRnxEEJ3wMBuXir4cbPkY2EWMl4GYFinTstmSAwD5jGaIoJgCsEEWwpqdLxzlji5R0WkyAAhSGFK/AkaMTc2t0og8bMzdNPAAAgAElEQVTJMnP0eb3kps7AwbF24ZWzgn8ibog158kziTTa58K+i0obYCMiZvgqIBaI1NgAULgYb4ejQPo8CFLLX4kIcRjNiTfi+b7MTIwjiDSzDMn2AbNmEgZrce08cta5lSjUTEOOkxn3alC1jAKdA4ozMD1hPJ0r59w0T8gA1rQa78cVt+sr3t5esS03iCQEKhiYkLYVlNXgYYusg4yrzvjnBAo8UpwQxhnTfME0nzAMI0iSgb/Kp8chgiQic4DEAbQpABnDjOn0hPP8jGm+aKkSC84nwcAbTiPh+8uE334f8OmkHUJFm1YjMkCBQIhWjp2VU48ZXdzn/bklA+5dIUGJSQUWUeWCvC7Iyx1f3xb84fMVry8LPn/e8M//9BU/f1mwZAADIfCM+/0F6z1pRm5gXO93SNToTimWHQAy8AbYtmTFAKWNx8biIPM0DBjHEdM0YZwnjJNmRBbRcoTf/dM/4evrG95ud6ybNp5QR0RT23NJtYw8dw5YMXnnAFWVU52sEhHM04xxHHC9X5GK4LYuOM8jVmjEnsBKtB8iKGg2H6AOaBLlFD3KOzcE3SB3h01zK8x76WRdXwLan3kiqoCSjxsmS/3vVbe0Ja/ntX9250lCJx8CMTI0CuzZZXpfwC2Ulo3UATdqstTrkGeB2I0fARV73dbLS+w+4yn+nuXiJf2trJsso8QMPH/eTsfWRiRw/pb98XCjN3BAyWJcIp2xXM+L26jUtjBbpoMbp3bA2lLount3Oy0TI6CQ6ilqmWlami9t7bp1V12w1wP7MR72mKASyvf7W3wfsEWsCxlHpe3cagzLYY1QdUkppXImMTOy8fB6d0kfH3vmjWUn1H3m59D0pDa2USNYSu70917X+V46no0dSAfNMOqzpo4Ai++tHsSvBrXsATq/rrodvUNxmHtA5RygJTwiVt7kWQ7BzkRH09JtLh0X1WCyzoc2B9DsCiuDzUA25weiAPO6rqqrULQckKBczABO04QxjojGhXu9XrFuC97eNFNsPp0wTSNCUA4iAWMYT5ZdVgBJYI4WWEN1OEQyxjGq7Vsyhhjx2x9+g+fnT7i+/TesywrnXAtB9UEpRbnLAltJpzZV8Ey9ItCmAqJ8O3W7e/c8qD2hWfnK8wQx3W/jilF1L/Ie+HfnBvBSvJaRqXskYdt0WUsHRumRKpWb1+VQvwfZbCliNoJ4ak6RB+hNPAeOuMxnnMYJEM1O16BPxDBOYObajKMHb3vgpM++ck5AAAglQKv1XafAaGDcWUxmyxjAYDVyHhhAUb0wjgEpuSxiCGVgc24nbdhTbVz7PoHMbzJ/hoJ19TWfApoJVUpSQI2AxglmHKNWcphKApdiAC4DHNXOMxu1FAFHGDBHna4yMCBtOp/MCELIeVN/0gBob2Ki8isComMchxHzMIJywbosCG9Xlc9GtOYgOoRQJO1kEJkfQ0DN2I+sNgjMAXfZNZjTXYqVGFSwhbUc10AQppbtQo4QiI9fQRBtaOcZjTqWtCkARUIKWruOARsNz6jlhakgSXJDBIG0Oc1uz5lOC3AbhbDKBoAhwcqvSeeEoX5SoABGgqSMDB3LwITRZG0Wzwrqe3ZKtdd44MrjvQeZtLFZlb2drrMHtuYSxvMuDtQA4uAWMYQThAiBooJx8Kxry9QUxmByRo9Fn83WgioOqpKmGGtjDJjtVIpVM0EpcjgiBm30UgiQkiBJ/TAma4ZSzwJVmiTNA3Slg6oXqbf1TD/5fHhH3Yf+utsvZid5cNV/x2yAVGm55EKdzWQ0ItUJ6/Q9sLc36xDMn/Ox1nuKArlU/2e7mhko7iOjmnV9UhOsJNoDUtTZ0FL/gwonEDXswYFcHbNnOHtWcQZ4rBepKQwCUEepkmRDSZ4laPZtLsjLhjVrIkAk82vtSjBuwuV+NVuRzf+35IUMSCngMHTj9GCvZqCWohU7jmGt6x15S13GMSEZOMe5YMsJkosxknuFTOtC/ej1qwC6IYxYZK2gFRGBA7BmVUZqVAdM44T5NGKeZwzDgMvTEziQ5oEla/Odk3HKKb+GZ1NU4WnRm1wKEjZVSm6US+4EsaDkrXI8qDHoKDfM
YNAoBtsBIsoWvTdtadepG75uZhW61BkNvwRw9a/eSA20/2xvbAPAIhnBCH19Lima8SmNHJNdyfu1PMoOvLt///pWC99mCJV9dD3rz2BcZSJiZcgEiooiJ1P4FAaMpyf85k9+i0/f/wAiwpY1TZljAI0B23ZXvrnACIPOBwfNwCHSNU9Zu1iJuLETzdGOQCbrdGdZc4pLIYPAgXDfkmXPGBdaaig1hwEpqXEJA5ZiHOr6aE27GhIQUoCL9QAH1qgKEVXjNYTg3kCdx54Mdu+AJKQ1IYRonV+gwoyo1sqXkjQuWXkHOvDWeG7GcOieZwq2iKiSiwEZSo7LYM1SNAXlzr6urT47W1bFlpOCXJaFN04DZNTn4hCMP04jMyllgDQyGQYFzLMoN4LESQHAXJCJjQxZMI4TLtMFp6cfMM0XjOOEGFXpFRLktCKvN7y9fcHt9ob79RVpWxCoaEmRCMqWlMDXy3gFWMX1YADiABJGHCeM0wVxnJRPzkpWSwHO4wnbesXL9YosK+IUMYwDOAzIYUIRQowjTucPOJ/OiEGNqxAFH04BZ77h05nxw6cLPn6IOA9kUUjN2KTdblCDP3Dcv7k7eIIKzAFW7uIRnwIKhHxbQOOIlAv+6R9+wv/7V/8Ff/mX/wn//b//I96+XrEoKodpPCOOI8qiSomEERCwLquWRQXdh4wBxApoptW7cmqmqtsCQ5dJRaS8MpfLBeM0YBjVaXNnlxi4vt7xux9/xE8//ay8IQ6EEZCyk5YLgKxOvjTeEIEGWFJqvFY5l9ru/XQ6QUSUa/R6w7ZsyGtGGQk/fv6COM8Yz09ItxsiAuL8hMARmt1510HGiI3VWYRkZSPZyXGVKV5emYsSxQWOlrnUFnBLBSGGmv1bZYfJDzfaBU1HeBSaO3mD7vdVb1TF5giTvpeSGZ0FahTY87kMyG6w2TMokTMph1tRmRHHaLwYgpzEQHoNXPWAiabqb3VMDh704y7Gf6RlQX22VurW1ufNs5Fp1y26J3EnNj4j58EruWZgbEUbozDTnvPIQZvSdSwTBR04sDkUScu8SANyIglls4CIgwqwLJGcANYyNjcgyZwxiHJMJcsybYaZrx3qevhrr2r1s8Huq56TgQtoBSLetKjYBSiw0Sr0IF+o5T4igrwVICiIwJ4xqc3GwRRqY6wWjdff5a70Cu4geXlO0TI8Jw0vYo5kx1X3CETr13VX7uXGt/HHVAcYDdRznengRv+Z6ghaILUH8fyYkAUbwL0z1UoXU3GH08+NATymozwDX3VjgKCV9vmYIGo75srhppnaIkpor/u5YJpGkDDSuqCIcuJMowapGQBZlpqIjmaeRpSnCwDB9XrF9X7DVgoQBsRxtCwsA7+F1OmUjHXLyGVBLoJxGIwjR8NU67KCRJvlTMOA1y8veHu54n57M/0ASBaAGAwdT7EUagJpaaSggvAQdQ6HAgzClrlTkAzIFiq4lxVvyw2XeVJAKQbkkrFtgmEYAY7VifW9EgIZOOp2jf4JQWVHShvuywZYme4wjEoWb+tbDNAhmOPq88qtQ6fKtaIOeggarFHDDllKt38y8pqwYcM8DNq5D4AwY8sGTKIByf3efW/zNX5P3asZZTOdvhVwdo5DD5SppBg7ILDSPjDV7JtMwLplzVSDWAln7OS3llyVUrRBSbQu9IE185xCJwtaxRGIQTyYzWg2sZ/jyIg0IuYMGFisnIYJOW3YuGCwTu5Vdgn0vnY/LdfWzKVSNhCAOURIiObLqDPMpNVhTBmBu27VW6rZMyFqBn8YBqUyItKgnHG4Rp6qvirISFmz7c6nEwK02YCwzi9MnhQRI3g3QE60nHYahn2Vxhg1812k+wM4xYQGkMTkjIX5zCeJHBEoYRStekklaRDWOc7IM9ms6oxC8/+KZvl5p1tnsfKZM3cC8zQDy4YtrVo2yqxJDykjJEFAArHa64EAgj5rdiAN5scwVXkp5k9E060E1FJH3eesejZEeOJfldv22eJ6PZh/ZXuPTN5IEuXwJIIwg4o2iZxC0HUlDYxE7zRX1DdxWS9WmpjFspUMfE/FzCh49/GitEhScL8nXN/u2LZswRYFYSMryCxFz51KSG0at4Wk5yNwbaLWq3mXB4Pp4h7oFAPmizi/mezkhn+/mptsWIbbXWgAcymKXZAhz2puid2PjKYD9RzGEJC2Ddm6FIO9FNt1olY5kRTNoiu6pwJQ+YJZWgZlA2cHwIKrIt5MyPa/AaP6PPY9NJ4/6crHPeITo2dFK3emNiJSn8ADOUINHPYEFBaAWAO4nmgjqtTU3jHgN5eiwVa3Z1xmmV2QVqpdir1DdRwYEgWQASJsFSqO7ahtKNSaunEICDFinGYFBk3fZgbelhURBZwFVDJKMq7F0hoLfdNfxK8E6J7OF6BoswbnTNDJnSwTqHXK8IyLEAIyrOvq/V6BOM+W8645VSAfwC6pO9j+3QEV9TOH2urjyzvriOw7ibR71aP0jZ//49ej+x5Bs6OD9v67x044j69z/N4vgXO/5vfDMPgFazkHYPX7vSEdAgSMZdMMneenZzx9+IjnD59wenrWbjhp07IPq/vLpeB0mhCMzFOdGs0Qkpwg7Ia5Rkr01tz+OPTujQssaoOKzlM16nJxB8JTb5szSUVzAEQ0muXPl3PBHE7++IZ/NqeL2fhViqP6UgGG9hnPwKMmINoqKQ8HsSlk28P+sEwYzFipGTQwAWkCaSsqQP35izuJpEo8k6cuCxKAkkU7A0c10LTCQAFVH2spBZFGEGVwHAwAYBU6Nm9gVUbq8JlAjBGgaEpBFWBOauw5h9sQlJtsnmdM8xPGy0fE8azlDCxgKSh5RVoXLWu935DWGyRtYGTli2MzdYgRhmhNDURBQQGIzcjliHk8geOkcihECCmQVEpBEMHbNSNywDjPAM1qvFUnjxHiiGE8YRwnDHFAZAIHwTwFPJ8j/uTpCR9n4OkUcIqa3F6N9m6dD6eu+7sbbf2rfd5J/QFB2VYFWOYR6XrFn//Fn+Mv//N/wd/+/d/jH/7xH/H29lYdgRCSAhnb3ZRKVKVkYDIoQThDtoxiII2Sgdv8GkijIK6maIcwYp4ncy5HDNMAQUaMM05PZ6SU8PLlK15eXvDy+hVf396QLKNaTHYIGiCgRp4aTX2UE2iyPOdsHXLHKos8GvX29obtrpFlEmAcBlw+fsL88SNCIJyfPmIYI0QKtiKYJu0yRmAIM4plyh4jvxWgQw/Y6YoW+02/ohwaiOCf78GFPiPqmCWra9x+fkseH7OTamYRPCLZTPMKOPSfkwbGNwey7bTeKKxgFe2fnwg7kmIADcDpGtr0Jfx95Lp3amD7QF0PHYjAS3zb89aMSngmN4GsNDfnBFAw/rWuJNb4Y3JpAG/NyuJmNJVaRqkyTYEe1SGRNGjhjnQFVpmNw6kBbscAXa97H4FNbR1draly0eeGruPBKBMY6FuoRrcNywc6u2efwbAfkwME/ed8/Y570f7ROe6a4bALBollqu8y7Noe77PnfP39fc+yJOZ387YrIz3YMbQbU1uTR2fJn6v
vXLsHUS2LPA7tmbGfR79Pypox3wOENkVg6zBZ91GnqwHVNff7qp8lRilZqyJKRpkmDIEbgCgKkAUGxjHgcjkDBHx5fcV9WRBvN4B1zEMcMYxagVBsfG4TlyxYygJmwhiV9mOIVkFQMrZlxb/87kcUENYkxtMsFWSq+0CgnRtLsczWlg3MRCgpY7nesS2b8UvZudVDaQHWhHVls7EnAzEFUggpq/Pb9pXAKS779W08XgIWQhCVz+x2ibQ12cmxLtDg10zJSwwFJZNmimXLYiK7DimBPhVSYLMIkLICqRxAMSAMBlpkDfz3mZ++v45nqz8HgNpKxKSVR51sJ3PcmRmn0wkxKtBxu92wLIt+30u4WMCDNo56fw/l10VXdldEsLlMExhwIHUO9bt77koFZZofVgECt3mteR457ymzymYrn/R79Xr+eC2NbHI9OjoexjgpIFZyRsnZOnyrvIYIlMOt7HWZ8QICZHZ6CxyEEDUQD80uLOteL9ZrAMadHerer/uLDFCqZ/bbPp3z+Ol7Pq9a5QFkKwfWCpMorRR6/7OVvnmRjodxhmDNUkyX8UEWFi/dzZad6eWJolXSE2sDsmD2gJeKenZVLWt2HWxJCjAQveSsgFtxsLDUMaK4bIQ2rhCxJB5TfgVG0gyDF335xWhhvCrIKKoMwGHrrMXICDxXW62fsyx9sEea/2W+WCG0QB4BA0eEqIA0k5ebZwgyQFqWCzKuz6JgHQy8EtrrhbpP0CoSjnvD98d28Pn7vdPbZUf8w1/3bVUdyFoBxHAOwFz3n+85XUvLIi5iDfXMFjCjo8YViQBqQY1qz/rY4DbA+wZNzRalatP08q2fK+p+1+sBL6PdtoZ9ELW5EQuk6DEoVcaUzt92OoKq16Vp5spdCABF6cKokFafdeMrpMEpMrBSAndwDEMwgEipK7xhmuMO6gtbti1zzWYuRYMOGQIJURPUiumRSnVlcrL8ETLovvvuO5zP5wqu+Z67XC62yWJbYGqHaV3v1eG63+87cK430tqGZXhpTd3XBwPv+Pfj6xEIdtz4RwP3X/vqv9/f49H1/zX32gGWv+K5/mevGYKh81Ka02DOhEA5pIgCogmu8zTjdH7C99//Bh+/+x7jdIIwWecUjU4pyKJKdgjBEG/PUlQqVRGB5eDXMTFR1+UHAGsKdez2ValOtAtFPcHOKdgLSxGx0ke2zj8A0EURoeCEGjWhcjM0R1OBsGLOnRBXB7O+3GDp5qwY5wa6PR1MyCWPgljUn42fpV6OWuZiFoF2JZZqzFVj1OpnC7m6laoPswgCkUUam5Olc0IuV5REv2hGDUjXqICsxFbPZJg0Kw5Oxgs3JLRro5Y8BI1wjifM4wnTOGKez5jmM8I4gizFPeeMbVuxLnfcr2+43b5iWzfkpCsawoAhWPMHOP9QQBFYGYiDc1rKGsYR03iqvDliZcoewikETXvmgIEHCCeUVJAlIwbCNEVM04whDJgHxsDAEAXzacD3H8/4zUfGnzwBZ+eaI1daR3Cuf4N2P2BzpS/fmx1oV51UQrBy8OXtK/7ub/4r/uzP/gz/9W/+Dj/+/l/w8+fPtVuYiBgvnAZBTqcT5vkMz1RyA0blbDNsORi4WM+/ZheHwBhDxGwNIKZpwjgNGEcllxZi3G8rrtcrfv/7P+D3v/8Dbvc7AAJFzQaAHeUiDRTS/W6djg6yqwc0gmVSecBmXVfcbjfcbjfktSn9EALOZy0ByzlhmiZcns748uUzcio4nbiW2FTnn/agVx2HGafNaKBqHDwaa29Q90aVP+cj2f4tAKJ/9YbP8R4hBCut+LaMr98TAPk9WLRzaroxt5ICB0B0Uo7P1esxd4KOANG3wBM1PpRHVAPgajwTmiw7zqmfjFy57oxPr+v0WZ3Tgh3QCKhhWp19d6zRQKRH46yOx+Fafi69NBTYc6X5v781LyLuYpnApQauOWdvtT9FI9V1jsgz/vb765ildnyOHpg7rt0eHHgPhPXXrD/L/gz1n+sdVDfwXe4cQdx+7h7dz8fs7x0B4f7n8XvfOmeuw9+fvcMzmuDqeX79mtqMyA1z1y2on3G5VYpnEEaktCEXbYwzxoAhDHrH6uTrNSfTm8SMLWe8fn3Dy8tnrOuKp+dnxMtQg4GPnte564bKbWY3sX2rcw5sKdWdpuWhjePNAzUkAmSrLnFbStSJGucJa9bsBuUu1IwDzW6LyEXldkkbuIh2y/P5LqVyngIeQG3r73umrq0AEC0l9j2c0nvn+BGQCzSAyIOqEO3+KVkBxepfcKMNINub27aBcoaEgIARFKy0u8jD/d/v6R44a/IXgDVY2Laldfs0W8ud2vYMe92lYImWRfoZawCb7GTPUf/476SIUpNU+fFeVuvcC1Lay0KXXxzb+/1zO0DSUzL069NnaBOpAUWkpfylO3vaUT5q5pfRVeietOcQzeBTiMGyv4m7c2BcexS6sn6TuVC9E/x5XQdYcDL4ekjzReD7TA2CCgwcZY2/xnHcrUcfuCj1HnY2ScFn/RyZzPYkvt6XBDyoxaRccFL1jOsA49zqGhxJ/xw2EzpfVkUGK+FFqxrLWSkRev3kwUHAqgtw0AH+fMjNHiaAqJhuAZyEoMI03VwXMxTrJ4hqSW1OqdLrkCU3uAL2+c0512YerSu0ypje5nEdoNmJEaM1n4nWPCoX5QCkIDUgQFadoPZL6XM2dtd9n9Xd7KmjbOr1dq/XQvAmCL9sI/afD9ToK/T8+5j0XEi0io8qLzpbzam9XMZgv59dbjNz3TtyGNPRfurlT//MbZ4ONhU1Hx3QjOlmD2plT7vXvukPmaOxG4PZd7l9yTa+rmMPgPpZ7p+lDySUUkClDzRbuL4MQAgg7QgIoi4ILsWasERNHBHvRl+wmb70/QQoArK36X4Zw/lVAN35fMY8z13nvS6abR0Y3cHydGh9P2nThy1hXe/YtgLvMqVCwroc2U/fdC18VP/z0Ak6vo4GZv/+IwX/v/v1awC2XmH/ce9HbePXDd82Z7FSVpBm6FyeP+DTx+/x9OEjTpcnjOOIdctaFmDGnRp9QDRus7jjMQIc7BNkZBGkbTPU250SN3AAyVqq0TsUzk0HNIcR2J25+u+cc+2w0ztPRF462jkNRZXUrvQFGrEQqK534KpnaiilQJjARbQsATB+E6rcAy749birIgwcwBbZl5o92qIYOiaAQnt+bc7gUQ6dj5RahzAH1QQKOBaLqIRO+Zbi4KSBa0XLV4NFjtFF15NxnHAwPhqP6ZFm0zFFxHFG4AnDoGCZAnQnTNOMOI4Aa+Zbygl527De71jWO7Z1Q0qCIYxa0lUyiBQ4IxIlJpSM1TL0CpQ/gGPEMM4Y57OWO1CAgOyZ9aeYoiIAPEaIZFy3BbmsIEoYpwHn04zT6YTLfDbui4AxCs6nCd99POM3PxC+vwAnVgHpORMH1YmeTNU2V2ci+OrXE2Dfcvmmey9tmxLlM+Htyyv+4j/+R/zVX/0V/vqv/xp/99/+P/z00894eXnZZRuHoFmKHz98xKfvPgFCWlLkIxNBWpPey8oUmKNF0U
o934EZ8zzh6aTy3Q2XGBU0AzNeXl7w889f8Pr6ahF+LcUfxkEBXZFqYHrTiRq4MuPvCEK5Ip/nGcyMbdtwu91wt0xrJwbPudQ4WM7qRN1vd+Sipe1DjKqTUkuh1+c3Y47e2Vdt6ToAgyrQ/T4i2hvc7fNd1hbeGzBHAOrRZ/zn0fDfgYDEVT734+3XGYCVTmD3fjUeZX8fve9+OtxJ8+/2JVu9QXocQw96lQeGvFZwN1Cod2C9WU0preSyjd9LQ3Ur9YBTqQ4GNQPOR0/dOvmzALvSlEf78JHdIObQO4rm92168//n7U2bJElyK8EHPcw8zqzMPjg8ZEmZPWT+/3+Ymd0vKxwu2WQ3m81ik80+qvKMy91MVbEfAKjCzD2qihzuWklUZLjboaYHFHh4ALYVWPdggQAiykIx45SsZa7fiaRolooTM1qIDFTZMm/ODF8/D5ye4BXn/T18K4KxWvoeMcaxqqfX+s0/3wPsfnySJs327bg0Z7aAsXPCuTHZK/t+DdkY+rDU/XmtbdmsNi6+r2Q+7MN49utTZAqbJ0IPInQAT6YrS65BrfYp8lrCfI2JawkOAgEREfN8wFdvJCTt4+cvaO1ZwjpTRm0ihwOhA/ZBPUVWAfT5+QXzlCE5UBnscpMhAKHI3GrMPVR3FHdhrMvi/laB2VhyGZYGasBSVinUVitCknJssn5Y81NJYYRSyljjRBo6Nfq2G0G7/aDnT6xbFiP16Itx/qW54Y1iY9AFzddrut2+mjRraNx8uEYkMVYtP2atBW0R1lMkYQF78Gz/443VXpyFDJg5X4PMGoXApsdqGondfhEodH3Uz/tLvy8B2I1Hf4x1dr4XGZtjP++BAaZ5eTLSBeg9MOSQ3cfkQtJiKazrQ5jCozCA9ZsuIAQoK61J9FWkUeXb5KiN4QYMa22Tn0zCPbkD0VZ4D40tbRa6x5XH5wEqpxnCNqpDTux1gz2b0frOz0uok9SkrjjJzUlPyDT18EfWnJ3CmgUIVUG41gsrUbR0F2In+fYFIjG09Ceofur376CggthRbs/a6Q8GsYUYBVx1uYrHyQNgE+AnAJE7BiPyTk9xM0a1UO16Y7tWTX1SJLEzR4Aq0BYEjloMYbDkBGPdg1rU5zeR2kwESSXBwowliy6KEYEj2FVN2GY8dfuo65vX9hq/h3n9oqJu8lT6NRRCcEULLhxEXSe3yACvP7CTuQbehjAiIUKIqJJbQ+SM3tOUYnNMpCRFGMk5AWxpWHTa/v0vva/XY8/fBZo+5HUcg4h6TkeTVVNMm/6086yIUOnEB5Nf1B2fRFKleSOv3fywPtzogRt5y91RGrihQHI7Imiu9yjMO4DUHyAh+0yk2EcUUg5ttL5NF8zzsNsuHT8IoBuTIMISOxMRHh+f+yDbTymlG0ylFDQuaBUqhKSBpMmMauFe0QZAz8swNpR/Gyh3SdHeK3b77///OvzkvWSsvXa8Bjr+W5+9/VsEvAhpDz24PC0NyNMB8+EK93d3uHvzFrc3d8jzAUSE02nFsq6SqDcSUpwkIXmyCk0BU1S6MFj3DMmL0RohtIZV6fxiIwR09ZUIHAfTo5kfhuRc7ZlOy5YFTaCoFHL1Hta69qS5RARLQEvqQbIcQH18evSKbCzC2JONipqYTpUboFEzpQmoF0S6ywZJAYjq+SqlG3jykCh/BE2IC/RcCoS4MVoDASFmrabqNl5rL7MAW3GEK3a2AgjiNJb4/Qd07wYAACAASURBVI5SsFJ6QwKFgBwygiYYT0aRhhg1EQGlqgfJ2GkhSj65fAWKEwLJv+fpCjkfMOUDpjQjRs2PQwuaesWsUpsIzRnxOiFHCckRISe93VrByivAUt23MSEm9R7lLOGsOQMxCwgJBYigfY8A08BSjlgWYfCCKq6vM+7urnF7e4NDnnA1Z0wh4ZAirg8zvro94Ks3AffXoiNYwLVfPWLXKCTAHqQjBd/QjbDNfuqE9MaMVwbj6XjEP/zqV/iv//2/4+c//1v886//Bb/97e/w8PCI5+dnkasaapBzVkbyinWVKtX39/eYpkkVdTWaGql7VuYXQlBvj2zoV/MB8yyhrdM0IcdRZfXp6QnPLyd8+15Yc6tm7R5s6SjKYVeyhq5rbykyb6tI7+VYKQXPz88S0qrAnN80JadiQCkrnh4eMaWIlBPmeUJZFpTTIiEtLLntAHSD2x9bJ8dQDk2uGPDy2rHfR86NycvG/V5+72X/uWz2QEjoRRek/dvrxz3lbw+iNRsQ0NkzDWjYXn/eTlO6fBjh3jDeK6QbgNFuvusbBvq9rT2eaWVsam4MiuLQANALj3DvJ2PbDNCqFh1DrXooOW92wKFT9qzfzsdGZK0AfQbO+X6y/WkUyrIKt2YsMYtBGlhZ3Ko1Mhu7Dn2/C5BcvcSDrQvYdrQ3RLZzpY/37jNvtPsxA+AYGdTzD+3DpA3kPO+bodz6deC/q7WCoAxtd/g5up//r4He/tivUc9I8O9rwMM5O3LLSAQ0jG3nSLBndTZWf/4IR/N9JfK5SP6uJMz302lBq5KLNucJKUs+UDZDXcGJlBJub2/RGuPp5YiHhy9oreL6+hqYJ8yzXNtaFTaYsUtQcVyWHvKdUkQkQikVL6dnKeUWU2eldhYUERKEbb+UFZm2wKmFJzeSFAOIASFn5MOEOE9AAFopYG44zDNu5wOqFloAS5GuYynKThnhl5dkhx9HX8VV2kIImn/O7x9+nhgAZOM/mCNqfA88ZzNmVdlHIUoxCZNMtUlVy1ZWgIIU9Qhx8xy/R10CHPu7kZj8pQ6QrNaqIOB4FxmS0f4xL7fP8f3n73cJNGBmcGSsp9Lf2fQv35927iigMn6IApZa4Q/Ro7eOL4IAYkX1PM9KORwO8jzSZxr7RL/vlcRZQBpzhrCSPqZ56ueFrisP+SBVgNFBTus/lkztWk1ZgFYpTChpTiRt1mgHlC1MBvIwAKgN49b5d8kbv4908A6nLjMs/7luiUMuNe5seUoagpwYtQUpiAigcRnjyg0S0jj6LwQ+A+hEn66a1kQAOwI6IHaefmW7zipY5MBu3QWnG/iqphSgTnJI+KFWGN1q0LomNTSaQoSEMI58xcLcbhIVYxWOEfr4iDtx6EMbHavvy+bklEc2MNYq+b8M9DbZK2GP6szT94iQfK6NuOcg3PfT/tl+DpjDQVLikACwpaKpQ0nsutbnXb+vsw72OpZ8P+ZNjLEXR/DXbCwX5p4nEgCCK/gg43y+pw5d2YOx4/57gOvS5/u91CJ29vu6D68dIN/4PoSgOfel3f3erHLd5FlvnwBk0HeLcHOEzGky3nG/B3l9mSDzFIpdcZH5xVYcRbA6SaHQGLVGCdHXEGPpPXWSjx7UXzqe5T8gxNXCFkqpLg+dxA8b42FZlsGwkxgOrOulhxs7TuKoAyysNUI84KOqx95j4Q/7/tLk8udcUvT+owC6H6pEftf5/962fB+wd/kc9QY79V+ErKzf2lpnPMSccf/mK9y//RHevXmDOM1oVcqqm/0bY8Q0JwUGVEGhJmT0ICCVeMoqagWsBHlVQdk9r
vALfLAjmhpXZuCZt8r+XbxBFAJAsSt6lapU19wJky58AmHKIz+N5Iuz/ghCkSX0NpAqGBsGnX4h4JX87iw3Cpr/ogEcwMGgPAKzJOhMcZIFaE5XBeACRHEAJUlu6b6DbehgyZPihHhiZ4gSg0i86JXFGxtUgacwct9Fo3vXJglbAdngWWLoY4hSiSkkpHzAfHWDPF8hphmlEPJ0hav5BlO6QoxZEu1zQOUi4Nh6QllPApoCCJQQJ0kinyIBqJJzglmqg64FzCsYBSkQcpB2h5glWX6QUAhoyCsDYFWESQWwKBQVayM0YqQ5Yz4c8PbNNd7c3+AwCZvs7pBxezjg/uqAu9uA+xvCIWnajC7c0SFsA5A14/ZubYmR11R9IN0c9xtcQ4Cx6BhSUGVZT/j53/8C/+Ov/wq/+OUv8I//+Cv85l9+i8fH5x6qap5gDzI8fPmCVltXNMzbRiRFPbicNOeBtJmCrNWrK8m7d3N91XOa2Bu0WvH09ISPnz/jw4fPOB5PqAoKEsUeiuCZztznnf+3gK6NS6fxW745M3aenp566gMDcO2nlAL08ByR9cKwu8HhIEmhnx6fwI1xdbgCmPtzmjLpYhDW6Rn7SzdXz/TpfeCAHFkK4ezftu94hXwoz44u7/v2lX3pNe8kIIod6Pyc8+Nc2dgDK5f2Qg/8edjY5lOt56BPdaG0co+tUm/nhxABHuFI6ErTAONG2IesHQtXIFKWMaw/zHE0rg0u908gl5/TKV5EUTyeOjetvdZGMRDCcFF1hZI0d9QWXN4bwN5g98yz3jcbKH43Yq6/+juTL4jFqoRuwblL+sLeOPRz0I+N16nsLmaIswMDgtvDfNJqEGEw2G3eqANLTuiGg8iS1gtP2T32oT7bKrLnAL5/50vfb42AARpeun5vRFg7aqvDgQi4flCj2XLPsc5S9ZqPOaH3Ykbs78hopWBZVoQgSFHSHEg2t8AAVsnxdH19jflwBXr/AZ8+fdK+EXaeMZpJ92fLmRVjQjwELKcTTqcTppxxe33Vq51zFQNYwBo1pFnDkrWo24/evetjUGoVtgAFzT2leUTfB8Rvfo91LVghIVQpBMzzjPu7e9zf3IDXBVwb1tOCh8dHPB9fcCqrAix9+sCiNEw+7pmeNh9J14KB73vZ2sfOgXqbHwpq+1J/b2LqYCy3gNaAda1oJJUxuxRiC3luKI20QqZPf7ING9+AbzrPBBhk1Q/P0wV406aUFa1hw0aVfQqgONja+3ns57ZfDwQBrDiw5iaza8/XmbGnibZrU973PPKoj+NumZKCvFZV19pYLWl+V6JURri1VnXeicOFu5C2vuggHsleQVy1MBT1dbjfIw2siclVYQRQKyH0oi8K+LUmznnmzm4xNlX2NoKTp14X2B8bWyZI1WS/v9p2CBDW9dT34LO9OkB1eMmryyx6peVlZGaJvrDzWXcclS1ms3D/8Fx/6H9vdAd0Z30kAUNKKVKMpE7gIHqujeP+Hl0m7vUNNw1ApIWkaFSJDU5X0AuMOSfzmvr40G7cu1zo++9wvlRuQOFOFCitoTQtIpMTQBomzWJjSaoUWwtGsXsdyPIyae94sBDcfSGlH2K/y31FjpidbD3Y93uG6Dk0ChF5+RSgDFlrr/5nc2WA8ef6o8jotHnX146+77vziEaKAj///ftb/3h5NHRSEnCOh0zot6Jt/7O7b38iSz5DIxHYvPJzxr/vWTNZQufRBNcANxRmYcWGAKpSsBIAChZIRWwBrYlixxQaCZAnPgPRVSM0Kq+xpjy5fPwggG5ZJH/Cuq5aTrZ0cKXWirI2rGuVzW7jnb4YYOQ6RpBeCzEanmpbxq8DdD8EBNsrt/66vQHz7z0ubZTbze/1a/aK5Hed+32f/ZDDNjT/0xUm9cyGlHF1dYX5cMC7n/wUN7f3mKZZwhVQAASkLKj3lK3ySUQIUu6cwD2kqJYmVUscStzAWIuAucb4iSELEBRGBUEiRaKd8dGlc68yo4tTNzdhWqTh2cQwNhokrACAU+QsdDN0cC4EoT+DpOxyqJIPQ+isNkf1PjGrF07aBAXqLBOnVXFsCAKQYISSSIEskvv2cYGyAqXKIYUoOe2cwgOISSShIfoda83VEM4YCwZeSULfpGXIRfllIqnKWhtWrbxkTLU4RZQGTIcZ03QQgExDTEOaQRQxp4z5cIXrw52AeEwgJpS1Yi0rXo5HLOuLVlvWcQwMICGGYeAMr2pDSgVAAmJF4AAOwizslYgIEs6qxjW8ka3OQ66ilNS6Ik0BN9f3uL+dcXcz4/oqYY4BiQjv7m/w9mbGmxvC9STESALDiLsB6P82k1uT8sFC8oe8GjMc/dMwvu6nqfyBGAEBhF//87/gv/1f/yf+7m/+Gr/6+mv89nd/wMfPn1AWDXFwCiGzzON1kQ3/6ekJ03TAzc0drq5uYMliLd8OgRByRM4DnLs+XGGapk3uqGVZ8PDwGc8vz3h8fMTDwxOen06gEJGznGte/05D13lLkLVYYUwi8YgCw4CycBcLgau14uHhAcuybAwvXzQIBBRuSBYi2BgpRhzmGafTCc8Pj0gx4v7uDutaepg9E0vI+QWDHED3ZI+9wCuzryj77vD39ff3yro36C4p9pf2JO99HUrKOOcSQCT3tZCVoSBa2JDJ9/2+GMIwjuX6kRfWzhGFS9PW2Ix28sWUQfk8iUGK4RWtpaHqmJhSjt1q8X+TGsnmcSRdL7VZYQJnAMMbu9zBxE04SS8QNJRTOUJff71viLTynBmroQOKlwxjG28bgz2bwu4ppF7ZF6jvY/K/tgu/VFeZAgSiXIfdmO9DBvdtG31ynl+uM1usF2yuEPq6w+47e0+7hwAY6gRpYsQbINeN4qCsRi6opW4ca77ttt7PDFOcr7s969G/l53v2U2XdD//DLt/1fxkZvT451qEgRihUVEJ6yPuawgYufhMjsQYEHNGXcXxudaKGVpFOQpYIIAzcLi6GtWgWwW44rQsOB5fxHDUNkVLjq/9bM8VkLFhWVe8nCJu0g0OVzdIYGFAMUt13zZC3hskguCrt28xz3PX5S0C4XQ8oawFIUTct4L7N2/w+eUJLUjereurA37y45/g3Vdf4WqasDwHtFKQ1GmGGECnF7BWGux7kgEgjgE3xtFATIJVCbR1vZ8fNvctxNmMX2P6AgCvep1Mzj5/QWL4IxrAA/mMNJdaiFLQiiE6CHOvAOqfb46M/Vw0MAwkDL0e4u7W774gj81FC0klkoraQa03xY/sZIzcRaSAjAGPLMZjQNcJfX+ZfuLXpLNd+9iYfhVckRW/X3rLhjW3hQdI96Bl31vpnD1ZaxX9MEq1YNNtQ4goVdhiHaBpAAdGooAQLYfUkD22b9lYXZKP1icGJpoc2sg59zNYeeeydf+53UvsGkBdFa5/7b46epY7TT8jl6NLtHqpLElZGOXougSdPbfVOipUMqGhYoqyp0jlyfFOVjBJWoQ+/7GTu9RBUZ0/Nrf3/QR/uL+0ooSqczL/SOcz6d8QBidZG1gAdA4GvhjpYNiw1p1S5EkLyVWtyo4BPPu+
1nqDNgp9nVrrvaNp7AENDWMO7w/voLPx8PuQvbUHmv287CHxu4N9ow0NNzuCPeYQ3L/HmuryhWV99hJDfo+1nKBm7yiIHcyuIupFovza8Id3Suz1YS9v7Jz9uVacyTu9u2zQQhdQXWjTP/v+t/HSfjInVmu95Mwww3a60+adTFckkavruqouKngUcxVbniTMWgBUk2kBgRKIBNhjAFX1cNa1hSApIsSJBFAcc/LS8YMAuuPxCEAaezyecDotnfFgBqMXWufg197AMfG+N5S2Cih9R8PHvbYT4jUF7z8S6PLv55/52v1fAxMv3c9/dsk4vHT/77vH+Nzag7O+NfR3Phxw9+YNbm7v8dXbH4GZcVxX1FKQ04yrazHuAYBgBnXRBWITWD31Wllob7ha/o0QEhKlM/bPSOYbtw3VTWlUcNIqgz2HjITwJWjcedPwWSsVrishRAF9Xo4v2HQvCbhGRJpTTpJyAkHANAqIVnUWUOWFZRPiBvScKSJ0pcokq4LBvY9t06ncEFkYe60LDTUOSenXEGCPMeQ0B2Ev9Y2gWSha6CAniDQEV8GgIAl4Y5C8bZK8XSpENmW3IUbEecZhPiDFjNPCmA/XmOdZyoVR0N8RpTGur6/EEJgnRJaE0XVtWOoJx+UFpS5dwR6U/9ABzbIuIDREYyjo/QMiUrMkrQlIEawhDkxGcYdWxSmiCJMUvjCiYWsSwnN1SPjq/gb3tzMOmXCVg/xMCT95k/HVDeEQlDXXAAIjaSJQIG6M9E7L7wDd2NxGeSRjPxnDjGXearLbriSBsdSC5XjE3//ql/gff/VX+Idf/Bxff/01vnz8pIphAlC78mibQEzDq2jMspOyKMZGB1Xycwfmrg4H5ElCdohIWBcKkp+WBd++/xafPkmi8tqAlCfEmMFMyuQz5S72d/dK+1CIBjOLQkAEdUPmeDzi4eEBj4+PGxaFKbZmCImCNbxjBjRJtd2Ih5cjTscj7u7vcHd9iy+PD5vr9iCON5CYqXtg9zJ6L2/9niQigtz7bRUS74n0BsH3Ha8BQJeO/fPMsPP75lByXHj/5h42N8y7WzQ/7OgrMZKsOvCWpQIM9oq141J4T1nrBjjb94+9+x70Gg1FZ81t9YKtcVebullckRBR2iSE5FL/egVtc41n9rjz9oDPdp6fA8GiLlPv641iDAELu860v5YB1v3EWAV+fr62x+8NS/9uvr/s897fhM27+589KOZ1nj2o50EXA+38dXsAzp65nxv+3S69157pZuNl7+XX3Gvrz7cjRJd30x0yPnCG4TmIaM8YRQ0KyiqgWooJCA1cRqRJSgVR93Nu0ldXV1dYa8XpdMLNzQ1SSvjm/Xs8PDxsAMx5ntU5GsFchXlVoYV9ZizHI9a14PHxEYerg6SCcHOQIY5YyxfNreH6cIUQAm5ubno44qpRMq01TNOMn/zRH4FzxI//9D/JrhYJh2nCu/s3uLm5RWDGqT31sby5uUGeJ6TnhIenBzdOUEbmub1gcyqE2OeUz59mBu5wKoxr/Dywvmq1oRUB+YRBYmuCbEARKCKnLE45jMiKEEZ4tzHQvLzbg0CvzrM+58/nogGKzKzsyC3YIp9JVWDvdNmv771ssOtNV9iTJPayVmw7YWnmnLeMmhA64LGVP+hgWAgBjSQk1a9Da0uvBG5hntZ2XcfTNMmez5IT1yqGtgbEKAycrVOIQc1YeKm/R9Q1nHPWHI4NMcSe8mAvw62t0zSdOQY9YNeLYDgZZXuujZ1vm5ejTZ0zIypoy24GBsC+BwdbkwIrMc46V0mS1I8ZBBDheFw3RfIaIOGTTAjUhCGmielrZ4Ox6lODbUw2X8jyuwGRJVyUyYpdiH4NEiDV7AjS4nNQ8JvVoddhLzG+AGpdHyGg7zsSqSS2Y2sFaOKIqK2CWu1TeOwR6PfxURPmbOlMUAiDEYEQERF9xdIQQA1YWwGooRlQyGZXjLnQLhCFbC7tizbt9yoP7AsLjrosMiCXIP233+NZQU0pHqgRXtjOT8LI+SljKnnXurOitJ5ug0jIJAHiZiK37/e91UBEexfsKn+7d92vq0uf+SIIhkF4OVHKtmjCVm/Wdl/odxOqe2gYNM7z372mV1/S9/vzGViWkwBpJGCiGeEhDHBQ3kUcS7DoKLXx6lrB+k7Wr905ouO+ZyP74wcBdF++fNH8ciPpqNBJLzPEbBNk3gJD+yN0Ads/cV4ln/flcgdf2hQvKdObln3Hd5fu9V0Glh9Mf7/vM7S8l/e7DABvlOyfC5wro/vnG8sxxrx5rvxbgKWmbJhpOuD6+hr3b7/CmzdvcH11CwawVplw0+EaszHCTIEANO/EKlhaq6htBbNuqizJ6XOe0ZrlKpQJm9KEQAmS0yojBJ8PThcEqcfMHAjqjZQ+AFjzEgRbEFqcIqaMEIDl5SRKkgoxRMfQCwHXN3fqtXRegBCkRHdraKyKXLBKfgZ5S3vW0sTLqeG4gaQyqyxc7puelVpvylKTPU7yaqnLQtB2E5RgRKhikCR3RK2tb0YpJcxTQilFKsXNk/S3boRWWGHl2kEvigEISYA6EpCrsICVOWplo5gQlOkUwoR8dQVGRGFovsiAFKX66dWUcXNzByKgtIJaJX/YWhacyglrPaKhIk0JRBlomiAYQKIgleAoAFz7ZiBtBxAyELT6lm42VkF3hLmJYZtSQqsLUgDmTGhlxcvTE8AVf/xnf4LbuwNub2ZczxFzYtxeZby9u8abm4ybTJhoMOUkR0cFkWYetASgBBBr/h9LfkBQTUT/ZvPESY5N+VGQTnb9nnzYNvxECf/3z/4Gf/mXf4nf/e53+M1vfoPjSUJKA4DGW2N6kDflWUSymZZS8P79exAR3r17h3k+oLWC+TAjJkZKwnad5xnzNINZmAc5Zzw9PeH9hw/49PkTjsej9q0lpRUWZTdUu/yBKIAhaCGgkRycWpO+DBGnckJMoSdCPR6PPazVy7xRFGJ4KkspyrqMWE4LWiJcHX6MKWW8PD7h5ekZMQS8e/sOd3e3+Pj5U1emTcZFt9698QOMMGCTi7aBWtl6O0yJ8uEJ3mAzI8QcDP4wJcxSRIwcgdBiHyMkwPrPVzinyPCVrGwb6ACYOcn0eV7JFw+dQskOxAjKrgB48257gETupazqHdPDK1Nwz/b9br9D2I6BB7Ss/+09/D1F2W59LxDlJ4xxyrkbVSKfWa8ToC6lCQ0FTCNH096w997r/WemIO493H689u/rDS1hMA5DotaKtRYBrGOUEH81zpZl6fsMIIp40j3Tqhx649zmon9/37++jy+12YNurbUexmrKps1bGw//bD+GOee+r3cFlKjfR6rOjbYZQyjGKNU+iXA8HjcKu+9Dvy592w0Q8+GxNo+283HITnnGeJfj8dj7NuVxL7++WMR2l0fj3QWkYpb9PKeM7uhiWbNSwE1YYzFLHq3jaQXTC+b5gCnPAA0GGDfZc2IMOBwO+NHbt0gh4OHpCQ8PDyil4P7NPUI8gILoAB1cMKdMSqit4bQWNJwwIyDNGWyODgcU2L7x8eNHTNPUC/b4Ode
aFJEo3HB3d4+bN28kZQI35BAwhYiyLIgMzNOEU2OclpOExmXJq7eUE2otqFVtKhJQIycBBpdl0fk7clxKxI4By6PNfn7bOPm17QFjIkLgAOKGOWec6ipgF5IAC6BeXToHOW8tK2pdxUmYorKXInKeURVA3e8Hxtjz60JkLIHRUGrtFbY3QFpd+74pcmbc086tVfL8GRBkcsBkhl/7fo0OWdV6miGvW/uqsLYOWit9LfY8fjEOQIV24FIbDMKzd3d7SClFAXC19WzL0HNOp5M8097DZEgRvXzYG1PfC4lIZCaGjiRyqKA1KZAiDsqAnEKP/NrLeMub7tu7fxcvW21fMhvYy0m73sYlxoAaCGstiJH6+L0mT23sTV6t64qlFIB5VDUHaUVagkXyzPMMBmFthMKSPIVbA4Lpoyo/tX1QAoCEPnrHnoAOpUc8yZitZe1pQpLm7BYwTZLdc5P3gxav4FIlasf6hy0FQuuhrEH91k0BLE4BtRWcTgvy8YhDTEgpI6WMtla0JnO5VupzILg9QfaiiGlKkkKGDVhjpGlGDAEpJLScNSch6TrIKEU9+uZsFmQeXIfsGcULhy62rzTux7PrW2q/9nt0JpeSPaKwlqXQ37Btu65KLE4eABYubwBPa2ovO5kihauCyonY34cAYQaz2E+11M4mTCSFKkzm22HrZFm3jH37t/3tC0TtMQuT39tD5uTQQajbMWMdmXvT5uyA2oaMkTEpZe2sOLs9acFGRGGfojm563RAQGxu72DuUJU+MERC7jZ9wVpE7sQYu/5jItdsdyF3tG4jNnVScw2AVazW830I/aXjBwF0z8/P3athiqshwQbEvXZ8F8Dl9PtL3+6Mwsv5EP6/PPZtH6hwOPt+3749ALe/x6X77o+9wXfp8AvHt8k29VYrTuXU7yd5wizW35heB9y9uce7d+/w5v6tTDy9B5MoTzknJJfjTdD7psyaIkY5t20lHKc4WZ7CpmyarAI4pqShqTbeUkSiEUBVGWDGeHF5b9D/JlBIiMTgEB0zgTBNsxp4wrxqrlpbn8UUYLIhhCQJIFnCW8GugpQzeMyhIsJplLP2XkuRnXJ/eEFKAiwSSMJFNbmtCKoqYaYhiUcwkrItGCkSAuUuOLsSRUkBPrJuQRd6aRLfsIVXUQIlCxcNyKShSFbaXgsAhBgRaEKM9yBIGACChMnEKeFwmJEmyWNT24raCkpZJMfDuqCUBaWuAsAr25HVK0cMcCAtCcEYiT8Bowkaq4OJXAUc7vPDQpzlLhWZAuYIUFtwennElBg/+dFbvLnLuL+b8farW7y5O+B6Djhkws0EHEg9hOpR0hkog8tF5mHRsOQYBWeDAIosuzlQi6HJAElobLMd0byGFuaqni8EoTYvbcU//NM/4md/+zP84he/wD/9+tf48OED1tMJAYScJgWOR2ie+EMG09IrCcuy9hygss4nEDGurg64v7/DYT6gsSh+phB+8803eHx8xNPL80aBJeEToLIxEMZGKT/SU3UdzL6ed6mZokI4zAcgcFeQX15eRAboNd7o9Yml7WetKw5zwuHqgAjGelrw+dMngEURZjTM0yTK11pQ1yJe5ymeKc8mE4mGw2jIzf3f22t827y8vWQg+utGwvIti2kPcu3fuxtJ2H5n154ZRCoj7X5d4WCcXe/etoOEKQ3wcWt4KEvZEt522UL63dYZZfMSwJDNtj5cO2zO+nAua7vvDyIL7bK2i9QGEaDJyGutIitcX/a2NB74Oc77Yj8ucp32ESn7W9eu6Lo8FHk9JD+e9JOBux1IAGAVH4hsFY/rbS3Ay3SgJ6r3880bj17/2CvOewaLN/624zT6wLfJrpM9Up9l0fxeEeemoaxxjMxujndjZ9d+D8YbaO2v9UwVDxhN07RJRG9ghX9/OwZY7duEfp4ZVZ3VsFsjemY3CEffVHCvfrhnD/Dm2tZkHXR5XQqOR3G2ral0g32whqPq1A2HwwGNGaU1PD+LfH58eECtBVeHrZMQIAAAIABJREFUK6Sr8/lWK4HTYJLVWoEihbtSSuLcIidnFUB5//49vnz5Mvrf1mFjlFWS6pfAaN04b5KDLiZkJkRmkI5TbZLbiZcjmIwllbvByMzdyBxAreWcGyy3AdBt2St+DgEjh6Gfd7b2W9WwPy1EZUCDhFlGAAHLuoJyFoaQOiOM8dOoAJouw9aEMLRynz8e4Nmsr2a6BDSnlZ9DjFb3RSbikNvuCEEZzK0JmCUPAkFCNJOFoO32FFnvAcxl0y9dD9ExyFokrLURqtdB8lKQ57F/dXArSjL0Dlwxn7WdiLZMWnO697DF85QQDAE1cs6glGXNNCl6Is7DwcoLQe5dFwuRYyzLgmVZUMoqIBSp/uzWO4DNfr530uzZQt65ZddccsTZ5/M8976NkZBpgDl7Bqb0qcmwprqTtM2KgkF1ymD6/Y7sYmMgBRnVcZEyWIkEy3ISpm4IgGZItnv4+dqhIzL5q2kqmjjKOGmKCXZgUZDUFr5vSPVfAY+oV8U1koSF9MrjpM+aA3O8jtDXhkNN9rrS4XAYe5f+z4ogMule2hrWtnM6aY7KeZq0gIkWqqgAr1IUoGqu784F3Oln4qCuZ/uGrRUBnreM9P2cee1vceIBQBil59ThF+NYe7UUBBp2KBDQqrDockhY0QAa42v9ZOaJb9tGfuE8v+Jr2MUlXMZ//n24zWv4BwGdmGGbN7NGVikzljTlg+kfZC/o2+fSKYkqOk5onlxApNWFZY5a/9hh8tJ009E/Y49A18fGfXqHq93o5+/3pTn7gTnohpCXjjNAxRYbqSK0gx9tau02j/HS/zaQbW/U/NDzv+s4Uyr/Jw5vbNjf33fPVyfnTtl+7Zp9n/hF0UMB2TYp8eLAlCGQVGl98w5v376VSpAq8FqpIAKybiRJ81nZZgAIBb3nLQF3QiWFESJaqxhiy2LJ7jOm6YD5cNA2qSIBVTIg4YpBlVqo0OlWFiQgQd7XwDbS0EjzeosBAc0JYzlIFEgXFltQ+i0lSM0EUiEnDDR5SuiCbzNO6qiPMW+MiXEP3WyIFOAMYmlSk6qCVjo+SghvU+9GU0Ygh6DVPUXJY+1Y6kBbBCOoNyh0odC6LCABguKMRqKoxBhBSZTLnGbxECuAFiwHpBphISQQEogmCLuRgBgQc0TMEemQkJIoFqU1nMoRZT2iFQ3HxCK4VFB4yeSU/oiQkhAumavDQGNmhNpQSJgwNaC/H0Mo65ECUgCIGyISqJ0QuILXFyQ+4UdvvsL//hd/gikTDlcZX91l3N8EXM2ETCL0IqAVlnoWBwhjqAAsnssYkn5ZYUUPeF1Ql+Us9w2lDAlJi0AQj6F0goB4RAC3IrKTCY+PD/i7v/sZfvazv8Y//Oof8Iff/x7PTy/SjLWCy0nn/jmDBzuFpZSG02nBy4vkB2UGUoq4nhPmNAm9n2QtrG3F46NUhv3m2/c4qccXcICFjlPTamHMO4N/PB0g2RyrebqaVlkLASCRFcfjsYNz/lkeoLsEJMnkkF+1STjOwxdCDLIZTlPG7c0tAMbL8SiAD4
BJw6Q86ODfz95xVMDbJq6/5Bjx33VDBVtQaa+c2M8ZwL87vOLnr9kXozUlYK8kykMHU6grGw0DldjdiOHDWbfK8abdCF1BsgIBpuigV2ij0UBm1CJJdSlaAShZaB1sNzlsRrYDkLrxTQRjj/hxJCKgbNkH+uL6cjZfxegxSU4Gv3dDyLydF4ekd2lQxwJ3T67+Z8/loJVXhf0BEFBHMY2mYLZdE9w7GkCZUhprJwTEEFCcJ9obLdYHw7A/B+j8OPo51t9rN89AW7ZIv46ExV7NSUAApSipK6roB8KqsDnl9BFWBjS2688Obyzt1z+59pixaixV4Dz0cTu1PQgsBmEHEkCbe/vrPft0v1ataJnM4waJpJL5Jcnrdb6Cz9oiKXRGyExbG7itqKVJaOo0DVYWEWoRZyaFgKurA1q7BzNrpetnlCJ5SfOUETN3I5lrRWsBMU0I0fKZVd3f5ZyYJI9rHw99z2VZ8PDy0oFPMJCTsFhgjhoCKnEPjUIkUKkohRHBI69OIK0aK7JAwg0Hy0yShgsIY+wrmQMW+urtBHTZ3PvT6Z9+/vj5ZXOmlYpahDkHsDJhkoyT5f8NATEm5BiAGNBY2PqSfFZ100bdODPQyeahgas2h+x9UhZdu++RTo5JKhIfUh/UUevyh0KkqunYEsFUpJCbyX53bmDuaVI293AyYm9wS1uMPbpNM+ENSAOffPt8mgwKwsjyurBf66xsph5GibGPTdMk37cmTEvLpapO5mm+hjD6zSkdhz7o1mpr3CO8apU+bRBgdtMe5s44tbl0ab+2vXVZls13HowzGWGyzNgwXY6iSQE0EJhL16NM6xRyQAQg62ij25EANNXJ1SGTxnk2RmVdBaDTSJgGYUaVtSIG6qHKMh4sOblb0Tlqt3PARRP2uiW3H/tHtzBkzM8YpNYysVsokm3/IP8EFllKUdetk/+taeEOZQ1uZbLYRKYXD9miSisZQChzvzbZi1Ea2rKClPkZQD2PKqKtE7UfzXZhBqWIymXbtt188OtsD3bRXolzh7d5LjFwZf8bfYM+P8Y8bQi7Z/pIiQHL2aV9DBg9Hyi7+/f3tDFCcPNuOw/3+u45TmEy2dss2/f2f2++56CXlA0413/3PUzmdXTX2vt1fWRvUmz29m3/ef0MzEKAUQAuBEIKCSCgteL2LjbVF4yGUXRL8j7KGJAGUhWABxvc5N1rxw8C6PadzxgLR8945UrzqBuQN+wFosu2w/65/U5ucP5ngbSLLfXv98rEudSWfVsvXftdyuRrhttmkV+Y/MAW4d5vynb/EKRUuGd7iYYVMM9XePPuLX78o5/i9vYWDMLx5QgrmgDa5aWBJOW052alaw6jjRVwY3lGkxC4ntMqTZgP15ima6SUIZVDAQEyNNEjm8Oe1KNmIJnzzpm45wZuAlpxG+uQVSmulUEWaoqmYJlJ74Di6ccGpIkLRgyMlIfngey54xgbMfVEwNIAgGChraEDN8mqtRJ1X1afC1DAMBibQpQ4bqK8EglIJmMcNW2pADik78MNqNb1iIhpRtAQ0Zwz4jRLDrI8qfdHha3OC2M/EiJCkKqdIYiXO+YISlFIl0G8eqf1iHVdemUkIlEoQ5gUHIhooE4xBjvjnkgS3vYNaSh3XEWxiTmogWxecUKIAsCkwKhLxRyBeqx4efiETAU/eXOLP//Tn+DPfvwGb+4zciJMGciBkYm1LM0A50b8v32gYHD3NjWgVpTjEceXJ7w8P+Ll5QXracGUk+Z4m5GmGTnPSHmSdZGk4ixqBaas/SzMvKUW/Mu//hq//Pu/x9dff40PHz5IjsecESF5VyThbQbRdt0PQ3ab/0Lyg46KqDknHA4H5CxevpeXF6y14vHxER8/fsTnzw9Yqxm6RsEPkDyPTdlz6GDdXv7b/JcQV/GyBorq3ROQ8uXpCcd1VPc2GWU5kPaJmf0hRk4GAzgeT+C6IoeAOk+IISHnjB/96B1ur6/x8PiEp8dH2fCItEoi98TGe4DOnjfClUQeXpKx3vNu7fKGyL7N3pABth74/R7hwwEu3R9n+8L5vqMX9VM3ygcNxsLmO5XlA+Djbpyb8RdjRIrCbvGsad+flxTSzffc0C4AP6Y4+nGOYVsNlbSd+31fgDUCo3YlnFQZtT3C1ofXSy7t73u2xN4Iiy7sxNha+3Pt/AH2jnFvVYCNS4yLbsg70ElA56mDAJfaZ9cb+3E/p/z9zdDcAwkURsgZM3eAzuZDZ5WQhgG6yr02P6w9XmEX4NP2HymCFHGZ5bYPJ/Pv8Bog6dm/Nia2Pv3cfM0rvQcBe1uYsUfDSfM41WYOBTFt2IQivJG+Z+AN5+HeqLdrWmvd+PdyXBhnU5/XgYLqBqEz6RgSCtta6ywqy7VofVObsMeYGWUtAAOzsr+sTQaMD+AoYl1WVBcWKexQZcEE0nonhBwltyhxRaKARDKvOIhRQVrpXEDy8X6l6BrnEQotBqfpriNUVfSJ8/B639f+7+Dfh4ESxBFoRYFkj9P9i4WxmPMsTssYwJXQWgBCA4LqnyHDch/bM32IozEqTD4amCxMlwgEQmtl4zAKgTSZOLk94DxZPIMVbKJe1GkPnhmjrLPVePSfB3yGfXC+F4oc3RqqImeB0nizZntfd5VO5pGF4u33ptakSrKldtlX5e0gba1SiI417xsDRA2gVY3joGGPwhottWBdg/alRDhsmDCmV2gV6UQCwEaQRMyw5GQr6yryD+KI6EXB9Letl0t9YH261zFMHhVhJyAEaIiuAB6ARaNVXQ8WxsnqDJAicSEk8LIACpRt91mV81XB5xhBKvcaN90jWUFlaL9VAb64odTWAToAvdDeSDVkckor3hJJ35AhPeKA9oxLP3daq6jckENSoES4YK2JnYhWla1EmiTfgDefhkmdoju7S2wFaIVc5+DFdl9nACEmoNSeXqgDiyQARC0ruK56P0ZoWnnecpKmgFJp40z2xx4I3x/eEXXp2OtU/f4dgwroociVe5oCC3HezonQ5aofk0FCZjVzSB2G6GH2rY1CC5u+3u3xl76/pAPKv73+8hrGsf1bzhR7FoAwQeGBxb0j3+w0i/7BhpjUSt3cv1cBdg++pM/K5xKWzW7cg5mHCGBIzkMLye3PsOeD0FoFqbPHnOKbfey7SrjiBwJ0/iAinJedGA3cfMpeofXnmgL9fc/aGkLfBYz9Rxx7Jff7EkBenpS4eI219ZIReEnJvnSv1469AdivrWrEx7FpBApIU0TKE3760/+E2zf3uL97g5AS1qWKMCBl33EDVwnpk4SllidFy5+nCTHLBiPvz5JIXxlspVYcTyuWpSDGjDxd43C4RgwJRRObhiaFA9ANSTWWKfSqqzGaIg0IGKjx/FGFfxPjgMiBDWSCnEEhdvCBQFpRRasiKcNiQDc9nkeUXhUNsvC2Yxxpy6LZ9D8RKss9WcG6GDVcEughAr2qkFXrMkMpqlHAVXLhgYCguVogm8uyVlDIks8vJMnpp8szEiEdrhF0o5mmScYrRsCUKfUcIVibdLOJAQERsyqv8zQjTTJGlc1Yl5xKa
y1Cwdey94GUCVJF6awsYaFdTLu+WpelM08EpNO1lQjggDgnpA7QNaAJYJeDGAicgEMkPB4/oxwf8PbNLf6Pv/gz/MWf/TFuDxlfXRFCkBEkagioHZgTfUCeoyOgXnMdc2rgWlCORzw9POLLl094evyC0/MTltMJ67p2xXjKB8yzFE+Z9HfOGdc3B0h48jUoZHBdsZaGf/3d7/E3f/NX+OUvf4F//e2/4MvDJ7RWMKWMKWc0SpLzZt0mUN3LB28YmwG7KLvPjIcpR9Ta8PDlCZ8+fcKXL19wKisASbI87hlghCspFqZznjxrSJmy1ogmSh43NaaSMitrw2lZ8fT0hKWWM7nak5S/Itf6HAljbMS4E0Vlnid8dXePP/7pH6G1hm++/QZPT09q7KoXFEOBPgPCXH8ag8HvVa8pYfs22rmX9gd/njfM/Xd7b6x/99bEsPVKL7Bt17jX5f2FMK7dACHKeB7P2wJQ431EFnowZT+W3lC5ZLBYq/0ubQZyNzRLkfA5A2SAnq+mkSk11leaQ49GLkEzLF4bv317/dh4BdSPkSisDXwB67FzbF4YgGmf9WfQUBT98/w4+f6y577WTt823/f79x0AxytKsesPYSsOcMiDXiBjIxqzaYBSQdNdFP8cHWzbn23+Xepv/5lngvr384nYW2t4eXnpTCY/ry/17yVjSmTaeQibMC7GPXofaO4judd4QaIBrNqLj+VJ/ScoY8gnCvfsK5PVNzc38v7EyFPqRbiWdUXjiPu7G8lFGwhfvjyIE7UxltOKm5ubTW5LyxOFUBEgoYhlXVHXFai1hxACQC0VdS0gBqaU0UJEYKBi5Mxai4BVjYBKkHDWXklWQbEgbJ/SquYYBsKkoW9VGdVkzGOR48tSUFat8ExJDR8FA5m177fhY36cbY7ucyWO1B8BVA20GXKOIQyqyCNqQICwilaKGlRQSgahVvQIA89kszlgufsuMTLXdYV5Aff2gZf9XjqezWm0/k42f7yjYF9FcXMPnDulvGHv+9TvIRbCCyLNQe3yYqmsQ3V7184htR8neQ9hd/UcYGE4fqwvU4zgZCx9AYdPi+RDq3Wkz9j+oI+3ML5V1weDS5PIAQgAR+roTjrPkBKQZ3FMNxZdtkhho6rznmJETNsomr0zwOsXdo7MlYJseVNDkoq+CGhcQABqA9alDiYXzMGujOUAeTaGvRBB3dYR24eRUsY8izO4kjLDtAZszgk5Q/pWiL7ii1B5ZzkgoYytqMxZKwS2rA1CvDDbf0xXpqbECO5kepGbChD2eSiywqqmM7Hojqjouex2WoJdV0rpBVz6fFJwjhia29KAnXEYSzLnSZL0KzAlzEy3RhqDm6atoQZGhMRiWVG6rYNxv+d+r83v1vtrx2aduDkE3jL8TZfwMnG0LfS92u+hSp+B9bCBXNBPylq6TPkufdz/+zXdxNpjuirzYNCNa3ZgqwfoOrzm35k3JwrZxn1fWweATefo3+10A9NpLh1dn4LTtWTjUHtInAYmXeQaUlDTdHf7n4HcAgyDgEBtOz/U3r9UgMQfPwigGx7J7ULYjuc5uGRG3sXN48IEv3RcMnr+o4/9vf+9z7oE0u0VjM0C1MNP+o0h5T7bn+//7Y0L/6yemLVvJEDMCdfXt7i5vcW7n/wYhIilFFG8KCLnCaU1tFqEwRVkMpkQkHRcPrG4sKxa95YyGlfUwjielp64NeeEFDMoJlQmrEvpGxa0olAI6uUhQZmkAiDphhXAxGhNwZag4YSjJ/T/cr+gi4vIFr6BDCR5BrRLg7Z/GAhDUA6lHGp40Ea+eJZDDHET6x5iQpMqDPp3ROiVas07YIMlVPA+jlGNoAgAuYuExva+FpbECElAoTzNSGkCgoa9BkJI4gmyPCMx5B5jXxmYpgjS/HPsjF3SqkE5SnGBaQqIWVhzvIqC27hCkrEnAQyjhKoBAJeCiqbhNFAv5i7EEMCcpM8SBVAS5cMKQjUIg85yOIkUFmZjIiCHhvn+AJxesGbC27tr/Oc//2P8l//tT/Dj+wnUxJgIrQpIxw0Sm8zoHPaep5T6viBrtqC1go/vP+Lp8QEPnz/j6eELlpdn1FKE+dcaahXAO6dJwLlJqqTO85UYSvUO09WM1MQL3+oJHz5/xi9/+Qv8/G9/ht/99jd4+PQZ6/EkIWMAqDFiiJhSBmHF2lw1qQ3QRN1oBgZIYEwoA1eeX17w8eMnfPr4CU9PUmmPYhTPYpcVYohKOIUqYQhbYM4Ure5cES82NxYwWSt8tdZwWk54fn6GgS2lbkupe0bd/vCfCRNwwjzPaOp5X04nULvB7e0tUkr49g/f4P2332JZ1r4mAVV4vVIHL0eHMmHvb0bca8d+/9orXKaseyaVyd9LwIy/b1cwXzlvnHN5P9gDcP09CN37Z+0TGSbj2ENM40hA7uea5Fe6XHjAAyS+nT4nmVSKdsCXtqXVtlEe/FwgkhB2CcsBwAKe2pzvbVBjgVUr9ePhgb5zQ3QLuHZFNoyE00PnaF2fe00n8H22KfBBpPLQK9tbb7NdY6BTv58ZwW7u7gHePUi3fx9rc9+fXFJtn+gcUAYZjX6xvmy+OqADb/eG6oaNh2Fw2Pd7A/YSqLZ/tv3bGEKWe86cTTZv/L09EG79awa8DsKmPVu25k6vAulnMobbd1MDH7b2Lk4NlQFxwz61d7L3AXxxCzHITK9YF3Gk5Dzhpo9d64V27J6Hw6E7hVprnXkHc2yaXMLI2WbaUjK9pYljzapoyrwU9nxIUUJcdY+QDVqdtcHmlQDapRTUAMRKmLLM6wDrR+sTcYqcsEjxKapgHozelJI4+qDVwrEFo/0csr7z4DIRIWQCigPBSAxVm8+CpQ7j0QMKgjpo4a5apHgUbZkpNl+t2FIHNBXAK7WgnIQllFLYhMZ6QH+wYLdMwTEfqJ/r57rPb2rt8PJAgFPJK2z9JW3ns3uZguWv76B52LbZ8uENu/lcvvr5D0D1/KEf2nf2btsCS+qcUqA/BAlRNJlVSpE0JzEipYhS1jG+JrelQWjcMKd5qOwmDwGwzpVNXjgKQJDUNvZOEv2zzQ/oZZSXo57RHIKw4BIZIF8Bx+ZhNeyJCAFR7Cw3NxsXgDQihSxMXVmrmvqGmBFjAohQeCSeH2qt6QxjbxW7iXRfHvK8bWSjhAkbe44Da8ohjUrod9RxFARCc/4REMQ1gCAyAywArTh7m97D5rRVaeVNFJLotlWqkGLLmo4cO1glIYjnIF2A7muax09iqET18T9cGyqqgHStgpo4n6KmLaApofIIZ9zb2TaWHhTzn+cdY9TPf/ucdHyHXjPAZ78vyXlbOWHAtEE5DSNsPcYkDDIaIB1B9X3agonfh3lcemff/nPMYnvdue577kz24KEdtc83FvvQ61Fy48095Pd49h4rYdr2/V5P8VVnDea2uYZGaFK+eACBm/eS3N3kVoixcn0fy/4b3fp8/fhBAJ0hi73/etjVa2ikCRFt9s6Lsw9teO3Yhyp45dbu9X3t/q7j0vN9J+4X23dd9+89XpvAtnl91+E9a71PYN5H9E3L
qKvTNOH25hZv3rzB7d3dUGILi8dIGVVNjR4KkkgfUZJdBhI02bZ0o/M3GklqiRhra1hKxVoaOESkmJCmCRyoe8JqY8QUe1lnBM0Rpw1nbVugBERNIixbFyxsVOLUqRtiRCRsOWXO2SZoPywVCvqmEvT5SRPqihCGGvYRXLdGuN+k5e/c0acYNaRRLALElIAEzRGgOTRSklLypkwYvTWa99fltLOiEyH0hLlN87RIcqQEpAkxT0j5CjFPmn8id+WMUXsug9j7UJU10KjcGoSVZ0q0vCeQ4ooUGSFWAduabGYVBY2A6eogCltpWr5bwcfUULlJMQhuXX7EaBuczNOsOfRiACiGoSCpIGUaIYgEEs9fawhoiFxxMyW8f/qEtp7wRz99h//1z/8XvL1NSATECBDLJi8sxCpzh8ccgOYX0IUC5obleMTTyxecliN+99t/xcvTA16ennB6OQK1QPiWAhSKd49Qo4QFlaVivqpAI/AU8PwcMM0EcAW3FcvpiD/87rf45a/+Hv/0T1/jw7fv8fz8BGZJ6ho0dxU1zXeQIriRM+BsLmPzt4kj8/CfjiuO0wkfPn7E88MXvP/wHi/HI1LOUiwCkv9tEKEF7LOwVllLAtpJRS4FAzTPR5dTzFplWHInHY8vokgvK5bTCSGN+WyAjwfq9nL80ibPtp7Uc7YsCx6fnvDw5QvKuuDDhw94eHmW929tk/eq1doZq95IsZL0A1iQPtgDyHbdpVDC7d6wERNn5+xl9Ebhckq/KQwmn/ZGqRlSZ0DczrjaKzBn+yAb8DoUwZFni/q/m+ba2h/7519S8IbuwSOEZVhKALYGDQEKzDlGK+1mu/UX0fCs9lcyAAk93QHtQBc/53pYx0Y5u2Akf8dWvzeI/bwWNod4tS18hFvr+9U+ubTPyVVVrtjDPdDm+8GDBV5/8Ab2a7rKD9GdJOn/NnRw/7Ofw2P+AeAtYLh/Bw/Yma5z7ojYFrbx3v79O+yV4H1fWS44b2B3htWOJMvqiBj38aDnMK4vPdvGzYo8WL6zVhuqVp8E0AsOlFrQuCCGiLIQoIzoCgHPAVkr11fXAMu+/fT0hFJKL+AGABQCKgNAlfDVIKFQAgIQ6lpwwhG1FKQoxbkoC5hXuQFN9t4YAsq6gmvD7d0sIbQxonCTUFYAUGZ0DlGjLQDiKIE8xu5hYQAFwPW76FBWHOJ0LDAGOLMVyIoI0eYGYEw6v35tT9nIO78GsJVPXRa5dW0OVZP9ISW0RiPElcWBaylGTP8zQMnmonfKjLUiyem74S2PB0jYviGfz8X9DJR1LCBS4y1wZmDfJdB9zNnQYT9r254pLkCsrFdbe9bemFJPC2PPBtAZSf4eTMLY8iCGMC3RARwGeti1AWkbu7A2VHPeMSCO4gzmEX7fAaYdIcRCY1tTHY/kuU1Zo1uQcrzjsiwjv51j5whoNfQIk097MG7PGLdrJfQ8gYvKPz6XWWR2g+uDWitKLaDatGJyP7mPtzHsGjNCzFjLAC8bERDVqdBDaVX2wvQ7c8aMebTWIhV/da1J5eVV1n5ggMTREFtTe0zAJMstCEDTimhxqRiQELGs1dTrPj+ABlLIjIWvK2Prx8mcvxQNuhnzpDXAoqtyVl11q2ux6njruoJLFfaku8c4FwhMkr9R7RKJCDMZXsUmCaPKsrXR7zV+TtkhsiKKQ//C2APojpOuRLrfEpIr88B0ueDWFYOFQcY2PQghGnitRVQcBtShmy6OHCN+59wah0tLhAu65IXjMhDZscfRIAw9YVvMaXu97cUVQ7anvS6tlwRrI6MXKPmu9qWU+toxEM7Liv7UOPROY6gCuo6YNRWVJzEwmCX1j+ntQ5+grpt+X3/+G3LQ4WyT40tvD0C87uf32HfORsGRL/0FmjsC6Ciy6qy21CymvF9/dnzfZPKden7+xggbLqPRduuHfrfXFd+9kXbW0u8ZqEvXeQVcxohEoGhi6lWFcwgBecq4vr7B3Zu3uL69xXR1LZtTmnpeh6Us6rEOiCmgBQKzJB0zw0iKU4nBUUFoYXjbrSNqZdS1iVxPSQsTJFAAKktZZATxclKkzhhDk6pN0h+AZBAlTbxPEtWkC8UyXLJuEBQsV90wNEWKC9iGEJHJgV7qPTJqt+VYMTBJPhM4xvo37gA6CbHVBZ2y5GBRDB0pY0pJk21a2GpEDEkTtDawo86HNAA80kTLKR8Q0yyGGwXUxpKwOyZJBp2yFHqIub//eI6Elwgvl9FUYc8xCsAUCKShxQiaGNkULACZGDkBITI7sXndAAAgAElEQVQoyHgXZlCqQvMNhBgzuFS00MBVvEUhRN16K2LOAtQZiJmlshUCq6cnGBkQIVJXFG14S83IAGaIR6yQOMSoiA7dKuPjhxNePlX8xU9+jPn+Cu9XoDxV3B0YcQFCqaj1GcvyjPV0BLdVIhviBGozYpgRI1C44vRyxKcvn/Dp8x/w9PQFtZywLCcsxwW1rghWRVg0np4cPsWGmkQexhgFsAsJp5dHlLsEric0Lnh6esQ333yDf/3Nb/Dttx/w8f0HPD8+I4eMKR9AkUFs3lNJ+uzlQ9Pkvl2ehDD8NTzCXI+nI9JLwMOnj6htAYMxHQ6SJ0bBzpwz1lK0srHt2MaWE6NVQnOabkBbZg4goGJKAmSv6xHPzy+otUAFtnhhnfzyik03uLB9R8gslL4E0MqKhRmRgDlPYGI8Pr/gd99+gyllfPr8GXmWEK+2rrDgOq80eSXapKwpJHLeMGAGRV+XeIwKnItSVFtVdp7m5rL1rgqhbFNhY9DvAb+xSZ/3yZDn22T/8nvXT/2+Tguzz5lUcfZhl+58VvYLKQjcwaWgRgNQ0LrB43cna9eeiWagoDgqpH8aGZPNy9HY36sz1wAJoQcDjVHqqqFF37EvkvXXZgfuwN4eQLHftm/6Cn5EtGERimLY9N6Mnuh/d68N6EdSJCJQcMmFR7hmnw8YuV/2QKEdIcbhyHHPuQSO2qt3D7Iq8TFKJb8BjNe+rgcgC2glg81nzAOQ931l6Q36WMbBoDqbjjsjpr+/GUZBHTpdSd/mujMDsJQioZmah0uM8aZOjCy6mdPFunLNUMfUACdMSfbPGf2qSnRDZ1rVpk6iDpZq8nLizlJQa2mMHZk8FsfQBlDVvDZJdSJAqrsiBWRlq1EIyCkhE8mezyL1U864vY29zx8eHnA8HsHMfd+nIOxuqYQZRvXSJGt0KSuq7iKlVZyW06hKSkCYM0JLaC+MZS14enlG5lnWogNeDFyRKs9ixIQQkCiDuKG0irI2FFSkGKSS4rKAGYhJ8uallICZsCySI7E29Kq3MUpUQYwS9gcX9eABIw9OM/NmDpkZZSwIYeDL2EiRKEZZF2FnBzH0yZzUfX4nrOswxmJMCGFbWKCHs7p1mVJGghSZsraVImC6AbMAcDqd+qIZBpz9FquWQkAycNABBAbOm0wLQRxvtUiBkFoY0xzgQ1A3TojWsJ4WcFS9zBLtM8BklaQF8Im6TiXKIXRd2arYNkDTuITB4AxOHmk/oYke2ceoMQJaDzU0hp7IBQb
QEANhThHXc8acJ1RUrE1CUXOUENCiOdwqGjgwMglxYC0nmSc0Kh4PUNTMafnpwL+dQ1Lso7KCh82lDmABny0HoWcYd7AjEgqPKr9drrofANpfOj61AiuhRX1eKWJPY+gEon8DVSAIcKtYlhcspUphPAPoGoMjwBwESGPuRTPADcRVQnoxAHIjE0gBvYZSVqABgRJqW1E5IyCANcUEo8FQkLFfyF4YUwKKT6cROnCGVhW4FSCWNBw/AiCtlN2oYcrTRk/SkeqDFgN3QknrleyVoBAJx9OC0AwcqRALSoCXGBhTSrr2qpAJmoAtMpyM2hZZhkwgNBBHpy5zB+m9HPJ7dIA6KAmAzzdGQ/OtLGNzKS95rZIKJgDqpCYEDTNqClqxjqvtpxtdFpKeYOi+Y5MmADlExBA1j+hwrg1VxMujLQN2g49wh0bc+f5aj19Y/5idD93/z/EWA8A87nSxD0nnHrvveKxvt3lsbuGBSQHoZLxYZVXV9TucyYptkTIcnRNVfg850PuMAVXm9OVoYFrfc/wwBt0ukd3oKN79bd8boCYIZmlVFqQpQsEno7RnbJMUBhLgpr+4f4BOOFBXn/vzAPQFO8oLXz4al63CS2MaMVt88PYOm0l4dr9tf0RXnXJ7j21upH0IiB3ZGRD+kPwAhFLtHUfDxxuLkK6aR+1qusLh9g7T1bUwsqIk2qioaFUEc4roYFkNjJcqileCVMOJGmoaYwIT4TAdsNYVS11BUMS6MtrS0EpDClnCp3JGSgSKLBVTY0RKUt66tYZjlVLgIsk0cwoRKJEsvFaRQkLMYjjKZtm0UqR65sCgPcvAWGNapCHGSfuuoqECoYFDQ6Vi4kAZeFET/GseGhJvYtQE3vis43h1hUwEpqjgHEQpYYEG8zRjjlJsobUGhIQYEyJFFGbkeepJyCukKuV0sAIODdeHt0jTlXh2iVBqk7WdM/I8Ya2an0Rp1FYMIhl4uYrCeSTGggVzCshRgmYPKYIX2aqWRKhEQANSAyYCpgCkKB74SgDHjBwCVp4slysSAG4Z7cho6omcp4jKDadlweF6EgCRIjgCaSLQJPvUygWHm4BF133MpKBcBTcBQ2Nh3BPw1QpwYTwlYCHg9Mh4/gx8+OYFP//b3+MdEh5/eoX/9v884gGP+PL4HsfPnxA+PwPPT3h6+j0+fvoDnh8fUOsLUnxGICCWO3x19yP86R9/hdubjLdX73BzdcC6fEGeGI/HJ5zqAq6am8BkvMq3SEFCbhuLrIgVdFoR4oqYGIfpBsfnJyASSoj49a//GV9//Wt8/PiApy/PWF4KDvEKrVSsbRFlgYAWGp6Wl17FiiJ1b1BToW/K2lIWxJAkJDZErKXheDyh1oIYGaCK7ikNEUyhe9uHYVMUOAG4SB5KCsKKE/1cNvDOuoIBdBnL6YSnp2e8vDz3xN4SMh5RioYx1CZKHgckSj2c49JBe3mLJmuECEsVI6hRwJfjETdXhHxzjbWsQCMcrq4RQgKxgIeEgNZIKsw6ucskuRSbVfRKamzUBVx4A9xYoQQLaTZlIxB19oGi/X0T71XkVK4ChLKqqUhiiFlozjzPAojpta01lLUpu5WwKKPAsyY6GMajEIAUdhnnlFKErdAEXJI8nkG9wz7sUSuyN3SKP5N451OM4CBe6Kpsact3tTV00EGo0sa9RaFkcGmoOM+3twkXozFOzNxD6gy4G2kBWmcIm5ystSKG2NkmZixLJey0BUjaqNzXvczKvLB8PHsPsv34CpNRWddg6rlM5TIZ36rJu40xkl0S99baJofZ3sgffass82Rgbes6h8/PhiBrMsSRrF2KxqwS0gatyKcseGuXOFkSooWRlUX+jsJs5shSZEkNCLFLDVhHDzMDJNy9FWl/AMOqhQ5ATN6HApDzhGnKOk9rlyl2voXigYCQIiZjKnEDcRAgCsL6KK0hRDNw1bhQHaE17kwoQLYtm2MC9DGWUjqby+fYbFUMNhChKSOsg4AMUFNl3Axyzb9LJH3MHVgT3cwzrwAJ+xvpJhjraRV97ibg+mpGiAFFGYwmX2y9zfPc58myLFiWBZ8/f8a6rri6uhJwzsyxJCD8uq6ih9taXZduYDIiZmNwsiTWTnQAyorTckI5VeSWO8vTDHICsKxLl0c5T5iT5E89Ho9YyooVALM4OWIKo1KsjgETIyTCFLOyxRvWugJhQgwRCILMyXwTJ6SBqCoaBNBpTYDRaIowYQXhyIQaksj6JvvQRIyJG2IrQF3Rmjg/ZfzVqUOS9qOU1nNy+XyEHpARWbBdl7auhaktII5VMw/hhHkWx+softZUxrik8gTN4aqFSjQxOWlBJ3Kyo6g3PscJKTCOp1Nnh3nQPanDnHT/ur256UQHD6SVUsCVMc+TzGNldAHogJ8HBddScKoKZsQgtosCBDHoXsDCzOJiji2x8xAlB2FrFahlpEagCHAT4KYsmFpDqycUBqarWZO4VXU4CGh4qiec1iNymgGuyLOwkteydPAkaMgxSFLtiE2qGZgVNRjrnSXdQkTvl1IKyrKgrgTU1vvBhAM3cUi3EJDnuff1ouxZ0v21aHiv5UuDti9Nk4wVM9qyAlropJ5WrFWiKQzkra0gJcLVLNVGS2gIUaRYqw1rlfxz+f8l7j2bLUmSM70nRGaec66oW6KreroxmBmMwAJYEsByyS/k/6Dxj3JJM36ikWsGMy5Ag9jBYAYjWlSXriuOyswQ/OARkZF5z+0ZKGOWlV11MjOkh/vr7q/rBLI6RwyDZKEk7AAvWRLW6KJrRaUwTUMIDqUiIQpnJkbTpmJ2ghrmtZ/uS45zqY/hwKhCNxK1zp4zAlKARwIctIAhzhGGAaugNYZIgxt7rG2Tg0DOLqXAWgFPtRK6neADo/MQdaqEbktWTlQBj8PFHseItp6mNfhRyxzmIIxUsCJokgsjp1onKDJ44WSPk62PzdF/0j4fJ70oBJcc1IGoNcpW6a7JCaRsK0wEQXTfnIlG2utuPCIlBUWX0tqgYyQXHUBbejeitTgcgh/l70YRCDhCJvYBYkmXLvh3jMRU7Zq4wqQgiQwghQlYYHJ+mKJz1mBzSNzz0v66GELC/aOAurUOaG2KEMYXu6K+NFUGTPrdzOFtdDofYtHPsxNJkaqKhyAp5oBV0xks5+CY5Oak84ckB6JSTDzMc0d7TA4Bk/dBARArTlAURCPgbtBF94pEQqo43a2ae32ur39ykYh/0pX6c8qTXT7yO6CIy6v2PvxLrhrMWbZDKQGl/rnPg/vGJtRI6wMGatWWxlgUpCikh/sbY1okcsJMYJ3XGN2wOT/j0eMnXF1d0XQtPkb6fsQ0dt7OCuhWKEzbCrgVVFLU1RSroWBwIy7K9g9iUeFTBcrgpQplQZgTgOqTnyAw4kNKmY0Z3kv/lIYUbh61KKhi7qS3J6BBlMvkgSkpkkoEbQwYm4WBKRXGhHvBirGSw7dLnI8mYEW465ambVOAqwB3TWPRdtrIq80jtNGpuqqdgcTEiE3pKEoZIvJe03TCwxeiKM82eTo02FbTrla0Vqfqp5a2XdOuVx
hrU00DVaqpjqMnKPEmRTxWQ9uC1WJAhAbGCG0bWa9b1g00gPGeJnjalQELY6twGsIIaozoEYwLbNZGgEGlGLUiWtiY5Gj18nkL2HNFDB1KB0R/1wTfYqOnbQ1dq4gaRh9wcUCpgLJgrWNwwr9mgqbRRlIltHCn3DqH3wcOHwf6fc+HGLgZHDfvtnx8d8Mvf/UFP/vLv+Gqj/z0P7d8Ez5wp3cY5bB+YOMCa61ojUPrSGNbmiYS7I7gA62G17cHXn7xdwyHO55ePOYPPv89nj0+48mzK4bgcSqDUgodJVI0BlGYbBLqrbGgNMZ4RuMYvMM6AdZCCOz3B7bDyJs3b3n1+g2vX77mzZt3DMdRwL/srVOeqCIuSCJxFvIKMfB8DKgwAdJF2QmSSqUS95xWSqIztLQ7G8A+hBJpV3vf556wfNhO0U0ZZKjJ3KVf+8K5kw9hSVfy5CjqWmzNPaHT75byb2rLnHA6pvb5GPAhchxdAoaEyFjFqqJjOqRFHJ4+Y+q+L7/Pl3PSl9zn7BnM0ZI16JLBiJxON4u2XfSvTidYjkl91RESdduW0T+1wQRi/HonxlydZpzT83JqTYwZUMoAVCxgTg77z1Efue25Kmh+5ynPMVA4aTy+9KWuNPjQWShzIQAxzA3eKeoxLIDCXEhCDPmYDqkMHkzzcn+t5/+zFA8VSzRPVm+T7Vbu9d5BTCBXTv9KiqpzXqqYVjylD+kb9TjWc57/LVM68/iVqJQT6yu/0yRHXLJBJ0M4G+0pyhwqBTT1USfKgTzGxsyrpk48Wrqsm+kB01gL8X+KtkDGbRxjeU4SZHN5oZAowASok1NIEkgo53yO6NLJYIolfV1Zg4niXMjjUae31eu1NjAWg5nakWSeDwQnKXN5zsueizFFA4m8ESrbVBRKMRkO1RpY7gPvfYmoyuCNqlKLYeIXqmXG9fU1h8OhgAebzZoQA02wNF1L0xiarkspnBkIEvDQx8h2v+PQH1mv16zXa2zX4I3Gq4hNzqis040JaBSeuHkF3QxgGG1YtS3WNoyD57DvidGXAhhKRZwb6fs+rf2U5YA4Enwygp13aCVnmtj44rw1WvTDEAL98Zicw0GYP6I4+tACmIYYk4M2V/wUp5fVSpyVRuPdwH6USCtxmOTKxx6lGqTy5JSCXa/1nCKd11G9B5XOjjQKn7JNRRIyUJl/zjpokVVp/6sEOKt0tozeowhFZZVUZlP4B3Oku0T2i/NBFSMy0UAk2VHzk0YmeS2AvAAIOb185pBJekdubwbP08lIzCljyJxZKw5EoxKsmKqpyoDGFMWV4jqVAHqolNqGx0aNiZFGiTwQfmMtjji8BFRpqco6+ASgoQijw1VODQHKLVErXJSU3JDMJZXlQYxFFkGUTIlKphaQL8mEcRxn50ZJL7Sy9/t+KAUxAhV4oKQQAanYAWl9z2Rw5lDEoI0makPQAgoGN7IfB47HniF4epd4oTGo4HHR452nW60Lx6SLnjiOqDBKcpLRBZxTqGSjmeQ4yKCKjEcAbK4gqio7EFKmW3J0uaQHJoobyP2VsR1CAoMTuBd9qnYehTrGaIUp+F+OUoJISNmCAlaFKAUiQnKueR8LaK+iLtl3xSaTsGYh6Alid7j+QEz2ZwbTmRVCi+KQrICjDEDNgalpjcQiF5SMyXAUxw2N6Kgpyyt/cEjAUQ6sicURDqCnllTvKvo8kcE7xuAxFoyyEjWmhb/cGJWycPLBPNeDZYmHSg5kHaqm+siBVWauz8bKYV/rZ4vMD2Oaqt3zLISl/lpf9fmmK1kztWvSASdnc8WhnPqjEyCYI5az3lLau7CBxPEt2Usmr/VF+07p6rUuMX0Vrrms31Dtmty/7Cx+6PpXAegeMrqWLz4Fzi3vO/Wcfw0w7ne9vs1Y+l3BxIee8dD99XjN0lZ5qO/T3wWcyYdKrSDIpmrbjrPNOefnF6zXG9CKsR8Yx5GuKrIgcqzyJMSIaYykVCbvd4wCo2WiymHsBSBKwtcHj3NDitxIwJjKgcvCeUCIqaqsLgpxSH9VKpW4zqkvGbBJQlaqoYLSEoLdGEkXTWE+EjVjBFAMUVIZZTytwIJRouOMtZhGi6ep1IsXYxXVYFSDNg1+DGlc8gY16b9cJnFsWWtRNX9c2vCNyikR4o01psE0ElXmI6w2G4kKtIZUIgbTNqwaizKkqEONbUGZmA4Z8fJGDat1lBRUJRGpq9bSNcIN7RPw4xWEtUIZGI4DLYHLdsXawLCLeKU4GDhGSSWRVEJoMcQw0FpNbHQB6FQn6QTDKHPWNtAlue0RjjZUxOAxvacLisaoNG8Dyo9YIm1U6KPDH4/0+z3jbsfhdsvu5o793Y6tH3lrBm5vb3Ffvmf77pY3Q8/HwXFzveXmesvt3Y7D2/c8uXzKM/uM51eaT7/7hO998imPN5oXz8/pWoWOLTpaNA1YwxAO7MfAyzeOt++2fPlNzy/e/SPfvPsN3L5kfP4Zdvw+brPCN1bSzaJizEaenwA6hSLamEBfS1QjyoxoE3nz9gPWRlTb8nF34OXLb/j66294/+4j/bHHeSd4c1J4QiaszYpc+SdV8kyCi32YDjgRAelQUin9TCXlS8mehHmU7r3DvhjKcyU0K981eAGUarGHw+EeQFA/K4Mk/5Sr7lf2Si3lZiarrivBLiOSYgbmTsjcGej3AEhUv+shEG2eJjtPAZZ7TgOQs5B6JuVrCRig5qBbPbZLQC5fuR3Z8BdbSgy9EBze5/SsDFy48vmQFO2sXMb0npp/bAlyfRvIuPxdBuhy23Kfl4ZtfX8Gl2oQsn5fVniF1HoiHPc+5cRVa/gU0DdfrzVYHas2ztdEGYcw/3naW6Lo1mfF8vn1Psx/W47ZEsRbfnZpINT9yNGhRmcSbZJVlAyCnNatbJrnCVD3yWDKoEZeG8v1PsmGKQW2boOc25T5i1EMLYn88veiFadMAAFS8vd5DdZAUF5Pyz2T5zan99bgfK3IZ1qPmoC/3lvyZgFZrBGvlMs5FWpKhy4p4MloCkqh8tpBjFcf7oPMNUiY+1ZHd4YQJA22MUWOhSARiFrrRE0iqYkhenxwHI9HRjcwjD1nZ2es1+uUTdKVyDeJlI1pa8icjIOnDyPeRWJQCURLlSftPHIoR392qSiFRPrqUoWygNE5olXHAi6pmHgmjYxRHremadI6FSDX+1SR3Ek0VDZsrUnR10SIkoKZo7YzOCtRkzLvud05tVsAurSmjFSs7FYdmeOugGMhp7rFxMFa849NYPvSaVKvwRrUqYuZ5HNqcmzle+IsyjI7UwKhAHQzWeImA3XVrWjyeq5A6OzALsBZjMQUoSaFNKezWzHJ2Nl+qDnjqnVenw0q7bfaWZVTVbP+oSLF6a7VlCKNViUjJqqkC+XMmCiUHlppQu3QIgNpSX7GBGjkr2VO5oB4vmpub3Gex7kncYKe7tEgALPf1cB/joIuIHoqEFOfQTnVXtqY5riSpfl/KcbhpG0KSW21JgFeXhwtIcjYBe8hJEqbkFNAJUXVG8DYUpwhhCi84UoVMChE4asjeILKkFTEm
rZUqnQuwuAIqieYiLLCQZztP9kT2U6UTJnMC6xTFDDOpXToB8ARlezEqEpARb2GiVHsH59A7Jj3nSYDICH4tGciMUdGImfd/ZT4sjvS3s5fSXr4xEtY9nYKgMjzL89wkOSe9CvJG20pFXeLbhBLBFYupJefP31O2qFzRHBYgERMvJSk55LSjZNLbhpT7utmZf2mPTY5b6u1rihyXRVVOp+39/XR/PeHdOXcx3ovLfXnJWC3lKf3ZFF15tefr2XrKb1lCQ4u9a/y+xPyY6mvPwjeRRbvySrp9Pl/c4Cu7tDy9+mbWaPvGzHq5HNOPbN+xkNK/awNv6Xtp943V9Lnyt3J/j3w3PrrctHla7moltfgxgqA+5b3LJDZtPSxtmHVbVitzrGqYRwiAYfzkRgVbgSUeC6ynSeCScJ84zBK6GlQwmUSDdEqRAMSQalVFI6PkLxm3qEjtDaBU6UQQeIf0BlV1ig1hbyaJECNzvwVSSGrhiWqZBSnA9CoFI1BTF7dtorQUNg2YqxCYwnRErykdDVNi21bfOgFANEJQIwGpRq0bpNnMqcGVSH9dQRduy4es8YmoA2K8M08K1qZFLLfoBOY52Pk/PwS3UkFUGVS/6ymW0lxAdulQ86H5F1MHoMUCeHDkMZL0hNXTaRN5MN+GOnaBq8i0SoGNWL8gTPdcNl46B2rTQsaVhoOwDAAHloPrY6SotyAU4r9CE6LwGg1dB0ce1BO1oqsGk3IQdHRYjBst7dsr68Z9jvC8Yjbb+m3d4TDjrdfvsQdDhxvb7n7cM32wzX73Y5hP7DnyJvmlsNwYP2xh0NgbzVD0+I8OBcxRvHIOn747AX//Y+f8ul3H/PdT694ZhXa71ifHyVfd4j4Qy8gZrfCbBpG2/Lffu8R7/YjX7655K+ewy9+/nNuPnzg1197vO04f/EJzdmm8BwVxT0rhX6eoh5CwIVOvGCqwY+e9aZB+cjH61vevv/A23fv+Hh7KxV+U0Sq0RaipDOFBGppLUahSiejhKZLGLuvDjP59OShlMM/e9kmg/bUobJUCuv/tfJeH3DjOHI4HDgejwW8qpWeGsApDnf1sBxdHuSnDvclkBFjLMZibSzX0SW1wpDF+PJAXcrlkn5WgV46e4ErJaBu51zRm6KFJDJi3sdTht2ME+iEElW3r/b8FeOoGs8anHOJCF6riUrh1DjW78zRIMXAS46QDHjVhp8t0Tinz+5sLC4Vn/ycJbBb93fqY+ZCnI/Dcmy01iXSI7cjK5ua+Rwt1+tSN5kBbdksiPN3yh6LiQNSDC7nxIlVr5XM0zTth/l4LN9bGyyyNu4rnqeU2Poqc5KA/ckbLm32IVcbdol7zKY+Q6rMUxX2iQuAOxaDMvdh5lBctEnmruYOun9N61/yrbKXPcaJ+F2n6HpiTBwxsRjyeX3nFDwtYTIY0v1VWzM4kuXEXFbNFffUA+lfkiE5kjSTmed0apNSbosxkGVhjMTghbYhTpGo94DJat8si2UYo7GNTalwvjiIpHqfYtV1fPL0Cau24f2HD2zvtux32wKKdcmZkSNg8/qZ0vHlb7kvNzc3NE3DqutoU19rOZPlbo6gbRrhv83ZCrV8jlHoDtbrlay/XOXVi2Hfdd1UxTOlriosxmSwbWQYfNJ36ugIAcdzGnnm9KnPYQWMo0tAmM8FW8nRjEUeAG1KQxzHsVQYJzvG9HKeprmaRc6UaO+8PwXkcIviAjkSJf8vlVGjn61P6UhkDFLJV9d7rdpKzjmckUjeDFjmzSfcg1oq7zpf1leew6Jf5L1b7cuyNxZnpqzriY4oxhxZb8s6kXWbQGefPuc9vXPoao+YlAUTFOJYT2MtfGnzMV8a6T6BhzrNdZbRWQ9omoag/FSZPk8+8f7eSzZWDPdllFKnbdJTBn4GJjNVAkaXtHG5F0LQSUeozsmyBhYgAeJo9aNjdCMqRrrG0rUNQSt01JxfXqKPAzEecG4UzstgBBTTijD2RKtQrcE2DehANMLBaLTGe8QmC6BiSmlM+iZaYWIgOscwesLoMc7T+kC30lKQK7pEQSGpfDpHByqxBfBBKsCmZSmFBieZa+ozL50XPla8wCbjf7VNLRke8n2BzxAZo4h1MTgjIJ9RhsZo2lSx3lpL0BbnDimLarKylYpJpESEt65eD3lsBLyanem5JQns9D5gosi9vLRiosSICSBvuhX5eMzV1PMGj0roIIgBEyTS0qTIUaI829iGzKOuk5DTIIEwqejPVCsg6zJzvaEGiKc9Nz/fC6qQ9RGm4jTT/pgeLWt4Hhgwe166Z6lL5HfUe+7UVd+f76v3df272p5ZtmGpU9X3QE5+Pq1/5vbVbZy1O62hGtS814fT3SvXvwig+zaQank9BNLVz6kHZmlM/TNbyLfdvhzc5d9ORWCcWgS/7VlLYZ6v7MVdGkuz50zY27xtCwVfvqEcckopGtuyXp+xWZ/RNC3OS/pciEq4wUL+fDowyiEtbBfCwRKTpS2gRHASjaWQdISoxGPhgyc4h0qRRca2qMaiTYs2FqMVITlbModG5vHRWQFRtpF6u4UAACAASURBVCjdkqZiU9RYtS5AiiRUVZckNDqTJEuuvjINtvGoJtGPRovyGoWAhhLJYYXfS1sBDXWDio0UDtCGoAeMmgwlbTS5qhpAZxtRoK1O3mzhWBCSUSFhlyiEDNBJ9J4ytpQBz6H1whEG1qaotBWYtQB0MfONaIPWCFeggk3iPomkwEQkhVWpmDjljmBgBGwMXHVrLmlYodi7vaR72BVWgY2RrYV+dAy9xwVF2zWsGkWnAk0DLkZGF9kHOOwiF21k3MP1x8jN+yMf3r/j44eP3NxdcxwOvH3/mrvba/a3N4zHPb7vGbZ3uP2OMPa445FVIsMOSWnsuo6z9QV0AbU648ml59l5g+kjW6UZjGGIMLqeMDquVOBH3/sBf/S97/PZJxsulMO8fstx9567G0VcdzR+Q+gDjRnpHrW0V2vsqqFZf+Bxp+EzRc93cM2Wv/vpV3z11Z6br17zw9UZl2jWXVYoaqN5Itb30RHR+KjxURGVRER25y3DGNnd3fD1y1e8fv2Bjx9uuL2+Y3u3QyMVhCVFPVcaTIs8bfwYZL9leSQHcSwVxkBS+moj0SewPIPoNRBXH041UHJKftWfzQZc5jxapmZl5bREbqlMtDs/3JbnwFLeL8EsmAyhJQBUH6zZ4CmfK1+5Z2QsFZAiatVcCcnfZ+VaVUDR1E7K1/p7qoN5Kf+X50Ddl7oddVsmAPA+r+kpcCy3IyZC/amP8t97N1NirK2jvSalMkfaLdtet2t5bpX15n2Sf1OKbamWtRj/h8YmNUfOp+odeS1nDrRUc0+85KkydB1xdUr/yGvmIbBQPpvHrWpXKsKUgYHlvnnIkFte9To6dd9yfOufH1pT5fNqAkRrBVoeJVxeSpmS2iyVKjXTq6Y0rzxPOX09K8XLinb3gFBlZu1WagKB671U3pUOM3mGGECxVEszAoYqSRXUyQiMMSZjREC6iE+5vCKvM3/jch5qQK5eE8s1Ep3HMUXR5vWagY4c
qTkz+s0Ufaq0EgM1Tp78pfFQgyb188dxIOLZbNasuxXaKMaQUqu1pM5qFTk7W9M1lsZqPljDbrdlOO5x40C/3rDqVqzX65K+mQEMpRReJ+L7qHDBEXzARccxRNw40iYi/KZpaNu2yPi8UMZxlAgNM1UOzO0PPmJLRIn0ZxwcEv4jn2+NFePUSzVXUoqVVpquWxHwhCBj40Y/rZsosm0cBQwJWosMiBKNhtI4nw2zydEViaJTauEW7MdRAF8tnH+5yEIICgg0rUWpyVFbr4/87GmPSoRjXtNZTsznNJR9kwG5kAoeDEMu0pb2FdNzYzqHJridojeGEBj8MOP2k/shJLkrKkY2mtOuUqTxTnp/2ne1JRnvgdYTmJfXbgbzT+2n1tpUtEI4UT1VQQslAFBOCY3A6CQtNb+HGDEqzlMWs0yu5iJWgFd9Oe8zZDHTEzIljfcT95WAdDMhINGGJ2REntMcAZPnM+sLIOMtPGQF+hHAV02OP+HqBeWm8cv7SGNRSBtd36fqy3KPD5HRjTSJsiAk/dAoRYw+FXJQybE8MowqxVWIE0MnugIfJL3VZEckBmWlCJ3SiuhGGVsn0Xij90QM2oySCeURQE97tJIAiOwT0sTEEenLOrNaE2NKi0Y41URnTXZTVImPLaJVQDUpGKQAXKJX5fPEmAKhpDN5AshIfS3F1JIOIeu8csLlPaYm/rSiw6U1yHxbCPiixVKerYnMKZ64caXIR5QIRiV4hNLSx7z/VdL3c9Sh/C4V7dFpdaqUvRYznYRGR4k+rv+uCFJAzxjhvVVqVpsi90Mx12l8mKJYcx9Qqkqdn+4X56NgF865+smz9yglsuYkVrG4HtKXpmjicHL/1eBbfX8+54B7vKd1BOypdpzSFZfPz+2r+7+0DSZ5JJQND8mRb8O4/k1TXKfNcAJ4qn6uO/YvA+ROXL8Focyfmb33gXtOtW3Z75mSzfxQqO85NRbL31kzJ2HM1zIVo4L+0+5TSaEQ5aprRTHzRWAJgaRIhqxMi3jWRXGW+4XENZHwAgpJJQJJ3RNDTlL+MlmrMUbST42R6DkjBJpaKajKsZvMO6eSEFUCLk2rx6Gx6T7xiGeZqSvlwig9eTtCJvkVZV0HBcqK5y7PS4wE5yS6TktqrNYNSjVAC9GiUViVxL6SSEGrFdZM83DWCmAmAIm0HRBBryOq5qjRiV/BCl+eB4yR9OHox1LZalDgBkPvFDqAahVtS3q+pKGOvXgYb4e0+d1IGI9oIl2T+F9UhPHA+eUZq1bTj54+9HwYHFex4fnFJYcQSTU3OXrwhyPu6DEjGDRnjWH/ascXHz/y+sM1bz/c8P7DLdfXdxzuer75zUsO2wP94YgfPMMwMo4Dg3N4PMM4Ah5rDF1jsEGUrYvNJY8/veCz73zK559/hxcvntO0LWMMiUfEM/g7YrOlcQf45pZ3v3nDm+2Oo1b0wXHc7bh59RXdumHTGmwYiPsAeqQNPavViubykubiEUptiP0RFW+hc8R45LB7DxbUyrLpHnF5Bc+/85g3d45vPnzkm+stl+9vZHDOpoPCWEWu3Dc7eBCy7+MoKQA+ePbba3wYePvhI1+8esXrN+84HEYhs03QstgUGdBIB41O6Ux5J2TF94SxPweuKnAkSgXBWkblw6xWLpfypz74sqI/DAN939P3fbkn8/TUqVkZqJsABFNE0qmDORsfGYycrkkuzRWz00BRDc7Nny+PUov78vOKIVUin+6DdstrCZ6dquJVp+Cdeta3yf+HwJylJxLmEXV1RcOs5McFoHKqb3WflVKlSm2MQmLtvS8pbzW4UQM/Zc2l0Vx6lfO9xbhfjM2yv0uFLgNF+Wv9txrwq8cg79caSF6OZZ6/U2dzAcs5sfaSMZPXex25mSPMvPcowNrpb/VYnVoX9RoNQbhwgs/KXW67GBSZ1ydHGepUZTGPl1aTMa51XuMSvW6NQTUid9zokFoKEsGSozskAkjk8SkFO49xNtIh3tsDEVJkVEE5k14x7fdpL+TIxCynEoiqdJly0YfmgG4muCcZHQnFTUWn7ivaNWCb982pOZB2BeE5qiJHJXU24t20lqn2RtknyTimWosxxpLauJzv+v0ZxPRu5LjfpeyFVBAkF05JbruIgJPWGh5dXtK1DdfXHR8+fGC3P9API33Xl33SdV0Z86aRolf5bzmyBAR4O+yP9CrSNJZutaJLVXS1mqKvM/AyIMZc13UJ0GvRrWh4ck7I+ui6lohEcOZxkDOERC6fI/Na2sai1muOvURsj6PsBaNt4VcbxzG1OTlR0rwVAMTk6D7JOsiGvEQgWqKKhSpBK0nvjQ0M/cg4piqW6Yyo5UteS97XaapU1X7FiDYpq0NkgsM5kRkZzJG1YfB+ikwscl1LQYN6n4SyV9KeSHK+pKFX8swHjx/9rDCKrtYYCABd90f0eFX6UYNu02aar9VYxqKSFTFFn2g92YBK3QMGRpcisKIhAC7OOaIElJJ5RU1nHSmFXes5Kb2Mc8BrDyHSNqtC3J/bKPLf35vLIqaY61RzmTDJkXzO1uNZ6BsgceW6RBFE2V81iLBarYp+laPAVbLLAlL0QArlrFAhkphLcKmf12/fMDiH80GylWxDDAEXJOtGCg1IoTijIq01tLYhdg2NNozOCUcjEkQQQComI+faen0m/NfawrFncCPDsZc13Pc8evR4ipQ3JeY8qVRB7Jsg1YBzdfdMS6F8EJ7mBGTJ2AkAGxPILin6M9T03hk01x8zVUPiAY5G0vpVKOdy5tDNeztGJ5/PYZs6A3RTIbgClC/U2FzFV/brFDWfIwCNTSmuuShmpBRbCTFOJBhZD8trTcJMCHEkx/JDpm4hyf50rsWcBSf6QGMVqmlZNS23ai/rOZ2vE6qd7OgH9JB8Fce7y7Z9amycAK/5/omzZ8yqxVfPzHsp339KP6+fs9Tb7+uIcxwm96vOiFj28RS2sryW2FVtd2Q97bRdke6vonLn74llfL7t+jdNcQ2zjXX/nuV9y4E8NbD5+t3AvGyAPfTXhVKVn02cbfn8mWUflp73ZfimRt1bePXizArKcqHkz7oquqV+1/2+J9Oo1uRRKYrA0fcHXNCp2mi6N6ScdaKQZMZpPIgqCSt5pk5KoYBnkaiS1ymVplZRiGu1lgVn60otVQW+LOBSmn+JH1UgXnDlk9dPPqCCkpTQJoF0Ob1wHIlegxIiVmWlcmMYQ+I3sBibjFNDStPISocQeAe88LsYI40JjshADAPBy+dWXUThhLZHa4iWGCaF3h8+ErRUa3I6KcxpDkNUGLMG2wASWYVSaGNBG3wAZTQhjRXa4ELABS/exO6C1z3E7oL1ekXTKoi5AtOAJWCUZ91ZNm1DZ8BqxbqDro00KvDINJi2IQwK7SO73ZHf/MMvWO0G/sMf/zFPv/uMFqG58EpxFzTueKTfjoR+JGw/8ov/+tf8xV/+F372q1/y9uaGu+GIC8K5JkTzoni0qxWrVcvm3GBWllW35tPH3wMHbhwERPSerrE8f/GMT7/zgj//7/6M7//4Bzx/dsmg4Ju7G16
WFcJYEogWjt80Dmp9jSuUqVUyDLPrWNjQdUM9PKa8hsogcXdc4/8G/NHqqzDgPowSMOhu4NVsLu7abddylFfArnf0V7rix7c2eaEnVgzvts3CRpyMxCGAmj0YhgAcL0JhLG/aZzsJP3YLVPFj8Yq4xZGutRZ+/q5wF0pHukwjsuwQxwMLmMkTy2vdh8B58YG+uWYOz+kXy/9538/NnHTuK7VAQwUl/vypS1tWSGWucA+rNRDEwTNNAYmUljCD1P3a3J/eu3lbjunEOys/3adz7z/ucAq88BXeZgPQLj9oP7a0DcIwDMf/aRk0Kguwfr/9sHoY/OvWGRPLiOR9f0cPLsjms6Z+K/VUFmXbccBoNqky5w7hr347W/3vuxMX5ZvyAhirJqqFQt0WsBrF1PiQjHaUY4nfB0TOCat6CFG0cvvMys5Q82YRshYpKOtQ6F65pxn7lscXJkM5mTCnAiqNETlh4U+Atk7AY1Yhr7B4amZma03nVTu1oGAvAOAPDN3/6dPOMgmUgiRkjWarthboQktEEgJLQQUCkIM0aZdiFOQIgohdVRmxCDlCgcz3+N0/OpGyARnNUNKEY8n06Y5nkjQCnO5ow0JYRDAk0RU5oxHWbM0xnTfMSkOiGBnhEi4TgRDjNhPgCz6sGlANSVQVBtthSEBaGEphCAvAKhQTTkSIkgbkpPHcCVvSYvwOXyisvLK27LBa8fP+H19RUf33/Cu3fv8PHjC15eXnC9XFByRb5JM4zWCkyfoOYFzEUyEjq3mjLkxIFlVCfEbBu2BU2sm6CQVHwQug3Q0cxYYzDo+toZAZ5/2bm8JhV4azdsXaUkAN3xeJQyhVY3QCARgeLYFAD0wOzhnAd0M5BfmKWDUiC6W/9jUxm2ZXNcklJa2aD6QTdBmj/3I7F8v2nuATudFff6S+6zVmKyPYZmUps5MtbKfZuJ7KCOdcR6kGjpzwwVBh8HLfVpPMbCviMMi6BMKu7BUCkFsSWn37k9+ud3ws87nLVWYW01lYtggPVZ2vyV8k91hpTttAcZ7Ll50MHOYUL7FrSQBaYG5gTCsiySmHNsNjveWutmXu0BSMmOlx7A+me7GVOiTWmRvS9PwgWVbpz2e5fNQd8tVZI6hMYFQ0NFbOy4H9uH2l0mPzzQFzNQvLkx9s9NSkUTfGMJGZ9R0tWs3Nrtf4GkEUi3VRDORuGC6kTbN2ND93P0IRvKzYvOdgJtrt+0w6S8vEnpOTVQ0uuCYx8box7uGZCBpZKhtnOwIY86ZsFd0+Ye7HoNmTN/1zm83qZaObZnoJQ6pAz6fNnNt72dwe46Hs0rr1H0yPZ6m+j3BDlUkwCJxea3oPdK0JJnqQQoxvzUBEItVaQ/UkQpuWsw7hOttjYedZr2XcT3z98fwwP4fv/iPh4SUMnfRS+XZWj73uTnpiVBhEUqDHCQAG49wRSGThQAtFKQq5XHjsY09jPYTK2z76KW9i3LsmHkHA4HnM9nPD094Xq9YVkLlmXB7XYTpsxVNOxs/G7XG9IsAbAFwSkl1BhQiDDHoBIcI1nMIJhGXEqS6GaVGci56l4OEFccpoAQR9A6pQPmMLSPrfzZs+jkOTBSAL44HqBKrCjcOjNIWGgCFsWUcI4TTqdTZ+hJN2RJTBvrakcmRYwRX375JUqteL1cUWtDqQJ+5lbATapUhAGYxQ+zOaarWACWLbNJqqyEjdIbibn52dxc9Aw2KDOXQtiw9vWEAszB2XwQoHY1Ol/p+fkJ3373LUL4gNfXW6+iWNe1M4OMMd61bGnMueEL+hJO3UOjJtKSxA4M1TZVf7LHWyzMqYMD9L3cigfE/Lod12HnDzD9TYJVdMkzAaBllcAQ7w/onS/Ikpzb+NX/mMalaeGZL1dKATSJFNgSL7b2LQkXui8s14/xLxtLkzsoub/PGCOenp4689PvSWKXQjcaHmDsYNPOF9jG6EpasaJ8ruDKynRr5hpp0mUWdiJM0w5YlwVYqlb+kMoFiH0ym5ei2QKdt7p/WYzg73fvA9nvPu6w19Z34A56rzkD5LrQur3NvzpGg5G0Nfto42z7tiSct/veiM+lCmiO1JudND1nzlmSanr6/uwQ3fNjVK7b61JmnJWFpnQAo2riQbTppYQWaKgoa+3Ji33C91H8Nfbg0P214Wbcj/Em3hLHZjMPN8fH+GyX/SH77GDE278sJ9XPyT7a3HNn5t6F/HOv3wzQ3V2opwbuGBgsV7wxyv71yLm03/1g2XkeBcN+cX7u7597kPYdv9D9Z73D+7kF4K/Vn2O/uB4BbvuJ8ugcALSryzCK+3uT0gASMfLATmvKqLcsrAt+PAbTNMmmp4L8Yl8U8abaJx0YWnJCQCDR2AlQPcIGogqqQ4A6UsKUDqJhh4rEbTOmre7GltpmrJgZxAFTSAgsNPPWijpCNCa8NWRQenl1QQNRVF0fEgaNljOhZ7EHg4YCFMy0QFw3BSjYx6Y3oOlvBejmp687K6SybeBRWG5EaLmgUZJjEyGGJOWgSVhwUdluBrbGKM0U5jShxgPK6QtMz19qCQbwdD5Kd68QMUXg6ekJ0xQxTxGngwB10yRlxjFWPL2JoNTEGJoDo3gLAShVGMghCTA3JWDSvwUAOGiDDYz3SAGrBiDNfXqAIdqHuQBlBUoGfvj5XXdKbrcLXl9e8OnTe3x8/wGX6wvW6w3LcsXl5YLL5YrlsrhyVGBdrTxAmlO0llHyCmbJarTW0Lh0wMYYMo2b6hCa9sAOYGcB6Hq5TxsBerclBJlvDqAba3V7TBPcNYfHjhUDaamDA00gWXFzEkWHUSw3YWzeIQRhO04Tci6bLNidnSC37sn+G0hOgLgyAyqiSrssFlEABysxd8GXBWDuHN62eYAHwAbk8FnjR8Hw5vp392H32iAb5nBudB3qptiKlusn9HIJb9OTslTsGWzEhKOscKO8Wzcl26T7s1KNKxuDEAJKrV2QHAC46jx02nWAlLgZZ97uyQcB+2foX9ykMK5nUwGwzb/GCpzId3qwS4SsjFFSvTDQEP616zAHY1Zw39hu9r6VU6w5I2uG9s5pVC04uc/agTxv58027p1QOw8w2K7GqLP3RdNtBOZ79qfNazu2BRkmkC0OaUSpVl6BDs7JEi44HIw9iO7E+flnbBcLpMjKq8k6UwckZUyIsLzopMUQ1ZkfAUojUilKzQC7MaBA0oEcAEK/EAko3Hj69eGdRL/W+vxiRm1Vyh2npHZJNQD1ea/LKppyUZJ5tTZwkZqoVhuQCNOU3PFFKB5kbDXTZgSEQRW0PDT28SslK1hhNmgbIA4fcvg7pGFVawwO9uzJreFtub5IQkx9zW/n29b27IHmfQOPzqQCNnID+zVq71kAv0nuyCfGfZEBV6aBy5DehsJ8Kq3IjhrEPwkQ3cWkZfMGBICFlWOshREUYeh/uig5EBCmabNu9z6pDxg386cxmMe6J+GE9WPX3mn83u+1IQghIKRDf98SDtSkfNmUBAQgH2znFKWrudkF/32KAY2A1MTWH44ior8p35UD4XA64vl57jahd4F1Zb6lVlxfL7i8
vGKapp40SymhhICq4EiMjBQnoDSAMgDpaih7H2GOE5rqenFWti0YEp/anqjVDSlK6W2MmOeEnEUfs5Yq2q9VS9yYRYoGWyA1pISkDeEM0DfGM6BVCZcLrtebVEm0BlIbZOWagJSX/fP3/yIAwJJxuy2AHr9VRi1FqyNGMGvgmq2xtRTRZU1JAL4mup2JVGIjMPKakfMKhjQc8pq2HgDfv7inKgBisUExiCRLC8K0XW6LAFE0YU4J4XjE1998g7/5m7/F0/kLfP/9D2h1q/FqwvW2z/k1YDah+3Dq/wyvT2e9ARNqB6syCqO+XyzGOEwbew2MEuDWWk+UeeZaX4cAQJqkKw1SI7ytUDAQKgb17WHNBMQ3iiH0kuLuC7rnZ8eyvdr7TyAgYup+S86izVlyBVSPbZ5n9OhR9zuYC0UBU4z9GdrY9nJaIhyPx+Hv9UR662N0OJzuYuxHbMQ+ZmyNsUZMK/NWE+AQIoW4tqItJw8OMFtWa0WpypBrrAxAbc5X5XMxiu8eg2k0CoMccLGs2w/2ttfG1NvNPrf8fIRWLjpyi2eDCrsRwlhsTUB88+UQhaRCvHm2fT8DMEpz0f08m5elZKQQ0Kgpk83ibk3AUoVJhMDmkyXDIfGTRY4EmyKqhRhEI49BYJZSUGZSFqHhDltWqwfV9niBH+P9WMrzMv287Rh7Ri0zi4SJO5/Jbtm8BhEoRSSS75Wybp4xsCWBhKa7pvp1QX+364i7mGH/+k0A3fYCsPndFnJ/Av5mN8eQv9+ZYRtw/7m+sn7L1T1+7Z2qR/fwue90A7ATAd4HKfv3Hh3nDtzcOUf7SXYHdLrPuqP07MR+8uqnZQJqlta0iDbn+RXgs7uYtQKuAx/YAkaWzDCASiROc2lokioW1luaNShUxFz1n5BGNqR1fQf/Grp/CUIlba2hNJ2qmqWFCohSCAjSs6Znd8UABKxQloiKXEbEcd/KVkOEZL2iMOGkdEEW5rIaMBJF/wOQe8M/AwC++d1f45AOCCn1DVo2OwUGQhCRcd38UkrDAYwRdV2603E+HHE6HHHQTC7NT2hffof05dc9e30+EZ6eCYcEHGYgRQXYoKWk5OI7LbMIQbJ+ksX18xA4jwQVmHWPYu4FfmcMXmbVH25AzhXrUnC9Lsi54HbNuLxc8Pp6xevLBS+vr1iuN7y+vgCtoLWKnFesyxXX6yuulwuW26WX7jQVl+bKvSStcMOySMkFS4tKPc4C5oaYnIlQgM47AsIw4n79PoiyIHCszy2zU+wRgyh1cG4LquwyXhileAbQATLna4eNN6tMMs7RmA2WydqW8VlDjb4uu+1xR+oGE7oezCaNLtHjPiVg3duthzYgKrC1f99t4n5cx7zaBt/3thG9xHwPUm2O0UibdGw3Y3FKRQtjMK1ES2hswqyAzQyGit52sHaMzbiHtgtGtwGrHXNkLqXs0zfVYA2eO5jF2JRS753Q/T3ZuSw4EMBfNCv63/VZSqWAlLhZ6VHXknEdT60DrL9GO9++vNTfo9xX2FyTfc9sq4HLzIwm6ed+HPv84TD38/h9zo6z0Vvc7YOefe730f3f/XUTCQtwXVccj0eABti2Xa/Ur9EHRTIfPAghATMFkiSLNmyw83NtWEqR/QjjXBL4zV0TS5w7XWs82AP9fkgcw6AgqxXiVKqCudNgGfr5smeR+bEFC6NPGq0EhKYBvj5f67w4AB0pPZ00Ux67Mz8CeqKtvdnv3d4W7AE4CdrEyW1uvMVPcQipvmR+NNRSkeawTQK0HYMxBJwOB5y0gcvlchFNwsb9s/vkho2TXePehtUq3cL3Gfs9SPooODD7KzIBpsiEUQJXxxgRRM9X9BBFqw8hIoWIOSWwAo97ps8d4/MBgC3dUrc2wCeQvE3yz28EMmKDQU3BEjuOjRc7G+yTIA5ktoZK+t93gaKOQQxB9JP03hqkjNmPP7PoPtl1JtXw9c07AEmSRtX0BRNOpxOen5974G96mgY6fPr0CR8/fcLlcungVgyi5Rx1jQnD7iABMcVeKSENaSZMaZbzEUEYTkAIDG5ZmcSmvySNzEgTtaZdKlWfIhEzz5JMjlSRiGE1LNQdvG31iehdDnvg10YHmKV0pB8rKKvq48eP3QeRL0WABExLRKBJmMgpiUh80ePZmE/ThMM8I82zAiBbv2OeEq6XC14/vXS5CIpB5/x2PQUaAIwBj2yxDCKm3s16rOPZAD69ptAa3v3yDn/+859xu666F6a+dxtIZyCRMen265tIUCZjzKNbZcC0rAQsjX292/MY/phvUiPn6M9bn5GBhfa6Y4sDkFJxiPPUnATThqkdYN4ai3CyfFzZYo+qFWzf7OCb2gkbkxgirOpk79f163sQZ4/3uINW+2dtY2G22RLWtrZvtxuWZQHROhLZ7toNcO0No2gkIkcyi8EFmljQfUypvKRgi/mXmgHp9yB+3oRWlj5WktSyeVPBfOhJwZHI2ZJNUrgfd/+vlwJ55A8/AiDtd2ka4Rhq4NGYjST+9/6Vt+99LzdyAekRGvXGRbUyjnPqx7HzWpIvaPK7VWkq1lrrwK3/7Fg5VqmmzT9Ym8Lt/WEMTCmFLUT1iIkJbFlv5rfUik0yQPY6P1+35Ci5BvS/hxC63yISXK0nK+y4khgf12eMyn5PhiGBupY0O8BwVDU8fv0mgC6SGTCMCe1eG3DtMy/u398FhA+OReqw2abvFz2Au4n+a6+9QfHfeeT4+wDKf2dvgPbv7x+0/bsfK/96xKJ4eJ/wBazOEaIGRkWtGVIjr45okAArhfSrzBsDqqSTpjJrLICPEaUBAHeAz7q+sTdk+2PSeL8GcebkJZkv75y1Ctx3MLHSrQZwgQ8CxBlXLakgQqDBAlGIUeFAct8xoEWWDrIUMBEhISEi9LGc0kEclUhAFECtaVcDJrhM2yRZetJv//R/AQD+8Ic/YJ6OmMKE1nwJSUKjADrOoBS7HkxKAU+nM86nA6YQ8d2332C9SIOD5/MJp+MBocl443RE/eYZb35POJ0ZpajF0r0kCW4KtbGYSH4SBmAnxX3Sic8cs+5k6GNpLMBba0BWY5vLirYW/PDpBVwq1iVr0JNRc8V6K7jdFtxuK/KqLd2XosCdOMClZnx6+QWtZYClI09rDaWuKOuC2gpul6usAXUoCAIA1mrsjJs0BSCWGlqu4CoObuBRUihMo4bCpv0mNxZ1EwZG9s6CAr95EXhju5pm+mPkDUA01vUWVDCZNMsOdhagc4o8oNDncl8H1k1rVxZPj7NFm8BXjibzIBobaAvGbgO4e3sWtGvfo8/v7aAPau2eP2cPQ0i7DdUH+Dtn2F+TrVAi9au2zDriKBqU5lSAEIJo4RiLQjLjoesAPg6ytwE7WUDtBq+IUKUwilic/UOakTRg9gDWppy4W0nc7QF+jPxYm6M35shgx5kDyrovGmMQaps7Ow+4O69/LvsstReN91n8dV2EUYXtnPNjM4C7gGkK3dn2e6h/bt4JtXEzJ2dfososjCvTVIK7J/uO7HUS2NXW0HKWUsoYwZBSQYYvlZJubd6Z9+eDPTn
VU2ta8hkoCOMpkoC9XHu3PmFSVsyzyAukmGT+BkJsOp4GRMkDUMbufUlysHsC94YcIT62GzYW+7k11giL7qk2YeAgmV/TSzUtM3O0ffl0f76tSemhscP7WjbQS9hAnbFhz61KOodCEFYpS+AgbWyNWbgFW4loMKEhtowpohVhUNdqgBJkG+xBGJDzIlKw4aTAgyR1AllmfmtDPRvR1patPWNsjVK0rRadXa+tE2PAdFtugLGOqXUyba3qBifgPViCRQFFtVEBAc26gOqxAt3rZm32Huff+eDUSqBszRmIL2t76Gr6sfDHBAyW3ZYVAdAkpyRYrfxuq9csDcDQGEsuXVMx7dZ4U3ZgVP+tQH0E6+TNrFUNhn4IK6uo3asuIdaDpSCJiznOSGlCvgnDPmiZ1pyEJecDztPphKenp14GayV3ax2NfpI2pxFQzTShBFDIJWGeD51tKmsYwnRu6EGhaSLVJgltQkMzYEw7xQSSKoppmpGCSIyAx74g7J6CUq5qC+OmbLI3UgGQUsQJVkLF7gdIWqr5xRdfIOfSgbCcay9bleqMCVOUeIAoIrWgusUCNFG0rs4Nh8MR0UAWZgQS/aw0RxyOM67XqzAY14xmyVPLD7TWE8Oi3QZtrmLzU2NQyWeI5nAUYAeBdL8qWPOKP//pe2EGMWGez0hx6qDTPM9bBqDrAO6ToyEM2QECoIuyD6Wt9dAkrkEc/g0RIZlgN7xNlJdPWvkSN7NBgMXAMH7E8M/IXFphg0ln0a1tAKt0D2337D3Q5r/jwQLzBQBgyWufT2YbfdLL65vKeYadHD533fgx/l+fePC+kOzvsY9p3yNdLOuTC35PN2AIBvRggKYdxFQbTOMtP4jS/KCDoU5/0/lRpayQyi7v/z6WPPClzJ7ws2Es7l77v2/AhfeFfwAAIABJREFU0GDrNW+eLSD+TzLdtjLexybZrTFDCGBUjSvv95WoFQKBw+baOvDVlKQDAcntc97XCxjxplyrVveRygOo29EZ2sAdlcueqU9s7jEbP48MGJZ7uT+W/nYPQLfBKJX9RxrZsPoslhQHl0H2cYD8SKqzNgRz81tuDBFB9vkHz3z/+m1dXDEmiweV9id4tPD9Zx+BbY/Os389Aq38+5+70ftA9vE176/fHvgU02cXzzACW8O3P+avvfz1/aUgbvM3O2dtymrKaKkhBGGORJ0ojQuIJzdO26NYQGXn885eUPYKKI6FEQZgqEzfzmJqfSMzBJzAmNCYTBMWkaJkJvUcaTqgaymwgUj6Qw2NFw1SlYKt+j4IQY8v4Jl0sSXovo6YJqSJwFQAbtppBkgsvLLADv3WDrSIATWSdukTmDCmI0IQwVVxKEd5CQCc5gnzHBGIpZSHM1oRx7WB0aowkdYqXf8Awu14xOvhhEQRP/zzH/HLzz9juVzF4QuS/S9rBo4T/v5/+7f4n/7d/4K/+zf/gOfnhPkgwU0gYIL82IgRAO2W3QFdgRuAdWXkXLAsGctasN4W5FJQckEuGSVn1Co/pRbksqKuFbdPC2quuF0XvL6+oqwZrVnXRNkY8lqRb4tor/Qgp6LWFdNMaJxR89pbWOe8oOQVtVYcknRmLFrKakCdGDkznrnvrVwrcrUNSWpkGOKQMA8olyHBUdMOQ4BnbTYNGD0L4x5Ub9xAmsVt3HZ2YseC6pvClo1kHY8CbQG6vnmDNtfGGE6H/9yWAbd9iR0ideJs80cP2Gxtm/Owt2cGmHkXpv8tBmnQyduyinHeraPhs1Uytp+hb+8cMq+T6f+1jkr++GbfTBQ+51VKLKNoAO3BnD0IMI6Pfn1bGxvgm0TM8wwKAcu6dMfejumF1/05/H0+upbtM9nqZHVbHUSTitSxNcYZMyNgtJs3UGXjoACb6/Ln9XPUO9r7a1nXLCwTd70+U+27MEqQne6u065hDxDCXZ/vymrsDysf3u99fm748k4AXWzaBwK11b4WbJ5boGbgpDm42yBje76xf4ruU+tdhE3HaJTYEgiramAGLaFizbyiWQORhqpBi1yn6FAlErC1KUOGK3e5hUf+gQcZ7+dPQEKUpjwK/nfntqmzm4JKP2AAc4wOkjRn22x+y/xQ2+vmtH8WfRyJuq5sXxvmJGBcb593aGBjopM0f6iIyKrDthGudnN3XcUGGAi1LIsyV4ZA/mAOUP/Xrt+Ey/04Ji3Z82vDz+k9YOf/PmyO6Na2UgTgJ/Gn9An2plaDYaM2vsmek0tBJGxs2v6ck5Mx2LDINPtfat6wVPy1W4Ds55S/r8aMUvfPSctzmrP7IXQdUFnfw7cKXHvDB3t2pZT+jOYkgFR0vqfYMFIgewAA/rkJM2qU0BvIUlrtLKnz6QkHBWRut1vfQ7y+ZIwRX335Jb766qsO2Ly8vOD19RW36xXLckNtBbVl1CWDbzIeUzp0eQB7DqZhJ4DjaG5g8ylGJ0cQPAtZQDzxmxi1rii5YpoCDhONzrcG0Kn0gLy3tc0292ytKMGs+yyWgErRBaIyY/TZ3BTwY0xRWGsEC7aFYUIx4BCkKcfr9YLXywUURM9u5tNI/AQG14rTWZIXJzJgo6AVSTalIMhWc/aLRsiLkqtUfzi/qLWm+miMTMA0yxo4KFgXUwLAmKaDAMLYjovtUQbI+71kxD9WYmgM4tGxEYQO3g29RgMFBoAvftkWUNjYO+YOFndmZ/fZ5DzWCTlGaxKx1Q+d4qTH3cbDAVK9E6Je/W7ftc8x84ad3Nd+a7p/jn3b+zuEAp9k3RNB5D6FlGAkDfvuuq59D993kTUdSWmcFlBK2yQTiaivuxijsuy2chd2rqAVUR18AoG57sCb7fM2OyqJwdJtpCW0pin2sfb7r13Pxs9v43l7+7o/n7287bVx9RqNdpwYIyatTvDz2OaPJTVTkERF2NvWzY8k0BpqdwGIpPQ36rNJKWk31TDYZOYHt+0eHrGd7yYPY4kymWeDwUe/UuLJPDQ5/ZjY2vHxzf4zMheq7lcjwUbkx3wb9dzBNr1J14gh3dXpvdgaoXFP9rxV9qMzoJmHHqjzlX7t9ZsBOh/o7IONR0ywh06jm6j73/fOsTdom+Bj5zTb74+O5z/3KFixz+9Bxw0deBe2/qXr3Dutv/YAPBX80TgBwoJiJuR166TJwpQ28ikyQq3gkJGz0YUb5umEWlaRQY9x48y1Jv2QuSnjKEAcomYLAwiT1K8b1BPYnGzSzlLqlDZZcFNMgzFGAhExA5ikhGNKBwHUNAh48/QM04VqXFFWcQ6mOeJ4PoPpjLUuoJhwPB7BTYXLgwjsg0TXhRtpSZiM/RQDjnPSrk9SJhmqGOjcBOUnBniecMurAARzwi0X5FZB04TWJiw3QgwniEhuQckL1nwBTvIM/uP/+X8gxIYUANFHW1DzgtYKUohouYAa47ZKJ1tKE2oD1tzAlPDp5Salo41QMiGXhkaE0/EJT88n/OP3f8Ly0wccLg3rt18DIWA+icHNdcV1vep8YQQDUqzbEAGhFLSSUUsR5tua8Xq94fXTC16vF3WSBOi93aS8o7FmaNeK22vVttmu9K5WcKkC/p
ohLL5UTTbAxg23taFwRsvi4FpgBzAoAksR0EO8PjHGEpwHzPMENAFYW2toaAgk3XRkkyjOoQjdo2HWZiMMRGwp332NMW+dDxPA9fYGEuADYoq34IwApQaS+AB0k33RQIV4m0ElDbpyyT1wi8o0WRbpPnk4HHA6nrDmdQAW7josMNnaIujfdCNT+rUHKAhbjS0AynpFD8at0xDFbfZur8fkM8G2GW/trzAp/DWmlBCSsbTEoSDmzbHMJlaWDoLGuii5ouSKGLRzIwtLj+CDU9n41rVod9Yt+0X2Crn+UqyxTdzc3yZIBWO9XUE0OpCZ9ondt+9k21pDyQV1NxZ+HG0e2TOwcYsx4nA4yFjaenLzJrB0aPSOGrOAOBvHO4Re8muOvx3f5qd3Ju3ePRsSNgd6sDECpKjHspKUmrdaUWNuDEDanF4br3me7+aMjYmVjfjS4L0vsd9nYxxMEgMCch4C/LZmaqj9HMAQZwcUUElqJ6yzZGs9KRG11EvGbwR5YELNBR9vH7CuWRh3acJ8kMAy5yIAGcZ4JB0/AEAFKjQIZ+4C8jbX7FmbQ+5BRbsv29vNUY9RyvDALBpnek7relhLAUIApSQBi7Ml3Bhcmjjt0fTeRgm/jLeJ9N/u5hKRjGUDUNtoUmApCSJ0ZuYWOKf+vthe2nSP7MCceNl9Pk7TBDDj06dP/TMxRmFVF7nmeR4BjS87k3tYOlhiwaDNv/6MsPXnRte/2lk4BtC01lBLwVIrUoSWusvdWcOBLvceDKRtKCUDxvZt4nNUeHDTxm/LSjU7GkLA8Xjs86LkjMoy/w0QS0l8qXVdO/vP2yNf8taYkXSN7/dPY6LmtYATIXUNJwlopLETIUTRaIshaNmodtZthNAIrTDWmjf3ZJlf8QeK2t2gzMo900ZKY5d1wXVZOlDOzHh9fUWOuYN3HegIpn3kgnoXOJ3PZ8zzjPXpjGUVlv+yLFiWpQODRIxSr1izzNO4RkzrhHk+SsKBBLi/XJfeuCLGiHggRNMmoeE7JNWVa6q1BgZKbShlQc0rrldhzNk8PhxOXQPU5rRdv73HzIgUhYVLwnoca6Mg5xWJ5flwG+zz4+GIkgvWdcEv+YJpSpjSrMxk9ZEQui1/8+YNljXjw4cPmK9XzMcjWpN9M0RgrgF6aJzPZ6SU8PHjR7x8egGa2LY56fp0e0lMcr7KVZjAbbDRWmtIU+pAyDRNqC2rz5XAAKbDhAAtw3OglP3rQU1jTdr7vWsmpJS0KVgMgjAlIfaNorF6gvp0PmkXJBbo3W8HKOR9JgC9SUnOGdfrVd9nFBTESAjhgDRNQAWWJYN13IgNHDC9GjGulVmEoKu+5eY6EXV7Z3Nln0iz+0YT/zwwY1Y21XiJLyxAlAIzGKBVCIRKAwQz22p2xPbPvV01uySfG76O7X0+WdMBQxqA1tZWs5kTLaFWwMuSbFFLVLOunyhJlcM8gTltkqxyzPG7nVt8m6Yl34duwwRcGmWRkjhtWpqs86AZS/ueuMTMncHoE4/mF3QtP0iV4+lw7M+4VlkXS1l0X2kIQVi1gGsGRJBkQhxMs5wzbjdtnhMlqVOjjiOF3pxN9t4ZSomQa7aYSnV/U5o3iTq5ZgHXiSClWza22tRtrJUG02jzwJtPAHt7v5/Dtt/ZS8ZwHKPWoUkn5wCMqdqaVGSAeAMu2rX43+1P8u+oxiJuSDr5OqDbDMu6l1559PptAB3f8yt8MAV1mOR3xRZ5sOVozGpNoFKf+JvLEy+mv/k5EM7O7wOp+0u+p/LuX4+Awb/02l/T/j077l8a+N/yCoBSLNXw6qsH1nI20blipdM7RzmvtU+GlFRwuJezScbOgqhSMkRLhzWJq13XwoPMEgcpHQ0TqoJ8IAJVRmhNrldazGKaZ8zxAKChlFe8vCzIS0YpFT81Vic4IibR9GmtICZC/JCAQ8KSswbqw3kGB6TDLNT8JuVtYrS4d42aEsDtCnABqpYqVUYgBldGK1IqseQiZUuBsNSGzA1MAa0GEL8BeIKBSqXe0DgD/7vQ0v/T//0fwbyCeQWxsvWoSEUmizBvJEItDYWBQDMKA7kCjSa8/eo7xJDAaUY4HxHirM8vIDDjh//+3/BPc8JTa3h++wU4CpDIzHhZb5jm2Tm12zkfueHl0zsp+zJwqTasqzgAy7LgdDyLUCcz1rx0B6ixzI3l0jCyXwaASslFL7vgilayftf0OpRRSVHmZq0QKrh+hlmPoWLkmqRshfumw9zANYNZwEAL/MQF0HwY0c4O6JprrYOE7Nb3WK87a+bsTf8BI2c70/bvtpH1DA5GZtiDWCCoHsjWdpmj8eHDB6SYMB+mfkwDo1JK0uyiSic8AQimOydBnr+WqbqyZ+q/OGdlZ5IG7X6s7Y1NpCClg86x9KCkXcPnXvtEhh/DzXgSbTZXeV82yz3AZcc1MIKZEVVH464UDNhcq7yMhTHYINYxrc//1mBbowW/HQx7sF/sM7dg3qxL78wNkHc8v32CCFBNJrDoaCmjL9Gg+G/GtjEq6v11uGdmv3tA1XTI7B68ePOaVwmwaYBkdj/JgjwX1HVQwGXCbR6aE3U4HLpTnnPuwGZnVewA5/3Y+vW+L3WwcfVzxe9Zfsz9HNyMP43xIiKkyiIWrRpS20DHlbn7zHNUlniMqlsyHFLZo1u/LwNVZO1OChwAlbf3bOfxQYi3afa+nxu1NVTb25RBXhsDkbuuVgcAmdHpqjqf+oCoLfYlrL6Mbjxrc3ztOca+/jCO5pxTFyRbAAfns1Vnu3a+p60xsRGkScQxHjJ20o3ZABC/Juxa/Rzz2XgfDD1yzH2wYM/DAno7vyXJpCsl9Ww6SJjdpbrybw1CLFvfA0uQdHmXi+/dg1luvGv3MdA7OVe7L/d5z+qw4He/Zmyt+CRFwAik9/62T0r54KiPj5ZWt9q0DNLON0puiaj7JgA2wbqM7+f1efyznLTRktmiLhPAQEAUn7QHSLUHgd2m7Wy6gQtpCjgcZwDC1M45q2aSMZ5uWBaZ82tmXG9SxpripKXjE+b5hHk+4nA4dEApX1cUljH44ou3FjDpNbL6XMJUFR3Ee51Os4vGJvLNUTrbrlQsqqFVNahOScCnmEagL4CBJW8ILUq1SG3W0KCBEHCYhO3YmLGuss5Pp7NcE664XC54fX3F+XzGmzdv8PzmDYikYUbOGfM044u3X2CaJrx9+xbvfnmHH//8YweZp2lCiEEE2NWsrkW6x5bYEFVjd5omcGEtg5dxyTljyTcp5T8EnOoZc62IHDb2edjce5vg/b/b7QYiwhwm3X1lPlEQP8p8s9Z1rkxTypjFZg/z5rmZvbB9x++5fd/p9pW1Iqf2xG1E3MxZn6SWeSsML/OtmVk6zu72cJ+Q83usT4iZH+5fm1CfrExV7a1LxtmxUtpqXHb7sPvZMM/6XJc1aUz1w+GAdV07UO5Lbn2Jsp1j2JRxzbIHM+BKUaWxkehNQ+MMk8lg3Nt9byvMDpYyGvfY2LbWMM/bJgT7Y+3n4SPMwjMT/WfH/ZHKQnkdXAWAl8cSDP77vYsrACnNH
Ndo3ddtHwDQ431JggqoPRjQTlPPykEx9vr9vspQllxr4EZA2LLUWrtv5un3Gz9f/DoKYUsgGOXjw08IToKrj4kbem5S6WDludvxVc3jNeu4aWlrZ5UCYELLqzQ0ZCOCAJWGzr6wOT+PFf1GgE7+rzuF9rbdVKBuMEaAK+97erHe5Z0Tbq/mnF1m7roWNjh+cZgTYb8/AssegWT7jfjRcR8tkkfHe/Q5G4NfC1x/7fo2x1K6qDm29vKOFQBhwLSxqGpVMA6rNIkICWU1TSgIiy2MyVH1fWnVzQCk7holg2kLzslYCZCX0tQzaiBCJRFNDSBwaIihKOgkhvXdL5/w7t07rFcVqG5ADAQKuhG0CkNrGgOXyqh2LezL8IJqMZno47YrYFPdqSksSFTRakXRhRTVWe6OKgIqJNNDQbprUYxgjqjlA2rRhRkJIYimEfC3AID1+h5EorVBJJpnh5iQYkIMQKsF5/NZxaulPJco4Xj+AofTGZdrRmUCU5Ry3zhjqQ2fPn7Ey8cPwJpx/fmEH/9I+Ph0BocIniJyY7zcrkLBjckFqxiBlPS4h5SuWvmolsJoCdYH+oig/ktVQWbpDmkZFmFzdHiYWJ+PjkMtaFzUCdFAz7JqILQ6oSr1X8qtBZjjJkAsmNE7EbA8/s5MaBlKZoEBZjDj1xlvDa230jaheu4/3Aqam7f70tSx5qseb/xI9n69Y+DJGoNuypZ1Cx08NqDBAwjjewZi6PoFYXoS4CKlCC61OyNEhIqR6ZGNR5ioNufNBsZom9FAHsnwOAfQeXPj7efngDD7nG2G5giZE+YdojvbRaTPy5hlVoonjFcAvYOW7zLm7XDdgXQe8LL7b62BWLtHxvH94Sx7rTmX3QpQ8JXR6hYQ9K/CrZfB+0QFMESOuW4FfjcsHve+Bwu9s7EPvPrfAel4WPV5x50zoa8uus+i2kNEXaPkEUDXHSjnlPtx1xGS5flgzzMQYPN9PAAq3T0ZG8mz+h4FDJtxUefNOmQbUMMMEPS+WNhgEtOSsD6VMUJBmNLGgvLBhgVCBioxHPPU2DkpAupMVS7dkZV71FIsHvcdEHBIkwRJULBEgSizqYFEV84YKwJQQUEDdSZLwZozpjghxXRnQ8wJNT/Arx3TWOoBXUy9S3UtVRryOGaYl2ywuSias1oOnMR+WxAm60m6UQIVRJaF1qSKs7E6CGKvQhgSBNpxrj9/9RcZQKOxgwS24hADhmj8rnuH7PX7IEeux+ZiLyV2IJyfqzb//J5g2jvM96DxHtSy330AFGIUx7zkXuIJkvsx/LO1hrVkJAwpFUsJ9XWsc4dZgVtbC6xFd83mhHpOteruiw4G2zV75oG/b1uXfkxilIYHa1k725zCJGCOdh2XuZB0DUpJW5cyrdKttjEjstxDqbU3yIghIIWAauwk3tkitX9SZq+seOz2bZ0SKSVECKixapKsNgOltYMyN4RWEeoA5Ww/s/N1CRciUEpIFDFrd8wTFJSvGeu64Hq9QjSfCaVElFW1Z2vBkjNuLAnZ85OMUYyEnFfUVkQ+IwbUmvH+vXRCJAXJKCSEMEkzigDM8xGgCYFM8y2itqYSBHy3v7ACDIc5geZDt+GlqG8C0ZSsRebANE1gYiQAKSatPrHOuBnvP/6CdV2wvHxCWhecT0+dxQcA63rDNM04zhPePJ2llBiM5XZFLgvmYxL/qomtXNdb34Pnecbbt2/x8vKCZVnBWe5XQGlJBocpatfQNtYDQSpduOJwkCR1RETiBE4BaZ4xHeQH60hMmo9mc2gfC+4Deu7+LABlsk4hdYDOErItRiAqS67aHlE3dr/b1h1rxu97Nv+FXa6gcjCGrHTaPSTbB6nbWDuerB21Nc0qQAz0Hc2qbK74e7+P8Wwcijd9m33djml6lP59+90nPT3AYdfhfSV7Rh1gh/hktk79y1jOHjjal5vKPdS+X1m4gTCaIF2vV8QYEDvQu3bfg0xc2u4dRlDwc8YY1oTRLb6h5IbaMqJj/D4C9x69BngFUIpjX2Tumu99vmhVjD/2Zr+qW19/s+8JiqTzSNaJzR8P1MId2ydwKASwzo8KqcTrWrYwv1ArPCSA1Pil34TM0aplx65Zmw7EncwNMP7bzuHnsp9fMdrfHuM08vlt52Tf5AKs8aiTvIlBu7wrcGj62Brt9OtmBqgp1qJ6mIJXAERN7Lwm8P7/A3Sw8w7Uc//yk8MChuDe+9wA+ePuX37w98GRD6b3TsevnevRa79w9sf4td/3hu63nO/RsR4Bh75ryOaY6tD29xpLiaodUw3iFCZpfNCM/STZtBgTUgowoI1I6NRSyqJgR5DyAyYCHIvCX0eKFvjId/zCBhUgLMiF8EoR1+uCDz9/wKdPL2AVyI4UUbkhKw2XW1HkG6gUsLSIMJ2kVE9fVt5SS0VMc28+0Jp2vgIhBUIiMWaHOCFMM+qcdPNLCCE5DaWIXBsqA9PhJLX9Ye5lTCVnMEEz5Hb/QtP+D//h34vB1CA9BYh+wjRLd6dIuCyvqJUxhYgU5422wnq7oZYsTRhaRVCaP5UVIb8At4rLh494/+PPmD69oAWgxYQCwtJE2BfRniMA08GwNQNGq1mANs2mwQCo1jCFaHa5M6Ua10HNnpwujnxK2pUrg07cj+oAujY+ywmlFlQOoitkFksBOitRJTWKMo/RdRtabagx6jmgQJPdmuiENQUvKhGYshhAF+Uxq0Zjd+rVVtS2W7v3Ja7CPuAeYO/Xpg/MgmbvfRkfEaHjAX4DgFxXKQ3n4wmn0wnn00kcPm0QYsESuc0qBClbJNAmY7nZ/IO7R+ZNiSp0DB/ZG2gA6F8WxNq5/XFCCBsg8XP2rrV7m+rHvX/fXedmXzChS33ZfmDvpDRJ0NTZSXDaMAoqkAT1FsQCAAUrH1S6fNs6ghsbp46hlEbtNVaUAceD4r+xgRh7354dtg/oAWwE58X5G+CVfd+DeN4e788pwcyW9egBOrsn7ygbaAWIPSDtLNr3I8fQWHmUW0EDBf99K2GtVUouPWPOSnl9OYu9bw53rbUH7X098SjNqLXicDhsfIuu7+TmmjlN8rkBuNi5etCEsSZGkkCYEEGPLaLcDuRz83obgBCKsRcoIgZC4dKz7SlGtDpYg9EFTTbGFiR87uXXkg8q7d58N8fOsdj5Y/t52Y9JkpDqpbzBwLkBfnmWwAC+nE4ZKkCmnaRThFmTM62Dtvt1AqCXVZvsgTybcd8A9UCgj3t3jrcPxwcafq7tA2a/Fu13CfK25fH+933Zsd0DEQ0/pRb1obThgTWdiNIJurNAopYesQBBtTWAAqjez4E9sN24DdajBgu2YxOGLffzxfvW+2Bnsy/2Oe/LsuTYBibUUlDL0M7yx5ByQw2iGR2s9EAoQwoubO6azel270Gi3r864BJ2zNrGfeIwC0BItfbP2T5vjc38nj4CfJm8ZFqB0RI+wsCYZ5OKGQDQ7bbidltQMuN6vWFdC15ePoACECNhPsw4nk8AEq7Xq55zQkoTpumElACOAlDPs9q0eaxvA4ZyKeCdyLntzfNkXWen7vuU
UrCUBWtZOyNwWRax3UoTSWHq5bYhEE7nMzgAfKVNqaiNz/V67WvwdDrg+ekJrRa8vFzw8voJ8zphmqXLbc4ZHz9+RIyvMIbZV199hRACXl5eux1pJMADKKCsAtBSFD3WELeAzrIumOYZHEYYXblhVV9spsNmbe8BOL8nGtPb5lmrookp+4FJLmgCEFvgSXyKMHwM/aUnQpzd6/f5AEAfchIS/x3mkwDPyna3dWY+jXS5FHjA7H4MQQAEDiBilDbkaezZ7bXN9qCM/UzT+Jy/frPnsocLM8n8czue+FK+KmZUFpj92ndn7cmxaFpvch7zq30pPzN3Np3X7/PJQCKfwMIAtdmzGUlFnCTpXIoiebUgTHPf89j51uPBiYyTPLdx/+CAUIP+99ZW+WP0/ZewsXPjHDQ+53wZ+9eeo/9Ot+sYjTj2ZCYDlYQpGMQAB5N/CX1+2TPx88XL9YQwmKAyK2PfFwMRuGrlAQaxSBiLYj9KC/0aGzftE+XWq4sV9j60Hwt/7+ZnhRCcb2LfG+DzdpzNB/EHFx/AdPDsPmstvYPyNB+388GOyUBg1kQmIRFQdKPzjTOZB6vv0etfBdDZyf+i4+hu2N7bg3QjaB7O2cZJcxum/+zdudxk3w/UHlD6S8DZ/njgAUbuj/3ovc9d57/m5b/fmmmW7CYlG8U0GaegB0jyBoO4oXEBOOgk10y2Zrqhi3C4cZKJDhHC3FMNGRAQKKG552HX08Iob7KJ3/8eGJlfEVRUd7kVXNcVCAkpiAHNiwhIztMJ5/MZh1nKXacpIswHcDqAKfVAmkgaTQASRB0mFT2GZP+sDJaZEbiB2oKn4wFPT08ISTqnlJaRNau/FikbzLmgMeHp9Kyb/4znNwccDg3X6ydcblcQBczHM47HI4A/AQD+57//e3z48BHvfnkProzT+S2++eprHNIB15rx54/v8d/+xzu8++VnnA4HfPPV1/jq7Rdo64If/vQ9Prx7D+aKUmXzPZ/PePv2LVIKOALIBbhegE8fC+ISUMColMExgEPE2kw/iQydFZagaho0LGiwxhW6KTWAxCMD0ixZ6SZspQoLDmSNT7ox2ZyxUlMpV62IQcowahVwtbeDB6NxAPiIxgZUSYbdA3StCshn1x+7xCi0ucXaN7gQgJHQsuBb/6upaL0G3wTJ8luFzNYRatoZ0K1TvSSLAAAgAElEQVR1p1/Ybcku0vNrvdbhbIyNmXvA3UEX3WzIfd8HSMfjUXRkiFByQSlZukCqE25i8Ra4xCC6Nl2/4bOvexvqbmXzvq1dv37t1XhbeiH3PsTYx5g+3g88I23/72Yv0KBpf70yMwRgk+tTx4JH9lY2+azBnMuy8WCBjGNal7Ktw8xN9DfIAjWk/lkP2Im2IQONpQOhy+gZi6yPn3vefl/wgJh3jh+NYVDaPNPQSPJjuC9LMSdmPJdxHdvnMtiXHrDzx621YE4qsr27Zuz265pHearPiHen2Oml2Jj6jO4joJdZyns8yGgvDwzZNVlg44E8IkIMIzO/Zwi1Jra3Pz8CmC2YkSy46GEeIFpXxsyV7pOdpQgCNRZw2Fh2rEkOivfrDiOACSHAOvoI6EF9zE1433/X7tv+22upGYPXsxFKkS6WIGBO0pDDhJnXdR090v3Y63+XWrAUTbqwOa+EVmX+y/OOKCpxkFLo41ibdGO3TpdgA0MIbTf3bbya+pZk4wegOM0WOz9ZtgYBtRYJSDU6GsEHIQQIyBxCt8327G2t+OswUMgDdB7I2vuHNt/3x+0BhpZTdqFr82FiRAyTSIjovQ7imXTOBVi7WbYuM2LnoxhGowMSNknRZgp9vbKJ7W8ZMX4umQ33tseX6bZq9oVhJUNRk3qyN2hpWAAQdZ/uLN8g3eEpIBfpYhsAgHRed+BR/APBAIWpPDr7CRORYtxco5VPq6uLUkSnTJ7BhJRmxFhQct7YBG4i9D80VwHTFRvHVqADjMgRVZOLRCxM9UQgmnA6RczzUQBWAKSabDVXLIsAdGuuyEV09KQjfUWMhFIz1rIoiJBgJd9CMynKMJHmWJeFcTzMHeROMSJNA0iprYjfzqbNVkSvmityLkg0DRsD+Xw1FuOUlJUIoDK4iuzBuqjfFQnT+YA4zXiez4OpjxFsP53Pqm+YBbCMCdM8AU9HhFCx1hWoAUwZzMpcjBFhMuAw4Pj0hJAmtV/CpkxhAiWgRMYhHnGcjkgUgMpI00fESZld2kiHdfxCDEjzhOP5hPPTE7ACeckb/dPBqBaAp9s9BwbYy4P7sldZyliY1Ta/ujZsit1O2XzarzFvS/w+6PetDuRF6TwejzI+LXu5Fzu+ASuDxRoCIaiwP6nWrt2Pl+swPTefILPrAYApJQC2dxJak6Ywdr21NC2Fl3JG85sDRWlWGO6TAXtAbm9Xvf9wu60P/+4BUAONvI0ferRbhhWzJP+bJr/nFESKAhUUCHOakEKUpjCtIaieIasNANkYDT1EIaeYnVEfiERHlamp9Iz6JSzvF3bJdeiuton1x7zxMUkDeidjIm2yYvNLf/r9EoGgxJkI1WSWawcGoYbBaDT0tYcMSe2NMkOKm7ncz4dhMy0W7fgTSaKOGyPXoZO5B1DNtgYO0tSqNpg+vlV97YE2u8c9K3zrH9l82cY6HgC0hGv/O7uOvQE4zjNaU71h1M29MzPqmndzeszNhtExnYh6w66mew+FUQ7+ude/mkHnAboRjDggC2My7r/76PdHAZxfzJ8Dxvwg719+8e+d/kfH2v87HsL2M78FfHt0b3/ps4/GgNxnHt7jg5iYwnDExBCZoPx2EkpJqXSL6gGcnpNagDDqIE4vAS203SQEJMdlGgB+GulnIklpgc7GUqV7Sy4FawNua1FDEzE/HfHN129xPIrgZJoIh/MRmBI+vb7icrlhngPePD93UC7fFlwuF6HWv3mDL56fQUSig/HpBXkpmKcDvjg/4cuvv0Y8JHz49BE//fAz3n/4gMKMZa04nZ5AFPB0fNbNAuAq7KbzqYHrBa+fbni5LXjmhjdPp05JePf+Z/zxj3/EH//rPyEA+Lf/+L/iq7dfoBTCpw8f8Z/+3/8HP394L4LtRPjTn/+E108f8Xw8gSjgtq54++YNvvzqr3E6n/D8/IzpeMDr6wt++fE9yo1RCLiVjJgIa6uikRcJYUrd6Ri9FxgRLA5qAkpb1KSzAmCAbeYAcL19UtYcwJaRb5axBJbFSjFtbTggq7XeCVXKY7lryQGSxWTkDlJJpoCF7msgcd9UBKBr5HTRGkMwRTu3TlVSsLBVWAOgsT6CBm/ogLVnDphBtpLEHqB1vsF4dWzDBX1jLQpDy5oGWLmap+Zvr2v7ijFKV7PTSTN22vUM5IANwrou4pS1UTLrRc4fgRr7+xjLcjDIfLBnkYqBBmJ4jLLNPXtm57bz2Nj2DPLuWsZpt+/7DbYDf2TXt/tpW1DTHqxgwFvtlL0DZ0C7gEvKDG0BrVWppaYRYNueZZ301Arr9QrgsNlr6mBxEVHXxvH32SwY3DmV+znhs9b+vc2Y+eeFcd3WrdC/18/NWp7
C27Hx82bPivbgFjNQSgUj34Gn5O6llIKy5h5s+HmaUuzzx88DA+t8sODXjv3NNKi2gN92rtmc9ONo9z9NE2IYpaC+fMbGp9TsQNMBAopdbNqkyRwBfb9WcXHNKe2O9f3aq63179mcLepb9PlgZfmWpSdrLkN39sQ7q+bk+mv2LLEAwhSiNNgJAVNK0lwFQGm+IYjzO8y5VI2logCYdemVhioCqPTnChKBdhIgDWiaNNHS4N18j3pOO5+BmWwAuAYyVnJoY8/QZwsSoK/bHvcxwY/7sT3r4dH82c9bs2syn8Z7e//Q5p79t3XxDEF0edYi2l0pmRahlNLbnDaRfXLgOtsm2J+FMMIpDJ0+6HjBrjFG0Zn19xejdoxDZyDYMf2a92vIs3dsbYhem6wLYb8KuNZ4gAjX61XBQPEruO8jDOpJYGNceJaOsSlsDZnVVV4+y5qIO/vkbfYIypwtjnFjVwLEXjGzgn+DbceEDmyafWgsoGBKCSnOwoxml8CphBgJKU44zCd0DUedozgSnp4aapES31qB6+2C2+2KNV9RyorSMq63V7xeGFOa9d4SpnTANF+R0lFGIhCmg2h5RhfThLCdvzEGpJC0S2/pPkLOBYEF4JA2W4xaMyoYcRKmnfkgSeUmkCWozEvGWjNeX14FUAumtSTjN9mcmGfMs5TGin3N4AqkSHh+PqO2Iy430acLIeB08tICorEXY8TxeMR0OGBdpXHW4XDC+Ysz/vHrNwABKSQslyv+Gf+EH3/8EcsiDcaOx1nkSEpFhTDOvvr2G/zdP/w9vv3qO9w+3nD9+IoPHz50tp9PChnTe+/jhRAQAwno5+agrPnWO8vuZT4MpLDfszYf2CfT9v6Kfd7mcWsNpRZwGxpsMUaR9ra1YIl5911mBSGjdAquTZJYh8Nhc42DzbRtGDXuwSb0Nk7da3uVXDd7qt/LSwHitPVd9sfzdtX7kvZ3Gx8/3z0YZ76xdX61v9nfr1dpjnM4MGKQ/SulhKAVX62sPa6JPTnGqDWIRnmpG9/ZxsCecWul277+3DXmDcQIU+zBuk8s2r16xqa3b2Nv3Mt7CKBOOl+ti62d3XvgEg5stVUHe9Edz/+PgaaNppZlwWFK/Xp9Ilh8OLm1sX9KYqVXQFTxtVtrWK2zM4bflFK6r5LwPhSN+5ax3pYEPxozr61oU641dvNqGxO2tj0OGYDHDagiXdXXKQ3NS1bg8Hq9bq5HZMSG/9W4IlJACyJfAeZOPPhcfOhfvwmgM7FYhnugPNozMwQkaD2+M4q//HXEv37wqQ/+PrDb/7dfxN6Ifg6c84HMo2NAJ2J3Rl1gYP9K4I7uSPvz/6XXvwak2x/Xfz7EgKAiYP6YtrjZkGI1tLgbDxatklpV+0ayX2iDZt2vcxxckW+AoSL7zBsjqVcnGSQKW6Og5WLcAsLhDFBEY6CUBesK3K4QPQoKmNMkgXM9I4QvwRxxuX5CCECYEg6HCbflitv1gq+//gq/++47zPOMj+/e493rK77/l3/Gl2++wPkwIcVnAVpqRr6+4nq5ANMJrwE4nE+Y6YBPLy/4/k8/4N3HD0iHGRwiWoiY44TTkXHLK979/A5lKeBa8Dd//Q2Wa8TrJ+DDyw1ET/j2m7Fk/usf/wv+/C/f4/3HH3CaD1jLO6zlHSoFXG7vwNdf8LdfPuMPf/gDmBn/+T//F/zwy88oX3+N+XjGF19+i6+//St8+e23eHrzjNPzG2BKuP30C/KnhsvHX8Drz1ivN8Q2AxHIXNFWBt/EkYxEADdxxJs4ttwiSgm4FimDjerQRV2HOpu7Y8tMoElBGBXMj0iomq0bT9hRgJtCbRxQq9Ks/bwHIA0ebPMWMVJhSGjGT2cZV81qIQApQZgRLCLfUNCfgVArANEUlMy3ZvbJMTfgAtoQ+obowTUweqmVAXQW1PXrJ7pbTsOh2K7FxqUDdJs14hwR+6zYTXGMj6cTwNw76s3KqDEnZLktveuV3N9WP+QOpGN7uMOhsKslWbLyDLVbUoO/X/k0GboJiJ4kbbPJfhw8OLQPfOVythto/y6GzSOiHojvnROiOtgnsPsyLcHBfomahbyzu2Y3P2OC+/miAvNspQSAbY3eNtvvVnLfavX69JvPPTqPMZ18mcsjwGCMH48gEzTEq8JWWw4Ye1hrqufEnTRxFwz483kA1kq0a629M2wp2o3RnA4FBQhjPvtj9kC3NeTMm+dkgYddp5XImjPtHfcYY8/E+gBmH9T413B+rYzCdN3GceT6qpSxYLuv+eBK5pV1L63IuYHACsZG9MYR7hnafBeheKA0Y0Io2wJWkri9fgrKXjbnzUqLMcZ07/P48fTPwe5D2APC0OlaZKzMApbkHbw/ZfNXnWSzmVbWZYAViXhjt7JEGtQ0e+4M02tCk+QFjeULdnPljp2Gjk/p/kC9lASA7BFNgI/Q529UxhjcGI1Qxd7zukR2MR6UewySb+fcI//PbP4+OLXjQp8nqfYRWAA2YwE3ZmFTGMDln68GPSEIexr9eqAAlj6FQKKdG0iTqmLfQQBcedm4r6399uV+G79ajeJ4/so2cRqoHRBWu9xsfyQAIfTnacwr7+eKDl0ERb1nA9YCoVU77hhrA9aMIdjHmAZge8eo0O2FQkBopgtJPaESlGXNDsQ3vxpcEWGdg5M0M6oNtWRwBKaZpKKjA8IBkbQrqoY9RMBzfULOK9Z8Q843LOsNt3yT91ZhvNnvYclIcYXtdcenud9naw2nQ1bwftjRVho4NQVQRV8LLIBwtxEsgudrziitImnXTWFQJUyxiL2r6M0FQIQ4TajMyCVL5+cmZY+YZxABr6+LlNOmhBSAvGbpWk1AmOIIVIswUICraBGSUi4RkJhRgyQ9SymYj0d8+e3X+Kvff4e/+4c/iD3hgMvrBcc443a5givwenlV2FGmHAJhOsx4fn7Gl19/jb/63e9w+v0R6+WGDx8+4Oeff8Yvv/yC19fXzmzZA1d+ryIioElFCju2aSkVga3k2UoMq8ZO2z3HEhQ6nHd7tt8/PXg4rgnolU+NO1g+5mrTvVoBvcLIJYCnGXGaQBQ6Y9qu3wNhvhJib7t8wsuSdhtNYAqIh7TZv0ejNPX7y3af3tuivU31dsmvZ8+Q93/f4wDmO9ixjOm2LIv4OlrCPbqF2l4DoJk9U//akoRdJ4wxNrM2vt+hMe34XgT4AZr62tt79/uJ+ff2Mj/LJFUUgNjYTothum/h/Oa9V+Sfpx3XGlowoA0f0H1YWUsjmWnSDUAY5dpQu8JNGH0uYdDamC/EUsknFXvCSJM9r6I2ApcmhBBbcyGobytAlxFPttiNxSrbWM2qEfw+HoLHe2zfuo9X/I+Aq4KbMIBW5PlTZ8Zp0g/CFAy0rcZqrUqpMMsFqAofwFr+3vcjgIOVqH/+9RsZdBYo7wIP/X/p8iNlzCCMgF4Hp08r3n6ZEMaENwsGF+iYtpD+Lg4MutO/uRYbZDwOeAxAUDdQ3zO3ebd47JJgQAPfHe/Rax9cjYD+MVD3q3+Dxq0OrbdXMyfDykxgBpU2nzH6L9wYMK
AAR+0MoKGdQD2IJ6DrtFnm2iaofLQCCB2sHdctQRlxQqsZHGRyMkdUJoQ0Y0pvcDye0VrB9XpBwRkFJ9SWcVlXHCbJ1pe84vZyQX5ZMH99xPmLZxBVvP6Pn/Cnf/nveP/uF8QY8e0q3ZTmCAQU0YOIkh29LgXXtSI9JdRpxvT0Bt8dnvDdN9/i29//TrQMqmhvLHnFTz/9hJ9//gnHOeLp6YBlXZHXFW2tiHVCoqd+r+8/vCJMCX/1u6/wNB9xOhywLDfUtuCyvAfFgnlqePN0wBwTfnn7jMgFxzmgoeByvWH98Ud8//5HICa8/er3+Pa779B4RZgCODKWtoAz4TgxUooCWqj2SCliBA2kQ2tAYywsa4vjBMQEozk3hog0szHawtgAtVtNqRUMEu2cYs9WHA5SYKsDB8EYh2ayvDfNIGpgVICtIQSrUYPtBBCg0FghmmUEkEsDc+x2R7JcFa3moQMQlS4dpWzBlpGw+gQAqG10ygKG9sJ+vboLV0M6/o0iHjSQQgWxmpVDVQHoWFkyASTAuHP2/PliTJ1tsS4LrtcbmBsShZ4BvF6vyFo6ZhtQTBG0yjo39soWqAh6j+hBa98V4DOTEUTASEjxQ3uVvA4PsLkXcww8yBW1JNOupVR1Ltz3dCD6c5KhlAAraAAadBdrbFonBOYKIgOCt6BPSBGhDfCrO3C8uvPbPQBA7B1vpcTASgIl+PVDyn0XMCdHgjIOhKJT+I6VomBBVeduz1TzmcDPJpPcfmgvn8kEoPPS9BLFEaq1omnHOYP2LKiXbOB4jrVWhEhIqgG4rlU1XRhpntTejGdsmmS2v2f1buNhhont5pJlaQdCLlK2DR3BQGMcogc5d+PAahsmFd6WQEmdRut6B/TyVbaAeOOEBQ1uxIm1Tsf2VAErgXZBmQE7LHuWdfk1J3qaEqY02AbyHJWNo82HJAoTwKK1og1I1H8w5lCThhJEyk0mIECys0QjOCJYoF21dHbYXXsuvqxV2CBALQoaWekOGSgurDaQJMCIRZdrEzTZ96pppkWdQ6RsQNPb8uXvMt7AkKKY0oSQYmd2G0OOSEttrBuignqsZoFAvSxVbHV0c9gav7SuC+hZX4JLCxPRHnXrWNFWiNzWnnfoPbtB5odzj82B17nndw4LtHy5igFSpRQFEwU8aW6tG6CHIB1LyQA6HgBW1GYo5FhjnRHqGiv4RCnpcwSFrsG6Z6fY/e4DYv93m0OBgkqLhL56GqTRQYoTouosSsJPuo6S2UDKUu6mrD4bO3u+ax2dz0kmoDwrlrVCmnx5zNSW79g+tXLe2P8BWsukJxpyExb0n89ne4hqrwTAjuo31bXq3JHrLUXKJTNWTKtoOVvJVkr2XEVjtjFjOkRMc/z/2vuWHkmOJL3PzD0is7KquprdzR1iyF3tjGZWO4AEAQIEnQSddNZB1/1p+ic66B/opMdhICyGMzsckkOyu+uRmeEP08HM3D2ymtyBpF0eFEY0ux7ZmREe5uZmn5l9hmm+wr7OqHIAdLIHSq04Hk94fHzC/f2DVvsk0XbYqp0JD48Vp5vHprvp6gr73dx0f5omrRg6JhABkZUnz7MbuSa10UGrhoShLY/cdTCnhHRONnEejY+OQ8Dt7a1HS7qPS09qSi3IRVDKCWfXObelIJQCpOUMcMDV9Y1Wt2TBckoIM3qCpqi/o6AZYb6acXtzjVevXoGjVo9dzXu8uH2FwDMqIu7uXuP9+/e4f7iHkCCVBalmXN9e4c1Hr/Hy5g4vr2/wYn8Les34ycdv8OXdHeZ5xpdffon7+/tn3QgYdEM3gJ2frCC5DgQqDYSBtS8WS3Zr7OnxJAz5oebreqzUK7jXcZh/vuNATN1e9Yq3dRK3gW9WpZuXbOvP2HFAiDqQSJP3FiMwYY6zDkES3cs168CaapxhU4jgKTQgq5+tPU5m1v0/JoZGXk4AKClrS76fdRj9n9EfUZtQq56PSrNDK59uBDeZGdOkFXHAmruViJpuhXBt1WAJy5KwSO52kgiHwwFREXbkmiHJKvIDQBwQQ7QjzADHNpl9PDMjgGz+BoxaR+8tnTMoWpcFFCNp1CX2mjKcJqP/zWYj+vnqe8y2d9WWyTGx4ivriebxXgleQefDyxx47AkGQbXP0hjGzzS3m3HoMiCjUoJ0YLYNwCPzowkQYURMoDbUUW+gFj3D2jqKD7LsXML5vKBTHlzwtYmgZmlnxqWPrf5JB5ndp3ashOy59/ZkQFhQq7XpVmqJMQK1vaZFA3rf87zvfkMpkLouSJBcURjgKsrlSWhno1fnynBLl/InAXQFUKJbC5xYVGnc8PDorggZnEcWtKMFicrHwv50ETiqcajKUaEPoNXMaIXHkA3toTU3R9YVeQQH1Flz5VMwospzhVTFcOSZAdI76QME0F7v7zt+/aGy3fHvS2Dv0kHqrTXrCoT2HqGTjo4ingG8+Hcl5b5WgZHKUE3kQTjM8JCOhlbkmjp3SAuqdZ2dEdhWUhWb9eDqhvn5JDipGqhxnCE5Ih1PqOUE0A5VAp4WgURgiRG7/S3OYYKUe2Q6IyDh7bsFKd2DU8Wnd5/h0zd/Cdpd493THyB8wt3djLfHA67vXmOabyF5wnxFuLu7xVILeJ5wul8AmpAo4EkijjyjhmvcCuGj8BI38QUIESHoxL6nfMRJTjil9/j2W6Ag4eWLa+zngjoBMc3g5cp7RfGLX/5z5OUej9/9FuXpBFkYEVc4fLTD+/QeNQrOteDd/VvcHW5Qyxnp/B7T6xnTbof3jxG5JGB+xBwn/OF3D/j6t3+LP//pLfYEPBHweKqoSIhhh3y2/RHMSBtXjXPpW74d0Fdp+XYpKE2JlO+FTP9z9smqQM5DNQi0qq24Q1Ec2F7rtneukOmGb5dSkk5thX+WByX6PtkGWYyZNNelVJK2kFn2t6SEc0qtEq+PpReclwXTPDUuN3cOAGsFPeY1aGhXUC/3JgjO60bkVaGEEPR6S146AEmu50C2bHReEpa0IC3GB0NsVaIKWizD2oagnDxv375vWT1iQuAJwoRz1ilxp3RGSVn5Gmdt1UvpDEFBCIycF4QpAhB1EJodMB7AFU+ek7UbMFOBXCumqCT7Ph3PpduU8YgHHDfyB5tTxjzP2O324NDJ0iECDurYJhJEscqjwVZF+9xK6K35dkhqZlgQQ4CO8tV1r1X5goJxhbSqq2Es/MinQoGRqwIDPTjPkKLTNDUYAwJbooPQq4u8gq7W1q4W/HywMypnm0QqESVr9nyeZgCEpSSFCkTJvNNybtNqG7Bt6Ezj6hMoAGL/BYadW3q+TWFCJXVE85IxTzbtz9oVQVCOpGptBrVoxpiVS0njeTYOwYBcEkpRUnNm1loEJoQw43B1g5QWtf/GTzLvdiAITqejttFExpILlnQCc8Q0zYi7CalULNWqZ63KdZ5mMBHSskAqYTfvwFDOoCnq0J4lmxMM4/wpBZNxBpVSkFMGESv4E5RDrhZteVSAEDbRN2A/78EUsNRlyDT7OQzEoM6lcs3puTnHyQaxBFRJOKeltYcykYFDmlmfp
p0Nv8iYpoAQVB+XkpFPamSrWNsqWWWV2bq4m7VyiJQDa1kWnTIZjb8J1l5ciwbVpGCFITfwbHQqC4gI026CVOC8JBCxgQumA8P0QwxBktRe7eXVYvoL9YVIqk4ktP2prYbm6bFgjuu2qFrUxyIbWKC1zlaZZc56CACxAKLTZIkmEGk1otgky1wW1JwxTRG7aQeSoO2CRblsArQFCdAzm6Nx4ElFiAG7oG0zS0oQASLPEJDqZO7VyCGwVuSlPpjF+WGECakWLEUHGzj4GacIKQU5JcDbZtrJamBH6a3DMU4ICB+oqrNqs5LBVLGbtEoxpQQpRYdccQBMNzlGhBgBJj1yvUKBuQf3ZrdKzijnRe0qQW0QCYINySqi09nnaUZk1k4DuK/ro27MR2R9xill5KWA1Vg3WyPsgFtFFvMBYkSQiJITkrWPx1mnmKJ0oNCOKIjxApdSQJUxkyau3D+otWKatQWzWMUy0ZhM0cBaJ1ZWMGuFR6saS4Ka+5mmtpa19Sqr3uaigHmIEfsptoFZRMDuarZAcNFKsnRGSgtqLTgHnaBOAObJp7ErYKFDxSJSPhuXpgbIE8/mn+h+vrmacdjd4NXLN1hsaMPT0xOOxyPO5zPOywnn02PbZ8vpCbtpxjzPmCbjlKTevrbUjHIuyFbxxsS4urpS3SU9B6d5AmgYsMLB2k2TDoaDDbBZlE7FedtinICIDlKLIISEbFVzI8Ct18ZIApABOWGy6ttaUWrGspwxz9HOUcE8CR4envC739zj/Xfv8OUfv8E//eu/wouXryAvZkwh4HD7EX751/8Cn336MzwdH3B8fFLwn7Qde7ebcHXYY3/YYQbj9HiPh4cHPD4ccTpp8v/mcMDD+/d4vH/CvN8hBFnRMGjST8/pVKu2jGfrWBLlC9SiBWAxm+9rOQerdq0VIgSOuxY7EbT6zsGmaYogGJ+n/0es+C20ejQv2SoPDcAinWBba7Hn2WOxyAzmCbUAuQD5aQHkESEy5hB1yIY+OJS8oFTlfS4oQNFyC2aN8Qn9fCBmhKj80A6EFfvTijisOwYMTLsJseq02+PxpB0yBkpz1NZhJkUIOoCuZ2FOVjBAEWBW/j2PDZaEnO3Mm4L6MKmAA4MsfvWBFeAJwcDrCgaFgHlvCXAbuEEgnNMJmXQfM6sN8ORHKcojOjrAYnanlF4xOMVeGRmIEKJ+zlITMgSyAAE6RHBuQG+FNmUr8MMGlkoV5JRRFh3cSFETXyxsPtekMVYW1FQsdstW1WZVidRB4MiTAoWpGJ9rBLMVWVTtQhKbwOqT5PVeI8T4BgkVKBlpOaGKcyzaeeGVe8zmWw46UjSpFkIEhQmRd6hWpZhrBlDt/LqwzwigatXtsahPUQQQS0IZYAaC0RUByynjbP4RMyPMinGUnNYJKnV+FEQ1p6dTd+hebkDzFGzCrCURxROSniyDnYOEYEUwqGMSj9zZM8MAACAASURBVHRAZSnKw4oCH4Ip0ERIyvn/HqCTi++bL+dA1vAzeCBrAUb7F+YAkANvGAh5YRlWc569vlAPal69O559BwOQLq9y/HUHFjrg1fB8qGmq6MtB9nNqTv3q/WR8vx4cXgJsq2seMyQXP/vQ6wA0cnB36powtZ+v32N9LQYC66EIgHEpQy96dSyUOqoiQNeeSx6u56DkuD666XPPynEBkypptU2YkJBIcMoVpwLsJkYojPPxEafHI4gz/uzuJ/jszWf46PYNHqcFhSuqnHA+P+DN6zfY7Q9Ykhpmph3CPCHOAVwmvLjbI057XN/cIFxfQZYnnHNB/voJ9Fjx69/8Fi9e3eEnr1/j9cevNUDaTZgmwjQpWHRKJ0w4gmqFJCAvBFzpff7ir/4aT/ff4Cs84I9Pv8fx4YS8ANf7K9y9+Rh33x3x/rtHfPGHr/B0e8LxeISI4LDf4e71S3zy2WeQiRGv30Ek4X/9j2/x1edf4nR/xmG/h1QHLNToOTxAogcoqmc8xj3pqqDTm4QK2NnnHBCA66r9cPjbAXV3KrJnOBrArf+vdSAjhwFuUGe9kA6CmIbSep1OZ4FWqasW06YzA7eX1IpgzksyUK4CSrzq4DgBQJ8m+kwXB7vwod+PlUyjbvu+nqI6lGUotR+dUH/tyHkBaOAyZsJWFzRIb3ntXCaeidLfC3ggm+xr5u0G7piPra6hZdHYbMBlhZde99BiB+otP8NrazGduLA1pkY9q58zMHCCAEBlAkjb7isAEkEENW4kQDP0ZMNoPLPka8Sm1BoM1naN9tHrSokPPVciQKzdc7h+zXBWLZLyZ43xHjsQDWg1kvanKRReitpl3UO6H1Vf1Jaq7hoJurH5Muv7s1fCVGqAy+q8aBXj7rB69tRb0oz3w9oMS6mgqoChGOIn1P9NjEEBPANaHYhWXhxtza41Aaggiggcdco07TDPewMRktIjMCGQVe1NWs1ZAYQC5a+0oEXchhjozKKVQ6Roo95fLihgCLOtlfE1mcNfxafVKXk+m8KJT9q1trTi2WrSAMGTE4QAiHLIpCUr+M7DXhTjzLTXRzbwGgzYMGqpnRTffZqWnbV7EBn2llX6kNtBEePhNN2K6pgTzAmvWcGEWlvqo1VUkXNlBdP2ZtQdZ+tOZNBn3AJsq3IjD4CIrN2ROmhoIGirTDCH18EPJsIUggIozUfwFntpl3O5f3xi2covZOg+YQWKIMa3Zw4yUQRxAFNARQGKrrH6foSSNFDyysjmoUOQLDGie4vMlqjvAYY5+aP/6PtK95ImhXur9VhN5hOHvbXFp/215/UBH689JfJ2NTH/ldsr/GxxvQjUKwS7bTc9FigPZ64oyO05MmlLqwBtSAFMd0pSn4shoEBqgv3T7TW16P4KHGyIRz8HV+1jVUF9bQtlBClgFlRTFAKpjhNa1RUB4MAQREykNoMFuueHdjXQMPDEwIXxjPG/Uz4DPujAgurAAYEYWTJSWvT+TBdLKdrWGrTqr7YWQ6Mn8EpaAwa0jUpBdKnVbJJzKBbkpDzNbPu/T6301kLj7fV7sXuIMWDeTdhNwSbAKnhHCPBBM1IEHANi0IEm1wcgV6UZWNJZE4DLGafTCQ8PD3h6OiItC/Ky4OlJ7c5ut1MwLO4UwLmg27i6ulLwH7JqRfNJggpMdb7Fam1hxMoz9fDw0Cr1FKRTkGCKE0DAuSyoYMR53/TGifMrw7o9FIzWdtcAscFhAtXTQFpdAgBVMh7uH3B//4hv3j/g8y++wMtXr/D61Rvc3d7iZneN3TRrr0AlrVSs2fxg9S1ryUjnBU8MLOcjnh4e8fh4tGEqyq94uLoBhPFwfGrXrMC92fiifkqYJtU7qz7r+15J32tWTr8qVqVWMmYOmIwOQHlcrXqTYMmFbtNXsWSLnC1ZaWEfU2h6S4PdFhRtZLK9yMTgEFFYE+ytZTYLsgBUcvMNmo9HfdCfV916ZZH/Gf3iMdHgdm7cs62ay/zWq3lvfoFNmwasynYYakPdRgdSP8Wr/nIllFAV1DPgW0QTwedFB5bBpjUT9anQtdbWnumf0+yK+UpEpMUEIJTSfd9eva1d
WM2+S1/Tar6Mc9mtRXszdfm0Up0BRCYdumDrUYRwKllbIqnzrxGRNeoISIoNwGHkrHaWiXXqtZ1ZGg+aVgY9n4VJMyDobdHMjJL7RFzVNOsjbcLW5qkSSbn0iKgNNWqvM6BQEWUDFFdgmyc1xXxlP9uDnRfFfFxpusOudxdnNnsHk/1I97+dp4I2YKr50+aHaZzcfWoX1wNvse08/X72CHzoIIfQinLE8SkwOPZKQz8fmm9Qu+/HYbIKeG9Z1kE9ejZ9P24F/B9McXWwyIMkAK2d4PJwHfXW9EEX0RanBbnmVg2fou/h2a7nkJwaMjccH7hOV8DRoYQHEM3JG67t2Wv7g/IA6cOfsVbI0ZiNa/EhuXRIxq/HirtLgK793MYiu5G4vB74bUGwXsPvUwppz6m9bHjPy+rA77tHf6YVBULceunG1znfSJBe7j1fz+A645tvv8LbP36Jn376Me7u7vDmzRvwFHA8HpFzwjzPOBz2oP0tUhKczieUesBut8c+RqRaMO0X1IcMihHzRKAgCEHAQVBpwXE54uVP3uDF649w/eIGPBHKklBrxhwZV1c7yDTh8XiCPL0D14DDXFDQDbYOYUiYdjsIM/IiOGXBVSZQ2OPjjz/FYX/G6f4RD/dnPDyesdtf488++RSf/PSf4FzuIBMQDhNOTw+Q8kcF8eo1Xr58hW/efgvJBZUINccOvLBXSsnAb2BPeNR1b40iA0XEzjIDXXxOA4BWSVcdsCNoBSdg088qVnpjmTXAW2b9jzoytRQ8nXspuDtBpRQLbgegt0oDnxrwIoLJAyJ/HZFlyl2HOqj0IQBO0AGmvw9UvvydV2Z51Z47vf5ndExGx2WUS7sxAgSq897uN1QQptTI/6VWYCDwdaBQ27ae2w7/XrvUCWH4+ffZmsvr/T579Sx4ol4BfNkq5cHUeMZ6Vdj4/s435IGr6hHMgXEbb28wXIOYXep68fw5FiOnrZDWpuEOpYi2ULiUIq2F6ZKLCrB2DyJ4JZmfgMxsAWbV11glAkSdogIjGGZurSlAr9D2xWn3gbUjUUux69LJViVrtVOMNrjCWll9Sb1KvDm+8JYRak6RtyEww4YFOaBZEdjAKlIwTVuNuTlQ3v4yrrM6IGzgn653hfJe6j1xAyhh7+MX7HY/W7VLh3UcAep7sbdk9J+1CrAhEdBAtFqN2zE1Z7uv+PM2Y99PveXSgLaBn6uBNuREwl6R3gF2twvFMu3wgIhI+Ug8+K/aSmFXrf9W6sCnr+umlXvmTDfnt9uqsSJTwTFNyhApf6Tbgn6USwPnRh9D19qm2w0Bip8HqzN/eCZeDRIu1kBELMgq5k3LyiZUW1u9VaUnYCEFc/QJQVAaLx/YbR7gMVYtFblmrVyyaXMKQBIgtk4t0dr9s869qkMc2lRq0hY2oBOz56Qt2FKUt9DPAtcJJ8d2gBaD7+a2YpRLH6gBiw2c6+usgPmkn09QR5tJefxYK/5Szm2QQmAGxWi8fwrWl1LNj4ZRVBioVPTadOhWaa1G2RNx1Xhhi9j6SbOrYkTxIAOzQN1PZLZWdqCCGgjt5+fYDuc6NO6rcZ0EgpoGvmQDMWBqVKtWGFJgC/yL8psxIcSAOUadqlk8CNOJsKN/rVNUfcCEgqkj8CzDPtb9Bq2aI7tmH7gmgpJSs8UxB6S84OQ0HFPGbrc3IC0gThEhik2ThHWmECioPQ2RUecZst+1YTDzvEPOw5TpnHF/f2/VeicF/nM/g+d5Rt5BO2myJXYtwOx+lU6cJDBIgnL+ERCmgClMoENvEVuWhNNJp542PlL4RNDeeu/Xy5EwS0AqCW1WjJ/ntaDkhHenBxAB+6sdbg7XuLq6wUcvM0olUJxwzBnL8Qnffv0V7r/5RtupyQbNVG3PhShIB6q2T7xKmnC1m5GWhPM5GRjQz4oQY2tNr1UrcOedDXsJnuyRFrwS+zlivFLQ6cHevqmdHxW5AnEKCCHinLKB237/Aq/+dBsrRhvgfpKflRXapaD7w+A7s6NU9ZkpMKmxsidjyD6DuQIU23nR6R8A5k76r7bRk0GDP0e+D7uP26suuZ2Jl/7Ss/gQOjBQRNvg9X1ys7P6ftFQBfOZiyaoK4K17foEXtW3nDOWdNaEoifIvL24VpS8YLFg36/Xbk7t42W8js4nOnbifCjmHd9vBP/GdWDWCkixNuUIBegCB2PLUcBLYgAiA/Y68aR41bOryAIWtvWoWIp2O0TqFDcO7AIOjFpXoz2fOczIXLHbnRHjEbTYOcoRFandh29Px1a8ul6neHu/asdPFDDWdllt+7WnTX2QjepaACFAnLaACGpnXZ+GuIDJClHcV1C7GNwAux9R3f9ZJ9b8Hpx3l30PXzwf/9p9SKeR8CIAVZWemOl7yDtaYEmNIcEP0jZW06fWygq16WyxTrXzAoJne+dS/iSAbjw4R6djBOIuwTl9rS9aU5/mCI5OstDY/qncVWjkwGtw6kOL/H036SCdLlgH53rQ1yPI9jMBxs9bBQw/IJfB7YcC3cvrHIGFy9e7Uujf6wENPkWmfSatP9N/bjUa7VB049LOnA+swRAPYwToRrmsPPqQqHHREl62+j3N8IVmNKdpQpgiSCKYxFqMJqBWG3+uVQgVFUyK9NdaQVUwccDh5oD7e4IM5fVLesK7t2/xcDpikoBYZtAxgEJCqSfsrybcfPwGr69f4tWff4rd7TVmJhRk5KNOZ7q9vcbHH3+MEq7xh69+j+/evsWed3j1Mtl1qORyhFDC4fqAq5sXONMN5t0BYbrBzdU1rq9/guVY8fvf/A6/f/c54u4Gf/Hzz/Dppz9D5Yin+yfsrnfYEWOOEYfdFW5vb3C12yOnDJGCUhcgF6QU21qzZzk1Su/Bb3OUCYCVZxMBXJuxoOZwdkJRsSmqHngpcFcBmhRkLbXtH2+hrFKs5Fk0aylN2xo3m0/Nct1vh/bQeg3AqizXIIyXvo/fV8C4iwQhRuUaGpyMS9207f9cN+06xhatUbfdQTmflqZXY8WcG+oR+BqBnee24Pnne9DSOZxIqwWTctxctr+PAKe35UutqEQg6jwU7lAB/kjXQP8IkPi1uNqMYCiAxj/VMrGtWoiaI6NBW23fd9sj/Rou1rgF2l4FOQT+/Wv7+fPH1677Q/a2g76e/OmH9Q++/nuenTvMIeiwG5GyMpt+rVrZFRsoF2I0TgqjexicCHfOL+7ocpFsAptfj1Z0NKYppga0W7QKn4ZMVmmWU0E6Z4gQpmnWNurmOCtopJWEyu1GBupJLTjXM6zXxuJ1tckWNoMzNV4srwAqVVuGhdXhr9XhOuNX86DEWvYC2RAXsYpERbP0+gIMOOj8JrWK8jy1PXpRHTr4Cl6tA41dbD8ZYTG6c0ZkgTGo+SWAZ3PVVOXSEw3jXh+ddP+7OfYAuFgLUHvWA0Dp+kMECtyHBljig90Grg7ktd6064+82rsdoBXAqm2rWJXQoONtz7KR31dAS6YMkBXjx5G1vRBLwFyCLf7ZjSOpWoZcgACB8+kSCwIIWciGVTjAWPUcE2+ZZW1fIrTn0v034yE
Kk1XARSWfzqkBOuprf6jLQcm/ibhVFlyurSaJTI8ACzDXFSQeoOr3z6cIj9bPfdnLI2cMEkefrpaqgwgma7nyyMheX0y3xrOMQ0Rgs6FVkGpGSWcwu+5qW1ijdSFfd9czOzvJQWsFb5gDENb35qCM+6mXvmLXsbV+OKjp56cDoZe22a/Pf++7wCvbPLhhXk907eebPp/9bo+SslYBAiDuU6PdDvfPXtv+NV9T+2mzAyF0gORSL4h6kkDf64wYHjFNOwXqph2IlO7C1xKEVnmkHSiCaj7C7e0t7u5eqp5bm9jpdMK3334LEa2KOeelgcdxijjlhNO79whTxDRF47ElRLM/RE6Kr2AcMyNwtEo31YZUSwtAR3/J18nBrbbnDBicpogYog5XWNCmNUaOamcDoZSAKgG5nCFSrF1wj/1+j1oJhQhxr/QdUs44nQskC6iiVQ1lo59QHSvIYtcjBYSKKSh1QZx2ttfXVb63t7faSnw+r3y/GBS4LxCAjaKjoldIg6y7IUIgneuYKwLWAI77EF5QgUt9hdtVS2KCTBeggzlMb9W+KFelVxs1jjARaHyUAFhyloP57WvOO0AGAMX9167j/rps1cW+vxzkc3/Vn7WL24f2OULaEux6bfZ4XGc0s5Ga/ulF2P6OESUXpHw2oLsP73Guu2znIBtIpwBetWEk1M5DZm4JNxcfrtj47mVo7R183A/Zp9FOXJ4N+l7KL0ZEiNayzBZ3ifHvB5D5S0YfIWIDhdyXCUo8wNoxUZNXT6rtmybjkW/ImfpsbEMJxs4ZP69ijKYH60GRoN451Z5P8DPAoK42Zdwqy43aQytTy6AHumaqH5r4qIXaz/UZeofkmvu0nZ2yxobYk2AWc/rniEibDO+pVR/ERHDihrGI5Lk/pZ0E3X90P0csYSlmz3N13yKa3hrFmnSchqB+nUAsQeaD5xRIDhQxM6MKN47m75M/CaD7w9/82z/lZZv8I8nf/sd//WNfwv8DOQ5fXyjpL38O4Of2TQXwX9cv//iF/gGAj32zfQ2Ur/XLNwBgY+ORAXyrXx4A/MIPlCcAvwaW4X1nAL/6CMBH9oMz8NlrAK/t+wLgv7WXxz3w8HjC+XzC/eMJj28Fh7sn4KM97t7c4cXtS/zut1/g9199h8el4C//4uf45a/+GfY3B/zm87/Dbz7/BsIFGV/i6eke332RcZhucHd3hzixtiWIZeVSH6fdAkSy7dumLhgwJ6T3LQXEAq49kKrWtgIRJQWCGyP9mQe+AkFJZ21hqcUQfyP1LBVZtBIu2/dVCgiiAXp1bgpq4A+A1uLVxKuYLgymVwTIhfEiMXDPshz7eYc5ThZorA2vGujnANxlRmz8+rJCLqcFY1XDeDBfgnsrR2sVwDz/fL9Mz9wAjFrH8eS1BXyeqXYSUn3cvApMxutbL++6gqMFqvrNsF508WDQ7pFAQ8ttNSdKuTrGn/n7jwGnxgH07PmCrJ2igb6XAJ3jdq4w/RAl6sByex6qsBfPwJxIch3sazROfXXnxp3OS5FcUJmQRYwHUYNDHvTAs27exi3ClrmzigygV4Gs1snWZ5UoMVCwbY1eFcdaUtW69gjUsBOvkBXbf8K9ygsXe6NdA5PtETZnyZ16Gx5DFaDSOGZT1WEv0abeotY2BbH6/hHYexJaWYBX+aK3LpIFv8wM1GItC1axRVahQdoih1FHdFEsOLcKGDbHso6BhII98zzZWitXiT5vdV6bI95Tjvp/ZkQKCNMMQcV5OffptrQG47u+rQEqBVgJnpAs1YM6apVTIs7lREBdA4ReDTDqynhf4326zRgDBL8fAWxqWjWwkht/resNNSDLkS9zVI2e4pljO/ysgfRDe19bA7F2W0jbg8POsgx8VGzZnqfyEQI+qZmDg/iuQRZAQHQwjbJ5a7VJEWt5M+CG0HRQdc3tgwOpun6n06kDTejtUVOMmMIl8XQA2bnrAWFfc+cB6nyFblfG34828lJv2u+rqPMuBJA/M+Nuq9JoH7x6UicDk4GKphPWRgYQavUKrUn5wOxMOZ0UdFfWFG6PnwO0akFUZyqMX69qZYEYsBM4eiwFsQDOn7LvOb8/5zIbp+d6sHipux6kjcTwfR/rvgkhIMSoALrpX7Sq7L5XyDKaGN4XBqrJal/7eaDXY/+EHPzuwWcH5593lcBtCwSQstofOWWcTgseH48IcU2uXwkIgXUQjbWSUtRAz4FMrzRUACLjcLjCixe3SCnheFQKldNxsaqwgnOteDidgdMCJrQKpDkwpqg8jDEUnVYZCqZpBgJB2KYkeyswK7BMQRr9iJ/TFZ0uog/6qiAp4CmiMlBqArKBJIExzxOYdmAuiNMLHI9PWJaz7uGckSshLRXJeNYEyhHIFrezaAJIKTxqAxsFAFezzaKtlA/vH3B9fY3rg+7VEAghTKAA8BRxt3uFnBNOi/rxy3Luz0Rqaz9k0apwEk9GqW7xNGmCqWjL+Ngx2Pk/0aYZqz1UX5YudNNNLiyx4ok/JlKf21urSXnOmAPIJpYD2k5cjO8wWhLMCRR8uma5AKjcjjGP4EpFKUnbYg3oW59B/u+LcSmuz90RgJvCbECXnQ1WFe6DEpgYpRb1r2qFeDLNp3i5TTAfQwESp3ew5EXjCasGAKtBVNdRrz0g6HCv2MHTEAJKMhsRdN3Z7wX+WMj497sd75XT3f8VkVYR3uIzUZoeIsIEQhT366T9px1BA4t/NQAa1AIm8so71nOvltr0sNoUpOrJVPs3zAqya8eSIC/FOOr1+nLOylE5DbZ10ImejLGEods7pxGx7sY2gM+nlMIpEhww7jGmAnSdyoyZLIZ93n3k+pRrRYherefSgTQH3vyn6o9a7GP+iwJoptf1YlJx7OeL2Hnn1y+1g3wO0LUGLvb7dL0ICtiTF1Z1317XUteGSQHryAwgarLkB+QHAbr//Ltv8O8/e/1DL9lkk/8v5X/O1/j62y/wd5//LerDEd988x0eHw6I377F+YpR93tIOOMPX/0R37x7xJu7N/jsZz/D/uYa5yKIuz3uH97i6fgO98fPUUrCFX2MTz/7Cf78Lz7B8fgIDl9AkCBFkLNn+rlXNK0CKpUeRFUQEhjF2gjRON66g9Ur5opQz8g7EapYlr7W1gLjmYUiFZGVN6gDcj3zUUUdwiqDwz2AL27I+qmgX3h7q2ciPih2SB4Oh1YiX4ZpQA18kg+3p/t9OBfDZeavOzF9nca1HZ358f3GtjERsVZE/3wFYRqeJL1VgMi4bYxDSgG5itkddbIqNgeYgjmbHLQdZqhcW60ReSk2rz7Lr6YdVMMUpDHjFqgTwvq9+2tCCNaKa05EW4shiLGKKD+sWgDP6vS09jX4IecH2wjQrdfYLhhxNb3xOQAl0OmDDbwZnp3qXX/9WPHg37uMrVcdgOj3ozxVwSoii3GnBd0v1nLj9zKChMwKyhJoCFit9U/UwWDqAZwGNx7Yw5yQasOQ/Bn534SSBYyAebLRMcYxRHBAcQAsDSQOMSKGCUBELYJzOsInxQW/XurVWdn2CjFaxaLXscHAJvY18z
1ijlPOGWHS4EOrEzv4X0UB6slAFQc3HbxwnhnYhEt9vlbFKz04j0YkrWBcDyxUhwferyrG8+LOtwYUgopsBOCjDWjPndbg9aiD6oix2Ud9ONoM+LyV2oEEb9VD2x9klRF9D1aroGgAhQXso43RYDkjjPfoNtU2WYPwDAzSbaWTmEMI1roo4CqrSmXAqvFKb2nTqXvr1ieC8u0oTUFBRbZnpOtcRTXFHWqRXl0lYA3qBdCEk+9Naq8d125Zku4HSy4pByHZUJRe6ThWQsXoAVZpFcsx2lRYaxcL4DYhtNRie/MiA8/azq92m1afVSuMJ8jXuj/H1TMdAobWpmVt7YIMJg0utUpOKUP8rB7ttZ81Xp0tAOI06bqJtph18NeqV4tzRXOz4x2boc69VgqUQ1L1omTtdKFpPOPWfshokn2/uH64To5fj//WvlrZZCZr7yWditiqHy2uEqZWPVEgWEoGnp7QXtDW2ANtagBDe57ktlL9hw4e9iqPXonSkxluC/x9dQ8CYBuSMytZey0K7qR0Ru8q0bFDOnGw00WEELCbeueEt1f7GhFRGxax31/h5cuPULJW7Z1OZzyljOtUcFyUy05KQi5Fk72omFg7NwiCGJRbjoy0kCiAp4irm2tQZPNDYL6WtEqkGJX+QULUwRRFh/scc0Y5VYSdVppwZUiuoAxIDgiBEIPawykyAu8hIBDp+bMsBcfzCcflwXxZG74lBjpUr9bbwR4KRBgEQZCIWCNq1YEjVzsb3LQk1ArsdgCFqT2zECMOUX3J83nC+XxqfiBNpOeQAYIKdhtNAQkIsele93XMAYa1eHsvU+OdRQOCvHrbgfQOhOlt7fd70wev4tYJ1lUqkk3oVrJQ/Uyu1q1EArAOUBDf07b3xkqn8Vxo9lwEAqPUER4AmnWHyeW+aYmZtpHRkmH+x0x4ayFuE1/L2pdxQMjt6n6/R4gBWmm5tNeEEFb7wn+uoJz5QujnIw3g4Ghrmo8MBwQNa7VYaLTRY4JhfI9nfmJxCiqzo4D6RSItsRqYjW+Qml9H3Ku+stkn5QvVwWclV0geEpFAGyQi1k1Dsbfyj9cXrDJUuxIwmkYdkKTOlAKBDugGb2GPdt3uwwScjUKELanpmWX93KJnMwAR4xsWdF3j9RBMjdmex1QoUOoLB11bLKmf5QO4iAxEN2XTalT7PPugNjH84ln53qpl/Tx5fC9ir2lp/04r8XV4k1jxAtDjA46mQ+0ss6o846/+UBfXKD8I0P3Nf/nv66DPLqxlAex3wTcfLjfw2OJKgNKE65CIcYEYgFWbiBQItDy2wksqLzikLowD6sX38CDauK5aKadvxvHBtOcNiPfhM1o2eUB3P7gWeO60jU7T5b8bfzZmB1dOn29wM/puWH/9H/7Vs/vc5MeRXy2P+E94BD65A3AH/OIT+00B8DVw/Fqr/j4C8O9e2u9+DXw1vMm/vIJOnPgEK5EvgT2Af/PpP+QtbLLJJn+P7OcdvG1ARIyzqNtzrRQKSCkjJZsAxayARQA0g2tnlzvGNoVNy/25Zf6EqGXpBAVTANha1ef9DA4TUso6oRKEImiZOcAmk9rRKiIAK1k5RMG1Wr2CS6B2yjJ/rNcRQzAS4wmFoBM1JTdHuBZ1GmPQiVcYgkXAz3IFjx2wJzYeNOm+gVdVOj8gKaSilcLWMhVjhE+ZGh3Rlr0mau0N2kpQlKvNjtFSEjgIYMMNRvCEfdIeeia52kRzm7jv3QAABHpJREFUTaUrqHdedIqiAADpv+Ogk3KDTTGl7G2QpQ2w0emkUKdWyDuPVwkHTbD4NN+eEVYH1EFlXSt3+YooENqy6qS8Nsp7WA1MUZBNqrRqRJijyYFbNVQtvWW9WnVQYG3TZiLk2is3RzBPr2QNeruMIHSVCp0ePaEUBfTErkfQHeQKBWO9YliBoYgYqLXItsDWKg2YGQGMakC2JnV0DQNHOB+i1ILISkJP6JUdWoESLThEr75yonx4S4r5jqITX0vunHilAjESwqRgfJVsoIG7tpY4IL82wEYX6vr7/gBWCbMwBsoBujYWzITA4Bh1YFUDBTsVgoNdPmjF+fRgQSmRVryxTfbTqnetQNQR8AIaMv5aScd9sIhUCx51gJADgczBhl204t5VNcYYKPo+9hghryoB12DlCNCRIgytCjKygdOQxh8o1u7m+iUiOC9JgWzilZ3yaXyq38pxNHkbofZk6/01ot51Aqz57zQktezhM7NSBmCw8S0odb5x1dEl5wZaiFSkkrVK6KRtlfVwaGCgrlsn9fchBCFY+yoHyETY7Xa4OtziBoQTGO8fjzg9PSKdn7AcH3B6ekJKZxQIStIWv0gRU8zQVjSr0IkB37x7h/mww+Fwg8PVHvM04epqbmdMOp91aI948FkNsNZKc+VEg/GfZhxzwvkExADME+Eae0wTgQlIraIx43yuWNIJgRSkjhwxzzZhuygATxTAYWr2GUR90J3bo3oFFsHptOB4XHBaEooA86yx3+lsrZWtXdlAH6/gzRU1VEiLlDWuLKUgoyrnI6vmO4cogTAHtS9LWUDSOSYHa2lgBqmPwGwcpYa1WUwbbSIo1T7EQapAcrX7iBA7hwMzwjw13cpLRtzP4OCtmmgT0dvZ2faEAyf9HBcCcpJWADCCWV0H1yCQ/77/zmL32oc+jZ/j53mwFsnkFXnUz0u1gZrwL0KQmqxDpmo1JAKCSAP4tL1XAcYqQE3a+gygrQOMPqM9i1oMaPVr1ySbvmfvsAH0vPeEw8hXN4r7JZCi9tXoYtyusdszItRgWIdBJmST1IGCJARBgdigiMABPAVUWBJGPHFn5xtEK1OtDJoJeqZDtHqZF6WcIEaNAYLc7JrjIX79FWqfa+x8dzo/QME5HT6xptBxXdI+3WiDEYc1sa9rVaCZQv/dKA1bCtG2c9clpUTqA4dEOs0OWAsLHBAvWacwywf0dPWs7OzJtke9c4mE23Xr817rPCvSvnqffgK6j9PPQzIgvVqpLXsJ4/cIfeiCN9lkk0022WSTTTbZZJNNNtlkk0022WSTfxz54fq6TTbZZJNNNtlkk0022WSTTTbZZJNNNtnkH1Q2gG6TTTbZZJNNNtlkk0022WSTTTbZZJNNfkTZALpNNtlkk0022WSTTTbZZJNNNtlkk002+RFlA+g22WSTTTbZZJNNNtlkk0022WSTTTbZ5EeUDaDbZJNNNtlkk0022WSTTTbZZJNNNtlkkx9RNoBuk0022WSTTTbZZJNNNtlkk0022WSTTX5E+d/mOmLlpAK5PAAAAABJRU5ErkJggg==\n", + "text/plain": [ + "
" + ] + }, + "metadata": { + "needs_background": "light", + "tags": [] + }, + "output_type": "display_data" + } + ], + "source": [ + "img = mmcv.imread('kitti_tiny/training/image_2/000068.jpeg')\n", + "\n", + "model.cfg = cfg\n", + "result = inference_detector(model, img)\n", + "show_result_pyplot(model, img, result)\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "cgSKHJobQpt6" + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "accelerator": "GPU", + "colab": { + "collapsed_sections": [], + "name": "MMDet Tutorial.ipynb", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.5" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "1489fe29d91748cab449718d687f4ee1": { + "model_module": "@jupyter-widgets/controls", + "model_name": "ProgressStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "ProgressStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "bar_color": null, + "description_width": "initial" + } + }, + "171ea927699a474084c49f8874942ae8": { + "model_module": "@jupyter-widgets/controls", + "model_name": "DescriptionStyleModel", + "state": { + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "DescriptionStyleModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "StyleView", + "description_width": "" + } + }, + "7189ce8a6634410a9e633832e8151070": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "767c8f4fbc924027885851365ceb6292": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HTMLModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "HTMLModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HTMLView", + "description": "", + 
"description_tooltip": null, + "layout": "IPY_MODEL_7189ce8a6634410a9e633832e8151070", + "placeholder": "​", + "style": "IPY_MODEL_171ea927699a474084c49f8874942ae8", + "value": " 89.9M/89.9M [00:11<00:00, 8.22MB/s]" + } + }, + "aca1c388eeca4c87b5b6306302630303": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "b9b75e2d894e467289cb83070b8bb998": { + "model_module": "@jupyter-widgets/controls", + "model_name": "FloatProgressModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + "_model_module_version": "1.5.0", + "_model_name": "FloatProgressModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "ProgressView", + "bar_style": "success", + "description": "100%", + "description_tooltip": null, + "layout": "IPY_MODEL_bf1e5d0665a141ac9c2085062ba77801", + "max": 94284731, + "min": 0, + "orientation": "horizontal", + "style": "IPY_MODEL_1489fe29d91748cab449718d687f4ee1", + "value": 94284731 + } + }, + "bf1e5d0665a141ac9c2085062ba77801": { + "model_module": "@jupyter-widgets/base", + "model_name": "LayoutModel", + "state": { + "_model_module": "@jupyter-widgets/base", + "_model_module_version": "1.2.0", + "_model_name": "LayoutModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/base", + "_view_module_version": "1.2.0", + "_view_name": "LayoutView", + "align_content": null, + "align_items": null, + "align_self": null, + "border": null, + "bottom": null, + "display": null, + "flex": null, + "flex_flow": null, + "grid_area": null, + "grid_auto_columns": null, + "grid_auto_flow": null, + "grid_auto_rows": null, + "grid_column": null, + "grid_gap": null, + "grid_row": null, + "grid_template_areas": null, + "grid_template_columns": null, + "grid_template_rows": null, + "height": null, + "justify_content": null, + "justify_items": null, + "left": null, + "margin": null, + "max_height": null, + "max_width": null, + "min_height": null, + "min_width": null, + "object_fit": null, + "object_position": null, + "order": null, + "overflow": null, + "overflow_x": null, + "overflow_y": null, + "padding": null, + "right": null, + "top": null, + "visibility": null, + "width": null + } + }, + "c3018c8715924d2b83d817cc6c448a2d": { + "model_module": "@jupyter-widgets/controls", + "model_name": "HBoxModel", + "state": { + "_dom_classes": [], + "_model_module": "@jupyter-widgets/controls", + 
"_model_module_version": "1.5.0", + "_model_name": "HBoxModel", + "_view_count": null, + "_view_module": "@jupyter-widgets/controls", + "_view_module_version": "1.5.0", + "_view_name": "HBoxView", + "box_style": "", + "children": [ + "IPY_MODEL_b9b75e2d894e467289cb83070b8bb998", + "IPY_MODEL_767c8f4fbc924027885851365ceb6292" + ], + "layout": "IPY_MODEL_aca1c388eeca4c87b5b6306302630303" + } + } + } + } + }, + "nbformat": 4, + "nbformat_minor": 1 +} diff --git a/detection_cbnet/docker-build-context/cbnetv2/demo/create_result_gif.py b/detection_cbnet/docker-build-context/cbnetv2/demo/create_result_gif.py new file mode 100644 index 0000000000000000000000000000000000000000..6646c6b3d45c36f5d356a76aea97fe9a5a9cee06 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/demo/create_result_gif.py @@ -0,0 +1,162 @@ +import argparse +import os +import os.path as osp + +import matplotlib.patches as mpatches +import matplotlib.pyplot as plt +import mmcv +import numpy as np + +try: + import imageio +except ImportError: + imageio = None + + +def parse_args(): + parser = argparse.ArgumentParser(description='Create GIF for demo') + parser.add_argument( + 'image_dir', + help='directory where result ' + 'images save path generated by ‘analyze_results.py’') + parser.add_argument( + '--out', + type=str, + default='result.gif', + help='gif path where will be saved') + args = parser.parse_args() + return args + + +def _generate_batch_data(sampler, batch_size): + batch = [] + for idx in sampler: + batch.append(idx) + if len(batch) == batch_size: + yield batch + batch = [] + if len(batch) > 0: + yield batch + + +def create_gif(frames, gif_name, duration=2): + """Create gif through imageio. + + Args: + frames (list[ndarray]): Image frames + gif_name (str): Saved gif name + duration (int): Display interval (s), + Default: 2 + """ + if imageio is None: + raise RuntimeError('imageio is not installed,' + 'Please use “pip install imageio” to install') + imageio.mimsave(gif_name, frames, 'GIF', duration=duration) + + +def create_frame_by_matplotlib(image_dir, + nrows=1, + fig_size=(300, 300), + font_size=15): + """Create gif frame image through matplotlib. + + Args: + image_dir (str): Root directory of result images + nrows (int): Number of rows displayed, Default: 1 + fig_size (tuple): Figure size of the pyplot figure. + Default: (300, 300) + font_size (int): Font size of texts. Default: 15 + + Returns: + list[ndarray]: image frames + """ + + result_dir_names = os.listdir(image_dir) + assert len(result_dir_names) == 2 + # Longer length has higher priority + result_dir_names.reverse() + + images_list = [] + for dir_names in result_dir_names: + images_list.append(mmcv.scandir(osp.join(image_dir, dir_names))) + + frames = [] + for paths in _generate_batch_data(zip(*images_list), nrows): + + fig, axes = plt.subplots(nrows=nrows, ncols=2) + fig.suptitle('Good/bad case selected according ' + 'to the COCO mAP of the single image') + + det_patch = mpatches.Patch(color='salmon', label='prediction') + gt_patch = mpatches.Patch(color='royalblue', label='ground truth') + # bbox_to_anchor may need to be finetuned + plt.legend( + handles=[det_patch, gt_patch], + bbox_to_anchor=(1, -0.18), + loc='lower right', + borderaxespad=0.) 
+
+        if nrows == 1:
+            axes = [axes]
+
+        dpi = fig.get_dpi()
+        # set fig size and margin
+        fig.set_size_inches(
+            (fig_size[0] * 2 + fig_size[0] // 20) / dpi,
+            (fig_size[1] * nrows + fig_size[1] // 3) / dpi,
+        )
+
+        fig.tight_layout()
+        # set subplot margin
+        plt.subplots_adjust(
+            hspace=.05,
+            wspace=0.05,
+            left=0.02,
+            right=0.98,
+            bottom=0.02,
+            top=0.98)
+
+        for i, (path_tuple, ax_tuple) in enumerate(zip(paths, axes)):
+            image_path_left = osp.join(
+                osp.join(image_dir, result_dir_names[0], path_tuple[0]))
+            image_path_right = osp.join(
+                osp.join(image_dir, result_dir_names[1], path_tuple[1]))
+            image_left = mmcv.imread(image_path_left)
+            image_left = mmcv.rgb2bgr(image_left)
+            image_right = mmcv.imread(image_path_right)
+            image_right = mmcv.rgb2bgr(image_right)
+
+            if i == 0:
+                ax_tuple[0].set_title(
+                    result_dir_names[0], fontdict={'size': font_size})
+                ax_tuple[1].set_title(
+                    result_dir_names[1], fontdict={'size': font_size})
+            ax_tuple[0].imshow(
+                image_left, extent=(0, *fig_size, 0), interpolation='bilinear')
+            ax_tuple[0].axis('off')
+            ax_tuple[1].imshow(
+                image_right,
+                extent=(0, *fig_size, 0),
+                interpolation='bilinear')
+            ax_tuple[1].axis('off')
+
+        canvas = fig.canvas
+        s, (width, height) = canvas.print_to_buffer()
+        buffer = np.frombuffer(s, dtype='uint8')
+        img_rgba = buffer.reshape(height, width, 4)
+        rgb, alpha = np.split(img_rgba, [3], axis=2)
+        img = rgb.astype('uint8')
+
+        frames.append(img)
+
+    return frames
+
+
+def main():
+    args = parse_args()
+    frames = create_frame_by_matplotlib(args.image_dir)
+    create_gif(frames, args.out)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/detection_cbnet/docker-build-context/cbnetv2/demo/demo.jpg b/detection_cbnet/docker-build-context/cbnetv2/demo/demo.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..dd613cee3bc13a3677908d7d6f1899e8278a4b47
Binary files /dev/null and b/detection_cbnet/docker-build-context/cbnetv2/demo/demo.jpg differ
diff --git a/detection_cbnet/docker-build-context/cbnetv2/demo/demo.mp4 b/detection_cbnet/docker-build-context/cbnetv2/demo/demo.mp4
new file mode 100644
index 0000000000000000000000000000000000000000..6c06d15d941c640e15785e0416818181313d83b7
Binary files /dev/null and b/detection_cbnet/docker-build-context/cbnetv2/demo/demo.mp4 differ
diff --git a/detection_cbnet/docker-build-context/cbnetv2/demo/image_demo.py b/detection_cbnet/docker-build-context/cbnetv2/demo/image_demo.py
new file mode 100644
index 0000000000000000000000000000000000000000..95de4fd4f18abe7a8b0fe497d0dee2557f943a90
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/demo/image_demo.py
@@ -0,0 +1,49 @@
+import asyncio
+from argparse import ArgumentParser
+
+from mmdet.apis import (async_inference_detector, inference_detector,
+                        init_detector, show_result_pyplot)
+
+
+def parse_args():
+    parser = ArgumentParser()
+    parser.add_argument('img', help='Image file')
+    parser.add_argument('config', help='Config file')
+    parser.add_argument('checkpoint', help='Checkpoint file')
+    parser.add_argument(
+        '--device', default='cuda:0', help='Device used for inference')
+    parser.add_argument(
+        '--score-thr', type=float, default=0.3, help='bbox score threshold')
+    parser.add_argument(
+        '--async-test',
+        action='store_true',
+        help='whether to set async options for async inference.')
+    args = parser.parse_args()
+    return args
+
+
+def main(args):
+    # build the model from a config file and a checkpoint file
+    model = init_detector(args.config, args.checkpoint, device=args.device)
+    # test a single image
diff --git a/detection_cbnet/docker-build-context/cbnetv2/demo/demo.jpg b/detection_cbnet/docker-build-context/cbnetv2/demo/demo.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..dd613cee3bc13a3677908d7d6f1899e8278a4b47
Binary files /dev/null and b/detection_cbnet/docker-build-context/cbnetv2/demo/demo.jpg differ
diff --git a/detection_cbnet/docker-build-context/cbnetv2/demo/demo.mp4 b/detection_cbnet/docker-build-context/cbnetv2/demo/demo.mp4
new file mode 100644
index 0000000000000000000000000000000000000000..6c06d15d941c640e15785e0416818181313d83b7
Binary files /dev/null and b/detection_cbnet/docker-build-context/cbnetv2/demo/demo.mp4 differ
diff --git a/detection_cbnet/docker-build-context/cbnetv2/demo/image_demo.py b/detection_cbnet/docker-build-context/cbnetv2/demo/image_demo.py
new file mode 100644
index 0000000000000000000000000000000000000000..95de4fd4f18abe7a8b0fe497d0dee2557f943a90
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/demo/image_demo.py
@@ -0,0 +1,49 @@
+import asyncio
+from argparse import ArgumentParser
+
+from mmdet.apis import (async_inference_detector, inference_detector,
+                        init_detector, show_result_pyplot)
+
+
+def parse_args():
+    parser = ArgumentParser()
+    parser.add_argument('img', help='Image file')
+    parser.add_argument('config', help='Config file')
+    parser.add_argument('checkpoint', help='Checkpoint file')
+    parser.add_argument(
+        '--device', default='cuda:0', help='Device used for inference')
+    parser.add_argument(
+        '--score-thr', type=float, default=0.3, help='bbox score threshold')
+    parser.add_argument(
+        '--async-test',
+        action='store_true',
+        help='whether to set async options for async inference.')
+    args = parser.parse_args()
+    return args
+
+
+def main(args):
+    # build the model from a config file and a checkpoint file
+    model = init_detector(args.config, args.checkpoint, device=args.device)
+    # test a single image
+    result = inference_detector(model, args.img)
+    # show the results
+    show_result_pyplot(model, args.img, result, score_thr=args.score_thr)
+
+
+async def async_main(args):
+    # build the model from a config file and a checkpoint file
+    model = init_detector(args.config, args.checkpoint, device=args.device)
+    # test a single image
+    tasks = asyncio.create_task(async_inference_detector(model, args.img))
+    result = await asyncio.gather(tasks)
+    # show the results
+    show_result_pyplot(model, args.img, result[0], score_thr=args.score_thr)
+
+
+if __name__ == '__main__':
+    args = parse_args()
+    if args.async_test:
+        asyncio.run(async_main(args))
+    else:
+        main(args)
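The `--async-test` branch is the only non-obvious part of image_demo.py. A minimal sketch of how the two mmdet inference paths differ; the config and checkpoint paths below are placeholders, not files added by this diff:

    import asyncio

    from mmdet.apis import (async_inference_detector, inference_detector,
                            init_detector)

    # placeholders: point these at a real config and checkpoint
    model = init_detector('a_config.py', 'a_checkpoint.pth', device='cuda:0')

    # synchronous path: blocks until the forward pass has finished
    result = inference_detector(model, 'demo.jpg')

    # asynchronous path: the detector call returns a coroutine, so several
    # requests could be scheduled on one GPU before awaiting the results
    async def run_async():
        task = asyncio.create_task(async_inference_detector(model, 'demo.jpg'))
        results = await asyncio.gather(task)
        return results[0]

    result = asyncio.run(run_async())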
"iVBORw0KGgoAAAANSUhEUgAAA1cAAAJCCAYAAAAsrj1sAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAAgAElEQVR4nOy9ebDm2Vnf9znLb3v3u9/el9lbM5JmJCQ0Am2IEMQiIZAlsIKBgAOxcVwJJadSMSR2AYVZiqQwwQJkjA2xkI0AKQqgXRqNds1IGs3SPb33vd13v+/628/JH+e8b48pK0tFU5DU++2/3tu/5ZznPOc5z/4T1lrmmGOOOeaYY4455phjjjnm+H8H+dc9gDnmmGOOOeaYY4455phjjv8/YG5czTHHHHPMMcccc8wxxxxzfAMwN67mmGOOOeaYY4455phjjjm+AZgbV3PMMcccc8wxxxxzzDHHHN8AzI2rOeaYY4455phjjjnmmGOObwDmxtUcc8wxxxxzzDHHHHPMMcc3AM+bcSWE+E+FEM8IIZ4VQvy3z9d75phjjjnmmGOOOeaYY445/iZAPB/fuRJCKOA88O3ADeDzwA9aa5/8hr9sjjnmmGOOOeaYY4455pjjbwCer8jVy4BnrbWXrLUF8G+BNz5P75pjjjnmmGOOOeaYY4455vhrh36ennsMuP6c3zeAl3+9i6UWVsfK/RACrAUkUvqomhVYW2G5HWWrjbtMCOEuMRYpBNZaEP5RCLTWVHWNmP4RMHD7l7UYDHEcYfLSjyfASkttamwl/N8sUoL076tLixAGg0Sa6UQEKrAEOsbaGoAsKzHGovXtcVor0VGAqSo3ntpgqUmakuHAzVFIi1ZqNmZrDFKF6FBRF7Wfn6KiwAKmqqdXIoXCWuN/gVLWzddMySmQMqAoKk/y29dOKSMAiyVQAaZw/6NVRFEVKD29D4ywYEGpqZ3u7qwKZmOQGqwBqfyzhV9JIRDCTpcBEFSFna2VkBZjQfpHK+WulzKc0beuPQ8IifHzc0tkn7PIAoHFIsHT3Eo3bonA+HEKI2h2uhRl7telpKorPx5/DRpsRV0JhFV+7AYpoaqsH6d2c7SGqnL36UATSEtlFXXp+VhUji6ep4wAkAhhkHo6ace71ogZIUwJmAq0e78KBVJKbAVVUTi6GDub/vT5SOtoY8WM5lLYGe3xay6koK7tc3YMz9mXbn5BaKiNBePHKdxv+Zw1NRakkpTjKe0kCAUYpkPSQUgQSBBuPdNx6V4jZkwxW0MpJVU15T0xe8ZsMgKkFdRTTlaghEAKSTXdHwIQFuvHLYRASjDGzPgujhsYYxhPJnQ6bQCyLMVaN3eAqqowxiKEmPG+wNGgrmra3QUAimqCEiWFYymqyq2BxBIF7lk6DMjynFa7R+35ZTAauv1Vm+eMU2KmTO7Xysm823sNIfgPyWKRSoJ9LjktFoFSgqp0dFlcXCKKI25ubgIQhSFSQlEajN9rWscEEYhaUJZOVna6S6RpH2Ny/3qNkBopxYzPRqMRSkqMlw0AUgqsNY6vASHdDnWsY6dTwVgnz4LQ8XpdFUCA8rKxqgzW1n5Gz5m5sJjaEkYhAKayuH+Of4IIZOC21OjAvS+OYuJE0T8c+/dLLAZrb/OatU6WxXFEEAYAjMcjoiig9vu/ri1gsAh3sWMONzoh3cZwREBgkdN9bS1KuTU2/hrjBJx/hN/vCIy0GE/fZqNBWWZUZcnC8iIA+zt7nl8VdV3P6CmEnMk7wK2L5+PbnCGwgDXT+4Qfw+39KABjLEHgaFCWFVprrDW33yclUkpqv2en6yylmPHxlLbGWHqL7dmzqsrO5P70rbY2lOXtMYVhgA4l4GXHpELoAC1vj7OuDdSzow8pIZCSLM+5DYlbqtmbiKKEPMtma2OtxZja/RZ2dp/St4+ZsjBOBklxe8zWIKwgSsRsHYusBuRsb7tnundMbxRSuLWq/flI7UWiQPgxKamoTeWOuil/MpXnU7nsnmutQevpeWWx1q/5VA5rCViq6dnkhkQQCoqpvmECpDKOz/2+na3lVOcSCoTw8uj22S6wGANB5PlMQJk53WR6jTHGybMp7Yz1e9DOzpQ4icizyu0L/LyFkzWW23ztzlFmQk8IL19m45zKUvtctvZjf+7Z5/h+SnOB1zGfA2stUgrW1tbY3tkGoK6Me+dsr7l101rP9poxNVVZIxXowO+HWlHVEiFv32eM+avbz/GqELOzfTYm/3sqR6Z7Gdxer6oKKeVs32ZZipTK7ROg0Wigg4DRcIhSUyoIJ4dne8EANXEcz/hzMsmQSro1k1NZ4nhfitvyzViLVAolprLSOH3U3qZxVRmaPTUTnWUKUU84HddOzwGDMTU6groOPU0USrq1BaitpcwztITaP0yHAcJairSe2RdSun1Vm6nurfyeun2m1KZGKgXclnlSCCaDbNdau8J/BM+XcfV/CSHE3wX+LoCKFMsvXgIgbiRYISkygTUDAKQJWV5u0B9vkKZOKJaZocodAcEfesIiA4GsPXGLGh0G9OIFTOkEvAwUtSqwtVNC8zpFyBZVnvHN978QgMFTm1yr9wkWIt78n7wZgNU72hzuPc4Xv/g1ALafbKKbEjEZkd90h3G73aA8CVcup7zwxXcB0IwqPv5n17nvoY4bZ6BIzS4qSqnTBgBh1OTWRsbCmQx71QuNwyZBb4SoY/+cmqyEuJOz2Hb3Xbk8QsQJ9939IEtBC4Cd3WcxVrOwGgHw+FeuE7dTpM3QnjHztGZcFRTVbQGsI0leeAUY0MKQm5C6b2HbjanIx6hOxJHT7jlxUpFXDawsUXUCwP7OgGPHFgiaQzauOKbPUkGjUWEaTiFrdmJCk1EoSaAdXYrxIXkI1AG1P4vzrCRJ4J67He/u3hpx8vhRDg4zBgN30cSCJKGRtBlPRm7sQUAQhKRpCoCSFkRFUIfcunAFgChJKLZzJKBCT4dOG7XeRU684tgLOb26RF7V7OxuOZ463MMETZqZJB27+zY3xmgEC0ccb6quwY4C0j0o/GFiZM79Z89ykO4z2Bu6OcsOjQ7YwP0OlCXfCxgNILeOP+NGRG9lAdkuEKrh6bKHSmNEs+merTWdVsKtnU2GO44uihqBQsgagVuHRssSJgVF6nijHGka7RFFCUnurpFBziSw6KhJuuf4WsuQMiigdPdldc6xlZCd/YTx9QkAp18UEK0UTAaGuOnESpZXLB7tcu1z3mkxtsSJhVoy9IejLQBrCWPhl+AYo9EuQpZo5Xi/rjQW94y6ds9WSrlDza0ctSnRQqKNwYipAW2IE0WYdNjdc7IEabnj9EmuXnFGRKPRIs9TQi3ZuuX450fe/iOsrKzxy7/2S3Rajj/jSBEEIZNJBkCvt0qWlwwGAxaX3DoUQ0sUjKhEQthad+Nsb5OnY/o33f7I6RPUPXqdPivdrluHvMs4z7jz3EkeeuilAPzev/pD8qyeKaFJkqCUYjQazQ5SQ01a5EgMjdg9P01zlBI0EycPiqqkMhPuecECV84fuHVJWzS6MV
VV0Gs6ubu7e50Tp+7lllcO3vbGt/KJRz7N5vgQ4RWpPB/SSiIefOBuLl67BsDa4p3kY0FROXpOsjE72wWLa8t0245fkibEzYhssMGVS45+aVYTyYTCut81KS3VQTZqJhPHU0ncpqz3wQacPnUPAEKNaMXrXL55HoD77n0hZSH5zKc/TK/X9TxSUWQVUpWYyh2ErVaLnf4tjp91PKUjRWOxyyg75MZj/lCvQx56+D4uPXsRgM2rhzQbCms0eeHGFGhDXgk6nXh2yEaNgrUjPUYDx6MH+xPCSDhlXzhZGYYhkYqojSD180Majqwv0x85OSWEQCCprJk5EfJJSlEUtJpNZNPR88SRdZ782nVs5p6z2FtmlNZk5S1yzxsLa+vk+RiEIUudLLGmItQGHfj55pZOr8fhQZ8plAqJGgmj0QAh3Py0dAqlkm7v1aaimcRYe9vZEVsNVlGWJSq4rZomSULhHT5aa7I8JQgU0qseQmmQispUnL73tFurToNLVy4TxW7vDYYT7jh7huWFmCe++mVP4yFBKDh5+jg6cHTf2R0gdIt20/H++nHFzrWcnY09Su8AacYLHD9qeeyL51HCGXOmKpFCeaMYwjBmealDf7TFqbvcOCcDw/XLGVprsG4d2r2QO+9LKDju+OX8JuMqQxYaYXY8AToMxkPqskR7pXNxuUGeGfKJ+x0EiqqqCKMGp06fBGBj8zJ1pWaOzHTijNfxZEDg5WKcRExGNUJlVJU/L+IWQlhq4/aVQYFpUnFIq+XkVKvVYuPmDmVpaLabnocFUSzYvOp44eTxJrf6gC5Y8E6EwX7NWqfJeDAmr9x9ZTWmGXVnBrUOS+raYuqAJHH35bUh1AnGFkRNN85BOmbt7iW0cOO8+WyBLROkqpDe4IrjmEF/TGXtjC79wR5KhZSV431TW5QKsNJQ5NLTMyEILdTMxhXHoXOIedaM45Asy6iqijIr/bqHs+sbDXfWGmMoy5I49nJDhwgLURTN+NramigOyIuMVsfJYVsJ8iJFaXeulmVJkjRJi5yV1ZXZfel4RLer6C66gW1er0C0KZWTw3lZIghpN5aIA8d3O1vbFGnhHEzeIFCBpqoqlF8rKSWhdwROxzmZTGg0I4QQ5N650GonzjHrT9I0TWm0Qk4uLTOZDP2zJb1ej80Nx9PZpKLTOkpZlkQtJ7t0EJGXFUhNrbzcMAGBUGS1l3eioInE1IpIOvqWMqWQCbXt02m4/Zjujnn1d72U/i23LltPXubs9/R4aqMgzo4CcOI+xTObn0beOjpzGrabR6l0SZm79y0f6XJw6VmuXRmyftydDf3xiGasGe0GtNrecalyhgOLX3riKKCwNY04np217V6DsmqRlbssdHzQIxBc/HB2la+D5ystcAM48Zzfx/3fZrDWvtNa+1Jr7UtVMG9aOMccc8wxxxxzzDHHHHP8fxvPV+Tq88BdQogzOKPqbcAPfb2LbV3T8hZ+XlZkVYWoJFo6UzIJDbvbNzCypC59rFAKZCgoMvc7jkPqusTFTp21GSTOW5FlKUHkrP4oisAIJpWzihdFl7wxpibk/FPPAHDfq4+itiwXH9nhqU8+AcDWlZdw4XHYnziLW1Axyfb54b9zB+9+p7MbT72ky1PPXidIFqhqZ9Hfupghcuj0nPege1TT7y9Sxdtc/ILzViysFCwsR+xfG7K26Dwf25MRtYkopfPsKNFGhoZmYtndcR6FdqfFeDzi0Q98iv/uv/7HAHzPO36C97//Q2xu7wHwbHIVU2kXehaOxpHOwRrC2C3/YFJQldJlTE3TdqxBUmEaNUunnddGZDULqx3q0Hkwqgq6vZIsK9E+JSasAnY2hhy5I+TsvW7Ohxsttq5UNE/5kG6dUWaKcEEwrJ03XagQoSrksCT3HoRWQ9NsaDauufedOnGO4bhg6yAlSJwnopX0oLSIytBuOG9abgryMkd7D6o0FUrGlNQsn3MesFazx60vXUIMxngyMLAD1HZJy9NpODzkC1s7nDzR4OiR0wD0W8uMd/YYxgWhcV6+E6dyIiVYPrkKwMbBPqU0FBQ0dlx62N4o5cbumOGw4MQdznMl1yvG4xQrfVhbSLotQbljOL5wDIDdpzbJbw6QRtFedd6mvWyCLtp0F70nOUvZvblFcykiHfl0yTQiCQVJWzMaOzpMRpAkgsWO4/0xBika2CSnGHrvcpGgl0pqm8+8fGVZUNkIq9z7tYg5HOSoQKFwvCHLmCDcQ8cxmU+rVFJg0oJOpwfAwf4IW5SAQcfOu1XZEinimdcqzyxlmRNGepbmaYyhqkvCMPwrqS2GyqesJXFMmqYIaen5ZwfSUkjF3kGfzKcFRlpz9crmLD3MmIKqzmk0Oxw55tbvo5/4JEpqrIAgdDwlpGU8HlPkPqKQ5xw5coxms83mzctuvknMa1/73fz5R/4SU7pIzt6FPktLC5w65tbq0jWJtSmliNmfuL0+GWbcc99JXv/67+WZpy/7tRqjZDRLfyvriuFwiJaKyod2szzjO7/vjWxc3ODJp5xHP0kiqsqQFT5aKA3tdsLm9QF4z7lUJVUmqE3GRDsv39/6wb/Dxz72pwTKPfstP/4jvOBlL+Yd/+hneOVLXwnAT//DH+ZtP/T36CYnOXXMLc4XvvIIL7zvIdIdN869/i16R2JW13qMRi5aaDNBu30nw8Y2cduNK0pC9ic5a8suytBrLnH5ygaD/Zx2wz1LCosgIjc15y9fAODUiVXS4T6ray5a8MqHv5N3/vZvsbDSosgdD5vKgFBEUYvUOsfdweiQKIkY7Tt5WvQVIgwZVRM67mhglGsuX7rBsePLAGxe20HqkKrKsdJdlJsaUxeUZT1LLS1qwdVr2zTCth93QF0ZrJEI74UPA8E469NtdglCd4bsbO0yHhWzNKE8z1k7cpQrV67N/qakJIkjpBRM9h09zx+MOba4xMh76rf2rtCNGiw3Okx8BCMb9jG2pCjKWYrx6TOnUcrOIjSXLl6lqCqCUD83042qygnj6Hb0rChpxs1ZNEQRsL83JgiZpT23Wz3C2HnEk6aP0hYZaTYh8Ge7UgqtApIkoS6mqcMltjZopfjyF54G4Fte8wqkKMknbi7ttubCs09xM2ojcOsQa5fqWeYVUjqeWjjbQfYDbt645WgwarC/e8Dy+irLi24fX/vyNofRMXTQYnDg6BkoQTNZReB+HznSIopTBrklSdxc+gd94jhGyALl91FZDnn8c33ysY+0rhYUZUnTKkzs5ldVkraShI0l8sxngKQZZal5wf33u/W88FWSVkiW1+zsHTpahUNGQ4WW/pxrN+h2YoaDFgf77txZ7q2wL24xnuQ0fbZAXRsmY0MceblMRhCOCE2PYuTouT08RNaSVhKjp9HXSNHtNAkfcGmlbF7nZNjm7F0P8alPfBaAZqNJL+5xeukeFk+vAfD4419hc+P6TC6PhxWdTpta3E6Jp7bktsCSw8jtkThsoZWgyvw46wwRZ0gbotU0lRdarQ5VlREF/vmDMYEKCX0Ux+iSoigIRAvPdkhREwc9Ujua8bAxLppjfXiiL
GsfVTYuGgnkWYEOFI1G6zn3GZKkOUuJGwwGtBpNJpPJ7JokDkknObUpZxGvST3xz3V8sLy0SJpm2KrmcG8XcBnC1kjG/RbDQ7c2RVlRiwNaHac3dBebDAZDxoMREzFNV3apbFIyk+nT6JTx6dqrq6vkeU4+Sen1XARYKUFtKpRWRD6bRWsXNa1qd18ch1gMN29u0e26uWSTnGc2bxCIFT8mS15uk6aawPOZQKAKiwktwnraVYZKC+Q0pVM1qVJLHqZor9dmQBPQZpncB9CTtuDepXP88ec/CsCr//4pdrYLfu4n/3sef/SrALzvU/+Og1IS1BU6cvpFs9oh60cUfZdlVDRyel3B+KQh8/Rd6TQ4OOyjdYCZpZ9rsqIi8nIyTyvChQShDLX1WTiThHi55vgRxeSGe9Z+6fbm18Pz0i3QDVi8Afh1QAHvstb+/Ne7NkiEXTnnFilotckKQ6hiAukWqaYCWWOLbJZ7mxUlRWWpa5/qllcksaY21bR8AxtppIVQaIKmW4A6q9m9voMIbuf+tpWguR5xuOMMovRQ0mkFsFghfI73A2dDyCMCeQSA1vEuhwfPEEaCaxfdOEdhTJmPyCvLXScc4auNmK2d63SOuZ2fNCT9zYJ+VCJqrwQGNUePLzA8MGxvOA7TLUFV1Agf1QvDhFCFxDpjYcGl8ly4uEmjmdARS7ziW14DwI/+6Jv4Z7/4Th772scBWD0WMZmMCaykzhxTNENJHgTsTdzcBkNX56JDjfC5qcq6PNSJCAh96k44julvl5zwqRJWV3SPRqSlpfbKnNYV2zehMg1E5A6dSICoQO64Db10r8LGJWUmqPx9FVCVIApIEncQoiqyrGB93YWCX/Gy7+DPP/QBCiFodZ3AKEuLqSdEoUZYx0OTLCduBEzLwOpCYkRGTUHsFYu+FDRExHhzGzF2xsBC2GZQlciJu3HtaMSNDA4GfRaabuynXvwgVhgO+rt0/cEUmhH7o236A0ffwHRIGpooHiAyx59pVnM4qmmohPaaO8BGiWGytUNz6lhQkiRMaIoFMm+cT4ICO56QXhNUOOGqVhLKUUXgaVBlOUW2i4wVderT7SYR0hq6a5pJ5lOxJpZWAt2We99kXIINsLpGeSfF8FCxeDrhYDKk9Jl0QkItmgSBG1NRG3QFYauN3XFjOn5XTBpPqMtFtHT0lLagoiYK3TiXmksUw5j+zT67l919dVQTRRHNplu7qqrp9/uEQWNasoOxBXlR0G61WFtzh/rly5cIgmCWDmOMIYoixpMhJ3veIKprChkxBsZjTzsp6XYW6HS055+a/f19zp07R+kexc2NQ5Jmi/5wl4WOS+8LI4mpJdeu+iwAkRHHDVqtJfYP3GE5TgccWVrj1t5NYm/gSdXAqgIt3F442As5ubZGEdQ0O+6w2tvYpdGQjMZjdrxTpNfpEEaQZtP0HotEII1lbc3JoIvXr9JeXkaVgtHYOSmUsmh9O40tSSJMHdBprZGXTnHLy0OoA3QoEMorq9ECdTWgLJ38WVn/Jn7799/NYZGzcd01eS32M/7BT/wQb/uBN/LRTzj5omOBqQUL3vCfTFLSTGNsztG1uwE4snY3GVucf/aL3HHHi9yctyy7N66wvOwM2tLUFGWf17/+9Tzy0U8DsLVxgzDREAkK71TLBhVJpAm88rHSvYOb288QhBOMP9TrTBJGmvEQhD8MdBRj6hZB6FI/GzqhkDWmUZNETgapKmdrM+clL3Vj/MrjFzB5gNUjjHH8mVUZQa0JggAdxp73KtbWl7h1yyn1ioA8TxGyIvdOvHN33Mt3/8Cb+Wf/5Od5+DWvAuCrjz1GbSvC2CkxRVHSbLbZ3dkniaa1TCWBFkwmE77lW18HwBt+8E38zi/+OksnnIE5YkKxs8+zX7vE2p2nAPjbb34r7373e3jNa17FV776ecd7/UOkCFhacnx36dKzqFBTZBmpNxCCICCvK6wQM6VTq5A4kKSj0l+jeOvb3sKfvPcDHBw4x5eQFVpHCBTS1xcb62qU4sidfePxGCEUKytLHHgjIq9Sms0mRVVx7PgZN59sQG8p4saGo+eJ02c4ODjgcCclTvwaFyCqNlGrprfgnq97AZIa7XX6pe69FNUuK6sJG9fd+648vUdWDtCBmdHh+tUbNMIuw5Hbx6997bfx6COPEsaaSer21eryOoiag/4OAU4G3XPvOZ65+Cn89Cjyis6yJB41iRO3jyaB4vqzEUdWI7b2/B6VXYQMsN6RMZlMZsrytDxNiQqBJo4W/BpUBKFAi2RW73j9+h6dZsTREw0Oh+7Z/UPQukUU1X4NKqRwtdJTQ3E0GqFVQI24bVw1I4oiJUz9NYe7PPDil/PjP/2T/MFv/lM3v8ObiO4x4vAIewc3ALh58ybdhXgmXwcDQ4WitgUq8E5DW9JbbqOjjN1bU6OlCYGr4wKoswMqm9Ns9AhVMuN9ISvSbEDp93+32yCgZjD0aeIG4qamKApCvTjjV8uAsrpd2zetdZrqu1pr6romy7JZbZ8Q4jmpod4hIAO01s4pDygtSNOUKAhnzy6ynKLMUEr5uhznKIniAOPrFqVUZGnha3TVbG0qk2NrQTtxcjBqBNzavknSnqZ1D0miDloJxuPRbJwYgzFmVpekVIC1lrjR8nQpfV1iSFH41OuiBOHq17ScOikFZVUTeSbWKiaOIu6//xyf+MTHAHjwoft58CV38/4/fQSA173u9ewd3ODSpUtsbrrWCqOBgbpExgrhz9EwbjE2EyJ/kJdpQbga0YgEB1c9fY8YYiOIAoHF6Rf/9Jd+i1u3LvNrv/HPAfjP/4s38ZlHn+F7fvRv8dgjfwLAxx99hCIPWO3dz9KdLknuSOtJtjavsvmMWysbpeSThHFZYXadPh5E+yStiMpYKulkXlqWmCrGWHeGBVKhWi1sntFadM9+3Stfy/s/+YccX9WU130/heVTXP3IE1+01rpc/r+C563mylr7AeAD/7culoAXUFZL6txQYzF+48lAQaBItKX2leEr3TbDfkqj5YTPwsoq569eQIoavEI7yitOHzvK+soal67eBJzyFijhqpkBLQr6haXagNorRK3VEiNzGnVEETvufdnD5+isWi5fdYfJ+/5yk9XY0FER7cQt5qRwRcRVBZORU/BOn+myuX195uU4ehyOd+CZS7Ad+/qYLGA4GBH3lln2xf951ifQEuvzV9PKIJRCyzYPvsh5kvPqS+wcbHLyheusnnEM/Nv/8s945tIF4rZXqPMUrEQHimriiw9tzTizHAy8IYUikM4JUvtM0cpIkqBiUmaznFYlBY2mJox8zqm2NBqwdz5zufOAbEuEMZCn6Gqao18TxRKx6Hbd5kVYXmijkwnCewuKzLii8NqwsObmUtWa6jDn5FmnRKRZRShDVJljDnxd1JJmmA45HI1pN9x1jbiBlpLSF2kaFUFtqQPtty9EgSYSFdV6TGjbng5tFvb3OfCu7IMAFgwU7RZF6Mb+9Mee5AWveDmnlle5tOtqM4KJpa4MoVfu9i4NGBZt7lhfwnaG/oWwcKZgcm2AzE47
PosDwiXP+LhmDEHYpK4tjYZT3MbjCUIL0nJA7GusbNCku1CRecHd6ayjOopb2ztI43O+tcVKSVpmxG3P172AMp9Q+MLfWgmUsoR5gmy5+bWaBWmeITI9KzYuqhoRpFhvgDVaDYzKyO2Iri/ltKHzA4XJAOkPdi00ogDjI3wZJdVCwNIJxdE7Hc2/+ugITMrujlNstNYsdtfZ2RsSTqNbdUUUJYyGE/b3nwKg2+6QZdPVBB2G5HlJGLbY9x7vXqvBcFySlhVJPKWzYTRJsUZ52nW4/74HSCcTvvS4MyKSqMtkcwMVWirvYb/zzjspcjFTUMp6TF4eUPcLul3nuGk1muwPd9BWUE3z/+OCPM0pfH3H0fUmkSg4fuIsjz3t5jIejBiPIoLYcvyUc5zUmcTYjJVV93trext8Q4OpIiMR5KMROsxnTTWk1FR1Thj6xhulQAjBYKT+gzYAACAASURBVLQD1tcWxC1qmaNkTJo7OTHq38CagEbPPefa5qP80A/+CA+86AeImy4K9+iH/xypK1700hfTWXV0+MN//R46nRb73psexzFVGpDlmtwbsNeuXSKJDevte/jUB92eWV5eoSosz553Ean7X/wSkmab9ZXjKB/JneQFOoyps3wWAep2u0hrGXhFOB8PaTRjhGlQZPtuhWtFVYGlQPua1TLrYwJDmXvvb2YIooTJXjarayk7LUZpxpVr7tndJc3mpTGdlqTytGtIS1EX5HlB4JswWVOxubmJF4GYOgUMSdQgiR09s6zk0U99DhUmaM+L3/Fd38O7//iPWPVRByEqjDEEQTBr3BDHMWWZkyQJ5y84Bbrz4StcvLjL+etX3H2F4IWv+Cbe8Qs/xu/+z/8CgNe++e3sVxGhMuTCRTWvbmyirWQyU9JKjFEuSvtcJ2ttqUyN9mck0hLIiFP3uaj/hfNXePb8DYpyRNPLpMkkw9V7S+pp0axwjROmdSxSapRSTCYZLe8VDwvNOBsS6IRJ6nhoPBnT6jRYO+L2zJUrV+i0F1FJTWvBG7nDkr2tQ1A9+t4J264yFtZjWv1pAf0eo0QTVAWlcDK9tdRBD53CnE5u14vced9ZXvHNPwbApz/zMYblkG6QoIUbZ6AT8iJlsXOKonI6yI2tC3RWAl70IjfOz358wNpRS3mzYM07DTYnE5ZWU/a2odNzcqKqNLWtGPqaFiFjJpOcRkMSeKevKWJ0EFHWzigsco3KQno9Q6Pl+O5bX30U6hGTScDejlurJJGU1QjjMwoQCq0FlYHh0NG3rmvCMERaQeX3/zhLqeuayjoe664fY2c05Md/8qc403W0O72+yNMbmxw93eOV3/xaAN7/gXfTbS2Qed0iaRQYq8kqPaujtXLMeK8gigNCHyXO84xEWfLCjSmI2lQ2xJTGOdMBbMGrX/VKnnjiK6ytOQfr1SsbtCJNq+nWIEqabN7cJgpDrN+jxlqE1CRJOIsAF0VBlmUzwyYIAqLI1R+VPtOiqlytYxjEtJrOIWiMYTgc3o5SNSKMMYxGo5kBJpTEZFCWGa2WM260DhHIWQSxqiqSJMAYFx0CaDabZOWILB0SeY9Ao5mytApp5sa0trKItYpBfzwzqgOlfLMJiZ2FnA1VVaPltO6sQV3XvmGQPxsCjTAWqQSJd+gMh0OEUMT+9+LCKlEU8czTFzE+Ml+lq2SjBZZXnIHy0Y+/n+2tQ+JEM+1k0mi06JdDAmFBuf0+LgZIHTDK3HMaS21OPSi58Ik+ouf2zMkHuix0BWv357zu3NvdfcN9skXNn7z39937PvMZys5HeOyJT/DBjzjHW9Jd4E3f9noOb1nWj7oz8oMfeYTBYczaWfe+K5+XINo8eP+9PNN/3FFJtChGJWVZoBpeTmRQmhLtMyaStqtbzGpBpJ2s/uoTj9KOQhqyzcu+3cnBd37gEf7P8NfW0OK5sBaqwnsBJiMQAa2OZOKVpHzSp9lso2qofPSlshWYmsobW1EQsNBsMpxkGF/UF5eW3Z0DxqOK0ne8CUJNa7ExE2J5IAjSmvFujk2nlXFQV5BbjXfC8dv/8gk6HbjrZY4Jz92fsH89oxgWxMe9wnU1QJsSaQJ2B86iv+/sffzET30LI/MFAL70xc9y15mI/f4CA1/I3GgYrInIR33OHnUpKZeeMdx5T4OvbvkQpwmJEsmkqvj3/7uzWVdWlkjagmeufpnHP/M5R5dJk26vnBUzRgFUVlNWKdPmg6UUDMZmFhkQwrXAsjVYzxI1grJIaAdjRtecREzrjKWTgtJ3BuvELURukTKg8i6Ug32D1BCbGOXTyLJckGY1bR/KV1KwcXXIsQcDlN/A6aWcat1gWzWHW96D0FAoAjZuOBqYlSZKpQwO9ykmzshttM6w2F1jsrNJmjnFqRmFCJP4DkgQhhItQqq6IM3dGkcjSSgEdbMDu05o7W7tEYs+xbrjn/5hxMn2MqIq0L5zjrIp5x/5JA9978O0G+66Ub9kPKgIS0e75UaT4WhEliZMdtyzM9Fg9YVgdcm1bZf6tdZoYVSPdOJo12p2iLoVygomu97Y2M659OVbLKwsER93Al/oDvVuQbPllMl6nHNwMKHdbTIaOW+MDRWmLsgnElFNjakxQSRnHv44UdgqQtURk8o3A2lp0p0SW2ikdusldYkUEuu9evtbE9ZX2qTlCDwNrDbUNSRScXjJRx4Tw/LJCYHfRKLISRJDLgJ2tsZ+bTRpPpwdZoiassw5dnyFwdBds9I9zf7eDkFgCPyaGmOI45jMR3aKovSpG5JB6g/nwCCCkFgF2Gmah7CUpmToO2Vub15l8eWrDAcZ3aZvMFErFhaWGI73GY+dAnTt2jX6hxOi0PNBVBIFkq1b+xhflJ3EHRrdHqO9XappgXcNiY5JS3fNQntAdzHiC194hLT0zWpail67SZqWlH5cvd4Kt7YOWWk4Ja3d7LCzs4cQiknhFCCtNaI21JWeHfTWWmwVUhbTzo4KKDG2Ipt2zsoFi50O1ze2eNmrXw/A9/3A27hx6QKXzz8GwOe/dIGtGx+mXd3i7/3UfwPAF/68T7fVorN4gquf+BIA42FNFJUsNV8MQD+9xOk7emxcLdg9uATAq177MnY3JZeuf5G3v/X7Adg9fIqwey+Hhy48et+pc/Qnh/z8P/llfu3XfxmAz33ms/zRH/0RC4sdtPfyp+khg31o+ewla1NG/QwlIQqco60kJUszGo0Wae3WL9EhWils7puyjA6QjQrb0eTWGxEDQ0zE3i23h87e02XQrrFUMw+0JUDKAisEppp2SFSuwUrl1lgq1/2ukbRJfaricDTh5s0NSlPyyCPuUH7Ri15CEHRmxnIYxs6jbspZ18877jhDu93mqae+xn2+sc9yZ8Q9952ke9z9bmG558FXM9oW7N1y587bvv8tHD2ywKWL58n7jgbdbg9TZ4xGbq8HOsZWhVc0ffMYYygrw/rRtVk2QiNJAMuli+7ZdV3y8U/+BWHQQng3dZwIqhrqsiBKfHMc5SIHg8FgRqckSTCmQoXeU05INaoINNzactGQqJFw5fIG3/rac56HN9n
eTjl9do3r11yh/2J7icnwFmme8uDLXXrd3oXHQCZcuuLG2VwYEB8/xdYTE/qpM1I2bu6w3DrC7u4t7jnnvNLppGLr5oA/fe8HAXjpy+9gc+sZbH3ItWcda2g14a571tnYOGCU+45wmWVRt/nUX7r5VbLBxuVFEnvIYtvN7/Bgl+V1QVkolHTGVWp22T8c0es6pVCFgsWFDuNRMVubJC6oakM87XYpSpI44PBgwPpRp5iKMKOetAgCNUvZTJodWmFAlLizaGd7wCR1HUXDWaTVkKcT4jjGem+9UorFhQVGXr86unaS8eQ6y3pAs+EiiuvnXsYTH/pLGkmbr15yKZy7wxKinMJ3aJsUOe2OpppUVLl33gYdqAW2VMTeaZhNJtSZnXV2tACmIowS2t5gPzzMeeSRTxFFIYeHbv22tm6SNmNOn3S80VlMqCnYuL6L8ftYhTXW1BSTyX/Q7VFrPftdliVpmromELOueLC+vk4UJvT7zuibNsUSfpz9ft81d1EK/B51c9TEQTLrYgzTrqFe7wxCAp0Qx/HMwGu32wyGXbLRBpF3sMhgmyipUdKXOIwNxpZILB1vuFlryfPcdzW8fR5KCRPf1GtpdYVBf8RoPOChhx4C3Bl2sLuD1ppwmrllFSpU7PedLpFlGZPJCGtrjqy7CO2t/iN89l0foO0bonS6iWswZZNZt9eqHhPrBtoOqbTTYZXeww5q7njAZX+87M0h7/uDZ1k5tcCRM+6s3SvavOCbQh75+JM8uOIaim/2/5TxeI3f3XBpgZ/58O+wdGKNj3/yfbztTW8C4MUv+Fbe875P8Jaf/AHe97/8awAuPLnPyprg5lW3r3ptTR7s8uUvfo7ER1FrUdI/6HN0fYVh6rJErJFYIVFe3zl+6k4uPfFVpAyZFNc9v0haLeh1NO1p1kQvZIPndh39DzHvJDHHHHPMMcccc8wxxxxzzPENwPNWc/X/BFFL2c7ZqSUtafYWqMUYYZy1eeZoj/H+hIOtMX2f0rC4mpDmBmudh8hUFiVyrM1IvZVqygojJUoFtHzBPEBR5Uy7xeqGJU8zcmtQPrRd5ZYIhWmGKO+RycwQm8FCx3leTq6f5aEH7uHxTz1F2nEejP7uPgfFIWXlipoBeslJfv1X/jl/+RfOCv/QJ/8FSVKjLDz5Rffs1lpO0g1oqYAHjjhvwdUn98kTw6jpvBxZOSAb1gilke3pR3NCRBlCIyWaOFeuNduYytJuu/nWtnCRiqpG+MhDiWI/NVRimndbIo37ZtS0iYFQAi0aqGpM/4Kj5933LiEXDqm9ta4DCTqgFCWjgU9ZGuUEKnAFksZHzxoVgRKMtt3DdWzQOYTLS5S1j5ikOamAxcUGOvJ1NMKyv1/zwnMv9fQtMYy5+Ox1Ml8XJaOM1WNtGr0GxrfIVapNqBVp5jy2R9fXSMcwKvZ52ns1F0QHPdile7JHesN5Hg+zmrUjCf0DX0tVGMJlRZYl7F103spT93dYuU+xswuNwHmORRxw8dpFrI+OLugakeYIGUFwO4e7Vim2NhSBr78jR5Ztth937+uuajhbQFZT+3bNRbcmvRSihiHLLz8LwGBjj4MbKafPuftuPDnG2pzOGc0wdfSUuO89jG4K3v7Wtzh2MXv8bx/6IEtrPp+7HEEJET1s09Eqt4I8lYwGBdLI2djztCD2vJgONFG5QJbsc3zdebeTXoO8tug64tJH3RjqQPHA6zsUbRdpSZREyAJjE559n+MzFcUU5R6xbnueqikLSXexy/pR51mOkxUCWfKVLz9Gmk7bwwtf0Ot47KB/iNYBtq4RvlYEJWn4iE6aOs9uu91GK6jK6XdFakKt0IEk9S34y1JR2cp9YsCnqBV5ytEjJ2dtrJUuePiVL+eRT36arV2XchybgEklKct61kZehhnjrJp5TF/1qi6HgzHZYIUL5x1PiVAijAVbI4Ube5IsMU63OXPKrXn/YESYNHjJw6+YFY8fWVnml3/xF4iby0T+OylCWurqOd9iEQVFUbCyfIStvouGhlRoG1HUKTZy3sjF3hn+7R+8i3vvOA3AXWfWka2EULYY5T7KP57QbpQcDodE0Yq/TzMcZfz+v3kvAO9+z+/xR+/5PU6dWp+lfu0dDNjZmxCLgl/6BedlPMwv8a7ffM8sFePosVPIMmM82uZvv/0fuLVaSPiRt7+FlaUWuzuOp97+n/0QYaPNu//wXwEQ6AY6qMnLPs3IpQWfOLPOxStfIYwDsr5vy7u0zrjISH1TgaXWAoeHh8iqz+me2w9lCHsj2JumS4cSIS2ili4ECUzSEm3dGjdbbo3DICLPa8LIy9cqpSoiej3N/p7fj0EDaknUikn9GaZDRRKHs2jz0tLSrPX6tEA9yzJ+9mf/B971rnfx8MPf7Oby4hdw+Ow+577JfTrkynbF937X9/Jj33o3K/e59auGBUU+5mBvD+F5ajweE8aaspqm6YWEoXQpYrNvzFSMs5SVpUUGB86b3W21ORikGJ92FeiYqqqoze0W1RZJUaaEOiT3Yy/LklarhbW3G3YYUxHF4e327CqgrmvXpt2nZy2tLrJ25AgNX+fa7bX40Ec+yYmzx9B+P+7d2sXUNZNRzYMvc3QQIoW4weEzLtwUhore2RUuPzFiuO8yHawRBN2YfJLx3d/lPrPywb/4BIeHm6hg2s48Rqsm3/mGN/D00y5C+7WvPEUctUmzAaEvFWi2e4yGKQWOz9/w6m/j1vAmm9eeoPbt78uoRMoEYywT5yinKMc0Gz2M/2QMMqeqMhrJAvgCelul5OUhoT8rdGAJgojRMJ9loEzGNZImrXaDYd83j7E1K6sLxL4N+uWrV0ka/htA0+Y4ZYm1lnazM/tWWl1Ds9Gl8pHkw3pIEESsxjFJ18mIUSEJyoB+1meU+UZUNkYoZryhZAiiRlDRbPT8+wrKssKi0Xoqj4fUVUHo5Y8QitKM3Xc162kqbeE+YRAFrmgbuPvuu/hHP/PzfOozfw7Ar/zKr7O2vM4oPZilACICoiQhe07TCaXc/Kd87tIBS+q6nt0XBAGNpEVZlrPPQTjay9k1ri7Ope52/ac0Dvb2Pb+Fs8jjtL5rel9tXC350tLyrLlaURQUWcbu7jWOHXFpj4YJR08sc2vT0ffWzT2EtEgVzXS6NE2xxqV3Tuu+XG1VMcvkOHbyBIPBgOHodvQuDALKMqeVNBiOb+tw7jtP008aWRQCpeUsClcWEssE4fk8iZssLrX4vje+md/4jd9w85UGZSuUhoFPiddaY8qKltdp1+9u0W5lvOLVmvf8qvuUxwtf+d384E/fy2/9j/+ebMl/QqV7CbVzDyfWfFv8U7vceLxm+8aQsy/8PgCWuh0+9NFHef13vpG/+HduDI1eh06Ys3XdRRsj00S0axo6Y9D3OvpBRBx1maQHyMjzRhTRTzOOHHfjvP/+B/jYn32IU/fcw+u++0EAvvyFrzA62Ob6+X2SwEXmwtWC6589/Lo1V38jjKugIWzLRZ6JGy1M2EDGBl379KLhHtVOybCERnP6TZIUITQSX4ciXBcsnZSMlC9irG
tEKKmwBMYxdLPVpRQK7TewKIdsbmWcOrGEjp3iVmaCncMRqqrxn9ehtbiIUinrPnVo5cwibXsvzeVDzt9wHUw2vtpnT0tUmaEqb+BZxQtfcYRG0xUa5+mEa9cf53Q75PJTjnkOxBgi6C4rjoy9EXjhkMEpyfqiUxgoLb32ES5tbzJqeoVhdY0bl7dJxYDxvmOUbjsklGqWxrK8mqDKJQ4PLhL4GrbBoGSiNanPbY4bEKuQycjM6pSC0FCagCQo0QcurLu63iYNLxBG7gAYDgRiocLminzovyNWxlRVhgmYfTeg1RS0IkhT33lFlIQiQY8lBz49THUVa3cJlDZkXhhs3ahYaHVpBS5l7IlPb/Kil6yzu79BlbvQrOqNaXUrDIbB2G1+qRZYWV2g4cP9i41VtO5ya+8p9vz3qiwBi+2AK9s3aEY+RH04piEDmtPDZH8XddglWogplU/r6h7FLHWRk4vs3nIHdrjSoWkTdnZdKkGhalaFayZQ1tOuOBoRZYQKKHx63y3BqIbxwBeT1zGt4xHqaMbY51f3AsXwsmR0GFB6BTq9OWD9eIfuccdjT39+h5N3Bgy0RUlfWzSuyCJBcTDhR7//7wNwz93n+Nlf/S85epcP5ecCWTbZuzmk2faG/pJi91DQHxYsNh2N00NLnIxIfW1DYBXRsEEVyVnx+OLpBN3qs3dZM9z3Q9ga88BLl2jeNRXkIVZkLJ42pE84Z8CH/nif1SOKYd8JRElFI16gIqeyU+NulVPHF9i4cQ2m6Vm2ZjQazDoR1lZwOOiz3GuTm+nhnIEpUTpE+pSYbquNLDOMnn5HKCOJIv/dj+mHaSVh4gtcvcG+u3eTE8dP0fBdKl3+fsDDDz/Mv/lf3+We3W0jRZO9dI+GH7sKJIOiJvDpaFpkNFaXWet1eOa8q/EKGi1MHUJZE4Y+bbWpGQ1zzpxwgvH8M5dZXF8nk4KfeYdL03vyy4/xB7/zu7zkm+/n2ad9ow2rCUJBljtjuSxL6gpOHD/D7jV/TbfFAy9/Bd/+6tfw3j/4LXfdWGGDU/zqO/8nAP7hT34v16/v0ZZN9sau5uqOe86xu3kDHQbkB+7wN1EAOqDXc9+hsmJCt5fTTNZ56sJXADh7TlEOUi5dtjx4/xvcEDqGXq9Ft+Vk0nv/+AP83C/8HOvLPf7kz1x+/Ft/+Dv4uX/8Dp5+/GmqwvHHg9/0Kj78yY/zjv/qJwD4zd/4HRaXWxRFcbtOMa9YPy3o/h/svXm0ZVd93/k583Dnd99cVa9GleaS0IgkBNiAIQaMIbHdNm2MnRXHsRO3s5KF0467nXbsdBK3223a8zxjaAO2mCWBDAghUKkkVamkkmquevN7d75nHnb/sfe9JdZKZ61eqzuLP+r8o1VPdzh37332/g3foZ1y4VUlmDP0ufPYbTx3XPoUVhyTpMx58PU3ce7ZZwE4eMshDt90J05T7lOPP/F1+p01htvO1HtHCJkMoGvTgNJxHEzDI1cQxCSNsLQGmhVRUV5NO4MheZiyfGAvKF+dTBOUiZgaXE6CmYnvEUC3O+DAocO0Wm0WDt0EwFvf/zAnTzxJ05WCIS88/yqto7O88qkvTyGqFVdjd6tHZ2cHXZPfNzvT5tLlHRYX5bOXi4ThIKZSqUzhRUEcyUAtDmgofqFj6HSjfGpqn6YpjmOTptk1488io+JXybOMbKIymCS4rj/lc4ThmDxPsR0LFISrLBI0LAxTkBdyX1ret8jhG/fy9SfluWpoOrV5G8NqMBp31T2ZxMOcMOtRV6IzTs0jFzk1pcbomTBKOlw4v8Fi/YC8d7NLEuaYepODeyW07Nz5Z1hZabFvnxyX86/u0NsNqMzv5cGHJOTwpRdf4Mr5y9iOQOQqSREjbLNBpaVEEvIlUquD62QkQp5rUViQ922oFIw7ch40HDzXJknUnqdJuFiaDWi05L0HvRDLrDEcyjOl4tepVptsbmzTUmbLujWk08s4dGSJeCQ/q7M9knC/Ugb5huUyGpQYVj71NvJ9F9d1CYIRtuL/BaHczw7sOSzXnT4gudJlf3OOWHGEdKfKj/zwD3D86Sf52hOyWGx7OlFWMFZCSnkhk3bHsimF8pnUBIOxgV0p8BRdoSwsLO/aOqfIGY4ibNskVx6kvu+jaRrzc4skiRy7o0ePsrETEqh1sHr1LO3WDP3uAE2pS7u+h657xEk45SmZpklZltMEs1qtUhQFQRBM/6ZpGromxWpeq+CpG0wFLZIkUuOofwvksCgKTN2YFhuyLCNN46koTK3RJAoz8lJcE8LIM2ZmXFzDQ21vlCIiSrqkqfxsz25QEqCb15Q5dUN6Wr2Wm51lGXmeTxWSDUXBiNN0mji5tuSLVVyP/kgm0Z7nous6seJ4xXGKY3tYhjaFRJZagW4UOIYShbDBNDziIEO3FOQXl3Q8xmhYU2Exu5HTPGxiavK+g1GANTfDe971o7zroMxHPvvEcWpv2GXzwlfpKzXLhTmXfbOvo3tFwoT/6uMvs2dpljQdkgSyEFb2YoJGxLzlc+6kFL7RFk2E0HAzOZ+9qwmmbtGa1bBa8rdsr0KrsYc0D+kncg3lQicrChYXpVBUFBbE3R6tpXm5iQCtukXcX2O0E9NcVGqoSYXL39j59k6uKjVdtG+Xi2Gk+TSrFnaREyr+TxgNMPoJuiuwlRRrMtIwiKnNyIEMRwZ5WZBnAqVPgKiWOFqDbDQgVRVur+LjiCqW6o7srAYEQUS97lMgD6ajN9/E1StrxOGIRk2RFscujUWDjlLz8lyT9c2Shb1NVpbkIXfl6il2BxFOQ0NXHDJLd7jn7mMMlZJcPxwTjS6wtHeO9S25sVw638F2DJqLBVV1nxUBWVYSqUN9b3SAva0mAwqe2b4EgDvv4JYlhZuzsyYfDs/IqJvGtHKWliaGVyHejTF78vvyRkno6ui+3Iwqlo4mBMPYZjSWn1OxNTQbdMNAKJ6SX5a0mib+ggo07ILx0CMlJB4rXs3YIsxSbEMjU1KXRgG24yFKRSC2LeYWllm9ugOlUhS0aswshWS5SWdVdevKEL3WoKI6ZaM1jdatdVgLsScGhW5OmhsIUZKorkaW2+iOy113ynlpOC6jjsaePS1Kpbj31PHjVNtLWI7NUFU19Rz0MGY7UwdjrtF2qxCPSVUnslpZwLZMuuUunjpAyzJHN2yCkeI7YSGKkhlLw1Ck9yCMwdIwbA1DBfHZSGe0LQj78gCoWnViL6Dl2XiqajOeiUiuuCSnQXPkxjK3p4Z9NGC0Iedz91zE3E0F4zjDUW7loZbhaGB5HlpHCTy09pDVL2NNnqskRaQWD93xEKkalxMvPYNmVkiKgIo6VNPBAdJwRE/IDk3ds/Bim6vrBaZaG7c9UCda2GHr7+dwlHz52tYl5hfnuf+75EHxpUc73HjHIjfcbnH2tLz3E5+6QrvdmB5wwTgCIZQq6LWuX16E0npAdSKaTRvb9vGU9K3u5pw/18Wx/alrfVFIPH0cx2hKxEPTDNm1Ut/nmeAZDp5nsau4IbbXpN6o0N0ZkuYTFT5HBg7aRKQloyhyk
uSaCpdp2PhuRcrrq6pMSY4QxVTwpSwMZmfaZFlGr7+p3lcjyzJsL6emyESj8RBDN6cHcZ4YoOWUIsKxZTCZxDmFts3+/XewuS4PItPUCKMxnhLZiZIMITQ0zUBkcuxc1ydE5y1v/e/4pZ//KQC6O+vMtBf4s9//IgBXNi7zyKc/zC23H8NRGPPtrXPcfvAOLl89Q3c8SU4zkjSk1pTrdWXPnYyCVaIkp9+XnbJKw8Wdz/mx9/48e/ZIc/WP/dHfUJ2v8colKerxoX/5s9x4481sayO+/OcfAeDQbQ/glAk/9oH3s7xXjsva+jY/9+E/hQ2ZJH3644/w8sZFmq5Brn5fqs1hlznWTEx/R+7pD953L37bYr51QL7vI58gDTWO3XcbL70o7Tbmlg7zw//0J9jsyHX+yMf/EEcP6e3oROk1JakkydA0MbUKcD2LildhoPbXogwxLUEUCWbbEv8/jLoYeoxfWZwGoqYYYphNPKU9MOgPsR0LQ7cRupr3XBBFY0zTwq7JAt3e/TeRlcF039LiIWm4Q5oIBkOZ4N1+260sLNV4/NHn2H9QVmT/0Q8+yB//wSdptmSV/OgtLZ7++1fQXtNRiJOxFAPKMpotmRhatizITQLOIAhAqQlOSfaWhWEYJEkyDZgdx0HTmf5blPJvuq4TKQUzU7PQBKRFjO1e4wRZtjFVeowCm3rdp9QyxmqPbc82cawma2vnpvvNTXccXyubKgAAIABJREFUYm1jFdeWQeBse4Znnv4Gi+1FZpXk/9rqBgKLKE0oJhMo74Q775S8wQceuJ0TJ45z/uwZBqEcl7m5Nvffcyuf+8wjKAAIUaDTajRoLcnnY/XiZebnNHKRUSh1SaGVoCWIQieNJl2bCNPwpiI0Ii+Zn19meU+bXcW13VjfRtNlog0yoa3VaiwuLtLryRgkSSOqtoPtNNnYlO+r1goqlRELbZXUb2asXkmIyvya+qNtUZY5SZJdQ7gUBUcOHSHKFYIoCFnvd6gDtRm5Xhptn7e9+V2UluD3/refl/NQr9MNYaDOzH3Le1jfWaXUPFAxULXWwjAMdne3p9+XK67fROzEMAwW55pc3eySKBVjo5phCQ87CanMyU75lY1dNGIqKnkwTZOkKNizbw9xoKTmt/oURoqhlegKilNikxQlFPI5dnUTbJMYaCj+n2VZNJtNsiybdoAGwx5CFFPDcFkwcCnLcvoa1/WJ4xjXdphT95nlJd3e7pQLOzc3h66ZdAf9Kc/U8zzyJGV+rj0trAThgDRNlK2QlI+XAiTeNNlxHZ80TRHaNVXDopDdyNd25ooypxCCalWOuRCC8TiUz2k5sVUxZFNQmREXZUpZChzXn+5vugWYFkIJsPiVOmUZMOxkqDoDtqkxtzzP7M2zWKqjf2VznWq7T65iShODy5c1jrTv5m3fL7lTB4769K7+NqfPBtx8kywe9TcL3v4PPsgn/uLDAPz57z/L4t1Njt14Hx94h0Th1OY8/vNv/AI721d55ZTqVM0YGFpBU5M8ZVNk1OYjklKTysiAVmr4rkealQxjZdlke4z7A4Ti1eUUWJ5Ns97CdOW5MzMfcPEbVzCqDQql5FyWJaOz2X97tcD/N1eBhqXkt+cMj2SUM4xdZtQG4ZopgRVjmtIzBUAjIU9N+h2105k5hlcSRTZaTy64VqWgt5Fy8OgSV3fkhhSPh9iuTqaqeY22Rlm6jIMQtbfz8guvYjsSqjPuKyWpwYBR4mI3FVE1iGnXdXLR5cxVedDva1v4dUEqDDpb8sFLDJ2XL1/AR27ucbGL63ukSUAayPu0LZPWbEGR1YhzOXGXz+jM7bOozcmFuT28yOb6Ama1ZK4tV/SoHBNaNfQ8oaLgNXZhoosYS/2WZm2Osxf7eHZCZqhA1M+IgpK6J9/jOx62ZzPa6lKXt0ld1CithEzPEUrip+LqRNGY5arcQGKjQxCEZIFGnioREVtDKyWZUyhIpWUaCC3CUypHYVzQ7XfJi5CKSpJcPyDJSkbDgsG2UnKbt8n1IaWCF9b9jJbfZFXEuLbcjKp2Hb2ik8Yj3JoMIoYBWJ7B2gXZet57+wP4CxYPPPwwz5+UwdyBFdjZusx4J6LuywCoH6/TDQJW9ssKRp6WxGOdstpgqNoxej5GCINsVOCpxHvl4D5sy+PsafnZgzDBrVQwPYcimjih+/RGI+ICbFt1DFsmCzM+AwUX6a7GeFQZ9GCoquAztQpmM6F1h8vlK3JTbrUzrMKhr1r7WRIhUptKraRQ6j3lsCBKDGbcGfAUidfaoVpzSFVCZGQ6w7WCE1GH5YOuWgs+l8463HZflayUN6a722ycn8FTErnjMMKIGnh6TKxPxCpqiC3B7s6AubY85GYaMN5NOPGofLazjssLj/e4+JzJ3IJ8tms1D9u20VTCaZrSz8p1rGuqeLqGZzUIR2KqWDQaxuSZQacvA8wkzXF9hyxNQUwqihqFroGwyFQ1tNVqcPONN/HlJ78KgE2NuEhw3MpUlhw9YXc3wLIFdWWeEgYZvl+dKmfmmUEqUnzPxVCKnqWICNMuGCZxOOnWCSzLpciU5HAak+VjDMPFUgFCxXeJIkGS5lP4AppOSUmiglAEaMKiUq2TKW+4ghxDa3D54rkpKbsoBUIU0yq1oWmkRYFtm2Tq9+UIGPbRw03qB2XHSdt/EE/Y/NyvSxnyc898hVMvfowzL5xn76IM6nd7mzy9O6TXzVhYkvPn2CFJnHFw3+3yc4TOwf13EicJpyIp4nPrrTfx+KNfZeVf3Y+pSP1v/95/SHOlzf4vfQmALZHyyG/8Oa970xt574/9MwAe++TjvOkfvJn3ff+b+OxnZNJXdeHVr32J//13fhuA73jH6/nv3/MThKKHZcjNq1oWDEQffVSl2lJB9eY6N7T38+CbpNLq+tolLp2/xDDc5j3vltLoZbNJe7bKo1+Q0uWd1QHHjh1hFJ3EHFXU/EVYto4QBpqyATEM6e9Ua0vSfZ4DeQvb7XPDMRnY+Czy9IkT5CJkoa26r7ZDESTs7sqEwbYcDEOjLPKpsqNjeti1CsE4hpHcz0597SLzK/M88LDsqpx7ZQdXm+PKpc6UhL6xfRWh7efGW2bY3Zadx1dObvLP/9kPcuH8JQCee+YCRVFiGClqq6ZRbzEej4nTQBaEgHlvmbxYn1bAC6VoKL3m1N+KAiFkVX4SxBdFgYFxzZOoLPF9nyAI0JUSYZ4nFFmO69UJA/l9llNS96rs7gzUGJTEyRjdTqZiGf1+n4NHDCojh2gkx3Nru8v87AKoqv9LJ1/AcS1qc03Wd+QYpHmBroGuC0wF2Y7inAOHZhmF0q/yNz/8PI1mFbdiMbcg1+t9d7+Nj3/srxCUzMzI8+Kuuw4y6F2BXO4RzZZPmNsIYtJAnuOzMw1GUcGor5Gqs92xHMJxxuy83Mt0R8L4t7cG9HpyDQVBgOPaU9VPqcqos7W1MxUtGI1G7GIQxxvMtGWCN+zGiEgj78vzanlhnspBOHlZTD2RyrIkLwWGZV7rTpSC8xcv0FGqnw3HY2AkaHYVSyX666++THcr
QrcFodpLRJqyPYyxPLkfXL06wDIrCN3Gq0x8LTOEKKhWq9Rq8jna2QkJw3CqNhkEASPXIEj6oDwzm9oMG90u/9PPvI9nvvQUABcuZlTrLpkqloXjEt0QbK3vkGYTcYwM1/AwDcE4VAXlYkQhQDMm/oopepRgGTop1zpXo9EIwzAYB8PpGtZ1pkmgacqu0WvX+UThM80z1tflOmu1ZyiKYgov7PV6JEmCYRj4ylgzy9Op8uBEQTBJEizL/JaO26QgOPH/gomKaIalinaT5Gyy71erVSzDYTAYvEb0KQUkxNFUTQqEDjrThEEzwDR18iKZJhuUFmQRuZqXKBfsuWGW/bfPsP+IVM4bDS+yEesIzyccyUJfpRqQjSOidbk2j95+A3fvr2EvBGSjTwNghW8kGi+yz+zz9AmpHOv2DjPzA0fJIllE1Gowv7DE/Q++hUOvkxBgMYootwziIkHV/jA9S3b3NKXQnAvGAw2vAXWlmByHgjgZI4RDWxUNSi2jzPukqjOoUaXdamDb+6nNybELh+vEoeDwLbNcOKV+X81kCm37L1zXBS2uX9ev69f16/p1/bp+Xb+uX9ev69f16/+D69uic5XnJYnyxnAyGGz3aS3VMT2ZNWaFICpstL7AV8m768BgJKhVFNZX6Oi5hUbK/JyqAvR0rDQnHcNMTXYjymKHKOhjubJi4vgGjqeh6frUSVrTQdNSKhVz2rkymh66liLGChdfgcQtYFjnD379DwF4/Buf4PzFF/E8j2e60lskKHNqtTFlINv2umEQFjWcTDBUuGHLsMnznCwPMRVfzK7n5MIl3pH/fvM/dnnpRMyZlwboynzYSGyizgA7ypkUImbmfbqXSkSuukZRn7yMiFKT2+9V3hu9MXaeTSuv/U4KZoBre9MW7mirxLJtjGaKPSPHpaSg4lZ55vPKm8Kdpb7Sw7QzcpXBJ5HAdqHuacSKdzbYSphdBF2JAWhpxqA/xrRA2bngWSW9js6ob6EUR0mjBMurYioeyny7SdyPMSsauzsKMiI20Qxoz1SnxrDDrS2soo6mIAGa5RDFI7745ZOsd2R1stNZY+/cUQ7t3cfp87Iqtr66TcU1sUxZhTM1mzQZYuoWy+09AOwONymrBntqFQrlMr7b7xP111hSlUg3DhiONbpDcNSC1UoD03dxLUFZyI5MGEWU+YBaQ1Y+9UpE/0WHZq1Od6hMdjc0nNmMqJqx/04l2JFr2KJHMpTvm2nUoCjonzfIlLdIe7bFbjGiDMIpXl0rdKIdwfytygw4sSBp0O9vcOWrsmJabVQZb/e4dLrB8oqsurtujYX5Kp2xkmYvEqKNGMtJCHK5Fi+f3yRJS2xHw3Xk+8ZjDd0Y0uvK77N0DRubtBdwbl1+n+kYEATkSgykUqnQbrdxPXvqwSSKgizLKAUo3ja12izj8ZhMVZsqlQpZqlOU/SnHA2FOifcTCE63t4VuHeHYMQkZDYYpFc/huedeotqQ9z2KRliGQx5niMrks2yEkJAMgGHaR9MFeZ6gq2fW0Bqk+Y6CAcq3maZJHGRU1OdU/ZyDh/bxwvNnpveU5iNpfg7TjkVZxtIWQRHONaOkVpWQP6E6ubJjlkgj8bpcs71eZwpbk59TUqIRBiMMU7WzPQ3HrnHl4hqFEhHoJTmRlxOHcgwOPvgAH/zH/5oHbjtK6cq5+dDP/yIvPPci9XqFwVDuXb7VgCLBteTve/a551lc3svqxkVqij8yHpcc3HMjv/LL/5EPfOgXADj16qukTxnc/z2y8pmmIwr7LL0rFh/+e9lV1Eiwjs/y7u//WT7zMdm5cpZdPvt3H+emh94EwIHb5vnBH/8+/o//+NvM7JHfl8cDbNNDLw085S92/swq99//bkLZ9OP4157httsf5kf+hx/lq8flZ6eDnCurGxw5KCuaTz0WEY0Fvlcn6E268DolGWlsYzty3rNYoyxz7nuj5Mc+9ffrNGoeQitIlbmz5ddot5oEYUpjXvH4tnq4bhNXkfrTNMUyNdIkpVqTrxmPxqRpjOP4fMe7pADD2UvfJEo1vvzVV+X3ZxpzbYOoTKi5ynNxlLG+vYNlQmtOjnFrdoHjJ65w9ZLsSL9yfhXD9PF9l8FQdkN+8id/hC88+nlGw+pUjjouAoTQpqIwlmWTJKmCR03WuUGW5VK45zVVfrgGC5xU6SddLgBTN3F8hzSLlW8OrKwcIAoTXGfCUYopS4syrOPV5PMwHIYcPHQrpnWVk8/LivdoHHHr0Zt4/puSQxeNI26561aipKCnOCZVp04ehxiGRaa4GRUfZlvz9Hry9zZnBUWcksTmlPh//Lknqc3FZJEgyWVl3HR0FuZv4YnHpaF2a26eWITS+sOqqHkvMWONuZk2i4ow/9JLJ9EMCcEC0HWLC+cvkRXjawIWauxSZc0wHErjX13XcVSXwzAMZmf2o5mC9Q3Jp/Rdm1j4lAqvdbWjMRplFKJEV8axSZZiGIYUZVCc1TiKGI1GLC4oGW1d4KY1bjyynxMvKP8/XefVzima9TqzSoAh6Ee889138bkvSHuYMnNxfJ80jclCFZRYUrAkCEYMBvJ58H0fy7IIw2tCQ0GUUySChq/QQTsj9h8+iutZBN1t9boFqnaM8uZmfk/BvqbLxmpArLpSwnLZ3RmRuDPoyHWNsDHJMUu1pqwquSnQswLBNcPl3c5gyvWSc4P67zXBFyEEWZZNoeygT3lXE/hgv98lz7MpbG806FOtVsnLayIUhga6EieadJek0FLOxD+qVBwt1/GnnCvbtqfiHJP36bpOkkb4qhOY5znBOJKm4KqbZZqSk1SWJb7CIotSwzAFo5Fci7ZVQzNi6a+Zq46znmBpNl5T/raj97oMUoOhXeLMSthcfWmW888dZ84LWGnJtRsVPs8fd2gpBNpdd303b324xf/5Z7/KW94mPa3CsYmxcICzL75AbEs49up6zsXRRWarch3snbdpVDTOnH2aTk+OwerqiwSNV9B2S2oNxY+LW9QqCe2GnPNs7LCz7mFaAZ5CSBnWGEN3yXN72p0UpUGztp/dvlxjS3vamEWLsBCUQhkw52Pe+t3fSb8YUmYyhiz/n5tWcrz/6//7v81lGTYV1dYsopgblvYSmxGRavManotrjxBOMfVFqJga3bWCrvJgasxl1Nsmlldn87LiTlgFghJEimsrkQvtILZ7jiSZgEUTDKtESw1MS7Vi7ZIk1smFTm12koDEFOhYKkgqixRzoJGMQv5QkcKb7b1ku7O8/XvexOCS2hStsxx7qMk3H5MT3moN2TqbcO6sjTMrH7zxKCULwapqNGbkot93MOLSGR2jobDw1gLjwZjF6jyDsSKTpzGWMCg0A0uROYNxjG74DFVwLpKMpIRRknN+TS6Kfk9jYc8KhikX3O7mLvF4TL3hkgXKdC/OyIZV5vwWRV8JLrQTwqGJmcuAJdjuMhjoLB/22TonN5/mbEZ7vmC0lWOrRb/3qEE8LgjH8jWupxMOLbAyXF8dvEJj0C+xDG3q2eE3PLIopVReKpubVSqzDpo7wFab2DDSEbnG9vYYWwV
4WVFSJDm28i3IypSXL12l7naYa8nDZHXQ4/EXP8fr7n0r3/WdH5B/Ww1Z2z7LSBnVkiekieDu++8inSSPnQg3qiFmBGPlp6TrNnuXD0CpDBhdl1xklHpJrhIpx66SZymiLMiFOlRtE92GUFkbO7aD4fQIxmMUGoThesJcrYKwQrJU/nFsjtBEA7+mDBETgzhJOOAErG7LjdRu6XjVnGBtSKqI4kmnJBvBnpskrKW72icJU4q4pNFS3imlQaNeYbRhcLkv116tLuj1rtCYk8+Q6ziEoiDNMzR1wNiGT5LmeLWCWG1IcQm6DY4u50HTCpIopRQZCwvyHsZxiCgFXmVielkShCOuXO1gWYpM2mqx01lDFw3CSAkUlB0838Ka8NdSnSwPqVRa0wOu0+miayaO400PuWZrDxfOX+GNb5QmmAtzc/S6Y2qtNi++JLk3Ng00UlyjiaZU/8Kgh6AgUtF5VkjOgG5eM3KMwgjdMiV5WsFWdFOj0fbY2pDFlfe//728853v5Yd+6AO02zIZH4+GWJaJZRlTb7YkFWiYaLr8va7rUpRSpc5Q41kWAsPKGQx65LlcU7VaBdPUp7h+0zRxLYvaTIPutryHMtfQqz6nzz7DR35T7l0/9e9+mk8/dpZIqdvd94b7+Bc/99MYpcUrJ2Wh6PzZK1QaGmWZYangOE1j4ijnxHPPqLVisL15hf37DhDEMog3RRvL7nDPER+xIUUKzpx/mfNPfoXHPi/X/s/94i9z276jDII+8VgGy0dXjrBcc/jqk89z6EapmnipvwppyhcekSIiN2/czF23HeW22w5wefOS/D6rjluW3HDD7bzw7NMAHL55P9V5nxMnL6j7tsnTAedePMv2qgqqmwaeb3N1TR6ywnS4tHqWas0D5Gt0vYKuVcitHpom95KSiAfevMy+fbJ4NezukIQdDCcjUXvE6+7ey/JyTKOxxOmz0ueusxZhuxqeL9f5ocP7iZMBm+vbJKmcP8d1mGm36XYHfOJvJJSmXhXYmCwqyOOR227mscefIMkz8kCJwLgFUbxJVLqs7JciBS+8fJ6Tzx7HrSu10pk2ZTwkTsopl3BxcZkHH3iYq2s7vPCCnPcrq1ep+d63QLjq9TpRFE2J/kIIWq0WWZZNhRomimwTZUDXdaeJlq5grDomvlfBciIM5U+5tr5OkmR47sSrscJoGKIZ1lQBtlr3ePTRxzl4aC+WrXyDvAanTr3I5q6ECh+9+SCjYMjO9gBX8WrytEDXpRLaxCNzeeko/V6EqcZgtrnA5vYF8tzl0I1y7CqNhEEU4TsaopDjfvlywrB3kdkD8t/zrXmGg11EvkDpS3hYp9/HtapUPY0dxdfWNI3DN7mMepPAOwd9hKX5CMX7Fjos7dlDGsuzN0kSBCWGYdFqKaGhKCKIdvG8a3OTZjFxmRMrsapgu4umaTiOd02h0bLI8wJLaFP/qLKUZsrdnhzfhdk2uii5cvEyuvJzu/nOO7nl9pv5m098kmRNPg+ZqHLPPW/DUgWmT33i8xSlDkY53buyIqcUuRTCUuIGkwR7ksRImJ1NxawTqjnWPKi5Ph/+8GemPOHmjIU7DpnbL6HK99z7MP1uj05wnIfvkZDmLz/2FT704z/Os6e+zie/Kp9l0zWwjDqhiilLEVHXHUJSMgXdL0vJj82yZHpeTEQwXiteYRgGQohp4msYFmVZXlMrVCu70WjgK8GQTqdDHMcURTEthI3HY7IipVL1psVFXdcJg2tCGKWmUxQCs2LjONf4d51Oh0qlMk3UXNfFsixKlShOYIWe502TqzRN1bhrZIniunsecVLChKcsUkSmgTCn9+B7Fr3emAffK9V7R1mHeLuJVdF57jkZK9m2S91xaTgh88r39bFHrlJvtTl6tyw6XbryNb783B4++H0/xTdOyURqOLrM0vIKoppiKVXaPQduYtmpU1c+fr1BifbyNl/721c4cKMsKFX2h2gVi/m2xtkvp2puAgw9Q1NFvTiNyXIHhEWhYiBTr6CbOZqeoyhkhOMcs3RBid7dcsuDzLXbfPmpp4kjeWZa1gzzB1Zw07UpErAwI/5r17dFcrV3717uu+NWAD7/xc8RFgmVRg1NRZi60wRtSBqBUIplR4657N2v8+XPKfxxoOHVDCp1A2ssF6FbLTFNi+21MUfvlsFjWhRo4R6SoSSTj0canmMSDBPqSjp4ac8yaVISxOsYClvs1Ax2LvURSq7ZqZukZYxVhxdOykrZHcfWGYY5wghxbRk8jrLzrK1q2PNygR+42WK2Al//YkZpTMxOodHQqM3YBIVc9FrssOegxiWldvXZ39rCtBPae+qY2TXjuIicimNTKOLrOI4Agam6P0WRo4+qmHrI9qbqzBUFg16f1pwck/ZChSjokWUxrXkVrFY0hr2ENK9QNRQJNbBYu5pgKilfYQoodTZeBk2JVVimSRLLB36kFIzqbRfdzqkonlSRlczM6JQ6WEq+eDyStRqNHFN1oLrDiH2z14zqzne7LNg6sws1gkRu7l7h4VUNfNdDZHJDXGzvpRv0qarDeXdzg4MzDZYPtXnxFRnYzMzOYdspl15+nD8+LwO+jEwKhKgk23JN9Dxj89wmB249IseOsyTJFtvbDsKSiVO7bpGmEdW6nHNNzzk0YxKPB1y+rA65aEgpCjTNnXYVtYmqpTIMxEhYvMEhH/nsbMggvtLwSXMd0xF4E6nndIZO0UFTHSK7aKLVTPZYfWaRY3ByR9CYtwjiDJQaouvnlHnCuZOKj5NXsWyNpT1VglB2It5w/3tY37jE009/fcrDKPoW42GJacj7rNXroMcUWjaVUC81h+X9Dda2VwmU1HQpCvTSJVOdFk0vMV2PNM4JM9WCQscwTdCucTf6/R71em3afUnTGE04GFYwDYDGowRNq+H78p7CMMT3amiYU8WmpaV5GYAEEZ4i7G9ubpKlMZ965DNyrSzNce7sZW697aiU/EVKTQfjEUEIpiOfR92UleaJspumGZSljmna08PLchKyvCRNSxIVkMzMzMuAUgUVr57d5Opv/pFMzDR5n8uLs3S6Oxg6U5VPy6iS5SFCKR/OtQ+wtbWFaRRTKWbDMAnDWCqNqcBpezviyJEj1Gpyzre2trBtl9tvv50nvvi4vPeyROQCx67x27/+JwBcuOJy47EVPvzL/xaAG1/3Pdz/xjchhKCzJg80XYeKPUcYjxglcjzf+tbXMRrkBEqw57777+TFV57kldNdMOXfTr/4ON3dkINHbmH4TWmg66RbPH/qm3zPO94MQPPgPhadB/jkx3+Nri5PvQMPfjfNmxdIv/lp3vF9PwjAr//nX6bddHn5a/JQ/65b38zxpy5y+I7buXxWFo/stiAvTAZhl5lluV6qjTF/86d/ya4KcG234PSZc9z1wBZvf1jew7lzqzz2+S9w5pxMsh/4zvu5545j/MFv/h6NllxnZVEjTgbkqUOhyXW+Z+88b374B6Zy0XfeM+A73/Jmfu93/pREHdhff+qz7Dt4AK9qs35R7kFuy8UqDQr1fDz40HcwGO7y+BceI4wmim
IaIkgpS5OFqvxbKSxuuPVmVruXAOgKjTvf+GbarYLBphy7cy9fpenldHYzjj8l5cR1J6PZaoFK/KOwjy40HFefdkOPf/M0nX6PE899g5Eq4pkKWXH33TJ4ffbZZ69x+tRinCRalmVNq+lZlmGa5rdwsHRdV/9WXJEyIUnH5AWEqnt2x1234bou33hKnqvC9dA0ge0PGQ7knuBV5bjruk6zKfcgIytZ39rkge+QHLo4H9G9eAk9K7En92DoZHmKYVxLaoPxLktLe9id8LLjHNds0x92KBIZE5x5cRXPW8D19Kn9SxLFWBWDpRWZgGXjkmE3odU+T3NWJkD7Du/npRcvI4wuW6oQnBeSExlFqntneuRCkJflVMRDNwws22A4Uh2NPMNxXKI45PJl2aVaWJgDLaPX7bI4L5EV3W6XPIunHBzPtClLKdYw2SPKUiY1tm1PO0dCCGzb5t577wVgc/0iV9c7HD6yzN5ZGVRnjs/ZM6eYm1nCimX8FBi7/OJ/+BCGJvcDv15Fzwsszadw5fc5pUMYhmiaRrMpCxJ5VuJ53lTttdvt4rsVsqxHqoQGZmb2sbNxEbuSMFSJjNgc8MAPvI+f/qeyIBoPdnjko4/iGg5hLAPhfbcs0bE8dq5ssKzioI3UYlyGVCwVg5Qao3CEr9kU9jW+k+/7OI4z7b5OpNon613TDCKVjE2KeEII8jzHcRyqCnZTliVhGFIqnpLv+9i2jW3bbKsiV1GU5IXkb00SrqIoyLJiyu2VSZtBGIbTe9J1k0qlgmVd4yXnuRQsGfRlXGTbNqamkyfpNDGU969h2w4GYnoPllVnOJQFifseOMawX3Dl0hq6NeGQeriVBv6sXPdp/ybqnk4cjlmelzHP7qCDay0Qj1/mrEKl3PqGFe481mRLca70usZK+xiZ2eTuWXmudhuvYz16maH2ErWGfN3hg2CLKp2XZIwwupJQjsYYcYrIzst5v1ihOmdx9LtmuPD3W+q3xYT9lM1cp/GcAAAgAElEQVSJUvZcg6uX+vihh6msZvLMokhS0Gw0NQYagtGgg+7J79/azNl3xMc2Bmyvqj2w2mK7mzA3O0MxEWbzLa5zrq5f16/r1/Xr+nX9un5dv65f16/r1/Xr/+fr20KK3XJ18dCbpVlXUb/C5Z0hDf8AWSozYNuokgy6bI+GJKGsujWrLosrOeORwhEnNv1uxsxijK2qvZ0tnaVDOf0OFEJWHg/evkKZ+xQK+3vqzAUcZRLXnpNVh9nZFc6+ep77Xn+Yi2dllaFRbdAbdlm9IqtbjTkdyy9JNwW+KTPet7//TZw4/hxkizx8j5Qc3k0/SyeNqO9VFdRqlfVnA04+3Z+ggdFLF1uLmdljoJo97Fw2cewE1RjALHUczaC0coRQ7eGopNRLwjFoCk5U8UrSwKJU8AvLEgQ7oOkWNSWhXgQloxgqSnVQ1zPqFRPfM6W3DxI6WKQVyu4BmrNSpcqydS6fFQgh58UybLIsxRQ2tlJx2n+HzcDqkncthDJbdBoFRlJFUx4sg65JxZcVIWHIKsNgUJCG4Fcsgljeu6YJ9jUbRGP5vs1uhm+67L+1ztUzstLiLDj4NRfHrNDbkdWQUrcohYGpcMuH9izgzs5w5eWIumoSrdzksLrb4fSpK5iqElG1BWYJm0oKNil0nHFJ2suYVZCfvBKQlAE1z0czFE4ZHc2w0BVWvOLX8V0P3U4Zj5Tp5c4mZZFRKmggQNV1iIIYeyLTXY2xTZ0i0ibFZeKioNAcDMPGLCe8pJwYg5Ymf0znbMDFzYSHV6CmeCCPXXFZnHMZ7AyvmUfGJo5jIVRNpSgzKn4N09JZUJXP++95iC984TFKhmxvKunuakZRZtQr8jUVx+b8pfM0FmbRFa9O03MKI6W7M0Ivr8nrlgjyYiIh62E7FkKMMHRZETZ0izhOp5CK15o+TuRoG40GaBlhkJFG8hm9+bYWa+sXCYbyAanWLJb3LLC2ukmaqe/zdNI0ATR0tRYs2ySJRpTGBOJYoRQJOzs5hw9LvkEYhgRDDSH0qYphe26Gq6tXsMwJ30FW6GdmZqaVwa2NTRqtJnEcTjtxlmnj+z6Rgvf0ukOqnsvS8gz9gVzXS4v72NnpMBx28SuyGhqMcwQxhuoap2nKwYMH+dEP/gT//pekDHJejigLSyoBKhhIURTYnnsNZ2/YEsISx/gKVpIYDroAz84o1OcHwZCas5eKK+97OBgQRim6GXBw/8MAHDpi8fxzT5JGGXEg7/POex/CMEtOv/QcAHPzTZzqOq3aMU6fkhC8vEgoLROGLmUi98+5Iyt88F9+hLSUSqtbJ4+zct8+vvbRJ+huyb35ardk380HWd9+lXe++x8C8JmP/QbhTo+GL2FBf/TRv+Sjj32O5dkqv/W//EcAhFaQGzl5CKgK/tIhk6W5Fbauysru7OI8r7vrNj76scepNCX/7kd/6L08+czjnDoj4XAzM/u475a38PG//hUqiuuD5VAUA0zDo1BV6f1776JRbyEUFOvUqRf5xX//b/nm84/xqUceBeDAQZdTL/TZv3KMmXk5VydOvEi7Ok+OHMulhUXOvHIS36leg0Z7DUbhQMqNq85YkWZomgFK8VbXHAZDk/d9/3s4f0UqNB5a2c+P/5Of5EP/5qdYvSrHPByCZaYk0USGuU2hDaRMf6nUOg2H8XiAW7V53V1SjfDlM6cJg2QKASwzyTsxzWv8qiiKqFQqitchx6Ver9PrdV5TcZfeQLKjpfYNTVCKAtt1ac8r2eO5GU49/9w1Ppcw0C3Z+c5VJynJAypVj8M3rkw//9KLmwQi5M7775PrZ/UCyXBMPs7QmfiUSRXOPCvwfbmX7F1pI0qDy5elwbyh5/jOEnEWcODYhDtl0NkoGHRsfAV3x+ihpT6ZUgLt7G4zP9fk5ptszp1WcDRjntaRy1y8krP7qnyf4xpQejie6k6mOZQOWTHGU75oWR5iWcZ0X/R9n0F3hKZp0z1SWk3orOzdN+1AFUXBMAywHGXJEYbS02o0nnYQNU3DMWUXJY7lGWYYBo7jsH9FctM3e7usvbrG2975Vo7sl/yq3/+tP6PR8Jhr7OWWex8AoFlP+fhff5qR8i3zm02MdEAyihCKq9msN0mSRHpGqb2yyIWELCt+bJ7npNGQCI2mK+MprRS4dociL6i3JbTs4NxhXv+27+en/4WU8n70c1/hvtffyq/8h3/H3/2F9N5qzteY3TdLOR6xncj7urp9GZGWVFVH37IKgnFMmJcYYtLRK7FtmyzLyDI5xrYtrQMm8DvTtKcWBJN1rus61Wp1ahwMUsmxyHJ8ZRXi+z5RFNFutxkqyw/X9+h0VDxZk/M+GAym0EOQXCxd17FdZzp/nucRh9G3dNR836UoCpIsVetA+l5pmvYtkEYMGetqE289Bd2dmBJ4bgNdL2k0PEaKWtLfiXj9uxtECgm0sudBfAf2LNzATTfKmOArX/8aZZmztvp1qsuy0yncgIeO3kw+lpzPm+++k3LQ4a8+9Sy/8OP/IwCff/RX+cKFP+bQTXPoStE3yStYLYszn
5Id/qef0Jk/rBPt1phTqpimu4FNQaC55DtyX1q/1Kcx67J0RI7d9vaYYNfHrxjMrci9UhQheQKa7rGxKX+fZ7ao+rOYqusoqg5O0yBbD9jalJ6LWmuW19/zLuZnB3zkNyQ8e/aWFhtf2/72lmI3LMHZHQldmKs08RsLxEkOQi7C7Z1tPN+jVhN4ngrCIsHGZj6FLqHHuFVBlklPA4DbbttLb9DHdMfYthzIrc7LGGKBYFse4IvzMOgITKvEUDCPtUtnyEaCE09cIFcQh15jh1arga2+PwsyrBysGYOFfXIyH/n9J5jdnzEMOtx3z08D8JXj83SzP0MbyaG+cG6b4ZpHrW2SdCYbZ4IobcIww1E/aGV/wua2Rj5Uhmz1iFLoWKUgUuIfIrEwCx3bNrBM5QlkOoyLGNOWnzMeFdRqJkWuMTOjeEpWwnCzoOrLRVmr+0TxkDAVCHUI1Vot5maWSMMKnU25uW5c7VL1xVRUICsN6pW9pOmIVHHY/BmHoDSJdjNm51W7vSzIw4zWvPy9/W5KqQsMzWc4UIseHcMyyVKNREnpzjU0uuMxhgqQZmZrBDsaQZKiKdllv2axtbNLq20ySuX85QLKwsMsJ/DJnFdPnqfeqHDXQxLaorsmwm0SJyWF4ghsXt3Gchzayv+rjAviMsMwS7qZPCydxMa3m2RZgKupBME2wSwoFaSTscVgJ8CZ9wh2FMSwqFGUJbavkQs5f3EmEI5PplrLIvAR7hDwSFPluVAmZEaCrSd4ijdQUKAVJWdekRNxy4FjjLIdXt3ps2dFwSWtgiyLqdXquGpOx0IQpgl+RUIxDNMhSgZ4Yo61dQk1+a3ffZYbb1lg0NUQhvz8JHbIUpM4lDwC33NoL89RZuWUAxkkQwbjIZ5ZoVC4b9OS5oK6SlBEUZBGYBo+sSLnur704ZgcSpZl0e/3KcuSYmIog45fcckzkzSSc5yMm1TcylQcRBQzbG50CMN4Ah9ne2uAZVnUajUy5dJY9Q00RydKZVLR3UnxfJ/Zls2G4hEsLy/Q63QwDA1beRo0m7OcP39harYqD+GENzz0emZm5Hj+zu/8MWWZU6tWpxK5m+sbvPVNDzEYyv3m0uWziNJE06+R1q9cuYRhmezdu8JgJOE29YZHOHZJczkHuiVlfn/hf/4ldGsi1y65Mq896EtNBrqTAzXNYqrVKqXIKQpv+lm6llAWDkLteY1qHdfrECh/vOqihyeqBKOI5b1yPu994J/gHz7IYGeHM09JEZjnTj2BY9Q4epM8ZC2zyup6ymxTx/SVJ1HlBihyPvCffoq/+92/kOOyvcEv/vOHOHTf6wF479veySOf/QbRaJerl87JOTZixHMXcNw6f/dbfw7A7IE9FOmIvjIx/tPPfpVGy+LCyU2GyoSyNqOjCwetDNi7X4oI3P3wLcy2buNvVv8agLtuuJ3TJ6/QW98k6MsxeM/7/oiTJ79JpHyEOqngT4//Gvfd0yZVnj3nLvZxnApxmpIrQYRxepXB1pUpv1JzQ/7X//RraNaIkawBcZEMx7RwbI2XX7kEQD6CY2+8ma98RUIlL8ZdKr5OGPRZWJRwmyiMMMyccdjB9xQE1whJUxPfkK/53d/9Vf7Vv/4ZHvnYx9h7QCad3tF9/OgH/w1FqdEbKPji0jyd7QxdV9AlfYciF5SFjq1Mtg0rpd50scwKVy5IuA25wcrKPq5elclHluX4jkuep2RKkaherxMEgZR5Nq+FFYZhTBP/iXS7YRhoCt6vaw5JlLHvwBy+Mm8//szzVKsVTFV4Gw9iLArIbZb2KsuWhQXOvLyGicVIwQkHowF7Di9z+ayEDgly0ijF0i3U40iU5LiWg0bB3Jwcv4q7yMbWOVDc1zxzmdtvsd03sJVnVrcTomlQaxZkiYI0ah4b61cwlUWFrlksr9g8d2IdT3nt7Aw3iC2b1z+8xJM7St47MylLnSKXsYXjSF6Iq80g1Nlw6MgtnHn59GvgYgKhafiuT66KThIKJkUNtrdVMmfZCMekNS/3JDPKKPIRc/YMQzVOhmFM5cPH43A6V0EQ8NJpCZEfJy5vv/8mTh0/ySvf+DoAdx2dY88ND/DkFz6DVn2LHJd+yL233ciOErg5cX6desPGtAWFEt4a9EfkuRI7mRjhqoRlInBRqzXQtBquVaCZMmEIBzFBVFBxG1OT+dOjs3yHGfInfyKLFj/wI+/gL//ykzzy2NPo83L/HiY2P/ODP8lH//ZP4KKENLf9BmtRD2tCadAcSiPBI6cU14RXhBDSFHwqxlIihMbCgkw6w3FAXkp464SXNVnXURRNIbGOZTO/tDBNnoRwEKLg3IXzU1/EvCywLIPhcEiaTsSbJMdrwr0tyxzDsnEcZ1rcME2TWI+pNxoEIwXBL0tmZmbo9uV49nohvqfgja/htfm2Q5Qm18y/swxBga3EjqJxgeXE7Oz2iVWc94b3zvPw+xw+8idyjl/c3eZ7vvdt3HXXIZ4/+ykANoZP0NZXuOWGB2jvk3vQiRMvceOBd+Er8bi5uX189tlPMSd0Hnvl8wBc7P8t+w85XDrVoame/8a+FFHZZW6f/L12DG1ajIqc3jnJ1bIWPGYXdAanRwyzSfFGQvRbs/KZWVuVNi7LS0sMlaiW7fcwrBAQFJkcl9EoJQl3MBVVoVFbIhjbJDvbRD35mpl5DZH1+dIXn6KqdBCqXhWQz91/6fq26Fy5viFuf7ccyME4Ix7W8St1Bh15Mpl6RpSn6IWOpzokWZxjusWUqGZgYJk5RuEyVopb9x65le9+5w/zF3/7CFe35QGWIdjdFTSVekjdsaVbuBkSKBO6ZrOCY+V0VwtErCqft1S5sjUiitRCjRzm9uZ0tkvqShGu2ahyYPEurErEXXdIQ7S//r+e4vSpc/zkT/wjAHaS07x8/hT1ZgUxkgGJa1h85RsvsnJji7wrN7vb79F5y/sP8LPvldyCxT0eST1BkJMNZFBf2CEi00mFQNGU0HWdcQiZCnBdNPJOFUMPMWrKRLCeEw8KXEc+YLOLPnolJAxK1NvIM41G4xZmlw9PfZleOfUYDcsmGMqx21oLiIYCTYDjyTcee4dBZDs4YUliyAU9GvhU7JCm4nhdOjvCEoZMbCbVH9MgjQrKdGpzwXK1xeZ4SKOmFCHHBhVXw1h02Dw/eaDArpbMzy1xSQUDaZFimi62Ukz6oe/9Xu65cx9r3QEnX5Gb3SOf/QrLSy2qNYO5ZUmcLK2YzbVtdJXsXL2wSp7omLqFmPgwOdCY04hCm+1tuT5rVRu3CjVbBi2jVxOiQcHiHXsYKhNaTRdouomGM+UEWSbkRYzBJGGPKEVGaQeMBmo+C4v/m703i7YtK+s8f2utufrdnn3a27cRcW/0DW3QKxAhKIIimE0laKZWppWpmUqlDkmrrExGgVQpjaKomZoKiIhaoKCCBEFAQLREd+PG7Ztz7unP7pvVr1UPc+51wpHDt3zg4a4XRlzO3nuu2X7z+/5N4WTkmmB/Q35ukAY47CrZhDuziMkCFzeuUVfKjm6U4tdNJoM+VVNmhHt5nzAVRGO1
SXsptDPMukBjajCZYfspRTJLXxlw5UWGlpsYyu+kMduk1pzHweDyBZldChmTRDpkObqtKle6Q8EEW6mhGUZBFMiKJergzYr4H/Ay6vUmvV6HOJZEX4DBYIDnV3HcAC1Tbu89gbASfBn7kCQuOzsxrqeXnk8gnec9zwN18V1aqjCatJmE8oOeW6G9M0EjLblMluPQmmsSRHlJ+O73O5iWUXqNZLG8/GnFrqLU4RPzXL24QtCzOLRPZlqX9mpcungB25JjIITF5k4fr1LjVa98MwDffeZRRpMRtWqLTkdWibM8xrZtcrW/BVFIGE4whF5i/YUQ5IkU1yizqKVHyjTrlJek7MxU2djIkUGucDAVvzDXBUG4a85p2oKkiNEQBIpPlSQRttdkdmEfhiEvEsP2KrazezluNfci7Iyt3kXyQgUfucAudF71xh/jzz4jhSgO7N3H/T/0BpY35IX9rz7zN3iAXjehkGN18uQdjPOM8U7MOJcLIg1CrNilF8pD9s57X8ett9zJmacf48wpeSkLkwG6ZRIFAXe89FXy947MMNrIuenEYTmeucW3v/N1xu3VkshsurP0VreIE5loMMwKQggWFiz6I4lgmJs/yerqKknWozUn9+FOe4iR78dyZFD4oz/y43z+Lz7FtYtdvvi3nwPgtz/6B3zz0a/SbMyjKcPudifENeoYan8djwNqVZ9XvPy1/PWX/kL+3uwSi3vmOHP+Apoi+mfUqMzKgBzALKrstFexPFduKoARDjCExzgYoCwkaTYPYJsGaSGDpEsXtjC0Cq4nGI12hQ1arSbjyYhE8Vxq1Vluu+sEDz/8sJw/lkWeZKRpulsVTzIsy0LXRcmVzJS/2pTrI4UVZMCXKAEWOT/BrzXJ1f4yHPfZs7RAoLzietsDjhybZ/lKh3As23T0hn3UmgbdbYMrV2UA/eo33MuVSxe4dlUmKJr1muS+hBEVxac2hc1o2Md3bF7zGsnNWl9f5/Tz5zEUnzPJRsw0ZxkXEW5dXs6DZMBM3cGzdLZVwDzuauShRqzL8Zyb8RkOOmSpQaT4v/e+6nWcfv55hsMVciVIhJbj2FWybLdiousGhZ6UlY652SUuXbpEszENFDdwHAe92K3uC9MgznOErmOqpGyWJTiuxWg8VWh18W0P4Wt0OnLchW4Sx4ncR5Ss6XT9RspstdaoYhQpO4MB3/9KWdmNEp1wewvXneWR5+UF1q41ecmNN+Mq7721do+r3R5xrhGkMvB3DJdcmRhPK0CWZRHHu4iFRqNBWoTokaAbKknPwkA3MjTdI1FKlb/4K78G8TU+8MH/DsDP//v/k0//8X8mGq1RbSh/SjRMo0kUtWmpC7pIOrjNBmOlHnpts0+3N8L0LIp011PKsqzSx0r2i0aa7opVeI7LaDSiWq+VY9Xtdonj+B+qDFLgOA7d7q46IkgD56m/YbffxxI6QRDsVsZ0gyzLdoVfdB3Hc8HQScJdcSPbtktfOYAkipmZmZGoCaRYRl6kUiBDeW3ZKu5I8qxUs6RIKHKDfFq9KwIs02c0HHP3y+Ul6Wc/fJxPfuIiF07JOdXYv8TRG26n0ezy2HNSJfL+17yTlx+/i1iv88SzUkTIaDu88yd+gjkl3PSpD3+CSbXOy964wFe/+lEANpZH+HeMKUYxq48p/ljS5J73+Jz5G7nOus+d5Mbb5/nspx+kNT/1w2wR9npEgU6qKvieHRJONF77Q3LNrq5E6MEchmbx7OlnAYlwmWlVsWwwhYwv1le36WwHUjYUOH73S7C9iI0nn6a7I3+vftTCEw69yYCq2mMP3tTikc9d+t6uXOUUGCp7oBU5mqMRMSBUGa+onzPTMDFEQp4oV2zPIIw9NHXIo1uME5mR1YSccE+dOsflM79OGLmknpxYoyQmG+ZoaiPfGkZUKxFZbKDrchIkRY4hIg7e5NO+Jjf4cDBksQaxmpOba+DqDgf2hBDILNXVUxvcuM/nn7/3X5BUZFbM+PI5Du5fZRQqxZ2jP4pj+1zeeIpTT8jJ88Fffz8z+/+GB7/xDUwhf2Cg5XjWPl7zLjmhH//KNaoGZDbkFWX8NwGBjaGlxKqMLYQMkMX0zpzriNYYW7cosmlWxSRIM4QSSLj0TMy+g7PM7u3Q7imooEjZWDvD5ukdjIrcuCdWgraUMKcMRBs1hysXQtrLCfsOy+Bx1IErZ4bYsceJe1Vp20pJCoNAKeIYJqRhgaHpJFOXbCPDNX3iyMTzlemdITOWhSrtZxpMCoNGWqM+K/tgMM6pNqp0Rz0CdWCbho5rgqsqNCvtNq9YuI+vfenvedkd8kK7x4Yr584h6innzqty98EFck2jUZHwsPmZhI2NDXRhEausdJ7nZMksWhCy2JB/hxYgcgfbVcHr4YTlsxrjqIetNrE4G0gipJYRqE0y1XKKPEJTRflcG0NqU9EEwpXzZTROyRMNU2gEyl1+oVYl1VOMXM7Xbm0L0RuyEGZsbirVr4UKvmOysdZnJJSEsvComAUzTTmew5HGyNQp4hihJkwRWQRjKIp2GcTnuUFGhtIVoUgcRp0O+0/cTK4uhhcvvYBfdxkOx6WlQZ5FOI6PoWBPQhNExVgqVZnynWtOnSzL6JcZuD6e5zMzM8tQ3TAXFxdJJil5bBMkKvvqFhjUyePpvA9xbA1LWGTIwyRLbIQpScbt7amRoiBPXfbskdXYfjeltafL9pqOphQ3250eM60F2puXUXxhfN9jNBohVKTqOAVhFFCtzTLTlJu5NxNzV/0ubjt5O5/9rKyQVMeHSUOP/YfkF509n7Dv4F6Wl9s89G2Zfc3SAvSC5dVTLC7I+TmZhAxHw1JkI8kTbrn9VtbW1uirA9vQBZpe4Fh2eRgHUawuU3KMdV1gGAVZnIAinaeFwLJiYMhYZe8sq2BxYZZUZVCDvkacBwijil2xVB8UGHGP3kYbzZIHkWtVmIST8oLZDa7g6DGWrZMpY/FqrYVedXnnD97Hd/9WZixvufP1aNY8X/qMtLG4+87jpEJw5sIy+1Xm87Vvej3XVjb4zH//U2aX5P6ii4R+OMZSBugXT59n34GTvP0n3sv5ZyU08b/+7m9Rc6Vs9hSCs71pMbd0kGdPXQHgwW89QG4WuCZ4Qq7HSb9Dc9ahOadU//pjRj2TdjsHdaneXF8mG4UUuklnS87hAycqrCy32RnI+fNXX/kyzcU6leoMv/g+CVXstFfJ05j2Tr+sdJhmztx8jbk52U/nr6ywszXmZ37pFzl+QLbhQx/5BGPdoVWvsqlEUrJsQhxqJAr+GgTrCCEYDUZUq3JcxmnAQnOeVAsJI7kvbu6scuONN7J8eVpJhvp8QZEZZCoRVW/46EZBliW7YjHBgO8+/gxCk+0k09B1gWmapUql41hoOmRqHsL0MuVi24oIH8eEkxDdAF2bGsyC5/oMuz1MJSNfMask44y6ypyMzITLl9aoVhrMzk5Noa/h1m4gYMCb3/EWAHphm7e+8238wW/KKmdRZBREmKZDoCxGEj2kVmvieR47HXlZ7XaHFKREyuTX96tstzvMH1igXpH7aRg
7mIag0+3RUebGVb9JmMT4yDYFQ41ez8XzEjRVXXr8Gw/RmDXYP3+Ec2fPqXGvojsOlVmVAPUyLpyeYODjq4Dv8plL6HrOsCvXo19xKDIHTS/YozL63Z2MYhyQZ4U0Swd0YdKcXWQ4kvNcEwX9oI+d26XMu+Pa2MreYjeIl0G9UVfCH0FEQoZdq/HU8zJOObFQ5eDNRzj75BluuUWux81NjQcfe4TjxyUN4vhNN7Dy6DcoEgvXUfFUEiGESVZk2KraM5Uy372cJ6RJzmTSo9mU3x1GEZPJhKWlGfK6XGvPP/kQX3vgIY4dlGfvf/vk+/FrPsPCZ6ujEgQZuE5As1adOlswsTWOhxO+qiCysTuLbht4WspY7ZVpkaGT4HpmCTXVNIN+b1RWdpIkwXEs0iRgrCrlRS5tCWaas4zUJdB2HDRNY3ZRzo2NjTX0Aua8GjUFAQwmCUUx4djRw4yH8sza2dnhxZqDeZ4Th5GMkVVfaUVOHAZUfJ++qkrbts3O9maZpDR0nTyDOEnQ1eeidLfPp5DmNMlAC8qzz9BtktDDr2e89R0SXvtbv/ooz31rk7v/iVTYXX9ug3j/s3ztiWcRyDn13Ne3uO8lN/HUc08RqxjyyLEjHGgt8MR3JFR5y1vl1tsWuLL9Jwy6cj12VuY4cM+IdmRz9/2yX65da/PUZ8YQyovwd5/8Ls+f8mi4LokcYnrhMgiHQk9K8TjdcRCiz2UFv93eHuPZBsPeeDchabl0egWW47KkqrsH9lcZ9l4gUcqu4eom+r6YItAQ2nQ8LTbXuxhZgdgvx7SxtwVc4h97rgtaXH+uP9ef68/15/pz/bn+XH+uP9ef68//hOd7AhYoHK04+VolF92AwcDDtAxWrl4DYNLWWVrKce0muSL1BuEQYfpl5qUgxnEK6eFSKLNVLaS7BsEIFP0GYdVpLWk4vvzc+tqELMmpuhY1VYJPCTCMMaIAS6XrM2wMO0ZB7xlNTEYDweGbCzwl4TjXPInIXZ56/DEybxGAO27aR9URDEKZ+bj1hgM88uh51tIHOPUNmQE7eVuLl7/qZi5feIHtjsxwX13NCdOCm2+TDe+twbgfgjBIFOfKdX3iKCMpQgwlpWs6GVkqyswLeY4pDFpNj+FAZRUNi8G4j45sk2MKNi7AoaM6i8fl37T7IbYOO20NW/la9Vdz4mGGqUQ2Wsd9mgdSzFHBdx+U75KHNSwRkRcWN3y//LfCd0iGKWOVGSCRt/o8l9l3gDRL0fIKk0lEoyXbPhnkFKnAc0Y5rZsAACAASURBVKaGihF2xcPybCIFOYijGQwzZxIMyVL5d5alYwqBnikJ+SxAt5e4dm2Nd95/LwB7Z3x+4zf/hNmDBbqqYkYDm0gvUHBnDh5cIJiEdDphye3LizGuZ6BjghK0qLgWmmaUxrGerROMIraHBVVbZqBzUgo9I8uKkqdQZAFCpFjKp8UwU/I8Jc13zYeFgGRiUURz5KpqMyq6eHoVf0pQrvfANanZBctPyuzreLNCowqTOAGVmSvyGNcxmW/JrN/6Rp9OV3IJDGOa8TIVkTwv4YvS02MK44FG02d7a4Rt2+xTQh8vnD6P65m4rlvyfabyu6byG0vTlCiKqFQqJQxlPBpxx513s7MjYVeDwYCZmRk0TSvx+FmWYZkGm9tbmLZcj3sWjjEZDUkTOccqtRadXgfD0MjVmnWdOuPJDgW73lBZNsLSPUJVDW3M6lRmeqxe1SmyaZ8LBsMhlqgyo+Rn+4MtTp68mbMvXJGdoKpzhu6U8rvuTJ033Psazp39DmdPS9jaT/2rn8S3HT7+8Y/Iv2kK+r0Ow57BbXecBOD8+fOMRuD5VinFLP35dgnLtVqNm07cyOnTp8tqjK5JEnoYhmV/poqoPJV01zQN0zJwHIckmMrKa0xGBZ5rkaoNbb65n5/+1+/hv/zf/wWAn/25n+P5577N408/VpqWd9cH5JaL8JzSDqIwY0yL0szZcjyiMMcwYzIFjfrYR/4rX/nbv+Ut/+w93P1SmeF++Atf5L3v/Umac/J77jr+Mi50TzMZZYwHci767gJRPGYSrSGE3DvuuvMerl5ZYzKUEKckMXGbLf7lT72PZx6RXluPPP4XuJ4g7EKRyfkRCx9L+PT6EjImErj9xE1o1Zgr5yQ0MS50bAdmF+RZFAY5wVgDLaOreFhpYrD/QIUwDogiWdnxZwOGgzHBlL6SOCzMHaLX3WLYl+30PVdCMU2DVFULJ+OYg4eW+MCHZHXr//nNX+WZR87y7n/287znXZKw/9P/5l1sbW0Qxxl6KaZilPLPIHmKWZaRRjFHjkrY42137eELf/4w9dpMyad6zRtexqnnn+L0UxL2ONdapNqwuHL5GpraIxYXZ+l0OuSZwHGnEtVj4jgtq6jT/UAICQOU/yYNVCVXUp09SPjXgQOS0H727FksZa8yndfSRkDCCwt9V9RG18FSvleGYRCFch9Y3Cvf5Y57buCJJy5gWwuEkRK+atR5z4/9PL/x4f+kfruHjgeFQFfVyTxDcVoErYaskPQHPTqdnTLr7/s+o9GEWqteSl3bnk1WpPi+h6+q11vrO8RBijBkdru1mKNbKauXXLRMCUzVMqq1I9x+90n+8s8+L7/L8TlydD9bO3JuOHaV7a0+abEDiXy/dBIzO99EU1DejY0OhuGSZAFHjsn9O88TrLzG5Ssr1Gr1su/SNC4z9ZNJiOtIMYXpPlWpSn+knZ3dd47CmFarRaq4vsOoSxqbVG2fQwryrzfrzC4dxPE1Tj8nq3CXV9apNSoESmis6gocD4ZjELaMp8Kgr2TOnZKLOvW4mhr4uq7LzlYbx9utwk/Hy7IsWq25fzBvpt9Tr/pcW19D0wrSqY2FaTLo9TEE3LzvRtl/vXPcUanwdF+eY+vjnDwNqfk+mYKRR0GAaRqcOHGCK1euAFJsJE9zaTwG1GoNTNOg3++XJrTCsEA3yDOo1GU1pDU7h2kKwkCex9vbm5BJE+3puappGkuLs1KyXcH7onBCgV6+59SY2DBFybnKkpQwDBGGwWgk52el4uO6dlkl7vX7xEkImoahApo0n/LdDAp1ThjKAHkqkqZrgjQfUGk49LtqzUQ2+4/NcnVL7p03HfU5er+OMWriCjnG73zr+6juK3jo63/N3Jzktb/yla/k6qMDPvxb/xqAN/5vTa4+32f/cYevfUSe7Rc2LF79Xpf5hW1SVbPbM9vkQz+5TKrEsoSRYwuHerVBpyupHwURpm0SJ1r5zqNxD8cxabbkuh72h6RpjhA6gdqcZ2ZmKDARlkOhhMzCQZ9wkhIGat/SdI6/ZC+bp7YJIyWqdUwwHoZYmoFdkd/v1H2uPnj5H4UFfk9crgxbK+rSI5LRROPIDQsUxoQd5dkh9AJSqDZdRj3ZIboA2xJM16FeODiuPHAypXhn2ZAHPr1tjbtvl/jqH/rRO7jp9kX+31+TWNFnLj5MOA4JJymWEoFwqzqGBo6pUVWXsGSUo5mUwg0pGoOhydw+QZTLoHff0k3UPcG3v/UcB5t3A/Bj73kNDz6wwlJLGTCGE0
6dep7N8XlGW3Lgbn/pDJV6hScfWMNUgX7h5RiuAakiGiYBSaZhYGAoCFe1XpDmKWRmCUNwXQ1d2HRV2dV0DGotjTBMsG05Key8ycbORZSwDLYO+niO4U6C7snFeuxWi0AfYwkLU1fl7xkYrzbZ+KZqo1UwLnIWDhksKDLi6ac7GCJnEsMr3i4n/aQYELUpCYR5nqPnggxJ2ATIMgfXdoniCYbC0EYTMAqHnuKheRWotnwsz2WsoC5ZXJXlciMp8c7CMUjilCxUEIR0gp2NMOo+47Fs0xte+Voee/SrrCxv0ZqXB1Mseoi0iq1EDDqdbZrNBkGYl74hmhGTpwWGbhHEsl17WlVEEVAowYncyDCsgDjxGXVU0CmqZLlBXiSk2UjNz3+IdzcsA1OE6FpOqDxQXN8hz1OiicNQQQcIDSJRYCq4XbMeYrZSXA/MniyRP/21ETMtgabrTELlyyAENd9jMil1KomzFK0Qpa+GNN21MS2jbJfvVel0ekwCeZhUaxZRIA+A8h1ynTgJEcLAsuQhMByOsEz7RUpL0oNq79699Psy6F1ZXmHvvr1UKjU1f11qtRqXLl1iPJbrP0kSfuAt93H7bXfyu7//e7Kduk63PSh5YJqRUWCi63GpfGZbHkk2QtcpxQeCSUSrtUAUyt+3/YK8iOi1c/TpBdMwCaMx+os8TWbn6rz+dd/PV77ygHxfJAcqiSV0BODk7TcT9uHCpWfYv18Gue/44R/HMgQf/LVfBWBp3xLD4abEuatLoBCCar2gPwggm1FtAEPsBqmu6xIrta3oRRfdkh9gTEEIhVRkS3cPZ993mZ+fZXskOYLRIGahOcfm1g6G5atxd8jjhEh5Gx078Sru+6E3sb7xbQbbSqihcHjs1N8Th0bpZVdzKuR5iq5gnnFkIhwNtyoPcQDPOMmHPvHbLBxaICpkYLhQHfHMt7/Fn/3J7wJwaN9+/uCzf8mkv1mac+p6hSzLWNrT4iV3SeGLC+fOcWX5FI4K9NM4A9sjjiqkyhWy1gDDMoknY4QmA0rfsGgHPY4dlxfaSt3j5C0vxy5snr36RQCeevgF9u5v0GkrblNiUa0bZPqEii8TaNdWNslTl1ozL9sZjAxcp7EbiNsmo0mXpaV5YjV/pA+YRqHtwntuPnkHp555gZe+SsLanjn9TbZWLrP/+DF+5F3vlm164mGKbo9Ty1cYtJXKmCOVwabraqpSlkYxN954QvZB3efRR56gVvXZry43rie4ePk0idpGHOHTHw2xX6QuadoGRa6TZ6Jsp2nn7N1zgA3Fj4vikCiMMU2bJJJ9sLCwwHg8JgjGuwaoeY6wdiFWEq6qKY7RlCO0yxuZGuEGcYAQRikO0KjPkeUJFDo3nbxZzQ14/PEnJXyxohKgRpObj76J9Y3HAOgPz2HQlHyS8kwpWNozSzCJsMX0fIjpdrvMzsq1NxgMyHPJvQxUcFxoIEydpaUlXFueD+vX1onClGN3SyhmwYDVqzHDba+Eejeac+x0R0T0cHJTzQ+ffQeaXLp0Rc7hyMa0NFqLKevLKnG6J2a2dhChvDAb9ZzOqk40NBgPJS/TNh064xDLsso9SKo46uXYgeT06JpRCjBMg3n5OdV3WcbCwkJJX2j3+lS1kDtm6nBAejxazf088Fd/xzt/7J+SFTLI/f++8NfY7i431LU9kjhE2A6RCl6LWJNCJ/quct14PMayd7m20wuvpmnlOsqLgjAMueuue0hjOX6nT5+mNdukWZdjtbW+QaFrUnVyqgg5VbAMR9x+TKpQt8cbaJ1N+ooJM0LgCMF4EqAb8nOzs7Osr2/KfsqnSoBSkGWqGuu6PpZllZB1kKp/juORpQVuRc5hy3S5trryIqEmeZ4mqn+n71zkkhflOHJsxmoPm45LHKXkSA+y6TryXY8oikiTpBS+kBevguaMvIT2ej3SPCEIAix7l6M7Go0xzV1eZJFKRcpdjlmB7eiEcYKpyz3PqYMR5iV08fir9vDD/+4uBmdtagdknHviFp8nn/g09pKOsS25b9dOH+A7D3yLG++U+9bia0Kafk6ycpg//l2pyLpxpY0WCe559wz3/qD83PDskD/86A4NxZMcDDvksVTZDEZTsSwf9DGmUcFy1NqOIoTtUFF0kCRJ6XY7VKoWuuLe2bZNFBdYjsdkJMew7nvsbHTLpJeGQWufz2AtJFFndG2vyXjSx/MNUOdapxvAKt/jnKtco7upDL1MwcbqDvMLLSYDOdFmWgJhC7IiQHn6kqU6RaHheirT0g2xRIs0HSNqKpizUlaumPzU//rPue9+qW4zGMBTTy9zTqmOaPoIYdgYIiFRSjJJRwYoRgOG6S7R0LcFwpwaMKb4Zkr3SoXFw0pRzJ1w7/fdQK21xKmvngfgS3/5NK3FfRzeLyf9KE84d/kiZtjAdWSQ+/yTI9KkQ63wKVIld2t0yIVBNJEbTSYMNM1AJ0ZRIOSOr0uJ2pHKEnc7Mboe4ppy0ZmGTjqJ0VOLrQ2ZKTt5fC+BfYxrfUkAd5sCw9tm/wGbwVW5EK89ktC6eYF8oU9fqSPl16qMV3XsmmxjGHZoGTV2zo4YKmUw32sQBn1aXoOVx+Xf7bsZUi0qjTFtNyXPDTQjwlTvEg8S+v0Y2xTEmcIWazkUKSo5iuV6hHEKpuTTgDzwfK+OJhIGSrFoPJ4wGIxI4+nlw6ZwfIZpQq4I/M+ffoa3vvHN/NbH/oj2puzj6uICtt6jUC7u9YpDGI1JswLUxk2R41o+WZ7iK1EUUx8yX5HfDzDIQrKwjkUd25pyiRJMMyUvEixbBnxhkGNYRinpnqUFWu5ikVOx1GaX5ox6CZ7rM694GMPhkEECyURlzvQmdX2G0WANoyWzuDe93OPsY2MczyVTwgJZAJ0oxnGmClSRlE7OivIQcF1bCkgUbmn4mOc5jUaDKFYVE3xsd4RmFKSJbKfnWeiRIw8YpYYURymmaf6DA3xubo7t7W02t+Th7Fdder0eq6vSAPYVr7iXKIrwfb9sU6/b5vSpSxzYdwODvjLZNkIc1ymlhIt8QhpLVSWhAovhqINhGJjCo9eVbb/1thv55ff/Cr/0/l8EwKlcI81CxsMWlhLi6PfGVOo5ceAwvyTbXq34fO5zf4HnKgUNLacoDCpepTyYzjx7ijhKOXikhaXa4Dgez596mukkftnLXsLCwl4++du/w6HDUvRibnaJy1fO4PsGwVgevnPz8wwGg7JK5ToOSZKQZdl0JjKZBLgVjyzPEaq0mqZZqXg17fM4jrl2bQ1fVSL6fZ2X3n8XZ889z7nTMmDWRCxlf9WCnJsx+Ku/+AN6nT5vfOPbALhybRU9NjlxyEb48pL0zGPbGFaEqTZm13MYDHKEnbFPBfU/8vZ38MkP/Sof+tTn+eZ3JC9KCzUuf+dhnnxACqI8Xn2BwWaEELvjV627jIYT1te2edY4C0A4nuBZLabmjUKExFGCpreZmZGVak03KESMZmjESnI/DTX0POOn/82/BGBx73F++M33c8vNe3jTW2Q7v/toQrcfMlSy1nkaM5rYHDpWJVbBpOODb
etYdoE2lOto3+H9XLj4AgZq78577NtzmOGow86Gqqx6VUzTpNvv8La3vR2Aj3/8o7z0JXfz93/3RwCYjk2zVmfQ7mNZ8rtf8uo30Tt3gdid5cnvfEPO9aKQ80CtD8uyyPOcarXOmTNn5HeZNq3ZBuNxn2vX1tTnMhb3LPDKV8jMcnt7jdWNdcYDg211gY6TEQePz7G5uUW3rS65RoXNze1S8CEOYyxTEEVBmdEfDAYyeEx2k1yO45CmaakWKIROUUyNTOWZ+WKxi55SOjt58gTrWxslET+Y9IjTgrnZJR5/XCrXJUmE65o0KnVcxafeGYRcvfoocSwVvNJEQxMxwtTKBN3+vUcJwx7j4YjKvORdDgcDXNsp2+k4DlGU4DkOmRLQSdMUMo1wHBEpa5AszdHRuHRK7rk6FZLQQBQJoFSGNydU7BkqusVEfX8ajrl4OsL35QUhtrdJIov2tRxLVe/stsOVC1eZX5R78KQb0R+YpJnGvW+WXJiN9TbV7ZQrV5bxPHkWTaXup5dl23Ypcij0XfSBEBLZMv3f6b9tb28jlAqvERdU602upQJzR16SmvqIt//gW7j39XfxxKMPyT7IwfVrktOJNAdOEp1ci8k1leQ2KqrCPtm1iDB15mfnyr1zbW0Vv1plcXGx3PNM01TcoYSLF6WAhmEY8hI/luNZrdblvEvTUrk2SxLSLMZxTJbX5bmSkIJmkU4F0HQNLZPGv4lSbdze3paJimw3ARLHUlioVF9NE8bBhCgMy3mXpjm2bdMZd/Cqsv/63R20PKOqqoWO71GpVMjihF5Pvp+UWbdJ4rC8xFuWPC9DdQnLil0EybQNo9EI0zRxHOtFiJKCggzfl3ug4/msrFyViBrF/onCGCGkUmSpcGlI1UYxFTYqEpLYwzWrpfBNMHaxKhB05W/duu8Ofuntv09v/TJ//PhvAPD3jz3IY5+O+alPnOTD/8efA7C5MotuJXiX5Ty/+5/OEa0U/PnvXGQwlkiARt1jplXh8nN9Jl15udKDCtV0h0iZQrtihoSIJB2UIldZmpHlkKYDsrFKUlomSZAxVuJVrbk5ef4lQ7xpHxZyrkdRVCYbhLCkNQBTjnnO9rUdslTHVlYB8QjyyCXVCypNdT45AUP+8ed74nIFBZquAlWRMhkVrMe9Esu3tZXiuCm6thuoxUHGWGQ0msotPRsj0oJGYw+dkbzYZImJN5Px2DNf4+vflNLBQRhx9Mb9rHUkNMKvFOhGCPouRC3LZDZhNM5YXJqqg8UEYVbKPLu2iyUiNgc58UAu6gvPbdLtw/z8LI15GQj32ylHjgoOHZeZ7L/98kO8cPECxXCAUMklf0HgzhjY2YTty4ogXIHJOAPVL+E4xbV1ihDm5qWPQGXO4ezqRYQjiOQ6QNN1TN2lyOViCYOCQRcaMxmqoMDlzatQVNCm8rRBimkZGE5K5ZD8t7HQWXu2zdLdFs29Mhtz6aEx2baPq4i4rj9Df7CFZeekihCdxm1sQ2BmfXbOyYPBdAzqNwgYTRVpDDTdAj0ijVUAn4X4FbCtlIlShMozDV3ozCzKMcjznDgziRLIkJtPoUibeRIQKkn1PE/R0hR/6suSxVCY+FlBpPx5eukmZ1eW2XO8QXtVHo4zaZdEK5gejJ6nE2UFw7GOLpSoRh6SZgGaoeN5su22HbK6nKG5iuQ/C712RDgeUJ/z1ZzKiOIJwswolBqaadnk5LtVo9zEcHbINYiUbDZFSK1h0l8ds3Nejmlzn8Xe+l6aR2R/Xt7q0Vo4wUsPvo5HHvuq/C5jh2qtQZyM0FWl0xAaeS4lSwGiJIN4WgGeKpaNKIoUw9AYDGS/bG8PqNeqCAXv63Q67D1QpeVU2dxQLu5ZjDAcNHZVqSp+jSzLSjjKzs4O29vbspqiyPJpKgUYplnrK1cuMZnIbGy9rgjRUZXl5VV+7/c/SaHGhgwcOybXVFVOq1MYI+IoJFXE+9tuv5P1tTZbG30OH5YiCb/zex/jAx/4AKNcElHn9mREQ4HujJiMlArQ0Qr1GYPnn+piqkXaaa9h24JElXt1Q8oiC1FQqCBiacEnmAh8p87zSjjh77/yNRqzFve8TCa3ctHn0SeuYfmC2+64DYAbjt3OUx97nNl5G00FV4PBRHq/qKBlfWOLerVGEI53DwVbkGVShWt68II8hKeqhuNRv/z/ElXdEqbOV/7um9huzN5D8oL3s//2F3n/r/w8fRVI3XH3S/gP9/4sa53L/M4fyuD/9HdPccOxg5w5t8xMQx4rH/vND/PpP/kjvv1tGfQ26hWE2aWzZXD8kBTn+PQfPohwh3zit3+ZP/vkXwJw4+xBVq4+S6cnq2lGrmN5GnpukigY4mgwRBc6hp6xuiklomueQ5YJCXcF6n4FS4/J9YzZWfkumjAJix2ubI6wKyqrScpb3vouHnn2lPy9Z1/gs3/+STA3+ehH/xSAeNTEdWJuu11m6qMQTj17gZ1NE9OWFeGq51GfG7K9MeHkLfL3Lp27huvr2Aovvbwy5Aff+hb8WsIXvyDfl9RgMplgezbf/KZ8l//4vo/Q7Y1o1OXnokAjK1LiMODyKQm7+oX/6z/z/n/3kzz+yFexlSBBEAQl7Atkxlaq8CUl7O4lr7iVOAk49UyP+qxctzfedJid9jqHj0iYiKFViNOMYNQuhSkMw2R9rYsparRmlABKkrM4v8i5c2fU/BEsLCxSFAVrqxuqDVLxzLFtUhWwh2Go2jl10dExLUEQRRRK1UxCGnOEMLGVN1MYhuxd3ENDyZD3ejtMwpDBoFe+t6GDZQiSOCKJ5fozipQgGKEpKxZTdxDApDehohI+9arPhSvLHNh3oPSGMgx5IQkncu5nRU6jMaNEF6bWCxJat7m5We6VvqsuLkMVuBkh1XqFKJswRUYu7m2QpwG9bQ2vpmIcU0J5B2N5YbeFge2m7DtMWYWb8W9lOMh54glpUWOGLnE8IU1NnnhIJiRuudvmhTM96X2VTKvcGXmRMaXTh0GErgt0fRdirKkqVrPZYmZGBr5ra2tsbm5SEUoVzzWYZBYbwwy9Lcf9J156F08/f54LV1ZKyFaeFRRpQqKQJIbQMMxEXjx1ZR2gZQwGAyqVXTU9YeoMx6OyyuR5HnmasbpyDUcFKkmS4LseLzx/ukzYUcgKkG4oW5n+CE3TqDXqpZy5sCyCfkBe5GSZvMjkQkfYAlFMBTRiNNMgV0kakMqAtm1RZEUp+FCt1PE8r7zwRVGIEIIjR4+Wl54kSUoYYRStqPdx8Cs2XbW/7a8fZjgcohd66aFVrdbI8wRdK0hTheopEiWTr2gepQLnbtWPXEEqVfURZMxaFAWDofJucl1aLZnITNPpO+rYyscrVqgbx6nIqqMSlHM9izwzmEx244ZKkZC1NQ4dkfvdf/rAL7G5bfDI8lkeOyUVRE99asw4aXHxwQlXz6lq9v4hm1fHLO6Xytn7mvv439/7VwzjglZNxrAYDnndwhvZrDwnk0CTLKLIBoixOv/zHYpCQwiLuFAqmKaFXpgE4bhU/SM3iKKwRM6Mh3263S6Li3Nl
V/8bcoHPf07LmL3Lp3SOFgdEJprr21oKkbGmMPwmIhMV5JHPS7BBrjkS1zUgfhBE1TlYzXBqSxLRD881/7MkHkvq9aCYSkvdUYVKUd30ZVHWytqhoW84z+SHZJoNFWsGOpWnU0TVgfIZuaN75u1dg+/6M/ws2DqxThmG2nAHt0+JDqxHCnstw7E8VkJwW6gl5q1/msahwJ2gZIh4fHhL5VKnznHesp99GPvMrJ4ZzxOKIqHXw49Fkf73Djqk1ef/jzr/K9t24i/UUX5LTGqk2jnvCG0lp3XB8ZCGYnNe9+d58XPmYFV/b3Ztx/eMCzL/Whsn+Xpil3795/wn9IyNBxr+xaPL2+QdM0LJdLjIO79XrWY6sVd/L9ECkD8rxkbcMmgdPplHm2RCnVQemrUuPHXsedHI/t/BtjOoWvxbxAiAApTaeG5vsrTo9dm0237v3YPg+j0YjZ5ITD++A5OO8g7nO0f0CL+DdkKBXSVEuCyK3zKsD3UvI8Z9C382XVAbOu0OA5P6DHA01jjPXyMgYpW4GNXgddghUMajo7onQFHt/3mU9rBsMUXAFLCsBUHYQMGSGksbBIbb/LB+8d8+kf3mI+n7N40CbjYz76iWepsHv8V3+jYGu8Tjn3eHTPngX9ZJsiyzHa+SQuSqI0oNf3SSPH/5U+u7sP2N7eZuIg8HVdYdRKoS1oY6EkZeCUeSeTifWVC/wu8K1r68tk3YrAi5MumWqTHc/zmE0n1kxatl5GrVDISgygaRrKokJ6re+jR62mFsrlxkppjad7RLF7/guB1oIoSla+lmtrFgYdeJTubJ/PZ4TxKkFpBYU8T/5ranaj0aBbC5PJhMlk0s3zeDxmZ2eHft8au7drY3190xYF3f4i0dRN2Z31CGG9paTs3tsgCX2v85UDkJ7oYLBg97umaej1ek9yrhqNF3g0riAwHPaZzWZdDOvhU5Y1UTjo7qnIa6QMWMyLrhClGk0Y+OD25SS1BQSlmu4e2rmMoqjjRYZhjO/FGLcnRGEPVbfCIHbMozhwCdXjiZs1bW6hvPNZzs/87J/htde+zcOHjosW96jqovu+o9GQPG9hgSulzlatsFW4XCwWCOFx5ODaUgxBaDTLTtHPCM2Zc2OqSrG/b++hN+qBNh1dYjKZgIC6WRVYillGEPr4YcDEQa+DMER6sms+eL7H4jhjYzCics/6Zz/7WW7v3kCr18gWDrJpXDHt/+L6f6UWKIS4DcyxM9oYYz4phFgH/glwCbgN/DljzMn/3fvI2DfP/4BNrrJ5QFzfYj6fk1R2M6h1Ta0ailp21UJtCgQeZdlWjYTjQEg84TDenmaRVSihyQ/tBvEzf2HEX/mPXmLiVI6CRjJc3+Jv/PX3+M537WCdfm4TPzJIBnihrWDUZoKUmspVsgWaqgIvkORz+3lrm4LpboRfliTGblrpek5ydoGX29dsPBvQ6y+5e1uwOHEBl4CwF9KUA770Y1Yu+df/xdcYnT9E1Q7TqkriQOFteTSOd+KJhAfvhnz+T8fcuWE3pO98S3HxfEzo5GnrYEkcQzmHOzfsw3LuvKusN64TIStEaGhkxdARqW+8FjC7rxBG87EfsUHZzkfOc3ww5dEDKwXdTw35BLL9jJM9R+6WPiaEyqiO9B5EPrUwBG6DzOY5pk5sFcuzCUmLUfa8il7kkj6vwmONqml5KFb95tLzKY8e2DG4f7MgiDRVIUhim3xECdR1ydbmOTtOvs/D3Q94/qNX+MAZG0+nc849s43SHsZJ6UZNwdF8zprrTh4dHBOkkqPDBZE7KEo/Zn3Lp9w9Iu7ZTaPpGfLCw3fV9bRXI0KgCRhFNmg5vf1xvvHGVzh/6flOVfDh7gOyecXpbVuSNqOGcn/KYHSO/Zm9z+WjOevDU6yfGvPgjh13Lwo5d+osjcPjT08eoksNZcLCKWcRbCLyY3Qa8gmX7Kx9+DN8+nOv8D/+3f8WgIMHms21AcZfkGf2+1189hSHd2eYZgCJq7SOA6r+EOnZZ2Zcb5IX0Du/Se0qqw+v30LonGwZk/quClcs0B40rkpFo/CR4MtWBRVfNGAkxnXlTB3h+5rtcconNu1B+K07KcteSYDPYmp/t7HVp6oTjOtcB8mcRk2YH0cMx/ZZP9grqaqKc+d3uHL5JQDuPbjL3Xu3GazZe+r1tphPJVl53PEGjFlitI9uyk4h0eLzZ48FyxojBefPn+fQGZtny5I4tgFWmbWE5AaB1wl/aK2RgS12tMabOHJ1EASdcpUMQppaMBzaPbColwgCoiDpVKpuXn8PIUp60XZnMNvohigKcE1pVNFw5cIznBwesTuxybg2Po0p6I1CamfmOBoNAJ9dZ0sg05JRP6VqMsZrdp9YTKGujE2etSOme4r6Mel5mzAoLl+82CllPXr0iKq0SUVRF914BoHfJWlKOdVFE2HagC1QGBRnTp+jaux9ZfmcugwYDmxAPZ3O/0/23ivYs+S+7/v0yeEfb74TdnZ2ZxbAYrEBWC5ILkFQBIOkkkmJFmmbEosuWRRt07TLssuhVI4PLpdl602ssiyrig+iaPBBJESKASIIgFgARNq82DT53pkb//nkc7r90H3OnZFouiySLrk8/bTz3/s//xP6dP/CN5BmAizFD/zQdwEwOT3h5W++xebaBv662UOyAktZHN410s+uYDiK8ByLuKfn3nRyQraMuHhJk9m3trf5/Bd+l8G6S5aZ516FBH2B41idPLNUFh/6wNNaSRHIkiV5tsSyK5ZGWcqxfZpG843aIEWqCiHAMvuV3/PJ50suXHia7/rzTwBwsnwPu/SJ++eouj1ryJe/+NvUjSnciBFZXiGoEKbY4QcRlpAISlJjqun7AVV9FiwnqxW2E5DnZ1LFtm0zHA5pmoaDgxPzPdvwINqkycIzJsdtUFbXmID6jHuhE6kz4Yj7OYuO4dppKW8jf2w6pEVV4DoBnquf3SpZdkqdbaezaZqOF9aR+mv9W+o+TousmwdkuvO8wPEswwdpK/qSrKhRUj8Hz9VJ26OXdpkt9TM+PVnQ7/epyzMSv5QSWfv4/lniv1rNePLDV5nPDIJg0Gcyvcd0espHP6Hn52gU8Na3X2V+rNfvKrWo6imeG3U8pTJrtDBVpeOP0Nsl7mneZ2mKXHf2bmBbNp/4xCe5dk0XzGazGYGnBSUAo6asWCarrqOgJJRNHuqG1AAAIABJREFU/YAxrFbYs/GdlheWdwWkTm2yLLuOWNu58n0Xqc7U3/r9PkWh192NjTUzzxYUpaSpJcIkYZZl5kJnVKvfKT0PDaKm1yNNF8im7BKZqmiM2qQprpjkIfL8LqguyxIsaOozIZNVmhh577b7s0kcx9iWy3xpFISX2sTdD0pqk2CUWYHlnCUIjuOzTDLD0TNFCs8lCLSdQbvmtR2d9l1reYaBfyYM08q8H58cdX/fGg+3z8WyICsLY+VxJjphWRZ1XbO5sdV9bzKZPNDxahUg24TScXQXWaMb9LtcFEXXadTDIi8KELIzKI76Ea4SZxLynoW7zAi2xswW+rO/9P0/Sryl+Ee/9BmUE
RKzXIVt5dhGbwBRmN8LUUb9ta4ljS20cmWlry8ORghXEYb6ONPlinAEVW2DKfRFcc100tAbxOSGCz4ejJlPFyzMPieE0B3RMOyUCJXVitnUnWmx77tYzv1qjA2VKonCIaXhsOV5Stx3cFzVvVtSSpY35Z+OFLtJrp5XyuCH9Gf/EzBRSv2PQoj/Ahgrpf7zP+o4vY1I/dm/ql3jb79bk+Y1yd4eeaoD9lxC4IfkC0nPyNF6fkNRZlTlmQO2SbS7BbjKHGoahFcxvas/+1/+9w1+5Kci9u+aRV4lnL/wFP/Jz9zjV39VL6RblyyiqEdTeTTCGE1xjGtph2uAurHxnJz5FIQhB65tuGSJQ3Lk4puAsn8uJ97y8MyilNqSjeGYUu7wkQ/rTfxzn/s8btzgOg7VQlelJkcR4dYxuxcNdNCSVIlEuDWjoV7sJong1d8uePQJh8GGhsQcL/bJ5wm75wzZeiDw7BoPi9e/oT87f6FhOHaYG3iKOwCrCpCp6CTkU+Xy/u8peoWH1deb+HN/4ROkVUPPJG6vffsl1i8pTl8NKabGAyUpkI5DGHtY6N8rqhQ7cBmM9Eu2mC2pshTV0Pl/rZYZkamQrK3pTUCKFbYt6EWta3zAcjmnqAqk8abxXR/Hq8mzitIkCJtbI3Z3d/naH2jp2+3tTUprxTx1cY3+fBRBrWyUEPR9A20LLNK84sYtnaBcvXiV2pW8/c57nL+iu00iiFjObuOUFpmBR4YjB8IUE9/iBoLhGLKDkBdf/AQAt26seO/GWyihCwMAdSXxPadTc/KDADuokFZNletrtpqcQWgzWhuTmLl4fLDAloJyYspWWzHkBbZ0wZA0sWOSdMJA9egb0umP/bWf5dbLX+H3v/WSnq/rA4QrEcLHaowcvDrH/vKARmZgFCjdYU3WVPRiTTAtlzmlbeH2AwIjbFAcHxA7ayyWU1ZG7Sl0XYqm7jZkz9GV+rKuiYz6h+2NKas5tmOCbhkja0XUE/yZD+l78Oo7SxYOpKscYQKg/kixXGVsGnUtxILlqcNy4eD5piIdemxtbVGVisVSH39v7w6PX7nEx1+8DMD/8cufZ2Mrwo8aDvf0eQaBh0WFLfqdjHxZlqyvjzvoQgvFKepKe2agoVGOYzOfzzFLELbQZOx2Ia9kg+cGWDbYJuGqmxIhBHHU74jpZVnSqDNy9+l0gu8H/Mxf/1l+5dOfBmB6eoDrCZrCpTawF9dzdAJjOkRrwxGe7TGK+8yMQsn1m7dY2xrh+Q6N2eSKXFGVNbYRV7n4yFW8oOHw8LjzRClKLWTRi9aZLTRc0RLuA54vLRHeRnSdDtu2sYRLkiddRbb1fWrFTkDfJ9nYIHWAYjsK2/KJooi4b+ZZkZGngjayTtICsCibkief1HtIli+5ffs2w96Yixd1oWSZLlmlS2YLfeydnW1UU3Fx6wJH967pe97ULJdn0vq2B5UsqWqBLYwIBDVCSISluqqtH/RoyjNJ7jD0CXybPKsfqLDHvfDMl8Z81uv1KHP9XivbJXRdprOEP/9XfxiA8497XH/7gOPJlLHxEqsWLu+89g6Dof73dLbAcjwWsxlPfuApfQ+KlMnpEUJUmOUFJQXNfUoVsmlAuFiW1QVORVEzHg+I47iDrerERnTwuzTNcRxYW1vrlDmrqjlTE7wPCqUlzU0X1Uhje57XqZo1tWI4HBP3fN6/rruKzzzzQZaLlONjUyhSgkbWJqjUH7WQq1YBUc897Wt0P/Srqc4SgvYz1w9AlJ3HWt1oEYjA7A1JskQpQeD0O2hkTYlju/TjkMyoQsraRuB087zIS+I45sVPPM/Xv6b3nlWaEUdDprMjLKftoAnqKsVz9F7vuj6WrWFt4zVT8CTn+CBjY1ffp9mJQ5aWeL5gtWhFIQRh6LJcJmdqgY5nRH70+z8Y9jQEUqkHuohZWTwQxLuuq5Oo5uw+VVVligHmne3+XnYqjFVVEMVhNzfaRL3X63UCKLZlURY1cTzQgTsQxSFKKVam+CCEjefbWJYkWRkJ/sE2VVWgVHLmKSlC/MDuFKFnS/0sQidgYDof0/mEqq5xrDPxj/tVIgHW1jY6IYfSwORbb6+8mDOIdWK4XC6p1aS7byh9n7K87O5n3B/oLl+juv3Bso3vVKtS7Xmd2myrDCil7KTJ28DeskXXeWvPCc5U/PQ9b2Xiwwdk3bU4hr4vWZZoyXjnzI9TwyTrB3zKhAVFVnXrcFXrjqFlWcznJiZAYHuq65xVDgx7fepE8uILLwKwv5hx7bW3wUkRprEQ+DZVXlBLU9wJe1pm3g6xTAwEFnmj949WHAdlEcQOM5PcBa7Ecwq2Ho25fajnVCUtfHySA1DGrsBzGqII5gv9N00JrqM94FrLJtkY2oZFV/AMQ+0HWJvWVRRFLOYJjmN14imO41A1RmreFKuDwGN6Lfu/TK7+NAQtfhT4RfPfvwj8xT+F33g4Ho6H4+F4OB6Oh+PheDgejofj4fhXavxxOVcK+B2hcQ7/q1Lq7wHbSql75v8fANt/2BeFEH8D+BsAXmwRxDrPOz28ydalq3iyz/EdXZVYK0KWtcawLua6wjYcG9yngVRUTYnn6CpQ3bRl4wZb1XhWjCd0hWTD+RTWPKHnvgdAtBZiOSlHs7v4xu8k8NbxrAIVlJjuPoFbo5YVrunauLGFKyEpXBpp3LQzC0/1YVyQ3TKtyUCQlzmF0cSvg4i941N2Hg9Z5MZtOnSRqoAGLNMEFI3HWjzAdgzBlBI3FFi5z/yaPtbaVcVf/smneeWfHTKpNaRxtNlQGEEFgLhvU1WCMlkjHOqMfrIPWxckroGQ2LWN7VhUUUpiKldbWz1uMuHi1Uv8/N/8eQD+h7/z33Dz+jHf/ckfBOCDT36cu4tvYHk582Xb0g2BijwtaM26+yPFbK64c1dDyEQNlx89R+A63Lqup8rmRkyaLHAs0RmEulafoqiIhrort7e3x9pWQChdFjMDZ7AkQlXYVkm/r5/DyfERtpKMB4bkWyfYAx+Vwsamrm6l6QrfHRD3A+7cuKm/V9usbY3a4h2rJmVnewc/8Jnu6fvbOD5rG30cz+H0QF9PNOzh2Vso11Q0OWV65JAdwTPPaCjIYvYtyjcbyirtJPgF0ItcsrQl3aYs9hukkFRMzPMLeezcNm+/e4OLV/S5R1FEOilxYgPlSV1yJdg+F5HM9HyZThIiNyRLSxLTRf30L/19LpR7fNxUt5twm4P3vsDdYszSdCeq4gbrj+9iWUOGtBA1h9NZQnlXH+fKBy4xawpmywWh4QjVhGSJRaACrHVT1ZytiP2A1Hho2ZaDchSe62C3xWR1jMBBGUhA0JfIUnA6S/i1r+vOQH+th6gExm5Dn3sN29t9JidGhMYLiMKYyfSI//Jv/vcAvPzyy3zlK1/h/IUdTqZ6nm3v9jmZHvHr/0TDbcbjAVIlIH1cX59nltb0oog0S7qKnlIN0+m0w4V3pp1FycgIsIRBxNHpsZbgNbCpMPApiqL7e9lAIbUpo+e0HiE2AmEc7fX1+X5ImuccHOgOURzHpEnOL/zCL4A6
807KsgKJ20HNGiSbm9usjEHiyemUzc1N3r5xjR/6oR8C4IkPPcLXvvUqaVK0FiEMhzFHR8d4BlqzWq1YHswpyxzPdD49p0/RlKTZguFAdwyn0xlhGJIkxqPI93Xnojmr0DZ1Q1JmCOtMft5xLM37MNyN0AtZLhMNG/Fbj6CQurSZzWZ4fusvGGLbVQfJCf0IZVWIAlLTOVosEuI4pKoKhFGZcRwHVZTEptIr0xV5UVGOFbbhhi0O7hrel6lM9hzcRrGYW0gDi+j3A8N5utkhJFzbYvv8Jjffv6mfJ1q6XNF0sMcgckEUOK7qeBJRFFFX0BvojnCaLlHkWI7kxtt6H5gc29y9c8TmbkS/p9ez/eND/Eh1JP0gFORlSRj0mM/1Gu/4FmWVE4ZexwPBsqgrydqahiqnq4Tc2HP0jFjNYGAxn8/J8xQ3aEUSFEKq+6BRFlUlWSxW3Xy1bZs41jC+tjvXdgpaEQPtc+aglOw8F5umYrVacDrJ+NSnvsecw4Df+9wXO5l+lKCqpfHNOfPV0XC/hqpqPY/8zosJdNe4NJ2ytrOTpimIUnfrzLWX1RzHdphO9HMv8oooDlmVOc8bs/FXXnmFra2QslC4lu6GHh3vE8UC29Lr+fbWLqv0mHevv8mRgQpHcYASKefOnePk8I6+HhkTh+td9xahqEooakWS6PjG92rqpqIN0bIswbYDyipB0XLKo85vr4XurVYpQRB0nKssSyjKAsd1u8/a9ajthIDhXCG6TuRwOHyAM6Tv75mPWgsRFRbG49JA4iMto53lSwIjEGQL7X/ke363fh4dH+O4dudbVOQlVeFSVinf/wMvmmuZ88rLb2KLAYFZgxANXuR00uVC2LiWhjSmpvvadtIlDUK1XYZWCl2vU8fHh0RRj/X1dVbLaXfuWT5DqQbH1e+D4zioRlI3Z8duDd9twxtaLBZ63TPQStD2Gq7rdt3c+XxOGOpuXSsBblkWa2trzGaz7h11hUsj7+MJKtnJ0bdG142B9mo+ob7H6+ubJElClurr293dNdDeA7a29Pt+eHhPP8Omoqz0fRkMeyiZdAiNKA7Icu3R1wpoiEpRC5CeuSdFzWomCV2PP3hVd2gbShy7oG7sjj8t8bB8Sb3Sz1ilgqYWBIFFYxloclljOT5OWWCbDlu8tsZJMSXs63vQH2ihkdFYsH9bP4d0EeCtV1x+7lHSyap7pnku2Nw88yQ7OZ7TNCCs2MzXFFAIYeG1Rs225mS5nTVDie85D3A1ARxLEg+irjuY5hV/1PjjJlffo5TaF0JsAZ8VQrx9//9USinR7vr/3DCJ2N8DGO+uqcLTC9Zz37/JwWFNU4RUQgcItsxB2Ni2Q1Xeh9WWeXc827ZASKqqoTA8At/1wHYoatVygfn0r70MFyysnvGmeg+ufLAhSwZ4ob5Zy9WEStkot8QZ6yAsX8WMCZidGKjCMCK2+ww2cvIWKliE1M0KPI9oaBIuN0dVdO3bRiaEwsZb9Xjr7VvmZkjiDYmsLYRJMi98uKaw5ngGHlb5Na7tUVYV0pCIb92pOXBusf6UxdQEN8L6MJcfV9y4oSd9mPV5/5UVi70pO7v6e8nS5Wi/YusRHbDkxYSjOw7D9Q0GY93CLesJux/Z4GMf/gC/+ulfB2BxInEFfO13vwjA8596jqYYMYx97hT6WkrhapibU6AMqR7hE0XQM7CWxTTh4O6c8WDEaGjMQIs5YRhhNy6nh6a1bikGY4t7B7f195ZzpJDYVoQ0cLRKVUi3xrLoSJJBEJKWFUHP8C3qAmHbRD3JzZv6WFJGxL2c0+kEpQxEza3Ii4LaCDAcLWdESR8ha2xjqFulFatTl6rM8IMzPHeTLwkMiXm8scGFKxsMn7nMYm44F45DU+esjcYd0bfIU6rG6V7qctWw4wc8/fHvJh/o5/flz97h9g2Hi4+eIzJt61mWc+Gyz9vX9PuxHfgkOdjVGD/Wn0VyimNBGkf4htd25+1D/srP/Dg//GM/AMDP/dR/xkc/VLO2OOablZ4Lu7u77L+2T2+8jtjW53V6NyEIFdNYn/ebd69z4dJjLA5zatfwMEbrpCdTNv0x0nCzaqQOcNuAtqqxXJuirvHbpUcKHMAxcyVLJZGyWQ8U/mhHz89VCraGwbTO9bYVUOR5pyiarSQf/sgavaHHb//27wJw7do1Dg9PdVAWGk+3ZQ6WS1mY97FWeKEgqaEX63etFhGOC55jU9atv5I2ET5TULPNBm91Xli9XkngeSag1N9LkoQwjMnLs3mgaolQredIq2gGWklJX09dF9j3mQ8LYdPv9WhUjYXx+qtryrIiCM428fXxmDLPaYMdx3M5OjmmKBqmc8MfmU6MCIeNMte3uTXi0qPnOT3V/z46OiKMXPq9MTODYbedkqaGQX+Nc+c072s6XZDnOdvb2931rq2t6Y3t8Mjclx5KaEGTNgl77PGLLJZLVgZSKX1JkZd4novjtGIZFgqJZcsuKBoMhgir7iDgtmPjBQENDdeMEuEn/8yLKNXwxc9/lTu3dEBrC8nG1iZ39vU5TRZL6qLklW+9zOVH9LU4rk7gWw+vdFHgxw7j4aATO2kKQUrKYDDq5kKWZcxOJ/T6BrZXlJqkXbtdNaCpFVWp4T8tH7ZpSiQNhQma4l5IvkyIPJt3vvUGAH4guHh+g3Q/Z4GGJruWJIpKWj2UqilxhKIRDSenuoigBEShjxA2cayTR6kE02xOZoR3pNTzuIV/gQ6q4zjuCPjt/HQch9Sofrq2jevYoGzsVjoP1QXt7Zzt9/uURUWanSXerutquJJZA0ejEdPplKtPPMGGkUP8zd/8TVBOJxTRKFBomFXLU2z5V7YRjAFYLhL8+6BYaZp2fnRt8JokOb50CQK7g6iNxxvMZgvW1vX7v7E55r333idfObz+Le0RaNFw+9qU/nDEpcd08JbUCzzfYXGy6M6pETV7t0/pG+i+Y0Wslhm9nsRvvecKiWU5XcBu24o0LfT89/U5KBmgrBV3bpv1x+5RVRkIu0swLZNkKXUGURXC1gG8gY61xY6qKbtg0bZt+lFMWuS0+Z1lC4oy72CXRZGZZ6k6I+yqEh0HS6rCPHWJ650Foe37HffCThClqirqRiEsp1OAfeGFF3jjzde78w5Cj0uP9Xj9tRVNrQs3TWMRRGOqrMEycGWpMuo6PIPRKS1goKRkOtMx3XDch1rDSAPD26nqkjQ9g4dqbmjNnTu3sM3+6Po7yMbFsiWWYzYWe4XrVFSpKWgpiVKa79QK37huRVnWNKrhfgjldDrtBEP6/T6O43TX2w4hxAMKglK2yWH7rAR1XWBZZzwuzY9zzTM+gw3med7Be6fTKYPBiNFoDc9A8EejNdJ0xWg0QJg5tFjOkFbFxpqe09PJAiFc8lR1fqpVmSIsj57h3lJP6G/5LGZTFuY2+QQIp6bMbKLA7H1ZhqBmbUevW0l9xKVHBxxdF0gD019zAuqmwQ4EraJEOp0SWlCv9HNJCSn8hr3bS6ShmjgypVrC9dX7nNvWxU3Lbigri/nUUAwcQa8XkzsFheGd2rYAy5g
nt3zfWs/xMm3zCYGeXh6FSaAsF+LYJ69ylNKfRVGfhAef5wPP9o/DuXrgQEL8t8AK+Bng+5RS94QQu8DnlVIf+KO+29tYV1c/+VFAb1THt5bk2SkjzyxaxRK3Ccgq1amMrW/5OF5BlpgFq5QoSlB2y+LEUhJLBNRqycywwv7iT2zwn/7PFlmhk59VJhmtR/y7f3nOyUIfazh02Xt/zvqORJrFvFAJ48UGzz33EQC+fXCLNJ3iuDNagkWz7OH2N3jrvUMujfSL7u+4rBYFTovTjGxCCdVsi2yl7/08nbDzWIwTJjR5K1niI0KJbYj+IrbwbQg8j733TBIoJEEAs9MSIwREo0Y0RdIptDkuJEc19YkN9X1ynZ7DY8/qSfnY05JvvwTXXp/y7Iv6RRheXRH0Nnmi/h5+4x/+KgCXnrrC6zf2Oyx16ISQ2ljDeac+c2F3m+UyZZEsUUIfv5EZrkdHGK6LkkG8wWw26zDmiAarAUvogBwgzwSuK8jz1qXexvUsmqa6T1bWBZERRl6nKDZfFPj3E5Ytm0Z4SMWZ6bQryIs5NAGi1hW9nUs+tqi5fd1gjYVLL14nqabUyrycTUhT23hejmUSAttzCQeKnsHsnxwt+eT3/gibgyeQnu483Ll5zEtf/XXCYNRt6mHfJy8qgljfg3Sy5NzGFiuv4Onv0wv3+18/ZjjqcbRo2FjXycb07tv0RiGT2/r3z21usnd8CxoHYZQMPvAdFkd7FpMbc3KzYfpuydXLT3HrLb0BPhPu8eRzBYvE45980/DchoLNccX43Ee4fV0Tfbd7NnvJKZkhxlaFxm37IiAzSkvC6yHlisdHl9k/MkWDWlJJRdPt2JI0z3F9DxNbIYWHVWWI1tzRtXHqhhc/epHXr+mkJZ2uEE5NVSrWNvRE7wXbzJf7BKFRC8wLHCvmeLLAM4IaH33+Q8i64RvffIXYVMGSVcFwOKK/pv99cHDKcDDEImY21dcbBzvkxQolC4JIz43FYmGq30ZByfdZLBadvC7AsD9glSZU93EbLHSluE2gi6oi9nR113bOjoX5u3Y91pVo2c1p4bh4nkPTnBnxBkFIkmQM+1EX1Oxu7zCbzTtxjtl8jue7pHneGRlHYZ+0SLFF3W3YYeDy2ONP8LWva8U9x8toGkGWll3RQghdJS/Lsqtwaxl59cA1tHyNtpquifMCaIhigzSoc10ZN8W/LKt4/PLj7N/bx7Zbc06J7SgcV7Fa6jVgd2cHNyg6XkZd11S1Q1lquef2WVki0IFLaBKLutAdP1MAaWRBVeQ0heTjL2ixk/2DO5yczHBsXWgYDEOE7dDvDTg+1UnaZJpTyULzqoyqYZ7noKxOXlwHoC6rZNYl0GEQU9eNSYhb3omH7/tkZs13REngDyiytOt0Oo7Hh5/d4Nuv3mOeGqEmIYlChbBahbEhdV3rrow643OMRiPSVUbTWlIM+symC2wzh7MsY3NrjTRNHyDil6UOxNvOVdNoPpXnthLLNU1ZEYYxtanC+4HXcVba5MZ1vM58u50bCEUQBIyGxrg9zRkOh6YbqAtfmtRvI03Hq1YSpSQWZxYASjXY93G79O+FmtNpuBpN03QGwWf30wHLpt+PGQ71cz4+nXJ6MuX7f0B3zrZ3NvjsZz/Lv/nTP86v/KPfAuDwzjGjwYDVatZxylw7oBd7lMZNfjW32L0UM51Ou/sZ+EMcZ06a0BH0PTeiyOuug+FY2p7i3PnznXXHH3z9ZYJIdPMVHDzPoqk0x1B/z6VROU2j7jOKPuNCgV5HVqsFURx2nauyLKmNRH1zH4dUc+H0utgmplEUdetNVdVYlk4GRoYX3YpXeEbRt660SIbr2V1Cu765yXQ6xbIcXdxC8/WuXLnCtZu6IHJ6ekoUuFT3cZmxtehMUWa4RmjDUjarbMnAFMKEspBNpbuh3pnlQFXVKKG6a26VD+83k1bG5Lrf18lcWSXYlkdZ1kSGx5NkJ2xux8yO9fxJiiWe5zEajbpEqWkaqkqrZ7ZrnhB2x6kCjTzI89zI4OtnpdcN0Z2bfl4Brut2IhS6OBLjOFb3WXsd93Mby7I2aoSh+RtAWYRh3CWiSbLEdgRNU/GBD2v0ShT6vPHGq908qEq9VrfJGsBg0yZZ1Qxj/cw3d3xOvJL6nkROTBLoRKzqOb0wwLH18yvzhLoaEpm9tn/eorAX7L8iCC39N+N+TFFXVFWBa4QwXBHihDYYRd/SscjtKWubF5jc04F8UyY0hd5HR+uGD1tBllcdms1xHPxAEMaCutH37mi/MkUD6PfCbi4kad49u6Ko8AIby1bdvatriTTFjF7f2DpZNafvln/ynCshRCyE6Lf/DfwQ8AbwGeCnzZ/9NPBr/7K/8XA8HA/Hw/FwPBwPx8PxcDwcD8fD8f+V8ceBBW4D/9hUpRzgl5RSvyWE+DrwaSHEvwPcAn7i/+5AVZaytWbU9OYNSb5gtB0QmcrV8b0lRVNQ1yClwSCnOaPQwbLO+AfCYHtbyAiVUZPy/c4f63NfOOHfnj9OluvOQOzdRVVrpMkJtvFX2rujeOr5jzJdvMny1MjIhhLlVjz/g7oi9PYvVZzOCvprJVWiM+Cnrz7Kwe2GgYwQQnc/mrzA921WpqppNTWeiNk6P2Tvpu5qVLOIO9csLl52CY1Md7508e2CwvBqlicNF3d75EXD4lBn6kd7cz7ywhab50tWBmu7qmY00kdhWt2BTX/N4eTAJTAeIR6Q5RbvfVV3Bvx6nXKSMvAjXvuCPqedPZeN4YKJ/WVmtq4yXnDX+NR3rvHF39MGl0fTDK9fs26HlIa/tVrAcp5RFi7K1tWQMOgxcvuUrV9OvcCxKsKgoa50F6ztakVRg2cq11UJVS1Qlj6ObUmi3oCqsjg9Np5EUUB/ELNcpigDlxoMtiibAmWqI8L2qfMFrh+gDKQya2o2Ni+RnCa45h7fu3uI43n0xvp6++shy5OcAI/USHK6wibouVSNgzLqa8lyQZ44PPqdj+pjy4zf+dxvsTP4Mj/zH/xHAFy/dhc3UKR5wvaW7g6WZFSKTqVm3Hc4uDelsAu+8I/1ffnYC49jqQmLNybM9jSsa7S7wcGNjF5fc75+/NmEX/6NhIPwCYJeC6FcZ7I4oXEaht7YPJslNw+uE182nd1eyEtvKt68mTIyHaDZYc1BFbBq7jBa15WqZLEAGTM2qliTLAEjqzyKtQn0vDjl6ec/wTuvv4JrOseVrfA9p6vilpT0Yu3H44cGQiUKsBS1qSyvuS4btuLicsVM6M8moctcWkj7jK9ydHzK+mbQ+as0UmHZLps7Gzjo9/H4aMJoNNJVQ2Pq6bqeNjVemkq27WHLmJ2dC8yMv8pyNWVtbUSS5CwWep6NRmusVisiwxlqJYeVUt315a6WL87zsyqY43kMvIBVZsw60ZK2wjnrsOdl69VyVtXUKlLFfTLTGTUOwnbvq1gqzRPIkk6G+M448unyAAAgAElEQVSdPeI45uMf16a3X3rppa6CH7XdrOUU29HKoJ7xtTo9ybhz+w/YvaTXRcuKubu34PErj9Lv6/v52i
s38LwAqQqGhie0WJ7gul4HR2k7Ba7rPuAJpqWDK1r6TxhqSWLb3IMgsNnc2mIym5AbCEe/H2vp9bIhMhXok9MjhqOg66aVZU5ZSgb9NdJk0d27um746Mee7eDSb73+GpfPnefgzqmZLwWubyNswf7RXndOrpNimuIcHR0RR2vMJjMU+vr6cURWCpqqIZf6M21KWXcdmktGhj4apLiuPu+De1NUbaEage/rOZRnJb14RCX0+pZlqfYDwu2gWHE05Pv+7J/j7sFnqE/02pylDZYV0u8brlayIu5pA9YyN7AkV0ta9/oDTk/1Na+WukN1vwFrmqa602ZsHdouRV3XSNPyDuOIIq+6yrll2cSDIRvjNe7d05YRi/kSz3cfkO6WjX432u6B53mUle50ZLn+vV6vh5Rw584+jmO4GRLK8gyd0Bh/LOcBSfUcZeZZez1N3RiolD7v8XjE+fO7vPPOOw9wh4RtkSRZB+uKQos0FnzxC18BNG+7KFPu3bM4d0Gv1Ye3D5gvE5559iNgL818ybh765iTe/pZjTbXwK65cPkSi7mGZy4nkrKwCDwL2ZrqioRa1p0pK5aD4wiS1ZJX9vUa5No2gTemMs7KSkrKQiCcrLM4aaiJov4DHRloPZX0by2XunOt+Ub6vP0wBMPjUeZ7roFPtsiOKNZdQMsW9I08fJGX3ZrXQk3zvMC2nc5CQimBF4ZIWeKZTl2aF6R5SRha9EzHcraY8/71a/T7utuclyV1auE4NRcu689WaUKa5Iz6m6SZ8aKy67M1HEDpLgR2hdVC95uKKOpTNfV9xu+68+lYLRRbUdQFnhdQN/q+lIUAVVE1c0LjvyVrl+W8JOyZrrHbo9fvM51Ou86O5vR59Pt9jo6OzPGtbt6DhkuWRfXAGu+67pkKonUmsz4YDFgbjc1xNCTa9/3u91rjb8dxundNqawzeAbd/dU2Bw0rY7jeviuz6YLXvqU7hqPxgEfOXWX/rkab2JTUTetBqN8/f2jzoadtMmOjkdMjW83ZuOJjHel7Pr9WMnIc6ryhaP1F+x75aoUXa8RNtlhyclKysfUIl5/UEOCj2R7NskCcCgoDSfe3fZxtl9mpQQupimbqsL9/0iluCi/Qculj/z74soWsFdLEfY4rcdyAqqrJjH1R1AtBah5ont1nLC0FeWrsWfoxUpTYtnNmQo1gOByzWs1JzV7un/mO/6HjXzq5UkpdB575Qz4/BT71/+RYcc/jkScfBeD4xhz/e3wGj8Xc/rrmOzU3DxF9myYvO3yvbv02SBOwC6GhBGVVgFm0bGVjuzl1bWEZed+hE3GuJ3h37yYAOy/A5LRkfir51F/QXlveWknS5HjXX+DwPc19ufpsTJ0X/No/1RhsKWIuXnZIFj2Uo1/8u4dz/q2f+BHeenWKFeuJeOvWS9w43SOOzMtZw/V3Gu7mh+DoSb9cFmxe7nN0JNg1ppfSm2I5Md5KT9SRmzLZm9FYHhcf17/n9n1u7824/FRDb2R8WPKAXs/CwNwJ/CGDSJG+l+Aaw9W0rrF8iWdEDG6+OkOJPo2zwjVk8uR6QLa24sM7HpHB6P/YX/oJXn7jyzx25VkADr76FYRyyVYlVa3/5vTExrHAthTY5kVXDX2/pmj9UZsGoXL6fRc/0IvtfKJIVwmOsKjq1rDPQaoVrmvkRmvFxsYGR8f32D7XGs6VCMsB4VAY+KDvAbKkMYRvT3hsre9Q1Euk0/pcrLFaauhSnuskMwq2qJRO/gDOn1/D9yzm92bYyuAu64pSFdSl5PmnNJzo0qVLXLv3Bleu6ETjOz7xAq++/A7F4YC3vn1dP3fXYm1jE0uG7N8z8LNeSOja5Ab6lSsbO1wy8kZsPKo3/lu330VUfaiLTl57NZsQEWOoIeT7ehFOqnuMI7OQZSvWNgfUj9jMbxpj4aYhiPoUrr4v7zoZwa7Fpc3z3HzHGOJZHs0qA9vjaJGb51VSr05ZmcRCuDaVUkxWCY7BJG9tD9l7+xbVMicwYg6D7Q0ObtxiaDbntO+jKolYSa1qAgz8CCuCk7ne4KpVTrjeY6XmCK2Gy8HtgkjECCtlOjO8Dyvg4F7ezfNzl30q5sCQRaoLG6s05ubNPQZDH7c2kNSmxHIcPGGETfKCZZ2Ql292iYxSJWk+RSpBVZmChOdpwYpDPVeWixVxHOP7Z9ATWVfky1z7p3RkWEGe513Qats2tut0EKr2M200LM+EMuoSzwsQovV80ka0TSU7DL1qGhzHQlqamwQQhCHzRcJXvvo1AP7W3/qv+Ae/+A9YvPUmdUvoEjVKWQh8losWfuIwHPU6aM1qkuM5is3xFidTndQ3KifPE+raRikjNtLXctUtt6ATT4COcxEEPgipYbrGXDlZ6SDNN7DrLM155513NCem5aY0K8IwJF2Wncm17cDkpODceT2nAl8gm4KmyhCNWWAaQV0VvPzNbxKY5GYYhNy5c5tnntOG4cNhn9/9nc+xMR7SayFARUqjwDVEFMuGND/GdV2S1IhHBAWXL13krbfe7sj5UtT0h30aIwCzt7enfX42Rygz787vPsbJ4YmR/DX+eLVgMj2hJb54Dlh2jGW7nWhBmi346pff4/B4jlXqYw1iB9sKOsljKVc6sbMcHOMR2B8MODo6od8f0BvqYHUymzAY9zUXGZhP5liWxWqxPEtkmgZhWwRBhHUfhCqO4y6IaRqpLTHSpEtabNvG9wKK8gxi2Epd3++94zgtUVyf58bGFrdu3SKO406Wuw3gZWt+bOZW05z5Kdm2luOWUp75+NhBdx2gA9o47lPXshOmkVJqqKtn8d5718zxR1CHhL5+xus7IU3j8U9/+R/im0Dx8UfWuH1vighsKgNXOioFP/rX/gpHt++ZY3sMhj6//+Xfw1ie4QUentMjX0n6fX38VTohCHxKs18J5RGGI5J02SWYwrI1/N3A4bzApa4s8nLK5rYOTIukxHEjpJSEkf47IQTLe/vta4btOWRlieM7NIZjUjclZVUSOiFStYFpbQxdTUDt+1RV9UAy7noOCm16fXig19gwDBkOh5zO9L9VXQOSuqk68+rlqsAxsvi5gRO3BYaDA70X1hqjB43dFWA8T0tkJ/m8Ey3peTG5XNGurgroj2PcQHTS/Y5tUcuKIAg7PqyFot/vdyIiZZEjVYPnW0QGOqzkhKbyuHjxKqM1wxd3pxTlihvv6lgm9HosZssu3tRz0WU4HDGbzej3NdRUKUFq/LAA+r0B8XbMfD7/F+B9rW8V6HdkcnqMMFc4HA7xPEcbmneQw8aYgctuX2lHK9zSNFq6vaoLaiOhnuUJjz92laZpOtPpk6M5dTG7z5BcwyLn05zRWK9vJ/cyDvdqNi7rmGT3g0MqNSNHEo9NTODcJQgH5HZGbuJMrx+yfbFmfqQLBrFf8ui5C8zLhuvXtDyDrHMEmhuK33L0Cpp5RmMS6Mk8B+HjuDmNaaZsrjkM10OyRDKdGDEQt0fl0AmNlFVKepwSBSFVYbxoN0MmJxOKvOlsB1ojalNfIlkleJGFkpzZ5DgOtpA4lgTD+7KlC5yZj
//z448raPEnMhQO197WFbC6sHB3H+HwJGWV6k3PEg1e01Ded7plWWNbPkHYdjkksoFGNp0Ske0I6trBsnQgC7CYFyxnDd/7/YYsT8X+bYvVQvG139M38uf/u5/k07/z93n19W/zhCGvZpMcd73gcKIXscXpiu998XlmxzP2j74FwDK/zee/9iqPn3uG2je/d/waru1gm05Scttj1JQUtkNllHceecLn3JUer3wj7QK1NJ9QphZPfad+8V/5dknVuMxmWev9y/YTDjtKUdQZrR1XPvfpj61ukQ7igDoVFMyRhoNUOS62qjSmG0CEVEWJVUtCMzEbu8JqamazGZalJ9Df/dt/hwtXPsC1ff1irG+HpJOC0rYRwmDDnYxnnnuE2++DE+jFbm2Y8PxH1vn6u/rf81ObSnlYTcbcBG5CRaAaVC3ABBYCB6kcjBUPvu+zymYorM7MGUeZTd8y/h6QLFf0h25XYaizgpOiYDSOSRJ9D0YXLNwCjk5nhJFRyulJFmmJY8QOrr12C6caEPd9Vkrf4NhzEI3AtWuefErzBIXr8+zWRX7/C18C4KPf+QQHey7X3/s6p6aa3uu7TCcJvZ7TcV+aQiGaFFHrez7eHDE5TCG2mZiFpTgK6PmCOD6PK1p+4RR6knyiX/IvbQ2YnrcJFgskGrN/tF9iJTOSXo+rT+pFsYpPKKyE6bEOYs5VI4KtPrvrl7i3rzeh6qBmtL3GfDHDyvXCafdsrF6PftCpLaAIUP2AJtZV8cNZQuT5+FjMTPXs6PacngW2qYYOt8Ys5ysGXkhqArBGZrhOyNq63pRO7055ZTJnvw5ITTAXihFKafPV1psG5SAbh41tfY6PP1mSZSPe+Ibk49+teZG3b+7juT62I8hWdvc9JWtmM50c2F6OUi79QUxpkmpkwHLm4XgpvUgn1afHp1R1zfPPPwdovkqWZezf3euCZQuJF3ra08pUSJtKk5wDE9A2qM6Vvt3QABM0qo5H0ppEDkdt9bNBSihy2TnJSyVNUePMeLNWkstXHuNXfuVXAPjMZz7Dy998hfH6sAssrNKjqRtWC2gq4xs2FEhKTg5NglnlPHLxMq+//m0qU6SIY58sT6jKM8f7oii6TR8wClkFlmV31XMppUnoFFK1yphKB3KGk+T7PrP5FIsQhX7uw2EPQcjp4THCadW6JLIOOT7Ua8louEYcWhRFiaz0exQHMa69NJwHEzxWJeuDiNN93aXyrXNcurTFvXunJKVOPlzPoixkx41xvAw3KKnKGtkYvsHmNnf39xkN+1213nG0KbRozXN9B8uGxTxFodeNZ575CKfH93Bs0ZncBpZOsqOBnvsyy5CiJssWDIx5dZInfOmffYUnroa885Y+1tZuwGq5YmYS4w9/5FEc2+P1N64T9vTzm8+nXWW85WF+7PnniGOfvb275hnUTE806b5V+ptOp3hBiOd5JOY9bpRkY2ODxhQaDg+PtPJZfUbgL0vt+7O2ttaZaldVhbLPTH5t28b1nAfMhN9559tEUfRAMuU4Ooi/v5ovZf1Ad7DtjlZl2RmECk93SO/3ufriF79IGIbU5m8sy8L3tOjDtklSHMdiMslow6HjuzPCyKO3YyHN5jNbgRfEvP3OG2xe0Bvw7Rtz/re3Pselq/reLVcTlumcrfMCYZIy26soVhV5taA+0e9yf9gnWeVd99V1fKS08Hybwpi7nts9z6XLm9w1Cq3HpwdUdUZduiznBsmBx/bOOkEQdElEXqSMRiMmE70u+4GLH9iGr2l4NZUxOxfizDMpDJDVmeBD29Fsu5vAfV1JRV3quR+vj8izs2fgeT5FkeCFDrlRqXWEjx/YFEWGa2Izx3HI04ymafl5NnlTsbU76lQiy9yhyFIkKaLt8ucK29ECFnoeOCT5kgu7u/TX9HM42j9BVpbmtbZFLtkgkCiDhgo8v+PiWbV+t30rIhMZs/kxN27q57BzfoePvfAC8/mbABzcNJ3vpqE36Jtzyjk9nbK+vs7hoe4uh0HEE0880XWykkR3nxzH+Rf8qtp7C3oddGy743MdnxxSVTVSWV3hqi3e3c/pMhdJYwrTnq8FLx4wq0eyt38bIQRReMbNWyxmnYLo4b0jgsBD2LC3pxNfJxQMw032XtPXktcNg76DKtMuuVu7MmL/bsmVpxXH+hYwnc7xg41OhXfnsUscHxSU1SnSQFCefeZF5tMly+MjMst0EGUDCC49pr0o33lzD2FpwZCybL32YhzHJl2lRKGOb5aLBM8LOgXUqpL0ejFKSnoDfb1B4GA5sHtuk7rQc+H09BSpJK7Zs3sDn7qygRJE60lqsZiVyEacNXjKM271Hzb+xAQt/jgj3hipP/fv/WsAvPW1faQL2WRObVxZxfwQS8xIcp08gK5ObJ9zUWbzzHOHui5RVoUn9EuWNRVWXSNkRe3pJGU6z/jZ/9DnP/6vz+lj+zkvf3XMz/30IZkJNB597DKf/OHLvL//NWyhF+Aqm3DrtVtI0xlYnJyys9knjLawau2OPqlPUckaJB7X3zSVHR8eveyQmi7RfClw6gypJIGR1hXumLVdnyTz8AI9WQdjm71bt3nqu/XG721UXHtjyew0ojBJw6AHDAZkRc7ALLjC71P6CYERtMgLh8P9BvtUdBVSGokSHk5LDjbVcb90qE1r1B33+cEf+CR3Xv4NHFPZ/drtCFnMMfs+TaI3qUbm1KZdu7YeM/Jc3nt3yZMv6Ht19YlNvvn5V0kKo/Ti6O/VVUVmoHUKsC1fK0q16kNWTVkrWg/IIAiwLCjzJYKWXF08sOnCmYN5W8XJ8xxcQRT1OuGNNC+xXQc3DDoFo+3dXSaTiVFbA6H0olfXdVcRtSyLSxcusneyj2egjE8+/RzffOk1Ll3RL/nB/ASRpFzYGjNy9Dm4W0/y1sEriGXNwARAp8uKOqkI24qNqklKi8HukGSqYYGXL18lCBzuvbvPnakOWnY3N8mrlP6oTcSXbOwI9t53qLPWYNDi0e+6wvvfutZ1LKOxT7poWJzoRfLq05ssZMWIISf39ObcpJIqk0i8TpWqKBMC18Xx9LFnUjB0IoIYJnM9F1XtEsbg1ZK5mUPrcYgoi04mvFCKZZZjo+iFpqfuVjS1IA6NAeR0qp+3grCFDlYVdQWOV2Ebxcl0JRGly+NP62Ty0o7H5auP8At/97N4vpnX0sa1PPIiwTGbsxIQRlZnXq3IUZR88FmL998wCkpTQdzzAIdSmtaYEhRpxQvfoU2hpXK5s3eTO3evdVXjJi+wXA3BUS2d1SiytVDJNElwXBdFSWUkv9fW1mlkyunpoiNqF/mSqoaRgai2Kmx1RSdt3QafpUyxWoiRcJGNze6ONii/desOjl3j+jWqVX+qCwLXIV1VPPusJjbv7d3l9HTSCWGIQDLs7zCf3+tgSMukJIwc6kp2ATuipixkt5YAyMbI5puPHMdB1nSJDuguXFu1ba+vrhs8XzAw0vaykvT7fabTeVd0ao0t2/e913eJejZV2dAYJdk8U9i2IXwbZENWpNhWyPqGvr71bcX81GMymXQE5dFo
xI1rp50EsRKSSqXI2tWCGMB4sM3xdMmL3/0RXvmGDriqZoljj9g+p6E8+3cmWobcLrsiV1loiJIbnEF50iSnrhtc0Zpe29iOoqkFddHCQRWVKPjodzzFrXfeBcD1Bww3NrhzTydJvu8znZywNR5TmT2zqBVW4yKkizIwa+lJprMZnkkiQjtgkawIQpeB6S5rJT8fsDpRhEbm2LZLXZsigqnmtgUA0EaxlmXRH5wZlCZJRl2ddUOwVEfEb5O53Jix+r7fBZSWpQ2lI1NIkaKiyCtTVW/FXCJsYegAtB1Zsx+1tgeSbu1plV3TPKGuJGGw1r0z0+QQFMS+PqcwVhR5TiVtctOZH/RHNI0iyZZ4pig5HMUI26Iy0NOd7R5vvzNFWRG75w1EPaooMsHhpCKg7TzAcp7y9LOPA3pOH95b4vteB+G6euVDLBc1H3tec+W/8NLvcnJ6RBRb9Aemc5W60ARcvjLm1nXdhZtMJviBh2gh8S6oOsRyKizRdid9sBIs7O5+DnrblPUSWd0frPOAoa2Wv7d0MaluSf0VjTyb57br6fltlzRGzTlPapAxtpt3qrtpWmF7ujugvyhxxMg8RqO+XFZUlY2wGuqmMM9hjKwEi6WhNLgWluOhsOkbkY26TEjnNl4fCnM/RQM2eq3QvyGRjYXlCEQ7p7yLLPMj6tqhMtY9YbjOBz+8w+OPak22W++/zbfffp+qjKilTgZkU7K5fpGiKqlNJ/Cxi1d5+903cdx2fVPkWYHrup0xbV2XWOj1oH2PhqOA1TLHN/vjcqFtA37yp/4Nblx/C4Df+c2vEvdCbCvA8/VaWTS6e9oUZ8+q7V7dX2woy1LHRwY5UlUuluVgh/p6L13eYnYkdWJbmCJJLXBFjmcsK/rjHYI1h+iC170LanaX5bRBVT5Jowvm4UhQTBV2K2sfxNx+75jxZsD4nImnmpBiActFwdysXX6ksMqAXqjX0zt3boGscW1HG0OjLQcsy0JK1cWv4/GQopp3oh5VCatVRhRFCNNt2rywxunkkMuPPMFkoqGQRV6xWiQ0VSseU1LmAsv2sBwj2ONFVI3EdiS1kbGXqqA4av5fNRF+OB6Oh+PheDgejofj4Xg4Ho6H4+H4/934V6Jz1d8aqY2ndGVgfpQy2rax3IjFvuZchRTIbB/ZOCSLtqJXMl53u0prnlXkuSaUex0+36dIa3zbITdV4slswXd8bMgv/qZpv4Qxf/Aln3//p28wNl4ts3nDD/7wd2LZp7z0hdcAeOyDL7D38pfZWdNEECFs9idz7hR3CTxdcRmFKbZfMXQCCtOluXe3ZBxKPKXbzEniUJQKwZn/yHyWM94I8CKP+UpXykbbitEWZInxbrjUkB7FyEBwZ994QywtpANiXRHm+jkOBttkdk6p9PdsG+qTdZbvrajb7o4NddV0HiWWLUE5WJVN1Wr9u4qf++v/Oi998Uukx/pYN6ce47VeVxl459ZNhj2fuC9YmRZpmf+f7L1ZrCfZfd/3OVWn9v96t957unt6mjPDITlDihQ3kRK1y7YcWQKsKE+xYQl2bCOwDQSRHMV5SPLgGEESZAGcBLEUB7EDMpIg0VooihyS4jYznH3rmd7vvvzX2qvOycM5/7rDwPaTH/jQBfRDN27/b/3P+lu+iybxJVXl0UrbVcxK3nf5UVzLpbh//z5KNTiO0+HzwQFtKisrA2TP64NTUlTKfrcBjtuymBSEkXn3dGnEA96LDQcjp7mSh/Y8j1YLijrvujHJIAFHUNS6k9hUrbBdDtu2LwwZu21bXMtFEUpRVxWR9MksEb0fj0mlRgrLq9EpQXiG4fyYz10043ln/BO88u6rBI4g823Vtj1h4AzQFs6kZocEUcJhNiWxhtYXNy/z2mu3EU5JPLA/V7cUpcuTHzZwoqs3tvjON7aZHqY4lmwdB32U59FWEVVm1kvs9vnQhx/hkz9pKqZf/JNnOT4+ZO9+hbRtBp1p5iclaxvrhNaUeb5IybOG2JplVijGW4rB1ojtt41EtSRAjjZwqpKlrUB5riAMJIFdd9PDSQd7Wxn9LSrjfeSuOi9Niy9dUHXH41lkOfMMelIQ2Er0SVniJ5KwNvt40U64dH3EbK+mrCwctHFQygEMbBTAkyFNW3YVRd/3QXt87ic+wbN/9iIAWT5jbcPnYC/Dc031TPoZStfkqfl/1x59hOPjCYfHx/TsOEVej8l8Rq+XdDh342PiUVvvvaatQPs4jiIvT6E0fqANSdsaoAe+s6LY2e9S2U6B6iTAw9Any5Y0tYvG4s4jSZHXOBZA7nmCtm3RrUNrDcnRPo4OoVVIu9f8QNLv99nbNWfuYDQgXaQE0iOzYiBhkBBHEXm+pFWWB6mMYafvrrqcRQd5XHE8hK2Qv5cP47qrqrn9nLomSfpcvXq5gzgdHR2xubnJyckJ5+zZ7Ps+N2/e7PhO47UYzze8m9KSlrUyBHvpOd3vzNKKsloShWbPxH3JxvgcVb1kPre2GXHF/rbCt53zPM9xZYLrgxAGMipFTFFqfDcGu/8nxzV/5+/+PfYO3gLgd3/3D0h6DiA73omUZu6EcNHv8cJpVd15jSld4/sSpQRNuTKKDTg+mfELv/yzLOZmbr76pec4s+V358Zs0oKsGY3XsRQd0qpAFdBUp/C6BoV2FHLVwShrRmsjZtNlBznqD2LW14ccHBx0XUXH1Wglu64/2nQwPF9y5YrpkL755pt4nsdomHRnetuae64zFXZNRyoMw++DRrmu930dErMuVDd3SjVW2j/sfOA8z0M1hhO06krVVUvb6vd0xQorJV5TWQiWEII4MTYuo3UzLoupIE9lx+1NswUbm0MaL2NpTdmrQlDmxuS7nxgIlSNdEA0Bdk2dWfD4jWd45fk3qCxP+XiS0hsIFpOsi0vAeCRdvGhiiXdvvYPQIcLRHbyuqlocp0cwtl2VQUNRehTLlrWR+X6HuxM8xsYrbGa6mI4w9h6OXHlFOiR9A7WfT0/3YxBoytxYBph/bKBVHcdk5Q+mtejmIc9zpJSEYUiRWcho29Lv9ztBrbZt6fVialWztjay8wf7+3tIxwgKgUGpSD/shLdcL0S4KQKfo0Nz3jhaGuEGL+q6qL1+SJMvqFt7F+kaLXKogRXPLXaoUTiNz4XzbvcO87RBWV6kROPXgmYOdWiRHb0+XgyT2YyVR0Tg9fD7mp0D8/0+9aGnGa9t8LWvv4DrmnsmXzacPbtFPHBQtit86+07eL7ohLoCv0eWT3ji/ZfYt/5/y6Vk0DP7ZbX2srmDFvmp7UFT8vQHP4Hnh8jA7NGv/NlXiXsug/4aQtv7L5tSlYK2OfVcE0J01hiruVmJYLgWxZTlDWVR8+kf+4idh4rbbx/gkJBltnNVgdIu0llZ4kjGVz3OPzlmZo23US21KCiWDZtnjAiMdCOOt+91xuatCgj9mqPbLVltxvPK9XUe+5Dk23/+ALcyZ3xZzKgWLX7PjMFonHB0uGctPlbdc4nSDUJ29lj4vkTgddwp6bW0jSTPMx65Zm0X9g2qqSiyroPY1pILlzbY37Owx1z
jyoI4ijvvuzu39/H8CK1bhtZ/U+mao3d2/42dqx8IzlWbNWxsrtqgO7iLIbNsSt+6ch/M5px3A5Z1RWAJu7UD0g2obeAtHGW8khzZtboDHBCatJrCKods4dxlxSA2/29BSZ4revGZlXgXjsz50z96lnPnIhBmYcwnX6d/rs9n/6ohRH/1i3u0h+q/rSkAACAASURBVDtcDLbwEzObH7v6Ee5mb7C3uM+0NAenco1BqpY2YYgcXO0yT5cIy51ynRpd99i9M2GZm58L3LOsjeHyDavC5ftM9xbUhWLznPmPeZXhiIBZVnLuvBmrH/2Ji/zeF+7iWpjeYCiZnRxT5jH+wIpVlBWhcHEtTMcTAtVqFmVFaxfmlQtbvPTKt7h985DP/txfBOAXHnucw6MdPv97XwNAi4RWLVEqpljYgLYVqMojikJc35ItlSSvPSZH293PjAYGm7+aFn8FJ+AUi7zCfV++ZtX1qoq7d45wBQThiuRsDhFXmj+wggXStdp9P6TKM6TndupyAMvlEsf1upZxXdc4OJ1wRBTE1uFdoy0/znVcQj9k3pZdu1u0S+pyyZXLJmk5e/YCz97cpsWBzcfM+qmnzOcNmwNYHhreR57XEC8JhAngHR2QtRmJn3TwjDv332RtzeNolrO0gf3axhCnaji05pXLzCGKexyIHUSzUmgqSISk9E+Qlm947vGAwdUzfO0FgxvfmxxQTDTj8xLHwki9ps/m2T7v3ryFszTvEIcuF86GlK1J0hY7OWPncQZews3aQGKTMGS6e5/RcNh5+yzrGm8QcWghjnEQIBUgXQ5sQNu0PgqBtIRz3/HxVYNDS5UZeIFEEvoSz3UprGtw5MfUbcvP/LgRFfm9L73A/ZtTQk/iWuhAqwWuI+E9gVsQ+njqNKn3vQR0wDe++hYI82+OA3u7c6IY1jfMRT85dPBcj3Bszp/JZJeyFAx6IcpCZCpd4WhMkGSTKSE07Yo0iE30G41wNL48Vf1L05zQC0k7/yjPmkWa9WpiUQeoOh8fKSFOQuqyIs/MOyxOKsZrA1aM77JoSOIBi+WUniVrlnVNXc+Je1E3LkXasDbepGcJ2ek0Q1PRCp8oMsGc7zssixzXc9H1yqhME/kerT1LfOl2gXLon3rheL77fbBAoYSFGbl2XIzwwcnJhMPDIzt2gv39A5RSHE8MHLBpGrQQZHYdlAc5g17MeDzGc836LIoKV4IUbpfkulITSDpfpsW8JUvvUhQZFy8aaKlwFZ4siayRa1EuadoFunUR1m+oqgUb5wVFukRapcUtf51oUPP7/8x4Inl+Q1k0CEKkXCWhRvGurlTHMwnDgDCQVO4KOiQQSHzPg44fUxGEgm994zlILBwvWWd0fsjEjlM71bSVw+H+MZtbAzt2ID3DR13ZrknXI08rZGwLlLol6UnieI3tbUOUUEqxs7MDQnUw0sCJyYv0FMKFgd45rma5NGfQmbPrnJycUNfVe4pc6vtU+lY8Edc95VwZn6QUx5FdodSsE6dbm1oLwjC2kCbRfXZVVTjiVK1Ta00UR2T53K6VFjJFq00x0byDR1FpYjngYNsEeHle4zg1RVvYd4LZQnPm0nnGQ7vfj2Yc7J4QBx69nlnH0lMsFwXT1pynou7x1s17NEJTtOYdNi76tDpjHITMj827x5HLeDxm1xYyXMdAKbVqbDHIJOO1ainzFVdTgOsQBiEnJ5YDJSH0Nct8imdFGXByRmOPxlIclC4Zn0nYvpMRWPhyUzYIHeHJZVdcqaoKrXwcG0CfFkM0lU34gsDDcyWqqXGt0bfvmXhrBbt2BXieSySNHyQYY/rJyR6u41HkK2EfjdtqxpvmbsqzEl1HBFFzyhFqApSe06qigx0uFxlhFNJYxZDYA5yATIMMrZCZWxOIinDdZTK3MHlP4Tt+J6jjBZJCt3zkZy6RXDBJ4OHdI956+Z7xerPrpcorHCdkPDLj+51v3ySIX8ILQvrDUw+9K1eu8O7tWxxZrlu/F6LFKQeqqiqqUnByVHDxwhUA3r19j+OjE1wn4smPmLjyu899gyLLcS00cjRc4+6dm9zb3qE/XHH0EpqqZXJcIO0+6o0j5vN9PJswOI7xxTJ80NN/6xIuC8+U0kcmCS+/8JpdPyV16RD4ZUe5aduWUrf07fj6+KQnmr1bx0irQK3CCV7kstiFk2Nzt/fiIaEz5dI5A2O9e09w6dyYG9czXnz+jvmsYINx8gTp/PPE0nKe24KnPvg0d+4b5eOT/SmqkTTtqY+bEkZFNA5DKgs/bUpNEAhKezfkOfSSmDDUTE9Ku4YdyrIhDDWNbbjIHhye7NLfsHDCo4IqDygnVSeuFPg98jwniCR5Yd7Tcf/tnKsfiOTKDRyuXTHktahq0GGGm2bM79kAhSF5e4J0BcoGuZ7ngiMoa8sRksIcSIXuEgu0QvoSxx3RFubiDXzBN56dcrxrLtTB+YbF0sENVKduNeqdoddvkeExly4bbtYwHPK97+7wR39kcO+vv3TAIHDZWtvizruG+PeLv/EPePab3+R3/ug/Z2AxrNXSpVYtia1MKEdxsmjxhcOqDNe0NetbZ3jyyU/z3Ivmgr7/zgFCh5y5bsfIj/CSGec2PHZ3bUIZrCO8OVvjgMImZdsHOzx+/SyLfbMAXv9uSRInxKO2q04Gro8HCNu5Eq6DaARRENHUZmHGy5zpIuJHrq5z9x0j2HH9/Z/l4qPnyOafN+MUh9R1jzzLcTAHjStKZNBQNYsuQAhDnzfeeonIkiiFEOjWkOJXCn9SNmR5hUZ3FdIw9kiXFbOJqWRrrenFMZoS6ZrDNkkcqqrAD/zuEGmahjAMO46A1prhIGQyXRDHJpHRtUsg+2RFhrCkxdDzaFo6YQy1qp6qlpX3cFPXSCEYJD0ae/jkbcUgXKO23cLj3GWrv0YUt1z85F8H4Hv/6g9IJ3ts9a+yFpu1cVS4jIIBi3pFHJekiwUXNh8hbcwhrQqf/d2M9//IFaZ7Jtk4mKY89vg6mxfMxn/5+W2aJsT3NjuFRM93aGvF5mbCSlCoF0ie/84fc7hvxmXzbMBg5OF4Da5jAu/BWsTJ5JDL4VkWe2b/zfcnLNsQb2QNip8asnNnjzfvl4zP2PEsHNb6ISkF0hJFXelwtH+MZw/EEgiShJoGvbqMaQnjU4KyU3uITCGU6Axmo2RI33GZpieIsVXKywOcasGdA/PZV66scW+3gdp7j2hCRatL3EDgvcfQtioaHNtpmc9SHDdjPs0YWUNrzwsYj/pcuuZzZBWoPL/BEaGR6wVmywW9aAOlK9LMXgqAlAKh2i5xchyHumoJglPDQum2aLFygQfQRMmANM25dPkiAMvlnLIsKcv3JmYBjnBp29MAyHF9lDKGtwBPffBJbt++S9PYDrQuGA+3yBYZc5vwDYYJo7jHfDo75cM0mltvv9uJObRC4vsSRwoaG9A6ugHHQTkSb8UlUBmNsCQ5TPKaZRkIl/Y9HJ3SmgY7KxkzpdBtS6NW/DGjlLi3l32fkIEQ4DinwXjTtARBiLJIgMAforTPndvbDMdmjj0poH
FoakVlu0JBGCNaTWXPJOk31KUDOiCzwg1xsI7mXnd5DscBQSRZpprZoS1srAsCz+N4UeHY5OqpD17gm8//Hq6tLPeCiGxZ0dQKpU7VbdEOSpkgFaAoMjzldp0y1/HQWpDnpeneYiTJpfQ52D/Gt8WcttZQnqU3MEnvfHrEX/6pv8pkep/vfPtZwFoHZClRLLvkitbBdSXaJneNblgsFniei/WApa01nozpD0ImtiiSpQ1rZ9bIUvP9qrzsyPQPHpzK2A/6I9LlvDPnLYoFWotOYAqUEf4QorMm0G3bFcNWDgOrothpx6SkLGtcV7xHujogDH3c95gU13UBtB1PsaoqI9rQaELLG5Z+yPHxMVIKtJVbNdwgySDZsmtlThi7zCcFleWwpssSgU8cJ+SWU6qUMVuWrUnGj29V7NZv4kiHNRuMu2VNU2uSccjJvjn31zc2mC+m75G2dxACqkrhe5b7Vi0IfUFrVc50FRD0GlpqMmuoLRwfn4Jef52Z5X1FwxoRZEZuHVhMC3Z3a7Rwuf64Kfhmyym335wTSI/aCgR4XohWors/jL1JSr+fsNrbTVWbmEG3JMlKHbRE6La7Mz0v4Nq1G7xz6zYH900xdfveDutbPm0jOjRJmpX0I8lsbu65Xt/D9WuqTFCvBPBUSZz0jOGyvVOUrshmJZ4tgNSO6fLIpiawCZ/WmqyEOCwQlVXvcyqyNOPieYOYIJGsn9vikb98npuvmiBeJCfoyiHPwLNCRnEiaasUbblMo7NGxrvf84ltsWOt/xjPPf8yabokHq7sNRS6cbtCiu9Jer2Y+azm3l1j1O5FMEo2SLM5f/7Nr9jv1/Czf+FnODgy7/STn/sL/NZv/TZ+IKnrVWe+grZBiKaTpC8qE6sIfWpNFIZGbXHFQfR9n7qujWKn3Wu15auvkvqmMsXkqi5wrYCW6zj0EsnixMTQPd9FzH0O9wt6I1NAC+NzhGcKNscOJSZOScJ1ju77lNr8nUHDsZ6jJxVXbph1fnx0wOf/xV22zg+o7PmymA1IxR79kVnny7RACRfHBccqevqeBgR5VhDYIp52BGmWESUmrZGuKdiXtaIszLwkcUuRZwyGPZQt/Eivpio8VGXW5uZ6wsHBgpqKwwOzPl3hInBoaugPzdp3pGZmBYv+dc8PRHJVVw0vvWoW3Eg2NJ5HNtXUCxPYqNZj6fUYOylWCAQpJXmancouVi2eJ2nd1rSLALRAI6mr5lRSORbMZ0Neft0E7H/x/BmKqeT+7QdcfcZ0SNLlAj8fEMWXmZZ3APjWH9xma6y5+V2rwpN7pFnB/eyEG2cMKfz3vvHf8+U/fZu18XW0bwimo/hxqpMT8sxUBh0nIAkFjuiR2cBpMIh55eVXKB6VfOxjpoLxxd9/lp37iku3LXxqXvLDP7/JPE85eGBVBq+OOD726Hs1mc3e//ALOSOO+MTHzUG6N5jy5vMNF6/6nQwqbYsT+ZSrPrqjEI6gLSoiexF/6OJVNp55P9/+wucp+6ay8+U//EM+8xOf44c+ZjpZ0+qYV7/5VcKej7LJ3Xh9SF6nLCY5SbSSWU/Z2FqjWJqFqLUycKZWMbKXUFnmKGWIzUlvBZ9wGG/ITi66rlt6fY8iF3hWjrqqGqqqQXpOF5Q5joPA6S7ruq5BK4LAp7KVs6woGa+vsTXeYDazioVKEwYBhQ34mqZGYRSohA3u8iLjRz/9Kc5urfMHX/qK+T5BhGoLHuzbQG4BQ18zGF7BiWy1x5dsbp3laJKSWs8czwuoygm6MQlKUU45u9ljLT5PNjcJ+8/8zIf55rO3uPDImLObZhyW31nQpB5+cwkAqeaoVjLuj5nMDEkzDBJKR+EJrwtyb39vwWh8js2+9UQ7nJsuiG7wrXUAznlUM2RZH7N+xbzXaCsg8B3u3DPBVnHHIRgERL6gSa0Kl9uSFw5ansJ50JpWeyirqlajOZie4IcByib2/XCI0zTMF2YOAs8jDlzasiHqmwnM6wWHaYsctcQ2uMrdCr/pU2lbkRIJzKFMUhwLWTtz7ixF0TBfLrsKetPOqSqNa+EpAgelc4ZrnlUIAtVmRJGmLOHW22bMw8ihqReo1grMyBAvaEAobtwwqpFNXXHz3be/D5KqlCIITo9Zz/PQykAAV2phrW47L6qlrbovsgVx6CPtWBZ5RZ5nBH7UXYR5bnxTylbx9//23wNg+8EhL3znTUZjSyJ2ao4Pd4hDlw9eNwWs8do56ha+8tWv8eQHrwBGjWw2X3YiMNM8RRQNcuAh7O/TbYtoGzwZ4mL2kY5CBKUpWWMUWkfrEY4IODk2+73RLaoV+K6kWhXDPI/hsN/9vr3DAzxfAqqDKjVNQxzHNI1iPrfVQsdBCAd35ctUFxRFYcfVkpY3zrKzvQcoXCsoU1UNDgNUayEqjRnzNC07AQvfXRIGCVlq9nHP7ZEul6yflQwGZv2ki5ayKVEqIbYZyf2du0T9lMHYdkfFGqpeUsuCND0l4mfpAlcqpJ33ulFoJbuilysDXDd4j4+ZEWIoK4Xv+5wbmUCxoeRwf4/3P30NgJ/+uZ9ie3vCuRvneZ9j3FFuf/t1kn7MdDnHtXL0VZUjpdftTy2UUcDSdYcWcBwHzwuYTE5lz+OeZjabUOUr8QrZdR3fizZIUyOhvkKOmKTJ7X5fURrxoaIoOo83kzQbVceV+AQo6rrt3snzgi4pW/nMSSnJsowwCMitH0Oa1jzxxOUOwbC/v2vEMVRDZQVJaGEw3GBtbcDx0QP7+YKmcjvo2WgTkoHmZH+Oa9d1GLZURUleNF33o6grgsBD2W5Pf9ynqBVV63CSmbV/aTRGVQm7hws2N8zZ0bQ5eZ51iWIQBGRpgee7XWFRWOjSxqaFtvtwPFkSRD7D9ZU4ToMioFaKzbNmjneOpoxD0XXhh6MtA6MPWl570XQ6q6LExadxFUKvIKlQlyGuPIWQmUKL7lQGZSBoqtz4ldlkyveNKt3Kp/Dao1eZToxq2/lLBj7V1jWLxQzPF53vW3/okOdlJ4wT9hoCD44WGmU7Co5sSJct0u11fqa61cjwVKI+LVJ0myE98G0SnxcOo8EZCnLm2qwNlbckwZDtXft31ZC9Oeflb7zBZ37FxFwbW0tCv6YFMtsZq9IS36sQjollNs+uI92aYhGwfceKHbW3cb2KXi+hXHmDCJDCQYiVT5LxJ2u07ooPWZkymR2RxAmtvcd+9FM/x6//+q/z7Nf/3KyxfMaD7X3Wx2u0FkJZlTXCMcWNVln4eeugG9Wt15W9xyqZAnOe+r7pkq6UqsEUp1fFP0eEIBqEaCnsHDu6RtQugRWYcH2F0CWb6xHLwvq5Tjx2Fg7DJMSx5+JMT5kfFVy4ZuLH3pokkDGakFsvm/g7GsPFS4LxaJOlZ6Gl8xmDZMC16yb29ZKS6VFN28iuuZJlCld4REFMWZl4yg8EfuB3xY/zl0bMZhPqSnH+goEc3nlnBzRMSd9jodLg6JDGegQGIuDiJcnt2/eIQquEqxRu49AoTdOuikzfL4P//38eClo8fB4+D5+Hz8Pn4fPwefg8fB4+D
5+Hz7+D5weic+X4OZuP3AFA70tUvYVbxeTWRbTfGyDbQ3S77KpgTaupa8OxApC+oG0KhPBwrLCAdo1vidaq49Uo7VE2C/7lPzdZ6y/9hEbVmv7GOeKeyczzKufO9gM2swG9i6Y78MkfO8syX7KwVdwz8RYiuIvXVvhHJpv+9nPvcvZGarDVlvw/efAOy23JuSvmuy7nJX1/zIIaV1uBgspha91j+/73mC8NDPHio+s8uHXC9l3zM31q+qOUduyxec2SZW/X1FVOupD0rfnwxfwMr37tJT79U6ZK/Zm/MiKd7zA7XBJ00tamgrgSF3CFQLkuBA5LK43+3MEdbtwaUmhBuTBdlPl8yv/yxjfZ3DDQpWq+Q28Y01Ru1w7/1Gd/lD/70jdwRUVhK52OCMkXy66yVFUVwoHI9zrCdxBYbLmqO9JrUWa4yuv+7vsCx1EkPYfUwiAGgx5RZNzLVzK2Bkp06neilEbj4ziaylY5hv2YxeSYKkpo61Xb3BB1VQdVckC4KKFxLNTM9yVvv/02h/sDBufMHB++mzHquSjbRtfLiv1iSVGFNK5Zi7Nqn7JuuPbkI9x6x5LchWDz0Zj7L5iKYuC6OMEYL9LIiVmvc3fGuacC9u+mXHvGcLpuZIq8zDi2xrs/+tMf463X95lPCtbGpkKjOMSRQybVMTIw3UFPFuweZkR9A3/ZvOBzvHOIN/QIrFVBU2VsriVsji7z6ouG4NkfxVx//4jK7quTe1PQgsBRrPBErVsROQ3CibCNMlRREXtBB+kqytzA5rSDsJwy7WXoVjDsWYGZWtAISaNjXMf822w2Z+PqI4i9d+hboY8wKrl23efbqeEtbExu8R/88i9ya77kj37/i2Y8z8Xk6Yyq1NQWcnD+bMzFC33euWnGvNcLqSpF3dQkiZUAbgTH+wVRXHDtMbP28hwWM0Vt+R11I/B8wWKR8/SHPgbAh5/5AP/ov/hNtFakqRUW6UVoWlorJey4gqY1VccVET8MQw4PFwSBS2WJvjIwHZ8VOT8MA6aTBU1bsxKB6MxYlc//8Vv/EoCdB3fYOhcRWg7b3i5UyuHGjcd48pypJE+XOev9AZ/9xMe5vWM6nfMsR/Yiji0X7nIyZC1Z4yA7ZFk13e87e3GD2zvHnBlZwmg7Je65pJbbUJUt/TChyDS6tSa3usRbCWfYTkfTVAgRd+aujqOREsqy7bhZw1GP+WxhjGitOIbSpmKplT33PWMFcfmRa5wcG9iKdAaE4YKinBF6K/l5QV0oQluhbRoYrdWMNwsK64HmBjluac4Ps16XOL5g937NaGSqr65/wNaZxzizlXTvfnfvOaJowKBnkA+vvrDNIAnwIknEyrR8k8GjV3nz7ZcoSmsC74W0zamkvqalaQqkdCjtOWwEUQKWWc6R9cx7/KlLHM1SZpVZP6+8c5eDvQcEcth5UTWOwpEuP/Thj/LyS6+asfdcKyJkTXelpFINaJd+38zV+x6/wgvPv4RqIgIrhS6cin6cMG/Me4NA4Ngu1QrC55OlOUnikuennfmVMMJq/Uh5yok2P+OZCntVsWJTuY6DE5zWfVXTghAkg7jzagMjwtI0VdeVHg2HLOYFR8cHq/9pzZUFycoo1nFJ+i233n2XX/ylXwDg8PCQr37lz4nsuTU5lBweTvFF0HUCsuWSOBoYSNXK0rFyyfIGJze/P69ntPg4NQTBKbe3quYkwsUPzM/N51N065zyzoqaOO4jhCZfccOcgFJVVCtoVuCiQ1gUDY9dNx1L10958+UjymYOtmHiKJDtRif80TQuTW041drysHxPcPZ8j7WNhNdfMvu/mAniOEDronsnP5CUTQ3tiisp8f3QdkMsPFsoyrpkzXbl7t+/yzPPfJReGXPr9k3zTjS4jo8AMoue0Y42XQAL/R47A4o0pCpTlFj5eEF/1EOgyKxghic8isbFt1D+zbHLZFqRplBaZQOpfBLfY3tngkU5MxwNKCtJs+LJ6BIvdfGaHu9+xXRR5JUtFtkhy6xgbc10l5750NMEPR+GFvrdVBzszjjY3Ue39mzRS0KpEMpFWIio4zVI6eA4Kz/A3NhTUJOmZn88cvUxHn+8x/deeIfK3odrm2v8i//nd/nn/+f/bcZAQhyHtHpJFFqoYtTn+Hhi6Q9WIEQqijLH5dS+5L1IHsCKu1TWZ24VGzkkid9BVD0Zdf5YwopMaQWO8kmtuJoQA2JvyO7+Np/73NNm/Rzf5eZBQDUvV968RDEM3IB824zT4f0c3y+o25rFgZm/j37wR/C8lL2DN2nsftCq5t7bGccT893SuU9Vaaqi6bypwEXR0OgF2nYHqwZ6vYQLV8xen89PUErTNJrd3V37ThFh6FPrOa2yBsCVg+sWlIX5+/H+hEffd4VxMupQBcJReL5LnVcU2cyOHf/W5wciufJ8vzswdnXCvZ2Wg/l9Qgv9cIOAZZuwXvoIuzmaosYVQQcBjEKHLDdqgauLWPsK168R+RDP4t0qVdCPBV/+A7NZX39XUumC/KSknZvF2080bTyiTjXTA3OhRGe3kJwhseZ5x5MdPvq5D/Hhjz5GfmAU037/T77LdE8TCIfV9XH2CcGduaa0CyWSAUfZBC/odSIbVdEQRi1Jz2exMBP39Aee4NM//gnefvA9AN746jFP37/McQqbG1aR5qUjRvFl4nHA/gODbw7DY5Ik5NkvGpjVsmw5E8e4Ww3HBxa2Fjk42kOs5MiampaKsmpZs+TutQ9cpZcFbF25hNuzJPCb22z2NIfaLNR1PSXQLu3oEh//sZ8z36VuKNMaKVpcacZT+hJXpCzmJsDc2FgnjnvsHex3eGBjsOdSlG0HjXBdhzyFIFzhln2WaYXnK1zLuVoul/i+b4OQUzNTx3E6eIirFIoGT7q4zkqtCHwX8sWcyKo2hp4kDAfM5tZg1nHJ6wo/DDqIYasMWV86LcVKnYwS2YvY7Jng9Xh3G60VY29MIMxnV9OabHnEvTsOvhUDuPjIGRqv5emfNcnq6y/fZlFPOLue85T1Kbq3PWe2vI8+3oR37AX2bkEykGgMRPVGeJWabQ6mt4g88zOO1szSGunBCqXX1C2onFCadvvhTsHaVo/jVHWEh6TXcvudXZZHkpF1X+8NKr779Tv8yk/9hwBc+xsh/+3/8D+ijivGa+Yy2dlPcVqNXJNoCxFdLEo8XAIbUEvHBccEZSuuT+EIJJLYYtxnkykzVaBVi2cPtqVo+aFkgHrfGiMbmGVTxezwhKElwVdrl3nz3Zu8cvOEvv2sydG79IYeRdWS5uacmE4bmqomCFcwwYymAUfENI312fBG9ENJWx8x6K/OBEA15NKq8lV9hAoo5j6/8zv/KwC//du6M25dBWVt05gCglhdOcomGKL7GWiIQp/pfMG5M+ZSL+uafNl0kNXhcEDSi1jMUzyb0Pq+bxXRNEVpMPp+kJKmNWVhxtzxNGEoyMopz901Y/erf/PXqJqSy1ce4df/4W+a8dzdI0J2RtUf+8gnODfa4LjN+PCnTPL4T/6rf0yDQ+PkHBzbJNCLSaIYz0KMqzonW6aU
ZctgZIpjcRKys32IUhrPXUEhochTMqtk6brGly4IIhYLK9SyXOJKh7qp8H0bZLoeZVki9GoMPJpWsLe3B1Z04s233iIMYTCMu6BBuj6tzDuomSDGDwRNq3GtqmlZL8iziHiwMuLscef2MVfft8nWGTOeL3/X47Fr19k7eoP7r5kzd/O84INPfZof/rCBS//Pi/+GWzdv4Su/SwLv3r1Nr58QhxGLpYWo6QYhZFfMUapGSg/HoYNLlmVJXdeEkUc2NWN+sDtlvHWeu7cNrE3cbRhEPfpDyWho9vbWj32aapaxc2+PM2OjbleUKZPFvOPVKQWOqimqijY1a+ONN940CqmNQ20hnLpQlGLeQYeaRplxVNBPVsJCLXEUYsyurSqdEN/nO+VKSRAERtHXngnGc8eoD66CQCkNVLK10Yt0XeIkQbU1No4yCZXlfa0+v9Al9azqILlNaXCAcAAAIABJREFUUxFEIW2ju0DIsQINcc/lwbYR43lwfxdHthQ2uIrCAf2eQmndGZSmy4JWZVR5iygsjDSMcUSBtoWpq09LSqacbCt0bc/qnZQkTjizHrO0NAdXSFw/pLGZRdkUOAIaVXXwc9d1CSOfkyMrvLP06I9H7B4ecnJkYU+XeygOiROXycTsmSiKmE+z7q7tD3wEDq4Cx7FnV+DjBorN8wkDs4Ro8oS2zpDOCm6vadRKTCSxa7EmXeYEgU/Trvz/WgIv6OBaAsWL3/sOZV10ME8/EEjPKPq2NuhxcJCBYKVdEQZ9prMZShfd3S79gKbWZGmGPTaIewqvcvCtKIv2FV4Z0OTgWE6pH3nc2X1AHEdgIemHu1PCcAi2wCtkjETikLN306h8DniKK1evc+bsRndOvfbaG5RKcuaG+X6zk2Nuv+iQ9DX5e7i2SbxB4xwx2ljtW5f5JKNdQRwdxY3HL+J5HvfumeR/PNrg6o0b/I2/9Xf5B3//NwC4fG2LP/7inzJfmJjSdyWe9NncOEeamvV5dLxPU5vi3MamuS+m07nhzHFqxL1cGph3Bzt2XcIw7Ey5AdpGGFi1PV/bpiYIItq2NIIwgOv0cKVPZAvMTXVCuN7jv/z1f8zuA0N/+cPP71JWglaXrJ81Z9D0eImuCpRrYlqUT7X0kGHN+oY5k96+eYtBqAiCDXKrsKl0QbosmFqRtCiWZGmFL91OF2Ew6COclqpIufaYKWqVuWS6OOzO88FoDdXkCBRttYKtS/K8pFWawdDMVZ7VuEIT+OZ8+/CnPsqzX/sKySDEsWvRxAktnhOhrSiSQAIZ/6bnByK50k3EvuUWTaeKYvseXgOVu6rwwdjrQ2XMNwFDAq5DGusCXjcl0gNVB7TVCmOqCYUPoqJWq6y4IfQ99nbMYP9P//SA80+cpUnfYfdNUwUv/IB+dIYkcVHeqnpeU+X7nElMAB0PYffBIf/1n9zj2nWr3pO53H92xtqjJZc+Zrkhkw0uX6iJLEdoZ/+A/iBC1UXHA0t6CV4UcbBfdsonz/7xi3z0s0dcfdxg6Ec/fp5nf+c2TrbOaNO8+2jo4Ls+g/4ZXvqmJWUGR/jA/K7Z+Eo67IgUv98SWrxzqyoaWgK56uZplGqJXMnVDZNcbSRDXnzlPq0TEuRmA7u9iPRkm8OlFRXxPR65tsYbkynPf+85AObLDKUUSZjQt4H3Ij2hXZgACmAxmzGZzKiqBqv8ipSSNF1ak0nzb9nSOGqXlrSZpQVr6yPSLEM4pvqjmsAGUM57SO8NjtPSWLPVJIkZbvaoiprlibkUiqxE1RCFIa5jNuzJ9IA4HHXKeXXTEIShqfqtEuGqZtQfc+OJ67z7rMFFB32XZaFxSiOH68mAS1vriErhi5V8uoeUCcuTgroyc1O+rnn/Mwn+I+Zy/uTFH+P4jbtMSpfpifnOPWfOBx69wZ7nc/vbZozf/4mrTNN7HNjE/7tfP0Arj/MbWzx411zg48GAfq8ichyqxgToznATyYhsaQ67p268jzuHN/Gq9PQQWY5ompZnfniLj3/yGQC+8IUvcf7cZX753zfiHK8+d4+dd/47PvbD64xttX5SCAZhSLbcxjYQCaSH0KILcHu9BOGajvJKyEAtC4TUzJYmUWypiaKQSmlaG3wkccBL995iKQsii33/SHKeUN7l7n3zfX1cJt5tor5Lv2/W56ULPWbznMlEE9vuXdU2tMrpBGAGgwEXLgt2t+cksdkfURjiuhOmJy5UlhNYLVksa8YbNknLKja3xjx+/UM8/8K3zP9LXOIo4OSk6GTUhfDQ2qFtVhLLDYFnldDsmprPFvj+kCCgUzqL4h6l0J2aXlmWCCUYjvpUlg+QZUsje95qsHyxQCYopSirlWy2RKGYpTs8sPv2f/vtf8anP/XjfOf5N4lsYPh3/tZ/xJ99+U946y0TaHzjzdf4mR/5cX7kp/4Sji1MffSHf5iXX32R9WFIbYM+7QjSdEESmCC0KQVlNUW6EcuZWcNNJXjqA08ynU65d8fskdGoT5IkHB2beW8aRZ6XFolgFbCSPlVdWsGa0y6flKdKcuAQhQPyNKPft8UVR5GXOW6uCaxAgNYOriy6M0FYHlgoz5PWJtgZyCFz3aIac+kuq4aLV8Zcu/o+NGavbZw/ZPf4JmHgdrLVJ/suX/vym9x528xLGPQR+GgFsS34KE/R70ccHR6ysb6SHM7xPI/UylpL4ZKXGUEQIThNJhUtdVng2c+an6TEXsP5dbNe07Jhkfm44xSRWEXYoubauctsv3Wfo3sGeTBY8xnEEfNsZX/hgwrwfafr/pS5Q5x4fPozH+CVV0zHazapEI6mtRwhpSAKQsoyJ8tt8TGKULpBKX0q/f4e83WAVplOkpSy4xvZycEVUNjuRGeoarkpbVUb8/iq/r7OFcLIra8QCq0ucGVgu7vGhNxwshty+55toxnKs1y5Du/eMjzvpo4ZbwQUVh3VcQRt4XEwzYgiGxgSsFjmeK5EW55SXS1wHIlIrBn48AzzsmVfVAyGZq6euLFJPnE5mNyiaVbFFIOoWWWKQRCQZYZHsyoiKNEQRg7S3nOOrqingsAV1FYt9J23FgjZMp3kXbdVNTVozeaWLT54EcvlnMAJuHh5y66Xiixt+NM/eMDaeNPOaU7baFp1KpvfNIokSbrktSxaPDckzxrGayuJcxeBSxRYUQGVIoRCel53dskA0mUOQhFbsQHHdWlFSTIw33fvYAdfrrF1ASZHK7XHIWWVI9yG2ha15xO4eL1htm/X1MIlkCFtr0Rp20lqXca9hDITyNB2vEXMIl8irNWFblw8vwSvhdaM1d7xbaTuc3J4ggX5GLXOQFPvmfNtMlvwxIcHXHlkyJd+3yjsvf8DV3jtpV3qyuHK0wZ5tLa5gUvE088YPv5jNy7zrW8+x6sv3+IjH7kAQNiHr33tXV54YY/NLdNc+PJXniUrD0hiazG0zPFwuXd3l+HQxFO9OMH1HJRq2Nuz5sGNi4NHu+LQqcZwq+q6E3wpiqLjXKWpFadI1lF1BTYpS5KEJOmzvX3vPWiiBSJIcJZh99lZ4eMFF/mn//s/MmtjluJ
5mmEyZm6l5usyIXAiIq+xa0Phepoq94isCl+ZHrHYF8waD9c2DVxcnPfcfU1Rsz4YcTSZ0rMddikdZrMFTakQrAysFW0t2d0x+6rfN6rAjVXnBUjnCj+WnN28zMZZ81lZWnDr1h2uXLINnv13QCnObI7Zs4qs47UeeVGhaof5wtxrkf9vT58ecq4ePg+fh8/D5+Hz8Hn4PHwePg+fh8/D59/B8wPRuXKlSzCwcJiDJfHGJlEv4+jYVKXXxxsUtKiyR1KbqkaqfbRfoGublZceUSCN8pGtipM3FI5C61PpSUdI6tbF9U3153d+a8JP//UQEUuayLxD35MokZEVLsIk68TumNH5dYrUZMlr/YCj/Qesa0GxZ6sjk5B4WHF4r+kwyQ9uLvi1v/lzLBemK3bn3iHJWFNMNW1loANeT/Lxz17l//3tN+hbuXLlNTSTBcvcZM7N2ojNxOPea/e5/bLlG/UjdP8e4f0Jo56VRp0rynaBZz0JpBBIL6AuG7SwEq46JnQVlbAyz0rhVS6VlBzNDVck/cPvkC1L9PVHOHzV4LL/2m/8Gm+/+Q53f/cLZnjX17ld+zTjEa/f+w4Ajz56gbODMctJw3Kluuf18LYEwdT+PtES4pCxXHmmslhW+J7A1eBYHLEQLucvnOt8fXZ2HuBIgXQCXFs1FoFGtab6tzKKxFVoIbB0LpoSTtKaqA27ql+ja5wAtKaDNESBREiBdK26VraEbILn9ShtN/T65phf/aVneHU/5PG1R8zw+VNefXuPrTOmujUvMu7fL7l+sYezMhyKQ4qTlM/9pccZXfkAACU19+7UnHzDjMtk9zsIV+KIEdpiyqfekqyVbK0/ighN5fH17024fPEaiZVrLw7e4dKVEZNpghta2dN+yZMfErz7fIS2g5ylc2SsiAZmbd7dnhDEHpUXUVsT6kVxjCsljefyZ183lbn182sc7e/x137zPwbg6PbrrK057E3mvLVtYVZtH9/dQIQF64+YCtu7t+4Q94P38Cc0+TRnOBwT+bZK3Q85Opp22HEvDJgcLtk8t87GmulS3blzh4UzYFx5KLuRX1QLnh72CK0xZl9ArTxG0SbDgTk3Ej/h3qHC8Vo8W80OfJ+2PF6hQxAy5uK197Gz+0027bzv5B5ek+C2c64/YbpuJ/sF2bbi3IaZOzF/wNbGE/yn/9k/5Nd+9ZcAOD6eMJ3kDHpnKKsVP8V0Z11hYcFRiC89EIo8X0ETHdJJznA8MJVnoFhUBL2EeFW9qxRNKmkaD99yEJPxABxB25bMF2a9fOZTP8RituCV114BwI1cmiqkaVPCwJyV9999ka/SsHXxfXzgQ4ab6Q8lR/tLIoPkoyymvHx3n4Pf/Qof/4lPmnW91uPGY0+w/843OLpg918G0pXEtrNUB3Ok1ycvWsYb1jz6qOLg8B6T45bAdhCLssT1HB551Jy5O/cXZGlLGBmPM4AzZzc4nuxQnTidLH/jLAlln4nlXLpuzGIxQ4iaqYUTbq2fxSEm9IYIdyWV6+DhoGprzllqFhPoDxVnzpvuqyiX0B7SlBZWpmuKRcxLz93B653Yj/FR5SHpcoPArkWlc2bH9zneNeX0oJcTxAEuAXlxygN13TUjJ275d4vlkkF/i9b6xwkh8GWIalTnMeN6Pk3Z4roBKwzVIkuRs3sMnFUnwuXchZjDox3C2lZ2ey0vv/ImzrDH+ScMxLiscsZnxzA147TzzjHjXkja+rS2cyWqkib3CXyHH/q44Xh+69kXyNIAZTsvruOiRWPgOhbiWOQVYRjSqrLrooDhfqzMwFWZU5SlgXJWK9lscye3bdspeDZ5RhzHeLYjVRYZTmv4WSu0h5Q+i8WC4XCtg/MZE/qW0HLF+sMB82VKUTUIS5RyXM3B4oAnf+gDpJnpXB3vNISOQtqzWrc1s8WMUTLuJOodR4KuaFVlvBDNWyB9Sd/arHz7z+6hvJYPfrzHvbfNunvrrTmSHnUd4FouUV56SF0iXfP3ReMSxiMS36O1d/S8OCKf+3iR+V0bWzF50eK0sLdrur/JIOLsuR7HRzM8K+e/dWbMg7szcisdXrWmu6UzyZG1Z0kLxealmPGG4mjPvGcSFfiyT92uLBw82nZJo0pyC38fryW0TcaltStUdkPu7eyS+D51a41/3Za6lmipWWTmHc6vRfi9hJNJij1icdwa6oDxwCCBNvohr7xxi6apWR8YyNiyWBCdkezdl7ie7ZRn0DR9vJ49O5dDsmpKMHA6r63j3YxrV66zfX+PmYUdJ4OEjf6I+ezYrjuFUzu0OqHft3sNgVZTHmwX6BWkxtGgXHzP3ClPfeQiV28M+fOvv8wnPvNBAN735BaP3niMK9cuIy2cd35i/Afvbpu470+/9ByvvfZt2hZiG+MF3oBlOqFqBY4dGCFTHjl/GWnX2NmzZynqJQExVWnmr640ng7JshRhPR0dx0GJBt855ZhunBkBLnMLGZWeYLnM8HwILX0AlSKdGmz3p6oakoGi1w87mXfVuvTjAam1v1B1QJUe8Z/87V/pbCXW18YM+hssFjPyhZkrT5ZUbUVxZH1DwwTtuAzjDVrPfPa5jYjpSc38oMVbLFeLg7bpE/asRY4e4fVb4qVHZff6Is0YJCMuPL5BZffRwYMpG2sDjmznrFxodCtIegF1Y86SRy6ew3Hm3Lm3R9A3d99jTz3NnZsP2Lby936oWN/qcTjZobDw+qhUxInP3nbKI9Y4fTY/IjfL6V/7/EAkV+Dyva+Yg2779oTrH3yMu9vHjKyXis5cRpfXqOsFJxPLLUoC0mWFshF000AwSMjStMP6tkojXeN71cnbCpemVZ23yHJR8/bLLqP1s7hW6tIJHHTpI/wAbScljhzKBTSFNTbduECchPh9zY3ABAhfffVbVNLnMz92lT//V4bM+bm/cpXHf/Ic/9c/MfjuoNXoqUCICN83i+LkqOT+zjEX3tfn7qtmYYS+5vi4x9Dii+4evMT6SNPrDXjic6Z9+fJ3HpAEEUkAR/vmcFOVkWdewUqUNmaLOAJn5QOllsjWNQarGInzIIoomgWqMkHEmYFDPL7C2pPX+OS/9/MAXH/iGZ7741ewCpZ47YSjCUx0ygefeRyAi5fO8q2vv8NsUXD2ktnAW+tnmU9bVGze8fj4mEfOb1He7dNk5vCJeg2+69GmktySudfXzhFFwXt8RRzm0yltC8ISG5O+z3gckWVZN6coiXQ9hF7BPBoGcszJ4UlnIuy0PjgtDS2hNSjM8pLWmbFm/WMG6+uoXCICuH9o1t2TH/0Mmzc+wHmn4EXX8OGQLlE/Ym3NcKcWDw7QegcPh7K0UqWHKT//izdow5gHh+YQuf3aASc3px0OfbyW8P+x92bBll3nfd9vz9OZ7zz0gG6gATRGAiTACYRoDhIpUbRkjbbkOIoHVexyFOtBTipVTvySSqXiTI6dYlUSO7EdURNJSZRMigABkiAxNIBGo9Ho+XbfvvO5Zz573nvtPKx1T1MpOU+uCh96VaEK99a5p/de47e+7z8UZUEcb1Oksg/aS21Ky2Wi93nuE3
LcD8ZdLr6zwVNPy819MLhNpEeIIGT59BFcQ3C4EzA/3+TqJYmLXnmgyWBUYqiEgeUNsW0oYw+7rnxnDJs0jenuTGfk405rjnikU/S+BcDqUptJZBJGOfetK8PeUNDvb5AYITWFqegs14iSmPVFKRRz4vQprm/tUKZDencUFt2IqdV8vNqRXHRGZ6HDZDLhCKrgBR3MMCSsLHShYDJmnfeGGmlLvu9aa5V8O2b++KMc7EkvuvfevQOdEr1ICVqKzF3XuHrVYLEj18dcFfHut85jTRLyM/JmYelT6oaLs1jHDQfqGeDYmsGgd0n2kyi5dvEcP/WFz+OpINuv+yRRzHg8RLeUBK8mOVZH+890PKGqXOI4plFTsKdc0Ok4FKIgy2Wf+75LWuW4Sia8LArcukcynRAqWFctaGFZBs1aQPdA7ku39zZwqYPy9VhbalFqFYOw4rOf+zAAX/6d7/HQA6ugp9Q7kpB87e0rJNkALPlMNg2uv/0W4bzLN//odwForSzy3BOLdEVOfKCCAb3EMmyKSI7nOI45+8j97O32aMpcA+PelP5hQiVMDGWmXgmX7sGI0VCRpivpW5KmGb5KcoVhyHiYYxk17EDOBdeqkyYCWyU7xqM9nnjyKW7evEmi4K+mUeLYFh9/7ln6O3IuXL1yHb2qEAqikpSCrBJUoxSnktCo0izwanWK9IhgGZHlEjKrKyjmwsIDdJbmuHjxIssKzrt5Xcf3CoSSgi+ykiCoMR5PZ5AqyzY4ODhE13QOu/I5n3j8Kba3DjGP5KlrLqPRECEE2swOrMAwDHTNIFEXC8vQmU5i5hUP7P5jdU6eWeV739lkbkXxqXILsx3TOxyTaPJ3zcVFCiFYnpf923Fb3Lh9GdcJEIqcH2Yapu0QTiICBW1bWj3G9cu7oAI5w5CeM55rUShBC9f3sG2bySSDmTRFRZELckvdjNGxTQshKhwFI4tjKa4kFGQQJDSpXq8zHCo4s64DGnmez4j4ZZlTVSW1modlH8G6Cyo9wnCVqIcW0hsO8Fx3JpxkOxZz7Xmuvb/LJ5//AgDff+W77G+HuAraGiV9WvMBrh0wHMnzKY4yGo0WUSz+HGSzFBmlgpUtLtY46I/YuhbTVLLVk3xMkRRATqqk3nUsNCtDU5BJERtE5ZhKs3GUdUeV1ShFQqW8dwQLTMJ9RKVhK05ZlWeMhtIg+0h8YOdORJoIPF9BM2ONsvCw7JyJSkhYXl3Kb3s6tqvktg2IoxCBglQ1LIrYIInyGWc2TXPCUYUuEipTwiU78x5hMqQUSmyhBm37GNt3tvEMlUzNGxx0+zgNfSaAMjc/z8H+IfsHMt5ZXriPv/ubv8B7F1/k2iUJY617Noap0ZzPySP5zksrHTY3xzOrCUNPyISOpdk4nrqAlSW3tjYwNQddzUVHGJQFlNoRvNggrzI0LSRPZTJwcfk4lpXxt/72pymVXcrG7VvEE8iVB9Dps/exfvIEH//4z5Go5Nj5829xc2OLt9/+M8ZjOWcPt6dQaaBk+k0DOp2mFGGzlPCOGeNpNu2GTqREbvIYur1DKqVQkuY2vj9PkUek0RGXMSeKQgnxq448zwqqopol9aMo4lOf+izr6+t86Z//M/ldsYFpVCA0HO+uQXeRGNKRGtCMir29XdqNFkeq8q4ruZNH0uWmaWKaJq7rzjizAHuHm5R5MvOZrTSLqgJLXcCyIscyTXKjYvm4TGgZrQOC1UPuyxdn1BYzN3GsKcOxivFqE+zQoDnnEKrLThGF5OWQ8dgmUQnJUhfc3NmdCbWV/RDPdnEbbfya7LupkTDtRSRZyd6u4lzeuIJmeOhHbg2moMxj0kSfcRCnw5Jm06HWzCkV5wrx/w38+5G4XIWjCdMbklnZqtfobdxkqW3P3LvbrTZzS3XS9sPEhzLIHfcGIFwMhXsURUkaxVLYQKlwZZmQwgZGxVGgpgNVUWHZKhjol2xdL5lbbuAoAz2tkXNwW8d0gxk5/iAa4ERjUiEP1IHfww8qitDk9evyYqi7EXW34uSHHmfuQYm93fdf4UtffQHfVDcSG4RnoUVT6oqHlU5LhIBf+7XP84//wb8EpEqM7VbceF1uPvef7XDhu7eot+qc+sgDACwcX2Nhxee3//n30RSJWOQCQ7uLoUdTpGHu8oY0TUPXzBnJV9N00izBDnxiZax4qx+x9tgC62tnOacqGP/6X/0r6nnEceU/1M0DnHZJSzfYvyMvTv2tKR45UTFBC+WmZfgGRjkkU+ZytVbOYNAlywqeeFR6TNzZvcY0nTKtYoxMLqB+v0+vvzPD1C4vzSGqmO7BGEOVFOuBj2lpJFHI8orkMmxudSmENsugBr5LGY5xPEGmiPcaFWYl8B2TQo27UfepUVIqAmhvP8Wod2gvwpmTMkt8J9vjxctXmITQPCbnRj4q0dKMC+/Iy9bKsQeYTmxEPqWfyh3q0ceXeOYnWnztD97k3AuyEji/6DK3ECBcuRn5jsnOVp+gaVBWcp7ZnonhVQyjhE8/IqsmC6Me129scH1DbkaGZaMbJSsnNDxLXhDCUY397QlFsoUXyHEYDmIKYRBO1MXGdxj0+wiYVa6Wl5fJRJckjVHFFsxaylpjnuvvyzHe282Ym+8wGB9Sa8sL9NXdXaIs4tjJBZqL8nOaY6JNj3PnUFXYbtxCSyPiAip1qItCYDrejMxaFBm2rREEHsq7ligtKU0dUWR4iUpI3OzTWVnHriTxd2mpzYZh86FnnuVP/kT+bjxNafvw1BNLnH9fVhW6w4zl+jxznuqD6QQvSPjsx07x+xdUBaEMGLtTOmQEiVy3u1XOdGzSdmW26wu/9Em+9H/8M5yGB6niOzkBQQC9/oiaL9e2aZkYmo5t3z30DCvBsEoi5RHi2k2KMobKwrFkgJekJYXj4NgyEB7lQ8b9Lg891ObwUGXv+hOSuMQPbGp1+ZzRuMaZR0+SxrIPPvTks9z3cJPNrQknH5LKpz/5mYAPf/CjvPHmq2xsyLV95bW3qaw6nhIaGQ/HGFbG6vppQqWQOBr1uX27JCr8GU/hw89+lLfeeodIJRECv8Hm5i0MU+PmTZkx9esWUWRSVuks5y+qEss+IgWD42ryImIa9HoycDvz0Ap6pbNxvccHPiITF55n8cq373BaZQ+XV+b4jd/4Tf7O3/n7FGoOjacTDg8i9g+GhAppMC0cdKOcmdd6luRgalbFXl+iA5qtgCSBVHGg7n/wBBU5O7vdWbbZ0lPeeOkyTz97iiVVvdu8fomPPv807751Xc6x7oTllTmm0/HMqLKqKuI4xHWcmYJYEATMzZVUulxDjq3TPdzD991ZEFrkBa5TI8sFlaoq2I7OeBgTqUxvc17n/NvnZsq5AFFpECcJk3GfdlvOqdacxmQSkVWqyuiVrJ5a5ODmFBTXxjAqDEtH0yv66hLo+zUsy0LRmyWnlRzjKEuDFLSYTsfo+t1EZpYWUpxC+cFYtuSpmZo0YgWwLUm4X11dnfG3QCYh7CNOsBAUeYGu6zNyfp7nrKwsoxuC+UW55+12bxLUAnRN7suH3YhWc456v
c7hQCUtfR9NS8mzkltX5DsPe4J602QyVZc5IyCaZiT6eObLNjffIssT6g2bcKq4hIXAsjwO9uXcrzUN6l4Nq6hRZSp4zQqyOMO0DVQxkoqE5pKPdrTWximaWdFaM2bCfJ1mTjTNSUP5i/3Nm5RlSa3RxlKG4ZVeIOIKy9HJhRLjcQKyPEQUR+gPgWGEDCPzrq+PllOWMB3ms0u14ZlYbnFEA2U8ijAsF8s0KJRHmKYLmh0D0zkAldwsY5dGyyHK5LpaXL+PyYGFaVVkE3mG9bUerlUnS8bMLclz9MSxh8mLt2bCCrvdLV76TsnyagOvJtdfOC6ZTgqabZfEk98fpgmuX4KqAlq2AGFBaRFN5X6zsOZSqxts3prSrMuzTxQ5UZYg1KFSaCmlXlF3mmTq4rS5c40szhDVPCdPPwjAg49+jCgZEyXyDHvxpcsk0/dJ0x79AxmL6ljEccHjz60xp5Sci3GFYegzBWFN05RAT8XquuyX7c0QwzAYj4oZ53nlmE+/O0UoJdAoiRlN+lAJ6UGF5K57vkUlDLKj6hJSlTlXUr2GBd95+Xs4lsZkLOdG4AaUVYRleZQKVWQaFn7gzapUpcgIgoBut4f1Q+u7293HVma9nhcwmUxwXZd6vanm8JCyMND0BkJxJktybNvFVPO80qWQRhjtc/u6Ssq0C+JMo1PPZiIlehWQiwmuutikVYUlEoSmEx/ZcQpIQoM+Ic22HNN3xywXAAAgAElEQVRaq4Wtw3go++DBsyvMLcb84OUd6orDNj7c56f/2udYaDs4dcXtNTy+8vV/y7V3ZT898/gK/UmX6xdKmnPyu0bdmLNnn+LCu4JDlchstlsMubtn/b/bj8Tlqqpg/cz9ABx2b2FnHh/++JPkdZn9yYuC7eu3ac438BsyyzjuDTANh/Jo4QsDDJ1CRLhK9cOwTMpCSDf2o5sFupKZlD+Zps10kpNbOU4gD7mHH5uyeWMP2+6iqWBVHxgIzUfFqXzw46C5Jov+Ge7sywl27itQNFxeeWmDY0phL1hdYHP7JtOunCieayPiMYlWZ79UBqWBxd61lGtP5jz7U8/J98nG1O9bYE9JJb+3f8jKU6dod3Ju78pF/Su/8Gu88cZlmq1LRD0lqkGJqMTsUDeUCpOmV1SqCqYbuhQVyI5IxRW6Dpppo3fk5nf98DY3Lr/F5YMdglJVdjSdtp0z15Rj8N6dgnljQppZFAqemYU5ZRHRbDaYKNLkOB1z6oF1Truy8nJ7+yq7kwGLp1vER8KdpUXaS2nUDXQlYz+dTglqDir5y3DYx3YMLMOeyZL2BxNMQ0fTHMpSVVEqg7wQLC4vqp8zjDlBMM1oq6x0aXns9fuYhiBREApXs3EXakxV6Xm1YWOv1jkY7VJsy6VyJw259M4NKs3nV/+6VAez7YJRqZG+LQ/wcLBJkSfk9TZ6LN+lc3aVa1shH3nqo2wr4ru9VNG9PcBXl53tUYQTtLCsBgi5mWd5RZzlrN1n8Orr7wDwxssXcQIToyV3Gi8QNOpt8lGd4VgZU1o5pUio19YwNHnZmeQJ9cCdXaT29vbQDZu5ts6BciI/NFOELojzPo8+Iasa2zs3MYpllk9JGOTejeuE413W19Z47XWZ/dFKm4X6AofXcrRcQj0eeLDB+7duYA/lGp3upjSXDcKdCX5HbnZOw2LUSxEKuug4HmmaIpCCzwDNTocqizDxWDsjx/3U8mNcvnODcigD43e+s8Hcmcf43ksvceuqvOR6TY+gstg4SElU4GlEGg8+XLI9UqTwyObMsse7sYkvZDb0MNzFzyrG0zYHa/Jzu2MdqBgVkkD8/sZNnnzmEfrRbbJQjvFoGFIUUKvZCAWbqSoLx7eJwqNNWKcQGq5Tx9DlZ4oiVibCd001a80GYS7I1MWmElKC/crlHg21T+VFytLKIkkREo9lQHJrcJki6fK3/94XAdi8ETIZm9jM8/57Ek505vQ6755/n1Mnn2A8kUmt7/zhLdrHTzEdykx9GIU889wqmeFy4oysSl+5fImdQcJ0EvPg6bOyr3p76O4WNRW49Q8TvKAiSeHkiYdVvxwyGU+xbIFWyb2yMnKyJMUx5T6ZxC6Oq+M6FnpLjvvbb1xnccXi1BmL/W05zvMLa5RVxPOf/VkAfuZnvsBv/dZ/zOe/+Dy/++UvA5DGAs81Saclt2/L/XOaHdBqLHHipMyY7m3uUPdaxKKP2zzCfk1Js5SV9ZMABLUmUZQQh4csrygSurNA09xh+1KPy2/JMa2KnGLSwkDCmcLpPrdu3SLwHWo1KdS0vLzMm2++hW0b6EqxdOPWNQ72+zNBBtczWVhYYDodz8R5dE2XpH5NI1AV4TRNcV2b3R05nkvrHRruAiIN71agBj30sqDdbvDAEzIZd9DdRVQ6+1vyMuAHNRaXTpN39ukd3gQk5M8wNAy7xFGKjKaQKn+pCkLLsgRdUFHeFZMopemwEGIGA9R1qSoYRXKetzs+lmWQ5zmVCnKFUg78YXn2Isv/nOiFoR3JWadU2t19YjydgpYjkOvB9S1cz5nBBPM8ptnyiZMxTVUlLkro7eQ0Wi7fe/VrADRbPoddg2PrMgYxgwNGw4LB4WSWhJ1MQgzdJI6SWZBZloI0HtNUqpiGlTHfqVMWOqOhRIAYhjQAzhKNStnBNNsBSaYziuVnGs0A22/gtGNKldx0S594pDE3L2Og6XSCofsEbpPuodyDhMilAWoomF+QEM7DwU3m5psEdXmmRROH0Uhg2IJKQcbqNZ9pdEgJCF0JJ7UXicKUdKLU5pwE08pJk5yqlO/bnCtxvIJkrLMupzq3NkMmSYVQ8+DO1g6eW6DbDrq6EFWFhmEVNAKf/qGce3fufIfWok4QyH0jj0p6ox7oLtcvy+/+1KeeZWtrk63dq9R82Q+jiYnjBJj6kUm6IC53KdKCLFXBf91ke5jheBCp6pIWxCx2NPo76lJoeFRpRTjNcS0FPzPruIHB1avf5/2rUqzqT/8UQMdQCVBb5OgaOIHG0qpS5qXioBtjai79LflccZjhN0zy9MgQWaPZ8jh2cp6sksmjOInwHFkAKVPZfwd7EwwsQpVEWJy3MfQ2o8FgZkhcFAlVAUWu3VXwLEspsKOS7JZpcbC3g2nqBIGCOec5mqkzTWJ05Dm6uFCnyCNclWyMEmmq63kOlThSuDVn6xykrDzw5wyKdV3HsUpEaZApY2PLqaREvFqPtmMwt1Bj805IzZLnTNM+zu61PRK3IFCqzeOsR17quAo6WMQwv1xDd21KFcPGXcHS8WMcv0+wdSArnd2DbRDeDMHUXpjnsL/HIx84OVMQvHoponP/Wb74iY/xH/0nUqBrtDvGzFzcFfndF37Q4zf/25/jj7/yXd59U54fc8cavPTyeWnObcjnjI5Eo/4d7Z6gxb12r91r99q9dq/da/favXav3Wv32r+Hpt2t6Pz/12zfrx786FMA7O9tYRkNPv3Tz/L2RVmhOehukQwSCjGic+SZEw5IQ41S8auicUhnMUC3daKxvCnrhkmel/i+P5PuzQuBEMyIseE4x+7Y
GE6HSGnhf/LHNZ78sTYvfCVmZ0N+V6uxxMZWj9MPyu/5zGc+xUOdJV6/c4PausyqlHdifuf3/pAnnniWTz//LACjdIuv/5s3CXfl7RprQhy5VFaJNivhahzu5+h2xaM/JrO93pLF5QsX6d2Sf1aYGh/64CmS3JyZtNq2SxSm6NOKg6vXZv0pKGf4da2qMC195jkCoBmS53FUIjc0naLMCBZbjBQMQtcsKmGRjaaIeVXZ8T3cOxmuwjsPM4d6ULDXH3B8XVY1bMNmb3efUitIFYdt5cQ60yhkqaEkiPtjovgWReVQKq7Gcq3Jzq0pqRVzSnEZdrbGzM8tkyZHvkkTSpEgBDRUtrnCIo0TRFkyUdyseicgaNfpKY8gz/NYO7XAZH8Xbah8b5aXCLOYYBqTK4hKEUPmVxSKO9EJHIwFh2ksmG7JbFPp2BiFhtmysWoy+3P2sWd48/tv8qEnZFmzGpW89PV3eeqpD/LBn/20nMPXttjqXuHYEy69a3IuXPz+AbGh4+lHWOqA8XiAYSfYlszCZYnJc5/+IBt3rtLdkxmjWpAynZi4Dfmcfl2nFBnJxGOuIbNp7bbDlduXqTk+hZowpZ6TZhVFrMq2JWSlyfrJ5gzKE4c6w+EQx48xdaVukGvolEwjWd2aa3n4XsBwnOGa8p1PnwzY2dqne6jhz8ts/QNPnODCuTdnksrJMKdIQ8yWzmhPjVXbIhoLJuO7Xhy+X2MS9eksyHWFZjLNCvwc+qXMeK0dfwq/PaStNHnLSyFpcIKyvsqt6zLr6C2beFOH6nidg7Hsg3Z/QnvOoZeo9Z+UmFQ88vhHePJ+mf39+te+yqg6xCh8siPvFLtB4FWcXJNz+KCXUVSC0eEmxx6RGe+blzawbR8hMgoF4TIsC89zyFSFfTIq0DUHTbNoKThDXkQUqc/8sk+g+H43b97EdZZYmJcVkxvXLyGqlOWldQ4PD9XfpTz+2NOsrbb46h98U47NnM985wSnH5DV5fmFDgsLHtubIUsr8neuZ5ImETs7fb778nlAwmuiwT6iJuHMpxYXePyZVfLsOF/9mqwImVrOT37+i7z1zutUyotumo/wAkEYymxhFEV05ir6hwlzLQmhrLSYrc09XM+bidMELRuRe2SpXFdVVdGszZEWPSjUuFcVayd0RJUzHh1x9Bx6h1Mmajw/+YkfZ2//DknSJ1LGzQ+cPoWuC9489xYrCkJVa9SxvBorxyTc59yFVzBMjf5BSl3Jl9fndQ73SoJAjrFuZYgqp9VZ4JGz0hJjOjEoh9/ncNRj44ZcM89/5jk2bl/j0nuy+uMHOlmqY+g5poKDBkFAHIdU1V1fprIsqaoKPziC23iMBkOZjVZ7c5pn5HmO7RhQKqiw7ZLmEWP1vrVakyeefgrDLpkoo+HJJESvBA8/epqx4vFlmU6gW1iKdxalE4YHNpYN3QMpu354J0SnxdPPLWIogQdLuLz73gb9wZFwhIZhCjzXmhmwF6LEtC3yNPshmXWdQr0jgK5BvV7DskwyJWgRRymaptFqtQiVOafnuVBVJPFd4+08zykLMfMkjJOMoipYXVuazamq0iiKbOZlZtoOlumTJnflqEtKRJFBaWC5meqXjCwxqNXkOPzY51d46c+ukowDTFvRCXRpcO+6Fsn0yObAwzRN/LqCa2mCIk9JonQm115WAtCphMm6Ek7Z2dvBaS3jNJSNzHRImunU18yZyX28b9DvFjP5/VbbYzKakucCxzwyd82wdIOsAMdTUui6zrH7WggFV9q8fUA6bfLg2TZ3bsq90q8ZiColyQSraxJu2x8OMGyT4a58JtPIcbyCNNZJFd+p0dbRzQlJDDUl4lEULlEeoylkgF41SJKMdsckUjBLkfi4tRRsQaGqL16thm7pTKYK51W2qJyEwHco1J4w2PM4fd86W1u76EroYzIqWFmzaTQk1PXqlVusrcyhlzqbt+TZYDhyspmadUQl4hd+43kOrr3HW38qq4WhqUGeY2r2DCrsWA38wKLM05kAmufYFLmOps7oNDUwtYS8MtAc5e03FXidFj/5y8+zvi73zxd++xVefe01agqWWIiQRtPFDyzGYyVMkQQUZYyoyhn/PU4KPM+lreBo02FGlfmUZYUfKEGbyRRds1k/fowb128BcOLECfzA4p23pZVGrR5QUVAJ7qK2zIqihAcffRhbCXbcvH4Dy7gL5R2PJlS6hqnpRMoM3HNdhBC47l2DYsMwEELMKs6+71ORkSYCxz6yD4mx7RbJkWm5VdJsz5FbKTVle1IWCdsbfZ5+8nGuqZLlNCzxajZTxflGq/iFn/lZPv9Ln+P73/sKAN/4/ZfpTksefewBmh0J57t+IyNnSjmWa2H76h6UFk7dZuUBeabsb+3x2Z/7eYL8kMvvyTjsx7/wAP/jf/lVjn1MxrDZQZeg/TCWtzeDCmZphBcIglZGZ0GWbbu7Ffvv996squqD/AXtRwIWqOtQxRJS5RRQVj3OvfIGRabwwJME39TJhUek3K3bGBgGCBUnGoZOngk0MsSRkZpuq1Lq0X/KOFE3FA8L3CBj2reYW7UIjssg8OIbgqxYxXXvML+sNPSjMb/8Hx4j1g7VP1ixV6zz6qu/T6BU//7q3/1Fnhsecu4Hl7mmoHuVN+Ujz38EcXAGgG/+0R/TaEM1DbGVya4oSlY6JoarEYZyg6hZNZZaDc48IQPcvSjj0rvbmJZOQymrPXT/GrfTW/R23dmBLR3p77pya1WFhkFVlTOooFYamLpx91AyNWzfYzgIyfKjz+RkIsMNOqwvy/dL8pBulSL2ZJDdbC7R24sxTUgGcpHt9vf4wFMPsbO3S3+oeG1bI1rzLuPxLQA6/iIf/9hzfONbb2J5RwbBbYJ6hG+XTKZH3iIacRwzHh8pg0nMflWVhIpIaVoOQkivrkZbLVgtJy0jPCUGkJUpV9+8iTDhqQ9IqNvQEuTbm6wurNFXl7CpiLC0FNeXQWiqaXRvD0lLQa2tcMNJDNYcSTcj7MrD6uLg3zLXaqM1ZN/d2Uv5hb/3GY4vPUg6lGMV6wfU2hYX39jjySclXHG4U/L+5R1oybEbjCpc38a0bAyFN9YoeO/yRVaOuxiO/P543GBhvWLtuAwCr1/dJYymuJZJpgQfLl/ZImi1WTtmcv2y3MwN12QyiAgU16+qNKgKDvdiTp6Wh9WwO6ZRD5hONTqK+G64LhQVn/3YX5L9kiW89c5lThxrMlKKnnnssrr0FH4tZbcn+T5XX77EAw8tcXtHQs+ay3V273hUg4hCKQ9NxxGu06TVUZ490wRBReC3OdiR88xxPBxbI9JznERunL0b1+haEV1bcm/OPJxjjwp6kz65Mue1pwanl0p2egMydTBMbQhFRa54J+gl4UhgJGMainCeCJemvUKYHlCpQ7VpV0zDnCtXFXej6aObEXMNl/G+3BPqjQ5pmmPZOqaCZ+VZCW6B4vjiuS5FHmOa1uywdLwFTh/7JOfe+S6lEtCpeyeotQ7od5U/VlzQ7DQZjabECmahaXD+7Yucfz2ZmTDHuUZ3EHHjG68B8In
nPsbKsSZ2Z59cARXSoU6SpZRmxKAroWUPPflBGmbAT/zSfwDAv/7f/1euffkmo1HIww/JQ6e3f8Dl9y+QZQmdeRk0pPsFRWIy6MuDyrLBdZo0GwaWLfeE/nCP5ZU6o4FgcVW+38lTZ7hxdZcolHN6bs6lLGIM3UXocm0PBznR+zaLK9YMnjUd5ZhGwbFjcm5evvoDhv2U06cf4OPPfQqA48dPACW/+jf/Osm+nHuj0ODBs4/x3/2TL8l+iiLW1wPCkQVKZKNICgzDIlSXFr9uIjQ48+BpXnj5jwAYjxKW20s8//wXcNuSr5blDlhjzjwq11Vvx+bhM0tcfO8aviP7aX5ukZXVBc6ff4ulJRlkHx4eksRTHOVt5DoGVaOBEFCofdg2LWzbQlQZmqEuZSJBN3PaLbUWumMOtrt86nPP89YF6YVXa1eMDkOmQ0G3K9f/nZ0eT37gLEurMqC+fu4ceh6CY/P0xz4GwItf+y75FJq1DqG6qOnK+LuaQc0N5TuVzVRb87yk0uQefXQWVVpFVVQoNBqGpiNEia7bKNSj5P/qOqXIZ1DINE0RQmDMIIeSC2Q5Froyhl9eW+XW5m12dg9mimWmIWH+kfKwMzOBoadkIpv5bTmOTZFlVFVEqLwEDT0gqFcMB3L9f/sbG2SpiVczmR6pUmIhhImtYJPyBSuaLZc4D9W4FMRjCc/SLXUBKjPSpMD1Snb3JYQ6jis0L2LOkUHavhLuSIcBoRLMykqL5oKJoggTTyIcG3zXJi+P/BwrRBmj6zXSTM7ZxbVlJtPDWUKtVnOoipK9rS6TiUrONT3AxNVdgpo82zf39lhbquOq5+51++i2xVzTonco328SmtTqLl4zJlGqm0VpE7SgjFUsU6booiQalziKp5gWGllZ4Or2zN8IBGVhyf0RSOMxukgYH9ZoN5X5eH3Czs4GohSk2VHyzSaJfDRd7oGWY7C/n2NqCZZS07SsgFwk5ElGZ17O9bfe2ic+iJiM5Zg3j3t0dwvICxxlhF0xJcoFJs4s9tT1DN1wSFQyKTeE5LFgIFRSlsri5Jk2N/qXsBoyefP0xx/htQuvkSmTWd0WJFlCpUEWy75zvBItqZEVI1ROm/lFj+EkpsJX71JKATbHJs2OeHUOZ848jmlJ/yuAtbVj3Lh56a7ZumZSkmMqBU6AUstIC/jwRz7C/q6kGPQOPQ73Qlyl4gomKyvL3LlzZyZkUpYVJ06cZEfBkE3TxLZtxuPxzOy8LEuyBCzLmInHWHadIjeoNZVpmBnTGwx47vlPsXcg+alXrx1w/2Onub6zS4qMAYK5EcuLPk5DzoONq33Onb/JYfw6n/qsFGV6+mNT3jh/ke1en5NPyP30wbpDq1XjYEs+5/FTMdFhi8VjDV584V017Zq8f+5dnvi4wQOPyXNNC9b5zK98nBd/8BYArmmx8/4b6LqOa8tnP/FQQJpN2brRoK282Sa9uybSf1H7kbhceb5HoRb1MB0Th2Pi1GBOBXfhpOD0mWNs7u5iKo6dlgvSYkQqlISzaZAmgnbLQVMXKV3T0QuouKu+BFAJbZbJsm0XczrFNBszhZ0o03j//BYr6x4f+LAMtM+/u8XhtZzlRyVv6Otf+Ra3b/zfOIbJL/6aDNi//c0LvP69DShDtFxeENy+g/fYJt1MvotVVPiGS2xXs6xfVCUE9VX+5q//Cr/7Z38KQK+/h2csMa/w/+++9Da+YTAeRaShfPbHPzDPE/Pr/NmFl2cHUykE/FCVSlf/X2k6hZr0hjDQ9HJm1muaJpVm4pj2LAg0LQtRJGRmweDa0STSscwA11bfbUSYVkWKTXciA0zL1rhw8T0M3cJRWds0jtjbOOTYCRnA52XJzjDHXWjNeATjvQmNIOH+BZuNWFY+psMeSTqdHWaaXiCniUGlKpFaBWgFrvdDqjEV6JVOXZnujcZjnFYTvTK4syOfc5yOEVVKtLSKUCa3ppOTlDFRKYP6idDRNZv5xjzTQpnVCoEmdNI04vhZOTYVBetLbVyFwc0HE0qRs5d2abTkBejcH95kfHiDzlKDH/SOnrMgWHbwlemtUa843NVZXNQJlYpirblIZUekpY1myP7TrZDhaIS1Iw9Go6qRJV3mWhqDvvy7yqioBW1q7gJ5IgVXhJ7hWXPYhjyodDPHxsE0K6KhfCbXqXD8MaNRzGhwZFpa8Fv/4L/CVZLq//S//1/IRML8wiKPnJUV2m/96csszZtMkxHZSH5XbcWhSAt8QwYRjqURNPaxqjYoE8j7HzrL3t4eY1UVWz02x852H0N3CBR3g0pnkmZotkdDHdiZpZGmYxxH/t2dOylpbPDoA3Nk6m5eI6WBixbAbVWpMpY1dBGCqt659grByQHhjTf5/YsyWI4NF9NZRdd3WW3JDV8vatSdnP2+fPD1zjHSMiV3SqaHKpHh6ei2TZlNcVQQqBU6rqbNhIUyUWIbBqbpEis5pqy02drdRNf1mUKjaer0D3SmyhrBCQA9JoqLWbY5jEaMBhMCv0apNriMMWVeYjlyfXz72y9yc+MMD39gibQp5/nZk6dJS5Ot/ja//vcl7vy1Vy5xoOl87Xd/G4D+YECrXcOwMoYq05ohaLY8tvc22D2Ul0xfc7nv4QYDpXheVQZ3bg4xdMhq8uBfXTrD1uY2uiEQpdznL13YZWW9fqQuzp2NMUEtwXJMSsXx+OW/+lc4/85Fbt2+gam4WY7tY5oW+0pEYGGlg7BCdnrXuPUnUslx2k/56b/y10iEzld++/cA6I5GnDz1AZ5U+/d9JwMuXXgXw9yi1OR+UzKhopgpNGZpSq8XsrVxwE999i8D8JWv/g65PuZrf/wNTKX26nrvcnztPqpIcg2r8gqbt27TbvlYKtjZ3tokSUd0Om1GIzlBw8kY09IolFLENC9wXRfbc+mpDjUsiziOsW0bUcnxK0pdqr9yZCbvc+3K+8wvtumPZL8sLS3QPLYIjoMZyGdYv9+ntphy7p2XAKj7TVaXl+kOp1y+IRMirfV5RlsTXNNE0UUYh+GfQz4IIdCEQENg2ncVBLMsw3e92efyXApKaT+kHhjHCbUgoFKmnq5jkSQJ42GG7R5dpkqEELPvidMM2zbRTYNMWSoYlpTXdhxrlj03zIrJdArqvCj1ELfu0KnXmU6PjHAL3CDB0B1slaxybJsknTA3L4O58biP57tESiEQIIszyrLEsjWKSO4lOjZlXmCqMCrLpQl2WebEKmnguAae64EoMQyFAPFsynjMhdfkGvrJn/0JdnZ2uHjhGvW2vKS4mk6rPUem1ku3P6AWaFAUCFXlJ0vJNB3Nms4unUFryNX3Q+qKl2mbDRrtLuHYmMl9p5nOaBLiOoJuTyYfiqwkGmczSXfLlwI0RZXTmpPrcXBYYtnzWKZBqckKUBoKKXGnLjt56iJsndWHIxp1GTtdOTciSsCtmWiafIbhIKTSEiw1f9ymQ56ZVEbCdJKr+RmAlVGkuuTUA5owmYz6LCzLpNrh6IA8i8nCkrqv5Em1ENcxcWoWuTJl370y4bkff5z/+j/7WwD8w9/8hxw7eZ
zHHtF595Xrap7VEWjEaYKhVP7StJKiRIqWqZUFeqaRW4JxppIPpUfKgDwO+f7r/wKAnbe7aJozU87T9IrRKGNuUZApK4aaE1CUDmtrTfZ3Zf/V2x6279Hryf6db5mM4hyRJ6ytSiTAz//8r7K7u8+LL79AqKpuF997h8lYVjblPzjF810Q4m6iXbPRqpg3Xn2NIJDPsL29j2d2MM2jz1Tcvn0bDQOhqmmuY7K1tfXnzL2liqk2U+au12v0iyGVZlAdWQ5kYwy9ycKanAenzixw6b1rfOuP/4RHHpWcXaPSydOMaTjk4UfkZSdJK4b9IadWpQCT16oYJud5+YVrdIdSIfmJB1s0tAr3RJ1z31e8qPbDZMWEsFJK2a0mNza2iXoZj39CXsDMPKW/N8G1nub003K/Pvf6JU6t3I/YfUXOzSKi0WlTaQlLyzJub8/Z3LiiU2tqJArN4ntNItRa/Avaj8TlKgwjwpHasCqB43bIopSta7cAKEuLne0Kodnoql6rCRP0CnHkTVGU5EUlZbhVybrMSxV9351gui4zYaWS5ak0HV+3CCcFdXXhqhwgjdnZtmhcU670xoBEXyTclTAd3TjHcw+e4Z2dLi+fl5C88O0hZmeHoSgJhzIwPDs/x0OPPc528iIApusSJV203CI1ZbagyiOqUpA3TQZ9uYpbsU5t2eHMCQmX/PrwZVbnNDS3TXcqs8Qv/9l5zp55EL3QOTLuqajQNciVCo+umwghD8GjS4olKpIsxVaZibwA33IQRkpzTW7uSw8tc/sHB6T9A6ZKNk4vcjBcIrXogsqmMnO0sMRTErKaXhJFCZapkRZHMq+CoNkgVL4X07xP/8IEo9IxlUxw1NqiN7XJuhmVdqROVFEUBZW6QOvoNNou3YM+R0kjs7BAVGj6D20iho2pB0yVr1YcTtEdgyqBRD3DWnuOUqu4dOs2tlKzsi2NJb9NqCTczcog1aAx7zJQm59mmuhpRlmVTJR0TRiGjHtDAu8HLtQAACAASURBVEduIj/9+U/y8INn+NK//DbPf0RuBh/6yIM49bP84OU3GORKotYuWGp5oDbubBJRr2XEY5vltXnVBxlry4+xvb3L9h1VAWq3MUSLg20ZvPpBTt2ukYYRNV/5Xmgeuj3g0uUhlarkxqGGzghNEeonw4rGvI7fcciUulWzeYybN2+xsriKpzIZK60cPd3l//q9rwMwnu7TbJvcvHqLO1fkM9T0mKa/TWC5bERyHBodjZ3JhEwJNww265w8doLeTg9PHbzbt6dopqOk10GzctqdGpNhNMvox2FMUNlMwwGJylLXnDqd+gK6UgFqhDliXVB3HD59v+w74XQxln02b1XU1IaYZTbFWMdSfTBlijmxwF2gUp+Zb80x36i4M/GJVAZ4Ot5jodGirfp3tH2LWHj0vSZaTQbLWuhR83xs3UFXUKya67O20mFPSfmXScbDj56lLGpcVd5wC0urTOIDSmJG6jJlGRqW6/D4Bx4F4NKVi5SiRNdMBn2VDdVl9b4wc9QZh4h1FpZaxApK2+jAYLRNeLhKGcpnv2lPmJvXGfcOOfbYMwCcXB9QX4j45h9fAKAdCKJpzOraCaZK8fLE8WV8dwmnuE0Zyj3IWcoY9DJCBZXyHIEXCNKJNoMTbW/dxrBK4pFBrlTpRJVxe2PCr/zq5wD4nf/zJSbTjCTWEJqsXC0trzG302Vj8zaOkp8OpyMCb4VT98sL0e3NG7QbdbKsQFfZ5lbT45UX/4Dx/jMUiRybU2tr1IMeB+rScnzlNB/+S8/ywjf3GQ3kBV0YTRAaqQqebU+jUbPodfe4ekntLamHnms41oh2Rz7T6vEm2zf2GCihgXq9ju1FJGlIqoKdoOYSJyM8z2NZwTP3dndpBy6ZqrQ4jkFR5BRFjq4rGHueyeQXBhpHmUWBobnkCkJm2wX1msMbr7/OE8/KoMWuNZjGQ0bbm5gq2GnOW8RZyKmHJIrCoCIeRUzGCY4Sj0qDMXNrNqtLHUZDGfyPogjLsmYBWFlUEnpXlVgKvmyZDkVZkuflDCLmOBpRJH2tQKJLqqqE6m5yE+RlzbbNWYJQ0ysqYZCrM9r3AnRbAwSLi2rf2N6i1emQlwWZkkZOEoGm6bOkjOXYVFpFiYPtKT+8jouZthn2Qzqdo2fXeP+9IYtLCvFimuQZBIGHrdSIW/UW/f6QJI7uwut1QZyMEEpICWFAKShEimPLPSivepw5dT+etcDbb8vgzfQ0qkLQUgqfw2HGxsYuDddmOpRr7dNf/AQXL+wwGsr5Wg9chCjQK3MmNS9tDTJqjRp5IefCZORRq6cMD5TYkV3h+QG2WxGreVZvWaSF9HqaKoW9mu8QxV3I5Rh77gK2XaBpo1kF47A3pLJGMtGnrBB8NyEvwa0pGwIMdGFTMseVm/K8Mp0AX6uhkXOolGNtpZCaKyGsTrtNEkWUhYmvPKx8TzDqeqRxF3UfYW7eZWFlEa8u197TH11naanG7vWM11+UQXatWRKHLq3lDq4p576d16g4xT/93/4FAFncozP/MKeO3cdrlYSjCSfEch3iMKFd78z6OAxTXDWHNV0mnU0DfAXTzcICF4e5zgl0da5Z4++ycX04UwYd9iv8oE29NZwlb4bdKV6joL3soLROmIZ9wshjflHOnzIf4/gZaQgnT8qz4OrlbYSeMxwOWVyVc8iyCqrKw1YIkGk4wrMD0nSCUAkJy/TxrIx3zr0DSi3QMH2EXrC8Ktf/+Oo+lmVJtJOKG7Iso6qqWZUqDEOJNqsqIpWYLooC3c4pUoeFeXlpeebsCb71zVe5pgS8oiyk2Wnxl3/xGd54TZ59Z04dZzw5JDssqQt5YbaNHFyBAuHQnuuQDEfUFjJuKHXbOWuJ0+0616cx7T05OczqJv10TFopdeT+iGxYsjWNWLtPvt8HP7DC3/gbv8yl869y4aKEca+uaIymN1i+TyYkpqkgGlckRcz0ttwD+9NlPvfFL3Lu3IvsK3qGsP/dFyu4J2hxr91r99q9dq/da/favXav3Wv32r3276X9SFSuqCqGI2WCK0yqPMOgQneODMNS9rd6NFr1mYFmaSc0awWacpsXtk+VZFSFhWmoqhQGrm2SFwWFUFLMGghidO7C6PJaiRj36F2VN/PWyhxrD50krjJuXZXZl/aCw6X3fsD6GUn8PbH+AM0lH7ZH7P9A4jmNxSbFsIkuQhwljtHuLPD6pS2GSmwhaBVMRha5yKkr13G9dKmMmBe+8U1GIwnh+uATn2R3sMVX/0jCBOudGt1RhlYILIVNFcaUc2++Qt32QWVINQSWYUpZTqDSNAzTIEnSu5j2UlBaFpqSWLeNnKwJaWbjqGzBe+duY/kaeujjZbp6hgZlVTDsKylRo6IyBaZlzErGQggs00NUdz0zNN1ClAbTUN70W20Px/fY252SprJ/O50VckJWzj6M1pfZAkqb7uCQUqUwnFJHFzpzixa9vSMZTIHt+WRlTqQ8kGp1nTjukSgpYW9xHt008J0Ghi6focxy8oELkxGmf5RZDRj/UBUlDSMMw+LWezdm7
+ItBEyTEfWFFtlUQSGKkjweYilZ1Obc4/yj/+bfcGJ9AV3xJG7u7vDw8v1obR9PyMx8MRKMpzruvOy7sVay2GphNAR5oqqMesLtW3uYlCx2ZLb+8E7M40+f4fx7EiNs1TX8+YA0jjAq+QxJlBJHGkGrwVSNV1BPqMwlRqNKffeIqqoYDqaEI/kukVcw36pTCXMmJNKpNXn1++/SrinT6cUVPvLRJ3ntjdfZUxLuaytL4FjcujbAmJPzpT+esLC+wJUbMkP0zNOr+NkJbg1CPCWSEvVvkWbVjOCapylpmZOkYCmosFNzcetNJgcpmfLeyEcmSadEG0gY5LGgxiS3eeuty9QaR9ypOu6BxfWDGP/I1y4ZEuR1ZsnmPCMvC25lOc89Knmpu7FJ2b+C6wruV9CEV4fX0G2HtiWzlW19wo5uIIIGWl+R+h2XST7m9NIxarGEn8aTmNsbB5QKQoZjEUc2mTNGV7mtWsdjMtxHeDrLx+T3G4059q4fsK5ENnY2NzmYHoKjM1XVGC+TQjWGEMzXZGZuoEfU/BWCupzD125co643ePW1N/jCz0rrgBPrJ7l4/h1EMMcL33kBgMc/ej9LlU9TmTlffmeXOweHbG1u0wgUD8MMeef1TZ778OOccmWW8cq7rzOcaKzU5WfWg4q9zKfRgVRVhLFNKDO8usdEiRTMNxt0BwPevSArYP/pf/HrzHvH+PKXf5uN2xLe9z//T/8DvtfCtVzyQol/CIvCSGdGw89++EnOv/19fPsEi0of+s7mTQaDAfedPsV//o//EQDNAOaX1nnpBy8D8Pr7N5gMNlk/dh+6LXkuvf4BlmuC8ggSqYlpuBQi5fa2zG6HWUQ2rHDsRSpHVhny8RLPPrjEu4Ykk9/udalrKywdL7lzS3lvxQXz8/NAxfamhJaaNswv3MeDpyQH882L77K/Neahs+szHl93mmG7gizVpQEoYDsmRZFgWke7UoXn24wPI4pEjkOt1eS9S29x6tgSeaoQEgKKOGBjR54xnfrjxOMRd/b2WLeOuG82O0MDv9VCL2QWPhptkGc6IldnqJFTVSaismYVqKyI0YAiB11BOCthYRouZXkkkqBghVox83iqtISyEuimgVBwnjw3yMps5ssUtOuE0xTHNSiO4GGWRaVJOerAk2iLIs6xLAvXU5X5yQg0jyIR0hgHyNMBrtUg18coWhsH+2P8ukM4VedxZVGUMVGeEKmdP3Tl9xq6STQNVa9r5IUU6gAQCJI8Q5QWWSHHTzNsNm/v8IlnHkYoAlWW5fh+TerCA69880XswGFhaZ5c8T7fe3WT/mDK8VMLak7vUG/4JPkulopvHN/D8hz29izqvtqrl8fEiU6grErchk5vK+b+RzQ6C3IctjcNbNvGc20sxb8djEfMLddpLsuf9y7B7nBEvWby/7D35sGapXd93+fs27u/d+2+vfd0z/T0rBqtM0hIQhgJBAIkIMIhpooEqhJTlbjKLlf8R+y4qNixQxwSL5jEgHGZIijYQkYSEpJGI2n2RT0zvd9e7n7f++7L2Z9z8sfz3LeZIFOVKv+hP/r5a+btc895znOe9ff7LoNbKjOYVnnfhy/w9ksvYFVkdmDS1TEtn5Hir7mmT6EntBd9uiP5/TzHIQpzDroJrltT/WWEjoOjTG93N/YpTFhadjh6TL7z9Tc3yCOXWmWZmSVhckHbwi4tLr18B4Cf/RsfwWrk3Li2haP4XCIx0EXOaBKydFS28dqxOle++UfMQrlfdJou+3cv84o9wlcQyuF4gKN7BFV/Di3VNA3b0vCVd1poaeTmhDIDx1GWA0sp3YnFqn6EK9ckr871mkRhwlJT7l0GhU7enhLlOkunZdsde9TCESXf+A9DVo4qyGYMVlllZ0PugXRhUwlWKbQRb7z5KiD9P0uR4djmPJvU749JipKTK/I+aayk17VyzkUTZUmYpTzy7ofobqj1aRaSphFX31SWKqaG5TiIJEVTe2RNk/6Dh33TxEfXMyotl/aCnBO2r4aEacli2yBRtiBZbNBuu3MEg2dY3HyrR/fWDZbOSwjnxbMP0O+f4uKTVZxM1r125AxaXadzV8E1NzaYiJDStdAUJ/HN7YIHTz+EP+swlCh5XM9Cz5u0l5XlwbLOYstDaBaFWn81y+PZFz5H72BnnpWemgvkSZPHnpIosTjf5sqbHRxzmYcekuvc1Tf3+O3f+B0WVn2qdSXBH3j0OdQH+Ivl+0It0HDssqFUVkCX5LjpdJ6OLooco4Aoj4nVIaJVM1hopEzURlEULkkY0aibYB16w5gUQmcWxqA6ioTCFaTK28hyTKahgAxQxp/YNqdOnKa9tkBRHqbbxzx8NuHIBdnYnv+DfPULz9HbuYlryMl2Y3sP3a2hG8Xc7NBJdI4/5fHE+z8IwEu/9yrD7i6lZZCpQ4MmDAzTYT8N+dDHPiSroLvs7W9x6RWZBrV0F9e2KEmpVRUnqR9TZDGmkXNPcl863RsKg20Y8uCT5/kcWlOImEyHQPlJxWnE8oVzpLmGr0joek0jjQXpFPaUCk/FdeQhSsEE87yAQhCn8fzeh8/WtEPmmyQotxbaGEocIIpSZmFKsjNh6SGZQm5Ul9jbu4muGTSUj0BiGPS39zh7VPaNWMsZhF3Wltv09gaq7gW6DUlizxdVzRQYtkdVcfa64zGukWHqVULFvWkuVNg9uMPZ06e4dVUe8GaDnFIvsBX8xbIsDF1yHnLlhG4HFs12hTjLWVuR9eqMe7hxypM/oHy1zDbbL+xiOD7v+cGfBeDffu63ONi9hW/4FGpxzJIUXbNZUunoE2uL3LqScOzsAnmijIy7e0SpQdBI6R2oNHSoSVK8LheKwSim1jJxazMO9pQ6WVUn7PmUxJS52gQmBk7FYzZW5pzZDL9mgF4QxfIaz1ogjEY8/OgZKoHsZzev3KBZq1NvykWhdzAjTQxKe4tqXfazt18uWGxVWVqwuXNHfhvLrZKlcOKkbKef/PQn+fef/yJ37u6DIQ+YR47A/s4MV23gl1crbNzdJ0tM8vLQKNIgNzICtwmOXBTsvMI0NYkiOdk+ttzCDGxuvLFDNVC+bF1BEZhY7UW6MyU6URg0Wk321ebHSDXqdsB2knK8IQ9SH66YPHf1bfbjjGMrsg1sCx5oW3xHqWl59oz9SCfTF/AVFyYBiumER069l1ouDw3bWzdYPfM+rl2WqnyFnlFdWEYUfUxdqWdqNZ564kGefelFjp9QYgf7Y3r9HVaX5DXdXocsDHEdk+lM+YZ4LuGsROgCW/FFm3aNZx5/ii8/J4UNQqOPHpWYusVsKvvdz3/2F1g6uUh/f8BICcN0t2/w/As3+ciPyQXm7dd3sOs2o407nDsl59PN7Yyl6kmmWU7j/bKtrLtX6d2JCQtZpxOtJTbGGtXTFYQij/umzt3Le5x4aIl4JvvGzasZP/ijH+BTn/4MAJq9gOe4TLd2+Uf/y/8AwHA4pN6oEiXTucm0rkOWaiiqAWsnWjhONucxAQQVm9moghDl3Gh7LDJ2D6Z8/ctfAWA6WWe4NebGm5e4dPtl
AFzXpcxcslz2KUqDLNXBnFA/5G8OU+oNm0EvZ6Et55dT51qs3zwgV8IG08EMHZvF5QChgjnHT5/mtRdvYOlizpUybYNSq/MDn5B8rmzQ4T/83/8P7/nQx+gPJQcqzId0N7u4vouulCHSNEc3DISCwxmG8o4ybAmNB576wHvp9Xd54smzTIZyvHc6txgNZziBvCaOC8azEM+r46u5s9UyuHVtxGc/80G+9IVvyj48GCLGydzfRtd1hCgxTX0unKBrJiUCkWtzT5my1BVnRb2vUeL7PtWaT5ootc7ZjLIssWyDODk0Ei8xXW0OY7f9KogK09GYWkt+h1ojYDDs0agt0+1KArtBCVrJaCoDC+12kzwrmYwjbFe2QbNVQRQJpdDnIhciFYjEmKsMinyGrpnYjjkP8KRpLOGCeUKu9iBFLmHrhwp4RubiV3SE6fCrf+vnALj29ov8yR9+lzgGv632M6VBlljEKqB14vgS42jEdBrTqsgAwWB2gGF5uIFS16xJblmtVmN3U877IgHNyRGiZO2khLEdO63z4p9lNBfkfLCwpLF+bczyskeh1u3WSs61yyPsskmtIa/rDvq0j1SZKWU3T2tx926E64XUDHmwqC0t0litsX/rJn6gzM0nW0RT7x5ktSiJkpAkdlk5Ifur347Jxy02b+5Rr/mq7TxKI2WmhCpMV9CorbK6LKjWZD17nYyNq/sYpUtQl/ev1FNuXxlxKEj5gR95D9u719l7G6p1pSiYZhRJyVMfFAih+pAVcO1yQqQ8144cWcFyCkxLMFYaZZVam+l0CGZKV4nAWFoNITI8NWZszWIymuFUPSbKTDqwXKI8YprltNScYJYjqqsex87Ittu8MsEZG6SWSTdSfPx2k+lGn+nQwVZQT79uMOynVCpyD6QbKYZZomkGlsqFuIbNZDSloCQ6NBHOS8pCB8XrW15eZDgcU4gEx5Z1iGOdhdUmFx4/yda6DIpeubyJqaU4KhgvzIxYZDhGSRIdQj91klxQqGCgrQcYnkamxzz1tOSwdjZGbN3YwvdrLLTVoWWcEwqdxaOSStPvbjPdGyF0l5UTcq9U8QU7O3ucWnsfk4FcM1fP2OjVMxz0n5PvWzUIZyVGHmAbSmkxqZFqNfTGlLQj5yWzZnB0wZ8rO47DCM+s4HkeUSoh27pVwTJ9Wu2SWMFkR8OEyYHPuUdUgHJyk4XqKS69OKR3INtp2JsQRSmnzgeMVfB9PKySdSff32qBmqZjKhd5oyzR85xBkZCpE0OZaUSajo8+VwKzTJ/ZNCRV2YkyLzA1kzCMqDaVgkkpEGpTfJhBKIoUy/IwVHRNiBJLt8i1nDSXA902TC6/foXKVoXzj8tNxJEzLS5cvMB3r94B4Nsv/xNW60ep1y2mA6WKkxU4WYTl5ZShnCCaSxYnjq/x9S/IjJcexxS+TtJPMJTqiKmZkOssNRpcuy4jpAYWSZSysixJfmZpYloloiwYdOSGyCkKNKNglmWYKqJflhILe8g/EllOrqQz54u64ZPlIaUKuTm2R29/SBgLVpflfZaX2+wPBkSxwPLlb0mUIvIUO1BRxyzHpJhztwBykaLpFnmeY6jFS9M0xqMBtnF4+CkJfIMEGPXV4XVwnYofMBmFnH7/IwDceuN1WrrJww9KrPGdzha37+zgEEnOHeA4At3UMDSXWBHDzaKg1DK2tyXPRXMsTM+k3hSkioelOS4nzq+xfnMTI5fv55cCYdgIFVFMsgzL0Sh1jaVFucmtL1bZ2rmLngoKTU6Svi3wgxqVo7JOnnOKz/zEr/CNr77IwaGKmuES+FUatSaaOohm+YxoNJlHMLf3e+gtk0uXX8JT33PtdJPJ9oDRCHShTD3tjE7vBk9/QB7Eb61PGc82abZP4lhycRz2p9RrfbKJT4Zyfz9eIZ6lRKacMPyKi1YUiMwEZcAYZQmCjI3tLVptudkJlg2EFlMo2XW7njHY67LcWCSM5ESzcsLm6BGbcJjPJVsNK6PfGXJkUYaW2g0fKxDUVxIONlX2Tl8h1iYUytC6040pdReBhu3JBUYzIooURDSCUtVTE+hagq3JTcX2zKLoJNQ0i+O2rHfoQeY0GekFtbasu5ebbI/HoIjibm7RtRKCEm5trst2aZjYZpUnLy4w68iIXrNtcXlapdOXk3tjsY2IY3Rji9KUC4VHQBnoDCddHJQITN2F+gLLy5Lncm3rEkulQLds6kvSduFgOOGFF76DmHWZdWU7HK2uEvYzxjuy/6RWycKxBdByqrFsl93BGN/W0QPBhcdk8GbU6/LmziZhLHcMjqhgujmJLrDVeH/j8ku8Z/mHCCeRPFwD7/vgaVbqLt/8lgzmlGEHK7FxcptsJCPJNWPI8RNH+Oqzr2G9LdvzM3/9h3n95bcIB3JclSkMv7qOn9jYx5TR9+0Op548QqkVOFO5efzUT53iocfeyxvfkM/75re/wOrRNY6sneVv/52/A8Cv/Mp/g+v7ZKmOoR8asLqMBzELyly13+uhaw4f+ejHefU1KcEPCVke8su//MvEAxkBfum161x56xXOnJKZwD1rjR/5yWf43//Xv8+Ker9rtzapBv6co+A4OoZVkgubsTKdLguD8STFr5oMlYralWyH7rhkRfEdfEfnYKzRuzFmUSllrTYirMIhaDksr8mA0vrbG6w2C176hjSzfeSRD/GZX/wlNvsdSsVpOXnsLI5ZZW9jB9M8zHyklOj3OLNqfs/ybJ5FufTGJR596iF6E4MbN+XB/uKFBWbTlEhlXpLUpup4MvusBCamaRW3brPX3aZUPJ58KucvodZMTdOkqIXG/ICiaw6iiNHQEYrva5pSlfdQPawoSiqVgLIs0A41sjGU0q9AV1ytRtsnSWeYKrKsaSVJlFOtNonUfJOKKa1Wi0IUREoyGqAsBXUlQjGdJIovkqMfbh6zEtfziCY5I2XZ4toWZSmwFD9n9UiLQX+Ehjm/d57n0iRZFPesLYQUj2qo79CPE0qzimvlvPBVOQc98v4f56M/dY4//N3fJ1AR/LyIiMPwkCbNxt1dhKaj2Sm94SEnyaHIdDz38F26FAVS7EsdEHzfp7Lg41gZP/ZpqfZ46eV1dO8yin5ISUCaW1iuxX5HfvfBNMFzXcLJiKI8tCqw6HdCCpVdTPOESt3l7MM+YUe2UyQiLKvNqF/iOEpmPQ0wrBlmGahvnmI5JiMBkRIROXXqFKNim7UTLpt35PfzXI1cjPENeRjRc418EjExHJZqcj5t10p23Cl6IjjYOlDfYRW/NiLw5ZryxktXMYoZQcWj21dG7ZrN0ZWAgy2NcCr70I98/CNceunziEOBkP0ILxDY/hSUUNQk3KYgp+4vYCzJcbu708N1PboHst5HFitUqg7jZDBXhMyyjMDy0IsU4zDwZXm0tWXSbfn/Txx/kP7GHgeTEr+rBGxGU3RMAj/G9pTcfdVFpAl+RaGKHIsk61GWPsZhIiRLochA1+cBCM/xcXWLoeqvk8lMcvQMC0sJi+mWQ1KkDMJ1SiWzfeaiR9jNKSdKTTeZUHE88iyl4spvM4771Fq
VudhZkWQYhoteVOmp79nya0wqAdMoIVb7qTCdMhvrDHsywKWbYyp2jTRPGGzJb9V8YIH3PPkAz379OUzk86qtFqZzlaZa/5PUpNmakgxsPEMe1CZJBKZGahRUbDnfbG6XrDYtyvxQv6HBwnKFwbCLb9bV+wmicU697uGoIPqRxTUm7oSgKus06i+yceM2t9d71BpynXnvh06TipTXX7xNpvZKtXpOXx3Mv1e5z7m6X+6X++V+uV/ul/vlfrlf7pf75X75T1C+LzJXZVmAgn1ojSpxrlFqOro6KluVgHQYodsOK8syyliEU4zCpTxM0Rc5egmmY2OoiEKBQBc6jmMrZR3Icwlh0PVDjpBJWSQYho6lYHImNo2KgRA5r78qM0mrnTVuXOrhKYnjDz/zKC98+xa7G7fmEeEEQb1pY4s6xoKMWCw9aLN/ENNSyZ1BPEZ3wXIqMvoAMtqnyajbaCAjQvV6g0k0ol05lJqvUxYaUdhDU3VPkxRR5hSHRiLIrJVhGBhKmjkTKaZpzGF7AFppEARVpgN5Um+0KqR5TDxJOChkhDaMEobDCUG1Sb0iT/29aQff9+ey9pmWU2olvleZe5uASZqm6LqOqbJnsySh4TWoKF+fNNdIUvls1zk0O3bpH0wRRcLXvvCn7+gff8CfzP/72HGLMB1SqOiIVQo6WxlrZz00pXSUJgUin1GoqOrxBxaYzPYocCGX7XDtKzd46rPnuHDxOPGOvNfezQPGWZ9YRaTlN5XfcUeZMs4eaGOYNo0THgOVkhd5TpgPeP6PlQRp+DV+//98jicefjfnz0v5VJlBjAkqNoOhvFehZ+gmTBU8xNWaGOWAT73nXVzZlBmTab5Hw1xgsCuoHWY6A484T/jOSxLOZJgLGI7O3p7AVBnaquMwm2XUnQD7UIloKkgmCc26zMaWxpTh3gzLMTAs+S55keI6LuNhQolMpduOYNQ32HWUOqHVpNRHrN8ZzXkRR4/rdLod+pt1Gk2ZTer0d3n6449hBjKqud0ZUGoRg/E2j7/vJADrNzs0ax5GKfv5/naHdqsK5hiUEW8pKlQCnWSgkytoW2bpNHyHTMlIlU5AzXZJdQ1dRfhPntXJaj4vXZ0QqqifuyR5gYarMnyZi186GIsWTQVnuLMPZxZXCGiSh9cBuP5dnai4w9KSfJfpKMLyDBzjBEIpJlXthF5ZMCOlcCQUcv16B7O+T3tZ9jurB8NqQhCcYLsreQS93dcxjAzHNhgrjzet5dE6/yAHW/L55BmdgzFeEGBo+2ra2AAAIABJREFUCp5lJmjCYmHtyFxSef2Nu2gi59Of+BgAl95+lc2piZvmJJ5sq2FvSNEfsLpYcvbdEvaYtwS9u4LH36XUrpK7bOxcI2uaHFV2ELOD77C/8yqNRoo5UuNhWJIaAT/yo08AcPnqJhtv7hHPZoR9+U0fPn+Gb335VU4dX57Pw5evbvPsK/8CW43/Dzz4BOu7Y5ZWVxir6O/TH3yay5ffotKoc3AgI9d+YTAcCz772Z8E4MbNy3z729/hu69fJ1FeNOODkP/jN36Ln/ix/4LasnzeD6w0sKsZl5XZ+pmzD/Gv/vmvc/vaLidPyQziZ/6zXyIISv6nX/sHss21UkJMcgtdP7S20JklIWVZkKqM86PnnsLeuE2uZPQXl3w6/QH1uo+t4K67vYSgbVNvVDELmbmKkuv04xxTJV5uXbvJ0pGjdGd7PPKYbM/v3nid2nKNg/1dhIKtFUWOZbtzDoRlWYiiwDJ18kJ55nW75EnGdLLP9q7M3tmOhuMEFIoPXKlY5Dl4FYtKQ0auk9ii1CzSqcWZ4zKDMOi/RZw2MRU8rNFy2NvtoesWuYImRhFUaiZJLuZQurIs0FSbySJAkybKhYIvWoZJGCXkhcD1Dv+uRDcNctW+eo7MgBHPrxGlIE6mjIeDOWQzFwmaVnLsmBx7o+GEyXRAveGSqsx496CL0ZMTYqRUIRPLpNk0OXZ8RV3TZzKdYeoFhbL8cG0HkefkqeAwgaDrOtV6E0s+nsWmSa93wKm1E+yMJFf6yu9obN+JqVSh35Nr+3t/qEnVX+P5r11T/Qyai3VsL2VfmdVrhsAxPEZ9CaMNE5nt0zTm8vcYgiRLsEyX3//dF2TfEDG+3+LoCcVp7bm85wOnWb9xmWBBzadphcG+wDQ8IuWdJDKTpYUGE2Xzsjc4YHGtyqDnk0zVvdIRVifGr6YYSCSHY2cs1ddIlCx5GEVUKwazaDz3aspGHkfaj6AvJbiORAf0e0NW2uc4viLvs9A8xmjW5djZZWbKrPrV164xDQWu0DDUVjXTU37+b/4wWzdlu7SrR/jW1y4z6Vk8ck5ms9wgZW8z5ea1IbYl6/DvPvcNLEujvSTXpoODHvFAZ8FxSBUUkkKqTe5u7ksPSMBzTUDQUJD4SW7QCko8fKJMwV8tnVQHp2qghcroN3TYClPOH5X/P9uaMCxjnEqVXA0H3XOwyhzTqM+5i7NhjmX6dBXn0rXraIZDqWfU3UOaTInjOGimhuPKbxNHCaPJFO3QyzDXKDWBZjrz7GtCRKXisHBshSOLEkkxinfY1ja4fSD7Qb1S49RDZ9kf7rBxWe5Bjh1vEUeCaKoUBjWdWRSimQ5rxx5U7WRw0J8xmUzodtRe3tDwAp2aypLlZZVhNMUzHPJMfr+33hxwsFejWW+jK737eJqyVC3Z3ZHjxbOXcUWTUk9AcfQsU2NtMeXOtRy/Leeli4+ukIoYQ5mDO9UQw24zDqFSVTZAYYdJH3Zyi6VlmZXKtBLX0dleN1UfDtnby1leXmY0lGPmuT+7QXuhgl/RMGryfeLkLz8+fV8crihL0kx28LITMeoMydFpL8qB4AQ6RsMm7AjMQ/hCOCUoY1xTTjSzJEEzXUrdIFHCBoapYVkWRZHd833SDTRNmx8GsjzH0CFOc0wlja4VJZopKITA0OWHS/ozckfQV7hzDBecOk+8/yL9fWUYvJMznk2oOjGPPSU77/bgNlEccqwuJ5EDdAwtZ5TMpBAFQKkjtJiVo8epKBL47s4BetWi3VYdrphw5/Y+i/4SkYIvmo5BGhnoMIde6LqOppfzReEQulGU+bwNHDsnL7S5qbBfDxB5wmxW4Kn0NIVOEiU4+oyxEm4oNIjzEk1h4z3Xp9ANsiyaE2OzLENkOY7nziEj1SAgiSIsdcCrV5cQhceI2ZwrsdBe5uHHF7n03I15t6gtVFiu1vjsz0je0t/9B7/O5kbG0RMaUw4XdTW5xCGa8vGwTZ+UFKFIzGE0xBR1SG12v7s+v/+tq/uYjk2opK2nO/cOVQBW05V8AMcm2pfp79GNHscePc2gn2MqovaZR5q8+dJtPv6p9wPw5nfv0nl9A+N8nyKXHL08L2gsNtnrb1Mo0nk0m3D85EnqDdnP97fuYAU6RZHT8iR8avdgTLViYDQTskTJkNd0iqyGLc+87O5sU/PruH6BrQISpVsQdz3CaESG8t/Kc1w9wPLkpOVXXLauCFbPCylpD5TCxXEEWZIx2DmUxPXw7BzrEFqb5KQ5OHqNNFOO97OCNA
nIgW5f8kUeuGhy5smEcCShWLf3tqnXDZq1Out35HfvTSY0a20cU/apj33io3zrm69h2lW8inKNn2SYpY6/7NJUggs7/SFVX8OQczR5mhG0XO7uWWiB7GfdfpdZWaf0TWoVha8exNSaLnkpJ9vAisiRZP24IzfwlBbmaJ1EtNFOy2+zOnaJ0wGbyr8ms2yMoiAtR6BI2mFWgnDJ3Iy3OlIWeK1mYfevsl7IOtlFi0FcBaeEibzGtw2c0iJ3QoQyCLbEjM29ETMlke/gUJoWBRp2oHZzswnOgkkax3SUsbdrwKmzi6w9IOtpume4+yfrCGuKoea3JM849nCFxbWYqbJLiO6M2XvrK/RGKqDlnUVnif2tO5w4K/uG//BPkR0MePqpgCKQbXXtxm00rcmXvirfpbGi8dRPXOSNG9ucXZSH+N6NAxaNgvWNIR89I9vqKzdvopklDzTl+NiNE9549XVef/3l+aL3n//VX+L2zXUMDRbVpmg42ieouTz3/JflvbsDLj65wtbtMcsq8OZ5Ds9/6wr/7Dd+lE9+7GcAOHZ0AaeasLsrxSRGBxFuaHLhwgW0hhzHW1sbvPrSm/iq/wiREIcGtlOQKvsEXTOxXYEpKpSK29tuHWe/OyBU3JswGXJhJeDNzclcjGeWjEEriKIZWSrrcPxkQDjLyZVZ7nj0Flp4h0ef/gilUJYRy0cZ73c5ev4ot96QBO8g8Ck1sBW0LgoTKZ+slfO/a1RsvvvCmxw7ucqFM8oXLUk4GE9xFQeqyHNyzUK3TEy1mUyzjPHsgP2OwVEFjXrw0Qtcfusu46Ec/ydOrdLvd0miDF1xPCzLYjqZYts2QvlkaFqJ49hzvpwfmDiOxXiczA3eJ90popBGw57imXgVmyQpiVL5d8msRGPGbBbiB7IfnH/4MW7fvUIUT0hTdVArHZIsZkuZ6DXqLdAs9jt9WkoMqFJpEY2HGIZGQ3lKxeEUIUr297qqT01x7CqFSOcOXUkcUxZAqclgMBJiOBwO0ZT1RKMZkKY5d25tsbws1/tnnnmCZ8WzRFGNMx+T4+jTv/gLGPkRXnjpf5RtPogY9EPOX1iGRPa94aSDrofzQGar4pOJFN3IOPSFzVOH1aMV9vdnRCO5dpmeTiF0wp5cP0qz5OrlSyRJwhNPyCDi689PyQvJN89iBRnTNYajLpUFOT5PVs5gVqYMeyNGXTXn5RpaEeNjce2ynOOXV8+iNQ1SNY/MZiV5HhMEAWfOPgzAaNqnGGUsLVR55KEH5HXTFLJV1k7Ig7AQgnrlCIPhlL2+FGo6ej5gd99h0uvzgY9Ibs9ev8tLf9ZlciDHVbXt8cAHL5JpA2pK7OTNr+5xsDHGsiwcP1b9GtaW1wjVQcpxdWah4GBHpy67Bqalk8UJgV9nOJDtaVs6eRlRVyIGO4MeZZ7j2D6G6gdlqZPmoNsulYYSJJrGxNOMoi8DFIFzgnh8nScvXGA0kDLke6MdCl1gBz4jJYlvWRqiSDm0eHTtgjzXMbAQKpHg+C7DgxG2oc+hiWmeUVlssKCMd7e3Ori+jWGYjEZD9S4mFjadzRkbN6WJ8MFOj2bTJFB8wHg44871LUJ3zEPn5fx98vFHSaMpz375W/Ld0Ck0h1Z1kb4KRAf1DH1pRqUYMNuT/dwybLIsndND0iShErgYmkYey98Cu8p0MkMzHFxll9DraHidNsvLsg9Hk5TAd3G8jN39jvpNo5wVGFObIw/INhjEKXlYnUv5T/sT2t4SU2uD7U25UTh37mE2xBa1RoDhyb3L5t0Op049QH8g+7TIM/yGy2AvwtDlPmW57bBzd4Bb81l7QK5PcV+uuf+x8n1xuNItE6tQPKlU0F5ZoLXocuYx6XY9ClOuvb6O3YRMKe4sHztK015kd11ulqsVhzAu0XVjHk3TNEseprgXbSoLKHWBqRY0wwS9MEiEQKiBV1loMM5S8qTEcZQDfSHQspBcTSI3bm2jFwIzX+ax938CgIvRFLMo2B/doN+TC4MRnWWhNuW6EoXw222mvW1sp0RX5jSFyMGAbrdLrrD2CBsDk4X6SQB+/Gc+zOf+4PN864uvs9iQu+okn2AZ0gGlUPIRpmFKfLhSOTIMA92QOP3Ddy7JEMJEaJZ6NwdK8D2dOFbkwNykXmkQuA6jsYxSp3mGaZpzTkKSCqxqFZKSUnmS1Ks1hkKaEjoKVH4oqjEeymtE1ueIMpY7zKhZjmA47L+jX9i1gH6SsL679Y7fHeGi9gLEpICgzHRKdZiK8gjDdIk3ZJtv8r2LnruMJiOOrMi6THnn4cpxbdA0bMck+nO/b1669Y7rehxw4cMPI5bkAD7+yAohEW6zxm/+2v8sL/q1712H29wB7rzjt+kDGzQr6iDqLRImKe0jC2yvS/6NFqfsvjSZX3/ifcvcfWEfcWGGp9zmq7UafiUhGQeUpbzW9SAcGvzge+S4evONTdAtDOqUhpyATVuDXCeelDxxUUalCpFTCIPOUBLHZ9EY09KZTcY4vtyg7N+xqAQOjh3jCynK4Am49PId6styDNUDl/E4ZNQtyA9NL02HPEyZjZUfyFLMcttn0J9QMeQGpbYQM5rt0ly1SBVHr1FZZDDYxFXXREnINJxxdtWjqw5cFOcIszHBCZ98og6PtT5JmjPuqfssFxhhDKWFpvD/RRZhLJ5ATxJeeVVOro+er7HfWGSaKj8u06FFSZrMGKu+X6l5pOMULS2JVCTSX2ziktCfyL6lNR3S3oQiKllSJqa7RYprefRTn7NLkmP5yQ8d55svX+fbl9Xf6S6ak4Oeo2ty0fEcm7Cc0aj4/LWffjcAd4e3Ea6NZcu5rN/dJ9ZGtEwfQ2WOy2jCG6/d5amgRawi1be/ssve9pRpU24i+r11psMBH/uBH+Dlb0nvq2OPvQu/6ZLZNXpdOSYnWUpqjTB1OUfcfjNA0wtabQ+hNoa3RyMWTp6jMt1nM5GL+IMXz2AMdd79pKz3qNjgve87j+dEdHtyc/zHf/R7HFteZf3Ozbkkm1naeL7HdCqfN572eeuNKY2mw7FjMgP18ouX2Nu/wmvf/VOEOvz/0i9+iu8+/yp+XW52drtvsbPfZZimLCzK7759dYOrb69z9ozc8PUmA9A8hNAoFDFeNzXKzABTQyj/qM/94edwdR1dmeBGScqHHn+YZ856tOpyLLx97Rt09hPGcUpbRVrPnXo33/r6d7Cq8ls12ivUgwDd9Ni4LrPEsyJmsXaGp957kWdNmQ158fnnWWw3iZTojWFqZHmCaZr4alc2SyMMNNYvb3PxfTIjs7TcptaocKB4hINpiiZCMpEymsk+bAUG1ZYgWDAIlcn97n5Cc0Gbe0w6niCo6mQp84h7LiRnSopTqHUliylI5/5YnueR5xLVkKgAneM4ZHnM0soqpqOyUgi8wKVSV8I0Vkr3YBdN0zh7WvJxl9sn2dndRKfPyqIcD3GYkI3juW/ZZDJEMwWeb1GojF5exBi6yWyaYJtyQ9tq15hOQmLF56IooZBB2EIFYTV08jxDlAWFWnwWlpfY3d9nUZlQj/YmuJbNYm2RW
29LnkmRrXHhA8fY2brKrbfk2Nq6ofPKC88xGch7N1oCUUZsbm7SqBzO3wGzUCDUfiCKQqp1HddjfuCKpoLZNOHISZ+da3KeuPDEOXoHEVs35RjymiaTUYrjFbzwp/JetqvRbFZJs6lUggNEXKMSFAz35c2DVpciE7i2Q3Dy0CQ5Ym/dJtcMbF+2QW1RMAz71Gpyw9kbdpn1bBaXHNZvyMzc2gMrwIBbe3fJVNb0wfOPUJQDXnxbBqZ9V6O/E1JzdBrH5Pd0/RrPPN1gd7TD+Q89LvvC8y9TD0ISxYEqwpBo0MKsVfnal6Rq83A9wazZ5HFOM5CHHcc10awCTch2Wl6tgRFzZ33KZKA8QrWc06ePMuqFjJUYT7UeYNjeXHhjcdFCS6sYtoau9kpGWhAnBUlkoBuKg7RSJc9zsi055+64W7gzgVfo8wB21i+o+BUG4wGKEkgqBIZuQynHkGmXpGmBoQt0U46H0biP41gUWY6mtvDhNOFv/e2/wfaWDHL95j/9LexKm+moj62yPVma4wibW6+NmSgjaqcwCYSFqNqqzW26Wx2SCOJIBcI+9x1MbYJVykqankBzTSbJAE2JXqWOIBuXHL1QxX9Aeu3FnYTXX3mNUikBl1ZJOEkoSpPWgkpkCAPXaSMKSFTQyTYqtOsNjpySY+Ht1w7Y2dkhaGg0FxQCxc/YX7dorVSJFfoBXyMTGYce25V2iWYPOX/mKTY6UiV2v9/hzCNV1m9uECnV7VyPuXrjFseOy3n/7sablEMfco2jSozL9VPax6r0+0Nc5W9VNeqMv99NhEtR4i7JDnD+kVMEjSpJoTPqyYpffekaRgGrJ9YY9uSi41UbaERzBSXD0HAqLoaIQW3csqzEMHJ0XacoDk12C4oix1bmh7lISVKBbpSI7J7EqaP7iO4YU00+XmWRaXhA4MoP0q7VyJOUg/0pr7wuF6t2tUB3C1x7lVZD1qs7yLh7e5/26iFxE4RwSPSCREVDHcvBxMRAny+YCA0j13nhJRnFWb95F60o0fV7whsUDoaWYxkJcXaYmdMwTQNTHWyKosAwtHsZKUAvTHq79zrFXeSAbJ5axXCUUp4B03FGrhfM9u4dOkKVMQJonQyIZxM8050LaGxevneU0Vflb9Pd8Tu+t7NmsXlHbtSbLTnIwkk8j4wGxw9hD/sYgcvXX5JE9eNHm2xsD9A0DV1F9GueQR9BKXQcJYUuxJTxzsH8eZUzVabrE4yjLmL73ntPJxNsU2fafeeh6rBYjkmh5fRv/+URCoDLX38b8UlJKr728rN88Ece4wu/9Tl+6r/8rwD4sy98kTDaJVMHzL+s9G4MyJUcre/Y7N7sAffq8P+t7UyR7X3fp1DfZzIdYVsOZkXDVUTxaGrTWNEJp7LNB7uCSiNlZ7NLrSH7R7PlYZg5/kKdulKuOnl6FcdpoKks7r/+N7+JZhQ0GgWJSo2LuMlgtkM1aFFbUMIlhs6DZ97Nc89LyMqxtSXGo5gsN3BUP5tMMvwA3Jqs0xtvvUZ7MSAvLLa25Tc8dd4gHxpcfX2Kq6BteTnCKAwE8hrNNjC0I5w63aK4ppTjHCgnLdJkBLG87mAroVE1qClRlul2n4ZTAU/D9lSDCp1h5HP29CM8cPAF+Zt2AJHByceVdPH1lGnfYTbTOb6m5HAnUwzXx9RKNBXg2Sl0uoVOrqJy9SMVFiKX8f4OAxX1dxttBuMpo9ziek8uzkbrEWorHisDWandzgFFWCLElMlMZjDyosQ0Kgx6CV++LuegVdfgkw+eYqMr67n27vdwYfRNrnz3bSp1lQmY2qzf3aa+ZnDmhIxOmrW7tE5U6WyqnVt/wM//wo/zkY//EFf/exktXKhVyZDGo9OxkjR2faJBga7JhTAfpViOTjjxeWUq62TUW3RmY/TWAnlDtnutbvFLP/ur/OvP/S4A12+9yrue/DFE1mM6k88TaYHnR9TbApRM93QGUdJnOpPPK0UN204QqcG3n1ORVT3nS1/8En/9v/45Bj15r3/3xT8iOFKnMZbfoVL1qWg+ywsL7Fw+3Di5HPvYu0Acmrv2sD2dNHVQywciB9NwiPIQW8F0agWsNhY5flZmiD76iZ/D9Oo0g4CXX5bQ3Y98/ON8/vOfp92ucrAtx/If/dGXOHp0gVW1+TgYaXjNNkZg02jI7OTFs4+yeesO0Szmx3/yrwKwtbnPsLc3l1guy5K80CjLco7IcGyfUIzw6iavvXQHgJNnPBaPGgxnso81Gg2sUKPWXmA8VGarVYf9fkwhagyUGI9mhDz68BNsK9Py/f0DJdOcUhZqrtZydP2e8TvIQ4AGcxSF7ZiUFFKAQwmU5FmKrrnopkOuhDAKNKIoxfMUfC+eIUSJ45hcUnLU2/sbiHKqkCiyHXTDlsJOyny8Wg/IsoxGrT631xBZga7Z2JZGpaaEqJIETTPRlWlqqafkahN+qCCoaTqFVlLqJpWqnKvqzRqdwQGdVFkjWDq6Dr1wHyuQe5Cd299m6zaYVRtPzeG//U//GYNuSDVQUvNWg8EsxQsgK+W90tJAlBpCZWOarRajUR/HruMrw96tYRfDNSmKlP0dNb80CvTSm0PBLMcn8B0GvQShAsOtVg1RFMwmzFXwDHdMUdjMRnJcx5mLF9g4jkagDrmGlWD5E0zdp1Dtc9CxOHPuKBVl6nvzhkWznTMa9glUoK+zsUuSmggz5eGnZPCoulYnSvqcXpBrStU3iMiZDj1ELLMvN67uY0YRlcUqX/z95wFYq50hDVNOHVciScsuByKivRpyTgVFXr97G00U1BdcDP9QBEawsrJCpyP74vWr6xw75bO8WmE2Ooy8GwxHU7y24H0XTgIwHscMpyNWjsqDRhJqbN0csdxaRjMVWijOSPIEE51QQf6blkdrzQdl09EaBzy69AEOugMSpZTZXDxCmUyoVHxSZc9SajalsLFt+d2HkymLSy2SdEoSqm9q6RR5iW6ZaCrg4fsOt66v850X5RyIYRCGEUKUxCqbvbTcZuFIwOD6EN9X71NO2O12eOziXwHg4rvWePu5F3jzyjW6ql8/84mLbFy9xvZNJbEe5VRqFvFoxv41hZioB6TlSW6NDnj3u6XoRLtVsLl1i8lEBm414XHkSIPF09DZlQGl3t0ZhuHTGyS0VVZ6Oi3Y2bqB1jgr631smSypkyR3GXQO928WjZaNIOKgL79fsKrj1SKSWPa71E55e/M1Lp7/GE8/LdEzN25pvPr8AZrVI5wpY/qxQbWic+u62ktoHq2KSTjS5nvSXBdEacH58w+wfVcGBKJeHVSW/HuV+4IW98v9cr/cL/fL/XK/3C/3y/1yv9wv/wnK90XmyvUMnv6w9FdZ3+lx49JtrJlg68YdAOI4o9pYYHb3DraCv90Zb3NqrU29JiP8w0kHYQncIqNQWZs0LyiEkiVXhHlNEwiRzyVrDd1G0yIKMnSFmZ+FMbVTxykSgTaREQXfgrzWoIjuQd0yt4KnTQn78mRupDa1lVVmg7t0t1Q0u9OhuSjQFbBsGgpyt8AOXXRlepcLgV7YkBQ0WzKSM5tm
mLqOqbIAu5u7kGZU6z4z5aFlAbauI3AwFGfFsg0cx5/D7QaDHlDguDazmYo27cj3bB+XkYJZPiXeSRlPBnOPktXlNitHqtx4eZOlR2XKPxmkmKVFb0u+b//OjOqROlkxN9l6RznMWDWOtRhu3oP8WVY+T3vn8aG+Z0CrqdEj5JnHn5HvHHXp7U9xM3nNfiyjJOub4Txb4JYyQ2BbBtOJimoYhw5bsij7GRwcwj+fxtUFhm0Tpd87tRsmU5Ld9Hv+2/cq1557EYDAzfnmb0ufIUcFdtNs9heyViuPH8WIwZ0pXgY5ZZKx34lIFZ53NCswF30822ayLdu9udZgoL4BQKpgaYPhDF9lVnWjQHMStl/5i5GVLSRfpX18AcoKvp0hFOdxMhQsLy0wmUy5ckVmNC9dWsd0bLZvKnGFf/i93r7H2oPL2MUquzMJiTn51Fn+7f/2pT/33Ht1VhRiasfqlIk5l4KuVeoM91N03cBW0sjp1COoVkmiHnZFfndDaASmj6aycqk2xvZsXn2zxxNKoOBW5zKD7h5+XmGsIuy1JY081DBd5QMXuMwqFmEvplmX98otl9Fkm+duHeAEsm3joY31UBtLcRTqPmRFQo5DXMq/M0uLcBwx1ROaS7LPxlnJxLI4d/IkAHef76NbGrrbYDdW+MW4pLRsTNciVx4d//j/+ipaPEJRzMj1EK9ZI8l0TMWvjMIIc5whGgaDgRz/UzPk+l4d05B8h698/SV++MOLDDsBvaGM3us2TDZ2qX3kLPvbEt7nPvgQjaUh77mozJbzMZdHE3566Swf+aEPy/bsddDSnJZTY0/Jz8e6IO6PYUG2r43LYNih6iSsuDLC350OEXGGwGaqMo/17DxVbY+GLft+LQu58a0/YFev4FXkeJxOBOMkB2M297Wp1pZJxJBcwcpKQgLbZ9gfcfashIj2OkMee3iVC+cew5jKPre9c4VR4NMNZXRyY6uLQZum18JYlBm28dii6q7xyityHK8sL7LfHVES4bjKgD3JgQhLM9DVvJSWgj5jkrsys7P3O3/I3/x7f5/17auce0xCa3/vt/8Nz3zwo6ydW+GlV6Q0+r/4az/L5373X/LiNyX/YeU4/PRf+RQ7syE3rsg6reHz2LueorV6nAUlMf4zn/kFfv0f/V1Wj8jMspYmFFpKHCUUSjDEKQtc0yKOHBrKYLa3fZX9DYsL75J9I9JicEE4MFay5Mu+TWBWSKYZnX3Zdk7N4U/++Fs8/KjMOkyGOmkivalyBfM0jAQ0ARhzFAOaRiHKOffWtm1yMUPTtDn/N45jbD8gz1Ny5DedzMb4QZ3hWI6PYW9As9kgjoY88KDk6PVHXUaDIVph0usoEQhbo9Ko4ym/ymrNZTKekaYZywuyv3a7W5SmiePlpKnse0oThyJTmwLNIiejKJlDIXXT4uTxk8yikDiT/fPO9iambeEouL1bg+lY4Os2toLgikpCFJXSFF5ld5/54AO8/MoL5BO5tiS5gWkVdPcSKr5sz/qKRSFSSldB8IsZ9abNcDgiV2nU4yerREKQTU3csYuFAAAgAElEQVQeelLCs9bfukaemTRXlBdlmDKe5VTrHl5VPm/QGxFUdCoVn1yTi6PjmhgaVHyFgPEsJpOYScchmso6BPUAr1qQTSwMXc7D03HG1bd35jL6rZbB8nKdzdtDMrWmZLlGEnpEcU53QQnhjF+nuXiODMlza68t8J1/v0vz3CJbb8t1Ru8KgraG5y6xvCIzLZpzQBhpdDuyb16+lpDnNmf7q4z2FUzPBM0JCIIaT/+g3B9efmOL/f197tyWiBnLdDnY0am1LCYj9XdWSTZLWfRbjFPlYbWs4Swd4c4t2cfOPVKhGY8YjXuIVCFtbAfD9yiSAi+VY3Rzr8vuqMap0xIWfNRvsuNN2Jp0qNblpB6OBmDqeF6TseLajqdyPyx0OVdbro/Qp9TabXJHXiNmMcIQhFFCobDXge/x7Nf+jNFIjpnldgshBGGWU6q9b6Vd4fiDNbZH28w2FKes0cTJHFKFfDp2/n0cXD2gvPQ2mqL2Hjn1QR575P38k7/36wCcOr/GbJyAVtA9kHNeOU5ZOPcE9OsQyXkp84a0Th0nvCLbN50kFHnCe5/6MVxlMfDP/+G/5OCgz6/+t/8dvsLk/IN//K9IxqtUFTKgWXVIp1WuXtunuyPHXms1RzNS6pWAbCbH2tbNfVrVlKAq5xvDaVA6Ll978evoU7mHdSo2y0smw6GLrzZnJSu865EHuXFDZke7nZy+roEToyXy7/ykwvHjBlt7O0SJfN50vov53uX74nAlNI3nvy3xspPuhOmsT93z4RAf22qQiJCVxSNzyIFbagxGE6yZItRmJlmUMjNtilh5BHgTylIjyzQstYhrpYGhu2S5bBgt1cgoJC5VwQLHnV20ioXjWcxGSj2n0wUtZ1mJD6STMZYfQKERKKK/oaVMt7oEfhM3kM978MIZ9vo72I48NB09PuLuW3toARxaxFumQVGWCKGRjJQHwiyhVnUpEwVL9G1Kx4SixFNEZiEEOQWGIdA0RZi3PEaD4VzFTddNkjQlzRJ6e/cOHZW2jagrCInlEpMiujH+SeXQvhNy7CHFo/IlZr9wMmazGccuykVi860dJjv/cYfq+opKzxbvBLL1bt+DCRaafEajEZCrCe3Ln//6O65fOKE4Ub17h6CNrXceiPZ2723ca0tyVmgelwvOQJH9w+131jVeT4jVgv69Svn/M7HbPib7hp1ozNjnk5/+acwTsi6JUuNrPtBkcENOgOV0zMmjayyN5cJRtnKSesIX2SCa3ftW+UFIsigImnLSym3Bny/j67J9Gw2DMJT/trzU4trXNtQzlQdabGPo0Lsr27+30aW2ukzBDJQwTGTlHMQ7NOurjHtyjPiez+3r1+fPWzq+xMLSGlE85PZb9/hnWumju1PMPdmvv/hbEqLwsc+8F4Dn/+Qt6icrbL9978BnGy6WF+PqcrLVNZdx2kHzC2aHIk7dhPayTtA2MRRcIold9vvDuUHpyRPHmM4SihS+8+Y35PuWGrpjoGkJS2pBS7SIQRYilFKYZXroqYlnCCx14I+jCbFt0evE+J4yhbR0Wl2dOx3Zl6J+RsOuUfVsZqEMHFSsClYVJuGU/5e9N42z47zrfL+1V5399Om91WpJrd22LNmyHcdL9oTsK5CQEO6QgUxgYBiWywzDnZkLM8AEmM+9A4FAJoEMJEBIJiH5JGTDdmLHi2zZsiRrl3pT791nP6f2qvviec6RFMmGF7zI/Xz0vJFOneo6VU89y3/7/X4awrFIgiaVcpnmmjBQ9ty5h9ZCncXqKrbEA7huDTsZxDACkKXCrzh4G75ZJ5LVraPdZTpuk4tLm/jSgK7kcjR9F1uHbXkxVk9ttPjSkUvs3CLG2z0HR7nl5SOUsxpf+vxpAGYuzNDsRBRyI+w6JMoCTx87xaR1mM/8+acAuO1t9zGVi9mstygPiD5wFleotVUY8LBkuXS32Sa0NQjEujG6RSdezfHWt+1k/qQwZJa/G7Lj9hHOn15iaVHMt/2
DoOoT3L1XjI1t40W+8u2nyag+nivew7CTkMQh9cgkL8sJs9kcbuyzTeLjBscyrK+e5a5XHOCVd4sAXW2twcc/+3ecePo4eyUoe8voFGXTxZbUblZ6kWxsU8m67NgqNML82nkePnqB+6TO3t989mECW2FsKMKRv7fQAM20Mc02msSeOpqB3w5QZADNCuc58b0nGNy/i6U5MT+efvQR6rUab37Pj1KU9adrK12++w/Pky+L6xQzFQ4/8Gb+9JO/x5At5uz44ADOSAbVzHLivCgH9SO47/7X8PgRgSMYGCiQdF0yptPXr2l1PGzbxrTiviOja6L0bu6cJFuZmmLJa+CYG+TzwvkI/QA1aXPyhYtIqBTJZpuuG7MuS3J6ovRC20pqC+pGnyCJnny8ooOa9Emo0kQhDFSSJASJgTIMAz3VUKKEdkusz9t3TOF2I5Y2pGC4AYHnMjg4hW2LvaighIRhSqfRpeOK+yo7NnHcQpWsvyvLbfIFB1VTWavKcls9g65Bp5n2bkHoVyUJiSQoUNEBnTRJ+vqR3ZbHwuwciR6QyCBsJpND1xw6snRQ02yKhQjCGFcGQBNNxczrFMs5GnPCsTg/s849D76OE8eE9qWiKJRMFV3dxC6LPVNxfXQUNF38frORoNgOqhKQy4v9otPV6TQ3GNqxg9qmuLaimlhmglsT9xhGEZajoBgunYbol1IxR5RU8boRiip+z4stkjSk7UrSqyRBT0xiPcGTTqDXAkXRyWYN4p7Au63SabsMD4v30nVD2k2bONWxTLGmp46Hmmljdk3KGWGsFioJC6vz5KQj/MgXT6BhMPfkMiMTYo6OHhAjqbGmoEm8kW3ZKKECtsTsFSw2VpvMnl1AM8W97zxQxk8Cwo7PwmWxBq2seXRryxQKoj8brTppqNNZbYEcw37HpLi1TAq88B3hNAzvtGmHVcpyPV85W6O0v4x1LqBxRmpaTeZR4haabRLIsVBULJqtJjNnxSDrjMVkojWmJoYxN0Sfd5cCjLIJahNV2odOElAs5GlLkhTF8jAzCoHTxpZ7WOeSj+pqQIIinSuDLputEDURfdD0NqgUSwSKhaX0rmXx6EMXuOPwy7APyvVs7ghtu8LgsLCxwk6HF44cQ1VUEln6/dj//jqHXnMLiiybHRvbw4x7hlD1sC1p+wYGca0BUYIuS+5rHZP8wDbueVAwJqf47D5wAN3KUi4JG3bL7mHWZ2J+6P538Sv/7sMA6HrKa1/xKtRJSTCzmDI1eDulskKqfFn0uTlKYnRxfZWixG8ZSQEv6OJG4nO4HKEbPoOFAZzhHmGXT9PVGB8aYWZGOqLDPieOPochhiu5go3nutx26246vrAru5HO0K4RZpdX0CJhV6rp/w8ILeIwQUnEjQ6UDWxtjGa9TaKIwbTZ8dm5fzdTO7cxMyMiel69hduqkUrqYkdJCBJQo6SfuAgDFdvRUJS4T8VKGmOaaj/SEqkhpCmKkhLJwaPrKqnrEmkJqsRvpTHEQcpCU1g7E9MjJJFCPlvElExrtqFD6LEyt8CgFEVOTZXBgck+EYfXdNEUVWS+ZMJHVVUM3aDtBTQlne/wyASbGzWyPdqYJCWJIpIkhaTHUpP22YyQxk4UhQKTJOvQc7kSa2trRFclcwYHbJzRLB0Zzfe9iOJ0gcbFJo4tNtk02aS9kjCwrcD52dMv+f7e8N77SSUj3Tf/+qn+8aldImpz/FFBOlKQdcthHGE5Weoz6308U5UXH6gbc6LPSxMD1BerL3perzXXxEZam//HsVIv1Ww7Q/ASgMXvb5snBL4ju1VsDmrW5i9+//fFl/KfnmMFsHqhxSpX+vYDb7sb8/L1z3fPj+/ldGmZTCIWm1rLvuHv+zUdJyvGQc+xAuisi8VGVzVU/dosY0Qdw9BRezA3w0AhpON1yOTFRthYvzZ7l8latFsurU6dHQeE8Xrp+AK1TY9cNqYiF+oNWY981+2inttOS6x3llm8qk45TVMMfZCmZOHLltvsuGWUuZnLFGTaJuvA/NIimcwIyLpzTYsZHi/SbkkWNyOgPDhEbWaWwBOzYmi0zMj4EHtumeLpZ78HQLeRkjVUAkkFnc0YJIkCYR63LbPS2KhqSi6bJU3EBjo1tZOGt8r0bWJ+qGnE0oUaq5cNcnJRjjM2GjH5TEIsw+FxHLCxXKMtCQOGX76LQwfGKS5bHHlGzIuJiTHcVsDE2E5+4RcERu+F507wmnvfz9MvPAdAc+15MpmYP/qrb+LK7Hnb88kVilRrHQ7tFVmbHz80yvLZhKFCT0U05Mh3OlQmClT2i/l3x2vfzvETx3jkkW9w7qzIrNT8kPGkw9ZlgZlUTz9Kbdsr0bUcq5JCuqHHqPmIjU2FQEbPK0OjmMoqmxeEcTfvBYxOjnL0+AqFgsBA7HlFRBB1KWQqlAdl1FY1qLqL5EfFJr+hj7P/0Ms5fvkM1YvC+H/FgX1cWp0j1iyam2LcDmZLhKuX+dCvfgiAlcs1/t8/P827fvJtfPozfwvAlkqBA7sqLC5cYN+UeL6FRp3lqEZLGh85Zwd2qc3zpy/hnRXje2AIAtunKtfXO+7bik+MGutYvliDcoMDNBY81roGqTT4VEPDUBQa66Kf3vzun2LvKw+iJgXOnxHR0KmijdaqYxSLOEUxry4tnOOnf+rH+dIX/gqA1aU1/v6hL2NNDOJURKCm2k7QG5sMjGXwNoShmBnM8vp3v5UnnxFBKMX30BSFVE8JfOkgqIJ1VFGUPumEoiioqtrHH50/c4bCWBnHKRJKA8z1uuQyeVaW18jKjFPkB2iqTigj9X4QEqdAHPevnaZpX1w4kQ5ekgiG2nJRvOMgCAR2LlUJwkDep0Ki+HihRi4vgkc/+t6f5GN/9Afk8mKdC0OFII5ItYjFJTE2NqoLmIZNSkSxLPZyw1Qx7SyhvLbrtQjDkCQWxAwASaTR7VRl9gxAZNR6BBsApiXILBQjJuiKa2mGTraQx087ffkHP4hI9JTQlV6aoVGrNkjjkBFJsuEnEVESQqBiyEj88oUFKsWt9EyS2G/TwcLXdGxNzFFzUCXxPNodsYfpeRXbibDiLJtyPnZaCoOVcSzLYGND7D0kDoqagtITGs7j+zFOxqcpKyQURSGTV3BySt8xDbwIP4gwZdWP77sYhkGaKj1zA8uy8X2PTqfTi8XJyiCdqlRTLRbLVGsu2UIWLxTH9LCMJglgjjwlguhTuyd51Tu3cfoF8XxTOwt0620mt45gZaUTn2oQZ9k+naUl6dlXF5fwqiajkuKvazcg69OswQEp76HkXU49Mwd0OSuWTyYGd7OZnMGSmcFOxyB0fQJN6+nJY1fyxEmNnJpnelpc//JqneEdFW65R7zP+YVFUtekU00YnhB7gdttECUxSreFKYMivu/jZHVSGXBZnD+Lrpsszl3Akvh31czQatUoF4t96vdctkIuX+zjuaJExe9CxSoyINmzqx5cPr2JnskQyiBhEKdoVtjHLZKYBG1IIth1SMg63PXGN9FqNyD1SOQ6kR/JoVsldOkwbK6uceiNe7h4TmVzXoyhQw
+aPPLZvwPJonry9OOorYCiXUCVrH+7X3aQ8vYSB/c9SCwHdhi3gZRYcs9b2TydqImuGGjSPi0XC6zGM/yrj7yfFcmn8Kb3/AhpvsTFs8K5u33ffej2CkE1Yd8BUdV04dxFRoq7abRWGBoSG/BzR5sMjufRZcBgbGILSgqd7gJry8LuKowEaGbI7KLNvfe+BoCtOzS+9e2HCQLh4LbCgO3Tu0iViKwtjiXBCs8/U+fQwe2cPir2uuX5vvV9w/YD4VyZpo6hCuOgsd6h29nE77bJGmIjbjRaXD59HktTmTkponeKqjJc0WlpPZ2rlCTWIAlQZGQp9A10PcFywPclcFPVSIlIkx5IUydr6LTbXRK1F7lS8BttKuNFar2VxQ8pFPKYcnIuXu4yNqFSKmfo1axokUOaNJkYH0XTZCYg8YkTnVgSDShJgXbLw7Tos/fFcYwfBFhOnm0yAtRqe2yZHGdjRSyaSRSRxomonpIbmqZpoCRS20qyxngupiRiANisLqGoCXKfFNdSY+xcQiizHH4MdsYg2KrjIjbe3TumWa8usXKuxvitY7LvMuhqysyxK9mKA3ftZdHo0pHlk1e3S6fWr/lcKkuGGEXByeSos37d37xU623kV7etu7YxL8tH/9lbkt7wcH7rAFHi416+MRFGZ1704d99+jP86Ic/AsCXv/kF3Jm1l/y50ssKXPxy67rjT/3FGXY9UOL8o70j1zp8pdEi9ZUGzTWDPu2QTFlPTI9T3C3e89JyDV0bwtgmFunabJXSYBY/8PC6om9tLUAzwVCLdBrCiMgVHa4WIs9YBeaW5ijmx6jXrnxjaApe12V8ZGv/WHlkjN9a/Y0XfeZifhDfC1Hl/bqNmD3TWzGzEYPDMvPY8CjrLo01g7oEr/peA40h1Fgs+HOXasRxndsPT3JJGsvNqMaI4/CdR4/SrosJcODAHcwuPElVOml+awjPb2GZOoYlN/UkQFGyjExkWF4V82hmcQUna+KuyhIkp405kmN3eQp/UwBcO0EbVbNptAKm9gun0+gUWby0zCte9koALs5cYvueAwwUdzI0JDYTt9nlh173Rp5+7nF0xPz/7d/5IN5miDMijC2v5vL0k+eJPINSXryrjaqPrrsM5DI8+6yYf/sm7uSuO5usyM2k3ryMuzJPbV5hZUPMN7WrMzI8RmbEIJElTjtGy6yfucjEvWJsnFrqYOZnCdwGFUdqw+RbrK7OsXNsF8ceFaVsxp4hdkyNs3FaZDbNyKa9HlFV2gx2xOaYKReJWh00zWV1RTzzhGlSMk0aHRFM2D8+idPRaV1e5X0/IzS6PvO5r9GtdVGUlMCTDuXGGULf5+gxwUTmWD5jqsXpb/4Vo5L2fPXcGV77vvcxODpEMxakDKtqwEx3gVCTTq6TsrnYJcgOsiFZqmoXqwQlF60hDKRCeZyxyX088vA3aXfF+vqa1x1iPjjPysUOiiINct0gjduUC2Ij/vRffIrS9BYS20CTLFy/+Ju/i6akXKot0ayL+7wwO8PE3kMMT24T43XhNH/3Pz/GWz74Af7hYVFO+9r738bkzq0sdmvs3nsYgPmFWbZM7+WXfun/BuB3/uuvMzI8QJQExFIbKklEhklRFIzePpMkaKRo5hU9qerCOjlzEicvDBItNiFNCAOPuiwjt3SDRFFpS0M8DiMhH5LIPYgeaUV6rTOXapQHCuim5A5PUrrdjiDgkMEN09ZQDaFpqchSs//8n36bYsnGlBo+immjJwmlgQKqrCZodiGXhdAz+lpGnh8BBrEsFbctAxKdIAxIJWFAECjYto3ruv377HY8kiQhV5Q02rpwBG1Lo1oTz5zNmCimik4G0xDnmYaD2w3J9HR8gpAg8AQ7rmQCtm0bxynSbrepSCMw8lxOnniEye0ioh9aJg23RSlfIZTVG3mnjGHbjGwT683FU10aTQWdCF0SmTi2TRi6NKoRBUkI1G54aEbUk9Ck3W5TqhTQjZChYad/LJPL4nYTBjIiENjphuhBTBzJfcAw6XY8NM3sa2YGgY9haMRxTCzLQXUjJkw9bJmlWlmusX3HOJGySUnOh9W5BopSxMm7VEypNxrX+O43YNtu4bSQBpBqnD62zIHDgnZdUWBudpaBu7cxMCTHp56yGiQsVyVB0WKNnJNFU11OPiNsw8ZmG93MgKUyIB3avBMzNDHJ8yfEumEoNmbOoDBosyqN5JHdAQVnjFRNKd0q+qp5pMnauZjp3WJsvuzWWznx5BnWljewJKnGwZdNERgpmyeqzMngr6rqorJIEsxkTJM0jfG6Ll4oxmKsgGFkadbCPqMmlkerWe/lftmxa5qZhXN0fdg78HJxbKDJ5pnPE4dpn0U1jiKUQCEjExJtJSbRIWsaNKqzADz26BfJlIaxrTyDFUE6kWgjpLpKIjM0RatCvnIb47eUuO1e0eclvcDWfZs0ZTXNxuIiza7Pjl3bePUbRMn4J//0z9hdfz2HbynQjhblO82RKgFza8KhHh7czuTYTlqB3n/e4YEpqsNVmu0NjIJ4V/tvvYOxraNoF8XYXN04z+mZh5je+iCHD4kqg3zxSc6ePENtVWffHhFAKxZXUdIshZz4O9tWabXqlMsVVpfEe2k0DBobMDg5xal5QRb38BMRt+47TGNGBKSj9gpzZ2ew7QTbEvvvrn2HyedfYHm1zmZLrrFGCV7Chr1JaHGz3Ww32812s91sN9vNdrPdbDfbzfbP0H4gMleB77O5JKKH3c0maRJRzpbpdkTUJpMxaXfanDryLLYuoiFpHGHnE4YnhQd+7vwMhmaRRCHoUj8qUfC8iFzRxDBkVkrRQUlJZXbLtm2CIMDUdGJZKhj7AVHXo91oEhs9YHrE2IjG+Dbx++2kQ6dZo22XyRZlPWeSkHUKqJpGEPbqzkNSJUTTexkwG8uwMKyErkzTq5qCoqp0Oi06ntTVUAyiICQJRAgqiWKUJAUlvSaDo0jw8MbyjcF1o2MFEq33vCISWRgucv7IxjXndYCDr7+DVJEAcyfLs4+IlP3SyeX+eSP7x675u+NPn2FgarAfUby67dsjyoKepsmbf/gevnryqevOubpt2TvG5TPLL36CjALlRvJ90PT88uyLnl7cIsvaLtde9JzK9mF0GZlbvbRyzXeBe2MyiziOcewM7nWk6Ne2933oJ/BSGe2Vdd33vOkBnvraozc8/w9/7dvwaze+1vlH6zf+Ati+fTvPcQwz06VYEhG2pd53+2zGJsW43peW+cLHz1zzt0snRERnx+Ft4j6DFvlshZWlFqEcn93mtbi0U8deAKDD+WuO11bFrz57VTSntrrMB3/qXwKQG85y9rlz/MPX/r7//aWTpwAY2y7Gles6PPbYY2zfM87zL8wCML7bpZzJ4jcV4q7IVA2UKjRqHsWyeHf1qsqB224lh8HEsFhLLl1ImDm+QohPInX0Tp8+TaFUxtJEVM736pSKJZKkQSjLewkHiE0Pz9VIZblQYPrYdh4i8dld18gXVMyBJTqyzKrAAFGsYOoWq0tizHW9DoZqcfqEwFfce+89vPsdH+APPv4pb
rtNEAvcfXgvUdfmffs/QLUh7v3kC0voOYczpwThxOHbb6E8GVBbb3DHFoEJ8rqXaTdqxKUulS2iFPObjzzD3p2jjE5I8fN2RHFAIb97mDFP6I/MLq6ysrGOtaoTSir9F84tkHcyDN5/NwDtx2ZQV1bJ2GV0U4y9ktZkpeugJwlDwyJS7Ucax5+exZLrZNa0abc1soMDVBfFfIqbw2QLQ5R3u7Ql7fnUlkH8OEOciPu04xwDoyrveM8oXkOWxrbWeP2ddzJczjM4LrJnk5VBvvjQoxw5JsbxR3/jlxi280zf/SDLK6Jk7OKx75DYKR08Qlesi4u1ZeLI584D4vlmLhxnZXWVgZFJxisi0rl9epqHvvcdChKrOT7qcO6FZaorc+iBeL7WnMPi+hoGPpYENmdyOdqxQZpKUiE15Zt//Tnuuf8VWDmp1bSlTCkzwI7SJF//xlfEtRYXmGvU0UsiWxmcO01pzKHRUhkdFVHxnbdvZX49IDU1OqHYD7vdLhcvnidVxVw4ePguzp44SjZTwJQaU6kaEgQBSZL06dlB7BdKTwQ3jrFVjdlzS4xOi/FDqrK+tI6paARSK9ELElRdo9sRz6dqihTTTVH7e1GvgkLrR+uz2SymYdPtiki9Y9koitCL6mW84jgmVVMS32fHtKBerlQqnL94nKB3ncwocRhT3fDYJc/ZuSNgeWmTOE4IpV6NqkR4bkgs0za6rqKkHo6hkvQwQppKGISkcYIia6HTVKFUGiArMy2tThNF00kVneJAbz+GbuBiZ0uYslRI0yGjuShS3ywIAsYzwwRRSCJhAH7g021WKeXytFJZ0uzAeCbH2qIowdUzBVQ9wavWKJREpNzSNmm2NinmREVP3PYpD0CjltJoiPfnZBWyWYNOp4Pvy/JFPSaXyxLLzKCVTURGXqmwUROVE46TZW0tIImyIIEF2WwBM+f2M4q+75PJORia1SeK0lBQEoU4iDGktEQYhhDpeK7scyOmVLFZWU1ZXxPjZeuWHNVOnfGJ3bSqTdnnKXFg0elI3GKmgNvpoJtZVhbEetNqVxndMsTc2RYL88JeGSyXsLSQuJcZtGIGBzXOXagTB5KC39JRzYBEc1jbEDbM7Qfv4tf/86/y/h8WlSRzK2coD0dYdoacFK9ePF/HLTUxhxUunhZj9paD25h5Yo5HvyD2vNad23A3VSwHsMS92+UcmD4DE8PUNkS/NNsdEgUMWd6nmQbtbkdovfXmX+CSJgpBnJDICqyw41KPfSJJwd+qXyaTc5i8fQ+KxKuZeobSYIWl5Tl0SxKXaAGG6hCaco62BGmYPpxnTa7f8fwarxt4Ha/cdw/fWxZ7d2PtEjun70UbE9euNRbpXp7Fp4Y5LsrmcvoE7/+3L2fumChx/rvPfZF/8cs/wcNffYhdd4ts2o/h89DnnuaFF15g/31CR7Ndj7DMHDtzgmDKUEqceeF5suUSuoQO3P2mN3L3Aw/w8d/9XSZzwl5cnq9R2jqG2xEVUiuri2ipwdzlo4SpyC4FbkiktxmaGmZ+QWQsO+EsGf123EBiG48/Q6lUwA3KjE6KOdvtlFGsRYK1c8SpGIs5cxeFcgErFNUmyUxMfiwg8qCYF3bJhbNtivlJtEhDlxAmS6tyYwtRtB8I50pJFXZsFxvcBVqgj9LqekSIAWb4KpVMFq3o0O2KwaOEMcVijrgHKi7kROlIcoWJSFGELkkcqlfYnrwYTbmSlvQ8DzVWUJMrQryKoqFKvQ5LKnMbWY12N+KsnHQTkwOkroWuRkSReHGoLioOXddDk5uchgoJJL6sd45iMpZKkMZks+KZw9AnTv6jIGQAACAASURBVGN0TaNQEhu7bWe5cOEsthT6Je7JAqdXnk9NURWN6uqVMrGRLTkgYfWyMCpEVaNCx7viCMye2mDPy6dRLWEABkGbi98LOPbNZ3nN+0VKvr1xY6KK1VPXOz/FkSFS+R42uFL6ZkrcAMBX//Ypdt4tjMkwCsjm85z6zrPXXKdUGWTyTdM88bXHrjk+uk1sMCvLwvBsr15fOnej1q+Qe4kWJrFQlpYtO5ynIzFbtu3gfZ8DVdpeQbV1qhevZ+Eb2iNqsNfPij7ImAZeIp2BzBAr1Jk/d61DMnLLCKsvXH+t/e/YDcCpL10hkpi8X7B1EXssPHHlb9q+uMfyUKkvxIlk5nv62XX8h+QYksQLB18lrn3s4SvXvvTMrHiGqSxzx68cv/pavVYYsWiu+kwf3M7FYzP94/c+8Co21ufYvmMbAN/82kMA/K9P/M/rnu/7W8uTLFVmB3e+wSmujL/eiMvtKGDqwinaXPdIUo9V6Q/nCwMoakg6MMTB20S5zdHHP3Hd72hbx6huQBKJ6zQWV2hmXQYGCn18pZVzwfKYPbrY/7vyrjLVC20GpPGjawqKnqW64dGp92q126DGGKaCHUqgcaKhFmxCqcv2zOPHOfH4r1MeydNsiQF6+vgl1pZcHnvsYd76w28F4CtffJg9d97OxkWxmWTyk3zyE3/DXYcPcv+rhZNkPhqxY/8b+OyffpaBgnjvh24b5J4D9zMzI0oxxgpbeefb38HHvvInXJZ4vrX1JbREx8oXyEh2sExRY7gyRNkWa5JpNvA3irixRqiLdSJvhRiodKOITFGct7ixRppAwZSbl51i5FTq9Q10Rwradjq0/Q6mCV1X3Ge1UUWxO0RyfoWphmmGaIZKZUSsgf/Xz/w8lZEcKTqbDVleM5phYnqJrzwlHJRTF2bYecetxK0aZUl6cf+bP4jbCvD9VZCiwaNpyLPPLXLupHinQWRyxz33EcYJ1MSzaDrsmpqi3hbzd3Y1TysKqZRUsop47/lKjS1T23j5cBFPbv7feniNzJBBaou5bishbqPBD73rXVSb0kFpQ6MVks8rDI0JZ+DAHe/Ea3oYisAkhfu2sRJYNN0q+24Xa2VsZun4i0TVgMsbYrDvv/NBSpUM3/7yV8UYS1UsR8WP4v4+13Y9uY+pV4RwFQVNUfpl5aqqopFgGQ4rs2KcqYqKSYYkidDk/huQoKQQyJJDS1EJoxglUYRwo7x2KnF4veBfkiS4rkutJowYRWIToyjC0IRRmIL4OyXFkuy5Z8+fIZsz8bpibEShTy6bo9V2cV1xT7Y5SbfTxnRSgkicZygGihZj9Wrgk4Q0CVEUi1SWsQWRh6KpxGlED4icyxZJ0HAlfsU0cjhZjYQYR+oBhaFPmCqkit7Hb6txjKJG9MBbmq0RBAGaqaFJpyUOEkxdJeh2yIwKA7a63qTiDDNYEjbBZreJoRQIY5+mZJTdv2saW89z9pRYi0fHbXQNimWLUDppbgdcSbBiGVIjTHVwnAqaIWyCjc01TGuYYnkb2aLYj7utLkHQxQ1TgYMD4riD5YQEV5XCq2kEyZWwaRAKDJ2u68T94LFBnLgoSc/ZSrk8t4luWZTKYpwHcZfNNZ2Mk1Asin6ZPV9nbFrDlU7h8Ngwp6sdtm4dZ2VBCmg3U2obG/h+h9KQJHNo19FzU7iSoTlRbdZaPlu2jTCyVThJsRqwcH4Fw6tTHhQG84f/za9T
7zbYf7tYWyrD2zl+4jyev4mbSMxeZBMHkDXKVKU+XtAwsXIZMtIJrLldLMdm97ZxhgfEmDp57BIbzRaOmbJ36oDo426XuaW5PgtgkkQYlk4aJejSMCnaRWI8BC5J9rlqksYBqi6ZFl2fxfWIvOlRGhHOeHbLGGO33MXMwgyWFNBNNQ03DuihsX1dRzFhMGiTi8TRvdseJJ0v4gxmuG9QBOguDRZYD1MKkuCtkMmS230vk3kTU3oOa50V/Pki1rDoyw/8zM+SHxzjh987xaokCZs88Ep+Ztd+Nlo2bTmGEz9BSQ10hCOVpAYjWwZIIwNNMmd2g5SsY/OTP/sLFCS/wGprmeWNb3PirMBJT287xK7dUyyunObUySMADJb2sWVyJ+ubl0EmRZSkQhoOkpVY8X27x4n8HIraxpWY55KdJTOcx2+tMbpVPE+UDLJ8aZZMToz9YmWQdjtA0yzqtZ7957G5qmNZXTyJsUz/EcnSHwjnSs+YtDQxMfLFLbTrddSwSyqjP7mxUfR8BsVUsPLiwQqqSRDWaEp6YV1R8YKANElA61HBhiQJtJoR+WIvUuaiKkZvf0FRRA120O702YJQFOIkhjAkb4gNsx2ZRGraz3IsLayRMSuMDAyjyckZxgqpEhEbcZ9NR4lUVMXBtGRts7dOpCcoiYLnyYUtDNFMBRSYGBWZuEsX50jjmLgnx44QKOxFCHut51hVhiXxhck136+tNhkYNMiXLd7yUwIM+LlP/ANnH794w3fR8oUhlStMvuQ7u7rNHDnN1rt3XXf8e195/JrPkdzMqvUGqm5cd74fhsTN6526ldkXF2orDxaobTRv+F197sUzVv17SsI+4w5cwRAA1Oc3rju/PrOJM5K74bUaG7JuedsgjdkNkjRFk0J8m+vCsF2+cG127EaOFYC+em1M5MAHtmFM9ShHr8WCTezKcB6odwPC+rW4tEIljzYonm8FcQ9+9OIUoutzYjHZe+AQFcmwudxsc+nc5f45Q+OjNJnjnnvu4Z2/8WoAfu8/fpInHn2YQ3fdBuq1Wczf/F9/DEDtUpVvfeHznDjx3HW/q0pjpDkvNrXK9CRWRgRSliRRSPtSk9teJ6JbM+cvk7UdOtKxTDWP8/PPMu5d5tQT8vd/Gx54xd0sLndxpfO/PH/2ut/+xV/61/zJH3+a8rDEBFpN5p+9llhEtVSGKnlcGcVt+zVi38NRchzaJ7IMi4sN7KLJpUuXKGWlaHiqkqoxaOL5xgeGGRrYwkZrtZeI5dzpKhpinTl1Qhj/pr7JN/7+L5neJzbro89p7BwbIzs0xqte/ZOir1Y/zuJql7e//U42QmHob5l6Fwfveyfr8hUvLzf4g49/FzdbYPtuYSiyuYiXQpxzGBgSv2vpOnYmg6cIg2Gg6NAhSxx6IOUSghAwFCJNYWNRjNuiFqKi0FwXc80pldFtDSMyyBjC4dq9bS+PnfwuA0aRMUmpXK8GWEkHNZRZTs0ll4RE4QDVtsTjFGBppUZO1YkkJs/vTjI1UuGDb3glAFuzEK22acYdNmUkMoxaDI2OcGnpIuWCpODVs9h5i/mVWQBGxoZZ3kg4f+EIZUVgBM2gw9SB/ahtkQGr1iKxHtpDpFJwNpfXGJwcYHJigJc/KAyUmvddTp45hpURxmsSpswtzPFnH/0THnjPO5EXg3yZRkfh3j2CIXGx3sbXfdqxeLbbX/tjtI4/yulHH6VUFBHhRsvDUR2ePPc4XWkgnJ5b5dChvdgSYF7ZOsb0rW/mq1/+FkkiZToMA1KVKIr6rKdpCkEY96enYRhEsY+Shji6eMdKKhyDRDNwvaushyQlkRIAimmgqilcvQypCiRXiC0AXNdFVVUcR4wp3/dRVRVVVQmk/IVh2ThOFlSF2TkReNL0BDVVycngY4xPFGv4QZesJLmoN2sEcQdbc3qFKuiqRRy2+4RPoRdhGimh76LKcQ0pvh+SxPTlSjL5nNyLpTRKFKOqGqqpgpRZyOWKBHGEnwS4rljTC4UCfjftswdamoGh2yiKQrcr3mnghQwWSjTWN2nOyaCbYrLpVzGlM+7oeVquj25b/eDxegsUw6IgC0VGxzKcfLpF2LXJSHzV8HiZjfkq41vG6Ur8X6B5xHFIR2YLNUMDdKqNNST0joHBQVrNNZqNJUxHvPfQV3G7IaFkdrQMHRDEHj0HLAoTsrmceL+S3jsIW+g6mKY06xONVtNnaMSi5QrbbH1ZpViukNJkY0n0sZPNsLnRpeOLPiCqY2dNWq0WWSm3oashK0sdtm6dxPXFXhz7Dh1lk47XE4UOMCOH6qpCJNlerWxMt2ty/y2vpyH3vI/8/Id53atexlNPnJTXzrJ77z6szAxnz8tAe72Nl+aIQo8hRfRxc2EFP2jilCV5TRAS6SEXz3aIJiTObMMnSTTMXEShLII5tdpZElLMjLQNlZTI9wUDpSptjFglSTXSNMWUuMQwDLAtBRCfE1JKeZ3l86eZXRP75t7b7kTtRCghZCTesOm3hKyQTBDoesRYweE2Jct4IoLTxoxDcedejj1zmsG82J+MrSrZXSU8ib9Hh9hQyAUFOr4YQ3/+m/+eN33wRylPCBtvx+g06wtNdEcjlTwFnY0UX89jGA5xIMZ+MVPCDdzeoxD7IcXcsLCvPcm4qaukgDk8QiDJVGwj4Jlnv83wpHCEZ5eOs1YtknMS8lZvX+2QxhZppJIdEOPFsA0anWVavrBtVSPCc2N27xlDl7jIxuYasxtnqBQLKF1ZNWG2SPwmZ2eE85rTfOIkxFcjMiXxrrpNh2wuIpcZRBYMoJgvHb2/ibm62W62m+1mu9lutpvtZrvZbrab7Wb7Z2g/EJmr0Iu4fF5EQg0U2p02KAaTe0X2pDhaZn6tSh4DQ0YCGusNLAWQ5TdZxyLohqSpLko9gDSNIdXw3JCspHU2TEVkhGLJaKRBo91AVXX0VPqaukqapnTbbRS3F2HL4Plt9uwSUWrbmSShBR2fREazTcq0/BpBHKLJSLUSK0SpTyQpK7tul9qmQybXwNBlrTgaYdxBw6S2KaJitc06pmJA2kuxybIpVSWVlPE93NHgmIVuqP3zImKGx0REY225S6cdcvCWA6xKprPx23ew9PwVxr+rW74o6mWPP3OW215/C51uk0uPLfS/n757DxePXIn+D+8dpOO6rEeL11xnfM9ODFs889zzgm48kvix4cogGVkydHU7f+QM2w7tYtehW8Xn507e8B6vbi+WtZrYPsbizEvgt2RzHAe8K2WBzav0spyhHO769SyIaqpcdywzUWRkQETlZ86L/vqzj3+y/31uaJgbSy3fuFUK5jWfj//l7Iue+8jfCOY2K+OgS2xhDyWlW22uqghl+51TrHeuPNOO+0ZxNwKWz16bqXHbmwxMirDpaje+5rsgFhGbs2cu8dk/OXLNd889fYJXv/lKOehPfuTHGPtp8T7XTn+Lg3fs5QTXZ640SVnba6aW9DNWV7cLJ0SGRFHyaLbGoKRhrtWaZI0MZskmXb/ywGdnanjJJnpW9MjQvgEUEtZOi/f8kV/5dzi57aQ
Y6LZkBjy6TnZHjs6lK/3k+wqF8RKjO0S06zUHX83f/++/oTDosFQX8+q9P/ZuTpw+w8zcIk2J1bJtE2JIYxH93bd/mp/9xZ+jE3XxJLV1GvpEocLb+A6/8d//vex/l3/J4/yf//HfAPDh932I3qz7HF8Q/3ktlIaGeO977uWOcantM9tkbXmD2QVx748eeYJOaLFl1OLrn/3m9/XmLNN3iXdjOyErzctsHhNzZufrphjMq2gp/Pf/9pnr3sPV7eU/dBdJRkSSvXZEd61FZcsEYSDm5iPPfY+BYpG9W3cx0xBRxUplCtfLkRoiE5noIjPWTGLGi1JI3XNRs1nCFDS5Vp65tIKV38vOW0QE85JSZutUCa29wLnnhTjvlonbOFc/jp2J2XDFvmJa44wPTdBcl9IdbovIDxjJaH366VyaZe7MMRqu1JiyC4wqPqt4/NsP/TgASxfXOXXhMQ5u15mdERHh+x7YxZOPH0OXTGgmGQYqKQ8//HkaOdEv73jF28ln82A5LM4LjMD5hQW2T0xiykzy6bPPsDg/z+GD+2i3RUbva188wUhOw6vXyE2ISO5UZZDnn3wYPSdCqKmpsNbw2bFnO7MXRP9qmobb9QnDuC/iq+umYHqTWdwwjFENkziK0WRJn6qptLsdweAr/y7xfZKrsL5JAoaqEV9VIZGmKakiklk9jJemKwShx8CAZJtcXcMwDMFgKLEifugxXBoFTaEpZVVsJ4vvepiydFBFJU4M4tjoY0yCsEMhnyFrZWnLpSNIPKIgQVFlmXwckWiC3S6SlSRRFBFECo7joPYYdrtdMplMfz/tdHxU2yKMYsE4CERBgh/5aAb9iprQd1FSFUUytrkdj5yTIVUUdFnxEidQ73Yx8jksWRqsqQmp4ePLiglHyRCGm0RK1M/MzS3Mouohd9wr8HilcpN2Lc/z3w2IYvF83VaHIAioVetYUlvINKDWbODKDFh5IE+j2SZKAwyJ+27WAkw9JZvPgSJZjBWTJNFIJIrEzmTJGAadVhdTsktmHBvf90WWRfaxpRmQZtCkzqZhdhkasVGUlE5VjjstTxqlNFc85NCj46Xsv2s7Vkas1UqsYFkxG0s+g2WZsYzraHrM5uYmnba0sfSUMOgQy7yApeRway5D27NYGXHO5uIamUKOmfoLVOvSdglslKTG5obY5wzbZW1jnuHhPFvGZQXRRMpG1cfvukxURJZGMztcXo9JZUmsW23TSVtkcltYlrpvlRGdXEGhODDIyReeAaBVjfGShFSODdu2SFOFVI1Joh51f0LqB5iWRRSJdSIhJYnAlHp8hgpB0kLNeJgdcezUI98la2sMDebwpS5iFAfC1lTl+zNUapHL816ANbFNjIVml+NnHqNZNDCrYi3RB0pY2gCaLOtMgwSyBmlYQ9OuVLiURycoWlI4uhthWQaeFqOpwubJxU26WGi4WHId7MYbKLZJ2kuKaSqBB7qhocjqslhPhGREqEHQk3XI8vIH38jqmihVbmw8z8TwHSTqDFXJtNput+m0bZqbCStLonS2VHbw/CaGFKr33BQUn/pmm2JBzKP52nl2b9tPba1KBpFl1LDYtmuITlNULOSUAq22SWo0CaVGa2XIwW3UiLwQ2cUYwY3ZpHvtB8K5SqOUQka8uDhKGavswPXaKLI21a9FjOUGCLs+7UgYDcWBMtHaOq2OHOD5LH4akiqgpD1HREHVNNJUvcK1b1m43RC1V52qRei2jo5OGEpq8ihE0VWcVGd9XkzOtDzE0PgQm5uyDNEM2H3rDvZvKTM9IhaRxfXzlMx9RGi0ZXlbEvjEaYuuVH/P5vfz6T9+iG4Ygio2CrfbxcnoJEnCvKSDTKMUyzaIeo4iCWkKcRwJHQugB0ZVVVWUQwKKlqIqqSjbAEoDGnUv5imO/5PexT988sn+/9e5vqxuc3WF0g5h2NQvNVg7c33p3NiuLUSxj+teazBfljT6L9Vmnzv/j57zT2n/FMcKYPPci5ccqqpKfqxEa/lazFEPk3V1G8rnmTmzcN3xXmuvvzQN+/e3Zu6fhiu7um2eXLzu2PIz1zqfftBh48wV/a9L31v5/j9h2+2TKKpK4IvxdeHUte9k4bhwzI9+5wg7Dwj8z4Xjp/rfP/TV7/T//6k//iyf4rPiwyF473s/cMN7t3piUdTZeWgHF85doiJBtpvLV8bh1klJQ77qk89k2dwQhs6ObWN0u228apc9khxjiUUiZZ3hyTwr62IsJm4H27xS1vnQw9+AWAe1RaMhAzU7cjiORYcrzpUVNdm81CAYEAvy2I8cYPrgJuvV87zrLe8CID+c4cTnv0IaxSQSF+G5HYhNBofEPX3n6FFuO3KG/bfdT60j5k7WLhBLbY78kMDayJ/hw+/7UP8e7nvjKzAUi0e+dsVJqq+v83G+3P/83373Pr7zxFN898nePNbxuz7TxSI9hOPBB/exst7EyuhcPHrjAMaFb81xgTme5G38/K+9H4Bu3eW5Cy2OfvNb15z7+Nef7v9/8q6dZDINwoZHcasocY7ZxDJMzq/OM78i3mVJXSO1MqgyUJR0m2TzBawoxJWyGWE7oav5aJpGWWJfZy4/idl8gk5dnHPwHfej58aoZPZw+2GxqT577jEyxjTr7U0KI8KwjyKP2aU1Do2L4NiW0hDPnznO9Nhh5j2xBpQSh3Ix4dArBbVvK21w6NZfpFRwGdkqQNn/5ek/5+f+j/dx+dmH6ayKjfdld23jR997kM98STqOA12CKINmKVTPirky8sGdzF6Y59zp5ymXJK7Nb9Jo1blwSYDl236LjXqbvHaIW24R+Mqp7dtYmFlncmKM558ShpsylbBz/2EuL4l5ubK+Sqvlk3M8snIedeotFEVB13V0/UqgRlCl90rkI1JFw7HsvsPlBwFGxiQFQqm5aCg6fnpFjDjyAxTDII7jvkMCvcBf2N+fFEXF94N+iZyiqcQ9bSx5D7pp0mzVGBwdIahLXJ2TJ/BT4qh3joHlmFhh0CemUHUbzw0x0jZKrxw0DFFV44poMTGhL3Ss2l2xnqaxSSaf7fcNSHKFOMKTAZFiqSAcxETD83vGXJNsPkPLC9BlSXvghYKyXOJq8k6GOApQNa2PcU5IcbIOhm5guOL6oWqgkUWXpXyqGVJKiiRxiCphAGkcMTo2gCGlUR7/WsTSpRZOPoMqy5GiUME2SzRbDej0dC6FAHJG0lFvrHXI5oqYToIfyD0ljFAdBy+I0Ywe8YVOJm/3A7CGIfBjvu/34rqYhoaCgee5QrsLyOeLBIFHknbkmEpZXgjx/bAvD+MUYhq1JuVsCVUVfbV9t832XTkWZsXv+bU2bT8i9NU+ZENVdfIlB0N3cNuSfj6nkNQDBjJiDVdthbVqxNDQJLsPCWzP+fnTrM7HbERrHLxVBItfefvbuXhpgWw2K7sgwLEydDox0YawlsenLUpRhOK6FPLi2OJai21btpFEYi48tn6aSmUELekyuV38XlbzaHku88eaNKVT2255DI9XcGX5YrvdxVQdTDtzRVNNg1xOo9lsUiyKQI3vesSJR5RI3L
dh4+h5/NDrl7tmBk3C1KQR+RhyvBhaBtSEVO1hGU3CMMKoBHy9KvTw9kw+wJC6g/GaxuNrIhB1x9CrCZWUWHoMjlJACyxaXOTh714hnto2fStuTZaHGj5pqvIHv/yz3Kj90kcFDCBVC2ihjibBSSkhiWMSpBF6v1Q5RVMsoY8ngWe66XHu3JN89qMCs/3un7uL5eVl8hXodiU9/ECWr3zise//6WvaobdM0Knr1KnjS7266dt2M2jqTAyPs7gsHO2F1Q0c1WbaEVjtSSvmKf80lpPBT4UT2m54uN0YVYeuJH1Buz5BcHX7gXCuIOlPViVjE+sJpp2hKbVovGoNy7RxVzfwZVSso9qUzA5FyVpjhSkZsoS4RD1NKUVD00SNdbslOqRccdANn8iXgsFRgmFaeF4gyA0QavNhEEOS9uFbUejSXL1MpSKyaSEdvHad1Y2tfPBHfh6AXVMG37vYohmEbG4KIyJjh8SuguqLjVBv1fjS4FGal1ukuni+OEmFqnwakMpFSkEnihJSOXmSOEXTVVAi0j7xhnQQ07CvERIlsYB3SB2vNHWAKl/51i/z1tf9Xr/HP/7Jf8GCNFqLeZ3jJ0/zl594gcpWsXHcc7CM17EZGB7h8391xXh6z9v28+gTwgE0pnOsX1pEGygSyzp0uvDuH9lG1Ezw5UT/M4TR/8v/5S2i79pNoiTkYx994iVHxQd/+j503cDRxaT72P946cl0dRsey7O2fL2DUtklDXYp5pufqODIyNzazCpOqYgrMxHZjIUX+Ndd4/vb4cPbeebMzHXHf+Zn/zV/9LE//Cffs17IMX3rEGcfn6GxfG1UZPTOLCtHb8xOaG/N4c23KU8OUlu43tm9ui2duPb76ek9XLx4LQ5p9vkFbtl/mG9IA33Pvh2cPX19pnP0wFaGRsWmcIFT3P3g3Rz57pHrzru6/fVf/+UNj3vBlWygJuf06A6hgbJ5lZN/9mnhDN72ilsZHqmg2sJhD6IOxWKekmkyc1lEoIp7NWwrTz4/xeKs5E+sQ9XdpLxLGKGxE3LPrbdhKS1m6+KcNI1JvGv7es+2u7nrDXfxB//PnwHwex/9D4xqeRI9iyatj+ePXuYD7/wxnnj0sxx9XkTTxnYcJvJqnDt2FIBf/s3/yvSerdSrtT7TZ7sbocr1ryV1wzQq1/VRZ1nl2LHvzz7Brlvv4bxk4vzVX/ktxrbdw8tfLTA7SRzRXFwk6F4hvVndDNEYYu7oMbbcIpxj322xfmmBQYkf2zh9BWP3P35LZK4+8vNv6TtWUw8IUpS5R8/xE//qlXz6448AsPD0BX7nkz/O00+d4uKCcOI9z6YVeIwPg7EuDGFD1chn1qlKspRydpBacJEMeWpSX8moTFKZyOOduIwisROOqpItl0hNqUPjB6jra8wtnWVqSjgk4/YWnj95hnIlz/IF4TiljsK4ZuJKw/sCUN5zB3FznTFbRDXfdP9uqo0N3vqB/yAePFmgtlLHyar84e//qXgHisHe219L3tbYPSTG7PfO1ZitNRnJywx4kkFLY+y8wsVTYl342z/+FOPjA6zMnKRqiXuPFejWZkhk4K2+do6COsDsySNYbdEvXUWnsXkZp2SSNYRhutmIKI3uoN4Sn5u1JdarLgwPUqmIObN6eYVcJoOd6rQlY2kQBzhphkQaNpqiopDQ9Tp9zIeu60RBhKrqqGoPSxyjq6rUshL7aiI1rq6QR0mHJlH7BA9pIsTse6K+Gdum0+mQJCmGzBaqZkoY6axsroItdS2DDqmSYMt7So0IN0xISLEdKXLb6hK5LfzE7M8jU7UIAw9k1khVDFItptntEMlseybvYOUsNDUhcMW+YmgmupGQ6RnexKRahMIV/HIul0HTdew0pu1KPJOmYRgqJUc4Mq4fgqrgtTu0WmKcl8tlDEUl1RXiUDpXUYJqWfi+BP5HKUoaE6uQSJyLo2dYn2tx6um2vM4Iw1tU2o0WpiUcC78b43k1oakk2eWyeYc4TUgVsYaatgNKiuu6DEiR5jiIabc7uK7L4IjYDxUVms0WTkZmMJQmuUoOP4npygyU0oU06UCiYmi2fGYf2zHxG5KkoWCyY/84cV1PdAAAIABJREFU+UGNel30wZGvX+L2B3YxdYtG4Il3OjpWwYuqTE5JId5iB5SIc6fX6LhiDfTCkMDNQDb8/9h77yi7yrLv/7Pb2fv0M3Omp016QhIgNEGCdKkqKAq2BxWx8egjIKLSFEVFwYYVRAEpgqhUAekQSiCQkJCQNklmkkxvp++z6++P+55zZiD6vu9azx/+1sq1FovJPrve+97XfZXv9b2YKUgiKY6pRDMhyLolXymSzZh0b9tDNCq+o86ZrVT691AaC1A98cxvbhsgO1Nn8QLhbI3kRnGqJQq5IqYi1jDNGyISZkHPoqVlts5uoFTOMdAngqOLO2cQ+CqaFiffJ+y3zMyZ2KPd5Ebz6IYYv6a4RcJwUSTRjzbm4DoO1WgAMqBUroQYehQjGWO0PGGLJYhqaTxZi+ZUyqi46FoCVda1eXaVeMym7JbwJVOm4msEeOiyIMjzK7Q1JyhNr5KQdhjmANrITHrVHKEqAsHjuzfS0TaDMdkfK1BsLAUSfpIjDzsSgG28TnlcMGgCGIHO9Zd8AYCvXyfIqoIgAFXjuq+dx/VfF4yMl15/M+DhM6EjNDTfQ1FCFNnkGs8QmV6tymBeMMCqVaXmWAEkYx3srnbhj3sskjr+z79+jKNOP4iykceQdV9KGMfxyzXmymiDSec8hWrBJJcTNkGpmCM+bxHNnUVyG8Qat2LJqShWhU3PifBjtilGsj/NprBENinGJTqsozSYtLYkGZX2RVXyKPwr+Y9wrsIwxJ1oRug6tKUbqVTtWoRZMXRKlTJuzADJYqdFdaz5c6kUxQc82rUdxQY9G4NQTDDXK0i4XFArwPS9gDAw0SVlZRCoEDhomoIq05dBEIgonBLUSBgaYyZhTGdwXH4EqSy267Ju2yp+8fs/APDza76BPzzOHXe+gJ8UL7h1Rguaa9HQcgAAC+Y3Y2afQB/YTTwuPF83EuJ6FfzAIR4XEdpS0cfzvFqTuDBQUUMFXTcE3JG64h/s9ZjWLovrtADfCWvshLlxsbD96q5BOLE+5v2V/ak6AnoSRnUWH3QQ8C2OWSGMsjPffzB33LeSWCw+5V39/oZ3OkQ+U0kofnnN3p2g6y5/aK/b/5XcduML/0/7T5bBvgKds0WhetGo4JRcwZYVmwrp8ydBRAGYRLTh2z6qniIzLUlURl+q1SrzD+rkkEOy/OoHTwKwevU7HatPnH8kaluZYz50FgDP/PXe/+M9e/kim2W2ZNuLUzNKe3Os5h0rPu7qjDS7KOJFzCm/ZzqaGO/9987WzI6QvVKbGHXHNOY5mA1xqmNT76F/XQ/99NT+rb+NyOL/RWbPFuO+Btj8Yjedy+bgSQN6b1LOu2wfGyYeFfANw9D54PvfSyypUuwX89r1xxkpOdz227+yZJ7ICK3uW8eCg2fSvEAsqHHaeOn1NWzfNJWi/u2UHx2z5vPkX57kgg99G
IAbfnUTE3nCNQiH8vj3v5+Pf/kaZr9rEc13iWzdrbfeNeU8R685iROXHzJl263/fIGqL76hCYO2Wh2Zss8XLvgijxefm7Jt/0OOYN3ql7BSU5tKL1+xFFUWYDtlgyNOO5FVTz1c+70tkyVfEGO0e4PIOM4+bDFDSJgssN+xi9n4tIDzXnCpmMOOVs/kafl6NqRpcceU66/bPkzPkEdeLnId8+JUdrrs7i5hu8KpjZkZ1GKIJmEtdthPhDhqAiqj4jgz3cyarbtYqEBEFWuBpyTIVzV0Xxynk+Ll11/ETEVoU4UhVcFg1sJZvPVmFy3TxHseGixy0ofPYNtmEbHt3dhFdt6BGPE2nH7hVK/cGqBEVDZd+ysAGlua2N27nq3ruynKgJYW2Hz7yh8TbUhz2BJh8Q3lx8k0LKSkCkcuHjPRS2PYnkq8Uej4la8+yVHvOwWzvZmkJHgYccqMd/eTTgsDd+6Sgxh9aw8HHrOCgi2cK0uB2UsPo3vrDjpmCsOiVBjllaeexJNrmGJlmNaSRFNVchLyO3P+HLq39eBEA3RNEicQw/GLKDJYhWeiqoL0ovZeNa2Wjao7TvXGwCB0ZhAIYqWJT14cI2jfJ2fKwtBHkQFC4YwYhCEYE3A7qri+QzaZZFxGl1FCdFMnkNcPHA9F0WhqzLJpg8jyDe7uJRoxCEJvwlbFdR2i0VQNgeJUy7iBj25EaWsTWQYvLKMqOkZExZLMqoXxAmogECQAmmpiWAq6odRo5SuVqihD0FQs6QSGoUK16lGVML1QVTAMk4ip02w1y+NKRBULQ9NJS6r3XL6IpgQkJPy1Wq5QtavoEb1GEjI0MoZlRbES0i7SPaxkknRTgmEZoMg2pgjcNEODI0hODVRVxYpYlEoSZhaAbkbxXY3hARGg0BQdTdOIaHXIVqFQoLm5CTMmns22yxTGbBRfR1ElpNK3URQTx/UJZTlGpiGkkoOEJBU4/KSF6FaO7ZsHaW6WQYuPLKBxWoqqUkbXBOrFKVTZtq5Ia4f4d9UziFmCMdKMSOKdsoNphWSbY2jSse/b3U8y1lZz5l3HRtMU4gmVpibhqO3ZOUj/boWm2VmeWy+CUQfNN1l44ArGR0Vz7vkL5rK7bye2o5NMi3uo5BpIpAL6B4bYvUcS2MQ0KhWPWTNENnv7jl6iUZVkk4Fuivmyo6uPUrWfRCZKQdLkt6c7UA2fsbLQ654WEE010dY5g4FdItAeBnmK5SK6rpGSc8Hxq5hRHUOSncSbJRFa7xCuIr5TX/OIxRIoBYdQEjU4ioum+SiheO+mYZEfUTASFtGMhD06IxTtbgq2wXCnPJfXheWMYllChyuhTanqYFgZGpLyPQCCKU3MxVCrr/PORCJDU9DCqQFhFB8IUGWWWlFD8d5Co6YTAjXAilqomk3vNrG+3feL16acZuO2LmbNaSaV9nHH59RPn1QoDpQxTUlg4zpkWwzKjpjnAS0oGkQzGplmoauj+jxmdc6kd3g36bSY/D27tnPA8kNpbRXvpVTIU7WSmAM2vny+slYm4pmMjgwxf4EgmcJQeLq3bv+8XfYRWuyTfbJP9sk+2Sf7ZJ/sk32yT/bJ/4L8Z2SuFJWkLFies3A+w/lxCsOj5EZFlqi5rZVsSzuNmRRDgyKV5zg6Y6MlUo0iWhldGsMdHGdkZzfx9ETEO4aCQsR08T1ZcxHE0DQN2xZRv0gkIjNVGrokoRAF1Dqh79Uic+Vxh865s4hJ+E5pzGXXjj48Qm56WfTT8YoFbvzZDzn6iFZeWyciFo+ve4WRoS0MbhGR8WJ/ltyenURMBWQ01A88NE1BM0CRYTjf99A0Aybw46rYfzLVrRibKEP9Ffb07b0v1YqTjmLlY8/z6KNT6yS+/d8X7nX/v94p6mX+yrN85O+nc9vN78w2fegL5wPwwnOr+e2Pf8L7TzuSV9YLD/7wZfO49blBqmqVqEwFJhSbM49YtNfr/W/KKYcLiuNH1q2CMlRkGDAeJEHJ44UBg1umEjeU+8eJyz5aAKpWJ7fQozq6HSVwCpiWnBtBmjNO+SjtMx2u/J44rjjg8JMb7uUPd94AQENsAYsvyfDMc6sY7RPjYuf2YKWncdanP8GyeQJaetVlP+D0kxawa1jMxTde6+ZnPz+Lr/7PvTx8/zfE/akmH37fd/b6vEvmCTKCF1aJDEOgTKVvHw99DjzhVLItAur15J030rp4DoevWA7A/Tf9le/88ma++tgDfOCkH9eOu/b6Cxk+uo/bXz4HgOVb9379r1/xfrq3ief79CfO4vkV63mRl/e67/9J1jxT7601Z9k8Al/F+deM8ehelKpvoygSMhhG+NtfHmLx/ivY/+AjAMgmY8xLZ3n40Q2sXl2HK85fvIKUIaJy+bE82zeJnlD/81VBHtE3NMg9d0zNON17yx+55lc/5bILxHfz/dvvIhgbZe2q1dx7u4AKPvnAAzzJA7AArh69FYDzvr2Qm7/97dp53p61Ajj3vUfW/h4piHk2AZuakK1buijYIgv5kc/+t7jPrIAnrH/xDd51pGj4uOqFJzlmxbtZtVY8UyRIsWNrD907Bug8WDTQHSuO4IYiEzTnIBGF2/76OpoWdlCWUcamVhFJ/fVdf2Ij94khrtb1zvY36rVazz6xic47DwZg56rX2LS+m7iZosES31/fljKVok/SjFEpiYyZYrsY0U7MuMhkKVoRw0pCqJCSFNwN2QzOzp2YySZGpf72LAet5ODrIos6Xuln5pws67evpiQhr6O9fRy6/HSajmjmza5nxPNEM2x/5SVmtIro9vO9LzIQVGkLYyyc3QnAow/dz2fP+wAvdokWEk+8NohZNolrZQqS2nph57tondfAUG+Rvz0qMutOwiaTyaDKTF0sEqcURjGUAF3qk5H8CM89/jCf/e8v8cKLoon4nGmdlJIlZsoaQS9XIdU5B7VhOpos3yw544wFDhR0Dln+XgAKI2/S3/0AVU/okRnzMzQnDHZu6iaQWamxHp8DDtmfru7thOMSgqNCqFqokliBKEhuhhoJxduzVBMyuRHxREYrDEOCYGq0OgiYBB9UCEOldqwRi8n6K0U0nwWMiIoXKAzuGUK3xJzUVA3XExTxICCk8YRFbmSYQk42/rSiVMs2YegTsSSk0YBioYIbCj2oRxQSyTbSmRiBIpRJueCgqQqqpuJLaFI0E8X1yli6yL44TkCxWEIxdEETD5gxUS+mAo6EiiuKSirdQHWinktRKJeLghxL1nOl02kqtk2ghGRkVtg0TfK5IhEJs0pms4yOjmLbNrokEck2xtCUOCVJ8pEfqeI6HkYCkCUAu/q309rchJWM4vviPVSKFRRFIWaKjGmhUCQ3OoamqfiSSCGZTlGxbXw/ICdryH3fZ9AZrrV+0RWLMPRRMYhJIgPX9/EDj+NOP5BCWeTtX3liN4v3n8b8Q8RczIW9NFopUlmLtpniHvr2DPLSS30sXrKQqicm9o6dJbRykq3rhT0XxhpJmAGWmcSuiPeXSiUoOaOMj48z0CN7pflxSsoQmmw+bkZ1FHzKZZth2fSwUini
DF7wW1Do5e41Ax4Za3iWxfal6E53nc+/A0bU8QZhhqQKR65AaLvd+75dYPMH3qOGtV0VswPLKfjtEhCQ3WTj8KQGnXLdi61mPqvOn2N7BlYAI7XSMTCQdzJNtHtpQj3z/Iln5RCei3NRwcrBHhEGFn8NwW5148QVn2byZ+m+efeglbt1AVceC/95Of4O7/+LvohtTecYqEisLbf++3eFQ21butiDRuU5TMjn7QxvMTCqUBMjmxXnO5HKoZiETNRl+d28H1uiytivtdrVU4v7DA+aUFzsyLDP+xqVkuumIPTjHLY4+IsOj973sDq4sLRJL5VMtonDq1xK/+p/dSHJIHeKJRq3aIUFA2dAmDiJGhIp/9zL0AXHz5QW5900V846tHePC+5wH4zV//IEPjKYEvyH4ANNVCVdWeQxZFEVockyomP/MLItj9jZ/6gpjX/wEbXUl9Y45MxL22MlVZ+/eZs/xmSnZAo7b22oB17plneddnBYPX2RdPcOllVzO2dydrK5u9Zw/+y1NkNZGkOTvb5qmv3Advgbs/88NB+eU7RFJkd0Gnb2SIcllUGX7vd0Q/xhtv2AuuPMR0kx0T0Ox2mZ4TjuEf/dpHfug7bzhwBQ899GDv77e878Pc++xfASd+6L0DO8Q5XhjNkLNUlusVJAcMy611Ks2I9epyz6muu3UGB4YZHBbJTi9I6HSrNOvrlEsiuInChGK+nwNSP0rPllhYauGSksiepKTt0uzW8H2f0Bdz7GTEmaZrkixH02h1fNrdVfxI2Knh0Qn6B3cxtzDbO5OHR7fSbnZYfxUyZqYj7Pjsk8JGKBio+GT6FNTzIjWRKwwyki+QtUUyN7O7n1xBQde76DIpmjcGWOo+zJGns2RPiveZacDY9n4Gi6K6/ak/+Atufuu1FEojmFLYOE2LvHJ8ilRJBBkVUMqbaJqCJv0rSyuxsD7Py2ebFGU/jUMd1/d4/v4zXLR108WtpU0SeV71GQZJGNJqKYQyiMzn8wwM9rE4t8LzT4v7s+0SLz7zHDPnxT62SjkazVUayyvc8ZOiB6nVXkVLNLSswtYt4rx+4PllTsxsZXhA2MXE15me+h6qqmJlxP6rrCyj6ybxDxC+/Px/+yjzx0UwsLBc40233c51+yZ5xx2C2VVX86RUpT8g9Q2jGEWxeoWFyM8CdYqOTUcSUIVRwsRIifGtu+nEAhVy30NPUDKWGCiIa6gA73jrdWCFTM1uikbvtBfwArHfSzv6OH36NEHD4oprxPp8HFh5Ra4JaZvDVpuCWiCXF9dYHChhaZDPZdEl8UY3TClP6hycnOD0ObH26nMLTE114Jc252T/zjHc+WXmj4oiTGNdhT+G2eMP877bxXNo5sYZndjD+Snhy5wya3zuE39DX7DIre8XlcA//MJj8Mvwnaef49xp4R++7uYdTJ2Zp5gTPvPAUMT6uSp9r2JbymsQ+wFOXieSPtlG9e3fGj8ywdUNu4TBuO5tb+R/3f09FqfWGNkmWdQKFv5yEz3joEgB1jSjUe/41BrigRQMKb6nhDQbm9nB/ZfoWLZCPi8W3ZZdoBgRcSCCLcPo0qiC6wa9A7TTCQhCD1VTUCVERFV0VHUTGqHrBqmWkqQKzbbYnNlsljCOMC2zV2533S6WbaFIxjbP89i7Zz83vOlN/Pmfi2yFM7ROoOl8+lN3sWOvgPOl6PQPZxgcFgfMPXc+wTXXb8E0MhxHNL3XqjrPHz1NueDQ3y9JGGKdO955Lf/z/xIHw75LRnAck7YWYJYku01qEMUewQY+xE7QVAtHM0nkQRyGIZqTQ4li4o17tjW8KKBYFpsn8tq4QRMnm0fTHDkvWZSoQDZj96pEgR9i6Dqkm4KTSZKgqiq2bMpMwhDihDiOMXVhXP2oSazW6B8XjlUU6jS6AWvrXWrLYgN1quepLFfxOzotibNU0gyKGpDKMke2aGB6osoYBB15nQKuqbzKsKVpShx5WPbGpomwLRPXjUgVmaHVwTBVClmV0BcH4T3feBTdCPmxtwgB1v7xPpxsAS+K6MYiaGj6TWK9i6mrhFGnN8cAiqyYqnQhSUj8hB69v6JIwg2PVNKsKmkgRDs7klwFAfnbCL42RrMjDPCGwbVNC0txaFZbci1G6KpOf98oE5Oi/r197176B8Zw1BHqFeF8vPHW2zBshXZdzMvhPcIITQxbvfR96ppUZzokdChLFkPNyKBoBpmSeMZmWuU910wy39C4674Zce1GHSsJ6fa0q7ukXZsoVUkN8T1xJ5GZOo1wI3vuuWiqD9LgPzgbce70Atddu5sXj4oMFJeA7ujMHZ3mnW8VlZ0ggpmZV+jI7PZAXmdx5hX0ICBn5+TvRShmShp6yNwNS8+dItV0kpyE0qYxKSkaSk+GIEw8mmGbTDlLc1FCWTs+saqzsCAyrY6xDSvOs762zBYJg1ptVjCzDrXlNaZnxOFx8NKrGC9tIZCBuNeqMTA4xCutEHtVSgV0K1x8x1vYe+AA9/zFXQD8+X/5Lxy55fvcdOt7xIX/Nnzlm2dJkjaEwubVgha+10RRk57chWGq5IdszK5wUNrxEqYOS8ZennhBXNPqyhK11VXSbgtfFWu3HXoMKRphukmNfmx2Ha1QxsyKZ+Vj0fItsq+CUCWpSkKMaYqg89FvPcKBosFFQ30MHRDVyFYQ4XUSVqcXWMuLg3D/xfsIaOGfE65//+AoqeFS2rKlJ4RNGHP7la9nYHsfuMJBfC+f4M0/97NUKmLvtVotAkPaOoQ98/SuEMhdE3YkiVJUTaMZ+ayemxH36/u4SUyaKsQSLaFoAgpoSJHYQm6YoR3bOHiVhmNJyG9Xo9xvcezUcQYjyXz25v/A1PwMa2viOtbaDXLlvcyd9ynkhMPeqc6zpTRGrHio0jGs11K29PWTlSiK0fJWrLCAioki9162ZOB1fQLXQJdohCDyUTWIIokMMFRcVDR1k+b6c5/9b1h5hd/9vz9FvSaTG3qXBKNHeJQvmq9hd934LkUR4u2NithbxZEcURJTGJTIgCimXBhm/w2v5/m6qFhkCwbPPn2Cta8dY+uosCu8DwoDKtGy2Mef/uO/5v0/8R72/MJPoWtiDiYmDtJWIhpBl9QWc5zrK1Dr+CxL4djbfvKjRFrM99MlPElMgxHTqKzTWm1z+RWHAPip//ifqdaadCVs7qG7v8CVB6/CNor8LXcC8MijD9E3cRC/28CSdiIIu8Rxm64q14sbsRpEpFok9htQKjrUVs4wMDTGrn0H5foosry01qvQeL5HmsZkC1kaTWGb+wvjDPfvYWZKZNeX1p/GUlRSJSKWCd4kCCj3l4h8KBXEPkJ1WV1eY3hU+APtbhfV0Ni17+IeQkI1dJbWm7i+hWluOOc5xiYmGRkVVY5HWcSRFZ2P/hfBMlruM7EHHUrkKTjCThSyRdqJh5qRznKjTdtVCWIVV57HjdU6Z14KUIwsO7aKZzw4Mk4rCZh9UTDS3fHT72Bw+0HmpxdRZBWs3Q3RSwW67WWmzwoY267t+4lpk8bibPLP+FRq8/iGQlcKt2vVDn3lccZ2j
3Pk6U0qfVOzaMtAykz7CWKPkj2IF0tphFbIiRfWULWYXEHcz3BfjleOP0DkigA+a4ISeegFh3xeMqYqJRwjwEBn/04xny8cP8bUK49yUsIlTfKMDw+wOr9ILM99Qx8mU0x6SZON0apbfOTXfhOAYnaAi7aOMSbZWAFKpRLNTgvP84jjDTRQiqropLpMHkvkxJZ9k3Qk463fSiloNkeePE9BPuetQyaDhYPs6Bdr+hQvs7LWYaDfeRUZEMy1U1zpT60tJHS7BcZHSlQbm9X6jmwfmRwWVc2B/QZx5LMi6eJb3SU0Lc/ikkd9dQaAd7/7Wi7fu535l5/jbZcIP+9PPvsk695rIblWZo25s1UWlgVM1hkVsND3vP2NHLpCJBv33LyXgpVDkXbqzju/Qb9e48r3f5B/ekysg/Kg+N7VtQbbtoskTBB6zE432LVb2OVtkxNcv3eU+fnNinvUbqGSJ1FaJPL7E+XfF02/QGhxYVwYF8aFcWFcGBfGhXFhXBgXxoXxf2D86FSu7hCZ8+986yHyeBw4XGBRZhnjpkJESDNwsTagQpU+tCjLT39AUBB//+GjLAJJIaD+Kqj0zGnI5dVNMdcWrK1qbN8ntY0yBqXiMNWldSxZNtdUkzCIIDVJZB9NGArY1Qb1bByHaJpBmsQ9CGC328V2dDqdDrmMbCiNY6IowrI2sgAqUQQtd4Kf+YXfEd8dLOF1l0jSOtXquvyuDkG9ylMnRUl3eAAW1xbJD0z07u3BR56j5gbUKxovHZeQsVaLXHmcLYdElure7zyHWsqQZjpYkchuDfQZZHIOqazKKapOs90kmy/0KnOmaaE0AyxDoyWhSfV2F9V0SBIxwWrksX/3JGvVKrrUmMpmdVI/pLJeIyN7mdTUJPYSkFMQRRFJklCv17Hk3EVRhKGo2IYJksAimy/TbVjMHBWZnqXzVRZn16hVW71+HM2KsR0DR1Upl8X9BUlKoiukicjUx9SwTR1FUYhj2YRuObIhO+1h9AXxhkGKrCipCXEEAwPjdAORhYviDoZuEXsaiRXJZ2OBqvLM4wKSky06vO6md5G4HoOy1Lw4tcpafZpsNtej6dZ1hYypocZyTekdNM0giRWQ2WVd10mUhCQNellpTdNI0/RVz8pE1zQUJXxN9apST4mjFgVJOeq6AZWVZUYHBd76msuu4/CBy1BL20W/G9DtQq2TsBytYTriuqx0iKBtkSls9DWK//aVt5JIPThDCUFJSJJCTyPODxOs1CaVvXaOPUI9bvNPj57jsr1ibayuWswteYSq2NdBGKOooMQJhoSHJpGOpiQomCgyyR7HDqnms1oTFQxTzbNyZJqjK0tkvU0dj7QVs3N0gAP7RTbt7Pws683mhqY3WmozO7OCoup0EmETYickScA0Mjim7EsMPIxII6hLrR9bJVJD9FxKIkkL1DjFsiza1Spj/aIq1dJaLNeqZGS/w/TJ0+QGiuiqSbPalWtKQY2BBJYXZgDYt/8ghUyG52bF3yMTBzj1SoVWUmEgIyoWF+3aReKnZLbs4OEHHgLg45/8ff7py//I+rLsE/ptGMytENWqHJ0WsLX5Shs7SfGCDstSs8dsu/j1ddZkBdG0ErTIJOzcQ+qI67TMIjlDByWLI/txxvLD2P06L79c6c25F9TQGg6mLfbjelLFwcKMNuFhhlLHbsf4joTgsoaRDPC3jz2EdVQ0vSdKgp2qxF2vR2VvOn1EoYuayCw1JraqkXomsYTgZssmI2PbmNg2wY4tojpx1/CXeWr5mzTXRSVgrtMWtNe3wnc/K0Ro3UYN3SnQkOiI2VaDttWlFimEqsxiahla3QaKYiCPBjzPI1Uj2lJsGcNkYssk3a5HKkWvDb2P/n6LbWMj7N4ryBWmzlWIuhpfuPPvxe/rKgPlPr4yNcXFl4h+3F/60B2Uy1ncqIYVyOpgpkrOMFDk/s/YGkN9/RTzNm0J5SrmChRLCVEXlEQSEiUKQRAQy8zru3/6Y/zg+MVfkhT3HwBD/lzJydP1A1RZmVOk/UpTpaerpyiicqW+qi8rDkVlfMNuBXFMGIZkM3lMQ+yPMPZJ0g7jow753GYW/MoD1/JcTfTIlSZKFCb3M794Bj0UFzV3bJGOt4yhxKwuis+12j4j2/JEEiKnJDq1ZpvQ07BlxWtwvIylqxTGSgyNiz155lyTif4SYbBpO+3yENdeVe5VrvLFEdzuAsV+h2ZLoCbsfA7HLNN2RZVKCTzskoWm2myZED2P9XqTETtDeWyYUGptnpg+S2W1+ioq7YQo9HBMi9F+gVTRzJS1+TlsiaYp6TF9fWXiOKEjqxOBoZAkEXEasrYuzifD7mBZReoVWU1TEjTFpjp3voeYUHVIFNDQma9Kbc/YRVUT9h+6ojcHg8VxzgJ/85CAqE3kbTKphWLlSX3xOUW36O8rkHUkpX+7RnWlwY6tWxjeKs6d+fOzvOeWN9FcPc4ffVGQjQTJOFfvy9GZF1CylVWbldUAI9CZqYgqg6eYBKGCHtWx5Pk0fepZktTvnZmqlZIzFRzNIivPtcJoG4wsndAnk9/U7YorHSwpOr8e1bB1DS1eFdqO8jmESYtcLkvbFfczPVshUxijo4r79aIEQhPDNrjrLwV5hZGB/lKOddfgXf/hDQC89VKFB559iq2TYg6STkTU7VLsL+FKyFCSdolaBiavFUT/sRvfwkVXCRvxwH1PcfqlGS47tA3ES5yeeomMYxFHom9cPECVOA1JZXuILvvWRycOceSkmOOtW4fQOzO8fORh+qrCjwwbK2iJw+HDF/V+f3rlRY4f9ZiqCIh638QujqbPYtjiOqPOcUYmxzkxVePkPfeID90JyLPynj8VNuTDH/1j5qe/zyP3fBsAKz/GgdveR6tS4xd/Wti3bCnP8aNTFPwmT78gzqeXZ1uUh4ZeMyenzjVYmq+SyL4s3RPX8uaJK0j3i/64PjPPK8dn+M4TAsb6rYeO8+Mfeg/3PXIOiZIl3WibCHT27BJr4cmHpjmwa4BbbhKV5bHRAaZPLhC8isZ/27YdtN1vosYOiiKqfJrxv9H5+3f/9f/HUa0Io3zlDa9nvVqhU+1gSgFNP/RQooAoa1IqSBXunIOmQGlMrLid+/s5wv0YkYOibRpp1UhQtYg4EgapUo+p1VK2SEZBzbLwYg0l1lGkOK5djAk9C9IQRZIybGhfbeDVdV0XTnIU9QIwTdPYaNJqdYQhU9INGJy4nogUL4n43vcexLKE4z08MgFajlLfdiYkfnxgyMHrNjgg8bKryQJrZ44yP3O0d2/dtsvW/BB1r4bhSGPq51hr1hneIoy04q/jNdrooUVki2taXYoxDJtEwgvarS6eGkEb6g1xf8VcQpRCX3+WWBI8mB643agXTHYIWZpaQosdHFmS91MD4jyjoyqW3GxREmC0I8yseAaNQKHsmKRxnigS160PZlDSEA8Vc0Nwtd/hmcdfJqkK58AcKJCz8xRLGTISpqfGEa5t4MZNcllxDTvHdrE8v44ne2g03USzLWxdRdPFNbUbIW4YEyYO/RJR0WmDrmUgEYdn
NUwhdIlSj0A6FlqSkMYBZFT0jV4mTQdChoZkb5iewwtVjNBnwyusVJpUVpu4ZqcnSKwabRZ9pWfcw9Ank3XwfRfDkKLMdoYwjMll7B4pC0YHNS0RytJ/krSJQg0NBd3Y1Co7c3oWXdVwGyJg78sN8OGf+jnecvs7AUjtHPVOm6DrkspelGI2QxYbwxlCkaQTBjoJkGz0j0mDvm3EJpX4/zSx6AYRuqGgSRZDT3VRVIVsLHV2gjpPPlPhx27eR7Am4Lwz55t4UdpznvNKhB/rhCmE0hBGaYKRpERJiCv7PpLUJ/X83v7sGl3qUYfc0Yi9ezb1P1Y7DfZN7qS/KEr+Dzx+H82ui9RoptOu47dqKLbSc1bjJEWPIVGC3r5NDZ0gSdDl79mxRpDoaE6hB+/sL5UZzvZTMzNM1cWB1nabZC2jJ9gdZrK0Ww3Gd25Dl05npqXS7Lo4pQznwSeeAAAgAElEQVRnj4vG++uvd/EVjx+7TiSd9l5+mJnFJfqsPPMdYRcndh4gckMWjpzhSZkAUQ68iUtvc5l6/qHeHLx48lH2jO7HqwhbktM91pdXGDIdDl4nbNDP/sRv8OVP/T6fu18Qf3gdjYxjUh4rkQQCAqRqwkHXNQOvJR3KQZt6tUvY2TyIMq5LHFdRJUykrIQQZfHjTTIIvb2Om4ZU1sWB+oFrbucXf+vnuetb3+DRR4Xo5cjYMAE66wtt+svCScn0Wyyfn8PvCMqKMFIhN0xg6rQbIqCMwi6VxisceeYkwde+Jt6nh2yLfK4ricDNyfQxsFP8v2Jv9KLmeMZdpybXvbY1R+xMEntVshtQZb/Gob4dxEULJyf6KdLEZ9/Fh1mXDu6Ljz9G6ClkBvpprwsyinymQb2dsnz0FA/dL64pbwxx8WVX8+F3fgiAerjEPz70dbZtHefyA8JR3DZRYH7hLCoOcSxF5zstkswM1ZrUiltc4v7H10m1gLe+SQSTp84dwbHzGIaF00vsRdiOxat5KT7/hY9jNOv8zG/8CQCf/Zsvs7q6yn/9nV9FU2UvsaZgm0YPApiSyMRUIhpT5YjjGF4lvGugkqQxcSheMxGsieeX6sgWS0qm0IQs7SoxX9nUonrksePUZTAyseVS/ukf7uJ1113H2ZOiD6S6WsEyLJIoIpF22NYMGmfUHswqiELK5SJrnSW6sk9itpZgajZj2y5i6pQ8M/FZret4raXe7z/wpS8zu9oFISlHszZDVhG6hP0FmbSL8mixL2HcYFo2nTBhx4EtVJbFur760PXs2DVKuWDQ7Yj5Wzn2LAt6jftfFFBhz0kZHxxGUxPOLAn2sSwldu3ezbmz4m8vSlhYWUFVdSJplBIvQkkSdN0gSMWZFTQTVLXeO3csS8ewfYa39ZME4txZW11FSxWa7QqqPA9N2yF0PWr1zTkolYXNtOaPAJDZOcT0whzTx+cpZqUBVX1ss4hTEBC52FB589vezJbxCXTJRhykVe598CGWps+jVyUB0tgylfkUVRVwt9is0FwRUDdbJo+ySkTJMSiXx1lYEGvDNgzS1N5McqsxnueTem3CiriXI3OL9GcGuOGKi/DcTWhX4ETIdifM1ERJU1LNIiNZdhueB2pC2HEJZRLWyKvESUIie66NyMJXWmQMh527JBFGq41lhBS6HXKJgLZtPWRySdgmI/s3G7WIkyePk8vliOV68YIaenaImr855wCf+fPP8cUvi8TCe95+M2+74xJmZzeJI7LZrCD+SuNNWK6uEkcamiquM9YsoMO9X/46rhSBPpc8yfrSNLu3D7C0JPqSdUOhWltl/vwmOcPc2SVMw2JC9i6mSkqimj1YsF5w8DsNml2fg5eKoOwYR6m1xXN7z6/9NgArTZ/FzjCXv/Pnxf0Wt9AMUyKzy/ePie+qNRdYXavTrtZpLYn9MNLfR3Vl7jVzst7qYJVsHJnM2ejV/d7yGmfvFsHbiFFGG0t58gnRf3zo0NW89PQcutmlTyaYzj+3AB8BRVc4/ZLwQcqGwo1XHMAyRdD06HeeQ1F9vFfB2L/9L48xMFLEKWg06jJJGPyASOMPjB+Z4OqsFKFcmF2jWWvTCbqsr4pFt2NyErtcoLrYJA3EIVddjWi6Le57WDTgZqU/5UZdxjaLO1z/BpV2BbquMDbbDwOpTtAVD8dttzlw8BbWzjyDqsnmdSMlTUFRNTQJ/Qx8QWu9ITgrMOdKr4ogXtRIkhAF0KRhiaKIOIoJZHVEVwz6+wYpDw0QSOrubnsBTbU5MbXEqRfEIjcthWIhQ6YkNmfZPkx52yF2rD/Ms9wNwMfqBrNzLpQg8UQwNe93OTiWI5EUzqvhML6hUSm4XJMV75mp1ljGpzkmArlt+iDZs4v4N/dTKUv6dNdDyVhoqUlJKleXMhkqGnQkzTS+SqnPYHVdxTdENSuPzrDqkNXG6NqyX6Qa45ctFCk0uLi8TOvUIqqj4hUnAVBVhX2OTinbx7Ilrn2t1WbE7CMnWbim/QDNV+lUV1jtbDAWlqmFAduTPlZkxWuxUyPKhqS6pCBPIW8auKGLLw1EakHUSmkUaxSluqlmWdSVOobsQRi1NNzAZpWEUiiraYpNUwHdjEgTYUzUqE2sljEMWQ2NYxRLodMROGgAXTPpHzLRFYNUChmGsU4mJ5rrAQpZjVy2xOparVdV9MIOmhrjpimexG9r7ZCM3ez18ZEqqIqKokX44SbzWeJ3QC1ywxU3APAHv/f7FAv5nvhwlHTp77NQ+3KwAR+2EhIlJg4UDFmd8JI2muqgxxvU/cLo9CmgZjaw0SHEGn7s48kqZkktsNiM8ENxnUem15k7t04aeTz4gqAhthyTiIBEBsudMCXyurhhF0eyV6ihTZzGRGmAJoOb1FOJUx0kLbFfa1LEIS1FJLnNTODU6TPcfv2lJPKZTs1UUJIURVbf2i2PTsdDVUxi2XBumDYksSSokSQJaSrmmI1qb4qiimx9oyGyms72UZxt4/Tnc9RkBboyu0Di2HRkX6adzRClNrWmjy4dhInRbWT8gLMnz+D6ImipNRvksxmWz4ogbWZphW1bJrFLBRxpA89+/2lcTScKFc7LKn9leZ1rPvghto6LoOnr3MlX/uRTvO+Xf5XL3ioSUZFmMHVkimtuvJqzUkD32NEzHD0yg5WW5JrqkLouK+s1IukJK4pPEkYYhkUsK0dzp56iVCqiRJtZvDBeIUXrMa1qhk2S+KivcsQ7RpNw0eWjHxKU6h/4pfdSHdzLluY4t1wkspojSoBtZqmnYMmejnzOQk9jzpwWwdzO/ixbyhovnTzNzHERyKx5Xeam1mgma+gjklXUVUjSLBOTwvmptjpEXTFnvnRWn/RahIMl+gbEe3J9fXS7Vc4cPc3+KwX1fLGwg04XciOjtBrCOc/qMZWZtV6y6vDYKKePzlPXIoZGhfNR61Rw+k32TFyCL6tLx557nocfu49jx4Vdfvc73sttB97Kvxx7jJdfEX21F51eZ9/uSSrra5iamHNVy1JwhjHlGbN7+w6GR1V2bfXRLxV01G2vTZx4JJFHHEsKbkXFbwWbchtAs7H
ExPAlvb/PLMzz1tfdDNCzgxYF2kGrVxW3bUHsFEURurnZl6FpGqq22WmQKqDqOki22SBM6cubfOjmS7nn+0IaoaloaBmVxbmpXu8pwNatOlNfkTTatmC1s42AQCIIkrQjbKQeofVE4FMUDJQNwXfbRlE0SqU+PE/MQRAEmJbF6TPHyC4J52pyzza0xODwIRGY3s+XyJczpJVNxsWBco7OcoOLrnkDB/eL9fGXX/hbLtq7q0fl77o++4ZKVKfXuPZNrwfgY5/4BN/+1ktYSoPxrEgIjGQUqmunURMRMAwPHaLaWqezdB5NqmrXjArL8+fYs0dU3Nv1GqEXkqYKurVBlgV2oYRhaAyOiTWUyzlMbBnbfCaqjesmRFGHM6dEBaobBGiqQRAlWFJiJCElURVa9c1+9e89JERh610ZuJ3usj5/nqzfwpOoDSMtYjkRtTXxPK+95XZMxeaZZ18ikPIz08ePsXLuDFocsm2XcNjnFo+zslijlBUJjq3bRnD9LsND472Avdvt0mq2CXzREw7geyHEek/CJYhMbCvDtp1ZXjohiGLazTW82grTI2Uuu3azIuN2DUzZw9rttkkJMTSXlgycVNtB1VIK2Tyzs8L3zJh5FCVCk/1qmqHhdSFrF+krib6ljCUIwHZP7qJRE0HB0vIshhbQaszIazIZHZwkxacp0RZJbJBkAgazg5x5FcNq6/xTnHxaVPiU2hwvPn45B/eZcJm857BL1i7guj6bDJ0pKSnEm0E1wCsvfpdcRvh5+YJNzrZYX13BMoWdv/qaw5w4cZpOd/O5q0aGUFF7foKqRLhBiCnPQl1VaDRcsvkRDl0uqj3HOEqzIdbz5Kiwq/NzNd5yjc3X734OgLONV7AyeTTN5dTzwlarSYeMUSMOa0xMiLUQRU2yBf9VHK2gKyYJEMXivBofGuMk8PLcM5ycF8nGQxffyMj4ddz0biEdoKgFpl98kXIQcMkVoiJ7zfYm3wbeuH+YRFb3Sn0GXitgcVH0Fo+M2yzNVLnzk38JvyZ+f3lhleGxflyvji59SNP598OnH5ng6sgTggqyW63gt1oEaYj0o6is+vQ745RyefDElJecLEMjGa7cJ9hmsk6BO3mAqH4FZ44fkd8acOKliPY6WFL5vBBGJImCFCJnfb1EGNpYto3rC4eokDfRTY0g8jd1PJTNipX4exOitZE9UJQUVTEEnbasYMVhhG5ovcy8YTnousHaSpsN1mxdT1A0DzsXoWpi8SRorLfW0BfFxl/IOpRXiyTlLb05a66bbHNyKO0JXpZNvOOqTp+VYzgQXz43W2fbSI6r6mWmpH7EqBZyzWQJNxEH49yZMwQrPrsKQwQtX86TyZn1CK3Q5mAqMliLS+eZ3VukVBPvObjcpO4qKCULe0g49c58QrqsYzhd9smmyTNRSt94Fm1YbL5bmyUWulka+/KoiyIAGrZqdJ/WWJ1f56IbxbM6ODBOdO8pvGEJxRrI0E663DQzQt0Rr82yws52wE67nz+Va2Mw8bnJLhNIooHjwymH+wySoxHzEh76VBJx9WQeuwH/rEg2OyPlXaUMR1fEe55gmXFN44PqDo4aEiqkJ1hRkwk9T1U2inYy2+gPWviOOBi7agHNT8hqJvVUOFyqruG6LjkrRyBL96ZZwskadCRrXDuO6bSqBEHQW1+GmsEAAi+gZIqgIVQDSHQM2fhrWwaW6RAnLpM7xbN6mXlGR/roz2/nY78rtGGMjEIzCnDkgWqlGZIQVjwwNxqwbY0QB8tMehluW3VIE4NIHmiRn4AFs6ttjA366Qwcm+qwZXI7cUM4JY/c/R2WVuCXfkOovy+u1Nk6VmC5XmX/XuEM1NpdurM+piQWUQkYGBxg6+QOjpwWWdv59YjQqGB5GrGMHUMlJVRjVEnqYaY6Hb/DWD5Pzt3MOBUsnbf+2A2cnxMB0MziPKESoMn93PWa1Lotyn1FArnXwzjAMQU0WCKgUFJNVNCkQ6uoBmmS0jcwgCcrNKqRI9YyKFoGS9K05pQ8hpUjQGS7I9dHNTXcptvLvs64c4xvmSCXydKqCedxefY8ew4dZu7ctLwPi3MnnuHl5SojewR738rsNIdvvI6vPXYvnioC6IWZ42RHIs7Nb0LwCjtGWTn3PHmpd1K2LH7jve/jy4/dT6UmDtmvfO97PPvKFJEt1k/qQKtRoa9QRitIpkUlIU0VfC8ikxVOkpKoNBp1olfRbUehgm2bm+Q/QGJr5FQLydKL1nD47G9/lBs/KIg37ltTefzxZ8nmSgyURYAQF/OUy1nKcQ1XatGkrsXRpfPsGhYH8fved5Bz1Tp2NMHO3TcD8LrhDDfvn+Ds88/QqIh1/fXv/gsf2Hsls98XkBEtqfa0s+ZlhWRRd3nrrbey3hZr+sTpRcpaAT3tR3JHcPHFF/Poo0+wd8cEzaY4RNTUo+0mNBoycZMrUA5HqJ49gRdKaLuSoukaHb/J0IiY46tffwMnTh6lKVnr/uLuz/LO22/nva9/C5//6j8AYJQ1dp/fwqEdO0gkXLnbXccZGcSXG3TNq6G3LVqNDqkqNoiu+VJLUEXTZIY1VfHjzaotQMHMoSql3t8//rbbOPPCMbiRnlxJgoBMbwRlG0lGy7J6jnCSJBiGQeRv7r04jomSAFtW5k1Tx9BMFuYWOTG1QTlu0m2cJ1lbIGuKebn29T/OWn6VoRHBeLlYWcBSDUzd2Kwkqxp+GAldREnKgpoydepltkj2vrbns7AszryNvaYoCh03giTC7Yjz4rGHjxN1VFbnLxbf898hPzCEMrWZPdfNfrbvHOaaN72No89+C4B8PguxTkNWzkZGS6zN+ARBzJ6dQq/uP//qn2EaDVRDOOQAZ068zMunlrGHhF5O0GwSV9bpy5V7c+4pEXGSsFIV1xiqKTsPHmBoYHgTPSNtk+eJBBFAZaXF0vQZGpJMIlF9oriD142wZEUoXyzRqNXp6xtA6dEhJzTSFimb+9ixIzygel6Qj6yGYBoOTqkfXd1ggB2mf3SUK/YJaJaW6ePo2ROoSkRXknOEXo1cNiJv5FBiMVfrKw1sK6IiiQ1UJWZ4YpDZ+Tna7bZcL4I4pd1u9+5ZVVV0VaErK1JR6hP4JsdesJCFY/btP8zMiVNMT81jFzarC7u35yjmZZUxM4KmWhScBFMmRW1rGNOMGB52aLREhr7plXj8yQW8WGpM+p5gO9ZCRkYFtFUhQxKLCqxuiHW9ffswjjPZe89nPvWPzE2vsWXrcA+BomsW7WaHsaERXjMKJYZkS8WLL3+Zh757F4cuPdyjYvddFU3roJshoTzrVNVG01JCeWAFEoqfzyi0myKo9z2LQrEfP07pl+yu+YEChQGb+tRm1XigVERRNFKJnU/9FCOnYJjSBkcGlfkVtl3Uz/BQdvO6QxHkbGhYDg06hPEcXVe0S2iujZk6hJGGKVEbtqWjRlDIl/EDsV7W1xe5eM9VzLxqSvoKLmHLpm6JcyecF8ITxdw2bvxpUV5O+vqZf+Fp7IJ4dvYOm6HLr8ettTg5K22XJKQyB6/pkZRVwyaxHhFKI6
8NlZn65qPEzWrv99O0wdxsgKEoqBIi7sWvDv9+eFwgtLgwLowL48K4MC6MC+PCuDAujAvjwvg/MH5kKlcjEsN79R3vEBhvI4dpiKxDlKYkYUgzVogjKVTW7tJJE1Ylnam/IqLmI88Po+qyfspT+KvvInRPsjwnca4n8wSsY8nelCgssL46jW3FaBJn67YSlFRBV1Vcie00dQ1VVXvaUIKoIn6NbsgGQUK6iX4BRLVrQybJiyLOTZ8n0tZA2RA7NLHsQSzH7FUjVN0kjFxsSSpgNEPaC8vMDGxCJ2bDLkt6E299lUcqAkayb5vJx58JOSyrI97WHLftKvPtE6vUZW/n5ZkcLx5ZoOKKORu6ycGZ7OfsPUdoSqrSxkDItYP9ZIIij6yIbEzLrmOuq7iSOvgpXeNMbHPZcot0WsAXXppaoXs4ZWTc4rFnBXTnZGqy4/gJ3tEnLuDuE9No1yV06xFbj4gs1fmpJm0l5EGvwdvOiUzqxN+fpTU2TOky2dswPcPBdoZvPXWerf0ia3zN7hJm2OHEqscNRfG+d0xs495nVkX/FPCTToHGos/XTi5zxzWCtvPns/Ocbpu47jATGZGJmBzMMLjeZIskRLns4kl2NSrsiPrZL5XrH7SXeP3YGAPVmCMXi76Ite4i73JHaDtijX1zsUVN8dB1HUtC6LaN5hkb2oLnxnRllrHjVug2VUxdZkwcC9dtU+zL0e1KsgM/IJvNYloaqsy0lDMlWk2PWFKsB0pKu+Vi6A5HXtjEb6+vBezcsoO1FZFJOj09hwdkJWTgwPZxCjmd6TNr7NgnYGRtH1oLq+TLGQzZW7e6WiEIY7KDYj/+4zeX4KfgS987ysS40GVrr63x+GOPYg/0cdgV8Az/5Xt598fvZnhSVDAPrtmcWqkSznUZlNCWF188QVtXyGREpm7X9jHa7QoPfPNpzs+I9TN2xWXYTQc9l8OXosVp2ED3PJJY3EvTj0h1lVfOnafgbDbDvvmm61Bsm+enRRUsDtsUVBVXwicMVfSqderNXlUq8QO6figgv7JU9v+y957hkiVnnecvThyX/npTvqqrutp3q9UttdRqqWUaBEhCAqQBNBoQ5nnwgwAxD8sAO2gHJ7QweAa0rFYIJ4sQQsi32rtq313e3Kq6dX36PDZO7IeIzKzq1gLLMx/0oeJL1c2bN/OcExFvvOb//v9pnuP7IUU6fOZG9yeULtri34498QSL3RZhOeDCOUOcUGmErEZbhFarTZYleaqolWojUc/VtQssW1j0UI33wtkldl1/gOqkmZfpUpUrrr2KzmATbXsgg8oB5iZnuf3qa6hWzVoP9u4k655jbt7sheeAg699I9GZszQsxvmDH/5Tdt1yK4P1M/zd//glu/YOkrklHJt9DIsa2yfm6LZ7dHrmNUfmVq5A0Fyz1To/YG5ujtQ2afcBJVw8vzwiKVGFwtMlmvkY1/8H7/4h7vyut/Kx08beHDvWZ+fkDK7ISa2xTDfXuGL/bl5xxXYSC1vpZZIDzywxt81khN1Ucea5I8x6kFp4wL2fvw8nv4ubb72L579kes9uvOU29k3M8amnTc9O49qDxCesfbKQ7YUrr+HRR5cILCHLrnIJHUWUnCrK6smsx9CNHe69535mZkzlIY5jtHQIK1a3KA+Z3b+T5kafzTVjO3funaFZNAmrNaYmzLUvRz3m9hyEwNL0n23x8Y99jDvvuJP3vPunAPjwZ/6SY08/Tu+1d3LshIHS7Di4nc2kj0V+M3Akp/sr5JkLvtUIzHJUJJDCHZPciBzXc+j2xj3Jx3pwcHFM8xwlAxJL7OJau5RTEATByCZlWYbjOJRKpREMOcvUSJh9OKQDQjukw/4VUjr9Hh976NgYXt87R948hfRjupkpPTzw1cNs37mPqW3mvHBbJfq9DK0lesiKREiuCtJcoy2hjLQw6RGxiCPxLNW4YzPlWZKCG+MIxdaGOXuCMKRwC5576snRtR8+fYyVzrjPZf3CJmtpD/7+yzzxqJE5qQQeK6fXUDPme1dXQq6+cTcHrrySr91vNMhCX7N2LqPZd2gvmwpCPzmL59aJrf5EFK8T1spEmYdft72vmaIXDUZQZRfJ6rlltlbXyez9xv0B3b7ZY56tzElXgdS4rtnrviyhxDSlRkJuK51RPwYcVJohR5pnKY5kZIe7QK0KTeCNbzLIg6AsWFtbYz0SxLZKu75xmlanx/PP2Z5LR+P4CqVyhionQq/g5zmOX3DyjLEle3dfy4037uUf/8Fomfb660xkkyilqVasNlRRUBQF0pEjORGkIE5dhIXDSa9MOaxwYeMce/YZVM83f8sbeGKqyqHHnmW9O17rv/eHv4oeSs2IMr5XpSRcsJWINJGUAp80Stl7YA8AX7v/Pj7y4feybcFU5gZFTLfT4SUv+XZ+8keM5tozTx+hGswy0fDIrdRLnMUEfh3PVlW1+nsq1ZzuoDlCQy0sTnPi9Almtl1auZqbDFhrmzPl2utuQe+NafXGGnOT0x79fkwyKPCt/IQqMrJMI4ZV3Ny8f9vibs6mpgKTFRn9fp9uP+KGxf32Gnbx/PPP4/pj29yN80sEwnOlUJkD9mepE7qJwg+mKFfGZ23WMvvpD95nnkvhZEgEnq1uZ7pKXK4T1CfAQpX77ZgiUbR0RKlRse9zOL++dMkzeeL+I2hf0u+atb/tzluBJ7jmbe9g6RkD7/XTgh233kGnZ/od3U4btMvMfInA2qmBhR5HrePIYRVcVckLjZsYWxGttVg/v4zv5yPJ+CzrUi41UFkfZauvejCek683vmGCqwfvM5pPD/L5f+Wd//JY2VxFx+OG9n27Xs2xk5p+z0zA+QsTaHcCNYRr5C54MdJL8K2hUbmDA2Qqo2SNsyo0eZ6P+gaGLIBCXKT1IT0KrVG6wB0GXNIxi3MI8/JLZIVgsLFKpi3DjxeQD7bo6ALpmwUWBvN4QR2nZIzBpCijpgP8sDy6t+Zt83i755jd5vD+7W8DYGZ+D1sXNnFcw9KwsG2Gw+tHUc8dRfrGEQ68kOjkaa6umO8q723QWy8Y7Osy7Rvjd9X2BoN+i7V1n9AGCHsbNYIdszz/tMFup1s5103mlALJIdsDMXnHfva8fgdLK33Oa0uksLnFS16zi888aTbfU3qJ1902h1R1np42TujxjT5rFxR7v2mR/AbjjH/mmccozc4wt898/4nNEhce2yKdjAhuN47+Uw8+yoaY4fxGmzu2G6jQ//b8Ke5pJdy21xjpzx1aI0lStO7woSdNo+gtcw1+7cnD3ObWuX7GfNbjR8osZwM8y/50p7eHVjfhPYPTXBGZ+fzeq7cTnxjw+LrgqhtMQHm7mKL78Gme3TAG4WXfcSf6zbfzxXseY+mocR4np3Pa3TUGgyaTVWNMp2cnSROBttjfLI2pBhWyxBlpTBQyJdYxnlcCORSrzvD8AmwfmHRCSiWFKgZUSrXR+iiU5NobDtJsGgevneR0dJ++xQyXsoLp+TK6rnngcYP5lgPBfDUhXnKpTQ9JYCJilXHuqHEKW4kx1utpn7P3GVHYuhvzm
tt3MV/zCOvfbeb9nT/BTdcsoKzA9K23XMmtheRcawO3b9b+a189TXMQsrZu1t0jjz/A2ZPnuWKqznWzxnlt0aa8f4H77/saqZ2HwJsg1YrCisSWfUmAg5dHTJd2jZ7Bf/jut7KR5Bx7zvQEiM6AraqksIQMlcDnzte+krXz5+h1h06lS5on5HlOYlmjHMclTVMK68AZGJQk7nXxLBZ98/ApeudXKFwHrAObxzFIiGNzWDq+R+B5xIMeWW6c+sAvkSeKIs8J7Z5srm3hZoKpCbOGzywt8eTqKrJcZddu01PhixKf++zn2XHtHCUL8d1956uplgY4ibmmr/KnvPWb7+SJrzzAsXXDwnXDHXfydx//c247cDv/5weMmv3v/vGHOHFY4lfNIbswO8HG5ipZGlGpWJ0iBEL6OI5PHJvnV2QFrY11pqamRs98ZqJGMojp22fsug7BRI2gOWYUfMOv/mf+5B+eYDU3e3ZyStKPWshKmekpc+p9y3U7ufbANAJJwxJKNIRi4Q2v5qTtx/37ex+kXKpz42sOgGVxK3kp89Me68unefB+I/T9vS97Hc/e/cjILsftNiVLLCMtlHbX7D7q10wTb5r9UlYB/WiDI0fOjAg1WpvLFI5iY2ONwDPX1B/EaFHg9+39epAlOY1ylTXr2PSimJk9u9B4HD9jIDhaZ4Rhmd37DcwznWgzdeQoX737C6ycM/fyI2//ET72mT/h0IN3I5bj5M4AACAASURBVG1wfPJ5WD19hsz2DZ89dZiZ/TlJ5uJklhk0b+HKgCxTRjQeA3cVDoiLAqDOluDpJ0/Cd5qf19a2eO2drzH31TNnT5+IuYXGCFpXFAVaa+v8mr8zTKyKtBjDyig0rivJs6G+khFSJ+5TshnIrc55UAX95pjspFFfoN2O0GXLShtYUgGpaPfM3PT7iemH1B6FhS8JG1A4lk1P5QWFKMhVRtcmO77euOGVr+Dxxx9HXtQTuLo+IEvH97J3+yRPP36c040voXrmrLOdx+zZa/pOemnC1z7zMU7f8DpcC8tqDbbop23cfAoPcz+t9dPUJ6bxLINh4VbJVMTC/AID26uZKYUjJKkNrsIwpNfvGH3IYc+HdJmYrpHnyUUsxgW6kGQjAq2IXLWQKkCOmGQLHKXYs38/8zPGpu/cs5O/+uuPQDGGeQlpbI+z+FoA5nfWOP/lj/Dc/V8d92ops0Zqtdros9NuQRwptLX79fk5QNPpb1CumMTp/oPXUCrV2bvPQD+PPH8UpVPiuDe6Z8cZa6cNWygcx0ELhzyzWl+OpNNtUWjFzr3mPD6xtE59ag+6OIInx3O6Y/42Nm2iOHc9FBlpkiKGJFBuSOJ69L2IZ88a+334VIuFHbuo1WywGlzBVbfdTmXhAD/5C6bf/+iRUyydPsO7/9O3cb0lonHSFq1mZ3T2bmw2qdUrdAddsJDKTr9DgSJW4z5pgNnqNIefMeu1vTEgp2CqOiaKyTMPKVMDC8yG9ydxpYsaMuVKy7B8foXMEspkaR8ZKgKt6ayYZN6OyZexY7rB4/c+NPr8c0eOYNqthmQZgkyDss+8UZtAqIwsjekPxkLimW3LuOWlJlkjZQUhNKH1WUsTJU4cXeP0qeeRlpjiwM5tvPzVL2FmMeSzf298iaPPNZnYsY2Lw6uf/YW301zfpN82z+rO7/gBvsInmHRBXWXs9/7JCr2sT2bDmqju4wjNIO2SW3K1idTM4+K8Q9nqDSbxOl3VQ/XMfqyFEikU+qK14yhBFHWM/qV9LWTcb/r1xjdMcCXrJkM7OTmN60gKOY4KhRAMspwgU1hdWpRSJFqhM9vki0DjEjttcjnuN5DBJq7rM+iZ3dkdnCUs1ShsZadUbhhjnAlyu8izXBA6IWFYJoqM0S9X6sRxPMq4ub6HkEYMd/ia40gcQKXjpuGh0KJr8ZxT89sISnOEu/s0t4x53twaUKBZ3NYgtzTAUbzJjh0TFMo4/pks8AqfrD9+LsuLe5lNZ8g6cxzq2KDv2Ba+P8vqGRO9Hzv2EGePPkXU7hIumF407Xk4RTaigtWxwhcB1bk62A2kH/XIKJifXSAW1pE6HeEf7tGwh4LwGkSORKkJfJvpyUuKI/e1cMUM173CGLuSdNkqSszdYa79O8rQjmLm/JCZXcbBuf71MQkRKtGUtMlYXvWTd7C1vEGwZhbxVdfXOHBjF6k0c5bJqHZ9h+3tJjeVCmTfLue0y3eEir1TBnu7dKbLIFmm4QcsLlhSj7Uu77z9IJ4W1BrmtcHqKqz1eelB8/3rvR4PHj7N5MKrmZgw13kybvFMM8bbnvLIkqnQrC21EIC2CvHbF0q0v/JJ7rjyam4pm4C22VmlHe9hcuaVnD5j1sb5rWPUaiVyG1y5aQmtNZ4XjBjogsAjz3OiKEFah0SGAaWSRNjNIJ0yg0FMozFLfzCmIX/Vza/hh/7j9/P4oyZwOruxTml7DddmtToFVCpTRL0tpm1TtvAzZNBgfkLRsY7M9NwMqpjisdOnAeh1jHP4momccLfJFu6+ei9OGJKIOpurZg0tLPog5Ei0VBQ5ObAwPYs3YZzxXEoO5gW/89u/b9bYQPGBX3sfjekJcsuycebo43z/9/0XXnXna7jx5aYqXa2VaS+dY3vDBOKv/aY3MHCgrCXDlOn7eA9h1SdqxczXzD7SV+8m02UKafaZylP2XH0VJccls0kSF0GmC1SWIKyDoJShvs9zy9RVFCRpSrfXNk37wM/+9I/Sbbc4t3GBraZ1kgYx0WBA1DF7qJUMyOKMLO4T9SzbIyCRKJWOKPEv9DpcOLc8qqI4cczTn/sc0gnx3/AGAI4++yTveu9PIoQk6Zn3DZpdyo0rKJyxnRhM1jnf2aK7Yubziut30H2+zXH9LKcic/D90I/9Rz76oQ9x4oQJWk4ePU+SdgnLVRj6Hp6LygvifotyxcpPoHFlgCrGB02RB8zMznHbK/cAUKmUEUXMf/vvP8cVXAvAe/+Pj7Nw4GXU5ZAeuszi7E5mt5eYnrKC5NMCcHEVo5Oq3++gNShrl/Mk5sjJDiut7qiXMO77lOImzRNHuelK4+zcfsuN/PKvvB9ZtxTgqz3kjCVHsoLyn3v4Gf7vX/8Lii3jePzzhz7K9MwMQc2l1TfroN/KKIV12qubo2qa42oct0TZZl7jfIskiqgvTqMumETU6kZMjKIxUx+xoTWbCQuTNRZnTLCsDrromw/S+Mr9PPWsYW38i79e4Tu++x3ce++XiTxzXmzE5wj8WSLrQK12VuisxlQmXIRl2KyEdYSTmr3gGttVkiEFGmeYsgU+8ZlPct3uO0Y/f/yvH+LYKz34ES4ZGoUz8u/MmWaqVbbp/SJh+NHfaEGe5/g2IZFpiRYar3WEpm3gzLMeu/bs5hhNXv16I6vyUHIP5Yog6VgpBjRCShxXkNvgzQt8/MChSH0KW5HR1mHTFwV8Qgi6q+PAambXfvIioXVu3E/1yld/K3v3X83H/vL/Gr1Wb1RoXRiv6X1XXsvTPMJm+yQ/899/A4AvfvbTPPXwIU5nz1zyrJaOHKIyZMJy
NI6v8XSXVnPcw5E4AbG18TWvQX/QZq5eYdWKovc2ImQQjvyIfhIjHI0XeqS2x7NQCifJULkYVUOgoNAJvu/aOZB4so52shEBkoozPM/j/OoKBw6aCka728H3fZqdMfKh2TLP6NnHP2nvK2D56MMUg3Wcig3iY40MHTa6xt/YtmMvvnYpZdGoKhb1c8hB6QzfMldq6ZHmkpkZU/k4Kk6QRDFSjoW3pZSGJMUxDv5wONLDFcZfDGsea6sX6KbFiGBgfscUG+ebzE77vOQ6E7wdAs6d3WBjzbjs2gtIVUq1BO2uWTeNusupM2f4m7+5B2V7rHbtrTO/czfNjgmMs2bKiY2jHHvyKOtN42fWJ30KlZJGKzRXLEIqUnhBxqoluFjcvotut0u3F+PZHshuN8V36zjZRdpBwKc+80leOLYu+v/R506+6PcvHja4Wr6UibCPOQOe4isAvIevwE9f+pc/+FPvQEqBNae4eYmg5tFtGfvzx7/3V6B9Ot0+Z5fHeytJTUD68KPH/w3XZ8ZhVjmM8aP41fHrT/HPl7zvv/3X91/y8wf5SwC+6aU7+PeMqNel2TZ7LRlEFCIlGiIBVMLK+grCGRcyUC7lwCUhJ7fCzeMw6+uPyz1Xl8flcXlcHpfH5XF5XB6Xx+VxeVwe/wvGN0zlKrCUzv1BC5VrCuSIWcbRKZmT0ss0jk2faa1xtRzhybUjUGpAL+jRCGbsp66QpxopNI5jsgOVQFMOY3xhsnnCj0iUouyViGxJtVQq4zshuXIIxbhELaVE2QyRg9EZ0JpRj1WWJUjPxZPuKOPkSpMFH16nlB4qF3gTszQ8iymvDiiEZnOjBbaHxPVg+ewGltWauhdSmZ5m/ew4K/Dlv3uCXGboLCa0FK5KxNRKPluZyVhMlaq4eHRFgm7G9hnMEoQ+XQt/K2cuvXJBtBoT2OxWnrfQTkBzdQU8k6WK+xu4JQ8vMtc4UAOEFzJVzWnHJk7v9CN2T88SOZIt22+g3JygGEMXRKGJ8pgpr0EnMM9zfnKOQZQQbXVxrFZKpRyglMKiX/AcgQghT30alr2rEvj0ijJ+1aFs14Y3MU27eYHHpMm0TNQnEUxybKNJduG0mWPpURWTROWcwarJZm2f2EVlZ8KDHfucagvsuG4n9YkalQlzf+e8gnK7YGahgRyYydn1+pjYnaBuq36DKcnE8iZX77+O6stNpnxmZobF+e187SsPc/6syZC+7VteRbuzSVgy2bxceOgiI0kiHFuV8p0AB8nc/AxbVhNove8yPRuS2v64XMUEYdmIYFqV3X/kQV73ulcDuWG1AtqbPTYGGSqyIpgy4MkzK9SKEguTtvrqpZxZclhcFIC5v2hjwD/+w9coTpvn9OPv/WZ+B7j+dXcRpWa9rLdTkq2IUPQoLJ04+TyH4z41bfa25waoQlFNHHqW3j9JoaodvvdnfhiAD/3V3/LgM+fwqgW79lvBwPJOghC+/0d+mEVbKQtFQU3KkZBrAUxamblhbxqAJ30SlbL9pabipTeWkSsJLasR5rvSMJPGySirKnNwHI1kLBoupcTBGfeKSGMP4jgaVYB9t8B3HLQn8TwLxYhiHASOrXhtDPrIfICXa1oWxhZlKSpJKRxB3/aLiDhDKk3T/rxt1yQve9nLSLMtNiyt9K1770KfbxMph8lwCP1ocvreFfIhY+Ib4RM//34GrTU8C0c51G9x1fQCCzMBLZutO3roOD/x4++hFpq9PTU5z7kLp3nwgUd485u/HYDHDj3EqaMnGXQznn/W9KdsZm3a7R7L50+Mnnk/6iF8h9nthrL6meef4i1vuotaMjN6z1S9xqB7itOPGTjK2vJ5egPTX1i3mmTXXHkFr/2W2zmwOMmuBZPhbroh/UGfIxfM/JUnJrhmp6S/mrG8Znpa8qRDuXIF937pcd7zA+8y93zvQ6ytn+eKBVPJ6tQ94opZu4NNW9nxuvzij/0BB/dZ+uutjM2142x1B5w9a/Zap5+TSMOIla9bGGmtRL+/xW4rLjmzuIP11cMsbcZgq/CealJSmposjdAXc3MzNDdy+l2Tlw58F1GX7Lnr1aN19uhzR/nkZ+9m355d9Czl8GtuuJ7nDz+Ba/vc1EZCu9mCuUlyKxQ7KPkkaZ9KJSSKbBU1BSkDen3bh/IzcPPuA1zojWmgl9ce4NE/+OcXVa583ycdimVfBJ8b7g9dGLihYLz3cl2ghR5ROKtcUQkDQgHKMknmXonvedc7eeQHbubauwzL35ml42ycP89sw9jOxFFIxwec0X53HJdCFaDdkU6gGhLfiTF0f1hJW7AMgo4Pmyvj6j7A7//GL/G9P/5j1CYM8qBLi9OnT9PtjatbaWFs2Pf9wh+hrBBtqVIFNOVZc52q3yYZ5DieS1+beXAdQT7wiOIm4ZT5/HirhVSaYghRlRnX3PoSWq2EGUtHvW/3Pu6//wGCkrHdEk2eZmRphmNz4tL1QEMYOCPZDN/3KbSgUrFMlq5LEHp0k4Rts6bas768xpHjJ9h18Eq07fs8f+oU0zOzbK5Z2nU2Cb2QDrD5tGnVOD+Q5E7OxNQucteshWolIAgCtm/bA0CtvoAWBUkRE1lsolh6nIo3ycpaRiYsjHCyTiWoWCFccEjQKeicEbW+VgVKaHDFSIDZ931y3RutsqXTA3y/RNxr0rf2tN2apNXa4srrruKO1xlI4wf5Qx57/llUbsWPCckSh2ZrjViZuXV1Qqaa4LUoV60O03rE8plVAlup70drqDzFcaExZV4rT+Tksc+ZUyvkXeM7tNoS6aX0BxYJVISoPCHwS/T7xpZMTDTodgd4qswPv+v7ADhxYRnhuzzzqKnmDDoJsYrxKx5795kqTX/QRquMJIlHWmJ5rigKEMKcRanK2Vq5tCL2bx2//1t/8i+/wRRueZLP8OTX+fVNL7kNAAeF44QId6gDJ3CEYmJmesRSGfVbZp8WwagnsFxzUcLBt7p+//CPX+C73vbN5HKWG29+BQDrmy4v+aY7mJwoc0Vg7lOG23n0SI/ZspmDtCGQsoYcZMwumOciVZNDD5+hPjnDrqvNa/1MkfYzepH9/kbLVHqLcWWqUA7KSRCOQhTWgHtjyZevN75hgqukZ3owhHRwZIGf5aOHXUgfXwe4QUExbDp3CjxVENtGR4lHRQb0dEoSt0af6zhQJAptncDO5gWSjh7puURkKKHwpgSeLeUneUZeaIrCRbpjnLn0XBzLD18UOUIIpHQY4mbiPEcWGt/3yVIzwUppKBgdAEmU0I+3CMuNEQWuG/iUQgWFw6kTBkYyUauyvtnhztvNYtoxOU8r6bGlxxjXvdtCJsNryUvNEda90ZgkXh0wiYFLlSqwfGqZuQmHHTcbg7ttooTQIa1V8zed3EMNBmSpJk7NgqnWp4jjLVAlhNWrcP0cXRJI29w9ISYYxJsUeoEgMO9ZFCGRKPC1pmHFxxynRt0VBLb5ca29REVO4biCOft8m5vLOLpMfbpG1fZ4tNsRWZxQrRmnMEoVZd2gGvToJMZBKMIqRQSbWz06gZl3dcoHrZBWV2tFnUTkbRD
TWDQDa5nDoNticWEHExb9cfj0WQgcGtIqy+uc5moH7WuK0KyfchogsoQBmtRCL/zUNH2WLPSzUH26GWTFx0cU/rPTVbbNHWRjucntd1kHPYypMIe2mkE6V/h+Hd+fILXrWosCRcHJlVVczzwXz2vR3NpAaHPIC2qkvYxSySNLx1CWTJQYKId9+42Te6dOiQuo2oO/7Ho8dmaTq/dVSId6TqnD/p0JnjsNwhyg6+e2+LZbp7n9l98MwFLbzOvXjsZIC2N18xqOF+NVPFYOGfKItLvOseYWYsvqOd2wB0FIP+1hNf3QDog0oFo3z/KlN7+SB+/+Eo35G+hhHKKFuQn+6G8/T7O5iWvPC4WkcHJCSw9b8j1UoihLaUWdARc+9bl7UF6Z+/7m0wAsr2xx1Zuvo2T3/1p7i1QqfO2QWjhhBZdEK3zpoK2JzPMC3/VG81lYIVWhCyKrb+TWp3CyDFyBN5RucCRCCHILARJhCNKhJBycsk2uSKhIl0wX1GyAFxQCx4Epm+zQgy71oIHAZW9onasUEtXD0Q7COi1ZJtk7HdLMx9Dopw59lReOEy96Bf7s67zG98Dv84fm/3d9vTe8eGytHWcL+DMOjV57gM/yC7xn9PPv/dL3/Kuf8xUYfvOlowrM/xsu5K3w1/wP8//XYgVhz7/obbsc24cR+myceJh7rF5OUA1prZ+g0VCcuWBsS7fngScIc8HRo6bHwGvU0MQsnTVByp59e5iZ2kaz/RCBMoFNZ63H7PRe3vLWt9NrmyRJkRckWcLxJUNU4fp1Kr5mvbkM20wSYXqpy5kjh2mvrbJ7r4FwLR9e5Rd/9sf59Q/8PwB80523I8UK3Z5D2zqhk9VpdCGJBimUzHp5/wf+4kX3/s//9JlLfnZ6Azxv80XvcxyH9ZXeJa9NzdRGScMszSmKAvdiLTPblD+46G+cRp9eZR95ZvqWrrn2On71vT8PwD/Z9/zwyk/xZ3/0R/RPXQq34ydvG33fysnDvHBs22/sRcnqKw76BVvLJhnp2X6sPM+pVCr0uJRGOVWSxW2mx7NL60Wf/08f/Z8APPzAZ1iYMufY8tJhJAKs5qLySkCXhl9CWTIQlQ2olhR6coZ2c0yQIRUMObkd7XHo7q/xitvfwgN3f/qS761sM3aqWq1y9uhhbrnj1fjWqfO8EmE1pN3qjgLfPM+p1+s4lngrTVMGUUy9XqVpeyBzDTiCoFYhtwGeV6ng+iG9ztHRd69tmXXQtP2i89PbmJmvkrdTYtsXFasubiAZDAyUdmP1OeJBRqEEWOHmVqeP5ywj3ADf9v9cuLBOVDtHt2+eSTmYYse2MnmeU1ioablcZmJigkajPuq58nxJya+SWOKIR594lMPPnsfJp+k3zRzv2lfj7NL9vPLmm3jiEQs5ezs899yzODZ5HfVbFEWOpoxvA0XhuSRBitxeZ3nLipR32/jTDnlmRdPDAaHwQGoKa+PTIieOfXItmZg1voNb6lKvbufkKfN358+fotXu0O5sUK+bOS2HZdbUGRav3MWuK8za2530CKTgW95ooN/NZkQ3XaHuz/OFL33KPs9VQrdE4UIx7NcSyvZBmnnxnRLbd9bZv+cApdAkKQuRk+YpjqtHvZLSLVCpJE1sH5/vorVCumU8z/oSbo7rBaNAbnF2gaDkkzoeWWECWk830D6Efkr4HSYIlEFhCguWtyCQMRV3nsIdtZ0hkbi5RjgFxahXUuA7eiTi+w98gRvf9D0c3LebL3/G7I9Wuocjx45QCq/D32f8oL/6mz/jxHOHWbFFg15/nbozQbOT8+u/8l8BqO0Muer6XfzZB/6Bh4+ZOS7PxpQzj8Ceq+WSYn1jgO9aXQRABgkqCRAKfJs4jfJxn+jXG98wwdUoA6ahyBxSfIQ7ZOEzzkzhjC/XVRKNxMJzUUoRa/BxyS7CQnqBT1Zk6NSspgAIdZnEmny/EBSFQjpVcmEJJkQFjUue5tj9Q0FuWP8sk5WU0vbF6JFuQV4UlAIX4WiiIcuP6+KGksQKVm6tnsDzAo6veATTZtE1JqYpuVcQVgpmajZb12kSeD6PPGaM3SPuY/S6MZ47xoEqXWNlcIq04zBpD5TWoMNGc4ueFVsUfofGfMFLX3YFV19jMrJR3zChhcFue92KPE9xPTnCbhcqo159GSsra4jCahKUAqKoP9KhEEISDRooXZDnZiIcx6XINEEQmoMHCHyfjfUt2rZXZy6okSZmI1eqlr0lk+RFahioNi12ertDtTpLp28zExLcwCVNaujCGKhSWJDnBY6YZNAzzyZLC4KgRGvTan0UgmQwjVKKatU06bp+wKYv8ANJz1oaEXiEQY14iHPvDlAyZtvsPMIG+qFfQuIQoUYq4Z1Ok1AWhDYA63Vi/Cxix9wc8TDIjiOOPneKSs3B0UYDpbUVM4jWqVZN0OtqgYozcl1QtQ3CBYIozXG9At9W+aYmt9HuNNHDTE9ZkiYms+xXx03YazE8dDLBt1WiK6/czZFDS3z8i0YsrzI9TW2q4KGPPsYrX3UDAC991U10o5TM8dlqWVaqvTuZu/qlfPIhY0gvtM7B22Bt6QSRZatSuoCiwEcRPnkPAOdmX05VzpFOmmdw/NAzzEyW2TlVpt4wgWJaKAZOhLCVT5m43HzDjTT7HcS6ca6WTwWcqFTxS3D0lCGmqVUWqTfKNFvGQDq6gV922UravPIKg7NnJ3zwE3fz/bdcTfmhD5t19swp7nlwF9W7TJ/JFS9/ORsbmwRZgWvtSw8XdAYiQ1rctdbK7n9LTOE4ONqwkMWxzZS113BdF51rCivAmCQJfhhQssLGSincwhk1a2NneagJNdLVEthsPaPnq5SiXK6O+lyGOjCOCMa9L65LnFygVDLf9xt//resra/Qbm6RDEzAtW//AQQeWjiUwqEOjEMURaO/S23f6FC/CCDL8xGRwZC9K6j6TC8ssDBlkjnf/dpbuf/Jo2ycPcHv/8EHALg5nuEm7TJ188188+/8DAD3/9oH0Ylgw9rFvB0jCPHDFD1jIqddb7mJVrfF8sNHcG2fiVSQ5indgan2dFOTUKr7AtW32jdb6xROn6sWb0HsMWQDv/1bP4dcXWKiauyGXGzgDhSlQcGgYbUFdUhcVuiqFUgtQW9iEVGqULb7MXJBeh4qLRMNn0G9Si4Uyp4Nnc46FXeKPTftIolNJeLUmWU2W2d46L77mdtj7K7bqBBOz3DHHvOeZivjVHOLHQt72HuLmftveuub+cT7f5dWnNLYa57L8uPPc/vL/hP7dxnH8e3vfi+62SZJ+0TW2YoHKb4jyPNsVCV656Ef5KabX8XF4x3f+Tb+7uPjHo8zZ09zYenFoffZ0xts32kqiq7rcubUClsbXSYsGZAjBQoHLtI7q0+VQeYjsc7eZkyvrZi8IuZs01znK1/5rVz/86/lT37rF0d/d2GjQ6U2R7B9XDVeXzqNE5ZZOWnOw9nFvaxfGIv8AiSWUCSygt2OGJMAtDaMEx+GIXE04IWjECFHnxnn4bdt30+zt0XUHne7OI7PYf0Fnh/6F9JBeh657XPL20
288hSxm6Dz8XNIFWT9cWAFkOoU1+7vPDLX+8B9n+aGW0wytd/vcuHCKs2WsXfDGsS582tIS2zkSEmRKpIkGSV4wKBqcsuAGXou9VqFlc1lUluxLHkVJspTPPXg03Q3zGs3XbWfc+3n0O5YFF4WOQoIbTJ1x7ZpojTDq5eR0txfRVVwEKNrqleq5DpHOOOqoedMsLy8wrmz53EjW6lKOiSizMASDLzmzW/CcRN83yf0hiLJNrGNw7CDRemC1JX4ZbOP3/mul/Lnf/KnHDtyhmceN31EZ089zRUHr2TXtQfZ2hrP3+KeK0aMl3OlHWjHIBNc258qKEAIVh/7Iq71D91SmVRHJLYsWjguCrPMc+v8h9IF2SGXgtndRrQ4X2/SmGnQesIkmJwcZubmGUQZSc867VMQBJNUGtNEqUUeqCpxrkakJZXpOmU9z8ziBLUHrE9QFBQ6RxSCYigGq0aiqQDoIqMXa+76rncxaUXRzZoo0Dg4w+qgfTbjs8gxjNdWyw4gVVZEvhhXhIuiAK0v6vWDQkvAGaG2lFJ4F32XSkOagz6FHle3h0MgR99XFIXRz7uocrR17DzPb5V46w8YFtXW0hLHH3uYs6ce4Dc/aHS0LqyfJ6ePY/2iYKDpZgWdnuZLXzOizAff8Eai1XN88dN/zEpsUWmYc03bKmqlXmNqoY4bjOMNUQQIR6G1GAX//rgJ9euOb5jgauhcOI4cQfCGAZd9B8VFD9vRxugOD37HcRDaLBJpI8slNtBamQyz/aisUEiVoywTSmFZAJMspVw3RiRPBRrTRDlmB9RE0RgCJIRZfI4jiSKz+FzXtVTFesTQpAsNzvj+tDbZpVQlpFvGAEatAVLXCGfKNBZMxnK5F+E7BdpWD7LUo1bzkK5iqKW9e7/LFddOcn65hepY+uI8Ytv+EsIqkUt/nsZUle07t7G1YZxjVfhIKWl2jEGuVqtIGZps1IjtKaDVTxB+eXzPToGQih3bLQxCFyilibMeoRW9YN4p3gAAIABJREFUk45H6AfmWVkq7zxJmb9ygroVI82ylDSNSXND3gBQaIEjC7zAIx5YWF6lghDQts2kfugZB7NUH62FQbdrmmAvYlGUQpAkySjo1VqTxsnIaABkSUKjto9uu0PZ0pn2ej36W6akDqDyMkVRoDQ4YiiEGdJut8l6GuEMmeMmQbtkluY1iWdJBw61eokkHZOkCLmTpbMnufdBA4WaWSzxlre9cUR5utnvIaRDvV7nzJrJ7JbLJXSg8eo+fSuK3B70cYIMzzWOTeRpRJghNaT5ONvS75/gzDOQZ6aC97gOON9cRjVMMFcgSTpVtJvyFx/+BACrnRpnVzqUfY84NSutVK2QJx36m+aalDXJa8eeoWzFnLMiwdEO3VShM7POFlRKd9CjF5n5W9wxy53fcjudCwOesxCnWjDJxERGtWwOjpyYufkqHhrPCjDHHePkb/ZX6OfmXo4eXeGx58+hR4G/DyjSnuTuZevIfC/cdHAP93zkD5GOabzd/4oSIl7i2b82WcD7n3qKW9/xdrqdHoVdi4mIKQqFK7IRxNAwhTqjDKrWGm0b+IdrKu+kyEASlgLcwLx2/Mwx5hcXEFhZiTghlZeaXWNHnFEDPkAhADUWLc+yzAQ2UW9k886fXKe5sck1N15/UaVTkCvFoGcMXrvpEicRoCnXzDpf31hB4+B5Pi1nXKXI8xy3a76vFPomsHIDinQMxRZC4jiCkpWICPKY3tJpDj1rAnZeC//4+XuYv/oA//uffgSAwd9+jHO//leod7xx9F3r8z6N0x2q8yYo62XLqGSTOCmz8YQR+o22e5w59BylExsEDRvkpjn9QhPY7uOSKuikh2mXysSZZZJ0NFOyzJFSi5Of/XMAJpIzTPvQs1Cl7vqAStkndXO8yASUQaBw4oR+3wZ82qHquCi1gbS2bNKXpEWO9Dy2TZhrV8ublOsVcltldL0AZ2uAkiUKe3689MAt+JUSuSMQF2wFaD0mSi7Qqpt5KbsuB11B1Iw4dMYkFiq+Ytap8NzJI3SWDEzt+muu5Zd/5ue40RL2PPypr+CWfWpTE8zMmCx1bX6a5to601PTWOovbrjpZbxw9F7gArz1Hf+BQuT8Ib91yesHrtpBYSHqQwd1uCZgCJt3GKXEMWdknmU4llCjNlOiuxEhhGB60WabP/qnHLz2lku+a2pqhiB0RmLH/TUTfPzZ+3+HwAq1d5wOLxxR27wvtkykSo1t4RBJQgG5Vep963f/KACf+ps/ZvfePZd81mZnA/GCfvWiSClV6qP9V6pWWD23hLKEzbNzs6aqyrgyYPwEyQvlRrUojBMDFPbM3XngGs72zRzP1qq4OiO0sL2ZmRnOnT3HVLlAWJ9Aa01fpVSnQiYnDNy1Wq9dwihYb1SpVCp0+hmrSwbWfeiRR5Gqz0R9igNXGH9jq7tFM+qwb5cJ/I/zHMrCj4dwu4d54EXP/P/vGGDm5quXcMHBUe7993+oJWSIWBn9u8Iz3McnLnnbieNLI38jy7bwrc+CJeiSjsYLXLRTQdlzpdfrkEU9hsHdOKk+DkgKLXBdn14/YsvirAf9lA3dYmPD2FeVZfQHfcKST9kyQrc7TSYnp3Ecd+RDqgKkFKMkW7fXJ45TClngaJvA1sLuuwJlq4OuFAgEyi4835VoBL1Oj9SK3cYDs/eEM0x7W+ZPuESkefT6aBG/OIiQ4uLkoD0b3NTumTGVw5Dd0XyO8al96Y3sxuhvLzr7tB4WVca2SZY9Dj/7BKtbxnbuvuGl3PBtb+FNCwtklrk2ThKypEfcNkgEHaVsrl2g3UvodmyC59RhHl55kru+8+VMTptkVd7NiKI+UWKeZaQynnzuNHF8URLGERRZARSXsKb+S+MyocXlcXlcHpfH5XF5XB6Xx+VxeVwel8f/gvENU7mq1002fRjQaq1HDWVKWVE6V44iXqUKtC5G0CgAbWnR08647OhK8zdKjT8LyShrRAGe59n+KROR9pMEzzWReZJYoV9lGnmH0aqpggiEcEZVmzwrMPGqGvVYxXFMIB3KZZOtENpBOxInSUgTWyXyFKeOP8seuY+5PdcDsLbVRjSXcfxhliowmWQ9FntzVInpyRqlYI5qaairJYmz/piyXivSNKPd2cK1GZpaLTCZePv9RdZF4qHyxIgsgoHnpSlpmo2qfHmRoXWBshSW7V6XLMsIVIgj1OjvdBERhj6tgclSNRoNVJ6y0jIles9z0CjiOCZNTJbBCypWnBL6halY5PkFwjC8aM4zXNfF82LyISGB7VFJ03GTcrkcEieDURZGSkHhlkwWxWZxg1KFzSQmqFSYmLe9L50u4awyzSxAoVNypfG9Gto2MYYlnzSrUvICoiix9yMJgmA0n1JK8jRFF2JUvet0OoRhSJouUtg+JV1I2oMuKjP3UCnXSNOUtJ8wXTVZcUeaNStSTWihF+5ElSL3wK4FpSKE0BQ6Q2TjLFOoJiiVXZQt689ISRr16GhboUlz8qRDPZhAatP3kSVnkE6XohB4NoM02MiJu20ym8r1R9XcFhsbJttjK
oWCmfkZPFuFOji1iy8cP499BHgln6QZ8Ys//XM89YyBHtVmq3i+g8Bk3KWr0EnM7MQ8dVvVaCxcxcxkFb+SsrhgsvU7dm1Hhg6nT5pMbyXURImmvM1j+dS418fvHOfkqQeZmDPXHnYiqtscpubNRbWOnuDYvQ+y/eUvIVq34BvXI/WBNENYjTetNYUaZ/aUUkjhoG3VCSBOBvhumebmBso2ldXrVQa9Lu2myf7OTM+h3NzYIzW2XYXikmyeWUceyRC64ECepoSlMqvnTSayXA6ZWGywee4IfslUAlzfw5UehaW/T6JilKUUFp7V73VwXY9MjiEcw6p7ZCEPaalmRNFzjS3kEgQGfhj6Ab6dVFWq4niK0uT4KEnyTZae2OTk0wa2dvvrXkPnkSc58Uu/CT9u3vPsRz5P2m6yZStutU5CeftuSjMu84umpyW67xjTRcbktip9a9OTqYAwjanVDEStvdYmvGKeW97307RXTHXywtFnePa3/4ld0S7WHrzPPMtBxoV2xC5l5mFyepbz1T24IeS2R7ezBkEpQFoB5mBmAs+RRn/O0qevnT7NwoH9xFlBNNQuizJWV9exyW7ibp+sn+AkKcpSIud5TJxHBGEJbeEkleoE9cYUmdWTCmJFMSXIKlVunDKViGP3fpHFzgbfs+8GZm8zZ4PvBqSbA4qOsSNPffkRvFziVitkU2YdzDVCenkGoaY6bCq9Cf7kj3+TH/nR/zKaq89+/KOXrLlPf+ILDArFCwpXZi1YGoEL58dQq/amydTXJktIKWltRhf9bvj/8WsAS8efB2nO+29765285OUv48t8ZPT7ztYmW+fP8cLxjne/my+U/xGAydkazlUVjh8eV0B2757meZr0u60X/a1wrMRJElFtlOlhCEWG43d+/scvfT8ZKnsx1XKuC6629OW1Wo1VlrjtVXcCoA+M3z8ibxKC++7+Eq95/Tdz95fG9NLzszOsrAzRCVUSwHWgUTW2c7PVI0oF0jU/77/2pZzjHPW5faNKgnYEDXKKYqhIBO3YIcs1ma2GrfQ75PkWU5UGbtnYWCco0Y8GdKOYf/q06WERvsfr7nodfmjWyv7vvgnfk3z6wx/m27/3nQAM+rY3WI9puoUGodXIdhXCwBU9zxs/A0eTpil5ZuD6YBBKWhcjO6IFaOfSCsYlFX3rTwkhyJU3omZ3dEGeZZTCCsKWOgUO7V6XsOQx0TD3vHL+wrhSBXhemWRYgbJyG2SKLM0JpEPJollEWRg9RWuqsyJHKQfXdZHS+HR+ycMRLoNBypkls26zFNoiZWrKrLGzp06gCkWaJpQsskqrgkGcsrm5SWIhacqSn40lDRzyPEeR07PaiVprS66WjZBV0hHESYayZ4rr+Ajt0u12qQpznSo1SItCCBxnSE7jIJzx3s6tn+y6Ll4wJMu4FFUxctJ5QfUJgWaMDhpq3+mLYICO45Cl47NOOOZe9EVrSGsDO7x4+51YOUtKQrkwc/zo3ad59sEKE7PbUPZeVOEYeQJ7D0IVHLx6EaZcdMVcQ3nQZs/CDVQbr6JrAS4D4VKmP66cqTZPPPtiUo8hCm44M/9y3eobKLga9jINg6qLIVxDnH/guqMmUNxx+XL4r9baONDF2MH03IA8K0bwoVK5SuCWKLQV8PQc8lyR58UINwwZaZqi9Rjy43m+EUm0MC8TkDkMBoMRZAxMMDfUxIGh06JGpeRce0T9mJpXQlQtUUPUIxAeK8dPUJ0xuki79u3l9ENLCGF1IUSGEC5BMBb589wyvpxjs7+BnxujlWQpnjeBHoqdonApCDx3BJPobKUIIfADcxCncYKjHIrcwxfG2AVUUO0uE8Ek2H4qjYMflkb3UnZnmJibpJ/2yeymFkIQeD79fp/JKRO0ZEmEEAGO3cC+rAACJ4gJXIsNL3lkysxByQrT+V6I1mJk6OJ4QJZl+NLHGSqRS7M5nbI7mpsoTdjW2D7aLFmWIIQ02kT22n0R4nk+3W6Xk1uWoU24eOUcrcwzztMKOlcUnkNuIXJRkRAGVTZ1j8KyDDnSJcuSkVheoQYkWQvf90lsqblcqtFaz9HaHZXbPSkoigRt12J33QSng3iAb5s7s8zg6SdqE7QtHCcrNFIKotT2ihQplXA7UZTR61uY1w9AtxMTViI+8jfmUJ+Y3E6a5mxuftF8Th4R1l0Gm33mFwzU8+mnn6SfCgLh4tqDrz5ZJXFaaNsMPOwja3U32dq0zEe1KfYd2E8/7/PskwbisfHcEa78iZ9i9ZQJpKLZjHvu+RqIDV7/xj3m2TlV5hbnqITGiWiuLbG+ukZGH6wmx9qRp1lPQmTN45C0z9gt87o3fCvVqjm8NtfOU/ID9GZGPx6zJJ29+0vsiaGzYg+0Sk55zQdt7mEh06wfeorwlquIXQuTJSNPBUIrcuXZtSHsAWKXnYUEOo6DUJal0nNwpKBcrZEmY6ip60ka1uktPFBxysWmWavCJnfgYp0gV2hyG9wFnk8nihGFoF4x89DtbrI5GDA12SAcQimyHJ1HI0YxMM6GUgphyWMqjsB1HNIhVAoQyhyEhd1Dihzf93Fdn9SKHWdRh0IZVqUhbj4WkqrykeHYBlalz8CJSSwL3iP3HIJ9U4hPHxu9p/zUgzwf93jYfvatt15Pp3eUk4eaXIhMX02jVCZJJVpD2TZFb5uZZd6doCXNXO0OGuyvX8vHf/l3wUIxw6THqdYpbl+8nY6FQiZLF9g+uYdVuzZOqSadeAKpMso22aAnazR7XcSWtfGDAaFXoj3ojeB93eYWJ86fJS9gomb6PkLXIwxDMmvfvLqPU5b47jTaPs8ii6mjidJo5CimUrISdUxDBjBZC+npnN6Zs/RPmP5UtyKIwwnEQHH2s2ZfCddldnaec9aZDKemqVVqFHmKZ+e0v5GRyYygMQnZwui5SznNxeMDv/I+3Gmf//xTJuA6c+IRqrUyLxxaa04eMwy023fOkKYp66svhuZNztZorncveW1qrjr6/9aaFe+ummf+5KEnRgxnw/GpvzTEG6FNGOzYPcfxw6fxpMub3mRIdVInJh/0OH4RvOz55w15xdu/7wcB+OiHPjj63Vve8TYA/v7TnyS1dvN//t77XnT9wxF3v36zejbocYiHLnntwXu/+v/5OcNxcWAFcPbUeC+0Mddz6shzL/4+++9X+XvzXV/+3L/6Xf+eoYEv8Xdf/5eBsc216uQosJMW6iUdcC+CiBXCBEpCCMNWBDhCEBYF0vFGOoHSJr0z2xvmSol8AVxsdG163BIihMAT6ag/ptAOvlsjTQSOGCe+SrJGqRRQZEP/rWR8OPt3nuuBdtCOGPkl6ALHL7Ftag+Tu8yekbrE2uoF1ldMQmvz9HGKTCEKTWrbAHr9FgiPQE5RtSLwAycj6fXZ3DR/NzM/w+mlMziOpGv7TIPAJMwRAsf6sw5y5O8O7106HirLiC0U2kDRDVTetcQlRW6Yq0Pbe+9KSZoW9Ho9/HBMduK7LoUoUKMWGzV6rhc/7zTJGJ5RQ9je8GwaBlomMHvRdI2G0Fzixw8FoREuxRD3mBv4
odaagrFmnkZQ9sZ2YU712Ug91q02bNn1yboh3fwCOrSEcllMHPdQ6XCOqyzuWGRtq4dr13AoJylJD9VJue+fvwrAqbhJIMQoB+QU5yiKHPn/svem8ZZddZ33d621pzPdc4e6NaRSlUpIQgKEBCJq0EZAkQcfEBukfWycQUFsp6dFaR+7G7XVdkCwfQQZnUEEQSZxaAxjEplECGROKjXdqrp17z33THtcaz0v1tp7n5sE2k8/vuBFrbzIPafO2eecvdde6z/8hgUTYfdYIaxt+M1KfmXg31dNcrVIALWw52LW+OGyKvdUNR466u6TFLXBZW1kqBeg4Iqy1Au/XCBFgK50c+MpFVIZx7MRqq0yWGtb6XfrJkEUJg25WwjRdF9qZZUoCCmqjAMHHKnwm572HE6dOs3tn7+DE6ecGVx/2MNMNaYq2LjfKa09/sYb2Oz1KHxAW9qJT47aoKzUU+Igpt/roH31JU4SJ+DgBTyMDog7CaWusF4SW9uKOIqbRDWMAsCijSXwJanJLCfqrjLNc7TvZkkVUkxGDHo+UDRw/PgJKlEQRS6ImUwmJEmHyXhGL1kg8QdBo8a4qwu0kVirKUq3YBTljDBy0vqy5o8YhbCiJetjm2PVz2FcYmyMQflgQ4qAWW/OBa92NJ/POXz0cjYvnG/PnzIEsSTLpqioJVJG3W5L1g8jlgddzp3dJfGa+MVEkgSaUm42Xc3l4SE2z43pdOsuWs7SsE+lt5G5q9CmQcZguUdRTZzWN2AihdGyUX+K44g0zRkO9ze/r9OLGCy5e2Fp1S02XZUQxiVB7JOfpSFB2COb22bTexM381MveT733Xec37rDEX1vvusuZLSPrp/7wijiuIdQE85/0c3X2z74t8ggpMh36a24qt9PvPwVbIzGjVLn7jkXNM+3R1x22OGWpbZsHL+T1UsOs3bj0wFYW+kxOrfRBOeDbkw/1ox2ZiytuOrvvquvZu3YZeyO3bXakpLeldcTqJKwlhPvj+hfkXDJoWuZZe4evee+E5w8/iBx4BP2ICSvCqL4APG8vUceOL5JaNdQWz5M2b7AaRvQ6fprPOxR2gw9Ose6N1fVZYkJQqQKCHwX1VqBkAtV1cAZpEpks07FtsBkKVHcpdbe6a70KI2m8ny5LJ0Qh12UChC+ulxpg9alr/r6ayMseoFzNU1nYAxF4XgBAPu6K8AKQtsFrHzVJGvuOF5URgmKhoOiqKoCYW2j5KQrTYATUAGIhGG6c4Fut9tW6nRBrCzGaKzfCJe0RRMzKRaEVM6cJCUm9pXNaXYvhw5cSn7powBnjntWT4mQXLd6FQBP+IZv4fgD9/BvfvB6zmx49b6lkG4Jp794Hxu+aLB513GuPrxOVQvFnDrHXe95L0pU7NiaK6kJbriG6DueyeePu87V0Qe+wFae8ylq0ZKQ4MHjxMWEqXKdKh27LresPE8yz6iCkFwLlldcoaiXdCjHKT0ZUHmT25kQjMoLBD5gsNISRIpdUzXGxoEVBNIhJCqvDjrXBbM0bc5vFkSgEqhK8sidz1kQIjsriFHGzHMLyAommxv0vEy3zEq0gM6+1YYfa6IVVi49gIp2qUSbgMjdvfvmf/zF/8xDx3TycMGHe+48xWVXuI7ivMgWCpFuTHZSlvf1H5ZYQZtQLY7Zrgs6Z2xyitsf9u9AIzxxL8cB+LM3PaKe5cPGYlJVj7e/6Y/+Re/9cuPZz/tO3v+ud/Ktz/2O5p786798JwD/5/Od+bFSiihMEELwjj99OwDP//cv8Ea4kj//o7c2x/uBH/7hBvkAYKzr7tTdnjiMeduftOqO3//SH6XUFTIQWJ/oSwmqClBh0FClhRCEYZuk1ImOEoHjeeE4xcIIZxlTd0yERovWiBljUEI6rqefi6YyCGGRMqCqhXe0/wzlA2hbS+LbBhxUmcJ1eoSi6bFpDdbSjUL/+yuweVMkr39LvX7Vqs1KKUwpm6Si8t2EIBRN8TiMnT1JpDoUXlgsDBVJEjV7O1R0QpfI5D4uUsrFHw8+eBdfvPsL7lyNC0w6I6qL3AGORyjb+DSOJEZbjM6pZXDDAKb5vClEz23FkcOXsr2z2zR+lAUVxXS7XeY+lpBKNYU8d5osViiSRDbBvhQGo0uklE2SpLUhinuNiqIQEhk4RE3S9UiVzGKswVKymBcYAaKZU0HTLWyVsYs9SRK4rqmBplBsrSV4CDerlgkRsk2yrbVYCmSduCjpzK2tRdr6OwgqYwlkW+AYdBUf+7uPMPKc6+5qSNxdZnT+FMZbUiRxDxWFZL5Rc/2NX8PhS5aY5lPSzH239fWSjTM7xHGPyht298ScfhSifeE2KxN6/YLZZO+65QoGrdDHXk2Ih4+vmuSqzpXqibV4MQ2AsUi16P5uHzHBMsaQZ+2iVQtj1JUPa4Qj1vkTU+QVSsWA3ZMkSSmc07upCbuCQadPqVuCu/PRCJpj5VlGFMdusix4bWitG5GE/fv3c+/xE1x/09dy6YOuiviZT38SZI9SFZjcbU4qm2KF6w4AdDqRv5kXoUSKc+c22RnNkaHx33OGihRjD43o97vsnJtiKal80DDorLIzutAkB8YYyrIkDNrfYkpDEveZ51mjINbvLRGHCfNtL0u8O3bVJ5PS73vVqKKimhtiA33/vnkxZ6W7xMCr4s3nc6rKMFzeV6u1EoQCIUuyfEKl6/as5cCBQ404SFnmzcZRb3DduM9kMnFVHP9cTbi+5ipf1TEVVhdcc/RYq2AUhuBl85VfqLXW5FWPyrrgrtS7CKHR+0L2rTribycasL6+Thx0iDzZOIl7BEHUJESBipr2vOpEzTmuq3X1vA3DCKniJvGXws2Vxc1RisB5o+l2YaNKkapDmfs5bHK0zgmjNjgH+Pit97CyfpQfe/nLAXj3X7yB6YVNpiN3rN35LpvbZymyDspverom7krNs5/7fH8eBFFnwPKau8Y33HAdf8ar+JZv+5YG5jEcLNPpdfnMP/0Tcp+D84nLjiA2Sjq+Q3vbrR/jzL2nGA5WsctTPxe7HF7bz6YX8NidJcxLy2VHk4asW62uES2tIIIllHaSvzddewnXP/axfOLTjmT94MYuhZLsG2Rs3NdWhdOTJ9jqBQyMm/urZYAREu2TtBkFoyVBGVpCUVfAwKqAMDdUYX2/uWSnvsauCtcWdAACq8nnczqRxdQFgiBEEBL7YLTXWWE2niBt4NYJPNzAVghsU0gSCBAC68VOBoMeo9EIKaMGpltWLrCwxt0r9fcS1jRVXOW7VlpKZFMY8r/ByiYg0caCVM06lZSWXtTFlhrt1xwlDJKAOI6a+6hCIHXGkmyrjCIZsUwXlbvPG0vNNBjtEfKI1w4gzpyl45YEhrpiaabIk8t47BNd1VgEBTpZ5sobK056r50P3HEPRw4cY1W44Hz7wRMUVjHuCtZ8B+TM7iZ65zTb997Jdzza6ce/Nn0/5+Jxc68dySQr+w+QTQMq/5tjGVHZnJB6LbVUOiOwgnLs5dNNj35cEgvhFDKBUlg6/QjjhRIqYyiE5pIgovBJNSJEF8YpDfp1dyVSdMO
CdOq7W/OI0p5FdRRTXKW1SOfEwZQw6BF5wY6QCBsJdoUPyCLLviuOcO70BvEFX6WWG+yeP811T7ieVW/FAPCil7+o+ftNr30Tk96YXpnwIy9+GYvjGc94Fn//9x/c89yD95/nK41H2o8Hy12CsK0spyO9598vv+oQWVmxcbyVfLjsUYcRVhDiE9N4iTvu+BwAj77aqbEtr/Spsimf+edWMv0F3/0c3vG29/Fd3+O6W0qGvPWP94oa/NAPfS9BonjDa/+wee7l//nl/OYv/2bz+Of+y3/m13/pl/nZ//rKFv76RMP7eSdP/sZvbGgAf41Lrp7+rO9o3ht4hMQ7cMnV137D05ok4c9pk6vHet89cAUUp/qpGvQFSvI2/oDf+v3Xu3P7BEGeuUJPXUCTUlJJ9iQkyhd4686HCFy3IEQ2IgnaCkA4VEWDOHHxT9CoNluCMKSqqgbGinAFpcqaxmrCVro2F3SPmyBdEjTfEw8Pa4tAUjq4W+uTptFCgFAPE1cQtHOrDvxVUFtkVCyKTdS/paxyet0hx+93ipLr+4YMlweNsqsrXmmssEh8sThPiTsJO9USw5G7/8qBJR0LhD9PMne/S+uqWTfAIKwmkIKDnmKgjUFPdxrYnsGyublJmhWsrrvXFOOU7lLCcHlAteOPH6hmPgDoyqCxDJcSl8nirrdUIUJoalaMlK7gLBu1RyiqCgtY2j1MSotQgsp7cgop3XyqE6mqxAqv/ljva8qLSyzYLNTxuZStoJxZjFHci/y5FnveU+qCwAuumcqhQaRQmDYdI5AQdNsO+oZZ5vkv+27+5184ZdOd2S7DKGQezNnwMPkoVuRlRp76AkUw4O7HP8hcj4i9Wu/5zS7h0gGiYcGpM58H4P67TxN2OqQ+JnjUZUc5fMV+7l9IrpRSlJVT0I3DvYWlLzcuClpcHBfHxXFxXBwXx8VxcVwcF8fFcXH8K4yvms7VIr+qHnsqYUo87HVfrnu1eIxKZxRFTll6ozgMSkoK38VRQUBVFkjVdkPKokCbAl3JJkt1fKucbA/nylVy6m9Q++B0kpDUQ+mSqIMp5uSe/GyE4Et33sF8OuM5z34WABvnNzl1agsVKeYzhyk9fs99XHrsCg5f5ompm+eZzaaEcZs1Hzp4hAP7h0RRRVTWGv0JSknyJf/7ZIxcC0iSpKm4WRVQFlVTlYuiBIwljmPK3FU0jDFM8zG701HTCdoejelHq6xf4iqhx45eTr/fRwhB11cZhFJgJWEY7yH/Y0TrERQIhFAIJLkXgQimy3pgAAAgAElEQVSkQiqDQC/4+0iy2XzhGjv4X2Vp4C+jnSlXHOvthQpKV4WrSa/GGIQMCMMYW3eUGpy4bDqWSimwKbGHOGqryPMSGYhGoCDLZkgFRaYoJ35uGIM2Y4QXmCirEf1+F1uF5P56JkmCKQuEUCQeijUv5+hq3BhcatFWEetz5ScNQshmfubVjDjqNV0GGViiYMh0PkMEngexDz776U/z7ne8kh94qatW/8lffgJRjqkuuErPaLzD5+45w7ndEfnccVHCToIgYLY7RngjzAs7IwZLPQSOY1Ibqt5z3yazqefepJqyzJmMNrnwoOOL3PDkGZceuYJi7DlmYoV9j4bq1jP0uw5OmNFjNs0Z+q7GsbUALTp0g4A8cfNOBXOOHbwKaw2f/2fHS1hd7vF3//Ax1lacwEXYDQitZjLZZenylmNyww88j/HuNnbLsVe3pYS0xPhqXloZrlpbIZYBm75rLLWkOyuYS0059yaJYYiUsrn/pJQYHGymsrUAimKezzG7htX9DlI5nxUYa6ib7lFokKLylVpfrcRgpasw2rq87HpXTQc0nWusqRAYdFV3z8Baz9dawMPrBXEMIyxIJ7xR5H6+KLdWGasbWIdSrloa1pVWM6UoCgJfSQVXJT5x4jhbF3Yb7kScdFjpJxy84srmnE9OjCAe4YvEKBvTD9bIjx0BnJfQ+Ow5srRi7RI3D5Zyw+DQCt2rvo5sw8Glb3j0pezmM/r7D3Hre1zHcjmDqFcSXXBz88TuGNMxCAtV6s7LSFme+X/9OzZu+wzv+d3XAXAolFwRhtzn7+MqzpmON9BCEPsOu5gUFFlB5aFKorvC0uqQQ4cOcf6kE8sophNQBi0EWvg13WhKC6XvCAaRJJEh6XyOqu9tDKobU5RZA7OyIXTCkMTzwuaVIlGCyFpM6dbTWBrKMKc0IaGHrYTWYmdzBp6LOjYlh578BHZusVhvjHnp5VcibMnxk+cYD12l/O/f+Tc84ztbOfwXv+zFPHT8jze/hZ940Q89rGsFsP+Qq+af33g49A8euXM1GT0cYrg4/p///qu8+Pk/uOe5B+87zZv/8s940fNf+LDX33W361T98bveRref8J382+bfVtZvAN6H6js/x8dddw3wLv70fe/le57z7QC85S1/8rBjHr36MXseH77c8Z5/4xdfye+8+S0A/OSLfgiApeGw3Wf8MEXLF69swc/9h1Yc4+d+bG9HsB4vf+mPNn//6u85q+watQDwih9+CUBjPi5FgAo67v62dTcEpC3AOksY8EgdGbSdLKUojSYQAY1XlHYd7143WJCTloRSkQl3vYy2KCH27KsSgdaWMAy9eJcbwohGzAVZI490I2WvanUt5fjt4H0CFwTCnHhOfV73dkjqz66HNhmmcL+lo5YdHC/IsKYW3JDEdKgK3fzmKOogTUji4YWV1U6+XAhszQOLusRxh1jEUPpOmZEoE9a0SLTNqaoSYyuC2nNUGfKsYjqekoReCCMwFPN5I2iV5TkKFwONvQ9boA1CKJaXVihr5IFynaTaDFhHrnsVx3ETEygPHTSIJi4WVpNnOUHQxk7CGHrdLv2uQ5yU0nmwGiQy3surNAu+A87zSaO9aJgVAoTZc39b41/nsZ9WalgQl6uv6eI1rK9sP+kR+s5VZjOqSqOUaLxEnQuJYT5rY6BsMyF69FFuetZzAbj/3gfIphO2zp9muN/FJbpw3UiVuOM8ePfN/Pov/J3/wl7MREiQA8JwjvHcy0HSpQo0VnrEW3WIXpI0ndd2OOSYrbuK/wsp9q+a5KoeD9W/hxYqWP9dj4fp7DfqMgtkcWsIpEuqwEHEZBhgPUjZIrwaTWsGrLW/2KjGy8hWLuCthQbqF8dxzMyb6ta+W1mRE/jXZXlOp9NtTP4+8rGPcu21j+XU7Z9H+0Dt2q/7Wk4++AEiKRqN/wujETruoH1S6CCIkkYGEPiGm57Jk77+Ss6fz4hVDW0r0da03lTUmOai9f2w1rWLm0BOekKkJfRKNliN87SxGA8/sQKytGA8nvrPgtOnzyKImM2dalsUOd6QlJKuv6kHgwHWWpKkxjaHKBmyM9pivw9Cx2kKRqBUjA3c8U1pSJKkgTNGUYRQUBnbeBBEPUFJRl6YZqHudDogLbO5953KcwTON6tOpIwx5HnOcDhk7q9Np9NBVyXVxN10YRgSJxHzvGz4XFHcZ3t7RCeBpWWH/03nBVpLhHf0DkNFmuYIMUd4PPf25jZxEoKU3Hmf450cO3YMVMCkcL9Fqo
Q4dkqOnZ5bDALhOT2mJbn2u6tUek7ir3FlDYVOEYFCm3bRfO6/ewZP/oZr+J1XOa+fN775DehwhbjrYHthvEoQG/pByXLP4bM2R2fpD7okcs658y6grRCMLmySjV1SdX68Cz8Ov/9bv9zMs8I49bxDy+v82m87N/TtnSm337nJvv0ueJ1Ndjh3z30cvvGJPO7RLqA5sLLG0oEhR486uI80htko5cJ2zmZtUCgUmaoo85D9h53J5oWtL7JyZEjmzTnDsENV9rGTlP6h9eYcXPm0Z5HvnkJPvf+ImFEWE4Q/dmklpAV6MmWt3gwSF5yYwDAQLnmbTKac29xsYDtZXnpIYNhAYpeWFZcc3s/m5hZnTrhzt75+wMFPtLvG0/GUIHKqe9ZDjgNVY9v3BqcqqL01IAxj5nMDxiL8RtzAPZyMoX8O52PiD2Uq7VS0hGhXDu3WQYxZIOUKiiJvfIq0FhRFwWCwxi23OejlP3/uDsrCYCpFu3VkCBmTJM7Ikd+Fu04VHHvsddx70sEzRTanf6CPfeqzgfe7c3XkIEemIdkFfx1OjlkaCE5+7ANs3OXUHr/4ycvoxEvc8PhjbJ50KlyXHzvKFauXc8dx95020i0uUV3mMmQl8hyv2Yxy/Ri7eYWs3Jy9LFpmlJeNylmsFMJUBBaMVzUtlmKiSBKH7h666ponsHlhl3kqWX/UYwG4cOoEo427CftRy/EQkmI8Q/miCZlGmpJ5YFEejlqWGlW5dbEm8RsbOL6DqXklFZUJmJGQRG4NtCbDmC6yLKk6s/ry0emqRpDkYNjnC+/6AJHqcvjxVwOwu7XLZ0enOZWOyB905+otH34bP//Bn+JX3/waAJ6sBE/6wVfwtd/2dbzweQ7a9hM+ifil33g1b3ztrwDOQBhoinEHL1nBLnBhwe0Nla5Y2beE9PybrXMTltdWCEL3uq3RmB/4/pfxB69/bfO+2U7Ea974p4iy4Cdf5j77N1/3Jkai5NW/9wZ3rToDPvvFzxKvR1x3uTOFvuvu02xub8G3Nofi9i848/LI79kf/YePw4/Brbf9E79z4U/ddahyslnGL/zUj/HfXu8gd5lpaQQAJi34zdf/IcZmZLWSnB9lqZtiw6te/yY0Fu8h7GIPY3nNm/5g77mxDu77f7/UJbOvet2bsbY12bXK/W2VWEh2/DH9nJIe/qeAGksvhMAWgjhoTcStsUgVon18UxqLFBGlBOmfCwBrK6qiQNbcJS9oI+s1SbpiS1UuqLh5/pYuNLVZtBNgoKFPOKi0dTGYTxAQEdoaJJLAF2+KKkfSQvl0pVvwmt0blDv+D83fWgQQ+uSDkTPFJcYYXxQJJJXJ6cZh4z0ZSKema3wAL63E4GLGsjYRlmCVxYaWtHLrdZFq8vmIQLffLYgUtrLNXiADQb+/hC5hntUCVoZItUlukiTYsqDbS7wnI+zubpHktY5AvWDXCUtNX1CoOgb160QUd8i8b2fNXSrzkigKmv2iruYp2RbjhRUooRwMsIa3qzqObtX7oigkjvsLHrJQVaZNsmXrQVs/p7XGaMlDmyN2QQAC48530gsbqkmeZq5BIUQjlJVlc4oy4/DBYXOsMyfuYPI3JzBePGr90BL7Lz3KE274ejY2XUGpzCZYk1L6azCfaQqTsr27we7IxXS2mlFNKyZjRf+wWyuL0ZTddOpELQAtxhRFwUJ6gdXG5xYL5/MRikmL46souWpvqNqYc9Gsy3ELNLUQYo3OXDQ9c1Vb02TcALawlFXaTMw4lGS6aFX4jHBu1zpGyDo4jj02t81SozCkrNpuT1bkKBm6TpXv2hRF0XQZmoqM1JRYlK/K7J7f4inP/ga+9mk3sbvtfsUlKwWf7N/MZDLDDFyQG2CZ7I5qUUT6cY+kv8L5zY3mt1155SGO37eBQFHToutYae6DyTAM9ygp+pO8h7cUxyGVzj1utiUoygCqqpWVB8GgO2B54DoDQSD9RtpuCFk+JwgC5vN5IxCSpinj8RjjpaCr0tDp9PjMZ2/jCTc47Pna2pqr/JVzIlWr8Ammk1ZifeuC44klScJw6G68cZphjDveeOy6DA888AD71g8gg5Z4aHTB8eMtQbIoCiaTCZ1Ot10kEVSm2mMSmCQdpAiYePUoFUQURcFwudPOhTwlioKGF9VJuuR56QwLfaVsPB4RRwFJ0mU0csHcyQdPESdhEyQppeh0OhhDM6fiuEMQRM782i+UttAYT+QFdy/M56m7Z2rlyqvg0x/7HGEkeeH3vQBwvJ1f+7Vf4yN/73gISdJjOkkZLMFb/shV+P/4T97Bh959F9/5/Bdy+yedKtbZM/dw2SWX8or/5Kqtu7tjXsEX+e5nX4+xXn0xjNnZnpPEQ/7qPbcA8L43v4MX/Pj3Md/vEsW11UMElxRIFGc23P0w7OUsF4Kj665aHAWa6FIDWpB5QYvSGiSKKlaUj3bX/cTW5XzqE7eyM3IBfCn7RKEk0AUfv9lzrn4a/vY9t2GKLTqhLxBUM+ewLuuqHERRiBGgZM1JSjGloSw15846MYD77ruPyWSCXqjYysC7tfvrMBwssba2zk3feCNR323Op07fx4H9R8EL0wRBQFU5DLuoq5MLLvd1hdYaS2Hc7wYo8pyqLLDCNh2vmmdopGjWKVc1Ns06ibBgCi/B64MPo8FolGwDZIWX260/r0gZDIbcdus/8clbXbcgTPrEiUSYEkEtEd+lFBEsmFf/86f+npOn7uewT3JLs8X7//Y4h7oWft695tDPvYIrDq5z9z/dBsBkrNmdVpz/yAew3rT4/J0bjPOSz763oOc/b2Vtnbs//nHG51x39Nr1Y4xkwWqpmW25hOTrnvRvmN/+Kar7LjBYconvhpBcKCD3QUcw12AhVQIVujmlUomaFAjl7sdTd9/DPbvbnNRTOl5EZH3YY0V2sbnF+PWzEBohW+UzEQXMhXHCxP7ahhEIW2GFbKr8hoCq1Ki6y2gkVmqUnFLW+5wRSGFApeS+M5fIgFmZNfNgInIGcQeTCfKR+w43b93HzAryvA12Zkz5VV7TPL5FW27h13ik8V9+9qfhZ/c+d+rBrUd87Vcao62dPY//gNfuefyTL354d+rlP/rwjtq/ZNxy84cB+MPX/o89z//er/zXR3z9L7zkJY/4/E+97KWP+PyrXv8mRLg3oPrZH3kxr3q9K15Z7VgjD02sgIZPAm7NQSh0E4h6fjetYlo9GseY0u3hWtAUYIWUyMhSmQoRLHZ2KoQPukOp0Lpy4gl116aqkFIRiKDZ72MlyauyIe0aLPIhojhCKV8UMk1xSgSCSpsmAhAIrPV7bq0giIs96o4ZOG7Y4vkRvviztzuyEKwvnHYXxrh7TymvuIckXOBOBypx3CXfsRAqJAwFuqoLvEFj3lwLMVSiwiBJGHDd1dcDkOZzZju7ZJk75+NzJ5iM50ThgJ4XiijKEqFihC6Y+6Lvai8hr1Iivx1XRclkNHES6rue72Qs/cESUQfwBeww7rlEtRYfM4DICIoQXXeXtAGtEUq0/r4KdFE2iRhSoUVAHHZZHbp1fxa6jphmp
1HnnUy3QWWUXrhpZ2eH/euHScL9HD/pkpZ+r46H3TlYXl71+1jlRUrwwiCafm+JTuQSp/F4irVV8x1FIFAyxBRTLuxu+etQK+0q1nyxWuuAydQy3NcmV7d+/J38a49tHm7ZAPAA9/IA9+59UliMrdz96NXIA/GV06eLnKuL4+K4OC6Oi+PiuDgujovj4rg4Lo5/hfHV07ny+HUhLUJKrDFNC9fYuiVnG8iBUz4RTeWl7mUFQYAvKFL6x84vyf17VVUEcULp4WHSc2+UUiSJV/1LC8IwpCx0I3FqjSYIwr1te+GgaE1L3lqsLVCqQ1F6DyJbEkVLzWuue/w1fOYzn+XYYx/P/LSrcgz3ByyvH2YyuoD0nYD1fYfZSXcp574lXKRkYs4Tn3Adf4uDHA36XVzVKyQIaziPxupWOa8oCqQMXdu27qYpiVJB06E5ceoka6vrlFnRcCmMMUxmY4bDQdMRAkmRV4286e7uDt1eQlloSi+3PZvNGkPoyOOPd3Z2yPOc5eXEv2bOcHmZR11xNV/8opOerxXyjDGN4pBSiqqqWgUlHAyz1+uhNpy6XJnlDTStxhtHvT6TdE7lL16SJJR5sQfKolTM8lrPwboWdEmTTtt1lFL68yc5cOlBfz6dAk9V2AYWtNxdcjLuvpM1nc4xWGZp2Rj4BkGHMi1IJ7vUZbjSlsyn06brUFUQymwP38q1zCVZlrVKh4GgKEvyojXnFEJgtIOtAPBCuOXWj1EUWeNJ1O8n3HTTTXz+8076OM80q2tD0mzE97zwpwG44Qk3MtwX8da/fCPzkT9+EPKkp1/LyQ2nmLblq9GD5UMNZFNbw8r+FeKoy913ums6743ZPHM/vXXXhZuVF4gGMbN5wdaGqwzde1+FiD7PrJYl1jm9ToBQEXHsKldSwFK3w3D/EqFw3Ylwbjl37iTW37Pm1Gl2bMbKoMvPvuKHAbiZN3PDjVdw/12au+9wXK3J7g5SgjS1abkmzbepdEHqpZ+DQBIGAWmaMtpycLROp0McdQh7tf+YM7QuioLAe64VRcWDDz7I/cfv5f941lMBOHTJpYzHo8b/BAOB7GIW7kcpW1hzXV02xlCJqlXlKwxSOUWlRpRJSEDgjNRr7L1A0Hq+NPMDEKaFGAeBpPRyvu4SK2Z529ntxRFbm9v8462fpttx1yGIQgpdEgRRw/+x5GijKBsl15nrTJ6+n+XQzf2jouTG/hZ3q1a17vQHP0znmmvp+Gu8drTP+nKfa57+JCJvXkkg6C8N2BrtNucqnc6wuuIxc69Ad/IMk3NnMQ+eZu2c+/6HH3U5w8uP0bs84M6v+VoAPvbhj7B1x+2s+e5dVaRUoSSKghYqPJ0Q6pjtvvstH9v6EsiIOFScTt28u3+2xeXdVS4LOyTe7sJKp3JWdw8EjjflIFq+em/dupsVOXiodllZQiKU53gaq6mqooX2AEIokBHWBsgaklrlJJ2I3MOJS0qWtMQKyWdOO4+weSgRfs8brri5p4KA7VEOZetv9kjj1a97E9vnN3nfX/yuO5ZS3P35k1/xPSv7lrDWMtp6ZD4WQNiBMn348z/+87/ikCe+G/L//vorH/H9z/y3L+BFP/T9ANx888287rdf9RW/0yONJz/jOVx7zeN58+/+Cq9+vZNsFzKm0rOGv/IzP/wjX/b9D4XtAfzHl7hO22++7g17KAz1+JmX/vBDHjsO7G+/oZZab/elYEHtDVjwhRINoqeBE1rr0RLCqbmx0Llu7n8JWIfQabqhCmM934iawy5cJd62SCBjaR6Dg69K6+KltlPuvpda6DhprbGVRcnWRBhapT+oFZnbY9fomkV/LAeP87SNxYah1a1qqzYEwqngae3uj8B3dKvKEvmOcxTIPcdwyr2eRyRqzlxEEls6UUw69mqyc81s21BYx1NOM9/5U3O071TPU0s5SRFWkng4vyTk/MaFxhZotHue3GqSMML6NT4vBP14SBwM6CV+X4u7jktb1bFaSBAl9CWNnYcxbl/Q2OY7KBzvrUbrSBERBZIHj9/O1tTFb5PJFFNIKl2wuX2uuSbdbktbAcjzW6iqip7fs4RwcVgd39QxUSeKF+4Hw4XRhF5vwKDn0AK6zOn1OtTIptqqiMAuwBUFaZpjaeOuLMtIOr1GFfc73/1UVnp9pqNdpM8TVocJtjCku5qJ37eL0jCezQk9V7woKsqsYD7LmHujdhkr8pmjBo123DnfGeVoRKPG+PRvfgbLa/s4dfI0Fy44SHQmM8KOQxnVa7qQi5Py4eOrJrmaXNj5X7/of2MURbHHPNZaxyVoUjRrsTiztTT1mMtSIFGYqmoI7FpXSNqWfxiGDyO2WmuJwpiizIk8Ya+onERk6uXhjx8/xVOf8+288y//nOK0u1m+5+e+hw+9d+QmgyfeP7AzRfUsK/td8JHEffr9Zb7n+17I3+IMCd/93vexNBhSFKbeF5oFrIEjGENROWnWejGLosiLcbjF6PzmObrdxGOl3YEc18klDHUS4RbSoBX+0BmV1oRx1JynwZKTl5/P59QOc2v7BgixRI2HGS6vURQVo9GYffv2N9+zPqdFnfhKSRRFTXJljEtmi7xgNqt5UTHzLCVL8z343yzLGulQIQRR6JKeRaKvEIIoisgyd/yiLFELmNrNzU36/S5BEDSy9UEQuKTNVk1QFsq9i4/WGpQL2uYeTtjv9wgC2eCL69/j5MZrGdao4bxI2mKAkk44oebDGVv7qdXwEMcHtEY01wZga/Okg4TUVgGppMznPPfZjtR+yy23cu7cJlHYpyYOfPofP0GcSExlufzwYQCe+a1P4ZrHHuTkGXcODl7iIHxJr9+aOVtJGA0Y7cz43p9wAdB1d38YsSM5uHKVnxuGYj6jGJbME5ccV7MOkRlQprVZdkQ211R5xe7suJtnYYdzGznH788R3lbTdBP6nSXWpOPsZUODrGacLyy33e4lo58MD5wynDqnCXtunvWjHlJCzUyrqoIwtZSVJPFBtRLOpHk8OddcU7x4xWLiXXuulV6C25KTdEMg5EN/5/yVrrn2UVx62YGFuWGJAmcBUEPGKuPmY15VZFULTcYG1BzvshKooIOxMVK0wUqlNUq1PDvxCH8Z4+ec3zxEJDyxvOUElLmF3DZE6u7yGh/98KeBoPHsK8uCThKBChpYiSGlK+Lm/rjANhJDGErO77i17OCBISer/aQmab7Tl/7iHVxQBXkNCzJ9SiKi5Q7BsueBxEAUs9Q96B8AKqISluWhO9bScECSVwz7+zGH3HO37+4y+8gn2bxwgdMPujm0OR0hAlCez5GqirCT0E9i0l0PCxYSOejwqYmbm72D+1CFJi0LDnTdupYVJffNt0nkGlf6pFOGEhF3GiimtBKhHdy8hYVpKmFQJm8kjkMCkjCi8JCcSCm6QmLRDfxUa+24v0Jg+/592lLNMuKo709JiJ1ptkXFfbE3ZbdrPOUZX8P7eC9HLnP38QP3niYJ+gx9wPm0Y0OOHJ5Sysfzmv/p+Eq/9trXsn99HydPfoneAXeNq3NzjhxbbQJ9gXJQ5Uq3QZnn
4yytJk2xS+uSsrR0O+7eHgczfvLnf4GrH3Mto7Hb7ytrCC4LKPOsgbS/5e1v5zOf/Wf+/kMfcXNqc4f9+1Z41kufyakNJyySdAKe+bzn0esuM/GB08FDK/zJ617Ltz7vuwDoJD2MMSSdiMjjs+zllk/c4o67suzWhNNnThB3kmaf+a3ff70PICNe8R8cdPA3XvtGv3213ktSwqvf+JYW2rYAzVsU3nrV69/k9+SHiHKJvfFDfb0BfvsNb8aYCiNbwJ2t/7PtZwRBZ48UO9BIv9dDSonynk7gkuz2+6k9r2vep539jFgwCHavd0lRDVd2e+BeHnwUqD20jkp7A3ZpH5ZQNbYkSqJttSeRMqaOY/Ymq4GKCMN2D3UJJw2kMQicUEcca4q5C6A78RGENA3sWpcVQeBk3wtTJ5QpUnYYdCImu66oNhqfY7IzofBQ5Y61DLtdyrJskrlOYOjGCfnUoHyN8PzoDJPpTsNFs6agG0I2326LweWUnemUaZZRo3d3xzuk83Hzmtl4ghYZwWwLY2r6gBfiykus3wu0cTL6VeHmuYhSRGA4e+4eOjMv5lZ1iYKYpKPYt+yKi/1+n+HyoBEtc5YAAcawUOQ1Xjbf3cfz+ZwwDF1SNm0NkZcGXWphFIClAwehMSQGoyWD1SFRVDT7RWUs8fo+jBWkaeafc0XiuU8KOyhm4wlhLDG4xGlrJjh6yRUMDyou9WlMWozJTc6jrnKCNnGY0AkDlnsxeerpJWIIGoKwZGvLNSlm05LxbooxkZ+Ly3zxrvOsDvuu4g1MpruMRiMEGrxVUPCVba6+epKrZMkpfiwqANa37OLCUQc3i75G4IITJ0ShyOfuIj3tW55O9PQII+RCgO6Uc+rqr7EGq50bc5K4TL1MNdY6vk2rJBc0XkXgJlOapo3xHDhuUVlogkggPS4zkonLdv1rBv0lVteWue8fT/HTP+MqWIePXsH5sxNueto38bgnuI3w6MFDXHn5ETJPhJciRsmkIeYBHDqw3xkPSkHlL6U1Am0MpiZSdrooY9C69fGqqsqfP3ezHDp4tLmRap5UVThSZTrTaN1imY0pyXNfZQlDNs9tkudpcw601k3gX28mjX+Y57QFQdR0YhY3IYcFl01ikee5Myz071/EbNfHloFLLLpJp5kbtRlj/dqyLIlC5XhydWATxMznczqdDrkXDQmCACVpvDAmkzFCrLguZpOcu01DETbzUxuFFILc463DMETaCiEqOn0X8DlFOUllDVXZJo/dbrep7GldUlUuyayVHTHanxPRBOjGelXKjic7h66DUpYlcdD6Db397Xs9Xv6lo9b3egDHXfp97nnE1/23V/73L3OEX3L/e+b/1sf/q46PvPf3ECpFqtpUW6LCLpI6iQmRYYCQgiRu/VXSWcY8nRIrb4RtDN1ud8FLxQdLShLEvoNhBsynu4Qq8Fp/8KUv3sP6/iG5d24eDNeQ2nepwpo3ECBUSNKPqN2HjTGIqEvs71GjNXlR0Ok4U2KASAU+OGh/b8th8E8Y2wQ2wtTyXa4DrUTrq6XzjI7sNFk/VoMAACAASURBVGqBKh6yvZsSRUnjjxMHEXEcU2jD2U2XgASqhCBkecGTJDcZSnSZ+0BjI1kiUJLBwjoerayhpCGoFfdUh56WSKOZeR89EUE6mTLP76brK8CVzcnJ2fC+WrnVGKkw2rjqI2ADRSAVwlpibxAedazr1nnuVtgN0WXOfJSjU/cd4m6HT+tdinW3Fy2lhv2ry6i1HmfvOeVPsCROOpwtp6x5E+9qN2VuKwJ/HUKpsFIgjEXUiAohvVIXpAtKtY4nGTbXTlp3ry/OM22dp2Ipa35M5BTa6q5jpRFxyObuNuGyuw6Hl4asH3BKjLd/7u52gjCj7lv9ObWPy8eaf/1PL/Pqdi/g/8fY2xmb0FbFf4tf/Jcd4rv2PtwGfopP/Ive+nfvevu/6HU/8F3P2fP4lb/tVPvScuL3j3a+umDQNOgKaGOVh3ZjFhMIcLTHhyY7e/594Rj1/ZsXhVPvrBVYMK4jvSCSIKWk1BlgUbWAQSCxloZPBKCkRFfFwvuCZn2oxb/qjphsUCO1+ii+M17HXhZrRcOnkoFokpnm8zzipBbckZFCWsfFkrI9p9a21klCCJTnR7Xfs+VjLZ6/Qrcm1rXaaaXbFKzU2glrFLIJmCstMVUJ1icfSmJFRVFphHX3TKSW0FlEEnXoJHU8tYYsYmZ1MpmPKMvCiTzouoAd0kuWuJCdpfBzfzZPWVpdYzp28VrS7bC763xQJ74wvL6+TpEe58/f+mqUL1IGQUCWjhvlxF43IQg1JquatawsLWWWu26jPw8SS1lWhLXSqQChLXEo6PfcsftJjzjsUumUqHTJVTrNmVQlVeV+33QyQypBUWQseb5qWsyZTVM6XmEwSbqYSmBlQuDjTp3DIEzIsqxV2csF/X6P0q/xeV5SzHJ0kaBNvdcK0lmFlJZu7Io5WlsKCmxSi7QIhsM+YSTZ2XXnrhP22Dh9BiFyJjt+PVUWLSrOPui6ckk8ZN9wjU4cMPBx2Obu/Sz1V1heSlhb88JequLAesDysivU3v75+zl0JGB534Buz61daTrEmAFBEDXcbKVCjj9wH19ufNUkVw15UkqseUgLeXFBq6v1yrWma+iD1U4+0lqoPOwhEC6wns/nC27KrqVcJ1cWQxwqiqIim9emugHpPEPKcE8L3hiDUO1j11bXTYcGQAUWXQUEQa3opREyIc09Sfr0A8wnOd/x3O/i277PLe5v+sN3k89SnvT113P0qKumnToz5txHT1LYmf9OhizL6PXaIOaWWz7nYQACIVvFIyHaKlVdZRBCUNQQA1tDChbhjNYnDq30PKLY06avk7JwQZ4+SRKCsO2UlWVJknS82kpd4ZO+lV53hTLyPG+c68EFuWmROoWyrktyjTe9qxcepGi6bq0Rr2I+nyOVaqppQeCk52sSsVKKssjoL7UVmjhJ6PS6rr3dQAUVYRg3Xc5LjlzeVNc6nV5zDoQQIG0j6x7HsZN5bxJF19GwAkStLonx5oCqFaawbnFZ3CxroZFut27JCy8BHrUbsDa+E1m3K4VXISubStmb3/o2rBcpqYU2irwiK4smWZ1MdwnD0EnEerfyIpthtSGdZWycdaIB0+nYw1/r8yn4mw9+lKc9/UnN/ZCXBd3OEtujMYcecHP9eZc9kTvEhNGjHaQyixKKKmVSpQQzt/RMyxnKlIy33XeaFxXT2Q7lXGKll5CNM5aXV+l1ulxy6DIArrz6KqJBxZFjXgCmSOlWCV84Ybj9C04dbXPjTvIDFxC6A40EsEu6p94wUAmFziuEUJT+NcPhGpujXZTpIn2FNIqiPYFTnudU1gVbtfKRQbM07DKfpg0sJ4oSjt9/juWR2wDSbJve/n2sLK81ldayqDDCUOmsmbNx1CEIdJP8ZLkThVnfv596VGXu5ke8N8B7aDAipAvuEx+8GCwyiPz9hb+mEbM0p+d/i84kaakRkajzPaI4pigqhNQ89jHH3HmYp5zZ2uLCbos8CLs9RBVgMt/lNx3
mOxNyv3kCdJcUeRVSaR/8lDMm8zmVCmi2pVQQRPvIyNj166cVEWHcJSnceVpSobMkEDTiP4U0lMoShYqZX4eVNRTatIbd1kF3jBRNkWlsLCdDTeIDsG/65qdx9KpLef9fvZsnPuXrAfjHj3+SOJVMAk3mT/Fq1EFJTeCz0ADIKBFEqFqeWLuquRW2+aJREu9Zh6UXFXEdet1eKyURoSLRPgBDUgjRSvIbmESSbV3QtW7d2Ng+zQMPPMDi+IEX/3ve+c53Ufnr8i1XX8elVqIueRS/97cuIfml33kN+5ZWuP9LX+Bdf/VGAPbvP8LGxtmm2hxFDgqutW6g17Wabd3NBdcVjqIuUrl7ezbNef4LXsw3PuUpbG5tu+sSSobDIbNpztv+9B0A3HnnnbzkR3+I1X1uvnzkwzcjhKTXHRIon1TnOUIJhssD0tTtkUXuqAK1BUenE7O9vd3sV1B3NeBdf/xmvv7pTmrwmkdfzx++7jfBdwaSpAsY1z32o9/vNsnWIhhICIF5BHW7pmPh99XFsbjf1o9rk91mL1BuTwwbAQ2FNXVXpxYREMSx71zV9Ajtkqm6u22MwVa2EbYCGoEq91ntdw+Uan7L4r4kmv2pLVjWBtouDmpL+C5GcGep2a8qi5XSi42158hB29r31YqIi+floWtZ/Xmmql9TqzQaYq+saIyBQGCVZmfqCjVxr0teGILArYFKSObpCBlKlIf8IRXWzOh0oRPXtioFnU6IDdz6FpQxunIQNounOUwrdJVxYF+P+dQJjunJJmZ6jsArO3bCkqOPu4zKlA10L+52MEWBEXlj3qxNwaVrQ1aXPFx6eYXR7iYbm5uceCD1516CFCgCCk8NiCKJUhJra2j7gHResX/9MI+93u2Ru6MJs2mFLGRD3UjTnPl8zuqq+7xez0FUlWzVAstiiehARBy7vWF3d0ySJAhjG4XGKApJ05T+SreBwKfpBKwl9AJlKol8kb8k8Il3WaSEoUIa3Sj6SikRWUZfus9TsiTMMyIz4MrVowAs9SJ6iWDY65IWdfesQ5mVhDUtqMyZzCb0+yFdX9xcX+qzszWlY2O0N28PwhhbVWxfcAU0GUy56tKYwWCdC5tuDZqOJ1xyySVUedl08LO04F1foX59UdDi4rg4Lo6L4+K4OC6Oi+PiuDgujovjX2F81XSuqryFXZVlCQ+p9gBIqZs+shNAaGFigVTEkSJJQsLAZc79gcNRx3HrA6F9y7hut2ttfEVONjLEWLnA92khDipsW/J1Z0YI0UAOBYaqMgShoqhqgmpAUZYNhnZjY5P3vOc9fMtTbuK2zzqzzL/5s49ybL3D5MwJPnL759yHdQzdoIMRcXOcLMuYpW0VJyt2iMOINJ3vlYhegNtVRqMr50VRVx6FDBoMOjjJc4HBaE0Q+/MrBFkG/f6gEUnoN7Lu9bn0FTEBYQ0LTFMqq+j0l5vjB4Ezr6uLa6GK0FoTx3FT5bQC1qWrCAVxW1mTMtgjyV9XU5rrHnRdpcrYh1UoFztCCL3QvaT5zBpKCl4+1uw9j1o7iGVd7QvD2F37QOzhXNXfr3lv6KASNRTLwcjc3KtJrqb2rvJVwCBwleuyMkzn7th5VXlYVytAkgQSjd5T0avP1+JvyYpyD2zVGgkiQXr42/rSOlJKYqWoG2xlmXsZXcnjPblaBpHjfHmzD11Z/oaP8r0/9soFqVzNYHnAbbfdwvQP3gpAfOYLXBatEz3VeVgdn06RMqA7jDHLrnJ9aXCIcLBEf+C7jkwYdhOqck6VO8z1eOs8Z07tcuHcCTbud3C0dHya6x/1WHZrg+LDB/ncqSm3fuiPyWYjf11KdJU6iITvJJXaY/R9hVYXJQ5UoQnqLrXOmae7WJEhvUy3DAIshvGshsQqlgZ9sixj13dtOlGfMIlRgXXYbJyJ+dnz5xpoHTYgPTFmdOYEVrRy26UxYNv7NitytAia+RNFEbvjHY4nieuW42CBWI2S8R5+4UOhSVI6fxldeUgMqpn7TXdZuS53XZ0/cGA/cSchnezQ77hu+Xw+Z2lpiasffayBUF5x7Eo+/anPcXbT8atO8ABr/XXObm0iPKekOrfJtddfxf1n2u5WYUOKQEPX3wtxh8Fql6MHjnLinuPuexaWKkvpyWGzDtvCkM0z0qiGAGuwCiMsY0/4VsYQiIAUDR7ukltnvhqJmj9WYQKJ0ZplvzbfOx2TDAZcuuq6g2dPneZx11/L/fc+wNB3DJ/x1G/ir973QXQYkk29qafsIIKgblZQWSfTqykQ/t4zSiKCgFJXTZUaA0qD8tV0rS3G3+sqqC1HXIfIVBXSv8/iLDM6/n3dsMMDxZgLOucZN34NAHfedRfdQdt5Abjuhifxgb/+MKPU3XvDlWW03aUYtnucNK6ifubMKUIPQw5CCD30GDyhXgmMEY1RLNaBMgPZrqGdToc0dZV/cMIfRVEQJTE1+Pjokav50D98jPe+5/085tpHA3Dj1zya4XICXojniU+8kfl8zsaZc2SlNxYPFYP+ElpX5D52CJUkywpW+84CwGiD0RKjoOvn2XQ2aYj6T77p6wB473v+GoCDBy/x18EZzNaiVwBRFCDY24lq0BN7xCQe3m1pLTP03scLx3noc1VVNXLX9bDCYrQlCOo5XFLkuUNzLMDyF/nVtfk5UjTdLWt9V0rQ7OVSCDBgdAtZl3X36SG/RVhL6LtERVFgtXHeSdAgX/bCJT3VQ8pmf6q7dfXepLodjM4Qol27pH993c0Ft8cOep2Gcy2VZDDokRczxmM3r3d3d4jjmAtbWxw54M9xdQLK3cYnyVrLak8yne0Shy5WUUlBKDS66rPccR0LVQYE4ZjZjuuOzGyONYpOOGSeeauQKkXrCmENt3ziUwD01JRxOqZbW6p0Y86cP0Wv16Xy+/3s7EnWOkOWVruNANGRQ8c4sG9A5XngZVqxMtiHVIZPVQ7eq+IEW0ypKkPsZc8tOUZD6L1EhdToqiCfG6a77tibZyp6vT4Cy7lTruO1srJErCTpxK0B3UGfMOigK0kUedPyYUU2n1N42sPaypLvMlacPef24yXVYzgc+mvlvnuoFLrSdGuTdqEIg4QozNG6Ftm6lF4nduuI5/sKbYnjmAPrHT/rLCdPnqHXW8U7vbl4WmiWl7po71O3trKP6e6Y3HO3Bks9yqrCqgC8T2lZ5mQHXPes41FgAklRFeyMnDz8jdd9HVFoydOCa6+41s2pnRFxolAICq+fMBgMgTfy5cZXTXJ1+LCH9wSOR7AYeHe7XaIoIgjbhCtJEhCmWSi7ifMdiqKIsnQ3+Nq+deZZhlJqQXFOo1TYJBqhh5yZyjSLtDES4V2yY4+rL8sK4WEP9ag5WPWNH4YhFklazunHDtJkNCDm5Ll737XXPIEXvfjFfOofv8Af/anz/JhcuJfv+P7HsW+pR+xxrlaUZFlJjQoQKAa9PtPpgqJX0KcsM9ZXVtHSq9RI6RzUa1EIn2QsJh5R0mL8AQ8tAKxsIDLWClRQYUVI7f7uNg4adTRrnDN8UZXtIum9vxZhCLUxbq1aF4YhRr
uFuYZU1eprldGND0y9AD8SRr0eWpcN365+ndaaCprvnVcVgUyYLygLutcDSCoPMTBFtYdfJRaEA0zdCi5ShJKYtFVkqoShJn263xdjswpdtSat7lg+oNd5c66EdApvAKWuhQda57AgUA3srxa0EFiEUQ0c1SkfWRAtfLbUmk4cuY22VifUpSPQ+3th5nlvO0ZivUqlCiRKKPKsJJAtrBJEY2XkTyubZ840x1JKcebBMxzdf4zPfPMzAPjIyfsZX9jhQ3/2Wv+9IwQhBsvQW97PIihmIcobwMpYIMwKve4Kg1W3Juw/8igec+QxPOPbn8/vvPqXATh2y82s3Xk726k3mA5WOHnoEsbzESsDtyZ0AigLSLNR62VmIxQR2vNcjIdtZFnWzKvR/8femwfblt31fZ+11p7PcM+d3tiv+/UsqVsTGiwJpMgWGFAkEFhgynagQhGcpJxKJcZJ2RUnirHjcgi2C9sVh5iAHUohCilPsikQIIGMQIbW1Jp67tf9xvvucO4Z9rzWyh9r7X3Ofe91yxAnRVJvVXX1O/vus88e1l5r/X6/7zCdUVWur6wH6NPplIsXHSzh4sWL5IsZW1tbXLvmoCBf/eqXabUlSeIefqpkgDWSIu/GjYbBKECKFdG3rtsTsEt8T4mCxAd/oIgZSoMw1WrxKgVNXaPksu+LzkNHINbgzBa3uO/yL0EQYJrWQ699n2oFsW2pK983pop6sSRSUf9eKSW4cOE8e9dvEPkkzAP3P8rXv/pl3vnN7wZccNWWBWmsWPrrE8OEaLRNWz7fX9/ufa9hXu7R+InxqLHcuFHw/PUnMV4kwSwWRFYgAoXt4NjCoFND4CFycRhQNg2yatnygZSwiiaKaTAEPsllI4UloOyUAaUlFoJISgrPBd1LBZtZ2kOJDg9u8q7Zuzh//4O8yfvxTeKMOE2pmyWVXwwcBg1aCYRfMNQ0JAi0CjGdGBCSti3RZs3gvTVIYbFefdVgsT44th1P1VqUkARhRu2PtbRgwgDp96ltzbSuQCruv/ciAB/+nu/ld77wWdbbl774RUZpxE3PMZ1Xmkdf9+8wjVdJoQCDCBRVbZh5mPxG2Xj/Pa/YWJQOmmVWxu1SBFjjxyZ/rKZxBqnd9R5Mj9FWEIYhuzsukPnUJz/Nx/73X+TRRx/lzBkHH17OZzz5xSd5/RvfAMCF8/dQtw2f//znuXi/E9M5OjrC6MRB4P2rJUNJliUs5i6IjyJn2GqtJfKKjDIv2dwc8b1/5kcotl0/e8cfeT3P8gSTseNgXLl6ySUk1uZMaxxg1xrRj9fd29qu+dR1I/cJKKAwtwVbnapf19b9OgFUMKBtNOvgIikdb7AzH4/jEGG7+cxz+9YEp7pnUNctiGolgORpFy7p6PYLlEv+pdHqmjvoeUeT6uZYay3Cmysr6QSTumds7Qou3wlhJakC73XVrbccx5remypJErSOTohqdcGqGxu7c7DE0hD7MR4Mpq1IpCLZdpyZ3a1NrNXcf++ELHGBthNeOYXRftxvLcpq5Fj28+jlvZcYDkdE45pB7IK3IIWHTm1R3ed+L19YXrz0DFZPMX6tdGMxI00HvOahB+mo0rW2PP7gQ/1a4u1veTu/9Vu/xendU31iezwes7m1i6VFio47JVCyZrlc+vuSEQ0ibhwq2rozTm5RQYQk7JUOi6ImCkcYL46jW+2g5UpwfOjmp3I5Y3uScDQ7ZnPc0RyWCKvYmjh+ZuQLEaPNAVXlFfYkjHY3e85327ZsbW1RNwWnT2/6bTXWKMK19bDEIoXrs4AXR7OoYKvvi03TYGTAZLxB7PuCaTWRCogGXeEk4f77N9CmpvEdYTGvCIKAZVGhPOwwLxryuiHNvBCPiShKJ5RzeOR4WLluOHPmPCoKe5VBJTPKsibwY8QLLz6HljE7O7u8/JxTSZ0eHjIYpqRp3PPh5+IO0qdr7Q9NcPXCc1f/X/utlhU/aX1YfJZXl6j9/bTp2m+st8/yST7LJ+H7gTUl2Z/hyu/7N97zwQ+tJjgfkeR5fkKRrm4boigiXFMLatr6hIR8NwmatcVdx+VSKnQVDzp8edO5+GEtNNpCGNJ0GfdGY430hsFuv7LwQhL+HJdlRSAkUirmC19NCxR5XTmeku1ItQqtRT9Id0HEekZPSoEx+kRFTUkX2HUZIiEUeV0SxzGVX6xK5RQh14MypRRN2ayEIzolIit7Yrq1TmLZromGhFK643VBaNsgZOAy1L2UraE1biKSfrARGKfk1hF3cUbV62IcQrgMbNuusnydmEvtr6Vd+04nluGCRO25WG5bEIQUXuzDfY4xbUsctAg6wQCDtS3ZIOwnNCkFrS5Rg1UmCUAri0o7F3eJbipqW/LgIy7b84tf+m2OKsvO2C2kovGSsizRZdIv/sdtQHBaou1q8YE9RJuc6VWXFbv6/G/xO1WE/EXZG7U+WBXMb77IoHXfG4oFRDPOXxhjfXZbaMkg2SCQI+zQvx/FlGI5pak7uV7jq5dhb7vQ1BpB6GS4/UBaFAUXLlzgPe95j+vD+Zxf/8QnQMAHP/h+3+80TzzxNTY3E5RfpJRlg7TQtG6yjEKYzTv+lq8kSafIac3J6mrbligfUC+KmZPHjcZ9oF8UBWEQULdqlZAQfvHWBVtox/eREPr3o8hdwskgeiNTEUXkRdVXwOz0BroqScKI3E/0O6e2yYsZeZ7z9re5YMpgmdUFz11eEXun7TEDIurac4Q2T3Pt0iWSrW3A7ffZg+eQrSSLOz5lQBpucF5J5MhXSE+7fq2NIO6I/bpEipYvP/sUALMk4PWPPUZ7aY/Zy24OCYMAU9aEUYBPVFPXNZuDDbyYFjqJiMsGqTSXWqckpa0hFoqq9Fnx8ZBnrl3hzY+/mcC/H19/9hnuPX+arz/7dbTPfN5UmjRKUU3H3VJUrUELQ+0DmTRKKasGGYUYHygWusEYS9CugiukG2+6ZpoWqS2BlDS+QqJihao0W35BlliFKhpOjbf4+L90VZh/8Uuf4PFveoT1VpQ1k8kWInD36frNK3ztkzPqfqEKUTIgljBvZ8jSE9EbSZTEfV8py8aPa6vFv1KCtmkQUva8tqoqybJhLxDUtm78PHv2LH//7/0SAJ/57G/yLe9+O6HnyQLUZcOp3V0af++WywVVU3Phwj0UueuLg8HAGdarqK+QWhtjzMoaoapnZNkQFYS97PJksktTCoaDgM1NV5WebLhF+fTYcW+cUm6LXqsaGWOcaqMFbmNTrPOIwp7H6DauZKi7MX0lkrWq8rReeS7yyVzDgqLMGfln07QF1hqElMy9NUqSJATKiZ8Edi1o0RrjA7BkkLA7HqP0Zi/zbq0lDGJCtRpzlXJIDN0ZmzdtLwzVJzc9OkRKifDrmy5IWxePwtiVIBOgfdJDr50XgI3okzu60ShWSVz3D41AEMqQ2PPfRKQo6nxlKyMEQsRIGVD4tUSapg5JY0Nmvmoj1ZCyOqaLxGfTQyDnxeefYexVOIMoYrpXg5S89qIL4rcnKfWsIPVqyNuTDQ4P70dK2Nhw/ScMU9IoxbSayXDge
4S3wmm7NZbm4e//PobDYT/GHxwdMS0rlstjMh8QBFJhbMFw0yUWK60Js5DmxorrMxxlLIs5ILB+zZYkGVVlUN6aJJAxy1nO9vYGb3+bGweuX7lOFg140+MPMPcIDGsE2XDAwMunL/LczStxxMyjQgbDhLYxzGZ+DosT5ospcZyw4RMSh4eHKFUxyOI+0D9z6ix1kZ/g3guhqIqco6Mjf+9CwkBQzI/A3+NASVAhBwc+8I5bjK5pq5q8cnP75vYWB/tTRBGThG6camqNUEGvCGvqJSq0pEHCqc0dAFQcMV/mhIns+bCHh4ccz+acveCSO4tqwShu2b96pX9HNwZDEIKj/WOMdsm3blx7pfaHIrg6dfUMf+K/+PcALyrglflOlt8NjYlPKN6tkyaNMV4eU3N803Wc17/5QfLldS49/SyXr7mS8TNPvYQul2ifrQyFdCRO0XLv/a5Dl3lFXYCKa1oPpZGyGzw7CIAjtCml1tTeNG0DYbQig0ZBTFnmRH6xVZSao4MSpGU0doN6OtykrAxKpT2JNkkypxY09uTHKCWOUx8ouUHyfwp/kTh21bpODS0OQuI47AOEQLlKVpqmDLy6XKAsUZj0VaPheEKSZFhr+4zJYrFw5W6z8pjSXghj3fsCQJg1bw9XeKFt6hPZO9PqtYDBqaxZITGd07tZy4p1CzzrMmddZsJt8wFN2FUeDFhQa54D6/C4bp8oDJxXmZIntq/L1ltrsVL0amzd9XQSuOD+LSyePOorXrhKX2dFbrHgq3xGrCZoKSVrAlRYQIhglQI1BoUAbTC3eCgIsfJCscIJSHT3yYV+xk1c/vIa3bjt1vSoDq1bT1T2mUVbO4K9kPS1MuEy/+tVFGMsSsbYNQEUALEGYxNCEgUpTdVyxqutffh7v49P/MrHeeEZ52kVNwNGwy3EELSXM21ky97eYQ+fCILIT8LzPpgcDAZsjhSLfEno+/Dun/kBnjk4ovAVooODA27cuEZ2kBD5gKQsS4aZQOsW7QPMvCgoS0vTrkjaVos+qAWfbAgEUkLj4WhxJHnNax/ii1/6PADv+2PfxrkL91EUBRtjlx3d3Nnl0uWbHB0dszF0k2VlnIRyt4ioG00QBQhh+0BRKnceMlK0axLcRiuM9Pe8NaRhhil0P6GqKHRKnWGKaboEhEAFK1Ef3VXXW03FGnzJyyN38GvbVoRtTdLBfXSNNmAIaX1FZmt3i/e97718+tOfoRVu8fTEE5/nbW95Ozs7bvJ6lucIdIBM0145a3rzGlv33MOP/NAP8sP8awB+79JXXdayl201yDAGXfRKi8qLF8RhRFv6LKo1CKDO3Ng1EWOKl64Stw1i4rZFVhDJgECusuCy0ewXJePQQ1TyhkYGGKOohVsQ1W3FaHOL/+g/+XMAPPPcsxwcHFFbzUc/+r8B8KEPfS97w6uEOiDwEvjaNNS6IvFjfEpA2CgaW5LtuMVHLSzxOCIMAmKfEDg7mBBoi1xbgHfJsk7WPUtGbq4zLVq4+1lKQWQCoqVXeBWKQkAbhNS5G683RpLHHnwdv8w/p2tntk/zW9c+ReD7ShuFDB5/kNc8+lr+T/6VewxWoRtJuZgRDN31LKsZO/EpGn8vo9CpF2ptEaLyfcrlzqQKaFtPsg8VbV0RKrdwCwg5nl7naP8an/nspwB44xvfyNbWDtPptJd1L6qc02fP9M9uvlyyv3+De++5wPPPXXLHlhGl0GjTMBh4CFdgaBqJ6mxQaomxAaGyVJULoIXdpCk1QaKQWeuP5b7/uc84/78//h3v4uh4H7lWuRqmFmsbrCjpNUq0cGga4SFcPhgUUvY2snXGWQAAIABJREFUJ9Y6VdeyLDGdMqb0vkR+HtVtQxAH5PmCxlfi20ZjjGSed8GBRtvWQYi9p2ShQdcrJIE7liUIErQPYtI6Ye+4xVaCsV9vjLJNxoMxTbtK0AlCpAxRPomwvb1NoBRlmffHTtN05cHm71ldN4RSEXiITSm8oEIY9wks4SG8NKuw1KJRdj3QdMkmKVlb4zmofKurXpnPWkukEu+B5aCms/kcIdq+QtLakv39fZblAdNDt4g3usVUhtQvlkeZYHMS8br7ztF4OPH+4U1AU1cN26kb08fhhPGF8+xsOSuGRdNS6JgoklS+yl9pgUoT9o/2uDx1v6etcirFfr1R1zXpeMjxpWf7NV6WZZTFnDiOKI0LmMscv17rAuGIpiy4sZhhhIex6RSh50gFnVi6EAIjDNJXt4JBQGNy4jQhCN33snSbzfE2QQBbXj3PIb2aftwwtWZ/fx+VRNQeqjJblCAL5gt3n8YbO9R1Tjtr+7mv0DUXtrcZDYckHqpomhYpLNu+ong0XVA2NUEYcfHhB919qQzzWenGvc5+ptHURYNK3b2bz3Ni5dQmw9it0S9dOSIapIxGQyKPBGranPnNErvl5qIohWWr+cJzz3H6vEvw3ty7QVNV0LaMvSdZNojZGQwpajdGDDdiDDHz42Pw69+N8SZ12xJvZRxMHQSesEs237ndFbS42+62u+1uu9vutrvtbrvb7ra77W77t9D+UFSujg5mfPQfugyb1QZjW0xT9wRQB/tqMRQnPCb8H/EbOl3O3nByNPxeHn3oPF9fzPnc7/4OAIFMiZTE+MxSIyVWhBhtyL0/ViBDgkh4SW2X6WjbBmsNXTJLCEEcRc50s688WLIspW7KvspQtg6OZnxlIIoi3vvedxAmYQ/dkWEAQpEOkj77EoQSKUF6TLVSCqUc/0l1/CalUIF0/kzRoD8HIVeVGyklCosQpnf41lrR6oLSZ60OjqbgnddP+HEoV4Vabesywb6ytDKp6DPg4hXkUzsfjZNt3W3e3CZbeytxeH37rduAXjTA2DVPH3iF376dWNzdr1t/67bfuUVKd7369WrnDfTZmbWzWNufvkKGuf13u6aUcnt1xcKVaQlmrTTWca2+0XmJW/brPt7qL3en59BX79YIxzeuubL5eHyKH/j+f5986bJ5y+Wcf/JP/hmXL1/j7NmzgMvSnT17lhs3bvijlt4/Luxx/FEUUVROprzLan784x8nkIrME3jDMERbw7WrLzHyPEznVzVHCNGX8I2wPZkbvM+d0bRtfeKdMWVNGCfUnhz7yGse4fmnnuGFSy8B8NCDj/SckM2zLlP21Fe+xPmzp1l42I4794SmaSh9RnqQZbR1ebLP2BBtNI0RfVVYiBAh8l7sxIqWol4QhQnCn2deLkEYima+IoFbia50D2eQVqK1r9D5jLcQglZo1186fHwYYFXA3GffQwKkNBjb9AR6bMCLL1zh8cfexNNPu2rkbDbloUcf4oEHHuivWSqDtjUXH7oHgPf/u+/nkYdfjwhWmb4oShhmWU8+DpUTTNg9fw9bHsLx5JNfIcuGFMucd77rne45PPQQRVFweOyyh9evXuK5554jX2qs758qgqrWQNi/R0mkOHNqi6MrLwAQINiIBpwZ7XLs4W9hpFkscn7tVz/jrm0+Jx1ItrbGqMBnhENNbnO0rTnrzzMxglhB7SFrWoAIwKqEvQNHkhZSYkPJrM6Z
eJllk8SUQtM2fhwSrmratjVt69AXZnHTSR5rRVt4r51MY0RN7LtPmA6oY4mxNYmv2r7nj37HbS6XuWkIR0P++7/0d1xfTFJG4wFHyxW8xQYGhGY23cd6PmJbzinjjLk3RM+yAdPpsYNRN7764kV/jDErw+zWcWorn00/tXuG1zzyKJ/73Od49JGH/LZtNjcn7N+8xtJnoDcnY7Y2x/07u7d3Gaud59NDj9wHwDPPPEOaDCmrhtHYZaAD5biGN244eF8UBkQhCFomG25MmB7tszEasFxOKRauGpF67aXPP+Ge+/vefw+H86+SDbZW965+mjzPWebTXkRgONxAA8ul96KUfi5Wqq9iuRYxPT48wc1O4qx/Z9u2pcgbGl332+IImtr2/KoojJEqdmOUrxrPF4eEKmIwSPuxq8gbQDHIOs8gjbEtw41xL18+mx9SlwuWy0W/xlIy9FO4t4y54T3t6pbACzBNJluUZY0UAdr3jZ3tsxwfLDEeibC1teP5eU3vwaR16yDqQvUcpCh2FfE0cVWVtmmAljJfwd/DQNC2DZhoZZyuLNbq3oakNS1xpFy1J3PzxXS6z/7N62ThAUPVcR5r0s2MxB/77KldxykzMWnqjnXvudNYCYPRkNl84Z+N4dzZcxz49/jy9RtY25AXbc//SbKU6y8+haZFe155iyIKQqYL1zfG4zE3Dq84f9TGc0rzJYFN0dr0MD2pBiyrkqm3Fzg8WnDhvm2KZdlzoNSWRNTSVTDlSgAljWOs55S1tSRJJxwelxS+Uj5vao739whCi/XzzCDNqKqC0h+7qgqkFOT5ksZDRJWwbG2eJU79teklVXXExniXw5t77n7u3sP+fM6sNAxTD7kvC8fpzh3nq2kK4jgljRRf/upXANiYbBFnKVKFbJ654J7f4TGDLOs1FubTGUYqJ4LhI5brLz9PpsdURjDyVb66lexevJf9fYdS27t6k8ZabCB45rKbr47zY7a3JkyP96kDDxWsUpp6ZbO0WMwIlCFNBxjr3uPp1WMMiqZdUHr5e8kKTn2n9ociuGqagtmxw+LHcYwSAQjbK9CFoUKpmCTcOMGPCYIA1EpZJgzdSzbzJc7TF85yPM+RQcw73vMtAKTpCGlXgcEgCWlb1/mvXHE4/qe++jWiIAYhCTqjT2HQRve8Ie05PuvGwh3vRQjBuIMFVQ1VU/XEzabRvO9972O8c6afPDpEWS9MABivhBiJleKTU/m5VVnILXp7A81uwXsHqEnXDCv8K3Di7+vQPa0bF1yx4li5wOZkAHCrGfArNb1G4LXWwevWg5l1taX1dqeg4ASUrwue5No9MPbEtdwalN8xOLvlt0+oDd5hn1v/37X15yNvuR3mtp+19FGSlNwWe8EJKCGsFKHWf++Ogc+rBGiv1NbP79Z7rG4JwkzTrvbpzsEYoqSbxI/9BOsWAxfvv5d3vfOIX/iFX+CF59wit2kaTp8+zSMPPQzAiy++iEwFi8WCtnaLrSUOuRhFEXnh+DBKKYpWk3sf1DiOibOUQRxT+/eq0a33j1kJhHQDaMehc4GqxkqxxpMqiaMIjOgn+iAIeOaZ53j4UYdfT9MBX/ryV8iypFdD+/zvPcFb3vI2nn12JdxQ1zXO2sjtEwYxTdEglSTyqph17RJJQaiwfnK21r11hg76KUFKGqN7g08jnHhK0HLCxDuN4h42WxclURS7dzxbcRo62HVnxi2MQBhB2E8JDopt7CrozPOc/+MXfpGHHn24N5jM0k2ydMK/+Pgvu6/9JBxPa77l3W/lP/3zPwbA3v4Rh0c3e0NtAN1UJNmEfNb5q1V8z3d/N4fzBVeuOP6psQIhA6xU/Kkfcobrm5ublHWFt8tBKcXR4T6XX3qBwL//aRzTVCXZIME07tyzYcrG7iZXrzkvkzAM+Nmf/hk+9/QLhBsOahLrgFBpPvEr/wSAnVOneP/738/zL77Ahz/8A4CDqN64euz8y666OUtoTRTGlF60yMROlGlWLXjDG98IwMWHH6RsG4aboxWJPwqc8p7nCMVxShA5jksSek8w68SJVNgy9gvR0sLp8/fwD/7ePwDgN3/9k04NKy/Jth0x/f57L/LRj34U1ny+n/js57hwz0UeeNCpd37ta1/jqKgxa7iyYlEwt3Pm0wqh3e8ZAhaLOZveC0eKiOvXr7skRG8c5ji7Qaj6RWAUhGht+3nur/3Vv8F0PmN7d6eHAI7HY/I8p1jOePhhNwacPXuWulyg/Jg2mYy5fnmPJAx7BcPTO9scThfotupJ/Wnq+F1F7pIbZ86cYz4/4tTOJp0aj2kahCiIAsG1qy5R0nE0j2cuYH/u6ecYbBiWi4P+vsymNwmCiDhQnNpxQddskdNqw8hz75bLOcJCsVz2AaZQIfv7B2xtbSH8u11VFcLqPjE8Hg6ZTqckgzGx91cKI+mUFT30KE1G6KZB64o0c/vsbLgAYSXWBeNt59EZ+qRwbTRWSPJiD+sH93JZM61rRoNhP6IHgcAYi/VAs2WxwJAiUBznDi61KKbOQNbgDHmBg8OrpFHMMnfj8rwcEAYpgUqJoy7J1TDINkjiEaEXczC68aqZ7twXyylJmLAxjvvraRtFrBIINH1C1wTYoO7XNwHOwzNQIXXuzmky2uJtb95xys3eBN7YglYXNJUPEIRhsZjRaE1QdbSOluX8iDSZUviJJVIBL199uYfWJ+kEXVukVIR+YrZVwyTJKKt8JTKFYHtzi9ZDVo/nM+7Z2kXFST+eXrt2DZlYrKmw3mOtqCpa2xLGHSQ+oKqOqMp2xbm2LUjnqyjpYKNQ1yvRkmE6pGxa9g9ucPW6F4ZQlsVxTpKlVJ4HZ5RhvpizseHOszQlBIJIJBReBdcow6Jc9vBJITWDjXtojSAYuGd19eh5bAxmGVNf84lMY9gcn2bo+6uQhrK2XD26ROo5XpcPr1Lv5WgEl/YdF1QQUDdVZ+dGKEPqsqKqil4BdfP8FrPjHKuXXD9039O15dL0Jm3HuZYNKhJIEZIv3HMfhDFSSy6ce5DZ1PVZQ0UQBgSd/9jWJihHMUo8pLKuG9oG4mzEhnLzRRS48fCV2h+K4Gp7Z5vv+f7vBiCKYgIVoVTYd6bV4laeCKbWyZZCCIRy25feHGznzGm+9Nu/y4X7HuW9r3Mke2deG/bkzjiyLBY5u7u7fOY33bG+8MQXyLZSkFEvEKAbkDI8Qc671fhPCEcKNa1mPvekaJ/NW6mjVFStpmwKyqbrrBJlXZDXZa61dovV3GeSuky3EPLEIvdOAcDJAMH4BbM4USVynJq2378LkE4Y+KngRPVjdQ53Dn5erQkh+qqMRPRiELdWj+5UObpThelO13pimzgZ/KwHgLe2LgCynKyc6TscvwtkmqY50ffWvyeEU1iyxtC+WmC4Jp8N9Mpw3+h+KlaVQn+A22IyI+jlxm9tt57LOn9MndzRHcs/knVZeTiZDFh/LzrSchBKjG1Rnjh9ONsniBLKosX4bFqaxCznc776Zcd3KApn+O1knLtsmjdDrhsaz/9pgGGa9bylomhpTIPQq6CzKAoGwyE
gex6d46A5XhtA2TROAl+tFooaS9u0JHFG7VUUWwn3PHCRh1/7OsBNjPecO09bl1x+0QWKs9miVy9t/AJhOEjdIO2zqtq2SBEzny3cZAwMhgPSNKYqmxNqpLDGaQuUH28E2pPfbaud+IGSVHU3obXYQlP5Y4dRRDOrfXG5L0kilSJQUZ/AstYiEX1Q1no+xCgb9MqqSRgRZxlvetM38fLLjvuSDRN++zO/R5Ks4c9ly8UHHqUs3LUcTG8yTicsj1cVkjDIyIuG2Jtlz+dz/vknPkGqFIXnfW6NMg6nB3zgu76boedYPf3006gwAC9igIiJ04T7HnyAOPXBmw1pW4MULUJ1z7ll1loGO+cBePDCfbzjXS/wwlM/w8jPKXmdcTQt2dpxxObHHn8j2WiH3/iNj3F45O7B448/zuHhPj/2F/5LYv9i/MTf+tvkyyVhx9krl9C0bD/wAO/9k3/SbWs0SoYUi4LYq7Xm+YI4WAmbFOUCs7AEKuPIV2iHI6dmOop22Z+6Z6pNQxYvee+73w7A9Utf5OH77+XmzQPSxC2Sblz5PG990zme5av9PX/4QsQf/4738fzTrkJTLGaEgWW8sd3vc3ZDQzsln++T+QC6bgx1rXs13Xw5P6EG27VuXOwM1+uqoW4aPvCB7wIcR/Dg4IAsyxj4ZxXHIcZEnD69S+YXYZubzuy9EwxIwwGjcUpVz9HG7bO9vct8cUQQKRYzP9ciODzcJ/bE+KYpiJOAMFIMfAC0tZlhbEOeFwhf2ZseH/Z9BCAJLrA1Wfake4DRcAJIBoPTffVlPNgEBa03uD59+ix1XTMcGjrGhTWwcWFCUVRIb4q6OTlDXZf9/FHXNRvjLbQVKB/oFgtDEEX9u3e0f0wURSRJ5MxZAd0EJImrrvQ8Jb0gTVOWCxcQKeU4WmEUkXt1NGtaxqMBxjR9orSqnTpq7rmN6SBGG2dc3qnb1a2mKZ0lSOITBHVjkOEQFXrkgoXpbJ8gCPqxLI4HXNtzvN3UC10Y4+aF2o/fxhjGo22iRcLxsbfgmB0zmWySxMP+nsdJiNXZSpRJRQxHE7QR/bay0FhbU5uqNxYPohBjNWHsOJC6tWzt7Lrxsu7U7CTyVEOeL7ATnyixGq0bjA/mpNIsl3OSKFrxxYSTsh9NRP+8WlMzrwzHh24sC8MIa2OqvO7vS0jIfHlIFAdYvxazrWSYZr0hcr7QPHL6Pr4WXO/H5rIs3PQvA0o/7keBIJABuhviRYBpDJENeOwBZ3Hw7Ne/yOk0xrSS7c6c/uCI+zcnlEuPrAgHCCXJ85zdbYc8qNEIIdE+SWqs438bVROnfowoJ0TxMXWpEBteyKxaoJtjKv+MrW6ZjEYkyZilR4nFcczGxpC2bamqZd8XBAbT+D4mlmRZxHg07IFqs4MjpJSUi30mY4eCMcrStjNKX3HLojESiW4blK++itQSxxltIxh7IZskDVFKsLfnkitSBtAqmsb0EvlRMAJtGI/Tfj2jm1Wy8E7tG3KuhBD/ixBiTwjx5bVtHxFCXBFCfMH/9/61v/1FIcSzQoinhBDf/o2Of7fdbXfb3Xa33W132912t91td9vd9v+H9m9Sufo54O8C/+iW7X/LWvs/rG8QQrwO+AHgMeAc8KtCiEesPWEEcVtL0yGvee23dMfAYk7Az1yGzGAJ7wDr8pjTNc7O1sSrAKkIU7ZkowmhV5yzUmCQeMgwy7bBWEmR16jQYyhlgAwcT6EzO1v3eOhaJwu+/tlaixWQpV05sSWQwcqDyVrqunbZmHZdTtRibYvy5XwhLdaykkqWwknBCtFXFGDFiRJ2BXvTZt0cVKLoqiFrUKgT18FtsEApJa02GGn76lVXRXolGNwrtf6Ya5/vBK1br/Ct389bj3NrZaf/vKYodKeK151gekII1vOv69wiB690VaHuGNrzxNaztneqJOnuHNbP85ZzF6b7fV/pEV2JiNvaN6pm3Vo9U51sI6/8fGynaKh1X/l6Vb7YLX/rKre3fq/zXBLCKWJ1ePxmrrnv/gf40f/wh9i74SA5n/7k71JVFZubjviwt9dgraUoltieT6kJhMRajVqTPa7KfOWvglO4PLW9wwc+8AEATp06xU/8xE84yN0tJpudolEShn0GW6+9Q1GaUJmW2g9d4+1N3vmeb+nVNJ/8wpPs7e0hsVy411VDtrZ2qOv2REX98OgArOV45rPgArZ3NnjsjRf7TOvNvX1efvlKr3oGDkao7QpyrLV2ZtXCYDo/tShy/ly2YTD23h6t4IFHHuH8PS7r+MQTT/DY6x/nm77pm7j0/Av+9w546aWXaI3m+nUnd2+wJFm25lvmIELrFdrp8T5vfuOjDFLFhfMuWzifzzl7ftJXGZ51N5Kf56f5eX76tj7UteOjm7dty6nJT3x2Gc9/yj/mn/KPX/FYf+D2bcDPwE0O+01LVlCwX+cyv86/gJ+EJ72a3pP8K/hZ+Ot8ZHWcv+r+t86yAdjneT7Cf/Zv/7xvbR+Bp9k/sek3+PJtu33sFz/Fx/jUqx7q1GbLSy/tIVUNyl+RFdR508PKjOkMqG0vO954v8FABCw9z6Rt4X3f9u288c1vAeCll15iNBr5Sq5XdoycLcGZM2f6vueqHg5RAhAnEcNhxtHhIdtbTp0sDAJe85pHuXTpEoe+SjPDZZ3TgZvrp7Mjzpw9zSKv+spHEKUIGSID0fMgq8pdVzZwcJ+vfu0F3nvh1AmpZeX5TkXe9BWLNFOUyxrtq+BNXSCkPWGFEscxTd2wWBSMx26Mq8qW+Tyn9siVMHQUhzBO+6pGUdSIqmY0iPt7Mp/PWSx0rzZnW0VdtSeU1hzMctnbl1Sl7mXkO86VFAFSBFT1yp+y1TVZFmN9taAxAqVSYqXIPFQR4RSZx5MYdKe6J7G66XlE8/kSqQxVU9J5J9blnDgaIIWDvIFDiVy+fI3h0Pt6WktZF9RVy8xzVkfjAVWrOJhe68egsnSV3/4ZpCnmmnD8NF+ZH4xHbq4QLYF0zzRJMtI0RreeD1iL3lcp8tWX+bICo8myjEB6SXyboII1ZIdVjDbvddLvdaeUafu5I+mWg8rQ1hWeHYKQjvdelEsiv64Nk5KgLNBtiRduZrC9SdM0FJ6DFUio6oCrV47oVlBZNuBoUQOm7wtGN46i4p9nUzZgQFrF4XV3LKUjtjc2OZ6WKK+WjW3RdU0Sd8qYAhUq4tGAbtm+EY4pyyXCWz/MFiXDgUQEMR210MqAgxsB2XAHYxb+WTUoVhYOKgiYTedoUTutAhwcfblwfbiDjAaBoaxyku7ahGGZz1FhTF113qkZQgoabbjh4YST0SZFXTIYuerkIN2krktEW/YVL6Mt9bJBUxL7Nfpyf0lRFL1q62icsqwWBGHUKwNOJltYJbm2t9fHJaH6vwkLtNb+phDi4jfaz7fvBn7BWlsBLwghngXeDvz2q31JG8PSk8ScKa9dwXe6Ew0CrC5W2+zJBaAEEMbfcP/iRc6XyQqD8rh20YAIZAfTRhGjJARRTJZ6TLmUIFrqciUw0C2YTC/Tbfp9uwHRGFcWz9KVt0
cQOGf3TrYzkNLBnFTUu4WvfI3sWjAVIIUg8d4m3eJ5fVG/euFF70Dt+EereyuEwAqw+mSgcrvAg7zt2Er+mwVSbqJdBUm3BjfSP6tatyf2ubVJi5NnFycX7Le29e2BkGBtXw4/ca4d56gLCm/9+x04U+ueHev79cGntUghMYITQhCvBB/sg65X+31/nmYtcODWIGcdfnoHcY5bITruK3cOlO4UEL3Sd1/pc7dtPRDtuW/WBzK2wFKvDKaDgKIouPe+h3nXu94FwOy44dd+7dd68qzzXOuMI917dfbcOY72D0CKFZfBQ3C7ffKi4LE3vJ6//F9/pOfsTCYTvvU7v53P/vZnen5DkkSc2tnlOS/pbK0lVgEaS2fsZX0CZDAYoTxMZjld8IXffYJnnnPE2IcefYStU7u8/g2P9QFXFg1ZzPPeFgLgh3/4h9k5vdMnRMqy5N57H+DixYs97HEwGPDf/OWP8OxTT5P6AT9fOLuITv5eCEGoHIm583w6Ol5w/vx5/tJ/+xf78UW3lq3dUz006zv/xIeJIufb89jrHP8njmMnIb+xwU/91E8B8Mlf/1UkKyEc0wYk4ZiAlNaPeVUJO9v3cOXyDT796d8CYDQa8+73vKO/ljAeEIcJf+Mn/novmNPqgFY4ft5/9Z87mfO//w9+nunBPn/37/xtAK5eedmNr4QYL397+swpvu07vpOj6RzpYdxBFGJMS7vwAbspmUxG7B/sIToeRghRFHDj+nXe9w7nS/biS8+h6yVveasTIRGmYbacUtAgvKDFmVOnybJhL8AwGg9YLo85f88ZEi+ccuXyZbaGWxSmBu9hc7xcMhyM2Ry5RdqymhEHFqRlPJr456eoGnMiYM7LJcsyJ/XmleONFK0b6kpQ5d7HT0EQSWcz4q0T4zgkL5f9c5EokiilbnRv4RsnAabV/Okf+ghd+5m/9xe8v9lqkVbpHGuC3nKjmrS8ePkKrRFIz/dtyiVNo9dks1fzVh+0BA4uZo3h3D0X3fWMtzBWslh6ifVyyZlTu1RV0Y91nQCGECuz4bKsCcPVAtp5NCqwAcJbEwgpCGRClo5oxp0w0wylYuQacbQqa6SMmM3dOQQqIorHLItZDzvKvUz7+QuOUP/CpZd4d3OOjTW4ZFsZbABV3vTwxdnh1M3jHuKMMQSBpFisuC+mce9uGmf9ghlhyRfzXrxqWSy8gTy9zUpd14xGI+YzD5vTkjQMaVqN8CT7OI1oyhIlA5SHlt68sUc6SJF0Y26IFAFC1GRpx9+WNFVLEg57c+WiKmnrkM75szOJDoOExdz7R2UxwoTMjkrCyI83dcVgkJIk3cJ4RBw5kYZuzpjlR+RLw3iUcXTszXHTiGyUUbWrdZ8Uktoe45GtqEhQtgu0XGC6eUa5/5TycMZmipSSmprKBzvzfc1wmCHskLJwiZPBMEIfVmgfSCtCJJClY1rVJQ1a0jCgulqxMXKQMdM6sY8eCh9ltI2hqhrOnHHy3lp7fk6S9F5UsRwSGNvz1Q0SKwKG6cZq/o0sm8MYhEZ7S4OqgiRTDCbuOMu8Jt7Y4MbeDOkFQuq6xtSW1poegh8FCWES9lYa2hqCsKExAaNNZ3zfasHSVNg4I/EJCDVomM0WhL4Pu1Rzi2mr3mJoKVvyfMHmpktsnD1zjqODPdJgxHgQ+D58nUFUE+oFZeOe8e5gjLGS2gfe2Iq6qbFNCz64K5YtYTRisrEDQcfju0GSpZQ+cjMIWh0zK6oeonq0f4OiaAjChHnhEoRNFVE3iu1td5518SKtzkniqE/wZllGnIQURUHiI9owDNGm7RMSx7bENIY0CTjydhc3p9cIgpiyXK4Zp/8/53P154QQPwj8HvDnrbVHwHngd9b2uey3vWqT0k2IAFiLChVSRP1itG1brJF9lO52O6nY5o8ECKzH2dcYGl2TjBLovG/qxmHzbRc5S5q2RNikf4HatkWJCCODnmDuojnDOrtFeZ5G5wmktXaeM2Z94a6oqxoVrI5d5UviSPRqKHGsPJdC094iQtAFc0IIhLEIVoFMd4MkAr8euC1o0muLM9lfX806n6M//toi2xhzW5XHWuuCilsqQuuu6uACEH9/KYWrAAAgAElEQVTH3LGEAAuhXAWAwgcxt1emBKLjx/AKgcEtnCEhBMEdjiXkyYX/+jm/Uru1crXiqZ2sir1SVW39nLrjrf/9VStxovPdWr/g9X/6zKMVdwymbmt+sdkd4tZqrxBdoPuNjyUEfea63yZtr9rUFe+kAm08B0IIAhH1E45GEAQReVnxwssuAHrnu97Fpz71qd5zzfnnOHXQLijb29tjYzQhjIMe057nObpdI/kawwc/+EGu7d/k2Ku2tQI+9OHv40Mf/j4688g4cJ5v1664bNeP/diPMZ0eIaViPHZZToUgDELaqu5V9+bTY47mBxweusl6erDDm97yVv7lx3+pV5L6wLd+O9PplLYxPTH9Pe/9Y6gwYOaz8CoMKPKaL37la/25b4xG/Ad/9kf4az/+V7h5wykrpUnkzLS9EIawzthzMBhR+EBq+/Q5/vJf+e/IRkPKm64SNBqPOD7KuX7VVcoGgwGzo0Ostdz0uH6sJQpCbh5N+dM/6PwFP/TdH2Q0zPipn3LBzpee/BpCKIpygfQJnnx5zLPPPMXW9pAwrPzvtVRFzleefBKAe+8b8eD9D6EQvbBIbeZYE/f8OIDPfvaXkKLlW7/9cXeeyeMEgeLgxh4q8Fn4yYjj4yfZykLOnXXTyI29Fzl1docwdsHjYtGyuytYLhS7u6fcc2+cEImQuwy8ktTZe+8nwCCCblFYk2ykRJunGBl3j4/LFiEbfOKTpjkiHimu7F0lCb0iXZywN52RDMDbpLAZJSijoOnQAjFSaZo25OBwxSlBKLRpsLpLzAmUjVhWXhmwUZR57YJEH+yEYUh+XNMYTRK5Oey4UmijejQGumVZLhFGkMUuOC/yuk9EdG2wMWGxWPSCKMfzY1pVE6gBlecNUQqeeeF5kIraB50SiVKGyidAw8BXcYqyF+ewQlIWOX/0W7+NsV+YvvDiFRZ53fNOARcQGttznpuqJgpClJBrJsIVMlD9u94nMgPFYuHu1alTp1guCna3T1F5L6YkyimqmiTI/OeUoqhQUiLofOZKhEzJl9O+ihGGHarEj1OlYDQ4xf7e51Y3r62p64bJcIulF3gQpmBjlPXfm06PCLOMU9vDfq1SVZWv8Fdk6YqjMQ6DVfC4MfZB6kr8Y7S5iTGCqvT8nNQld9s2JvECOuONjMU8p9FunQEwTEdYaXu1QtP4qr1MqD0/B9OSRgll3fbVCSFb8mJJGHTqxJrFYk4URWT+HZLGGRQLYzBN5+NlmE5nRJ74PxgMWSwWiFBT+0W2NhatXdWmM3hu25aABOG5MFEQUZslG6NzvY+XtY1fi417NIRKGpQY9lwqTU3TNJzaPNvfTysFYaQwTcnWhrtXi+WRU2n0Jr/H0ymT8QaL4yWqdfvEAZiqYZQlVLlTvD2eLymqsk8iNGFA6D28vvTM7wFw7syuV92rqDyXyMaCJExov
MjN4cGCsmgYDiYMfeJrYzRm0Ryws7NF4TmySbqNDEKkf7dNG1JdO+TG3lHvBVnXrkq1zpFVYUTdNGhf2UkHCVBRFJANXHA1W1iixBI1Ca1fGxjVsDkIaH0iLFSWxXJOlqwUIbUoCYKG2leunr58g3yxT11eJ4ldJJwkGbujTXZ3zvaJvsVigZQB0cip8pVlwQNveJCrVy7TGX9HWUitNVWpib2H1fapjEgpirYTV2mo6hYVC4Sfw5atItkaYKyCyB3/5s0blK3l4NApD4/SIUEyRgjIJm6cms1mlKVga+K89QBqXDI39++aFBGtXbr+5BP92rpAPAhXXPZQvXr49AcNrv5H4Mdxq7QfB34S+OHfzwGEED8K/CjAZHOb0MuL0pdgbV86lNISBYZ6zdzVrRNlv9BeX6iG3mwtVhFNXqKMJLCuY0zSDcrWEvmOYgJQQYQKRJ/t7ciKUiR0PymD1kl8+5e6rRtftTL9wBaoyE1KddUvkFtvTtqXEsOQw8NDr7jjgx3Zyc2b3hwPXKlesDIyXMHy/Dmtwfm64A3bnlz4r1VO+ntl3T3tzvtOTQrJuihE9/uBECsJdrpgQ/fKTt229Weyvu3Wf68Hf65CtIIvrl/z6rdur+hwSwB2G7zQRbu3BUSvJJCxHkh1wdZtFgDf4N99sCo6c16BNXcWBFnHAd6pqtRJTHe/oPyxXy04Xt//1utbr9i9UhVx/TyFECcqtN32IFiDu3q4nZMjdhn8tjEIBFHUDVoWMERRQlG4CfTChQt88Lu+i4997GMA7O7uUuQ5jdXU7arSOZ0dUzZlP5mAE1johGP++Hd+B29885t4+oVLpH4SMtZy4+AAKWWfKOiUMTc33Dl+5K/9OMVswY1rV/hf/+HPu/srJUpKsE4+G2Dv6IA3vOF1HOw76dksy7jy/PMc39jnjIcqDQYDvvSlL1HXNQ88/JB/BpLnXniJNHO/5x2D2Tp1uhfVEAjiOORv/uTf4R/93M8C8Ku/8glUZHsIoLASQcB0WqB8tvlP/eCP8uLlI67ceLqH10hx5FRTfWb36HiBlMYFwmJV9TsqZoSh4tqem4hCJbiQnuXxt77Z3duNA06f2WWxmDPxGfwwyIiTgNFGwLd+4DtdHxA7LJZHPP4WJ64wHuwQxyn7s9+h8oajW1tnMNVVjqbH/bN77WNDtNXE0abvS5K6KHn0tffSwQoaXSODiCjK+uruufsmIJq+6n/6XIKua7Jss4dit9YQBBFNa5mWL7rz2txENLJXX9wZ7aItLEwDhTvP/Pg6w8EGykvsjuIBra44PY477R9AocWMykhk5RaiaSwJIo1k5vtlgGksWdLQQXmKpkGbBiUkmRfxqEsHy6oqL4RRLYhChzKYzt29OnVqF6UEsYE4c1Uwqyyz5ZyJXyg2lWJZgoxVb7w5qFaCMF2bLeZEUbAiZFs3lgyyhKWvCI1GI/b3D2k1hD6g1E1FEKxMr9vGrKnkevjrwTFve9vbSJMBv/GbDkK5Md5hON5iPnOL7OEo5ejoiI2NDYqle283Njcd5DVS/cJbCEFVlH0WvkVgbcHGxoDSK8IZHZANnIDAZMMF1fv7N4lUgPWLsiiNyPMSLWqGHp8VJ5Yin5FlI5bH7nrKpRtTrF9vlDkcHl5hkKykltM4I1SCJE77+y5UjW6doTvA7v0PMBpuOGicTyTWQe2r8YI48qqQoU/k+ve/rmuEECi1NvdIATboPxfl3FWbB1v9+FstWpYzzWAw7OXgq7am1suVQIJ2lcGq1n0grLVmNjtGhUkfzIWJs0JouqSztEgM8+N5319UIMBYiqJAG3esLEuIooC6cUHvYm+KDAOCVlL7+6J1TRBEzKZ7fXWybVsEqodCL5YFVVsxF8fotkMLhRgDUdT06IDhYAMlDvtAUSmBsJKDpkJ0iqxhyOyoptZNb88SBJLFYsFw0CUcBHvHRwgd9VYTaRZTVYVX3fNiakmCqTVB7CuK0zmlbVAqJLLue9evXidOArIsI/e2BuVecUJoBBmwyJccFwf9WiIQEhmkfOWpryN8JScJh66y6/fZnJzi2o3naRuzssTAIj1dIQw6GfKWKIpIow4m3xAklqee+3pvTSBsQL6YoWSEtl5oQwXUVeGEkQARBAxGE1A4RUsgYZtRJnshpfFoF90CokF4IZOqELRYruUG/6gI5ARdVxTHPikTxlz5/AvEKA6nLkm5fXqEClryRUHmg6udyQRky8hXzicTgZAODdJ6KO/k9D0siyV5vQDj5qdTZyccHC+4uOUtVcKW5aJGiTGNF6vJBltoU7F/7RoTjzQQQiBVgM38+6hrGty4VHvLprbVKKEI19Bctnn1pPQfyETYWnvDWqutS3n/zzjoH8AV4MLarvf4bXc6xk9ba99qrX3rwC8M7ra77W672+62u+1uu9vutrvtbrvb/r/a/kCVKyHEWWvtNf/xe6Bnz/4z4KNCiL+JE7R4GPjX3+h41lqWvrTfQbHWSeFKBVjhpINVh2X2sLU+m84q217VLiu2NT4LQUIcZR4KB63QSGsxdERxg/Xl+C5LFIQjkDlluQBf8VIqcpKcnjdkhUEFijCMqbx3gjEG3eZYo1A9DNtgrUB1FQxaDvb3kbgsfnfu4DJDq+KDRLGCEThIQOsT32v3QEnPeerlIu5Qjejwzx5WJgM6EY31Z+D+30HyLILb5XaNMZhmBXVTSiGQfYHMPQPpBUBWFaeOS9e1tm17TG933liFEE7+tLtm7Eku2HqlBFwhwKJ9ActXWqTF2Lb3eRLWYIX02PNbqzv6xLG1bnsIyXrFbCUQ4ouArKRmgyCgbW+XZhdCoPvqqwDZwQnXoIJKrvhVpiNz67WCo6saNUb3+xlhQNHLhKJXz8L0PkkrX6r1/rJezwqV4/m1axXQ7jha6z5TZkzrTHa9D1QHVbXSov39k0GAvMVoOI7csxO95LzEWkPbVgyGLuN27eCQD3zPhzg4dNm13/jVX3dwwiAg8jCoqs4RYpM4LFC+kiuEYLmc8p53vxeA//jP/iAvvniZOIqwa/1aKcm6fYEUgjBJWHjoQroxYnNnk8fe/EZ++Vd/A4Ab119G0WB1SOTxyvPjBU8/9SJ/5Jv/iPs8P+bGjWu85g0P8453vAOA3/n0Z1ksp2gs7/pmx/UpKqjrgLx0Y1LT1FQIqrzqDXTL/JgiX3C8P+09gu65eJG6vNlDo8JgiJSQphbpB5df+LmfYDTOOH1uROj9cBAB586dI/X8ChV4ARsjCPz3JlspStbMj0vOnna8gcEwYrp/iXe8xUG63vaO70K3AcN00kvG54uFg5oJw76HXkRJzs7WKaxw6cqqnBOEMcfzOYmHqM1nV6jLGWm2qgQsFnMntz/3UCUbMEg3qCpL7g00pY2YbA7RhSZQbiy0tsDaiDj2HLai4fyZi9RNyXLpK0dJhjWSKEvZ2nCy6svccTNE6449L2YEMiRVimTbZUwf3nyIum5QPgPeGs1oOCFJEmbej2tzcxtltp2QiMf/z+dzrh0c9rCdSEOelwyyEXHiUQ2hpJjPiOOE0nNDiqpg
I9nA+AJTGEZsjAekUczuxD2H0cYEMFy+/jKzuXtHlMyIZMhy4frUaDSg1S1ogap9NcsWyOBkRXowSNEtNLW7lioHGVhy9vHTIxFnmc9qpLTowO/XOj/iOPIE8MIZestAsX/ooDWPPf4G7r34CJ/4xC/x4INO+vna1eskgcV4mBcm5Pj4mNOnTzP3fb9tW8qyJImzvooiUH2FA6A42CeMA+qyII1dP6+qgjiICaVCJt38MGA4bFjOvAiEVaRhRZm3JLEXnSg1oZaoOGbhSWyDoa/IFa56MFvs8fWvzPjmd17szyENA4p8wcZg0g+h08MCrW1vvyBtyLPPPEOSplQeDjbIhlStg/wVi67i3zBfznrj1izLyP8v9t4k1r4kz+/6RMSZh3vvG///f2ZWVmV1dbfb3UUP7kaYhbHALJCwkFnBFmEww8o7Vl71BiFZmB2WNxaoJQsQEkhgJttYbYPb2O7uclcPNWXlf3z/N9zpzHEiWEScuO9lFjayG6mRMjb/fDffu/fcOHEifsN3aI/UZRU60LNO0POB0XsLDXpCipimuw/EfyUNUZGw6/ZYD7OamBi1IV88pqKIPC9IlA1nnYgNmB1C2HCG9YeONM0YlX+OW0lRKeSUOb4bYOYOo10HIvb7fNuM3N/tQ7e5KAqmVoOxePoY2sDESFlWAcI5zxNREvH23WcAVFVNhGIYR5JigSE37PcDWVaSeE5Xp7eM/YDyuMB5kCRxQSw0feeef2M043RATycBtK7rOByP1PXi1RYzDZo4TtGzl58vYrruQJrUxNJ3J4mYdM+i92BMTFEUCAnSN6VSkTFry8NDF/aAs+uCvh947yHjSkGcxcRxHGIspRRCJghlQfquiW2Iyzqcx5NoGGfluo/9Ikef0bQdUZQg/SRfbi6ZppnJrylrEso0ZpoO/K2/4zwI+/GBPN0QJ4o08Z3cNKXv+9ABK4uCOEpJ0zSsDSULoligohNyJVIJVscI75clcHoCKMPF9dr/nsXKU+w0jgOxUsyj4OJjZzpvrUXPI1ltaFu3Nl7uR7p+ZPLoBK01WZbQt2OI49vmO+wP91xcnDMNrgsmmRHShnO1bRwd5+HhN+kHt1c+e/4VBDNZljD6OFapmLnv2Zy5c6CQKWWaOGEVfz+3uwO90cxjzLZ1c3x2cRKg+lHjH5tcCSF+BfjjwKUQ4iXw54A/LoT4Odw28wPg3/UT9Q+FEH8F+C0ckOI/sP8YpUCAsir4pX/WwVG6rqPve2cG6GENXdfR9a0z3+NkhKmU8gEiHi5kwDozRnAH6jBuyYsoYGbHYSbJFPPsdalEgjUzQqlAMJ3nyZk7SoHxrfvFdDMKZsAGqWLsfCKBT7NTNZuMwfqHZZ6t24w85KFtW7r+i0Q4J6JhgvKQtRopoi/A0R6LWiyvPVavOwW3y7/Oc+NpTvGUD/Q0ETt9jpDmxKnxr9nPBdDzPJ+gb5wSEYtTLQyvz3Pg7AhpQbgkTATxCkAsSaA5vTcyzK+7Zgd7lOqpqqBLIAlzIATh2oWIWDhmXzDffSJKYZ8k9Uti5z5v+Z0Ftjoj1eK1Y0HoR0mEAl8kkOSP5hNnGB04c14Vc/k7tfhnKRKfyGjtvm+iknBdeu7ceveYB6MMWk+oKHnEkxMsGoCLT5UxLnBZvvHs3y8W8glZ/THXDFwyniRJKBgs0NX/6N//M/y+jX/96Y/DF35hUXHbPXn1f/fqZ8u//1TjT57+80dRVV9x4L/hu194/Vs/QpntL/IX/H/9hS/8v3+y0fzIV9/T8D0+r7z3679Pn/n/3fjxT/4Q49RzCPC353TtgFSGeu322Crf0HYH5tmSxAuUTZEkWeAW2RqnLJWlKOVgc3Gc0LVemWx2h6rQhiyTDMb9Xb0qEXIkUekJ0mQVSZphlyKJkTw0B3bvXgbu3XZoA3m9e/DeYlYzK8PRk/Pn7uA4X23LMHrhjalzarOJZXt/WsMP+t7xBoBp7HnY7RBCcX3heATNwwPjNFGuzilx8Jf3t28xOg5Qt+32lrJcM3SCSXt/Mzsye3jcMuQ4c7m+YNI+UI23kK/ByvC8N+2Ww/6OoqgZjp5zJSJkpOmD6JTjPk7TxC94k+SPPv46f/Wv/g9Y68QEAEY9UlRlUAEc+omz9Tl2PhV89OgEb/Q8BvW8RXBlSbCqVY3WI0VRBK8tbSRVXqHn03lhzcxmcx58p6y1JEnCNGp2OzfnVVXRHHaUSR2En8z4NAz6hV/4Ov/Jf/zn+am/9OfCa6OeQUXcP+xIvE/ZoT+yWm3Qxpudt4I0XYO1ZD6p7vojs4Y4KkF4nhIzm/UKK1wwNwwddXnNMAy8eeuAPtdXzxHKEi3CFEowm4mkSAJEbt91FPmGSAlaX+BNIkWc5QGqKOeJ/dij0iTMkx56jJi4v7/n7MzBcl2MPjN6IZU0XiGmmUgojFl4Us74fBw0s//ORZm714LKaAyxU0w8+mAZGVHXNQ+7lmqRxRMxwzQifNFkfxxhdLx67Z+Z/XGkaWZ2D9sQv7VdwzDO4Vwty4Jpeknf96w3LjF9/vyaadgwzS2bzaLolrHZXAdfrbquT+JVfiGcrWu22y1tM5DEPsmdLIUoMYvf4ThT1IU3eHb3ZrVaeSioDOuzH0YsNhQRAaq6RikVYJbGGGSkaFpN6ikpds64v+2o68ULr+ftq5Y4kRy9IMr5xQv2exDSBB6msBPzNIT3rjbOI+14HHj50hmnl+uOh91nCLJHcZf13MzFl83x3q21obC3WV0gRYqS7nksigKpDJg5wCeNgSIpHeTXF4/m2UFIK6/CWVUrQBKnMtxPt64046hPP88Wa2Mmb6w86ZnjXYfWc+BqqvSKTXLpfCp9sUEq0NNAXLu47HKtyPKYqw+/GgqEU9MxTT1JmnH/4OYzjl1ip30yF8cKHSuUKEIR4dkmZdADt/c7XhRuTc3tPzq1+X+jFvhv/oiX/9I/4vd/Gfjlf9z7Ph5t0/Lb3/pdwN24PM9ZFRuuz13VMY6dWouxY7gpx66l7/uQ7U59R9eNTOOI8DdcJTHHw8zNuwc+ufJGY7pFSad6AyCimWFwvKFFicRYjZSSOI/RnqQ8jQ5jrv3nCxwxPcuygG9WsfTJxakiNM+zVz46JQz7/f6Jwt4i0vB5TpAUArFwqbwOlFNSfMpZcupyS9fk6dwK+bkXOHUePj9cx2W5JsM8T0/U85a8RJ5yHc9JenTN4fNO3bPwPZecQuC7WIbHSaBEYrFBLVAId+g/TvAW0YtgbiyXz3mMcF2k/E+ft7xf2ETsIn//lLdkHxnvCgkqOiXz7vu535umGeVJp1LiuUanx2kROTl1f5ZE9tTRkzJyXTf/N9bMxLnrrmnfIY2i5bpPXcUsSQN3CDxHKEmecKyEEMQ2hUcY4ZkZKR8pCxqLsKBt/0gYQgee2bLW48iJEcxywcIrfvnP/2cuwPTcqVglrgqlNTI+caWmST8h1ndd54oWj+YDG3PY+ipulvF7v/ttfu3//FUeHpwM6tD3TLrh4w8/CQWXu7uOn/+Fb/K97zmloA++8oJnH5b88Hde8uy5C0z
j2EmXl2VOXi6CGY77cueFKbI8JkudYflm4w6Bw/7OB0CXvN8uwc45eZ4Gkv9v/Pq3KYqKPI+pai/rnkc8e3bN2Xkd5IT7vnVY+PTU9avrmvXq/JGQyUzbtsgoDRwyy8zF+Vc5NI7jdTg4OeNh2rNIHK9X12wfOrAj67U3xzROcXHpuCmluL9/4PrimuO08/N5pKwKpIixs1tb63XNMAwki2XF6FSRZiPJUhdoRLFT57vfbll5krIiZne4QeAO3izNqeqcrh3Z+O6Ligz7XePUF4OwwMgqr8mDsIFm6HuEsCE4f7g/sK5zRttTebnkIosZhil0MKQUHJs9Yz+EZEMIxaQ0sZTB/DNfOblrG/vzw3dv0gzw50V/30BGOGOGwck+l3Ea+KlinrFa0+12J3notqUsc6wnhRdJQhonNKPh6PlGcSJIVML2fheEU6LImXWOPiFKkgwzzyRJwmdvXUCUZClpXnK86yhTd51n5xfsdls6X40VMnXBnLB0o1tTW/1Anj+trI5S8PZwQ+kNrQcRY7qOdf0CfxsYDh2HfU+1UiSJr54zI0VGlLrv60QxFD/3s38kJJ1/46/9rwxdwyc/9lMLlYisLNg3R84uL8IarqsXWD2HzqoxxpkvixN/M0mSJxYA0+TO42ma0L7r3x+PVFWFnXWIAaIIHu73LDSDN2/ecHZeszmrgujMEPcQzUyjovBKZz/4nitO/Jl/708B8Cv/xX/NH/5DP8mLy/Mwd3mi0Hp2icXk9uFNVSCwDKMXqxETUiR0XRM6lljHq0tTsLP7vCzLgrWLm6dF1CFlVbt7tj/usEaxnI9KJeh5QI9j4LAVxYrEFx+VT0Sb7oiUkvoxt0kIhJ2J/MEdV2uSSFHLLKiTTtPkLDEun7k1RcR2v3My8f5e3W970jhCWImUp7PPzoYyP/FXbDQzTYrMc0PXVUzb9iT5aR+UKqbtDZNPpPIqZ5h6mm5k7zk6wzhh9EjTdEHtLZIZxVkSEmNJxsXFBVa0TL4Tocc1Wk/Mk+DudjmLFPdzGwymu649FRSN+y7v494rU0oG6VUNs4TmcAxJ2TwrPv30DVF8KgwjLHmekyQJekGzoFBxxCef/Fi4D/M80/f9o32jox80s65489LtzeNwIM8Ltl4I5+bmgSRKGceGs7MLP+cR6/WZay74zpWeDMLK0NV8uN8jiNmsz/n+9z8F4Op6zTg1ZKlC+/JhVZVoo7G4vauuS1br0henl4RPk2dpaEgoNSKs5wr6kCdNIpSaudm9xfgYahgmsIJ960Bu6S53hZnRkGW+y58kHvkkgopp1w3kZYEH/aAHQxTFCBEFPt6gHU8xjiN6H0skMsaqHOXjJaME3aSY5xxt3J4bF5oqjZn0SK7cB4zjSHmuaBfO5b7l074jjeIgoFHkCSqK2B5UiPeTR/zvHzX+adQCfx+HQHtZ24eHLe/f34bkBE4y6FmWhkAtKwuKonQSjjhyu/CeTP3kApu/8Td/lbouKas8VPSG8cCh2SK8XLQ/G5yTu1f4cd4bA8YYtF4yfJdkBJWjyZEaH3v9DMPgg+wkZOJKuUN9+fw8z9HzyKSH4E3lAj6edIAApnlAxSc1tEVYYenkPBZaWMZjmBuAfaRceBonEY7lfb6oyud8hZ7KlC//noQOrH2aFC6QuB/dTQufePrck5TdCVYXPu+LML5Iqi8KU3xBMv6pUIe7psfQyaeQv8fX/kVI5UlZ7/GIk9N8WGuxGBCntrkSFskJ8hdFS+fsKc1RoDCLBLgQGAuz1uH+LBC9x4exNTMCFdQfZz273zPzI1+kCWlnhDnpW85mBqWCWtny7SwSn8thjMUY6+B0ATo7e2GNBRIrEZFlmHrikDRoJ8gnLfiAfRg1SZLS7P1hOXRe9nQKczx1ljdvvh8qod/61m9ijSHPE37u55xc+93dDVob/ti/8E3axgVC7bHn+qLkX/kTDta2WpfYeSL+l386dP3u3r+hKDMOhwOJDxAurs7puobLy18CnHpQ1zWsNpdYT9Ku6ozdbksar5kmB19I4oz9/sBm7TbpP/Ev/VEsE0LF3G+dMl8a1dR1xeG443ztuihLAhqpRZks5uW7V2yP44nszOy89zpNVXkfn1jxcLdH+APg8vwZdk4o8hzj+3pS5JxtVsy6YeXhLrv9A+tVFQQgrBFUaU6sIkrrDsvNpiSJJF0zkvsOkNkPnBUVdqlSxy6gFsgg0y1MxLPN15HzPXG8PMMx6bk82ViomXHqKS6uQhW173uuL8/o24HM3xurBXroQ/EiTWOSCLK0YpzcXrmuU6yd6NuBxHsujfpIEuchCTVmZnNWMY6ah4cleZzIi5QoEgw++Z+ZkCKl693Z4OwyJtJIIKVXqbMTibeiVKsAACAASURBVLJ0HlaCEowYjs2RNFlke1NUJIiSJOxv1x9cM/ZD8ERMchc8G2ZktHTcDNOsma1m9DC5thvpuo4k878zGVb1GV3f8tGHTqTh2DYcjg+kecXkn+Q3t+9o2geur93vSJu5PTeaEGYhW2+I48/7sBiQA7cPPnFTzzEWvv/D3+GDj926+zt/9x+ijSuQ6NGT4+eIUQ8sPjfPrj/i6uoZ796947PPXOAWRW6vPjt/xrjAieKYtzfv+OhjR8PWWlOWOcPYeVj8SWDGwcYX+enDE9RGmbtO1jibIBgQRRHH45EokuwOLhB+9vyCw74LiIIicz5weZ6FfbFvHfyt7e7Rg3seisqLpXzoEoS/9r/8Kv/Ov/1vsclPm/55kdJ1Gm1BRkuxyxBFEb11a8NBrDrKdRnimXp9SdsdKcuE7c4l9O1+S5FmYT/f7/cOkmRtIN4zzZxt1hz83xg94FCelovNWViLmVJPilerImcYBq6v3f28v793ipNtG+CLTdNitOH68jrsQXWx4ubmhsF3kqMoYpUmWIYANf/w4pK7h5dOBMLHSk278+eWWxtJkmDMhO77oCS3vTcMvSDPK7yytRMvsXGgS8xjTJYUlGnMB9fuvdu2pRs78jwNRc+26UmzU/FqnmesFDRHEWK5cdwyTGNYk8vQegxy23HquttSSpLSPY/7bkYOEUJYtO/6dn1DnpWYBXWjJbGX4l/eK0kSDu3A9vW7MMdWC+Is5YevnXi2lNIVDfqBySfnrhAJ24eBaTyt2eY4sCBp07RgHDQqMSydx1evP3Ww+Tjj4cGfPWlGXdfoya2pSAmmcSLNBNKLTry7ecnmLMfYLggsSRn7gqf7u7upQc+Rl5V3388ayTHpECwCJTNdN6F7HWCQlpnRvuVwODD55z/Lk3DugYuL5nlGd3MQppJSMluLQIV7rA2s12tynziuViuETJFA5IuGdnBKkkNvsX5f6qcerGT00FMVeVTXrIKa5cDMsR98YuZjFwEgXXUGKNYlaT1hZsL5OHY9VljOn10hF+XxH4FAezz+iQQtvhxfji/Hl+PL8eX4cnw5vhxfji/Hl+PL8XT8wehcCZBeOlhFDif5GLIVfIXmE+ltt9sz6FMFPFGKOI6JlOL83FWXV1XFNB348MPn/PQvOiL6ftugzYReHMWPPY
fDga7fc/CYVufrMoCViNBNWrpM7qeTxLoKVbE4dp0spRRx8hhOdjLUc+IHmnnWJ3EGz8eRUoXPEUI5R+JFBMI4bs5jIQwpncS7q858roP1pBvz+e5R9CN+d5EPf9z1+X/ycgpANqQUSKInXarQufq8oEIQ1JDhd5bOVeBqWRu8fZbX5COBCWvnJwITy9p4IvsuvtiRiiJPbrWna18ghgvkYPn86JF/QTCM+5xYhdYqdHbMPCOJWZyMrb+uGdB2gaz4ObKnztg8OcxygPcp4btEioVmtsBjokg6SfnH8+C7DFEUOTGU+eQ7pXzX7zGMVFrhAJXqVDUSQmGYg7RtFEWOQiYtkTpVaLIsgzn3P7ckScpsJqaFhyENcRwxjj3Kd7jyVGJMy+WFq/QVxSVds+N47AMsKM0tLz6qsb76+/O/+M+xXq/p2wYlF4GZr5ElZxy6T0mUgwo7cr1FGFedH8Yj1kSIJGL0nIvivKIoCmwSkWTu8/ZtxzBp5reuAzbNB7AR27sHjl50oj6WzFoQpz3N0RH2lY05P3tG5zkJjS/B5mWK7t1+M5qW5jgQRZJ371wFOM9H+n4kz1ylMMuUMxRtx1DlnM2IEJKqPGfyHW+JIIpGJ4sLYCOa1rI5u2LSbp+aNcR5DkYy9a7UWeWFMzH2EDUpJR9/9CFxHNM1mV8/hq7rePH8o8BF2VxecWgaisJ7fc0lWk8UZRTujZkVfTdzsVkzG3cN3bEjjRLUsvZnS52co7Vhf3jnv3NF30Ce1XRH12Xom4712YqbOwd7zKsUMOipp/QQTolhmmaiSDAt0KvIMI0N0nM1hmHg00/vuDh/FqCtUZlQlAkWTRQvfJwCay119dWwzm/Gz9jtG/LCzd35+Rm73T7Aby8uLlzFOUrJvSz31dUzfuc736KoKlIPwbHAyBy6o8exR88jZ5sLPPeZqipomo5qXZP5imzXHqnXRTCl1dpxgjdnZwyj9wiyhqvnz7h/2AcTYRUbLi6vOXpOVBTPpEnB/X1DknlT7YMmj71KxTLsxIfPfoJj682OjwYtRpSeWK1cp+M3v/UKFQu0NewOvoO4SljXV2SR60ToSfHt3/ouu8NbMi/9HkcJWiecn13zu991EP+yqBmG/lTh1drBevUQ9muh5Aln4Dvqu92OIq+ecEyzLEfqOeyVh8OBsiyfwKyGoaXrOrQ5ndl9P7E/bIMYQBRb4njFp5/+Bt3BzcNP/zNuzf/ed37b/9mKX/ylb/L2/UnoWAjBZnWGtHEQncmylOOxIwvwS0Ne5KR5FrzhxnFEpIJERZytfPBgLGM/MXmOWV0m9M2BalWTeSTAurykHxryxHN9tCZNCyySZnBrw+gDs8np9ek+xyqiSjIGf+8yGTM1A1UWMXjBl/7gOIGxNSHmmWdLHEVYT5fI4gyjHRWjLBaBAkX17AP6VhD5NVx95UOMgb47iTvd3r2hfvYRB38N7x/uSZSlLtJHUM+JssxpvS+UEILD/oC2OnTT2ranXp0xzobbWwcRz7OEhzeviLyPYNs3xEkGRgQ+vI2cIWyep+Hvuq4jSbPAI+qGCSkj8jznuHd7YFmsaXoHN52XMyRKUbMEj3SaRMs4OZ5S1x/9+nS0lfWmfmSE3REnZmFrODpArIkFSC/JX0YFkpLN5iQ1n0UVwzCGmNL5T0mQJlz71fPK24VEYf5c5xeKzO2dq03FrBXDpMP+0jQHolQQiQu0FyBy2gV56OLM1tB1LU1zDPdBRBZrBbud969Ek6bK3bPBrSlhIkgimq4n9p3UfdNizPH03vNEJBXG9IxeUl3JOHStl46XlJKH46fkiXs+4/cOtj+0Q/DCK4rCxV1SEntfO6wEq4JQTJrGDl2mJcPgvcxkxDRriqJiQTjpvg+xO4BVCuutS7qDu84kyZitpekG1OhRRf+/gAVa+4Tgdgqcn3ogPcZhJ2lEmuUBmuWgU5pu6Lj1ONs4ylhVV/zgB5/xykNGsqQkSVM2a7chlnXFhx9+SF1/g7F3wdxf+s//U+g6kiQLMMCuc+pWSzKw8LOkJCxC7b0GHH/rBB90kK7lOp06EsIEbyo7W4Ry/iMLDFFY84Qv5RK5p5wgjCWSCqEeC0N80bNomb/H3KnHaoFf5Cwtf5cEDtnj+/J5UYhIflFV8DG8Lvwdj35eksYA1TtBEIPIxGNOmv/3sQLh4/HkZ5fdIJdE0cL8SM1v+f15nnlMP1s2tIWcC4+ERqZTAuZe0wFbbDEhYXxyLcaixAKD4kQcX76zmJFCBrETrUfSOOPpvZiIVYTAhPRZShWS9DDXxnj/K8+T0BN1VnkIkr8uOSOFDj8rYZnnyW1UuQ/K2oNzKx+nsEmWZYKUhnZ0wcbVJmfUA8PQUVYnrlbbHKmKIhC316sztNYY4xWohjdYOVJvAOGex5nJBcGROxQSkcA8Yu3A4BOEYXTGsPv9QF06SMz7my1WDeT+QNXy1nmU9DMXF+53hr6laR8oiopxXIojmnFowCtxVnUFSNp+4tm1gxhud3dkWeb4IPlCLG652b1kXZ2H+yltzPv9m7BuutGp6U1mIloEGMxIVp58YPrDkbrKvfeMPwQySd8cGKQ8kf/Hhro6I/GKb303kkQJRveUPojv2gFMz9RD6jHsUhoGPQRvqiItuH3v4CqJ5ylVdU7ZDxwOLaXnXKhI8OzqinsvtrApa45Ny3HbhMDi4mzDdnfL4bZjVbv3N8NMJHPssPAkWnrTs1qvifxzPDUTt/st3/j614NfDOOAlYraO/aqWNI0B4oiCby6aZRICZvNWeCs6rkhT0/GrXJSFNGaKEnAcy6SJGEce/q+I/N8OKEUQzdgfWKakCA9tG/B+t/tt5ydn9F5w2cXaPRM80SKm6fvf/oDiihBTaf9qmtb0jjm8HDv57fGTk54Yklem85x7842FwGrLyWMfUvqg/O6WrHd7RFCBOhgkiTcvX7LYd8EnkJVZ8hIMc5eSW4yHJsdUaxoe3/2FSu66XOCFvkF/+Wv/BqxD+6+9o2cCEsUX/PmpVt3b968payuqasVlRduSFTEoAfevHHPvyssxpyfXbEU9Q7tkbOrFdr7IAFU63P0NHB/6/hOH734ACEEg54CVwvj9uVxPMFkx3EkivoAxXTFnfzJHmusZrd/IE1TUm/Oa/uePM8ZPC1ARJLYpEihWK+9ufLQ8mu/9nd59vySekniPdTob//qPwDg449+nGKtEF78BGCUMV0/gTEhWBzHCaNSUrWoBUuaY48RksPR8UymaUTrmTSvOfeCC23fkiQ5aebuu1KKpmmQMmG9conM7e0t0zhz9cxBP+0MbTMwTCPnK7cH3d2/Q6YRVp/g4MMwoYo48LJW64pROrGITeH+7qNnX6HvnTHu4uUVxwllvUZ7L6Mkzpj0wDS0pD7IHbqeKN4wloKsWAqgzsspelREXH3lE6Io4mLt7tU3Pvkq4zjRD9Oj4q3zLTrz+89qtaKZZrTug0nyoTm6hMJYVuXiU5SwKr4WPFBHLdHGcOwf0L7Ac3fXMaWacdZ8/OzDcA/TNOXde1fMEUKw3++J04Ti4
ifc2ugGxDwibETu1QldzBEFBdHcJznKCtIFum8keV6ioiQUeBJKhJQIr/aYpInzEet7Cu8+vt/uSOKRLFesvJLqMLREcsb6PUmoiYurnOMuRnmRtDiOQ6G1rs7C9zkcGoQ3/mXOaJqOPKuwXtFvtdqg55bm+J7BX9c0O5Pp3ovzLOfSbAyFfz6GTmONCjFslW8Yhs4pNdtFDdkg7UCeWLSXP83iGCljWl9hWpWVK3qrIhSYljFjiTydQMWOX9n7/WDWFjEP5GUSitW3uztWqwpjZ5rGFfGKvHJcer+/EaWMswAbcfSqtFUWI4Ti2OzC3rzQji4v3LPWtg66GamEyUO94zhGRgmTFUSxu1fJfFK//VHjD0RyZS2h0rMIKDgey1OOTFGccNlO3GEKQg5SOhUjlShyj4ncH3vGaaCuclbP3KbVHI+MeuTuzk3267evnGJKqgLONc9z4qhHCEnXLllw4kz4fNC03+9Jk5y2bcPG5oQpHE9L+tekF1xYAn0lBMZvhktnYNJene1RMiXxxq3+YVlU3JzC3inBWIL9L3CvPpfYPB5KPuVZPU6EFkW/8PeP+Uw+cLfyaSJm7RzyhVMX6SQo8UUBDeH4P1aGh3NRf0QYjF46OcrxgeZHSYR/z6BE+Dm+FIC0CilOComugxPxRNtDgLEKqx7LdruO6WMVRSEt1sqQ2FhvchxHJyl2ZoGSURChWFQHrbCY+WTcGEXK8aP8u2WxcoRjHyimkUXKGa1PFZp5tkzTgWHoyH0Q0fUtZ+sNqU/KDocdUSQp84ws81WjfYtkB7p3yj647rAzl/U/C8NoR6Y+CQFDRE8SA7FkUaiLRcph15BXvkOrJoQ0mLkj8wHQMHak6Yy1DaPvCu8ODre8YMwdzlnxsN2G4sRswGhLdb4on91DkRFFirp2Xaq7ux2RrMmSOfAir64vGKYH4mVByDVRdeELAT4Qj1OGwfNQ/G3OVEqcRx5nDfvDSJxI6rpm5w8YISOUUrQ7zfMzdw2qjrGcxA4aPTDOA8QC6feEyLiCxbqqORxcgLfxAdUcuwuI44RZJKzXVbjOvttTXJwxz4I0d3O8UQI7T0EmuCxclbxpDtx7snNVbkhiSZbA8eACe6UExkC2cR2vpj2Qpgk3798Qe/y77s6IVUxBQuQnIpng+HDHygdSQlryPOHq6pxD4/lO1rI5u2ReT0xeOKHOL92zu3CL4gQlY7KzjPno+bBFzvrZTNNtQ4e0WOVEaUSmvLx301CvzrFMLPqy5xcX3O/uibISsXBfJzh2A5Gfz+sPPnCEdyCrfedof3AdSwFz5+ZzvcnIVAbCcymalvNqg547pHBruBA90WTJ/VkUzZYsyTBRglpM4NOEThvuHx5CNzSO3V6wnGF915GkqeNeepGUOI7J8pIoikNX0diZqqqR3ixzt9tR1xV3dw8nxdtji7Ez67oMMt2CmP3D/Ymja71SrhEM/l7pxJAt8vx+rGr4u3/vv2Ptu3d//E/8q/z2b/1t4uScw9FV+N/efEq9fsH2/gHrORbdQTvZZx/cqdgQK8kw6CBsoLVls9kgIhG4KBjDuqo4ePPo7Ks/xps377i4WAeDcCklYtTAyfh9SaIWqw4lnRrd8XAyRdZa8/BwR5xkYV7yPKM9NiG5s0awfWh48cEzbm6d8E3XTRhjuH62offd5zdvXwPwwx+4wO3FV2sOzQPjdFJ1vDu2TNPAMExE/sw4HHaoqKBvb/x1psRRRjlkQfp5HDrHJzk8MM1uL5lnyzAdqetFLXAgz3PapuPB88eUUhgSXt64feR4PHJ1foEREXvfrXj2/GsusYmiUAQehoFhGCj8upulpThfMeqU3q/hQ9NitSvatb5Dk4mEu7c3zF7p7dnVC7p+5/g1t+67xJGLjbAR9++8xLiMSdMU4bmTgx7JspxuHAP39dAdeLjf8eLFR+GMPhx25GV24tkNDWkSk8an7tYqr7BZgdaaynNf4jhl1BN14ZKmYWzohwPl5qs0rUMZxGcKyCmrLJzRRVEwTQM/841P3NK0mvvtljRNUR4BEqcJd3cPjKPm6O+DiGLiKOXoCy6TkeSba9q25ar0gidSMWsXN2iPPMjEQJqkSM+926xyICeOz+k6915X65S2bTkeOir/XpkUGKsZBy9yUuQk9FysizBXfd+665YuCXbzEvP88oJp3vv7knH9TDL0M6OPp7quY9KKqi7A80qjWVKVFaNXcTTGBGPn5V5VuWGc+tA5s6YnS3IMltksMvYRkVWhCeKuKUFrTeELaOOgYbbMUpOmi7WGUyWUQgaRMj3MxDLBH/VYq706ssYsBtMyZegtxoAefPFeDxh74qa3osVaS5YlxLm7L0etKfOKYTKU50vhe0IpRVK6/e7sakO/PTAMQ0jij+09m/qKm9sHtO/uyvzznNan40vO1Zfjy/Hl+HJ8Ob4cX44vx5fjy/Hl+HL8Pow/EJ0rKQVxsnRMdIB/BRiXsBgzYx6ZwCohkdYE2XVrZ4wSCGOQwhspjgek9Mas2vNTZE2aSoSvGidZijGaJFUMcumwOAWjKLJB8UrrCSFOlRalHFzssVR5HKc+0xVPfIPmefa4MFfpGYaBh4c7Um/0N1uLFBEqjgIEL4pjlIyJxdIBW8yCT+2XRUHOmFO2/vnxed7ack3utQUqdZJEXzpXizrh56F0j9UIT2a5Tz9vwdCeYISnzoX718mrL5yn5e8c5E8hgumtRQpLlJ04WLMvawepad8yE/bUuVr8rE5QQdcZ1JN+Aj+N49hXSZd77MxyH6svjaOT/FwqREIIxnFknk7QQRUrskKdpGC1q8ROU896lYX5zPOUrmkQvgKcF6mX5fd8gMSvOwXZ0qXqWmKp0eZA7ytQq/qSrt8GCMdxf+Di4oz9YaD1lfqb928p64ymaQIUI45SkthV4gDSKGHoBmQx8u7WVbwCrCHOiPzvHY6uMxBP3h9ncJ5zMhYBs7+oIXZ9s4juUBQFXdtTe95Q1zm7hLPNVbg3bd9Q1xn3W8eBuji7pGk6bu9uubx2v5OWhiieyXIF0QI/qek12HiRcK/pGo1lDl4jKo6QWUwa5cGPQ48T9ToLnjpRvKOqNozDzPW1NwzctjzcvncKW76TdJZdMRtBP7mK5vlFydAZ+nbgwsug7+KZNHYGtqsXvqplNF07sFii1eWGWVh2uy1LO03PPcZoptGSLDA9ZRm7kdqvn35oUZElzzKEr3jn2ZrZDChhAwwRDHW9DuqPeVYTR5IXL14Eha26rrHWYdUXmG2SC5qbW6zfk0Yzs+80r9/ecTg4Raq8rOi7kSyPGCdXzc6lMzdeoGaX5x+i5563hzuGwc3d/dvfI7Ypw9iS5J6LMjmT3ZX3QOmnntFK8qxkd3zw12CQkeTt3RsWyyZDh54MfevW3bubO7JixTAMVMXSRZnouo48Lxk9jOTVd16RJTFV5dbiqizZHR7Ikoy09Lzd5ILtwwNiWcCJYhaGadKhczXZGZlEXD6/9nwISJIYrTUvnj/367xx8M5cAK4KfLaxyCj1vodu7yqKjGHsGb0C3uVmhbGa
4sUzBm9y/eGza7bbO7a7W2YPrej7B9arDffeTuDiMkdrQ9dOfP2TjwBo2onx1Ohxrx23/Ok/8yf5s//hXwTgz6o/zc9/849h5AP/x99yZq5TF6POetq5I1/gL6sY1Ij1ktV9ozHAMO3DfjHPhovzr7HbPQS+8WwmkiQl9hzWpulwSrUxTePWT1mWjFaTpymokxrZPM/hDG36Hcnmio8//phPP3XqhGkaY4wCSeBhG5Trdnk0y+asJs9WxFEcrlMKyx/+6Z/k3dtbssy9/3Im7H134id+7CNkokN3BmB33HFot1gjEN49Ni8zbm7eUnoo3zhPmMgwz0Pgw6IK6qpERZb98OCvIaI8W3O/c3uelJKx75zMulfBrPI1m/MLhtZ3WueJ/bD3svXurW93D8jZdQsWpMOxbTDC0Pael4Xj3Lx6c8Q3l7i6WGPnkSxOglVIc3igKle01iN6bt5RroQ3A/fxlNSMXUMURWQeMjYNlmHSHJtFtS5FRSnjqNl7vmqdF3z04kMiFYd7Ko1EGEvf+Hs3aeJIYQ3cb90cqEiQFxFmngJVYGhnkrXkoXHrNSLGGuV8IDN3H6q0YjYt8xQz+DN52HbOT8l3Yw2WyMRYTYB+zd1MqRKqKuPDK8dBHEan6HnlYdaTWYWu8vs7b+eR5ggh/Dpc1PoM2lgu/H6T5AWbzYbd7oE6WygjA5uyRlwr9KTCWowTzeBVI/vBulNiNsQ+DqrzFZMeiWOF8UquWIPWM0XhrtsyMYxbhsGE83Cc9yAytg8jyiNH8ixlHE+wUsdPF8yzPj3H2kngX527Oej7DmOdj1ki3HdJoxJlYEYH9IwQgiTJgjqxEs42oBtEiN8kAqFdfFRUPh6eF5XCR6bFUUTbdQFqjoCpHzFYEo880H2PiE4xqx6djYMZWhc7ALqb2fcHymLN4PlUcRQxjTPf+fYPADhbnVOuE4QVRJF7rj54fs4wDHz87EWIkQ+HH+09uYw/EMmVIBhUAyrAv5Q8LbhIJVghg8q2UgqjTl4/CIG0EKkI5SdSejJgnBdhc13kxYWHcMTCYm1EJGNs5GFdNnJwsBkij7WdhYf0zMsNl+jJJVfLwjTGYI34nOmtccmH594gIqyc+Plv/hFefOwW65s3O7Se2B8fGPpFjvZI17RMenHJlkFEIxwUUpJGjjwr7YlQqj0O2v2SwM5e7pbF7+ipAMQyh3GsmPzDGqcSYxVGa6R8lKSYOMBYjBmIkwlj65B0RcrJmCZxjvbQoUiOWHqULMI1LYISdl5gfQozGZSIGD1nR0qFwaIX6dJxRglHdI885GCcZ2YzIqVi9thihWW2bRBNiKKEqWuoswg8ljqOY7p+i7VuA3AXMaISSazcAfPwcEdRpcRxyuFw62fAUOYJ3XBH6je7vm/p+pMR3zAOXJ6dM0SD5/eANjPjJJEiRnjT0K6LuLt7GQJTvXe+aXFU8GrvDqtD84aqXCNUHlrwohlp2zbctzRNGfWO2Vi6xr12fnFFnAiKLGfyQZk7ABqk5wi8374jiiS1vSb1GOg0U0xac2z23mfFwbqkkjT9splIVAR1VdO27trzImUcZqqiBuvnXcxcnNUB/59mCVVVMowTuTdJbBr3jF1feJhHbzhfVWzqVSg0KFKOg+OsLLLA+/0NZVmHpKlpGupqzfHYcHXlsNPTNDFNI7MeghytsBOvX73h2QsXhG6qK7quY54bJhbX+py6VkTRNa9fugArTzPGqWNV+sA4EuT5BTbRlPXiRrohzSraboeSLmh4937kK19b0fgkVIuR++0tz58/x1gP4ZpSitx5kBwbF5Ck8TlTdvQiNxBFKcO4xdgkmDh/9vaHqKQnz8+DuIIxhveH2zAvWV6ipeTTz17x0UfuO7/a3tG2DXleUBYuMTy+u0cIMD7ANLNFmyNRFPHuxvFHvvGNT0gSi9EzuQ/m+v4IOiXzkMPP3v2QobeuqLXQanTKQXdkeYTyh9VZvQKTsw8+hYY4NUxW0PlCmG47tAZrp1DwECJGxi3rS092VhuyrOLNmzfceljn9bMzVGTZHo4n/qQqWF1chkLKYTKoes1kct4f3d/N+oGu61it3HvfH3qkFAzDEOA+cRw7+JGNgyCJGifquuZ7r91aOT8/Zz921HkR+GoWi7WG/X5H4pOWVy9vubo659aLn0TZHhVB1/ZI4UnovWG7u6MsFM2wmNAqEIrV2vuIyQoSwWWdMngYi8Yg86dSwXoSfOPHf5yv/6QLwP7yX/4f+VP/xr/IxfU53/nB33fXfnGBmRuGY0/reRnYCNTsSOs4GK+KXAFx9ImGkBlnl+f84OUPgz0DVhFHGbM/w477Oy4uLri/eRe8qKZ+II5jBjkjfaCGlQz9FGCWiSp48/o9//1/+z9x5YNeIxz8erNZIX2CMOoV2jREvjB1ez8iLdw8fJerK3fW5iJl+/AWBEGopV98zbTbt8pzweub97z1JqMAN/sdBsHLly+5vPa+XTe3ZFnG7r07G/qhpVqVvL25RSpfwIpT4jgmTiI+unSB9vbhDvv+PohXxLGi7zuXoF647/fmbotOVYg3Xt/ecH/MuL5+HtZ0s29Ik5oohhUL79sltMYsRYyJJDHYaU/vOeXrjz7AzrnjAHnu8+rqgpvb9wE6XJ2VgIGon81q5wAAIABJREFUQ6Zufrdbxzu3Fu627jtnWU5Z1Ei59ksl4a7rqcuKysPLXt3e0L7u+eCDTTBlLVfnHI/vQmG2XNWMY0OWO18ycEnzOEz0/RjOaCs0/XFm9knTvrknTXOa5sBq7eXFsRTFirYbufDcej2MlNk6QNukVAzTzG63Y/IJV16W9ENHnifBNHhztmK1WrH1MG9Fiooj4sSy8Zy8IjkniiRdcTJz79qBaY5Icre/dUNH04LmgThxz61RDWM3YYwNfpFSRIydDebq1bpCzxPdQdF7GPDVtTv3ongOcMVx2hOphId3TpQljWKSJKUqYlarBR6cM+qRaS1PfngPDUVVuGYB0DYWZsE4TejOW5OsY5qmofEiRsZAEmWsswTtxVTSWGBGQfzYUscqurYj8TFXFCmytCJLUs7P3bPQD95PcbYhru373gnD+QNEzxPCQp7k1F5cZZom9DR4URQP4RSCtjsGGLKeRlIpuTi7IPXnVVQrlMxcE8RTpswckaWKsW78dcYIYrTRiCU+nTUK4RoFPj6t1ycfvB81/kAkVwhJ4lVODIu63hxuuIgcB0LMJz4MaCIlgua8Uo6cPE1T6AxEqWSaR6c+tZRtrEU94i2pyBkWKqmQYbNLkSphGubAlVo4PYvykRCCOI3o+57RexTEcRy4UctwHTAVKpFxpGjblr/+1/8mf/Sf/wUA2k6wOV/z8UefIFiCCEuSRsFrYBgGjzN/oPWKW8YYDocDwzCelOuwIE8a/lEUEcUpyp46R7E3ZFwWc5aqQBhfSO9mdgqMmvbkr4R0RsKet+ASvhRBh5CLd4PAGIG2GtRSsVAIVoGEPnWjT6ymUzcrUkgZcej2rPw1RF49bwn8s3VBNwwYTBADuD5PGccJSx/MjderM/a7HiE
Xxa339H2HSFWYl6E3zLIhSRWt39jyvESbiaZ119kOHYO2VHXKoffKZ2lFe7REUnA8eO5SJOibXZjDPC/ZH9+DmWlalyRN00xRVGzWl3R+DRkjOb84CxuUHiPGYUSaiVXuDvBNdU7TNKyqS6xdKsItebY+3RffLZymCenVu9I0Zb9riOI0qDZVVUWWFuHAruuKJIpph5564z7v/v6GNE25uj4LKo3uMDdcXDuDyd22xYg9u/0xGMVa22HnhLJOOLSOw7A7aM5WHwWFPaVS8mLD7u6WxHeXVxvLPAusD0LTPEKlKYqMZuEtneWcyZyiqrh577gTm6Lg8uIyVJ23egs9fPX6RajiKiUxVnN+vgkV7zhWfP0rz4N/TF2dY9ZH7m9TVpsF5H1Oc+y5ON9gtfvOZxcJ9/db8nLhMm2pioTJRMyeV2foGKcWKyR3917UY1Mzzyld5/lrSpBmZ7x8fRc4LLOZkPIOPffhOapKw/vb12zW7vO7xmDlHsExJMuRKojjmLuHHwYu2qwN4zSEjnuWJSiZ0fea737/NwBv0joamu6WTz/7TTef6yvmWXB75yrC5SrHMhDJmo+/5oQ+3r7/IQhNmlT0O/f9Ls4+5tDuuNm+9nMuWNWXDP3I3Lj7fn72jFVywe3d28Cjnc2Mno6BI2CNJDIJb2++H7p1729vESImL0vwSVma1ChVcPQH6mZdc2hHzq+es9ST9s2Bs7M1eblhNG691KucZux9xxBAgBAcm0PwCLu7fY/WI+r9omRniSLJOHRM3kR0s9k4Y9NW8n677LGCu/t3gYeWRhl5XnJ+dhkQGb/xG7/O8+cf8vFXvsZv/8bfAeDNm3d88OJjWu2uaRrhww8+YZ4nJu2uW8+DN7BPqWMflJmRT3mP9vKBcRxT1RtW9TkvX7m/6zvNh195evj/X7/+W1w/2/Bzf+RnAPgrv/Jf8T//b3+VX/zFX+Anf+KnAfjX/uTPIqQTl1i4aNM0sW9u6T3/+Pb2nua4ZdR94MBOM3z3936H5jiGAHZqJ7p9G/b4YZhoup5xHCk8X62qKsqyROuRonbB6uFwcHv5YgCdOzWzb//utzm7dMWAb/7MN7m5fReEngASNdIcdl4JDJQz3iMRCb1XVhwHQTvODP0UTJKXiEL4eGPfHfn+p0fe3yxrBb77vdeu8ysFO+/b17aO+/LYgP3VqzsmM6Pk6OduS6IkKhJ873vOXyySMdM0B2W5KHLfOUkSzO+650hiSD/9LMQ7WmsilZC/2dJ7ZdA4ShH2FVWVIexJVfDy/FnYb8bJnc3nmxfBk+i3X71391edYoL29Wuc8pP7Lut1zz/49b/H1dUVl94zS+uRZJw5Ho/0njcUxzFDPzL57sTUawTOpyj2B/IwH9nvBt7tKqTvRnzzp38GlSZ0XuxIKLisn/Gwuw8JXpoo5tlQFwKlFhVVhYlS8nzj70HpO/WGzu8JcazQ84iKDMIXfePEIQQ2Zy7ZstZSWcHFeXVSkowiRKRouhOPfjaWw9SEhG8c3Lz1w0SRr/w1bCmjkmp9GdZsstIM0w6M453W5/Bwt+X64mMi5ZWqDzWTcqbyyzlmmZ0SsPSiTGPrxKM2YzAf3+/3qDghzRSlF1iKkwuOh5ZLn+QmcUzb9F5p2N3Ttu/YnF9w7FrK3l3Dxov69OOyv+VEUcTDXYPwxdzrLCOqCurKvfeh7Zz4B4o0W/n1UnPsBw7bY7hOp4hd0zZezADJpDtmoWhu3/nXDEVR0XYTnS9En51dIBIR7ovwioLzPDPuT4lUlWfoYaDwjYuyLNFJzgc/9bNu7oaB4/GIVCfxLykShmFCJgZtFjPgEmsF0eCTQJWDNF6Mzl2lQBHHkYubl5zjc36qnx9/IJIrIQxx6hV3rACcpOLSCVB4M0M5BBMxcFoGS8tR69l3kSJiDxWzo0biKo+JD9CliDDWBiXCeWqcaIRI0NNJwjVJEqw5tVSFEF757CS2sEDGFnUbrTXGGwsuNzNJEoahJyi2efW9rmmD/ObN2wNv377GCB26QpEURLEMKmBFmbGuV1xfXlN8vJA7T5Lak68y9sPA4XAIwcd+v2caRrp+CIeeMI4AvSQjQgkPkXsqOS5l5BbIIn43W+Dk5j2PIFWGNRah3fxGSmHkhLEjelpgHb7q67uAdV1SlRnrTcnh6Frr/XRkpqO+gGl2FeA0Ety9e8v1c2dC2TQvEcpSlRm7t66a/nbrgr1hGGh8d2nT1mBVuHdRJHn95jX5YaYq1/6+Z6RZxO3dfTBAPewHEDpUyeqyous6DtuJqzNHAh+GgVVZ0By3JL5zZaxGoii8eIHWM0bPxEnKurrw6yVmGCa6rqXy7e/DwUH7lmSgbXqquqAocu5vd/4eK+pVwrF5S5W7QHvsnJT20onY7XYYaSnLmrZzScN+u0NGit1uG4Qw8jyH+aQ4uVpVvHr1ijhPiXzVuC7PyJIU+SgZb9uezWbF5LsvZaxo+5KzUrDyUKw829BkHZvqgqvSHShppnj1+nv8xCcbP+cxRnd85TxD+AT96voabBqKA0IaHh7uqKo10le3D4ctm+srkIL24Ob8k08+oW1btBdSuD7/Km9evyKPKnrfibi+vqbpeoYmYlN96O+N4eLsApm54GD7cOSHn72kqA2jl2bWc0vTHOhsjkzce/3wsx2zbXl945K0Ij6j6w1JCvvGPSCb9Tnvbt6w3mwYB7+GVopXL9+HTt2uuaXTt2H/AUfKNmaiKGpWPnm7231GkisOw3fdNVnBcARrMlJPztVTTzc4GX7j4Tyj7pn0xP7o3r+caqpacHv3OnxeVa0ws+LZs2fhHj/stkgpKOpHKk5WImXEqzfuuTo7P2O3v6Eb9kReiOIw7TBxTIwLBtI05uHQUpY52u/Nt7uGPOlJ09o9Y0Ccag6HAxp3gEcqJ441SVyfJHnTmKIoeHf7hvdbr/IlM+JE0Pv9rRkG9vu9gwX6uev6Ay9vBMdDeypqJYZx1Ezz0sWNiFTONO/C83e2uWDQPdZ3WuZhZpqcKEuwJbjdYe3MOAiMh589e3ZNbwzSnzEjgofbO37wgx+Erv/V1RVSSj794ffZbNx+s1rVfP97L1HlIu5i+da3v4WKI5LoZOBtrWW3O1DGwXGdobesvWpckiS07WviXAUzbq0V33n593k8vvPpG/5v9t40drckr+/7VNXZz7P+l7t139vdM0PPMI09GJxFOC/yAsuWoiQCycJRQiIQwYwxM0wgtrCRTTIZEbZgljGDY0RMUBIcBydBEUogxMYJCbYFwzKM6Zmhu+/+X5/t7HVOVV5UPee5lwHnDS8m0i2pX9x////Pc86pOlW/5bs8fFocBEOOTolj+Gf/9Df4lf/7lwFnep3EGcZqotAr6uYp08mS5ZG77sVixvHxnCzLDsp5vSaOUwJ5gKgZBFEUueQYBxN2XfaWzZVbU0qFTqyiqMaiT9tquq6ja936KaKCNE15/xuv85u/+Un/PI/RQ4+VgsFXl7MoJ45OCfdnvSppuxptJcYXQFvdY5EkWYbdq+DuDbd9YLwtNl6Y4/COrjbX/l
4PMPLj42PX/dH7GEESxzGZCh15H8a1ZAdDuLdZsQOJDBk8tLXoGrI0pSm2Y8eZVFFW9VjMtdaSxCn90FCVe2SHxErFphYjzDkJEx48vRrnQMiBtqt4850HIwIkCBLCIHWQqX08I5zJrWEvMOXipk1nePPhIz8vDlrnCjb7TkfDdD4bC4tKKZSQPNmsR3uINFO89NIrmChns3Fn+z/+9U9y8/TGKEyjNppPFg8pq82YDNw8uUMUKrq2IIm8Ququp+t6jo/duVrXNdG6J45jtD/vl8uMutEENh1tRwCWiznnHkp7dnbGNMtRKiCfuvm8fPSE2XRKHCXsfBEmT1JOsskIqZaBBmtYLo7HJHdxtMQYGHo5Or3IoiHCUpXuHK9Nw3y+pNptqD08NE4mZBNF3a6Jo72A1UCeZww+6NK6o9c1s/yEhYfXh0qy25UkCTQ+tkqSKXGU48Vz6TvNfDpB64Nd0SRPmc2XlGU5qtca62Dr4K1CvHz5q3dvEPpig272sYibg5PjBVJFCBESx4cYtCp3xK+8a9xf+r5zqJ+tO2uzLCPLY5p6GKF/fVdjjLNYmnl4rTFOwXB9tbffEKggoO01crSRUTRdg1IC6Z/d6uoajGV17eYuTVPSJKPvDb1P6k+P5wRhzGZ7RenNhqtdCSrA+H2gbhpsWIKxhMG+Y6pp644gkEifX5wcT/gXjReCFi/Gi/FivBgvxovxYrwYL8aL8WK8GH8E4wuic4UVvpuCd+m1OLvcgwT3MAwMSiDsodq0r9yAg8j1g2vlpb4zEAQRre7Isgna7mVrjasm+oqi6SJHmFPpSFYXBB7zKTH1vpIjvXiF+5iua0fhhP3YC0A8K5rQe5nHfYU4CB0kUUhDlrsK22I5ASXp+obQCycMWmPsQYDh+rri0aNH7E2B9yOJEoQQzPK9HG1KnKXcOHJdjnt3XhnFGNQentGtaZqGonDVi81mQ9N07HYlVb1nQQsPNwsJfRtUKtcdPHCuAiSSztZoL3Yw0DOwIo1j5lMHJzi+e8yNWzlR4KoHu3LH537vd7n/YMtq7WAsWu+QQrNYTlmcvApAsV2x2T7is52rdh0vlvQWujVMM1cXeHp+QRtds92uqL0JbJzeY+jlyImIYsu9u3dp9Ype+ypnGlMVBWkUM/EiAjYOkUKNbV9jnGyntZbaVwvjOKIuC2bZYpQKjuOQ7XZL7eEiYRAzzedorSm2HvLTN6RpyND1DI37/EhNGPR2rFIn6ZTttuX66pzp1FWN0yRnvbrAgc5cpezlW7fR3UCxc/OXqMTBUbUh9K902ewIgojbJ7dZrVylTPWCy4s1Jyeus2Q7CAbBPMgIfNd2Pj2m2JbEcUzvZXo/8Pr7iaKI0leguq5nmrQEKiL0laTN1TXHR7cIlcX6DmWeHDF9z9FYTU/ChMG0nJ895OVbrhNYd65TXfkqrh40ab5AqpTEQ4WbVrDdFjStZrN27+j1VeE6V75DvFhIrJFs6s3ISdg9KJnNFjx8+ojGQz+NkPzuW59jW7lqbBAEFEVFlp1y/+LX3HVGCwZTU1XFyCVMUvdu72G7UZrQ9AUynNB5f46HT95ByJ63H11wvHRdxav1FUZozi9d56hpSsLYSebvRSfCIPX8xhjdunuuqjWBCBy8Fgd7nk6O6VpBp8/Ha+8Hy64oCb1/lECBEGSZe9ezPMEMguXR7bGi+Df+8n/DX//uP4fWA/2+4xxogtDSNd5sMcuRynWXEm+A7CwjMmZH2Thf1+sL5vNjzq/uu79LJwxdzNWq4OTUVSLXm0uyNCSOJoSpF8wYSrTUKM9N6QfY7S5IwozY1/zW6y322iKUGUVZLp48IknDA++k0WzXO5qmpW3e8nOlEIFGqYDzpx4aPLRIGYyGmlXVYK1gskix1r235xdur953AZqmQhhLnERjpVUJidYhaZpSVq6i+/TyyvOD3O9kSeA6AFkKviO86xXFdsPZ2Rknx074Io0T1CSl9FVyYzWzaYLWPamXnh56C1Zz8913CL3tQZan6K4aeRlKWWanC4qdpvE8iaarD6Q3P1QmqOqa2dRd5+S4pS8kR8dThPSwUtNiBosx4XjPV9cbnp49RL61h/fVCKGQIhy7OMYYkiQhjSfjvhsnieuWe9GUOI7JJlNOTk44WXq41DTleHmD4yMznmuvv66cp6T/Qd81KKX44tffy0c/+lG3frqO9773i9mVBY3nwwlTsVptKMs9aiSgLDeU2x3BnlcLGNVBHxB46Kzx92l8V9NYy2arUc+UnnXvxDiMMSPHq155+JivistAoVuN1tuxc2Rxnc8wCQ92LCoEG4xrOs2ndF3HyY3F2KnarUvyfM7c8zqKokIYgW0EoRcW6TuDDRt0Lwj8tXZVQ1O3o1CECiVSGQQK4ecqiTO6rqdpalIvPjCZhJih2/s4EwQR08mcYbAjkqNtpBNlqiu0l8TOJinWDgR7aw3dI7ylSpB4CLAI+J03P43RAXFy4It/7v4ZQeSenW4lTd8ShGYUjPjnb505wSpb0vk5tjomzi3T6V78YKAsnSXInqM7neYI27O6OFh+pKnzFpR+zru6IYoioiAkiP3f5RMKA7q94q5HhVRdR10WZPvv6zrAUlX1iLZYXZfsdgVRmBH4iUhzgRIzVObmM5THzpxYt+hm6edBIQPJxcXFKMsfRclzwmXzKMdYS0M8xmFaa6aTkNky8zYMsN2uUYFEepuAgZY8naKHgiTex2uG6+uHzJdLpHb3U5Qrsixj6qHRQ18zdIY4TNDeUqXPMvquQ3iee5bmDBoCAiJvxaDb1nlhCcts6TpsRjtZ9pNXjsbrjuOERWrQXhAsihcEUniTbHedUljatuHd92677+97x703jH6VFoGK9ubE7rOak9x74XqYru7ZbDbOBiNxnc6y2rAt1kzyhMDDT+/cvIGx/ej9FQYJvZiDsPSNp1BMUtrGiXzkE/c+rK/8Af6HjC+I5MqxrPb4Z+Ohc4ekRUmBwBIrOZqnOetHOyrnSCFQQhBH4dhqLpsdhgErDLFvafbGGQGOHkyhZTADSg3EXnFLSEeqC8NkVEypKxdo7BX29lhWBw10n5WmqVM1NGZsbfe9fk65rus6hsF91mhC2bVjcDT4TWuw5jn+llQBWXaEtQcVpf3/11qz3rpgZ7Vx+vx7fyxr9op+gtQHNtPpnCgKRrPVOy+feFx7MH5f27YU5TV1rbm6cslNVzcOP+uxuGFsOLt4wBBdMPdQsCBSPHj7t/myL/2XmaXe40e/zc/+D7/Al73x5QA0Xcum2rLZbTk59SZ4OuVkPiWydm+vRC4y3vW+L6X03jhl2xCrEKWC0cvk9vJV9LBlnqZjEtn3PTIRFIV3pN8YovCI4+Vt+r1AiFLkseL2zTt85s3fA+DGzYy6btmuvcrRZEkUS3RfoK0LZHSrKLY1TbVzfhGA7hR1VY7+ELrrMUNLkhz8R+J4TlU1pBN18BLrOybxBOmFOBI5kORHRKFitXLk2OlsRjxNQdhRgeo3/9k/4QMf+BPMM2+8GzpVwEDG1D6C/mPvfYN33n6A6gU3F
6f+fia8due1Mak2tue1e3eZpslokihlwCu37nJ5eYmaHTbJAVgunFDEk6f3mUwiBgO7rVf9ymY8fvIOiI7Z0iX2D88v0bpF+Xeo1878++rqjPtP3dw4EnE1PpOqqskmCVW9HTkCxkASuIBrT67eVFuur69HWNLT80fcuHEDo+UIQy3LgqvNlqapRiEMlIMdjAaQTLh7awEmxDSehxk16K5lOg05u3RBymx+A2MMc08OljJAGHj0+D4nR/fcvLNFEdJrS12755JkU9bbB0z9s0zTIwZTUjcVYuQfNaiwp2quRngvYkvTaaYzb9bbhxhbI5SAfk8YTujakjg5CPbUdcN0MRshwGXdEoiQ2XQxksDd/eQU5YrO/yzL3KE+8ep9q+2OobfkkzkDHnLYDSBDVtc7VLDnmVrW60vyifc70wVNf02rO373My7Zmea3ePjwEkvPyYkn7D99SJIuubh08N6joyMG09D1Bz+3sugJZEQQCKeShoN1V1XBznO+7r99xsnpgqYrkfsEgYC+0SgZkU69oIRxJGblvZ/CxNLpmrrsR4+u7Xrl15Pn7AYhg+mJjKTYueeplPO021UlnQ8QdFkzmcwOqnO7jkhJrDKjeEXVbambAiEETy9dIpokCW1XIL35qQojrncOJnN+7d5/y8ByfsT1o+sRthKoBKMNSerV+3SFtYbZfELh/ZWMhGiEEbqxKyp0HWOMm888n7Ha1CQppIkLgLblNWVV8hX/6ldy4Q1Xy+Ka2TTjU5/+bcDB4aIkpSm7Mbma5hMGo2l1SbVfe42FNeN5BQeF2T3/tqmdx1MQRGNQrZRiPltifVI2y3Imk4zpLOX2TZeYfvLXfo3Hj59ydHTCF733PQDMj27xRV88GcVPiqLg/OKCi4srzr3oRFkVxCohi2cjN3vPT4rCvVljR1NX454IEEYJm82GKEpIfdK6Xl+RpumYhDpIvYMGjklnP5AkCUNvx3OsbTfk2ZTOV1fiOHUFvM1mDM5lZCDoCHyBaRKE47PTvkA4dIY4yp0fjy+ctIMmFIzJXZxGbg9WMXnmzqeibmiHjuPFfBQW0G3JfHaM9QHVo0dP6HRHmuZYv877tqE2mqLYUnmPsDh2vlT7fTgMQ6Q1aN0eRGgkxLGgs9DqA1crjqeYvfpvKrGNBdGN4jF6aAmCACUbjF8v0/mcrt9weXUQG9Gtpo/r8f178rQiCCWT6ZzBF8esFWyK3YEaISxxFLkkvvbmw9saK1YkScLveCikYWC+mI3qlqpuuXnz1IkW7RX2hogsy2i7s7Hoo0VKUayJI7efvnT7hK49Y5JMUV46duglgRVEUTqqGvb9lvl8ytoLWl1cXDCd5djI8PSJi8OMMdw8fZXtthqhnu7VaQgDt9+cHr9KP3REoSLwBVARWqaT2xhjCE3l5yYiiXMmkRcbiyPSowmRTEbBJdtVCCUxPiavm45omqLCaCzwRJOYE3kDrB6fi25a74Hqiw/K0A8tXXOYKxFm6EEydAO154H1fefi0cm+UNQRxzHNbkej90XumOvLC5eweepMHMc+Bve8tzwlm2ZIDnSX+XHOohw8dcervcYJXVvRD16EJpqyqUvm03xU6xz6lpdOb9G2DVHsPisa/n/AuQI7YlWVCpzc6TPGttJKpIBOMxIGgyDw5rA+YFCgjUEKwdyT49nVbM9XpEFC6z9rEBIVBRReEGEiHE7W6B5f+HCO58IbFvvyVRRFNE09bhgH9cHDtTdN45Ovw7UHQUjT1M+QXi1JElOVNcZXvNtmQEmBlYLGcwSUNK4y5QOwMAyputap/vnF27W97y45BUJw8tN5mmB8deugZHh42mW35vy64q0Hjs+hvMS9EILcd3HyPGc6P2KxzLl9213D0dERm/U5v/br/7u7d7Pj5ddC8v6O767A7eMjXv8TX0ZdtDz53D8F4ANf+iV86au38RQldkXJ+dNH7JqK7cZhoO/dvIlNM3e/fiK2ZcFVtaXs3EaQpDl1vWU+ndF6crWIBF3XIGUwmg83bcNsnnD33rsAOHu6Zpo72Wnt8fmLxSmDrlldVGSRS/BMFyJMxe2bLgBM4gnGGFYrzdHUcXa224I7N25RtpcUW6/oM5kQhiG9n7s0SbheX3J6eoz1gfDDx48JVMLp8Ymvgro1lwY5pe94SdlyvDwmzxKOvDBFGgZMTo6dAaCvqA8v3aKrCjLv6j5JI7JIcevWHVb+eXZdx707t0nTdExEJ5OJS7DNobK73lwTJTFPHjuCaZRmaAybZjeuddsK9GDYbT8DQJaHXD4JQDbsvGz20AsWRzFVYVkXrhtp0SR5OHZ/jQlptOT0zk0ePHTcpbK6dNeVuHspS0scJtT1iq5zCZjuLDbNWe22zL3suWFgMp2O7//8+ISL1ZrjI0OS7PmNkr5v+I+/7W/x7PjY9/w76NCtqVZrql3EZBpy++UvAqDWVxwtFpRFz41b/v0P5uh2S1W557Sc36GuKhYLS+llgQkGlMp492vvhsCthevrK1p9hah9UYYJvTa01UFQptGaTAb0fUMUufdYN5o4vEnfel6fEQShoW1qPvpXf+65+/nO7/5q1n7ep4uc9eZ8VAEUwkkerzZPWcwOpoddI0iSBdutm6vj5R3+ow/+CH/Q+Nbv+rMApElGFi84v3QcGHdhA1EgSScuQDm/KJnkS/6r7/25P/Cz/sy/9zrgDrBi147Fqvv33+G3fvHyD/ybf+XffPfYXa7rgsVygvJJ9nF0jNY11gikf566GwijgLbvRiNcITJ0X9PpQ5I0mYbUZc/EV42zvPem8J5H1FWAoO3tKC9sraUdtohBHALDOGJXXI3/zqIUM0SEQtL7gAErmaZL2rYl8e92XVcoG+MdD2gb6FpLnASc3rzhn3nEk8cXLKYnYwe4LFqqqqHw/Mo0C9Fa83ufe8CsKcqfAAAgAElEQVTEV6ClhG3zvFpguWsYOk3tub4WZ6Q+m03Q/Z57VzCbLZjOl/z2p37df5Zhs9kh8MUrrZDCdTcuLtz7cOvWDay1PHrzjKlHUTgTTjMG+hhLljkz1DB2c3WyjAgiRT/o58SqiupiTFrWl4ET9tAtR0eHjtdn3/znlE3JL//j/83NqTzmpZduj4T6KPKm6qI/dB6tpN40bNqLUX11L85wde7u5eim49jsTXABdoXjx/Z1O66pMBTU9Y7OHASRTDdQVdtxLVhrGVZOvGKviKz7lqZajWfzphvGfTrN3GJIkhlNV3N5fek/OwUMYRgekDHWUrU1IJCe/zedT6iqhk3hFSi7gDxPSeKY2gsnIS157pKyneento2mqFZjd6TrneJwEB3sZ3ZliwoEy+UJx3P3/hdFgVKKpnXPsLeWQAwoAkzrC8pNTRCHZFlE00h/fzHWMPLltrtLJtGS1fYg7x+FIQYNoh/PMBUUKBGM53/XdU5B2f8HIGJFmkXUtSYM99yw0p3R/b4daWibBmuCUcq/a3uCKGboD2rKUsFqtRm7YoNVPN49ZDYNRxsYqwemWpJPI/Y+JHpX0JsSXbp18PC8om1r6spgfPU4jY/QHc8lon2n
M2mw1RFJEXal33nYHnmZT1GqmLMKF3BMJis1VwwiCyyYuN4gUfikCeMo+9X9+w1UnKfH5EUW1ZrTW01lOFnl62zLufMHz3fV5879nAfd9uch48PGO/3en3fonvzqiqS8pWJS2W5TGbnPGHf/gps5k6ByPfo23z4Y7u73eYjiSMXHTdFG/eErkn5HuTiVa9FiKmqmoiV+3Z/WYPRsZm+yVRpPbfKI549vVnxOEZni4QNMWSo8kj2l793rzY07cuYeRwdaXikvnCpsh6DBo9lmoMbq82nByrz39x/Qmea3N+/IDlRkEcF9MJd7drTM2znU0X2PNHeBOXXMMzy7KmNWpOTp7iuGpubLehqU1OjvVBKRqqsiEMJ5SliuNXyy1x7CJEPnDdRxN1R3e6MG1ZBp5vU5RbLq7U+T2bPMIYmTQFSC06NxtPaBuB7LSpeNZDdMRoZqmGB9AT8/jJ+yzXl7i6QH98phK2P+v5qUiuhGHy6WfqgvF8Uxs0CppGbeCTyTl7M6MKU/Q9j2XagIHrHiagw/Y88qyn11l/05Q8f/kZo9hjOtNYzUJS1wwVm3jqcnV5rWUx1aAVZYYwJNvdHedn6vIwTIX3PxiGZllKHMcIw+bVpbqE67oj8COyMhsObt/3afsdhV5MjezBMsnKll5/nh071HXNPi+Gzgq2Sd5kaCVRLNvibnOvnME1X2R9vcJ2PIqyBp049X1LXbt0ukLruSNsy6Rt+6FbV1cSIXoMXaIVRodhqiTkYOCJcJVoR9sOogFpkhOPg0HZpmk62rbFMHo8zcEwLIemrxCmpCkPxqYqGSh0MoA0cV0bSY+jJQX7piVPMxzbR2j5+V2SUlfdUEUSRkvb9rSlpNNV4t4sCMMQxw7otClzlrb0bT6slTRvMCRstxW+xp1bVUfdZhzN5qSJVgJyDRzTwtElzX2ypq4kruMPBMw022OaAsMQA0/B9Wz6vqDWal6eO6brGlXt0hMYRWN2yRYpBb7m+xV5hYAhyDY8IX3lAAAgAElEQVRNG8/zlA2Bfq84HiNMhb0+kFwFLkVZDIIkbVlTlCWOHQz7oWmh7yosyxi4IL3ogZ5OJ/C9CXZkI8seWx/mWZZhWQZJlis+G9B1PXme4uvkwzAdqhqQkuWdCmxsw1OJdWQhdILnuD1x+HAI9KPYY9WBZ0dEgXqvspbItiHQAW2elzRNTeAL4lh1MPoOHMMkHoUE7gH7DpN4Clr1SHYdbdVhmwzCGweFwL7vB9y3aZpkWUbo6wKBEMSx6hAdTL1tU2HMjxbzIREWQpCXu9cV4qbFEhZ9J4c9Iw2LsiqwTCgrdVB7vs022RB5B0XBDX2n8OcHLHyWJxiWRVFVrxNhYZAmBZ3es66nSMRN3Q4V7ru7GybTMaOZP/Dc+r7l4maDzkvVHjcl2/Ru4E4awuXy+grXNQfRF9+3keQ4eh10QhL4iodhajfHvlUJbtt0eFoBKk3usSyLXJP6w2jCdl+y3e6HIkwQ+Limz2a75+xMST+PYpdXry4pK1//Poumq0mSHtvRFX3TRghBUaQEmpfY9xVZ0jLRVeqems1qx8n5Gf/ef6TMf48Wc/7SL/0i/8Xf+a+Ggk4QRzRdi9SL4uLqml//jd/AC6PBGqFrJcKQ1NqocnV/y8OHj7Etj/ulup8c26QsS9L9mp1WTX369C2FHtDmkpQmYWRy/ep6OLvOz8/JsoyqqoY1FUURX3zxxRBIHR8fkyUpXdcNalpFUbBZ7xTB/EjNu+u6YBg0h4JEVZFVNfvNnvkTFWTPJ3M221ts1xkCTNHDJ5/8M87PngKwfVxjOhWmOR2S7Koq2GxvmY4fcTRTiIyy7bEsi0KfSZPREevlJavVinfe0xLcjsEi9mlin3SrxrMpcrabHZ98rr+n49O2FZYwsX31t3Zr0DQv+ezHazpTGQTbvk1SlKQX2nri6QdsX5U8fuwitIrqxP2QVtj0ZoanJc7T7ZK+7emFihs8v8G3J3z45D0aVMBeVx15VePNTAx99yRlSp7t6TQf1zB7kuKG+/X1QHI3DAPHnJDkCfOFLn51DWbnMx0fuKHw4QfvcXn9Y0bxsZ6/lF/7V38V1+/ZquXCybnH7W3J6eIDvaZ73vvGjO12xf+GSvD+6m98TJm/tj+4uHyBaddMgzHadYUg9nBcgzBc8MWXivPkuGOmizPuNyqYLNOEtq0p0oxOS8hPJgHSari7B904om4TantNU2suepnhOpIgGOHqToQwHTzHoai2nJ6qfdw0HfvdfgjgkzyBDjzbw3IOQjgmQRgisPC0kptt2xiGQdWo77nbbTg6OmM0dhE6CV0sjri6uqJtg4GfOp6c0Xu7AQlgWhLPU936gYdt+1iWSZoITC1aUNV72kbgOVo0oe9p2xTXf404aRqDrNrQi5bxVI2DaVvMjiZstQLueOJQFDVFWVM0urjp9JhOx8mDObnm2k6nU3xPkOjzx7ZCqjphu0s5PlP3WpIuuV9tieIpW11gXa+2WGZDrwWKwniENDqMe4uHD9Q63673uK5Nma+Z6vNb4JBlN/iWmpf57IjlOuf8dESV62JqP2PsBbiWORTfTUxsu2Oj0Un0Hb4fsLrfDAJvTVkxigKSRCX/oRdSVRWLuUFRqb326PED1cXcd5wfK06n5bQIGQzc9K6vyauGbPt6/exfrdndXWMxItTIHNPrsOwJda6+44OHC4pMUhQV51r9cb/PsG2b3W6HF2qOFzVN2w57Zj5/yHq9Jgwj8iLRv68hcH2CiYOrxY6aKue9dx4NiKw8q+gaQRh37HVsaAoTS9xxNrfIMy3YU+nc4894fjqSKyFpNdRtNJmAaMnzLZPZQS49QbYZWZIPPlNB7FA2xSCj7Qc+s9mMIm949qWCE4RBRNl2jOdga7hLltfgpGwydYFauBhuhhA2jfZAStMU3/cp244Xr9RhbrkC27bJCt2GtSzyusY0O2otdWt7Lrs0wTB7fC1ysU8S+k5SH6KdZE/f9/TSHIJzw1CQO8ezB/hLVfWUVY3rqPeRdISRQ1EUFHofeF5AkeXMj6YDsbCuS1zXRgyk0JamrHEcD6ErboahkjWkDsSbVh9OcqiUe35IXpWMxzGZVpYxbeilHDw1TMPR7uUGte5cmVJQ1HsMw8A0D54SIW2dYw9yscrnyzCMwXm9aZR6k8AaWuK+Z+G7BpUmWwahz36/JZ5FQ4W9N0xcx8MwLJL9QfxDdU0OiWovJY5rYzXh8DdhSBwnYpekBDrQz9KaspL4gSbGOzaWEAjLYLVXN6NtOfStg7ArOj3vhiHY7pIB4tjTU9UFjg29Tnpv7+9wHAfPjYZ5932frm+GTqSHS9tAmud0utKKIRFCyc0fPLqSJCGMHA4qbpZrMfaPyLNmUDUSpoPoD8mpVhU0LXoknn1QWuswO0kj5VAp9zxPj1EzqG51XY9r2XS9ujjqxqCuOkI/ItHQy+XtM4QAyzY4m6vLMd012G43+HOs7kuqskGSIbJQf56L5bqD4ItsDXzXw3EEufYfk52lO3wppj6xDEON00H2tWuhzDp6txrm4fCM4
imtr+WomxbTfu1Xp4LfHsdzf2I/mvoy7wdFsTRNcRyHVsO1uq6j7UxMs8PxdPVVmnixhyHsAZ7h+wFN01Bphb8g9pG9gRBiSIhmsxlSShVQGweVuIquB9c7+IGoAHsyng7+GqZtUdUNwrBJM/W9HEcpc/XNQfTCwDAEQggMXSXO0h3CaDiaz7S/Dci+pW1NmlrtNT80MJ0jjNbB0HLNiJqsypjNYkqtdCrwCQIfy1ef3/c5vdkwmbqDUINBSJY+Jw4idisFwbFsySQ2qBJFcDdsB0RJPIsxxUHcqKPvG4JIIAdPmYiTd0bsVuq8S/Il/85v/RaPH39Ab6vf/Pf//v9MWdb85V/71QF2KHuBa9tU2nMl2W3xgxA/GFG3rzvsURiwutNWE6t7vv3Nj5Vfixb6Eb2BKTquL28GeeazBw/oOjlU84UMWMyPeP78+aB8OJlMWG62uvOuYYhty36zHqrdcRxTFAVVVQ3BFSjVK9M0OTvTsLWyxHEcLB1ExKHPZ8+fIWXHA61kJaSAPqTpWqJIraHLr294/71zPvrZbwNQGy6+b9A1/QEZyfXNPW+99Ziz06ek+q7rpYEbuTj6HFnf3nN2dsLP/fxvsdtqUr9r0LUZr17eMI3U/ncMweMnb/HVMzWeVdviCpe2rQfocF81ZP2GuqrwdECb5A2mtx98b/abLc9ffMlybfOND7WXWP+Ks8UxVh3y6qXqIJ4enbNNntP0hw5/TBCMeP5j5TsI4IQCmVp4VsKrS5Uwh6MQ0RU8PVeJaW8+4tNPP2URh8RjtR/vrleMI4GL4GyqgmNER+CeIhwtyZ/UzMYTTubfGRAZjh/gWg5CmAQLdbem+y2eawziDnUDebnCeq0OT9dLPv38s0HuuSgKpBTYLuS6hZEXazzXxbTNQcxluVyz2WbcaquSVijIoBf0+Fp9bb3ZMImPcb1g2P+WExCPXLZaWS5JC9K8QNIN5+ntzS3T0Zjp5Agh9euI8Fw5qPLRujRdj5QF+40+k3yL3dWW2dEI2z4UwwSGYXPY2B+89xGBHyNFDWhxHEvy8NEJfS+pKn2+tC73yzWmvgjOzx5ye7vCdV1GI9Wx3G1Vgc42I6Shz0qnIwymSqQDFS8Ks+Dm9oKJVsqUtNR9STQaUx9sB65XiE5iG2oMxuMJ+/0lpm0xmaq1v9svCYKAvpVDN/L89Iy8SIk1rWQ8CVner/FCi8nM1mvYYjGbIo2W3VbTFfw56+UtSX04twyQkl6a/OgLlXg/PH+HydinkzteqFCXo+kD0rwmXqg9Y5kugfMI3zEQzcFbrMI2bXy/UzBxAAyy5IYTDVEvigLTNAn9iFgnVxfPdkxHDxhHap3nec50NOHFq+dYlrofXadmkz4jDo64vFdQViGU6u9md68/qmabVCy8R7x8qeJqW8SMI5e6KBkFR3pt1NzdLYkj9flff/2lKlJLF0d3z6sqZzQ6pm1i6kaPVdfjWAL03RcHIxzDZzQasdmqxNCzQx48eEwvM24v1OuevveI3XaJ0F3VeBThNBJhl0j9+06PT7DsjrIsqW2dsJc/oUDzL3jeCFq8ed48b543z5vnzfPmefO8ed48b543z7+E56eic+W5Lr/yiz8DgBPY1F2CH7yN46kqwH6/xfU9tsl+INXXdUldm4PT83xxRtM0eCF8/AvfBGCXrJnNxwijHaQui6Lg8dMHdHeaIGnXyKxEokifALYfU7VKlrrQWbCocwT2wK8ajSbYdoAQgDhIs2bIDgzEIJcaxzFpkmEbh4y7oqoq4vHoNYSrUTAQ06ypm9eVcct0BmnkpimIohDDMKkrDbtoWoIgoq7bgyo38XiCZQh2u4Ncq0C0Nq5jUGmp8k7WWELQaXyBYUpMC5qmH6r3wjeQXU5Z9ZS6OxEEEXXZDF4YXdfiOSZ931Nq6XlpKgM4z/OU/wzQNQ1pmuMHB+6Pqr76vj90NWzbxjQFVdkMkM2+7ej6hnikhRuahidPnrDdbg8q4Th+BPRKtt0/CCe02LYzjFPf2VRNje16Aw+nqguSrCAeeewyVVn1/RGW6ZLmqkLTdAZ9Z2IYHY3GGLZthaCjrSDUkpz7ZI9peINsbhh6NG1F31uvZbOFBYZF2dQY2nemaissg4HkaxgNaZ7guC6mGQ6/2Q9CPQeWXlMBbVdSaENrxzWQUuL5IbZzcCuXCBkwGUXDWvR8h6rOybVEueu6tG3//+N4lEVJ27aKOKo5VwIIfB/rABlrWgwBdbsbsO9RFBIGE7bbPbrgTduVNKkYYBdZscYyPXx7jNTvLenI85+AQYQhApuuMeh0lygr1qTaxf3QkS2KAtd2htfZpo8dexiG8Zoj5B2I9TsKPQajcYQQrzl7tm0PPi4Hn6QgCLBdD9t97U1TljWuZ3GoRxnCohcmdV0fkGeEfoRp9/SdGIi3pi3Iy4rIPwhoFGRZgWEY2Frndb1eY1gmi8WC21tV9TNNk3gcsVrd67lSXe2qzXB1JdlsLcoyxwsDEr1mjVpgFia+tmvo6g7f9dhu10SBN7yXb5js9tvhDKrrmroyh85g10OSLWkbgXHwJGsTLMegbgoM1O87PiupagnVgYPlsLxaMl/M6PXYGXaL24/ZbF6LeLi2Q9u2hPFrsZNdUhH7Pm6gzyDTIssKgiBgrXlRril5+eMVH33r5wD4T/7tv8PlzTWff/0j3n1PcVH+9J9/l8ePHnByfsrzFy/1+hAYwuD6SkG4i3zLu+98m1E44U4LvNimgZCC1b26K2TfMJlMdDVXCwZVLV1bcn+35vhYVbzDyGO7SXC1SEpZFux2O+7v73nwQEHrpJRst1tG4zGWrdZQliWkacrTd9T3jqKI29tbXNsZhC12ux2b7Yr5fD54teVljeOGA/RUyo4s2fHg/JjjUyXqkec5huWS5XuiWH2v/XbLg/PjAV5cUGqbAEGhIapN0xJFirMnNU9ZiIAkzQj04sh3K26XG1qzI3IO/CrB7/zO7/Lt7/xFKv1enuNiWwGWq+9VL6MuGhxHIlu1Fh17z7d+9le4uFiz0tB5z2nJK3fwN8zSHdd3P8IpHf7Nj/4zAH7vd/8hP/qjP+Tx9Bs8eaR+82gKRR9wojtgYRhxfbtiMhWMpuo7Pf/6lmk8ZjKacPVCVbMfzU/40Y+/4Pj4qVqLXcavfeeb7NcVDx4qLkxylLJPNsSPH1Lk6vecPZjQ9TWZhqPJaMQ0sJFdw/xIzTvSoGthm37FxY36DuNJgO86Aw9zt7ri4uIl7QHvB/zuP/4njKJjPv9SdStOTh9zf5vjhjvqdqfX64T7LMMPXA74vqLMKasNroZPB6ajJOOtikzbM1iM2S0TpNwow1r+P/beJPa2JM/v+kTEmc+d7394U+bLoTKrMttV3abbbmO7kQdkCQsLCYmWWEBjBtuoNwgWIECwYcECLNksQJZYYMkLEINACMmyGoywwBPubldV15BZncMb/vMdz3xORLCIuOelJbe7JXsBKENKKfPm/d97bpw4Eb/hOziuVhqcEXv/yOvdgctHT7m9eWA5d92z9XxGWRYI0Y4cSNihjXjj
gWgMxaFCSsXjR+5ZPx46qqFgaN542FVNRaTOSAP3DG3vOrbBA5M84PbWPetBsCOKA4RgFNqY5mfEcTpa3XRt5n+DoW383qwsWThFxVsaz/81WrCv94T+rLXqiO47Li/eIvPdpuvbG6aLNV0rEV7s4Gz9GCmgrd3+en+dsN/V5JMJxdHNgRITDjvNPM+YTdzaC8KMeZyM0O8gUmSTlKZraTwCZZqfIZVBKsHZuZvjPJlhhpaZh1kiQqqqIMsmYxy02WxQKgRRoXt3/16/2jBbZSNn/8tXnxIwpzgmI984TQuSJKSpDZEXLptkktTMqDy8b5LNyNIlm90t+wcvwPJ45cSTvEl7Ek+5ub0ly5IRSl/rO9q+5P7F1Qi5f/rkbReHeE57FIWs1xMCGWJ7dz/PHj2lKHdkc0XXntAXMd/5+COOHjFlyGm7PVYnZKmDRmfZlL5vGHSN9NSLxXxJEFoKj3ipDxVxnHL14vVIy1muM9q25dMffe64VThIeldLZODWVF0KkrgmGJZsvQ9bXC8Y9h1VuSEI3e978Fzy32r8vyK5GozGTNxiLjpFWe+4P/aEHpeUzTR3r+6ompYTFOpQ1lRVwatrt0He70uMbVHBQOZdq7thT5rPKYpixKdbm/LyxcB25wLMjoKyrNGDGM3IAAKpUCqk8wFCVeyZz9djYHy/3RAcdyjvzu3+yHoIDiNmvigK5rMVXmuBrhNMJhMG3RKfHnRjaNqGOAlJfWIhpaTvB05CFVmW0XcOSjd4I8V8kjF0A7tNxcUTj+M9lmCH0WMqiiz1MCAEY6DvoE6GxsNagkCC1UwnMY2HZ6Wpou0CwiBhNvPcLEClMWXpjUfjCFBo0xInHpsuBEYqMHZUOsQI8jzFI7GQWIwR9G1DeAo6hx5rDYGKUP6+N8PBqYN5tbKq0hwPHdYq/B7Nw+aGPM/JVDpCxJr6iIwFQeThISJjkTn/pV57zywryHLlglzlPbralpZmFMKo6xoVJPRtO4plKCU57Pek0YKDhwqqwJImc3oPjRRTy2w5oWuh8vCQOImwQtI0zQg1a5oeK+y4+UkCjLX0pqfwm7kx0OmB4347FhJCYyj2xciTGNqBsqxJko7MqzhZE1D3hs5ux0O8PmzJ0phs6n7vcVsxm66oiv2YAAnr1v7Q9WOinUTOuaIYFS8TsJI4fANjtVYz6JrVeoLw2P4wykmShMbDWIRckicJQRDSt6fvq5nmEwJ1EhWRzoi3KcYkIs/OyXOJHoQ7VID18plr0XuFv/u7O6bTDG0Nq8XS3093/5erM4xPUrASGUDoFUWbpkFKiTb9yNFyRoSCtm0JfPI/nU4xxpkIgjfQljFah2T5SU0vwJqWsizHpOxY9MznS793QT84sRDHNzjBib2PyMPDGOxUleOrnBLFKIoc76uqyLxYhggU2XJC0WyZzU/PUQuYkXOZpjn393dcnJ1THf1BH2jW6wvMYGk7z2VYZaRxQlm5wEbagNVq4FjsWC0c73Top7RtycNmx9QXWIq6p9MD0mP/MS1hVmGkQvln+1Ae6IyBvKcSvjg2ZPRtS+w5bEmasEwXHEqJjE48iR1CKq7ut+N++tb77/LuN57y3jec4t2PPn2JDiqyWTZyZr788lP+xD/9R6iqYfTWE0qwXDzmb/2N/xWA2/svuDj/4zRNhzh5LvUDcrBcvXYJ2WI5YzabUbfVaB4NYIzmuN+N0NLpNOdYtGNwHMeK8lhxe3vLT/+RP+rW53bjuJqK8e9urq4pi8MI99vv99R1TZIkIxTr9csv6bqO/PLyDYe0KEizjMgnH7rtqKqC1Wo1ruuirQhlQBwIag8ZFeHAYnqB8PudVClKWPTQkvizqDwWbPev+fjj3z3CnutSO78xv0dsN7d8+pufI6MPePYttxZfvXrFz/zu30OkAnovPjDolk9+8pJD51Vc4xhQDJSjoExd7zFW0aMw+IC5iYikovdqobvtDe9+45tMV5f86t/9mwD81Ed/mPnqMd/91f+ZTrnCbP9ii7B73lYu2Xp1f0PXH5n06zGh/cb775OHUzpz4Od//gMArJV8+PwbRPFJVCcmECsiDly/8LzaaEm97zCdZb12Cdf9XUeaZQTS8UemeU4W5nSmOeXGrC9CXr3cME8fw9p/Vj5wfVOSZe7MDoY7Hq+e4vKFH7nXdMTh7kiCF8YpKmR/JDEp0iemi/yMvr/CdB295//ooWI1yym9uJLpKy4WGWUXcyzcs3d5lrOvKqIwpD4JC2UxfbvBeJGGWXKGqQXvPnlvFIrQveZitcTakmji5kCbmqYZ8DRJHj09oygadMebJFAq3n5riTUJF95Qth8Khi6l9gXQstkTqYCXr15xvjrBXztMLzDGqRICdK3jPJ6SJGME1lrquiOO3hSKQhURpxHHnQ/YswxCPfI594ctcRyT5rNR6Gs2z5AqJBERBy9kNsSGvu4ZPPSst0598ljsqeoTXzVlMplgkDx4rttuXzNbKDYPbn9drmZ0umJ9fsnBn6MqskiTUFZHYq/8ebPbIKRTOgR4uN9z/viC47bkkeerrtclcTin6cxYjFvML5nNlzQneWKZ0hvNy5e/ycW552ElAS+vXrCcv4X0z8PDbsvl4wl9eSoeH9juSpQMMV6JGwVpEvH9Hzpe3+XlE6JsRldq5h5yPNgNaRTy6L0VUfCmcHrsG5bzlb/nNavFnCiKmOSe31TfkmVThr4fffQm05yyqUk9z/3lyy0ffvgNtpvDKDqXZwFt3RGpcOSGC6Bra1Ifiz7cFaRxxiRL6T3vWwnNzesrHj9+TF2dYvQDZ2dnXL12ucR2c8XyLCfPQo4+Dqtefp9QZjx+/Jjt3vuprf8hBS2EEG8BfxG4xImJ/QVr7Z8TQqyA/xp4B/gc+EVr7VY4wsafA/44UAH/krX27/yDvqPTPXc+APvis2vK45G+jch99VUFPaHIubm/pvQHRd/35NN4TBhe3T2QTxLSTGCtUx25u7tnOlsQqozGiylYHdEOG6zyfA7j+BwqCkcOhBCCOFJcXX1JFrtg6uzsgsPhQO8PUCEtIghQYcDEK4ptH1rOLi89PtonCHVH2dQjzh4c/6FteoLwJEMakmUx6u8JVg19374JoAeDCgRWmHHRKSkwAazO5iNHJ04kRguUV1FEOH7DMHSE/oES1gCSicdgS+k6H1KBpx+w3+8JZMzQ9QSR7w6EkihKmXhy+fG4x5gAhHqjRCgEoQw57vfMTkpreYy1AsFJBWxgvlhS1zXSl/3rpiefpPS9oSxPJELpjdzemIMKYZkuciebCsztkiCI6Ds9Gqe2DcTJwHKV+XlKSCchQdRSFu4apkFG17gEferFFNp2oCiPxJFbd0Eg6PoH8jSl9MpnfSvJkhwhhvFBT6IUPVRjV+X6+jVvvf2E8rBh8IeeMa7LGEpF5CvAUT5lt9uR+IT9WFYkaUqn+1EgRAhBWzesL9YoTkItlvnsbCSTChTrxSVVfaT3OGCjB/rOETNPIgl6sBz7ltQHwoHKaGqNCuQYvDqunCKKIjqfECyXS7quI2Puf4tFa0GWz+g
9lyhJQoS0WGtQnsDRdTVKTdCDT85FR9u2nrPnu4rdgDXQernfST4njATzdYg2bgNO5Rprd3SDZpbO/HrpUCJkNvNWCSLE0pFlb7q9rS8CSCnHjsm+2NGWNZFPxFWkfNcvQfpnr6oLwiBxQYNyh95kGpHE2ThPfV/TmOOIqQfXndhsNyilCH1HSAUBd/f3rFYu4MviHKxTk+y9geZkMiHPcx4eHkZRnSiKaNuWy0vPMWlL+r7n4uLRyJPYPDijYSEN8mQxELmDabd3gU2wjrH9QFvXTHxybq1mkqXO1NPzi4QwnJ1lhJ7j9fTiHfbFFY+fvU9VuPt5c1XQ6oEgq6mEU+YT4WO6vkH0nivW1zx6ekZdlzxsHO8jTXMGG7gOa+fVl/qWoTcsT4boekCFhof2Sw637t6cX6y5ev0lwkz45T/zHwDwl//yX+W//R/+Ev/pn/0v/KRLLAGzeMkPf+QC0+VZzu/6zneo6w7ln61+aKiqlrt7lzjFqWU6WdJ0LUHgu1KDwfSG6yvX3VotZ4RhSK2bcU0FgaQ87tjvr/noWw4hoYRAoDF+vUol2WzuSZJk5FNtt65L0HXd+NrN1SuePXs2KoUNg0FrTVmWIzf07s7xmy4uzqjaEwfKUtc1gV8rXXUkVAGrszWVXxsmgCAG3fbMffL/a7/+N5nEKf/47//Dbv1sb4jjhDxWIyLj9dULrO2ZzRaUBy+K0CriUGI8N/V+uyOKJWerNZWvNgdxhO4ksYJbX60XCvLJHHWy9xgsVdHSGUtT+ADTJtRty7EpEIE3V9UJ5aElVO6z9/srPnr+DXQQIb09y4uXn3L2/F3+ifN/je/97e+6dTabYPo5rx68gM4sYX5+TlcJjn59Hm5CyuYnzGcXtH4tfvH5C5QK2Nw7M1KLxvSCxhiatvBr2HVLpJSsVi5YbDvDblsS+sD/2VvnJHFIGoVIz99YLGZUlUSolok/t28+36LtQFG6DunuoUcL85UCk5N1X62mWHMSoaqYz1P6SjAM/hynILQarSXTqZu7strRHGrq6lQkjakOBYOB55duD1JK0YmApmiZLU7iSgekrQikK0yt5wuCICCJAxReECGKqMoj02lE4xE9j88/IEr6kYMZhwsu307YH6/pGvdsny0saW64OH9r5CW+eL3DmhblBYmWi4im6bhcLxA+psvTkGEYsNZi/TlTNxXYhK49nXMldV2j9cDR88XOz89pG832rsbqUxGtJowE+52Pi+yULJnQttVYXJHSCboo1THxHajN9s6hIbRbi9tKup8AACAASURBVLkCrEJbxm7vbnfPfD4njlNXeASULCmbeBQfskKyfeiYZBGHo9tzJTlVWYEaiL3W1zC4IuVYEBGKYHcAq6h9sfBhc8+LL3+DR4+XYxf1y5c/gt8wGF88nmRr0iygqgvuDi4eXm6W3D9cM5vsePvCq7QWDY2o6bqTlY5HbkQT9l5Z8W5/RVnKkXda9BvO1hf0Q0Dsu0ZlU7I/FiiVsyvu/W+uKMqWyzPXxe26lpvNDU+efIDwyIfF2YwffP9z3n3+PrWX1//y1QNxnJK58JGiEDRVQt9WWM/H63Y1i+klx6+g2c7OLjgWzSjSdLZ+TDbJ2e3vSHyhb7lOuL7Zo3povapgU3ektSWbuPVadwdCmVPXd5TeouZsdU4gUl5/uSWfnxBZ3kbktxi/E87VAPxb1tqPgd8H/LIQ4mPg3wF+xVr7AfAr/r8B/ingA//PnwL+89/Bd3w9vh5fj6/H1+Pr8fX4enw9vh5fj6/H/6fHb9u5stZeAVf+349CiB8AT4F/BvhD/m3/FfBXgX/bv/4XrWvB/HUhxEII8dh/zt93mGHg5Y8dbGfQPW0Z0PY1g+9AbXcDebpCyuMoQ6rilLJpMF/htBzLgftNOcIuumFOvSlgaIhDl7lmWc3QdyhvDiql40AZ3Y4SjhKB7jSr+Yq5r0wXTYse5NitCKTrLrVtR+HlN40NOBT7kS8FIJSEwY7Y29lsxnbr/BVOfAete9pBEzEdZaul0l5y3WfJugWliaKIvvNV6unMdX8kHkLo5CiTJB31+aMwJwg64mjiPKMApWDohVecc9LlTdMyDAOCzN+TjvMnMUMvML6iHwSS4ngYeRnLdQI2QOsp2rejt9stkgwlAjLPsdK6xaJITv5KUYK1Gmu1x4tDEM7JJwll0ZDPfAehKlgs1txeeb5FnKOt4ssXt6OvTxS4itegKxLfWs+nEhm0ZL5Sb7Qz2q2PgtR3LOr6wMXZOUW5JYp9N6JTXJxdjH4SBojihENZkHmIipIZXdcQRhmPLt4F4LDfIoNgtACYZkua0iBRPLl0vhPt0HPYlwgkQ3/y7dIsZ2fUXtEsiKFuDsgwGJUdj4eKNM3RvaU88WqUJU/mI75bipChl6RhMiot9l2P0oZAhDQHtz7n87kzCK68ZHxigYEgiN/I0eqB+WKB1QOTmed9DS2D7jlbut+y2+3Q9FTN9o0pZNNQFBXTyYK5N1dshobrmwcCr1I3n2ZeRUuNOOyuOWIjRpjHYHdgQ4ZegPeKaYdr+q5CEHGo3TovDg9Ouj7wXj9pTt9X3N7uRvx//BV5+ROfCqFBCYLoxAMbCFVAVZVoz53I85RABcwm01HyF+tgoye51qatiWJL3ey4993QoVNEsZvnwcuXB6ElCAIK//xHsaI6HLHWjt2JruvQWmOtZfvg1vp0OkUqjfG+c11fEgQDTbMboZCTPCKOA9J0zm7vqoV1VfL220+5v3d752KR8PjyOXmecix2fg3nbPevybP5qJQlpeFhcyDPvGJbrQnDt0iSFV+8ct5Ju2aDUguEeor1PNN+ODJdvjHLlkHG/tDQ9zBbOjiKNSDFDjvE1L4CrURHkmWUtfd8229IctcN6TyX8NNPfsDP/cwf45//xX995Iv+Z3/+z/PP/eIvsvLQrC9ev8RqTTLJ+eLK8VNEGPDo0YeU1W7kF6lY0dQdP/nixwC89fYls/mKeigZPHw5EpKHhy2N//633vqGg2K2FdJ7tZzN13z66Xdpu8PYVdSDBTFgeg91jSKub16TJ+mIPKg7p2JZt80It3316hWTLB/RCofDgSTJMGYYTah3ux1VVfDRRx+Nno6D7tz55M+m17e3fP7ZZ2STKeeXrkp8aAdM23C5nnP0/MLt9oEPf/YXCLxkvNU1QiQ0XT1Cmruu4unTpwgh2e9P5rGZk/P2yM/bzYEsjsjjaISREwr6Y88siSjL1i8GS9f3dP4Mm2Q5ARaGOb2vbq/OFxT1nsPBEKS+C24awiSm8r5sw9Fwf3+HjnMuFl7ePxfcbK55tHzEeu32qb/1d77LT33rOb3vMhTHBfujIExCNr6j3zcV1p7xxa1m0CdT+/cxwwF85Zo+JchbVmqg8F2Gvg2Iwi2IgR//yPMw0p48n2Kkm6frbUG5bzhbPnO8OeCLq5+g+4QwsSO6IwodxzWK3YRWrUKFEVX7RiFSywEZJZw/cs9j3/egJU2wZ+7Nh+sC8ijDhnK0iJhOYiZ5wGLhIXKtJZvm1MeO3u/71VAQSsn87Bxr/B
ktBQGS1t+7+SKlLXvqQpN64++ibImijGpXsi99dynZUBzteK5uD9c83GrWq0uePF6M6+76akOx+4Km9ep9cko7dFjh1mZRCCQhYRIxNG5fPD8/48XrK7TuMR7aPnQ9WovRYF6qlihImE3m3G9u/X3vmM0mlNWO5co9I5aKwdQjmsbFNQN1c+TCy3s/7LY0bYmmJhCuO5lMIsqiRvp47uGwIYmWSBOSJa4rFc6N0wDoLZXvAKeJoChfc3XlPdCyhK5v2G1/DF59tWkcHL9uCwbvS+a4uAL87wujlNdXdxTbjvPLk52AJZsGvLj+Hkng5NnDpKMfKhJPCziULzm2IWmyomnc3zW3R+J0ztXdhusr18F/+tYzmusG5U3nu6bl8izhy92P2Hraw2wROwi79+x88brj4UEThJZ56V4bhgojW3ZtzU9eOHidVQ3Pnz/n7/7QSRqmaerOuOMPefrY2RCUXcvVzQ3b3ZHHT9ye/tlP7jAUnC2d+mOkMh62R6qiJsvcOs/TCTfXD0Rx7VQncdoFeb4ckR13dyV5PVD3NYuFW4v7XUkYSaJgwe2tOy/yfM6haNGeC5dnU47HIyruR52AbtA05oqHzRVL/Q2/pv4RmggLId4BfjfwN4DLryRM1zjYILjE68VX/uylf+3vSa6EEH8K19kizUIOhZuQrndeKwZFXZx09hW1HRisRHo+Tp6H1FWPsm7RKxXSdQ2TZEbk2/S32y+ZziO00kh/MAVyjhLH0eCuaTRpFlAeixErGoU5Q2+RStN6ye+hlyzWEyp/CIVxjtGKYeh4ePCCCNEckyqathshRlEsMLYdoWZdC1JkSKmJQ3fDD+0dxaFnsXzDKVMyASrw153mE5IkoW1rkqmHWVmcrwcQe/8Gqx0xf7lc+N+rOB57Z3DqF2HbNVjTU3k5etOnpKHCipTAwyWX6wQzPLBYPCXxwiK7ww1CR0wX3vdG9CRZS3lQHF3cxvJcE0Yl5+I5Brex3N/VpLlms/Ut62mK0S0Wzd3DSz+/huPReSXVXoZ0MlVMF5o0c0vrRz/6IUmeoWQ6misPEXRNwnQ6H83jrIZJsmbvvSkkBqkSkjBB+SW/nITEseV4ANO5RG29itlu98zXHua52dN0EcYkTH2i0XSavm2I5cD1Kw/LE4Yk60dPqziOORYPLCYLpl4CuHt4IE4TyrKmLdyBMp/OORbbkRs0zSOsjREDSOFNC+mJjQZh6eyJ8L0gVCXaiwi0LcjwgJADM+9ldDA10WRJrzfkPggzQ8qxKMZkpyoa0nCKUIrQQwezTFEWFWmcUXu8sY0gkJbNzkke932PkiHCOG4OgApDFosZZXnEDCd/M40K3kAArXDJcayCsbCwWr6Lph3xz8YYDqXjs1jjN25doY0kyy3Gy8GHWUQQBLTDad/oaYd76jbA+grM9JQo9DsC5YLcsuzo2dNp99sgxArJZDbFGDefAifuYlpD1bogcDKZURwHFiv3nsVqSls3hKFk7oMy4b1bpIxpvTRx3XXMFpejNHOvdwRpQKu7kZR9fn5OXTUcjnfMFifRCUl/WNCdvFRURZavqI4BUp18xDRSKVZn6egJliYhF48TbqduH7m+K1DJCiM1eeq4U01/hxVw9/CSD77pDjCjA+xwyXbnDuIXL0qapmF2vWK3dYlhvshomh1NFZLlHm7XRghjqRv3+4IgIE0nDhrpg3gpEoLgKV0/YKWflwo06QgrDeOAMBTcXW8Q3lD+T/+Z/4iPvvmP8eLzOz754Q/c7ZKGn/vZP8DWz6eUFqMDelPx4x+6JDCdCuLJgqLY45F6xMTc3L1gf+/26p/68NvESUBTKqQvRNlB8XD7Bb12cK0oPKdrDYkQI+RXo3m4uUGJA2vvO1UcSmwaoQNvdGpaHh7uOFunCB+UySGiMYZ8HlMe/NqrSqaP516UBwatsX1Hr+DoIbldXTGf5WTpYkyqAytAGuypAHPckeYJMgoJA8/ZbWvCRNLqms+/eO1X+ozZNOaw8wmuUig7IIRi5zfwpmuJooS62o9+hoNuEHFM6xON4+aa9focFUbsth4OFiVY0dD2MeXB7c3T2QRtFYE/d6I4pW4b0gBee1jgO+dztJF0yNGnUHc1k0hQ33tz16DDBFOkha73cNfmHKU0dXNk44sGSWiQgWL34NbGJBvAGNrSclJ8El7vXAmBFKdCVIm1dvTQ66UhCHK01iPk32gw9pyu63j/mz4QDjw32u8lVVGSTDRFb6h8EVabEBgwR0Pi4af3uxuWyyX33iy361vSPB/lxcH5UyqlKIuTxYEhDiOyZInynxMt3LWnaUrnjX7BnTEXnsfnfAN7ivqGhU800uQxQSAYjB6LuUJWdO3A5drFDU3bM7SGIA5oG7efPnr0jJubG/LknIsz931D09E2hkh4n7SdIsszZBexfe32jVZHzPIVTVdTlW7O98cvsVaz8gU72UviRFLvDCt//u52t/R1wSR/gvDCF4ormkGPUHPdgRlaJDGpN1yOlGZz/yVxGo3n6O3tjsV8TZq5veXYl2gdkucZzcE924lYoKKQpo/G8zANJkTziKMvBkzzR4ShQhgxGm9naUgUS7A9gy/iDXFMkqVUfp1LldBXPYbhjeWHCBF9T99bus57WAoFJkH6WFR3R4wwdKpkW7r7niQJYZCj1Fu0g3ttlp2jbcHMy5drs6dta1p79GvQS56YhnRqOW4973NzSxgJtP+cNEpB3XF/d0fjqRBXd3fowY6Qx7rqeLgvEEKwXHpKxdAiIos1nzB4iGHXdVx9+b3Rf0yGgqruWM9XvHzluGLb4wNJnKFUxCcvvK68dhYor72dwO/92T9AUe942N4ybdx1t+kZYSxpdUpTu7m7f7jm0dN3aPw+8vr4QxZyShbO2XhjY2sFYQSbXU3nryvRmv22GRMpbSNsVGHshMBbBzTtHYqc+fQJUnqtAv4RmQgLISbAfwf8G9baw2h2C1hrrTiVTn6Hw1r7F4C/ADBbZHYYhSMEUaKp65baZ5KTmXKBiA2YeC5KIANHqvcE3nySImWGNh2dJ+c9e/YMYzV6MOPCGKxhPp8jvfFnU3XE0Yz1fDaS0A/bGi1qsnTxFbJjQdMqpPGO2H3ALJ+w3RyZenLeZD4Qx05kwl8Wx+ORJLUsVqeOV8I0O+P+/p615wRNJ0+5eLSja+HxU/daVVXc3vQkJ6EIqSmKK87Olxi/eF6++JI0mSJlQNe56T87O6Ou65HAV3UVk8mEw+FA5wUswjCkHTSx7/A1bUnRH0jiCVadOCWSRxfvkk3sqN716J0FUp6NG83rl3uMSTm7kDx77jaoNHvM9euBH33vNc/edQfZ6kKjdUzjyfNBmGKDBNqOhQ8md7sNxrZYHEEcwNJzffWKQLk5+cY3HjHYiieXU7Y7t4mszxNuriq2m83o43O2uGS3O4yCIcMwEIVu8xY+6e21pTgcsWag81Xxw4OgrRLqo/b3AJIsJDD6TbeprpikAdWx4OLcVYl3+xuyJKOt/GdXAyEpuhfcnjaR7ZbpdMo0yWi8OV5bHunq1sknAtKsELYgC
hThyW9smhIEAVmWj9ywIHQO8Qe/qei+Y7FwJsq99xZaLnKkSYnzt9l5n4mBhvl0gfLsfJMN7HdH6IaRl3XY15ydXbDbFgyd98fKJi5B9TyCQRdgpfNU8yTiUIZEUYBCjL8nDCP2+x2J5/EkaQQm810y7ylX71DKMj1VFA3oY0/X6zefE03IwxQVwG7rgqvJ/Jz7hxfkfq0IpRA2Jcmh89y77mQqXTdM3ZlKkPVEYj0aPpdlQ5IoRCCx5rT2GyaTOcVxQxCfFC57kllB5ROiUKycD1BVcnHhExRcd7frGkxw8lMJqJp7tPeFSUIIVMQkVazeccnO1ctb+iZiOX9M27vAcF+XBFGPGdy8ROotbN+xXFnq+mTY67hwfbPl459y1zBJpxgDtnH3JU4CPvnxF/S2J0r8dm9D1mcT3n//gstL1126fv2abBFwd+eu+/x8zaEYSOOIw9Gbeu8bZtNz+q5gvzsZQ8cgzBgY7vcFiJIsT4hOiYXSdL3FYtlt3PwtFisOx7tRCGc2WfH6+ic8e/Qz/Pv/3n8CwMOm4W/+7V/now/e57vf/9tu/jJ49723RiQASOIkpKoaXrz6BIDvfPtnCQLHI228qtk0y7m+fk3tg/P1+jFCCJq2IEhORp8xX3z5CYWv8M9na+ru4AIPb0Y6DIa73S2Xj58TnIyMbYkipPUB9WAVt9cv+PAP/n72nhifZiEm0PRtxO2tqz0q1bA+ewS+g9KZihBDmqy5f/05ALfXn/Du7GPCOBk5QYEVWClHpc7tww1pErJaXvCwdVXjNF8hZEkYnnH90n3fbAqIhLp316S15XgsmU5zNneu6p/Gyei3dTrjB60JrGXrE6k8T1mfnVG3PcNw8klzyVrdlM6tGIfIUKEk8hyIYRhc0cRaQs8XkyKhLHsCaUZezaAtVdFQ+uD1+UUICLpW08duTxgijTDCJTdeFGExPyNLZ1xcuvl8/foly9mUunrDjZDaCebEcfxGVTSOsdZiTt1mJTF2QCpXnARnkgyuKPTG6NtijCHsXREnXdg3Hmb6jWm5RbuA2heiAnmkqUtU5JLzrrlHdoI4fhOO3d9XVBX03clLEZbzHGu1Vy2Fvm+JwxXT+RxjTnFQwmLOWLRYrxbc39zy9Mk7Iw/UmoDpdEpZlhj/vmeXa+q6xngxEmsq5k+WLn7x4irlcQO6cdxCP+faSPIsofP1AqngsG2wek/b+KLhIqWqDzR9Q+C7H3k6xdph5C1lyZKmqAkIKX0RtmkNs/gRdWHw1ozYTrozxvt6db0hzQK6ph05iBhDnqc0rQAv+rJenGG0RPf+rEUym8yo64ow8kU1ramqikm+HJOdquoQwhIGJyGjgECFWDEgQj8HtByKlq4zTL3Kn9aa4lgRx+7Ci+KADCRtM5yONYzSCKuJkpiF5/H1neZwKMj8D26GwflOZdMx0bZG0PcdWjNyJdtyQz80HB58cUUpVDSnrQasOBmwDxy3FWkaI7VH8JQdXWsYPIeuizoetq/RuicM3bm9PwxkceS6p0CWx1gjgICqPKGhAtqhQOt27KIqpajKhsjznQbdEiiF7Wbsjw6hkWVrlAjRfQ/eXD3LI8rjntp373/t+/83qZrz8UfvU/q9q2xfM5uuMJT0xu1d0STjky/+d9LshE5qiMOUQd2M3UIRWuq9ZDGdjD6BXW9IJ2oUUtttN8xXKfvt7Yh+GpqYLLd0zUDl0Ra6d92/32r8jpIrIUSIS6z+krX2v/cv35zgfkKIx8Ctf/0V8NZX/vyZf+23HMaYsfKSZTkWt/k1fnKNMQRhSCSSES4xGMssn45VgIeHB9I0RkiD9sqA2WTG4bAnCGL8n6FkRFPrEQKYRsrJdBjLNHeHpR0ks/kZQkh6v+jyyYzNZkfqA9zNQ4E0hmdPZrSeaBxFEVXVkEURMnAb+tOnKXFwznzlHnwrtxy2O54+D5nNfZWji5lMnnNz+wIhTtAkze/9Pd/m/s4FW21/z0//zIfs98dxA5cy5Xz9hKqQY1foBF1qPKE1yxxR3smnu81ABR1RGLNcecEQseM8XVE1zShnjjXsiyNhlmAa3xrtfOcs9R23/YHLpymrs5As8oHi1RWzheW9bwUs1r6DeLPDDIrQbxh1WTCdndM0zSjb+/G3P2S/PRDHCXHmXjscNMv5N9l7cn7d7FjOn9DVCY8uPAOUgedPL7wao5eWF5rz1Zr7e5cE5tkUhMEMPUN1MhoW5NOU9XyK6d1aqCvLaram8G7wjbGIviUKFIOvcsxSRZJEnM8uxvVrJynTJCTy0CFrIowJsVqPgh3PH7/lNlghybwIQ9M0rOeLNyImOsUkht705KmbO2PAWleBDXxyEyLoho63n7kNebvZE6qQPJ0z+K6RoSQJJrRliW+6EccJVakQXqQB1aCiBKsl2m9sk8mcYeiIYknklfEm0wRMT555dbROcn9/z6PH6/EQ6poaMwSEQeRgDcAw9FxcvFE522/2DL1xAYl//k6CNKfnrKgLDofCi7Z4Q+1yYJKsUcFA679v2B6YL86ovXJmV/UkWcZQ91RewjXwyfVieY7xMLYoUAihyP0huD6LaNsepQTWV+onYUZZ1sSTaLyutpdoE4xzsjncMp0EpMmaQ7339z2kbn33WXrhG61p+pbnz53ctqQnUJa2sbz80iW9QZCQ5AGDKZGB9HMn6doa6SFH0STB2IFjVeA9ten7gjQfiNMZbeeyx7ubI++98zYzb3CZLWJ6Cxern+J7P3CdnarQDF3E5uFIXbitWciOMDZ8+ztOea0qGxAd+/1m3G9iBVaUroDTnOwKdpyv3+H+1ncw0pYkDdAmQftO4L64J42XtI1m8Cpxm+0tq9WKpnZdh5urV/zCz/+z/MIv/Ak++4k7SsqmYDrL6AfDr/66Ey34+d//ezl7NOd2641GhSAIJC9evODuzglRfPjev0pVlRTlAePhNnVT8uNPf50496pxH/y0298iSesDFIvlsy9+SO73+Ml0waF8TRjMEJ54f9jueH33kvfe/X0Ibyxa1A/EGib+UN/fbLGmYDE/J5u51/qiQvctab7g9Ut3KO+Pt6T5jPY0l32POklV7z0kZ2LIsylhHI3QXSkCwiQYBZhMX5AnK+bz1UjSxhqsEg7G6gPo8/MEFaY0PsFMIwfT6ZpmhGut1+cEQeAUMwVfua6Qje8IyRCiOOBwKMYkwlpBljjJ/7FT7aGvoRcVMVYTqgA76NEwV8mIru/I8ngM3oyW6E7T10d/3R9yqHuKaiDyIlAybOkwRCqhq99IOA8WHj97x81h13N99ZpvffAutfeHUBKvLvcmedRdjxCC3pyEm0ICERAnarwmaw1xlCJEPO7XWmtkIEdqgrWWpinRWo/7qZB2/MxTpTybzdF6RugLEvP5c1QoKCt3XwBUtKDTgtzL76tAsj1sEcLQ707JokGoe370ecli5hK189WapmqJvC1AkqR0dUcymRJ4ew8rtT8bnQw/wHZ3T5qGKBuOf1cVFdN8Ou7xtrOsZ2vatiGJ3fl0d3dHpBKMV4hdLlbImaHrOqb+/O+qhqFo
CWXI1F9nIyqatvT7pfNOX6ZTqrpE+Gf2cnFJ3fSoaED5BaOJ6JuatUeEMF3SdCVd34xFvH7oKKuWQE6Z525f3Gw2JFFM5M+dSCqO2y3TaU7vRVnCJCZLUqwxpF7IrB+cTU3rA8i+P1BWikk+Z5K7e1NWRy7P3+Hq6oq6dPPQtTDJFiM9pGz2lMeKPJsj/W9puwKlFNoONA9uH8yyiRPw8A9f32mqoWKRXzJN3d5VFAfySULf1lhvRWKVQQWWOHrTSRrqAW0sVXUq8CRIKxmkpvfzHiQC0VlUeEqAFEEwoetLhmbw9/Qcad8oYMfhHKykKKpxf6ubkraXGKtGmxwVSIIwRvnkbr1Oubu9Jww6Go8IM4Nm6DRxnBBELp4xtUYMgiBwz3953NEHHb/+6zueXDhI3upC8v0f/AZWJ3z4gUcQFANvPXuHq2u3v9bFgO0Liuqe8wv3nmNVE8gFYdRhfUHCtAOPLqcjiqqtK+5uBWnOKBSXpXN6faAZBJPcxX7C/oNhgb+toIVX//svgR9Ya//sV/7X/wT8kv/3XwL+x6+8/i8KN34fsP8H8a2+Hl+Pr8fX4+vx9fh6fD2+Hl+Pr8fX4/8P43fSufoDwL8AfFcI8Wv+tX8X+I+B/0YI8a8AXwC/6P/f/4KTYf8UJ8X+J3+7L1ASGl9tnkxbimNNrw0ff/RNAF69vKfte4ahGFv5UgRE4YITODEgpG807dAy9dDB4liDEei+Hyts2tROcEJ6fLWV9G2JlJIHT4g8P3NeBEHUk2Wew7KVzGYLjJdGfXKxJp0GdPqe48EbqRWKfBLz8PBAKFwFoS47onnH1WtvSnc+RfeG2xtNcZKVb/Ys5pYkmzCduOx9uU7Y7/d885tuDpp+Sj/UXJw9/wqcIeT88imf/2TLYE4y4LfoQb6R39bWSYCHTs4ZXCXgow9/muEEHbh0IhvLdcb6bOo/Z0eahQy25uaVq44cDw+cXYZ89pk3iYwfU5Z7/q//oyIO3xhaRvmWIAjYbdx1TiYph8OBULjPjlXEcdvTNpLUm8J9+uNr8jSj2NUk2Qkaabi/fk3sCbyYmE1bgTgQeThaX2miOGCWTsffXNcNXduQ+nZ/FARY0xCogN5DBwQBxaZADDHg7p+UEW1Zonx1/dkjBx3aHUqWc4cNt3SuIoscjVozJRl0xaXnYNRVRxAour4aibdd17KerVxF2Fc6Z6s1Zd2S5afqzxGlFEXR0nroYBynGKs57rYjOV6YBGElxfHkH6VJswm319vRxyOONPPHEx5dvD3COMMw5Pp2T+kN9VQoyPIltjPUJ4lsbcmzlCy1NI2Xkbcl2H7k3s3nU9LUVZoXy6V/j/PaOb+YU+5P3dcFWr/pEm9TQVEUnJ2vKTwW3XQhd7c75t6b6smzp7y+vieMJStf+UTG9E1PklmuPQ5bypAwjNm/9PCCyQwhB5Jpwgffcvdqv3d7yubwgugE/RACS8eh8B566ZS+sxgzMJm6+6CNRMgQJePRhgBxJFAxwrh7HqVQVlui7JIHL1kdKEmSpRRVifFwKWqFCgK++z1HLIBz8QAAIABJREFUnp1kU9I4pG1rKl+Zn02X3hJC0nhJ+iydY/WSzrrfu6u/4Ke/83MMfcj3v+ckx0UE+3rL7vOeqwe3hr/9u97jk9dfEilHDt7tewYCXj58QnkSCBA1Ml5x9mjJYunXlE0RxKPX183NhiRZ0Q03HHwnd5ItKdsD+/0B4b3nFvMJ1zefUVXuD/N0jZY1XbtH25OlwZS2K4hShfUwq0BEFPua99/+OQB+6g/9An/1V/5PvvPTB2YTB7eN44QgUFxfX2Okm4ePvvXHaOpghHBra5nnK37zJ7/h/PqA9fIJXduilED4CrAdBD/5/FdZrlzVMcsXHA4FYQSZJ+PfXt1yKK75+Y//oJsTaccuRegruz9++X36bsdbTz+k8fwKqQIQeuzMv7p6QZjCfP6Ih9J1J5WOiYOMrut48PC+RxePmc2XdCcDdqtoe0ESG15+9j13I4aOJ0/fY1/uRnSHkjHz6YSXL12ltSy3PHnythNcEie7C0MeTCmKguLgvu/J/BFxqii8SIsxBiEUbdtye+s+693nHxOGIceyGJ9bISx9245m5NNpjlKKvhuwHm7T9xZhNJvNjhNBIAxjjHnDpQqiBGGkg6iqE7ROYRBuP/edI41g6PUI61bCQaUGk1J4bqYICqZRxm5zP/rvrVYr8mzCw851AWarM6xQ/PCTz3h66e67lgaBIY7jNwiQOKHve5KvUB2MMV4SX42v9c2BwZjRBxH4e7pUUgYEQeDPIXcfosh1HLuh9QJCoJSzPbH+nImThK4byJI3HI733v0OXdeRekieNYInj50v4wmtEwQSPXRo3TF4EY++LdjsXpEm7r+PVUMUBRT7mrg+Sfm3TLMF2HDsrK0Wj9lsb5ku3HMdCkGa5gghmGYnj0dFU7Vk6eyEtkMqizUBIyfEWFRgWcznI8wynilUYKmbgdZD2YUMmU7nnJ153nAzkMYTkjAckQ9SBo5XHgecuOjp6i0ehXB76+XFl8vRmFh4LtqhOJJGOUqFWE+P6KsGGxgiv//EQUAjBiJl6LWbl+39hslk4vYNeUJftFgsrRedsrYjS5bUh2YUfFpOz+lqWM3OOBTueV/MlhyPNYOnn2Aks+kCpcLRiypJUoQQaK2JvF1CcTwg5RtO8qADLJqqO1IeTkJUCcYMWCsIvO+jUophUJStN0SPAqTUREGAVCcuWoCUIIXFh79O3CFKHAwfh8Tve4sgGM/MAI1SodcBAN1HGOO8tU4xpZIpsUzQVtD3HskVZkSSER6+uduTRnO2u2seXTghjqIo6PQO1Wmk93RL0xSpembKQdbbfo+0EjtovvhNJ0h0/TJFyYRjcUXlzyclM4qDoqxOkFVLpRsmk3O8Kwuvb+9Y5AFdF3Hm7VH0UBGEMyI/TyoKOXsbZtM1t3cO2WF6Q1VHBCpl773LZrN/SM6VtfavAeK3+N9/9O/zfgv88m/3uV8dcRzy3jvuQG3aI4t5TpJkDIObEWFqkghmy9moBFIUFX17GLHbZXVACEE+zbG+lWetZRgsygZsNy4Aunj8iH6o6dqvGKIKR4RNfCv9Ybshz3N63VOUO/++gcGEpKFflbTc397TdQ2brW8rBwFlvSMJ1nQeKqiCiOLYE3ii8e5esd3tGYDq6FrWs/mE69t7knjOYeIezvtNjBUNm81nAMigp64G0qhH+Dbrdt9x9/AZ93cH8sgtjMPuniSejsmW0RIVStrWGesBPHr+MZIU402Tu74lCuYMneHVKwdNyNM15X7P9Zdy5J31emB7ndO1XjGtukffBuyPLXnuAtnFbM3hIWGSTcb3MfQk6pzGczfa/uiUCQOF9DAEZUP29wd3EHlOiZSSoZfjdZfHAWE0USzpvdqb1JJYCbq2o/dJSxTFDFgeeS+HpmkoDjV907Jcerid7ZlkE46HdoTbQE3btlxcukA/TWPquubR2RmFT6CN7enansk8oq/dkSIIESbGDG8w+4FStI0ZD2cp33hJnQJMoSL
iWCD9/VzMcnaHkvlsPcJRgkihxIRACbSHXvSmRWtF7tUCLTFJFvPs+aNRpUqbniCJKduGz790G+75+hFRFFD6AL6uQ6I+I44smceGl3XrREXqelTda9oCaw2vblyAO5vNCIIAIx1sAiBOMuaLBCFD0pNHlxKUxZbh6IKBLI/JZzH5JOSd973+jdmhh4tR1anvWxbnAXVTsVidyOSKslZMpgnpzG+AMmKzORD5hHoSRUSJYl+03F67Z/sUzEzymMbfq/l8xn53RHhRiLpqmc+XbDabcc7b1lAcK87O3gguTJYtwkYYLyqwPW6ZzBRG7Zn6aze9ZjJJiTNJ3b3x41NSMl+6uQxVymAaVBKReZGbdJJhTURdV/T2pCAGIjwQe4PE4/HIX/vr3yUMJhjcOtjc3ZNlOUI+0HlBoL/yvxU8eXrO3Z0zWzXEJNEaSzdCfsJYcHtfkqRzWm/0GaiYumxGlVGBZWCLNu45cL/5HkyKxWA9T+iLL+4IIwjVzF93RzfsiRNGpUOtBZgAPUCauPcVhwPPLj/ml//0fwjAn/ylf5lu2PLuu/8mL7505OMgsSRhyo8//yFp7u7NBx98m97ocZ031R6r4Qc/+Bu89dhBL+fTKXXbegNh79H1+oZh2PDhe/8k4Li3IhBobTE+2Hl4uKLrDrz9zEEj6+aAFDFdX5Ekruj16vVvopQmS5dvRDxUAuKI8LDdTz/7VdJJDiKi9EHZ+XwBw8CxOHB/+zkAFx9+QBhktN6k1WinalhVNa2HSyZRSJTOqKs3nod2UFgL+60LMGezjPPLJ0610Ae9QhqMkRz3B27uXTL+0cc/Q2/fmGUnYUTd9WzuH+g8LDCQysHwg2CEzQVBQNu21LU7Q1frC7p2oOlarPTrxUYIE1OX1VikiMKMXjMmwoEyBCqk6dpRICiMA+JJhtQK6YuG2hpqq1kt3FqZTDOq37xFhCHGn0VdXwEZu+2Gxdx9Vj7NiNMYHZxigobZYkFb1SOBfrWcgtUYo8cipRJiVD0F+H/Ye5OfzZI9v+sTEWc+5xnfOeesrLpD3fZt99zYbizcxjKIFUJesGkJ0Uj2wjJbViyR+BcQa0BYYsPSbNpASz369nCnrqqszMp88x2f8cwnIlhEPOe9Rm4sJJDuImNTla+e4Txx4kT8hu/QdY4bpoKYYRQaEMRRQiofINRWuqD2YIiMlRhjsNaOhbeu6+j7hjAK6dsDP0V5tdwDp1QRxWrkNgGosCNWButpAlZYdmVH1w0Pa7/RaC1IkmQ8Z5Jswne/93K8d0I4sYSu2bH3nJ2m3dBUA01bUTcHBdGY45MZq3cu/si80fmHDx+YT905k2UZ2/Wa58+fH4TjiLMZ0yKh8jz3MBLoTpHFOduN9w2NDEEqSEODsMn4WU1b+vsIQoVIERME8ciR2+9cgp9NYnrPKQ9UiLXtuA/X24rpfEE3DLQe3hcHOev1mvk8GwuuTx+/YLfeoD0fV/cdtresb1fYgxJh2bKtB4wZSLwHaBimZGnC/NQF+ruNJgxS+qyn2rtzNUkS5rNjdpsVR14pd7tuXALkYwIVxk7xd78aVWyjIKWqKqy1zL2Ix369oWvbESocxiGBEHRdRTHxiW+owGqSKByhu103uM/1Agxd55LCpm1G71KFIElyuqal83tzEOYMvXngYVuDEgIlI+wBEisCtvWGLPNmvaJjsy7p2oIoPPxNIugQ1pJ57mBVr5nkxyMHq0gjmrri7OQJ+90BbhswzeYMuqH28EU91ARxi21d/BYEPYiGIn1MZTw0WRksJYvFgtZTA/b7gbubisXSrdfpPOHu9p79LqT2SaAxC7Z6y3otuPFKjmfHj/mz21tePncJXzaLePws5y///Cs+ffWLbm0MA+/fX5PnKbO5e95vP7h46K8b/6/UAv//GsbYseqY2yXpJHbmsT5zfvJEk6Y5q9sds2OXLT57/ISbmytCfyOXy4TBuA3rwNFRSiIFPDo/5ukzFzCv1lsCFVB7JRRjeqeUFbRg3AaVpylxJBmGeFRMEkSUW0EfuPft9u85Pj5CkDOfumuIE8l2E9FVgv6gahYPCHNO7xV3ermmbzuS2UCaON7H0DUIG7DbrKi9IkwxcXjjrnOHwvHxKU0zsJd3XB+q9yIkihVRHNP4it5nr77FF3/1midP3ULZbHbc3FyRFymLYzcHt9cruu7q4XAJNPPpGXXVEHjFndvtmr7RpIki8kGg0XOqqiTwCo35xG3qp2fHlJ643XcNUkvubm6YpC6A3lYVSSDw8TuTImN5NGE6nbJee/x/MmG93nJzc8Ni5g7n3c7QdIYkdw9LfixY3a3QnURbtykXxRFGNyRRzNUHd/gnaYyQir72G75MmBYTtLZjlTgvQnQf8fTxY3alm8+223J6+hQZeBWZOKaqV0RxzpNn3gS2sPSdZbu/wnjFoihYkOfpGGyBZNCQZDFbrxo3TWcYYxiGYVRo0oNBSEno1U+G3vL08QX7agc+aLFGsLrfUxRzkF7xrtLEkaD24gfGatrWCbeMMssM7N5fYcWWKHD37+27hsGUTKbu+wJpsWZP05WjsWnZluzKxgXbHnO932+ZziZEB97J7g6jBVGU0PjEtLwuWS7n3H71ZiToK6UcD9If/Fd3G4oio21rtHEk+9OTlMXsiPfv3ToPgoBy36Ct5vXXng8kFFIpkiQbJb+32w+kqWR6IBC3kt5ayt2e4WAce1AWqsQoijJ0ijjOENK9pix3tP53HirC1lpUILi6uiTP3dxtvh7I4gHrqaVdrRnigrAICX2C2Q2Gpmno+hrlA21EQpFnbL0am7YaIQMnr68Tv+4GjOnoTY3xleS66wlET1+7ex4kOXWzYbDVaJJYxEsCMcfoNdYXStLccnNzNa6xLM1pqhWW9oGA3brK5OUf3o1CP0IojFZkvmucxYYo24GeEYcusZgvJbfXFUIsscoFFnEWIMjGThayQpuWoS9Q/m9x3DHoDciAq0s3x//gt/8R//Df+x3+6A+cCuAP/uyP+af/9J+h+2BUl0IOyGzOm7dfsNk6oYazswuabkPvMftpmrLd3fPmmx/yy7/4twBnGLpab4hiReGVHL/46Y+pqy1Hc2efAJaua0EbJj4Qfv/uS5IkYZK7Q72qtoTBBGgYOr/v7y757NWnJNkMbfw500OLGQt/dfkNj1/+ElVfE3pcftU2xEqwX6/GTsDx+QVdyyg+JJRGCLi+umXmA6li7hTMhspg/BnZtS1pn9L5xLg1HXUnyAIBxj//QhEFii9+8lOOzlzgNp2d0ncC5fmAu52zLVmv7zk7dUHhfD6nG1qECjD6UKxKuLu5Zb11qoPnwwlaWTrdILwco9UduutB9KPQRl3XhHGB9d9XNjWzLKLtagJ1SKQGoiTGNBbpBZY0A/v9ntRX8ze7kkEb0kyN3TsZCExv2e02FH7POwhTdIegUAqasmM2m/Ov/uiP3WuiVxwdzQjDcDToDYR0Cro+2UmSwBVarBhVBq11RTqlFCo4ICmcGujhfUoFrqPX994o/WDKHlDtq7FTRjsgUEi/BzVlT9
uVGK9gDFDuatIkH39LEASEYUwgQ9quHv+WpJJhqMZrGgZDWQ8PEvl+xFHIwgfwcITWfp/zxemhd+iDzDdahqFjGHqm04D7tTtX7+/v0aZn8xd/xXTm9qXVaoUxw3h+WDMQSkW53/LkieNhr+4aJtOEoigY+gNHtibLYwJ/VuRJjAwsVkgydbDXCB2vTQ0Y435zWd5zfHzMs2fPAHj79q0TXhCC2iMyhJIcz88ZhoHSI0CWyyXJshiFcOI0oQwzrm8+0PmkTBGTxilSQpa657btDLILwVuc5EHkFGEHQeKl2JGSPMjYdHcjv1AOlsU0HxOp9bYkK1KaQIzF3O22Ik1dgrXzYjHTYkZdl2NBQlmJ0YY0Ksj889C1DUGgXDJ5aBNb7QqeB1RMETMMA01jmBXuvodBzG7rmhCd5+RP0hi0K9CAE3MSVjAMA60Xbuh0RRhYBu1jp3zC6dmCzWaLCt39a5oGowNXlJCeqzXJ0b0eLzGKFCKOGPqePD9YzbT0TUBezFlMPBetbembmHLwStJVTJZHrLcrfF0aOwSosKPrynE+o1iQ5ymVT0z3H0qiMKdtLUFwEE4qkGHJdmVHQbeh3zAMmp/8xHXFinTCv/rjPbttOV57kkZM8glS2rEIEqj/ZxPhn4vkahg0lfdNqMqWrDUoJVDqcFAErJuSatcQe1Lmvt4jjBhlXq0ImE0nXF3dcHbsEov7+xvOzubkRcTNtauGLhYzJx3qJ7soCm5vdkRhwMRXHSCga/coGYzB1Xq1o297at89ODk7wWpBFMTU/hBCS06PnrDZbEgTV0WtmzVNd8t85g5sY1OKaE4QG7LsUMUtGbqcNN0BPtjpLKaVDK1bOHcfVgShk4cPvRpTmqQIFJMk4/zUBUB5nvPo9KmrnAI//OEPOT/1kLaDR09VszyejX4k8/mUrhVkWUpXHWSeA2wYoWRPXni1t7jgw1XL0LlrOl9O2W569mvD3L8mjA3lJqKIMtLY/b4km7Ba7Tj20K/FYsZ8WmAZePLqmZ+njufnL/gifsv61gVSpjacLMUo6VzkSyJZMpue4G19GGxNnETMp3OkT/reX73n+fPnY1dFKYHSCXowh9iDSb5kkp/y+MkF13eOAHl9c0W1CzjxwUhVdaTZnEEbIq+cY8yevu+I04jc3780mrJcHPPVGy8+0gxEac5mvyPyAiFXt5cEQUSSJGOwYZUliiIGc+iKCa5vr+hMyzAcpHUlIghY7e7H6mqvBYieKPIQoMZgcclM27lr6HSHtYZiEhFF7keX+z3KPMBcZNAibE0xzTE+mXv6/JjtpmK12nJ85EU7RAwMYxAxmeR03UDXtgw+qA9iTWfWHJ/nD55r1jL0jUsMgbhY0LUGi/OHAVitNdc31yS+8zoMeiSRhj7w74bWeVxtavyZw3R6TN/VaP/9wkgUMednj0b4kvEHdxTH49wJAWW9wWr3fCSRq0zW9WbcSLu2IwxjJGo8jCUZJhpovHjFJH+BaVP6RrFdu0NAicxV0USP4lCdzKnLBuXhIXW7R/YpQlQY7zafpimBLBh0NAqSWBPQ9A2Cgz8P5PGcPJuMHdAosdzcvuZ0/oLerxcrV4RxSpwu/W8xLM8Dsnjm1LNwZOuuGwiDflSgUkoSJQo/nezrAb1VqGBDEntY8H5FnEWUuyuC8KAWuHDBpA9+pBZOZXXQoyRvEs6oq5ZBV/yn/+ifAfDJs9/g9es3vPnmJ34xwne+8x2q5m6E/Bod0TeCL978Ps+ffebmvZhxffcQxETFlB/82Z+QJPDYd666tkb3DVrlY1f43bsfEVDw9OKVuw97JzwQSjXO583dGz598TlRcBCq2CPoiOKI27trP08tjy6+S6egLv3vMyDTjKsPbh08u5jx+PRTNtWWiQ/S1l2HDAM+vPuSvHCTnE+PMIPmgDLr6oZJseD6wyVl477v0fzfQQQh1uwRXm1uPp1QbrbsPdz3+OSY07OXmMBiK58wJCmmaykmEKROllvFCWU1MHjBJ2MtzbbCmoEkfSgsDtJQt90Y/OdBSNfWTKcPATzS0Lb1qEQWSkXdttT1HSr0HZI6JSVE+fOqaweKZEbftwR4IQMZkKUzlKwPgoz0JmMb7JlNDwIFwgvJmNH/TwhBWbYYY8YEOogTqrYduzZxHMPQc/fhnk8/dWsjSkLevn3Hs2fPRgVYEyoPX/Yy7SoY97gHyJ8ci2MPSpXuOoQP7qw1owDOz3bCrBVOodjfPyst8NA5kzIgyxfUdTu+J48S2q4Zu5W97onCEBkHJKmHa1rneZYWBU17eK8kDKPR69MY45TdjBiFvoyWCKnp+x7jz4MgFCgbgPUQstAihGUuTnj6xIkIaN0zDE78o6ndHD++COm6gX3l4a8younWJGbO/cbNxc3VPfcrTZI8+HZWVUOeRUjfNVouj5kuF/RDO3ZD+t55w7VdQ+EThEmhaBtL3bhk5Pj4mDjOadqWEw/L35Z7lJRoIfFHK13t7tsBjYGGJJrw6CxE+G7oduM8JfflepTzD1RCJGLW3l4gKxRFkRGEakRDTKYLZKc4nZ+PZ/n58YRdtRvPogBBXzeEMmJ15xU9k5wkjDHBMJ49gVUsiuUYd96vV2itfWHBI0AihTWS6eJ4VI5eb7fkUYFUY5sKG2gYDKGXdbe2Jomd1+Ji4mLCvrOEYYjyif1kllKVHZN0ChNvZVKtCZTrcgMIoyjrijRNsPhnvbtHmBlS6NEuSbcQqoDTE/ddVbMiDpwce+RjoOdPzuhajTU95xcuQU+SiO2qxCqngNu0JW0Pm1XH0bG7f3e3a7bbgeOz+ShcppQiTmBy5O/ntqTVLdPJgmHwtgBtgmkGAv/Mu/ves5zNCfzekhaGyTxnVsV89doVgUM5IYok+TQn9Pvb3e3BzuXfPP6tghYfx8fxcXwcH8fH8XF8HB/Hx/FxfBwfx799/Fx0rpQKCdTc//+KSTHn/v4evAFcVRqsMGRxwvW1y/onxYyhU2jfDo/ChA/f7JjPnhAkLsN/8eIV2/WGb16vUNL7U8VTsjBEe97JfrslDiWhSCl8tWC3bsjjJffr92hfoVGhy9ilr0jv1g1hkDC0A7mX+8yzgXovWU4nGG/4mscnbLdbFhP3/dt1i7E958dPuP7gpIN1Y0jTnklxjpUHiEiHHgSJl35WYefwpbXk0ZH72939B/qhxA5wd+2qWfdiiwzhwwfXqVMI8jRlv6tHL5rl9BQrt6T+s20vkUPIdr3n5Mh1kqK0Q1hDGBQMHqe8ub3mdLFESC/32d1zND/j8YkZW7Nt15FZmM4jKg/LixJLpDICL/uqiBjagOsPV9woV3V49uIpb16/I5Yxnzx1ldbuTLLrLglDV5GK4ilJcsHQCLR2MLLzi1ecnCYMg2Y5d+978fIpH27ecaAKBhE0+5Q0kYRxOq67dx++4eruLTtvjmmGlCxPuPzgpaD3A8VUcbS8YFe6SlaYlBTFnL53UsQAt+2a1eZmrP4aEfD+/TsavaH21a26rIjjmP1+O85V3/dkWfYAz1QxWreEUUAo/X1Xiq4vSXJGTxkjB
0KZUmRuXlpb0bYthj3nFw6+uFoJFouaKJqPFdE8MygV0ns+oDEdWTJhWzVo7fF2tqbrOuI4pKwOBogQpwGRh2tZ2xCHCq0ZRS6GoSPPMuq6HEVgVCBo25rGQzH7zgkEBDHc3DuS/XQxx4qW0nMbQpnR9g0qiKi2vlI3jal3LTKoHnhRqwph7Oi8Pgwdu3IP+xnKE/G19pDTNB2J213XYnSE9ITzOIqpyg1Hi/kIJ06TCXGcselKpPSdK6lJophAnPn70lPW7wjr5VhdnhRTNutrptOMJHBVP91nyCimFw/QqEDHJElCNzz4/9T1nmEYOPfY/kdn3+aP//T3uHjspYRXN5ycLHj/7sNYSb69vcd0GhHEHNxyT05P6FrNvvTy8MKy30IlFL2HVMVx7C0b9INxotb0bUfkYSwqDOj6miTOMX5PSrIpQ29JU0PpBSzKqscMmqZ13zefOenmptzz+JHrfN7drsBE/Jf/+L9BGQcVevv1F3z2rWf8j/+Tg2tNpxMuHj1ls1lhfbcuigq+efeG7eYdv/W3/5Ffs8IJLXg/nqF38unPnj0j8cIUm/WOIJYMg2Hn19Bq+4aXzz+n8N3mqis9SVtxd+sgm/vdDZ9//lsjEd9aQT+0pEnC6s7tU4tlwdHynKpvCT3OOehAJTOqvetcHU9S8mQBcYr2WP++1wxCIWTFha/QBvGculphlffiyqd0dUs37JBR5+/VMXE6obp7z+BhwRZFudtze+eeoU8+/T6dkei2HhEZCMHQWX70oz/g+acONdFbQ93s8Gg4kA3WCrbb7egNpZSi7lz359CpHgbD+u6e4yMv5W0BJMYMDF6wRwSW3W5H16+ZyIOc98R1NcUDJ8la6yD/iXuOQxUhVYQNm9HXzoiQQIZMCvc5m51lOp0ziH6E6UdK0TaDFwTwFXUCrBEIjx3SfUccxwy6I0wOctBHnJ+f8/v/x//Jb/zGr7nv0z1KMHKL27bGGPfZD549gjiMiCYFxl+D1pq2bam8GJDjq1mUUmP3TErp4XcPkEarLVqbkQ80DB3GmLHb5F40EEhBWx84SYqmb5BSjp9trSAIFJv1foTzhpGkbRvCA1RKKbTt0VKO3DBjArDafY7vptVVhVIRxt745SMAJ4xlO++1GQSoIEYphfRIB607x6OdPPdrSiPFMxC+YwY8/wS02bPb36M8NHLoDW2zZ7NyZ+++zzCr1sG0W3cNxnYkSURRTCm996R7/tUoChUEAU3TuP96Hn2WhORFwaAFuedOTacFNzf2oXMlHCSy6xpyzym9lHdYaynykxGWJ2XEbl/xySeu8xnHGe/ffYmSls53C9++viOIFEkxQR+ExS4vCcLUWxPA/HjCtJhyd79i4nmnloHdevOvias0dUO+KLi/cfNihaLIJgx9xyQ5GMwrJCF933J85s7ak6MZu21DU3uhr7ZnNp8SynCEUH64ek8QB+iuRfjfZ9OIxeKInedT1tWGtrEM0jCbues0bU40y+hat09aaUiTmLZvmHn7ojTOCOOEpip5/sztN1IImp0litzZ9+rTT7hbrZhkUyoP4ZQiQJueLA/ZejhSpCRB0JJ4afbj+YLZvMDYkj/909cAzIqULA5YLCKS2MU8f/mXrzk6PmWzcbHhfPaYm5sbVADCx+3tcEmez9C6pdx5BEFacLN6SyDcM9R0EUV+RFPrUSvh7nbNi5dP6Fo95iBh4GKPv278XCRXQohxMc/mKULH2D4kjN3iXU6n1PWeOAyRmQ9M24HJJBsnaNfeUuQzbLcb1VHqsqWvB07mp2MLUHeaOIuJPOk1i6cI4cikUruFeX684G69QRDs0ES2AAAgAElEQVQhY3c4F/kRuluTxe5gjOOEMAzQeYv1m3ssM4p5TjdssCMZeEKR5UxmfqrNjjyfoZAUiQvUJpllNolQASSxC5jv77e8ujhllnloVtRwv7llOp049TZAzI4Zhp4kz9j5QD8rIuq6RXkNfxUo9iuXEKQeTlDXMfu9HL0jhhYCOmaFZFL4Nm8liFLhFGY4GH86vyg86f149pjLqy8xScIkc4IkUvQcHRdEUcR26+B2XTVndqRIQr+pWMt6d0mj3T0F+MM/+iFpFhDHijByi3ayzOjvF/QH6JfYgRhohpap9+hal1/z5o8s02lB5iHlUrkN8XCg7rYN0kLfBoTavUibHav7HYaS6cTNuYhqml4T+01seRIBktv1HYvFwfAROnNHnk9QnisVKQtqy+3dwXNthQig3VuOZi7Iji7OudusmU1zPrx5DcDp/BklezSFnztD21SgpyO8TwhBKHMseoSR9q2Di+z27uAdBkM+LdBDyO2d96IIUoL4Ebt9i/aHXLmpQWvSA1lXhVQVdKVg4OCr0aIHyPMp+70LmKUEqyNs74MWqUFIjBUM9QNUaNv0GKno/XXpoSfNwhH/HyURu2pPlmRMvI9IMMQom9J4fx6DIQwlTbPHenZ+wJy8qDGEI4a9aVpUaNnv3IacJCm93lCVKwLvhZN6k9ey2o1k3KZpQPSEoVtjZbMiUgXNXlAkXmAiy2i7mqcvYfD+UWkWYXVG45OKySTn5l4y9B14D62u2fD08beI04zUB95ff/ENFxdLru4950qHHB0tef369Wi2moQBg3b398qrYN2vNhydx9Sdn0udsNpekUw0N/fu8BBKc3r2in15N8IjV+sSawTzuTvght5y9WHFZGo58YqXm/UOoy1JGrL1ympFURClMYFXhPrm+kusHBDK0Hc+8BYRWRZRN9UIe+q6FqEasonfc2sI5JqzswWX31z5+z7hd/+z/xolzrm6dUa/0SJnu91yeen8q/7Bv//rmMjQr0KIvS9Larm7/ppPXh5z6iGqZrCIrkd6M9ldpdncfMWikKPa2r4qyU1ItEi480n8Wag4efSURh7UAzWiV5AI7q/ddarYkOcnDB5OagQwuP2q2rtrmuVTAvGESEm60s1Bmw4o06ArJxwxPzql0QLVVAzeoDxJe8pKIboteJ9AqUK6zmIO/DwMQlckcs/ae9HJfEm336LiBDxsRaYB29Utyp8723rKWSCg07S4pCydL9h/dYmp7smC77u1YBMGfYNHCSEF3LeCPL7j2YVTpd00hl7XpOIIG7i1V7cV0jacHbvr3NQF+25D0Ec0Ho7aA1255nRpsb5osNOKtl0z86qRIo642d6RhgNPPP+nDDKqdk0iIrrIB75DSyYrjBfCaXRBNFkSVZq9jxNMkmFuNswSTeTFjQhiOluPPMKUlKEM2d1vOT1x5/Zus+X4yTMevfqc//2P/wSAX/0b36Pve/TgztVAZWgRYAY9cheTUNKrhrrbO+EQvIKkDEbop1Iujun1qJ1Hr3vn41l1hD5QQ7gi09AdoJESqzUPWS8YGRDHAmsOgkgKqXq0Fhgv7qBNA0Yh0eNnuetXoyksVjh+lTSkfn9TqcXakL7vRy7K0BfIMEKYAzzbJZfGGAYP85ZWMAwtRsqxyCVlQNmWaH7G+0tKrN/bwHF9hBCk0Wzc8+I4IC8WnJ6/9N/n5qhYiDEJtFZ7/lpP44UvEIay
6Yj9dTf7gf32ju16PSYR+01FGGqCXIwJbNv1hFHM3colEWbQ5HnOMAxsNr7QXlYsFkdYqynmXhlP96g4GXmRersniSYkYUCsvOH6ImC92xLKiMTTI/aRpCgKTjyMrSxLYhmitB1NhLM0pWvuyNSCq9srP1c5zW6gK/2ck9APiqE3lF7cRJmIxUnB7foe682AB+FUmA+wxPk0JQoCnj1+wrp0n32yzAlVhB4Ugad/HD07p2wjtDeFPlleUJs1u11LFru1d3Za0LcD3/3E3aub1Q1DM9AFhnDi/bHWIbGAaNLQeSrLyfSU5aOa2Me+rV7x5NkRbbljMXdnkZCKXXlDFFqWcwfZ3jc3hG3CxHPMrm8uSdIl2lS8fO657/mEt2/f8d3vfJu6doloeVMzz3bcvfewy+mS3/j+Z/z0J18z9TDIvW3ZrWusFGRzHw833zA9XnCwCNRxx75dk+Yp+43bT4/OYr7+5gtMl3BQAl0uHwRo/k3j5yK50noYlUKKSUFTrQkjS+h5GFkyJwhjklSN5PO+75FCUkx9MtBkqEDStxatvcqgEEySjCxK2PvDMQoClNbkXlq31x1RolxXwSvCtc2GJLI8f/qYcu9nPBg4O70YlQilssRRiJTpWOEznSCJJUlyPvI9elNjbEOzd9/37PETVutrlOyY+sUjRUyoBNYMo9v80WKJVIb1vZe6jDR9B9Z0COkrq9oleV3XkfiEcnO7oqktx6fu312rsJQ0u5Z6+yCXPsnnCE8qRmh6oxBM2JcPQh+RSamb3XiYSKnoe8Zq12I24dHZd+mqeiScC2GJF0esV/coz8s4OQkxtqDrDmpsA5tNCwQ0pZuDo9OQvtfsyh2tDyjv1wlDL0Zlqdu7miCQRImm3vtAIxr47Lsn9N2A0e5vk8mEo9OY1cpd03YXkWUJfa9pOxdMHs+PyGaCu7sGGfkAoZNo2x2eHe62HZPJgrouWXm53yTOCSN4ls4ecPVpxf1NRZ66gLZYuAO1mGds7ty6290OqDikGXrSies81NYFrh52zmpfkk8FfW/BV4SjFDCge8HNletGRqHDuY/YfzE4bHqTjEpSbb/j7naLtQ9dxeXiEev1mp1fB2EkUcrSNMOIk7YBzKZHzmTTbw9t0yG0GTtsYRIAiq4rCXySZNDEoUALQ9McJIYDtDC0vuOl4sArePZjpa7TJUVRoP2kD8NAluZ0w0DqlfK2+/dEckYYhyMvwkYD01k8rqkoTDzJdoc1h8DfPatFPsdwwJ3vSNOUbPT/myNMTBTFhF6sZhg6JpMJSXLM1hyk3w1Nu8McKr1th7EtYRiNMra3N1A37+k7ffhwjO7pTMu1x2erKOb6nXO0OASB9daQJlOu1ju0da8LQk1Tz8g8z0WIiCQ8Z7tbUTf+mpRmkjzBakO5d2tdBTGYmP3WdcWjxNL3PbttyN2Ve00chywXMU3TEHn10/PjF9zcXtH2ngA+nSEDS28s2t/P46OCutmxnKZujQKrbYfWAuHX63wekYYnvH79l/z6L/27APzC5/8BXZ3S1G/IfQd/COHD+w3PX7nv//4v/ApDF4PaM3Sem6Il19dvyLKEo4XreFVVhRAW4Q2K692WxSxgPi/wTuL0raauB8LFgtW1S662mz3f/cVHdI3vMijBoDXCBJSVC7geP35MkuQuaAX0YBECuq5hX7oD/PQiZTKZsN9syLyK6WAt1a4iL9w1ZbMJpZWgDVFw4LSE3G3uWd1f88zzf0QQIqVGiINoQcJ6veL95VekExeULRZHaK0RhCO/AjPn5vYd+PfNixPiKGBf10iveDu0A7d3lxwfL0blurLcOWlrH8RbLVmvv+LxMqXx545MItQQQKrHwPT2duDiYo40XsFMFVizQZtulH4vdy2TLCMJM6627rlrMAgxUB+6/iqmaStOYosWbu1XTQNIgoRR1Wy/32IYkH5vSc2EQUgIQya+SxyokN1wx0kuqA8qcbIlUynqYH+BRiSaWZozPShe6oHdvubz770ag7I//cGP+PVf+x669XxnA73eIMOA1BPiQmGo2x6pEoxnU7RNR2AtveePhGHglRbDce+JAoUMAiIb0h8OFgRGKJQX/pDWVfnNw7aB0IZm1yEOnHIrWG9umM0WP8P/lQSBIo7TMdlJ05S2rYmT2L/PEsURnR7GpEWC51sZDiGgigKXSPizQvnfKJUaOYF93yOkQEhGDpS11gk8eA6k+hn+2SEuEsLx1bTWP9MVtgzGjN30tm3prGWaFaMYkNaWIC0Q1o7F8TBUSMQ4v33fM5k+4+KpYbdz6yxeONEkeon1naSqb7HmQYAgSSI2mw2r9T0LrzbZtx3b9R1ZkjL481AiEMOA9DGQDWsuTpaEYTiqgxoDRbmgaYex+PfoUcH1B8Gs8PYsm2/Y3e85W57TN97gOWuRJsNqQ4JXe75fk6mQ2BcWw9BQbm8IiwxVe6RRNPDmp285OTnh9NQVnUKr+NEXf+b4kMA8P0JXPT/58x9B4Obu4uKCSTElny+ZH7t5ubxrIFb84i99y92rIWa9viR5EY4CT3fXd6xMz8znEhvbc3x6RBoeU3vesC1CFpMCIefsfbceuWZxMuHuysVcR5Pn0O7RZsPtjbvOly+/h+4rjLin9SbaUZDy2ctTKh+LPoleMeiKZfE5beTu8fTImcefnbzgyy/cnv7bf/+3uN18wcXZC3/vZnz62VOS6ISra28s3AaUu1s++fYFu62bg0BESB3jgWsI2TNZFmxWdxyduGRuMs2IkoGhD9ntfA7g0QV/3fjIufo4Po6P4+P4OD6Oj+Pj+Dg+jo/j4/j/YPxcdK6UFKMBa7Wt6IeWJM4oJg4+0TQDdV3T9Zb1ylXd5/MCM1gunjhcpm7h8vKSPF/y6Nxxb9bre6SUhJGi89XkUDkFwIPa1Ga/Z9A9cRBjUi9/KWNAkkUZtXHdguPZgrbuaGpXaZlNcgIVOVl0r8J3PL9gcaxo64B3772xaLIljhQnx45L0fU7IiWJgpgo8d0XEXF3f0WaSazxSlmdYN/uCXwlZLvfEQQSJQVN4/kUJmS72REVAd3gcdFxyDSLKXuPnZYJBB3Vfk3huTBddYmSCv9TMFZRVlssHbud69A8ffKS6SRn6AWD9xboGokIoetdJeLqg8HSEoUFg/YV4QD2uyuMbHnxwrXp82zGaneHt68gTqckecr9+o7UVyc1d+TplGKecn/rrsFKSWfX7LwpnFSa3u5QQ4aVByn2iKywDL0ijg7eIhV1syfybe2XR0cMNmG92iKCg8yzIIwijo7P8FQ04uAMEzSu6gVkzUBVtswXR+z33lBXw6AFP/jBn474bRn0KJNRe45SlAcMQ8/9zQZhPW7Z7CmvJFneMvUqPP0wEOYp4GWXs5Re7wnigtarfoVyQBtBmicjzj1RC7RoyD0nQQ+R6+QGHYPvfCAs9/clxSSm6w64+oEwEqTetHiz2RDFrnuW5G6d5XlK1/UYY/GNKqbTOUom1LX77KOjJZttRRxnnJ87RZ8PV28IU4lpNcsjB8Gpqoq71T0T361oW+cp5uTWDzD
WmH25Gju9WZaDUE7171ABEzHCODnVzsvDZumSoevQvXv2qlYzdB1REoxQCfccQxhHGO1et1ie0rY1g/cf6XuNtTVVKzg7ddDWST6n2m25u92ChzRV2y1a61G+uWlLjBlY76qR73B28oJeD3y4/rFXxgKrEza7G4yHCmViRkBBFAcjVyQOI6x2ld6ZNybUximhbe8PXjiC66tbolhxfOT2kst3b4jFFozks5e/DMCXX/2Y2Txl67H+gZ2SqowwHEYYmRJePWxoefLE7ZVDf8vQ35N42HM+nXO/uSUJI+J5PF7D0Fskmsz/vsXRjLJqRm+jLH7E6zd/yN/6tb9HZP8GAP/L//y/8o//yX9FOVQM3aGrueTruy9oOreXTWcnVGXNYDuS5GACX6Jkw/OLT4kjD+fVLWYYSLwS6ZsvfkAkevL8CZXvYiIEKlQIYdneOUhMXBQgp6MvYj8YkIq20bx57QyeF8duDzmYAwslCWTAZrOi6VzXr+9mIAWiGxwRFwhUxHazH2HX+z6k05oiCen8nAc65u76PUmqCCNXbRYyoCl3BPmBC5Py7u17rKiYTx1MLwpzVru1a8r5UmjTtFTtisZ34WbTJftyTRBH4/MQILi5e8NkVjxYftgeISyt/33T5JgwDCjvN6j5Z/77DKYO6a14MGrXKwKzIooctCaPFtzv96MhL7guzqOjDCUkyqMozAC262DkQDWOnzfcc+X3vHCpHHKl3iF8FzWOIqphQ+07UCrOCYygj0H6bkgsI1ZsuVvdkk5cJ1Bqi5U9gb9uEc7Z7N+hxIbAQ7gC9YIki9mstvzW3/m7APyLf/G/8Wd/8RW/9IsOljR0O/Ihx0g1+tURwfJoznbbMSqxmwGFpedBca8qO5Q12J9RAuz1gAwUhe/WB0GEFepBSS5wkuP6sH4BBagoGtURsZKjoxOwYuSGSQndMLgOlP++yiOADl0HKaVTZ4wjBt/hC9KIyFvYaL8PSmEZMEjfKWv7niSKGIZh7EYdDG+VUgx+XUeRUyYcVRWxaGvda32ntas6p0gn5Wi5o7UmVAp18PbKczrd0HUPvpNFHCNlQBhEBB6xMHSt8yT1a1qGAdZqZBCOnUAhFFVZs9/txo6etc56YVTDb1vCSHF88ZzUBwCOM9jQViWDP5+6tiTLstH0Os0mbg33ZkQo7HY7giDg6OiIylvS3JTOU+pAbfmVX/8+6/sNbdty9cHFT33fcn58StOteProO+73KE2k4hGF8+TJkq+//po0SPnmtfM8zacv+N7fe8m//P0/5Md/8iMAfvlXn/HqxSuOPMXh+vI1URCyXEYE8cGCoybOEnJVsPQwuXL7mrNHC2I/v0jFYnlGkiXowMV99yzQiWSRew7dRUGnQrq6I/D+f9d31zz/5Ncooo53X7vrzGY5u90ly8BrEsgJ4QIuf9ogrNvPf/zjH/Kdb/8CafqU1185Zb5ZOkO3luncrdc0OqGstuy2twReSdL2MecXJ3z99iej2XmSJLASo6rqo0cnbFcd33r1OZn3Sfx+/h3u7xtev/sS5c/k7377KV/+5C2vPnPP/668oi4tL549HSkjd/fvWZ6lKJmhblyct/cc279u/FwkV1EYkHkerooL4sUEawRNfXhgG168OEaamHnuDuOqWnN2/oTH526RXL6/5unZBVEUYa0n4getw+w2cHbuFkEQSpq2xUrX0pvPJF1TEAUx8dRviKJGyYjd9p4nvu16sphQlRtyT9Ju25YgEjxaTkeI03p1S5pOUXLg5ScPBq9ShLTepDHPIpLA8aaSyAVS1momxTOCMBkDyrJa0Q+GTq/9+1JOj8+oqobOSyNf3ayxpiIQEXHsYVW9oGlumPse7sX5KVdXV/zq917xl3/uoEKvvvMJRTHl8sotjnQScXWZMi1e8pOfON5Atf/A1eUHZrM5k5l7qFarjufPnyE9XGS71gxaUTZvWHrYjjYVcZiz2e354q/c9x0ff4s//JMfcHLubvIkP2VfvycKJ8jEmy0LgRIWRIsIPVlWlixPFhyU7pdHBVJlhGHI7Y2bz6OjBSqwxFHMYuE23K9e/xgIEV6a/fLyHWWjSNOU1dpBQTaiwgyCIISjI/e+3XbNrm5G74/drvQmfw+HXlbkNO3AYnHC4KFR+1VEFAt23imctUHTgo1pcMl5aELCKKYVkqv2IJIAQWkJ/Sbd4PwtkmRL6oPXqmzduWklVnvfl+ot8/kRlfeqkIGg1w0CxWrl1svZ2QXFRNI0FaH3LpNKsN3vx6QwjAX7ckseHyO93H3dVggUKlBEPngU0hlRJz4gur1bjWv++vbBn2q1WhEEYDwst6oqoiB0CROQ4Z6btm05YE2ariWJC8RofiwRDKxXd2NyFwYZUdwQqJC+PQhTdESx442A849q25626+h7dx/mc5f43V5fkaZuM6+bjrbbjh4XUhYIQpp2z+Wl+y1DX1OkBfPJ6SjzKgNLnk5p/b3SpmPQgkm+HCFyQSh5/+GG87NHGM8t2K4HlNbE3mOq3F7z4vFLhqHjy6+/BODxk4y+r1gsMjiYapOCrbDBAW4jGKTAoml9VWQxPSOQgsBqSg/Lffb4lOnkiKfnT91S3FwRCBeA1N44ssjmgCaeLoh8sKHyACFmhKHbt253G4IoIptM2K68KIMeKGZOCn46d/tpEigePylJE5eY/t7v/Uv+7m/+J7y4+Pv87u/+DgD/+X/xO+STgKtvWlLlPj8QCW+++REItzYWiwuqbu0I9B6ufHt9xbt3PyWKv4V95ua41TWBDdBectgcYKX6eIRUBxjaukNVe5rOPe/5/JjF6RN6z00xRiOTiGbXMDQuEbX6sfdk88GI1igRsNntQbo5SKIXdMYSCDkKFARxQb3ZYGJ3NvXhjFkeghkof0Y+eb+/JqRFKZfA3t/fk4WKpjkEoVvKasdsnlB4rq3WmkC66z0E2lW9oelWXHi+ShAEaDTWhBgv9GF7y765w5iOPHfnzO12Nb4eoO8Ml5dfkWxblp+5QljX1zTDBtH3zKULLC7ffUEoviT2HGEbt0wmE0rb0Xlvr0BIbq7+ikdHlqrz5t9KI6yi9AbXWVqzvrtldlKiPf+oKfduL1GSxOs5DHXP0WnM4Nem1pAqkGmK6Q567ZYo6Xn+/BRtXYFA9wKTBg5nBwSqRzea5bJABm5PSKUCJIEK2O/dPf2H/+E/5J//D/+cH/7oNQCff/uUxTTnfnUFPjBtxEDbVsznxzQejm2UhLBg6RONwVjSWcF+vyXOYr/OBpIkJJSC1kPLqrYCoYiyB85GKBRx+mCTkc0m7nM89HzoNZGKxrMJIE0LrAiR8DOy7k7w4rA/6/4guw9dcLC4CLDaoK3BeKiiFIo0FIT++w6J1c+aSR/4VM6D9MHLzCVyZlxbDir48L4oCpDSScGPXl+4QvfoCyUlJ8kMMISBHD8rUoqu69CeEGNjhbXxaFmRZzFl3TK0HZX30OpbkGFCudtTeHsdpRRxkI3J1uzohLZ3HmXG37/ZckYoBZNpNnrBSSkoq91DYioAKRzn2Z/3i5OWqtxQ1vdk3k
MyFQG39+/RpXvWl0vFYDRJkvAL33fPbd9pFvMJdbMcJfzTWFHkU85PXDx1v7piMfuco0TSfN/RCYokpOx7futvf86tNxJ//K0XXF9fI3zC8L3v/AbffPWW4+NjFke+EPXuPUZLiixh7214jrIJ5duK2i+9p0/OsKbh6psVgy9SxIXi9OwR1ifsqw8V2g7QBkyWLnbK04zyzTuWzyb0pfuw+77m/Ol3effGQbPlJOGLr75Eisfc138OONua9N0JL18cjdD9MCoQ8guUcEXE95dvmWQnhNE9xdQL4QQRr/+q4umrJW++cnFs1w0Ia8j9PpVlkq7KuLt7x9NHns+12XJ6PKHre4LHHvIbCL796jvYzusUIDhaJoQywPqmRRxmzJapp/C4sw7xKfBn/HXj5yK5CoOAb790ajPbrWGzf8dsMePu2i+A8zmahkQFnCzdoY69IE8z2sbdkNN5wWRyRhhYd+OBd+8GUAXT+Yw49uIGVYkK9KjwFZMymyesN9cMB1NYNaNu9hwvJ5wcuxu1ut7z/MURhVd66Y2lau8xWnB25jb3q2tNNntMWZYUE18BaiQf3q9G0704jvnqyw9M53PWW6/PT8xqdYMKBxZH7iCMkgzNQILntOiQplvRtXtevHRBY5wa5scXGDmw90pnZ6ePqJv92FHYbwd+9Vd+kavrb/jO5+63xJlA0PHrv/kLANzcXnFxesLtVcXf+U3nSH1985Z9q3n6YsH1Bxcsf/vzOW27pfJkyDDpiJlCNKP3ROOsSLm6/MD8pKfT7hB6e/WHPH05Z33vNoKTs4qTxwt+8CffMPdE9UePLuiahF6vac1rAD779Hs0lWBSuHu+uq9YHuUUU8V87j77ePGI65uviZKY9drN53Q6IVApf/EXX41rLAkTPrx/9zObreD6bsdsHlHX7tD5cHVJHGWkE7chhgIYWk5Pl2NFMFKWfd1S7wbazneqggmhTOjqg4pTQDPUZJklaL1YhWwIk5bQJgyl/6zwiHKoqLxgSD/ckkULhq6m9X4jQ+eIyyoIGDwfBmuxRoz46jzNGIYOrS1x5FXqBqeEFKhkrCrmkwhETtMcuFqa45Nz+k7jqRp0XcfQWwLcgQjQaU1ZV+Mh5JTEnFN9Zw+eUpCkGWGgMAdy7OKYru8R9sHbRBsLNiAvPL9But9yMMGNE0XTuEQpUO41Qy+Y5Gfc3+0xHkM/m8SU+zWd927pWsPJ8RP25R23K7cOdj6okzLk2qtnhnGMtYLKuxEr2WAM3N3tKbyx4fHxhEhNubty5rsAVb1Ht5Zd6d538eiYdtvT6X5MAr/68hsWy4TF9IT7ledYsSVK07Gi+e1Pn7K/3xFFMb/2N38VgLarabqK8/NTbm5cx7nvLUkxofXBpFKC+fExm82K2yv3+06OzoiUhLgmDA6JsKavy5GbloaS5fKYL7/8K45mLuGK45i23yKMRDduf5GRQmjF+SP3mnboKeuSvuoYWncNUT5FYYkl1Hu3FsphRV0dsd65Cup/9Nv/hF/9/n/Mf/ff/7cEkVt33/3O33QoAhVjfPJYlWt2m/c88glCmixpmkuGAUJzMNncImTDrHhOkHgho5Xzhbpfu2Ty6zc/YZb3nM3no4iQ1R0gafcdd2t3sJ9ePEPLaFTKTNIEbeH25hLpuUtpdooQdjQfV4nrJt7e3BPnh87qKb2VWGNHjx6rLfv9Nfdrl5wfPfkW1gx0QzuaXtZ1jTE7VByOvD1he4y12EO3KYqJE9iVDfOpO8ANmrZtSOKQ1Ctz/uT1X2Bky3TqOJ5pmnO3vSGJ1MgDqfclX739Cz7/7Htoz0G01tI0HYF/rtu2Z+iu0bNiLMCs6/cQKWIZs7k/CC5EPHr2CO33JCF6+r4kUBF56vbTWux49WoGbOhWnneTdMRBgPEBe72vWCwCvv/Lr/iDf+U7eokhCi29ibA+gF3d3TJdXjI5/5sANG1EIDStGVC+CFOuVrz45JT5XPD6te/gLUK63hD5OVCBwbQ9v/lLj9kOrhj309dTTCiJbDcK7ezKPXlu6L34yNevtxz9DUlha7rygJDIyNKMSHQs3PHA5e0Vszzm1IsWXH3YkBcJw3A7ejPeXn99aIoAACAASURBVJc8f/4UKxqarRdmUhaDHjtQg9FUVcWk8B+M46IlmeMbAeRFTF3XzqzVbzj7/R6lwn8tYQmkQsoHtcLDa41tHxIuLbxJshy7RFq4jlTb+uKR71D97IiiiK7rRs8vNy8FQljM8H9/nxw5V1a45CYMGTtXVhvH1fqZa9dG/F/svcezZFt23vfbx7v0mdfXLW+eRRs0utkAumEIghGUJhIZIUZootBEGkj6ayRNNBJDJBhSQNSAZEgQxQhKEFyju9H9Xj/TVa/M9Td95vFWg73zVCNCwIiDHtQeVd3Im/eYtdde5lvfh2FY7YxXVUNtagS9bjvPaNsmrmb8tWQyyzLKpm5J0bIsI0liDsdjUsWUm2UJaZG3JA22KZh0HHq9Hm5fFnhM08RQIsE7pINuWMRx3M54lYUJmkZZlnRVZ65B6jRlWUSq4DmiqvGcqNUSPb94xXxxi6F16aoZcpqKs4tLojBjoIzq6PCQmpqskAlDVercu3PCLIqo1YNJ0g1fvTjnw4+ecueejOmen6344OkHfPVazhZFWcqvfPNXubm55fOfyyKeabn0umMMQ+POqSLo2iSUUc7BsYw7DeGyXp/z8urnXF3tCkMBNxcFo/6uGdDleNynTEpUrZNJb8RIC3n96pzAk/Faki95/fkLNG8377xiGHhYjs+wlMmO4wxp8iG6FmJZM3XPFrezc1DxY9c7ZX57Ra9vcHklSWgmByZ37/1dkjjEUVqinmODNqBjypjSoARDcPjwEEMxXOfxljiPOD4etLPZQmQIrabI5Nn09Oi7eJbBVy9et4LovYMjsiJhPgsJuh1lC/yt65ciudKEhu+/Fa9zA49Oz20TBM83idItdRGiCQVR0Ud0PLsdnrcsD8+VlY9cqXIfDMe4QYfZbEapMvp7x3fZbDYkqqJo6tLRjHoHWLbcUEJk1HWHPK/ageTjBwdswiu+vPhM/p7lEecbnjz+kPWO9cd1eHn2nHt3H9IfyOv82afPcTwLx1PVnybD62eg5xiO/D3TFDhViWGlNKoV6/pj+pMBQlWWzs9u+eDjJ+iiIVIVk48PHiIMnTCs6XQD9awMuuKQKFHsZHRZRytsz+dEVVUW61fooqOqtOC7Y9abGfceBgx6qjP3F2sO77kEPY3bGxnw7R0Omc0iBkNpqLUIca2ApDRJFLW9rpdYgcX+4ZBoo9hY8jVpmmCpSmi3Kzg5PmHUfUB/KH9mewlFYnF9E/P9739fvZshNzc3uAru1w36rNZTrs+LVq18N+QbJ1scV6jnGUDt8+vf/Q4A09kVUFE2M/JsByvt4Dod4mQN6jkcnAiiVdrCyRy7Yu/RIZZls1y+ddyHexNmyxWuoxywnqFrHp6itbacgjzzyfKSQAUWjWnw+lXEZDDk9I5iFNo0pNUcRx0ceuGDqLFNwaAvN7AmBINBjyKHWDnu9
SpGNyscV1XzswRNWNi2wA/kc4mjENO0sYwOmhLjDsNQygcoGKvQNIpcUDcFO1cgq5MG69W2TYA0TSOK45Ysw3V9qjIjTkJcBYmrGkER5QwHA3wFjyqbEtPUmC1kZ6DrB6SVhLHsDp3AC2iajChWAsyZjm0FFGlDR7HxmLrg9fkLOt6gZZu6nV7h+x22qhNpWRbL1RRL1/BcRVGvktE0KugpKKauN2y2Fa77lu0uTzOGvbcd6CzU0D0PSAnX0oMORyPSrCJw1ODxckt/0OPV60scBW0dDQ8psjXz6zmVYl8c9HuUeY1Q3a260OkEAdvtlkAdCr5tsVw3UAi63o5uu8YwNDRNCaTqNlVV0He6DO8P1TveUBQ5lgW1Ymjyg4AkT5gqdsJer8fNxZaj/bsMOjIYj+OUrjdhs1231x7HIb7jMrtSB9w2w0QnCLr0xvJ5OqZJXWVojk+soLrYPmFY8r3f+E8AuHPwbc4vXnP25or+QF770dEh26jEMG10lcisFhsWy+fce/IfAFDUEmGgWTWmUCxRNxd4HYfR+ISt6jI0wsIwKzKV3F0vzrHsCZbrEKr3XeUJttNndr1kurwE4MmH30Xoelsd3W63dHt9bq9egapS93qHhPEWXVc2XWU0RUZehOgt+Y9LmsZ4loGmaLqbsmETXb6twguHJNqgaSBUUriaTUHbEmdNK859uzonqUo8RXMaxilRNkXoNo67g8RHCEOnrktMFVxtwyWaWdLpSTvIckUbn8U4vvzZ1cVrtsktg/Hfa4srTV1T1yWaur9ttEIXBmbQk0Q+yGq66ftQlWxUN/Tkjsvk8ID1XImrZwmOqZFVGo2ioz46GPDwocftQsBXct/qTU1VlTuVAJbzkF/5zQO8vs5aFZjGrsZ2u8E0eziKqGm1OOPx0z7TmfTVrlOCq+PYRkvAkixnDPoGmyTDU525StehSbFUYSHXbdIwxBQVWaYSi8ZCkKFpAl114tfzlF/92mNOTuXw+h/8s3/L7HDEP/jt3+Pzn/wAAN92SdIK4QiiQvqq7333KXmUcf5KMmA+mtzl5evXPDkYsl7Lzxw9OODBvSPOL16wTmWi/97jp6zmSzaKhGbgdjH8fnseA1TpGsPov4UANhW+bVEYegvBzbOGpkolXFn59LIs0UTdJtme60pInvkW3ldXjWLhK1s/lWWZomeXvtuyJNTcsezWfvJUJmh101DuyKmSmKouWvY+gEY3Jf28ghhWNGgIxYooP7Mj1Ghp5amonAy9eEuWEQQBruXieR5+IO9ZiAbLNPiFnAw38MjzHCHeyvLYto3jWr9AsCHp8KNd17GpWK/nksJdSc0URYahlczmq/ZdpElOVuQtIRq6RpLKAuTuWemmJbt3Vd0SWlR6hXA7mJq0sUHnPfzBUzRshOour7dXmGWMr/UoVEPg/GaO6xktQZFpBHz54jleZ4yjmKt3BZ4/+bMfsFjJ7zoYjdC1AYdq/OT25oJPbz4lCAIent6Tf28Tsprd8OTRQ378lzIJ82yNyV6fn7yQiKXtStpM1fi8/1Tuh3QZY5sGjSKvsH0dtIRCh1zJ+UzTiv3Dhg+PvolW787RS7brBnuwI8ZJOZwMiKuQxUY2JPp+AJ6Ba+zTO5bx/mY7ZXj6a9wqseWDyTGmY5Kl4EeyIPmzT35E4P2cr3/9+0zGMj69vrhhMjphMZX7bLGo2N+7y/XFAt2Q55phNiTzHKdrcf+ehBNfXP2IzSLj7h0JxS4Tg8HglF5vg2GpsYN8i4aBaXjoqjO+za7429Y7Qot36916t96td+vderferXfr3Xq33q1/D+uXonMlNMCSmXuSbhjudynqkq7ix6+bGr1u8DynbdcKUmq9QnNkxW2xWREmKwK/h27JqkpR1MTrJX63IzUpgLJMuH9/vxUMNd0O19fXRPGKYUdW/cKwwrK65Pmqnf8phEacV9iKDKA/HFHcQpSXXN/IQbw03uA6Ope3L1isZQXa7Zj4gcmVgiVZloXfa0Bbc3gih4jD7Yr9o0MCf0Shqq+mXRJHNaaiXf2147sYds16uaFSld0kK9gulhhan0hVAptSpyxijk+UkBsdzi5fMOgftHCQg8kzbmczXp5LKM/+wQEPjx6wWcasNrI68eTD+4RpyXK55Gtfk/CMWBFp9Aa7joYHNEzcewhFY73aXGG7Hao6oUolXOKjD57w8xc/5uk9WVWp6ow8q/DcpqXgN00T1zY4PT6lVlSlq3SDZ00o61X7LPftY968OWvphYsspCgMsjSnUG16Q3fxPZ9MwUzu3jtiMUv47ncOiRP5mensktjZ4AYHLRFGLRboWo6maFB1C9zAZrtJ6KhO0sSZML0N6fZFS2QQhRnUGffvqQpqs0E0XYpc48XFKwA+vNvnv/5PP+Avf/hTYiEr15+9mnPQ73F6KO3uepmwnksoUV+14Mu84OjogOVyzXCsRAS9HM8fcTPdzT1N8X0PDb+tMrquoK4FQqtxVLdlOr8lCMCydxPZcvDX83VWC/ludc3CtmzGw4C50lOybZOygK2a2eh1K0yjwba9t1pNdsA6SdkstswVrt3xXSzHJlOQiggp3LpZbdg/lFUqGoPtaoWtrjGOUraLNXt7ewhFHlHWmdIkaeh1Zde0zGqSUCfwlH5FtEAXkJBhKTFZx3OAFcN+QKUq7OvtlvFwn6KUdtf195hVM46O+m1V9fLykiy1ybOaw8OdHkdBVWZUqgNeVyXb1YZRZ0yoZt+i7RRTtwh8m2ZX3dpu6PV6WI2ilZ+FuI7Fo/uP2Iayuj0eD3Hsoaweq5GSyXBMUtSYloKZNDo1GpbbaSm5m2JF15ng+WZLSe+4Jp7VZaggY5vwhoO9PZpaJ1faaTQ621WKZZl0fAXPJKXb75Mq0WvPstjGEck2p9dT0FZqyqKmFCWDnvz+VT7j9//+f8ReXwqynl18waA74OdffcrduxLq7Tp9ZstLqqpi0JO+6+L8NVW1ZW+kZAmSLSAwhEaq5njm6zf0el0azSBVsCOhoEwrBY0cTiwGBwfUoqQpdmK1grKpWa1vOL0vK6S+O6Qq3moBmWqm5Hb2mr0DuR/9oAcULRW7oCIKU+Jkyr6Co2umTVPlGKaHULDc9XrFanvJnTsS4qhrFk2Tk1cFtoKezKe3zNdXHIyftRIKhq6TKx0t+T1Lrm+fc7g3wfekna/SKZruYhg1qYKobTaX2LbJYCzvrahKTMOhyqNWZFc0Ob//D36LYXDCJtp1hTM0vX475E/GBx/ZaOwR59LOtMbEwkAz9VYOYj5dkJQrup68P8vQqCrZBc6Vvxn3LU7ujvm///w5gt3Z01A0dTt/5IU2UfY5P/ixSZZK24jCLU1TIsyM83P5Tr/3m+/xne+c8k/+Fwnr7gQaRZUj8hqhUAVCNJLCWeuhG9J/a0LHNB1S1VlK6oSDkYXFkMVWdcG6uaRu120cdV031zNOemtOj2Wl/vu/833+5f/6b3j/mw94//tSI+zFJz9isDfifHGJpzQW3VGf+eoL+nvy//6g5ln/mE7Q5/ZWniGe7aAZEU+f3mMykD7u
7p0jfv7zEOdQ2l2SZAhTYJhvRYR/5zvvk6YFQVf6sourazbbAtsN0C153bklyNIKTbPbDo3jOArCJm1TQuUKNuushdJJ29YpqhJ2gsjIfaUbO32sCsfWMQxaMglNM1son74Tc7YshGhagos8zajKml/g2CArcqoio9G0lqBr0OvhOJYkIVDvU7ATcZb7tExzkkKQRCmblZpvGvbIrbT1gbajk2cJTdOwWe80OivStCDLfnEW7e3zARBaLeGMVFhq7lPTBbre0DRvdVdN02TU67I/kudAnGyk7l2UUu70zgybRV2Ql1VLalUbFmm+wVOzvnmZYTsGZZmhqY5XJzjB1U3V2d11xmV3uRPI/ZGmGToCx9Eo1dz+zfWSOo8wTI2DYxmXBB7E2SV1Kc9VxzSZrzbYts1wOFbX6dHzxySblIdqtGS2vCbJCyxbzb5OanTL5ODkMXtjaa9FVVOUMbU6G9ZhThAYuG5Joezu6eMHiGXF5y9+jqUQL9ggXIuTu/JvmVqELkqMKGP/vpzxvJrdkiQLzMZviaiOhofEcczJgbymwNHp9Z5xc3vN/Q9O1N97xk+/+Cv+9E//FZ5Cve1NDmkKD1NXOolOxXY9Bc3i4WMJs/78s58TxymW3/BadZyvrs7xzVM8pTEb57ecXX+OZVmsVTw8mAy5vDmjNwh480b6pcePnwL/mr9p/VIkV43WYCvIWh3nzFYhju9weStxp9PpDZPDMTZm2zJ2vJzbm9t20FFUDpqRsYym7XB+kRYMBmPW8ZogkEa4Wa2I0+1brHG6ASPGtDPCRBqv43tst3OEVdLpS3jG2eWUKjNalp0kLtkbPWQ+vyRXTFIHR3toWiO1etSLoqnIi4i9PWlgjagxjDF1rbVDk48ePuHm9gLDLJgppjzb6uL5ZotRPr+4pjv0WS1TLHV/cT1lsGczu7lEU07DCmy0suaNgvLVvMGwbK4Wly30MlouMb0Kc8cYVLvcTlOqOidXCRRJRZE3eL5FpYLcvIg4PBqTxjscd8pwOKAqYrYbCf3aPx4Qbbvk2Tn7+wr/v91wPH6Kodram1WPLL2hO9ZIE2m8Xf8BcbIkDFPqRgmbuodc3LzC7+4OCosizzBNu4U9WKZLkicUZU5RSMf95MFHvD77nKWat+jEY1xrX8JDS2kvi2XEnftdfPceX3z+XH6Xq5GkBaUKNCzLoA5jFss144l8nxdXl4gmoDfsoHRiaXApq5RQ6Q/1ggkCHU2rOd2XB3Z/UFAaOf/4P/v7/ME/l0QftgYn905aEWrIqGoTTc+4un4lbdHs88mnP0GImq4vAy45IJ6x3ch78ZwRRV6R57OWac3QA6JsBWbVzu2MJ33KKiJXA/S65uH5Brow0HeCvZqNYwY4jsO8misT1uj43fbQs02HOFlSRwXjA5kE3t7c0ut2KNOKZMeQpnksF7e4SnOlLmvqMmc8GLaHZRQmbLcRDx9InY1tOifPE1aLNZZiWzBMjf7ghDhMyRWcV2i5ZEhUzXfPt/FcmyzRMdWwdaygqkWxJVezmYeTU1bhtCWOMB2H/X2fIr/GUVjtk6M90mzL0dFhO+PluC69bsPVlbSpySSgLHOaxsJXWG1/0MMUFdcXK3o9aS+dfY31NmY0kIWF3l2fLE5YTBccHirh7abG0gRux2lhh02t01RpK5LuWTplNcUyHJqyp2yjR900xFGE76pZ1KrBNDQVOIFjK/hrHJKk8pkLTMo6pmtP2K5kIrNZ14i6ager/cDFMmuipsBRhYy6ySnqBM/yiZQW3d/5tX9M173Lq7MvpY2NTnj51WfEUcZvff/3AciLtfR1BuwIIJN4xt3jR+0cUVE0oCUIhszn0nfdzl9w5/jbaIZFrWBrTVNTlT7LW1msyqsNwjJpBAjFsFU2NWmVMN+c4yth8V5vRJqEaEqbqqIh3W4oqgTNlWeD0E0pkqqSK0szicKEJF1gGTKRcRyHui6Iom2rlRiHIVG2pFRzBElWYlChaXqrEZjGIUm6YbJ3p2WcrPIC3fAQYpeIr6mJyLNhq29UUUEtRVYbNfs2X53R6ZQYinwkKxOEaNCE3Wogvnr9nKbzir33fouaHTNmhmk1LRRrGy747ve/yeufeJxF6p4RrKdTtCAgieWM7PBwRZaJttAXbxIaGkVeoIhbohVf/PyaqppQCfmuRFmgCYtEsRpqouH9j/r8+Z9Cpeabi6KgqXIKbUWq/JRlJ3zyk5/RlKrYud3Q70ohrFwltEm6ZW8y4MsLHVP5LscoqXUNTbEVisjErGbkwqO0pZ/SUh3TqGiqiqrazZQlfPD+HpdvZDFp2Ps7fPjNr/hv/7v/jf/iv/w2AHt3hviWQ+N2mOzLvVxnDftHA+Y30qgrI2Yy2qfr7xGoItCwZzGfTdmsQg7U75VlyaDXwdB3YsQVRZO3SShAnVxw/+gepbL7tR1T5Q2WW6G3HxN0vAFxEraMaVWVEgQWhSok9Icdqqri4HDcJgxCSJKIOErZ6R0LISiqimInGKyIMXb/BhB1g2malGWJ1RLtoFgl1cxn4FOWNQKNHTCqLiuapqFq6rb4V9c1m9WStSoslFVBXZpo+tuEiEajqmqaX2BI1HUNU7dxFHzasgw6XY+6rtsEyjBtOpago2WUCvJX15LRclc4SeOUpqkoy5JGj9Q15IhazePXO8i/Tlo47R7VsTFMjV5v0MZmRZkwPjghjLftuEkT1ZSVSZKo0QjW1JWJLkaUtYqx9JpKA0PPQcEHq6rANl0MXTqu0UhDCB0n0BGG8vuilELElYatiNmiPCZcLbiaqQKScNi/9wGOY3G9VpDGpqERDVkRczJW+k2DY+qywTcU6c0sw+zImbrPfvoKgMn+McKvWSmdzUH/kP2jUz779HOevCehdNNFAnFMb5IRKsi2Y3uYdp/bS3m/X744xw0G3B0PKZV9uq6N0Apc/+0zN7wQV7dbqOt6cw6kmEZNFctztSFl1LmD9+SIbSTf3/5en+0iRdTSbzx86HJ5eYnA4GefyLkzzzUYjx28wCNLpW18+1d/g6urK6YzCZUs6y1FYWEaLucXigTI1bhz95if/vQTajUj//rNC/629cuRXNUNKyX46vmm/P/qhv1D6Yw6wT2iMEWYDZutDGCX24Jed0imuhxpklE0tWSyUh2FwO+z3oYk2YLR8fsAWKWPE2itQOrt/BJN0xjv3+fqUh7qhgdOz2e7ibmeyetKixq7J7CUEfa6fa6ntwT+gI7qsJkW2MaIOE7ZxjJBuP/gDq9ebdoK+KtXrwgGOUXi0VeVrB/+7N8xHh0xu11x9748oL96cUFYmJiNqvr1a1bhGQkZmtlXDy5hsUpx/B4VqjOn5yw3KwJfUZVqJnGYUNV5KxBcNBpdb4CJEvArr1lNc4TQMRXrYOAP0A2dIk/beY5R18IyhhhdVYWPZvKQEhrHR/L5hukc012wvJniKvKPvGwYDB1q1cnaGzh0vQmLxZRGdeGyMKbrDlgnXyDU3NBi9QWBN2CpsP/9scbsNiFLIlYqaqrLCs+3GAzG/PTTnyhbKOn1fVLF/1BmGUv
9K64uZ3gqOZ/PQjbrDM+LW9rafJpgCodOoAbczT3CaM2wb5OqmY+uM2SxmaKLAZuFcvhGgW14LJTzIe1QVmr+Q8hnvFre4w//Rcz/8D/+EbpioOoNDwnDmoXqGtVljaFZNLlOjnSadRHSCQZs4zlbJWQshEWWlK1AYphsyXITTFiHKqmuHbpeQ1V7hOFOcDGn4/dIEvk8NRHi2mPyAhLFcWG7BXWzYr7JsZSI7+K25PGTA54+kt2JL55/ys10zWBisdyobogoKcqIIhPohrQhVzfRrMO2S+TYJv2+SVnoBDsK3oM+i5uEVAl69gYWcaRx/8ExrqdmIrYhUVliOg2r6VzZ0AF1XXMzlYlqt3fAcNhnNdcYqLrGQtGYa7WN0OUzFtoGSxTQ0u9e0u16lKVLpZXqeV7R63XoDPtslfhvXRbMl5c8uCcJX1y/An3FZmFSuvJdu4ZDEibcf3QHU81zOKaHoX+Fr4Ll4b5AFB0EHkJVfw2tz2o7R7M0DOWXbMtne/MVg95uvqKiKkzy3MNUzIOWsGiaAseY0KigWrdCqH2aSon11gWr2Q2i0XA7OyRAwnoeshW9lvji+GSPKEmp2TED6ni+hecdoOI28gTqUjCNpnz9Y5k4nR4+5bOvLhj3VEexijh78yXf/e4+dx5IcoxtDFnToJcFZaUILdIpzuAAXxELpXlBU1s0dsNyLgs1e+MJTjAmylIKZeuWUVMiWjs/OXpC17xLXTYUSqDUsAVNrXN1+5r3RrLb4rgaeWaBtkv8LRbbkFybYloyQGjQKGsQaq63qjS20QbTy2kqRRTTCJK8QGgNQqEYpstPSPIQTYlCG42G0HOqpqRUwUCppYyPxjSaQ6XtKM4Flt60TJmr7SsM22Lv8H1KNYOhVya6IdB0k3NVwNKcFC84gh3ZUbbCcwdkdYlyO1yuXzHUXWzTJVFJhOM1NJVLXSvGzeqWs5cZN9Epujo/irqkooFUoMlXw+OHeyyXA+Y3isBDN9CskDrRsVQANNnr4PYcbPcGK1VFOwRZmQA7MqcldTEhjkoasVX24tPUFXWo05TSCe0fj7l6I1ir4ojjGyRJhtMNyJWY89H+hMGow3jrs10pdlkDaDQsNYS+Xs9577cfEych2qVKGhyLPK0otYY6VgGeptHd11ik8sxczWd879d+m7Ov/jn/17+Qdvaf/zePcU0LazXn4gsZhHUGHY72Bky/ugBgdPiYsiy4mZ0Tp/I6a+2QN1dnHO8/wlNEBuv1ku5B722QHfYQeULN27ml8V6HV599id+X+/je3hPK3iuuFitcxZhWJTGPDo5xgjGWGtf62c/OsR2HjkKXnL+ZMjr5mLObTynU/dkdgdMYON0AUxWGNCNB103yQhWdm5K0kILdcbYrbARUzQZd71DViizI6OK7DkIlP7VuYpkaTVEStR32mqrUEIWBritkha9hmm/nvnXhgZZRlQ1JvPt7gqoq0MRb4eskkde5o3nP8phiEUMtWmp7y3Ko6pqKFF0x3pZljS60dlZLdtyERDSpGSih+biuS5S+7XBXVcV8vW27y5aeYJomq23YkqSZpontuvS7HQ7UuWbbAkTRIq1oStI0pi5q1ptd0mpQZBpFURFu5bUXhUmal2SpTPRn64IsS0hyvWX5BdB1+Td3wtBCCBy3Q60SMNs2WYQpIm7kDDpgmQJdQOMG/OgzaZ+uZ+MHDj8+e9n+30oMwjDk5ER2ifbuBXz++TmOIt2ImpznL17h+D5ffP4pAIYm8DuQxybLhdy3ewQIbcbt4lI9O0g2l2w6ezi29Kd1GjDu22yiFTTSPnVjQG8E2Vr6iAd3H5EkW9bbgr/84Z8BcHz3HqYdcLt8yVrJ5FSvdR7cu8/ZG4nIevVSEKUZlpuz2MhC0fk05eMP38MSFleKYC1ZW5i1QabITyaTO9xu3/D6/Dn9oUxCsyRnei4YuGMOxtLvvnl5yd+2fimSK03TKRVjku8N0TTwvX16CopV1VDXS+q8ZtiXLc7lekMW2zg7bZFwhWm5DId7rNSwvCMi4sICw+NKsXDFW4MA2opG2QjiaAaGSXckH+Rys6XbN/E7AUWuKiv1kkHXYq0MZ7lZkhaXHOzfp8zlhvJcm6vrS1zXZbinBsXTBMvu8snPpBH6nQ5Z5tJUJuFG0UGLPmG8JM1i3py9AsCyLdI0olBU3nZ5Qhp59Pou4VI+l+6gh6ZXsougEqUqrhC1gaHgE7PZLV2/x8HhCWkuE9NkW7OeLZnsyQy/qCxG/QkVCaYa4JsubtGNhmF3j0wdcq6YUJYwV4lwf2RQA13nkLXaCGUZYZg1h91Tso3c+EW+pYhXOKpFfhuneK5Nr++hKTjoejOjrvtUjYWuAqBt3GDZ61bbpXuobgAAIABJREFU6OrsnLr06PR7fPlCdpsCp4vrHjO9ifHtA2Ubl4Rh0DrNMM/RtJqmqqmVZtfeeMh6FRH4AkNVsx3HI0tpdZqm0xvG4x5FVaIjg6TFYsazR4/Jirili3G9gKALniVb+VmWM9kboWlwfiGD/+1mge8OSfMOrq+6RMJnG76iq/THFvMIQQyNBwpGVtYJeSzoBj02a2nXulkQxQvSVFGJ2gboEWVuYejyAN+Gc9APSLO01RKrm5DFdIWhYBDDyYA6s9ibBGxsq/17Quh0nC6O4kbuWBmb5ZQk3UEHK+7dHbHdRiQbuT9Oju5SVwVxHRKoLsp8do5Gh/0D6TTDaM5mYzEe3qM0ZaDoOgHjB0dsQxmg9LuHzKchw46PpzoKlAVVvqBsSh4/lHogonSo64ZcMRN+7aMPeP78OZtsQVdIyIGlfIPjBQQKShP4ehtcAozHJzSkpElIXSsinIN7WJrFfHqFr97VyfE+jx51iUIVEDUOnjNmeBeqUh0KDZS5R1hp5Ip85KOPe/zVj54y6EpbtHWHZbih3/OJVvLal9EUx9e4vrjEsZRelJvTDXroQulCGQ6ObUAjGI9lhy0MN+TllqoKMXWlucYhN/MX+L46vPSIYe9EQneaW2XDOaPJHvt7eywW0iekeYJlmRiq67fahGTZim5vTFmqDnS6wnYtju98i2gl98Pl9ZzewKZQ7ItNrVMUGSd3jlo4alVVaHqBob3V31ptLnj88GMKVTqv6pwGQdM0XN28knbXd7HMDrpZU+XS1jVd6rOFihzALSWsRlSCFlUlGsJoQ6PPCTrflCZUNRRlhtVSWQtupxfMF1Ms1YESlJIJU3WbSlGz2lyQ5RmeQj6kaUojCgwtaBnEwviW4egAx5LnhxCCoswkm6YKAqs6RTQWnaDPVtmQY9vUddVqf1V1TqfT4eT4AUWpAkXbpKprDMNis5bvr+N79LoH6KpA0BgOaZ5j6IJMdd1vrl7QD74GtUteSd9clBq27pCpruNmqfGTvzqnP7hHuaPkrnNsx2SzDblzR+6jKl8QbQS5gqy7Xoc0caROkSb9VBLF5KlNmZuYqlOdJwsc2ydVhE/9gYvn9phNX+EpqvmyrDB0kyROmEyk3/jGN36F//7f/Z8UxU4vL6Uya4Tlkiiyqu/8xgHb8IrF9I
h+TxYuSy2lLMu2Q1PWIZfXU9YrsMyj9p2auk1R51QqBmi0DZeLlPlSXpNu2cw3Md/7e7/LH/yTfybv+Z/a/Pr3fIa6wcmxCjLjksW05tkzaWNZmaJXGmWR0VXEG/HtOUNT4GQLEkVq05BTVLQdPc8b4Q/7bFSADWB4Bv6xRakryQrHpCw7HO0f4DvSlwz7KZEoiG9fM7Dk/n98YtPYMapeiOtusMUld0cnxLEM2Dt7XW42BVoZUStm3Nroslkv0VXXMU3Asl2aRifoqI6iYZLlFVVJ22WIs2uitUOj7q3QcnzLRdcaTNWVzsqCru9haqIlgaiKkiSk1Y+DOVlc0GCgq2qOYetUdYZoahrlgxyzQ5xs3nbAqDCMGkGDpopOBTGGY6HXVjsOQi0JLcpS3l+eSyKQ7TZCV6QzdV2TpplkiFMMt5pm/DVGQxq91STbdQtNS+dnn77Atm0aNXph6w2+16fXl/7cNCS6xfUMbFMmLUKrMOwCYVRYSufRAuy6ohbyvaRZRpqmjIRokSO6rpMXKVEY/TVWyCyxMB15TVFUoQmp6boDxtSYlFWDrttoO0KwJOHNbM52K79nqAWsb9bkecOLC5lw/dkPA6oabFP6DcsUNFXK3rjDViUtjq1TXWj0ex6GJZ/x5fwMTdOJI/leOl2PzeqG9KXNo8cKXZKvcbMDup0JoeqUV3XEJz+ecveOJJy4na4xLZ3ZbMFAEcXVxQrHcej7Lr5CuOQJXJ19jlAMxqPefQ72K65nLxl1JPLAsxbMzjf0O3scjlXxb7NECL9FFFzdvkZg0PPutYRkebRBYNPvjNCQfmrsG8Af8zetd4QW79a79W69W+/Wu/VuvVvv1rv1br1b/x7WL0Xnqmlqeoq2t2lqqlpjvpixCZWOkGUgDJ2KhCjeEVro1E1BpirXgXfEyfE9VqsVui6ro7ou6PcDdMOjUZW5omhoRM50Ltui/bGPbuwRpjmbWHYZ6sokXVT0uz5lLSuWti/IMikEB9AfDpjNMlaruJ3HEcJmvY2oapeKHUV8SBhn9EeycmYYOppess1XrZJ0p9+jqiomwWk7a+MHoAmDbk/+3mZd4HkQeAOyUFYwkihnvGcThV4rilo2JU39VvROo8SxNVynw+WFHODTa5tRv0sWyfJWnKwYDO5QaQapqmoOBiN0zYDaJOjJSkSWCDqBDansAjb1HfJixe3mFZqm5h2yHEqHg9EJNzeyGxGlEY5jUqjOgN8RZGnM5jYlUVoG4+EdNNOiN/C4mctq+mAYENh7+KZ8L1l8h0WxRGg1rpoXu3v8hEqf09QRQlWNnj5+QpyGLR3uwWGPeLNlb3zIeF8+p+V6wYOHI7qdPkkkf88LNObTJeOJrBoFrkNRxrjOA2xLVuFHw0O6g5Lt1sI4VMP5rxd4+gEHD2UHo6in5GVGngruHknSkqKaEy5WVGVKobSvLC8kEA6ZIkTodaSex8152NJYd/s9HGPMbDYnVZ24zSpjvrnmcP+Jso0Iz++jaW6rk+QZLjpbTFFh6tIWAmeE2fHoDxUUKynQPAvLq8mLnTivhuPqGLpNrvSwhoOG4eAei4387nCjUVYVmzSh15GV62FXo0q7nH15jrYnK4GPTt+nLGp0U+6F6VXB4HAfUTfs70m7Fk3K5dmMoCMrr67tcDDRyNIVkRLszaKMcfcOtbVlfivhtnv7E3qDLt2BrD55HriWzZ2jEa7S7Nk/6QI/ZdDrYRiqMjd06Lk1gVKbryuN+WLKwfgYQ+yo33WKPOHBScDJ3XsAfPX8Jb7nYal97bumpLbOYLuS1/Ts0VOKckN4/pI7R/K7nGbInaMtHU8+k5vZChOLcLOkqWz1XQVJGtHrBsSRouC1KmyrS6N8RNA9QVgpL19/gq2q9w0W62VKt2+zDeWe8dwBvZGLXistPGdDki3QNI1tKN9nWeu4fo/pYorrqkpgpbPdbnE9Tf09l645QKuHxLHsEhmWyeH4G/zB//wv+Yf/sYQv+x2P2eYa35A+MClCFuE5D+7toyF9Z9HEVHWGZ3fZRNI3v7l8ydc//u12lqGsC3TNIk1ybqYSyz4aN3jOiCxLEGKnsSQIwy1hrjSlmvvSVosEzZDfZVsdprMrPF+n40ukQ57VGIbedpss02U6P2c8HDHcdT6qTAoDqyqy47hsonNMz2kJJtAadE1X9NfynZ5dvkDTNMwH8jNVnVI1OoYwidWQ2cXVcw4ORzS1iXLV1PVOW0iRAa1nuEFBXZktrFjoUNdSLiRTULMwDDmeDKjVWVRUcgh+NNnjpz/5K/lejIa7x89I0hLlminTElez2IZylrm2XuP3HmG5NpWCVFU0iKKmrnOev5Joi8OTZ5hm81aXqQqpm4S80ECXPvZbv36HN6+mNASYSt/MqjQ0zSJSunMfvTfCcwY01TVCEb6IWoASZD49kT5hurhmtdZbHaoir9E0CfnfzWr0xjmrFWgEoLq7VAaW2SBUZ1o3UzBiyryPYe6gmCmmZiFsjVDBsS1nw2qbUgpp06JpSGsYBkP+7u/9BgD/xx/9iOPj7zD+Ws3NXL6bRfwVnvmAnmqG3kzfMOjvYdsGva7sTpTFkjIpaHSNvvJxF6stcRxz51Te73pd0BQ5hv0WjraYJ5wenzJbyWq+b63wJgOKWpCq+3UH9+mnPolhoSvkgWGZROtrGkVwdXpwl2W0xjUqugqRcfPyBd3AYD5dME/kLEoQ+GRNB0MJx5Z5TpV5cr7KUVIehcnN7TnjvWMMRXueNx621VCouT6BzipMEIVNbUk70xuHyzfXNE0i4X9IhIvAxFPoCNOwcT0bTWswd7PMTYmpm7i2h2ntKNVr+vUYS3VHNVMnz9NWgwsgyVI1Q1a2M6Qg2GxWVKqNo2kaeZHTHQRstvJnSZLiej2KomqFtpuKdmYZoCIhyzJ0obWzhWkpO0fr+QpbdY6azKPhjKJR4sdNg2m46JrdfkYzBGZT4rgWmrbTebQo66KFS+ZlQ1U26KJBV12qTPn/wPFauCKNhm07qLFIwjBkOOxgCINEdY6rssJ1fMI4ap9VXdd4jvsLQtE1vh/Q6xktL4Eopd6fpc4KwzCwbYeiKOh70s67gcdmtaVoEiz1rrq+T5IklBvpq4tKcOf0IS9evOT6Wl53Eka8+vISx/bbuGQ6fUU32OcNEnZnmjqe62Jg4SpClDzNMbwO9w8ftRqdYbhhNo148ljCvIOBxR//P3+Oqx0xuiPtbtB5xnaZEIU5x4fybBDHYz778iW9vrTFohaslxn9jk+h9PGSsGIyHLI37PFXP/4ZABNFe/83rV+K5ErXBZVq5VWlHGA0zAon2F1eg6Y1pJWgKBWbngU1KY4tA1rP3uPlqzMaEYGa+VjOEkwzo2k00kT+bG+/T5ZlOIqff7kIcTyXujKJdkO9TkJVwmxRoqnDwxDw5s0c21VK5PaKqtbQqZktpYPSRYBp16TZGqHa+etNTJwmDPpKPyqCvJxD3QWFW+52XOpKCu/5jgxIfMcibSrqSkJN8uoTAmOfLBSYSnOlqBJm1w6GOUJTsz1lUbA32cfUB
+q6PbbhNctFiFADtGUBZZWSJjthTA+0iCrtsI3lAT4eP2R+u8DvrtnOFf7fikiWDbqC0aX5Et/rEVUxZSPhi7UwCaOKSvsJiYLEPHjva9zOX5EoeF/QC4jCFXVT0lHzcbbu4hguliaoFaNYY1RYwJ4v76W2Ag4mpyyiMw4Um16v75IWOhouh8/U3FnmotmXmIY0/iyPODk54fT0A+arl8qiBJbRg8amG8jn8Obm3+I6e1DKOY3TOwOieMl2G+GqYePJpM+rl1dMJh1eK8G+Rw8PqHILhPzuIjVxbR87ALsrW8izdU24XPDek6ecXSsoRKkRL4YEA/leXK9P1awYjnw0tTVF2ePhBz0MvaETKJhVs892O2E1V2LOWsl4MMJzHZpGsRUVXT76cI+zszPyWH7X/qTP9HbFwVC223/8yR+xt9+jKjpUyomcHJ0itJKXL15xdCQd56Mnx5y/mRLOlTq6ZfDk0UMOR0MixWDWsT0WUczp3TE9xUB3/2SPNMlZqeTj5OCIbtembiKKdAfNE9x7sMfNjQwiTLvEMByqqIdpyP3x7Oget1dLDMemfyqTBs02KfKYwwPFTCZqTg72OTzeY3YjgybblAfGw+PjlhBhu14w7oxaLH4wtCiziFFvhKsYkwzDwNQNDE0jK2RwfHpyShot6AzlPv7ow6e8fPUl/f4R04VMOvXC5MOPPuabX/+VFsKxXYc4J10O9uWzfBwHvHjzipvZF+Q7YgMqnj76gLocc3EldfSKakWaRKyUjlczsfjwwVOub3ylSwZZuWY43idJt5hK8yxKQvyuTpjcqOdioOkWabmmahSrWc8gTKWPa5QvyfKQTr/XzjtUIsYWfQpmLLYyIfoPf++/4p/+T3/I7eySh08lZGy93WJbbkvAEC3WLFaveN9+hqE0usL0krIQZHrCm7Pn6vn9Cr3+fqsf0whBLXKyTU6tbdS9ONhOQEFMo9i0bKvD5eWnGOoAD/y7WLZPoyeg5lMt02e1nuI5I2w1n2rbHmm2baE8WVawDW/x3G4LbcuygqZp0FQiHkUh62jG0+OvtdpQcbLGsX1sWyNO5LVfXFzy9PHHdDqyaLLYXmDoLpow2Yby2W2iJafGQwJ/wFxpb9mmhWmaLFfye6Y35zzo3aPbmRCpRMrRPYoiwTQ9LhXzaK0J9sYPyKtUPTuoaeRckgpWD8cHWIZOKSISFUhpOhRVSKq++zd/5xuEt11ub7cIBX3S9RpDc2iqmO//rtSUsdyczTrEdZQf1iJM0yZPNDxVpEiK16yWDdABTRXRfJ84rhCNvKaDI48Xz8+xnQ7GjrG00TFMKULb7Wfq/Q2YzjLM/k7/K6LSKoSmtXPC09k5s1vJXLcLgExriGZULG7l/+/cHbC3VxHORlQK5oUoKdIcLINcJZTP3uuRomEIRQqDBgaEccKTx3KW+C9/9EP+zf/+Cc+efI9Ek7O9WeUy2bOIt7L44FgZ05svsUyPOFQzNFmIbRkEvkcxPVP31yA8rdXjDNdrju4cI9R+ARh2DfkdtdL1tOHNxRTHq5iuVCHFGzPueNw9OuBmKd9pWpu4/QPWqlDc0DDq99mGc372hfQbo66H36u4uMqwVFCdzD9nNPmIhUq802TOZPiMMMswKrk/msrk5I5PGJ8Rx9J3ffTxN4jiqtVcW29CDKOkLhM8R56jebElDGOCXkkaSVuwrJogCNiu5DMo8oZNmmAadgvXFKLBNgVRukbPVRJmBXiuyXwrzznJyqpRr6O2SKFrFmVTo4saU0XemqaxN+m0xZyiKCQraaNhdKXd1d2gJdPYMQ8LIdD1t6LFdeNS11KwuBW+t20Mw8AP3DYpc5oE09QxDQULtGWBIE3LFtJo6l3KSlPESPK74iTEMJyW0bMfuIBAM96CzIoiQwjJaLj7nGg0iiImXiuSFF0jWW8py5J6Ny9mWWxLCX02VaGkKCsaUdKzd9qUNUKA7zu4aua6H3SomrolREvzEsOwqWrasz1NUzo9A10bobUi6TaT8SH3TiWUP44jTEdjf/9roAqLush59eYz6rJhNJIxnePtyWRZjffE6xVWXVJnFYaC9ydRztn0DX7gEKmcYDzaw7bg88/kGRpuSr71radc35yh5bKw0aQN42HAuKuhK2aYrKh49vAZ3Z6MLXTT4Hb5miJN8Hw1f1tqVFVDlq948kx+Lgje7tf/v/VLkVzVDXQCeQjOZjPyPMcwDOrdJtNNNM3BsORMCIDtuWzWFa4KGK6nN2h6jW4KLs7lgeYHJrZV09QOhiYd9fXlFtsBT4mKJquEF1/9gEF/QseTVXBTWMTJkiSd4ijygaYZUGsZeaGy28wBUrI8w1eJGtqWpgHHzylLJY5XORTZlpWq3pnaAC8YEIYhTlca9Hw+5f7pQ169PGPcl0FLv9vlp2d/wtFdaeCdYEwSNTx5cI/PPpeZs25YdIeCxeqckS8xpZtNyXy+oCykc/c7BnUjsGyXSm0os+MSFzV+IBM337NwO3B9BkeH0iFu1zlZGuF1TGxDfg6xoW5KmlJe92o7p6lSHGvAtYzl8NyKTs/EtEfEhUy43tx+ShTq1I3qFhY2jr2PZWQM1XDgi+dv6PfWDIcDMjVvUJcC083pqjm71TTF78Bs23D3VAarmAlNtY/uz9gu1UxAJ+Wg/2t8tZaB3De/8W0W0w0v3vwAS5Pv2Pcdrq7PCexTKmRgL6oxdTMiVt20NIzoen30xubJfXnIxnHOV/lz4tUQUyWZw4GBXt7l8xevAPjGx9/j8+c/JQjG7A3ldd59cJcX/o8wqmOqw10p2WJow5GiKj09/ZB/9a//kAcPD0kSGbBHm5rp9ZrA6WKqztzRvQNWi4L/94/lcOfv/v5H3FwvMQyL7z2StPnzWcjeYMRoWLTscpqo6AUGpi7v795pj7oyqXLBB4/ke7++WvPo0T2e3T1B4223wK6XHE6k8+sPAob9CYEdtNTkg+GY1/Zz7twZIxQboa0nVDTcP5LJXJhdcHA4wrF7/OAvfgjAex89ZbPZMFHFB99yqJsCz3EwFINSL+iQ2iuOj+/QGHI/bsOcElirmSELh0l/SJXlBK4i+tgqgWEBKxXkNlWIbey1JCZxk8vEyrEQilZe13WSLEHTipad9KP3vk6eXjPoy2eZhRYfvf8tVpspjqUw9HXOzdWGk3tOyyTpuh2mlzqL+a4yKd9Lr/OQQV++93W8ptE8NEZ0RrJ6fn7+GXEY8f7JB9IWk4pPvvgLXNdnNJG+62b6hqDr0RMWN9fy/rIqxtdNRmN5mFzf3lCXBZY4ptBmys412XmhaYWpO12b6+UlnUC+4zwRxNktFSu+9Y1/BIBjHfPTz/6ER0+fYakZj228pmpy3I4Sd76ZYVoQdI/amceqBBodgckmlHvt+KRLVWrtPI5lmdiWzez6hqKSyfGD+98mr0qaRqeslYiw1Wcb3baJTMefSMa5RqDiE+pasFpPGe0NsBSpTpYWkg1MsYxm2RZdr7hz8qhlhERYaFqJruay5uGaNE1xrVErXm9ZGnUtbeRWUYcLdA4OjggjGWg0TUWe
52jCYrmWgff/x96b9Fqyped5T/Tdjt2fvsmTfeZtqyXrFllVZJEWUJJpCwIESbA9MAwYMOC/4b/gAQF75IFhQDA0sGwIpmSLLJpVrOZW3S5vdqfL0+8+Yke3ovFgrbPTA1EjDcpArlHmxj6xI9Za8a2ve9+33Wkx7O9R16xA9WVZomkGF5cySbOxOeBg/33qRl/hPNI0xbZdomiJyG/pxENMzafRUvW86rkrnfM3RwD88Q9+wrIIpbSHcnZKAZoWk6qg8OZ8DbMKaaqGUpFJmD7UdUNZVHiuvPDf/PxXmDxZYRJ1rcHQfaqyQJGTMpvNWMzbuJ5FrhKlcVTgOgFaLc8LxxdcvMlxnDameUuOoVFVGbal0w7lPfztL75CNAa1Yl7zdJNGgyTJ2eorMJEGs4lLq1VQK/IYIXI828RUgf5o+pr1vQF13Sa9ZTeyBbqt0WgaqUokQoLFgKSUa+zYNYZlkGoOsUquWOik6Yx//r98wX/930pSm4v6BXWqEarkX5E7bPQdLi7m5Ip5OAxDZrNLTKNmNpX7uqwEu1vbGK5cl7XNPuPxmK5iGAVIsyl+GGKZyi9aTGgPB3h2gqOId3TNwQwFz199xe6e7JBYVguMyuZgX55XmZ7iuyF26nJwT15rmaVQjzEf31tVhAcPfsjRqzd80lUJSTFnMVtQ5BobW9LmxUlBHMNwfchwV+KLk6RktowYL+S+MwOfXncf03wrGFzqHTRNY7ge8OZMOgqtVkhZOJSlDAL7AWxWW8TLCYWac9O0GF3NaHntVSBTpGNOR/NVsGNYJlUlg3NLrXsq6lVlZsUgaJq4rrPCO4Jks6vrGksl2tANmqrCsCzsW0Fn3aCsaooVLjLHtlxs01p5z4ZhrFgOK2XPws1dTFOn3ZVfsk0HrTGwbJ1SJRYLEVFVkr2zUiyYgXeAEGJVQRRCYJo68+htVdO2TRk0lcWqcpXnkmrfNNzV3xVFwWKxWGFf51GKXhoyTaUSHoapsGjKeNqWiahKFlHOzUju4ZNqhGGCr/ar7zp02iGG1rA/lHuj09pA83SWy3zFAFtVgqouVpX6dmdInjfkTYrRSL+vaSre//iHNLW+kqjorT+iqU1qxcNgOxpFIfHFjUosesOaNF8iyhRjLvfLyeEZne6ANL3FpuX87V9/Sjvo0u3IYPx6qWOZcHV1tdpD/UHAcG2D46NfArJStrV+n/OzoxUen8bBdnR0qyBQBYH5+K20wb9rvMNcvRvvxrvxbrwb78a78W68G+/Gu/Fu/AcYvxuVq6phdDNb/duxbJqmIlNl81JkWHbF+uYONyPZUlGWEstxdPI5AJYdsjbYpcgrbF+xBTU5otxG02FjV0bvJycj3HBIrfrl++01Avf3qOoMU2UwPKdPmWls7d7hUtGzu16A7gkM1Vo3nU7xw5r5bEGeysrKwd194vk1i/GSgapYzNIle1v7+L6M5q/Oc9qBQTfY5lRhoHa2d8nSBtt0WMQyszOPTxlu2KSKAtzUu7S7gmcvfrHKFkaLDCtfkqeCi+xI3rvbxnVdponM0FS1RzsccnEWrZgVG6fAtjrktWKNGs/R5hG16OGpKkdV5ezt7ZEuNaKlvFbTuOhmhqmyOmFrSFrM8EyHjsok26aDEBalBqYmPzu9OKbVNdFVtiJapHi6R5pPGY+VrpdjkeURi7mNofApd3Z3OHl5xXUj2+hSITh8fY7vdQlUG4JulHiuzmTiYivx6DQqOY/iVfUgz+e8eHmE0xKYpbwHTwha/iaOMWCRylYITdMIdBeXW3a0GrOl89lXMWkq16rdN9k8WOP69JquL7N8yaLh6uLTVdVxchPx8O5jzq+uubmRf9fT7rDW2YNS4PdlxcLWB2jVjEKT6/D1s7/i+9/9hLxIcNcVBWgRMbq+xmtnq0qZiAtafsVP/v6Hcl1wKLMSwzAQxS1O8Zq13iaGtUWpKo1nZyfs7W+ilbLy0e9+j1IkxKMRkRIMvru2wZPdbbJizs21rIZYhs9PfvQDvnr5KQCuazNodwhaGxiqhaoQGlbdZzDcXlWcXU/nq6++ZmNNbtjHa09wXJubq4jdLVnNsooar6mwfJnmnEym3Lt3j6oQb7OVyzOGHYfA1SlUz/zkZoTtuPiqDePqfIyvOyRNgqnf9ssrKuBax1LMamu7WzTCRU0vR69vABPqks0duS7jUU4hcvxORraU2eTDw0NafosvP5dzsLa2gWt7bGyFHClNOceyOTw94eQ0pTuU1/KcLkl2jOlJG1HPWiyTM1qhxetDOeeGa6JbCVn+Br/lqLnbRjQvcH2FRaUha8bsbu/w5lyViTUHtJLLm5golxUS27dJU5fRjayqJElKKqYM+gZpLjP1s4VB0PLodF2mU2l3RdXGdGpMW2b9TEfnZjThux/9UwahrJ69OvkFs3nEJ9vvr3AuZZ3jaDaFwjaMpofs3tnCsnssFE1/UeQ4ts9stkBU8r6i2MCyOhRC2pY4m+G1dhmNz9BudQpLF12HLKtWbYClaFim1ziqHc20NAqR0DQWhqqGFEVKks7ZtLawVetH3ZRYlrNiBjw5PWKZLjDNLrrCFpRpQVmL1Z6az6ccHNzHsdsrdtmqrnBsDU0zGY3ks2xstfF9n6WqhjqeRa01VJVYnVd6o+M5XbJMWkKGAAAgAElEQVQseasfpIEoSspavrNu4KDVLTSNt5pPhoWuS+bSW6as7Z0DTNMkv8WPGA6u7RIvYo6PZUuMb5fcffB7lELQKLpm04LlvGJtU9rO99/b5ed/PcHUbTRNtfLoGmlS4bo1hiH3Rhi4RNNs1fJUVTmaWWFgYLryO7olqCuduinQuNVmCxRuWN73cBDyi6sZrm+DqrrptkmSaQzWWnz4gTwz/4//7UswDSxLuScVJFlFnWR88h15PoVtg7KxWGYxLVNi5kyvkgLZkdxT/TDi5NAiz7dX+wfTZBnFNLVJty+v3+kFfPZpQhDKdRG1QBMNpgmzS1kB+U/+449ZRiX/0//8Kf/rv5D29IOnNVezl4TKnle5YNNpsXPvfc4uZYv4TZQRBB2uR0sevy/t9YsXz/jq8DWdjqwSB56Ja7WZKbFngLB7l0W6IFX0+74n8W2TeMH6UJ4zQq8Rk4iWP+DiRu0zG0Knx+mx1J2zAgd3v0t+LVhTOFCz7ZMnBuFaSarYO3/z189Z665zIbciea4hRJemWfLiRJ6/hy9P2N99TLezyfJKfvH5V8/ZOjggENJO3rm3RiquSHJIld8wGK7T7+5wfZ2y05dz/PrVF2xvfsyHD+QcRMsL4oWOFYYMNqW9MSybxd6CNIoYqE4js67RrXqFg/I8jyRJsC0DoSjjdWo0U8PxPUpVEbIsC9u2uS2nOY5FnueS5fKW5j3JKYqSeZQilGh5nBREcYqjYB51bVJVFTXa6j3O0yWGYRDNprSUvX61PITGwLZUu60Gga/TabdWDLdlXpJUgjTNqBXOM8sKdF1fYcMsy8KyDPrd7qqd0HVdXNfGNE18JWXhuwG5ElMGWR23bZNOZ02KZgOaLitUSZJQqy6YIsuoqoZK4RS
rqkEIyTTYUfIajSkYDHpUih15MBjgux62YWKqKn9eljTTBstyMFQngNYYmIZLqqADXksjbHu4ObQUdCCOY5omo2hKbIXNznOB53XIPLme06jAtjx0e4CpWv3RdKzGwXV7hKb0ke/nE+Iooa5v8dQJaTKhKhrORnKft7oN11cR04ng4I7s9go7AefnEbFqidetlJurc6q8ZmtDitw3WsxstMB1N3G8W7uk9MP+jvE7EVxBQ6h0i0SZs5hNCVoemipVtltSc+rZl3+7IhYwLR3LMdndlOXptBCk6Zgkm2IqEUHDyrHshJubG3xPtj213D7jmys6PWkgF9OMdsfFc4ayKR1Iite0u20s3WdjQ778eV5RFj6uLzdhK4Buu0/LW5IpLNEy0iibgnZ3Y6XCrRsNWTFHU4BF29Vp9Jg88egoPE6WpxTFnFbHIYpve1hz4qXB2tqt7sQFs0lA4OxRKZHdaDmnP3hAuz3DthRWoqwRaUM7lMau3ws5OT3l8ZP7TOcyiIiiKbNlztaWbJGbJ1d4joHpZcwW8r6zNJKg6sairbR2Op2QL776Ff2ebOG6mR7SCrqcXx2vNLsSsaTIGta7O1wrMdD1wRauZ3B0quiTByGxkKKamiYPRs9rM0uOcDSNupTXujq7IGj5HL6WrURoc2wGLMYjeoFa92yO6bg4VhcVh+LYFWkxYdhXwtGLBZ1eh+Fgi6NT2S9/ejymE3TpdibkimL8+z/Y5xd/OSPw5PN+9PQTDl9d8/S9BtdXxqhK6LZ22frwAV99JvUUZlnGYDCgrdpDDEvg+W200sH1pbNaZDfc3dgEseTluVy/g8cuRRQS1/L3ttZCBv0N0mzO6Ea+uEl8ytamy/bGLtcXcg+tr1vMo4j0FrfUBDza7zBcs3lzIuc8z1wCy8VwMnCUoOX736TTa3NyKIlG9nY2iOOEwcf3iBbSAAZ+B103ODy85vvf/wiAujKpmprvfSxbDgfDFotoStM05IoiO6Phyb0DDEtfOQmLecr2bpvvfle2ul3dnHB5PqLRau7ek3s/n42wQxs/kP9/9MDHbxmSTlm1lQgRYFs5TV0ihMJzbW9yenbNdz+R2lt/cfWv2N7oMhcQK1kAw7wVcL1h0G+pdUjJkhF+W+3pbout9R3my2NcFVhsDDucnhdUJUwj+c44QYerG5ONbTmX8/SQ2ljn17/9mtuRLXVwU1xzyNlY4jDDjgW5x3QiW1Q3NzpUHgizRsW8TKOUTt9hXsxIIrn3j09O6HR9KlPOZWsQ4vaGRMmEoC2d1cHGFpdXR6zv2fSXslVpGZcYVoKhSAXCgcF85jOZT+h0b/vHS9JkgeuXKy2xpskJAmvVkiPKhk++84/xjacI1TI2m80oMrhzsLlqB220BIM18lzZwPQYh1LioBSluWnVOI7D5fVLKqSj1ut+iBBi5aAELY8iL7kZH2Go9p6g1aOocmxHX1ExL9IpSX6N7SkdPxw0U1LA3+rOTMYTCrHAsd7DUQDoNF1Kp0j52E0jKMsCg3DlkNR6ia7V6CogK6uCPKsUCZEiYGgMCpGgax7j2REAw/UA1+6sBIqzbIHrhYg0od9VlNxZh1bQJ8tjanWuubZFVWlcXssEjNsW7OzeJynmoDTQNE3q09VVwb2HMiPQlJ4E+yv8gShrRFGxjBb85O9/T36HLrqmg9DJEmlLul2L63TJcFMGNql4Rl0PccwCFX9R5AZZWnD/IKAQSselcqmEja3Q8g1QVymT6Yz3Hylig1yjrHwCzyVRzpzru0Tzgv5AtVRVJVURYAZgKAxbViVYpo9jTRCKRj6a6RiWsSK9aIoGwzQpxGLV+vX5by9I0qf0PHPldCbpAq1eo1npN16QLtu0PWeFoamKGtNyWUwSvEBR6WcRdblGXqpEpqWhlzWBE3CpEm/9exZBOKM7cPnlL6X9/OCjb5CaU/oq0D85OUEUS6pNSJVwYFULstTC0N0VrqbTDRnPCj77Uq772tBn2N0nX7511izX4fLNBb1ArrntBJTUVMLEVnIby3SEqEyiZEquMJZua4txc0FHSWHWecbF8SviaoGWyPPw3tYuJ4fPsFsBeSMTBO8/ukd7fZOzC3mmxfGU9x7/MWW15F//haSb/tYf/CkX58+5ji+5VOeTE5gkWYpQrfSff3ZMr7fDxu46jWrPKuIbrhYZw/5TEiHv87sfPCaKpiQzhRVfxrQGLURV4XpyHepGUFdLOn2fRCzU3ksZeA9AU6ReGAhTx3BcHEXGY9k6Z+enmNkSx1E45aTAshp87xbSEKNTI0RNoQTXHc8nyRa0Wq0V9lRrbFzLW7XelmWJKCoyUaxsoO+ERFFEu9VDQbooNYNOx1r5eI6t4fs+juVhWKq1zZVtiU1V/n8SFxWmqb+VEyhr9Z0a01S41lhqffm+u9K5ggbX81e+r+NaOI5JWYsVgUYcx1RCXm+ptKFMS2c6na78avlMGp4VrEg15M/WK4mFeJmi6ya1VpOIW8F1A6MxKIvybUumZmFaHpaSBcrjnGQxw0YnGSvcuWai6TWmBa7Cvnp+R7YqZnLNN/wWdaWj6Q2LuSJT0xua2iQdXTJRWo2u06LV2qFWbckNLbrDA4So2LonbYSu62iVSVMbaGoPvT76grio6QxksLW+6SOyBUevjrm5JUXb7DBse7w+PCVUyWnB22TIv2v8TgRXdV1zfa1A4ZqObQeItEKdu5iaTrcdolHQG8oPLa/m8NU1ldJXCVpdhIBBeIe8kk58nQ8YDrvUKSQz6XCFnZBh7w7XY5Xp0UyqOiNNSna3JPDu5GJGXufozWLVM29ZLsP+ATdTmRHyfI/lMqWhIFDZ5mQZY9k6tm2s7r3d6yEKVqxcRV6wmAl8J6ITyiAlyxOaJmUyHdHqSMOZiYyiEugKiFsKi07HIs8yjs+kQ7e7fZ95dE3L7yEUOYXjymxglsl5ubk6R9NyXr/+fJWhresMQzf4+isJsK2NEb1mnfV1H72WTmiaj6g1CbYso1tRvwrXWmOZymBnupiziDO6A51ZLrENWVoQxxmV4ZEX0ojYnk2WGHRUtSleNGRJjKYn5KU07nF8zcHDDc7PayyV1TT0CQ0ZrZacp3Rpcnk8InAsjjK5DkdvZKVje2+TXlceROeXF1huyfRGvgR37pu8fP0109mYxUIGNo8ffkCewnj6EkNls/7Vvziivf0Idyjv88vXLyjzKdvtDoZS5e47IQ93vsGnJ3+z0gnzHI8g0FaHvGZobKx3WNtweP65XKutnTv0fFhOBFvrChheCIrI5/5TGfi/PvoMvdmi2w7otaUTP5iabK3tspg1dB5IDFtVzHG0jZVGSae9jusZjMcn7Cgcjxeso+HS6a/x6kQe4gcHj1hEN/T70ogNuwdU+dd0ez6W3lFrZdLUOn/yp/+AaSSdiEUcU1YlU9UTvbyuqOqCm5uI7W1ZvZsWN/h+G8OFtgLx3rt3D8dxVsQfpt0wmwsePXpEnMr36tH+RyTiijRVTEGTM9pr65BZ1JY8ZG1Lw2v6eAGM1aH+4FGPh0/WaBq5Lv/0n/wJi2nG7O
VzHtyTVaKt4UP+O/4vPv5wSODfCm9LJrbCVg6grbHW9/GmA0zFBLh3b0BJgmY1dO7LZ0lTwf69Hq22fId+/asF25t36fZC8kzu4WWW0urfYzZaMFWB4c7BNlVWU0+V3QpdHEdQihRUhv/j959ydPoStwV39w4AaAc+rV6LZS7fUZGaBEEPx5szj5Re1SxnZ3eDJOpjt+S9u+GCm/EFQTBQazcnaA2x3DlFIm/q/aePELVgPB5TKcY5TSvI0xhTl+/xew9+gm+/xzR6xf6WZF/62c9ecOdRl/sPd4nmyvnQDCwbzi+k09QfOtzZ/5CKHF0FCGUpD7V5dE5X7b1eZ5u6yVd973pjU1YlVZOwtb0DSKB4WZZglASuIoYZvSHNZuz6kilTa1qU1QyaBl1VymezGZats73+wQpbp2kSi3WLx/nNp78krzP6vS0ydYYYJlS1TqUqpsfHx/zBj57SCWVWGEA3LAxTVqUsV362s7uOobXJi1sduhpdc1gm1+ztyWd59PhHXJyUaFq90t7RjZo8KVdsmn5g09Q6ul2ugsJaCHzPYTq7YW1TVevoUtU5VLcaNw2mbVIUCXEqbUTQCcmTCq0pCBQZz3IpyMspm7sKkzjTyFMLO2DFpik0i0JMsRwD11QMbdk6uuFSI989DQNds+kNBAd35fNNbkqqwsVve2TKEbZMjarUua9YFMuyIk9N3I5Glso59z2Lq1nFnSdtbFVdFoWOGzjkivDJtm1ykWFYJevr8iwYvZjKDgkBukpAGE1DKRo0JajdHZgk4zaGoVGrA7kUBugmjUgYKdKHjzt76JpHLuTzlY6H3lhohSC/ZYS8OWNrvcOTJwf89Kcy2Pi3//uAn/yzJ4wupS3b2Nhge7DDzc0FKq9AXi+ZTiq6wYDrU+njdPs95rOEx49k8qrIR8SLGQNF4AHw8vkXeK5HFKugQpTYrRazJCd9LRM14fqAsBtQe0vGhyohEKV017oUynle3+hzmWekN1d8/aW8llYm+Ft9xCylvyHXL3CGTCfPKZUm0dMHH1BU17Q6Nt//8e8D4Hoa3vABWZbhmtK+kELY76AbMkmq5zCPJxQkhEp0tmqAxqQwLqiVjb2ezCmba5aRfOa1tU3StKHtWZz+6ueATObu9jf5+tUrOmvS8T0bzbi6+A1PP5Rzl2YVthMQJxlCiUIHgUdrbYM6r1GvMr7fQuQFmcLVRnmCyDMs21gFGl5RU9YNWRKh4npMHXSzXrEhZ6LC0Gtc31nZLtN2GQ5b1HW5StRUeUlV1qQzGUQsRIVhaNhmQ68nz6elOcexA1zPxlVaW4atSdIN1XZT16BrNnmZI1T13BuG1DUUxVsMqWnKgEqv1TtUCuIyQwhBtJDvEbVOVerYho2ncFiGoXFnbw3jFj+GRtNAWVQ0SiA8z5egm1RCkcL1BxKrZhm0QnmdusoRSDZWW4GsptMpRTHDVYEbVYWl22RFjdtSTISNI7XDmhjLk58to4w6Z9VxUyLwHQ9RFCvx4bIs8FoBVVXhq2cpq4IyXVCpwFQQk1NQ1/aqKLIoDOompiqaFblRd7jH2qa70tqtNA092OXgvadUheraqhZE8TmGO8ByZGI/juTZ/HeN34ngStek8QAYRxc8uP+YIjdWWdTJfEIhMjTHIVOpgePjK5LIpTdQEak5x9G6GFqB7iphPH/JMp8CLotYluktoySwLIQSP3VbGW1/yHw+4nomQfZFkULa0LhjXAUQXF8PieMbamXwF3GCbozQ9BaLkcxurQ03KRcuaXpNrZw+GoemNLg4U+1vbiCrSlq0Evpz7DYpAk2rUUlinKrL9rZPU8mNkxYJc7GgqZc8uCMrAfNJw/7+PebRjFI5DWevb7AHOr7KMl5Pcyy9Rg9KsoUS540XbG18yHBN/j8rbNb6d5hOT9DVfXue3LiNs+T6StFRVjfkIuXhfWnY4s9fkUSvaD/4E55/LWl7DQRp1GDpM1xVPq0bm8bQ8I0DAEbzMePRDe+/95iuKvN6j+7y+uINDgVRJX9v4Fk43oBWqMCd2h7DbwzxnAGLE+mwf/+//CecnxT85qf/nB/8sRJz9HxydApVubi61OgMPmLbg5dzldnBwvVzBuFTsrl0tHs7FdeTiOZYzoHt6uw8Wmd2cUmhymIf3etyefZbNtvbnCWSWGS7bzPwQ66m8vDUgjZfPz/kW9/5Hp2uXPe9zT6X0wv2791HO5VBi24YZFpMEkmH6N6DdfJJwmIe4/RuqXUD4kXKR0/v8tUz6cSXucbenXXOL+W1K6fGbq3RN8yVuONkdkoDdDodnjz6FgDHFy+wzQ73D+SB+ubwCKflonlr1HNpRE7fvGR7/z3+5jdf0goUyN20aLV2uR7JgHaw5ZGmNmGvwg/k+3d1PmL37gZ5YfHitWxNCrw+ZZayFLIkn41jPnryhM5+yNn/fQTA1gCyyqCppWPjD7podcrF6NnbanNokeYxIoPKlPdk2B6WazJXrSdl2UK0NH74935EnSnxSsUe5Qwc6tu2p0YjzxcM16WBtP0pJhqe0WFjSwZg8/mE7YMus2jGYFMGq4vFjFl8zo1izvzmN/6Y56/OCAcel7E8QANbZy1sczr5jHAg78Fvb/H85m9xVZb87GKGaXYZDtrc2ZHP9+b4nAf73+bF4c8YK9X49qDNcrnEUeKn7Z7PxeUlvYFN2JKORpYeM5uUTOa/xnOUs6MJHGvImzfKJq23KHOfwA24jGSQfXW5oBBzbFcQeqoyL2K6a5uEnmyD8Jx7zGYnuGGf0fS28nHD3/uz/wjT9VkulNSD2SAqjyKVa/zNbz+kSoaIJKJUQXZTucTTJU0Z8cGHMijKpxplbaAp0gtTg2gZ4bZzwkDOi2N1oW5oqgRbZWRHoxMC32TYk99pjJK6NinLClclN+L4BsMwsK2A2wC2QWCYJtOZtC3bextURkJRFFSVoirWcmyzQ7SQ7/83f3/ABx/s8upzB8u9bcED22oxHc+4d18mSdphn3iSolm3oHAPXSupREnjynfh+toAbRvLtgEVmOKwWER89J5sh7uenuLaHlGaYKoW57JpiOYNRXnM9q6sHF8dDxB2hXnrEZUFmulwNbnmYaCon1MN3QlYRAtM5SkaZoMhdDbbcg9PLodUmk5lVBjKKdM0gwadrXWbOpFrnOQlll/RqLKfRkFTmuik6EK1WRsbOOaEmpJGnUW230Y3Z6wN5bzcXAs0r4VlOZjcgsF1ND3nvaebvHwp7ctNpGP5OoFQFRqRkyUL9oYd1nZVS9XXJnni0QxqqlJR5xtD5vENVS3PhsDoEhNSoFEpx0nXZZIwyaY8fCTvvZi7LJcNpgrAzFSj0CIMo4WhmAjXtnpMRyHv39/l7FTa4V8++5wPnu3hqMRGVTaYlovdqhkG0pZcXma4bei5DpliUby6Kigbi0LZDdMPsERFb+MttXMaadzd2WY8vaVKNylyQzLsJdKpW6t3cdo116cpnW1VydV1bN+lsybtVlwsqcsRuchprcl35vz4GWvrXVx/iFVIOzXKX7CIYacv53w+G1HrHRy3ha7L+
Tx6fUWr3cc0e7QcGSBkRIhlQ7sj94bV38Dp9llMv+L6Rq7Vk4++ja5pHL/6FW9eyGttbW0xWiyY3VYwCJnPp7TCHSJVDSnmGac3l1R2h5tDOQ+DbkVo2Tz72b+Uv1+1CbpdWkFDkcl9F+p3WeRvCCyLZawkagqBYTc0qkXNant0vHWqZUyZyndtc++A8fUFw527WCoRbekNVuDhqoSv49o0pWSzLlQg2lSQZQm64a1Yr8tCoxWGLJaqClcLXNtElCaGahVsKKg0wXIuUB2NmJaJpusIlTip8gyzWWDaIaZ5SyPfYCBFxw1F1OboIUW6pFHnYykaNFxsm1Wi3/UbDN2lFDWWrijxaykdcRu4JUIyplbl24pwXYNl6RiqcpbmYhVERUrOp6oFGga2YzJXnSOWZRGGHqmyI7btIkSFa9k0qo3cdhqESNEbnbpQ7yglpqXhqCRUkiQUQlYNfUUmEQQhVSXk/5vbgBJqPcdUcBCZkKuxdGNVGfQLgWmEVFW1oukXhU3VlDSaYpvUDOpGJy/z1e+BT2ewS6s75/ZM8Qf/frbAd4QW78a78W68G+/Gu/FuvBvvxrvxbrwb/wHG70TlStN0NIURuPfwCfGy5GZ0Ra0E2HrdIWlWsNEOEHMZPQ+8O+wP9FV1q84cWkFAnJ5hq97Nmja6rrGxqfHwyScAXJ0nnFyc43eVYKgVYmouYfC2V9xxHHzHk3pbt330S4s0G6Er6lLQiBYz/FDQbctM8pvTC0QBnX5DqnQgDN2k09NJVUtOWlywOB+jWQ19peuRVzWFqPG8fQohWwycIEBUBflcZrJ1PaPRfaJogWe11Gc1xycnLPMFrZ5siautitevJfgeYGd9l+l0wSAc4nTlvb/38Y+4Pj3Hs2VGcywsahpKQm6upIDnk/v3uJlqvLl8wZ/9g38GwM9++a/56Ju/z+Hr5+rZXKxgh7Nfv2RnKDNnYXvAWXPOIHQ5O5ZZqd33HyKKjEWttJT2Ntjv3mXCax7ekZnseFai6zpe6FMp/RZvPWR+o9MN5Rrs3R3y5moKTclP/vF/BkBSWMzO/opv/MFDDE1+Twt0jKXPelfmDpL4jAd+B8we73XkPK13u6SORXS5YG9f3nt34NKcjPlgVwKGl6UgTXMe3H/6VoQ6a2iMjFK7ZP++zN516NBz1tCVTlIczXgwbHP66pc0Kjt59jJi9+5TyEtMlRE6PvuCp994ykC1o0ZXKaPo55ALily2Qe3eWeOrl5/yysm491B+79Of/z+k8zmhomttOQFikVE3LpoqdbfXNolHE16dTkhypfvk6BiGwem13GPjdM6jjQdEcYrVUjiXYhutCXG8aya3VObGBujLlRZO298inR3i6A7nZ/LaD59+m9k8IUtP2dySVaGwc4Cl5/SWMi1nDgpc00IXOtv7srp8fPgKzTewm9sK7Rmu5eNbWyuaYOolTqjTlB1M1Wq2SGf4xoCxImTorTf0euu8eD2n05Y2YTqXrb+jeUqj2jo3NwY0ZZvzK9U+5YfMZg2V+YqbiWw5WkTghTVJIbgcy7nK8xTf0wgUtbfhaBT1KW/OYzbXJQB7ejMiSQrW+o/xbJmFLrIEV3vA5eXfAHD//n3yVGceXTOdSnKMXnebl4cvCVp9hBJzns1mhGFImsp3YTqJaHcNlklEVcl97ntbdMI+frshXyoNqyDg4vKYx+/JNh3X7nH2ZkSjW/zwR/8FAH/6h/85k/mMxhjx9bNfqL0hEMLh018peypGtLwOlSi4UPvlez94ysHdbQ5fl3iqhQNM8mK5wk5NZme0tC10I0So9gxNF2RJw5276+xtyUrZ65lLFguCjiLQMDuMxxcYpJiNrAhVosZ2K9I4p1Bg6kVyRtAxSJQGSn9gIcocx7HQkHv45PQV69t9LMta3VdZCXTNZj6XFbYn768ThNssrisMlSFtqoqqKhkrcpfv//g+k1lEhU/g3GJfZdZyEV/wB3/0TbWHBdOioR3IlpVCCLAEeVLy9CNZJRaJiah61HW0yphqtaS8fvKexL4Okw2EEDiOsxJctyyL6XTCJ598tKrgn5QZDjaVuo5pOaRJjONP2b8r9+ebS03SsNfOitAiy2I2twMahRWIEwPXaSirjPK2hdtvIcoYr+Vz+ELOlaHtU4sCibaCdq/LfBSxthmutChffDXGdVvoukboyXlwLB3IGKiz6Te/Osa1AwwadOtWcDlF02vaXYvLU3VuCxOv06JW+jWe7REnKb1uTVWoKhwDRLVA5A616v2y9ISmaVZioLbpoeHK9qnmFveVUzcmULGnNPMOX44wnAfc9oKVIkd3NCaTjLqSc9ANHRbXJppp8NE3pR1+8y+P+Om//YL/6r/5AQA3589IyxTX2qTJFI7H0LE8l8YS5KpDwnNMzKak1pVd9EyCtstctWEDWH7G0csv2NyTxD/zaE7PT4njGL8l13gZHzGJNOJyiZ7KZ7776AMss8FRNsL2A1ytYK33e/iKFvzN8xS71aUi4YvfyI6T9nBIU1p8reAElmWhN9cU2gShfJm6hmfPTumuT8n60u5HNyd02n2MUHbTXD//G5zAY7C2hTmTVaqL82eMFiU7d/Z5T5FxlElC43n0hwo/OvCJCpfBxhbDgXz/4/kl6bKm1AI+eCTtWaNNKKYCV1UnJjcZfj1ieTWDWv5d4xm0jJTQ9JkkRwD0wl2SyxHClmd934ip0ylZkRL25bWT5RzX7ZLnBZnC3y2jmkprEdgKA2lNMBhQlFN6PUV7nlf0+i3mywVVJd9Ry5lz9OJr2h1ZLewNfKosxtLCFR28o8tqm+u4K11EIQol3qt+T7fR3TZUDU39ts26rmvKSsdULcWz9A26XlNn0iYEfoiuyUrYrf6fyDMqTSPLKrJcnn+aZqJr5gpTZts2VZ1jWGAr6nVdtzEMbXWPhjOQ4sMAACAASURBVKHjWS6aZqCrKnVVGdSlQZplGKqiXuQVVVms2gvn80jpjlVUCgMpRE6WZei6uWqpNE0dw9DQDfmdsO2ja1IHdj6T61LZDUIIZtPliuSiqiqqqiJQ5CfzaUbTSFiRrlqFXb+mqhuKXKxEklu+1EVTU0ld12RFimnUTMeXqzkPW206rQ6R0lgbhApq8HeM34ngqqzEqn+1EpL17e69bWKlnK3jobVclsuYQgVAa/0BabpACbZj6g6z2Zyt7XtvhTDrgrLKaQVDnn8lHa12N2Rtc49SsbGUaUG6bMiKcqWnpGkay+USXbdRvg3zyZQH720yupETO5kucL0hmh6tgjLLabG+7TK+tKkUcHPvYB/L1khi+fuz5Q0tf5NCeLxRxAZFccPmxj6pmHJ6Ikv+7aDNRn+LRjkMZxcX9HounV571QYRRxN6w4D+cIPDQ8Uq5Njc3XrM9VgeoK7Zoz8MeX10gqFe2Pe9AZ1wyGwqf+u9J+/z2edf8+jRI7aVwShyDdeDteAjppdyrobDIfHcoIwVNmW4x2TpYYYz7t6RxAKZyIjzmr39+9zZlIvTdm0uz09JM2n8vv2tJ/QCjb98NuJGsbiUywiryrDtnA935aat
yw6hcYansBSTukSkCV465nL0awDOzyOqKsbQM+aZnOOnuy3OLjXWlADj2qN1Dt8cEVoeRks+3zIeYWkDHt65i23L+bw4fYNTJ1yP5CG3eWeXZDJHoBEM5IFWpAVkc7bWe2QKd5JyTmmmtBXJBk1Nmlzye3efYtyC7KdXlOkNJzdXiEqWmnc234fUJTuRzmtrUBMvpnQ8j8NX0hlvdb6L569RaSF/9VOpw/Dg3hbt0OPiXM7dci7odzYYTSZo6vf8DkTZAseBeSwPufs7j7m+mNFW2k0bD7YwKodleUOhhCJttyKKvmSrv0+W3bbgzZjPcvqqh/7TX/0Fg14P1/FR/h1nFxeYmkklPO4+lQ7lyckhQsxXBj8tNZYktBYFk7ki9bhzwDi5oBLSFIV6B9sN2NoMmSi8gWW5eP4Wo/FiRaBRaVDlKb7C8Wm6ztnFG6JsSalahxdKoHYynfPxt2UANBnHRHG80rSazMYM1zoYTY/RVD5vp3WHqsxx7IBUaXblqYPrhgiFz3t1/BovGGCJLulStdeWS7Jcw7KGsrUYOH7zOfcPvkNWSCfJqEOybMT+3l0q1VJVVhWLakajG4yVHp4oMwoxYmNTOt51kyPKhE64QaGwIcukYha/oSxLHjySDt/Ll894/+NtbuVcLBvu3L3H9c0Vm+tSFPb5izO2NvewvE1+/MM/lHt90ObP//zPubiQ7Kvvf7xJVQtcs42uKcbUoEAIT6rWqqHjkBUJolaJlN1tLl/52Hazam3RSImTBWvDmDyXz1yLFkHLQKsVSUKpIcScO9t3aXnS2dHqClFUOK7OMlbYWr9P0P+Qjj1Qf1eh6zpNU63aQDFi3n//exIIru61oUHTdBIloNuyrjD1JzS6hqYpditNg9oAXe4D3U6II5+m1inKWxyoh9aYFCLl9Ey2/F1dhQTtR6sAsKoq4qik3/PZ35FB9slrh0VSgl7d8iaRZQWmVXL3vvzOm9OG66sSzWGF8bBNF+ocz885OpR7w3C2pEN0y4apG0TzmAf3AhaJPHfK8kOKKqaqS3T1PqRxg9sTaArPdT1NKUULrQZdnb+LxZywY+KGJXkub1RUNYHngiHtfpHXiEpjkZxwfqlaBbX7uE4Lw6y4DcI0Gtqhwd6uTD7+/C8nBIELRk2j2MmkYKlGo82ZK3tqGB5Nla8YKW0jAAGPHm6uWpUOX07xO13yNMNTDG0YNdF4xh/9mXwXXr86p84tdAcsJVqclTViWaDZBaZipdRps8wLbIUxs3WXGlguU/YVw16yuCLPd9GNht09+Tx3Dx7y7POv+fLzDwD44L0BL357wvZBQaTaEMsmoOV4uDR016Xd7fV6PP/iOft9qU1VWA3z+QXT6FZ3CzTDZilSKtVi5Tc2J9dLwnWP0Ru573r2BmHbRzQOU8XM+fLkiHlc8INvSYKb4+tTdtpDhJ5y/Fq2CptGC8PeIM/OV/eEDUZZ4XrqTFlatPsDJvGUvJDXPr8YgW5z+OzXfOvpnwLw4PETnv36ENeXfzdOLygiGEdn+I48x6NoTC5qqmbIm3N51m1v7nCw+zGWwgxF01P277YxqpJj1XZZW0tawxASA11hQ3/16ZfcefLhCmv/6L1vsIxvKMeCl+r5jI5BrzdkOZ2jF/JdnhydEi9Ttvdk0JlHCY7jcHJ4Qrgm3+07B9ucvnlNN9hnc0u26k7DC7LFkqCt2HsbC8OAPLa4UDpQg3CDk6OINButWv7C4S6zxRGmagdva30s3aVqdCzVRprPC0SZUpVv9RRFoaPRYCoCtFoviJY5vuFRqwSh45hUpc6gPVwxjVqOh2F0MbXbJIIUA66bjFwl9mlqomSE7wcYqmig6zpNrWEpOSfDbGi5PaqmXrEaalpNXVcrFk4hBMPhOtP5jFboq89yOq2QTq+PoQSPy7KQ0BIVNPmBo1oOdZT2MVDjeZ7CmOrqnqRG1i3udLlMieM5ruvjK5ZB27YRQsMPbDRD6aLWkKYF3Y70ga6uZjiOh4a+Eo+eTaQ2mK5pq+snTUndvG051JuK0LaoqorWWlvdpy61w9IlvvKxsmXEv2/8TgRXTaMThAowWMyZTmIMw8BRPfumldLy+0yTUrKfAUlZUAkTS+2KZZ6T5RGTr3MaFRWvDTeJlwWimmCb0rHXarg6n1Ch8Bi2j21b+EGbZSxfxLxMuBldMhj0VpnP3Xt9Lq7OWaie3VozaPvrzOPlKuAL/XXarU0i94juUAZqb84v6a1nKyfbSu8wGl/S7gUUS8X+EglmVknVLAgCdVDoOaLJuJxJg7Gxt08a6eh6h2QhN/3u5l2CsMH1A9xvSifs5fPPcE2Dg8fSQTn58jn7jw4ITBvXU3SpG+ucXswpSrlRbxZnrG/q6NqYeCLnfBIds7k74MH+R7z8WgJoh1vrxKMpn3wkGakuj3J299Yo4piBOrxenqes+x12+gH5UjpOIksY9g7obqqDOKtoPJP3Nr+HpwxGXl2zvJ4imoKWElNcnESsDe7QWpP3OZlfMdjdoxAJQnHGbgcOy6LGDOxVH79eWpjlIctSOt5cm3T8fZaLhE1Fkd0kBUm65Lo6ZFOBJHvDEHsRUyrx2q9fPmPgtimFS67YyjQb3NzGxyUtpJN7lc052Fzj4rmsAuzv7FJ5XV6d3vaTg+fajF+/RHMM+j1p7FpBQ6nB0aEE8HonFkM/IGgN+f73bqtnCRtrA/SmxK/k9RoRMBnXrCswcp4m+KGFn1WMxnK/iNTkYOsxUXSKO1TsNnGK7zbMFU13sGZyPb2g0w3QXXkIXJ/llMuKJP4tg45KNrCG7kWcX8ggzXAEhm+BUZMrrNZsHrO9fxfLG/Dprz+T91nVGE5GRxmxq+ua2ijJ7SlBS65DkkkihaAvA7frc4GgREvPiFMF9LfXeXl0DFqJE9wKi9eI2lgBvvU4IokNLGfCoCMDKUsJz+5trTO9kM+3XOZ0wj5uIPfrbOYisgyj2UYUMtlQVCPm6Sme06NRAqU7O/uMxxPiparmWT6u1cF1Km4mspLb7ftc3jwn7NS0fOmAWU7GfHZBp1Or31/iWgF1ZSMqufeX2SWO18M0HNqq2urZW4ynJ+RLRYThlGRpjWOtkSp2y7oZs7a2RV3ZzGZyTQdDmyKFQAUoo+mYmgjHeMTZhTxghmFDenRIadR0FYFOOt/if/gf/3v+9D/9ibrPGM83KHIfR/Xat7sBSWJgWtrqsAKDWpQU1ZFcK/dbNGWI5maUt1gYy2E6znF6M/pKrqBB0GgVtZJ+WIqKWstodyyySD1zq6RBl5gtFTQMtxc8ePCE0+eK2TVJEXVFy3c4PZEEBf/wH/0Yz+0yu9ax3FsqZIOqajAsaasfPu3z/Nc5eW6vMphZkWPUHkGoKgo2NNUQ13dpGoVvBKIoYmPdR1QymaNpHRoKahV4u55DtMhwuguqUokYJx5llWHqBmUlr1UXNl6gMZ7JZ5tNh1Ro2Prb4LUqGtJsiuVU2KZ0+KLCJGy5K+HPWtfQavDcmssrRVHttDFYUFXViu318jzG7zTkSoizajpYhoUbdFnMlWNfa6T
5jPEsx/XlOyomOWCj2KhpGo2yLOgMmxUTWZKAZuiIKsO2b90KnbBlcXQqz4841nADnUYDTb9lt23wXY2yjkiyW5ypQSXeSqMYpo5jmdh2Q6QkRpJMR3crHLchVxlWPdPRqag1Re8fmViWS0O9+o5hWmSJTMYlyr4s4x6Vpq/OeiEEru+zXF4zVListd4+V5d9kjJHK+R++eCDjzl8+Yx/83/KRN/O1u+xt9/izesjrlUC8t7dgMnpDd/58A9YKPyUUerURk1iqCqDZbM9XGOju7Va9/loQeA7lAo33Gr7hDc3RJcJT57IROb18Qnp0mF9GNBWMic1DdubFs++/DcA+INNbpY1DSZtL1DrnjCavsQ3AvyWDBoOr095cHCf0VTapMvJmHEWMxhsUSTyHH/6+DtY9gKtecgslvd1cjXC7RkIhUXHcul2fGzdw3Dkej578Rn9zpCzs1e4t8QpZcoXXz8Ddf4X0ZK7e3e4Hr0iU9Ta4doWaa2xfb9Hpy3vYX2+Q17meI7cY5fjS3b3tzC7OvvvySBXZBHJNCZzBbMLGfS9+HrB93/yY07eyLX6+U/P+P4fPqXWPcZX0u4fv37FxsYW5fQZX3wubfqDJ4948dnP+OCTP5LrUE05PJzSGXZxbZkgPDs/Z3v7HrPlhFqx2fZJeHT/hygSVY6OnxOYAbNpQ6CqdaZW4Ic+nqGtZAfSOkYzwLmVEylg0OpQ6RP0WtoSrSkI2iVVmWA0KhlepNR6RF6pBIHlgdZIHFL91ga2WiGg0+0o4rJsSVU2q86cpqmZTqc0jUZ9S5ijZei6vrJv8nsN/W6XVFWzdN1ksUhYLGJildR0XXslhg6SNl/TNLJsucJAaTrYpqxaKTMsWV3rmtlMEehoGq1WGyHEKuFbFBl1XRKGIbaqgtuOQ7tVre5zZ2ePPM9XtPYAZWmiaRq6rmOab+8tS4oVn4LruswmS/VveZ+maaBbNVpTrch/uu3/H1Su0GvmkSr3azpLMaXjd6iRCzeJZnT6PeqlhWHJiVvESzrdFoVigMibgrSKyNIZ20PpdI4nCwqRUjU5t/a+THIsx6TdkgsyW5yTpzqnV8u3ZdempD/oYDr2KlM2mY2J4ooLxWroe21sZ4LITWLl4LmO4OWLV5huxIsX8nu9zg7pfI1StbrpxoJ2cEDP22MYKMN5fw3Pdjg+PuTggVyw1y9fEd1UfO8bPwQgjgS5nWOZGuG6fBHi+RjXbnEz0aEtjdT93X3m4+mKjvrjB98gNxN+/Ee/T5Yp2tyqwLU0/l/23qRXtiw9z3t230Yfcfr2dnmzuVWZVcmmSFEUKUoUIEA04EZD/QLbgOGRNfTMMKSJYcMA/Q9s2ZYGFEVY7IpktVnZ5+3v6c+JE33E7lsP1jqRnlAjDcrAXaN7DyJ27L32t761vuZ933uH4gC2sbNLpVd89fSP6G0IB992d4lXK+x+RlfqqzzYOOa2ekMs0+IHB22qJEPzd9Y0sk8e9mg19xjPr3EscQ++1WY6ibjXEM7h+vqUdKmwu3PEZCyulZhdYi1hmaWYdxpk+1uEWUIgN8bDrR6ntyuyEpC/pxo5+/fu8+b15zSk1pZl79LxE2YrYRt2VqIbOVvdNmevBHufZ/sYispoOWNjX8z5+HaEZ7dw5GGyt9NBVxLG1yu8UgYoSojfdKA9QJcB+nZYsTqdsrkryACeX1/y8PgeN+MLbk8F9bvRa9Pu9al0F1WToOx4TqXDg3cEQYiju4zDJaWi4MhWmnbD4asXv0DJlmxsS0bGVY6i2kxy0TJqmCo/+uk3HN/fYRyIv7WUHWrTJK0NHEmAEBYpqZoTSmfTVlVqWyNOp0heCNxmg0ypUXSNpWSlW84XHN3z2DsQ1zk9mzIaL7Bt846nFa/pcjMcUttXWFJL7GjbYRKY6wpGkpxgNlvMliq70rlH8RQqd83qVJYp01nAk4ePGd5IYHNRsVjdUClTHm2K9t5lkJOV8Zrhp0htNL2m0zwml8BYJPNlGhvUUrNHVTU83yK9a58wtxhNX6GqV7R94TdsU0NRVHTNxJaH89urSwynotcTm/xsopDUCb5V4shWwTL3afdjstDjWgY7nvOIyeSKRls65NYuqAaT6cW6EtBsb7BarbCNNrubgnxERcEzt1GkxEJWVrS8AV9//Td0uyJpcLj3EbPlNWmUYFri+udnC7pdnVoRgUavv0WRe4yuOjg94SMWyxRVMcjVkuMjcWD/7/75P+fifEIlZQHQKtJUp1JCFotLeQ8Zq0WMzhax1FPzXY/pdMzWrvhesNCpqMmrGhTJMpo7pOUVg22V83NxiLecQ7ISDFNsYotxzqPHO2hWRbiQm7paURUavt/ktVy3D54YlEm2bitRFYMiV6lKZU3B3elbXJ9FUKnfshFqKkmcEWeSstrZII11VFWhkJTKpmkSTjLawi0yHN4Shftoeogmt0rH0oijlGY/Z76UbXnsiAOBDHbiKMO0ctLymtMTsTeU2fdRtZA0rVBku0uwLHjwvsb5pUha1MW+zOyCJgOLOMpodQzyPKWuRLVO12zBgijnQFF08mzFoN9C03fkvWcYhgO1SiCThq6t4XUSnr4UtqFbj6jyGWlWrsmHomVOu+uB4jCfShC466Lr6rripmkORXVGko04P5Wtu/VDarVEN0wUVba7Bwq6VWFKNi/DtNBMC8NmPZ/nk3M+eHcHzwk4PRO24bm7pCXcnYkUDVQ1o91RWEm5jShX6DgaURzj2zLBGiYMmgZpLK5jWztERY2qVejckSsJlrWGVxAH4v2tAo1aEZlpYQcuq2WMZejs7Yq1dnr6kjSrMRydSIL4NwZbHN0/4NU3gijmT/5ij3/2T31efh7QkG35dpXSbje4Wkx4/L5ogX/2s8/53nd/ADLplNzckqk273ws9gHxrhKO7r3DJBDBwXR5QbPpUa4Srq5EAq032EE1VszmV5SyJT1PNBpYNHpi/4gmIxTPZ1XU6JJxsyhTBgOfk5en3IZS1qXW+Hd/9XP2ZVvp2eiEXqfBMrhmd1NUwV6e/w2tjo9aPSbOpbafElOTcvZCEIbd29tjuUrAbLIhGfD29huMpxXpfErXk3Y2VInSiM6GJBXJDJ4Pn1KlIVu7oj372csL+oMdbm5uuJQdk9t795jdvCaKhf+pixuevzxDtRrs7ogWvPlsha6kjE/PyWbic//4nz7k9NWM7370u8IWsz9DqUpYhcS5bPmNQopGB8OaMjwTz9Pv1Xz8qwOyQgRgaXjKYnLG1bnP/n0xx5PRGZPzIZ1en+5AdjZdfcJooXHvWDgTLcvYPXiXRusKJOnFPFCJ4hlZ5ZNKyvhFZuO6NolsezYB1ShQFJdaRh+a4hCnJXWlrn1enM0oC+Vb2vXlFBUdlBzHulu4ObphkSYFuSQNsyyLVZpgGzKpX4Nn12R5QltCPQpKyqJYs+vlWQmorIIIw7hrD9cwPEEc0enItu4yJ8/zdSBVVYIG3vPd/w9ZRkFVl1RlQV3LilAi2ntlQwFVXVPXNaapk93p/xkuaZoyn0UUkpqzqip0VaFakxiVMt
FTrX/Pcj2qqhJnFzmKosD1v6Wen88DLEtHVVWCaCnvKcOWvai67qz/9h8abwkt3o634+14O96Ot+PteDvejrfj7Xg7/iOMX47KFSp+7w6kGeN1WqBrpLVswenmfHP2BXmcrjO0/c4mw5szms07ut8aBQ3bVtHv9JzqBY7jUeQ1k4XoU65UhdFQp90XUeegc0BZ1fhubx2ZL5YTlEohDsJ1GdJyGui6RU+qscdRThjNqVKP4aWIbu8fPMTtPyZKr2naoqXKNlwso0uUiUz2yemUnZ1NWp7Jai6eb/ug5nZ4wdG+j6+LCsneYJfQXmJLgeK6gHcOH3B6ekIkdXUark+nZ+L6Bsc7Iiv24ukzorziaFNkqYxYR9VrolXAK4nL+vjxHqWZsSlF0+a3I65uv6ChDVjeinsaDFq0zS6XL9+sxYYvz5/T7TVZrSQAvKVx/SbG9mZsyx7lKte4unqNY3sYtXin0+A1pu2ylNa2870jrp9fcXp5zvWFbHVrKERBQJgOKSVexdJ7dHyT0xuRJVvuKmiuQqtsEEiK3Em+ZDE7Zxnn1H3x3oNgiOPtsmWI7I+rFQR+zfV5wta+AN5+ffoZR80NWk2f8o5tc7yk6e0zHoo2lnhhMq3mmJqKdiszNEbNdRrhpsVaCLPZ3KWx7TG8FVlAw9MJqxVFabD77ofyexpVnqObBXkkbDgOFcz2ikUkMi1aS6XX2eEnP/tLBjviWrXiEq9ibKtiIjEJdrvFdHqNZYl5yscpx4fHvH51SrMp7C6vDG7nrxktY+JbkU1rtgqqKsPpScrxIuL6+hrHKXCbwu4mwZQoHgtwviayr4oz4mw4p78hMoNJvqRj2xRFQSIpXG3fI03GdGwXRYLOKbfY3YWnz0Xbo2X62JZNkmRMp8IWDTfgepQwsGWm121QxjHLKFhnpNJqgWMZKNoeibS9JFph2gVpFEt7HRAGCWGikeXib77svc61FF2+q0qJCZIcW9JmR3HGahXx+N1DLqR+nOltoCkWdQmz8Uz+Xk6r3aeQLQeGXvLw0QNev3mGKjXC6rqmSH1su0Em+0GC6JqsSlkFIo+1Ct6QpjV1rdJsiWe5eJ3Q6fncji7Wuhq6ntF0PEaSBv3+OxtcXbxhf2cXDZE5+/rzT/ngoyd8efEM1zPkPGygGSlzifFMxgF18lvoukkSC5vKCwUVg63dHf78z/4SgP/z//6/2D3Y5XYkMrSdzgDLMEEtyOoTMVeZTVk8RlFz7tKKRZETBLc86Iq1nkc6plVRFhZpJiqfimbgOxWOYa/1sXzHQDN0FEVmXpMZ7b5OFDsosmqjqDoVJWG0wG9IbbbeLlnYJpP4Kr/lkpcVYZBiu2LOyzIjz5r4lkWpRPLdKKxWIf2BuHadm+SZimEpFLIyrlsKuq6TpcIntZwdxqWN5mgo9R2YvGC1DPG6Mb2e8CUXMxXLNaikGKnt2EzHE9779T1WUlA7CBJ0qyavBE0yQJqURNklrbsuikmJ5QlaY0eSQsTLGVl1S1lXBDKbXZYhYFDJUpJWKmjaink0ZjoRGdm8LFBRqOocVVaXyyohLwM8T1Roo5VGUlUkaYkjsSJJkvDoiU8URhSlFEX3KhRNWet/FVlCHA2xLRVHF22ey8DC9CSJgtS5mYQF/Q2L81NZ5bD2cD2PvEzWWl+arjDYhKvLG3RNVOYURUNTVGSCmGCV0m4ZmE7Ej38iKo9+4wG1mpBFCkgfYKoa0+SEq0uJhc0OQRMCy5XEotQYBMspj+875PJ7RW2jaxpFIWm0dYs4yWm6FoG0BYwecZrj2MoaV7MIFuzt3+fLz4QkxxdfPefixe9xtHsPtysqiA3bIEsvibQSR3aO9A4G5NOa4/fE/vsv//hf870nv0XnxRsQklI8fvQOqa7T68gs+bzC79xDcWcML0Xl8fL6im7TYbd9zDfXoo2t49tkYUIhq6PHOzs8fTGmtdcgyYRPWC4igoXF9vYBVy8FYH88vmESLUjFZdgcHGFoJi9evWSVCl8dl2NenLso1TkDeV67d3hA0x9QlaKaVpJydn6NYfp4ptDHe/r8KYuw5ujoMYePhFzK559/KlrNZAvw6YuvKHL4le/+fc5Phd/Y3tymLmNaToPRjfCLzz77a1axgizQsDFoUpQJ2SpmMpcVEgtOLk4ZtHTsvugmOb1JSK0Fs0zY2Ac/+JivPvkxTl+llpgk1/cp9YCN7Xv8F/feBeD52SuSbIvpjTgTVIqJ3eugtwpWoZi7dx8e8+Unz9DylHwu3vGz5zccPv4BeSie5eTpV4TziF7TJZjItmDbY3urzeuTl7z3/V8FwBm0yaMQX0IV8hKm6Yo6jFFkJ9d8Pse2bXTdxJJta05jwNnZKbNXosS3vb3Nwd5DbEcjuqNCtwyiMCPJIwypn5qnJYpakZV3uoU1pqXgmB5hIvaeGg3qck1oYVkW89kS0zTXVak4DsmLkjiO11gmx3EwTfPbileeY+kmWZat2wXv6NnLIidJEnkPGq5roevfVpeqqkLTjDUdfZIk6IaGadnrFvUkSbAsay1ibJouUAlcmaxmBaGgkU+TbytqiqKQJ/m6cq1pOvP5nDiO2dvbWz+LYQgc1lrzUP0P16Z+SYIrmM2EEzOsnKJKKVOfLJeAtiik2amp6wpVMsKhJmxubxPK3ua8iPDcLvsH95lIQoJa19nc2+HTn39KUouF4LtHnL654YF0bMFkRV0puK6HKfFb7WaTzc1NLk7P0Upx6Ov3HG5vJ3SluCvNiiRJSFKdD98RQHFH81FZMmhtUUuGFtvw0dSKl6/Fgrq/d4jXrHHUYs1SYykKRqHy6PEBq5kUmGxkmHWCKw3MbiREwTXtjoK2Eg53uohZzGwU3eB6Ilj+vI6C2tiglKB3xVSokhWVZ/NIstu9fnEGBkzmYp6KtGC32yGLDBTJppNEJtuHB8S5zkgCNy1XY7JakEttjPhqTsQMt3RYjoQDRtOJ4hTbtQhCKcB4ajLYrnl9KxxN9M01Spzx6J1jzmdSj8dv0W42sVWDurpje7xkfJPgy5J5Hs65LEpUPaYh9XGSpYkRTMhVm2AsFr/iV8xmN2zviMPPzThiFqwYLpY8FD6Lm5tLHhwd0VAcPv2hYG07erjP+dUrLMlcmeQrctPAb/jMRhKToBs8PDwkqnVmF6KdZ8Pf4+bsdI3LKpSCr148I5is+P6uwKcN0ETX9QAAIABJREFUT8f4TY8wvFm30ikNH1/TqKVm0J+9+JyH93YxHJtVKBm+uMV0VQo8ElUy5VQrzHa9LpFnGLw6e4Nje6TJHag+YlzqRFmE54mEQJqPME2XC7k5V2VKr+mS5AqOJUVah0t2792nzuFail6aXkWW9ZlMhR24TbA9i9UyIJQYRFU32N3ZYHq7wmuLezgZn7B4cYPXFvPZaG/gOw1UVaPhC7teJDfsH3U4l+x9vqNSpQWlAouFJIV5dEBdmtRKtAb67u9t8OrkFzhSHLjR8NA0BdtvEwbiXc3mEtMUlRgSq2kbHp7X4YuvBCZB0zS2d9/l2YshvQ1hHBdXc0zTpChTTBmgd
we7PH92wuaWOHCi1ozGV/i+z0yKCd5c5OzuHBCWN3iuTPAEPns7e1xdS8KHI5Pnz09JIwXLFc55b/8dep19fvjX/45uX7xjRXFYrGJKiUn48ssJraZHkcdrtrnN/i6f/OwX9Pv7aJK16dXrLzi+d0gpe/9t9QlltUmzZ1AUwt1rlkkQLsnSkP/lf/qXADR7DdBsplKbpt1SKSkJFil90f1CVZsoahNVjVGl/l+elSTZDNcR/18sTZIkQVE9HMlmuZwuaTgOG83HvJTYpbquqYgp5ZqJk5A4DIkTbc3IWpYlNSUVHpUibC/Khrx8OsTzxSEtilcYhk0UlvS3JJOjVlAVHrVbkUvNHNdrk8ZTPvwNQTrz5WcvUfgA3VDXgTCozOY37PeFbZZZgwoTUCgkoF6tFGolob/VYHglN2O9piizdVtgVYqWPMtLGE2lXo5pUqsqmpZTSyY3VVvR67WZ3MoEhdNC1yqKGgopSBxGU9xehG60SGS7a9s1SauMUrbRWIZOGF9wMx5R54I4xbIc6nyG5UCaiD0lTqf4jsflpSQxqHM67R6zJMaWjFumHrGKXxNnNYYproWhoqCgSQGZ5Tyh0awZ9PsQiYTLEh1Vq8hSBXmWAqXGb2ZMRuL3HMfF0GoURQgxA3i2TRBeo2gKliv2VlWv0TOT6k7JdVGwtdWgTBOKpCE/o5GmMZbRWmPmsjTlO+9uYErMTppbGIIYkKwSdlBXBaqWs90zmS7EvJSKilLn6HdYN1WliHPcjYLutkjKvLooKZWaJErXwWNepSjo/NZvCQKNP/2zF/z4by75b/7bX+fNuYhStgf3mM8s4nDKq1vhA7Z7bebhkh998kMAfvudB5SE5Oa3ujmrVUnGgrlMIrbdLkqa0/OOUI9E21ySv8QrGpSk9LbEIp0vJ6ySiGZLnC0ytcPGno3bga8+fwrA5tYDosmUXAmZTIXfnZ0NUbyaTksEFXoxIA1LHt/7O3zx9I/E6yxt2naLvV2LQJ7Nnj8NeXjkEy0l+Um1QtMzirBkNhdngmBW8eGHT3jz+pI/+ld/LOy6moOu8nBP4GM3ey5npwnTxRWm9J23N5cs5yvirQ0cU/ytYWhk+ZAkEf9vNe9xc/uG2XSE70nc7niB3zzgcFfhp6/Opc0W9Dcb3J6Iva/eaNNubVIaKpvbws5W44pG06HZ6sA6KROjax4PHoqg7NPPzxlsuth2m+tz4ctGozGPv3uIZXRZSrjC0YOc68uXuPfE/vSDX3+fr59/TZ4OyCRLWppURG4XXy958dn/I0yv4eGoKivZSm82GtSeg5ZVuE0JV7jfY3Qb02j564RAFJS0m3t87z1x3ijrFZdXM5pVgziWicSkpip1sqpmfCuC1V5nD92OUWQTW5pF3I6uIHPZ2pHtfZVJGkfr1rooClEUlaKANJfQC9tGVS1UxVoHHXWpEwUlRSF8WVVVUNegltxpRd3hHOtaQ/JeoGkqaZojqQxQFEjTFEWt1615VVVQlilFUaAoMmFW11iWSrstzjJJksiAKCeVZEOOpeG7PooqEmUgru36BqYUOy6Kgja7aJpGJVlGoyhgsZjRaDSwZEvhnd/528YvRXBVVyXzsTh4O67Bqxc39Pt97j8SbDplovLNp5+zf28D0xRO5OZ2zPaOz+1EOFJdVxmOR+S1w138VelNXp1/Q29zkywVE6crJv/o9/bxLQkwZ0iv2yaOI1Yr4Qy2N7dQiej1PDYGIgM1Xt7wweOP10Hg1q6LUtpcX97gyn7nXr/D5dWK4dUrqkoY3fH+A7pdA1+C7lzbo2nsQRms6T6DSYRrugSTgFtJhb67u0/H7fHiuSAH2Np30FSL6+sbTFndOjzc4XYyRiFGTh+GklO7fRYLybSmq9iuw/Uy4v6RCCj1zYTx8JqBIw4jhxsHnJ2c095U0UrxLIU25SfPfsje4F0qGZhOsxTX3EdVxaG72+8SFjnjskFTzud8ccH29i6jNGbQE5tAIzKZzVNaEsjJMGLrwWNGkxPcjlhRnd4RxfVrxsUS35XCzYNjXt1+jVKLF3qw/4js1Y8JF5d8NRXv4ftP3uPRwa8RLCqGQ+E4j5oPuOAZuSQDmGRz+g0H0kgEN8C7x+9ye3FBbroc7osDl9OyiMqMRSV+rzPwKFY5ambgb4kDwqIqOJlcY6xCChmIPrs8ZXt7k1QKFjbiBI2I3v1D5osTACyzQncNtLyP5ghn8MlXX7Dlb9Fs3wG3C77+7Cu2NndZFcJxmw2LJCyIkhjTkery4xFO08X3REXx1fWCvLhgx21RZFLgzrVISw/NHDJbSKpizyNc6UQL4SQ1RSH1Klyny0ySXMTJnPFtTZZEbMoK1yrU0d1qjV/ptB4SRRHUOg2ZkGiZDuEsQbMqrm/EOrqZhXx09CvYvniWVRKgpQnjyYK6Futq6+A7fPLzP1sLsDrNCltXWIYBe3vi+SaTDMvSCKIAqyOcZLRK2Ox+SCUDcceHy+spXjlFcsfw4uRE2s0OYSIrpFXAycmcVBesVXncwY1vMPyAc0mSMpqc8fEH/zlRMhL4AaDIY2o94GoiAup+d5fprKYsQTfEOjKcc5arEbsHHsuFZCPNNJ6/OKUhA8402scyBkTKK7JErMfz01ueP/2SdnOPpfQvcQCuVTE4FPM7n9VMp2PUpkO/IyoPNR22+i5ZNePyWlTd3rn/MYv4BbYmbDoL9ultOHR7LQQdA6yWCXuHB/xv/+v/zL174gD9i89X2J7OdCYC2sHKpalZzBdXHD4WSRnTGTAaRehWhaKItbyYR3Q7DvcfibX+V69K8sygUhdrnGsYhvz+P5lQVOfksdj4LD+iqnTSO7pty+X4nsFnP1epJT61KAs0xSfLMmr1boP26HbeYS7nVzcVajKKokKVh+PJeAnVDnkRrkHLYRgTxzF5eYc/GhAqmmS0EvdZJQquZ/DokegC+MXP3uA5D8mKFOMOs1uUNFowXT4jTY4AyWarh+u1F4Yhth2TZnPClbDPuigoixrTMshTCZb2MnrdXZYz4avTIENRBTuXKg8fi+WU9391QBiGUAvfrNZArWJKdqtwPsf1Fd578h0++Qux4etKgKGWlKVFEok56HZ1/IaKWos9VNdsaiWg1WqgSICT3zBpd2oWsxLLkJkoXSfLQyxNBiNFwtHRDtsbOk8/Ef7bchxqNcE0bOw7Fj5lSpqPSFbCXhuNFhUBaqWvGQxVxeKddx7z9VdPUSR5jGWpZKVNJrFwVBGuo7GchBSpZBTTSurSRdU0CqnAmsUrfN9BkeyyYVjiuTlUKoXMzEergGbL5cMn9/i3fyKqEXlZYOigyOA4K1KCZUjjISxl58pw4qApJXWhkklstqobJGnIP/sDgYtejiP+7d/8OU9+8pDdgbj3n/zoC7xewTJJuOMWm6UuSbgkjYVtHHdaKEVCPP8Ww9HodFmmEdErsadtHmygNWw+ff4Jg55IlvV7PcBHqW3Cc0mp3rAgV+m5dxjhFZoN1DaqLtZerTT54MNjvnr+lEFL2Po73/uYo+/0ePVaBGCz2wVm4bHtqiwawkf8
3j/4fdKk4uT1iM09ic06/4LXp8/Z3RX+Jl5dUiYtdrafcHku2G0fHRxz9nxFlhS8/1AkPDVlm1U640gyLy5XJpoxY3uvx+VrcZg53twg7fv86Be/oLcpfq+IC5qKjmbIdWV28QcJszBkPhPramPgc31zxecrA1tKRuhbXfS8Rpc40Ks3l2gti65Z0LDFXpRuj1GTFcPrgEImtTc3D5lcv6KQiXDXd6BuMZ9PaXbEtdXa5cHxewyHCx4+EkHY+UVJZ8NYS0YEic9H3/ltXr96ztG7otqbFCGqVjG+vqAv32kYBnQGXTJJFGWXNbOrKfsDm3Aq1uhiNKOoDJ6evaDV3pHPvE3JiF98IZLVijGnKmyKqo8iyTHStMC1e/T6LVz/zpd4OK5BLplBK2oqcvI65fxWJAiuzlL+0e//Q4pC0vSrGqqqE4YhtXpHApOSJzlpGq+TD7qlohvGupNFqTU0TaOsq3WQJKpAKmmSo0m2zrquyfMURVabi6KgLEsMw2IpWTE9ad+ObVBVdxTuQvh4NhVzl+UJdV2CUqLIa6uFiaLk5GWBK8+ZpiECQkN2TDiWQ1YkpGm8DihNU8dx+sRxSi79fBxLAOjfMt5irt6Ot+PteDvejrfj7Xg73o634+14O/4jjF+KypWq6iBFRB2rycamQ1nmDGUZvVQUtg638XQXzxHRrFp6zG8DdrcktkjzuSgvGd4+R9NElajIYxoNl0G/h6aJzEBRzCmSGFsVWT9NLfBcjYbbp2GLjF4QDAmWC9JYJ1ZFRn+j4cJ8xZ4nMhPpJMc1bXbNAednAs912Gmjpiq9voUrqZBvzq6x8TneFpH2m9cjdjeajCYZhRTVLcIK39dYLWOoRTT85tUz7h3vsH0knsX3euRZTae3gabfZQZsNnc3mM9WBJLuLVdM1CJhd0v8/vVFQnf7kHp+w7NPRXZrZ7PL43t9slDiV7IF3R2DslQJIlHZyWvRrpikM2xH6mqNFcbL5xzdE1nqrGxiGgOstk8lqTz7uwNyHdBcvnkjMoP37+9yenlFlIiKV6LWhNEcpYTtTTHnSR6Qdnz27R7LlcieR4sATBNFikK+fPYJdTKh427hmiIb2rYecnZ9yv3jeyxXoj/2ZjhlY2eLN89EFs7xbOxGl4NSZSkzWfX0loWpQqUzqe+EqE28boPxlfh/NKnQrIKSDFUR9+klK2wzwutucilbKlRVRa1aVIiMW2JWbB5+yDLReH4h6HePDnvc3AZ4uk8ylLpBikahBgxlx2HD9lF9lZgJkWTcs5UmWTZDyTQiSd1t2gNWw5Dmhpyn+jktwyeLU9o7Yl5ub0/I4wuiMEWTORQtP6dIenR7nrSpDrUS8ebkNb2WFFd0LXxVYVXDtSyHWlYTmzbXSzGfyyhELXRsw0S1RGZuFce49oDpfI4jWSH31R5eR+fkXGTTmp1DhukE3e+g2KI68dnX/4Y40/GdO9HtI4x2xnQSMywC+bwL0sqm07m3brsIkwmHR9/j7I3M+g9L9nY2ub6ZUss20u19MRfj6Q2pzG4rlBRVjKuLexyvKsy+TrPdJp6Laz3a/U1urk9RrBGrpcjMzYpT0qRmc1MIx9qWR14GXJ+vePRQZCJtPSJLNV69GhJK9tNf/fgHuFOFHFHxqgqV0e0VrY6/pqxVKo+itrm/NWD1TPi8fk8hzAvU6kja2BmmpfLy9S3JtsjMf/TkXZavvqDTN7i9le2EWsCgd8z1qfAbx8fHeC0T23XIJLvR7sMtPvnZT/n6i+f8/d8T7FmLYMF0EbCQ9xQlNXYUoisZT74j1tVP/1JBKWxCLaAsxH1GecFht4uEgTKaL3H8BnpRUWSy/c3J+J3v/Cf88b//KaXsx6+sAlKNQrYXeW6EoT6gzKbkEt9kVTroJctlyMa2+FxKwDLMUCT2rax8UKdkUUyrIWwozmwKdNI8xjRlS9yixPGma3mB0+cJZa2gFBaaxKdEcUGnXdBoyyyn2qPMYhRVodJlBhOX2WpIOZuhRk/EQ5cRZaFQSWptSpvF9DM0zcQ2BeYjTA1sNyfJM0zJzFWES+JgTF1KimVSqsKiNjWyQmRHdT1itZgznbSxZJtlho6mZNw1kU2XEbEy4+rCQkFiX+uUum5SFQlRLLA2x4dNWl5zrV9TqkIuQdEKTOVbKu9Wd5t05VJnIrNbEqJpOrUScze6mxbLoCLNJBOgDdRCby6XWMnaXnHv4JDRqWxDtDP0xCLXElRVUtQnP+HF059wOe2jW5LVNM1wFItc0uYXdcbH9x+gtebMpSC6ZlSoVU5QZTQccf3J7ZDrUYWiiOtUZU5VifbeWtpLnBQc7Zl8/tnPGC7EPuZaPlm1WAvA6mZJWRf0tz0m52LdFskDIkOhaRYoksqeykWrK4yuuM+/+zsf8eNP/g/+9b/5a/6H/17oQF1kJ1jNDXatiJVsU44LlY7doD0Qx69XwzH9zR10/VvdnMq0SEaX3DsULYeXi3PGZUy7Y5AmsjNg0sUoJ6hOjwfvChbV6fw5jqdxeSMZEw0YnwRsH3XYHojnnSyfM1l8wIPdD9hpS6ZK0yGu++z27jQYJwzsB4yWF/w9yeh3NZ7i6AWebRFEYsFvtr+LacbkuXgvw2FMv7tPw59yx9z98sUIbJfvHt1fMw//6LPnbG1s8fy1YOXb2t5AS2F6ek4shZtfPjuhtWGwt7mxfn8RHr37ewQSA3U5CdCNGr2OcTuS1l11aLW6mE7M068FzvTRewOW5TWN1oG0V50ymaKVBufXYn04GyqzOKBMK1xFnPMKo4HlW+iO8EmNpES3YjZ794gDwRJ5dTXl558r9Ld2GN5KWZ5mmxcXp2x0xJzrScbiaswH33lCWIj9vxwrhGHN1vEWKsKGd7q7GA7U+7JrY3xBM4948WzKxrbENBJjNTy2OgqT14KN+NXJz1CaXfbfFf7t5c/OcW2TMsmIVbGO3333mGUVE0c+0Z2kwXRGc3fA8FTYS7fncbh9n8tTD1MV+/9v/Gab2TylkNpiimEQFRM6loMhzyTzZUy/rWGZHqr0L/NwyTwKcVThI/zOJqYSk+YzbifCptqmg9K0MZIETbYmF5qBWYEhmYDdno2madRKSCK7g5IIdLckLmJWcyk/4Qr9L02XEi6eQRKrNJoempTXudPcUjVl/W+B0aqF+DsQRhGGqZNlGaX0w5pWye4IITkB0HEkROhvGb8cwZWiU+fi4Zt+h/M3U7Z3DghiicNoNumYW5imTqsp9bBsnQ/f/w1ySX5wcXpD1y7Y7nZZSBKI/b0dkmjBbDxhMBCH416nx+hmRLIShrLpd6miGL9pUmViITZsj7Aq0RVjvdHGWcbh7han8jCnOxkKJYah0pGHnfFyyaBvEiQuyVJcv9V0SKKChmzBa3earMIxWVHRaUtsQaZiWBWt5hY38qRdqCOG81uaXbFZng0nNDwf1fIoZdvacBbSGejYbZeW7DM1jQbXN2fcToVRNDct5kuNRZDjSrzY7UxFdTXmklK93dgmiJYUVYAuW9YcuwTDIc8i8uLO6HPanSaxBGSf3/4cTa/Y6fV
YjoXBVUuV2qxJ4gsMCXr/7PNzTM/CkkDgmpRl+pxB+4g4FteaTa5wXI8Sn0TqeE3LFXvbjzkfixYuQ4voDjYoY4cPHot5Gb2esLHRZjEq0WvhtCbLIV3ngNZAtiGWTZaLBR27hX5HtkCHgQtuaZI4Usi00WVx8yWm7CsLZit2NgYodYOb8Z346IzZQkEdNDCkDput2lBkBBIgXakKt9cRSZ4xkGK1btQiiCOW6YRWS7yHR4+OWMwjVKTuhaViWDpxOaIqxOal6w3CpEbRTAxPirLmNbpZrAGm25sP0fKcrIi4uBQOP8syDra6gg5V6rKMbjNQA2xXbByqM8dzdni3dY/Tl6Idtb83wNBrbpYz7sATmwfbjGdTtgZHAKRRiOJUbGw3ODuRQtHuQ/Iix7FLctkDbVldqJfr1oxlMMI2d5iHX1CXUkMnKrBNCMYSh5YGGO4S02hwOxWBqV20afkZZR1wfX0t5+57vHz+mlgGDAftfd68eUF3sMGnnwmShqNj0fKSpQqLmVjblq2yWiYcPxCU53XxkourFQ/dY4parO3ZMqdSl4SLgpYMFB8d/zafff4Jtt6X3wNVTdje6XN6JQSfe+1d2q0+jtcjScS8XF4/IytH7Ek64ygZ02z06fUSVETCZT7LaTee8M2XL/A9sTEM+j7lqGA+Emu0qDIe3/+ADTfgnceiTefNq6/4znvf56uXr3n3+A8A2DioGd006ErNnMFGE8t1RBuGL2x/a3vAf/1f/lfs7OwwGgs7MwyLLJ3RkP51sbqmSjfY36lBtuRpaoOsSjFrd631cTU/p3kcMR5J26x3qHKbqlhQI96NpTb50dd/ynCqYtxtcnGKrlhEsQjmPDvj9clPCYN9TEnhnGcZhl6RJymGLg4tjtmjLmp02R+fs6IubHQ9J4yFbZTFYxR9hWn0qOSBAKUkSmZcXd0F2Ttyc60EQQegohOGM8JQhC1VaYBeAdoaRxAFKd2BTm21iGbC7+u6TlytsCzZ6hKrNFs28QrSSDyvohYChG06hAsZCBshRVUzHwvf3fI9FC1H01USqSPU7OgoKvjWI6ZSSNlxDIIww7EVaVNTZskt4XJ/rZdTqRXBaoLX9Enkfng7foV1toGKsJ+6StHVJrWSEIXinrLqmuubgDJ+glrdCSebFEWObsg2xPgSx2nz6uU5hi7awVRVpUJFNYU2F4BSBcxnJWUt7LwsSzTFwjRNoqW4dqOhoWGj110UGQhXmWjfzxKxZnWzRrEjsjwnl62CtVqhVTqaZZJE4m8Nr+CdRw94+kxcu6wgTjLKMkKRydQ4XlLVNYrusJJ4bfQaQ9NJJIY1j2q0OqfKUhqeDG4uh2zdd4jjBN+X5FtxgN8omU8EdlnR9tnd3ebZ11/zr/538Zk/+Mff5eTiksU0xZI4U7fZpt9uEEyFTR8feqSrCRbfHtbGYUSll5ycCzzw2Tihv9Hh6MAkkWKuZaLR2S65nb3G9gV2KV/VoBagijkwXJ/j9+9jmQoNSTjid1xsS+zHI3kwnY5H9LZ9PCndc3qzIrdeoDUMRufi9xZJSu/oMb0tuL4S7yYOxmzvWJCIoMWscva2HrAKb2jIpN47Bwck5HhliSVxLg/3GsCIQB6WveKQ9lab2fUNDbmvaoMuttOjP0j49MsT8XxWQZ7UmKqAjJjWM372i5f84Pi7TJZSAHk5Ym93ly9fXhDE4t6vb4fMJxFxZyhneEqSZBwbv0arI/b28aJm0DlA1xQ0KUlht12WkwuildgblFaX6RwuJs/pyv145/g9kqSm6RjMrsXvuX6Llq0yG4sk1Fw38JsVy/MrNO4IkNr0jzXSCxdVZkrCxTWrmwRlINZsOooJb+e889HH6/NUUZjUqoWvprhSNuLF7JSDto0ig+qtdhNNdYiXr1iaYn1c3yiYegVhyPRa2F6r76PHCS2JH7r/8AEXV39BZ2sfR9p5kerkSUqtCP+mYWIpBrNZiCOFonXF4nwU4vohRSB8UKu9xYY9o5S4sDqZotlNlNwjluLOnpESTHV80ySTe0EWmmhWQWZJivW0wjQcwrlGoyX8vunHaEqHioCeFIaOkwWKohFLf1DmBb5nQKUSyRY+3bBQVcjzak1gUZalINHR7s6GClGYoygGvi9ZdagEPbxSr4OyXBIh/W3jlyK4MnSddx4IIKWixPzD3/1dzk5GHGyJA5jf9Dg5OaPf6GBJNeatjQ3GlzOO7onDzqKlUyk+eTnl/juir3c2Sjja3aLlFhSFcAZK1cLSdPyWePTlPEHXmtiGh+9KUGh4gdty6Pc3BLYEQfowXSToUvdG0Ss2d/cZXr1BM4WFLxIwqpzheCZAkYBlK2hmhxvJ+rV33OLrp0MMw+DlG5ExGXQ3KeKQm+mS6Uw4n07PoMgUhpfigFkVEavcwms2qGT/+CKZc3N2y/b2NtOlMLAsHeI4Dq2eDJJclZcvzvCaJpkEHxqNilejS/KVOGSbrRWVF6KXTUYj4Xw2NntMZisUY7RmaHM9C9vuMLwWjqa/qRIGMJsGWLo4aGi6jmKV1HWCItmlWk4DVW9jyExkGIb0t3Z582zM8YGoLrhhRB7FDJcLTGRWY7tNms/Zl7pl0/k5O5tHmJXDbCrmLtVDnp+9pO01SVTxPIPBgHl5S6MrwdY3p9iaSmFoaLl8f06b7uEW1z/7KbYEA9+ef8Ph9mPCWCrERwpZmVHnC2yZufJdgzDXCfOQZl+qd0c1QTSirqSAnwlxFGK6Lgu5xII6ZRJMONze/lbsONfp2h0mU7k5pwqGtUuuZmz2hP3M5kP6G03SlUm4FMmG0tLY2tzl5FJklnRLxTNUOr1NMqkb1mq7mJpJnCdsbUjiFLVFzCWqPOCip5xcXHB88F02JLvkxeUbjo8PKUqLva1jafslShVSlBK/EhfMlxdUxh61BGGn6oTBVo86b1GqYmNQqpAkzun3RV/41y+/JNYzOp0uz7/+DICtvccc7h7S8oWNBbOUKj8kTTO0Wjxfv+mxChWKVCjYA3z6yc/xWya+JzKDb15dkdZXTF8Nuf9I4jJvTgBYzSoanrgH1Zgzmww5eyXs3GqA46nM5jGdO4zZak6eWbz/+DtcXggs0+X1a47uNykKMb83wyWz2Ywn738MUrRUVSzqSqPIy7UW1cnJgv3dd2g3hW28+NEI17EYdHaYyx76ph9QMeJwfwNLEmikUQp6zoHUnVlOC2bjEhQFTRHPXDPhfPyMsk7pSEzgzbXG1YnNwVFT2qJJr+ugVG0Opa7dv/gX/yOT2ZT+YJOJZCy9vrkVrEkyGFnOdRQnwvFgeCnuczROsd2KJMnwm/IgXCzwWiWGLg7sSZ5iuTqKoq1Z+Maj53xzEhInD1E14YMooAZyyY7Y21VQ8wZFYaBIEDGVSp2m1FXGwb54npevQ5RaX+tcVUqKrnYI41NaLbFf3FyVKHWTLEmxJItiXYT0NzwcKaQaLAQ4OklXeDKqf50VAAAgAElEQVT4n48SvH4FtVizRW5SagV5UWDK7O9sMubwvs3GwUf88FTqTNkKRQG2Kzbi4WzOb/7dfR
qtnKdfieft9hsksQjS0kSs/6P7DRQ1QdXEs+mmRpzPsTSXJBL3YDgpWVoSLHNsGdxkRYhpWiSZJNkwE379o48Ipp11EJiXJaZrUFYKhmQV7A8sdM2jlGB5w6ypKtD1cu0TauY8OP4VLr7eW7NullWFYVnEoXhew45JUg3fPiCRuFZN1ykyUIyKWOoL9juwmhVYtmT0RaGgRlUhS8Rh5zvvP2BrM+L0WqO4yxKjkRQxtaxgKoqC24KffXpBJfFiqqaQ5RlGKYTBAd47GBAGY64u5LHGjKnqgqLIcd07ltEQtIogNgljMS9+s6QsWYskF6VKVcxpuhYnb8S+jaqQRjMc1aeSh8zJ5IZ/8tsf8uRd8ft//jcXPPno+/zVX/wJf/rvRUXmP/tPf4dG94Js5hKkc2nWDaqOyemt8IGHWwcYaIwnYk8DqOMpozwja4ok4qNuzfHuJi9fP8WT1Z++C5ObDMM6YpUIO7+avOLe7j22uyK5sojG1D4sgxWlfMCbqzG+FZIXAabdknbmkqwmuLmw4a6xyXw4xtIW6+SKWgT8+Ec/pOft4O+LvTZBJ1fadLuyImRYXJ++xNIbvHf8nrDXMsDT91CrFXEqfOz2nstomPD4fXGfL158g++WJKnCltQbVNSEWZ2hVBpuV7zTvmMxvz3HkWvdqTf4Ow89SncHfy7eZ5pfcDW5RI1z7u0dAfDVsy9QdYOF1P98dHREsDpjOXnGzZmw61XlsvVRi7Pzl9gS1z5/vaBv+cwXInF7eP8BF8NX+M0GKuL3nj1/zuPHH/Pizafs9cUze3qNo/hM5Z6tOTV6ZwdNdbmdiH2t1/e4Og1peTrDE3EGMEyPyodmIv1UlDP44Dfob+0ThiJQm1y9wje7+L5LZoh7391t4FrLtRjwYnlFe/8JB/d/F98WPv7551/gNe/hOTX6nUZmOWd2fkPbEXMeDl/QocvF+YT2fbFfNJsRYa3y8y9ElWz/8IA6z9GMLpVk7zPUG/x6wPHRPj/+q0+EEdsmBxsOheQkKAKV2/mKjU4XsxZ2l0UhVRWhlBWeJz7nNUvquEsik163wxss3aDb7TIc3wXHFXUxQa98conpNkyNutZwXIndtFWK0iIMY/zOXaW8oKwqirxEQshRVZ2qqtYsg6qqUxQZjuOsK6Z1XaOq+jqhLcb/DwgtoFpHt/1Bk15fZzKJaElRWNuqeXi/yeXrIZ2GOLSodUIULTg5kzTdQYBl+2x191FlFF7UU37+6Te4VmPNsNXtpuQ1FBIIr3sGcbxicnXL/pFkiascRtNLCtVnuRIGfXC4w3g4xpWHSb/R4fMvLtna/JY6OEjOoUyIlYAkFlmUrrXNfPWa3kAcls+nCeaGwvj89VrcsbKW5HlJu9uhsykW9XR5jUaPXBMH/VUwxrFbdLbfZz4T3+tt9bgZhbw5HdHwjfU8ZBOFdl8YQRb7aJrCdv+In/5UkGPYviiZNgfiWV5c/px+v4NJE92TGZNqznJ5TZkpuDKTPB2GWLWFLwONaO5i2gbDq0scyf7UHfgYdsZ4cs3eQATM3fY2T19/w0ZfzIGqVeS5zfZRj+n8BIDNzh6YMArPsXKxgcZlRLerM5V07QdHj1isxqThazpN4ZT7HYdqXtBtdXkjA1hb81ncTlFdsYGrlUa7bZG6GkEsrtVt3ifJM/xeg3konE+nbXB2/RmBFAfd7ne4HkZs9Htc3oogN7X79Lsa09WKTC5+pSzwVRffFc83np5RK7B97yHxrfhMls14uHPAbHhOX9JWZ5mDaUO7I9kXSwXNrsmnFYokO3EqG0cfoPgpvmxpODk/4TSc0dkUh+xKmWPqmhCiNWVrVDwn1toEgcrJyZcAvPveAxpeh6vLOxKBDWw3ZbR6tWZDs1sesyjC8DSuJuKAMBtPaLfbxKmwxTBccXD4LqvVhKwQ3/O9bS6GX9CwjlAlxentzRDPbmC4Yg72tz9iFd5SJj7ffV+0zbw8O8N54KBL9flo9YZ4NUVTOgwkWDcOQ/KiQ7s9YCKZh7Y2Xbb2LX76c2HTDXefInJptdvYhpjPli/s9uGjA2YT2cYWNnnv0d/j5lI8WxpYvPf4N8hZ8vmXPwbgex/+XT779Avu7b+LI8G5l1dv8Lw2c8mAtbHh8vjxYy4uLtbtDDkzLka/4NG93yQIRBWl6fVZTKJ1MuDw4CFRekW0ytdsiJalEWWX7G1+gC4Pbrqa4nb2GErRXdsJsB0NXbMYSTbUsLxidL3CdV1uZWVuMfkAv7mDIitLtWJj6S7tRoM3L0VV8w//8A/Z3TtgGQaUEsSPouJ5LlUus/fVLatlE01vrGnzs8SnIMW0Y8JAAqKVnCwrOHkpxYGtJkk2Q6k0JDM5uwcWB0eP+PFJiS6DFEgo1IQsu6O2r1Fzj6zQsCUzWJEX1JWKaeT02uL33lQmVVajWbJSXigE0ZLtjQF7e+LA9+Z5hOVXFFmBKRMei+WE7//aAeNb0QalsEWahRi2ShSKvUdTVHZ3W7x5fSXfwRNZCfqWmdM0DabTS8LUAFW0/P2/7L3HkyTbleb3c+0eWmZEalHqaQHxALSanm5Md5PTTTMu+C/MjkbjgkZuuZwV/wMuuKPRhhyz6eZwWqAJoAE8iKdQ9UpXVmoROsK15uLeTPQGXM0Ci3KzWlRYhof7veeee+453/m+rMixzTpRKGFzZUgQTkhSHdMUazTNSlTNoihTVq6Yq5V/BOPuLalHho6qWCiKRpyKv3FaHstFhKKV3ChtFzlkeYkl4S9JNsMw66hl67aLukgzlFKVhy3xft/55kc8fOSRZL+hHL6Bu4SycqUZAZMrjzjRQbsR+izJ0fE86ZOcnJU7Jk8HZLkmzafEMAwM28GVcN6NYY3LCwskk6VmahR5hm2aJIkccxTGY58y72NKevYoDslVAbUE0BUhtuqu1NuDL0BRppSaSprdyKrYkGcUudg/SkriLCaOw9sgSSFga7OLVjjYEk1SEoOiops38PeIvYMB2+sNfvgjQSiRFjZxaKBpCoUpGVJV8FZnvHol3jfXq7S7XQZrXc7OhK/8D3/9Jd//yy4vTl6RSGbMNDyhYdvc3RPV7Pn8nNCfU9yopgLUFIwi4/RI+K2NDz9kFl1Tagm6hKyFccl0ekpzfYPLa2EvzU6bpPDx5CEiTDNmyxfc2z8gk3BGw9LJyXGqDaqtvrSXFYWn0OyIoFdbVXDqLcZXD/FLEWR//Mm/YWevxYuHn/L4oWA6bLXXWM3O8UrxnFW7it3qsNbfRDPFM4yOr3jx7Jo/+7M/43ou1lbor/BdFwMRI7z/9jvMpxe4hk8pE9iNeoPR5TPi2QabAyFp0rdMdO3sFno+Po/5+OP3+fLXf0/gi2df31lnHijsvVPn0WMBSd9YX2dto8d8LNba/t6QTjVhdjnnrXd/H4BpOkHF4e7+fc5PhP9u1raIvHOabXHv84sjum2dyeSUhi2QEdVKBde7QFUcApnEP3p4RrvWwpLg3SSIMEuHxSzgYFf4jV989Rl9y2S3fx9FJuNWgc4qWGBIZs57d3d57b7ky6+P0GSMp
5UJk5NjmsMuFVv4hLV+k/FsRSATwxsHW4yuznGGm5iy3eWb3/keceLhll0+llB273zEevMOxGJPcb1LFKWLU53xkx98Jm2jxvV4xIffFQfHydExg0GFx09ecu9tKeswmZDmY5Jgzt1dEZupVsLk8JioKmJaO9WYzDzcWsA8Eb7ECFL6jR7Vgzq+J5L2RVjHU8CQJHSaXnBx/ZQvHl+w8sRn3/zmv2RnSydZZdTqEnqpq0TxElfS+09mC+xqilK0Wa2kTI5iyoPSb9j+NE2TByop11CWOHaNKIrIZXwTJyFxHKLrKnkh/u6GAv63XW8ILd5cb64315vrzfXmenO9ud5cb64315vrP8P1u1G5UkoqdZnVSRPSPKFah0w2y11dj3GqBb2tHi9PROaj022gqRmVqsy8FBZR5GHrTR5LLYN2q8bOwR38Vcp2RzQItrstnj15zkr2ROiOhmLVsR0FLxYn0YV3iWpZBNmKaktAGtIip1AiAklZaeUFVlVjulpgSL0a3c45O7+kVumyuyuw6JbV5/J8wflEPPe9B0M++/yaPM3RJM42NX1qtV1Mx2Es+4vmiym6sWD3QDz3aHSNogQs3QuyTMJPxnOiaMZiprPyRPnZtm3a/dYt9GO2uKZVH3JxccXmusCPL9wjsqBGbInM0traFt4yQ29EN0lG5sucYW8bxzZuKXJ1VWU29dAkBFC3M4LQp9aqoMqsXK835PXFK3a230cvxAtOZlPaPYtC4uV3tu9xfDrFqvkEmcje77fuc365xKj2qUks8+eHxzS79xluizk+vjwniGcMujZeJh708nCKbkYcujM6UpDnYnJBzzFwM5Fx3+3ex/PPOfeXZBKylkwvePn5a3bXmqwNxVxNw4DFSiGVVdRae0BsTCj14BYythhPMGodOuvbuLInIC1SXp1M2NwRmVdFs1E0ePX6kFL2itTqPZazFZppUMrs/cSd0DIaZLnM5lslqzjiIjrB0sW93EXM0EyomR0K2ctQra3T7NZ48kKI825v7nI9TvCXJ+zvi0xSHjSZldcYWo1qTVTUjk8u2b+zx03CZXadkHCE4q/jyEpA6PvEQY6i5ICktl2rEPsRzZpcQ42M0dkR1ZZOzRS2cHE8JS9hxgnDDZHp9EMBfZguhO1XbBWDPovlK379hVijD+7u8NO//wdUKWq4Puxz/+4nXE88ItrSXh384IxOY0Ai12hByNmpi6EIeEiz0WP/YJPryxVPvxbNzm+9LTKEj7864qOPRYb05WzG9dWMalWs67X6ENcfoZkmg67oW/DchPffO+Dhw5+AIfVOSBlfJeztvQfA109+jmPXaTWdW62PLIe7e+9xenzG5pYkTjg+YXd7jSSUaygv0FQHx+oykLDS0fWE4aBHkmR0+lJvbJSieBE7XTGfw3Wdh18dMhius1wKSMfeTp3jYxs/muHEQrBTYxurWmKYwqYsy8D3fbrNIf/j//Q/AEIQsSgKojQmiCT0K8/JsgJLirTHvo2qB7zz9if43g3hwxLHWZLmCsj17hgmuqXfIgHiCGzLBCXGXYkMX0PJMJIh7uoQuYwIkxCtluDHIls53Oxx/qJEU/VbeFhZ5iRBQRSPSUIxnnlik+QFmoRwWGaNpT9hc62GJW1x5Y/pVhx0tbyFOWZpgqGb1BxRDT2NS/QK5HmBdkNupPtsbfd5/FTMp2ZUiFIfVJWKJElJMpfBUOP1UY4uHbimFaRFKDnShQD0cK3D8WGBLSExJSWaVlAUEZnUINzZtxmNftNYHSUxqmJSlFAUwnft36nx+a/G1Gz7VmJE6MkU+IEYu4ODIY2qxoWXoZo3pBMqpm2zmHtUHeHzTo+P8JY9dEmpDjlhLMQ43ZXYd37/+/dZLkqCcEWtIX6wyBXx3HLMWx2FZqPG64sIR/arpkVCHPmoZo2FtE+1LAgDm0KmcPMkx9BV8rK4pWv+6KM7PH4JpaaTl2LckySkNLnV0Pv9D+8yHLgkkYfKjYZOiq4bJHGGKnv7drd7qJUqK6kbaOkmqe9j2jmRpD03tYSKEROv6qyWEgHSV4nLAl1WjtI0x3JyJpMJhSTaMi2H2M/RnQhd7od55rG+1ebsREBbFbWDCty5+4Djsx8B8IMfPOHbv/d93GiEKSdwc/N95osF/pFYVx9/fI8z/QVnUigXQMm79OpVvIEgoQojiAqNSqUOuaiirOIZWQNq1ZKDlqhmjUYjVNvGkFpRy0nGoLfOy+fPKDSx32exzf29b6CZPqOxmHelCNGSAjcTvbf+ooXZrNHb3MOWsNXhnQ5HV+d0N9ocRGId9dbu8ejpE7rdHTmWFpZhcj15hTsV41uzh/z5X7U5vnjCbC7mqtdew2kEzBfi9wbDDu7Kp97og0TBpLpDu/suzGZcXYvnPA5Dak5JHInQ9eT0CQt3wfe++SGf/upXAMzcLpcXZygb+9QHYj0sF6CkO9y9I2z/81//jL31dzHbCU9fCqKmzft7XF4+I/YVdrcFJH4eTTGtbdDEczcadV6/egiFeotiqNTr+MEMPy1ZToWulhuavPdgm9ATEDm/KDkd+9wfDvjHv/33wvb/4C+p2iGnL8+wu6Lao3FCOzXxTWHDejHBWI0Yh23e2hbQ6zwMqTRCnp8ds5aJPXKrU6c+sFjIqs3hquB73/8Tnv/6M+xT8c4b+7s8fHHExaHL3QciFuwrJm74iGYq/GucmxiNDG9m80ASGS3PXfpbd7EMMZal5vPk8St2dr+FIVtae501jh//CmopV4di/0WPmb2ccO97/1KMyfycyfEl9967y8cfCDIgpXS5PDlEi3WiqbB/3c5IwoxGV8Rzg8Fdhp23cOM5viQfq1bWWLkuvWYfZG/oeLzEqbbQZftJt94kCQpUTRViWUBa5liGgW05t9IyeZ6L6tU/EyOeLs/pdDq3hBbtal0QBBWCIwK4/f5vu35HDlcaVl08Srs+4OJqRLMxJInF5lwoMfXmGs9ffH3bVLtcrVC0iOgG4qBWqFTrjOcRo7lYiEW5g23ZNHpN5rIJ1I9V2v0dCkRQn2Y6tq0xm+s4bVG+nEwX3L2/zuGzERub4rPrUUKzukEUCviLn3ikZCzdcwpPOO6Z52HbJWu9u4zHsvwdnjCZzTFlc971ZY00PaZQU7oS9mRVSlw3YLa6JElEWfe73/0Tfvnzx1xLzaBKTSNPIfDT276Fl6+O+MbH36XIJnS7omQ7W52TpTllIRZUt19jOfNwHJtWSzzneFyCNScOhfHWq13s6imT6YjtDWH0ZT6hVDLizMCQsKc4hdKExprUYFmUxIVOp9GmLnH1q8AnSuBifC4MH6jUm5xeJeysiUDx/Dxk/+49Xrw4RtNFID7y5/hZSLzIKWU/x87uAZfXAeFKKrZvrXN9ktDR28hYBz2JyGMN4pBZKP6u1JcUawecn0qtr+5zlLKG06hwdC4Z/pSM+/fvk5AxceWmo2UM9xv4gfAYx+cXqE7JbOaTS9IJu1mwClP8+TVZLhZ6Xa9RbbSZSA0ty+ig5LByz+n2hUPMMoNFGlKptDiUDe2VdouxmxFKCJnV
G+IGLgZbNGTwYdVj8jhl5J1QtcThZmOnTxgblIVYM2eXD8kyi7oz4OJUlPe3dtfBNNGo4kWSlTK2+OqrY9pNGYSupsTFFb12i8VUkoYYDkViYZt90lLYi2EXLFZTjIokk3Fz+oMKFXPImYRs1ps2JycprZ7KeCzFh60KbnhBIZudZ96cIj+hWd0AyZ7Xqt3j3ncOGC/EQTEv4Oq6ZLQ6ZemLNWrbQyr6OovFBaot1vHr1zP+8A/+kotzsaHmTAndPlGU8cn3BFxjMhYHuHtvtzg6/yUAa8OPuBi9pJTELfMA8tKjYnfpS9HEi9GnNJU2SaGwmAg46HDtAWnic3wqYIidnoMfzOl37xBNhf34boChmgzXm7dz0+qWRNk5mTw8TuYZG9t1FsspjiUCoqS4IgiH6LrOkezx6jW3WU01Gg2RBHr5+hI/DkjKa7Sb3pu4Q5ZOUM0aoScSBIrSJAz9W0HNInfpNLf527/7Pzk+FRt/p9tn5fuouvrPNggBISukP02LEa1Kg4TnnJyJw8DS7VFpr1FkJYuVeGfHDli6EX4oBWA1gygKsWzlFnJ4/8E9vvzsp1jVXQrlponYIHIrNKqSZWz5BRP3DqbevNUWMXSFwI348MN7FJnweRdXc2x7QHrznCUohYZuznn0a0n4UDFR0EjSFXki56FdYX2zzi9/Imyq4vSIyyVxkqBzw5o6Z2f3Pp9/Jv6mKDIUTRBW3IplKz61mo6SdbgdOS1DyXMS2cOzMezRqAcEnkJW/gZyqKgl/jKjURe2l6Yxq3kDXb+ZzxjNzCGqoUj/vbW+z0k7JvRVCglHKTIF01TxpAZTxSpIQwPTcEglgY5SaKRpQhwlfOvbogfZXf4Md9WjKCVjqhIKPfPSotsT+5ylQxwYVGsmpbQNVVcoC+X2UNhsqrSqQ2yrjpR8oiTH0C3UUkWXsDk/ROjs6DfNTBmao5MmObYj5mUyfcHJ2ZKiHFDK4E1RSygs8nQqx+WcxaIgyypkUo9LKyCOcnTbIgwkC206whubrGRbX6MRo5QKRW4wlczDO4MWrYbDLx8ucGQfZhy5aJYt8JZAGoU4VYU0y4hT8ZxlqZBnCVHskq1kMhUdRY0IM+FPVaooms/O/gP2T4TfePHikB//6Iz/4r/8hFdPvwDAC3LiQiFVZEvDrIqSWnQkYyuAO17Q29lh/y0xB/PLKVv9bYKgxsWVsE/VDKmYDieHU8r1G9bGIfVaj9WF+P00XnJ9vWC4toUXiue+XExZuT6asaRREc9+enJB1XboSjH5tB4zc19wfBwwXBcHjb9+9m+p1Wq0KlVOZ+LZ53kDrWET6OL3lucZ+5vfode5T5ELGHJRKrhZC6VZp2KIPVJxErLMZCVh3laSsPX2BotpwbU88HUKBTfwCRdLbNnerOY5aabiywP8ux/fQy8NVrFOsyXm6vX1IS1nwGJ6iHS7OFaL45PXVOrCT25v3cFbLZmP52i6GOPUHWKpHkEYcymJmoxuzGTiENzAgps1BoN9lFhFVYS9LGYug80WebHkdOJKO7vH65OvuZ7IZEejyfpeyeXsKXd2RNIvHE9p73bx4jnLibj/W7tvkxdX/PCrI/F7TslW/T323or55c//XwBazQGdjSqhDzPJtrq1XSf3L/GuxL56FRW8eFnHdGz6Ul90NnmN6am89+EeDzZlAnR8SZgsefZIMF5HRp/3P/l96r2Q3JZEO8MOl/MrYilMX2o59eYBaAs8CSd2wwir0mTlGpQyKWvqKm9/8E0OzwQ0M13p/N73PkRRQuaPfiJ+v9Kl1qgQXL2gUxHwvkCbsKuqOFVh02fHL2jUNmnaCUYqtehWVxhaTOJGzCXE2LbqhNkcTRX7OEqFXJ/gBSmGTChVagZJmlKkKVkitSfjGMMwbpMrKBrtzhBV04himWgrb8STy1tCsjKWrKG/5fqdOFyVKJiOcO5T16XWbTGejljMhBPR9BTveEGjsUanI3H1J0fU6nXOrsQhJstG1Op1ikK5VWhGjdEtm+vJ8jZTfb16xmoW0m2Lv6nULGbLGZoO47kUB7Q05vMSw6xzLcVVS83E0KtMluKwUy1qzCZzDvbXmc3EBNRbKe1Gn8nsiCv5XJV6gW4qDNeFMQfJc9Z6feqVXTDEvVdhSFpkKJpCvysy4xtrfwz6MzTZ4G6VbTrrbY5PnmNKWvn7dz4hTVPW1vaYz0VQbRkmilJlbV1ikqcR9+5vEgYZnuwteu/9b3B2doEiN2Kr4vPsq2vu3X8fwxGGGk0jFMXBMGG+lMKmyZx6rcWTr2Vvk6PzzjsPePTwIYUU2dNUC6duECUeisSwz90Qq9rgdCScZqs1YG19yGQcEkbiXmfjMYYpsp1zV5Jx1BR6gx0uMiFmt/InDDY7nI9PKVPxWdWyMRo1puGYmi4Ocy21z9F4znQps3KVPkrpYa9U2rKhXUtyfN/FUyCRTfWuP0ItTVpS3Z5kjrsK6LVqRLGwxarZBi1gMc+pN4SzUTQdw1Fp9USm5+Lskrpdod9vY+ViUXcaVbJKjfPRFblkpVHMGlmmcD4W/T9OdI1m2KhWzotT4Wy3BvcJVhMcp6Qoxfx9/fSYl8fPee9tETT1ux/wxcOH5OmUXUlCUakN+PkvP2e4hmQ8A6MSkysFG9vCfsqTkt3dexxdX2EWItDwlgnddp/x7IJKS2weK88n0TWmN9TzSoI39ul0Ghydi/WwqayhGRF3t77D8aVofN3Z2WA+TbiYioPT+rBLo7LHfL7g3fcFzn0Zp1S1EbkU2c6LlOv5a0ol5r1335bPBEG8wFArbMsGZatyhxSXdl+Mk+deEq0yWq06qgweTSmaOZuOb+cKZcXdewccnUoa+2iEU1GJsqdcXUvmo3KDMotY6zuomhjPIjPIihGbspE7z1RmU5er8TNsedJ3jA5xfEW7s8V4JDbZvb17zOaX3OgNbmwMqdYcvn74U/b2JasRPq3OAJU6h4fisNjrQqRHHEu2Kd/1SMuIkauQJpIuffSc/mATz98jy8R6t2suWlEhuyFuURROjq/5/OFfs7MnDuevj69Q0CiVnFAeGhR0lBLR2wPkYQNzEBMGS548kgQv5RpeGGCaOiVSPLY6xzD7rFwp6uu4QExZNAkiYS8nF8fUjDZRamOo4vd0NSaMFCyEfxsM1jl+VqUoMsr8Jqi38Pw5rmvTaogDOkTkaoEuyQ6yIsFQVFTNx3NvmqvfIk4n6FRIQuHPwuiK8WzCVFLGa0YAhoaumbd9ZrWqytXVFUohNueSBMqSIofVXMzng/tDNtYiPvMLFEN8L0pCTN267TELowWWVScMYoyamIckTzFUnTLTUDSxjutOjyJpk0mKc8sssXSbwI1QFTEu/irHUNqssgg0sV2rSkmJAVI8s9UymVzHRGGJ3pCkEJmCbpqUhUt/TY5VXEFTa7fCypSFIGoIfVoV4XPr9QZl5pBmOXkhniFVExyjRuCLJE1ezDg/alPkLXQpHh9GCY5ZJVitqDTEmC9XGUGikMiKVEuroZYFql5hPhU+wl0WGMYmBertodoyTLy0vA1O7Hr
Bs5dnXFzWsXsiAIt9F1Vz8P2QtiR4qLdqfPHpMWkh7DwIfMo0o1ark0lyjm6nSxKnzBY5rjwstiqKED/VbjLSEe+9+wCiMaFkMHMqBlHuURQZ4Up81qnBdObhJsJvlJlKVibM4oKtTeE3Lo4v+OzzE/71v34fpyq+16jDeDRnd0ckgRQtQCly9BvYCBAaHuRblPMAACAASURBVFdnTxnKPmXbUFmkU1w3x6rIQ2hSYzGO2X+wzkKSekRLg9HqOakvDj+O1qQxqHF29pq6syfGoF+C4eK6KY48oG/vDEjyBmOZTLpYXFGrbFOvN6nIZE59rYfvX2LbDh1Jpb84f8HW9lusprLqlyRcql9QFCaGJSvX1Q3mixf84tEpB3tiHRdek/tvbfLll4LZdflqhFa0CMsVjTWxFzUbbdLsBK1VwfUXcuxMsHVqsppmKDU8Q+Pq8UMK54YIK6ScJuxtbHN8IRJsS++a+SpnMh7K78HGWod+W2MxE05hNn9Gt9ajOwi4vhS/1zd2yPUj8lDEDfPEp3ArbHRtRueSpGx9k9gdYYQW76wL23v09BF7O+vYsrIbhHNefr2gWmlhqTeslGfMjksMWweZEND1BoVi8C++J9b23/w/vyI1nqCM6lxfiWeyrTrRPGO9a5PJtXV8fohapOxsifixNblmaNX59PARzwOx/v/gj/+UytaUq9GUs4U8DC9C+sM7rN+RyBzD49Ov/pFOq0siCZf2Bxu0uh2OX4lDUmdNwV/MCc4zTGkHQbKk2mhwcTXFlARPG/02jw5fMgmEbZpah8lVzMKb8fWXRwDc+fgDtqMute6ApYxPg+mSrfe+gSvjjUGnRZFHKIWPKQsujpbj+VOmno0juQOKuKDR7JMrUsLF1FmscvpDkyQWY+AHOaCimQ6mLAgYVo6iKLe9VGopeAE8z70lQMozkTzSNI3RtYh5kFIDv+36nThcxXHEpaSQ1DWLSs3BchpU6rJsV7o4RpvL6SGzVBxaNKdKlCpoEj5VqzcwNJvZdMnGUFKalhplgSCOkGVdp1pg6U16dVGGvVydUWoFQZwRFyKQyYolSrpEsSoYlizPatsUrChkcBBkGYWS0m5vcSaz5zEJK2MJRsD6llhkuqGCEt9Sjl9dLdlY75ClC4Y9QQd9dPxPZMo5veYeK18cCP7df/yf2dt7izgUBmA2daI4pdnq4DjynWsK8+UVafYS0xSLo9XqoBsVrmXjdhRlBMsVllXFtsXfvDq6xDQden0RhF5dzun0+ijGjEiW24eDTTxvQlmWrA3E9/J0SOgXbG0IR9rv9nnx9BGtTp2FJ7WbjD4YfVrNPrFsWly4I1q9DpGET5XVMX//oyc4Wp1eTwTAL05GOJWUrV6XVS4y7HpuMp7kt4w0q2VIsYoI4oJhS2SgkiJmOV7Q7e6zkEaf2COUzGS9Jxy5UmhE8SVmZY1rWWVAj7FUF0Wp3rJE7W6+xfX0EKcu5mo8zmmt1cmLOaYM/q+uRrSGJf36A2ah1FgKfe7fvUcomy2z8ASnpTF3M6yO+BvfLxjPXOyGgi+djWPZaGbE7sYfADDLEnrtIZ9/9TmtrmQw82ckUUihlySaZLPzQxrNFoYqSvnTcUKj3cNbFDx5JhzSo5ev2d8cYJo21yPhbHrrDmmm8uVXIkv1jQ8+5Oef/hODrQMGPbHpKNkZ1VrI1v4mv/jspwB0e3v0du4Qh8JB5UXM+fVTCiNmbVMcNhTdZnNrjTTO+Pid3wPg8OQx89UUU67R6WRJVAtQFA27ITKmRbzi5PwYTb/JiqkUlkq1ohLIoNfLp5jWijLvot5AsdQKs+mC3pqsNl/HVCxQ1ALfvakpSJIOR0GTUMwgHKObDXJZCen3LU6PTlnMlnzjA7Fmq5UacVTijko2diWLkpfT0hsEsgI+m8SsDdsEyRlLyfBZZjm22mPpnd/KFyTxgDRsoWgi0+qn51wdpbT7LY5OxQb31v3vYRo14iShWhfBlOtFaIYGN8yZrQZ5FtJrrHN5Lb73rfe+zWraZBWvUa0J+4zCJQoxeSk2QsvY43/93/4t9x7s8O//w1/fjl2Wl5CmKBIuoSo6aZqiSTKSMLlmbbCHRps0FuuqNCL8MMew9FtW0299ckC9YVPmMsOfppiaRRwVFJIO+p1373D+skqQudRkBZ/MIQ0WSJk7LHPIwstp2PkNgoM4yjHNkrVuj2dPD+VzWpRKSSx1derVNWbBKZ2ewclLsf4KXMrcoiwKDF3C1rSUlXeNbT2QlqGS5gWaprGaizW6PtCoVqt4vnhuvaKQJSVqKZgFAbJ0xeHLM1TeoZCOo4xNMjIKZFXFSLi68LGsPlGmy890SmC5umJzS8IeC4U81kk0Sc2uWqiBIIy5e1f4iHpdJ4srlIWJJuFSqAVFkaLIstHO9oDZ5UhQL4fCD1d0G0qLIFjhSqINyxAaV0VVEimlOabeoCyXt4m3JFUpCouiKIT+JGAYCrGf0mqJQOo7nwz4+nMHpbRRVIkLQkHXTZJ0TrUh1l2r0+PoMqMmofXZKiEnx0tLKjXxfsvFjOWiQ1EaKIgxLpKMssgEzhZAL2jU1/GjEEtCExWlRFMhjX16m6LKv7G1ie+OBZQT0DMbPS+JkxVheHO4MvEDn5WfYzYlvC8PQVMo5OHO1sFbjvDnComsXJlFIenmSwIp6taqlawN7/DLr4X9BKGHYYFqmty9IyDGr1884umzZ/y7/6PLvXflXh4XdE2L89ciwbO9M8RuKngzmYEBmnqdjbfu4C7lgawwODt7hlLt3JJ/+eczynqd6QRKOVdZtkAzFSqO8CN+VHIxPqde6VPKw7iiaMR5wdq6w+xc3sur8Hr0kp7U/9td30ONW1gNi9NTUU0f3rnPoDmAwMCuit+z622ceoAi/TdqQZqolFmbXl98dvrqmvWeRbOV0usJv98yazz64ghNvyFE6WI3FcpzWMnE16qsohmf4CZPsBzhKJZBylu76+iSJe/4xRjNLuk/aBLfwJDXvk/cfsrSnTBaSLr7O/exFtcEsdgfAz/GaeyRZRmlKmKQ56dPWO+r7N9bYzES/npyOme7v48km2S1OGUWTEgiE98Xh2E7vcTWa/zhn/8JP/zZfxLPMNgkiTP2ZHvID3/1U+48+ADKEkNqOs5XLqU5ouG0aG8K/+0lC7AUxithK8ONHRK1xJvmDGVybHOjTjGJ8HON+EYLLihYX99kNhZxn7Xe4On51/RaLQqZoP/yy0/Jg4L1zQFX0jfXOk3G7gXNdWkviwinUHGTGR1dvN/57BI18KnUxd6rWDm6Omc6WrG5IeLojqUzH49pV2tsS8bEshjhUWF/KOKw6qDk+BdX7B1s8ud/JeYzKTXy6RmPLyYMJGGXO/P5+7PPWdsQNtZodZnOznHU9i2rcMw1qtpCq3hEspI7Hvts7G7TWxcxSep7WLmLnik4ppjjV8cptVqNnIw4uGFDzTA0nTi++X8qqlhFSVaRqK0kIktSbNuke0PM9v/PZ/GG0OLN9eZ6c7253lxvrjfXm+vN9eZ6c725/nNcvxOVK1VVmcum95
3dLU5OX1Gr1bArIvNxdjKmuQ+6aiAh11RqDnmq0WiITEtRhliWjaIkvDr6Wt5XJ4lzNrfWmE5Edml/f5dF6nE0Fo2UZW5iWnUqdZNOT2SyLq8UsjCjVq1RSCrfNJtg2L1bHRFdSWg2axydXvLg7W8DMJ5cUqIyHs/56MMPAHjx6gsCf0mrLnC2DcvBMhOchs3RiaDINrSAdmOAH0wZrItM+Xr5FnmR3ipLz+bXOBWTZvUuV9Mj8X76KeQNIEJRZAZIUW8F0ACazSp53EAzElYrccJvNWtYdsnJsaSC1ius9wckUcLUExmbvDzjYPcbPH7yBa1EVAeajSqWXbltWI4iaDQG1JubvJwKIdV2q0IJnByf4VTlWJkBRA0qmsjGxO4cS1eI4/y2F213q0pahES5hi5hXYGvo2uTW5FdL1iwNthi6Z/hFSLTahoGemLir6a32bSKtg62waWkv767u894cs3p+ecMuoL+FqWGZTaI44CVKzLzr19fU29UuTwRGcSN3SFlWXJyGHBwR2RDfM9jcWGiryVENxTOdsR/+sd/vIV55FbJZ8+esnvQ5EJSVDvVNuguh0eL2+/Nx1+gmzlrTdFc6icTFmMFnR6lhKzc3X/Ai9dfcjZ6xoO7ouR/sDXkanKO7wvbiPMrAlclD6Hekno1eRPdtBmPprdN9fvrH/Hzi7+h2xMVsKrToVKzKYOC00BkvOarY7qtXeZXo1uh1igJIcrpSEz78fkJO3tVJlcaSigW5F/86R/x008f0r5T58nRcznGLsN1i709kcl69uw1Ybji/XffQyLbWIQr3OWC3e1P5BhcoChNrq+O6PVFxm3lhlQcA63IbqGsSTZl5r7ACqQuS+2AJJlRN6soMjO4kgQgmtojl6QMWTJnnl1SU0UG1ShM2nbEcGsbfynmZTU7o9vooxkB85FYI81Wi6vZCVkq1tXa2j5ZPkGlekuA0nB6zN0J7njMu++KzHUQzWg3NqmWUmdP0eg33mHlHdKyxby0WyaHx69ZBZfsbIhMZ+Bd0B++zWokbHHljSkNhdTIeOf9bwHgBgYrZZON3TalxIargcM89Li/ISCqP/6Pf8dHH9R5fnZ8W9FrtVXyNEJRFBSJM8/zFF23bqmu1USj2oF2t00oySr85ZJNs0PumGRS/y+lYHStgqSxLcwSt0hRYwMrE/du9lr88scXdIwmQSL8sKXbaGWBn4lM8mxSQSNHsRyKSJauVIUsztC0Y04nEjdT7qIkLpqs6IVZgqqn6GpCnAg4UVrkkIFdsZhJOF9vLSacV4nz31SyVHL00iSQpBrbu1WODq9JZe+UYpcUaYbRajG+Fj6vdaDTrOwwD0c0bQH9UooZmlklS8T4DrZDrucuXtbGlgQhZW4S4ZHFEfe39wAwdZ2AHE3uMZRLslzHdyGNBMx7saozXfYxq9GtHwyDnMxQcCqSgrxewfcNEi3GkNCyMoJADTAtja2+2CNfzBKCMkGXVUaDGlnm4QUK3z4QayVLwY0d6rpOiMjkmomKH/sMdsTYKb7KxWKOZvRRZE+ZY2jEeUwUKmz2RAXo4jqn4lTJb7RhFAdF10lWS+4/6Mh3KQkzG1OHQIokl4pPiklFUjh/670dfvAPDykMlTgUdlY1dDJdISoKhh0pRJssuZ4GlIasYCY5QZaiY6PKqs2dvXX8wMdLC5xM9toqGWDg+1L8tFHj/n6FH5wtbquvCgpFUaBZNUIpvL33fp8yhPlMVuoaJppWRdUDnKaY07WNd5kvn/P1kxO+9S3hBx89ecp3vvEeJxcCGumFF5RmDasmcaUI3annz7+gookx6LS7rDKLe9U2r5+LNdN/+4DST1GMBFsKhFcLlfmkRJc9l/2eiesZJC60+lIbTtUJlxmRYbBKxPsl4Yo7OxtokjBIVxuU1oKCnDv3xL6z9FYskpIvPnvKnQMh4ttt1Lk6idCb0tfaMZZtM7m6oNcT/caWDYswZ3/9PqFsiJsnY/Sqyu6BQDm8OPwaLa5QbzYIZO/yxL0mWV6CGdBqCZTGZDbm2VOPlbxPpWKzVt3g8vIaCuHTvfjX3LlzwOunR7TWZKtA32F8DFZdfG8RXrGM1gnHM2p94TdanZKJd0l5omI3BPzUcyMqdZ1nTwS8sF7r06vX8YIrrJ6wFz8qcJWUz54c0aiI+TqanbJUalhS0+73/viPWOvW+OHPvmZ1JJ6huamxO9zl2RdHqIbwQZ1eiOeeksm6x2hxSVXtU26Z9CwxBhW7x0Q7IgqW6Kb4zKxrHJ6cUkr5hPDlmG98810qTluMDaKqOc6nVGsWa46wj9OTa5x6ztMTAd1PI5vuXpuq08OTvYy2rmJ1WlxKiYpgruKs1fnm3QMWgbDz8cJl491NtCjj+kqM1TRasX+wy1T2wquRxfa7Fa69JUtJOle6NrGdoGd9kGiSyWxBo5UzXop3eX24RDNdgthnd0+s7ZrWwtIVvCBHkQRr3/7eR7x8ccjXJ2IsTTWkmEGlcZ9lLMWVq23c1QhDL8kz4U+NwsIqErYGYiyTioqpWASLM2JP+Lf79/ZYRKdcXF1zdSkh8DeMdL/l+p04XBVFynBNBKZHh68w7QJT71CThrq3t8f4esF8GnPvnliwo9EUywa7ItnDshrT2TXbdywST5AyhPEUZwjeKse2KvLvLChTqhJvaZo2dlXn4vIV3pnYBLa39jk7P6JUgluGuywpSPQZnbZYrCt3LLR/fJ0XL0TjpmlU+eDDjzg/+wG/+vyHAKwP29y7820efS0cqVUraNT3ODuZE0QChri5uclsuiJNQjzJ9a+SE8UBjiEOW1o5YLk8YnPTZtgXpdjp6hFa1qHZ3GWxkkQb/px33r3DxYkwwtC/piivsFQHRwZzpt5hsbomkhALXasQxTlploFkQqrVuyxnCpubm/jBDSvemMXMYF2SfIyulww31ri+fMz6thQoHL/mo3e+z3x1SibxsesH29j2HqNrQSqg2T5keyRZwdWF2ODi2CBML1GVknpNHGRW3or+UMfRxME0KWaMp8cE0Qgku5UbOGwPBnh+hKlJIWPLxzJbdFvCgcyWT7FqPXa7Dzg9Ev1NjXodQy05OXrMxlCKFtoZtt1nIvWdHOddlosVpZbw7JU4CNfsLk7F4vnRp9y/JzZLXR8Sx1csXeF8Wh2d1WrFdFRDioCjxgm5ElIqPl3pzL2ZSqej4iVSTLrb5Hp0QaNjcn4hxtx98gsMJ2Nj8BbXI8FqtL6+jmkbFIVkY6qso+slzT2DOBbOYDa/YrSYsTbYR9fEnLrxHKvmsHDF5vXq7Gvu3P2AxfySwP0NM+DS00iLCnf2RPBoahbz8WtGMzHHFaWCEWcYVszOA+GQvn70GVWz4OTFIZqEKswXS3qdIVksmcE++CNWwSvKUqPXFTDEHItlsGA0F2toc+OA8fUKy/Fwl4l8vx667rOaL3Fs4bKePHlIq9NEsYQD1qsRttHj4uw5ra4IJn2paTabvGLQ2RNj3N5jNr/Cl4FjEiQ8uPMenrvAroqAL89z0mRB4KlU6uKdjy+eUq21aNdEUBEES0bTM
4Zr+9iOsOHx/DH3736L0Vgh9cSmujO4w6sXL2lIkcZ6vcXrs5d0OiaubJLOEgMvOmHQu0uRiTXq1GvMlydsbonfy8992v0Bruvy6kiwMW2s/dd0exUyJUPTxXgmyQk7GwPOj4U/ePjy/+K//a/+gh/9L0fo+o1QrIqq6JT/DDSuauLfrR6Q4fDxlsVA93AX8rBTbeDGMbkf4jTFxvTW/S3+7m+foqgC0pGnAYph4vpLHrwtehfDeEqWR2hqm1IKL+Z5QRrl3L0rYHpJZJGnKUmakqcSqmhkVKoane4aJ+fifdAgL1RxEAQWqyXVaoGh11jMJRysZuFYFnGWkstGqCC+wLb3b3XEsqIUYuy6RVVCfj3/CtdNUDTx3HGWY6oKYRCgSEY/w8x48cLFsgeE8lBWFgZmaZNLdtI7u29x/HpEnueUN3qKpkoYqTi2zttvi3f+hx/+iDzbwrRvhI4tEnxW3ikffiASdkcnz0hTg4rSIkqETzCtOp4bU6kK5zJbeBSlJt41vwGj5CQR6EqKWop5Pzq+wrB2fzPvSkaWKyhlRn9NrJmZO8MwNfIip5AED4UCeq4zWgkyl5n/AUWiUast8Vxhr5WKjqYrrMIzDqQuYjp3iOMcQ8IZFTWjLAqyJKaURAadVpeXZypxEZNLWKWlV4lXcyx5cLucnDBbJqAJnSmANPMptIQsyvn4fZHIuLh6zWIBSiOTNlYShRlqvqJdE8/Z7JeMvi7wveAWrlgkKioqgUwYdLoqdmXIxdkYS5ekM1FEnimYjkq9Ksaq3tR5/uIxltQ7KkoNRQmgKMhTce9KTUO3Vxy+jPnqiUh47N/tcOaNGdwT+9pkMqNVs4g8uc6A0eg1CjqxTD42G30e7I6YjF7T39gDYNhocI1HFEXoSv/Wzrb2EkYjkTzWtB6Bt6TutOhLxuTHj3/NannJwrbZ2xb6UfPVBa1qnUkmfOb1+IpWq8V49oxCHlpKLSLyC7rdNloq1uOXX1zTaBb4Y2GbH9/5kJm7otOu8uiFaJeoaw2cuobtNHj++rEYz9xnrb/FF78S2oLf+da/4vgXP8JVl7y9KcZlESTQczk/vCCSz5WnFTqtPTqyZWR7e5dXRy+wzBZeeBM7GTx58Yr6zhqJTIaVHPDeH/TwluI5N9TvcHpxTKQVhBMBoXY621SVkjgDRfbtKF6FVZYxHAj/Fikek9UFd3feuWVRNu2EiX/GV8//gYNNEXve3VpHd7bpSCjd1WjKj58+JcwuqUth843Gxzx5+AqUhED2yE3mI/Z3N5hJ3bJSaxKkY67PhiiG2COfP3rI9rCPYdRv4754tcKfwcEDsQ+gKLx4fobvHVJxZCLRDlHQefLojD/5U9GKMJ59xeR5wP4d8b0gnHM984ijBdyw7jU28cc+zS2xh5k1F0cdsnRDWjXxvX67wtK7ZBX75BKGWG9Z9Lpw8lwcpLTZGllU0mo4VGtiDWlVk1J30MuY0wux/g7eWkNNa8zmknPBSlHVCrW6zmQiYoJT75yKUyOMltzfF3b9y19/iu+HFJl4zlY7QlFi5qOrW6bDMEjZaCs0ayaTWMx7s9HkKhgxXoj935oEzOcmg3s66U3y0TOIxlOsQvRpAzRlHP7brt+JwxUKhJHYmGq1Bmmac3U5odMWk3Q9OuPB/Q9oNuvMFiKjl5cu7V6HqiMyDK9fjqnVasS+c8tu1zA26PfWOU+Pmbsim56VEe1u61YALMtjwjBA1UqqMriazzwcu854fE2nuSfuVXcIozmmKTaASqXGcjWiUmngStKLWi3g66c/Y23Q5vREOIOyGOAuy9vqgetd8OzZ1+zs7dLJxCarayq9tZyL8wA1E4F3EM/Iy5QoFgH1cG0fwxsyGXtMJsJx7u48oGIM8YIJLUnFOr4OWC5CdEuMXeBG1Bshg/7BbaUqdyLi2OfBW8KJTScrDo+fYtuwI6svR8eX9Ls+jlYhLyQVc1DBqphoUjQ5iXOePX9FzeoyH4nNxHFCHj3+BZpaYlfEZ6dnM6rNjHAlexmCnuh50aJbityN9QMuL+vUmzZpKuZmc6dGgc/phTjYrA071CobGLrN2rr43suTEWF4hWm2SCPxnCELzs9dDEf8zcFdi/k4wstOMW0RgCWRQW4bNNtdnr8Q/Rx37vVZeVfMZ6Kf46c/+TmDLYtaW2F8LRxNf33IbH5Jf7DD4bGwxe3BkHfe+oSjYxH0RvEZtqMReApOVWwC08UZJQnvv/u9WyawoD9mOjvk7oGopoWey/r6JqbZwD4QG8XzF4/Z2bmHohbMRrZ89hhDbWJLUcGj4xfU6hWytEUkx6Bi9+l2wFvpWDeirEnEeu8OUSqcCsqS8bjAtusMepIVhx0m85BqN+VIjsu93X38eUwsg6bdjR5Xxws2d/cIJWPi6ekVezsPsDTllszFUh0ij9ugZeU9p1ozSZMal66wYbuyRrdzF0Pa6/n5C2rVDp3eA549Fr1hSpqyvb1P4l+RyAD23fd3CT3rVgjw5OwJG4M9Ot11bBmQuAsxj73OgFUg3llVmpCVdCUNepwHzJcuUTijsMUcL2Yhw7VNyFYUhVhXOxvfpFB9rkaCWlcr1ujU38X35tiSyqo9/JCDrftEnsdAUut6K59Wq8WTpyKo+OSTTxj261xMj6jWxb2fvPolb9//PdJ8zqGsuneaB7TbfRZLsWZmC5f2Wosoidjq/wUAG1vbzMIFWlHBK4W91Dptyqjk03/63wH43r+o8+j5cw6fxpi2SLiUpYKqqmRFTin9oKqqlEpJXkpWLC3kv/mrHY4OX3J9LuZz7Y4KeY1waVBBbOqvDyPiWCORhzLyCNO0yJWYVSw2oSdPVxTlPuQlpaywF+RoRkJFYuhPjlxspwsoKDe9ReTEyYqNrW/y45+IYKdUClTVIs1vRGENKtUEb2mTy0pZ1bAJwhVOpcZiKXzzux/3WSwzIomrV8wCRTFRSptSVs8KZcl8DppMaGVFimIolIBsDSEvZhiVAcVMJZBkAFpeUKu2iDxxyC4Tg4o+IMu839DKaxGtWp8FJ9gSx1+prqMq1i0teV4EhEFCWZi3+1yaF2RlSkZAKmVADLMgCBZ8+O4N/b3NfBait1WSSLI4WhViN+b9t7cxVDHGRV4nTnMsW/ZO5omgfy2WzGX1fuoJZsCiyMhyEchUnAZxGKDWxL2fnT7CML5L7Beosnk9Ly3ytIflLLAkYiEvLGFXkjkzTXNAoUhi2g2JTnAT8kwXvX6y0b8sTHS1pN0UY2c6GbrRQ1VzkJXrvEhQCxtLNRkMxPhdnIREcYYlG9PjFMoS3OWM/lAcmPtrW/zd3/ySorCJZUVNKyqoZkEpbV+3M/7xxz8jTVtI1mWKuMDQbbIsxZCfDYcNlqOcKBS2qJJi2Aq6arFceHKuSv67//7fcPhkSd0S71x1Sg5fPSWMRIKw3Wiy3rVpSEFmgNzPUWsOVTlXP/77/5vNOwecjlX2dkUyYIHCaHJKu6vz+KUIYL25w8HdAWZFrPXR2KPM6pgNjbkISTAr0DGGbG28xXgqvqeYKl8+fM5gQ5AkOe1z
rsfnGGqbQCZXO41N0mxE1arQlExy3/39GuevX7O/IQ5p/mjK0fgYLTP56JNv3Np1GimEwQJH+pvFNGSlLkjlXvjrZz/DrFh4QQ3DEoeB81c/p7N2l/2DB5yeSzHgjR6GBo+fij4wDZuKbRGtIgbrsifZzXEMG40YVzLO/vrZ37E1rDO5FPtVs91h6p2xublOJiUNrkZ1/vgP3+MXX/2KpixKtCoFJ2dXNOSBqGFYeEWGn/1/7L1HjG1bet/32/nsk3OofKvq5vBSdz+ym6EJ0aJFwpLoiSXYM9sDTwwDHtnwwBMFAwYEDTywBcuCNbHplizBFgXGZlPke+yX4823cp2qOjntnDxY6xY1ECkPNODgrtG79fbZYa1vfeuL//+CpQxI1ktVCnaHZju5Dsalfo0kyjGbi/1RbpgYY4t2bpfVlXCknj87IlDnGJpJOSdkOGcZqElMMRN6Ks1WSzYyFwAAIABJREFUNEsWQZxD5hqwjZi19U0uhhOMggwkBDqtNYOFRCLudXaZjE9J0xB3Ka4xQgXX87FMlZ/8+F8AoGsazWqdTkvY0VeXY/rDYza21yjKIPfZ5VPcWcpsJUGMkpS7d2Mcb4jvCP1q5+uUShorU7umVNi6scX06oyODNxoicn+e3d5+eVPWbkSNZY5Dx58j9ngjIffFcHc42dH1EoWxZKkTzAKZMRomcLBq3P5vCKdTomNjW1efC2cziAtc/veDgUZYCpWyvz0qxdEbkK1JfbD9PCIk0WTQlXl/q13ADi5OmS9d5ev/lgQY290NiC/wh0ZnD4XZ1i0GaMYU/Dy3O0J2T++kBU6f8Z403P1ZrwZb8ab8Wa8GW/Gm/FmvBlvxpvx72D8hchciSiqiPT0evv0z4fs7ZUJJI781uYeppURxybViihDWjkmOb3CaiajVKFLoWwQB2U0GR29eafJF599hed55AvCC48TH0uxOTj+AoBmt4ZmtNA16zoLNp1OUVUL2y5c1/aORyvWt4p4rvBHHX9ImnnEic7uTZHun80muEsDRXG5cWMHgMHomLP+C7pt8e+bN36G87MhhlIm0kR0QlPrmHqPYjGg3RUe9tGxj0qJqSTdszQPVVNQFZ12U3j4arRJhE+z0bmGNFfUSz7+6IhH74g0tmVk6LrO6ckR1bLIig2uhtiFIhMJFx14MbtbD1jf6LCQ0dedzQIr94Ik0clbIvUbKwvytsnFiZjfre0eg/EBnt9nMRVR/163hqIvidWI4Ux4/WWrS+yMMdSWfMeIwfQEULl7S6TRw3hGHMf0rx6zu/seAGcnC8Ls8rp3azoEzxmQy+X49lNx7/msT6teoNoK8ST3zsrPUShtsVyJKE7/NMfobMrWjQrjmVjPi9MF9+7fIW9uU5C17o6T4+LyiF/8oUiZz8YZcaJg2wVsW2Sl+pfP2L1xl+XUZuuuyMz1+33Ozz3astxvOPF4950HnJycsb8v1mo4WFCr1JitnpPIUtNWdY/JNCNLRATVcyK84IxO+xFIsry797dZjH3mq1fkdCH7qR9SLnbwXBEV215vYuVSzk4uqTdEhDZv18gin41O7prH408+/CM6ndJ136AbxJRrDllWx5GRViVRqNpNzCxkpysiiO1GkdlQYyH5xsJWjZ2NdUbnBxwciejr9oMe89UpjUabTCKkGWbA1eCETZkNXXlzXG8JiYPri+idOk/JF3LkdcmTlEwIwyvC1U0e3P4V+Z59Aj+l0aqxXIgo6mIxo1BIsSUvC1kP3/cp5Bo4kXjPXFVERaMsRTUkZHQ6YzCdUE1EhDYkQsmu0JQKqSWzmmpEfzin0e0RymidG8xpd9dxLBHhj4OUvB0xX83oNEXUNlNtPn/6LdWayUlfRNjK+QJKWuRXf01Az5+dX5AC1WqXRkfyD4ULFEy6zW3SRERIJ9MQDeu6v3Jv9zZJskBLtqlVJBqTklEubpA5DkupS3rdNf6nv/W3+dW/Inq3zmYfsRwc4XgqliyNjqJAQB2lgIQ0V1DJUq7haLOkwG/+ySWffH5GIIlUkxAcPSBNA9a3ZR/fxg4ffnhOnEriVkMlSQU5cSZJS5v1bY5DFU31UG0xn0EQQuaTSgLYxUojDH0M1UI1XxMZxxRsnfFkQOALmdU0Dd/zUc3XqSSVStkgn+ugKL68d4CiKCwWS3o9IVf1WsjRC5tM9lxpqkeWGQRxQIzIXJWKCvNp6Zr7KwUSJSEIA8oF8bzNnQb/6o99cnaeNJUoeNmS8eTqWk8VKxk//ZMTdL2NqksY9Dhi6S8oFgwePxdnT/88IW93SDPZk4SCqtjUqksabbGeT18sQNnH85xrqO4ky9AV/Zpo+OmTDMVYI82Ca1TMOMuIwxhnfsDJsZgDRSuhZyqqLHFEyQhDhZwR05YIu0efe2BopGl8XaGwcn30Qsbf+Ov/BQBPvv2Aby5CavXiNYR0GhgskwmlvIYjz8z5NMTK+ZBZcu0gTBJ8f4AnM+yjSQ5VV0gTn0xmytByuE7ASq7LaNBhOjXRVBtFkX1ZqoHvZUTh7FovTZYamWaiy97e0PPIsowkDVBNkWl9eXjAYAZhnF2jg9m6QZpmeDKDUa6bON6cRGkj27dIVY3IyYhjBzUWujKfUzieqoJUG7AKBmkSY9ll+oMjAN57932+//4v8d5b0TV08+XVkp/57i+jygxttVLgd//lP6dWkjwBQDx12d+9SSSRK+/ub3M4GbPW6zDsP5brt4kzX6IpFWoFSbg+PKPTfXidwYiCMRFTrq7yVAsi+zMZL+h19slXTaZHUldaFaIkRo9ltrKwhqe9olTOM59LaouCxWb9FoPREU4i1ubqMKVdbpJJFNWjy5/Sn0353oO/Sl9WnIRejlI5h+MtiFIhU9vb9/CCJZki9PngdIrRatMpVPn2WFQs6EaONHDw9TprW8J28Fcez84OrrO/Z1efEsUx9foupwcyi1K0mM6fUzDrNGo7ANQaKxajIb01saCzxYBOs4Iam6SmJIqtL3hx+IqNjkLqi7Mv8ivs9RacnYnz37PLKEqZweiKkuz1S/G47AfkShWQWeLVaoYfH1KV3E2hNydvZAwvFhQl7UGUwWA+oVLtcHQk5r2xrqMs5tQkR2irvcarj/rs32ug6GJNtazEbBEwmc3J5SVUf87ganhGuSi+b3hVoFatoDDl6lxk79arTXK5AqVSjcVQ6OZqucXezTv8seTQmg/HvHXnJp4fMeuLNb3feUC2ERFJXaZGGsvlJeNBwt66yPIvpi8wtCZ5w6LYE3bQ8nJFvLRo9EQVx2KWsBqvCAPvGhMgTRO+ffEt7WKNaCXkc+mGFKspXZmhHY8DRoMjusUNmg1ZlbJKeHX4EWcHBufHQpc8fH+Xk8MFexJB+Pe++D2M+hpOPGD0Qpblt9bpVSzyVsrxmeCdiyOVpWLxzvd/Wc7vktHLc/KlFhv3xfO+/ugVtpLj9oM8H30u3vPKk2TJf8b4C+FcaaqCmoiUrrOIyLIFilq6LifwgzlNK8fx02fX9eoX5w4aoShtQLDI9/uX1Coqoex3+OSTJ8xnGbmciSFLhYo
lm8hXqJVFL8Nick5OT3FWIZOxmKxuZ53l0kHRNUzZkJwqsHRcTo6Ekt7eLVEoVgmdFmcSOrRYqmJZJheXx6iqMHLXu/cYT8/IyxK5crWMXTRYLSIqEj75m6+fYNkJ1WqVSPanlEs9XG/O3q4o3XP8c1wnptPeQZEG0eXlMx49+D5nFwcs5kcA5CyFm3t3OTkUZVeWnVIwbuMtp5AKg29v5xH9y3MUVZJ1GgmWYTMZRGjG6xKSFVlcw8sGBN5rfhOHIJ9SkFDlK8cniysMxmfs7ohv+fbzQ27ffESna5C44l5X/VNqFQNDll2lvka5ajAaLK/L7bZ3S6Bd0Gv1OOoLgrkwzONHY/qnQmGUShoaGYZuYhnCWW7ZLXZaNa6m5ziSb6xWzdjftRkLvc3J4Qm5ShPFMqnVhbJT8dna6DAdzyjnZXlPbPH2g/dx5GHiLHwwx4RximUJo6VS7DAcvUSLu6TBjphzO0JVE2YrIT/FYomriwWVSusaAvhq8BSCt9BzZeJUKMmj42fs327iy96i+cxFzy8ZTJ6gG2JtKrqN54Tcv/kOJ2dfAuCuMlI7xlBl6edMZevuTSx1xJEs/dx8cIPIn+N5Hs2a6FP8K798E8d/xrPn38hv6eIuZqiFc/J52SxrgDu5YLNzk/FKyPqX33xOp7nHcCb248UgYmad0ao22bv5ms05YDg45+TinG5FKLLb+/fpD07wPaGkLeq4ccjKOyMvD4XN1gZJmLFwROlZ5Cjs3bjJbJawWoh5aq3V+PTLT8iZKa5E0t+/1cPzZ4wk0ECz1sSwYibTi+s+LCsvZDIOXVpN8U6ea1CudsiZYs5zhkEcRRTtIifSUWx3WqymCe5CQ5W9BVEUsxyNsWXzLLpLEk4pWWWiUML7TnV6jSb1hk3/QBgI48krtrfvcCFr/WdxSCmnsxzoXHwjS13WN3H8M+rJLTIJ/VwqahwfPKHcFt9ycP4FncoPadffQTWFTKlalSRMCJKUd++KEoe/+7f+O27cvsCVhly9vk3Rfg/f/xH5mjD+g0jCnSvZNRQ7SKJUaSiuranU9jsMPhwSyV6fjIQUncmFx84viYCLplosptk1caypFkiUmKJd4sEDMf/9Ux+UPIahECODYYFKyTIolyX4QKKiJAVIQlIJMR66KT/3/buE/gJZXYcf+ZiaTqC48j4atWqVV8+v0C3xvCgOsAyTJPavexAb9TJP/QjXFzJl6RGJluKsMjpNIfvL5VMWi851r5GqqxiGSpRk1CvCGDg+fk7k3SNRY+JI6rNEYTFfsrst9MZidcnKTdDzGl7wmicph6ZqDOcDXOkoGvoN/CS9dhgUxURRIhQmDPqyfDJqs3R8Wrk6QSR7SiIDzx2z1hXlNoO0wOKZSyGn/SmghWbhLS+pP1SuyY6dIEY11WvgDV1XifyIrYbNwwfiXP3i60OWQYytK2iqdGBVi6vLIz7+E1Fquljk0VnHT+fXDljeVpgOXKptm9h7DSRTRNOVawoODI001qjWNV6dyFKe1T0wYvQkvSZlRYlIE4Pb90SAYL29x3z+BbqlCrASIAxT4tSjXMkYjcW59u2zEYmiE8XSsch8cmaNYKqjykb/+WLIwfkUjHVCiY6lZCtMzcCXpUrddhsynZevbMy8OI99LyaIliRpRNUSfzs9Pef8IiUvDego8skZBVRVw5Yl2z/8xX8PJc0DYxxJdpoviXLJ+JqD1Obv/J1/wN/8j3/w+g9oXZswnbKS4E6LMKK3t0lRz2NJcI5kknH//nc5P50yngg9eGNng8uzQyZjIXfrGy1cV0FRDSLZz71ebZOGMSevnpHJ8ylwDfY2Nzg4FaVOjUqbcrVI4IfUWuJ5paLOdPGcgpUjU2QbQG7GeLViRzrn5XaZn93fRLUPmZ+8LlFr43kZG+u7zJaCZiVNAtwgIZH6rt3soJkZzuiIhQTVsKwKuVqLiX/Ik2+EPbNWv0Gqx5gS3KGYt+h2Njm9/BZVlXQXc5Wryzl6zqEhKYm2d28Q5lVeyO9LgpBSPUSzoVsXzkAcFMmpChcnU3oSWEipGMRRke2W6Ou7XF1ydvqUbmefdlXSiTgevWqdpb5iNJX0E5u7fPnkUxIJUT8bHmOVCthqnoIQH0qllPe/9+ucnh9BXbyon5WpV7oocs8skxz33oVvvvmadlM4XDmzRa3e4MXxS3KyPFNNdO7ffoQmbbr+yYzpdIaGTqMmnI04XFKpFlCJKRVl6XPm8unHX3HzhgDVqpYu8cOQolFkc38HAF33uZzPyFZCpuv1PK4fU7R9lrH43kqrzehqStGs0pXw7Mp4AGt1Epk4mboTrPmcVMmhyP7KyA+JFYUZQxZ9IZ+GZrBwfZiLc8Cu2Pzc3f+EwPmKD/5AOND//q9/j48//hRntiAn5WUy0ahUdb56IZ0mo0hGzNxZMl6Jc6fa3eJyMKNZtslCMcf+co4/XTCS7SCZprDWqrNYZSyW4vt2bmyTZAt+5w8fM5dlstX6n+8+/YVwrtIUWk3RlHZ2dkKmZoyGC+Yyy7B/c53TkwsqNZNIIoqouodpGSwXQpiKhRopBuVKntCX9fiKSf2GxWzmU5KOzMvHY5J0QrUuDrhaaYP51EchIZRGROA1MLUiiqpRKonD/2JwzPgKbt8UmYhyTUSAnJVLrSw24mx5RJaO2bmxh+eKxQzDEDtXZjIW7+25R8wXA1rNDdLsNQnlhLXNDbLE4vBIKJ9ur4aCyYO7PwTg88++5HT0h5Sr59ckl7dvv8Xn3/weoNCQHCSe5+A4Ax7cFfXOCjl836NQtDk9FAasUz6HLMSRlmqjrbNYHBL6ButrIuM1nc6pVIpk6TrLSB6EQUAQHmFIslwja+B5A37uO7/CaHYEwNZeFcdNKCxD3KEQzE6zimY4OBJZDgWIdNa7N5lMhIE5/9pD00oszi6p1IXiVtHY7/wqL5+La6p1j5yts5p7dCWohp4lLGYjGq0mKEJr1SsFLo5PUXVxjeIWMCtFpsPo+gCv19roWo7VKqQo1891F5ycfU1Dohwpho/jjmg1e9RKsmFYy7i8itnbbzGV/Di1ao5SYZ/RTNSBD0dnpIlFp32bwVCs543NO9hqgSBRrqOKlg2eYzC6krwp1TrlapUgCFhJUsjFJKCUK9A/mVG0xSFgqRqGFfKaZjPUTFSqmIZPpyd+t1w6pHGA4zo8ffoHALz14Bcx9V0y2RycqRm1+hozt09ONr1OJyNq+R6T0YhIGsJ3HnyPZ6+O2K5Jfgc7wGfBcJGjJzOt56cj6rpBaipkCAfvvB8xn4bkZN+ZXSrgxSn7u5u8OBTycnGWQlpg/6Yw1pulGkvXoVWrcnYher48t8ut/S3mE49K7nUWc4lm5nBXkqB0p87Ku0JVXQxp/JiWOAxMTeXouThU681dTNOgoJTlO+Up2XWmswEm4m/3dt5nYPb57OvPuX9bZJcq1R7L1QRDl6SCUZ3NGxucXx6IkDzgEZIm0D/yubEp9lH/PCZausSyMbaiGYRxQLFoEGdinsaTkFJuEy8IGQyFoVjIF8nnY3xpUNv2Pqb2CC
NvkkpOG8ed43srtm8+5B/9vf9NfJ/5lDu3b5EZQvazdMXzb84pFvNoktRbNzSyTDhWr3vWQEVRNGIZua7aebx+iDJTMRUhr1m4Rqwn6JGHqYqD7/TsisCP0QxJlhup6JqBtxxeZ7PdVY040UR/qyRzjaIcsTIhDlpSXhMMpUQSR+h5IYtKFlEqZoxHc6L4NWG3SpaBIQEt4nCOaRbJ50ssL4WBUqqUyeIMz/O4ufaa4NlgOr1Ck5mzOI5RjTymaaFmYj9US21OldK1w6mpkGYqcehzdirAHFQtJVN0lBSSWDjeWZxRsG0CX0aIu9/HME5J0vT6Xpmi4C5XdLo2nnTwojiHqv9rTkWm4ixX3NnvgQzAOF6MajnEcZHXVfyeF2LnPLJE6MnhIMLOFzGUBEWSO4dBiGUplMo6oUSAdL0QQwNN6knDNAnCGbf327x6JtBCF8sMM28QpyHI/pgwjKiWYipV8fzRtIRGBJlPrMpsaKaghh65gosjg5uKaqLqGabyp4Qwi1VEq2Dz3ffF+fT4qw7HgyWZp2JZEvk3TTEMDaSxPB5aKIqNpoO7kISkhs18POXGfoW27Ct/+c+PUNWH1zaCbigkWYySqbz7tgg+VCtl+pdzWr0evgRcyrSUGJ1SUQZ4Vx6rBVi5Iol0TfP5Anlb4fRoyp4M8P7ar73PV1/9iCCRTlPBJlNTBoMB770rAEk6vXUuzwfoRpGiPI/iaEroJ9fZwj/8ye/RP3/FT367Bv+D+JbO+m0uzk+xVIly6qoMA5OVPqMiUZRTGxZ+gF4wubshjONy0eLk8Ih1yU25Gl9RyNVIbA1N6jeYMJlfsX9rl/lcBJRMVaHZbLGcChvBVOakTsLwfMx33vslMZ+5OQdfx+zuF/AlB9n5aUr3Vnbdt9zbeQDhhCitUq2JdXh29JS9nV1eHXxOFop3r9Y7ZMkSXYJsqXmD2B2wVEvcuSv27LOnCxLVoFVukr8vebX0Fl6cEHpCDg7PnxHFRbZ2HnJ2LuTz9OiU7d4esRKB7M0MnDGXpyf06uKsXwUjvNWSd/ZLfPt/C+CNt7//l7k0puTMGldXRwAY1SY4Ho7U+2pY5eF9C9tcZ21NBHjPn50zuVyRVS+4OhXfbDld1KzBjqzMefUqwKrbBOGINJMgN5nF+eUlF5djqpYQ4sAIyLSMwVDosqG74AcPv8fGRky9JNa0379kvoLeWovlXOiutd5d1EylfyJ57go6x8fPaNb2qZbFuX3/0Toff/Ih81mfVBH689H9d4m0Vwym4ndeEKNZC9K4xMIV+ixSMrxQoygz7LPBghvbW1QNF1fyx54dzqm1CozGQ3IN2fvmzil0CwxlL/oySblR7fDy+IKcKYmp7QL5ZovV5BhvJc6GuT/C9FMMReyPi6OEp588pVyOKDeEU3h8fsDc9dnc3+H5leiDvnfLYDp8jt6UoFBWA1XV8GYV3v2uSHbMH3/BoRdx5RXZkNVsK9+nXKvjecIOS9McSbOKVnS5uS70VKtV59MPfp9H+13smrB/L+bX0ZF/4/gL4lyphNKIuHl7g+FVDk01WZcpxyT1KBWr2PY6l1fS+ehU2dzcRFXERF5cXLBwIoGcgxAmTQ8oForYxhojKaw3tncZTl6Rs8RGXEwdCgUTVbMF4S8wnozI2yX29+6wXAoH5OaNe8xnDqkEW+ifHeO5CXduPWAmketMXSNOM9xVRkWmddPMYTCcsLEmMmUnpwf01nv4XoyzEp7yrf3vMJ2PUI0J3Q0h9IvZFMdZ8k/+2f8MQBh5/PwvfI/+2YKroUCzm877vPMzW3z9+RDNFEqkU9nm/PwUV0ZMAzeiUFDI2za5goQv9pc0Wz0OjyWa2mSBZdmkmc9UbtZatU3/bEzOTlEyMS/7u++QZCvGA7HJhpPPWF9f57J/xdm5BJMIpph6i1pFpyTR+mqVNb58/Mfcu3tH/u6KdmcTzw3Z2hSOjG6tWAYHDC41FkPxO2cVcvzy/6VRE9EZk3fJAh9DLTFdiEhWt1tifBSxihdYRSHOR8cT2r0cQSw2S2/tFvlKgdn8grUN4aBoao4Pf/oJb73z6BpAI9GOKZSa9DrinQ5OP6dQrKIZIUEk5Gc6m1HMtQhdnUpFzMvzo1NmhkVZrrmhzPCDEYvRAaaE67w4PYHkJY8e/iKzsTiwLWPB8YsR9ZpwAhTNxXF0JqM5lmx+NgyDYlklTiKq5S0pn32CYIwvSX09b8Rnn15RbeTp9IRhOhlNKeaLhNGYMBKK89vHH6AbKbYtS8EUi3K5jjfTCGQgI28rGJbJ7OrkGrWt03mHyeJLGhL5aLZyaK53WWgJ52NZymO0ieIj2oU19Lw0rvQId2qih+J386sRCREvn6xor4s59idLbDtiOhTyGscLpuMhrtWmJ5vQ53OXqTNnZ+0egQQR8PwcuqGQyUNe1VJMtcxq5bDREc8LfAkTnevg5MQ7HZ+8pLtR5mQsjNfcqk6rUWZ4mdCRZKTPvx1iqi4Pbt9nOhN7LXBVUAxCWX6XhiF6r8b5yYRbd0VgyFM8Dp58TbW+TWdDGFK93jqF3CaeDNyc9Z9Trio0yptUTTkH0Qo0myi9oC7Lnm3LAN3neCAOgG7rVyiWyyh6ykJ+VxQk7K7v8/f/9t9lvyf01MNHtzi8gGpXEmEPnnF8kqdcKxPI8lPLzJHEEUkUo0knKUMQs0aRuKZQqrLZbHF88JNrA8iNZ5hZjlUc0+yJtZnOLnBXK+yCOFBD1cdbhuTMgP0dYcicH7tkCiRRgG7JgICSkM/HzKQDlmQ5dMUlS1P8QKxppkZUqyknz2Jimc3STAXX8a9LvzTdYzSeMxrUKVXKUoZCstCAVCGIRcP+k691LH2H8PV90gIaNoE7I0Vc4y3rOE5GqSgdxTDANirMp5f8B39JGEkaES/ORxTsOgU5L3HsMxk7PLq7I+Tnm0MB2a8qZNprEAqFmRPw8GGVnESqzVINXVcIXLFW+YKN547odS1KJUkiPnFQCw0cb4RpvQamKWLEMZsdUdXw6SdnoPukqUEqQT1UDeJkRanS5fBQrGmmFYmCmJwENomyFE1NMBSHJBayryoacRJhGmBK4uT5fMFut8noVBjew6lBs2wTxzFJKJ6nGDZ+7KHnfeKlDI5ZGUmSEKZCR2iKipoo9Dp59FTso4v+CD1fhUghkMGcJE7R04TvyGzaBx++JKYImXpd0gwQ+RH7N/ZpV8X36MYGYZCRl86H7zpYuRRV9wjjI/Etk210tYDvrsjkfOoFhcD1qUi9FSQ+abJOQkIos5OGbhCEoOsms6VwJP74w4TlykSRwaMwjDHNHI674NEjQcVydXVFmETYukGkiDPSyBcJUx/TFnvmH/3j/5N8UefrJ0+vv+1s9C1qvkbVFPtlZ7vHydRnvpgROsJWWsQxSeBjaFVUCfRh6iXsQo68rGc0DYvZ9IrLC4V92Yg/H6d0NjpM/TE3bwnj8fPPP6Tje3z3tpCpl88PBCplzuHglQhMqWaZztYGWBaX58IZf/etn+FF/zNCCSIwmcwgGJJvqNdothuN24wGfeIM1
m8WpUxN0HJVSgUZJPEC6s0N2jsl/IU4a3dvl7i6OqVWsKlJAKLT80tOTs7odcQ5XuvYLIILXh75VGQAdGevyvDqOXquwMQRumQ7X2WtPMKXAZBSMU/banH04y8pReJbnnzyzyi9812MWGE2ErKOvySYjal3xbxstm5Trxqc9R0mY2G/Ld0hlUKPfK7FLDiUshDR6zR58lhAnL98fMLt977LzmaZx48l8jBVFstjOvUOs4n45kLeomQVqO+JoF7+7JgPfusPeP+H36d/LILc/ZNL9KKJ4/qs9USwP0lmmNYabbnGgb9ga/s2apKn1RFz/vHnPyVVcmimSiADPJ8//jE5o8xoJOzqd976AWkmbKXVXALmxBf4yw6774ry/o8+/QSzsEQNLTRdvHe3qjGbOShmwrEEQEr8FU8/fkJDgtcsZjGjSQ6jmGdrU8zn0y+/RLPOiVYBOxL1b3o5Y32nxtGZKH+NHJu1tQZWPo8uyy7PT68oFqos3YR3vvPzYt1vNNje2GS0EOv5xRevqJdLmHWL01NJiVNRqRdhvtBR5Lx4zoKBf8Z8KvTPg2YTw5wzH4KaCNl/8dkSq6zg6DaOLAuJX1NM/BnjDaDFm/FmvBlvxpvxZrwZb8ab8Wa8GW/Gv4Pxb81cKYqyCfzvQAcR4Pxfsiz7+4qi/PfAfw4M5aX/bZZlvyl/898A/ymihfO/zLLst/68Z6iKynL1uvxFod4oMp24eJ4sjXAqZW7UAAAgAElEQVRnlEplyqXGNefSaulx+HKK44kMRr1eZzr20UwHRUZM2s0bELfIF1KCC/E7L3QoFmq0miIN6xZc7LzFZOQQyBpMJVmweaNJGC5xHPEORatCs7TNeCoiE83yGknJ5KJ/xGAgyhfeevsei2UH30vRVQm9vkyxjDqHR+I9O50mgR9jaDrLhYiKqUaf5WqBZRaxTQkRHVxSKKnc2BPRgpfPBgyHY1AjSmWR6TjpH2A9zuGFcy4lyWarkeIFPk+/lSUyNZ+DoyU3djdBE3M8HGWcnH+JZomI4uXJBEurE0cqmi6iMbVGHVXJEScuW5si6xb6OklWJAjFNevdu8S+wsn0glZbiNJybtBpbJBoY1apJI9zi9y+s4Wmiujaje1H/OEHv0mntc9y+TrSmhBmVdbWehw+Ee9eLpus9R5Sl0AcfrAkXyxwcnbK+oaI+k+Gc7Z3bjJfeQSy1r67m6NQ1hhORYSh0bMIw4BmuczBiSjd63Z63Njrcnz8Ia2WiMK78zn56pyB/F0+n2c2mzCafsFWR5R5VApd+uevGE8GKJI/6u697/Hq+DnPZa3vo7sPceYFeq0uw5WIbqwWr3j7wQ948fIJrwMeqpWxvd1hciWja1vbvHj5mG63SyI5kDrtCpP5gGqpxvGJSK83yhlJVMKXEZRGSSUIPbJohhaLaOh6u8rhwSm1aovCnsgEzmdL7LxBTpbLFfIGSujTawTXmdy506Z/cczbt/eYHIts0rD/Ee+9/QOG34r0++ZeizAoMx98TKUmok1aLYeqtchlCp4syz05GtOub9NrivmNMwsnnLEMc8xGsnkq8VnMZzhzsS57NzbptbdwvRQjE3tof2uPp6+ekMYe21si2nR8NOVyMKYsSyXchUq7uc1qJ6BYEZlrZy7k6HT0FaYsQ2h262R46KaI8IXAi9PHGEqNliH2mscJK2fFTvsu3kyWjHpDUu0UdynWvGJvs3CPaLUanJ3JUr6Cxf7ddzi9fEUuL7KMhfKK6WDKoC/eZWOjCVkOJcywc5LENNIJEofnh19RltFs27AYDWwK/Bog+se8yEdLbBJZ577eWeNHP/oNLD7j0Xf+MgDLpcGdW/DFZx8C8M47v8Yf//gDdFMnkRlay3gNI+H/adkaGVmWXGeubt/sspo6LFcBmiYiwloUE3kRxZJBXfIGPXvu4fs+yPKpTFkRxinVXEggdWcUakSxi21pJKkEU3FX3Pv+XZyl1MvLHKV6BT8ZkPKauH3FZO7grjRkBRdWloGqEAWyb8mMefutO/zoR4dEEs5YU2Lyik4c+nRbYk2X8wMyVQHZp2hoObJ0xWo+5rvvC1LmQF/iuSF27jWZa0oYJNimRVvCCR88u8AwKsRZiJKI59k5C00JuHdP6KSnT38bhZuYVoNQ9ggFgUcaq+ztb3AiKRzCOMZUNUxZLpakPpnioml5Zlci8xAHBkoOjCTj9Vu5fsBO12A4EHp4Pg1JjRxxpAjuLoA0o1LKczkcMZxIYmitialHxJLwOTMVXG9Bmii4S5nNiqBYMohCB03234VJTLNepaCJ3x2NC4SqR5p55CXvVByGpNYCx1dYueJ5iSrIdzMJoKHpGkkyx3Of8vILscZl+12GkYtuAvL+kRtTzhfYksBN/yrwSBhh6uZ1qW+apqhqiG2qpEuxRxeTb9Ct1TVMv6qbKORJE43eWl5eE7Gah9QaBqEUqsDI8MMQwxDR7Zt37/PxH6U4TkChLL4lcFOyzCTTZvzsD0VFzVef/z5u8JCi7EGOoojxYMTdu7evQYQmkwnlch0/dihoQi/5zpz1dpUvPxcZjD/6yW+zvbnN5fklvlzjklXDCWKGjiSOZoJu2hi5Ct2akDv96JjOnTWODq8Y9IWuzhk3yOVzxLLl4Gw4pVKtsdHUmV8K2ei2u0ydAUrOQM/E2VowqpwcPyPbEv++Upbk4zxGeZv+QACwdHu7kOgMr1z27oky7pcHn2CrOR78rMjsPnt+zP79d/nyiw/QVGFjbd6qwsjF1C0yX1bmjA5RTI+yLHXv7dZxv1U5/eZz5pq4ZntnH109wSpW6Z8J+6Zg19i5OWQhKybKlS6BsSJMrvj6pbDNbm+/S2ujTDi22bsjsi+r6QW1ap2rSHInVar47grXrlJ4+B8CUF13cUcucTAnXxBnlmGv88GTA4KvfweAvbsLHu7eolIuYEtMAAKXQOuTxV3eevA98Z41hfnUZ6sh1rx2K+DJk4/Ib+3SqQodn8ubRMMOhDG5/Ov+d43j0zN0Q8hUp7nGzq+UOT58gS05AG7sNKnVy0QJeL6w6XrrZT794id0GyLjZZgKKQn1Vp5vn4lz+/JqxsbGLaxCQEPav7oZELgKwUro+IPn5xSKJvliG0WXFDx0aTZrvDgV2a3NnW0WXkgaB/SK4jx2koye3mHCOZOF7GU0oJNvkMZizsuFiMPzz2i3usw9sdfyuQaGMmZ7o4UjYfnbnYzEneDGElCrV8ENwNdXhIHI3qVOnVLzJU8fr7h/T5T8fvnZCc1yjkJLnO2lQge7pJPmcrQLQk+Z6ITzK3BHzDxxhkR2iKkbbG5LIK6iwSBYcnh8TlUCZu3fuMXV6BUlPYdeEGu11RNz/WeN/z9lgTHwX2dZ9pmiKCXgU0VRfkf+v7+XZdn/+K9frCjKPeBvAPeBNeB3FUW5lb1mp/w3vYShEb82OFMTd7UiclXWZZlO4NWZT13MdAbycNZQyOfOqMo68EQfkDOqVAoNPEUIXP/8Fb0NyKwKliZx7scuCRHduuScKBQ4Ojyn276BL3kndBXGlyPQIEUoc4wO
g9kLglAoMbQSw+EZVt6mtyYUTb8/w7BSKtUGI4ny5fsuqhrSaIimV8MqouoK7WaH5UJcM50c0O7WMY1tRlOR0px7x3TNe5wfSxQuO0VXcizcKaYsFbi9f5vlLEXBxJE9OtPBEbOpQ6Eo+49aGyhZgcuziIJkUVc1kyzJUcoLA+no1QmmAbXKOpVqTq5DmfFszN2bP8vhS2H4tlo1nj4+ZXdfKJ7I05mvLoGU8Uz42J32BmQKBWuNgeT/MCsGVwOuyVYtt0Sluk9mTQhl/bFtlkhdWK4mtBqybEZxWS4cDE2iM5UiRqMBmppydSEcN9OyOT1/yvr2HWyJEhm5lzgrnUwSCCa+w9nxkJt3OuRlz87ZybfsbrxLQcuTyWbHdsFgsvTRpUypXsTy6AW7N9+jUxKG03xyTLpyqTUbPHxHNCA/fv6CLE5Zb4taXG+VMBqvWIaHFGzxTmutdwidGpbWZ3NblpF5DleXh6x1hPx4wZQMFZ+AQG6Xk8df0ij0cKYjNiVXQ/9iiF1IUKUxgm4QxwbeZI6VCWNVzTtsNPMEoXqNzKNrGbPlFdsNcaCNRmOS9JiafYPhSjrCSg4lVRjOICcZyy/7J6SXr6hKB3o+WTFYXNAobTPzRYlMhT0G8wFTNWBjTRgfdqmCUfFZKn25fg2uXnhsrW1gLoSiztstpsGctx6+D4DiGBQrVb56+SHupfg+xVDZ3mvz9OtXZIrYR+eTKXY+j6EKx9GfR3jmAXGgs7wSjowpgUrUwEaTZLKNaovzuUbzdU/CKGCWRVSLFpYvyxeyCMNoEU4TZpLcsVQKyNnrtMvi+fNFzNODr9CUMpWqUNwHZ4fc6BZZ22oROJIAuVpC1RfcuSvmPJz5BKlLqZhHV8QB51gzXhz8Id+7/R62LHs4GegcDaoUyxI572xAqVggVSbXfZG/+U9/i8HRP+Nv/mc/x/ETAapx/zv3WIzOkS1XXAUO3kojZ2oQy5K4Ug7F1XGcFa/hDuJUHMhZKvRGrw6ZmrByDHJl2SeVRGR+jlbJQssk71RYJI77OJ4EbrDaBP6CarfKaiX+Nh7kyOXzBGkMSOS4LKakzXAD8XzNjljhoGopumxQjpgzD8rMZgmK3EeKH2JYBgsJtrJeMvGdCF+zsGS/EX6Gmw9J4oyNdRnkCrd4eZ5gSK62IFPI2QZ6ZpLI5vGcsoFpBySSPFPxFRI9IjF8XNmLMndClNgnIrkur/PDhHajzbYsR33+dY3Q0lHiGCsTe89Pbeplhyg8wBkLmdXNHEkSoMhysUSro2Yuy9kzrJrYQ5FqUCYjTDJUQ8hUEiWYms/FpShnXER5dC1GixWUvLjXdOBze7tGrzrl6EDMeRAHqKRIdUA4V+g0KlipwwfPhQGU5R4SuQsSy7pGriSNKOdzyAorvCQlnyrEUZ5I9vGlqcV6XiM3uSRMxfmgKglxGKJL3skkVgW4zFqJxaU4r0ajmCw1iYwFpi/BIzINrXCOMxPG1Xg1xbQqaFFMakmeq1VKwdDYXS/w9EgQ0XrhCs3aRJElT7oZ43gDNpt5nP6xlEUf08iItfTa6VMTi5U74d6ueO90GTA8V4iNGnEoZD8jI0pVcprFXlvoqlVP59PPXpLPZG8vOqvVnF5ng8VcfJ+iaIShSy5nk0qOTC1TaFbb/MN/8F+Jb0kV/CglfQ0gAtTMMheTFT+4K4OrJ1+hpzpheEl/JeZFLxU47s+IVJ2c1BNRdsXjz4es7YgzxS4XSbI5vmtz97ZwVkfTgPOzEwpVG+dEOE6d9TqJ4fOHH4newv21NVBDqrUiqCK4aig52vUdhosjsuh14LKBbdtcygDz9k6Jo9MJ+VaDZ4ciGDh6PGd7Y5fQ83CmQk/d2t7gp8c/xZ6IOW84Fu7ljErlAdWWuNfC79NuNwiWPlEo+U3DNTqd79LYkg60EbNYzZjOYnISDVW1Y86OHSpFg3xTBEW7ayGnZ19SyUsgMy+hVeqy+Z0bzCQ3XJSEnF3MuP/gEUfHXwFQVcfstd+htJ3KaxYYSUjswOlK6Amz+YBf/vn/iJ/8yf/KM2m/FUYWRt6AlZynVp29gs/Lo3PuPRR2QrBcsrdb5fBqyWQi+7cTWMQRW1VpU54eoJVbKGgMJOfh7k6XwXjActpnc0MEhgbDKdVSnXxB2FNoIdPpHMVNURxZol5t0KrUyOdr9KW91n+2ZOtm/dreKJQSXgxeoJ+UefCWKN1bnAU0OxaqDF4vl0O8wGU0H5OawqneqNe5cAZMRyt60qF0MfFij1gCojW7uwwuBixGPllO/C2gwVapx+xsRq4l3n3pphSKLe7IAPp4Maa6ruCvEmxpLy6jI54/W5Ar5zk8FkGKKEqYBZvEl0KX1WolDk+H7O1sM7wSstHr5Fm5V9Srm5hF8Z6Hr75mvVVnJfEbri6+5m6vyc72tgA1A9RCirqscmv3BqOZWOMsk8T2f8b4tzpXWZZdABfyv5eKojwB1v+cn/w14P/IsiwADhVFeQl8D/jwz3kIyEPo6PCCu3fXaTbs60b/UslmPPH56LOv+f4vCNSWxm2Lq/4Z+ZyIWiXKJonxgsHSIQ5lbbNuoSoGj794fl2jmwUJUeRwNRWbtVbtsLnTw1mlnF8JBby2VoNEBwK6HXH/0XhAEsdUa/JA9R2azS6mqbL0RZ9Sq9Pl+PCS6XTI+rpsnFxaLJYnXF2K59XbCZoZMxqMcWT9bxJoTKcqcXxFvSF7Luw7jId9NtZEn1KGhbsKUNIKWSCU69fffkS1lme+mpH6Yimz1GRwkfH+z4n7PP1myo0bN2h3SgyHQhhW04jx8iWrlfDK29W3cd0VN9Z/lm++/RSAbqdCu95iOl3SqAlDdD4bY9vadabu0f0fEh+lqMoZW+u/AIDjrDg4/pq3Hn2HYk6s6Ww0Jp8rEPsS2VHxqRYqRKlLvSa+5eDFlFwBspzOeCLeM0lX1FsVFo5Yz8uhQ76YMRmlbO2K382mPqqVkqlzqkVxePSnEXHi0KpJtKm1Hr5TZTUJr2HCd9bexnXmGGoNTXmN3tVkveCSvY6uh0u+8/AdyvUeH38uDp23HjzEyoEflfjiCzFXl9MBlWqenGyyP+8fUGsVCNMZi5VQkqbuUK6OsIoGU0lsqKBRrusCGQcIxx57vTXGowG+jOjf7m7y6vAlN261uZIZtUKhRkZITkZj0jRi5Y9p1fdAgg/4UYRf6DFyLq6jYrodsdu99ZqHE1NJQKnRKhdYeGLu5u6M9bU8qRtTr0vDtFDn7LLP3dvCqD99fkqESmWzSewKpTW+nFKrNzE0i1Bm61rdOlaxSf/yYwC2zQau0ydTu3S2RdRndBazu1lnMpRIQYpO/8UZeavCnftC9r/44gs65Qoba11iXxhEW4UN9u7v8M0LURNd6RX5+vELbuzu484lEa40Vh69/T6xRAJbRgGsLlEcoVh1Y8bDtZ/HyuUZXwqlWSq2aa93iKZjWpIgeLk4JVwW0Q2RLdDzGqWqwdVpSsMQuqWzYTG4OiMXqdxpCAd6fLii2coIZULh6cUJ6+s1JsMpy1QY7JkV8jP736X
b6vLJt+LdPztQKNsm00DshdCDwaLP1tpt/uk//A1xM/tjfvWvbvBP/vH/w1/7678u5s+PmJ3MuNUQkcgf//6neKqFnk8wpeOUEWOaKqqqoSLRAtUMhRTLFHu2VI/44ptvccOE/J8y6BL5CkY1ptUSMjWZXhFGhiSIBZQpvpNRLlfI5cU+ylk6XpihoIJEbcvnbH7uF/4Sv/F/iWMhiQPUTEdXKsyl7K93bXY6a3zyR2ekiiR4NRTCOCGQ69m+Ucf3QwIvwjZkUCaLUTIThZii7F26PPZIEhNTOq9kCo7v0GjZ3H0o1u93f/+MyM9RlMiAqqng+SEaGaahyXtb+KFFqZjiywytpmZMhsc8eSacSdetQmaRpB6ZhC+eTubsV6tstYu80sX+iJIQk/QaoTEMV+hKylv33+Xpqeyri+YoSkYYReimjMKv5uzd6lx/i+sEFCwNXU0IpWMauAndjYCcVSIJxXW5nAWxS5JIqPlUxXNmlMsNDKmrs1UElkaWJWgyK5WEHsvFgCtZZaCpXVQVFBLQxL3cVUjmLcEwUVOhm0NFQdVVMulcRW5ArWyzt9lhIvudfvp0hmXniPwQTRfyGYUZvc0Gim5L2U8xdYijECR6XhxH1Bp1eltNfvIHIjKfZCV07U/RJkktlMwjizPefVsEwn7843PcGKpxhqq8hsReoWYG1aIwQkt2l0ztkyQucSR7yvSY2IspFjxOzz8AYDAISFGus73z2Yhut0ulWqXfF3u71WpQMPMEUYhhiu9pNZp8/tln/O7v/ra8pkUQeJTLRRyE/r5yYyL/nC8PxHrWcjb19SoHH39NMy/noGwyOx7QatQ4lXDbjdYDSs2A0UAirTZq6HYdZTbiG0m2ato61bxNodKisyUc/Q8++SmYCRtdoRfXt7ukvoHnuNy9/1CuS4ptWWh6jbOBMM57vU28lUNOOr1Pnx1zdHTJ5s0ua+s7Ql7mNskqIZcvUuuIPTroX/Ko9328ubjPy4MZ93/4iNFwiWUIh2s2vCQ2bXJGHiWSoDPVBMcbY5oSifgiwrRt7t3p4TlirYbDQ+qNMtVqiSdHYq0uDl9SK7SwG7J/uxRzcDzi4YObjPpC71t2ynvfeUSShSxcEWiv15vc+45OEsq+6LhDeXOdwfA5V8+Fo+is8mzsfZf27k0Of0sQ0Zr7BTJdu3aybc3Ap0W5M2Mse+YCT2eeXhHOS9za68rvOUefFAgkPHyzs8HBySsadh1N6s6D0wvceEGlXebJYxGkVLMijVqDw2MR7CyWmqAXiTWVQBf6VDdaZDmNSE3QpTNVKabMxn0qZbFny9Uil9OE23vrjOfCzqtZJlEW4IVCpg5eHrF78xbrzTKq1F2zyMMjYL29TpyJ4KY3iKh2y4S+cNLm0wBDz1MvmEwlBkK+WUbJlVHyBk4iglyxVsAqFnj+7FvxLfke3tTBX2nolpjzk5NLutsVNKVNTvaQ//SnH9N41KZaFXbLdORTtFWIIwZXon+r1XvATqtMmqtzeCB6HDdrNaJkRiidq3opzyhYUsKk1BDnzgeffMTb7+3y5at/QachqFf+P/beK9ayLL3v++2898k53HNzxa7q6q6OM8OeGWoYRA4lyKRhGLZAWoIN24BlWHzxi54tgIBhPdqwbFpw4IAQh5QtSxbDkBxO7JmOVd3VFW9OJ+ew8/bDWnWHBDT06MEAbdR6qnvqnB3WWt+3vvj/P7ddf9z4NwK0UBRlG3gN+AHwDvCfK4ryHwDvI7JbI4Tj9e6f+9kpf7kzhu9HZGVqfTVPEYcOy6BLPi8EwbJMzk4vuHqtxdmZSP2SFOic1jAtIWTt/glaZkY2rzMfysMk5bC//xnrzW1KNXEtO22gqjEHZ6JMbzhaUCzWMeyQzW3hKedSDsNRj9nUJUrEApTzW5y3Lwg8CX1JlkTxUTUTFaE02+1z8rks4+mIflcoO3eVUChl6XTE/c4/2efWKy9x3h1QqwvFNvNdeqNDKqUtliuxmLaZYzldsJKgCZ4XkLKr6OqU/lAoms2NK0wnK65tb2FI6O6DvXPu/Ow7aLo46CvlY5aLMb5rM+5JA3Ma4q92WCGuYxltPv+5d3j84AQ9kYhCyRQooagu6bTYrJpmcHL0kNffFI3Ge3ufgLrCTJd5eioUd6NRo75R5qx3hC6jzQQZzi6e0JSwToo+IY5nWNQZj4SQ3brVojvoUS7lsHWxLfvDI1TSrLznRkyaWnmNjUYVPxDRptPxQ5x8DGGa46fCOTbTK/IZgzgSzvnHD05IGSlKuTrZlETc04uUdqpMBjNOz0WEbXf3VRajJYEEryiWsoRuhtHQp7UhuRqCJa4XYNo65zJNXm5m6Q+O0Cvi2rqRYjBcsrZZuOQImbsrHj57hG3ozGXEq1ioECUBioQExtZwtQVOpgQymt3rd8ikixwcXpDJi0MnrRv4UXwZOVksbXzfJExPKcjMyt7ZCYG2h2q7OBnhFE3GYw4mBzjykC8X6yhKxN7kPn/0r4RSHo19vvqLr7BWrPCD94XjUmleIZPT2T8XGUzb1hj124xmoJhCtBeLGYqmYTgFVr5Yv5W3wu8/w0gJeXx69DGZfJ7z/iG5lXimi94pK8/CW4jDpLVeIpNPs1rCyYHkMpsPKGWLuPMlliaCBqPlgvtPemQt4fir/oirV5oMXRNLlk9caQkHZ//okKwtFGG93kBbrZGqCoW8GDbw3IBw3saUXme+kKV71sUwfHK2OPT2PnvKtV0V2xCyMJ8rqJ7CX//yL/Gv/uDrgOBJqmQzaPmA7kToqcQ2COLCZZZja3MNDY+l5lLPiWuPvANmsxn37iW0FTEviaOhKiqq5Gpz7YRGYZP/4+v/lJc3xP78d3/xDU47Pn/7b/4SkUQQHIwO2HzpGnoiI27qDE3XMT2fRPLqqHGMrisQJ5ecTgkqoRchbV7KtSqdQUAYHRNFz+VPxVu5bLQa5LLCaDg6viBRssSyoT1JFDx/wXrrKr4EXFm6PdLZJn54cdnUH4Zj7j/4FgtPcqcoDmHoYhrK5bWqpQa1fBbDSRHLAMvKj9Edk9VCZvRNkZ1NQpUkEe9i2AYLNyCbstBlxUIYFvD8BQV5XgRBxGzq0iw4lKQjOl7sYRk5pB9HREgYKpiJ4AoEuOgOsDJpFpMAOyMMRW+h0yg7bG+LiPS3v3dAYuTAhCiWZYiKhuWAN3Lp9MWaKmaeKAixzOfAGAsa1QLuMuD0YC7fLy3K+DQIJNJiPl1gd2ud+x89k3vRxcrZxKF/ibSYhEM0bcxwWGK5kAAP+pKUaZDJiznonU9plFJsb+/Cgwv5DD4LNUJTYC7LSHOWye2XCrR/IN4ljAOSGJIkYemLvTGdzPjqz3+BcPqAvadyTY2EKE7QJCDSYragaKssJks++lDcz3Fuo2ohup4llnj7Kz+gVs1wcCafybfIZixmc5eVdOJVRSdOXI4vHrOQJaJe5BNFIisKoOoW7nDF61+5gyV5kR4+OcBMp4nigDgSaxoHFlE840tfegmA44NPGfQT7MoC15MZ0zBLEAzYKF/jl37uvwDgO9/5Lw
mJLhE2fd+nXq8xnY1BwqUvFgsURRE0K66Yv1yrwde+9jWmkrOvsdZiuZyTy/3IWDMCn/VyCtMWz53Lp/nm977HRr7O8EzIf2UnB5HK8CLmmtx7B/snFIot9Eg4d3nHxkonrCYJ6YpwnMoFi/7xCZGf8OmnIuqvRQnVQotIETr37GRAq3kTlAn374vvbO3cZDI7wQgC7twSGaCP732CYlhoKclX5yr81E+/w6NPPmCrIZ5poK/Q1ZjZQGNn42UAJvqUomYzknrfqJv0egPuffqYak2WRqcClkudVFFlU4IdDCdTZrM+tgRbmc88tsvrHDx9TOxLTrtGFsuGSSckMYSdcPet15j1R3Qlv5MVGPiuy3jiUiqKsz2OY3KZGhe9Z7wpy6w9z8MNx4ShzKZrCcZEVFYp0ohPF+fsP/sj9EQh25DZM83AnYek5PqdXgxQjABIGHalfGgh/mBEySleBt+8CGazGY7kq8oUdDKxxuHFAbmsWD8rbVCytphMl5f8ppnUnLPTJRUJaHF6fsw7X/o5Br19FIl06ORs2r0BG2t58jL4V6rMOe12MRC/CyKFGzdvkdEMWuui3G4+bvP4k6eU6kJP3nzpDrYRolo2j/aFTZAabPPyKxWO7h0RS0CZtWaJFQopCfgSrNKsb26Tcww+6AnZ9hYTxuOQ/rRN1hDrMJv0WM2H5NIbcl3msNTJWnnydWGfqnqZXC7HbDQHyW9488YVQtdGotNjKLC2VuPg4AE5STUzH0wJlRXLzh6GDNQWWxkmU5eNjNCLmVqF0+Gc4XyO7wqdm06nGYxXDKcxni/O9mdPf4SC+q8bPzGghaIoGeB3gV9PkmQK/HfAFeAuIrP13/yk15LX+08URXlfUZT35xKC8cV4MV6MF+PFeDFejBfjxXgxXowX4/+r4yfKXCmiaeN3gd9KkuT3AJJEkrSI//8fgH8h/zwDNv7cz9flZ39hJEnyj4F/DLB7pZncuLkNwN6TDhfdE1prDSIZwTw+PiFrN3rpshAAACAASURBVPji517is0cihbty+3jKklJZRAF28jmODrpU6y1UX0R2lr5HtZhHw78kZR2Ph9QqVWo1ESGeTZacdB4yHffZbIlSpc3aywQrl7VmndFIRBn9cIXnBsxl02uSKOhamuF4QFU2Ox8+OSfthLTWXmc4EBES35vT7XgUs3JKogmffNTGdODpUKQ4m+smzcIdprMO+7LxfWt9i+m0zemJ5J3J19g/beNFYzIZERmwjCJf+fLb7B/e59nTh3JiLVKOgSYj/KrSo33q4ehpanUJlzo4Zmf7Gl4gYJ6zeZP+IGBn+xqJJiJX9do6YQKK6vP0qchKffUX/ybz5YIgEe+WymnMFytazSajkexl8CZMpm0MNY0l+WVtO02lmMEuSNI2NyCTK+KuhhRLYl3m/gA/CLCMdUiL75UqrzOfxhTzsik87KFraRbzAU8l99XmTpWL9pT69TLr4nUYLhaUihbIaJOdrdI/mzMYHJGRMMuObTDsG4xGM7Z2RNlDt98h0ldEsg8k1rKs/DHrtRK2KkpGuqMuGdPgYnRGIjkecrkcw+EYxxRZIx1IVkOm45BI8tWslnNspUihXMF3xRy7gUu3s+DaNZHxGnUWpNarPD15SCyzile236LdOWXNbtIoiqjfaPUIA5PQE2scREsWvSVWo4GhiehnNlNjNj7H0tPMZRQ1CEekMyaOIyJgp8N9dCNAXTR4/RVJtppvoJoJhcoWt9/4Jbn3rmI7WR4+EiUWB0//kFtXXsbzFCZj2ZiacnEMg5QGEwlXHqwi3NURlikybprmE6Oja3OOjgT4R6HUoDc8IyOhgweDBFW3IHSZ9kUIaq35EtPRGWEGUlkJfKMMeXB8yu66gDzeLu7gzQbYzPnCG28AcHwi9q3iRmzelDx6ex0UrcpQZpLjoIFTtlGDNJGMwne7h8w6Pq3yVWJTlCpttl5lGTxk5YrIpKE3WAUxJ919FJnuWdk6GaOAFi6JJHltbTfD04eP0SXHTKlZxF0YmCuVUlWUGAbBOt/ZC5lETdKyJ8lxFvgzwcMFsHBs/vB3/hfeaq34+//gPwPgcP+U+OIxS83BkqV7iZvCiA36Q6Fyz7oJfuKTU03msrdIVTQgJomVS0ALXdEAnXxBgjSYNpNRAEqArsu+1gB8LyZt2UjOUtxVRJIYeJ5Y8wgXVbFprWc5kbrMjzX0cEgcG3iBkJm1cg3bqjMYywspAbphQOzghiKqmS/kmA5GdPoBKRkl9pZzlCTG1EWZzrWbdQ4PpgJ7XGYeI0Unijy82YiNdRE9/+D+FEMP8SWajKLqmKqNY6lMpyL6O1mG6JqCtxTf0WxROnd1s4HriSzDZJaQ5AIs0vih+Gw61blzJ0skM2erWMXQdVS0S96p2FuiKBOK2TpzT+jPxEowFQdf0gksZ3NeXi8yHnaIQkknoqmiBE+LWS3EWhUyLoYzpdOT2UktRRhGBO6ctCwdDIM5hXSZ6UAhlISdhmoSJbBcin3g+xE7mzn2D485uZDlS04a1bRIXBdPZoCyhuivbXcklL9j4fsrkshAk7Dymtal030X08pfZrMytkLohvi+BLSIFVJpg6nn4UcyO2Hl8OMxnh+Rlvx0oT+kXs7gOLJiwtRwoxDTNHGkxTKej6mVCjSaZT78UGRpXH9Ks5JFIr8TBAFhEJDNG4SSa6s99DGdGkrioUky7jBOY+oJoaR5aHf6LPw8TqJelgUaZkwUJfQ7T/je935LrPtyjqIbzCVdi6ap5HI5RsMJhbw4L1YrD8OwcF2XmgQyODk55mtf+xq1qqRrCRQ8L6DVaPIMERXvPPyE7JUaDRn1ny/GaJ5PtuyQLYrKEceNqVyrMFt0SWTmOuUYpB2LWSj0iEqGci5L56xLIMu1euca2VSafDOLJs+QrGpw7foNFPVVAAbzYw4P79Gs1ilVxIQ+ffoBW+vXaPfH9GREf+Nak5ODM/yZzPrbaZLVkJSqg+yhe/nNm6irgOXY5/F9ofdnSopKzsOXusXTQtbyDXZvLInlfsmqNmXHYTweMZYluOXNBsNRF0cCMJVKJtm0jaaWSCQQztOn+1y7eQNbPwVFGAWaYuNYBlvrYs6DaISZ2ATuBGQlx3QyJPCeki+B7z/v24uIIodAgsDY3pLJyqdcK6JGEu7esUk0Czsdcf5EVDqp2gZqNGXQkyTb1YhcpsS4NyUQl+bqzVsoyjrT8RBDFedDLbPL9l0NKyXWc/9sH8XWqZYaJNIezqUyTKcL4nhOuSSydYoGmuVSkBVape0886hLe9JnW7Ttsbf/KY69xcJLc3NHtEzsPf6QrJ1lIXu+IuDlrStE8xHf/v3fB6B54ya1Rpac7OfKli3+5I//jHS+RLMi9uLR3n2ePamRzlnMZa/m1B0zXSoUc5IvbxaxmBkspwqptHhfM+UwaQ+YDi7o94Q+zaWzpJwiu7eEbnn04IThaEkqPeHgVGb0swmr5QaZjMNkLOQv9gx2dtZwPeGaZByLybCNt3TBFs9weLTPy6+8RPf0s0tAsumwhzeds9SEPp3sL
1BtnZxpYieSKsBQGA/GrBYqi6WwHfT4OW/cv378JGiBCvCbwMMkSf7Rn/u8KfuxAH4F+FT++58DX1MU5R8hAC2uAT/8y+4RRRFhJF5C0RyqNQPTtjg8PBQvOwoplZc8enRORxJF3nm7hWVZdM9E6Ze3vKBVLTLqLKm1hBI76/QoFxroiUH7XDyqRkC/d4LmSHSkeMZ6awcvVwNZcjDp9agWS9x7+AHpgmzAVm1iJcCX9eqGoTFbLhmNZpgSJCHl5LiydZPZOCGRqE03rl3nD37/d8mVhNHbau3w0b/4mM1rMfOJmP5wpbH2eZ2v/Y9/SlXy05ScTR58fM4bb0sBLttU6zrbu5toqhCgJ48O6HXaXNu9jbsU98sXTMqFCvc/EUqsVb9DtdKnUi5fli98+StrRAzY2RF16PvPLpjPBuRKK1xfHKD7J4/Z3LxJpztgrSkEaDz2MXSLel0c/L3uBEWziPw0w4lw7nTDIIwj8oU0niwLCoIsTlZntRTrqWoG7e4eChHZzC0Auudd8vksrt+/NHbMVZrm2jq9gXCoM1kNJfZorlXIpCTNOS63bt0k9sacHQtnNVQclpqJItdKMzwSo0eqEHN+KgTv6lWNdneEF42YHgkHIZ3KEccRtZpQyO5qhZHyGY4v8CXnUzpfYNaLqFQ2KMvDcTLv0qht4cg9FSUdMrrBbKyjIvZUMWPhzcFWalzZFuvXmTzDshxcTxzq2YxJFNooZo5iVSL8qTFqyWC0GLGSiHpWALoJY8lJls4XaTTrOCkYSO6NWnUL72RJo17Ek8zqreptInWKI8EO2uMVWtzErimULGGElnJXGA0XeHGJ23d+BoD5aoGqe3zxS78KQKFQ4Nt/+NtkbJeqDG54NKkUmgzaA7yZUNQbm7cJ4yLjvvg7o+RRFYsg0kgXZb8YCSldxZalBKqZAg8MXaU3Fe+X1bKUNouM3YSRPATq+QJv32ow7Elk0GIZO+uR+Ck+/ET2YNhCvkqpHL0D2RC9CiCrEUti1agQ0nAKDIIuSxmUaRXKTMsKbucCG9kzZ4CmNhlJBLzm5gaYId3BM6qSDHQ5CYgYkk1nyFRlD9vUIQyfooTiueezmFQcMlctfEQZ8rcez5goJbIo+JKo0XdN6qU1Dof3AHj0e9/m7/1KhTtfeJv+qXDGS6UC4y2dnK3CQuzFRD9jetFjOhEHRW8yRtdTzKIFxHKOLYNotURVdRTZE4RiErkhhZzQdzd3X+Ybf9wTPEWyHM000sSJS7FqgOSZms1ASymXxLh5u4Rt6rhum8VS7g0/IJ3RyWQNuodCtt98500KlTnnPdlLUdohCicEqkXCcx4vn/6owjIAVfINKaqKoukEsrl66fboDARHF5eEvTbL1YAvvv0G07Eo3T07u8C0C0h/Ac1UCLyAZjNFpyv0S5iYhJGHrABEVVXm8xn5dIqCRLzULYg1jThZYUmjTFGXNGsbPHoiyvQWnoZtnmMqdYy07N9CwTR93vvg4aXB5weeIHJ+3mvkxWyul2k0VL75Q2FoGHYN31+g6iqRDGoRxwRei3ZXcr4Fc7RliZSRYbYQ81su5tiobXK/M8INhXOVSWdAdYll0ElVVQxlxnF7jq6L4INhqCxcFwcNX5ZjN1t5Hj/6lBhhkOm6iu+HGKp+yT+WMjXyhZi945BAAgm5S484jlG057xhKzRTIZ12mEsHbJmsUJQQRVHx5fmbsjQcLaLdEWvsJSZWysKdLwCxF5UkJGMq3Ll5k9/7ujh71MQm8nzi8DmJeA5TW9CoZzk8kaiUcY5CLkUSxKJnDAijFZbhMV+IvagaVZYscF0PVRGbYbqY4S1i3nwNHu7/7wAMhhU01aAje3a+8pWvkE2lCYKA50VB6XQayzAJ/eiyd/k3/uv/lslsTK1UkZvRwPUS3nnnDf4MEcAKijFZTWd1LPb9MS7V9U0Sd8lsIObAaG1zdPhdhqsp9eo2AKPhOcuxytpVWeq67HPx3SNCbUTTEVZ2TI/RrI06qbK5Lsq6jw/bPDs+JSX362arQtEq0G0vyeTEc5YyMOqPWCYDrjREP6ytrmMkPs2mmKdnn40J5ymuXtnGlfxtq/6UQb/NbDmiUpLAW9M5M3+T+Uqc0dV6hsHhexQzxcvem95ogra+SX6zRSID0Q/f/5i7b7yNKdH04sglCGeUCutcdMR3ctkyUaAShiG6PJPbFxOUaEldOh/TwYJiMU+omNjSaXj0WZtauc6wBzHiHHXsNIoaEMVCceRadcJpQBykOO+L8+K115qMLvZRowxb5S+I9UtPGXUWvPZ5AX7y+OQ+k5VFaKjUdsQ+wNHRYwO0hIuBaGkwKNNoVelIYIxASVOq6YSJTrUk1nRvbw9Hz/LK3VcxZZl6u32GH/sEnghWPX10jKE1uX5lh6m0kfVgSrAcM1ssOZa8j1GiEYUarS2Jnuu6nBz3mV4cUJc8UCnNJrP+EhnEGfbetx5w89Uv4BgelgQti1o7qFaCq5mYskyvVKxjamPMjAg6q9kOSVal5OSYBs/leIoa6mxubmJIRN9OZ46dshi0hfzvXr3OcHzCbBKzVhQOTTZT4ehswmTisb0t9n7ouVQqKdodsRcVS6HfibmyfZPpVFzrzs03OTl+RhKa7O7KvujTJUmy4OhYzN0br7xOd/ghvaGFKeU/TGycMmSyCou5mPMYqY9/zPhJMlfvAL8GfKIoysfys38A/PuKotxFwLMfAv8pQJIkDxRF+afAZwikwb/3lyEFAgRhyP6RMBwVMhTLTSaTGdmcUJK59Dr5osbKVylVhLB0T8dMZodErtgkGb2MpWXo+R0GT8XGXN+8wnnnlEzK4Uw2q6uqgpIoLCX89Xw1J5hfxdR8EhlVrRWzdLuHDNoho5FE+HHOCIMEYrHB840q6ZRPtbTB4TOhuIMgz5/+0X1arTXWt4Sx+s0//oBCbo3VTEYLFw7zscLoAu7cFoJ3/XaFd9/9E7Y2c9y9I9BfdrZT9NrXGLTFApp6h2q1gu/HrJZCiSj6gItOn0r1dUxFCEcuC4dHj1kshHJ4+/Vd7n16TqSe05DoK6OBT7/jk8sK5TAYdtjcLpDJWLQvhHKdzE+YLEOW/uIyAzWeKWiahpIIIchlHHQ1R6e3RxLIelW7hpPKkkwjPv+aMEwtbcG990ek1iRB4dMzCpk66YxKXzb+lvIxu9vXOTrosX1NPGeCi6L6qIYQ6iTK4Psh42GfmTS8C3mbJ589oFzMIQMRbLduEfgzun3RxOh2A/xQJ5Oqc/WqMBAuzo4Zjqe0tssslxItCB/HyjMayh4odUW1mcNzo8tmxyf7bVrra1RrOfpDMcfLWczV7QadjniXpTfkxo2bXMRTMrbMZqlLnLpNHEVkJWrjSdunuVZFk+hkKTVFFLg0y1ViyXBgWzPmPsznOvmGXAjHZ+UGvPHaLwDw+Oh9DCfh/MSmmBPvctZ9n3JtnTBZcdY9kHujgGFCyhaGf1bfIUpGlAOTfEYcsp/ee48bL30elBm/8Ru/Jr5XLnLr1bewLiGjl2xc3WTcO8WVh+MiAU2fEqlwRRIg
Hu5dsLO7TnlXHCbdzphM1saP8lgr8S6trZsEg2OmQjyYuwFVp8Czw8fk1oQiXSvqeOMl18rXGM2Fk6QEKYIgZqMp9thgMiPOZHBdF6sonP/+2aG4x/o6vb6Qf932sMKE1ULcf7h8QteDha+S6MJJGq7AcRL0bBo9Ep+l7QGGkbsMUDx+ep96o4FtrxFLh0i1F+hYLGca08X7AFi5TTYyV3FdsZ6VrW06h2eE9jb/83dFpDVWCuS1gCCJ0eUzpPIZfvDD36M8EofuP/mvfpnsWooHn50z/OR/BQTFQG9+wXi6ZCVVuZVfZ6OcoigBCs7PZ6xvVImUhFgCtyho+L6Poqk/Ai2IwXV98jkx5+edR+wfnKBbYEl0Ut8PUBSF1167y3lbstmHKRR9hS6jlasZEPZJ2U0uJDy0auhEUcRqoZFKPUeue0x/oKFJBziI5pBYzJIR2ZRYm1La5KSzwEiZxBLSXMckjBJ0VfaB6XDWnqDqxmX/GLpGEiuoyopqRRygi9Ueihldgh0kSULg+RhqQC4rqQJCD81UiRKxxkZso6mw1khxKIMyKzfAMFJoakAoe4TiyKczfe+SNN20iuiKRZi4l9UXo0GfrY1bTPptFhKMI1PM4K5G6IqUqyDGsEIUXUP6NaiGSqLGJIlKLLMR5fIS01JYyV4j3Uwzny9JnIilBHMqldMoScRw4uP5EobYczFTPobMwgd+l+Z6hv3D1eW7JImCEifotsVqIM6Hnd0WndOYUIKfxIGHoWnESYK/FEan5vmknSrLZcRznl8vcIkVLns8x7M56XQGQoOVvJ9ihCRxAsQkEqzC0BKquRwXZxIqPfSxNJ04UjAl2qOp6mxuFDg6PKTXE4ZasZTB0Q1cmbqKEpGJjOM57Y7U1Z5CPuuTJAGqkpJ7eMHNGztYaWGAfefd+6jqbfwgQFPFtZxMial7RqlQpdUSwY3Z/IhIW1LIi3V/7e5dPM8jk3Yo5MRnvu8zdVfs7OwwGIgz63d+9/colEp/Yf8YhkXKeQ62D1uFDbRYoyMRPhu5NG1/SWLVyT1Hs122WS/eIIhOCWXvUsVokssbhAthkySBzs2rJaL0OqFs7NHMFGpgk4SLSzCnt175Mt968BGKrGqY90xKxYjR9D0URC9MWsvS2Nnmo2cz8pHYQ8/2v8/K9ei0JThWyySTsqjVtphJBGMtsth96wYHh8948H1xFr35hbscnB9jyR7ID977kC994fO4gYIiibdvXq9zcX6I6oUYCIO2UsgTJysOZSB1a/06y1WbT+7t8fnPCVCtQjGLoqpc9KuojpAHM52mnN3Al+TV2XKLKFownYb4kronm81SqlosJxE5ie48Ho9x1CaNpkRoTBRmygRVgUJGEtOfPMN0cszcgKNnwjnOVPL81OtfxUPojfksQnfGZPMp0hmhb3qDA3JWwrDXp3VVnJmxZjHy52Rk4PZ8doGpFynaFvO5pNtZq7Kcw5ODNrpsEI2jBdNBwkSiKt6+/Yt4/hQnZ3EuwU0WK4di3sGgwKGkD2lUysRazFBm0zXdYBLN2bp17bKqYXDSR0k2KTVEkL15xcWyLKJxnnxFzO/AHWOqRao3qpw/E0EmRa8RRwtRkQBk6wUibZ3Dow8YLkVAW/cS0mYBxVIZueJaaj6NXa4SuRIQybFZnWX54tv/FsOB2D/f+e491naKPDnY41gckaw1s3z80QN2d4V8esmcSnGNcjlLEok5OD9/hmWBWUnhzyTwhu9hp1S2m0Jmp/4IVU+TqzbpdcU89fun1IwGaDFrVWHHRggZ+3HjJ0EL/A48h5T6C+P/+kt+8w+Bf/j/dO0X48V4MV6MF+PFeDFejBfjxXgxXoz/v4x/I7TA/7eGqkZMZiKq4/sdZosL6pWbFCSKS/d8wnxcY/2qRUdmOo4PxkznU5o1EWnJGDr7xydoeR3fFymMdqfH0dEBuXzqskTMtjKk0wWubItI/fe+/z5WXWE+izk7kr1aS41sNmFrI8vHHx2K35ViMk4Ldy6iKp9+vE+jvo6WROw9FlHxh5/u8Xf/419mPF3Q7Uuy4WZAHHoEgZjqw8NPefm1NI1ylduviujIdOrTbGzy1psvo0gSWN/3ufPKTUJZTrR38IzdneucHvdxRMCNyfSYSqXC2fnHJKaIYKQyu9TqFRRZPzp2vw9ml8PTEaihvHaEH/foDcTfN15qcXp6jLtUKZRFtODl9A2mk4ByKU0i+4ZG04cYps6gLyJglpUin0tz2J5z/ZZ4l2EnxNHLRP4FnbHIHL16N+HfvtXk61+Xdf1Wjt75grAcsS57IkJXIQwW1BoZDERUbLQYYZoBz8H0zs87bK6vM52OqFeLcj0L4EcUCmUGIzFXJ6cH1GoVNjZ/CoDTo6dYVoRuaPi+mJd8sUJ9vcB0OiSfkQiQSYGFNyWbFdF7d66ynGfBDBktBHLe9pUWxXqRo4N9CrKGvlZqEIYzdnbq8jkVpsMYIp3ZUuypJNTJpKYUCioXbZF+3t7YJZ0p0OmLSE+ghWTXdPrdPouZyG4N+y5+4ILmkkpLJKCzI9Dg8YmAOB/Oxmi2BsYIQ0Yew2HMdDJH0/RLpMP5YkhvOKLbf16TXOJzn/sSB08+4pGMZGXXHLrTI1zX5eqOKCM5OTvh8QcfcfOWiNhcdGcE8xhsi4EkA3ZyBu7Cp1DMohpiXurNDPVqnQuZNa63WgxHA7QEcqrYL93DCWE8ZjYRUblWo4WtJVRqDrYst+1ddHnp+hu0BxPMvIggessJxCGHw0MA7r76Mo8/fUym0MI0hKzX07IkZNTnpddEL8GzvRMygcrIFZnyRSdibC8Jx1Nu3BGcKCeDEd75hPrWGhdtgaJYr9Z48vSQr/zMXwPgz771XU76e1TLG6QcoW8eP3vAv/MrfxvHrtO7EGvq6wpJb8C1vOjx3O9l+daDPIswxpSlGYblEXgmTiHLqC/2xpPf/9/461/c4W/9hz8HgJbR2D/p4FgeN2RZqaWG1HMvM0xOUeri+tNxn3o5z5/8UKyn58JsMUcxYiQiNmEY4/s+um6iSK40VdNJEmjJnoQ33v5pVt7HqIpGsHpewqlimhH1hsPBnoSxDnxsJUciy/aW4ZIbO02u7lznd/7Pb4nv+EXMvEMYR8h2H+IQBr0YLxDraSo6mhViKCX8iZC1RqHBcUcjDHRsScoaeC6g40j4XZ0Ui3mPbKZIFIpniBQPNU5Qkhn37olrBZGNoSeXPIkBIWEcsXKnnJ1LdMtlTMYBTfLlhV5CFLpMZmecDefyOiZaNCPCuERD1BKbXCnNM1nBkCQ5wjiFZUYkyfOyS5/19QpH7TbPS8YCNyQkwbbEd1arFfVGAcfOsXBFZjDjxCiaj6nk8BOR/bxx9RaJX6TblzwrtoKq6sSorGR5+EDtMx1nOD4aESVCHhUlIgqTywyGoXoMhnP2L6ZEkVj3KAKDhCj6EaH0cHxMKtdgJcmBUwb4Soij6qymQr/dbFpousVgOiCU2R5LhzgI8GSfm6WbNDeKjKYdIlmWa+sqcWQQJRHT2XNkLhgPBhyfSnoGo0Toezi
WTaA/z/YomOaS5dJkuZQlC5pPHBnE6lKuVUw2neHWjW0++OTPcdJEOugJqoS7TmYR4dxnR+q7YqnHZBAQRDZqLA4fz/PwgyXFcpXR/ELuswXzYcgv/Y2fBUQpZr/fx10u6Kxk33AhR6NZp7VW57d/62sAPH2yx8ZmE38m5knXFVKpFOub1uUjGn6BhbEi2xJn4XD4hECz6ffGNAuiTM820sxXM4zQIGuJM6taNzFMC6TuK5cLtPdO6U4nl6V0G+UcTqBRtPKXHGTvvvtDtq9tEcp16fWfcXbus15/iWtXhW7pdSekbJNquowfinLbRrNKf7CgKDN1KQf2988Zu2N8SYUxPPdIPb7KbPyM+ULYb3uPhjjVXVqyyiCVu4o7HOL6FnZOGDifHX2ErRTQkxmm7GvNN7IMZyeXGWHXVSnkNynXF4xmYl0uLk7QtTSNegVf9qJOpn1QrmM4Yg7Ojg+oVUv44QxL6qBX79xiMm7jZE107Ud8UblCiVpDfOeD775HuphnPD8hiMQ5+uzjBdduv0W9niZ9Q7Q55Cs2nc7jy71PPKKYrTMaK2Q0kYUrZvIEyRwjl6XzvBIgDklZKVKhWL9UVsXHJBtnL8tY2xcdnHSG+eqUC1nuev3KLRRzzq6kAPK8EZmUhbu6oC5Lmm/Usnz/3W9RTLt4MjU+pouatrFTP0K8rtQaLGYTZlMhj43GBml9hGZ8CYBf/rWX2L//fS5OjvFUIXvXd3doH5+izMpEK8k9WeiRy5oUMkL/nB4vcFYDyrkCQ1fsA0OtUqyWedp+RJKIedFTHqfn7ct9d3hwRs6qcW33Cv/9H30DgJAxg9GMrfp1MnlJ/qsnnEzPGUhOK10PWSz6RGcJDYmieHA0JF5EWCWdw0OxfiWnhKmkiHRhy6hBwpWtl7jo9EnnZd+nE6AnDrmCyWwhzuhKU+jMHzf+SjhXtpO6JK8cDcdYjQyjyQmKIpSIG85RVTjdW9FtiwN0PgvwMUA2wnthQiZdZrDoYKbE4va7XXau3ibBwJFp5WZ1gx9+9AGTjjA+KtkK0cqhNzjCzMnG9Cgir1a4OJoTe+JQfeXGz/GNP/yAvqxzX1svELgHnJ8d8dodwcr9U597nXZ3QK2VZSXTs2ktRzX3BlHzOeHrjPXmLebzPocnwgBrNnf40pffYTw4YyXT+56rs7FV5vBQCN3W1ha6tUQ3x6xko2gS6yznc2aeR6QLB+ije2fk0luXymHQc5m7e6w1bnJ+Jr6jGUPCKMILxSa56CxZLV2qpU0OnwmgiEazaP8r2QAAIABJREFUSmutjKIoDEfiGRxbJ/IKXNkVkLXL5ZT9w8dca17HkzXCxXwOw5qSLdhMhrKJeKLz4PGC8US8W3O9SWMTFrOYelM2qrsB4/GMfLGMLw//jNlgEe1ja0JhNOsh2bTKahyxVhV8Z9///ge8cfcNRssp533RaJizU8xmRfTU856rFZaZY+FOyOdEv0O/NyEIEtKZCr3ecwZx8BZzalK5j89DTg+esP1yjXpWOAN3tl7hsL3AMpfUSkJpvffsgHqtTBKLvWnoKba3rvDxh/fYviOc+N7pnFKpgGXBwpWwp+GSs1OXrS0BqDGZzzjaf0ij0bhspJ5Oh7z6+s8TKec8fiqcVY82xA7DC5nuX6h88ad/Fnd2jC4bxSt6isRfYlnWZcN8q3kXVd2nVJZlSL7D3uEBs5WOF0ni3VUa3c5hZRRS0nnc0iLu7H6RszPhaISTIbORys4bm1TzkudidoA/WVHc2eHwQChOzQroTvVLgsL+XptqNYUXjClJ3rnMfMXBVGFTltrYQcAcl0LVwe2Kg6nYSPPuw0fc2t1hNBDXrq+lWMxCTE04jsenJyzjmHByxIYEuTgdi/VYr1QYSB6t/lGbwvUWVUX0DOQrVxlNV7xy9zYruX7d9gXXqy1G5wGmpBM4Oj5lrXGTB/clS/36Drl0hUQ94/xQzN1f+/JX8foJ+yf/nKLs1Wy98rc4Dp/wx/fEnv7GgzPsQgXHiEg8WQZhZIltjQ+/+RBrJXolf/3Xf45SVeVwJHvophGhtqLQ2OBQOgyNeppMEJJfX+NC9inN531UM8OjE1E2E8QKkbpA9yyQ9kIcCwJVTdOIkWUliFK5iuSBObuYcXrRx7BNVEX2p/oxuYxJyrY4fCL0oqaZaGaE5j2HRl6xtVMhmypeyr+qwnwxxUnlMGUjer4Aq1VMKHlSEnwSAjxPJW0IQ1ELbDTLIU76GLq4vmEa9CYDbl8V5X5JaOJ7IbodguyLNByFwI25erXJcCT27MpPUDFJZN+ZoRpouuitmC2fQ+oaJGqCIoEplFjBUFV0I2GyeI5oa6JhkeCjSVCNiBHrrdfp94Q+3T9dkrIjXNcnkv1G2ZxNFM04uDjFMEQJkJpYRImOK50Y0zZYLCccH09RVNm/SYRGQBTFxM9JfJdznj1dXPaYGXqO5XKKoi7w5TFz7WaVUqnMeNJFyYdy/VyU2CKQhn+iLDEMGydTQJFrFccQui5xRkNWjJHNqzz49JhEEXKFEpDg4QcWzwv+G/Usjx89ItQ3CWUpm7rysXSdleTxc2yDIBlz2j1DT4Qjs5gtSdlponh6WaKK4mGYaVRZTmiqFnG0glhD08ScW7rD+maep3vPWLiy7NHOoURgSr5Bb2lgGCGvvPwq3/me4ClM4gjVdIl8h0QVe8Fzp7z5xlXOz4XMnJ330J2rRKxIJMm2O7fJZA1K5SzvfyTqkGZjhWohzy/8vHCuFosFrebzAJs4/x3HJuVYBMGK3/zN/wmAaqOB67oY8l28YEk2VyCV/lH3hFOHETq9jgjSTjyNm1uv4vuHvP9ElJ69/cbPM1/2KJSrZCV0dxT7JKqN4Yjn/uRJjySeUchWcX3ZU6bYrFYTjKWKJukEQitB0xxUWR5mpxQsq8bWxjX294S+Mcw0zAbMBiOWF8ImCLUaeimLUZVUBacj8sUMtXWbj+6Leco2Nph3PuALn79JYEkghUcfYxYyVMtiDnbjGh/uP0WJPfyZBC1QE8rVNTrjZ+CKNW2Ud+mehGRz4n6j6TOMZQ5TLxAH4l3efO2LPN27zycfHLF9W+iJVjWPt2xztC8csNFYpZL3CJfxJUXFfDplMQ/RDIdHeyL4Vqts0O4/Q9FE+Xm5sclms0oSvEGmLIOIv7rGfLLPN7/52+QzonRuueiSKFWam8K+iQ4WdM8HoNdJJFdi5Lv0z5YYpTTeSuy9tFPDKa/R74r7R/MVM3OKWtxFlZzezUaD7tmEm7t3cXQB2pQpepi5NLEM4g8GHXqDmLzjsJQ8rOejLplihTBKWN8VrQHeLGIVukxlr+ZitYJzl6qTx5Bnw7ODc2xd42TwTwA4HbxMWjNJHBskdP946LGxcY3HBw+YjcW5lqCScm10qXMjJyL25uSsNLWyZGeaOoynSyrVFpOReM7VdEHeslmF4pl2dxsUnSJ/9Pv/jLtvifld332LH7z7DRyKNLeF7dAffkat1MA0JC1IaEBoELgrLiSAVcZIo2U1xu02Gw1h0y1nJspqhSX7StK2w/RixG
LapVQR+7XuNBj1Z2QLMG6L/bJ/KmzeHzf+SjhXrueCrIXPOgn+fMlZv087J8nP7BFKWCRyQ85OhVAXCgZxkOPDI+EMbO3miXwHLTY5PxMRvlw6y5PjY27uXuHefeHIfOP4z6hvrzMfCoPo6i2T9354j7XGlctIeUqfc3LoYtsWd98SoA8XvR5vfWmLx58JISjmHcqlOtc2XidRpeE2HPPGm3e4aM9RDaFcE1yW4ZRUWiiHwfiCgx8cUWva1GpC8M/b97no+VRKORJfREgUPeJob0KpJAR4sfIYDU/JZC08SahnaWUKhTKmnua8LZTw2obDYOBy+7YgOvvo3ndoVDeZjAfUa+JAC6nhuhcsXCGYK18ln7tFpGQwJUGhk7Uo13LMpwmpjDDig8AlUTX2T4WRb5opJos2nhdx7YasQ5/4jHorTN2nlhcCfPShyuHhPju3pEFtV7h9+xX+7JvfZjKM5P1MND1HLlvm+IkwYLe2tlgNsqRTz9HKEtbWdplPz2h3hAC//cWX0XSFSfeMYl44TqVymtPTB6xCcb9iuULnYkzWKTMZC4FdW2+xf3iGEi4oNYQCrJWvYg+XKLIHah62ufvWNYJQw8wKo/MP3v0uYbTkrVfeYSKjtqVqhmqzysbaNgCPHj0mn83xxpsvc9ARyiddCFnEXZ48mbOxIda9O9hnbW2NpXRsVsEITTXpXUxRVJmuiyfcv/eHTJZTNnaeA23kMbU8TlaIb71p0T0bspj4rBaHAGxuVgg0nbN2n1pjTe6zLolqE8kMZnfUw067VOsbtC+E/BWrFcJAIZ/fJklk47SZ5WjwkL4kB451BTU1ZXmRp9F43gibI1OpM+y12doWDs9ouMCbTkjLJoyF12HRVVGNHE8vxOFRaaSom7lLxK1ZsmA2XWGq1iWXieKnKeUjnh49IJsTazV1LaxsBdMWz3h8+JR8tkat1eLkWCi9SkHI0jIxSVnPAV9sLo72RbMO8NKrX+Wzk49577MPefW2WL/FYkX28w2CgzNmQ/HsN67fJUoNOX72XLescTrao2TWKGalU+3Au+//MWU7oncq7vcvP/yXPD3JoNoSbSqfBi0iMR00aXjf+/hD6D/kC5+r8Hf+vb8DQGd0xrx3Qj0v+GS8xYxKJk0QLjAKkizXGPN+/4hXyz9F2RN6sfLya7T9Pu0j6QxoMa6rooYrTGkoqobNKkowtZBYOt6GliGO06SqYh16vY9YzMZkTIdAF3sx8hZkcykiZc6JPFiWikuJHKEte1ONgHSU8OTpQyaytyiXkz1OERCJuWq0Nvnud/dQJLiCZeQJUJhOJrzy8rqcT4eP3rtHunCN0BM6NtY8osBElcSY7cEIhQKqrhBISz/2QgxTwXJMRntijuM4gEgjen4/JUENffyFx2ddYewoWpNoFmNKImU3WpHWTW683OAHH4rnBpUg9jA1mMg9u1Z2ODp4j/ZI7E3VLJDEM3RsltJ5LKYsDo4PiKI0SPLaxPDIoLOQ5KeFTMLM7fNsr0MiqSGNJELVHMJYQ5dGw2TaZ6117XIPa+oSXYnRtQxL2ZN4Zfs67cWIZRRgR8Jo8PwxZrrK3BfO1W4jR/1Kiz957zGBhDDzVhaJohGHMFuIs9XK3MXMbhCcyyb0KI0SOSR2eAkCNRieYqdNVMVEeU7aYyj4SUCcSMCOaMxiYhCFNQJPZkwzGq43xVAcvECs6XolxcCdsxhL5yNZYKs6kZ4hfh4Esn2qZYej8w28RDjQtpagRNGlg6nFKo4Vc9495LGUW8M0URMLJZpjRJLDKlGJ9ISTY3HtOFKJ1RAzhJWUmURdkbg2w9kI3xOOTL93yn/0q3+fVkus1fn5GdlslulsjCJ/VyrmaTZb/LOv/y6fSoN9q1pjMo1IJDG1GtjUyhWUnEQaAjQ04u7skmPOwKMzuI8fxJd8arNRj3p1C7tg4MnAZRTNmHhjXAliEC0z+OaMhr1NIIM5i0Wek8MH6GqWTFbYJcVyhvPTC7Si0Oea2SVxEz748PvYGSkPk3OK+Qa+qhGoYr+s1TL0J6c8fSDWvNla57zdpf10Co74bLxsc2W3zpPDQ/IZSVK8dpUnZ/fwotcByGd1NMNHI8OO7Fcfj6fMwxlxlCcKxPsdHn/GYDZh7Im5qlbLZPUC7iJkNRfAJp0Pfe7cfRXHHnN4IvSU7+ssVmMU2Sum0+P8RKGU38bIiudcRCvmPpRyCTeuCsTZYd+jua5xsC/k6m/8wt+l1dAYtWPGnnAejzvfJF7NMZIWoZStfG0TJ2MxGotrX7/9GsvZijiecNYWchVhUKw5DN3VJfiPFUYE6gmVlDjrh4slWpLQ659TK4m581YBuhnQuZhc9qefXpwyHoxxZP9mrVzEdAw67TGezL46tkq0zLLWapFOi/sd9kekcgoXbWFTjvoQ21OMaMpIcrUGbszGKzcJJOHzefyY3eY64/Yz7LRIZHQ6fc76XQqZa+zWXbk/fdY3NkmkzPbvtckW8mTyMUFf6MrqesCjB+dUtQ2aVRkoPT8kW69hSoTPuL+kr4LiZLEkf+OynaW1dZ3vf/tDFInN0EhVoTZn6QkH2h+mUOOY7uKI/PMea32AOp+zabfoSuLtbLOGG8R4K4n+XMjzw+/dY3Nzm4UMqqlmiduvbPHBR49IFSR32WPhC/y48VfCuYoimHoiba9oDsXiDU6Gf8LH7wuQQT0s8jM/UySKTa5eEwb7dNYn0WEsifh6wyVKkqFabWKaYoPbOY3hoxU/6H2Xgi0W7s037pIrFXntrnA0vv2DP+Wdn8nhuwq5nNi842FA0V4w6EB/IYTK9WacHBvsShQe3RyQeCmuX3kJTbKTlys1Hj85plxy6J+IRbEdlfHiIStfOEmGbnHn1U3O28+YLiQzuK1hmVUUVWGyEgIbhRrN2isglVg1X6A7CBjOuhQkXLKdsYmSKRf9Mz73ukCpWXn/N3tvEqRZdt33/d783jfPOc9ZVV1Dz2gMDZACQFCiSNqkI2xIDlEOb7T0yuGNIxze2OGNpQ2DClmW7HAEHTYti6ItcbIEEAJ6BBpd1d01V86ZX36Z3zy/+T0v7s3sjeHwwgss6u6qIvPlu/ede+655/zP/+/R6z7jo48FJMfStxi5n5EkKbFs3AxcH10fYkcC5mUaWWzdIGNN8GQG4/zknGKuxsH+GfmShOSEIa3mHhWZbdKVErqqUl6skzXFJj/u3UfVxoTj2zSbMnNtjHnja3mKhU3x97QMD+6/z/aNBmPJ0DSbTjHUDEnks7Aogurz5iWGkyErIQ6X7RmjiUumqOEmIhOZJG8TuC6KARnJNjObpSwvbdGTLHXu3ERRVMbTSzJZSRkfDtDNOZNxSLEovvt4fMSwH6Ka4kBbu/EKsR6hJwpFyepxc2uBxuoq075HIhmEbEejM9oHWSKvLpV5evYjYneJVApMd4dDlpdX8OIPePBEXE5vbH6HKKjT7YtD99bN13h4/wDHSfAkFawXBhRLCjXnS8Fl28hQLq0zHEvKU/cZoTclZy1gSSG83uwCAoVKJU8SDaTtmQRBQL8tvovnT
tnZ3WUyda8bfydBhzQooM0tBmOp/q6Y5KICugwGNK1Oo7FEq/Wc8ZH4mclQo15bp1AscCwZ4ayMi6YHdOUBkymVUbWYTmdAJKE7g7lJMT/Al8Gzk5bZXlph72wfqywq1w9Pz1heWiBTsBhNxLqk2BjGgJJk4bRslZWVDTIZm1YgnN6VCvyLh0+4IUk23rr9VX7+s/+LjIQTD6dnvHnrGxwb6yzJTNZ3390mGExZWrrB3P85AK3LJtPZAF1WiPI4eGFKuQJ6LIKrZ5+dMBg47E1NDiTDeGgb1BsmtiLeMzEtvL7P88MPmXeFAv3f/NZdvv+f/z1al8/4Z+8L+tuN5UUqURECEcC7TJi2HcJigCcvKCO3yHKlxN7+U+oVcaga80v63R6XHUl6YxaZTXuYVo4rpco0SonDgCjUMAyZuY5nFPOgS5ve39tjOImoLOSvM6ajWUi9vMTaUgPvSiB47mPaMZEktAn8mPqihaIajGUGOputkio+sa/iXCGf0oA0Ma9hSa4fY9omKCFff0sEXO9++13+57/4TxnSJVLFYWylEakW4MvqSyG/SRiMSRNBJQ8QRRGa4TEPzriQFUvdqJKkEbqECbnzGNuKeeXuIg+/EPvd0z1KRo6ZhLVNg4Bla87eQYf5VQUjidBjW5JmiH2UpiGN+jIffCICWiXWUFDQdfVaOLY36PDkSQ93so5lfelPVTUlkI3/a6tZNM0giYrXCpSGYTCbToljl4yEdX313Qo//+QprkQcWXkTUEnC8DogMdWUB58+wfdNLK7IP2wUjWvK+GzOoNm8YDQATRd2EMceqRpiqg6BK77fZ/d/zHR+C1PKEiRJRBwrxElKItn7ltcKTMZT3PmXxBRRCNlsnq6Euu5uZKhUfQ725qDL6n0QoKOiWgrBSDzLsSwsM8s8lBTyZoyKSRQERJEktNBtprMe3c4MVZNkJ7pOGkc4EnblTgPWd4sM548Zz0UglcmskyQJmmZck9OUyzqqOmTiSuIkbNI0JUrVazKX8XTMQjHHzVfW+It/JYiRlxcX+e1/528wkQyilUoF3/eJgpD1dQFpUhSFIAj4wz/8Q3KOqMj6cYRqCJgmwHQ+YWdnmbzzpezoYX9EbIWUpXBs5A3Y3drh9HxCviT3uh5Tq6xw3jkhq4n1tE0DLx6iKCLeCa0JmaxN8/yCak3M5azzjL/23b/OSesTzo5FIKq7FVBtfE8kaRcaRY4vWpSyK+imIu26wnm/Saj0WVoT8Ua24hJni0jlCey8gz4WdPOrKyIxVFZAUwwmioplikuDqrQYDrpkCyIGao1j9LhCY71CX+7ty3EfXYkYdi8o5oXtGVaOVxZWeLH3OQDnx2PC4Am3Nt9laVXEMz/60Y8Iw23ypQWWpJBxthjQ/OKcBSnyO53bVCuLjKb7FFRxNoznHqW6TxLlyUvo5dn5BzzfmzKSzLx/9Ef/JWvVDGaoMBqK90zMAaOpilZysGri91bnGU5OL9m4JWKZR48PyTkldN2jVJRswe0B1ZUlBgf7vPWqSOJ3B0cQqORs4SjTCji1BhktQo3Fs2bhJcPhkCCI0UJx9pTyEVbqg2R/nQUj2hczoii5br04Oz1mMppjWxpdSRFfqxc4Pmxya0vA5t3GgJtr32OhtkImdyXwrnN+ckpNEkz5wZS9B885uphScCRlfWYDUw04uXyONRTv/s47b5NVG8x6Yu+V8z71hQqDcRNFSjj4ccRStc64f0mlJOzl1sY9FhbKzCRKzPMjElVjac2kKxkhQ12jkLN5p1Zj2BZ+KtoMMTMpnoQljqMO1XqWvAq6JcnVum1SxcQ3MiDj2mkcUKgXaD2TrTz2Ol/5jTdpnrUxZewZGwknp0O63RP0sbDhtZrYY79o/H8WEX45Xo6X4+V4OV6Ol+PleDlejpfj5Xg5fvH4pahcRUHI5YnI+g2GY7wdFcdc4uZNSW3dqjA8nbKwscpnDyR2Wh2SRkVGQ5HtsjIa9YUss/iERGZ23WjM2pINSZkF2Tj5b/70EbdvrrJaF7dkJ93Gcc64mBwTRSIj1O30ef3tW6xtJDSbonK1u7pO7s4Oqi3+/eCTkHwmxzRoMu2JbGG5UWRxpcajpx+SpOL/fF8lTRNSmT1cWd5lOJhSyq8ykhCOlbVlJmOPo6M+ednbY9lgFcYMRqIyt/eszdraOuPuiEJJzKU3mhKnU5YW12idilv+aH6I73koqriVt4efUqsuUC2vMpmKZzlahpxzh/qmyEQcn7TR0ckXddYWBQmEHzd58uQJjfoSJ2cCPlgpldne2mAmG3rHowtevXcHXV9n0BcZ9np5gYXFTS5bY4JIvNPm2hJ7e3uU7olMZLd3iWEmJKmHL5uRG7UGw0EbNfUEnTNgWSmNJYtMVsDa2p05QTLkovscOytM17SzfPrxJxSrOonUJIjTiIvuKYYhnn1ydoKpV/HdCVd87Z3RIY3qLqZRZCC1duqlbba2QrotKQA9mtKd9cmVLc5GIqtRyS3y4OP7lBqF65JxqZxhPPAY9EU27fadd7DMG/S9MwZ9YcOKojCctsjnVlldEf1i561DDOOMJanL9q9/+M/JZkpgO4xdke2xcgUsJ4+um1iByMiG/hRVixiNRRan3rjDKIzQMhMiCbeJVYdY93CnLQxJhRr5BeqNAp2BgCUouSkPHgXoRnJNTBHFHmsrq/h0SWTuxRu71BsrXMg+gnzRxsnt4pSyHBwKKMZi+S5mzidfLPPRz/4KgJ27BS7Pzgmnws62NraJwzlpxiQvpT6e/PznvPHm1xmnwqaN6grPZ+cs393h7FjMr1QqkSox+Xz1umo7nV/QvDhENUTFdLGxRX8wJkoc1m8I2OXTx6JXoLK9wGdtUYE6cTuM3TsMHoh1+v7f+g5PHz3l+UGb+8+PANDcHid7j3nz115na0sSb3Q1igsqeiI13gYeg3nCpz/pE7hSJ+UiwY3rGGpM7koQ1Q/Ze3FAKiGV084F6xWfu1s2X/+73wKgmi/z8HxA93iPe1JnJudA3ze4lHCU+r0czswgdROyociwW5kSQTwhazt0ZKUqiX3q1S3GE1kiMmMM1SbwdGJN7D+FhNif4zgVAkkHrekGaTzj5vqmmO/FJWg2qiH2E4CGQymTwVI0RvMr4UaNJAHLlnpHjkqtXuCiFeNKUVaUBEV3CN0YSxLtjIdtOm2XRELkVA3SJEWJ4PBU2NSD+5fMZjlcEhxVZCd9zSD2Ur7+TdGneLJ/TApoekosoeUkOr47ZeqNCBPxfE01BD27FC313Jjbu1tUKymX7SuNmTpRGmHL/tzhPOaVr5T4xm2LH/6pOJ8y2RKaqkGqMx2Jyujm3Q02NxsEnqQzdhSiKEE1QhJZHWkUc7z7jW3+l382RFUkZFPV0E1RbQFQ1Bn37n6bf/XP/xJF9hJP53OiNCXj6NfQyMkkR3/gY1giS+4GLpYTEgcaWUlstLJQ4vSkzdzzychq5Nj3KGWzhLKZXVN8To6GWOYqoWw7E714Ov1ul9deEegOS3/KwWVIon9JfpIkEIXKtSRGnFrk8nU8N8aQsMo4
CGFZ6dyM+9MhnOnxDIbMHO6htM3af0z6e0VgSTnK1qhPorPvrmO3zx8QWx1x6Njd/A9cRmffbslDjQWNtw+fxTYRd98MFfI5dfIUk6TGdiHUhVpjOVrDzHC+eY4WDAzs4lKiWxLtGVBJQBcdpEyUriq6zKy+O7XL8s9vj50y6xn6HRKi2bsjcbGywWc3Qz5MEDYS9+7f3vsn/wCl0R99PqpkL7zKFSzTEeid/ud3vkcgU03SOM5V27sDhr3yFvi2C1YnRRkga5UrJkro3CFvOZw+blJoEkb0gJcByNsiQRe/H8iGp5jdmsgyfraIejLjs7O8y9I847E7nvcHT6kFJJzNPfz2ImOaqVIu2uCIDaORtDz6IbKn1Z090/9thZzWHLxs2tvV264zHj8QA1FHs69Xze+co7zAJhI4w7Lra1YHUlpCsZExfOhDDs4c9V3rz6tpC9ocNm8zdQfaG7XT/gRfsxRWON1BC2RKT43HnyKe++9eZyzXv9Cev2ZbI5IcNfuVHh7otzUquLK1FF03GG22/vcefzn2GGQq7rLYswHZMizt7oTONr734Vd9TFlTTy62+s8PzZM5z5gta62Jtcq4G3GPLqpdjPQrnBSrZOtZplPhFzrzds7j94SblqgSZ0wnAudd+vGK9rrl6P1+P1eD1ej9fj9Xg9Xo/X4/V4Pf4tDCVN0//vT/3/PLZ2VtI/+G9EitxxUxLVoVRuMZ6KyHk1v0scKZx3B3iyH8964wqGPVxGhLPmFtlsnrtfPMGZCm/65q2rGGqdRv0SDx+LtO71G7s8e/aSXdkVNgg9zk5GZPPKkj7VsFS80McNY5pNkfJ3piMSdYCe7ACC/lbVT8iYNQ4PBOxJN1IUdK7svkUkw73t9oju4vtsNL4OQCFrspiPaLWazCRm/+DwhGIxRxRqeJ7w6OvVXUH/LCNnipowXRygG/llvyMUl2K+SjZT4bwrsi+uYxBEIwYTke3JF6s0apeoVNY5kCny1XqZWJ3RORd/K5pHxqxgGiEnByIyiH6Mbuc5PRpSroioRi5bZD4/o1YRkeXYt9naeIM0iPAl5bhuqqDkmc09NEPWndWqGOk6R13xmdZqAyVSOT/tcvO2WN/OacD6Zp3O4IAbN0QN1P37d/ne935OrSGi8JpqsrpWYb4Y0WiIFHVW3+G8/5B8wSBjiv/NJgFZu0yUyt4bno8bd1C1Bb1j8Zndy+vcvf+IiXOE5w3lb63ipBEbWzL93ffY2apwftahKOmMV5p5ep0FimWQyPqU9tmQldpbGLKp8P7hS8Yjg0JZIVmY8ntFXG+I53kEss7sW9/+iFF/RhSISIuWrTObH3H1xirjiYgSRVFIpdDAmxm4sglkQsibN77OLBB77DhzzFSn203Y3BHZmkFvSOKmtBolUlPsw4vTZ2zubBLORcT7/NTDytaYjg6YiwAYV29bqEmD4XhApydZzeIGjjskXxC6wrZzBE6MoWfxQ1m3Z+VQ4ix+OKKQF+9cqVTIZ5ucygi/gkan0yGMYHNHyHAlv0saZ4jkWupKlXxF57M7f0XGFBEpQ13h6OSYjK0SyRoaUynjBm2yEtoaBBFeELG7eRlfRg/7vSF/8r8/4vf//rv02iK6nM1miRmRxDLqb5roRkrgmty4KeAvf/iH3+P0UMcuWIQXn0NQgxsyKvbbf3uTKDXIEXPrtojUCVrbMru7JU5PZYRt0EDRSrRlZLndPmE8HjPsT5lNBEX+X/9bu+SyRU7OnvHhR+K3nHGDP/jP/3vKJUljmzqQ6oxHLn/wD34HgD/4R9/gL//0IaoWcnog9u9/+h/+L0qlFr6kEoySBZqmkUYKk5Ej11hD1zKouo8meYHjNKVUyuN7yXJd0iQkilI0VcKVtBDPD0liFV2XGDViQEWTsMAwTFEVHU3Rl1mbNIlITEVC81L5OQHJC0OZKYsErbypmGQk9LpcatJa3aJWr1OtCfjL1vY6zWaNUjkr9zOPH8aMJmN6PQHXOO/2GIzGTMYzTiUzXxylKImCJev/cqZJPmezUitTq14wltUolnLkJYzFyBropoYEd8m1U0nJoOnqkrFQU1JRg+VI6unZjMViwWg4ZzQSEffxeIzv+4Thl42UdV3Qz6uyXs3OZSkUCpimiaZ8GfcMowTH94iDixYOMZ7nLXuEJUmCrgtK+gsYaSYrGsSqqsryjlcVwfqZfEn3n0UhimNRxwWiz5mmoGjqEiGhmxo5NbeswbJzOSw7g6prmLZYq1TOIwlmeFLnuc6EfvecQNZgqalOGmnolkNGu2ganqHassnk6jx4JGDylcommhUtoaA5y0RJIQzGF6V2jGcLTs9Pmc2mXCSJojAlCGeYlsjCBUlMqkyJw5B5T8KsXJ/zsx4338rw7tcEmuTk2MLxSjiBuLM102Y8PWNFUfnGd0W24MlDh599HJAUdQxJPx1nbeZjnzf2xPvNnR4//1hj93JpSVs9WiywVQs9q9IZCjvhr3+wzm++l+Gv7gn4lG03WKltEE5H/KN/KBjo/s9/9Y85P33Ctasi+zOYLohCoc//7M/+FQB7V98km80ShC6jodBvrfUq46HL1rZ4txevHmJnq9j2iK5k+KvXy3R6h2TtbfxE/HM0OaNevo4bine70SxhVYqsrrzFs+eyT1loMZ94tIcv+cUDYU/dWr9Oba3KNBHU6Htr20ynWQrlJgVdyN3J+XNApVYFt+fKOagcnjpsbYu2BLff3OCX9x4SxzErDcHoGUUBjx93qNcrFGXrh+nijOlIRZNtAVIMcgWV7lnKxo6Q17OTNjevvsdJZ0StJgQmcBPKKz0Oj0QPpqyxx2iQ5exkxJXrAgmw0qhweHBKs7GFJ7Mt81mA4w5o1ESGzwn66LrPykqek0NHfi9PEM3odMbkC2Lftzd2aayW+PhjkYXrj/a5vPcOk9k51bKAE3qOSrVmoGAym4nz9/z5Y4rFMpbsTTmfO6y1dqmWVul0BQRe02OmU4dsNrPsh+X5CaGvcvW2yMy9fPSEVusGmjnm8b3PxZzW32W6GFG08hg52coiLTGbP2IkSz02W2sMuhNm/RmbG+Kdb9x8g9PTXzIZifMfLs6xcwaHLx+QzYq9una9xhvrNqZu8ennYu5h3iRKIgzZ8uCkd0poFCjmS4zGwr7xvID19RoZs8yZhFRqtkecpiyGAs327fd/m9bKmLvPu2SKYj9ZMbwSAAAgAElEQVSHR2PqGy1myYCMbMUw7i0o5ZtkcuIsTLsZVN2j056hqOIcF5smqW1jaBrBXLYPiWxaW/DyqcjwZa01bAN2Ll2h15fohHhAvxfTWt3AkRl8XUv4z/7OP/ksTdN3+X8ZvxawwDgJ6Q6FI7Xe3GGxiOkeedQkVEHPpJiaTtLvLGsXgmRO92hOviCK0DNmncV8zBtv3MaRtMDOfEysRdhreT56//cA+NGPv4+drWNISM54NsCLhqxWijRqAhaUsep8du9H5PKCdABgpdkgjkRDQABF6+KMM4y6LuubEq51Pufalbf50c/+D67dEIba3rU91sPvLvurdM5PKJczTIc+I
WJz4zjC932yVpVCdlOuSQpaSBSJ56fEqEmT2dijUrtoVBdiagGHR5+SBOJCs+wEzwFbUoK2Dz0eP/oh65uXWMheMf1eDbSE4UDWA6kJGWvGpd0NTs+EMIVpm3e++iZnacSLR0IB1+oOxUKOYk7CvCo1omBOtmxy8FgItGUWWVkp0FhZodsTxuOrV0NWq3nek7TrL1/sc3L6OTp5dE1ADtc2i5x2T8nnSzyVhCRBoGMXFCp14dwV8nWmi1ccHJ0ThRI7vXFOEE+59cZ3+OJToXzWN1qcnD3DCyUFaJCl1izw+IHDmtBrPH5xh8/vPOb2Wztosknij/78gHc/3COfEcbO4/PndE/b7OysUqiJ/706bhOTUsmV6HfE/uXzayTKCGcqG7laNT74nV1+/IP7hI40HgMFf7JCfdWlsSeU+csnQzx3wPvvfQOAe88/xzLz7D8/wZaY/cU85Hz/hI3WFrWGuMTz2W1eHOyj6UI2MnoOo1Cn1YpxF8KYRFV41X6AYm6jy2bcrW2bNNSQJXs0axliY8R4ErB+STgpkV9nNO5ysH+MLvvoTEYvwRrgyxpIc6qSzSuk2hkSYUSx0uLJwzPsXEJWEgQcnR4wnz2lXhcXjqlr7OyuYCqyBwEwmXSYLaZLKBH6K7xwi5vXfoPhVBCpTAc+9eoOhn3G0UuhuBPNZn1jD1Uak6Zp4rkh2VxKZ18o6Uu7G8AjNDVLQRLFjCbHpGlKxhbn0QtUwrmKZSXcfyjghNmCjqoVAA3kGuNmSAxwZI+bXmfKQe8V71x/m09/KeAMb7/f4MWrO5z0Ddaq35ayscr54IQoFvP2fZ+UmOGow5Zkg6+UG9x/8EvqtSb37oh3fvMrMzbWNxmOhMOg6waKqmKaOn/6r4Sx893fvcl4MmB9Y4WNbbkOVhFNM5aOsB8pBG6A43j8/f9SFsebfe7eOcCdVEmko9/v9hh2hySykLvnnmAbBTQ9JSPh2UQWmYxBnHgkkipY1VSiKF428DVNAVFL8JeU8SgJhDpRHC/lRVNVilkL7aJnl2oAKoIDR+5NdMzTgxeEzyJCSY6jYECiLiF4diZHpVZndW1teWaiJKFYqLK2us7OVWEgJGmEqoqeggCzyRR37nJ42uazO0LfuCFYuoZhXtA+m5TKOUqFPAVbnG3byqBlTQxDw5a1UqalkTHMZc+uXDZHPVeiuRGhKrIGSjPQFJ0giJjJ2pux7PvVlw7gcDjk6OhA9OCSS55EKapmYNoZCmWh82zbplAt05TP03VjCfmTPhL+Ys586hAEwXKt0FSiKPpy7WybIJMjn8+TN2TvuyQhigMBsZRGi++4zPw5QVs23Q3DpeOVkZC8bFY0fDazGvmsuKMLRZvWxptosl5NUU3iUEfVfWJZ3zSfJ0ymPRbjff7Xf/ZPAEEtrxCRkS0dfFUjny9SKzVoyBqT3uiMKA4olXJYsl2ClsQEQYCuSWIIVcUwoVQq8c5N0cJhY32N3qTHd3/3NvsvxTm6//kTNE3HlzWXSmwyH3TJl7K8+8FXAPjpx3+J72cw4y8bYQd+Suh7bGzKOt6Hc+bzPopaxA8kkYkGJDFR4IGso1vfbXJyeMJHt/4aAP3ZYyIdhpKqHWDiP8WNPB4/EzBkK1vnyt4bHL28z56ElY/n55j5PHMHCrKxqW3pZBo12lIHXt2+wmh8wMTN0JOBaDMbUyjVCd0ZkWz+a0YZCrqBPxdB2lPHYD7v8fj5Pufn4h5dX79FLVPCc/b5/X/v3xXPy9uctB9g+eL5j58+Y3fjtykVHZ4+EaQTZqaGP3NxlSEnp+Id3y9+l27wbxgfic/4kUGz8Sa5bBU9L3Tej75/hytXvsJkdszLQ+G81atbrG1mePJc6MnNjR26nTGu72FowhFtNW363ZRGaYeTE9E/0dQNyoU3UTxZ1hHOSdKIj771IX3ZeHc0CLh14z3Ou6+WjoypblKtrFNvCjk/76ZMxyE3rm/wxSc/ACBjNVnMQ/KZFrubwqmdu2Me3D+UwRlYqV0lDBJ0HV69EnNvrGwynvgMhwNU2cn8xo03GY6PsG1JZJYtcnJyiKWVKOTFPerME4q5mEotw8mRkNmsZrNxo0i3I4KUa2tlomTCeDhlpSrm9JVr32Tv8iZZVcOWcGlTV9h/+AMePBSlHs4sxrWgvnsZ1Rbn/+j0HikW7Y5w0irVTUy1wu1336FzJux2PzS4d65ghOc8PxB7vPpGHifOUDCFTVuo64SqStHWKBry8os99g+O2L2Ro1yWdoGWMO0HVOvi73vPfkzJep/xsI3uCANurZnj8ZMvKDRaeIZ43sibEiYWwWAu184jY9nUtlqkinCI5rOQYDZAVbPYUnfVszn82YzVDeFLjI5jclWbybRLhNiXQT9gOktoNl1Oz8SZXF+T7/Arxq+Fc5WkMSqiSDNfrJKzG2Qyc7xQGOyd42OgSCFfR03F5W9aI1obG/TkwVBmU4yMR6W0zUZTKLvFYkJKFiOX5fO7ImKh5CZUm2v0B0K5X7/1Aab5BW5wxEhmvJJoTH2lQoqPqYvnReGcyXRIuSQZf176bG/XKOTsZaFxvbjF/qs2H37wu0xnwihrn/Uw9SrFsriY9nZuMZ9PKZdqpKpsSJwoTCcDNle3GZyLz7nxE47PDri8J3CnqiayEX40J5H2s2XZLIIRzlxnPhPr4HoOKDqVshDoMBkwHk/QrRNcaUiduseUSgWGYzFH3/cp5LL0R68oSnY7M9jk7p1T4jBDKqPNCzcgUSPuPBRRq2vXL5PVtzh51CejC6XVqNcJkh6vDhcMe+Jduv1DMu+WSKRh+stffMr21Yh68QaqLDh/uf+U87bD2trasjar2+tRKa1yeiwzXq0supVnY2ODmuyYvvDbWGadF89OaXeFok6YUanXGPRl3Vv/Dtlqk1QN2T8UF8zl62vo9m0yepaVunjnxSimkG1x+Eys080b24yHI9RY4+5PhNLKFvKkRgfXiQhdcanvXLvMZHZGtSGex7nN83sjKhULD/EugeczGs5ZX9sglI2hZ9Mul3ff5fBQREwySUQ8X5AtGrSq4qAfzA6p1ooUCgXOJHtOq3lEvppnfCpksb7SQlPyjIZPcV1xZhSjyMbaFU5PR8QIua4uVBrVDZ48Fs5AnM65ees2aqjjOOJ7R5M2Tjwhrfk4FwxJ5Qn5ooG7L9SF4yXM5wbVZgbfF+cIdYxiTzjp9nFk80Hf98lnLdpn4n1V1efq3k389EsSmIQAOxdj54UcZDObvNw/4FrpPVxZk5DLFIhDn9kgRV8SFMBo0mM6EHOsNEJq1RazWZ9SSRh+gSzSXWkUSSUCejCJydmbLOT7Fgo5AjdiNpujyDosM6uSqiFR7BJLFaloJqYVMJXFsqdHLvlKFS9USX3hOP3sp6/YaF1iOvFoVsW8nHAKSoJ7ERU3QrxBiqKk3LglLtCHD+9SqZRYW28xHstMSxzzzW/f5p/90z+We5wj8i2KRZ2XL0XQ4t/80VNuvqPy/MUjcpY4754D5WqCI9nm0sRAVRMsy+L2hwKP3/Uecvn9y1xufoPxqXje
rctv8MXDe3z+8L5Yc/Ov8Rd/8udUKiu8eiXOzHmnx6IXoqsGhmRIS1QF09LJ5YRjk6YhpqWRpgmqvLw8N8FQXRRU0kQ2uY1VVE0j8IWOUHBRVZXEVC7KvlAUAytjkc8py15ipBEpMbqWWcpYt/eMs/PHS6M3UVRIVZJEXZJcZKwchWKVfFkYKOVKjWqtTK2+xu5lcWGnpo6hasvnL1yPyXjGSX9C4Mo+PjGoiJquC4KHrJ3BsgwyWfGsXM7CzhjESrokwjBNC10VhBNZWUin6zqFcpFsUTgRzY2WzG6FXzbG9QOmcwfHcXCn4qyNuxN8z1uSZWiahmXZ5HI57Iw8D9UctUqVXC6Hboq9UlXhXF0E2VzXZbqYMBn28GWfG1O3sE0Ly7Io5MW8MqZFvppZZiJRFTRNIwzDZRYuSRIWgcdikdKXhA9xHKFrNpbsTZMvWDQbLdIUKrJhaHWtgNcocevWLX7xsai//aM/+iMqtVU86aBkVR/HOyEeTNiXTsNHH36d3a1r+MGczU0h+3o2y/rKLnZGyEGpUKBYrbCxs0sqncFFOECJE0ZDh40NYSf8R7//dWbOiH5f/PZoMGE0anJ2esz3fiwMNz+tMPM6WJOEJBB6RTdsvIXL/rH47Siu8NG314nDZFmDmBCxcCJUI8KW8nky6qDFB+TnYn0XxyblSwFbF0QOwP7TIeX8Zeyy2OPGmsaLl79AjUyytnBeMcY0WpcoV0JC6ZhVV8p8/P27JJ5kHd59Gyu3xuD0lEuSXbIzuM90pHNt7xoq4jxE3oTx7Mmyye9PP/lzbu5ex1A1VmpChj3nEEt9m0trTXTZN/Tw9IBsVqdzIOa5vfYGvjPj1cNT4kjWG1dLrJYr3H8costMZyc6Y2/7A4o58fyf//JTtvrbVCoVVFPohPrKOmedB0znU2plWbuERhimZHNiTs+fnrDSKFNYKxFIeVFUE8MeMxifYpvCiWg0BBrjgizjyYsvKBVX6XTPmY0vzsOUldoWq41LlMqS7OTwGF1fwXfE33EUsLpylXr2Pf72790ScpAZ8+Mf/5BO7ynPnglW2L3rb/K1D7/KD394IP7efo+j9s+wrCx2XpL4jI4IY403b36N8UwEtbujAzKGucyK97ohcQznnVN0TeibQqFEmiTMRgk3rgn7cDxtEwZQlOt7cu7Tapj4+Ciynup8+AnzOz/m8HBMJZWEQJpFqhdZ2RDrqzsx7vmQpn0fTRdO2UQvUiw1Kdji7rv/6JSVvEqiVdja2AHAymmcdIZcXa9z+y1xPh4cfMLYtfngffHbZTVHb9ShWrtGJO3joxdnlEp5xlMHJxR6o6S3aFYz+IYMHns63aCNE8MlybmQKepcv7bg8HCOmRfop0u7NpNpiBFLBNPijEy+QMoITUI0Ll99C5uIq1s3mUm4TuBNePLyLrmssANbb9lMJx0ePHrO1Rs35R6n+P1TDvfHqJL1s38mU8G/YvxaOFeaqi2LyU9Oz/D9kPpqiVQezrxdp1DIo5sRw74wcs/OHFS1S146u+edu1RrWYJohYwqFnLmzTg7v4MbtGk1haBc37jO06dPyUoDfjzPomgaRHUcTxzq6ajL6sp1kjRgJov4q82QnJmjLJudLsodMhlIIjAUyfaUhUyuwMLtMZBMK1vrX2Gx6DHoCQOztbLKxnqd/ReP0DRZ1Js1yapXMBUXTReXuKmo3L59e0mHNRr4hPGYaqlFriQu3sePn9NsrBMmI5KLMumkShg5nLdFRKG5niPx94hDuNQSUdzHj17gmwGthhDUuTMgcIoshiofviMgeXc+f0E+n2Fto8DZoXienhkRe6v0RiLSe+/eEVqScmlXx3HFxds+yXLSeUkmD0kqDnqxlKPfP+Qvfiwb1W6bVKs7HJ8e/t/svcezZMmV5ve7Mu6NG1o9rVNnVWVJVAENFNBodE8LTjfbyCGtzWjGFc34R9CMO5rxj6BY0DikcYYzPc0eNlqgUQAKVSiUSC2e1i+0jrhxNRfuL8DF9G4WWKTvMjMy4l7348ePn/Od76M5FNA2L+hTyr/Ly5e7c9jK5uYW/b4/h/IoRJweDEjnA4YDASHxXJ1iweLFs6+wHLF+pXjCq4MzchlxAasurWKoBeKkRbEsqqGv9k5IlAFxqoiJmJc33i4zi1U0KZp4el6n4KxyY7NEsSCCcXea0G5ugDFgFopsSKPepVmPWNsSzx0zI50tU3J0sjvisHr5tIWuDzk9GnLjpnDw2zurPHz8BUVZFXvj3Ts0LhqMpzPOz4Wdx7GKO/ZoeFcYiXAi52cNDGvGRBZVup0+2WKAmqioilyH0yaTXp+llWWaV5IsxnVIpkOyWWHDmr3A/vEJqYrFRFYiXp720FI+kWcQRyLwLuYW6VwkBBPh8G1niutG0AErJZIix0cXOFmFDz96h15T0l+PsySxxlhS+TqZPLt7bXxvOA+EKwt5rLTJoYSs5gsxdsbgq0c/5dkjkUVd3cihJgqQp1ASDrDXneLoC1hZyarYCVH1DhfHF5Qq4tLiumIdX704nZPOLNZWqJQXOJXJbT/osbi8xNnZGF1eGJz0Aip9FGxMybAX6x6hH8wp5L0oppjSODrepeSI35v6HqNByNryW6RNYVOtUZ/xdEYgaXQ1xSGKRywvZefEDePBjMk45vTkgs0tyUCV3uDNdy+x/qWkp41mqEaA71tzsppPf/YEK7vK1vYNTONaON1mMopR5CU0iqfoqk0Yunz9mWDcWlq/zac/eYbyYZ7ZQKxVNI64bD7n5pbIQH/w4U1+8MM8i5V1dvfEwX/V6FAqrbD79IjzU5F9Pbyc0W0POdgTyR3TyNJtjQjCGENerux0hkATMMFrMXVFTVCUEE020KuKCagkoTen1lZVhSRKCGJlHsQnCSiJxjQW/lTXdTLZPPAbQglFUWTVTEWVJBcJEaPBxRxGfnAY4/shJCqODD4MRVRfykVJbFSqkM5n2VhZwJaU8alUCgKPmRvgTkWAMpl6DEdT6m0xl2HoEyUxacOZX+4cx8ZOm2h6jKqJ9dMNBV1L5jBvVdeIASVJfgPliyMMS6fg5ClKgp4kDuW/S+hnGOK6LrOZz0gG9b3jNkmSCMFl7ZqGXFTXrimPHcdhbXkFXTfmsMBEzrXneVzTBk1nU5RRf+6XwzgmJYO46wqW4zg4dpr0QgndkOeFqmPo9m+e03eZTHtEocaoKwLMlKPj+RGJqxBL0eBoEmIUwZesalpikdZT2FaaVlskXO69/S7/3X//P/Do6VeMXGELw86Y6aTPVV8ETYfHDYgVvPATkZ0E0k6WlJOjUDBJ2WJedNNANwoUKyKALlUrzGYr3Ll3n2ZdfPdKLU/pD2cM+yPGnvj+yJ/QaUwIIjEX9+7fZ+SP0RUT15VJJy0kY6SJ9YBHn4jM/1J+ATQLDCkifMOgP22zsHab6zHqHrK5skEmK3z3+cEBS7UNTKPGUF70Yy+gcd7Hzo6YSPIPf5zlrXc+4NVzkUyut9rYRoCj9qjl3gRg3LnAzvs06ofcuinkYByrx+pGhuN9YQf/7V/8KVGzTnc
6IpHQ4fbgCtX2SbyErx/9FIDq2hLdVoHioljz1dUap+ceerZKXsZFhhEzGHbI5SfoUjz+6PgpevlDthaEP59557Q6NguLKyATkiOvgZ02KRTW6fYG8nMq+eIiGUcyUtYsVpfuEXIAkVg/z3OxU2W63jm7z48B+P0/2MSwevSGoiqmGT6j6T7LtTfmsNVsNk+jcUUcx2QkwVroJdhFh1u3RbKz2DZxpwP29vbYXBZosJ0bt1j4s032D58wc8X+mwYx036PiiPmvFLJ0GwXCEOfyUSc9zs7G+wdf81l/WCepAzCGZl0jWZLJFxrtTUyMwVTzXIlBYnjKCCih5XKcXwoIXixh6qYpHMiJslYWZaKd/mdD+7wf/3rvxLrEBg0uMTzylxKsiolnBLrGrtTiRLZu+A//+hbuG6dXVdA1JeWLY6Ofsm4J/Z6rjglSek0W2cYuiQa6emUSgXaox5HUlC6UnqfzVp2Lu9xdHhCtXCTvZPnzPpir9+7fZeXu4/YrN7GkRfoi6MBm3dKXJ2L7x4rMboJO6srNMaS+MMo0T5o0xpPeGNBxHREFsF4xtKSrHiPTfQwIYhdDg9EtalfHxInFXZ3X5HIZJEbWCwULTSJMjptn1AtpCmlDdyhFE6vqyxWV3H9IyYjYeuW/Zv9+h8arwktXo/X4/V4PV6P1+P1eD1ej9fj9Xg9/iOM34rKFYmKlRY32byWYzKGy3qLQl5k2LJmHjXUuGr3CNVjAEyrSm0xz+mRuCW/cf8B/iTFF1/9I9/7HdFboKkxhgnvf/tbPP1GZCxevNjFNCPaHXFT73SvsDMB7sihkBbVs0rN4ezsgBs7m4zkDdsdWlRKZUY9KVRbdlit3ufstImXiKpGnAxx8mlaFyElmWFPWTPanSHVirhNz2ZNJq7D9vYNen2RFTs+3UWhTyq1xqAn/i7UfIZTV9AjA461zq0bS+zt7eGHIoNw99a3OTo6oNOOycrSvdsf4NgVCgXZg6UuofIcJUnmpAzvvH2DRIsZDEQ2yLA8hhjcu32X3kBkoIfDOm9vvYs3aZEkokSyVFvl88++5tZd8W47Ww84PDhl5/YqWiyyvb2Wx2S8g57uE0nRyxQbxMk+fpCW3z2l0ThGM0ISOZ/d1pR88XOiaIg3ltBEzWIwuCKTSUtDGZPJZHCsLGubwjY+/flzrmaHuF4XOyvKmPVWnfOTKcWaWJdIneC5O1xdzuh0RBY+ky8wmQ5ZKd3mqiGyGpGXJ5UGNxJZne2Nt6kWN0hmPkgB5FQxxu8mlMwtSkWpdxCUyGVjZr7Iqlxc9ilXDCrGGkPZdJ7NOiyurZIz7tFsiKzt6VmfXM6h3RZ/PtxrUyrkOT8dUl0R89KfDug2VRK6GKrIsKdTKv5kSior6W938swGMbo9Ii21d5wZhEGO89aAUlk0nZrKhFFnRG1NZMAbrTbBTMeJ01xI+YK3Nx4w8Pqsri/x+KGAiAWDFKbpky5JMgJPI+PkCIMRkSQ2GA7HjPp5lHhIck1HG5oM+jNyefHcvSHk8wq6rpOTVMz1Vh3fC/FDkXm18hEZM8NgMGB9R3wmCmISstjpiGfPRUYvm1vEsHz8UKxxqEzodiJu3rtFuy2qYEEgsoHVchEvEOsXByUaV2dkZHN+NnuTvVenZNJpLiWH863KW2TTAaPpBMsUWcYZESYO0fW7BTq2btNzh6Srwl51p8FsPKBYKNMbCdtwgwF+5BLJfgvP81HUPvcfVIilmHSpXGBpaYdmfcIn/yia3L+wHvLR926xvC6ya+2rCuhNVJV5f9qzJ03+5M8/IJtP0zwX3z9xpzh5kygSe0bRRI+N7/vcuSlkJVKpCfe3H2BqJRZvCHs5Pb+iurFJoyEyoV//skk2bTG4vKS2LDKDy0ur1JtnfPjtGr/zbVEVbod9SEzKeWFjD79+SRhoDHo+p8diDi7O2wSxzsH+KR25NqqWIiGaV2hsO4Wm6FiWhSKrWZouqkCKmoCEJieJQhQlKOq1rlZIFHlAjKpd6w2qxGGCrpt4skKqoKEaGSypd2SoGqoiBOyve2Hi2GM2crnsC1s53X2F53lEJPPeCcMwsDIL5HIZSnLd8+UiCyulOZxZM1IkCsST2ZzkYjx16Q3GeF4wt/XrapuuXcPMDEzTxLJNHFkZskwNxVBQE1BikWFXEkCJ59TzKBGWbZB2UvNqoYqA6qmqOte1CsMQz/PpD8T7tTt1fE98RxRf98wJSJKua+TyEoJjWagZG8eSsgSK6DvyvNm8mjWaTRiMh0RXTRJZ89I1G83w59WtSmkNO5PCylrkcsJ/J2gEcYBqJXzwkYBZ/fVf/xV+5JKoUoYkmQIp3NmUcl7Y6z/+3d+yuXaL7nCAnRHfpaNh5VRMKZeSXXLQjYTYi0Bq6HizCd6wx8nBiNCTFPWJhxeAKYlbFMUjRRHf9HFkBaOayeKmHaqFNDNN9Mwq/hjlvkIkIc7T0ZAEk5gJjiN7WtIhq9VF9o72uJDUzZryJt1Rg5msuJkZlSJp+hcSigCslVZJp3xcSTSkeBati0s2b9RotkT1ruDU2Nks8fnnL+c0/XGmi5XYtCcCOqz3DdR4xPryXS4boq8mny3Q742opheYdkV1oNdqsphZ5DtviX28ePt9BoUG31v/iETCIH/19BdsrK/wxU+7NGzhW9NOjvPJPht5ETs1L1uUCnm8sMCwK/Z/plhhMO6BusKG7JnTNZ9UOs+TXQFRX6guUalm6fSvyMvWi0brlJXVCkqySDYvKxb+IkmszmF63jQmikY8e/WQ+zdFn2unc0Ucd4mjhFRWzOnx+VOKxTxHxyLuU5WIjF0imE2I5L5CMXFnbTIZk70DsVaVSgHLsji/FHPX703QlCwXF39DhIA//M3fjkkCm9WNNGvrYt0tRSOl2CiqoPt/+XhIsVbl1dEvCWUcdHU5Y3mlxuHeS9bWNsVcKRUGozPGEzHnQTgmZVTpzg5Iy17Gfr+PaSVsLd+i2ZDSOUGXSqlMty/8TSmt0222yDsVqo5se3ATckv3MKMhi44gvhiPTFS0eS9s536Nx+kU/uA2M4SfaDw5JG+tsnNbwMrP90+IC2OSMCGUtORhssfpSY5cPk2uIj5XW87TGe3i9sQeSikVlLiHN5yxvih6GY9Ov6FWWSFva0wlVNCb1dl/NsaVfZm56iKPvtgnmykwkLaY+Fe0Axc7bdCSbRaGZ6AYA06PpbZZoDNUxihWQkZqkhlOxHR2SSOYEoyEz8tZq0zJcNIWd4mdpW38+oiSvsbpgbDh2mKGTMHgybMrdnYkHNT+jSTQf2j8VlyuzJRJuyEeVDOq5ByFwE0RuqIkd9w/Z+oNSGYG731LlLH7nRndy11mXVnaG05xRyr3b73P6b7AQn747fe4vGxy9GpEXqpU2+ksk5GG6YiAoVzL4o41ploX25G6LHWfXKZIEHZYkMK3YVhhf/+KYlkcOHpS4bJ7RGs4xpDCpqbh0PUaGIEAACAASURBVG43MW197uyavQtWb9hMh+I5M45KqzWmr5ooutjUhfxdnGxEtzPBLl5vhCl6CHZGXu
5mHRx7jbt3HjAat+W7pAh9g50b69y+IWCPu68OefDgTdSUeJeHT16ypG6ScTQSeRAGYYBpt7jaFYZjpSFtpeh1L1laFoFbdSVHqJ3TvIhZXhUYVs+d8vHH79OQulCnV6fYZZiOQqJAGLSXDFi/ncY01zk7FWXs3vgVZpLFkAKX/f4F5WqGKFRRZTNiGJio+gzDzPDtD34EwL/8P/41GztpIkOsp2lvsGCuMQ1P6I7F+y1tVFkpVvj5r59wLB3izlt3MdIa9Ya4KG7cdLga1Mms2rx3R8Ae958e0RpecXnxjMgT0cfGzjJPX12xuigCx4zhcHGwR3NwgiObLbVYRbeOSWdKJJqACvSn56Ts3JwBZ7GyjTeLOT1uUV4UxCLjgc9i8Qa10jITKdi5s75Grz/EqEl4SuwzGHo4uRR9SZahp20SdYQ3SmHLICXSdQICkEKc9Xqd8TDh3v0dNBmolRdyuH6DxJ/iToXN9mc+ZrrHgmTqsm2bMJpwVu9gGeKdC9kCmuaybLiYS5KEZTyhHmcYNIS9GnZCSvUYdsYoiwJGli+UGLcs6s0RqbTsKSFEwSBKxD5OYg9vrBPGCu2BuNBWiktY2YTOWFxoR9MUx+fHJMqUvC2+ezgNiEIPXS9y+464IEynMyHgLSPHXH6Jna01grGOqQtbXF0TB1257Mxhs5cXI9JmiUpxE4BHjz8ll83hzzTKBcnM58eYKR/Vi4lU8ex6ohORcM0NEM5iFNVE17JzIdxEVxj3E6IkR68nDgZ/OmbsuYwky2HgQTAbMZ4OQRF7rVzJM/NGVBc1CqfiGb74bI+7b21y5y1hY/9+7yXVWgE/mJLIJk8VhRePOnzv4xuc7grbG40GpDMbMiAFNVEJoimOU2QSSwaq2RXvfu87vHzaYTYUe7k7O2LJ+R4rC8JvLRQz+KHL3tE5uuwfO32xT6Vq0G0FRFLM3dUGNLtXrK9K6Kka8db79zCMgNlEMLKqiokfR7RbE6yU+K5PPvmEdnMCsfjzwe4Zqgq9/oSJ7NkbDSeYegrdsOc9SClLQ9MSTFP4RdtIoSgKs9kMebciISBBiAZrUgSaJIUamyjab9gKE5iL7wKomoaqmKQkpDIIItJaCj+YEkfXpAwR3uCUq4nHxaHsa4t0PGI02bOXcyqkciaWnaFcFv48U8xSLjvYmYKEQAoiijCA6eyaoGjGcDDm/KxNJMmOFEX0NymKQtqS/Vu2hW3bOJJQI52y0UgwdZ3omglDtzFSHlE4JUquL50aKd3EkeLOJCqmps8hunCthRUzGY2ZyB6vdqOP54dCyFgOVRVsgtd9Z45jY1kprHQWJdHk+6moanouiNztNaAn+uQUKXKvmiliJSCXc+bweiNtkiQKioQqK7pBEE0wDBVN9o9d1btMp1PGkz5ndeFL4qmO63nMZH8jcYJpWqK3TUIhLdvGyqvYjk1KQj3tdAnD0OZ9LmEYEwQRxkxh1JOwoKBPHE8Yj/vcvCWCKyO7hBdOUCUAqFap4kqfbEobIgn4yU9+yr/7t3/JR28LZrwbtxb5+qtnqJHwy4aSpt7psZxrzed3/d7HNE4fY0o9ueJWjqOnHZxRne1Fsa+GrUt+9bMDgiQilRGf0/QMvSuDTFGc2TMzoqDWGHVcqmUBY3KNDie9GUZmhuGIOV9ZWOfTh19xFUqY9bOHREOo5b7B8cQF7HD/GfsbH6IvReiI89DwLd6+9TaNpvCB5dIaz57s88F33iLhWjNzRDqtoqsqliKe8/b6HeIwZjAQv//t9/4T9PSAw6MT0hnhA8vlMsFEwXJ81stCS/DZy1+DmcORfuTps19w5+5b/Mkf/Df89Kd/C0Cp7OCHA8LYYElC/s8vd5n4q5RkEmjof8M0VumfDFgoiDN61G5TLuYI/RHbmxKeXSpyePiIDfn7+ew5drZKs5fj+ELMy+b9DRbXbOoXTR4+FwlJM5UhVgNiVfjcybjN1q3fxUhO8BRx4Zr5A/r7JZZqa1SlEH2zPmU0bKIokiVaCYn8Ga5nsSoT9GuVHLc3P+b5k4eYCD9/ctVkZXmbtZJY98O9DgsLWZ4eHhLLWDcOdEq5Erv7Z/Qk89/m6ionBxcUKmLd+60Lhv2IJAkpl8QlybbLxGqK6UDs9Tg1odMySGc1saeBIIyoFgsYtk7OEXOsGgGDrsW6TIQP+iOOT3ZJJnA6/VK8y723CHsD2u0GmiL29rh3hZpfIZMS7zJsdkhlIFTGWJLASg8d8hkfd6yxlpUFiKrFZNrn5TfiXcqrGdxBF9UtMfFkz7y1hDfxMXUdR/a6jkcDnJHJj74j1jidrfH00f9Lszlg8a6w82y6jB9E3Nh5l7ffEfv4i5+J/rp/avxWXK68WcRVXbz8jbs2mWyB3mDI+ZkUMU17qGhs39qZM9cYahbHrmBvCWeWxCbd/gVWNo0nnevjx8+olBe4uHrF+sqm+K2pQbWaRlfFoffk5UP++A/+lF5nxOMnQuDuzdvfJp2xefjka3RDGEalkGZtdR3Zn8x47DMYDrFMh9FE9mWtrDDom+QKylyYzh0bHAxcBgMRxPS7MW8/eIdQaxCJfUEcQ6GwTJJMyebFYlqpBUq1iEcPxUasVldIYoPziyfEkTAw21zgz/7s93n0+Bu+fCgW+s69FZrtCT33FwAsLC5w8OKYpGbxxt2PAPjx3/97VjfzbEs6+nrziM1tm0EzQ/1KihGmC6xtO0wmI4ZTsTarKxV67RZLUp09n9ng8nLA3tE+lZJwiGEYknYMOr0GhinW4eatNM8eNbhxT6qJO8toRkCr1USXwqbL6xlS6iaXzaeMfZHNipUuhWKVhGuxzBKG7TIcnPHJT4UdLG/cRIv7eGGbgqS/bba6xH4GLRBOpf5ygpYPWd9apF0XNnW0f8Tb730X3+sRTsV8ttuXbN3RGTWv2bssgjhkefkmVk5sWN/1WFv8LtnsIr/89HMA1jdvcXp6QsYSvVTFYpF6a4iBwlVDONve1RhFMThsfMV1D0LsNRl0XOTrEZQ0DCeg3W7P+5TSOY1ZEGCkI0iEQxyNrkibeUYdyRCn6jiWzu7TExYXRdb27t1toshg7NaJZjKYS1l4bpqTK2GLgWtiGnlIonmPzuHhMRk7zZdXV0w9cQn0kzyzWMgPAKTSWS5PGmRzFUHoAgy7JkurOuncEodnIqPn+TMKRRtP0vvqeoWJ10dVdQxTOLYoMhhNPDxX/H7dbeB6U2wzjSVJS4ZJHdvSWF9b5tlL0Ws3mkxZWl6cZ8WnozFXV1eYhsPSirBP1RDzfHbW4O49kVnVVhR63SmRZP28cesGidrj6GhCRjZw1mo62UOVRjcgbYgLWpxE6EaCKvsBppMRk7HHjRtbLCwsyPd1OT0c0+3OSGJx2VGjGWPPw/OF/bSbDbZ2svzoD7d59ljstW7/nO9+9Gd88/Wv+NZ3NgGoLmRxUnn+6I9EMulnP94nCEKIU/MLgma4PH18TBT9gI0tMVfptEUcxySI4DUMQ2zbZtSdMJkKP1UoG+zuviBXUZDkeWTTawx6HpORmLNiqc9oGkPiMWiKZ
885Bbq9fSx9jX4oeiV1JU8QzWjIHoGVlTV+9fnX2Gl9TnFer9fRTZ1bN94llBWS+w8qpLMlVuRa5dK/R/2qw3g8ZfdYBMvd7pTZMIfb8Wk1xHwenJwy8Qy6dVnFYUZCgGVrkIjD37Is7DQEwQzTkBU8VSdOvHmlTMchiAMgQIll0EAIygwvFBsyjnzCSATbqnpNEAK6rRMlNlZOSnfoKj4xcz1mhvQmEfrwnPqlrJgECWYqTRwJIiKxVmkyaYdqVaxdNp9jqVRgY3kBW9KQx4rKzPOYejNGA+GHZ35Iq9nndCoz7hGoSoKVTmFI1j/DsrDTKpm0TSZ9XfFOoWohiir70JKEKELQvF8rG6Oi6zqpfA5H3lYrmoqqBPMqVZIosl9Nw5PEKX4QMOr08U9OkYAFYiVG0VQU2YifzmTJF3Ok7AxFWXVP1BhVcwjDhEJBUq+ns7iuP2d/jOMYTTVIkmjes9fvTRhNhqwsr1FD0rrrBq43I5T0/lEY4k19xsMh05kkmLq6JDhOoekgc30YKYuU6cy/O52xsdMpirk0zsq1oHyEZljESUIcy0RJFKLEBoqkHHe9GaZuYJgapyfChv/q3/4bzo6OScIQpyx6ZiPDx9ArICtn4bCLFw84H3vXi8D50a9Zqr5DT1YZj3tjFtaq+G6XjlqT6x6Szpd559Z77F6Ifspev8X3v/UDXogCDbOozWLGodeZomZEoH+4e8zaVoHzkzopU8RBg96EUlWncSjiDVNbJl3t8/nDX3J3XbDZ/uAv/gUX+7/m8aM+b729CYBuWbx80cApivXsj7qkMnBwsMelFK9Nm2XeePM2F+ft+f4bDsfYdvAbYePeJcOTLulMhoLsCVajNIZiEcQdXu6Kc9TOqkwjl6ms6P3ugz9BH7m8OmnhjcT8ebZKqXSfzuAUdyzWoVwo4eMylILP5dwqZ8c9yqVVVFX4Fj/qsrx2j5fPvsCbXCcEdaYDUBfExffqIqRcUFhfus3jp+IsMowVguklg6sxO+siyd2bXqLpWRRT+J/Dk5/w1ddF3n3nh5yci6Saao25qh8ShwZPHgqm2p2dHRYX3+fZS8FyWMreoj9osF26w1VdzOf3fuePyS68yb/49u/x9S/+bwBaV0MOGg3KlljPSiVHLlvm7OyMQlH2IJkjdM0miRzWq1Iwe3pOffiSquwf+09//8/wgzp///O/JEL2gXkQJSa2ZA90LxJUdYLnuTSlnM/CUo7B4IJ2K823P3xXzGcwwh2maDfF3HXaYwqZNXphnVjagaMukF0w8ZIJvY64rN6894Bmw8OXpBsLi2tcXjQxUwqxdC7lcpnJxCRyu5wdibXqPH3GnY1FLE0yA/aviLwUupOiLG2q645JpdMsVJdBldIdl4/IWw6fff4z8b6xTa9xQnX1jXn8v7d/ygdvf0S3+2sGHWFni+tyH/4T47ficoUCsXrdHOwyGA3pD7q880CUDhuXU7bfWsUwdR49kvAe65x+a8b6+iYAvj+ltpAnkzGZyY1x486GKB+rszkV43CYYNoKk4kw5o2tMg+fPGQwPkNLCaM/PD0gDEPy2eqcwrnTGTIetcnlhPE66RxK4nDVuJozj01G+2yu3yeJx3Nl9bSdZzabsLwksiN5x2AWTBl1JrxxVxjh/sFLvHDMLBjixOJCoBkzjk/3MVOSi9/vcdnoM532WVwSDv/hF+d4Xg/HTnMlKdTv3rpPoWjTaIqFnypwc/su27ct/IkIRLc3t7j3xj3Gcp4K5SGWnsNZ93Asselsc5mj3V1WlrL0h8KYNNXAdSesL4sMWJyEOMUORSPPUMKgarUyL18+YmGpgC3JP0Ivw8qGQTYtaVCHHYbDAWGQEMzExr+5cx8vGNKdzNjfPwbgnXfvMXGn2JZw3Jrpc3p2RHfUYm1DXKQG/RNm2RVu7uxwJeF1b93ZoLiyzdljcZH6/Jsfk1EiTl7FpOTt+I/+5C3a7RT5zCrWspiX43qXctFGk3StgT+ikLfoDvvorsjibG9U2Xt+RKd3SlZCd2Klz3vfvsfJM3Fp8Wcqg8mEYWtMcUVWAlYtmu1LdGdGVRKJDDtTHCeHFwv7CeIisZtjMhojUU+gFEkSmAUtrmkps/Y6mmaQSou90Gy6pJ0xuYJG/Ur8x37v12zerhAFBt2xgIgUjWX03AWGDHCXix8z6s24NIYMB8JJdrodHl/2+OCj72MVxDsnrkfj6jFvvfnHAJiOjz+dsVDd4vRKHHqTWYihlZh6fYiFU7bSEEdZdEkQYqdDes0Jll4iDkXA58djmo0hZaljZOhpdEWnkCmzXBXBiGOHKGGWXm/A6rJ4piCMGQ6H3LkrssjdzpDzi2NKFW1Ot32twTUYzXjxShyylmVi6Bkuz8XpPPRdwnhAyrRIyWrzWcvDLjpo+1NU7VqHKQQ1mjMKTsYhlmUTRxrNhvguM6XiZJbo9WdzcoXQm9IZdDEkg1m9XueHP7pNuzHgzk3RMHxw+JLJpINpmriTa3Y5k3AyYuO2uLB/9NENPvnHfUqVIr6sgqUslcP9K54+umBjW9hUGHlEYYwq7TyKPHw/RNM0VAryu6c8f17n1r08jiWCm9a0wcXF13z4wccAPPrqAt2csFja5uxCZBlX1tYJAxslN8KTGWdDcYgCk5MTcaFWVR1F1fG8gNqCYHGbTGwy+ZD942+oVMU65wpZvKlC4gs/1Rq59DoB+doatiPW6v2tJdyxwf231njySESL927+15wfDgk0sbbHh232XzVpNab4sgJ9enpKuz3FmyVzPbNc3kZBm7NNamYPK5VH1zJzVslIMSER1SkQhBNJLN5pJhkhARJ8FEUFRVbvxjGqoZLIbHMYuNiWhmmU5lXGoqkQxb6gqQ9F4i3ypnRGDc7PBcvpNYNmoipYlgjKUqZFsVilWCxSqEhK45yOvWxjWdcaUxHeLGA8neDJi8VoMKA7Cbh0O/jXMbsGdlrDdqSulpUhnVZIWcZvoNdxCIQoqAQzMZ9+FBFgzm06CCRUk5BMVuzjfErDNHVM3SJOxA9GoUIURPO5m4xdxv0hg26LS1+Tc6WhaApROGNRXjKtVBbPb0q4J3NK/zjR5lTzcRzy1Vdf8OY779HqCN9lJjpoKtc4SFUVWmSqqVKVGf0Ve5m06WAYNhrCV7rTCaE/nSdEPXdEo93m64srFmSz/Fvv3iWaRqKSKJM2um4RKrN5oJjLZRj1B/z9j/+eT3769+L9PJ9SoYg7mbJYFXvU93qoms32HZG4eXXwS27f/RHt9t7cxqZJj+P2E5Yrgg1uOPDRdZVklkZBMp1myqwuViB2cUfChnPpMoPR5Rw6XMkt0W8cUKjdYeyLmMfJWsSJy8ryJnsvxd+tLW9x4/YSrx4J0g0j7bJor7H0B+v0m+Ly8c3nX2MWDZR8mxe74qxVNYc3332bzyQt+cbGBsuri5ycHLGyIuCT5UKZQX+KlcpgWyJYPT+rc/feJi2ZJCmUVRIvj1EI+NUvBRnH7btr7B+ckclacwkcXS2wsFIkEwnyg5VyyCef/Jj+SOXWexJxkrlJr3eBEuuk
pZzI+tptjrs9JlI79fjFkGqtTDaTUJf6m9XqTc7qpwRRjoUFEXsmzj7lxTyy4IZq9ZlOAy4OG6wubQq70xP02Tp/+IMVvnkiktru5ArNKNK6EvN0/+63mHhDXux+Rm8ifGW1eIsHd77Dl19+ydameJ9ysYKih2TnrRAK+ewSmYUQXZ7HR3u7/PIf/oZyYYs7D4R93Hr/TY6uXs3nKYnSxPGYweiCQlEkFoO4wXBYolpZQJGkM5pvs7G0jNxqrG9u8NWjXXQW2KgKG7pKTkjpOUZj8dxZu8R42qTba4KsZke+DURU8hsEMxH7Tr1jFmopUrrYe5GVp1jRiGPYkXG7qlzQbk9RrRqmJRIgj54/JmMssCrnJNFmLC6s0Oufsr4sbCpIjjk57uCkU9gS4r9TuEXntM5l4zoxXURRfUIvYKEgYpJCeonZNOTi8jkbNZG43Fg0UZwWL5+L/b+1tcm7qwt89P3/gtAQz/5/7v2PPP7yE6Z+wJklzieJtvwnx2tCi9fj9Xg9Xo/X4/V4PV6P1+P1eD1ej/8I47eichUEAR9/LPps9g726Xba3Nr6nXnDqe/12Nt/iKmuzNXfVT9gfX1d4PaBXMnD1DYZ91Q0XVRR1lZu8Vf/7m+oLqmkFJHVWLhdxDYXOLkQJdZmp0Uma9HqgpUW/y8MhxQzBYbDgFxGVpL0EHSXRJYqA39CHKksLpTR5O2WRGPkNginbSxT/F2lXKPTHjMaiexatphmOOpgpbZpt2RWs7jC2XFDaJ7YIrP75a+/YWnNYXFJ3MpTZobziyNsszqn23558HMGQczLL01++IcC53rvzjoPv94jnIl50XIGlYU87rhCiMj23r97g2bzct5Qv1Z7k8GoycJSjrbsqxm7Z2xvrNOZvmJjS/R91Ot1MvmEl/ufARAFE9bWa1y1G0RI4eZTl1Kphuv2UCIxn3E4ZG1tjVimHRUs7uz8iHbnjGpVzO/pSYN0bsLm1gqTkVh30+lQskokEptupRXsTIpV520USS+c0yeYSh+rPKMrm/pbJx6PH/0dNUldfPPeJrcXb4KhsLkqhAafv/wxw1EHx75LGIrf6/c9Uqk8PZkJHU3qOC602pC9L6ooP/6HH7NcW0VJPMplkdmxcxle7D4nlJlPO5th+9YKv754hSuz6SlCFhczoBhoifjcwkKB8cglY8tKVl9hFl2hp8GROOnBaIaiafh+MhcRHXs+4/ExKfOasnaGZtskWomzc5FlXFtf5Op0zGAY4SWq/P4WpcISqibW5f7qAsPOQ0jq3FgQa7xVdnC+/yGnlwNmYzEPhUyEtfYGTSmDkPYCUHJM3YhYEicUiiaWXmEaXGHJChBJirXtEpcdkS189uqI2+s7JIEtYFxAHMBibQlVk/aKT7VaZKG8wrAvCUnikPWVEi93X10jKsnlCqxv3OXxYwGfUNHIZdMEswGB5A4OEqmbkV2mUhFVm1bnkMkkwUwJ16crJqOBhpky6A9EdiuVK2NnXVQtBln50FSNwJuR0kW2O2U6KJrL4eEl3kzY9db2bVotHc3Q5n01gT9j7A5xW6Jyns1ZoLVRwvvUr8R8/vM/+lN+9cXn1ColnJyUiBheUCyUGbXFd9+5X+Inf68QRGOQEgeWlWeQdHn01RlvvSeqYJatoyQJYSgx+1pC5AdEcThvWm40L9nZuctk3OTeTVH5Sxll3OefM5PNwTt3VrmqP2bihdy8K5ufxwPabcgXF8hIYWFT1wjDmPt3BQ69XKpgGDZXjTPOLiX5SHYVTZ8wnnaIpDTIxvoO3qxDX2rttZodlpYz1M+O5zBWP+6Tyef56pcvOTkQz6WELyiWapQk1EW1znj/228x7qeoLUvtm90DfDeHH58z7AhbfLX7nKuLIeO+1ChpxfT7XSZjD9uS0C+ljq6bpKU2jq6mSJKEJAlxbFGhgYjI1UCfoKrX8LMIRQ3n8EJdKRL6I3yzOxcDnvkqiaqhqQaK3I+apqFbCnlH9hbJylASxajqNXnMkPpZn7PDYH5ah7GKoqbmMghpO0O5XCWdTZOVJBRLqzVMM4WTys97oAQVepe+rFL77ohGV1Q2o7nunIpppbDtFHZaUunbKdJmhJkStq9ZOqaVJgoC/DmZwwxVhSAeoMj+tCCYkIQBKV08p2k4lAqgqw6kxF5TtRhvBpGfw06JeVlaXqT5sI2Zk3ICUUSMgqr8RjxaUSMm0xGu62Iast8oFTGbeai63LO6jucGhH6AOw7n32VqKqomILQAhXIJK52mJhXmFU1ANqfeTIhWA5o6xZvFqKpOrAhbCAMNJ5cipYjn/PKLX/Pjv/5/GPQ6c6F2004RhiGWZdFpHAvbawbkK0PO5P73Y4ez/QNW5FkI0BpcsFGr0fOE78xmApLYolzbInSF+PHuy+ekuhkm/YjsgvAvjrPOi+dPuJAVoXvvrjHoRiTKAF1KDngTk62VBzi5aK55NvMGzNwaPYk6qJJi4gUMWgeMr8Q8lbdszo4e0gsD8orcf0nMyVGT25LSvdms48+m2FkFXwoLu+MJtu2wuJDm+ExUoH/w8cecnx8zllWclFUmMUccHvZY2xDVmEFvhqqFLK+UOTgQnyuVs5yftehLSYVIX2D7gxs02iPUtIjx9HTCQrrI7m6DalnoFLXqDQadKRsrohVi++0dXux/RswIQ9sEYGOpzIsnuxSrZYa++L3puE8hu8RgIOK+6TBgGvcwDWcOSQ/CMT/76lMO994gLXXJuu2ExdUshZJAGVw1umys79DsfIUjoZhKFBN5FqVMDVUebP3OgPPmEwxdvEsUT7DMEoqbolwS+/HNjVuM3ymxd37CoC4qpGtqiOOE1DXxe71WxNpGgdXVCuWS2H+aUabVuiQIAqrL4hmCTsKDe9/j5ETY5l/97f/Ck+dfUna2cAdiH9nmEp4XcPxMzPk7b/2Ae/fe4Jef/wMqwqZWlmqcXrQZeLt8/mtB/rFYu0G5lOFcxtrlSoHBYIyja+i6sNerszoZZwUsj2Ak3q9QKJFJQjzZV285AcNBnVJmhdlMxAROPs0b98t40wBdF5WqvWcvyRhVvvt9cRblcyVeHj5FSRlMJI29lffYKBdp9ftcnYrn2lhbpusOMBXhz5snD4nUBeKvP6E1FVXNje0ik9mM55/9isQWz24Nxb74p8ZvxeUqZav8+gvRZ1Ne9ilktnn2aJ+dW5LRy9Gx0iVm7phCUTiNIMyhKAma1LTSlSyg0x/tc+um2FC7r07IZxeZjhuEinCu2YzG6dk+l01Rgk+VcgwnTSrlZUxZKvX9FIapERoRE00s3Mjtk8mkiXzxmcGsz/273+L0fI8oFp8xDINSJYtprtPvCshWp3vJyto6k4kw8P6gQdraYmmhxv6BKH+XylkSNaBS2abbEs5ta2cR054yHYsFrw/qFMtlzHLA8ZGAed1/UGL7Zonf+/42/bZw8K/2vmEy1nClqGAxdw/PHzMc72OkxQautxMmbhdLFxey3qBOt9/AcnbQzeuN6NHzT/DNMftn4l0G3Tqbyys82PkuANXcMqEf8nx3D3SxEV4dvkQ
pD1A0B10yrVn5mP36PmV5iXC9C6aTJZYXV3i+KyBHhXJC4NawrDKNoTD6SSNkeS3H0ycCurC4WCBlB6TTJn0p+OiYDp5qU8plUNckfDHqc2+jzIYrnN9O7QGfdtpMZi1+9StxCdxae4edWzWePn2OJgkQ0CMWiuukZJPo3qFLpz1EN1J0emJz5goVekMPR/W5Sa0OOAAAIABJREFUtSPK9JetAZquYKfF4RypHaazAb//hx/y1a6AWUSjHFGisbmxQ4zUoumeky6YHO5LmN5ykeb5Y/KFIte4QNdvoCk63tRhYl0LU7fZ3lkhJZt6l5cN1Ejj4qQ/hyr1Rx6NsynFhRyq1NHx4z4KBWYj8eevvzxiebVGRouot6+kvdyGcJ3J+KesysZp03Y5ObtiGogAN5no1FtjGr09bBl0Rp5OtjijfVinlBWXzpkb8stPn7O4JX5veaWMauiMRi2MUCQfMuksYTyltiACXCVU2djYYDryabfFnKcdg6dPn3Pz9g1aLRGQdDt9wkAhL5MfV/ULgiBke32VobwkZa0a0MaxHDot8V0pK4uvKoxkH1gqBj2aUe+CYcqL8OwCSy8Sp0w0Ca+LCTFVB1UVfmQy9tFVk9t3tqnXpcBsktDvx6TzHonUQPOCiERR6LXEfvzg/SVyhSyuN+Xg8LGYp8mE2/cWODocz3ugarVt3OnVXHvjn//57/G//a+PME2dmdQDcqcqacfg058/5M//QgQkuq6QROG8yT4KQsyUjabFqIq4uD19dMFHH1apLZn87BMhCL6xfpPawgpXjbq0zW9474N3SWINTeLOVTVLNndMzrHIZzYB6Ay7vHHvAYsVYftX5y20TEIuU+KyLqDKS3e26Y3a5LNLvPeO8B2/+PQfKRZyXEkR4+XVCoE/xUl5vP/9fwbA4+e/xnVjvvX+d9laFkGDU1J4tvuQszOxLltbb/Ly+UucbMTBZ+Lg3dm+R5xzUVjlzl3x7KF+xH/2X709h94dHTSpVdc4P2vw6oV4zn67jKIo7L8Q6zkZdfC9mMDX52QHUagT4VMoWPiB7N/STWEXUr9KMWcYakzsp67bKYmUmDhMSIwE15vK/6eiqgop/5rgQiFWwA8D1P+fYK+ZtbA1B0PC48I4IFESkkQK3Lptzk+PmbkByTWpR2RhGAa2k8e2xbrnCjmKlTyZnAh6i/ky2Dq2nZrDF33fF4LFU4+p7AHqtDqEs1j2qIlzTlVVstnsvE/JydjoukYmlcZ2hO/KZ0ugJnOokmArDPCUIaHUj4kiBYUAVYVEXlpypRKJ6qEgzotEcVHRicIIXZJ6OI7DxcUZK+s7NFti3VPYKIYOkmVQT5mYpommKaTkhctybFJ2CttKzYk8JrOA4cjjxYs9aVMbWJaJbqbm0PkAj1ngC1012TOnxgrH+yf8/Cc/BeD5k6coxOTzWVQJ2QzDEFSFKIlB9plub36LT/72Lzm5EL9378Fdxq09Tge/YQv87ht/ypdf/xzbEdCoWrVMY7TP8VmdjTXhK41KFpUst9cdmk3hm7vdIbXKNouLwsbOL04YdNtctD0qBQHnr5UKuGOXbmtCJi385/HgEc32Ipsror/KMRTi8ZSVlTfI3xOfOWy9Ytt6wLA3xpZwUMUIuKhfEiRZaXg2b7+7w8GRQhz/BvIXzFTiOEZBzOfB3jmhZ1KSLMf+GGrrVWYzUExh11989iX/7Pd+yMV5k4vzupxPn/XiFotbYg5i3eWLL464ubOJJfdfMh1QWcjzxAvIrIqkqJmEjIZtFvNi7cxUQqFWJfGHlDdFHPTe7VWS1jmeqbJ7JYLqpdoKh8/3WVkT0LpSyaayVObFi/M5gdV47HPnvS069TOmA3G2Z4p5Ru4AJSXhjEbC5fkxKdvGlXqRS8UsF5fHrG+s4kgB8sPdIYZhcXvnA7F+9YeEkUerO2LSFv7m5GgXJeozmA5YcoStR3k4nVRZXhRrpVRc/JlGSq8ymUjtrZGOrscYWoWy7E+fjC84OW9TlMnHq8se93bukygxT/bEJf53P/4+3U4fdyyFjfunKOpdvvXBdzg9FRcu3zXZWFrCcw1eNkR8+l/+ye/zcv9n1GS/09XJGe+/d5/dkz1OT4/Fek5ndFpTOu6UBckO7lQ0gt6UgRRWLpYMKvk8k8mIekMkTGuLWSZ9l8VaibzUPFuvjShvbvLySLQqpFM1fHWGNhywJvui3bTJ4fEuKys7lJeEvYz6Z5hUuL0mbMPWfE7OfA5PDkhlrrVMYfPmOvfefJ9QEu3o0m//U+O34nJFopPJCiNM6+todoRtK6R1EbxiXRD4U9LphKwiMJfZXJlOewSRDLLtHMPBmGppB00Rwdxl60t0o4rqLXHVEJvF86foVoJpCCO0Q53u8Jz3vnUbXzJgPXp+yKTnks3Z7H0j+mhW8stkMkU0VRjA9s5N+sMRUy9Gkc681amzdatCr6NwciZ+b2W1zM9//hU3b4gM8WRikjcjzi4a+JEULZ70idQZJ8cp8jlRQUi0Hv/z//Sv+JM//SEAISq93gwFnVJJ9GEkhHRaAWPzkmJJ9HT1Bzn0dERZE3PnzSKG/RSxeYAtGeGUTIWp5xNK7LY7cXGKNpHi4Uvw7dX5GX4YUV60sGUvWmDD2WmfN1aEwz8/mdHqvKSwkOVv/0401Da7pyyup6hUKii6uMwZU5vxcIom2VmWFm7jzoYYsyGmJL3o9wKWqkUmbpPaomSXGWU4vzhhFonnbDQUCkWH/f19qguyajOckkpyNI6PeftNkbEwqzdo9DxSt0TA98lnP6NNl8pimazwo5zU97AnQ5yMjqFLZe7EZDodk8iqQ6WyzMl5QqJ7PHt2Kdd9nak3Y9r2uMyKCtCTF4956+3bRNLuFCPHctXh4vicjZtiiy3mtijmipxeXjKQivBJakzBrmGci0O12WmTMZeZjCYEkmLcNsrEXshKdRkjIxs36y6zkY7viv+XyltMByOWl/Kopljj/cNDUo7CyGfOyKYZCQP3FQslUb377NPPuT1ZZ31hlbFkiTo7f8HCpEHa0Xm8J5qbF9cSVEPD8MR3H+7u485c0rkSSSADGctkOPTRqZEyxO8F/pBqzcafyr7FbpvSVoalheV5A703HmNoAbdviYP/ydd7DAcdgshHN8Uz5Qs5FDViOG7gy2ZyRZthphICSQyWyxukDANd0/Bk1fb73/8h/zvPsZwZsjjB5WUPTdMYdMVnVmuLmKpCHHYxZAP9+tIGhpYh+OU5saSaTdSAhGge9Fq6Q+inUROFnMwId4cefmSiBwlIyvYojtAVjZSkyF5cVfnF57/i4w//nPffl0mK4gat7j6TWYNpLBIJaauM7ZQ5vhQH3K0H2/zu77/Pv/lXn1BdFHYWBgop06LZ6vP4kci0ptNphn0f7bppL9GAmNFoNBcy/973H2CwgD9OEasik+yHE9ZqP+SyJfp/DD3N0dEZhWKZ02ORiXzjjbfY2C5ysPeS7c1NsQ6Jymjs8qwunrNUKPLNo5+wtnaDrCP28Xg0IvACVCXmxXMRUBayNRxHwTBkQkTzCdw8GX
uB8xOx19JUWd3c/P/Ye7MfybL8vu9z19j3PXLPrKysqqyq7q7u6p6e5nCGM+OhaJESJQiiZMuAV8EG9CL/BfKT4QeDNiDTfrBli7YgShChGdqcGXIWcqanp7fq2tfc94jI2Pe4ux/OyRwBNkk9+IEG6gCFyoyMuHHvOb/zO7/1+0VRDRTZt/fyxRHVaolQSCxorzsgGb5BJNrENC4i80l2D16SzaV5tS2eLxW9gTOaZ2dfQN1HYqIpOmTGSCT1y2fZuPompyciuBMPFRmNZjx//hhFFRUFR6d7dFsBZ3tDshLZ9bzRYdgH2xPzq5ojdDVCLGShSadM13xChoKqBhhybXxfQVMNfNmrZfseihqg6zqGzBI5jkPgO/iBiyONRxeVwFNRA4mcpYXQDINI2L+UT0+dAD6K32UqAYkGfZfDPY/AvYBrjxCoFrFYnKgMiGTzeSKJJLFEnKX5ix6WFRQ/wJGbzXJd+oMRs6nLcCiN+tY5k8kE11EvM0mGCXpYvazGiEdN4okIiXiWsExu65ojqAI89zJbN7+0iBlKYdnCsNEMncAX83KBPOg6PtZ0RjwWIZEQ+2HqjvEC/xJQIwgC3OkIz4LpUDqiloUfWEQiMapz4hwtlwsk8iGW1+WZ6Tiit8udYUqqiUw0g+c5NBs1Ht4XZ93h/iu2Xr3AkqjGoVAIXTcYj0aX0P2e72NoCj7wwVcE0mm/NSAVv4WZFOfA0G2yMv8mcb0AfE/IgmOiZWx09xfw+zeur/DZRy84awidWyjlsPs6nZqHGRaycFp7RSaeuTQ4U8kB6USRrf0tNFX2FrpRHt47JFOMkJRBkphyi4iu43dFQNRK9JkM2thGhOPzz+U9FIim5yhXmkwlkIliTImFUiwuCGN9Z/8+tbpHr1e/RNiMmiYn+x0WF5ZZKAidF45NODt7hT0V6znozginIyiYRELCTrj7zi3Ozxu4sxDvvCP604ejAafjc1yZ1Rz2e2jhCJ6mEUgi9dP6Nj978Dl33nwbX5f9qWqKSqHA8ycimBtrLmEkI8yllzltiCDXt39/h6srRba2W8wuEDZVm5tv3GZhQfQ7vnx5j3ufP6FQrjCbivN/Ynsk7AVGzhGGLRFqy1msYMhwLNZvaWGB2mmNVGQd3Re2Yb1+RqFcpNedXJLMHx7vcOfuDfYOngmZnrqsrsU5b+1Tb4gAaLmyQKm8TPP4CWe2kNmJm2NmDOh0JUkyGTzf4fDwmDfeEETGhpbG9Ye0m2KfAthjk2G7znQoZLhYrtAfDogns1gz8Z7eqI1uplBM8f3pbJH7jz/ixuYijfaBuLYyTyqukIkucO2KkP2njz7BV2ecSFLhN994g3uffEyokGDcFefvm7evsP3sgJWld7l+Xczdp48/QYsW6E+E7T0dJEmGKnT7D4gnhP3f7U4Ja1ViaYX6mTj7er0Buw+2uXNHgHN0Rw1QOkQmEWayhGAyHJDUIvRq5yQrQmYnAw9dGbAsz7T+eZeFhSm3bl7ne9/9EQBrVxY43q2RSIbptYVtVikIh+3PGq97rl6P1+P1eD1ej9fj9Xg9Xo/X4/V4Pf4/GH8pMle+b+P5Iu27s2WTiEOuGOL09CKKGmNhaYFarXZJpFoogO2MKJZExO3s7JBKpYJKgsN9UWoSSazSHwyYTXdZvyrKZlrNLiFTp1oRXme/fUQiWmB/22PqHwBguwbpdAjfGZKXZLypZIrT4w6GLB3sj7pUqsv4qoEjyc4m7oCtl2MUTGaW7M3yPIqlTSxPRM4836HdEchg7aaIqqTSaVyrTygxZeSIKJE1cnjv3Q8uiX7z6eu0B88xDI2J5ChIxOdZ24hTO9/Ck2VIx2fbmBGDVFSUfoQTIWqdA65dewvFEBGhn376bQxtQjEqMhghPYVrhTlvNQls4c27LqSjUUbnLrYr7qFQLBIky/zO//bfAhCLqrz19jrNl0dcuSqJInsbPHv5iNubv8xIMiBG477IMElellSijBWa4jgTUgmRtn/45B7xxHPUoEz9THxuaLUJJUwiE4mcpWbo9DroSpa7b31N3OfUYzKZ0OoM+fyekKG+W6fujfhjWZ97o7JCpZqhPbTQEOsymwW0x/tUSmmODyXaVBxa4wG9nlir6TRENJak3XQImyIOcXy8TyIZxho7fPiRKPkrlhPUjjvMFUXkrNvtclTf4fiwRXFV3HvQ36NrpNCMOGGZ/RyNcnTHAeG45NlxdVzbpdseYsvad6sfJhlzyS8UePbqSK5DGEUbMB5IfgfDYDp2KWZ1VJntqVarEBrhTNO0akI2fIbozgIRSVD8wZdvs737FMVyaQ5FiUMkEWFz9RonJ0fcvnnRZxKQDefZPhLzm4qpLK2V+eTjY65dEVlMVR3R7kzJZ1O02uJanucQGBaOJKGOhZNYQ5j6e7iumJf1hXU6nQ7ToViraFTF9gbkchmGEzGfrW4HAp2ZbdPpiLXyfB+UDupFf5XjcPvmTZJJjVJVyL4js0e+P8KX9XaJSJj5+Sq5pIimndRPyRdSpJIunY5cd3tGrGSIMhPlIgMEispln2LAhOkghpK3SMTFfZ630/iKj215KJ6QM0/1sC0LTUZQVX3C0tJ7dAZDxlPJLdL2UdQZiVScaEzCNbs6vcEIPS4iqB9/eo/rtxf517+v/QKJ0LUJhSLMpmNqx2JNK5U56mevSCbEPlYwmE6nJBJJzg6lvskpRMM2gT9mc0Nwezx9+hjDSHF0JmS6Mh+hkniLs9MDfJk5NhQDU82xvFyi3hSlH71xgwCb9EXGXfUwIjMCzSGTFPfQatUIGVX6oyd4EjWx3Qi4tjmHK6Oj50OHjWt5eucnHJyJyO7iyjr79eekI1lOzkRGz/Fdhk6SQBNz1x/X0JV5up0hltS5hXSMm5vv0Ovvk8mK+dQpo6oT4pLbaK5yhePjfZ4+e8L77/wNAAbjYz75+B7z87LsKhzhbOeA937pFrVzER1dvp2gkFxjrbzJ7t6BeJbWiCefn4MkoR04Ezr1CcfHQ5pN2cfrCMJg13UxzItiQYjH4wQyI2Wapujpcb3LDI2ugCBX83EsIYuqBoEKiuSPU1HwgxkBKo4sW8cLoRs+geKhy5I4Qw8TqN4versUFcUx8f0JnabYV/XTZ7iugD6/eB+ohONF4rLyIJFOkSvkiccSlJZk2VXIQNdhEviCmBmYThyGgxkT2Yt6Xu9zuOeh+DuEZIm47/uE4yrhWIjJROipTDKMZvgo/0YGyglcTHQciYYYC8cYjEdMRyMMWYI36o4JRWO4srfXCzxUTQXNx5TlhIl8nPHIRlF9xrJ0/vjMJjjyiUr4+0QiSjYXJ5qNkZTooM++eMXzpx/y6tUTzusi6u54M1xbIZ8Jy98FabNuGLieRJzUDTRNQ1MU3IlYr3iQwPVOufvOlwE4239BZzxjGK9dysWT3ftMvWX+87//HwHwu//r7xAK3aCSvo4hSZNvXnuTbmOL5188xIiIDIZpJJhYCo1tkX11A5dYcsKbm19mMq3JdWiSz5bIZH06TVH5E1XmSIeznJyL9oz58gq6UcL3fWp74
tx560urHO8+Yf7aFW5fWQbg23/4LwjFVF4+FWd2ubJKtzljeW6NWvNAXKtyE899TjobIh0VKHxf+fK7bO99yNPHooRreW7Io2ePcewkffMC6TTF6vI6itFlb0+cPaXKHIlSlyf3Rabcnk1JJhIUchVevRKv5fIlmucjXr58xsKikM/F+bcYGBOiaaGTMpkcrurS607QdYmiWonyxY5NOK4zlxS6pDsYMjHiRPtCVlauLHFw1GTSd3AVIQcKYRqN54TVBO4F1YMW4uTkiLk50a8aNVIsVrNAl91tMVe3bv0SiYxBqzGgLfdfvhTns3sfXlYnVSsbbG0/IZYuceuaeM3qdmFmYk8zfPWu6HU72dqnOldiMpWluxSIx2xQx2hSL5XnFP70Tx6wsXHtMsseDtKgzRhL2ey1B3Ttc4Yjn/mSqPx5uXvC3HzksuS43VDJl3ROTs7J5wX0vOcENCZHDC2LQlpkoHaOD7n7pa/iaEJ37jUO0JIG6USckWxDePxowPr8GqhNzg7EvS/k1jlrtCjmRTXLZHzKzG4RNZexJN9hthQjHppxfDhGlSX+haUM06bKZCYRxZUZk6GJrtmcD8V+fP/2Bqf3ntIfWhgZ2TvlqMRCYRpnAgUwZKTwzAg7tRHvfl3woh7svSAcTuG5Ll/9kkBNnsiyxT9r/KVwrlR02jVhyGVzIaqVZRTPp9sQm+XKepFu20FXFSIRSTTWi9BudVlYEiseiyap188o5qpYkstAsxRMM0oxd5XJQLyv3jgkOk5z1hD9TvGCxmLpXQ6PH+HLFLxjzZi14mxs3MbXhBExGYxZW7/K9o4oWZmORqTsHqPJgJOauM87d66iuwvMZjMUXSxUszljfUPh/FiSkaW6GGoO3YySyUqI2vGU+lkfr2jgyb6WQnYdrdqj2xMHY6N5gOVM0XQP1xGfyxdLHOx3SWfK9DrimedKC7S7E/JFUbJyeFJjp/acg+k2MgNPpawS1Qp4siNgPBvS77UZDRwMTRidi3PLoCYYu3skZC3syWmdarHAnbvCoHbtKZlcmLOTcxJhcTDNL2fYvPNNrKl1Ccmruhqr84s4klRw7/A+k7FHOpPAmYmNv7SwSDSSpH4yxAiJU1V1TAb92SWUr6OoZDIGrcaYJ8/FIdA+GRGEG0x1MCV4RDSSxDir8ZWvCUUe9n0ev3hCKpsjkAZ3KqnSHUA4doYqv282y6CHTHRTlIx6kx79YZ1sIc1kJOZze/8Z8cQSs4nPzJIku2dTBqGR6DFAQLNf3Siwdi3LD34g0vtmqUAs5ZKPhziQzZWF1ArdyRGxqJjfRDRBu1UnOkxgSr8mmA0x1RCDXgvPF4aaF6RIJ+ZQFGm4+Q6pdBozHKcnmywtd0A+OU+j1SZmiDmeq9xg/criJWz2ZDZldf4adjBBn0i28mgYR21TWSxxdCbKJYaNCN14n6l0suPhCNZ0wle/WeZ0Syiy2cQjkojSbp1e9gTlsgVs30fxZOmgbqMpCulUGWQfRioRJh5dYizLElKJKkbY48Wr7UtoZEUJiMWStHtdNNlErKEwHveJmLL2PrB48eIpqWyGaFiUMw1HwkiPx1KMxkN5rR69Xp1YVMzJcnWNbu+AYW/KWDbw6sY5Rsgmlkhd9se5ngG+gm5e1OxP2dupc+XqEssLohz1k0+3CacV7JFNSGL3+tjMrDYLVQmbHXiousr+0T6bN8WeGU5G1I5dPvjqNQZ9MVfdYZ1sqcDRqdA3Zwd7vHnjqyRTcXw5n4YREg36psmj+6KEwrJmGIZ2yScT+BqGEWLYt4gmpd7IzdPvKKQLfXb3BSBIaS7F0ck2qZxwevf3m0TWRlxZuUm7KV7LxotEYmEGkwGOvywE1FB4tfWYqOR4y2UXqZYd6ic2UckxNbMOmUxcCLIogXgtlhjz7OkLoiFJbZF0eLXVoFBaY2qLdd959QwzMiW/toLriQN7beUKE+ucowPZ56bYONM6ucw8iipea3Qe8UsbX2cwPEYPhNOn6R6T2QkhWV7Y6fRRlRAL82Ua51tSPtNUl2I0zy/6ZZuUyivUT3XqbWlcrV/hvJZg2q7x5KUoD1tb3eCtdzbIyYCdE+6hKRYoKXZ3hZGbz+R59XIPLYixuyXkstexOT1pMulJ+PShoKdA1QlFftEsbYR0QiGTsAzsKZqGFwT4vnBMVRRRfqrrOBfQyIqPr7t4noMmAxCKouC7PqrkSfN9GwINP/DRo2JdzHicIAhEqZ4tdKWuqLh2jV5bfK7dgp0tBd83UGWJYygUEv/CaVIZWZKWSRJNxChXRAA0tBoRjqGvoEhgKM/z6PfH9EdTfNn3dVKv4XkaquxlRjVEL6EsDQQRTDFUg/29HSqLwrEYTqdY/KJfLR6PY+g6rmVjWZKnMPAwTY2QGUG9ABZBwdeCS7lz+jOOTk8Yt475/HNhJ/zat36Vv/cf3sWy59h5Is6x6bTH3vYhH34i9LlhhLBsG0VTL3vRbNtGU3Q0RcWR+nrcmLBWXiKdE0q+PLrOQaPF8lvLl2v+wQfv8/jRC374Q8G98+9+68t89tkJ0XSG86aQ1xf7UeayCVauvEGzKwzFYiKH5veplCW3WTTO9qtjJprH5i1RlmhNfw5OFWvc4ET2vrz75gL1o21u3hZcmEOnw1Jujs+e/ylzC6KlIR2rYK5bPHr4A6zW++I+v/wej7c+vOzr6TWz3Lx5i+db93EmQsc2z3uEQiGazSanx/8LAJ3OIz798JB/+A+F8ziYfk7vXCOZVXE9ERybODUOtwdcvVnFMEdyPlUU1SZwhWwmTHj55AXJ2DzZvNC5vfqAmytrzPQZB7vi+ULGPEFIRYmIdelPanieR8iPEJdO9XFvh/rI4mYhjduTpZflHIES4mRf9BaFImFu3azy/Nk519/4AIAH9z9mqTxHJgu9jgyw9PqkElFSKbE/oqE4kbTB7l6fZEbYgqPJkPPukLlqgX5byGwquUBvOBDAR8DB8WNcR2Ujm0bi59CeQr5S5Jt3luj1hZ1Zt6bcTr5BStoNJ2eHaHqCdCp7yRdbVnxyuQzhUJQwrryHI8ZDl5vXBSDSzsEJ0ZBJPOOydyqeORYLYU8EBxfA1qt9FNUgEoN+V9idCwtLTOwm9cND8oaQjZge5vFnz8iVxZ493Tvj+vqbRHSLaEjyotk9huMSy1dyjJrCnhqet2i3Dhj6sp8zZDIeg6KpaJJ77/S4Q7mYwnMSKIqkslHjLM0ncByxPz9/+IiNqzd4tPUQ3xA6aNV6A0JxcnEIopIv1kgTMRMECTHB7b7HQiTJ+aDP6UCClClVCtkEh+dbHB6LBMh4+Isg2f/b+EvhXNmOxdK8MDCVIIwz8kmlI5dIcrFYgvZ5j8AOSJVlbXH9BHQfR9aPVxcKNJoO24evKC0KJVKvHzBfiPPws11aspcgVHLIz0q4thCuVt2iV39OsRLC08XGqJ11+OpX7tDrDghFxIJrTphKdpmnM2FwOvaEYSfM8ckB84tiwWctlal3QCqV4d13RN3nsydPsAcj0kJf4PgK7W6HUsG8RPTyPQWdBMV0GscR
u6Pd7JAphAkCmbla89g7GjCXWidhis15dtogng6hdkukE5LQLKyh6mcc7QgDwYhPWS4uMRkPsWVvmH+a56AZkJMIQ+N+F1PVqCRLFPLLAFhujYHjk04vETJlJDI2YDxtcAE7Fqg6M2tEfi6EJftqdnZPWA4qOLZPXIIN7B48YK60TCIj+sLu3n0TewpHx9s028IgWlgs8/T5KYEGkYxQkulUFPotGh3RxGiaI/q9DIam8cVnPwfgyx/cxQjP8eTpHjl5iI/6USJhk9FAaqN0iCtrm0SMBUZTca39g4ekE3GwVshKFJ7dg8/JhjYIZuIgjidMdneGKP6ITlvy3AQa26/apCLRS5JaTdGxQhqtpjBwNcMlHb3Ks/0D8svCaPHHsLS4TDKxwdOn3wbAtc+JZdJYMtJSOztnOB2QLMY4PxeGRa5YoVopcn7SYaUssq82I5zBjLh05rBCGN6QpfkqbpESAAAgAElEQVQSkYFwIg6Oe3h+j2I5R3sgDqKVKxucdmvEk+Lavhbm+KSJqU8Y9iWZ82qYsTKjdnh8aQC985UVdCPg8WdCiZWuJzg4HBEiQ1Hux3pvhO+PuLZ4nURJyGLt9IiJZ1KT0VHDjhBPJHGmU/KSLLsx6uBNZmwsCblrNOrk88tMRjCbXcIBMBjUUBSFeELoCc/LYJhh+jOxP0aew3TqcHCwgzMUDslCRcjfZ09eEZENw7F4hOmwSaYkrnPasmm128QSM3SZ7XEDm7ARJmIO6UhyYwMFP/BxZLTSwuP21Wu8dWeF+x+L59PMKIGjoODhyQhboHhMxzMyBZmdDPuc7T6g3XfxXREZzCR1VNPmwaODywx7Kh5m3G1ytiMMspCRZ/foI8JhncFIrKcZ9lCIEomo7O+IIJBuGphmmEASQAaAqkZw3TaJlHCAbt/6Ct/5zj8jElsgGhE64dnTB5hmmojUP1HTYDg+5crqJvcfiT6l4exz5ivLuM6QRk1GPpMz1lY2iEnktb2Dx0xnQ8b2iK1dSQIfFuiq5XIZ1xMH3/Fhh+pCCN8T+3Fr+5BMukgmVWVlTkQsf/ijP2R18RbH2qeUJOdZo7mPFtW4IfnNPvvsp2xszFOv75GICbmLhG1+8tG/JJvNstcQQZi1lSs4U598SsjZYDThpD7i/ff+Aw7qAtTDG/pE9CIxSZaZyxbpj5okM0UchAK3RzMWFudQlCTXDWGI+t6EVDbC0QXXl2kxmB0QVsvk8kIGXW/M22/PU04s4n9TGnO1Dp5yg4REJ/zuj35AMp4l7C9z/6mI6Fsjj3ZtRHcwRZHIlcNZD1NJEJK9THpohEEcR7EveX08ZmhqBNcLgezp8vwpaqCi+nJf+SIrFPgeKEKmCOK4juiP05QLhL0wqh4iJqtG0F0sTcVDw5AABXrg49g2g8EB3Y7Qn74bxvUsTFUGGhSIhLJEEybpvJiDVDJDLJagkkyipMQ8hELzPPEcdnaFExHRgUBFUzU8WSWiKCbh6Jizsxc8fyzIXCMxU/LSySxVPEk4msQwo2jS0Q9HNAIzQiqt4LvivgI3YDrr0ZZ6qtOpMe5PmQ5bpJPCYX73lxXm5iOkY19noSpsgN2tOn/tN3+d53//vwOg3mgTiyZxPBfHEXNgGDpe4GOYURKSI3OambK985I5T/TCVKsxKlmbT3/yCEQimd39M4rpPE8PLjiYCtx8a5NGt82NdQH4cm//Ex7tJXnjnRUiabHGJ8cGE0+hsiozikpAeS5DNOzy9MmBXIciZnjMrDfml94RtlJ1pcTAn9KVmQ9GCl1vgD/xyOeF7B8fPyJuZvngrS/xf/yz7wOwfnsNtIB0WHwfSptXDz9neeNtBtOGlE+Fs70ahfnbXLspvq91ssfKnRn3XgrnsdOfEInrhHMpakdSf5fTxGMzuqMOhYrQ1+NRG21mUKyK9Sz5i1xNZ2laI2KysbYfdHAw0CyTUl7I1NbLP+Hrv/xbdE2xR0ejEZ3xPpOxSUqSnccmaZaKNoa+RnpR8hnOjklGqkwjEkyqsYtrXeXu21UC7aJPqUQQ8bG9LOGw1B2ZNK12l9hFEMo9o9ONkM9HcSSXqe/bzOVLRAOFouSUCsU8+v0U5yfi2pX5dTIRjWg0yvHxRbAwzXh8imuVMGRQe3PjCtXsHIOucLYajSZTxyefWqAsudoUT8dLNTneP2AoAS3S6TLt7kP+6E9EEGFtsYg2jNGbqkTTQl4126M8H7rkxlIDg+tLy9x/9jnJ4pxcvzrWJOCNd27z4rEIOt25/R62FRBWxbO8feMuZCwOdmokE8KZmy8FTMYKnY7JWFYeVDZLPPz+z4jFhUy3W3Eqtzdx3BHH0uHTqZDKlAnSDooj+uH6zVNatkNxRchG0VhkaXGN6aBGpyP6Tj/62bd5d/M2lucwkQHdXAo6XQunJc6wTCpGSGniouHLZEcuEWMyHKJoUXYbQq7VwZ/fVfWXwrnSVANTepbxRJThsMaz5x2islzq5c45s4nCwlyWSEJMeOfFOeV5jYaM7I56HoUFk3FvjS9dExGF33t6n1oQpnS1QmZJCOHtu9do7VrsHwmFPLUHuF6Xpw/PmK8Kw2Zp7ioKJqreYjqRUT/b5E9//h3uvCUO1NrZIaqicnPzFuORhOmcTVHDFmO3z4ttsRnThXna9XOKVUkAq+ZJReZo95vUz8Ui5Ysx1jbm6Q16hGXEK52LoxkqcZnG7nRbLCysEVWS1PfEgsdTGt1uHyWeIiTLXXYOv6BUTqPHJNKS6qEYPplChscvxIGthtposSyTQMxlaeUavuWTSxc5PRUgBsNZG9WbMI4cUpcQ55s3rqAbGqOhuO9k2kfRfYr567R7+/K+51CCMPF4mE5HIvMt3WF1dZXmqYj+1vZHePSZWmMcRUT9PvmkRzgV4fSshf1KGJRzS3lM1SQvPVPT1DnYPaRUSLNxRaRrC5lrnNWaKJzRaV6AYzQwwwovnku2eTNONV9kOuti+2KTRaIqhhFiNuui9YTsFfNLjMcBUYl21R/NWF4oo2ohHFesZ3kxRe9cAWwsWzyPgcG4P6KtijWo5tb44fefkF/xKUUvIq1RPD/O0cmImSINEnoM+mlCObHmFVSy4Rj98YR5qfCXliusLM3z0vZJSoQmLZbj1cETEjlxmFTzixw/e8rzl8ckS2LDz89HOdqbEDFcKvPicz/5/KcMvS5flZnHpBLwN//jVW5fCxPui/c8f7nPZ7sBUyVPSjYNp4w4Q98mLhv4J06CdM5g1O0zlgTImSxEgipT38KaiD153uoydSYEshTTwqHWbGE7IdoyK6UrNqlonNq5mMt2b4ISb5DKhuhsCTnDM5k4LslkklhEIJKMrQ6HxwdU54RiLWc2OTzdYuoYRCRICoqMjmPjGeLnwcgilQkT2GJ+Z859br9xlZODcyxVOCimfp3d7ROsmYEq4bXRLDQ9dEmDMB11ma9UuXn7Bj/63ocAxKNJRk6fSFjFnQrDVNNUQprO6qrIoDQ6J2BEsalTl+SctZrL5u0Ke4fPCYfFfQWBRTafYOKIg2JpfZlm7xlacoxuSQjuUMB
sbBMgspfApeHsywbeIFDwZInSUDYRf/L594knU6jEOK4JPZgtGDiWxmQkG/9tF9O0+PFP/oj5sjAC793/Gbn0HCtLd9A0oUtcprx8sY1pij07Gs/YP9glnytz7bpEEKzvUTtxKeZXaHZEkOvkrE469wa9vmySzuRYWl7g0fZDIqrIRGQKSXy9Tjh0i5dbojpgYWmF9vk2lqQJMPUUe/svCILhpXM1GyVAVSjP5egOxBy7jkMqb/JSwgk75oTVuzP+9ce/zeGu2I9feb9MNvk+fl/MuaLq5LJXOD3/FFeWhyWjSwxGIyajIS93fgLA5rW3GU+nOJLiIGyqmF4JTQ2DL+G+M1F2tp4xK7t4lnAQpv4QRVeZToQhvr6RZa54A88LUbopDKJ0tEDQj6B6IRp9URqlh5Icvurx9IUwpIaDMIPWCGsc0JQZWi0UxfW7mCGVqCzBUxQNVXcvEXZxNXzPJhQKi8wsYDtTNCK4Xh9TF4ap50/xlBAzX5bRaFFwLXRVEShHgIOLp9roSpRwSHwu8B1CEQNrKgFDVAfL7jAc6bS6wpEJ3F08z0PVLmN2hMMhNF0hFhGfCxQF15uh6SEUTQKEKDq2FZBMm/zVvyn02ZVrGi+f1bF7Qif88rcKNHtHbD8bkcoLfbO7e0rMm+OjP9hC1SSRuTPC930C5yKYoxIwJZMrMx2L1z7/WY9SKUXt4IiBRIS7tvEu9dY+46mYl4tsFXCJRBgEAb7vi7NGGo+NvRZX52/QkrbLwFkiUV3ivVtV4LsAROMm1XSJ/aYAZHm+/ZLN63fRzDkCXVwnZJukroSwRm3SWTHnvdEZ16+usL0lzmMlSFEs+LzaOqOyWJX3phEKq2TTVYY94VQ/evQEB4VIWji0S9UVBq0Om9e/ia6K59vI3UHRC3QOX/GD7/8TIRuZNPMpn+yKXONIhkmjz8+++IRCVcydU9MwQgUm9X0OZPlZbC6MYWv0zoQ+aI+m3Lm5ysnhHj1J+THrnRBNL6IYHieHYv+n8lFcM4ktkeuUpEer73HrnV/h9Eg4TpWigRMZMeqOaEqbYG3tKkenjy9lejqdYupJsuUciZhwGtKJqzx/ts9kbDOTZT7Xbt7m08+e8v77AkWxPPdzWvVTzpsxChUxn1fXr/Pxz++xOBfijdtCd336oxPCiQhDW5SOndcPSIQrbKzO87FsJ1i5ssRkrOEoAQfHQk/9xl//OqmYwqNnYv2Wl5fpnbWonYwvSxz3DvdJh27Q7p5gTsTzkO7xcucTxiOhW9ZX8hwdN4guRWlPhM4b9cLMrBqGoRGV9A/nrQ7lxeXL/REOdBQnRzzqoUTFvGwfHhLrR/AkmFS9tkcsNiJXmcOQGfbh+SnWVEFREixeBErPe1y9eg0dGfDdPaLV7tLpepQXxLocHrfYftFkfaVIQVZbDdp55gvXaHXE565cjXJWe4Wm5IlKiqNwJMThyRMIDFYrYs6zS3OcPHiMfyCJvzevMGq3iZhFIiEhUz5purMJo9YUS1br9LsanXEHIy4BKhIhDg9GHHQsNtdFoK8767KwkINumFBInDNzc8Ju/LPGXwhooShKWFGUzxRFeaQoyjNFUf4r+fqKoiifKoqyoyjKv1AUAbGmKEpI/r4j/778F33H6/F6vB6vx+vxerwer8fr8Xq8Hq/H/9/Hv03mygK+HgTBSFEUA/iZoijfA/5L4LeDIPg9RVH+J+A/Af5H+X83CIIriqL8HeC/AX7rz/sCTVeJS2CDx08/JZUwSaSSTCYiQhSoFhO3x9MX5wS+iOQMRz6x5BKOJaI/Ez/geCdGu9Pje3/8TwG4+++8zd7TGfgD1LysYd1tc35wSEH2QBy99FCNIZvrb3FBSlKqRnBsj8XFBc5l47bi94ibZc46IuqQKxRwJiWItbAk38nY6qNYLp4Dw8kBAOHxFt26ja+IKK7phUhENcBD0mUQiunoRpRYLIovm5THI5u4HmGhJKIjj7d7zPwI7WGbyURCcGeWOB/uMmXIUEIFO47N8dk20YjIauQTOTrNGYPpiHxWvFbJ5Xn+4pyQjDD26xbWrMettZucS+jnfKTE4dk+t9bmmJNNi7bbp90cXHLFTKcjXGuKGdphZonyHkMD07DRjA6eI8SrUq3iBhZ1WR+cVYscHpxRqhokIyIyMIidEI7C8mIY7GVx/WES1bCJyrIS0zSYq4ZxnYB8VTzvRz//IQEmrh/Hl1xb0WiYk/0Rf+XXJV/O45dMJz5zlXl2ZYRo7PWwxj6lcpgjSby7vnGV8dRibIsIdCwSZzIBw1QuoUvxNJYXiijKlMCX/RzY+EGa+7KMRQ1MVlYyDJw282nxLMeDAZlkioifJiF7rJbW5+icT0hmZX9F3KR2YrCyXOfWXRGhOTmw+fGn32V5cZH8goi0Pn74OSsrG1iyh23YGeHbIb71q+/z4T1R4uRYXcxkGi1IsroiIi3NoyErpWsMW2LPBM6Y8+Y8v3PvE37rqwLq1oikSZRGpBMOwVTIx2FNwU04xGRk6ZMPP+WDD95iNNGxZB+GFnPojttYM9Blen1qu8wmU1Ykx0Sj3kHNxLGmM1pnFzwiORq9AUNZputj09huMpuOSEqi2LBqoJoF7JnOl94V1AQPn37Oe+/dZWtLPMv+/jneLEw1lyImI/XHsgn69rWr1HqiVCEIT8mnbqFLGOuolqN/7nHj2jVSOSGLOy/6LCzN8S+PXjAdiywGakCgaMhWJsJmDAWP7d1dmj0R5Uwloli9DqoSYMjafsuyKBR10gUR/W2OevSmLdLZBQYDkSUqL6d4vv2CVCLNcVPol/HQJ94zCKfEvDx5/gmVco7NG6v85A/F3EUMBTUI42vTyx6rX4yLuFmA53lomsb2jogSV1azDLthFH/CYCh7ENUEvu/T6Qv9mk3P4btRTk53OT4S2aVev0Otccbnn72kOi8yOYah0+/alOfEfsxkTU5qDt1enVZTrINrR8nnTUa9CGeSn+r2mzc4PNxFQfa+rVR5+OynJDJ5nm+JjFAhsUTP9mmcf4QZkf1FDQtN9WmPhA66crXE/Yd7VEtL9HpiP6yu3qb78hmPHz8inRbXt62Aen1GMiPWM15YIBTb582v1MhWxb5KpGE8G19mIKyZjx7ZIxINIRG58bUug75Cr9ejWhVllrF4mv39XdJZQ/4eprvXR0+20WWJYb3RJ5ePYwVDxjOhv+eraxwd1nBmImqshTT2jp6izN4gmxbR2PPzEVYwY2n1OtOGkJdra8t842srPJYlVfge1shiafkdHt4T5dKHjTOOdh2ajTHttsyUTab4gcJMZsp03cAgghoaoahiXUw9gmFohMliyWx9OJTAwcWQWVvHH+N7Oro2IxwSz+f6Bp7qoLoKnqQY8V0DhfAlNDoqGLpKgIN+gRMTqBiaSRAEqPIeXNfFcd3L7I8fBOi6iqIEKMoF2IGLoccYD2f8tb8h4L0Pjvb5td9cuSz3WVu9ySefdvjmXy3SkmVB3/i1W3znnz+lN5qSkJUxnuLjKgGKKWRKVQ0CBPHveCSqLz756CX/2T/4Eh/vfs
r774rKmN29LRLpJNYFLLmi48iHvSQ7VhSCwEPXVb72gejNfHV/xFlrwPV10RPcb7QgsC/LJwE60xHJcJ5kSJz/b92+y6NPX/LWl97m4EzYJNFIiuapg1E02d4S2YmVlRVevHjC7TcFT1I6XeV7/9fvko+tENhTKVM1btzYJB0vMfUkMIR/Ri57k3JWnBXh1JiToynvvf8mj59+LG7KWOK49YDv//C/5x/9138PgN/8O3+LH/3+t3ksOfuiiTzf+sbf5qd/+s+5WPZCxUdt5pjMDtjxRJnu3er7dOunFHIC8GGpCPWaQzlt0O+IzTZSFHRvH2ucJVcR9tPezgPM2CbFgshOjuw2bjjKYHrA9TfEe7qdGg9e7BMKGaQKElp+kiSa7tLtiUxStbTB4eERqzfKHB8JnTcaBLx1+y715gFx2WbR6QwoVXO0WmKPjqcJTo5rqK5JeU7oktPGNlc2pyyuPWc4Ebqy48ZZTiQwZCtGBJNokMWfBbz35V8R8nr+BGdYIJMa0GsKOXv5tINuzDB1sUdfvfwclQmqVqJ5LmHlQyGavR0GPZ1cWuyRqW2hMOTFtgD++ZXNr3FtZcazZ02WN5LyPo+JRwto9pBBT5ybJ0cD1JBJIiTe0521ubV5k0KxzMwT93R60uCk/px4SsCgv3XnW7ROn6CY40s6oZkX4Ho2tdopCyXR/vFk66f07k/5ynuiFHT16gKpUZGbIZPTI0HJMWi5/Nqvf8CDzz9Bl9nCnq1TWJ4nGhff3+21mI59iiUdR3KnmYZJMXuTrd0H1A6E7VnZzJFPJplfFr1hAQO6jT6Br1PJCp6yE6dJr2NxdWWO569EZZPiLHF1vkBjLNZuOIjjzmasLOg4U2FTXtm8Sbtd5/nOCcVFYc88Pn7Enzf+QucqEKf2SP5qyH8B8HXg35Ov/1PgHyGcq78ufwb4V8A/VhRFCf6fp//lUDWf/RNhVGiRJlM7z/LVPAe7srRGNxiPmmRyJk4g+xsSsLXTopK9CkCjfsziaoFAHVNOCiOprG+ghO/TPdvHlwdDr+ZSXVnhbCwU1Oq1PIsLf5vxeEi3f4HoN2PivSKVvcnEEgfteDJBU/KkYsLAzCbn6XPO1ulDzLDYwJ4O1nBKo+aRyEqC11KZQdC8ZIhP5mckUwmefnqfokQ1CwKF4WRE/bRGNOpdzDudYxs7LQiRnemIhbkSWHGcsCQ27inki2UODs94/lAojagRYeFqHF/W3jf3PfLlJbb2HrI8J4Re10usbWSptUVpT7/XJJ3M4AYGruSmiMcTrC1dQ7FSl7Xoaugc2+2g+LIcrXwTz59weHiMI/mOgqBDKKySTuXIFYUQHtdf8cXTYxJxSTKljFG0MMd7HtGoeJZ8Jkuj2SEcTjEaiZNXURWM2BB3IsXPSjCdBoDPH/3xjwHYWC8SDSc5PeuQTQlll4kvUS1VL5GQWp1tdDVLf5DGt4RD9PaX3uLhvWeYepZwSBgRhwcnROIxPFnG1hlNULwIiuJTlE2otfoBo+iUTEwnKxvYG6dNrq0WeacolE8xXGHUHaESoz8QG1/1dRxtwMvGPXJ5cS3VUyiVNbZeCmO5lI+RzATYdptuTciZNYizNLdOOuvT6giZzeQ1OmeHFKuylLbs87W7v8zZaZuyJANsdV0mU5uFpTwP7ol66mtXlpgvV6ifiHlyjR4//uIl02mWf/xtIT/2CNSwS6D4XL0mZPi81cJqTzipiWfZvHqdQReSySK5lFjT/d0tIrEEs6nNeCpk2ExFGM2mDCURrh+K0hwNcb0hqZJ4vkavRzJaZtaX/YdYeAToQZIrtwWapW83sL0Zc9VrvNoWz4Jr8OjBDudNYVRE4gFr6/NMhuC7Ql7SGSGTezv7eNKZMvQosYrPRPahlTILeEqPcd/GnknUobkpyYyOETIJJMqQoUeYWjZhudc1FALfpdVycALhTEZ1j1gsgue6JGVZ7vlZi6VVg9FUGP6qqpJMVajvD8gXxfzO3C6xdJx+e0IgnXhfNTk8rRM2JLjLNMn8TYPotTQ/+z8FSIJCAk+1URXl0rlSFAVVVS9LARX5N1XTGA4uuLfCWL7LaWuHqOzbG4w8zJB9SXB5POpghM4p5BZpS6O+WMrw8Mkn3Ln9S0TC4nM7O/dJZzOXBvT5eZNYZIFuu8OrV6JM5847G7x6dorv79CqyTKdlXl8p8dtucaff3yfRDrHSbuLGQgj8+4bN7n/9AuiSZVUUcj1oDNjeW6VwJfIed4y0/4uW8MHvP22aFhxvAGlUp6p1bvsi7h96wM+/NkfUCzIgNKww3A/QSL5Tcox4aT4E4t2q0v1QjZPjlHNLnMLq+zK/jFLH5FIJZlfSGAinKvp8Jx01uDRExFcWVtaJxw2GfRckCBJltPCD5XI51eZzcR+7/dgvrhIkBRlLM8ffYdKtcDJ/i66J4zj1aUQE2wWCkUOYkJff/boEwbDJkey/2BtZZV8McPP7/2YjXVJqLn2Bs57PrbTZzqNSHlJsrX7nGxGGI7PXj6j25jR74RptoQhMx7azGYjVCKC+BaIxV1AwTOETGlqjmgMNALcCyJNdUpIieD6PobsRbXcGZ4foBkXJaqe4D5SXBRZmui7Lh4CwfACwEZVVULh6KUMm5qGouh4voPChdMiBM6ahHn8QKJLrnt062BLsIOdl89JxkymwwGLRREgnM8u8+zxjzANQ8AuAioxdMVH1S+APlwIdMAnGpelUTvPuf9pjbn5AvfuCYcklBxgDc7RNMlJ5noEgQDduCjLNQwD8NF0hZkrZH/lyhIP9j6n1pM8W+saJ+c93pQ8WwCGq9CfnPFiVxihubklNjaX6Hef8rN7IlB0q7pJOmFBkOTtO8Jx+/GPf8g7d/8uv/LV3wTAtnd59vwh52c11pffBSC9EOHpo0e8d/s3sGaeXPcGOhHOzoTRvbEwT713ym//z/8FVxaXAXj4yX269XNuv/cO7baY8//9d3+PzY13yc9EIMsPzbM1arC8tMG4LvTU8XaTIOlxZW6e4YUtEVlgoRrBCAuHz4tUKYbK+NMaE1usy1mnTSg8h68MOT2ViJcxj6VKhJ1doZPiGY/+eEi5UuS0Lc6wRvOMbrfL2++8x+mZuId0NgR+lkpJ7IWJ1eTGxk2+uPeAblfIfqmwgqZHcAP3EnHSCURP/MGhcMDmqjnW1tbZ3t7m+Eic473ZPsnSCS+eBhw8F3vyrXd/FWgRjYnee8Xwef5gl2jhGjNbfN+075KZi6D5XRIZyfOactDJsyQBSfZPtplfvInjTTg8PhDyUlmm3trB0Na4dVvYvw/uPSCTiXBlTejTai7Bp48fsrC+juqK+yymLXzFoVgwOZWIha7dIZNZwLPE+pWLm6hRjx98+HPyZSH7NhMCJ4rdF3t24a0ojYMw2UKU/kzYBKlQFE1r0Gk3WKyKREkiVkQz+4xsce3jnVP0SIniG0X8ExFgXrla4OjkmFx2nslYBLXzpSgr8yvsT4XDnkgvEYlc5eiwT6Ui9JtueDQbPZaXruDJ8np/kGalFOegJkDEzlo2129uMqx3y
edk8qGe5/rVDPunx2xcFTo2EtYZjfuoI4nwaZpE0nGcICASk05u4yPqBwFX15fwQxJox5L27J8x/q16rhRF0YAvgCvA/wDsAr0gCC6CEyfAnPx5DjgGCILAVRSlD+SA1p91fcty2T0UB9OdO2uMeiatdodWS37EPKeymOFwr0taNtlFox5vv2+iWeKwTKezbGxm8awzbsgD++d/+kf4jMlmF8nGJNrUjRyd4JzlpHBadsdd7j//E/E3CS866FukzAqHJ8eMurKpF51IpE9FErA6M5vJrE3EnEcNCaNXDZJksxXm8xkOz8TznG7VycXmyaeFQev4PbYOH3Br8yauIoTXsQeMhmMMPYaqCaXhug6lYhUzLr4/zwaDSZONzWV2DyRkpOrx4tUhv/GNb7GyJPoiTLdErXnOD376BwAkwyq2paIqJQ6PRB3vTN3juHHI/LJwNAa2ydGLbQLlu5QqwriajGcEM0G63BtKVmzzCtlUgnZHGBrjaYN4LEMknGRfZm0WFxfRVJ9Q2GBrVzTCr19dJRn7gKf3ZDPivEM6ZfLyoE9PF8o1PZmj15/QGR1RkFHbbq/NcihLInqRKZvRa3ZJp9P8p//+PwDg29/9JwS5gExex7GFk7Szs8Xd91Y5kig585Uqk6FDe/CU+SURRXn5YpcAi+UrMT76SByW8WiJTDZORypbz7cZDSxUVGKR6eVrmcQc7qxNT/anDIMJzXaH+U0RHbHbYzx3yg8MTRUAACAASURBVGg24PGeiHz8rQ/e4ekXH1GfQrUstsrz5y/YvLFGNSVR48Z1UskF1OkKzWNx7W63QTq9ijNpE40K2c/HrkKyIcIcwGmvxr2dJsVkhbUN4XB17ve4WU7THR2QldCo7fE59ce7vHnz1wA4a/RIJYuUYgqORFGb+mcobp7JoMvWlsh0RBMlasdDYknZHLzbI0qSdHHIUCJzliorJKNphpNTZjLiPe12SYazHEuACSMSIhh5lLNpXh4LmcpmYDTugSvkLhpVKeXSApZaxj4jqSQnL1u89cY80bAl18Hiw5+dsX5NKPI7d97k04/22Hgjz2wse9oMj4/YY+aPWF8Tju+wp6GFVWKurFUPTErzGY6OjkBGxfUgjuO1iEXjtFWZPfej6HpwCQ5g2zOi8QiOlSLQxfO6gY+uRUAZY0oADcUPKFUNAl8YtN2mTyyvcPVqmaePxeGhhsOk0nE0dYwm72Fn+5BEPoMjnaaFUoH9vQ6xWIJ49sJAUFENVUBOy+DRJUrgv+FsXaC/eRJLt35ew/MjFMo5Om0h672u6HGNxpYBOD57TCadZzYNmEmaB6cbwnVDfPzZT7iyIg6mbr9Do3XKZCLmpTqfASWJqvf5xje+Ia518hQUi6lzxNvvCr3bap2yMF+lUBbP8tbbGzi2yuHRFl/7sshEYIVYWkowDQKKRbHOqZDGbDIlLoNcBAp33r7FyDJZXRSwxA8efYwaamEYBs222CRfPPlD+oPWJaqZotmkkgVMtUhFFfthu/YZK0sROm3Zl5kysPwJXzz4jIgEnShXrzGyW7x69orrK0J/RpJhIrFF5qrCQBoObDbWrtHo1XElnUAkmmY0dIkn7csgk+a7dFs+TIRu+fW/8nd58HibdOWI47qIioayd9BDPo+/eISiCefDDKcZjyeUisI4b3ea9NsahUKJbkdc69qVNU5O95iMbOyZmONvffM30A2N9kREt9/7apZyep14GpBN4Y36CSeHNVrNEe22WPeTozHOyKU/Fr/3R13GE1DRCGT23DQNVC+DHrHAl4ASSRPXCVCQzp3qYNkTVBJYEglQ13Vc10XX9V8g7Dne/83em8VIlqX3fb+7xb2x70vue2Vl1l7dXT3dM5qZHprDRRRpW6QtgqJNW5ZsGJAN2H4QBFsyDPnB9oNswLIBWrQsyZAISVyGpDjDnpnumeme3qqrqruWrKzc99j37UbEXfxwTtVQMCnSMAzIQJ23ioqMuHHOd779+//xfV4gEfq+i+t6oPgoqoS89QEcXFfjZF8EU6sbSZyQzbAhAxszRLm5SyKWp9oUtueDjz7h2aM2sYTFaCyDPs8XAZ9MwKi+h2VZTJwR/b74bMfx+O3ffJ9f+g9WsGVHxqX5S5xcbIGsTmiahueJ4PB5cKWqKr7vEwqFaJZFQDJyeyys3kGTAFoPHn2btYXXOTwXyQiAfC7DxUWNzauLAHh+n/NiEZUgP/Uzfx6Akw+f8Nobb/H0aJ+9HRGE+U6M/eO7fPe/+acA/KVf/s/5xV/6j/h7f++/5em28BsW1sIYRpTD430iYbHni9NXOSs+JqSKSlLPUQln8qysrZKSXTCNVImLcoXLr33xBYCOMlYxQgOubwqAmd3PH9J7/5j5N3+CUUwkjwuXe1iRONXSiJvzQg8boy4z6znee/BDsSc9qOgNQukAnqxgvnr1dVqjM5xmCM0WdzIytUgoqBONOVI2fCLhJKaeoiSJzI+O+1y9foVut/Ni/k9VVfr9Cc2h+JxBf0gutcnXfvxrfCaJhU9PLjguBXB8m4TUsY32EENXWFwV/3782RbzC9Pk81nO5czcyuWrGPo0x+UaBTnzfHpxzGTs0U0KHX+4e8DGpQ0eP/2U/JS4a1PT83R7DWLZJGOEvDx68ik3L79CzxZ3LZoM0Wk0SWVjrCyJvWvUXTKJFSYTm0pR+MiObbK8OEW1J2zRt773LaKZPOfVMn5PvGcmN08g1qfbMJhdFnpJqWskkyk0mbSoFzuM7Tjx7Ij+SHxWLJrFG48wZLm5XNomk9XodOvEJDBGp1ZkKnOVbPYZu9JvWN9YxUHjk0/E/i5Pr2FGAzx6uIMm79rU7DSdbZvpGZPTotDNxWKfWKREIiPux/bjJlPTl1hZK9BoteUe9AhGdMKJCf2uCNSGHZ87t5a4LBEMv3u0y+4H98hMx3i0L85lebaApqXJzCS4eVkEXG+//TZLN37qRcL3h+/dZ/lyjmSgQLUpfMhiv0bETBKJhDi+ELIXMSRK3R+z/lTBle/7LnBTUZQE8FvA5T/N3/3LlqIofwX4KwCxpMXyqsjeDUcuuUIUnAyzM+K1serRavSYmg5h6sLIKuqYZq2JOxIKqZBZZe9ZCcNs8dvf+QYAuWSYfCaH3Yvz0X3h/F+9NeTxYYXXbolMT71+RiiqYKqL1CUUZMAIMB5EaNRbhMMSRCAwoNPpcL8sBGV1IY87MckmlqjJjHAmeIOlwiYPn/02qbjY2tmZS4wGEaoNcfEXVy8RiapM+i7BmBDofrPPaDIArUssIVHULkZM3An9hrgYMSuJrk3Rbk44P5FGVj1gpnCZTivB909FO9hcfp29wz3KDaHI3XQMu9em0WnTaUpoazVALj1HzJLIa6Ex5Z7LxVGL2oW4UCErRH/g8tZXvkJ+ShjCeqnHeGITDYkKWLPe5mj/iH6/96IN4vysgmL42CMNyxJBxCcf7KMpZYJBsZdHZ0XcPR3HHaCpIrAYj07oDbvoWoxhRwIgDG1ODiuEI+KShYIub965wV/59/9TVmfFgOnv/84nDLqnBMMxHDko
XpjR2Nk+JZMRQZphjNC1Hs2WiW2L7NbUVITxKMHnD85YXRGGod1r0RtOaLeEAs4ko8QKKZqDDglZZag1mjRLNS5tznB/X5ypYU4gEKB8XJF/p3L5Wo6HW2Nur0jOJcXHC6d5cy7Ev7YuYch/7jV+45MeqiUCoubhiFZPZeJ10GQVRQm67BU/ZG5ugW5DKPh48IKUMY1riPNsDB0CXpVepU+lJc4mk/OonHbo9TrkUuIc9ESUk942730mqn4h1SMeC3J64RFOCGXbaXu028fkZmIMZHtm5ewEV/GxFDmcn0gwnckRTiicnInfcnRyQCpdYOzW8YXnw7Bjo4U8EjIo6/W6RK0U2UyGocxKt1s9rICOL9vY4nqCxfwUY69FryOUreYX2NyI4lKlWBKKMxJOc/3GFOvrIiP96OEOa+tBrHAQT0LGd2VSYGYqhi/5fzYuZ3i6/YjbN6SzHitQq18QS2ggncBwMIOql+gN6vgSyEDxwdAUnpffA8EA5XqLzuhHvFO+MgFPwVc0LFPcLc87YHVtisfPRGKj2x8wZMjQGBIMi/dopkY45qAoDtW6cFYL2QK65eO4IilzWn/K8NjkS1++QXZFyOLTz+ukwhoTW8F/jgTqeS/aqeB5kCWqWZ58eiuoMRg6HJ/sgS9hq2MxatULJjKrGgzO0OnvC2fWEzqpP2ximiZH+xUGPQFHa1gjfN/HCMhKXbNEJOySnUpweiHuWsAMk0yHiFgbpBJi30eTOpmMxcW5eE8+t4zrKrhGjdpA6OpBPcTQabKwvEjjQpxlNOrS6XYIBIQhLNYeEA6HUbwkJ2fCwbx942s82v4Wjn+CIwPDYT/C+sZlqlUBkuKi4Gl9zuo7mJKqYDKKMz/7Cp2e0KV9t0QguMz6lQhD6ZT1bRvFT7I4fYcv3BFm8Dd+9x8xu3SZhJxvblUdymc94jMKXV8kSSxjGj3V4vj8e+TSom1ltjBHtXSK2hX3+O5H8yhmkLnZVSa2qFKhXXBWP6BvJ4hEhX5LJiIoExVLVg9D8Rb+SCGaytJpiexvuzPEslIEQzFKFfHsT549YGVlg4MfiM++Nn+NSqnNabFFVCbxuq0S03Mx5hemSWWF41Tv7aF5QzwvI2XD5Z1vbaGSp90T3zcZxqhdXNDvx7koCodE0zQcJ8RYQpwHTJVwyMRxPQwJ9KMoCrquMplMMCUXlW7AaNwjYMoWQEcELQFTcOcB+J6BEQDU/vMCFJql0yxOGMozd5tN8vkNYvEAY0cmLbciuIqLGgigygAIHFRNwXN+9ExW0MD3IRQSOiEYMjg8OEfTLlFti8rRwZFPr6NiS746y7IAwfOlvEh2KHieRywSJpIVQbXTeIZpG7RsoRMaJWimKkzUc54vIzhLPOMxkZyLuXCGzkTh2dEhgYoQtBu3brO1s8vE7dAcCduTKOQJhzzys2Kffue7v8abt3+Rv/jz/xVv/0DAoA87FpfWVrl7/9svbIMZ9Bi7WVZnxF24e/4bIjAZXKUfFOeXywe46O1y8JsdblwWHF2qUuc3v/ERy8si6fxXf/oGnz8yyKbTfOdAJFf39ntELJOYYUJJ6KDl1VVaowapmAg0zLRN9aJEKnONfEYkou5/fJdYKs2rty7x8FORECiXjxk0XGISGTCbjVFrddg/OSYsu37mcjE6bY+JPXwxZuGPfeKRFPawJc9YY+/gAZ8/GjIzJ84lEjUZDJtous/TbREUOZ7C+tpNXIna7NFnOOyRzS4RkB0vMXOa83OdVr2CPRZ6anb1Ohubq5wcCR/h0somITPK9LTHk+0jAKYWYxjqmNMLk2s3REJp2D6lXHyGLvfFccPE9DGVYoOFBeGnkCxzXjolm1qiLJHrVtajVOstHOlPDQMOmVyaeCZA41Dc7YOTbW688lX0/IBKUTyDYWXp223yCZEoimfKNKonBA2VhbxIjtn2HtValLllcS5zU/P89t//Z4QTC8Rka6RjJwgGLQz9Gh1D6O8AEVzfpiBHWxqNFrPJDP5oTC4tazETCGg5onETpyhtwWyORCZFryrs2PXrS9gjhdysQtMW9rF6VhK0DP4GBQkWd94/oxMqYLSEjUmGd4jNXSI5ozNWRCyxvnYby61hhBcYdUVVMxkvMO7XmYzEGU9PpQnrY5zeEEve44ATI5NcoD90sJ9zVvJcf/zR608EtPjDy/f9FvAu8AaQUJ43QMMs8FwznANzAPL/40D9j/isX/V9/1Xf9199js72cr1cL9fL9XK9XC/Xy/VyvVwv18v1/9f1J1auFEXJAhPf91uKogSBH0eAVLwL/Dzw68C/C3xD/snvyH9/KP//nX/ZvBWAaYa4fUsQj/3jX/8HbF5e5drlWS6KYuCs3WuQSU/TrBepuGK4Mj+VoVYZszIvovler0+71Wd+fpF8VvyscDaENtFo1tokHRG9n7X2WEnFaMgSPRMf3w6ixg6plp4z3sfI5oekknFcCT87mah49FBNObyqRWk0ywQ9k0JCzHil4jq90S7ZqRxxOSvV6BZx9B6vvSGGYHttleL5Lr3ROfNJSbyZzuGM+mha6gVccjaboz+u0KrLrF/SRffnUYJjsgUx66N4Ko4/4f37v8f5mcii2PYfoGpd4rIScfjwgmQsSyQQo9uVz65PaHR8ej2R/Rk7YzKZJUrlFpYleZlcDdPS+O737yIpO5jYYxYWE7RkW+Da+hSTiYunB2h2JbCBH2XQajOZVPAkL0s4HEEJBLioiayRaYSp16uYAQ9DZv3dyYhwWGXcM+i6IoORm4kQDKZfZIMqjRbT2S9gmgG29kXb41d+fIlyHWq1BkPZXmOYFr3emIApMnyOckIw0cNXLap1WUZ3Iwx6GiPbY+SI39NstwhoNlc3RUb69KSEq3TRQiMq9eezNzF+7K032Dl8jC4rAYavEUsFiIVErqLVdXBLLpYRYigJdesVl2zQZ47HLFvi/N7+wRnl5iKvfEkMuO7t/iMmk3PC4RgXR7Jypbug6hw+q7K4sghAfzggGfOpSshadWhwffMN9g4OaPeFvCxG85izPsWiz3gkXmtX20StGDuPRcY0GcxgRYf0tTqVU9Eekkpk+Qu//BrvffghxSPZ8jedRdNSjCXsuo9LY9ji6KJDMir2OBCC/YtjdGdCT7bbBFNpLipFEr7IpmljCyek8axYImrJ0no4hBlQUWSCZeQMcFWHcFSnUhXnPj13Cdse8PhRlcFQyMZwVGN+bo2tR6I1o9upsrFxi7s/fEwoJs5ltnADuMv6So65guhNL9YOmJ9PoxhCqGudx0yYMFt4k9yUyFI/2blLRJkhlRpQqQqZ0sM+47GLZlry91ps756wtJhAlzmm8aiHM9KwEiH6Xdm24vU4OzlAD4psVyQxoj9x6TtdFNlqNuwPiaQmTFQVQxJaphMGdt3C1GSGtpDgo0/u8+Dh5xQKQm8cPBqhujaK8qPqmeOBpasv/i3ALBR0Q+XkQJLj1jxisRjF4j5xyb1TK3eIRVN0bNE+hTJEVSI0m00iMiM8nvQxA1Fu3/oixbLI+tWbHaLxKKOxyChaaoh6s0U
ydZm+5CDTLZ+hd048mGPgiGdIFyJEoxCQLWuHR/tMF9bIJvPs7Iqh96XlDHEtz9HeKTevfRmARvcRo4nL1LyQ1/ufPqLZjJNMphk74hn++Tf/CauXpuj0o6QSosoXD8XRNIW4nJ3cPX5GsVLkzqtvoSvi3oYdgx9++h2SSVFR9CdjOjWV2esxyqfi3HvOAaYRYmZm5UX7y+rCTbSogsQ+4PLGEhZx9k7OSGaEDrS7Ds1mkcnQwO79aNZHCY3ILogsNfYcrfIJpZNzcnnxWq1/n15zTDoeJxwXlfgnW1v8h7/8i/yjX/8nAEwvuOSS0zTaPSTvL3sHJfLTQfrdJk+fiefUA1EG3hm37gj5efzogJAVIDPlcHEq3qN5ER4/2eba+p+j0xa2r1Z3cfVDchJ0KqZt8voNj9SswYNtYXduXH2dbCRPu9vi7j0BSDI1M8PHHx5jBMTvPT6sUi8NmAwdmg1xP8bjMZZl4Xke3aZ4+HAkiKZb8JwsW1NxlAmKovN8EiFgiGpWMGjw8UeiivrWT/4EvlNjOitkupDd5PT0mPNmi6lZIWcfvnNIMOgxGkx4XrhSNQ/F10Fy0+m6ga7rdNpDFN2Rr6kcHZTZfxLgjS98FYBqrcHW9qdMRuI9lsUfApZ5Pr/lofhCZ/ZGorJaPG+TzIWYSEL79UsJzk5OSU/JVmWgerFFKBZiogt77Gs9jms7LGzMkJXyebDzKUosiBUK4LniPmwfPeHq6k2yKWHDqq1j/s7//pe5svlnMSPifuxt7/PF13+ar731Ezx5ItryxoMki7NzPDgXpOJRo0DMDRFPJgEhB81nJdb9L6IuaHQHomMoEUxxZXqON24K2fzGP9hicPlVrrU69A5F1S2RarKUfhV/3HnBF9kb7fH0WZRuWzzT9FqS3miEOgqRmRf7cOf1W7z/7bdpz2UJF8T9W+d1jkqPcCSoVqekoPg+n33+Lrac+/rq6z/NWfOE/d3HRC3R5rh53ULXhi9kajzpE4yYJDNhmnVh116/c4NyucbTZ3eJRIWeiEZy3L33A0xTVPQKhWV6/SrLCYuLC+E3eM4uuVyOePTLzMwK2auet2lXzrk4F/ZjYSVEvW9jD1wW5Kz2dCrBk60Drl6ZojMQFeDRaEg4GGMwFG2XlVqVcjPD+rUNhq74voHdxXeDWGaMqU2h3w4OTlAIgKR5iWXyhGNJAmoQPSSqZ8mQweeP3yWWiNKoin2fn5kmk83Tao/lZxssLCTZ3i6jeELHR9Mpbt4O8tEHwve+2F7ip372F/nmu+8QjAj7nxlrbO1+jk6COzeFri41PuOkNCafElWjubU8580SE18jIWfKPrl/D7ubJhTIk45KKHSvwtFWh7EkA1a1OdodCCfj4AhdUshH6XehXqnSqghf/rxXZr9yDK44u9nFAM1ekVvqHTJydvLg3g+wzCGt4Rb2c1on1SfSqr0Aq5orLDKcVElnQ/QH4vvKtS5+v8XuWYuZK+Ju9Yv/t5rRv7D+NG2BU8Dfl3NXKvBPfN//PUVRtoBfVxTlbwEPgF+T7/814B8qirIHNIC/8Cd9wWQ85mhfCPjm2i1wTB4/ecDGFdHi9P13y0yiMDu9QHpKGIZyvUY8GcDTxMVPpWbQlBDxWBhTOk5nFxUuKs+wkgFSBeEgtHopcqkkB6eSnDdhMB4H0CYWIVMIuG5APjNHrzWgXBPl9mgygef2mYyFE9isj9F0j5XFBbYfSeb1VoNAaMxEVTk7EE6gpxgMWj6eHHoPGgFOj87Izef48H1xgeZWNHr9Mvn0JlpAfH4869M49FAs8e+ePWLQfYCnWxSmRHm2325SrZ6SS6WxokK5apbDsJ7DmwgnrdE8p9ms0rEnhKJCaXW7A4JRg8N9oYyCkQnjYQcrqKBa4nPKxR5R36XW7pBJiBKu6054sn1ISJIYP360SzoTxfPiZAvi7+q1LmbIQNVNLEksun+wR9iMYViSHHgWDNOjXVdYWJJIgCELe+iDOkQfC+NRrg1YWLOpP2cvT8R4cvI+/+P/MuTR5x8BsLgaIp9fZXe7zKYU+gf39ti4Mk+lIeYWRmOXRCyGo1QxTPGcF+VzUukEyeg8fdnPPex1mZqLYkiH1qVBfklnZAcwJBngJDrkw0/fo1QqgSH2IROKUjrvY+ZFi1qpdk4sXCIdC3BuC+X62nqbyUmD0rnNH3jC6Hyy41P2bN59V8z8Oa6C5wq0pIVVsXflUgtDm6U/KlE8FXckGC6gGxWuXxUOWGgSoXTWJh2ZoeKLM+03ZhmNDun3QlQrR0KG/AYRr0BYsqObAZeHnzUxgxFcOQPhYXL4bIAymKYwLV7b2WmSyqg0WkKJ+WqAkKGSiepMdHGP/MCARFQnHc9wWhX3wXBVzOkEdlsOobsO3qhNMneJdl22UGYChMIByjKQCsWjVDs9kgEdIywd77MtlhfmmV9Y5+hQOBv9fpHRpMrEF20RRkBn0IN/8+e/zsmJ+P6dA+EwphI3qNZkq5AXBatFIi7m457uvYNhDNB1k+++83sARGI50E9xnAnItkDft9GUAM+nTPWARbPZJ5OyGT1HklR9AoqK4rkUK+IZXn3lKuurAd77XDichfwMWwcHOC5EI0I2IkGNbl9lrIxxVHHGZ0Wd+XSScFR84Y1br9Gpn6EqYb70JdGS8/Hbv4ahhBkrLvgSCEMOYj9fnuegaRqqqqJp4h51Wj3ajSHeIIwREzI8XYhyfHyMPRYJmHTOwrHDNGsdcITV6bUdcCZ06mdcuiyC1cH+mGAgSVs6A73+iETaYevpY26/Ku6jQpCp2SlG9jlPn4ozmV9KYFhxVEW0mil6i659wnjgMDMt23TMTYxAn8K0RVeiA3a6YRbWFjg8EN+Xm44R0CyqtV06PfFbbt3exPMdPD/LdEG2kXtpjk53mZoR7S8/+1Nf5h//xq/xyUfv89aXxPB/d1hE0+rM5UXL8dbTIelYiEFDodUVrXQBdZpur01IPydmClmst87RjHNicYG4GdTXyCZNGq0uw66Qu3QqTzBooXpxul2ZCBs3UDWD3pGwV+kZDcU00IizfybuQ/Xc5q03fpK+V+ToWOipheVrbO9/ihIWDu7ugct5sEQ0l8buiWe6qHyT1NzXmVu4Tkui51Wb5yR0ne3HwoFW3HmsmEc2m+Peh+I+zk+HUfwhimajm8L2NNqHhKKXcWSL1dnRPkE1TvH4jHhC3NEH29/l1toXOTmqcGldJBZddcDN1xZIyWHy10cDfG/A7NQm33n7d8V+GmEO9yvgRujKBF2t0qfe6DPoie+zQjFQPFwn8EKGPX+Mqhqoivki+deogTeeZXZmUTznaZVoLMH+46e4ktOqWO6RSkeYjCOABNpRELNhkkhZ0cFVFEbjDgaydVcVc56ffvKUL31NzOg9ePApkZgi2pMABRFcidkr6al5/guwjlhSOI9aYIdHp/dJhYRsZqJzNNgjMhD7BNDpHjBS5tE18YzHB0XWl9aod4fcf/A9IQfpJcZDhXJbJT8r2s8Pzx9x79kPmJYtiLP5OW6/dp2AdsrxmXCWTcvA8W
vcu3+IPRL6LRILY6XTLCoSQMsJMJ0pMKwdYSWEz9UKq4TsIJ4+JisJpcsHNUIpl199+zcAuHb1a6wsK3z44TNyU2JG6Ne/v8NkucXyhsVYDgoPqj3GpTN0R7aVdpsEjASfPrrLj+d/VjynOsP84hKV1jlIbqGL1jamFkKTIwb6RMEax9hYnObpkbCrJ8cHLC1tkHwth+MIB3oql2Vr6ynjiQjuHHeI646ot5qsFURLo+Zp9Bse6yubfHRXtDQur8So1g8ZS7/vzS/M0+k0ODoo/iFeM59uxyaR+BHRr92vMRg3KBSe87JBtXHM3NQKpmyhrjXKvPXlr3K89ZgD6S+GZy3GwwumkpKLMpRFWfbRtS7hsOS+S8wSjpxSqx2TmxXnFY5kuSgeo/vi+6en1/DGXS4uSsSicoa9c0i/3SKdmCISFHamVW1iBqL4prBF05kN0qksy2s9Hj4WM3p55qjWqwx8kaTttU44PYa5qRyJ2PORgwjEmhxs9Tk6FLp5Zf0K5fbHHJ8JgIl8aplAIEHtvEghJfS+5oOrFNk9qHHnDcEhe1J8hILK3PQiAAd7J6xcWuL44PQFyM5oYtOsO1y9EeH0XOiJL83c4uDRUzyZJI1bi4T8AZNBm2pHguMlo1QdnV67RP1I/OalpRmOD0/QMkJ3tos1rm0usb9VJZ8X9uLPfv0LHG19yObmJucjEfQR+H9JIuz7/kPg1h/x+gFw54943QZ+4U/63D+8DCOApYkIeOnGNDs7e1y5/GfY2hYO9HQhhxVU0FQLdyyM1biRJ2p62BJhaGT4lGpNxvRZmhaXpTAb5/1PP2HRnGFtSghhNuxg9yaEDbFBITONFWrheirNhvis1eUcB/sP0YwEQ0co19r5PqFAhnhEKMBhb8jS0gwnu+eMbGGYVleu0Gq36fVtDFMou2a1RtTI4o6Fkhw4E3L5JIZRRZXD8p6WIaynOa3uY5hCCB6eNFmdyTEoy/mjoMlQq2JoHgNbKJbzizKpTJxINEn5vmskqgAAIABJREFUE3E5Y/kI6riC3RLOyKWZPKWLAamsT68j/s61gvR7baIR4cBPPJdgRGNkO7gtYakyyQTt7gUBI0NHEvY121VM0wBNKJVGuYtiJAUi2qrs/4+oDLoOvUGDoC8rZaZJmw66nE2LdbLkMotYgQ59SXpphUKYhko0HKVcE6/h6Ow8KePJQfxkMsnszDRju8ncgnj2QmaZTtPmF3/h5/nBD94H4NrmCmbAIGKILPm1xR/jrFxCM5OotnDGj88qBIwQ5cYFU3PiTBNTWS7qDSYSBjUaM2mV2limTn8gqj39zpjuoEtsyiKqyyxKroBm2bT7kuRTA9Va5Kh+QUAXz55ydY7KJTqjKE3pwD47bDBK+NgdSVi4ucBgZKH3m7TbQvlNFJd03KR0oJCIC/mMMqRf7LMjnYO1xQ0WVhdoPd5iWSrSw/MKATWB4lVRVZnO7oWoTwaYMmjRgxqNnR65hQitpgTsGJ1xclRFsfpEZFVjMnFxBj7ORA53j11iuSQjD0YVibCXKHBWO8RMR/BlCj+STDByO5gyc21Pxpj6DD4uhmRf7zhdOi0XNKHYdDPB/kkZs9554QhXKhWCAYPZwgoROQPh+Qaq5hKXaIW24dMbdDk+2MaWsO5XNsT/eY7HypqYc3nn3TqLG9PsH0jI+mGCaDjD3c/fJ5MXOqhSalJYytAbHaCrefl9QXxVEUhigO+N8TWDwXiCI6vimqHSGjfJBHOM5LxYetrg7GzAeCCh3/M6rg/euIPXFfuiWRq9to0RgoEt9qUwnWM6v8DuqXCIzptPeOP2qzTrIzLTIvgf6TBxFAKG8SJj7vk+rvsjWGtV9fE8SSosjeP09AZ3P3nIG6/9OX7mz/0cAH/7f/0v8AM+riOcrXhkgTbntN02vkTvLMwW6I+GWH6KopzjyU9naNdrZMLirM7Kp5x1NZLJJEFN7F2tfJd0IU2jViMiySs79QmGPiGTFs/9hdd+ir3Du+iKz3lJ3NutJ79NNp0hW5hC18TdSgZNKhcPSEZE4m3s+pSqfW7dfJNnzwSSZDJ6iWL9c5rtUzkHA/Vii1jK4vTsuWE8460f+0m+8ft/h48eiIzs6swiigXdwXNi7Eu4bplW94xbNwRs9vaTY25dfQvT0nkss/6xfATXX6JxKmzT9EaI/eMnRMIK48HzmT2DUnmPuZmbmHHhBD7Zv0vQuMa6DFQfP/wGl1NzLK29iXJH2I+nD1uclD7iyuYXsVQx62M6Y7rdU8bSgZ9KhCmdt3C8COWSyG7PLWxwenhBcc/FRzoRq8uclk7oN+UcsXUMrsnnj+D6mggog06XScCjPxkyuBCVlkxqCrsHli8cooE/ZGKOaLXGeEg9NWkx1h18xULXhM385u+/w/riJmWZFAokbbxumELB4o23JOpX4A4/9pNJth7dZVNCaVvqAg8/e8RJWQSP3//tM6odF1fto7tiP3VVw/M8NMtm2JPV5ECCN9+codEQyG623eD0wuXGnRXe+464R8OBQyyZkpDpwq4FDDFn5Y2EnActn0F3IJA3FVm1HdmEojoffbzF2+9ItNdQDyu0hOOJGROUoACOcV2eu1aqrjDuwsxsmmpR2JCRGyMRipCUGf16o83q9BpzhWvAN8V++sDomPK58C2mFi9T6o+YHH6M1RLn2Q0tMbuaIWLXOD4VXT4Ls+uE9HkKs+IZTy8e0e2dYQZz3L7x0wB02g0Gdo319TmePhNVjYvTZ/jumOmCCIj2Dx+jqx6ek2YikwHJeIgf3rvLwvIUkaDYq7UrG5RLZ0QkjPzyWo7qYZ1wBBqS9PprX92gcVJB0eaJSMTJkZ/njZ+4Tq0ski0X3RrhzATdivCtbwoqnTdf/zJmIsvTp0+5fEn8nnG7RyG3QuKK0DenT05JJ3SqjX0iYSEb0azF/s7HZGObnDTFvpRrfV65vsZ774p5Y8dTmZleZG/ncw5OxTxXn6sw6ZHRZvnC7a/L/atyefYypyUhU51OB19RUfQJAemX6J6K4jVwRxbxpPSVKiVSkSSWBGCzlSTXb81iV/tMbHFvp9cWeHawSyo9zUZYktr7I7Z294nFhEyrTg2tbeJYLuV94bskp4M0ak1mZue595Go8kejChvxHFsn4q4lrua5qBxTLp4x6ckZoZDJyAsI3SxJdbtNk2g4jvN8fsjt8+DxPtFImEuXhM+su12qjRDJiLAfLh49p8X+xQkVSeWhGyOWFmLEYyF82QlgWGnSsRz5lDirnYMPsAJBBm6bB/fFfVybzYMaR7FU7j98D4DF2SlUYkyti/cUO0/o9Vw63TpLC5IG4bxMPGsymWRJWGI/64Mx+RvrWIYMQpMROs0OsbjG+ZmQxbOLE3LZWfRgBHMkZ7XOTxmPbZK9oJRNA4jhBkucNkWgWGyfoOopSpU6x/uiW+bGpkjQ/nHr/9HM1cv1cr1cL9fL9XK9XC/Xy/VyvVwv18v1R68/FVrg/9fLdaBUFhmUcueQoJnl4aNHdPoSGSyiEI7k6TUHZPKyH97vkk5MU2+KzI4z6ZBMK
qgafPhQ8JaEojrXbm4wVVjk4lhkX8b9GoPBOfm0yJztH+1w7eobnF1UWVgQ2dBkMkMwGMTQszzZFpkVU4mzsb5E+UJkPqPpOIdHRWZmZlA1kTGp14fEslN8vPXPuLQm+k57/V0uWvUXmflCborFmTROqEbelm0Aoyy1zjZe3GEss8tLa1nKuxNsWXkeOXU0Xef8rErXOgLg5qXX+fje7/H1n51w+ZJoSfvn/3yf0VBnal5k7S9Kdfr9LhE9R1X2Fg/GXTLZGP2RyLTm8tM0my3s4YhQWETztdoJmmphhiwc2camYTLqKyCx/33XodOu4KJwfCAyIaZhMbT7WKEwSNhcU9dglCabF+LWH9aIJlIEwyZ9OQfW7oxpt/oCUSryPOuuoKshegPx/cnoLK3GmGp1n4UZiUDTrtBpwuOtu7z6hnhtMoyxvb3DzRuvi72ceouJ/msUzzw0VZzVpfVV6s0OmupQLYpMhDtx8H2FZk1yCzkhCoU0ISPNYUfIgat4aKEQ1zfmSURFRmZvb49sOMrlqyKr8t6Dd0kXQgS1LMWiqOLcPRwz0tJMtCGjrjj3mhmgXCsxlxAtD722SqNXYSmdIxUWFait/R063RqplAqebNnsXZCKhTivlKSM+cTjUySyOUp7QjbUgY4RM4gGQ/QCIgvetXrYkzF2WyL1dcpMLSTwDOcFzLsVsunYfWZTK4xckSHFH9EdtYjERXuI67p0+m1838SxhSw0Km1CVohW3aYnijaY4QETZ8BkLCFLFXD8NtVqg7Al9kAzHRKJGKr3nMeoxnQuQaNjs/1Y/BbT0ji+qFOtvYPdF7Lw5hs3KJbPXrRY2LaNw4RyzXzR3rPoi7bJk4si+3uypWrBpPLMRY0KPRKOjKmUbGKxAL5sOXC9Cd44QUCdQtUk/5bnAT9qrXMVFd/1GU9sJhMhw95IE5DjwwHdochSN3sG2Uycy6uizaPWqbI0Z5BJXKNXF+d5fn5Od1gkGZhiJiNkeNwfUnG75OZF69D9T6tcvTRFuVEl8kxUJy7NTnNxVkFVn7eniAz8c64rEISsz/89GUgo9pMx/9lf/e+5vLnJX/+b/wkAA2eAEQgQiQjZ6A4rBEyTTDZMCFHp6LbrRLNBUlaCekNkUVOJGM2mRlO2v4UjOYJxhfF4zMOnojq4kL1Ev9skbM0RCIu77fkjIsEc9ZaY8Rr2nnBa3ObW9RvELCH7wdsu3/7e73Bp7doLgl7FgPHkEkNf6OGxVyOVukS7DAtpMbu4u/uIWCKK6qcpD4V9CMXn8PwBi3NCFt/5g29xZeMr/MJP/3t8+53fAmBp/t+g1vmUTz8RHRNra+t4fg9Ti1Mtit/XaB+wfxwgFs6RlAhb+4fPmJ9fQpEVItcZ4401av0qtkQZs2slTDNKr9dHl3QCG2uv0G/mUQNCL//Y5SWGxQuK3//feO2OqGZlFvLsOD7zygOuLomzefeeTWmskM4IGVZ6C6Sm9un2m1xfE601V2/f5O3v/l3ajTNu3hTIuCO3Q8hKszAvfsvx8TEfv99jedlhGJEt4maYcNSlUKjxZFd2ciReoR8LsSfbbHXDZ9Q9IR5eJugJ+Hv8Q5qHRfrNJsaMqBKvX50mFXLpSSTL5fwUj59qON0ariF0XuukTSVg47pwsi32anZWJWpFuP26yCR///dGuI6Bqilokotq4o4AA1MPUeuIM1b9EI7XpFYTsjFbWEBRKiwWlvmfPxSyqKsG4/EYx3GQBWeUkY9pGgQCEq1QCzBxKpiB2I/44ogQMhUq5SatmrAht97c4OP3Sy/un+N4eB7o+o/Ixn08UDwi0RBnp2KPBz2V3PQ8p4eiMhjQcox7Ht365/Az4u8sfY6ructUl0X2fuv+PiFD5Wd++a+xrIrz+93f+k3srsXx+UfYkuz8L/3bf4NYLMH/8D/9NfH9egXTShMI6OzuCwS8sJVEUw2elQ/oD8WeT83HSCR7PN4VLepThWV8L4BhDnkiZ/be+tJPMj+7Qbd3yEVV6Njl6QjD4Yil2UUAmsdllHGSL/6ZL/LpvT8AYKd4gKWZHO4fEVHE/dNTFb718QOSkqw3riWwAhaFxTRxyRHY6Ti8/oU3mXj9F35RJB7lrLyPJufT+t0Wlp8ikQlTr4p9Oa+c0etUsE0VzRJ6MJaa0Oxa3LolmrAe3/2M88NtXDPDrJxhHbsleq7C+9v7hMTVYmlxjcGkzcfHokIUbpzieFCt7TGTETo9Yhr0O0Pc6gELK6KacX3uNpVaHacv5Ee1XFKRaXaO76LKbpbymcLa2iWOD0vkJA+bHhqg6EE0SdNzdmyyvj7DRfWcy9fEHp8fnbFw+TU67TPCaaGDvngnyMHOPV6Vowk7Z/cpVjU2bi8SlYjXuhmjPaoyHI4YSIqhft+ia3dJJUXVeGf7MdOzMTptWN0UnQcf/XAX0/BQFDmPHxjz7HibYumCJdkZYDPkrG4zGYX56hvC77p373u4TpqlDeFbxLNpqiWXr1x9lWdPRdtlqzHD0lqAbr/Plcuiatos9ti4DLtPRNXR7YeJZ6JclIqgCXk1tDyxeJjeoM205IstXXSol0fML0jMhXCYXsfn0ePPcT0hr+lEGsUbMh5NSGWFbFycBDB0jVBUyF0wFODDjz9gjE0qLs5YV4NoAY9et0khLTAWFmdEh9wft/7VCK48m4TEy3e9EHgBhrbLQDI2q7pFLp9h3LHZeSqEfHp6mlgiiqzMcnKxz3jkk04VCATkfMUgQ2/cxQ6PKJXFQc0txtF70zQlL0somqTT7xGOjcjlxUU/PyyRTIZxRkOSIXFwI9/jyaNt6nLAPVsIULkwSaRsOj3xWqNxRrbfZCH/Cv2GEIJYNEwwYdAeia22rQmfHz1hZ3ebzWVhhGKxBkuXwuzs25weCkNbKbao1xUWJZS3pqk0qmOcUR7XFJfluLqDakb4/rsjwgFBwnywZbF2NcVFU5Sxu2oNxXSZms/Q84QwjcoqnU6PoJzTanfqxBNhwpEAXTnE7I7ACPrUK3XCclYLxSYeM1+0HHm+gxFIoA41XOmIx7Mh1K6LqqoEJC9CNh2mUmy/GEbWDY2T030sM/4C9MLxVEKhMEO7w7AvnitoGvT6NQwJJvns6Q6qYtDtt/Flu2Y8bqJoA8q1Mtev/1sAfO8HW1y6NsWgL77vn/7u3yIYsamVwjiKcM4vL99kf69COGYQjwmHVjFb9JsOQUnAPGLC7l6LqXmbaFw4uZph405UmiUHQ9Z9dcNieWGTFQmV+u6977D15BNyRoaleTFQe3b2KZoWoeOGOTgR0YeVsEkqK7Q6Ys+t1j6qCdOzr3B0IJzXQiGHpUYoXfRewB47xoB4akKrIrkqJjUy6RlqjRYHVdHikIyv8PTRZ0xNpag3ZLAasnDcEV3JAxGxdALxOPX2GaoigpvdvTLXrqyQjFtMZKdA326ihwI0S+KZbHtCNpPHGzu02kLOJ66GqwWxu21M2b9t9+K4ikZPcm+Ypvh+jAGG5J2zAhb7O6esLsv7T4vBSJD9xhPP54dUyh2bsOowk5Hktbvn9Ec2i2uy
59uHWqPK3PQ16mURfJiGNGLNJoUZ8fnvfPAJYTPA6ELIXTqVA7+H44SIRIWgXd18hU8/fspgAIr2HOdZwCrL0SY0RcFzfRxnwmgk2yVdj2AwSvmixOq6MHqba5f54Q+/S0ySRGamEihuB69voqii/U3RVJaWFohEIgw6Yj8DjsbRs8csLIggdHU5z/7hIy6tzpHIi1bM5FSA09MAuvIj0lL8fxGO3fO8FzNX9kjM8fw7v/Qfc+eNL/AXf+Xn8HUhi+FIjF5//CJQHGkt5vNLpCYGSUO8NlVY4rDapNbfwZPQ3aWSx9RsllJJ9Od3OyrxoEo4GkIiadOfFLkyfZnxwOf8TDjCwagJ2phmR8hiY/KYZGyeTnNMuyt0V2is8srN16i0izw7FsHq4uwGF7Ui4aDQ1ZVOk+uXJ3QaxyxNiXvcsj3awxrlWpfliJCP/mAXL9Lj/n3xnnA0hBEecrhlcfuKCEg6w2fYPYPbt8WsWLlcopCdZ29vH03O8SbjBfLJa5yePyWekgA2vQOCDZ35WfF37U4VXytTr1zQ74r3bF5ZQVVh62EZwxJnHE9pbD28x/4z8VveuH6JilHl6p1l3n5XtDjbpsooYfGs60BVPMPhZJVQ5gqNHaEPFtZszHCB6VyEx4+FnQtY89y49gXe+8EnlCRBsNYtkYxeYmZKyObB9gkTO8Tm+gbf/Y6YCXz19gZzq1fwmvssLy0CcLJzRFBfJpMSzlbHu0/9UAG3xGu3hEM0Gc3RbXto4Sp7B2I+LRmNsP/kc1Y2RNCrjUP42hHnVZd0Ttg+U3lK3XYxAgbNmpgXsawI7eYxniTdTWdDXJTH6KryokVc13XGE4fxBKyQuLcffnCXSGqDzJSko0gluffZAy6v3eb0SOyBGXYY2TqKoqNJO+b7LrY9ekEGPOiP8T2V0WRIwJSQ6q6B604IRQJsfSaScT/381+mshjAHohkbjQaZTQavaA/EJ8tSO+73TYzOQFt3Q4W2Xr4gERSYvcrOq4/pNMRegug5ytsl+5Tk7ObhdkcTrfH//l3f5X/+m/+QwBWNp7y/Sd3uXn1ZyleCF3i2mG+8cHfplqXbex2i43LM0SDBcpF0Xbluh0MNUsqtcjVaTHZUalf8NmjeywvCofRMi2qtWPGoy6xiJCXk+IuE/rk81MMy2IP7t+9y+b6TSTXOQcP9zATcX7nnT1OtsWszUxhk659jpY0aLsiSJoazfHTt9N8diZaPyeKQaXcZufZU0YDEVTfeT3Eb/3ODzHDDq+8IhIEJydbvPfhhy9a69cXL/Pt73ybf/3P/8xzujHsQY+bb77O1tNjAo4chbA19hrPmEoL+3H1zg2KlSPq1Q4zC0InPLl7hjvyWZqdwxkLHbv98D6xcIw/syQCBjM+YP+gRyG5wki29+k4WKEwo/GYSlX8nmrZoD6oM5bzzatXTd779LvEwwnCsl06k9Bpdoa0B33qB6LFOJ1YYaqwgC+TR4f7+zj+ColYlEdPxQy5b8W5FI/Trz4ltCvkI3H1KqurX2UuJPZ31tKoRl1KY49KX8j5k8fvkQxGyMczLN8USZFHOw9RrSC9vtAtszPzPPn8HsGYxecPZFLdUNBU6A+E/RjYKosL62TTQS7OJJlz2mQyVml3epxUhOydFSfceX2Gp49FIDWVmWV9McOzZ0+JWqJ1j7BDqVLh6OiUL39RwNEf977PZw87eJ5oS/zS16Y5Pmzx+p2vgCH2PJVKMRzV2Ntu4itCPkdOGcwS3/wDkUT4qa//DIHgmJGjksmK9+QyWbqdMWelZygtESyapksmF6Z4JhKwhak0i8uXKNaO6HTl/VvZpFisksgkUCWYyr27ImHzx61/JYIrx3GoNoVQOmONeCyPHqxSkKhf1Uqfo9PPSEXT2G2ZSVZjPH12hu0I5yASyVMu1xgM2xTmhKN4vFtnbWWZQa9PXKK/jPpTGCEXPSm+LxuZ43h3zNgfM+yJz04kczTqLQoFE88XQhc000QiQXp9YTiikQL6AvT6E2w5lLl+eYWT2pDesIMtkYHCZgJV1xi2hDJqDyEWSGBoKnuPhdMZjXvYcxrGRCcbFEIXCifJZafYuxAK0Qw6zC9usPP5GYmkjNTPL7A7OtOzU+QSsur2lR43b7/GeVfMYP3eu/+QXC6HZSa59bpQ5sf7VVoth6FEObJHI7ywCHKHQ+n4WxZmMIjjjhiPnju5HlYkwER63cFQjNFIxfP66AGRzdOMCIm4haVrqJokQF2Y5YnxDEeiI01ch3bLRk9FsSTIhT0YMJjY5AspOi2h2Bp1m2w2R1ci2dj2hInfIB7PYU8kimNtQiQUw9C1F0Pu4VQVT2mj68LwB604hqpz7UaBel1kJ1TFZWMjSbPmYuiSCFOZYuQ2icfFc+/uDbh0LUenoTNyJflgxSCXNRjaY/b3xWB4Np/j8OiEsCmrWyMfTYlhWkHaPaFo4qEYAT3B2BswvyhnHtwuMSuLIw14q7lPMhvh2XaFZFIY+rP6Pp1JllarzfyqCBAcJUi5XKc9EgbVM2yKH75HPKGRDkk+rtopRjTBk6NTFEcYlH6piWlqrM4LpVUtXbD38IS167N0JbLbpY3rTPwhH378AbduC2Lq2ZlF9k+PsRLimQKjFKHAHKNRi9FAOATRYJRseIb+oExdDtAnE2OGtodhSkLbQAmVBJaaIZOSPeYTm7mVAgE5oNyrTWj3z1DVEOGIzAj7AwJmgWGnTa8niU2PG8SzaXZ2hSOuqB6aCqXzA0ZD8Z6SRATtOvvEdeHEjzQFx3HYXBXG+ofv/YB8PsncTJhaSZyDoY0IRYN0exU0Xez5j2aXJEkvCqqqSIJTOVvojfEdlXaryey0CKpn5xZYXr6MbkjglnCbfiuL41bJpUXAHolOaLd72HYPJOpX0FJY5BITqTcqjWesz9/k5OSIQ0mWOVRUxo5HUBKVgqhcOZ5HQAZXvu+/IBH25TU+2m9w7/HfYqKdEQ4JnTAejxl0h+gyseC2NM7aY3IzSWzpEDX7GsurVwgFwtSaQr8dndQYj8eY4ucx6GtoqgK+R6Mm5ODaZoSdnR0yqSjNzpHYl5UN0DqMpZO0uhrHHtYxtCQTtyOfPUC7YpHITaEGhT7rdNokM1n8kfh9+cwUeFFu3Fjn8eMPxF7VTCJphfWNKBOZWIjo81Sqj9BMmVSzZqg2zlmYW6ZdlzOs5c9YX/oKxZK4x1OFAqo6Ijfj0aiJzdOVHOdnO9TrJ3TaIrmhYTIcdF+Q3l8casSSY84vjvniHUH42m60GLkVrlzd5HM5q1XZL+IpGpYkRP7O9g79foVW5XX+8q/8dQDuf/ANTisOrfo2PUmIuvLKGslQjjNHBGXTMyOKtTGfPX5CQQJ4PHn6XZZXC8TiBWJJWbFsHOC6Lns7IsBtXuj8l3/jV/ju24fceUWApNSbDQb9AZmOi62LquL2RZvZyJBwStyrjl1mafaLjCdVAhLc6N7D9wmGlmhXVS4viO+rDZtMr68Tjwmd2x5MWEtvMNKqfP8H3wbgJ29/FQ0XRdfILomzevrwENNUCCdEADa
/OOaDjx4ynZ7CkdXX8WiEFnjOMyNkoVhxiGYyPPhcJBqXJ9f5yo99id/8p9+k3RP3I5aOE9BhPHYwDCHrE8dGJ4gVEu9ZWorz0ft10ukgg6GQfV0TVSjLhE8/Fnq/XLrO06376HKm9HmVS9xFiZ6r6fi+QiQSwQgL53T73j1cVyGXFpV1RVH4+NNvc+XaOs9XJupSrZYJpUUVVzdDqCgEJ2d847f+DwB+/me/zntPnvHZ/V0uX74JwDe/999Raj7i/2LvPX4ky7I0v98TZs+0lq5lhIeHzIhIXVmVlZVVLat7uhuN6eY0QHBBbriev4E7AgQBggABAiQwxHQ3e1oMp1VVZZdIFZkRkaE8IlxLc9NaPbOnuLg3vDgEajeLWsQFEon0dDd7795zzz33O+d836UN8dmqcg2PKc5Ep5AVfj+VkKQqypjnEqwejqYszt0gn5W9cMExur7EcFgiHpH94+02us9FMao4suLk+o2reJMpybjYC9svKlxZv4zpdFndFM/uKpCbzFOtbrMihXA/ePeH7D3+BlP2FqdSaSLpIJHwCoO+iLme7v8Vycg7hGJ+fv6piIPikSTvfusDRiPhW25ef4+F9Vn+8i/+HzJxEYMsrSc5Lk3plzyMmPj8QnGZWNRh68kRAG9/6y7xQpzc9JR798Tl+Nvv/SHdxhlPHv+MjUuCvfqNN6+x9fAZZlPEapXjHvbYYePONZ7K7MvE1Vi9vEDpcEgisQTAo6N/JpTzEZdsusNxj2JhgclkyFlF7KtAMM9e5ZxE2M+gI7Kaqh2nVH9Mvy9s6e3bb3Cw8w2BUAhPVq6sZle49+gBm+trVM5FgF978ZCzRpt/3hMA6G99e5PZeJ9eO0d1LNbqg2+/xfX1W0RTCXp1MX+LC3c5rW1xcCx83vUrN7l8bYNgYkRvIOz1/MhhZJsX/fHNaoQP37/G579oMCcJH/ILLu3TLptXV3jxQmjBjno+Bv0qtgTejWCE09Mq/qmNIostXMOkUq2xdmmGF5J9NRTPM/VadKWI+eOHeSDEwcHzC7KxuSUDa+xSOa0zsET8fXY8YHYmw+qKiIUnVof9w3NmZmYZWcLnffnVL3CdMP6giSGzmori5/nTU7JpYec7zw9ZWl3Ec9SLy9WTrZ+j6wlmFq+Rzwl//aQl/MCvGr8WlyvHm9IciIn1+6IMKx2MSOuCwUxzVgnoLuXWObohJvf0fEi1Xqc1EuhIKJCmUdFIp9NMyuKw3D2pgDFk53GbW7eEo27376OqKdI5cVC1HJu333+X07Md7t921T/sAAAgAElEQVQTgfAsOtZkQGuvRlQ2FsYTG+zsfc3iikghtxse/lCPqRUQ3gM42N/CCyn4gj7GpkAn5vPXKGQj7B6LZuRYdJ6joxPeKrzL3JxwBtUz2CufclLav6CRnY/OMbHrDKrCScdnM/SrQ+LBMF/+VLzzH/zeH7P17CuC0RE9SewRDA35q7/739FjYg4MPQtOkEg4RWsoSyjdPslUBl2iFepIwzRNHGdMNCgOS8tysGzBXREMiUtY0FjCtjwsRwTiiqLhWCaG4bEoacIrjT1CPo9CYompLDucTk6Jp1QOd8UlxnUgncijqzrmSPyOh0Ov1yISDJBJCSOvVVqEA0H8PrEG44lJuz/ECOrY8oJXOi6RL4zRVZ0vvhCN6bffXKHTUiiKOIP5hTyzhTy9YYOTAzGf4fQx09GI733nN3ixdSTWfT7E570GtZq4BC4vFvBGU1rnbeIJEWRn0zquYpLKhnj+VKD1Pl8AM6BwT7I4zeQLTF0Ta1wmJoUxG70EtdE+rYYpmpWBeCqCP1HC6Yk5//BbP2D3sMRY6dHrCttIRa9wfn5AOpFj0BE/w9olZM5jISnrUzHOun08JYopkd2Z5SWG7SCN3oBkRjgRw4lTzEWJ58RanZzucmljFs80uHJZHOy1bp1INM3161cJGmL/Pds6ZHEhhSdTiM1SH2vSE6rtkvI7GFRw7THF/AwDhL1YyhR/yE8iJg6YsVnBs1QcxaFVkYLWxgTXVdGkyKehGwwHGsFoAFv+zKdHSETCuKrKRIorzy0u0B33OT4TF+pAIEQmkWOn1sCRGdqpLux92k9y+EzYbC6yTKs5YOuFyPAVZ5aJhBSmpoYuxWQfP9tCD5gEwzpTyUR2gUbL1JUQShY0wIq8tdgTj4nloqFxcCpQxr/++21ymSxBKXrZbo/xhcc0z/w0GiKwmZtdRrE04uE0sazwGwdHLwjoFv2eQNdUNCaWi237GUt+EstsEwsHcRyH/7/axavMla7rF//vlYj3j3/8Y7Krx8QTEern4gWH4wH53AKDvtijY0ujPTzGUYIs58RhFTNmGfVHnDXKbFwT/hQ3QLXSxpBlz4mohuGmOC3tcEWyxnXbLRqNJj6tyMqyKFtxbI/Tkyo+ycY4bCdo93ZQo0NWCsIWK2U/nl7m/OgxqYIAnUx/j3xkmYEmnjMSLjCa6Nz7ssbAFIf/8tI6/e4ZVj1AfFb4+Yk5pbmnc+O68N+t1hm6oqArOsdnR8L2AhbNzj7JtGSN6o2IRaIMxz1MyTJWKY/5wXe/x9m/7LA0K0guVN8lzsqnxGIiYijrLRx3xJXN2zzZFmVW8dg8qm6zu/+QwVAAEh+8+yGMVugMhS3OGhmOd3s8vL/LX8VE0FItObz13gc8+pdTovLsa54rnEw+5+7t3wTgvNwFzebqnTlUWYI7tcecnO6RTKc5PxcBQq0c5M27KyQN4RhnokGe3z/EsvqU92SwdTVK5WiC5QYoyDV97+4H3PvpX7MQFvu4VQ2w+rZJp63y7KkAMGpnET7+rVUmM5tcKoh1eLzfoTvJYPhFlYHVP8UIxOjbcOuqKM/SjBXCbKEHcuweiqDT9drM5tfRoyITePcdj7/4y0dYloPnShIYn4qnukJ8WLIa1hsVzst1+hNxjn9x/4Df+Ogj7n22h6b9MuNl2X0URcey5cXJ59FrjtmQBDh//Gdv8Om/bDGd+tElK57nOeDp+A2N1li83+nBgHQij22JfezzaXieg6KoOJLy25ra4Km4jnohX7C+fpPS6RmjiQBOpuMga6t3KMzIWjQg7IVw4kVCkqbfa5Wo1SY4AZWXsu2h+s4HfO8HP+TRowcMpiI4PzjdIZmOY7siME7FsnzxxQMSmQDryyJDm03P02p9zexcnq0tEUtcu3kF1W+xsydIYXRDJ+K/xsjpMpEi0Mtzt9nfO8GqRLgsA9jD0i6JQBJdSrjkV9Ioepd+pcXajMj21EdbTAIuf/D7/xVhRaAw2YKPkZnh2BIAQbsyIJLViUVm0HUBUtRPdKyAiTUpEgyLC4LjWihahNs3FuQa/z3J1Ft89J2PqdcE6DTt5Vlc3eTjGzHu7YpLWaM95A8//JDF3E8AeFn+hEbHz/LKZXa+FHP3ySef8OEHH3Hr7iU+vSf27cujBZYK87gp4XRvrd3mk198Qq1W4+Y1QQJTbzVptloEYyrlmrCFK5urbB+2iUmAMB1QGVZ0ZtbmqZyJzz45sEnMGNjmgHBQ+P1grMf5cYtOS7xvqzPkxp
1L1A46kBS2352OefPdtzjY2cefFZn5v/nsU1x/nJvf+T4Af3d2TuVxn7mMxl1ZYvy//m//Cw8Wv+HS0ia7EkA63XH57sebuJ64AN1/ep+ZuQTpcPxCLPvGzbdRlWX298W+yiTHPHv+NdX+EYc1AWSYziZLCzOcnJUxEDHsW3ddpmaTBZlJHg1rRCMZdMWg54g1Xl3f5Mqljzg4fMDlNeHfRmOFZr/KeVt8v5LzEYx4xOJLKJr4bMfrcl6vkpu5RGFWxIdhf51wWMGSwFsgEGJuPsvhwTmmKdZvfnmJynmXQCDGxJLC4pbGbDHLsCfOwkvLl6k1D2g2h0SCYn5jyRG1RpWX24eYMhN4ZUMAtL9qvCa0eD1ej9fj9Xg9Xo/X4/V4PV6P1+P1+C8wfi0yVwo+YlGJTFpVjk6PWVm6ys1rAvl4+s05xwen5BZm6Q8EYllrnFFvlUhmBWpcb7QYjQO4DY+XL48AUBWX8vmI4nwGB4GQNloO/mCdviS4mEwCTKZTWq0y6YJA5irVFq7SoVEbs7YqEJJo6pjepMrkVCAKuuYRS+Q4OSgTlqj0e9++Q7M85MHWM5bkLfy8vIVnFUGWZj169oJoYkLYP8O5TA8P3RbdEWxsLuNMBNKh6Daddo+1uVeIcBYcHyhTbl0TSICnWoTTBpDm8WOBmOj+EUvrSVp9gTa5Tpt0Psvu/gGerMdNpOd4vrXL7JwsM3McJqZKKBjHkWhlKOpxVjtC9UBVxLyYY4tmp0skKkUwfTa2NcUfSLO9I1A5c9pm8/IijXaHhaJA3UrlM2x3wtVNgXafnJxjTf2Y1vRCI8Qwwvg1HU016HQF0rp5dZ1w0MezbZF+TyQL+ENFur0qji3QykjYj+NOCIVCDIciC9bpjWnU+nS7Ar1bWbrMkyd7rF4usL4pMl6PHg24tHqVzSuXwRHIhxEx+dM/eYN/+Pv/AEAsHubwsE44qvL+2x8B8OL5A2xHoVwt4UwFqjkdGFTrFbJpsXbDco2p2yWXc3E7Yu7a5imJXI7esEMsKmzWSGocl8akDFG6UCrtMuh2WVxLMemIOTbCIUamn9//+I/4H/7H/wmAQCzAct4hHpQ1+4MIncYBi6tZGhWBwvnVOK1qg5DPY3VRlGf09BZXN++ydSiog4sLRVzNxmqN2N0Wc54uZhmNxjRbIxJxMcf9YY9WJ4qmiz3jUy1G4xrZTIHcnMhE9IZdqo1TTC9NSPY6ddsuuWKSgCEQ2UqlR6/XIp50mcgexITiJ5Uy6DSkLMHYQLFthp0eSBrk9GwMb2Li8/lwZS9Ds9NmNBkRDkSlnSvYtsfIdnEVkbE6KsleNn+c0pGkAE4e4+HDJzWKBj2H4ahONrFGtSHT/KrHtA3WNPKf9S4p6GiaLCXwbGx7iuM4qLL5WNcmWJaK52uyeEmi/K0m4XiC8z3RC6NqDobuw7KHBHSRQTjcLZOfSWJZHi+3RdlxOJRgp/KUxbgo99m8ssbWyTf0Jx5+KVq8sXqFvScvCQfFu7wanudxUTClqhflSq7Mum3vPiG1GKVb05nIcinDSKJ6IRxXZFWcXg+fHqDd0ZnI0tbOtEQs7SOiBxlVhC106xbWpI1niwzR4d4uS3Mh1hc3aNRkVjGkMOhZ5G5k6XRkj1XbIpq0KZfF9yWnKa6tfsjZySmOpNs2wl0uL69TqYawXNlv6NjUqh1MWcob9Ycolb5kKfVt5nOimXw83SMcUPA7sxxKQiLFp5NIBLAsgdAmkkl0Q6Fc6bAie18VAkTC6Qv/s3v0ObHgPKn4GqmYlJEIerQbLn/0B3/EF5//WDx7fI471z7k518I7aY3775FNBbi5fF9RlNZts6IVqfEjcvfQpWlnw8+fUy3+oJv/54gP9r65h7J/A3++M9uENMEwn967HG8X8fwX2JmVpSajicjKo0OT2WWwbAm+LUQ5eYA2xVz7mLze7/7++zt7RAciDPrvbffYHvrgMvLImt07fspPvmZENX+6cu/B8BXTrO6docHD7dRQyKrcPDiKdYkQSIm5unzX5yzemWARZsrK6JPYnkuT+PwmOjMMjoChS8fPCGctmg0RYbm3aUN/vaLJyyuFwn4BWpc7T4l4h9wfHBAPC8JibAplVv0zwUhgqLZGIYfhQCokmRHsfBcA3AxAsLau80J6bjK8bGw99/67p+iKROq5Rpx2azuWHXQEqiuD1dKhRh+P55rsbIm9uPcfJhLl5cpnzcutIw8TNEb5Yr/AvjsZy+YW4wjmaeZTMYoiibLcCXNu9+g7/ZJJJI4pqQKT0S5fecmlZrIWCaiea5f+4An20JnTxiaQT6Q5qtPBbnK6uosPsNl2qyjB8X7/uL+X2ObASyvTTwk5jyRSOMpVcy+1DZSdpnYDfKzt5hK23/+4gGDgUmxYHBVZoByMxpb248ZSp20+WSesf2MTrdB0i/6svKZdU5PzjGtHlLtguu37tKtd3h5IMqEe+aI5cUctmHTa4izaKIGCIXg5LDHaCjihMaLl7jWgIMd8TtvX38HIz7icP+M2Vn5LqEVyqUDOoEOb78lsm7tZgc8H+PmKwHma5yUtxk3pqRT4l1Ms8Puy0eMC3FKsicoGnN58PQLsllxfgwHNvPZTSatKUtLIsNuJIJYvUPOz7oXP1PNMRF1QnMkbOp5dZ+NK9dodGqk0q90p2J883Wb3/yN71APi0zudFRjsWBjyKqbhGFQH+2x8zLEyqroa4tHQyiuRmvQ4rwn1iamK8xm1vntj8Ves2yb47MzNu/coHwiKmUePtzjytJlEskgz45E9UryjWvkszN0JZvU0PEwolkWLiUpd8SZcu3NTQbTATv1EllJnPTum8t0mg1Un4gN/WE/J5UGDx40CetyXy1tsX/0GSubYg+tXArQ6zf51m2TTlna2VGUnmNQnR6SlERfemiFRNhgb0+UCU5GQ4bNPvnZFaE3CTz6pgn6Pq3zOsmUONdikQLuxCI/L+b3qPyQW5u/gx4Y8XxHZMoy2SjBAETDFq2mrHrx+TAMhXpVZMWikQwnZydUyl0uX5W+xRY98oFgDL8h3gdG9AdtJmOxZy1rjOFLoigj/Ib42f7eAfnCAi5Tjk6E3x33pOjxrxi/FpcrXdMpl8RhnU+vcu1yingsw3/8T8LZJFMB0slFHLdJoy2F8PwxwoE4tZIIoFs1FbwBpWGTbFIcCqpicHRQJRIZcXomey68OrlCnoicV1+0zRf3X5IM5wkHxYatVHrkikmuX7vE9q5YzHpnj0HHwKeJhQsEdGrVFpnUEv6w2EC1ukO9PQbNT68val8DRoFgMMmZ1LgI62NmozOYkxM+fyAO/pW1AsXZCb1jDdMTvT3tWgc1qLC8JFnGzkdcX3+LY+vwor9hJrPAT//pAW+9OUMgKHrIxpMgp/sTAvL9xp0UzeEALBVdbvRev08mnbvQ58Hy4U01Gr0W5kg2W6c1ZmeLDHt9Ie4LKGqXWOKXteWK6uI3gqjxDooUgHTaIb65d8aVa1nOWuKZ0pkc5rCBZYv5nVgDopEZMD3akvjDtj0SyTDDwQSf5BCwHZNmp
0dELlY2k6NetwkZXZSgcAbXrlyhVvYztmvMLouDodpsEDDi+GSZV6tdQgtMaTQdUMSFaGP9Kienz6lUlrm0IZzyva8eEu5arK0JhzGZTgkYMeIpj4ePRYO5N7U52N9l7VaKK9dF4PuLT78gEg3iU0Va+bzeJz8fx5p6BGUN+8c3b/KLz55RXLbQXHHRfrojLtfzUs+pWWmRK6qYI5WoJFc4q+yzsf42n/7in0VpCuD0e6jpGXw+MS8Pn2xx6Y0VtEiQ8Uj8bOA/ZmyZhHwKjikvsCmVnjXAssScR0M6/nCIUG7K1BG2MOgPGY772BOFclUeRBMLzQ1hqjIQd1SyMymW5ufYPRBrXKr0SCZyJJP5i4DEb9SoNxv4BpJAI6aClmJqD7BMcfEJ6gFG4wC+oCy3Gwt9GSPoA098Tq87xR/2aHUHJGVDcqfbIhNPE5QlR/1uG3c6QMfDNMVnma54z5eHTy4+y+9kaXfPmS+KfRUI2zQbCsHQOa4MiCbTFqO+H03NCUVRAFeUwV5cthwFPAXPcfGcVyyDfib2kFgqzMQSttcfwqNHFbIJ4YhTaYVqqU44FMRFPFMsEaVSmhCKtS5KjHU8FpLLJDVxaXEGU6YjnWAojC1FrxUN/GEDxVP+s0vgq39ejVc9V4rUojEnDqeHE2oli6DUqynmspyVDlm/Inoi7n9aAsViacPAk3vbsCOsLVzlZ//0c7gqbH009FiYz3N+INbh+kYcyxowN7PEyZGw72i4wGymgDn04UltNmdioNgOt6+KXp/zswrDzoBifoV4VoAyzd4nnB53yM8t8eChAATCkRrpYoaWDKADusZsZoNiehbTEj5XYY653By9eok/uv1vAPj88c8w4jFaIxHMNds9VDeMprsoinA4iUCW/Z2nxJOy+Tlxg/PyIVcuRUnLXoqjw20q5z1y2TTmUMyLP+nnxpWreAibHoxG9HsBDncPL1gwo+EEfmOC6qVYWxSfVdKecHUjzpNnUnB+aDAcDnnwzc/54JaYFz2sYaNwY+ESA1lCGQxZuNaAdET4rcMnT7l8OUtLHTGbFxen7EwGzwrSbzvkM6J80VAdZuZtnm4JsMqdvgW+Psf7h6wui2cq5tf45It/x9QtsCYFULVWh0sby3z7PcGGery7zelzhY9/+CGVstj/taMmH9zdpNZrc3Iggp3M7AK2mwLJwlmq79PpVZntaozCUjutqPLg4Rm57CLjkbjU3n7nLaqHDay+sHNfSCeRDDEaO0SCsk/KmYKjoetcaM/5DIdmKUwhIubO5wX44tNtGi2TXEb4Rc8JY7kqnqqhKuLvJmOVYEhjdkEyiIbD5Gd87Gz3SQp3g6p62K6J52mEo+IcPTysoSjeBTOgpmk4jofn/dKmxP/3mEwm3Lx+F4C/+du/4M6dt1mcE+tijj2azVOaNXGmAXR7PfrVEQvzsn2hMebW3XWsJwrFmFgrH2lqrccY/giP9sVF7ePfucKn954Tk4RBtqOQTCc5r+wSktqe0WiYbHaB4XBIVRJTnJ7WWFm9QUgSPqmKTn/QYDgYE00JAKQ7esnUMtF1G0UXF9OH9z4ll7hELClAtptrRca9Fu3RMZojwFtTaVBIv8Fxv8R5STzn/kGFP/7NP+PD/1r0Nm2f72G2LYbDLv22ePZkcIV3fvBDyo1tRlIjs1BIEFYNyofCNgauTbW6x0L+CgFZ9qxpPhyvS29UIhUXMUFxJsTR7kvKLclEOFYwNJPz3WOQ3ArTkxCuWeHq9U06prCNhfwCu88eE5KaoLF8nlhKw9WgPRS2Hwh5rG/Mofs8ugMBbpwcnDK/MHcBHkfTy6hnExLZNLmCZMH8yVfcunQZc+ixuCBijlarwdxskp3nkpBs3KXRrRGOFFHleTGbTfDk8DkxI8TiqihNa1T2OdnfIZ8XwPvmYoFOq0smEecXX4pyyUwhzrDUJBb2Y09kH227wslJnU5fvG+2MEsy3iLuT9OWoMizJ1U2buY4OxOl7pVSknDEwN6vkg2LS+Bv//Ajdh7u4MVj7J+KZ5+aBX73D29y3hbngOd5WO6Al6fPLkpg0zmNdtPjvW+vc3IofPOL7a9ZXbrMyqogCPMsnf7wjEH7nGJG2H4wOGX3sEx6JUEkI9b92bMnGK0k8ZQsz268IBDWuf7GGmmptfXw6QNSqTjZ3C/7KRs1UL0VcjPCpkrVBoZhkEzHiUaEzw1HNhj2NHzBEa2WsGFrIgPVXzF+LS5XpjmkK5uBU6Fl/BGTh48+I5UQ9Y7JrMLpSYlgdEBIF4hwo+4wHoFiSVFR08FVTDKJAu2RuKiNRiMUO8Jo2iWVFJvMthxa513GYdmcZ3YopPNEIzF0SdMdDcSwhn467SEdiZC4kxXu3l3j8RNxMGVSKc6G55jOAeO26Jn5+qvnZAoqcyt+jvaEI4uFPVw8NEk9PeyV0Nw02jjNnTdE8NeoTCjtO2ST0K6LA23UqBMJGDiS1XDc9WH3HpJJBzmQCEYgqvP9H37AyxeHFxeeVCbMdKhQP5XMLtMJ3sRHOOxij8Rnp2bSlEsnRKMyCDVtLNsin41zfCLmxXP99BoeuuEjnRenh0qUeDxJqyeCmNPTOrF4iEAyhGSsZuVakXAUplqPcrUiP3+CPlGodsUmKxSzdLtD+sMePkPS2Lc6mFabUNAg7heB787LU3JFP35J99kbddnbKxMNh7nxhjj13HEMI9DCsTTiMpGDPqJTaZAIiVpj11ZJZHx4dgxVl8jnNEQ+k+fJN8/wGVL8NxCkYx7hOuJ9L69eJheZ5cGjn4HMBQQTCkrIo5hbvkDF4skshuISkxTyUy1EdzDEHpjEo6IHYvfREapTY9zosTgjNno6UMAX0ShmRADWHU2ZDKeEU6DIrdltm6CaNOrnvHFXHGDdvomr6+yfChtbvbRA2J3h5VcvmCmIIN6IxHC1KvnYAoV5sY/U2Br3v/yMoCJOE48dLl26Sb+vcrAr0C1rbBHWc2iuQqMjbEEJ+AmG+wRsEfiPO0G6dYev6w/wdLE2l268hT9g0Ot18WTAHgnGqY1q1NvCac3m0th9i0wigCrVzavDLp1xF7+8CAfCkEiHqNXKJGLiwMlmc3R7NQLhCLY8rHzhMOeVJjk558FAkH6/xIQQPk/4hHQ6zjY9/EacTlcKYaoajjWi3RUHh0YMnzGh3ekz6gunnMpFUZwA/bqHJmlzVVVFlcKlIJgDxb+9C5DC08EZGViuRaUmApLhdIRf9YOkh++3NRIxnW5nyESSiKQzczS7L8Bdpij32qjbI6L4abVFT8Szp0MyS2kmposj3685qBCwwcJDl8/5inZdkb1wiiS7cF0XT5GC61OXRsXEr+kEZDN+vVZjYbHA+ZH4nXSiSGFewVEnqFKcc/PyDWqlDn/yr/8Nj7eEGKeqtGg1RsSSwoa/9Z03OTms8OTxMzY2xUGv+EaMB4fs7KgEguJSFovNkI5epXwqfMLG8neYjNpM/W3aHeFfhuMe3W6Qqf2Cm1c+FvOwc4/d53skpVh2v9FAmYYJjqp4AeHzkvl1to4PscYm6MJXrm/e
otE+J5MRwUjppEqtUmJpbpl6RwQfnXEdvzODN5WU3LqNz6fx9MmXJAOr8n1dYikbc2hy7fJ70s4jfPPwPh3ZAD6YDGl2X/K73/kjxlNhd988e8TxWYefn/9H/tv/7r8BYPnyW6STadodQcSRWJglGUvyfPsZpaqYl7Nmj8QlF9MsYlji89u9PpuLb5LwCwR8ZinB1tnXxGMBjkoiQ9oaGYTUDVKx8EUFwWg85tnDe0Tj4v0OSg9xzR7ZWIR+S6x797zGresfMhn0mZWXwGrpJcX0u/zo7/8GgKuLN/EFA2w/3aIzFEF2LrGMrSZx6SF5khhPRvh8Ha4U3gSg50xI5fqEoinWb0q2x4Mz9LhFPr/I8anIOJ80D9G8IJfmhZDrXu1HZHJRTo7Bkb5Z9QTQYTlDohJJrHdGfPP4Ke9/W5I5BM/587/+B3zhGK5E7BTPh6pO8Twb6boYdUxW1zLcvCP85NnZCbffyfP5z/dRFLHfLcfD5/Ph2IKqHaBRGzKZmIQkiZC4WCkXFysQYIfnuaRSSWpSFPn9N3+HRMrHi5eS3juYwY6O8WvOxd/NpfKc914SiYq9frL7nL2XgDG9yIolU11q5yZhI8nt2yK+ODzYIZUoUpMsg5Y9IhYp0DPPUELi8zuDPSzflN4gQjIu9lE8tsjL/U/wy7MhlvBh6Hn8uu9izreenOJ4JqobZmZGfF/QKTAyVXxR4SPa7TaDbpfUYoHTPQGqxTI5XFxSqTn+1fcEY2ImsUlv3KfWE2D5zcVFjKsh4qEIR4cCiJ6fW8YctmnXy0TCUk6krXJ8eMD1KyKL+6h6n4Dfz2jYo1QV9rO6msVyTqnu+0guCvvc3inz9saHvHgumOSuXrvN1s4+ek6nfHIEwMbVW6TSN+nWpxdi1bn8PE92fk5XElrcmr9COm9xdtYjLTOty0s5qtU6g2GTTlueM+ll3vvgfX70I0HcclI5pz89Zza0hNURz3RlaZXT80Oubd7hWF5cFCfCaKSSnRUgV9pt4ztSePjkHrdWRQYxtzjLYNwmk57l3ueif+vL+0f823/733MoSbYOaofMFG9xeLbPXEESpwT6LM4sUD3vMjBFvN2qD1haXmXiCp9bPW3zzvuXefn8lLl50fOo62esZG4wbIq+TE8N4Y50islbNM7F3/38X74kF1rk7o3fZnVZ+K6DozNePt4mHsnKtWuyeukGmjLgVM65aihoio9m0+PRE7Hu167dIVMYc7ovALVk9BIrq3maHY+ulGIZj4Pk53I0BzX8pjh7CsUFUHVaHXH2hgJJ+sMGxXyGE/l92dQME6vB118/ZGlZZKp9eohQ1Icn4/94BvYOtglFgiQlqFbI59nd3UVTVLIp8XeDtnj3XzV+LS5XsViSm9fFQdVqVOj0+6xdmaXXEUHa8uwlDPuMRzv3WFuXMAMVQkEHV6LUiQgEgpfpj5uEgsK5jQY24fgYnx4hKam065UmmjFEceWrj2LouQDt3oSIDPBOjg7Izxd5uf+SW9cFEujYEertCrMLIvA3zS5vvfMDXrx4yUiyk+m6znnTR286xGHv/OkAACAASURBVBqJ9Gy50sTRbe6+I/6ufKLy1YOnhCMGoVeXG3tCLBjGbI8Jyk0dm5/FUBTGXUlfrkTo9qecNHc4PxOHrGIkMEf7VKtVAj5h+HTmcFyVIeK/Y4UMk8mEoM+PTxPvfLJ3jq7r+A1xugz0DrF4DF2H2ULiYl0qoy7FcIzzsrh8hI0Bmq0xkZoTM+k4ETuM+dwkmhaHwKDbpO/aaFjYsvylfO6QSlosybKLQX1KMpQgZOik4uL79o4OCUaCJGIq7bLYQOlclHariesXdjDpNpmbX8QbO1TOxNzVml+xsnKJTKrAwctduQ5jcplNDFnG1hlU6E1ymKMj8pKSU/eSmJM0zUGJpaWoXNMpne45qk/MU3+yxtHZMe99+AO+fCqcX6P9gmvXVnn06CEJ+VlLaxG0SRhFBv4rhRif/vRrxs6YXFZsdDtsEI+lOelp6JL1pzk+4aO3V1BGkjWhb+NpcVKJOHsHwtG4EY+DswOKqzOU68KmcsUYgzFISSLmZ2b58tNHzKwm0GNibZqNLrc27nBYrRCV0c7ZvR06nQ7xeXEoLGZucrR/hq3ZyDs8/miS8WSA3wsxOyf2Wq12hmGNsWS5z8JSEVOZ0m07F5nHnRc7jPsDOoM6aRFrYOoQ8UWZk4xXqWiMpZxBdzLAlGWPAfpMxio+Xfx3rz/AxU82u0RMNkkP6ue0B0M0TaMg9elsLcVJs4seFXbeH1uMpiqKMkaT9LedkbDvbruFX2aAmvU6qpai3ZLlWiE/o5FDOBgkLjXevJGBYxnYeOiSrtmybfwqF0ikNbEEM59tX8hBOJ6K4/WJR3zEfCL4cCdVhmOLnq8u12qJnWd7zBSTBPwCDWi2j9jceJNoOM+BLHu4e+0dRnaNSkus3Vh3GfQcPGXA5FUZa3SKGrJRxvrFZc91PXT9l9TsnuKiaQqO4+CXQeFkaDK3sEIiM+Hez48AeOP2VVRXwxuLQ9e2ujjTIH7Dx+YtgSB2em1Ux+HkaBtPEsrE4zPMzmQZD2QD/1hDUXWuXHmLZldcWkbDIdHQLM1ajVVJnT81J1hDhaNdEVzpZoH5ZT92vUupL/Zx4VKe5OQmz7Y+IxMQl9O10CwPWwM8XdhGLBxlMJmQm4/x5KksWw1U8KldEqksWzsioL1p6GQSSaZS+sHEIb00Ryzjp3QsbMqZDFm+GWMss/fNrkkyNEe7UcZniJ9tLF7HG4LmqZyVBUq9sjSHM+nQ64jn7lo2ycIa88t3+fE//B0A2eDbbH4QpdrdoSnLJRdm30VnyNqGCBxDSpZSr8b1tXf52ed/DsDlmWVCtTSBsEJagjCmqmAPLWbjwqYHnWOuLb5Bs3rOzIIAlEqNZ7R6z3hj8z10TczdZ1/+LevzsyQl21WtCYF4DmtkE59ZEvO7tMjO8UMGTgavK2nI3WVcXwdfVPiN4+Y+s6kchhsnPK7LuXrJdLCAZ/uodoQN5XOzTLwBlYoACLp2jHCvQPCWx/ZT4RcjwVkWEnEC4TGlc7F+ieQKYyzckHi/+YVrpFINjrZtvKBkiZ2q+FQHXQsxRbyfqphMTAdXE/P77//vn1A/VUmGc3iS0dNVJxhqjLHbvShfxO2ytBbjqCIC7+lI4f13vsv/Ef4MWx6rqqYzmSJ0tl7td3eEPU2iyfIpFOdCR9FyxfeBH9uxSWR1Jn5Jid822Not0ZLlp4VMmkDEIT0rdS8BpTdmWNWoy+qPhc3rtKoNXK9NWwLD9qHKOFDBG8TIpMW7eMopqrtIKirOlP6wyvz8Iju7XYYd4UuisTnC0RgT06UqLyT7+wfE4gHiWXEWHh1VcaYh1q+sor3SzDM94tFZytUa3zwTgW8smAA7jCNpuqc27J/UeO/N7+K/IoLshbkkx3tPmYkZ7JcF2LF1/hPOj1tc35AyCO6QkBvFF65iScKQZr2EperMLKzgjoXPm8vHMWwTE/E7w5HO8to6uh0mkhDP4EwczImNGp1iDcWeWSu
+ha1P6MqMTb3aIBIIksktcygvc93RDgorRKJp9KCIQX765Y+IxWYZKALIPCgfUO+HiBcMbFcy15aPOD3qcvdOhlt3BOCx9fKQf//n/45iWvj4zrjM7HKe87MtTEuWxKd8dPtN6vVTskVxRj55vMVqcY69F8KG1WiQYGKGxcAESxf2QzeMQZRnT7exXOE7PvqwiGWPiEpJo2CvT7dVI5vJU25IdtDWGUFtnpAvhqMJkHkSVLh0OU48eRuA54cVSqct1hZuEUuIy0Nk3MKb+EjGBNFQOhqj22tiGDGSGeGHW5U6J4MvaPdvkJ0XazVTyFBr10hHxBpcujSDObXxdJ3SqfAR+EJMLZNub8ytt0UmNx6Io46SaIg5t90+uwdtFKPF+anY64riEQzpaBh4uqSM96KUz7cpSM2wyXTI1Bpydl5BlXhHuVZjeXmJUCTLSFaABEMQiji0pe5ktXnCxvpNVDdDLC72zN7hU3oDk+XlZUZD4T/7fgHQ/qrxmtDi9Xg9Xo/X4/V4PV6P1+P1eD1ej9fjv8D4tchceY7C4qxIea6v2fSHDfb3DklEBAReq9UZOkMWZwuoErFIJR3OTsYUCyJ7UG90SGQMzPMgfSlsWpyNM+x75AphZCKJYCTIdOqSyYmyhGDaj+2NqHUqeBHZWxBWmTptQoEUp0cCXfL0Eo6S4WBX3OZv3dpkb/8l/UGDalkgA5oSQ6dLrzJBlb0a2cIclWqHv/xzUUZTSFxm/WqWTnvMuCNu3KHIDOOYTjyvsYBAjsr1Fr2eTTgkkIHz2gEDs43md8nlxDvv7j3HmWoE9ZQQ4gCa9QahSJCipC4+OBH1/PHw/AVa4KhShG0kUfGBhj2xsdwOsahAGQzDQBtNmNgTigExV91JnanaJivL9Pq2gRJSSDjeRUbodNukUz5mrpiiZ1blOkDAl6TRFGUC3WkX32jCG7eu8/JAIFlLG0nCBHEVH4tzYt23nh+gaAnSUYEUROd96KrG3t4Rqi1QhqkSp1KrMfUOKaTE7yWMdVzVT0Kmb3tnSYZmm0Q2zFAiioP2Fr0OxJMapivm5en2LplclJmIQAEff73Nwf4WzdGAsqRPTmhZpiOHSDSFLik/48EItj7EL+tMPv2Hf2Z9aZ6Fy3NUKgJBDERCPPxqi8131zAc8VwL0Qb9lo3uyX4A34SzsxKaf0xDCtoV5zM4gymDwRC/pBg/e96lae/jC4ts4fMTBSfm0BurhCXNcsLXZGpqvNh6zKAle3SsMKlUhrbU1Ij45gj5k4zVKZJ9Gnc6whx2CST8GLIcRHE02paNLvukBuEa4/6IYWdKTmqgmP0eoViIsFakK0WR/bqKZavsNwWCWQorzKZzmK6L6wl7Hfc1Lq/P0KgLW8mlLnNWKaN4HWyJYIaUFMWCWFtPrnu1ekA4HKPbEd9lTlv41DCqHqBZlwLeMfH346GCX5YPOt6Q6TSCqwuEedIYkI2n6TZGjGOvxI41fAQI4seVfW5+vy5Eg6W4JJqK6nooyi8p0MNTl/HUImAYdNvC4ZhDB9Xx0xkLpHA03MPzNKxpClf2fIQCWcaDMY3KfXIzIktrxBwqBxqlM4HU5XMLqBmDcqWCgkBoDX+QaMygORZCwfBK14qLskDPVQEVRRE9kgDO2CObyhKJt7l1R2TUW7UhV67NEA6JeXEdhdH0jNnCMocHoob+2tXbBAMhjkqfE4uLdbenBu1mj2BUvG+rHWNiteiM2gRj4v02rn4Ls5Ugn+6Sywl0/q/+r5/SLZe5dUeQFmleCsVziIVWCRjCd7XrPVq1F8wU1jEkacdo3CcYHFNMiQxNv62QT4wYtOvoMoNhuGs0mvvY/gTf/q6gSw74wmiqSelI1Mv71BDZhI/a6YgVWf4Wj1/Diw/5yU9Fiew779ziZ//0CNcJE1ZFiWM8mEUPVClVnzGTF6VJz57fJ5PwWJwRc1mpV4gQpHR8QkieYZsbS/THU3LZZUZDYUOJyICJ6nKyIxDQN7/9XSLOA6bTMHe/K87DJ5+d8PatGY7PT0kmRXnd3tFn6EYAIyfWvKAvsjZT4B///v8kkxbzksm9S+ngHtGURqMq7Prtu9/DsizqVXGm+UIRHKXHdKKyuiIQ97PjHVLhWdKJVT79hRCrjSbnSRtJFkSlEo8HZfy+MMWlJOSlrtbLpwRCMazJCDchxc21Ca3DAeGrIgNVNNLoCwqlgypLUvrh/KxGb9ige2hx47YoX7K9Bq1Kl0RcVqk4QaIxH+akRUwT9uPpY1wPVFe9kEdQPJVGtcmsLEPsN9PgjXG8AYoUtFfcCI6n4/P5UHziPLLtKTduXsG1RbZgb2+bd2++z8xsitMTYVOe5qApGqqqX9DBW5bYZ4ZfvK/tWEynU3w+7WI/Oo6DEQhwenpKMiP89eaV61i1PtffFfNy9NimelQHqUcG8KzcIFg8pxgWGQ1tbDA7F6Hb9REIy32sjmidBJgrGHyzJTLAK5c2iadsXrz4KQBT16FSyxM2ikg2atqtU8zxhFHfTzYv/OLCSpqv7z0nZAhSmHfuvMtw0uFg/5j1NZG5zhbneLH9FZqmYASEHx70+7z39hv87FORtZ2dWySVDtHpNpmXFRLH1UOCiTzdSZ9qSZz340mL2XSOk0PRZjGeDrl+5duYgyGRrJxzVWO2MEe1toOGmM9SecjY52F2RVYjowU52H7B3Tc+4nxbxDrBaIa1pff44uu/JZ+dkbZu8/DRMy5vincJhTUOj0o8fviE3/ytPwbg5PRTPE9haLapSbHzdCqH6gUJBaQeoNdiOHSIJwqUTkQW7O6d90kn65TLVaLBJTF/717m+TOLZ/dFme7ixi2ioTWCuTEhSev+4P4Whj9Co9FiX4pcf/zxn9CsPMUnZVAcRUV18tjTE2rHMkOqwdpygdVZm2FBVh+ZOt98+pzcohQ2b/coFos8uH+P65cEGVcmOk+70SY/Y/ByW/iE2991aZhP6HRFCeBsfpbm4TnpSwa7B69KYrNMK4fYI7Hml5ZyJLN52q0KhswWHh+VKC6tMrYrdNvimdqnfRaW53AdEQO1mjqj/hHm1OCd94V/K1fr7DeqhIfzXBOui7PzPpm47yJ7WFycYWvrnFBMYWqJffy9j9/j+ctvUL0UU0vGCUmN8ChMqylivFQ2RM4/j6qPaDfE3rq2scTL7WPCEZ35JXGGlU9amG0Vv5QcigUNJlYby+pR2ZYiyeEU4bBGs9m6KMF/RVbyq8avx+UKl90DUTrQ6J3heUOCegBDio8elZqilMFLYo3Fi+Uzedq1YwxZbz1x2hxXzggnsvjH4rUmUx+630+j1ceZis9aWFboNkMoUo29UTkkmsgzHSrYknUokSoydhSGdg3bJ3vBghl290rclY3GtXoZnzGg2+mQyYhUbLPRI+IP0xl6eLL5sFG1sRUdyxYLMdQ87n/1klw2SH5GlMgclpr0h0P6gSGDntTMsTTc6ZheXzhk25ui+32Ew0FaDRGoTSce8UiQqdnHJ9mlUFxUxaNREQfoZDBifnGeVqNJty8DtWIeczxFl02pPp+F7YzwBx
SGpmRt0xwM4gTQISjmM66kGLb6rLyxBEB//5CGOuZ6OociBYLj8S4bc9d5+M0WhMTnB2N+OoM+g6FsPHbTBBIOzw63sWzxOyM3iC+sEw1E8fvFmsbiYZzxkIFsQh+6YcatCUPLoS/Fa/VQgUzOT72n4vnE3EUjKRbWs+xJJqJEMoqqD7C9HJoUjrPHGrE5j16vzqAnniESixIOLF4IFLa6FXLFHL1BByMkfpZKxhg3B4SiJvNFsTkn4ySV3pBoUtjP9dsbzF8OUbbOyPhE8KEMD8mngtzY2CDx6sDM+6nUK0xkG5g/HELzdajXOhfN8r3mkOnYxHM0VjZkD5nPoX3aRJlKmxpNSEZjOJgclUQZSSIS5bx8QiySRJVMmYqq4tP8VEsyuHI6rK/Mcdot02rXL/7u0vo1zk4aOFN5SfIpDEb9iz3T3W+xkM6jRiK4ugxsgjYxwyCbmsfXEsFiv2+RCARIauJgqnc6+EIBxqMuvYrYV0Y0yPHxKYYuglBrCnMzS7TaJVR5GbDdEclgiNHEpNsTAVGnMaFQjGBOhL160yRqyMFzFUIBsS4jU0xsKBSg0xXfF4v4MLsKjkzah0MxeuYAJWDimFLTRnUZTAbAHD7ZG2bbU0DBJ7W+bNPDdRw8H7xiD+uoQ/zJCN3xGG8sDpRkIojnOChj8XfaVKMwV6TRPCOakMKm+mUmQxc8j25bCmiOd0jH1ohK4ebcbJBOzSIWDzBsCbteWrxG42BEzTm6IBG56LX6//zbcxUUNHTfK9/iEAknmZkNMJVlXZmUznQyYmlF2KahubRaMeqtOhtXxbykchOODwbE40UGo1eEPUGub75PrS3KZsu1R+AUyeVnOD4XLFFr2rt07VMMPY3miYD5T//0T3i5/58Ih8ReT6VCNOsNLl/NUz2VPZbdErffuMOTJy9JZgTolEpcJt6xsCXIVphbIZPVgBiPtv4KgLSdBF2na57z1QNJnOLPcfPGDSZTMQcrSxlUW6fndvD5w9KG/Rw8fkplW9jKFi+J2Iv4tCG90yMA9gyVb715k87AT2sgQIP3PrrMwfYx52XhAzvDEY52xnLiMuX7Yp6uLVwnwJBENEKjJX5v6yc/48X2IZfXxL7eevQlYcfm1gfzhBRxmfONviI2s8CtxRX+wz/+zwDorQWy8wtossz7u1ff5vD8BW9/8H32HknttMVbpK7+K9KJEFNXEovE5/n8pz8msSBspdOtYI8C/PD7H3F4LALTUmWbzesztPsvmCkIUO3Ntz+gNzokLBm+QvEAq7kV7NaEzILwb0/2DhkpdQL6hJmVdfl9N0n6f87W8U+FnYdyGPEZOtMpxwdiH8djSVSfzWg8vhCmntounpqgVBMxQSYZwLTq6JofV2pfep6HY3v4/M5Ff6qGRq81ZaYoboH9ZhyPKcFwjPFE7CufX8PxBhhGmMlAnEe5fIxsfkqlLfzF+vo6T188YG41xe6e8KfJSIjJZIJnK6/4OVAVA9fl4iI1Nkf4fWEUxUPxXvVmCsINVVWpyx6N/dJLtKDHiRRuHQ7mCUay9NqyVAoIJHMcHn+JNRB2fm3pHabWlGAkQacv5i6gqywsrBDxp7FcwVLXbjYIh1VmCmIOqu0SgVCUdC6B54p9NbFmeLnzkDt3b3O0J87Rs8MO62vXKObFhW9/55Sx2WF1bZmzU7Eu9XKPjY1bPHr2CdgCSIhFHCq1MsW86Enstiw6DZNc0ubkVFz4RpaGEYnxzdMfkZ0RNlXMr+PzQowl8Zc5jPOTT36MyRjDPy/XReH+gx8xn7tEXJafBXwpbNdG6gVjeGNUX5z+cIrhCTvo1AdcXb5KMXsZnybOAsdzUYwmn34hQO6Z3E3imSlv3LrKmbzw1ZtVLq0lCQdTeKoUhkeh3dqjkBPPFI4tMB1FGY/7jKROqGmd8dVX91leWOfx4T8CEAx8n80rNxi3hB3kFgzOTp9RSMzTkH1ZN9+4jt9ROTp+gDqSoHrlhHR0jrgUtH3yok4oniSgDuiEhC/TdJ1y5Sm5xCajsbjc2I6L7dVQWZHvt8hw0GJhZo1HT4T+5ztvvcn7H9zg/v37jGVfZLPsYLZVymfCFu9+cAurn6fWGHB6Ji6GM8VF8pk0japsUdH8nO3tYZlBrr99XdjZ6YDJwGOh6GcqmZxX1zZRo23+X/be41eyLL/z+1wTN+4N7+N5nz4rXfmqdtWO3RyKwxkJAwmgBBmMFlrMH6LNQBBmIWghQAAJgsSQ7B6Rotp3V1dXl01vXubzLry/Ya7X4px8LS16VrPoRZ7dC8S7ce45v/M7P/v9PvpSPNt2qnzzO5fYfvCAn3wmSsYX5/Jg9/EzBZbeFYGw+o/uceHyKj1ZWdvtDLCsKlN7QFL2Qjx59pjxOGI8e37uJwxPDNbXLvHxR0IHJlKL6DGFSmEDJDfrsGfz9Xf/GV7Y5ec/E2Tu7771dU7r93FcoZdL5Rydrk3chLIpzlGz1eX265fZeXHKgrT7el3heP2+8QfhXM3cGUNXXPL2ZMpk0uSdt97lwT1xqPPlIs+3zxiN2iiyZj5uqiiKyuBMRDAmvo+vwqhTOycVNOMpxtM2w8EYU/ZhlEtblPMmBwcHAKSTceyBzXQcZ6iJw2JPezhBAs2IUBQh9IcHdcxYkU5PGI6K5qBEJvYwwh4JYydtFZgFMaxU9hz1qz8c4YcepZJQKo1am/FoQq9rU5Pe9HDWRcNhhAaqiMxls1nckcE0EAdR0w3UKMZk7BBKh6Scn2PQ61Mq5s9hqxU1wrZtFFUo0sjTIITxeIwiC0/7/QFqZBKFY/kuM1Q1JJ9bojtw5HeGpDFpjUcsVoVxvHHpEve/fMyoI3u+tDHZwGTaV9CL4v/mVwya9SaZXJ7QEIpFjfLEdY+ZJUmaixUagxNKlWWm/ZT8vTOaDYeNtT5uIJHPDAXX9qhUhBOaLVfZnj5nsTrHqC+dSTOi23ZQMzqKhKPdOXlGlNDQTPG+C9lV9g77FMtFDg6FMs9kLKJoRjJp0pHvM+n6xOjgSsN4HNhEuoo71Wk1RfZHnwRU5jMM60meSnSri69Z9I8PSenCyX733e/wm7tfMgwjLlbEPIdOjBt/HGGWh/xf//RXYl6lDMOpQiCzGuHEYjIe0RjbJE1xybmRgqtquJ7C549FT0BMj2PpxXNy2N6kj5koMJw0MSSi38TRCb0pShgxGkrko8USe4fPCTwhG7XGHok4zCIdV5OIlzOHwekZ6XSeTrcp9y+kpGaYSVCGbCGFoumU02l2GkJJ2oFLNV7AtCAre6WcSY+337zOzo64+NMFk4FTZxKExOJC9cyXM/SaE/yJmPeMHrnEAlqywkyi+bj+gMGwS6PeJp8XxnkiGUPXTWyJcjQ3nyGdytNq986jfgSyPyp0cWYSLj2TxbQcdEU6SUyZOQqVaopIXgpWcki/ZUEsRhTJWwiBAGZIqE573MMPfHTfxZfZUC3Q8GZDzFJIWvbDDKZNFNUjbohznYjF6A17oKWwRzKrmfH51//df8Mvf/kD9o+EfBYqFQrZJHYkzt5gNCZmxLhyZY3hF
+L3DCMgU3Dx93+HTqZpCmHo//+dLNmU/xKoRYtBsz4iVwqxbXFBLC0VyOXKmFIOwsihMKdg5mKkZUR97/gLYmaBZDZB9mVEWHW59+Auq5viO5X5OHbXotlokIqL83h09Ih8NsfRwV3GsqcrHV8UaIeWsJL2z74k8goMp2N0Q5yHy2tvEswCcsUEja4glPzg7fdodWcU18V6Voopzk6PiHyXP/lT0Szfs7t4x1WUdIuYImShXn/Gbz5pYg/FOfanVWZDFV2f8dttacQvX2YxW+HNNz8Qa94x0c0GpcI6774rkEH7qk+9MwDNQPLSMu2ZrMy/Tycu7iJ9GOP4uMXR4zO++T1B9FtUq3S6E3p7Lu+/J+Y5l1fYunCba8si628tpMiMFQ73n+BKMveLK3kYdGn6Ft/+mniWFSxyuHPApCGMwmf9Gb3mGRvrF0jeFPL56NkuNzav0K6fkZeQ4+3nXzBfTrAi2dV/8OwZt1avcv83d9lviTO6cmGVDz/5EG9a5Z2boh9Gc3o8f3yfC4ui/zhdLDAJxiwubp4HReczJrmUhT40Od4Wjvbx6D6aphMFQh80OzZFvUMxu0go+1WcMMCIKxyf9Ni6KhG9vjwgn48RSeS6qdsjFvcw4nGkP4nrhmiagaJ6+J4mZdFiNnN5+kzoydrJFDOewPM1dEkxgjJD1SS6Ziic6tU1nb2jT6mfiedcu7rM7sljrt64yE/+4UDIi++iaiHnHAeApsaYOBMsQwavUAVJsYQ8B1EB4vsjVlZWuCllKKY6fP7gc+ZT4l67sHqRBzuP2JbrBrBWydA8LeJJff7wwROu33iNXs8nlxd7XM3H2DnqYPv75GS2EFWlb3dRQjGnyxe+SSqT5sNf/YyMrMiYzMbcvn2HnednLEonLGb16XXt84qb6aTH6uoqO8/3yRfE/6UzIlNlD6aU1sS8qnNJ7KFDLi/spMFwn7ffuUPSSlCvi8UqpiKa9TaXLm4ynAidfnx2QkIrkpRgOdn8OrGsTrPZppAUOn5jsUoyTKApOqvzQjae7zwjmavQ6oo720xUySbimJ7OXFaswVwqw+OdX2IaJjn5Wa/XIZtew86I/VT1kOWVdR5v/4q5gug3Sie+hmUouJMUMWkapxIW8+VFPv1YgM4osTxra6soasT6ppjTyfEh5WKF8UjlLbnHh/vPme2HzEkwmcCbMZl1SGVfYzIQDu2vP/oVC9mr/Omf/Rk/+blw+nwvIGZp9EZCb6UyMY5qd1muLjKRQZLtJx/z/o1vMwqnBBJ91Z1OmV9+n2xaKKVMfsL9+6esLK1x84awPXVD4/P7X2IkQ9a3RPBmVGtgtwwWq8IOe/7sMw4PTvjgnf+BS5fE3O2pjT/JM78szseTnX2UwAbLoV4XQZlCt8lMv4CylWYmtobe/jOUYo7LtwVKpqZPePhsHzMqsyp79FNZndW3rtPrnLH/UOiSVLLDTz88Ii6JlQvlGL4yIFvIM/WE/j5rHNFtz1haq7CwIKpa2g2Xx0+es3VZvO9k2idE57MvPuGNt0XAp3Yw46z9jM7gFFcVdvvRSZ9vfe+P+Iu/+t/Ec+yI1coFarUaVkLY33q8zfbOgGJhibb0AVznP+4+veq5ejVejVfj1Xg1Xo1X49V4NV6NV+PV+E8w/iAyV44fcPeRSCFPhw7FgsmXD/Zp9yVq06TP0X4TdJPFeeGVziYO01mPuITydiObwFOZjKeYOYnQMuxgxrLk8ybLyyKy0us2KBYSeGMRVTErFdSgjaWBK5nx9DBB4PmkEyqqI1Gx0hVAZXtHZNPSVhZV9zCMFIO+iEBnsxGz0QQC0GUvhqHNCIIpPuMc6wAAIABJREFUHYn65YY+c+vzDEZTao0DAFJmGl+zMJMmqswq1I/aGLkx+OL9JuMxuXyGyXhGqSyi4rqio6txpkOVUJPoZ+k4McNkPBYRjZXVJcbjEYlEClXCpaqKR6c+Iic5IBzXw3V9Rv0xqaSIYHhMySXi2A2bRkNECwbNAUldZyqzW2sbW6TjcTxHxU+KrNRZF0I9xdW3L6HNJLLiqEOt0WWWkH1ZwzMKyRhxX0XPiXmfPOkwV7A4Pj4F3ZbzDEgkcgxnIoozPDzAiQbk8guMe2J9R5MpQTjA11ROTkVk5+K8wSd3Pz4nrx2UhgzHLpVFhVRSRF/CyKU/GlOsJqEnyiz0SKF22mWuKspmRg2HTHWOfGKekS2yOP3akHg8hm+MaM5EZHXwtM/GQoG07Jc72n+G3epTKhXJmjJCpIU8fXqA4YUsLYqI5XjQIlVU6Z+I9z06qJHO55i4U3RZLjWYOhgxBVWLk5Ww2bbbJ2tl8GTkRMFgMJwShBpxU2RfZrMZQagS4pFIiBjK2PaY2i5jT/yeH3SoDOfwPY1pW0S3c9Uq3iygNmoyGIp3vryxTjB0uFgSsvnkeMrUChl7HRKRkCndSDIZjwU5p8x0WEGMg4Mjopjsj/NG5JUqgR2gqmLNx26cWDbElSV8mWqZwaxHo16nnBcR9pmTRDNA1xL4EqXOdV263SbVOZGiz+ZMer06ZlLBl9HlyMsATawkzFfEPnTrAxaWizhTkYXzHJsoNEgYFi9OZK/mhkk8lWFstwk8Ed02TRPPDc8pD4IgQFEUZrMZEeJsa04CjAGr18q4M7EPtSczVpaWsWUkcuiNSBppur0mC7Iv0uWY//Xf/c9sbMWZk0hLjdY+uupRkuhd+HG2Vm7hzcasr4hnNdoPiJkmUaSgSlRDQR8cochSRUVRCJUQXVHPibcV1edgt873//n7DCWXUKV4kbG7fd6XaSQjEimN5YWrdAaiT8nxIy5uXMGenjCRGSjbhakX0WqJPda0LHp8Sl5NMbXFWu1uP+fWjW/z3W9/wNQR0cLn20/QtCoTWdapkMNI+hw0HpAOhJ6IFJhORhhxHVMXEfZ6Y8ClrUXuHYrsxM7TbdYWrxA3fIppCcHtpVFyYwajCC0udFXgdUmnkxRzIruMG2NtvUKzbrN5UZw1ww5ptRqYZdF/8L233uOjj/4DeiHDaUc8J5nskUwlcR0FMyWyd6eH+2xtGVy7JP7vt583WV8sM5ru8ZsPxX1xc22Tyvwc//4fPubhrshwvfV2mmvXr+N2hO48+kWDrStvU2v/mnxRyOeTB49IJ9JMRiFzc2LuJ2yzuXWFQkpEVT+9+ynVjRs8PdqlEJNZv2KGyfAUc+YxGAp5qaRKbKYNfvRrEYX/1qV3MKtzbH/5a+ayQrdknAxf2/o+x/YhTVsgV+595nJh/gJvXn0dgI8e/ZjDk0NacYeKIdfFyhJpKYJkg5Ij+nZPvQ7Ziok9FPfx1Rt36LVPefjlYz74joBZH4zO6PU6JBLh+X2YNErkk0WaA4FIV07dZGXJ5hdeA/Ulwp/qE4a+gEaXCJ4u4AcTHkpk10ZzQipbwfUdQhnhVzUDP9JR9ThhKD577U4J09pnbl5EwDPpMulEGtfTzzPsoKIpEVNnimmJ+zckwHMdVFNE4XVdx3Xdc6JxQKCJRgrFXImkKvl4HtbZylykawtFddw+Rg0cti6vA+J89HsN
0sk5DEnkfuFreWZ+m/4Lj3Re6LxOp0sqWWVzeY3Gicg8JtJlVuZWuH9XlOTWax2cwwaqGnHhsli7Z88eM7YdLDPJ1dfEuTo6foJlKtRbIip/6fI1CGOMh9u88foaIHRLv+2wVFnAlxkEhU0KpYhuT2RjRqMBcT3J7v5DVMT7ZqwMugoxdY5OQ5zb0djm4pUcR0fizl4qJXBVjWY94sZV0ctoOx0y8wskk0kCU+gJ25thhh6BJnTuOBqhtbpYSo4L10Qf33atSas/YzGzfI6Qurq8waNH+9y6IUivQyXk+LSGO1lm/Za4Gz79zR5DY4jvd3jzTZG1DYI29z4/5fU74u+H23fp9wfceO0Ow5FY8+HQ4cLmGk+39/jpj2XvcHmRdDHJWUOkcayMx/rqGnfv/5LlJdHf+MH73+eLz3/F870jEimh5/f2z+iNO0xl9Uy5amIkPI5bdUoFYfvG479hZHzK4Rcx/uS73wDgsNak32+d056gTNm6cA3X73BwJGTqduUNiqVVVN1hOhLz2jmpMaVNMhC6S5sssrWQ5sXzzzFk/60/iUgtxQkjsccri1me7+6RSl7hoaxKuf3mGpcXUzx98JStja8DsPyNDP0XuyQQd0VYDPCOXCaFdSzJV+e2JoyYYMXmSEvC3rFmMRwNubgu0XsjnTAMODw+Ym5B2KxzlQVK5THOxGTUkdUkdg9F9SiXhH3lhyOO9nok0zAciEz5eDyi0+sRixsUykLn6ald/v4Hj1iZF72ay+tx7HaHQSfL/JywQZqjHrNJllangZkQd2t3IJsYf8/4g3CufC+gK+vQA29AbJLl8NETzKRYbF0xyWazjCYDZhNxefTbY/JF6/ziiMUz4Bmk1QgzJgn0Yi0ymQzZrCY4T4B8Po7nTijJcq10PsFwBMV8jmj6kiDUo5IyGdsuk4E41GbGotloEZecFnosjz2qEUVjYoZ4VrvbIQoTeK5LNi0Oi5VKEwsSTCXRqG74zNSAiAmlSl4ugE887jH1Q+JpsXFzKZOJF+H64n3L5TLtdo9UIn0O/eqMAlzXZ3ktz3Asa1QUn4nrYEl+DNebkk6nsUcuCQmOYSU0VHWIoYvfSgRZoihkbr7A+LzfCNqzNlvlec6kYeF6IdX5kK1rQhkZVo7j5pB00OdlvYSvmFgpUFMBqi9KW9zgmHwlzcGBaIw3jQAlmGNu0WM4EIdsZamMr0xwhwpnsidoabFIKqnSlhxF0UzFizQapw1C0ezCeDZG14rogY8zFkaKP7vBZLqPKZ3JB8efYsRN7LsjCpKcd+YqDJw2PTeFNxFrt7C8wOiwR1o21FtBnJiaxov6+LKR8o3rbzEcd3ly8pikTG1/9qMh639epjcSzojJjMDzcYIxzw9F2Vxcj5NTVqg97lCX5IPpgsawZhFN5CWUF8a6rut0+8IAKycL6HEY9Ltc3RKkoYORjZVNMuxIpeVNGLkeuWyaiiw/PT4+IGlVcWZ9shnx/MlwTCoVEUhZUeOLRATstev4jnQaTgbMzS2wUqySWBTGI67DLA5NaQycdc4oL6zRmM5IxMVneS2Fp+qM7QZGKORsOlJJlHI8lQSXMcMiN58g5jTxZy97JxTqjQalckG+25TRwGbiOpiSnLvdmaDNPIg8/JcgG0oMPQaTqYSnjXQ832E2c87lPEScXVVNMJaXVXW5hD1uUykK2Rz28xQKBdrdJoEsVfJnGUb9MYpRRDvvnRD8NS/L7YLAI6ap5zw2AO2ZzcZmDs3UGLaFzF65cZ12vYsnnbJUOo89tklaOdyZhG32TillttjdG7C0LNYhn4kTTzioEkJ+ffkyMVzuPryHLg2+YnqTtK7yiboDyu+a4cW7R3LekkBYUdBlU38ykaHV7vH06cPzvh1VTXB21se2xRoEdoyrpQuousXWhqiFt6wdXNcl8A0k5zSqqvPOe5fYfyEu8EgZMJ01GAxGrCxcBUBnncpcle0Xj5hMJTBMrkQxt8poKs9HKoauptk7PaAke1FHQ4e5xQoEG5x0fgHAznHEt9/5NsGhMCJuvv4GteMOF6++yc6+7Ntt9Vhb2OLwwCHwZV+bUSAXz3B0LIxHVTFIZxLEEwqTqdibbCLPhUurPJW9Is+e/z/ML+p0hyOmY7F2w+6QSdjh7TtvMJ2J8/5k94Cp7xBThP65sLrI00df4JgBWU2cobsf/ZaFzbd5/buXWSyJvrZo1uc3n/wj37khygSfjX7Cbn3E9uNTfEuAKxiRQTE7Ilkt0I7EXTBsdvny822uXRJG6B995RvYWpd7zRr3PxKlgosXrnDp0iXafpeElBclgomv8+4tUfYYxmfs7j1hczHDxg0BZPCLnz7jyuYm5c2L1M5EAMRLTLh9+W36J+LZo1OFy9cvsPNom9SKMGj3Zl/Q/NXH/Lf/xb/h+KEocXIbQ452Duj2xV650xlbF+4w07vUa8K4W1qZ5/ioxtDuoMbEmUnHlyDwiWZiX6y4ybUr11GU4/PgBqGCYRgEgYciSYR9PNKZOZKmeBfHPSGtexBGGNIBC0MVXX9ZciUe9Uff/S6/+vDvznun33//fT797S/xgpCi5JWw7TGKAmbCxHWELaHFVMkp9zvwCnGeIjzvZemugaaoHB4eQ0KiamkxXpwcoMgycrfnc+viJSb9ISD6Y6pLqxy0zlhYETrwpPaIUKvx3rcvEyLk82S2wLQ7pNEYMvLEnHTFpdtxmJP6NIpUzEyOfBGePhD7WVlYIlRm5IsZzmrCuWk3x8wtmlRj4o6pn/TxXfjud/8YJxD3TO20w+r8Kl66ylCWsse1HPVaC8uSpa2JGvfu/YY7N7/JaV3cc76XpD9+SMcOWd3IyuenySTi1Dyxn+urUF6u8t7rAcrklwCYRpKz5oDTk1XajnBafCWgXbexJUKZng353rvv8ei39zkeSFLYzhgzSNPudrj3pdiHN969yvK6wbOnwrkjynPpygp6GFA7PZD70qdULpNKx8+d02q1SnUpiaeKs26ZGkYMuoMDdneE7nr39a/iTl1CpUs2L+yZZmePTOGD88DN46efYdsamxvXGMk+urx1jdeuvs4vfv5rbt0UvUu375T48snnDLviORcurjIYBJgxnVOpYwvqLYbNFt/61veYDsU+HD5ymbtxRPNAPHuxcoGDw0Oy2QLlqtibR4+fsbic5+h5m2xS3JEby1Wi+AZjW5yJzc1F9GSJ412b4kvibV+jpzRRZXtBwopIhlmSts3ME4GUk/Eqye59lqs7zPYlMfXRBuWNtwjqoueqvf+M3vCQayWfI1+U6c1deZMXzx5C0Gbnvtjjtc1bvPHPKuw/ugtAp9Nhce4yaxtF6m0BNjTpV/CDDhc23kSX1Dmub6FqBmcnYp16g1PC0CeZijMdS0J0fUxSy+H6AZoqqSwmj9GUOQoZ8Xe/1aB+OKU6n+P+YxGIKlZzNFsnmCmPoiL6Cyv5NPAjft/4g3CuiBTSSaHp9LhGPB4nVGO4ssFs5qiMZiHVaoFO8yV6R4ZIi/BlxiYMFcIwIJUwzo0q00gwGPZQojRnZ+JwbGwlCP0USih
+r7Y3wtKz6AEkZPSu1Rqyu9ujUFwmnZfK7eSQmF5ClYaUPW7T6Xgksy6h7MuoZJdxZj66boDkZgpVjak7IpsTHnCoj1C1LMZ4ytATBnQ2OQduQOBP0GXmwel46L5LXBqYk4nNfLVCGKiEsg8rbiQplnXsWQfTss5/L/R9ziT6WioRR1ViWFaSRlNEGapzG0zcAC8U62toCYyYMOhfMllPJz7dgU+w6vLuhjDYH73ooGWShKE4dJ88/Iww9FlPJhg7Yp5bly/Rrde49/Qhc5a4VLXYgHo7QpG19wupdcx4hQcPHuJJo3c4jXDidfLJKhe3hJBPetAfh+ALMR2Opji+g6vVsOJiDqE3o9G3yas50oaYe68/BEqMZe3vaOqzkl9iOvJ58ULIRrrYI5YwSZoWXdnYfNZuMTdfISkdaGKHpNPzvNh7yktrMl8I2D+YcXNtnlvvCiPpq3cinj3fwekL2Xj7jSK6mmD37Cl/+mfCEe2PZjSaQ4zAZ763BoDnTPDUPhOpHJaqqzy495QgCImpwlgedGz0Ugp7PKPZkk6mbeInfCxZZt92puh6mvHIZ6gLmYprLrqqkC3Oo8j+v2IxS0gDXSIfhbpJhMOyUSYhmcjzxQypkoU9tZmNXzpTA05bLQxZVx+pFqNWG9dUcSSxMLOBAJeI4kxkf9MkDOhMmhRykgNp5nOws01pKc9oKJ3/dofl3AJdSdIYS0dkzRzuYEKnJmR4oVKk2TkiLmUZYH4xgaYkGEpkwiCcks+sY+SmHB8Lw7RarrJDjeHQplAUyrVrT5hNFNKSVHQ07dDYOyOTKXD9mjDqdTXDsGERS2po4UsHaIqmxVCV3zktwrESwBYASRWyikpVS0PCkPNyKc6lGI2EE5hOWHh9n2G3Q07yBqnqHIVKGXvWO+ffUpWAuLrK6obMjhyf4Y/H3L55k6eHomH39utv8U8//BJV/V1fFUooHUBJ7KEoEKkEKKiItQoDndFwzOHR2blB+XTnU0yzSCoj/y8EQ03iOC0m0qke9QdcfPMKraaB44hnzVdWmQ1jbG2IyKc928Vzl1lbG9FriWf92b/4V9x99DMC9QxLojaWSgXa9QamzJSbyTGdVo9KaYFcTlzYY3XGWavGynyJyVicyVjCpjFoovjirCvEcIIBnb7DRBpqoWLS7ExY37hBpyEMynIpzk9+/GPeeOddAKysx9C2aXcPMRSxx6mlDDmzh2WIPU6mk6RTFisLKr4miWpHAVH/hI9+9SHf+oZ41vLcOp6fodOWQANXS5SrF/jkl/83l94U67v52nVePB1y5ZbFsHcgvqfqzM+t8Isv/xGAK2tr9KcxVi6uslAQAZ7yyhydqc3+owd89XVBgPx//OWPyMVVnLrQ5y8OshQvLvOt7/wLnuSFQTIaOzimwnIqjzcUivDJk895cTTm6//8fwQgW9FJJquEvTqnz4UeTlAlpcR59vgF6YyoPEibBprq0p8Jma6sbNDuDGiedNF18S49TyU2GPLJz/+ep8ci42WbCXRtATMjDMBWr4H6+Iirm19h9/inAOjWNRYWSlT9EpEMbuy9OGJrbZNLm8KI6Q+a7O8ekU5Y+BJkR4BbCFnXZC+xbrjUGyP+7q/F+TATKeHwqCaODFJahkGITuSpEAlb4oc//Etu37xBrSv6R7a3X3C063HjjXkMU3bV22AYcTxvdg645AUzQDl3+OLxOK7rC045CTDjeQFhGHHp0hWSc+L59367y6Wrb6BKlLNWv0ut2+XWsnhfgGh8xtrcGiB0YDZ1hcl4ifsf2dy4KZDWxoMutl0nn53HzAj95gQ+oW6fo6iWinkSOSil3sTzxZo3220ubX6XVvuMVls4VwcHLYaDPJuXhH7VdRcC2Nu9z523RR/R1DZ58OQB8xUfTfZ0P3+yx8J6gu3nAvVvYgfcvPYakTJmPJJBYPsMSGFoOZIxcY5z2QbObEQwFXv34T89JKtXSJQCLt0WZ8azk4Smjx2+wJLVD/V6k4WNCmmJVlrrtTls9hhbGo7kG/VjfZLWiHvPTlmWRLhmyqZ+FvD+238MwMMnP2c8HLCwkKF7Is7a6kqJidOnWKqe8zfeu3vCN75zm5MzIQeZTAYtFtDv+hQy4m7PJCuQO2HqXGUwEvuwtuyyffCEyBXOajJh0my1UJUzKlVhNywuF3i2PeL2jTc5OhAZbm8cJ5NJU0iKu6jXOqXeeMaw41ORve9LuTk+u3uEMnnAoCHe+eLWCo3+Mc5Y/F4mV+Ko9gTXdXF9IWfhLIfnOehagomsXmns7bG0cZmVVfEu9nBKMpHm9HSbclYEbzaWcvzDPzzkrdsis9PoP6ewcIGFxTK3s+L3d3cbfPokyXzpXSqyN6t3ckTYeEF3Vziqqcwl8tkb3H/wS7ShCLIdpO6hVd5ibT3HzBNrvr9/l9JkC38mDRy9hc8c02kZbyZtkADWFy/Qad4nnhbrMp1EzNw+vqzQSGdMGp1HnL7wuXxFOI9qZJHJGxyfNLi0LBzavZ0QnCqVN8U98NEvHa7fvEDHfsTimghS9LsalbkUB/tH5CQo2vJFmRz5PeMPwrkydB1NEcovCjUmsynjkXNOyFcoWsw8nU6/TbYiLl7LihGpabyJROzQHMLQI3CyZJCORuQym3mEwRhNFwrx6KSP6wywZNN7JVMkVB1aHR9nJCI0pUIOQwvwnTGzWULOsUQ6YzAav/w9AyU2I50zmQ7Fs/3IBT1gZIfn7+MxoFQooEji2GASMnVbeOOQiowoRGGMjjMiMpNMh+KSW8zNcVavk0+J73jePooyoFGbEZeohsW8RSlTxB7NaLZFlMhKZ3DckHJVHE7PGZNM5bCHI2KGcBAa3QNCP2IqI3C5lMGo36dcStGUjc1BMMMyYxydjemNJKx038XRXKKkUJCWYmAl8hyctghltsDM1UlpadonNgNPKIx33l/guOajKcJJOxmfcPboC2LoxCzpyCge2jjHzIdcUqzVSa2FaZrMleX6KgFpP4kTugQSEjutWiSXLTxDQaJ7Mw3G9Md11isiqprOX2TvxT7VcpH5ZVlmaadJqhm8kYsho/6aEadcjmO3xLNnYUiqFGGdZLl1TSit975RwI9PuP1WmSdPhDEVJrq88UEJfyjeL2UVOOo+Q0tN+dt/EBfaoBFSKOYoV6tsXhR7c7z/hN6xS06Seh4enhLXTCJVxZXwpVFkUG91sfIVZrJx2o33iCd0pg0JYmArJAsRihk7ByQJYzrGbMK1y+scn0oyPj/EHih4EgnJzPuM+hHhdIgbCoXkDoactMe0OwMmEhBC1eKkExbIOcXyBpOpgzEM0GQ52tiKkzGqxFSNkdybckonGOsMZWYgk1YxJwkGHQ8tIyO7swxjxyWUkeWR5+LYI7A0vLgs4TRj6JpFwkqeB1zskYOmuQSytCdpFfDCKa3jHjFD7GdHouotLqwQaeISqJ010ZQCJ33hgOWSBYqFCrGYjyVl+Pj0DEO/TCwKmckaQ12Loes6joSjV4kgDPHd4Dy44kc6RjrGVA/wVDGvfCaBqkXEDCkr6gAziMhYKx
iq+L/JxOWsNqDXV1m5LC56DYVWbcJ4LJG6zppcu3iN3Z09FFXowA8/+icyuXk0/XeQ8KrKeSQdIIxUVFUYopEu5h5Xk9iuw8TWCUtCT43sIc2uw/KiQK6bL6dYmb/IYNjGlKXXyUSB+tmEVquHERMyO5dfx/eHdPsS5EZJEPgxLq2+x73+ZwCcnvR57dI3+MWH/wHDFO/caj8nV1hkdu6kXaHfeki33SY+L+DZG+NtoliMs+4nRPIuqLWHWMYhG8siAPP5b35GtXKDs8NjfIl+ujS/gsaU47097twRzeqjXp//+r/879mV1BS5TIqpv8OFzeu4UxF463T2iNQ1tEjcHzElzd//zV/zzbe+iZEVd8Og1qdYKqBbBzz+SGQZVstFjHSWVkHoslZ/h1gyx82tN3n4QmR7KuUF/uUff8Bxr8bqRZFZOTv+nMXSPOmMiOIeHe6hTj1CI4MrCS4LuWvE8xmyOrgSlOFf/0//hqOde6wtiTXozhw0P4vX7JHUxByKxZC5whI7h/dAltwaxhrLaw61feF8TPor5IpZfE4JJmJ9q9kUCTXk5tWL1HtiXa6uvs+jwSGxsggQRmcvcNtj3vv69+j2RfQ+r6+QmC9xfPKUji5k/ztvvMFJzSaTEU7o53s/YufgAcXIZCYBbMaDENft8pV3v0GjJiLeg3yf5eoVAke8b2dq8ZV3Fvmr/3MbRUabI1VB0xU8J3ZO5mrGM8zcKe2W0CWWmYNIJyTCNF5SEygYhka/3+ftN8S6//H37rDzdMAbN/8UgCcvfs5w3CZurPC1rwlH5q/+8ifEDYtIAV86U5qmEfrqeZZKUQRQhqr+7jPTTAAqB/uHXJVO5txyjmIsiych3dWqw2gI9+oiww/w4V+84M53btKXmde+neO1G9f47W9/xotHwlhOGVm0csC7b36Lo4YwYB8867CyWeSZtAfee+8dPvvkc6ZZg2xG3E+9TsDDJx+SSKTOg6kXLqv0uhNGXfFu3f4e88XXMNQMj+6LfQm1Do7fp9XOkpWlUXHTY2zPWF1ZA6DVPMD1h9x78JSlRWGcZ+MJtK6G46hMbVOui8ni4iLpuNCLhhfxb//t/07M2mJlV+jAr3/zJs0jn3JpnqTUUy5zNPtDFmS5VpUp+4e7ZHMWxwdCp1slDU9T+eq33mU8eAlo04Uww84TIdPBVOH54R6Ke4UgEpUy6wt3ePiow90vDs5RYRfWdI5rZ4zkefSHBhtb6wzsFr2W0Hn11nNUHAZ9sD3xfNsOMJMj+gOxThe2rhNGKoZh0BsfALC9o5I0UuSyCZySMNTLZY3t+zusSkqOTHyDaLbPN773DnZH3GeHByfcevsayWScZFKUet74YIMf/M0OuZxIGnx570MsM8PUbZCIi8++9Z3v8+Gvfks6kSImEfaqV77Di/49/GNxPgwcjusRt98q43vi/b540eLNt68j4/6spdapNc44rmscycKRfJTl6uV1jp+pJNJi/5L5ZQYzFb8gvvSTR4eU5z3K829z6Q0hd512g4NRi+3nJyxVhQxNxveZjHaIZIVWPrPC7s4x6ZSDvIrIZuHhFyfcur3AR18Ixz6VSpIt6UxtsZaL8ws8uPeCzQvrnElqm3hskebkIYae5/G2CCzkshkK81lOGiKQsbxVxcMhX1xj90AgCqatDVKZkKQRcWFDlJ/jC3vo941XgBavxqvxarwar8ar8Wq8Gq/Gq/FqvBr/CcYfRObK9zxcWZoVxCYocYtIj2FPhMcbG8WwYiGhZTEei88GfY14fEYiEFFcJYpQVBM/cAllI/XQ7pJImPhOQCDhkmOWgWVojPoi+uP1bYrVJNNxiCo95VjSIu741OpDcmXxf7NQQQt8HAmDbsQjrJjKdGiSyogIWyJVxJ620a02Rkz2dOll3NEMXxURPtc1CMMkSlylUhUe9slxnVTSIJ9PcHYmohNh0SOTmWM0FhHTxbllGs0+m5cqFLMiEjHt+fhBk9XVtXOIysrSGomJzbAnog6GphE4bbKJkEpOlBg92GmyWprHi8QazJQhxBVq4wapjCQxjZnkiylQuiRnMkWtzCiWkvRkvXWn1qWSnrGwmiSKiZDC8+d1EokSdmcxssYQAAAgAElEQVTM2BbfO6n5uE6CzlCSGPeHLJcXiJGg64t5RorKdKTR7tfxJZRtKpdDCz16sudKsQwCRUWb6uQqMoThazR6E3SGbFRF1LbdSaHn1llZELLxdPuEt9+6hatqfPr5LwDYMlfx8iqjRg9FkhYura9z8qBFQfIYKKrLwf4JRiIinhHP+ou/PmYWtukMLAZTEdF799tZ7PERI11E4Wp2wFlwRnYuxUVdpJ4HpQGGZeIHEc9fiIj+ZBwjXcyfZ0OzVpJw5nJ2NCIlM5aK1sHvxfCigJYvopFFM02t06IzEL+nGAZhACgzApnZycQLlBIme/tP8CX5cL+tYiRS2K74v/bpjAtL17jX2MWQqiChZolroKeqRF0R0UslU9iTEZ7ky83n0hBa6HEL2xblBd50hFbIEqkx8jIjE0ws7Nn4HJAEwyOVTzBoR8xkdmt5qcTB/phMRZZBdh1iukkiGeFLbrqz2hFhpNGpD8kWZG8YLgQqk7Ek9NQbWMkkCV3HiIsygZmMLNmzFnkZ0ctn8sxGEJdlLc5kSqUwz9jukUyIz0qZDfZbEbqlo4WyVyPyIYqhyr8DzxehqUhFkSV4uq5Sq9VIpuIkRYAdM2URKl2urooo7tnZCXkzTqmaZW9f7Gcqv86w18WMmdRaYm/iVgIjptJuvyx7jPPrj7/EsjS2Loks6tQu8dr110glD3Bkf4oR1wlDD8KX2SsfFRdN0Qk9WcZgjHE8G2+8Tjohyiy6jYCt1RViutBbhqmzd/SQ1fUlHMkRlEun0VyNZBTHmQh9067XiKfjaFI2tGiDRHLK9uefoLpCnz76+EMGVy+ytrTAVELnl1emTCY6LyTPTjSJM59JsLb8Tdo9IcPD4YxvffM1hpMJLxAZ9Sj0SRgGaQk6cXF+RsceMAr7ZGXzerascDwYkq2WeL4neiyuXX6Xs8YBsbSY00Jlju3tj2gEx1y/KrITk/ERw2kfZyT0zfOdz3CiMY927rEme1iuX75Mv9YmUNdYXxL69KjzjFbziHRORJHNkYeR7vHGf/ZV1o9Fad3R7lN8tcfJ0T45U+iuYiLL/S9/SkGCrTSmJ8wbiywmYG9PRMBT+q+pXr1MvbaPMRX6bb/9M3KFNays2KtcXKO194ixFUEky+udiC+2P+X+zz5lKEmLb797ndVCkcSCKEuKxV2CTpNCFkJF7OdJo4eevgyahtcVkdzfnN6jNdrjvixdSgQbXP36RR78+Aes3xCyeNhskY1PWN7awuyJSoO9B3Va0ZDQFPfHxYWL1OMBCwtzLBTF2n12f5+wP+WLe4e4E9Gz9trcTfr1GKOx6K8oLa1ysvuAyItw00KXxaYzInRCxUeR9AFO4KAYKqHkx1M0UHSIAo8I8R1V11F0CByPd94Q5W6pxDq5yj6eL87eUvk6lWydLx5+QUaWyapRnEDz0AIFp
C0RqRahNiMMxe9ZVpzJeIZh6Oc6QfRpRqTSFvmSkP3dvTqTQsRInquUtsZs+oi+BJcBiF9Q6I2aKDkh06UFhcmwRj4f41hG2C8vXWU06PGjn/97Ll4W0fRqLmQ9f439lOAo+vFPPiSd0dGsPsenso8vNofvNzg6eoYhy11Ny0eJJuSLkrPLrRDqLmY8xO5IXkRFJ2GUCdUBgWyrmK+UafWeoqmS8DlMcnRyQqGc5ekzkSFdmJsnkUqSKyRx5F0wmZrUj7rMlYSuPjup8Sf/1b/CjernZcG7L/a4/8UT/vzP/5yd56JvR4s7zGYzzLjIXOWLGQb1Ia0aVCVH1/PtMzL5DLsvTgglL+qN27eYjEwGbdH3XSiYLM1d4P4nn5OWJbiVks/mxjJnhydUpO2wc9Rjb/8ZaUPoyc2L80zGfVpHUyJV7J9mOPR6Iw5rB9hjIUMXLrxGONNo94ZSDizW1ud4+vQeuuRA3R2e4XigTMfE0+Jsj5yIQm6RmSPs3OagxspilZxm0RuLvSpX8hDGaDUcsikh1//0t7+kVx/y3le/BsDG5hpaFKfT2MOdCvnc3b/H6voKw2Hz/DzEKlO8M4OVC6I/ttU6hGkbNWYw6orfS2ojdk97fOUDoTc+/fgL+t0TGo+2ufmGAK9oJdpYtsHWBZ2EIeTl40/qbNzaxEAA4dy8UqGZntLsG7SfyvsqEZHOJJgMjnA9sael3CU+/eKHlMuiPPT6la+gLrZxgz6ZjLjDvrj7Kaa1wpndRjXEZ8uL3+Td917jh3/7N0LGBg5vvX2JXjdk64p4Vrt5xmiYJIwrZFNCXirVOI3aKV4g9bfV5/Spy6VLF7i4LrLbMVao956QK0/xVWHz7LwQd9LvG38QzlUYRedGjOXFCVyXdKIIBeG0jJ0JgWsynikYljj8hjXDcwOSKSGoZ0cB2bSOog9wJHhEZa5Mr2vjzCZkshL1Kw6O75FJi/9LJ5OEoYtpmiA5UQaDAa5nszCfI3hZrqibhI5NRabR/cBlGoKuQL8vHKJ67ZRIU0llQxT1JVJWi2o1TSYljZZUxGQquJyOakLZDQcjCoUC9dP6eZmTG/bILKSYjcWlPnXaKKpPKh2jLw9sLIgIA8GpcG1DHI7TRpNxr0NMNq+PJwqzwOf2rWs8eSIUVNZSCY0CkS4OsGvHCP2ASjJDMhCHru+H9IcNKqUM2aIwNkZHE/aO2xgSDe7i9TViXsj8Ypn9M+Ek9Qc68cwIxYjI5cRaHR0o1GpdvFA4V6l0nP16ByvuEkqjLJf3MTyFzfQmq1fFIX5w/yFpU2fQF8aWEiloukvg+cymEjRh2mIyjkipCaoFcYmHTsRUmRFGsuyxkGWusMBPf/pTUrr4jpKN8NwxajqGIksVdvcPCOM+mrw8L9y4Ruusw+bmCrt74kLT9BBvlufx8SEba6LMYv+TNBNHIVEWl2OxpBDkS8SUKvcfi7Kgi5sLWFGMk6N9gkjI2dhOMM3YNHvisBbRWZhfJR7PM5EZ5+FgQi4fw41sfFlqoiU1Wp0m6xuivKjfH9LqtojHY0SSPTyZtzg+a1JeqDKTzc7tbpNKtUoknde5Yophf5dssoQak6zjcYW4HqfTb5PLluSe2mi6SVqVnGStGkEUks4pZOfFOYq6CfpdB9McEVqyjNMfohoK4cuywMwylYLFYHREViJVnp41SWeSKHJOb752lRfPjgmckEgR7zt2RliGjuuEDHtinqVCATWKgUSDc2ddYgYM3YhqXijJmOw1M1SDaVusQTFlsX5xjv26JBUc9hlOOjiOjT4ReqPZjLCsi0SEhBKoRVFCFIXzIA2EqKpKqEYgS3eCyGNra4V43KBdF2em29knnVNAgnyMhjHc2YTRZEKvI+ZUyOqoqkqpYp2XHbqugWEaBFKXaYZGZTFLs9mkPxBrVarmsZ1TFCVCVYTRqaoqUaTyu6IEjVDTCAMFNCFnSpgjnTI4ODpg44owyuKmhqak8ENRQjq1SwyGHRJmiqxEsjLNGdlkiYXqAkNJInxSO8Gtz9i6LEpwM5GJ6hjMwiJl6ZAcGW0y2QGjrguSBPbwic902uGNq4I7qZhdpla7TzB1KEjOntlwkd/8ap/FjSp7+5KM9/YmtaMDnKFYz3K6QqpsMLe0wK9+8kPxe7Fdktl5jvcf8pU7Aoyjc/AUUg51R+hcZX/MxuVbvDh9hjMT71xdXqZxNCBUhGzcee0DHrofoSdUGhIBzujUSRRy5HWFQBF6f31ljS0cto8ksblpEHgT/vEHf8PGRWFY1IdDJj/7R8ahxt0dsQ9WqGKkVmh9fk/Iz1wJVQ2xdI24JdZp+/CYnTDBQpjDjoRsXL+0SWsyRPoZKI5PY3jIauoN6p44D8W5Kv1f/IpIzbJ6R+xN6CVodUOqVWnwDcfsH9UpLa6TiMTDzDmTv/nkY/o7Gq+/L87/t/9smfZZj+v74l264yUKlTKTr1xmJvtvw0OPe/37zA17PP5c3DOXrt3g1lev0qwJJ+nChatUKhUKsSSdttiHb35wmy8/fgozh0WJ5JhOGpyM7rG3K8p2fvqL3/Ktf/k10sUi3YFwUEwrie1NiMdieL5E69Q0guj/y/GmQhihqjrqy7VSVAjjWGaOK9eEg7f9/Am3bl/n089FmWelkqHdGrG8sEp2STjH/+5/+RkZLYlHRPSSU1LxUYgIw5dceHF0XUVRI6Lw5XciwijATMR5KsvIC+UFjISNXRf3hTedkE7pWJLbDKC6GafTnVAIhPMxbNj0sUkmklxYFN/buDKHeayD3uJ4X9wzq0uv8enHv8CVTkWxnEfRbIYjDyUSxqSV0NFjKXL5a4zk3ToeB8wtFnipN0I/QdzI0W5MuX1bIOU9efZbDMPg8uWv8+KJAC3Z3QnY2LjJ8ankiqsk6PcUkpbJ4pzQLfWzMV/9YIPjox6doQDj0FWTudXL1NsSqCJMUSnn+PJuh5uvXROfOdvceesWZtLnZffAfOU9Pv38M772NSErO9tHZFJVVH0qQEMQHIimYbC2fB03Eme51jhgcWEFFOFI9ZoKW6sFSt//I56/OBDrkp7QaXtcvXKLdl8gVSpemrm8wdgW67t70KHfdUiYZbIpoYO+uPsUTc0wN7/E3oFYzy/v/Zqrl94hWxD78Hj7l4RBDNPUSWeFE1HOLbF/eI/6WYtIRi5LxU0WS1sct0QwnliD5ZU1jk6O6U9EwGVvt8mt1+9QqSikMsJutp04Fy9fZtyXveGlKXt7x5QXF8ktis/6rRPyGQ3f90lJTlBnHHHtylWspNAt/WddfE+j33VpD0W5naUnyefWefilOI8ZQyeyLBLrWWZj4awGfYvi2gaLFZ170uZxoxi7z56ztSrxBgyHUd3nnbfuMJTBuYcPThnNOmxuXGc6FGtXqeT4/ve/z9/9w0/EnGY/IR1fp5wvMB2KwNR3vv6f0+k3mF+ZZ3VZnLWtzQt89NF/YHFNOFtRFDAc9lGVItWy+Gx/9zGGuslX373Do8eiP3U6mufqlYucngk56HZdAZzSb7DzTKz5d779BuhtZpMRjx8KOUtm
6vb22RSFmX46P7tDsFvv7VXwLg5Pwd1IMeK5vX+KuPBPTyrTdeZjrtEoclVAkVDvWAMHC5sbIJwLcPfoDnbZJECW++ISrAH3/8MbZ9wWcPZuSkb1bP2Vz7iVucnEuJGmzOzy9YWbg61xtdbiyiWAMe7+5hG8KmCoUKkfqILBWQPN2C2RhW11a43xZVvizRGfRmvPHm63MtsStbV+h0j7m4kPI6hkl/tEyx7uFLivFyqYVm5Yhcj9fubkn71Pn+9/4twUyMXRxq9IdH7GzcBCmyu9veEygyeTZogUvRhslggVjyDSzvuJy2R4zcC8JQoi/SHFN1H0324+byRbK+imnlmPr7AHz+UOXKzhu43oh8Ufj273z0exTU69QWTuU7jVCSmOOjM4oVMS4Hxx+y3FqmUs/jSvbq5lKeSmmLyUTYYrW6xO/+3m+wtnlVnPuAadkcnfXxZ1Uay2JfqtUNPvrgIxYXhB/YXCgQxB1SJWFRInM6nQl3X73K8eEzdp+JSvxs9tfXpv5WBFemYZBTxA8ldXn11R26F1McRxxCSepgqRX2Hh7w5puizOtPPLonXTBkyTHbxm6UeO2167z/A3FgH54+ol4v0GqsM5EwlqeHHxP4KjXZ8O17PpqukHMcZjNxnztQgAGG7lCTDYnFeo7xeCoPUXjjCzf4+MOH5IvgInUuFItCFWx1nW5bGGL7/JStKy0effz895aXz1nfaRDL4e93A8p1g0n/mHpNGGbgQTCboUu9mSDsY1lFOu6UUHoyBbVIisPe4JDtWByO5ZyJN/UYeJfCvxlBpmBZxtyJCFwfNWcSBOIAT9UUJQtxLAt88VuqxSpjT9BDF6Xi9kxPSROFUPZ3uH6ApqdkvRjjUuPFihmPRhRqG2xtiobko/ExlmMxkSX5wBuztVGn2VoknIoFlDcKnJ1d4OgmiSYOD03NMx2OWV4Uh+yor1B0LKIQFuvi2Z2jNivLJerNZarD98R7+h7hpMnWpmzWnXS4OD7ioj8gkfBFz4VPHz2gVdqiMxKL82zSw8lZc2z8yvICrpeysb3DWH7nk3tPWKotoagxF6fCplJNoX0UMVNFUIFbxRh51JY88tKZDOkymy7QOT5Dy4t5VzSdVtUkjAUcrehkZErESbtDQTbezvy+gIhFGqcXj+TcrDLpNbFsqcauD3BsE1OxWFsU45LiUXAM9vePWd+WfUO3rjMaDQgiYec5u0A+1+fkYEpRanaN+h5KquEUq9glCXcZ+Jh6ma67D4CRT8iCIt3uOU/2hWiibkZ4M4/jY4+d6yKQuVraIU4HXMj+ppWNLfqDNl5QZTQQYxUmKVYOlExq2sUVqk4LS1HpdMSh/uarW+xsrOJFfUp5qed2/ww7eIkkFuvsjasllopL/Kvf+wHOpYippMqu1DaYhZc9lgHlcpVqQ2yk4+kU27CI0oSShEGOhi6aZjCZDqhJaISVy9Narv4IJC+MErIMTEmDHkSib6rXlxEugvjGNHJzKFgcxaRKKIIDSUwRhAGaaaBhYFnPg50fDhjCMERRFEzTfE6ckBN9WUkYkZOQ5ul0hoKGql1CqutYOZ3llYV5QBEEgRBfjUTgABAnCVGUzoMYwxa6U6Zhkcpgzsrl5oGMZV+SXGjzAA0AVaWQFTAMY067nmUZSVLGfv4tpn5ERjJ3RNPII00T7IIzh2InSUIURUK4WL5XnIhA8vLZuq5L2GH6QwLIOfkq6hySctm/djl/OdshSQTBRVGeBZkcm9nsknBG9Gm1Ws05gUcQhWRRiO+Hc925UW9IFPbmEE7D1LAsg1xRoVKS2juZ2DeT+PmcJklCFAfEMmkxHrmAShylKNKmonRKEvtUqsXnFPxhxslph2JBOAyarjOdeuRyFsWySDb4iegbfPBwF9MUY9dqrrDYbHHlilifKS7jzpDeYMh5V+xBk+k5cbLLy7dvo0lYft5pctYeYNqyJ0mJsa0cYy/ie+8Jp/6i06e5tUndcfj8UJw9X/vPvsEf/dYfkcrAVDVDbt38Ah/ee5+LmVj/hUKL5XWNSsXg80+EU7SxsQFxxJPPxHdarSUspcT5+ZRSUfw+NBWUCF1V5vqUcWyRMCZJLwkmLHI5lWZLxZLCzc3aK6zvGDx79hQnJxzho5M9xqOA/lTs1XsXAxaWFgnTFDcW72Q1Mz4/fMCoV8eWUgHCrkzU9HlrRQYo/BBMEGH/qqoyGQi7/s6f/gXHJ2dYeXHfq3fXCMOEOJ3M7xlf6HztF77I/pFw7gajAabpoSgquiL2KQWXvYMzPG9KpSK+Vyxo/Pbv/Bn/4O//KgAPnvwJabxIFPm0e2KO9b0pd16+RufCp+fJPbJQZLN+lduvCNv4F//8t/jSV6/wwQffJovEutq+ViT2l2ktNvGkoPS0/4SzYQ6zJPaknZU3qMZL+L0Jy2Vx/u6PT0iSBE0rsbgu/LeccUrv7BOWN4UN19YKlBsVql6KciKSCK7fYzrpsXStzuEjQcpSPPmI95KUUize+82v/T3Gkx5HJw9pbQhoZJgaePGUdx/vU64JaJkxiyhWhnzw+NsAvHLtVaLxM778xausSc2u7u7HOGrMNJ/SmV32yFq47ohA6s7Vqg1crw1pjr09EQx704TBRcbKRondJyKgvHNrm739h5x3hH/T647Iopxc+2I8w9BnOpnRqLxMHAmbef0LN/no/ts8OxTnaq1eot/ps762RU8GTlEUUS5B96JHvXXZDvKQQtFGIjFJyagtRJy0B+QMYRsXw08JwzxXNjeIfEmAZOiUCk0qZbEn9fodUDL80OfpM0F8s7F0HasRM5TJ1mquxuLKNufdQ1JPPGe460NBiBeH8aV8QRnd9jiR+4GaHTPzpuTiIlvbop9yNB4wnF2wf3zCjZuCGGptfYVee0itJfbAjz465tbtBVJzka0N4d9Ua3keP3pKvbnG8ZHUviQvzk25/Gyrzhe/ZHF2dkJRilX3pk8IvBGxb82lVzrtDq+9+jrnUrqlXsuBajGetDF18Z3l1Rpv/8V3aTSqjAdSymJN+Bk/7vpbEVwlSYonmXOqTh5NcUizAYnMCN195af57vf/kMWNIkPZGD4Z9mgu+4DYDHKFlCy2+eDTe8xScVpVFsuYpslB+wRTCvZlVp56o0wqo3kl8EBRGAwvyFtisOxcRrGkoKoqw4kUJByOyeVUNtcFhnY29inmi2zuNNjbExtwmqosLBbQshzb25sA3Psgw7RSqnVhKK98pcxf/PHHBOSxTNmX4fmsL97h6PACzRROWKXs0ruYsL0lMd9PDsmMlIVFh0Q6ivmwjJdMmMQDvJkw4CBNUEwbR1bO4iSkVa7hTsfMJlKoVSkwGQ9Q5PgaionvxsQJFCVBgD9NhA5XHFIti43MHcWYdp7s0sE0dSzVxp8F9Loi0CjndRaqNbbXNhhJlih3OGV5sY6liApNdalMpQKzacDSonBezw4mbK68xMCt0ZcsWIE/wjJSKhWJwe70GLsBzaVVJjMx5k7VwM3gz959jzgTh+zG6iKxleGm4jsXk33ydpHKks1ILgzdL5HNxkTBlH5X2
MLOSzuYepGjZ+LgCOOI9e0cwazD2bF4p7/7Sz/Ll3/yZf7RP/o17IJwuJprRZ486GMuit/bXMmTGXlQTDQ5x4Qq3d4JrdYSqirG09C6VJY3MBwxn0ftXXKOz4JWYSQrkUE8RdUM7EKdJLjUQCtydtFDJohZXlnk6YMDwjqsLQnHcHGxyGRq8JWv/CQDSdQwGk95un8+7+fojk8pFAqUyhVOT0RAUConnLRDauVNdEsGb1ZMsdpkMhDOyKA3wclndLqfz0kS1lauMXEvuH5bB1WM5/n4IeVqhdKCJJjwhkxdjfWVBgPpzCl5E3ca4XlirtRZEVc7RokzXn9VbLa2bbNSKhJHNSrLItNZy+3xpdfeQhuJLNI79/6Kf/35Idde/wprGzfF75XY71fvvIo3Exv+/sEu/fM+fiC14vIV0tRFUSPqDemAtZbZjrcxLRVf9i6BcNovg6RL0V7fDzHkf89mYq4areLciQ/DkCyO0dXnDpeia3NBRoBGo0EURfOqCkAcp2gaP1K5UhSF8Xg8J5PQdV1k7bOM/lDMsaZpZIlKmkoGUz1ENVRWVlZpNoTd9ft9wtDHsiyOj8Xh77ozVEWfM4NqmoGippimSSr7x3TdQdd1Zp47r55dihvHMmDQNJ1MEQmOS+x9LpdDMSPxG+PL3jqbJIEsk4yChkVGgjv15oyCYRiSStauqSuCaNMUvWCXz0mSBFVVsSxrPuaXgZeqqhSLRTk3MzEX8r0vK1lxHBPIYFXTBINjXvaKpWlKGAYMRkPOOxfzz/K5PKVSEd2SGeF6TghF688ZIUejCRkGbVnVVFWVKAhJs4hqVVYHlutoekYaS6ZORNDoTj2mvpi/yXiGrpsMBy4TqdUmerwyXFk1NnWVLFPQjfo8Q1upVpm6E5QfatkLo4SHj59y44Y4U/I5gyjbp1GoocgM+Oa1EkkUoZJyeiB6l/ae7FFd2OSNLwvnteJUePj5A2qtCpWqcISv39Y4PD6k09nHnYr7fv833ubiUZelovh9/dML1u+8ykLzS9z7gWgeH/RnRDObC4+5ntraeo6T42NMxDxsLN/kBw8OsCybVBLfqKqCohqEoYKh1eRnBgrWPHhV1QhVVdnfG1KW52GxkvDk2QGkPqWGPJOXrrN/cEKmJnJerjGdaAzGj7m+9VUAHjx4wGsvX+PjsEO/L3yQvGGQxSmpbLJXpW2p6vNqaJqmZJmCZVmcHoi19vBjn6XVKmdSz8nKKliFlMPD3flcjS5GHO09YiBFb/00YdBOaS43GI3EXO0/G1OuNmguF/Emwhbrtk1n9B5/8Ed/LGyzkiMMnzAbG2zcFGQVj47ew52FNM0lajUxdkuNNT77/AMS6TstL6/iuyZOISWQmmQm6+zsuHz28ENOLoQvsVVZRI/GaL4IpEa7GUZO5bOzj4lT0XtX5E1iQyFR+uSk6PSi8YzzTpt8X6yFsRajui57Bx2CSDzr9VfvsHu+yvmoTG1FjPH+3gmGUSaTLd5/+FffY6ecce/tP8O8IdZMo1ZAT0ysUpGC7CW8Xt/i/pM9hpLQKpxNeGnL5jt/9PuYsk/5B9/9gOu37uJnAY4t5rJ/3qXdy9i4I9bMxtYi4/GQNI4JZhIZcbDPrTt3CNwUyxZ788OnTyg4CximGF/FHpBkCqVWxljqb3ZGbWbTjNu3Xp+fyYeHx5y3O+hynE5OXRYXl+l2u7QWxNq2cgqDQcxskjDqSN23ZIUoatNYFOM5Hfc4Osn46P473L4t1m2ztc5pe5/BYMDmpvie541IE4PxTAbLI4uvfOWn+PzJh0xlperkYsiNJYdxO5bP0Th62qHaaDANRS+jGyvEk5g0rmFa4j3jSKE/GuHY4lwtOGWINWrlIt9/956ws5WrXLlpsXvS4+lTEazWnW0KtsFM6lzqVsZnDzvkrRztc7FmxsMma1stDo/aoMnKv10kSvtcdMR9eSvCMDMUtcjRgfBv8oURRpZDpUv3Qsx7tdIiDGZUKnJMZjHTccrVndscSgHtTqdDpWrR67rsbIpq/SXr44+7/r3BlaIoOeA7gCW//1tZlv2PiqL8OvDTMFe5/U+yLPtIESfcPwa+Cczk5/f++r+RokpqXT8JuRgcgh5Qa4nN9qPPPsC2c/Q6feKxWGRrS8tUCzGpZOrpjfbpDx4TBTkMS1IVRyXah2PCZEpzURihpZWYDiJi2f+skaCaEdXCAoHMsOVLORRDZTjuUpKHx8SdoGQ5Hj0Qg23nUpLQZO9Je565DQKHLEtIUpOOdGi3rpWwCxHHUiDx3fefMp35+K6OroiX+MIbVygUR6ztRBzuy4Z5z+Xm7R1SmQUoVDLi2EWLVQwZTe/vHVEpN8inDmMJ02kt15kOJ2SygV+zc5zudyELKchMZ6VUxgsDanmZAVMyIWAbhhRskWlNbY3pbMhsGnLgiU2yaTUZTkaokmyhVa7hjqZUawaahD35ox5B4HLWOWEsxXhLxRruNKSfijFI2CJWVYajgLuvisZYVelwfHLEyBvjeZIJLD9je+M1zs7EYmkPD0kNjfHZhJ2rYvPZPT+mlWtiWGPGE3Hf299/wPZqmRvbrwMwmI14ujvgpVsNhkOxyCyrgBLYjKIeCytStFit8L233yNfkrDE1GFjw+Hg/jk/85MCWvPVn3uFf/F//Ca3X25Sk5vd091nvP7la1iWyMp9/OgzNlaKuLOI0xMp5mpa3L6zhjsZ4/fkIaykBGmfyZl4b7vsMZsE5CsGkaQFHg0jyjWd824b0xSBxXicoBopF3KzM5Mq5VKGYczoSmawQT/i7KKH682olEX27tHjQ6I4o7UgDq8wmaKqJTw3T7V+yRp3wfXr1xn0x/OydzVf5/MHT2i0xJj7g4ju+RBFVzAkLOD07AnN+g5KlhEmwqm29RxeL8U2xDZj5Ys0rSrtzn1MTThOSqbRH7sUC+KdVpZWiGOXckmj3xeb+2KrSrvnYRoBuUQmQIpl/u2779HbF8xSev0KN37251jfKBMGkhVTVobK+RyZlDRYW1lnNArISYIbQ8+TpDFJ5s+DmzjKGA7HGGZGJiENuq4ym80uGcExTRNV1fF9n9kla1yqYJoamvmcQj1/uS6kw19zHAaDAY7jzJ1xPwywnTzTyew5AUJOOPqXpBD9fp+MBNvJz5/tejMMwyBToCATIAU7L4WMxd4ynbn4vo/vxZweCbsulgrYtoWuOSwvSTKOMKQ/HOMFwnEbTSZEUYQXhM8DvEuaUHheSUoVNM2EeRAUzMk7dAnT8YIIJYtRDZ1EBn3jSYCumfNqEwjIn+M48wqNYRhM3TG2bc8DLt+dCVp6ycaqSYjlZWUPLglCRAA7hwEqwsG9hJC5rotpmhiGMf+OgDJqz8kzMrEvFovFHwlyZ7MZgR8zkU3haQJOwcaSTJ22o1OuVlDUlJqErSqKwXTikSYqs+klZfSULMvw5ZjnbQtTV7DtPPmiGJdGvYyuq6RxhB9fimobTEbhc6ZH10VVFc7OznAccV70ekKWoFIpPa+UpT6lSonuUPy9frdHFHukCVSry/MxD4IxOctg55bY
z0hSUqakifh7zcUyhfLLPNt9iqqLNXp+2kdTT7DNBa5VhUMbjQ9Z2KmwLRnoarmMd//sbdodH0kSR85yOD485c2fvMHjB2JvPnpa5OD4gtQTc965eAdPqZBvLhFKIpMsS4mDCaqqo0jm3ywB352iyhaDLFUolBxayzq6Jub0YnjM+vo6hqGTqSI43X12RBbnMSSMuN9LqNQ0qqMq9z8VLLhLyzUMTWUycslJqmdSD1U1CaX9JGRoKPPko5j4FNDQdZ3BSCY8wohpcMFkLMZJ0S+YPpsxeU4WSBBNuffhfQ4OhPOac5qkuk/vkceyJIaaBh+jRmOCo/ycrdPKb1MsXOPkTIxlbzSlXlvEt4Z88PE74r5xm8hUKa4WyQJhGy9fu8Gnjz7gvC/201QfMejepVZ3uL8vECE5bYOdG2UGnSmnhyKAXtRf4fWv3uDX/vlvAvDL32gQ2CHJYEY1FQyGY/M2VmgAACAASURBVG9AWVnk9vo2ZxcCzhedfsqC3eJLXxXsff/kWx/TasLe5z0WNsX4fvjgIUXnKkbLJFAjeV8fogKnsWQiVXr83rf/kllUYF0yOdulHDk3ZUUr4yWiajNL4dHTx7SqovLxV29/h3rRYhYqDCS19vZP/SIPDj8l6Pa4fVMk6Fy/yGJ9hWvbIjH86NER3c6A6Sjmyz8pWDdnV32G4zbLq4uopjhr4zSjfTLk9h1RoQmSHuWyjV0waB8LG75y9Ranh23C9CmWIhKJV7dfY+/ZQ8pFsR6rNxbp9Pc4a7fp98T6r1RKpJkCSsqnD8TclArL3Fzb5KNH35VmV6BZV1lf32Y8E/7bweExrUodVYVuX+wBU++Q8WxKqSgRGmaO/eMnTMYuS8vy3dMZ2WiFm1fEftALR9y4s0n/xGdnUfgk9x8/pFZUaR935nulqurkzEVKEio5m4zJ0oTBsM/6qjh3nMYhx0c9moUNEiluvljepFKf8rvf+tcALK/cQTU7ZGnATCKyatUCo1EeN5owG0pIZVH48JfQ9nqzwnQyZDSasCMrXqN+joJZZO2lgGdPhS/f655iF2FyyeKqlikWanS72rwtQNdMVpZbhEsxTl6cwe7e8wr2/9v1N6lcBcDPZlk2VRTFAP5KUZRL/sH/Jsuy3/p3vv8N4Kr854vAP5X/fnG9uF5cL64X14vrxfXienG9uF5cL67/317/3uAqEzgKqaqIIf/56/jbfwX4l/K+dxVFqSiKspRlWfvH3qFmDKTY4nK5glNOGfQDjk5EFsXJl3FHCqZdZmNHZLPWVhdoH4Q8OxRFMduuY9qwtLzK8anATk7ckPEkIEx9kguR0R8Oh5QqFnZRRJ8VSjSWanQHM4bnEkPrm1y7to0flNk/3AegUV1EM2fMxuKnOzkHNecShgaViuwJGrQJvDpHR4dcvymifsM0GA9Dbr10V773NQ7bHzN2+9imyDavbsecHMUcHfWoNGU0nFQ4Pe4RyTJ2cTWhVGpw8mSClpPNzgsJ+XyEkZVBQgXHgYepm/NGRyNWSfyMQsER4ptAmMxASfAlbKdSLBFFCTldpygx7X6YMBzH+GFGKpstGZ1j5M05BMkPEwx0kjTGsEV2Ip8ziZgyCVwSWTkIk5iJl6HbwowePPyIuy/fpVAo8N73BEXmzRtNdDNge22R3fuiwrWxeZUnT485PxW/ZefOKqE/RTE8njwT33H9KWvXl+lfzDA1SeE+TilaO5yeifvOTs9RkjyLxTschSKz43lTQtfgxt2XGciqRrt7xtZVjXxBZI2ULM/B4ee0mlvsPhZl8794+10yVWNxo8Y0FuOwfb1FlI1QfTG+eVthNj6n325SWxSfrS7l6A073Llzm/Gp7O3zE/wkYWFJ0sqPJjh2BXfiCUgmogFzNJ6RKTqu7FnTtBmObaNLIcWT86ekQUqhqKEb4jeXSwrlSo3e4IAwcqQtKpRq5rx6ZxccPG+IXQqJpbhywTAwCwrZOKNQFfPnxWeYZn5uU2qm0Fqq8fmDT1hcFBWn/nCCNz3m9u1rPHoslvrR7pCXtpdJRuI5Q/+cIHtGqbqIkRfjcnp0zsZGnVZLQAdG44DJOMPzptRlNj1KfOqLyzgVm7ysvi5XVpmsR8Q/ITKMimaQDkM63TFxLOGM2mUGOSUnaYhnYYCeg9QXNu0HE1mxUDFlpSW1IGebpGlMKrPimqYRx+G8ryaKAhRFoVIp8fzSSZJkTjwBzHuoLiFyk8mERqOF67pzoeEsy0jicA5VA+aEF52OgEv6vo/jOPPnXT5bURSm48n8vlkwQzMU1PhSaFhF1016/TEzSYBgGBqaruB57o9UiS5JFAByeRNDVUjTbN5zFfxQD9Xl77v8/8tx0S0L27CZzWZokqLatGx0csxmM4oFAb2I41joekmWFE1RSZOEMIjnfVKqqqKg4U49AgmTy5lCOFnPm/NxTVMBX7yESyqKMof4XVahCoXCnBTk8veKqhdc9kAIYedsPg6qqTL1PLqDwXzcL+F/+XyegtSPiaIQ1JizjlgfuUleyoikc82uSxtq1WvkWzk5LoJA5LIaE3ohSqpw1h6g6pe/zyUKXMolh1K9PH+WU7DJJImJaWXouknOcubvPukLvaGDgyPydvn5vBlnVOqXek4GSeiiqjpBIPZTVdExLR3fj0ikjk8chyiZxnFbQHnfee8ellkkjVRevSulQnIaH7074fYbt3Dk2p6cF2ktNTjtiP3AKq1hFmrcvLLM17/6JQAGkzZPn50RRhG/8h8I6NXTJ2e0Fr6MUxJ2kLcKvPPhKb4SoF3qU4YmlmkSxymxhPg7hYwkmcznkzRPq1VjeT3k8JmA8BSMTaqVJsenT+mNhcZaMb8h+t1UsUYtvUKzssqH39nl5dffEnOTzegNBoSRC6mkNEcnwEPj0u5UUrIfoWC+7PVL05REVjY126A/VLFKsoJ5HpIpUxqtKyC1tx7sn7O6ukxpQVQGdncfU1920PQKb78temHWrzRQEpPJeEomJQ0efHKMqli0FoXdH7fP2X0csHoj5kKeo0ZWQrUSeuMxli7O7Ud7B5StBiNJ9FMq1Xj8ZJeXlC1e2v4qAK5/zuFextbGTZZLklylEnFwesTipqjCfXfvt8mSjFX7OhslWdWYvc/hoxMWiosUJETUjd5ia7vE7/6F2JOazXVS00OtRdgSmZNNPc69FD8es/eZoELfbGyyeuUGybnYb67f3eZ8kvD66h2conj3hz/YJ6yZ9B8/Q5M6jONxk1mQ0p88h+k6TsY4jXC0RfmbcyzVt6jUbc67EtLYaKAVVD54T1TcZv6McnGZWhXOOpeQaoVmY4sk9RlIQpnxwKdaqaMg5uGNV77J7pM9vPGUWlX8Pt+dUq00adQ03ntH+rqOzfr6NkdH4vcaZpFyNUeWmpRL4qw9OnlIzioSBBlf/8Y3AZhOp0R+kVZDVIQ0wyaS5eE0Fmv0a1/9GqH3GQ8ePCJFVKq6PY9iWaNSE3tEFBjkHYVa/RU++1T0eK+sVNkbPcOUkOOlxXW6z7r0znvk8+LZwSDkpbtvoSjf57OPxH2thU02Fq/
MobyaogjkQqZgS+I017WEAPykxNUtUemsVmL8WcLLV78OQEyKGxnEuKjy3OkPBzx5+jmvv/lFXKmd1m6fUSwr2EVJQjWYgCo07iyp7dkoNXHHHt3TMTtrQmDa45RHe59QKosqp67YLC01GQ+guSAqkUfHTyAu0lxwePhQtCKUS8Jn+XHX36jnShEnxQfAFeCfZFn2fUVR/gvgf1EU5X8A/gz477IsC4AV4OiHbj+Wn7X/nWf+Q+AfAth5g6tXxcDGqcKTJ2eUnCJI2MNCrYndCpgMXKp5YTznp22m3ghHNq2XazPC0KJ9vMf+rthILSNHwc7QaKAbwsG88foGg+Ep9Zow8PXmdaazkHbvANeVm7lt4c9m5PM6q6vCeWrWqlxcXFCT47lzpcVH9z6mXFpkY1N86M0mnB6ec/XGEnIumU0dmg2HR49E4NZqbKCFC5TzKpYpjG7v8ZAg1igVVziRDHdb2yaVkooXSpYs1SBMAhT9+bStLLd4+uwIzWpgm5dMcjp6ZuAUxbjU6lXcsUtn3EGREBxTLWKZNqk8iEXfgk6xWKbXE86cFyS4rkexUMOVGMpxPKBVKJGX/cL98YhyoYyWKExlT4s36pMZEUutGtNACkVaCppqMOlKgeI0oHcxwsgpsokb2qcaD++fUe2l+J44BB4+mlDKO7x0S/TV6aZFlkY4ukOnKw6qm8sv8e77n1FfylPXxQGztahz2rmPUxQBbqW0hF3TeXJwTqkgG4R7A3auLbO7+wDdugyYqwRR7TnDl3tK6FsEZsxY9qs1lpc5754SawGBL0kghj1yuRoTVyw6RUvpTT2sfJ6TQzF2UZCi6Qn33j9EVYWTsrbRIhs3OToR9028KdVqFV03CKXuE1qGZRbwvZQklbAgVOzcEt5UHBTnxyc0qiu4foAjoS0PHh2xc7VE3l7HkkF8ikupbM+1MU4vDrCsEjnLwM3EO/neiMNDi0F/zOqqGPe80SKMe/RGIpisV2rkczZkKooU2fUmCkubS3z8yX2WpB7O1//TX2LSOyeLxMGrajM6g3NSxWA0kdj+1SopCqiXDbZFdq7WCAOPlRXxnMWFFuViC7QIVX5vGGckqcdMiq6FkxFhHJHL5anUxdoO5TwurS9y6XCtJRmD8QhPYnB0w2A8nhL4KYH8fhQHRHFMmprEP8T8pWnW3FnWdZNMEdpMc52kWUA+nydOknlvVjRzSZJk7vg7jsN0OhMsfPKzwXCIrutzZjFgTuRweV8ul0NVVQzzeRNNkiQCylssz2GAuqoSBvFc0KneqOG7PkEQUGsI2xeseRF5x5k/bzabUSg5c5heGEu4sKnPNbs01cayrB/pDdO0jCTJCCXMUgQvOqpqSs0fMI0cmmphJBmGhIheBpJSKkoEQkkIpPNAtFwuzwOkS9F3BY1mvUUog6bLXrQfHjvTNEUfWBzPe9su4X9zoWFFkSLK6Tyodl2XXM6cwzo1XTy7VKzMg8kkyYjCmGEwfU5gYVgYukGpfAlBBNs2CQPtOcuhphHHMf3RcE6YEccxOdskZ4n1aWoGhYpFtVmi1Vr8kd9ycnhER0J+fT8gSdX5XCVpQsHJQIm4fkM0bpNGoMRUKiXOZFB0dnYuWCjlmLhegB/GGFqM5M8gS1Isy0LXdcYjcV+5UkTRNfyZZKQt1FAUnThO+P49oTuTxglL6y8xbD/mVEJ++8MOF1MXRZ5X3e4eltPi4rzDv/w/BfjFKapcfWmFwTjgj/9YkGPUyhUKTo5mvjEfJ8+HzFaIZJBtGTmi1AUyTNkGkEYpgQ+W7kgbjuhe9EijFFMRjmmQJXzy2ROKhRqjwT4AQz5HT8sc77lyPicMLp6iF/Q5TM6Phjj5GrVGnf3RZU9XiqnnSCXEUUWZE7RcBvFJlqEBqgpRIL4YRwJ+WhZxDTlzicZSEVV9DvldvVbg9OgZ2wURcF69vcZ4nKJqY0plsT5Sb5miozKctTF0YcMlp0izVZ+voVpxhX58gXuiQyqc49SuUDJVHn64x+ameM/fePivSKMcX3pLOJynJ8csrlZB71AsCIa22I4JwylLSzukpnjW+7tv0+l0CKfi3Ln71i/x4fv3iFsb9GSwur78BfLWUw77H/HzN74BwIP0c75/P6OyLn5z7CastIoMDkcMFWGf08k5w0EXq3Wb5VWRRBv2Y6p+xqMjMS/f/fQzfvXvf53O2QGfyQTo9be28PsxR6lOoSreKxylFKs5Hj0ROnsba6/S9+Ci2+Plq5sAmErA2f4ucbZISwrfP/r0KQuVCsWCmKzuQOH84oyNldfYuSb201zR4rN7h6zvGIwlRM00clx7aYfDI9FHl8V9KpUab3/nBzQlmZKipqRkjCcOP/c1EcT3u0Mm7pCS7DP3/YzxSOHa1Q3CROwbnz0ckrdbJGmf8444x5aXF/j04z1ay8LOFa0CkUK7/SE5S/YSF9Z4uPeMm1fXePfTB8IW4jytRZtOW/g3O1svMxidMHEPWVsTAZhuOCjeCM0Tger06SM2Kwlf/uYGv/H7gg1xcekKx0ePaNYabG2IfVhRdHL5jGJJ+H0PP+0xnQRsbFYJJYGHpbfIFJfGxhrrUtD6/HTMzA9Y3JCsmINTxm2dazevcXok/Mzj84ekqcXxweE8aegUVFS1jDuVPkF2TBynGEkNUvFZMZ9DMVUKZpX7n4u+z1uv3uD61TeZumJ/jYKI05Mz3FFesJgCgZ+iM2Q6KrCyLGKQp8/EGP64628UXGVZlgB3FUWpAL+tKMpt4L8HzgAT+GfAfwv8z3+T58ln/jN5H6VqLvMDYTiGWcIyMqJkwPrqpnhJPURXc3SjHmdS6NMw8hj5EclYZqmNPL7vo+gBa9vixwdTj8XFCrXy9rxX4bS9h9u1ieUEmEkbP+2hKQrNulgs9coOxbKKZaeMpDimkiZoaQlFOsad7hk3b90lZ5TY3haB4eOHR6ys2fTPE84zkdVotHKMhsxx2id75zSbTTK9wXgiIvzINwmSAcXqmIJkLOychizWKyQLskl7pjHrD6lWNXJNcZicHLv02yZr6yF2UZyO/fMh682XyCTuNYsj8nmTpeoq41OZVYlmpChEMrs9C2OyOENRZxQlO2IWRxTtPMOxhyOJG5KyQ65gUpFsScNum4gYf5KhKeLvlQoObjhj0JtQkNnlaeJi6jkM2T+i2RqHF3s0W5u4qTjQnuyfkZoKJ91jajnxvXb7Ardg4xfEJh2EY66sXSOLCkShyCS1J23uvNLkeH+GJ/tjjFyBbu+Eg7YY383lJgurNdqHXX7iNTFX778TcNT7hOpKi0xSh7q+ip23MUxhG2VqDDvP0FbG1PLS2dHyTN2YRmUBX/ZcDP0h0czFrIrNyHPP8TIFTU8oFoVD0h9HbKysEoQTXLlJtuIFZn4HDLGRMlRQkgrTYRdFHpam2qJUyzF1nzPQaWaZRPFwZbZ5bXsBA5sg0PEljX2sDvH9GisrTTRdbPjfe/cZEzdPVdp5pVIh72g8+GifVl0coDfeuMPu7kPWNossSjFejTonF20u232qtQKPHhzSbCzPHczWos
NJ51PKThNLEfZSqTp0egqBKmx6aanFSq3Fhx98l2ZFZDXzlR0Ggz6FgtjE6o0inbMB5XKT4UDM+6BzQrU5Jok1LHnwpqaBYxjE0u5UXcVyKhhxjCKprQtyU0+JMaSTWyw4mEZAYUNkpDx/wsyt4ropo5HYpCezMX4YMJ0E/w97bxYrSXbe+f1ij8jIPW/m3W/durWyuoq9sZstktIMJVHSSLIWAwMb8FgYW4Ag2MD4wQYMA34y/GLYAy+wDcOA4RnNYo/lkWSNJIqkRA0lskk2l96qu6pru7fuljf3PWOP8MM5GdU90EieB8OCUfGWF3kjT5xz4pzzfd9/IZJVTUXRSOKMQOLzNc0QVZ0kygObgu2iqBpxEOSVjvliThzHuRHveDyV0t5xLtRSKBSkFPozUQRd14njOL+ProuqWJrFz7hhcSwO2BnkClRRimUU8iB0NJqwWMwol4tYUgM4CSOy7FlFBcCxNZIkyVXxlGhBEmdEcZTLumtahu/PRaAiA8pU0QnC4GMiG5ZsF7mRcRAEaFqIbihMZULAlEFbKr/j2i6FootFkgcNK6l5S3/GYQuDAAXyebdSCUySjGDlFYDgXBUKhRwfHwRBbhoMIpBacbNW6oSFQoH5fIkhjZX1VCdOExzHzKuhguOTSO6VeI8ty2KxSLDt1eFYI/B0ktTD8yTJXoqlTCbzfBzS1COJFSa+6BPbMJlNp8RRyv17Yv9QDZ1apUS1Ws7fkTTNmC88kNwi2yqQxDAcLDh68l05Dia2o7K1sZ5zIPb392k0CjjSGTsMMlTF5rx9Sl/uq93OCN/LhPiHvH9/NKbiltBkJJz6OuWKzsFeg/lcrNX1WhPPW5CyT02qy+40tyg6Lr6MPubeAku1qCRFlpJXa5om3//+MWkWcfuFF+XzxcxnA8YjuZ5bDqriEEthCBAHIDQwDCc3IM/UOYqSEUaenK8Khq5gawXKUlb+2o3rvP+97/Bwcg/NEkmmkX9MYnTRJEcoiGxeuGoQnPbRUtHnlzeaoC851AaEUoTF1TTiRQoyuEuyFCXL8iTman7quo5pWkSyqlE1L/GTP3eFf/qbvyc+ry1IYxXTeaanOeqmbDU/RSZ5jnGaEPlzFAz2tmXw6E8JlhmfefkW/Z5IfDlWgdnknF5frGVJalKvb1FwDRSZOFXVHrrSAKOJLpV5k1gDVN59WyT6er0eN67pjB0o3xBr0quvvsG3v/9VHh/2uLIj+uq1T7/EO/feZzQS682wd8Ev/dTf4vodjT/8hghkeoeH6IUCqgbvvi+SzIWiRq1mMekJ3sv3vjnh3/3VX6DXvctWWVY+ZzPWt2+QZQljKZN//eY252dtbraEefViv8Nbf/omr926w82fFxzr7771VZKozmu3X8+D42Uc8vkf/WladamqaLbo9trsb7SoSpuew6MZd167wzvvHnHnZSGz/oN3v8Npt8Xlgnj/nx6ek2Qh9x//kEtXfxqAwXDM53/iEk8eddi9JMUcggpxkpJlEjVSmDKZzCjVDBbBijupc+vWbTrdY5YLMdd399cZz0f5nL50eY/D48dMJiKBC+AWasz9EYWyhWWI3+uPzhlNBrSaIqHcWm/w7vc+wFtm/MzPiAD9v/0v/gP+9s/+lzS3H/OVb4ig70d/9NMcHv8AHRFMzqYDiq5Fr9eGWLRpe9tlj1PO74sz14/8+B1ufmHO//W7H1AxBJ8yjC8YdzLqfovNDckJXI6YzcZ0OlIPoNViOp6Qpi6DqRiXUrHOcg6WPWM0Fmvz0Wkf2zFzYbPz9jmus8FosESxxToVxzG7ew163XMaNbGXl0oW48lFjlbQnQzHKVG0LGKpgl2q1rk4PmZ/Z5d9mei79+BDavVNtnZEH0Q+XHT6XLneZDoVY7VcLllvrWG5Y9575wgAp/BJVdB/8fpXUgvMsmysKMqfAD+TZdl/Jf8cKIryvwL/kfx8Bux+7N925N/+pVeSwHwkFoMlc9bqDWwNpnLz0rUd5osLdi5vY1jicHP8pEtGmKsMhu0i1UqLzrnP7pZYNE3XYNCPGE0/Yiajbl1NuHx9N5ejvXvvEXvru2w0t3jQFqXYnQ2Xi+MxkZry0quCcPnowZRyYcz70rHZtT3Kqk57MuYD6ZOwuVYmNC1UTcM0xUtcKxcYj8dEifh9tTBjFkUU7CprGwL25C6gMzNpTy7wWAkgVNCNhF5HfL68d4Prr73BWfsxqSmKgJ99bY9OvUwQPeFcEv0Kbo1G1WUxfAbh6fiHRMUyFeluv+gMaGxWcFVx+OyPRFZlOVvm6kG1eoPJbErB9vE9kdWoVbZQ1YxDqRRUtIr43oDQmOGUhYpiOLNZK5sEWQzyYKP6GtPQzystiqXhuBUG7UMyR3yn6w1wzIww8UlsMS2v773A0dlTwkAEFsVilTc/eoQfB+xXxYugBhWO3nmCW16TmUwoaibLeUClKTYOtw79fkgEfOVNASMN4xnrG5fwFj6RrOBd3blCb9Jj4UlophmCU0Jjj/lCbEx6yaBQ1Tk9e8xaXWSJTLNEf9lHlXLtpuZS0E0yHQxXLJKXtXWO7j3k8u0dOidiMX98t0O56NKdiTnsaCWCMMZbRjlMT9Ez5t4TnLLFzZsiq/jk/iHd+RRPSuRayZJ4KuBXoS8I0AVnh9FsDmcJRib9G9YOMN2MQIplOJZKr9Pmi3/tFRZSUtWtmOhmzAu3bvPovlQCWnYpOFs0JFl+fHrKzRvXOR4OSRQxpkeH9ynVq1SsKmZZzKEv/+HXmI+HvPH65wA4e3DISfuc1vo2taZYgDvjIwo1B1VWJop2geLlIp4/py0z7qVilfliRr83IpNFE8sy0E2NWP7BtG2iJCMNMgoSlqTJSsY3//S91TkUXbPx/CQ/KGqGShQJEYaV/1CapujoLL05lowoVwHUqhJSKpnEcUySZJSl3Lbv+yhRjOO4hL5U3VN0LMdmKg8HhmFQr4mD9ipIKrhCfjxOEzS54GeKqFZ5MmCIQ1GlSqL4Y4GFnKaGQUG+a3EcE4Zh7sdVrpZY39hCURRWCazE0DFNkyzL8udRkwQ1TQmk4ISqCfU+XdVyqfkkSViFZLrMzPu+j20XchirCGB0HMfJK0krcYkgCHL52zSNKblODmNTdAHjS+IsF/+AFAG0SlDkADoyAFq12zTFOCi6lsOxdF38LQ4TEim0EUUR5VIpF3fQNQ1NVmgymWGP0gSn6OTjslwKwRB/6aGqKyXAhCSUEEWZMEvjFN14Js6RJDEpsdAySKTf2NxH1TQ03c79BtfWWkynU5IkysdO0zTMgpofEJIwwltOmU2HOeBM0xRUDVSpMBZHEWmaUiq5lCTc3bEsppMZ/d6ETKrCPvzoTMwhWWWsVErUaxV291rcuCkqXnuX6lSrVcIoIwxEP0wmM9rnfXwJM+pcDBkOPZ4ejciQwbE7QNNEpW9VDbUsE7do5VLwlmOTpksMzaZur/oqZGu3jIKWw3Lm8yUpHlNf7Jnh0EfTDDIyEqkMhpqhKRpZEqPId1lJMzF2MrFguyZ/69/6aZ4ePqYkE1h62OUnvnSd9
sWcP/rj9wB49faX+PJXfouNbbF/zJdTvvyPQlprBap1cXS5eKLhFmzSmYGqreangaZauUqtrmqQpGLey/dBiVSiJEFTdDREG4ajNv/L//QBu1KxTVUjTo+nlErPKsKOpnPtWokL6QPZPg0xLJW9Sy1imYVvT9uslZoEsUeqrMRjppwfpezsCAK/6Qb0xiMWHTVPDBVw0G2H7Yafe/vFiwTTCim4Yl+9tVamfxGibfp847vfBGBjq8nJyYhEtZkOz2QfZ4wGEWEg5vTx+ILd7TOO/uQjKvJeYzVjseiSLRKO7os1/ZXPvEAYDHPLiNt3qnz5a1/FNjMmXXFvb+5gbpm8/84pGxJKm2Uekb8g2RTv7LW9bd577z2+8b17vBCJexWLLd566xHOls1JVyQg1yotvvKHX+O1zwoBLSMuszw9YzLsMZ6ICkSWthmN93n505/la18R9IGD/dc5PPohj5+IOXVl/wa7e5v0+30eHYrzYrFS5OTUZ2P7gCgWbe8cKiSpTyxtVj66N2N9q0EWl9jeFvvFfOqhmQaZYvP0SCRTOsOQ04sALxB7rR+XaDZazEY+r0pPuaPTh/jBHAUrR02USyUq5QKHR+Kc0u6MKbouvp/xu7/5z8TfLs7oxt+npb3BtSsiOF3Oz+ifd9naEXPz6HhEvVnk/r0hP/clEWAu0yHHT1Juboh96zvfv49eqLJ4EFGpiR2h2Nrk7KzHdJxwcFkEOwkp33vzgmpDl44bcAAAIABJREFUjJ1btDm4vknghaiJhEbOIwqORvv8EeOBOFe65YRhLyOSljGVQopjZphWRP9CJCjK7hoFW2c86WC5Yg8ZHx7w6dd2OTsRcvid7gnFeh3XKDOV+8WTR4cU3Qb92YwsE+NQLgboWsZMCqJFIbQaZcbhBXNPjF+QhpwPTpgcBnz2NXEOm3Y+pkDz51x/sQsWoChKU1asUBTFAb4E3FcUZVP+TQF+Cbgr/+V3gV9RxPUGMPkL+VbPr+fX8+v59fx6fj2/nl/Pr+fX8+v59f+D6/9J5WoT+PuSd6UC/0eWZb+nKMrXFUVpAgrwDvDr8vt/gJBhf4SQYv93/rIfSJMY1xZZqygeMh3PCVAp1KRXjD1AQ+PBw3tsbYiIO0qmZGpEEosI+KLToT/IKNeqdCVn52CnTjzXuDgLaEn/Hz+dMQsfEUaieuBaBo+PDtGtCy5dkjALA7xwSeQbdI5F9aPgekwuhly/Iopyy+GEh08/ot7axcpElnE0GdCP+lSrVZRMRPlPnx5iFxSm0iF+c7NBpdTi8XGXMFxlwGAejzDNErGENIV+RJYUKMgMWPvkKcN2n3INkLL1y0Wbje06lvEi/beFzKrrJmRqiCYJ/GkaY6oiCx/KSoe7XqLWqjAZiPts1Groqk651aI7EiTfQrlF5+KCal3DdkQbZssTwsyiuibJ817KtL+kdskAXfRv77RD4+o1tq9e4c23vyd+z84ouTUmQ5Fh2KrvEAYpYSFi0BdVqWaphZYmlCsldESfD5bnWE5C0RIVov54jGoqVGwdLxSVOtcpMfcyonhKQZq6lHfLFEoadWl62b4Yo7GATGM2l5K1usFwMKbkljAKso8vztAMDSUS80czIpotk97iOLcFCNMYzUopLFwWEs7Tm55jV2qMRnKMmyVQYTQJKEmhEbVis7ffpFxqomSCLGtZJrNJiGWJuX/8uI9eOGdzp0YUivmZJQXcUo0sXXD2WPyf6ugoCRiZmPvLEDAm2KbGxaEY04MbQlo0DGYUDPG3gCFmouNKDlbi62jZGv2BwsVQZHs4s4m9AoPBEqfYkOPnsl5tcN4VlayZFnH89BQvSlAdMRdKzhba3MZ3+jyV4jRrmy1Utco/+dOvA+C4BdQ45qU3XuOtb4rM4NqOSs38FJsNATNR1Jj2+SnFwhqvvfpZ2U863X4HlJCC9NBYzH3BqZnL9zryMAwLRYnwl5L/I+fDzPNJJTQpUUbouoYl4TATKduaJElesTBNk1iJhQiGsuKCGTnUDETFplwuE0XPYIFJkrBYLDAMI4fXFVxb8HakVPqq6gTPqmCqpuWcqpWMbBCF6AU3J/D7vo+qCLPzFcwrTVMcxyHwfZaSY2GaJo7j5BlN4YmksFh4eQUhiWO8OCAMwxxmpakqhgFxIr4T+D6aJjhDuUlyEKAokke04jfZ1ifELXzfJ8syDFlRAvD8kDgKP9EuXXcJwzCH6QF4YZBzz0BWwRyX0PNzqOdi4WEYRv4dRVFQDf0TkL/IF/exbJsoklUFXSdJ04+ZCIsKVZjE+VgZhkUcP/PCqtfrLBbPPL1W8yAJI4IgyMfh4/5Wq+8oioJpqTnkMM0CwiCm0WjkUJPeYEEaP4MTrkyOoyjK54aSZui6eN5YVhWdQhFdL2BLrlgcp6Rpynw+x/PE73mziMZaBUWNKUrrjK3dOq7r5hw6VdW4aHd5++13CXxR0RsOFriui2lqNNdFu3b2mqxv1NneFpUIy9bp9XoM+lN6XZG97XZGzGY+SRoiC2MoikFzbR2kqa5mLMlIcF2LVPqUzRcTSmUHQ7d4/31RCYjCjGKxQCKr0svQR9ELJAQ51FRR5RxUlfydChcLSNRcMl5RHN5660Msy+JDyX1JZmN0x8R2dP76T4mK+vZ2g89+7u/w9IlY3957/yE3btxCtSOePBXiEe8/OaZx4xYoCoa+4iXOMQ0wNFk1DgNMXSdJw9xHU1V1yX8LCWWl0cvmlNYyNEu0ezLrY5gFUmmkDnD92h7DXpdhV4pOlQsUKyXCeMGjx205XxTWayonDxPiWHyvUDS49bLLsC/uNe1mRHQpFvfYkIIEg/4cywTLcgl8MRfXmim2vUZrrSXn9JAkVKi34AePBITr7t0PiROHhTfkqC8g96WKzvbWAY2agHmfdR7x3Xe+Q5T6uQ+jZbm8+Y13qVVquKY4F7311gN2Nreor4vxHI67eEubSnNC90y08/LlHS5Ox0yn59y6Icaqf24wGaXMpgK+OJ/e4I3Xf5zv/uAbnJ2LPSxKZ2xslbDiEEcV77aOgpUteevrot2Vmk29XsNbDtEMgbTY3d7nD377j6lULqPKvWM+X+IvbCxLzKnB+Ajf95lOAurS/6u+bnH8ZMn1n9/knXdE5erhUZerN38m9xFsNbfwFgp2UWM4kCgmFcqlXabFOZ1zsbfHI4NStU6jKd7t8eCMSmmXs84x//Af/G8A3Lh5wGhyThw46HJtHo/GbG6t40u/0/FAxTJ10miD198Q1Tq7HNIdn5KV7vLmtwR8+I03PsdLr/wisSrORUrXRvWGfPGFfe5siLNZWDzg9w9nnKSin2ZJj//x74/ZvbTPcCbkFSrTq6BeJYsX1KXwzjf/9D1a65vY0iy7fd6lWnNxGw4FKYA2Go1YLI9YLGckkRSKiZo0WxmLhdwrEwdFDTGNBroqznSGGTLsTygWi6ipqEDd+FQRJdEoWGLiXdpZY2tzh0dH34FUQuIzlYQFd+92MKXmwbXrB2SZg65JCwlFweeYw8MuB1KCX68taa7t0TeG3PtArFPD02eV5j/v
Uj6+cfx/dZXqZvZjPyV0/k+6Uxw3Qc9C0kwM0lorQU3L9NtDMrnghpEiDjqSuF2tV2k0arQvjqnvi4HzuxOc0GKt3mAcivLeYHlOHC64dkkc5kgMZkFI6CsspfnhpSsNpmNhBHp8dgRApsfc2HwFHFlaNw0On5xQr1RpyPPBMivz6PwRUWBSlHA3056ymMeokuNgWR6WtkVvHuHLNqVKjOmo6FqN8VC8HCWngGNrrEn41M7GOh++/4hCJWThiY0wjAMiJtRrzdyfqtvtMuiO8GUgpSo2fjohzJas2yIwNCsmvV6PljQVnXRTbMOkUEkZL8SE8+IQlhmpElOuiAU3yRKSbJ6bXo5GPQy9gFpWSaRyXt2sYxglllrMbCaVgMololBlgeSrpCqkZRRjStEQ/ZIuTLxsCqbJUJI0J8GcqlNnVyq23H96F0VR0LEolqRiSLDETxJUQ0WPxCGnVnNwaxaTmYQuLSYslzNK1QIrqx7TcLEMm8lwQn3FlVou8NMMXRcHG9dSGQ0X6I6RK24VjAaWlXDWPccxBXxwGcyJtZRQCn9Uig6aajNbxGw2xGaSzTKq9YRMrzKYioVUiXwW85jpUrRze3eHKPYIvRHVioCMlItN0hT6g/OcQxSnEIce84UkT2s6sd9mvbyBrskg0JzRGw2oWiV2WwIi0h09pV7bpHsm5vlavcJ4NmWZLLHtFUdvyVp9nfHYp94U75FlaHSf9tCkk/3En5KFMXpapVwX8643OKTi1pnOlriq+F61vsbR2dNnkDzLJPRCmpUaTclBWtuq4/s+jikWNscsceXKZaI4pCs5ghdnHrptsFjMqNVqcvw0RqMBO9KLo9/p0+sM0LQCMWIc3n7vbR7ce8yv/PqvYciDd5ZlLL05voQcKYqSBzarg7frlvCXQoXOkAfYMAxRVTWHlYEQN8iyZyIJK1jXx4UodEMlDp8FYEJYQcCXcl7WfE6xWMQwjPxwLNZlNb93kiREUUSlUvmEQWkURcQfE5hYBYmrdT0MQ3RdJ03JgytFEV5OHw+KRGA3fyYKYek5xHAVNKhoxIngG31cDTFOwjyIWAlMhGGIZa64RWkOMfwX4XyrdqqqgMJpmpG3U8AWvTyIEc8Tk6Zpfp9VP644aas2WbqB53m5IIngZSV5u03TzD2yolU0kEqoqPZMVCQIAizL+YT3lSK/t2q7omu5mbGYPy6+72OY9rN5YBqCP2ZZ+e+ACGJjuW7Ytk0cx5TL5dz8OE1jvPmCxWKB761MkhdUqm4OxaxWirTW11hrVnIe2GLhMRxM6PcmLOay7amKH3iYpmhntVamXHSoVotYEoodhxHz+YLFPGAo96LF3KPfHxOF0n/QMdncbHJwdTPn6K4EQwzDYCT/r9sdsJiHzKYL2e6QMJbcN5ncMC2NQkGMRaMhkmjL5ZIsS9iX3Okf3v+I+dIgzKLc34xUIU5CslTDsKRf3OiC08f30CXnq9Gs8/kv3GbpL7hyRXBKa0Wb/tBHURM0KYBULbqcP+1QcUWft9arPHh4jzhckknIYRLZOEWDb3z7XU6k+ahTNon9CFWTHmikEhYIoYQ0pXGEkmVoippDYC9drnP7pU360ki9WlcEvNYN+O1/IGBsP/OLt8myjIJclxUtJCNk2AsplsS6PJ5eEAURjdoO1br00ws9UnWObtTk7/s0alXO2mfs7wrF4u3tGkG44PHDc6piu+D+h2M2NhtMFyJAqRb2KNcKnJ/N2F4XkP/59IIoW+ClAWtNceg0dYPZ1Gc4EGOs6SbFtYQHTw7ZkJx5K55yeHjIy7feAAlR3drYZtxf0NwQn58+GuMrAakyJArEPKg1bPwgpt15giFFSrwg4LOv3+apVAt+9TO3yZKI1uYGx6cC3ucHAUHg8Noru3znewJQ5ToF4kWGJ0W2qpsb9NszDvaaHHYENHIxirhz8xZF6zpnw3cBOOvfp2hf4fJ1ycv66AR/kbK+vplzmWfLHq31BoFn0OuJ/nv5pTdYTD3IZOK23aHeKmHaEceHIpC5en2bMJlw/DDBLcvESTHh4jxioyXOp93+h0ynY27cfAFPBsKaauIFY7LE5s4LInC6+/4DGnWbmRQIW6tdJYhGFKtLCoYYv4v+hyRxiV/6m6/yf/7vvyXaPiyxf3CZzT3xfO2zDgfbB/z46z+K7wlO4OHDDln13+ShFHD48OE3KDnbaFlKZyQSzGXVp7x5Cct00GQyt1SsM5wc0mmLwD9DoVyrYihWDicul13u33/IemsTPxLjUHI32dvboz88EvOuB62NEk8vjtAlv9HQUjpnc0rVbdak+a9ph3z4wWOqDalyvIxxzHUy/YzhSPRdubLJfJrgmAU0qax88nTERusFalLf4MHDp5RKDuVyHVMG2Uo6xtBMvGCZKyTqOPzq3/67P8iy7DP8Ode/Eufq/61LN1Q2t0REetzvYZlrlAtGjlueTXsEXhtVNYlCsRHefGGXJK4znIrIOQwjnh530C2XQU9E3JPzKdvlDU7OD/GkeENmhdgFkyAUi/taQ6HXb2O7BVxNEP90fchaq0T3/JjbnzqQbfBR1JizU3Ewtk2PnUvXCMYG7XORRdm4XCeIixRrGcfHK7Nai0LRxzLFJqS5N/GiNpk6QzGlOpFaJUthvuhRKYpFJA41RjMP1RSLQa/XIwsVaoUIVUocTxcpuzsHdC9SMnd1oG0y6sbIPY8oTjGULcJkyVFbkEe3lXU2t9Zp98SzpKYDVkYaPiPeB+cXWHaZMA2eqWLpBp22T22lQJlZ1OtFRlFAQYpeGKZBtVhkfHTE5YZUe7u0yTc++CE1WQELgoBZNGOzvkbiiz4InAn1Vp3H7xwTScJwve5QKxYYTURflotllsEc1zZZSi6KGmgYlkJCzEDyWkpumdPTMZlUdqu7ZaKgSL/TpdFoyGdxaV+cY5s2Z+2B/D+LYdil4oq5YasywK03CJeJ/D2d7tkEu14jDWTWqDMiMwJKFfG5qu/QbS8pNgYMpGFnydIZLlQGc5G9AVDVAok/xq2KPleSmMViQKIGjOS9u5Njyq7Fpf0dTCne8P7RIVcu7zB/ciEHQqM3DNCVKdWqeKWn0yXVeoXhWQ/HFItPkoZ0Oh00TRwYOoM2tl1lOVwweirm1M5WCTVLUS2HB8fSwLKY0djaZDCQyYBER9UtKmWbaVcE0KpeZppOqZW3cCyptpYa3Nq7gSsrgxe9DkbFYTia0I6kfGqvz8Zmk4UqA1o7ZPiDEdubG3xwX2SIatUGdqlJfzIm6K2U5BrYVpG3vv0dQCQ7atUKi9mCWl08y6/8ys/xn/LfYZkxs4U48Jm2QxyHeQVKHMxNFotFngWcjidYlkWaiswakHOUVrwswSnKiGNwHCnUoim5xPjq8L9c+EJ1TtoSxGkCWYZhaASBzEpXy8Lg1g9oVGv57wmRC1nBzDKm0ylkEMrqj2U6QhmQZwf9VFZn8gqNbeeKhqsAT1FiskyRwcyqkpZ+oqom+kP/xL1M0yJeLqUQxLNqVpZlFKRq6yrAdGw3b1OWiWDSMDUM65n5bxRFuaqZoqk
ULJckznIxDhEAmhQKLuOxxOO7LkEQfKJKtBKmWAVhYRgymc/yypt4ZsmnY6Xilgpim/LM4Nk2LSHkINtdKBSYzRZY1jNJ/TRNyeJEBhLib6qhyz4O876zLAtNVwhC8Xxh5Av+lgyeQcjPF4sFUintvVwucRyH2WyWP5/jWGxubmJZFpOZOHyMxzqeF7BcinvPpwuePD7DMCxse5XEM3BdF011qNfF+z8cDrFtPZdG7nbbHB/paEqGpq3UGGPcokWpVODGzX0AiiUH13Xz6u94POTstMfjRyecnrTzcTctHUi4fCASHqVygSvXmhSLgs9VLJbpdoaYpkVXyrP3ukMu2j2Wi4RHD0/kM9uoekhP8nrmiqjUqUmWB7mGrot3KExR5CHX80YY+rN3oVwus7mxh59E9EcimbKcRWSpimkbmLaYE/ceHPH7v/t15lKG/Nr1fa5fv0FR02h3ReXh+vXrxOmcWsPhWHKO1ayIqigkqSfbpKIqKppq5uOepBGmZhKHSs4vGo9D3n33IVkqxuH82GJtbY1KcR0Q66Jjq5yd9kirYl+djGcoWsxyCQc3RH82tjVOH88wLY0oFXvk/YeH7F+5yWlH9OVWc4v22QXT6Zx4XQSF84nK2+/ew7QDGk1xvrl6UCNMhqxZ4uCYhQ7+MiBTPA4u3wSgP1hyeu7jWnVUySj56KMHbG618nfvc5/7Im+9+/vsbq3he2Jv6Iym1BotOv0ZxCsV45DRos3TdwVHaDpO2N5tQbSBZYt2BkFAv9cjSX0MySF1HZO77x2TpNK8ujPl5PQJ9bOPGA7F/qQbFerVbf75nz3I+ZRPDj9grXKdfWnl8/jwhN3NV2huunzwUAQNlmWRZT7zqcdsJlEiWYtKQ+dQmjkX3XVcd0S3e4FbFu2cTKf0Bx2uXX6R61dEkWA0GaNrMbOhmJuvvfEKp2fHBGHC6elKnCrDrSboloIiOXMP7w35iR//Gyy9kfzGBmpco9dZYhdFH6ytl5menEGicHIixvm8fYyWvkC5Ib4z859Sb1SpVrdoNmXifQzdTo+TY49r10Wl8Su/+4SXX73J3XdlZRePOLT44l9rYDs/B0B59w/4jd/6zznY+gUAvvDjP8s///YfY4Yl6uuWnNMZy3afYtUXXiZAwanw+PERxYKYr698Zpfz9oxqsZHbLGWYtBpXUBSPghQys6yQb3/rm5RrYq5sr2/zznt32Tqo8affFgit/e0bxEnIbHBKsyaUJOPQpt4ok2ayMkiDcrVIt2/jL+U5M1wAU8ql9bxSpio6qjHHD8WeXSgmjCZndHtn1ApCm2F31yFcOhiWxumJmLOf/8Lr/EXXX8q5en49v55fz6/n1/Pr+fX8en49v55fz6/n119+/ZWABTbXa9kv/02hTBLbY5aLERYV7IqsZj3uUC7rvPyZF/mjr7wFQLFqEfgp9abIQB8fD0kYYRcqhDIDtVYvY+g6oT9ja1dEwd3ejGJVR5PQpVJB473Hp1y9UkedipKMbkcEnka17HD8WGQ+djcqVDdbxJk0gB1NGM98OoMnrFlSHSUp4DQmnB6P6Er5yUqtilWMcKX56WSUMpvN8OIO25fF71VLe/iLiFFvQFFWri46ExZBj9am6APbsoiXZTaapZy/cXx6j9qaSxhVcrjE44/uc/uF13jwUJTDl0FCEjnYNoSx6BdLT5jHCu2RrMKVbUzF4MrOQa6AoqQBfqRSqRksJYelvhnTbnvUKgKqmMRL4tjH0VUSRypLLRK2dvZZjqdca4ly9DsffJeBOmDdEdmSYSfBKBWIjZjxUGQwN/e2WI4SgosuWl0a6CoVojijWBZ9MhsPMS2VZRyQJiJD4/d9zGqKY5SI51I5p9lgPJsSyMyZqmnU3Baas2Q4lLj2zGBzvUGSJLkEdxT6FMpNPGnu6jgOsb+kXq5QkaXA4/YRXpLiz1WKthzjYg1/6WJIs+VSQeH0ZECxZjP2RH/WqiXS2MZLJlSrAtbhaC6DYZ9KQ3xOGJB4LoPugtqKJ7HT4OSsx3pzj5H0nVl4fa7fPuDsRGRQo2VA6CUYlkmtIbJp4/6CznjIzb3rqLHov17/nHJlk7WayNg4bspotuTh0za39wWX8fLuFd5+fJdI83N4n61bzL0h47F4lnq9SpgsMVKNG3tC/nYUTDg6f4+rW7doS4Wrkt0gjWEqjRudQoEwiSm4OkEks7808YMll/Y3ZJsyJpMpB3u3aZ+LDFEUe1hWDUVR6Ep1K9d2SJOEkydHANy6ep31tSb90wk3XxKZ8299901++3fe4vNf/AI7l8S7Nl9MhGeJfLb11ia+H+fS5yBMduM4JQxiMlVWhqTn3jMYmy7NQeOPwQndHAq7updlWTn8DKR/FVKxTlZVisUilm58wpxX0zSm02n+e4vFAk0T0CtF1/J7i2da5JnjlWHp6rOAWAkuz8rUNwgC4ijNvwuQkeSy5iDm/uoeqypcwSmKyk2WEQRS7lZXP1HdWsErRTVIzftC0wSU0A+ecdw+3gdRFAmonqLnEEPT1PF9n0iq4YGoQDmOgywg4i89CoXCJ5QPoyjCcRzCMMwrhitY4GpchNeXkJ5ftcGXlaC8OiK5XcvlMq/q+b6ProhndCWMDE1lNpvl9ymVXPFb0TPzaFEJTQVcUlZIi6UCSgah5OyuvM0s65kKmGmaqBLauJKDt2RleOXHF/rCryvN4rztURRgGBZpopKlkiMUhpTLZcKch6ai6gFZqrAmVWJLbonpdMpwMM7XxSwTMM+1hoCH1+oVtneaaHqML1XNLl26RBQldNrTPJve6XRFxUXOjVLZpVQqsrHR4tJlca/NzXVUTWG5XDIaivXl/KzHbBowlft4N9AIAwXDeFaJDH0B2ctSDVtsh5w//IDFcJxDv199/TXu3LwiuHwyK24okHgRqqKzGr4/++bXqdY3WV+X6nbjISWniqL4FKUS6EW7S7ms8+S4zf1H0g7GttCyhGRlVp6qmHrG0ptQLgmExGi4JPBnFIsJwUJ8r1HfZH2ziJYJpMX54RjXiXjp5Rf4x78j5Ms/e3uXzb0KI2kcf3rWo7m5SaG2YDCUPNOlQ7PR5PUf2eedtz8AYGt3D6MS8sFdwS168daLqImG7ag8PRaQNdd4gVQ5ZTRe5PNla3NfcNikMbWlaXSHp1z0jrlz545ot3uV7ughqVYgS6VlzGBIpdak05bQU0I2NqqM+gMMQ3rMaQ00DeqNEp70wwrjESgRiuSUNhp1ZpMBw1HAwZ7g2nrRKePRgpl/zEyavjfWrmMqcxxDrPHrm0W++4MvU6+tsZDoEscu0VircDHo0KyI8WvUKhwdn3L9xj4AZ8cPqLq3+fRL1zg9FFUpx1U5fPwE29wkldVQjAJWOWQhFXbrJYVJPySNIUzEHra3d0Dgp6TJElWu7U9P+/z8z/48w574TozHdLZgOu8QSa/2VmuNwPMx7CD3opxPM+rldSpVcZ774MP32ds94Nvf/w6bO3I/tteZjEMCv8vuruiHyfQUNTVwJWS01dpgPrDY2Tdon4v5Ol/2SaMin3ntDj/4oZ
hnW1tb7OwdcO+e6INKVaPfbXOwu86/9qV/G4CCW2YyfcqXf/9/AODtD8+JjDJ2VsCSsNWN/W3SpcLC7+R+eJf2W1x0e6hSrXhn32IyUYj9jCtXxVnwvffep98NuHZjnaKszPleTBKDG4v7dC4+pGDuUN9ucXIhKsn18hoXo/uUqk2eHotzbK26x5UbRd6Xcv/DUUxrS2E5LVAorXiSE5QMdMUik1Lz661tvHDC09NDAKqVTbJswHwWYZqSMhJOcK0Wm+truT1LEI35T/7O7/xLYYF/JYKrRsvJfvSvC3z1PE2wrAXrtX0kbYhRb850MscpZzRqYjLNhmU2tovYllghjw57uBWfk/NDlj1xmGtuFTEtB92ecmVfQNTaxymHJ4948TNicN1CyPd+0ObazXWyWCxk/U5KqeBAbBB4ohG3r11jHmdcSJjA7kaTe4/uswgXZHMhqa4y4+B6XUgPy5Kj5Wg8eHzOZCq9dyox7Ysu9TUX1xXY4qPHfa4c7DIa9ll40knerYOisbEnBrfX6ZDFRSolHWd1qJpOqNSqoJoMuuIgapomqa4zmoqNaumrKGFItpywe0XAHu/efx/DNtCLok3V8i6hH1AswGIsS92jCdvbG5RrsC1f4N7gKarm8OCeKGsf3LDwvYzpaEqiiU2v7NbQtZitzUucPhYLy9nFIdX9Ar0LKWhRWOf0SY/EmdLYEjCoxVxD0zNmkzmKLQ4k/ScKlw5ugCEgXaPuKbt7O0xDH9sWcIn5YE6xYnJyOObyjhjTiTdiOFtQk0HodLjAW8Zcvb6VH9z8pQqoLOcjDFNseoWCjZ4mK/VkYjJUUjZr+3Q6Aq5hlGKmfohuZBCId6fkljk/61GriQC6UdPJsoRUc5iPZDCnaISxRr3homti/ILFErAplsUPTkcBjVaJME7pj8QLXKs1IEmxTYPRUCwiN25e5nvvfw9HCn28/MI11Mzgu9+/R6EgIU6qSm8ec7DdYjYUh5SNrQqG4VJ0xMI9GI04bY/ANqgWwv3RAAAgAElEQVQWpRS6u0577IE2ZUsaLpezKkm85P1jsfiUqjWa9Qqz3hS9KIPArS3uvvMR1TWXonwnwyBmPpkxl1Aoxy2gqoKzkUgC9trGFqap0+uKd1ZVTZIkolj6mNDBLCIMY5ySwUD2gaJkqKkBUtLVzMoQqSwnbW7cFu9joib8w9/4M/7GL77BjkxkRFHAcLDAlrhp0ygSRxlRlOQS7sKXxiYKM1TjmeS4pj4TUjBNU/BsNPKD/2g4kYIBySdEJ1bcKBBBk6aK4CX3sFJUwjDE8zxcydVMkkSIkMsgZDqeoJsGqqqvvNVJZKCTpOKwKPpPBAOu9JSL41gc0FU1T8qs2h4EQS6gYds2cRg84xbJNlmGmcuQJ0mCHwS4rouirOTg4/w3QPDHVFWVHKrkY30Q5xBFEDC5VcAD5AIRoR9g289MfaMo+gTnasVpW8H7sizD0PSck7a6VtDMPCgKAyETLu+1gnkCeR8IXoyfG0UbhpEHp6txtywLDSkXL9e8FdwyN2AOfVzXpVy0cm8qXdeJokjAGiXfbwU5DeQhQlVVAeXTtLxtuq7jL+Y4jsNSmtyHYchiOcv7SUUBUvxgTiwji3KpSZJEJGlAKg2tTUv/BBTU8zwiT0BEy2VxAEpin7W1Gts7LQqu6IePPrrHjesv0em08/87O+2xmMc5Cdw0TRqNGuubbh7Eb29vUG9U6XUHst0RF+0+w+GEp0ftfIw1TaNSKXFwRazfGxtNNjbXUKVwxN/7p2+iKhZRFEIi+YaZj2bqxJGCaoj+PLv3CC2D2UJ8vvPpW1ze20BVFbzVvA5TMiNBSVQMmZB0HIdarYYqZd4NHSaTifBOi5+dj/Z3N/jWm9/n7feF30+9XmO5HJFJ/liWRmjo6JoNqrQFqcQsFh79dkAhE+O1tl7m1/69XybwBUw/mLp4iwtmU5X/+R9+BYDru9v44TiXlR8MIprNJus7KShi7SQtUaw5xHGKzN0SR1UC5QRJd0TPmkxGfRr1MomkRlSrwpzbsjV6UlDKMBUK9jqOXDf8+YwgzBjOTjEkZO2VT3+GRXjB+/eOuPWCgER1+zMUI2QwEUJYhpEwPA9olDbZ3xFJ52H/hCRxeeWzl7h7V1AoqvU6vYsutbJYh5M05unxQ9a3ynQ64h21SwPq1Qbn7QvmvlgrnVKFogqW5JgrGIymbRQ9RZECT6qpgOIxniyJZiIQ/ZHXX2Q8iykUpJjTcoiSWFz91GX6XdH2KChz41PbxInKwpcwYrPPRe+EJ0eSJ7WzS69/SqWwnb/vqB6KGuMPy7z62j4A/+yr/4SD3c/z2mcEZO3d936IW26i6COOjsS4r9UvUakYnF98RKkkOcj1fR7dO2OtIZ7PC3zKtTK2o/DDd0Vh4dKlfXzfoFiyCaU9gm1HnJ/OKUrLzM2NJmkCldIzWxDP89jYqtA+ianUxdjE3EONtrl6XSQ3nx6e0Khv8c47P+Dsqbj3r/76v86nbtyiVRLv5/c/+nt8/ds/xDBL2JIfO+7YXLm8zuOH57gF0fZEucAPQFPF/CkWCzh2jePjY/YvicR7uapxcXHBfJpw+2Vxnvng/SfMJzqODLzfe++rjPoJr7z+k1y+JmB6uunz8PFd9i9/muOnImmwvbvBZLyguS3mysn5ExbLKSgurqTNuLYLUZ1B74KdbVlM0VzuP/iAxoY4t4xHE7Y2mhw/7eFWpNR8eR3PnxPMMtbqcq/VNf7DX/vNv9rBVaXuZj/6RRFcDYMLDK1MFioUZBa+VLToDU6x1H3mUu3NUG0qlQpRIAag4Bp4vokfneP5IkgK/RlqZrOxlZGuBB5UhXqzQudcLFB3bm3ieR7ngzazSBzip12dL3xhnycfhdy4LkzS+l0Po/iUyVD0V7W6QRIHNKpXePRYvCy6PoZgiqrU0HSRmSvU4PDpY+zCytDUw1+YdKcjAsk3KjplotjHNMsEoVgMXMfFMEGeYXDddSqlLe7e+yoHeyLYmQ5jUExSVAIZGJbLFp3xFFUq7tVaNSadc6pUmEo/jm5vyOa+TRyKRez8rMenrt9guRzilqXp7VClVNap1zVMSx4wlxr9QYhbltU7f0KpvE6BAt2p+P1KtUh/9ITG2iX8qXjJtCAkCTwO5aZXMZZcWbtDEI45vDgCIFYNMBcMFy6NssxmLwXRP8vEi3F+dIJrKzQ2K3hS2GQxmaPEYNmNPNubhkuWSUYk09taELDWvASqlwsZhAsNMgvDTElZGUXrbDe38eU8aF902dxo4WhFyERfleo6nh/SnixwpRiHpaWcdc/Z2RbVn077ENUJWEQJVUPscvWNCoPhAtdxiCW2Nw5iyqUakrpBlFhE4ZLxYMz+nngfND1DUU2Oz84pSUEJzTLpDy741M1PAzDon1EqlTg/ndFsrCqkUzI9pVEw2KxclmM6ZH19i35PJAhGowk/9wu/zHtvfRdDZo0uRj1GUw8vCHGkCfRatcKwM2IqK3q1VoFGaY3O2ZJqS4xVwYkYXISkapGNqlgkS6UC/cFFvgmdnJ2yv
bfNZDihJpnUnj+h2+2zfyD6bjqdEkY+vr+k2RT3yTIFSzcYjQYMJ/L9NyzSRKcmD4WkGUqaEiwzfKkkWakX+KPfu8vP/xuvsrcv+vPehw+oVlqMpcqgZbrUyuvCmyrPSIVYZgFNexZMicPxM6GIVTY+DP2PVbyMPKhYKdD5vo9lWXnwEwQBtmV8QiiiWW8QRRG2befVF8uyhEKg/E4QBEKgIY5JZGChmwZJmhIGAZo00PaWSyaTSW5aa8hAynGcnESs6zq1Wg3LsvJgYzwc4XkeirISwkjzKtTqmVfBSpplzGX/FQoFUU37WOVqFdg8E8xQgfQTB/skFUHT6nlXFTEle1bN8jyPLMtwXTcPgFb3CGT6V1efmR+v2hDHcX5oX3mQRVEEqpIHwr7v4zgOqqrm1SUhuBHlQa/neZimKYVGvLwPTEUR3D1ZUQrDMP9t8fsiCCiXy8+4XqngormuS1mKVSwWC5IkYSxJ6JqmEWep5HqJ9VRfFUU+xnMDyLJnc6zd7lAsOOgGOfJhMpmzXPh4XoSS+2NpuEWbckUcdgquhWMa1OstxgORjBuPxwRBRPu8h++LgMQyXXb2KtQb4l27c+cWpiUO5ytRjUePjrho9zg57jCbLmUfxxi6TbUmnnetWWZnd53tnVYuJkGm4Psx85nH0aE45Ha7A4aDab75xW6LJMnwvEXOb1zxnBQsskwEqccf3kdLVZYywLx15wU263Xxfq4CZmw8dUy55JJK83jTtOFjSRLXdimYBvP5nJIU7JlMhxQMi/ff+5B7D8R+XyhWII5QVqJMUYqmi/0kkJWW6TygtV7BdSOOH4sxdd0Sv/ALX+DkUKBLqsY2r9w54N69e/zj3xeckv/+7/7HdLp9jtriO3sHN+j2e7z17R+ia+LZX3npNQ6PPqRS2cGU5bvh5BzbVgl90eeb2xv0J09ZeifYhlgT6rVNgsBDUx1mS9GmxXKMomZsbe0DcPiozeZWnck04Kas9jy4e5+1jQILr8ynXhKH3De/9QPKpTqKJuZwrzulUi5yaaeZc0pHwxne0qa5VcwRA0kSUXZdMvkeDwdtvNmCy9d3eestwcepN5qAz0brEr2pSDqH2gXF5DpLOTft0pLZLEWzIEjFvuYvbFrrDXw/xJTr7nw6o1rc4fLVhny+x1za3SMMMnRjLOdiC0VL2Vi/xFQGvt3ujMHsPq4l9tCy28S2I06OZhxckYFa0MSwZxw/Puel20LVcLQ84eTJkJdeEWJSo/EFmqkxWXREYhZwzCaz2WOiJCEORWB46/qLeMFJfs5s1Pd5ePghB5euopkr7vIpaWZw+8V9zla8aHONi87DXFU18R3W1zfxFj7V+ioJ5GNqLdZaTo5CWV9vcvI4YHNPrBGds4DGuouXdrn7kah+Pr1/wk/95E/zYz8puHdXD15iOO3wJ3/2QzqykjQ6n/Hy7R8jiMa0u6JyvbZWRaXCdC64k1s7daLAYjwe0+uINeLFF18gjGdcnE+Ye+L9n81mLOcq+3uiGLBWSzl6ckixssWH9wU/7vJeg43NJklq5+iEze0i773dxXRl9dWNmM9nYC7wpWl5q/oqSTyn3xly45oYm3fe/TZOOUM1VsIbOpP5EPwqmirm50uv/BizScRyMiHOxFy86Ab8N//ZH/8VD65qVvbq6zLrYG2jKjZbWykTmfG2rIhCoYCibnB8Joz/ajWdgrHJo49E5qXREtK+42GAUpSGesMlrp5Rrfg4ctOxnTKZdkEwFy9GWS+gJRn3z47Yuik6G9/BtJ8ynxRxXTGhP/PFa9z71ozNPdHOQAvJlgHr5Rc47YjDXJg+RMlUut0ZMynTia5QrFTpdSXZUhpc9nozanUxKSJPkr4VNSfM65QwjTgnLX/qhTuEocnJ+btUHVGFi0OFzuARhmWjSLdof5pSrBYI52IyjeZTLu9u4SQOT0ai+qKmUxr1Jk+PRfawWingFgrouo5uif+bL6eUGi6u4zLqS/NffcLaWp1uR/TB9hUVy2yy3ajxzkcie3DRPuPq/hrjmcpsLBbSsmZRdyr0Y5HB7E1j0mDBRr3I2VhM1OkiRE10rJKaV90216tMewuqRZFVOXx6TMMuomsKviX619JiVMNhOZ3RKInDuJ9ldMcTMhl4t9YtqsVrzGbnTKRSn7dM2Vg7oNawWSzF2IwGPrVSC8OUsECjxmjawSooXHTFGBuqQbVcQ9EjLm+JTM6D+4foboAj5e+n44hZOGGRJjiKmGeVVkYQGyR+imXJ7HJhm0IRumfieaMoorVucefWC7z/jljYgsCnWKxQLreYTMQ4XMzO2aivYejiPu1hB8sqkPkhP/a5L8kxvcxv/NZ/zWfuXOX6poDuzWchk9GMYCE21PXWFi+8+BJf+9qfcDqUGUwrZrNU5PFHp2RFMc6NjSJ6FjGSsItwPEY3bHaubDPsiIWz7FQxLEdmmMWaMpkNiVGwXXEoW/oeuh7TKJVAfmc8nuK4dl4dGU0mlEoVTEvl9EzIILslFStrUavVmMhM5HwZUnBcFlIdzdAiprMRVsFAVeQBczjhvTfP+JV//yd5criSL9ao1ksMBqLSu9bY4v9m702eJMnuO7+P7+6x75H7UllrV/UGoEEsBECCoBlEipqhxNGMmUxmki4ySQfdpOscJDPpIjP9AxJ1kIYyiaQo0wxpHO7AEGw0Gt2N7q7uWrJyz4zI2CPcPXx3Hd7LKLSZqBMPONQ7VaVFuL9462/5/r7fRn2dPIfRSIyBaZrU6i2WfkxJRuGEE6CuyA8cx2E6HQsxY+kM/HxW5IYZTDgQ8UvGPVWFLF9lj0BAHKMo+kKGRlEEOcaNwXfDArjwPcoS6mJL6FsYBCsj/iYav4IlSga6YrG4YjBzHIcsFU6BIfueZQm+JKsAcU5pmrZyDm/6kCQJrud9QSD456nR4zhesSq+hPwJJr0bwgfRL2P12ZtxybIMx7JXfbdtG9u2CYJg5dyoqophGLiyQDkOI0k0kq++d0NkkSTJar5u2ARvMl5ZJpyfIAjI0xsj2yRN85UDWCqV8DxvJSZ8M66L8RjTdlYZxCwVfoCxgnQq6IqKbtn4cr3eZNxEhu6l4LJ45sv3B0FAlmUrwhVB6qFKBsZgtTbEeshXa0xTVNIsZn1dZALKFZu1tTV832UyEWeXuwjo94e4C8kI60cUC1AtV6hLJ2JtvcGtg21UVV2x2c2mLoPrGZ4Mjg0Hc5Z+RKlUodEQwZxGs0S9UWFnt7uCCqVpiuu6nJ0KA+zifMh0HDAZu6vxLFeKdLp1NjZbNFtS1LNSpNVq895PhOD7n793TKlYYT6fY0rEQpoticIEXbVJpXN1/snHmLrF1Bfn6W//039Eu1ql37/Cl0yHxGCbVRbzHvbNus4ywjzEKYhzytRswsCl5JQoVMX7vMClaBb59JNnPD8WBq2mO2gK5Ip09BWVNMnRdQtPMrslaYDv+1TKDdZ3xDxfvlD5T/+j/4CTJ/8SgLp2zd31CkmQ8l//z8Ko/89/rUJn64CFZLK0GkVyvcFiGQLinjk/G5JkEzLDY7P7bQCm
i2PULCOWEiBRalJspURxD8cQDtHl1Rmz2Yxq02StJQJ0qq4w858ym0lhc0fDsVrMFgvqFTFO/ZMlj97c4/y6x8aayDyUqi7HT8dsbYs7enqtsbPX5PD4OctAzEOpUGa2HKAYKlEoGXZ9n2CaYqjifd1WiTSCzJ6SazeSETphMEYzHBK5b+NUJ/ZGtJrC2Wlv1nny7CmqXqPelZT8yYzTFz7r3TaWDBoaGJweHrG/L2y8wewK2yhQtRuokkArSK8pldbRrIhqWWaOvIzA16g2pTNnrTManVCwalxfC9ulUu7gWBoPDl4niGQWLKmj63NK0ia4Hp1yOTik2biNbop9PB646LmJaRXZPRDB+GFvgaYtGVyJPv3q977Oef85/fM+liUcQ7MYYTk6F70rDg/FHXl783s8eKPBZx99IMazs8tHP3uP+/fvUm+KsyRaFkiYiLNIloj0hx63Hzzk6Fysu0qlwv6tOlfHC0wJjbx16y5PPv6UIBTvX8RPub/1NoVGzE8+FPZ4rRqRxWusbzSZjl+y15qGQ6UqCZ9Mj5NDl7VNk+dHYm+vNd9m6algXK3Os+H4hPOzHrdeE3DUh/uP6F9c0xucYkpik069xNZGmyhxUE3xvh/94JD1ncaKVl4zYDiY0m5trdgY++MhhmoRzG3e+Ypgz3z69Cn+ckKtJdZ571LHrifM+j2aZXGejsdjDE1HU5ZUKiLrt4hy/tv/6vf/XufqFaHFq/aqvWqv2qv2qr1qr9qr9qq9aq/aP0D7hchc1epO/mu/JaFuixR/GaKZLjv74m+VusL5sct6t7PCjwb+kMvLMdtbwmk8Oz/HLihkiUOWS4rzQsSgH1Ar1ggnIvJpmw67dyoMJyKq0mo4rG91WcQvcGyRSlfje6DMmV/nNOoimpXYL5iOyqzviM8Mhpd02tt43oJNGS08O31GHNZZBH3Oe4LaMgktTKtORUK6nn5+yu7uPr3BEF3Ss89nS3Q1JUsy5lOpU9CuEgYZC5la39/fRcmWZEFEFokIjR+FlNsF5t6cVGqgFJw1vFkPuyWePRkuqOgGY29JRRYtB5mLrRbxJCSgWbMoGhYnJ2NqXfGZ0JtRL6vMCylGKCJJX/3ym/jTEYoUX5sqS7JBgatZn0pNpsiJ2G1t8MGzPuWKiKLv2S2mGVzJuiVdaTKbXqNqIyxdRLzG4xFe4NFsthlJkU3PnfLa5j6eJ3ULOGIysFAKKUYudXZUFbusEiUxjhTjTJKENFWYyWLgvdtbDK5jqjWHc0mgYbom1VqBku0wD8X7duobXM76LMcSklO2iZIAzShQlJHPWRzhqgFv79ymIJYih5fXZIWXkfo0W6JrNsPEpVQUEbA8jEmyBaphoiFqmebjC954sM/5sRjP7g4sFxm3HtyiYot5OD45RFUbGOqEUEJbuzsmJycnhKGEYmll8hwm0+GKan5jfYujq2fslCzSUMxNZhjsblYIR6Ljtcomaa4SZHNOX4jfvEyWNNYLuO413kD8Zs3KaG/s0T8X83cxOOL2/pfZv2XQPxMRoVLLIUk9OpVtzo9FbVZWMBmeXEMiondjY8HO7gFt2+RERtw67TukTAh98Vtm45hqrUSYDPCWIpNUK2+RphbecryiHi9XTQoFeyViWm9VMU2TcNnj4JbAudtmm//hv/ldfvOfPSCWmP1SscnMPaYgs5yaWuTqskexYq1qZ8rFDrVqG9/3qVbFXKVpipJrJDIhHcdLNEtEfINA1sIkIZVKAZWXlNjewscyK6iqpEH2QnQ9pVZqkKayRkjJsMwi/nIOEtZh6mXy9CV1OEAYxOTqS42pPJdZFjX/OShdKHWtXma8LEsQYdzAArMkJU9SHMsmlcK0aZKv6jsAVMNcQfFummVZqJpGoVBiNBJZ6EqpgmHoNCT5UMGxGI/HmKa+ok8XOl6VVTZOjJ8QMc5fvpIgDqQelxg70zRX8MKbeirX91aZGxDZrYJTEtmrlXCrSkpOFMer3yzeGX+hLitPxfdvxk7XTJbLJaWyFIqWNWlOwVrpTvm+vxJmTuX70lSQgdwQQOiaSaVSYe7OVs9eIQN0XVDqI7JZURStCCssQ0gCJGGy+p4fBiDn/IYKPcsylPxljdfNeGXkq4zifLqgUilRLTsUZQZ6Y2MNXVdXZwSoDK/HPHnyZFXzuFh4xFFKpVKh0xHR9Ga7QbtVWNWU1uplptMxea4wGoo5Pj/rMZ97DK+9le5Us1XDtnW2tgXSotOtoWkqxZLDTGpfnR9fEi6hP7hmMRPrTVcz7IpNJgVnZ0FOnmuEgYsp6zeCzCVIQxyjiD8XmaSrJycYxktimre+/AblikOpVKQoa0OjKCAnI4nTFUokWCbourEiabmREyDXyZWX+6harXJ6esrz5+JuvxHFzmUmKcsTLF0nywPyWN6H+QJdV1ksPCryLKl2I7719jeZP/shAC37Ck2pcna54A8/EOv1Wwc604lKKPfnoy9vU2nsEOQhe3ckxPnZc1qtBr2r0UvIqJLizmB7tyPX65xmu8TFYEBVaicm8Yz5fI47V7AktPTh67c5O71gGYn57HTWWMxA1yNuyWzP5cUhk/GCBwffYjw7FnOlm8zCAEXCNL/0zq/z0Y9/wGg6pi2h31PfJ8t8/IVP0RJj0Oufs71+b2WDzCYReZLy2ls7LJYSajZ2McwCXjjD88S47O7c4Xp4Rirp2lMf3rr3iOPLJ0SS5Oq6f0693sSqFrAkQuI6XuApc+qJuI832jvYzRqBe4aeSd0iVWMyuyTVXJybWrBkjhZ0acram4g54bzGP/7tf4sXz8UYfPrZu+zuNvBnZSpSCmW69LHUiMgV73vzS/v87LM/Z2N9H0tKdB49O6Naq3P39peJZH3q46cfsfDHvPXwt8RaqS/50z/+I8ySzd17bwNwdv4Cw4QoHZH4Yp298dZXUIwlP/trcfdalSFKVieMFmzvinlIIw3HVtGyGudDsWcqVZtuZxN3IVEw/XMO7m8SLOOV8G6t1mDpTXnzzTdFvw9PUJQcxypwfCzGYG2jziefPKVa3uBXv/slAP74T/6StbU16lVhy+zv7/L08MdkOBwdC+inmlforpcYj65XNbrNRoetzTV+9Ld/BsD25tfIzQvmE7h7S5wl87HP1laLMEo56ws7od1Z5/TimJaUALoejsnzCZq5IHbFWTLwjtjY2KBgrXFyJLJ13fo2rnfGTMq1hJGwJweDHm++9isAHDwwefr4BY3aDu2GeP7+Pfjq/X/+iw0LbDSc/BvfFDUXhq2hmQqa7a4u0PX1NrqhYDktDg8F5jIJUnSjymwhReGaVUzTwp25+KnY6Avfw9BKzAceqUx3l+0K27sVWl2x6BNCnjw9odLyUZA6As1N2u0y7WqX2Uhs4vPZB9y/f1ekMIFue5/L/icEizaVstSimE3IUDg++YxyVSzo0fSaeqvBdCyssoU7xfd9VEPHlw5RnpmgLkh8nVzqQIhCWI9UXhSFosp6Z53peIFjStYYq0yt0eWTxz9DlSrYtlYkyVKyTNY75AkF08DUmyAhFReDE7Q4oNE
VB91wPgV8LG2HUkP0c9KfsVZq0Et63JW1RLqxIHDnVIqSVUmdU9NrpFnAbCwuy929HY7Px1wNL9joCmKBslPA910GUh+kVu2SxNDrn5Hn4sBvdmpcD8eMpyMcuSaLtRLBNCd3ZI0SRUwlZjrx6BRF38uFJmN3SMyUekdsIH85o1iucXos+lQsOUTLEdVmE9+TxrhaZ6qHKGlCVV68ncomWwc6z34moG6T+ZTMKpPlCsWiuAQGZwGJarNXKtNfCCd3bc0mDH3efCQOlf5hn0/PPyWu6KiSmatUKhDEU3S9Ru9MbOJvfukdPHfK1oFImx8dD1BVh7Vmjd5Q9L25obAYRyi6RiwNkkq5hRv0QMICq5UOWWYTxzGDgcA7K2pEc61OMXXIUrEWK+0i/vicR7uyn4Mei+SCgrHN3Bdwyb99/1P2H21RLlU5+lgY0K+/fsCTZ6eUyrLmqlvFd1XeeLCPIWGPT54/Jc8EeYota67yOKbY6OBPxIEfhDGR6sAso7Uhvtfr9chJWF/bA8BzffIsZT6fU5SQjvHIZX2jzXV/TEFCYssVh0KJn4Pbqli2jqGWV+Qja519fu93/4jf+U++iooY4/FwiW7GxKmYu0a9ievNWEZzKpJpyV0IuFpORF2u9TgOMUwFVzJQFpwy5UqLZRjQ60tIRamEbdUwtRLZDSsKGaqWY1vi/YqiESdL8jhfQaOKJYfpbISuvazN8n0flIyGDIjcMABmCqvPuK6LruvYBYc0vmGUSyQEL1m9vyChKVEo4WeaTqVUhp+D7kVhgrv0iSVELQgFiUIcxysjNEoFeUWt2lg5omkWYpo6RUfuoW6TjbUuYeSvYGztdlsShCQrrP91f8zCXa4cEj+QDgsRSSTWtdCz0leEGACaIVgab+qNkjjDsoRjbN6QZSwD/DDAdhySVPSzVCqt2PoA4kjALEsloVUEkEhSjBt/7+Y9N6QWN32ynJeEFyDqBIMgWDlgy+WSNMlRgKrUDQyCYFWbdVNnFobCwdSMlzVRlVIZf+GuHKbZbIZhGPz8PX0Dn7x5zs/rct3ACZeBh6FbLD2PpWQ6E78xolgUY1epVKhVC9TqFRoN4Ry3Wi2yLOPoxRmDgdj/h4cvSCID1xXOT7FYpNVq0WiW2dsX+/Hg9ha1eoEgiDiS7Gvj0YLpxGMgHbAoTAiCSMA9C+I+3t7cot3uEsY+sZwbx7KZLT0+/EzsY0WvkOWKqJeVWMw480jVDFtzuD4XdUnTixG6ZZLLetzf+Z3fwXXnuN6cG+6BIFjKYIe6+ly1VhzdEYcAACAASURBVCaKlisHTOxZhdkkXP1msc4cnj87YjS+cY5NNNUglro6WRJRrhTQtYShhNLXK2VAJUkybEOeQfNzvvXLb1Ipit9HqDIdNGiutflffk8It37rV79Op9Wk1RHfabZUskxlOOzhypKDKIxpd1qYenlFhJWkOuPRnGpFnBvrGy0+e/wJWmnJYi7ZdAs1ND1ltrim4Ih5N/QC5WqVqWSktWzwlxNmk5z7916XnxF1ocu5ThiLzwVJznAwYX1bQOSvLl+QZbDWWcOQdaCVepv5OCaKe2hSlHkZzNEVm7msN0TV6LY7+F6Caoj9f9m7wi6UCIKIjS2xzjTT4/OjT9Ek2cp65T7dps0P/+5dDiRh14ujZzx48y6zXkTJEPvv8/PPaW21UWLxvnp1gyBJmQyG6NJWMsyALLGwi2ViRYq5xw5RMGRTiiZv7W+gaRbTQcB3v/d1AP7sT/+UYFak1tS490A4vjpFnh89pm6LBEGxOiZWLGaTlNcfiL/95IOf8O1vf5e51+Nv/kKUAXzjV/f50Y8+5rV7AuZ5cXHC3QcmZ5cXZKlYC3Fc5dvfvcMf/F//OxVD1EFt7BjM/Rm6rP9bThVu3+pwejigVRe27sj7FKfQZT7XsRUZYG3tozoLNAm3mwwnGHmJZttCooAFLDlTaTSEzUWeYxpF6vU6T56KvaepJophU7QqVCTT8Seffc6jN7dYSAHfPC2hWTnXvRNSeZ4Hvkq5ljKe9MklsY/vJbz15kNadREg+Dd/+wGWXSBOfDrtmhyDa2ztgHe+fov/54//FQCt1ha2VWUqNcqurwKarSpPnzxesRFP5wuSPCL0izy4L2CyWSrqn10ZWBhNAva37/PVL3+PTleM+b/4vf8J21RxigpBJM5FJXP4z/7D//UXW0Q4R8Fbiq7Ui0XQRiR5QkkyqMyDHslcYzbuY0gmObtsE4Yey1gcdHbkMBjO2NioQSgO6bmXkrGgWMvYfyAcBG+yoDc5RZeY2mqtw/YtB7tgciKFfy8GnzKbt2n/0gaZLlbY3t4jrsdHaKlYYOPxGE0x2d2rrbDozHMWixGluo4uCzfXNiocvRize0ssCu9YodnqMhzNSHNhZCeJgqE10PSM198Um+Xw6AVW0aOkCAcFLSZLDZxCiVwe5uWyhj8bsLPe5lqy4HmTJZ29OrEvjCRvarDMQ7bXHY6uZN2QFbLWqDOXtTeb6zssgxmdmsNQRgG+9uYtnjw547U7tzn/XBiPB68XMIwUXTIolR2LZqPEp+9NufOaOFxPL0/JsWm0TcaeiEAltNCzCpXWjXCrzuhsyNbmASN5gWfBkoppUO40iabicJ1NXOJIIZXWjqoHxEqEmmS89UhgrnOlSvIiZTjzOT8VF9rerS7Fkompiw28mM0oNm2iRUaOOHyuvTn1epUg88gkOvbks1MODr7O2bmIqrSaayyNGWW1yURest32AYaT4fkuMoiK7/voJCxkND/MckIlxgxVkNHtZeyytr3PIuixtyU2bDSf8vZb9/jJx0INfrIYsLG2Ta1scnou1vX5xYBiwcBfgCWjoRWnxJMnV9Sa4jmNkopKznQ2xVKE4bS1eYcoDdBICDNJqazkqEqAKtmmgmRBc61DMHV5601xPjx689f56dOfcHH1nGZXjNVgMAM9pVKTdSeBRhIGuPOcjjTKJmMX151imyZfq4vPtRsmP5oe4d2wnA2XFNYs1tsleiOxFouVIsPhlItLse666xUUVSXN7VXtxvZugyyAB/fuMF2IA7DXP6YebxBL53VtrU28rDKZTCiXxbhkqhhD1xvhe8KQKZZaoERkcg37SxvbKqLpOSCMljz3IDVp1juEvqjNipKMZZisSAyCaIlVdPCjOal6IwZeYDl3UQqgKWKtG6aGU9CIQ2HIJVmAu/Ro1uq4Cyk+asH14IyD/Udk0ukL42uq5RZzKYxtWRbuQmRNbrJpBatAFEVkiYLviTPPtm20XKEqhakNQ8PzFkyn05XzkZHjuy66qhFkN0QNMXHy0tmqVsRa0xRVCB8j674UhfliuqpLCqOExXzJVSzGqd+b8OzJOc1mfUUmEQYLgvDqC86NqugUCgUMGfDp9wcii6OoKwdPUXV0KRJsGGI8sywjzWIWMtCgqipOsYCjOVjmS6fHShxMyyLL5SUeBKRJjvwpVCrCwPY8b+XI3Igdr0g30lQ4VslLwo6SU8APIizbwHfl/Gkm1XYNT56nqqJjF01Mw1hl+GzbXokd3ziGqqqKbK
D+sg4sTiMyLWcpRWgrlRKJJPEolsR4+r5PLgkzQDhzhqbhuS6RrDEr1SzUPKTZKtBdEwZfuVxGVXRGI1mjdH7Jxfk1H334+UoQuVgsYtsma+stWtKQ2d37JqVydTWW0+mUF4fH9AanHL4QtbZ/9ZcFNNWk0SyzvSNqEtY3tmk2Ex69IdZUp9tiPp8zGo24lHfRxUWP999/ih8EKya3omOSmTl2SRg/ruuT5apkiUxXc6UoCrqWE0s0i6IrqCooN1kcVRFG+9rWqs4tiiLiyF/9DoDRaESWZbhyPnXDwDRNSmWDTlca9ZpBvdZmOrvm8ko4j+12F9edoWgvhbGzJEfVi5Qla6umWVimQ5qm6CWxZrfr+wRxkTASe7RSrXDr1j2WMosD8Nv/7q/w+ecfMpsJpMV8kuO5IhNrSSrATrtOr39GsVRnKgmlbu09ZLlUuOoLZ0vRAjrdbabLc4pFKR7dv2ZvfwsnsTAk06Gl1zD0mMCThFp2k0rJZnod8+wzERw7uL3Fi+efsvSTFbHI2m6bVsvk5FwwKG50yriBynw+WRE3DGeneJMmnbUUQwYSBlcJtZpNZ108ZzpZcHE5ZH2zxGwm9n+zW2O+8EhSKJTE3z7//JyGs053S2YBqx1G7jHFTg1FFfuhaLTxpyFJqtCPRKA0ST1u77zOj38sGPemQ8gzHys3WOZiPHfud+n3piRhQJiKuVJICbyMmSHmszoPWMxd1jpN3vuxqG/a37tPo9FkMr5gLIOiKS6+1qdqit/3wUcvKK9vs71bRJdn3nd+49f4l7/3F6C7zD25Fq83+KV3vsxyKTOWb7d5/PgTxv2Y9rpwmN98+w5PnlxRLx+QJWIfqeoGpg6nJ2Let9YbnBye8/DeHdJE3INJsk+OSrW2QAvE3j4/e0ao+Nw5ELV3WpqiphnTUYBuv8yGaprDxYW055KEuwcPOTs7Q5WkM45dJs1moPkMrsX31rsmL54+ZnQtDKVGa4qu2jSaHU5PhU1Zq7aZzc8wtBKpJFwzzJxnzy7wWlKSx8pZ36wyGmpkMpum6CWuhme8/zMfWY7LxeUJeWpjF2X99uI5jeZDbt/ZxpOB8Ht7D/DDOa4/WzE0xhHUmmUGU3GWheE6b7/2m+xsrfH+h38OQBSYNOoGU+/TlRTGbCECDH9f+4VwrpIkIdXFYI/mZYolm3pLx5cLTNUNGtU1En9KIuk2TaWFXbRZzMX3phOfUtEiWCbMrsUBQZxj2Q6LYEJfUoVaZoG9R/dWEc2LcxcUleOTAWvb8rIK5phFiydHT9EQC7pd10iNKbOJuIT2D4oMhwOyXGU0EYZFsVbDjxT8mU6tLem2Y5N7d3fIFXH41UsVLk49MHxKknZ5MAsp1MQC/tsfigLBZttA1W38VPwWJbZYeGN0I2drTURRRqMB5XKVUqHOYCmMx+5GizxVKUtu1qvzx9TbFcYzd0WOMTMymjloioi0htkcs5hxdnkC0vjR8jZB0Cf1VB7dEbSrQXJJqeDQKUpHajzk86eHPHr0AENSDj//8U/5/je/xdnAYRKJufEmIcX6HCO7oboc0W5WiCKPXJVsYWj4UUK93qDbFs9qBxHX/QuWS0n7rMwIlwm7u9u8/4k42KxSGc0sYVYcarbMFg6G2GaThiQ2yUopqa5hl9bxA2FY+NmcRBcZhaNTsTbI65wP+6zfF99zRz7VAmj+nHAgI97rCwy7yunjK/7t73xTrKHBkuH0nJ8eiegTjsZGp0lNqRAWxEV+fjpAVSwMrUOmC0ei2Ix5fnxIbyAOxHfe7jI4jtC3HBpFYcw5lQ6jxYxbrQK1mtR9Oxpze+8hgZyrPLUZT68olXR0OacqGp3GDvPxGb0TccEYpo1DnaMbIpNui9lsDGlKo7wu5yGmZg+hUeTWpoAB9PsL7ryh4V3KzG7rNmE05+piyJfeENG7ZqNDuaoym3rUA6lPNVdoNpoEmYyw1zxyY53IVDEbYn8EbkaqKZxcCqhNufEaearh+imhNIIMY4mVNbB0h0Relo2mQhxMcSyxZ0ksrk6vuH3/AQmSWUpGKQ9uvca1DBqoqo5hlyGXunCHhzSaNXzfp2BJUp1codVskucJuey7kkOWqCsIkKZDrydJXGRQZmGEtFotomSKYUgtsayMH0AqYayuN2Fv94Dh9TU1CZdwvQHFkkkQzlfwjFq9iK68JMu4gSBZpsl8ehMksVEUhcV8tHIIHNPCsHX6VyJ6V6mU0HSFRw8fstaVRDQbG0wnc6GnJGFyM5l9eSGN5eHQQ9d1KvUGCpK1rZSSJBlesFyhCtJMOH5NqQeSpime5+Oe9TAlC5fIfuWQZdRqYr5MSwONlyQXaoZT1PHceJV9uSHQcBxn5QB5nodjF7HlXJmmiapr5CkES7kfSFE1jTTPVk60adgkSrIap5s+maa5cpwURaFQKKwyQo5jMRtP0HUdR8I8syxCV8E2TIrSSIrjGF3TSSx58SuR0ARLopV2kqZpuK6LItkUAeqNBrPZjKIjmWSzjCRLMJvmCnKoKyrX/T6GbjCairOrVqthWQ6BDOq1OhXc+Qzr5zS7bijW59MlVxfHYr0YBoWCRbMl7oY7d+5QKho4jsPJich4z6YeSZxzfnbNxx8J6K6qaqBoNFvi/NnZ3WBn5xZvvV1Cl5pruq6S5zlnp0POTsX3njx5TBDEAnIKaLpCo1Gn223TkMXjd771NqblMPWnjCQ0cTpxWYQB17MbB0WTc5KvYLO5kqFkClkUEPuBXAs6SZahyML4i4sLkiT5AjNnlmUUbIs0TSlIOm/d1L7wGcFMGBD6Jqee2EdBuGRjw4XcRCJumUwmGIZBLO9oNQfPT0hjg7ffFmen7yX85P0fY9smjiHWZ6uqM50tMDKxhqfTAdEM7J+D5U4HF8wn5ytobq1ZplOrc3beW73/86MrbKNCxpjZXHz3ejhle+tNhuO/BiBKIi7PT3FqIvgDsHeryGBwTr28RUHemcPxBdO5R7cjUAfFYouz8zmOY3Hv3l0Aer0rDN3GqlosXGGwj64XLF0PT9pqftGmWKrTH09XRDiakVKoTgiDEjpizF9/q8XVZY+B1J2cz0KisEIn28ELxRmfxgsKxQKeH3ByIuwnp9AgwUeXQe7TTy9ZJhl1u4LkNWBzbZNit0HuufzgvccANMp1Tj9/QS4dqdpGgfFlSLFURsnFs48PL7nuedy5+4C92yK7ZFgKvasptnRonz87pVpzmM4qbG3KAJalEqczZvMeR8/EfO3uNlGTDgevPRLjaZnkxTI7+w3+7i/fA2B7f487D9d4fvSCSlUQhCzCM9zTNrdlFq7VrpLeLXJZuKC7KRy1H7/7hFKpwVfe+RrDidB0Ojs6w3U9NrpiPrWkS2M94GJwQlXKjtx/eIvnL864PktY3xJ3+Z3uNu/+8Anmhnj2+votnj47ouLscuuOCOyPxn0CL1ghH3Z371AoW1wPA2o1kRHq96+wdRtNM1A0eXZRhSyi0ZaICdund+rS2dhGctWxudag2sj54d+8t8r8P7h/j35vysVA2FP16hqKohKGSzYkf
fpP3uujqDnFSYmtLRGESSKN8eITbOMWAN3GPc4vn7G/c0Bqib1drMQkM5Vb621G1zKjr2jkSoDkjsHRi9SbEdfXTymXxLnfqHdYLn12tt/i3XcFXPHh/a8Bf8Xf114RWrxqr9qr9qq9aq/aq/aqvWqv2qv2qv0DtF+Imqtqzcq/+xvC+1TQGfR99m83V5or5AZpYgioiYz2fvzpExRNXSnEz2YzTNvBtAwSX0R2S1WHha/gRy6pjEAXpMpavyeiXTVV4cGb6+RZhfNLkfbUHY8ck3rHJsklXGnD4fx5iCGFDVuVLvVGmVbtAbokePj48ac011TyqEKrLjzsKIjo93s8eSKgZtOJz/b2JrZT48MPZEHdRpMgCEUNhCG8/GUY0F4vMJ2Ifs/6PhWnRHezxY2LHXkTKs0qz876VGsisrLwEjrN6qqWIY4y2u028yAhkPSwvr6k5DlsSBzxyeyCVBO8/tW68NSn45DvffchBa2LlkmxLSvF9XsomYhIO+U2F72MPAxX3P9Pzubc2uwSpw6fH4mo0U7DJFYbuEsR1VkuMnZ2NhiPe3gSimFbdcIoIifGkmldNXYY9fvcvy2iOs+eXFGoKRQLDQ6PXsixa2PYCgtvSSbx+AXLIlpmGFJjajq7prrRwtI7DPoCvqDmAZ16nfnUY397S45dgaeHP2X9jpjj6aJKuox58LBJOBTzcv/eNv/mz09pr1e4d1dESI6OLzg9eUEm0+iKHaIGGanlYEja9SxQKVWrhElEkklRXVPBd1066yJaaKsJ81lIda1OIGlsbc2mXNwhcF00U8KChmNU4yXZga7ZQhdHCUGRRf0oxHlMt1ZlNhN/a3bXsFWdUNZglGoGjqUSxSaG3GuWXmQe9Nndew3LFN/rX1+yTHOqplwvR8fs7W9xcPs+07mIeH/2+AjNivD9gGefHQPw8PZrnLseI08KXOsR8bzM63cOiBBj8NHxR6x3b3F1JvZZngc0Kh0MRWfpi6jmemcPS1EpVgwuLyey7w6eP0ZNavIzu1z1zrFtm9MLsdfeeecd/sf//v/kP/4vfpPZXDy/01nj+PgET9ZOmaZOuVxmsVishHGLToEwjCk55VXt0jIMKZerhJH4f06MomYEXoa3EGujVLXQjRzNMJjOxPvqtTago8po3nS2oFqoUCmt4cs+BNEIRdEoF1urPli2Rhrrq0yLoZl43pJKpbLStJrNZpimRbVe+wIN+Xg4WlHWdtttut0u9XoV46YMM88xTYNSqUSxJPZIGIakabx6zmCwZD5bMF24KwKGHJXJZIJmGi9JFHKRVbgh0FCUnCxPMDQdkIKvWUapVMH3/VXGq1QqoKoqtiShWXiuIKXIlJfinIisUBiGq7qim/qqm3ZDaZ/nOarMsMVJCIqCYZmQvayVAlbCu77vr7Subp7teUupQyWFVAOfaqmIbRk4jr16p2NZqKrKaDKVnwvwvOUKrjV3FyiKwnz+snYqiCOcQuELgtKWZZEkyarWKE4EkYVuGqvs3Ww2I4liyuXyF7S0wjBcCaIXHAfIKFg2S++m/iegVq+iadqKxj6JU4IgWRH9+H4AuUa5UqBccWSfNNqdGpVqEV3eda43x3cVfE+M4eB6wmzmslwuV9mJZrNBpVKi0XJWdWbNZh1FzVdrYzFf0utd0+9dMx6KMVj6LrbjUGuVVzCdjfVNBrMZnx8tZJ9ipgtXQgFfzr2maWT+jKPHInqvmpak2BfIh69+9avkqiIykpJFIEkS0viGtELW2mUxURSs9pqu6yiKQrHkrNaLoijEkagFvdF4m06nLBYLVFuKQo9iQi+GPKZck9nQtMk/+Sf/HpU6zEYCRvT483ep1zcYynORbEQyzykVa/zRH34IwHe+v0eWJVQkFXylrrJ0DcgtLs7FXWvZ0G62qLeqnJyIszJHiDmP5+I5aaiSLddorpnsbO8B8JP3f4yiKGiKzv6euMNUVchMrMm7aDZ18dwY3Q7odEQGYzIZMZ+7xIFJEAukTODHFCpVInlfmYUqkT+j2mgyGQg7rNyoQWLjL5YEofjN6xs1Bj0TpyIlHawho0lGtdzBsG8ouUciG5lGSBUJnGKGbTdo10Sf/PEQz8vo7lexJIX70WmfWR5wfHiFVhBzut0uYCY2YSbWhhf06Bba7Gw7IOngt9feIEmXPH12zDIRe2u5dHj41gbPDsWd0m1XGA77KHkZiYwkCw2RYYoVElmLtrPX5ZMPhnzna78NwCL+BKuik4Y1dveEbdiq13j8/BOeP5vxm/9IoIMOn/WYDC6xJIT7V371+yyjKc8Pn/HR478Ta6H0kM6WSRJPqBSEPdw7H0BuYMpatN5lj0qpSKn4Epxm6AUarSZnZxfUO+Jz/SMXNbK4eyDG0yhqaHqJdmeDwVBkEE/Pz6hWy6SJ6FOeQbVWEJqUUituMhmgWybV1hquJCRplC1mswVxJgZqMrukWnRI/IAokfXblX1a7SrHJy/oSA6AwF+S52BI+N3VxTntTp1hL4RM7Md7D+vMRha39nZIpRbVwhvw+PEntBoCWZWTkbHAUQ+491Bk9H764SfYZoEwmqNrol9JPqBcaXJxIp7z3e/8Y157o8zRi3Msee7/xZ//jIn3EfNJxp3bYqzOT3z+u3/+L36xa640XafXF5NUb5gYhZjDowGGFE2r1lVubX2J7e1d3vvJ3wBg2BGqlmJLIoc8c/BcHS9JMWUKcDAdoqtdvHlIXUIhlEwhDCeUJBa2UE7p95aYxksNG0uPGY8X+LHNdCwG/Oxoyd0727z1SEDBPvnwOVGY8eHHH6yM5VsHOzhOE7ue8dF7AqccRVPKJZWvvC3G/+OPjrk661Nrx3z31wTOdTINeP78mGKxyFyyGNabHebDiPlcHJpba21srQyJxnAoYGy3d3Y4PLtANXTyTBre8ZwwnaHbsuA8rnD0ogeVhJasmbFMg2yZMJqITVArlTD0IkapArq4iL/1/df5ux8dooUzXntT1ggoI9KsjD+5YfMqMp88x8tc1gpiPI1M5bOrC251Dlh3BMtQzDmmYXB+LAzOjfZtxrOIJNFfwqfsAkoeEKc+S19c2FYO9XabmS+MGNVQMCwdRU0xbLE2tNRmOujR7LSFMQXMZy6zuc9rr4v3L8IlnhsSzK+IpQit3bIJPJfFMsCpiwOqWqtxeqEjUR5UqkWM9SGBPebR2+JZ7/7JhxQjk3/67/z7/O7/JnRK7j2sUWzc55PHEtqGQ15TmS9nVIvi2UkOZ0+Puf/2HoORrIuw69haREGucxYFyvUZiR+gSzhqo9REXSaYjsloKC+0JKDo2CwkOYDn5Rh6AUVh5UA3GnXUvMh04tNuycPgtEelVKJQuNHecHH7IY9e/w4VCaEM6RFGGVfTPjVZ41EorHF9fMrcEBfMxt4WcRry45/8hM0NcVHsbu7QHz6hXHS4/Y7UgViG3K5ts3gs1qu3TLFVOLocM3Y/F2PVLHJ+NkSXcDvLzhmPr6kWG5RKAiaQ5zlBEuENhxSKch9jEHoZ9++K2rssSWl3qvhhj298Q+hXLOY3DKEaSUGc
N4d/sKTRuj9gp0Oof1LaY5YV9tUNWOus1wOh3bB8+bkRcFimrg2Cb3qx1tW5LnBaPRkKqELI/wvQmm4aAoBlVbk+cVwWCEYRn4voXrqbx6eyOgt7pGR4FuqJR5RlVlzBcnRGnL0ckRTW7T9w6O5xAme/abVCS7LZO6TXj97pqyC4mzEN+yaasaxzYxdZ3Xb98xHHtstwVhWpCXYo9yHZUkjUBt6KmJwhhnaNLlGlUJuudxujjCtVzyvKRr4fjoEZrWcXQ8YbsJaRuF1V2J61u0uU9TmsyPbb7//gcWC48o3KMqkCQxs/kpRVOwz64o+5DJ7Jjb2z3/6n//ht/85oazJxNur3POzo/QlR7H1Dgc4PEnE6qq5GRyhmO71GXFarVhGEzpGoVwHfFP/9k/49Ov5owHHkeLBa59TNWmVEUq/K/qmYx0/uZfvbr7i7/4i//xP3av+YMgtOh7yFJxQ7y73WAaFW7QYNiicmWZM/whaNqIOBUYyOHIYXWXPqgsX75b0Xca88WY6UBkD37e/sj//eu/AUXDFR/DVH1SdnSyIVPxXAbeI3ZZgeuKTMtscYypOgxGYFjvM2wnmJaKJbWaXr76iUOSsTqsWEuxXG9o4g9NdocD07GoQGVJyGG/J5UNrsNgxCcff8TPL1c0tbgpV1WN547pa/VBW6RIa2pPwZekDMv1NeP5hKZROToSmQh76PHTzy/I9YRGk2KVfU1VpMg2IqYDC703mIyOySVBwOWPW47ODK6vZTPpwGCdXFE35YNqfIPCIXzH6ek5ti3ZXsIY12+I7+Q8HUNe7GkUlU0ksum2OqDdtnT9DFUyZWmdTrhfoutStNieEaYpg8BlOhEVvtUqwvR16irD86W+UpfhOmPut4Jkw/E0itpFNxwCmaBtUDFMG6eHRGqLqU0nWO8knjwIPLIsYTCxMXWR1dhFPVmRsU8iHFk92x3uWcdLul78u+sUXH9E0k9I5O8ztA7fDlANg0xmhIsq5833V+ju+8ykznJ1RWAZWIFs7m5znj7+jDQK2O5E5iOYLHA9g30qsmJ93aApOtEuZijnJe9CkibFN1z6SthivM14dL5ARWbA0xcsnnhE4RrbkVUjxcfQx2RZ9oCV1lUP09UoJclGXYCr9xRFhSPx41EYMhgErA8bSqmBso6W+JOOW1kt/PjRV1zf/oQ70Xi7Ffo/R/Mv6NKCd8tXPP/kl8Jmr1PWqz0j2WztBR5VUzCbLaiHYu5evnzJn/7nz/ji05uHNV4nLa2SMJZsXn2nYecuX3x5QlML1qj18i0T6yMGErN//OSOZuth+zBdiIxX/O6Wo+Nztrs9C4lhr6KIwoFwKbLb3qTh+PiUJCxQpY6XoRq0bcYvf/Ul3/1e6IYoqo6maCSxJL2wa6I4x3MmOHIdHl98we3NmqJTGEtmN5WQLCsfbMM1oTR7tELB80RF9m71gs3NmvF8QSxFysMyw3F7DNkUrjQGlq6xXG2YH4vPResQQ28pixhHZnYPYchsvCCLhC8kZYU37ugLnc2tsGHFMkhDm5ntsI/EmjbWkNUuQWtkpd6x+fHyBcOxQSTJFTw6hv6cLEv54tkTYddFgWZVjERIwg9cijxnGDxhu34LwOTIgrzidPqM5b3IuruBz8nzM0rJ+mWZC2xDx/FsHp2JjPB6dc/bq1dMTsZkpdQlo2cwWTCSMQIU4oPGYnrEXpKk3N9taNWQ+fyY4UQyq/oGjj7n+p3IXB8dOXz+2Zf85ptvHvTxjtyAvEn49tfCpntDY3244ez4CYlEUXz+xZ+wjmK++/G3nDwS8WXk1ex2FQvZD/T27gWPP16Qxy1xJrK9fdsTeAO8AKpakhtZIxQM7rdiDdq2pS4LurqjkGLyumay2x1o6o7jY2HDf/mXf82nn33JSAq5LldX4tKjWQ9Cw22rMBlP0TRFHHiQlbJewZBEGIKw4h8qTSCqS33fi/5WabO+7xPH6UOVSpEXJcMwHjS7uq7DMCyqqnrouYjiGM8LHogxmrpGV3W6rn2oUlm2wWZ7L7RzJBnH9v6SONliS+KN4XBCFCmUBewkAcvJ4imW0REedlTZ+/1ogqHrLBbCNpP4QJHXaJr2UPlI0pK0zHh6esL9nbDPvCkwVZ9C9vUZtoLl+dxd3eBIBtrR2OVQbDF9l9FUVPmrtMCuWzJZmTs7eUbdl6RpjTsQ1S1VM2i6nkfnn5HE4vuK3MFWXXoZNw7RCsccUlU8iNcmdYatdQSqWANr0FNXNYG+4NCI/WK9rnn8dEqadBhyw4+Le45PTh5EoZsahqOa/b5lIrX3HKUlK3fcv75mMBTx5aOnn7Na3WOp79kfMzy/xbEDpAY1690ltjnl5HxEKPu1ixL8wMczxtJeHVQ9AzxSqfVlYpJlOVUd0/Zi3VebFE3XH/Qc1+EdrlmyWy7ZywqN6s3wHItGcTBNqW/Gjn0IF5KExjJGnE6PeHv7kkr2ZVX1CMs8Jolbnsse8q60WC6vcW1RZUyyW9Q2I4mENh+IXrSqe8n97meOLbGvXN69o2lLQtnPVbKnSjVOTsZ0xXvR2TFaMaZKTJJestIOZ3R9Q3wvfqDNnFprsO0Zxd6Uc65wu9lj6C1tI/7WKgqDkUVVyv5qfYZu3JKlBYns+x7MRFX4+OgCwxAx6O3rkLNzj6ePBcO2o2koVszd3ZbFRPAGhOmGvIrYbj36UvjRfD7HNW2u92L/zTINzx1jOTquK4J6GodkTY0lz06WZXF3d0dVKriOFCjPM9J6RloesCzh/49PvySudxx2sioX1TROjuNY1JXs+bq5oe0SLq/eMhxJrT2joek1QokoSlvo+5aXP96htMKPR2OP5W3I7eaGon1/5qn4xS/+hN9/K35LWVdMZxrRrqaXdqe3CuRTLl/cMjJEte6n71c8+nzOSs7B6eyCv/2r/w1HH3N1K/72i3/yKYqqslnljKYi7jIU94x/bPxhXK7ajk4mtPI+5d3tltHYY+gJw9lHGV4wIu/uwRTB7tW7HaalouvikJTmBRePTqirijCTAV8F27WYznys9yxRdcuLH2/5+FPBMPT4KVx/1xEfDFxfwt2qlNOLEYfojvu3UoDR+B3r9Zbj+RMAAm/IIVwxGHs8U8Smquk9m6uYLDeI1uIgo9oNqtZzcS7K320Vkdcqfd3jyMbi6TygakNsZ0EaCYc9nh8xnmp0lliiM+UIQ9MptBrkFYgR2gAAIABJREFU38K3W3zTQvcNGslGmJYhmhajScamqX+MVs8o65AkFZN8cj6l7XWGUgzR9mp8IyCtTbJcwKxsbcT4eMQ2jah2kg4+31Fel4yGYnO5vN7R9Q2WohFYwhGbNmM0mrO6i9EkGUdS3DNdDEmkwOWLH684OjO4v0+wXRG0isZCs3XUgUppinc/sqds7mNa4z1USsHQBxRFxnggDoGaanGo7qkpmMuSvKtDnDR4srlbs3UWA4cozlHkJdR1B+RFS9FVD2x2baRg2gN0KVRttSpp4xLnWzx5sOhNh6zPMeueYCA2or5RSMMDTyW9aBLtKRsLw/XpOglt6Vy0vmVVv
8KvxHuFUY+tmeSxcHKlVzEdk642CSV7H4lJoI1Q25wGST+tq9zf3TI5E3Z3fPERVteSNHfQjx/eaTAfsNq+JpBwpe06IVVyagmxmrgmwXCIFfRE9yJwP3/6hDTJaE1oJaHM7pCi9B1uI3zor/+v/xV/NKVd7RmP3s9ByqvLN4zHJ2zuxGV44B/j9hoLGYxudjc0tYZGjzMUQfLkpOXVNy8ZSYgFmsYnv3jCyPdZL6Vws6Hw+kVLuLP5N//n34pp2av8d//DF3z/o2Beunm15Ysnj+iVnKoSG9xw3hNtD3RJSW9KAc00ptM0zEBevL0z7tcxuqfTSfrrKNug6ja7MsU5lnT3cYTlOxhTcSkMDxnPn83Yxy11KuzlNrrGHnR8cjrElGQH23XGOFDopajn4+efswtXVF5F3b5njTugqCUDU8WWECf/kLCYTrh7KTeYMuPksceXF0/57e/FBWE4PcYcWIw4ZSchVGozZOTZ6JZ4z1dfr/nkyWfs44jZWFw66/aAgUZcpjSyET1fl7TFJRri93Z9Qqem3K5qNN6zYrbEYUtWbDiSJAx3V1ecPplSSvSk2hvYw55dtkKRkhGHaANmwrH7nN6UxD7NEr/9iDITdhe3KRfWI1a7jLUvHpa0Hd1wxup+y9lM2HrY6SRbHVMevHUKfLsgLUqm7/3fKkEZCyRDK2x/t06w7YKji/ex65I4+xa116liSSizOxCuD3SZ+L3eUU/aaaR5wlqKUJ+ejLl+fcVo6FPqYo5/vt1zPBhQIjJ4TV/x9ucrmuaegYT3KVioqoBSFzIOu3rD7fWao2OxLod0T910VE1D34jYrJs2r39+wdn5MW9fiHWfBROqOOdvf/N7AD755DnB2EMzdKKDeIdea8iiLePREaomvi9vU0zLo+qkMG1SMvAhyzpBHgRUdUgUh3jWlESefLsmQbU0ckm3X9c1Q39CkdcgWTBVQ2cfrfEsD1Ne3kZDj65XKOVBrm4bWtUkKyIM2Y7QdyZFu6dhz91eip2nFUncEkmR7bzo+fjzT3h38xO2K/eCEaioLKwzZlMRS66v1ly+u+O/+q//GwD+j7/8a3oFwqxht5OwrkZFsQyW9ysKqUkxnkwJdymK5JD3DYs+s4jzA6mET1Vthe6ZNF1JW4rP3YVLDN1h4Yvvh4JECtCPhyIuKl3J2B9QJAlN8z7mlMTJ+oGtzFDn7PYhQy+gaIQfRyuFs49crm+F/88uFrz48R2BOWV2KmNS0XE41Jw/eszyXpw3BmMbRc9RerHGJ+cm0aElXFU8OhN29vZdiK4m6G5GW4m/DUYG8UHFcUWsVvWcQ1Sjo5Mnwh+D0ZC2OJCHOkYv/t+jx6cckmsimSB0h2OOj2bcvLulTcVatX5HXGWYasAhluLKpoFlaMQHkRBpNAPH9RiMXOLUlLaYkOR7LNPhZiVgnVWvcno85OVPIgFyfHzK6/vv2Yc5Ewm3M7SUJN5wevbkgVSrSO8YDmZ0kml1F7fYlUm8y9hKCn5N10FvuNle00gSltkiYHOXkSTCFs3AQR9WGKZNLf2hakKitsOxY7pYPH9ziHAMDcuT0MiByj5sabPuIYm/qjTiSMX3VRxX2n6ywbAHLOV+3DY6gW8xnj7l7la0BdA4VFXP1fKSpx+LS6bphkRhi9JIQhu1xPfOGA1sHE3sKXncMXRtsnCDbwqb7QtI0xBT4s+zLGE6mjP2HrGVUM9wvafxO6JDKG3FZb+PGI0HJLGYlyQvMIKYyXDESrZHVIs1abTDdiXBjGKx2cYcndokhVjPydRlNDjh8vYVq42IsUN/yN3lJbr0vYuTJ1TjmqRW2MoEbx97qHpC18GukGQ1yyv28QpTzu8+rCnKGAuwbPGefe8SbmKeXkwe2Jb9gcHEDdgehO/9/rffoCoVmqWh2fLC/vUKrW3p6hbXFBf29SvxO/+x8YHQ4sP4MD6MD+PD+DA+jA/jw/gwPowP4z/B+IOoXCm9ii0rA1XeEG5bdBTOZuJW/vLNG+7u/jW27fJEiqvNJiZ5UWEa4sb91ZdTttsDZRUzGoks52Q8pSiG0NdcvhZZhtvla/78n055dCZun3/3d/8aq5qi9lN0W3aB6g13yzW9tsGwRQbKtoa8id5yWfwEQDA0KcscTXUYz0S24PZ2SZiWrO/2fPQLUX42XJMiCdnLRkPDaQl/vmSTqoxnInvQ4pAkFZ5t4A2kPpWa0pgWpiXeu9ShiGu2XcJY0s8Xgxy/dPBPpySXbwFw/IY6H2Ah4USVR5Hfoqoqo4F4z8F8wtW7gqPH4tnrQ47mFGRJyNCRTYT3MZYLh7BEUSS9p65hWQ6dpJnVdR1NN4iikK4TWYaTxZA8SjiaBmxlxssfTonLjEBS3Rr6HdFeYTqdc/1OZI0sc0CS9ljjkDIWmYc43BL4E5JUNjprAXEaU3c5W9lkq5sGg5FHFm1oZVZqPjtHtzVqKe56v9pgeS2WYlB1svoz9NivM2gVDF28e2805FFH0/TSxmySQ8jZkxmdbIw/hDpxkfH80exB46U2VM6OL4gkHXZVtdjWiKIsHshHPHPAb7+7Qdd1JiPhdprb4Y1cylSsS5bU2MGQ0lk+EKCYSkuj6jiaR5rKvghX9D1IPgscx+Gw2WOpE6xWZpL1nra9Y3dIMWX2JaoPTKopntQba9uS211IX9YEpsh0ep4jIDtKRyErwI7pUGcFw7HMwnUaulGShAPaXHzfMn2L6+pYhklVSzHnSU87NFneioxUr4zAyoiLPRiyQtKGzEYzOvney+s7ispAWRxxK6EYo9Exf/TlmN9883cPFMez4Yi8CVkfBGzv/PHHWJMZy8M3KJ2Ypz5uULWYVq1pTfEszdBZXe346JmII66r8OSxR694vH0jRYRbHd83efXdj6iSUl3FIVvneKrI2Oq2QqIUtI1BlcvsqJJyNppRlD0DCQvSyEHRQVZMd+mOSuu43625GIlM9uTcZrtpqNSGofT/yYlDnve4QynEaXnM5xM8N2ASCN/Osoy73Tv+6PM/pQtFdbmstjR1wMh6IubO2/P65h2jmUomKyZNatAQo/Yq46l4FuqarKjYhiIjHPgzDmGMH7hEWyma6lh0Xc7iaIIu1WoX56fcre+xVFERqkuLMLzFcCKcgYiBq9sdamsy80saWa3L8pLNMqLIRQZT9VyW6zXbeM1MVu9e/PA9g8VY9KceIjnHJlXdodrvs/4mq+WB07MLUklk8P3rN9CrPHl2hCHpxPebkC+/uODmjcjwVyX0jsmPr35mcSRhpEnLbHGB74o5Ces1bZGyT2NGc/G3m/s7MEoa1WItqwpnxydkaUsnM9l2G/D88yNu7o8equLT2YAw3GFoCoP3gs+HBE2pUCXFct32BEHA2dkZWSiqYnXe8mfP/owoTXi7Fmvjjl2Oz6a0mqgeVnXDYZ8wmQ4eZA+qLKNKY9JDQjAWFcSqycmrezSkSLLvk2Y2TdsyW4j98Obmjk8+/pwsrShlk/10PKEsa4pCwG9czyPJQ/q+xXQlmdNujaZaVI2Gbor9qS0VdE2nltWYpqqomj2WaaJI
jcX15i2dsiPPDpiWRJeULb7roEjY3PHpMX2XcXI8fyAIcMwReZIRxpe8fi18++nsUyx0/uov/xKAq9sbRscjDKvH8iVMN28YDmcoeoeiSJH7pMWyfW5vJCmD/xzV3PN4/IxQyntsm4Q2atEME1X67Zdnv2S1vqYpZNa/LxgFUzRN49VrUVU8PTrGtGw6JeR0IlsFmoaqC2lqYfvTqUutOET7nKMTMQdZ2dFpNo+eir33cEh58vFzqrJhfiRQNy9/ekEwOsMwO5JC6o2pKm9/esfjCwEP2+57bm+XnJ4ckUgB5rSs8DyHtvTw5W8J05RdFDOUyKzecNE9E9vSKDMRW8JtymR4RBpXjIbic7eXawazIaaEWMVRyKsXLYbmYXpS0H51xWI0ojUMJrKaPR6PybOEw174hzuYY3sm19dLdFvEQEVR6KhQ9I7bGxE3jo/OCHcVE4mG6JWIrlVF37TUQIsisFydIu9xJZnCzeUV233Js6cCUn08OuKQbBlOA46ORJxqSSjSjr5y2W7FuteVjWnaLI7F3rfcRShqTRoemEktwdvVGyaTR9iKxnYn1mHkuwS+gdxCUWqVqlhi+xppJIWwzYC+a9jtNgw7KQOiWxR5w6nUA6SFLK8oK42PPhb207ct69Uay1YfiGEMfUCSHNBk64Xra1iWSVt3hIWw60fnn/DNi9/jWBa9RGlcXl7y7MkFnlyrrmvZHrb4TYlURsA0FvRKi/K+rWM4YL1ek4QllqyATUYBdAp50eC6wob3+wNVYWKqsv2l3dK3LtPJgFwSRa33CVmpUmQKhSSGivcVwdDCdSTaZNey3qU4wwDDEfO02l2TlzsMe4AjmX76uiAJC3Sp6q23FuluiT21KaScj2bXTMZHXK8uCVNRHXSdgNfLOyxZcbfMKYZXYnoWaS7mZbs3MbWO0UDh//lGkGpk+f9bHP0/HH8QlyvD0DDl5Wp5ucF3VFzbfgh2/ihiOnqCaflcXQrI0ZNnJ9iuz1aKXu5CIbx4CLc80Cp1Dof4HYv5I2bSOb76k19xfX3Fr2+EKF3XDMnKira84Uyy/oynU1xnwu1dy5Nz0T9yu/w9nzz/hDgSG05epIwnU77+3Q1NL/oIzo4/54//y1N+/A4++0Ic3qJ0z0ttizsSxqRUOdu7mtHcR5FYyCIJGTkexaFBk/PgTMckSYIuIUcDuyJuSgzPYx+KoJxFKYZ7zurdkr4TG72hTUmThsWpCGJpcs+T51O+/vslzUQY5rb6iSRX2Rzk/Lsdb95tGY6n7A8iOOg61K0pG5tlP1dZkOYZjisvhW1B1/cEwwEHGYy8TMNQFHbpEk3qgaRlSTAaEmWSWcYuGDtjuipClXh8e+zTxVdM9RGtLg/eTY6ql8xHwsnW9xVh2LA484kkHVLbWKh9S5u1dFKHKakKBr7JQkKJyrKmbGLaWmcgL7T7YkfVhGidy1iydbmTAb/79c9cPJE4d2+KEV9z2NxhyICh6hWTwZzd/p73WFbXGeIHBvdrsQadpjBwXcq6YWQKW+zans+fPiXOEwKpWH7/8wtub0MenT0RNhUmrDYbqjZkOhWbgG853OzWuJaNvPPRZxCceuSZgGLUbUJvWLjDgFLis/JiR7m2ePLohL//VrAhXRyf8fzoOd9+KzZ+beSRskNv1Adh4cPhgK6blGXJUDa2bVZLdAU6KWKaxjrB0MBxQNcls2OkEm4rzs8GRFJsOI8yFMN96DtzrIz9vmd/aLBMKbypjakbhUYe5NTO4nDf0MYxhtRuC9M16ZsVjjdkLC87X376Ky5v/h5VaqJ99uRzZk9OefU/v8C33kPINviBTZF2qJ2YPE1TuPj4mFJeNIoyoWxixuMxpjzkOsGAJElQ+prThXjW7dUO1VTopU2fLCY0dKzulgzlAUWzTPI252g6oNXFHDz/7BnL7R2WLXyv7StUTUHXTTYb4cdFFnI8PyHarylkQiBwPQ73NV98JjbZu/WWNz+/w3zS4Xtys9olNJ1CXqXYcrOMK5OsyFHb98xuHdg5ujEjfg/dsUaYjka4jVEUcbg5f/yEy9dXD8QGcRajqwMMzeToWNjG8u4Kzx1Rpg3B/D28JgddoZc9UQ0apuoQ5yFJKWKz7S2I0xuyJmc0kPO5/onBiYom17jsPIKBTeeN+P7td8J++hxSndPRFEX2CWZhTGDrD/1AVaNSlwXpz6+oK3GSOZ4/RlV1HAxWd8InLdPn5xf/8H/atuZ+e2A0PkaxxMVpMSvwHB9fasCsbxrSJGEwPufmrcDez0c2Q2fI3WqPL+FgVV5Q5zqf/uIr8e+iZrm/pNMVDlvhj5Zjcog32JbBYix8+9XPP7BYHPPqSoi76pqFac548/MNlkwaGqbGT69+JE4SCimQ55oub+9eo0r2vrwooeoobw4YElIduAFYOr4/IJa6aFl0QNF1HKlz59k9d6s189OAqysRE+JIYb9bUNclXfv+4NZS5gamZFqtigZV0zAdnfR9AsYxBGGFUpPlUp8qU3FcBVv6VVtU5NmBJDygS0j6drOF3mAwPqVuYvmsnLZRmc/FJSKKDqi6BY1Cupc9iUrIZntDMLCZTERsjvU9tRmjSoHrs7MxzjDgx9ffc7GQyU7LwaxBoSWS/ZOdmgro/omwzfPFgm+/viN4pHAqLztzZUZ02NF0OgcJNdP1iPPRBVUv3sk0bSxbw7IsykT0uYxGY8oqpigLdoqAEdWVynDgch+KGLSO9zR1C6iEkum0QWGzz0ijtXz2jHSfoZk5L16IQ+hseMzq3TXjhc9YxoQkUzkanhNu5F7kDrEUD0U12crel+OLM67efkeh1STy8u9Pn3Lx1GJ7L6BncZ4xPp4C2QP7ommPCSuFTb6mVITPuLYLhkXgi7YAPW/p65Ii3nIk188+esR8MOXm+pLZRCQyug7SfU2TSy01CnZWhOnqqIpM/vUdfQuvX94xHonPVVXJcn3DqdRu3IdX7JM9ljnA9kQMLIsKz5uSJDv0XMRdy59gYJNE4kB9PD8nTvaouoltC3u52+RY5oC27x80pZI4R6UFhJ1Ppo+ZjkwuX25gKuygzDuyeMPhnofk31df/JK83PLtnYTyKRq6qmFrNkki/cMtsV0breqwJKtw21XEcYzrSKbcVsW2HYKBjqEK387yElVVGQ6H2FJ8sm5bdDPDlD1zqtJz9faWQTDGtYUt5EnM88cX3N9v0GXSaTgYE4fNQ9zX1QFtVxElOxLxMYqm5598/iWVjPHv3r1jPjslDlNM830MitENnbpoHpIwumHS1jq6TCwqmkIQ6LS5R+AJf7xZ3eM4U2yrffCr93vx6kqcEZpRg9lbrO82OLIv09A6NHtGSUUl57NXehTbfdDV0zuH0/k5eR0htbhpsz1xUmJbKr5MqpdRzb6Kmc7Fs2234XBoCDSNthO2f3mzxXN8VtvqwR+++uWf8m//F3H2/4+NP4jLVdt17Ldi4z8+Nbg4/yPCfcU+FPjxItN4+thlF2c8/VTc3uksfvf7r/n4448BePv2DlXP8QOX9VY4UBJtWcyOoBmBKSokL37aUGQ
9p+fCWdf7t5j2CSPfw5ANe7rmER1qlHbG777+dwD0pUKvxnS1cILxYMHyOsI1ThjOpWialbP4aMzt3ZDLN8IyG/tAZ3dUtRRNbFUGpyPG+jEt4iBlGDVqC0XSEWfiwBWvMuYnp/hSbdonIiWjSHM8KXb66OkJWaoTLmMefSUyVWWqoSprek18/2q7pzcNxqcjTEmhfjhoWL5BuJeBm5a+gKb1GI5ENma/j/GHBnVVcjhInLTjQAezichyZkmK47gcogNHZ7KhftcymTrkWYjRiYA/cm12yx2xrDZ17Rhl1tNUMA/eXwJT9HZAi0WaCKeaDlyatnnIzmiazXxygaVUTMeSMn55Q1MFeMGEWpeCgckGvZsQyoy77mZMRgH6wGVyLJw6fBujNjphmqH1MkBFJa7b0snsel4dMLBI9iquvCQNfI2qSsjShrZ+L6q3pm18lF42DJsGqtahNhqWDD524NCXBY3ZcyNp8utUYzQa8OKlcNDZ0MQfqISRy24pDkSxkjJ8dEwaX2NIDLIaqOSUuPIQqPQqUdaQbyICW/bR6YFQUN/vmUtpguQ+4rb5kVgSXMxOBqTXBaNgRiEPr76tsFrdoOoGXS+eZWouSZTiyr4M6hhdsUnK+EGaYLE4Zj4fsdpsGAQiwA8HDq8v1xiBmJcXvw358o/PcKwpbfL+HY4o0h15KgLW7d0908mEs/GM+43w/7BaczRaYBj2Qzb9enlHWJacTj4FYHMTE2c/s9nu+fQXIiZc3q057HM8d8TtpXjPTgF3YtJJceAkjimqnM0+eyAkuN9tGY4CTF/n8krSHpsu8/NTskjaYtXTGz2LhU/vSyKVNqVvfVarHm8sbP9wvwGjQdFyadMOh7ymLsF1RCyJdi0mMYZi4MtNVm0tTobWAzFFFVccTRZUdYdskyJOIxzXR+1rPEf0G3VTlaw8PFBrDyYeRVWwXq85PxV9n7vdjq7yhZisTESlkYJhO9zdijkfjGAxm3F19QOmLg5Sw9GMxWJGUdToUhxXT3sur3/kdCL8v9YHxIcUdWDjSrKhfRyhFwY/XL/jl8/Fe544IxQC8lrG+DDFmp6xvHrFTiaPfG+IoqhkVYUvxTg1LSHKQ0YyS20MAm5WSxbeFEMSM7x69QOfffUlL6/fUL9v9I+21JWC50qSFKVhl2Qobk24FEk8T9HR1BRVxpG7zR4/GGH6PmYiqfU31/j6BNMOaFpxML1fVTz/+Bnf/yQo3H3XIKxjsixDt+XFphqQVwqoPWtJoOHNVDqjwpUHue06ZjZ7RBTuyTLxOd3oKYoMbxAwlUmgNC9QFYPtXsQRxzIpq4zheP4gNVFXEHgeKCXTI7FWRR/x7MlnRHvxbM+1OJroeIYgMAEwUVGaDKXN8SQlflNGjIYByEpSGKVCCL03UfX3/TEhvu/StBWhvFD6gUNXjdHkRdi2D+x2IaauY7wXYHdCHp09482rJZnkWTd9ncHAJ05kX8b4EXVZ0VYKrryMd3XFZOSTZj1n58Km3r67oixbnjwRl9ei7Mm6iifPPyORshmGaVNT0bYKAykeHZd70qQkkOQO2/2WweAc9ITbg0zi5SXTyQnbTcRUJqImR3M2y+uHPXqxCCiTnrboeHR2Ku3MYLPZMxxbbA7CrqbDEVG+opD26uoTplMIsxUHmUwNdIM43RPJU+Fo7NK0PX2R81j22hq9wnimUVbxwyV3Mn3C6zcrJmMxT4fdEkPTePMuwnWksHkDit6jqQZZIQ7MUVTQKwe2kvTq5OQpigKH7Q5qKW2R6yh6x8QfUUoESGCP+Pa7n7Dl2cIxArpWIAQ2m7fib+6A+2xDmlRM57LKV2YYio0me+h0V6VtGtquYC+rhYqioKkWvrt4IEAZjWAxm9P2whaHwTGb7ZbFacB2d5C2aFC393gDC1UR65Cne+xBzSEV723ZU84unvL9z9+ShZJwoVCZnQzIyxuGsuoeRR2K2qAiEQu6TlYWWI5LKp+laRrzoznhJkeRwXl9d88+vqerxb+9gYnZu7z45jWffiJImXxPIU5aNB3aXlZ3246+U9nvxLPPzk+gLziEO1JVxFPH0uiVFs8f8565LDzsGI/ndInsDe9q5rMpbdOQ51KIvk0o04o4TVEUiZCqTeJuw9FMVu/jhMFUZ3m/x5DnhslszP3dhu1WVtNr0U80nR3TdsJefdNltwbTMnB94TOeN6SpY2xP+LWhz0jSA7vdDkeSqyTbmr2xQ1VUzi/EZbxIWsLdLU0p5rwqEjQT+jKn18U+4xlj7lYxrZU+yAk4Qx8MkRgE0GlxXZWqhkoS/XRVhq0rOHpAI0lE1F5HMUyQRENxEeFYPnqjkMmL4tjzyNIGTbOYjkXSOYnFneUfGx96rj6MD+PD+DA+jA/jw/gwPowP48P4MP4TjD+IypWiwNGpuJEujkZ8/fvfEu4bxmORSZpMZszn56TdkivJRBZFGYvzCTdrkXVMiwrH1dnuS548khSSgw5T0yirPY4sASqdxsefTB++2/dstpsIJ2joJT37YRNjOCqX1+8o3vf7oBNMIuJYZHEcTyUJVeIwJJXsNrMjne9+8wa7s4kbkQHKwh7X1KgLmaVu91iqxc39lumx+FzXCm2S+cUYVSTYULUey+7YSzjKzlQYWGP0ocH8TGTKSjUi6FqqucVhI/GqYchoNEKRuPqnH5/iBQNuV5fUmfjd1kAnOUSMA/GcpnZQtZzwULB4LjIYdrDg9etXDIdDLF1kQ5qy4aPHz6F+nwWo0WwHb+Rjv2d23DcsN3v6vkItZfXOyTAUlamEJe6jmPWhZew/IglFxmbgD9iHe8pu9yCqWbU9Va5TSMY93Sw4mvpEYUIlWcCePjlnOPLZpTsSSV8a3/cYfsdsIuY3rsC1ZxzyPS/fvBQT3OioncHTi2OW1+9pwGumo/lDpayuM44WFu2yJgkldenglOXNOxQFBhI/rnYanjvhcC3WfH7ioBum0HaQosxaW3OVbzkkGV8OnwBQBAFFW/NUip9GeULX5cxmM/JQZH+HzoS4iRkN5w+Ml2FxR6XU2BKuud3uBeW249PKfElW5IxGHvUm4L/4c7Gmm8Oet6/XjCcCarZbL5kFQ5zA4T3m8H69Jwxrjo+nRDLD3rYtJ6dzdImd7oOCzf0K07HRpTSBYkLU3rEPcyI5fz/98ALLGjPvRQbs2ZkJVc3qdsnIEbZ4e32NSs58Lv6taAv+7Fd/xk/fvqTKJcwj10m7FtUtSBuRFdu8uWM6d3gtmYn0NiLLMuqg5/VrESO0XoNeJ2/2FIr43MfPPyLchCz34nO+76GZNoblsz+IbGHX6SiKw83NW3rJmHg+HbK6vGQsxaQfnRzxZnOHYfaEkiK/jnXcwKPvlYd+kaPjCXGyfWBQsw2XLFxj2haVzLh53py8OaD2LoEiKgiG5XE4rNhHwveOTmY8/+icFz+9oZeQv2Fgc3x6QpmuuVqJrGJPg2MpbN5DRvsCtWl8TIzVAAAgAElEQVQZDCaED6K3DX2jcHa+QJFbwPXbDY+eDqlqKV65XPGnv/oTkmjFYibQAmkVcX
W3wjIDXr39GoDT01MeP14gVQ+4eHbG7tbnbrMkkL1LcVqBUjEZDEDSVnvujE4JMGVG2PFb4nJLHqa4Ugz0ZHoElUrXdQ/V877tGI4DOgmR265vMByXm3WMbUpoYqDzb17/hlYFXYoyz2YLijRi30gByKommExYbm95fCEqenEREzgeV5J5zfZH5E3ND1dvUSQ8xA5cWlUjbwuQWenhcMCbN6+Y+lKIWzVp8hKt75lMxP6xW77i/PRXTKdjXr4WrH+areEEHpWUkFjMh9i2hWVDK/sGHNfi5OKU7SbmzZt3AJydXbDdHVClaHpVVRwOB2zDZHQu+5banFqpqcqMSoq3d73C7nBAVWWlLIw4Gs9IkpBUVmRVzUDTG3aHHXEift9oNKIoCnJJD29bAf7AI8s7wlCgH1zXpcoSuqZg4Ih1b9o9ZRuTb9+/Z85uH5ElMY8eCWbVkX+MbQyo6yvmUgBd0TuWqx2DYCLXuCSKr6DReP5cwO1VpSOKVXyvoJTC3seTCVlj8cMLASsdjo8YnSxYL9ecTGW/b6FwefcO0xzy+FhkoKuNSTDzURWJJNFyzs57Dsv2Qcjc0Hy2UcLxyYyZrMz/9O4lrVax3YlMeVVoPL44Q9MbfnghWEy32zXjWcAhqqgl++JNGINRMhuITH2ZwyGNifMMzxDnoPFwSG23DwLlmqawX4eo6GxlNTRP99yHNdPJnE5CoZp0g2Y7VIpYu91hz8Bb4HgdtexzKcoe1zVQCcjk+ea2/RHTKmlaEe9e//iSWfAYywxIakmpbrTYbofneGy2wv/S3QFL1alzCS+0AiaLKVkVsZJsbOezIaZisN7dECay91XT+OKr5xwkpfshPVBWGqYxxtBExTLPOjpVwQpUCunHtm2SZzWhlEZZLI6ZjETvtir7YZ2gw/U1oijBaEXMuzie8ubtj+iaWLvzixFxGeIHNlorgpcb2BhGh2GqD0gj1/EpyoSdRFa5pQKGytD3ibayejY2uLzZYqoduuy/UwMVtbMYyjNCUe1pe/js049p5LPjuMP2HcJ9ApKRtaoqQW0vWWo9z+TmRvSTh1LI/GQxZ7Y45tXrd4ynYm9ve+gVlRKxnm2nkeVr0jhjPhGVzqPFgCJXOD09pZdsxGWq4PohWylM32sVdQ+e79JWkj3TnpIUW559JCq9TZvz4scbPG+OLWVs1vcHPv/8M1br12SSve/t6xWL+QnXsr1nNpuhKhbX61ecKgL9ZCkpZZhx9NglkigNS1OpUoXOErF6Fzk8ffaYSjeJYjF3TbMjLkoUPExP2KxpWfRKhqoJP1scOST7kMCcU3di7pLQRg0CNHvGfSTOAKqZY+jpA8Nm2+iEh3vOH11gKVIWqO2wLQ3fG7KV/TStFAX/x8b/78uVoiga8Gvgpu/7/1ZRlKfA/wRMgb8H/vu+7ytFUSzgXwJ/AmyBf973/dv/r2f3KARDYYSu/Zh/8S/+OS9fveXH70UD7x//+TE//fSCXXJHUoqD9qOnZ7x8ccNo8B6Le6AoOoZjm3dvhHOa+gjbOTAKjpDoBS6eKTRVQSNpWKs8petVVvcZGwkPOTt7zD6MyfKK2UhsFLeXB0zP45nU1fn+x+8Jxianj55gSshYXm14/S6k0e4eGoS7rUGbaehywVtNIfDGnJ56rDYCctQ1OkfzpxzNRlSyvG+aDi9fvsEcSeV1d0iulDx7dsTXX4uDzXRyRNPUXN3c8fnxHwEw8ls8y3o4NDmBynpbo/Yas4UwlBfXd5RpxtFYOEsUt0ymAWGbUMoNIIojptMFcbjD9yTddqeiKzpK/15wUmyqvVpweSk2pq5WOD06pq4yKkNsxkVW4ngqa6kj4gQxXTWi76+xHLHuvR5hOCF5bNDZwhGGwQDHGaDJsvY2vOfd24zA9xhI+KJpKOyTHUmVoMjDhmv3zMaTf8DeRiG1kmLqNaYlAvAhahhNJ/RVxmeS3MAPRixXV7iSnCMpdKH7NB6gyIO+adRMhiN28e5BH0bTNMoyZ3EsDt69mmHbJioaqiPtrMgxFA3DMQT/KTDxFTTNJpQU9egad9c7nl+4nJwKu17dhjRKRdaNuLl9Kdd4yNHiiPulgLpGUYM9HjCezBjIHrY4jpk4KopT8dHnAv9vvW748/9M5cVLcSD6q79JcQYe8SHHlHhuU9MYjT3KKsGTJ2Zds8mygq4Rn+tpCdwhWDmT4Uh+X0dZtQTDOZrUuYg2Hh9/ckQr++OKSOe7X69ZPB49aN+kaYhuqSzvxbPzrOGbb75Bb3XGUqPocLukKToapWFbCdsw9SnLdUlZvdehS5mOXTzziMNWBGXHHdK2DQ01nrSzJMmI4hhfarCNBzPavmcb7hkOxO+dzRZ888130BuMTsSalnWF0ZocZI/nytKIw5CqLQhG4pC0rJY0dcnji3Pu78XaPHv+R/xuucUcm9KmSt69vGQ88RlOxTs4tsFgMCPcdawlZPT0RKPWWkYSqqgaDb/7+t/SlC5zSdgzPZuRFBGbzYZMQlTzSOWjJ08pJMkGfQ9tRnow/z17bxJrWbbmd/12v/fZZ+/Tn3PP7SJuNJmR+d7L11bzeOWibINoysJCuIQFWAxAnhgJCRCSZ0wYMAIkJJBnwMBIMIEBEiVhV5Upyo/X5MuXGZkZGd3t7+nb3bcM1oprLFXjEjAA5ZpkROQ5++y11re+9bX/P54tZP9b3/oWd/MlN9OXmKbs7bFULq9uaPpC37z/jSd8/vxLPK/PaxmQsH2dODOIi4jRsTh/Sq5z0nvCxa0AFnl7cU0SJihqRpCKy+vopEWwbDDq9Qm3Yl1io2S7+xxb9qvsAocgD8m3EX0JHrGbbvjog2/zB3/0+3QH4veatkVJwfVcBq/yCK89wrbbRIU4V9PVnFavz8nwhPmd0Oma6REubylk74SptUBV0MyKTAYWSiVhuVtiyIBBnCQUaYFqa/Rlj0BeViyX56DouNZIPluh3GrYEn4/zGriIqOpxyQb2Zxf5Sh5QBKWPHsinNVffpbSOe7dl7+2mj5JKkoAt6HsN1Bq9uGO2XKCIhFsiqKgrgQcNEBdahyNzlgsJ5SFMOpdz2UfVPh+G8MUcjZdzJguXtGXHHqGXjHs9SlRefBYlNImecBsMaNSa9aSV+dqdsmTowc0XeGgXF1d0PQc/JZNQ5b8NJyCqjBIC4e8EO9eqXvC7Rppv0OtkpYB82VBuyX7N6OUND2nKCPmC2lAN2y8ZotSlp4lUUaz2aXp2uQSsENRaipCVrsFrVrsTaPRIM/h5PQhAI7ZItrm+LWPWYjzr9dw1D+gN+wxmUp9FscYXsAH74nnpNGaye0LulaDvuzn+mqV8N7ZY3bLPZdzoc9265j26BhNk+X1RsXri3NQUxqeuIuSwme9SWl1fGxfzGe525JGCpNAGuyOQsNpEVU+pezZifYpuyhDk/yDcVJwOH4AlXIP6oGugRqjWwrrnfi3m7tLxv1H5LI06kc/+j631xeYeodSGsJ+d8TN5QV5VWDo4vf8jka40eg1pbGulihJittsstqK+6nhmcTFhru3FxiqsG96/
QF5qYCayn1RWM93NDwdV8rdchnguTr9UY9K8v/VlcLzLz5D08VdoRgOeVYS7nb0ZPBPHxhcXFwI8mhJpfHl56/p99vkkoNxMV9xPD5gdr2ieyDWKogmeO4Yy1RBOht5UnM4fMwuFmuuWhXL6w2qaqDKvmHDriiKiscPP2Qu9Ty1SpVDS1KOKGjkSsXdZM77p+8BMN98xXSx5WQ84uFDIXtvXl/SGbrsJM2Doii4eot222IyE+fdsjxuby/p9/tUxbtARgffd7m5FYGU6ayk2zng/PaOfl/cRa7XZLfZkmYxzaZ4r8vra4oyR5N7HO5TBm0Ho6Gw3Ir7sOkrpGFNpZSoEvxrH+Ycn3XvW2kOj09R9ZjbMKc3FL9nqBXZPiRNxfmslAxVgywtKWSAMElNwnjC8fEpV9dCJxwcpZTFlo601SpqolDhV37jN3nx8c8BOD48wjRqdpstgyPxe6ubHWqlEUl+xQdHXRabO9qDDrlcpzhqYFkppbbHkI7p6xfnnJ706Ejo9+VkSZXXvHj5iqcfiVLh1sMutVoQJuF9v28YLfE8i1Davk2vTafRIi5CkvwdlY5KrWrUaYUuS6jPz4Wf8SeNP0/m6t8FvgAZaoT/BPhP67r+7xRF+a+Afwv4L+V/13VdP1EU5a/Lz/2rf9qDywK+enUuJtaIKHI4ftDi8FhcAtc3aypzg6V17rMtq8WKH3znu2jysPqOx93tjKbhktZCiaVZQFVqeJbD5Vw0Fh6euNxcbOn3xQakYcrg1CeKY0xTLPbzz76gd+jitY54+0Y4BO1hid874aef/QSA0/E3sJobLs9vOZQ8JQ3rgKOhi9Z9wPOPRf39g9GYbQGpJKGsMYmqnGhxhecIo8xQGjhozK5vmd+Ki6nbG1DkKZYk9TTKGGto8+qrlzx6IA51HpTEasVHR++jSl6b3aZiPk85O5NN/cGE7W5FrWhcT0XENA0iHh49I5VZo9rKMFo+Wlqy20pSzwbsVhlJktB7hyhWqYRJSCEV29HREYv5mkzJ8QzJ6uvExMmWohARDoA4S3F8h750FMcnH/HmzYR2y0LNxaVXVHsaVpt9uMMqhfBGYUHX455PouX3sXSDhqtxI4E3Dv0Omtog2K7x3nGgeArrsBDhHCAvl1C32C2T+x6T4eGIfXCFrSsspCGMqmCaJu84NUeuQVa1mC9u+eCxcKovrnaUZUhv4NJuiaMQbPbE2RbDFBec5zbYbTc0PZtcount45pG4mF4LSQIDy3VYLXdYUuAkEzZMhqNyXYBakc8e3Q0ZBHMWW0WeH3xb5WeM1ldkkvD1Gs3aPkt1KxkH8p+CnSagyaKs+PllcjkpHkJU4vtTOzfX/urf5Hf/fu/D1lEuy+JMZOc2+sJZVlzJpGV1psZx8eHFFK5ZllGEK8o85xCNrmahs6we4Ktt7leCKN6dNpmv68JZb9DUVi8/60+nfYhr84FyEZ/0GC3jahKIQdlnVJpFavdLZ7k5yiqHNd3mBCi6GL/ug2XzSokiISMqXpG29OpwgpLEw7DZHqJaWkoqFCLfVjEEUleMJIErElRMp9vqbUKAymv6YZW28G1WmiyvyLcpvQ7HV5KElrD3eHbDb68XFLnYs0PegO6DZvw/0K4+vb8FePDPltp/FwvUtQKdFTmN+Kid12XNDJ478lDlkuJ+heHtDp9Kpk5W++2TK42nJx0KKQEVZXB7d2MqlbwJbjC0dBi4HV4+VoEptyWLbjZwhWHD78JwOnJU+4mS9brLb40tNMSirLG1WS2sK7ZRHM8/5hCgoE4bhPd0UjygBcvRbb30eFTPhy9x2Qh/n406PO7n/4eR4MWmUS8XAchjw7GdHseX7wQQEK6d0xaVoxkU/gqn1LuXIYHQ9FnAMznW7788nPCNMRVZK9bKf493Yv1bNomeZgzemARS7k+G4/Yr0viuzWORMqc390QbxIengqZnk8DVpsl3aHHLhSXpOc0Wa9jDk8kOaSdEa725HaGIxvAb6/vaHgaruVRFpJUV1VxbBvVEEbaYrNE1z2ysmR1K0kvfR+3aRPEK+ZL8XthtGW+umEgSWn3+4D1eovb8sll70aeJlzNrjBUjUqRhORZiFJDLfskkyjD7bl4lk8kwSsUpUKrbRQMQund7Pd7LLOBLoNOwW7LYrFks1lzNxc6oj/sEccxhtm4z6xGyZ7tLr9v/PdbY4LdhqpM78lrz2+XlIXKw7Pje7JzRXNATylk5cFsFtDtjjg9bVFKB6HKLPZhgOna1LLaAlXFbMBKBuP6/SMWs5ThqM/VpZCz8bjHOpigqS7VOyLa6xm1WXAoe5LSfUhLN1Gcil0idHyz2cSuHdbBmqKWpKiuhunBVxcSaTEBr/lDLq4/IZJIp0a/xY9//BNOxge4MgBx+GjIKp7TssR6rrcBrmtRVDGzlcxG9k+plTluwyeQ8Q4bBVVV74m/63qCXppYSoXWkCixDY0yzsikcafWDcJ8z9mDI7JYTDjPVXZrnXATcDgS2ddB18HWTS7eChvos49fY2kqercilgTlvb7Gw9NHvLp7Q6stq0k2GyzdZrkXhn/fP2SXTXBQUN/tSw5G1WPct5ishU5QnYwqi+i1hUOUbGP6nQYPTvv8Hz+WvTZOg2C/xzA1pAjjeR4d3+fy7bn43j7HsRp4nkolK2NaHYfvffd9slTj/FwEosf9B/THDe4Wwp4zFRc1tzHUglA6q7rRYno349k3nrCcCRlabXOePHxGPBGO4mZ5S5IG+L5PGos93i5XeF6PqlTZLMW6n5w8wG96bHZC7sIoo+05uGqXspJZlMzg4WkbS3G4vBLzSYsKw9LY3cqAsmUT7tdUpY5tCWcgSvfYjQ6lkoEmdNd2l5KlFZ2WWM8gXFISU1cKuvIOOXJDkVZ4rs92I+6L9x8/oigzNlks3/uEZLfF1G1iSaC72m2I9hlZGKLWEogiVAl+tiANZcBgu2Y47mJYBt2ecKC//Ow5vYM2O3mHnV+8xrF9dA006ZgeDFuY9oqri4j3PxD3zMs3HzOZbNAkOnG31eJ28iWTWRNfBgjTYEcRq5wcH5HLPrrf+ud+hZ//+KdUG7Evpldi1j7bfUSYivnqqoNtWyy2c3TZe34yHmNrGkvJEXY06FIZFf1DE9d9h4oLab5lcn1LKKuRhuMh20VAsyvs//n2NQ1zgK2DZhr3ewwaJVArMqDk/yPy9T9u/BM5V4qiHAO/DfzHwL+nKIoC/CXgX5Mf+a+B/wjhXP1V+WeA/wH4LxRFUepasgb+MaOqc3pjYUjdXVzz448n7NMxDYlAs1xtCKIJitJj0JOlbEaXq7dv6MrsS7gJBQx4HBPJhv2O32M0HHN9eX1vVNvGANfJaDSll9quMBsK+yShqkWkZXz0kPZxwnYZcPBALOA2TPjsxS/4zX/61wCYT3I+/skEXdXQVSHQbV+j02/z5ScvGLiyAdLwaTdqtpKEMtFuydKQpAqwPTE/z1WpygjDMHj/fRFBnKzvePThEw58IRSxc0NVmczvuG/Ew97h7DKKWiEKxPwUw6Qsduwz4XxM5wlut0Gc
1FxfCcXZc31WqwXDllDIWTHhfPYZjvuIhi3WZbNeYxojuv0+lkRoW8w2PH70VCAyAjd3t6RJgWe6pNLh0tyKVInZJyWWLO9xfYsqr6gkitNyekfX8CjWJmEpm7vbFg3Lwt+pWNJJ6fVP2MzX9yVBnfEhdsNkHdyxldmsA29EXeU0LAE6IuajoVsZuinkwKPLPtRRDY/hUBhuebzHqAyKykA3hUztVxu6w0N0Qzz74sUl7cMj7IZF3xPR5s/3n2B5AZo2YC0BMxzTA2N3jzanqTZhsMW2Ut6Jfb0LGJydsF+u8WSpwPntNUfjIyjF3s3iNS3Pp98eMJHNkoYJSVxTWwW1NN7SqMBFp1Ik5LhaUuYxi92WB5Ks2q4qVuEVDWPMfC5LL5Y3dPs2e0nW+enffcF7v6Lz8nMF2d/K9PaGk6MD6rqmks7bcNBjMplw9C5boes4XhtV18nlu2/2M5KwYLE5J9PFetqNmjifYBpCSZ++1yQMCvJihyrJamfzDX7LRrGEouufDNgvVkRRRZxJUsFujygIuY1ndCQAih03OO0coiKJVTt9FouU1tggj4XyK/cGT95/zM+e/xK/J9Yu3oa0PZNdLqGQVJ3KKXhw+pT5TKxTmus4jSaKmmNLkJLC1AnSDchMz3yd0fU9DsZH1OU7JFCfwVGfV+evabZlFG6R0uy75KVE03RsDo4PMAwLRZYAue0K2xiyDCdcTcW5tUyHga2ykhFU12zS75ziuE1qCSF9M98S5AaOU1PLqJtSNFGqhLb5rvQkozQcuq1DEkkQ/g/+8He5m8/wuw6aKTMruYpuGuwiYZSpaoblNtkHGYaUu320IQhVKkKOT2Tkun3CH/z9f8jZM6Gn1DLi8ZNDnh0/4atrYdBe3d3yneOP+IfPf0L3RJJcJjFpHFBIFEW/3+So3cZxbXSJfFhuA1zbRwtMHNmgPzufU7kWEpeCRqNJkujcLGdYEiAkCjfkhQJNnc1U6KU8WfPNs/eRAVsCZUmeVSxmMYqku6iqirgqmEugCFsv0FSNdbABWa7Z7R+z2k0wNIGWCmDZR6hxQJVIlMxwwW5qUlrpPYiI5Vm8vbmF2qCoJWhB12AbJpxKkJ0ozFHUmCjb3Gci0yBHUTW8ZhfFEHNRyhrP6+A7suE7jkiTmE67TUeSiG/2t1iOTbTbs9gIZ67TcrAtl47MNif7gGgXsd+taEoM7tevvuLs7EMWqy0tafy3Wgaz6R2ejN7vtjGPHj6mSBOKXKzB+LiJY5koasFWEkOvpnuq2uXwRMzPtAKW84B2d09ZCd1S4nFy+oSsXrOSJYZVneE220Q7cYaCfc7wwGW9Cu8pKrK8pOU9pOFYOJIsHr/JOt2wl8G4uqo4PX5AcZvSbkonYr0lNWqWq/B+PrrlEMcq23eZpGaTXVFiPv4GjibLM2uVurwhL2J0hA766voNXssnkNQhRa2SVhm3k+t7xDtrv6HdbbFZ3+GYYr/iWgEtxZHW13ZXcrWb0znokMozutZslrsVo764r/ZBxny7Yf9ig1GJ8/GdZ9/nh7854JPnvySRJfG21+TLLz/HMsTcDEunrFLiZHNfTrxZJZwcPQT9pyyXklyVPo12Qh5L5NOyQ6tVYtgOo8N3Wbg9wc7Ba8GxI2RD0yEO93gNGbQ86NH1Blxc3dCRwcAwj+j1RsTJlsqQCHv5mk73CY4lgVvsmNVyxsOzb7OQZbmzu5LXr675nd/5V7i6EM6/qu158dUFhgy8DUY+abrGUG2SROxDy+ugUDKb3hFH71A34Wb6BkMG4+NNwbA/IEj3qPK+1yubpuew3szpdsRdp6oqQbi6z9qaVsxuvUJTTLayjM31mhRpQa2XrGU5//h4QJzFtCUwhq4VHAwUprMZrj2Q8gmFFpBlJX1JgD65XVBXOzo9+142isSm2WySpRIxdb/B0h3ifU4hnSnTUPDc4X2F1nJ+R50qVGqb9lDI/uRuha40UGwbZACk6RssFzf3dkqBxnweoygqb89F1j/PHRqNBq9fCwe34XhoioGmaHz/+yLQ/8tPPyaNbHyvyeWFyLqlYZPDcYdSlQ7uaoPfafPqzWs+eCQrrfw+nqVRlSqlzEqtVzsup3vG74tztpxtCKIU12+RG0L2F7M1TgPyuoFmyrk0Xcq8oNMX36t0g7TY8+BZh20o1imbbtFNhYbrgQQbK42MPIIkEvJTlipxHbNeZ7hSx9q2gqKnxGFJJVtS3gXX/qTxTwpo8Z8B/yHwzlXrAZu6lq4mXANH8s9HwBWA/P9b+fmvx9fj6/H1+Hp8Pb4eX4+vx9fj6/H1+P/t+DMzV4qi/BVgVtf1zxRF+a3/p35YUZS/CfxNAMfVefpIpEGLoAlKBarDi5eCsPfDDz+klR4zmy4YSt6Zl19MCOIIvyNhkA2XKA5JQ5MqFlGGwjbI8xzT0Yll6nefzVnGE4xURApsp2C3fc0XP9NpuiLi1mm7kCuM+8f88mNRXlOUCR98/4RcFf7lF69f43cO0LQttSwBUloOb9/OGAx6LDcirZymEfvVDFfWnXrNMVcvA4ZPfHaxiOwuVivUvcq33z+kqkXkqLZt5vGCyysRTRsdOpTsePS4T5aKSORukeKZLWZXSwrZQH90PKTRHLJaSLJc02B2s8Z28vu6ZSO1eHz2IaUstQlmVxx0j2k6Q87PZTlRs0Gw3PDgcZ/dWvjgDbfFfPWay3PJX2OVJDuL0AywJZeBoWkEisIuzbEkpLmntYhSk4ORiG7pik7noEOtKLiyBNBsOJxfXuG0XHxJ+BiEK3Q75eAdhHQQoDsqm7im64lIS7tls1iFKKWGIQkty7pCVRM0CV0e5jXvffsD1qtLLl6LtVstZ5w89DBpsN/KNG/LIQw2qJZ4p9TIIc846p7w8y9FOZPjqTheg2xn4snGZsXK0VTzHpzDc7t0ej6VnjNqiXJCKhvLqtmoNWEk+VWSjCxJMCT8dh2pqF7FXksoZL1xXeikccBwNCaVUWLXVjBNAzUX890sA0oqDroNujICNl28Ra8MLl4u7rkaRiMVW7F4/NEHAFg/rNlEMcFmye2N2HfN8KiqirLMyWUZYKuncdp9TFvOdzlfodDh7eure9JCr60QhGsafgslkrwT65rB6ZjyHeSwoVDoGqvdiqKQ0Ohug32ckMmsYxLNsKlZB1sKU3zvqPUe63SFV5vEc1lUeZYQxAV5JlSYbteoVUoVh8xvRZbo/WcPSdItplpgyixfYShYdgdkSVWYrTg+OYbaQAa3qfOSdRywjwJcW5T8mmWGqisMj0UkMtwH2A98yreXNC2huxRCMqVGNXQ0qSdOT1xmNze0fHH2yqJgOy9xOjqPH4hnLbfXxFFIFBX3oCFtv8F8saHVEjKcpVsBfpBvsWS/WJpmDFo1tt2mkrVR4S5inpss9rJsR4tRy5q73Y7FnXhWc9RG9zTWcwtTl6VQheCC8j0h+4tZSKVmbJLlfVP/UdOjDiJ0xSGzxBpfJF9x/P4xeSHO+mY9xYhVokDBVYUcHHtdsEqsWsPoyL7IhY1alWwl991ht8+rl2/4wW9
8xKXsmc3qEN9p0RsdsLoTWXdMnZFvYhpCl2SVxcnokIvLN2iSIiLOtozGNvtAJZARy0HXIS5T4q3Qi6ap0+lBFATomojC75c540fde/lxdRe/LQgkByeinLDe7jh78gG7JObt+Sspe49JNZtr2RRuFyPK/IZ1GHF0JEuTC4U41YjjgE5X7N92teDh6WPeTN7I905YhP/7XQUAACAASURBVBN8xaMpyTmj5RTf96nrGk9C26NVpFsVVXJhJeGURqNPqazwe+8yFg+Z3NwADieSUDard2RRTCJ5WjRP8BPdLKYglgXbGLFer+n3jigqsQ7zmYJWp6xlBmzUe8DV5Q2mGd+D/+xWCSenB6TFFFlNSJZlDMdt8kTs1ctXn5HETRTTp98Rsjg69VhvNpxffk6rJRvTVZcvP3vJ4ZHYT7VyURQF31fpyPKzNE3JqoqryztGw2O5xy5UW7JM3IVxknCxmrIvajxp6rSaxxTWEnUfU1eSvDndUdcuj2TWf7Gas98syWsYS16mtHJQHQ1D7fHp20/FBFUTY5+gyvtftUJQuxyMH1JJyojNcoZh9KHW0ZqytyczSHc5alOSMlsHDB7VqLXC/FZkHj3fpWieMb0SJdZ+t6DtKESRSSjX96uLF8RZyoPjx1xfiiyDoTg8fvCQMJTAEXpNVRo0ez1ymWFfzq7YvdnS9U/ZIPSw1zYBnTyXFROtCKXQuFvMOD0Td5geVOyTPXnu4MlzPJ3ecnAypELMbRls6PgdlssQU/YDuU2L1XZDVWq4kii65eYs7y4IZKnio28csd7Nmcy2WJJEvDPscnu7ZTqdc/pAZPBKYnzX54uX4uyVuUmjUdJyRhxIIlzFqdCaA8p8x4EEc4ijNXm5oSzE2Wt1NIY9n/2rBauduLdH4zbBPsVxLeYzkUF0fR1V01jIPl5d1TBthzxPsCX3ZRwu6fVbaIoOinj3dbDEazYwG2J+lt5htQo4ODq8tw2rCvTMQbEror0E7aoNnEaT+Xwq91MjSlIcQ0Vi+NBwDymVCQ/HxyzmwoaM4hRNC1mV4r3LqkMULei7Ftdvhb3WGT3g9vaW00aHvWyZaPcbbGObpi2BcDKVIN1hqDaRlIWiivmD/+2G8ZHI4rS6FklgcLO4xH8rwc2UikKJ2a8KSvmi731jzPn1DRuZEbb0EUVmEm6umd6cA9Br+oyfPiLNAlq+0Amff/WCwfCAtgQoejN5i2W77Ocr+gOhF9Nlilk49PQET9prap5hNkwyWcJt1iZJXoNSciTvhpWSEwYVjq3SbAoZXi1ijFq7BwiLwlPUfI1TOniStDiulpz2D7i+DdhI4Cvf+b9PIvwj4F9SFOVfBGxEz9V/DrQVRdFlduoYuJGfvwFOgGtFIAy0EMAW/9io6/rvAH8HoNVp1Ju5WEjH0vH7oGjxfdN7u9Xh+fM7NDXhFz8TvRp5XNHtjuh6ksk6F6VEWZKhaUKgj47bpHGFZUf3zYD9nkKve8J8Ig5imuSslyl+Bz78UDyrrGJMbch2tSeUjaF/4be+g6GrnH8man1bVhuv3eTg8Iy3r2RqW9vxqz/4gC+fB3z0faFwcyXl8jJnNBab9MUXX3D6Xpt652PLWvheLydQLbSGwaAlNu6r3ysYnT3kUhNlT4WisVyntIYb0kSCJKRw+uyM4fiUhiFJ6OqI65sbHj0Ryuji8o6ijFEVG68hFEu0DtCNhOVU9sZ0x+yCmNVuTp2LdWk3RqyWtyQ57DPxuZY/YDrLacuG87Is+Wd/4yOenh2wl+SOSb6n0BziCC5kX0ZZaKiNNYlkiLdtm9lmQSWfAXDonmA6JpvtiiSQXD9KSb/nEAfveBMcyjzFUk1MiXj18vUr/LaL322iS1LNMAyI45hdIhTW0fiI2d2M/a7CcsTvHYy79L33uJu8YTCU/T65QxCv7sEODo+GJElFlNfUsoyl7fvoRot23ySthWykeYRhte/rn5vehLrSaJhjptN3SIRN0hhRsiobtXuDATUlC9mD4WgOZV4TRgGHJ6IM8cWLl/TaI7TKIN5IYsimjdNoEkSyZKxX4LkGLX+M5LhmFUS0XQ3DqlBl+lpTfIIt5B3p/JQb0tSgd9CmaQuHy1BzKjSmmxVZKuZX4aGUAgwCoCgzFvMJrmtgyqZ6XQcqi+U8opZEmHUWs9/6eG2xnre3U4yGgtmwUTfie89Ojrm+3tAYCuN8tlpTGxYHJ0fkoXjvbPWGumixXO44eyjWpdFocnP5Ek3yH03vSkbjPmkV0jmVZ7tcY2oF33z/MTPZj9NslzSaOpvlOxCamu0kZb9/zTe/JQIgV5d3NPQu7QMf4x2pFCZZVGHJXH1UJaxvd5ihRu9Q9inqHabTO4Ig4uj4IQDL5RLDbmG7Qv/sdjva/ZIg3FPW4tmL1RZda+D5LpYt3nO9m+G27XsEJQGekLBdbwn2wiBqtR3SuMC27XtjCkUX/WeSeHu32xGFCaPD8T2Jd5GXTG/nKAVYLQlaUiekqUYtQUS0usSodXahxehYXCJHhw/x7JgwDdjKYFUebESvoiHPcU+DKKFYVeiyDOr0I5fzt8/J2ZFvJB9W6VAqOX3p2DS1E379h6fEYcD1W0Gq+8H3nxIkM6L9nMcH3wJgH9W8fH7Jb/yaKEf58vVrnj07ZR+NkLYHwW6DUQ7Iq5RDeRnraoERqpQStfHN7I5+b4R/fIBvCBn69PNfEm6HZIksce45zOY5aR6xDYUxl6xKjs++zyefv+L4WDhcWbIlCvdUhSyb22m89+F3uFpdsZUOSXNwhqN73IU31FIHhWFKmSbsN8KQCoIKy1EwS41Alhz3nENsTNJsTSgDaLZp0hoo9+TnmlGzWi9odfvkEtTjm995n7/wW4/5/b//U87eE3u8WmaUURu/LfstrlZs0x3j0xEz2XMVJ9AZjtlspyxX4t01tcHw4IiN1D/bMCavQoqsxjElAm1zS1LvqFSFSDpc6BWG7rOSz3n27EMUtUGQTJhLtLmruy959N5TDFOlqmSQUutycHiIIcGAuk2TxWKJqTnk70iugw3j4w4lMU5TnOXr6R2Vrt2XrQ+6xzz//EsefXjGIpa9qFZIFSWE6Yb+SJx32+5zd7PjXJaetVod9FplOOpw+UYAhBwcn+E1GyTrNY40+vJSo2UdkoTCwA02GbWfCf4jqTbGw0esF1sORmfczaXzbWo0Gg6F5NUpCpNwX9BomCD5KTW9olS2/ODXhV6eLy+5vp3iOM17VEXDTLj+fEmwDXn0UJDqLpdrDNPmgXSop9M70jwl3uzJZS+Tikaw31IWDj3ZV3N3fYPvDTA98eySFCqbOJjz1eeif9wz2hgaVFTsFXn+k5w98b3ObxoG07sZRZ7jyACToXrURUCv12YtnYZoU9P2R/T67xBv5xwOTlAUBaUWsuG7Ju89OuX28i21DI5pmklZxAx94fDVocnl9C3dgc58IgIpx60e55+t6B12WecSzdK1qdIeliTdjZKYtxeXlLUG8tlhkKKq2n25PcB0dku/NyKUvcwKNQeHLdI8YLUWZ63jdyhyA9208H0JSB
JFtFsD9hLQYrPJUEqH3TLHcyURrq1wcXdDtil5cPJQfm/GarXieCwCb5PrBY7foNnQ+fJClOU9ePge+00K9YbhWHzPaRh8+ssXWB0ZgHUL1ELBsT1iWSa728xRqpj5LL8HhijSHfFG48EHQg7mqzlxrLHer3lwJgIL6/UtrqfjysDbbhtR5zqDUZPVRtiG3dYpba9idbdiLVsariY6UZHgSiRps9B48vR9er/27fs1ePP6E+7ml5ycnjGX/XGe3WQ+WXMjOaZG3RHtdp/F6oLdVOjAYdfl4MBnt7JZl+LfwnWObSpYkqBcTXSazpCW45AGkmD+oM3t7Y7VMrq3F5MgZzw+IpQ8e7pe0O33CRc5WSHuvhqbm+s9iuKg69LWfQca9SeMP9O5quv6bwN/G0Bmrv6Duq7/dUVR/nvgryEQA/9N4H+UX/mf5N//SP7/v/en9VsBVFVJmsnGv6aKooQ0/Qbf/94/A8BPf/YJ282CNLBJJWKZpduY5iE7ybh9c/uKZ88+pC4jbE/WZROTlDENe0j3XYPg/ivyMmU2FQt09qFDndUoKcSpJERzWviei2mFfGSKC+xHP/rLfPLzF9SKcO6+9a0zDNPjzesZF2+FAW02bJLBLaenXWoZZVwuI4o8Zy2b8zqHPbpeSeu4y/Ofy+ZVtY/TWuOdpHz1qZjPN747xPYaxNG7hmidhuWwu7PwbElGOBqSZyolBZudWD/LMjgYnbCSRHzXl3N6vT5RtKLri4tQ7Zc8f/ElB0fiHbf7iG0YMT5uozviwpks5vR9n2HXJZFGrq5GjAd9VFUIc57VZEnGF5+9xrJlT0uqUKsbhj2bbzwTivP07Bk///QFcSwhgVGxHZPbmwmmhHnfb9Ys7+4wNBXNl5CqjsUu2LNYiLVrtjv4rk0aFzhSSZpKjGUb5IXLlYzeffSdJyjo3Mms391kga4vMLW2yIoCw1Gf2eQKv9XA0N8pjSW9UZeFJPD1B01yLSJOlvi+2Idx/5Dz6xs8p8K2pQG7KamKkAcPhYypioOCiaEr95d6u9vh4nyCbhr37OtxlJMnMaqkCWgddJnf3WHqKus7cREPvCZFWbAJQ8ayUbusEtJkRRwIJy1LIW0UUIeEsZCfPI/R1CEHwxFv3pyL9WymKFXFy9cS3OWsga63KPMJz74pmlAns9f4zRHa3GCykPtclVR1xvZSrGeS7Oj1egTbmEbjHUN8hqLktDwfXTaBUjmgVEwmt3KvXMyGxWYV4ksUzuupqJd/l1HoGxZFoJJkkKhCIbbaLqvrlEYTuiMh+2/eXpAXFgnCcOt1D7i+XLLLltS5DMp4Ja6rodUQScdQNzX2u4j5ai3/3iDKbml1DZ5/JmHd7YJ+XyNJDVZLUT/ePzhEURNqvZBnwWN8dEDsqax37wAt2kThkv6gx2wudEJWwXA45FpSSARBQL/VpCgqwkj0Vx2detxcbaDyubsV3zs5a1BkBo7MYFiOIFVfzqp7h9Y2Hbb7COdIY7MW7+W6FnG2oZQNy4btYrlN0ZuUi3MUpQv8lsN8uuJBW5KyG3dUqGilBJ15OmZ1t+XB2MPviDOz364oU5UyTGi5wgDZ7hOCaMrRI2Hs3Gyu+d73n2HVLWaSEDXKtgzax/zw+2d89UpUI6wIsWqLJ2PxPbvR5Tr6BXdvZzz+UNTjVxuNbLOm4x5Qyb7Ph10XTzV5/kLo4YOjHnGS43gVlgT/OImfsl9HnD5o34PFLOYXqC2LaC70oq21UeomxDW77N0ZHaJUEYrsactrC8s3MXYWYSDBajyf29kCy4bdVgadWg/YzDOiTMz3e9/5Bpvwgl67iWMJAyXJUoJ8zmAwYDETl3iWV6yzOUX87rxAHoWouklfAtoU2Yqq9LC1PvtEyKxjN/H9Ec+/+qmY7+mY0eEZ2+gO3xW6ZTK/ZLWqKOuKUAanTk5OuLu44/Hjh2Lvohk/ePaMxSIiOn1HUFyxCXdESchYZnKqOmGxmPPkqUBV3e/3VIpJFIS0e0IOMqUiiCtMo8H4UGTP03RHXReM3kGelwnrdcRwfEwZifOgKw6Lyzk2Ldq2CMLEcc34uM9iLfTUYp1hWi7T+QRPOpSG7bLdZYRxhqLLLHizZLXc0BuINa/Lkg+ffUAQh1iyZ263W7LfR/SGh1iyL/rVmxfYhoMriakd24OqpipTBgNhdGZJTJQFtN1D6kpGWGodSw/JpL7puG1W62tarQ4GMlOd1Xzjw8dkZcxcQtlnmYLv+ZiSeFczYbPZUysdLF98Zr69o1RqFmtJ7zE6JCkqwrhkt5dojHXGyemIJA65uBLn4dHZe8zma9Z78T3TUji/fMVB5wSvI/kSlJKi2KGqCtulONtlrLHJ3/LomXDKZpdr0nTF0WDMaiX0vqIauA2PTEnvjdWm4aHpJjt5F3V6x+hFjlLPyCREftN2qPIYHZ++7JnfLGJcT2dxJxyGUrXRNA21bt1nbde7DL2h0fGaKNJRe/P2hm6rjz8Sz8mKlKdHH2AqK56ORSZ3dBIzaqsoLbiVyHVBVKGbDshe1CgJ8Z02eZlSS4CgwfAxSQy3k5d0JUqkYdhcX18zPpBZVK3i5naCbXko0gmLsi1FGVOVLu86ZfZJQZAVJLJndx7f0TAsOs6Ar94Kh30wGGA2mqiElBJMqdE2qbcVYSDk4PTslMOTAV98+gk9iS5b11tGB31upzPUjdjTB70DGm2LSvbVqZWObmicX73m4EAC5sQ7sgA8r4cqES69Rg9LK5jNRPZ8tawxrCa2U9z3aiq1RhDsuL6WlTleizpPCeMQRSIfG8oA06o4e3yGtxL67O56SnvgcCLBVtSqiWUoLDdLctlP7To+eaLw8//9K3JNnGPTNRk/ekCci7tQqz1m1wturjI+/Ejs+8Xlms2m5uzxEG0q7vazkybXswmFxNzrtkw22x3TdUKaSjREu0e7ZVNVxT2Rsu/32W322LJPsVIK4jhkFxWUssro8XsnbOchURjjSHTwNP9T3RqUP8Pv+cc//I+cq7+iKMojhGPVBT4G/o26rlNFUWzgvwW+C6yAv17X9Zs/7bntrlX/O//+vwBAmbkURYXtqnz1VpRiaVbC9duaYFPzvV8V0cI0avDmyymWK4TS81yyXKOsK8yWEILlJCHZtvjoV0pKyTO1WF2CptKQ5T6tXsab15f4zQO+810RJQq3GU23gdet+PK5iEodHY3Yxxl3V2Kx/9Jv/oiXz5/zd/+b/5m/8bd+Qzx7HREXACFvXglo4veffIu8UillXLUyQj7+eMqjR21iyaZtYDLq63Q7fX7+y3MAzs4e8slPXvIbPxQAF1EeMJ3GtPwDDE0c4PV6TRipqPaS7VIc9LPTM3RdJUzeNbTnTCYTup0mroRinq7u8D2XKhW/XxHj+haaYRKEQghVXHRVo2RJy5dIcrmItiu1EML9tuT2ZkGnaREUIiLlNLpoaoMkSLAkzOugZ5EmBoVUIEUJo9GYyWxJKMt2KAWvmWUarCUCXBjt0HVI5Hs2mz6mXZJlCaU0ZEo0+gOfONDJUvF7pw97fPnFKyJZP3Fw2
ME2FfK0hEpGIuoax67QVOueP8pu1FRFTiIbR7NSIyxiNC0n2ogDfHr0hIvr13QbY2xXHLJtuGcXru9hWNeLHb7nUVYBpi0cp/UqoCpVRuMW86lQWo1GE80sKSQak66rTKd39Dqt+wZT22owm60wdRNPImUGUUCpCiRMAFPxWM43DMcmuoQXXW1ivK7Ldrem3RKyHkfnuLZLmcpyRj3FsDxcx6CSBsOT95/x5uUb4iilltD2q82WStUw3kVayxS/6WEZDluZIi/LHNOBmgxdOsyHB0e8/OocryUuBbW2uZvOiOKYkweSE8iyGfZcXl7IjHDPJttoTCc7mgOx5hoNorVFcxhQF2I9k42O7ehEMnrkOC5qXZDFOwLJz9PwcjRNJQ4VRkfSyI1Lzs9ndIcym2caZFmCoposZzKredjCcBQ2q4SBRBByHZugTFjIqPhp/5ibt3M0U7uPVvqGR1lX9EdDJrJcI0xSkiS558dKkoRh6wE3d1/Q7Yt12e0SLLPNyckRE4m0aJo6r15e8PBUXEzRfsP0LsI0LPyOcGjTqGR82OVmcoGpibPd9Ez2wUIAOiDoGlAKVFW9L+eNqUHLOO51aXvCYf/Fi1/Q77VYXEukt8cqx60HHHUsrl8IA0irK+ra5WY+5Qc/Es54uNzQGfjcToQcXF2/4bd/+y+T7FXWgQh2xEnN0bBNFQXk0nBSnZrZzYIPJQ1Cnuf8bPIp9U7ltCey7sv5LZGSkt2AJcF/NCekezhg4MpI6/IcQ2sTpxHDkXDYr14vGQzG1FqKaYv5nb/9CTg22U7s8XyyoD3qo6sJlSwj0VQHzzOJQmGYpmlKFNeUhY7b8KQMg+mCgsV2InRep3OErao0+0Kmut0hf/TxHzLyDhmNhPOaVTHTxQsUpYmhirnExRLHrZjPJB9gpjMat4j2EQcSjjrYrul22yTZFgnMx3YFbtMglhd/lCw4OjqmzO174269mYryYUtBleUuR+MHLO/WTKdChkfHQ7pdlc8+OeeDbwneqVevvuT49BgFh6qW66JnKIrCK4noWxYqfrPFZrO4N0LDfYTjCI43VRrCnt0iCbL7z2R1yHojeGWGEqVytVyiKAp1GeNKUA0w2ewikOAHjjWkzGI22xXf/Na3xfdWG7brHaapY0po5KxKoKzIJNjRZnPHw5NTDN1Ds8S+X128orJsDMOgkOWuilqjaYAEijA1k37fJkpFQAEgzUXFRdcbM50KHq04SOl7D1BlpcxyHXBw1CZLVVzJ4xdFG548fZ/f/8O/hy0z+LpqkMcVtbQJOr0hqlFQ5AJICqBQI4a9EfPJSspYh9liyna7vW+yb5guQZBAnd5X+Qz6YxRNpZbUGkGwwdALjo4PePVSIp0aFppaYpomcSBkI89zzMYWSSOIVbbJ1Iym7dyDc6iOxWq2odGzKVMRkKAU6IiqDH5Yioen68TpmigTa95vddiFAcfHp2xXQvaSEFzPIiiEnsyLNqqyZDmtefpE6JYs21AlOutlRiZ5igwrp2H36I5kKb0akwURalFhO8JgV5sJzU6f5XzOXKJlV4rDNtjz8JGw8aoqY7tekkQazZaYtKG1CbYqcXZ3X1HT643YbXa4rljfCgXLstiFAaYpqV50k17rAKoSRRPOxjbMqGpYSrvho++e8ekvP8Z3BkhAVnarPZrVpNXUCGXgK8/guHdMKrmwTNdgfnnDbhcxktmsXbhCNywM28HQhT6t9A1pUpEl0pncp/T7Ha6v5rRlYLhhmyiFjqYApggy6YzQzFJkW4EwKqmUnLIyUExZPqwabDcxnivlQCuxTJ0sS2jLTKfndVCMgobRZbEUd9jZ6fuYdsmLz2X5tAG+20bXLLp9sX9us8cXX31GhQ+GuC9uL3OODwf4PfF7u0WEWipoWopMfJJmOsMjA7QCT9IHxEFObZTwbg9mG4J9jm5ynw1F0VC1BovF7L7sOE0zoh0kEvnY95vonsrkMiKvJVhdxyVYl2iahozBsJgtuf10+7O6rn/AHzP+XCTCdV3/HvB78s9vgF/9Yz6TAL/z53nu1+Pr8fX4enw9vh5fj6/H1+Pr8fX4evx/ffy5nKv/t4bndjElc/YiusWyPf7X/+VTnJbw5k+eajx8eIppeITBO+6LIYpxhyPBD5ymzfTtjs6BeU9M1+pVdLyaYfs97q7F9+rM5/ChycVrEVF4+vSUw8Fj8jrh7WsR2XlwOiYMCjb7S773K78OwNtXF2yDCR98U9T+f/nyC0xL5d/+W3+D1qGMFtRvuflqDjuVf/4v/ssA3NyFLF5/SSRTnoVjMvB6mIbJLBQJve9/8JSnhx/y4otrRi0R5XMNm0cnQzTpJR85R6TJDUF8hePKWtgsotZColVEFooo43q9pqpDwkjMN4xLmr5HURTMZpLPxe+g1gnzhYga2Y5Gs+GQ5wVy6Wg2bFBrGs0hs6lkaHdsBgcQ7mTERvEwNJPxWcX5K8lWrdRs9ivGj46ZL0Q5WGn5bOZbkFDCpuUwW23IyopaZjmsZgPFMnBaLTbv2NFbLqpa4TRlIzcqjmOKyNFOclrQEuWD2yVeW6zL67dviKKMw0OR5Wx4FcvphrKK+MF3/ykAPvn4lyyXG5rNBrYtor1O2qbT1IglTHe/f0p+V7NebjiSDbVBvsdseAwPRkS5CKN4qs7NTXRf/ubYGmme47sNEsnnkmUZdqNmvSzRFAnJG4cUcU5Dgh1MpnPKqmayWN+XSxpGQZaWGLpKKhu1s6JE0RrsduI9hyONo5M2WVpQS6JR39WYTxeYdkUs68U1DkjjBFf2KGx2NT1XZbpJ0BDR1x//wSe4vkmz3eH1G1nOZyh4iovSFPse7Ut0XSeMAyzZTKppDUpiyqrgaCSyBfP5njIrySSHhuOXdDoWw4FPqyV7kOKQz19+gSv7H243Ca5pkpkxuozQKqmJboTo1Yi97C3qD5vcXM9ZrMTZa/sVjgXj1gOcpigniPOa7T6i3enfkznWdcxw6BDK7JZm7ImCmoY14ulDUVZSk7JdxlRhyTYT51bruczWM1DEZ5SmQpVnqIpGGcjG1pYLZHz+/AW65ARyHAd0hWjzrhbeYx/M6HYHLCRhcLgv+ODDDleXr98xRrCYpPiOiyObvHZzhYNBH6tZoUhKA0VRuLyaYLv1PcR4w2lSVAmGJJQp9ZooSNjM9jQH4uFVoTNodbEjm7tXIktj223Mts6jptiH1MhoGDUX5xNkEpx9sKXVtnj44QEdSeq7vNkxbo+5ei76Mk79MfvpDrc9xGy8K7frk2YTWr0OcSpkfRu85f2nYwwJ4HMzn9Hx+jQNi8wRazw87bCaTXHeHxDJ+SilS6akTOdCd+pVA7/bYDeNmE1lL1pH6K44S7EaMiJc5diBgibTP0fvnZBVMdeLKU/GImuz2WxgGxDthbx2ewdYdkKW5ajy962GQl4WRPuUptRLZjOi3fQJcnGGfvH8E/yOy3S5JUoE4fvR8BFG3SE1QtpDWVa20ciDko7MjqqaQRzH1HpJIkuVtIYraAMUlWgpNkLXdcxmynon/t7rj1mvcjzf
5P9k7716LMuy/L7f8f76e8NmZKSpzDLdXdV2ejhDAjIQBALkp9GTvpIECHqjBErjNI7T0766qtKHv94d7/WwdwRfOIQEkMA85H7LRNx7z9l7bbfW30SpWON36QpFt0lyH9MXv/f7V69QKx1ssffdrm7Y7H0Oz05ZLMXvxSm8ezcTEDnpb+a5XQzLopLr8mR8wuw2QVXcB+872+pimjqbdcREekjS6lxfv3swI9/st0RxhW7X7DxRxWmsCkMHGo2t5CBbnoEeGNwuRMX0wDHJ84zheMByI/aspi7xPQXTMMkkLzJf7zEMg/FQVqnLBTfzG05PPqGVHBPL6WB5DcvlHf2RqCq2lUer1vgyM2+oGkVZkGY7cim7bFsO1zfvOfjygIOJgLLu9Q2jcZdciuMYgUHR7Gl1j9VOzKvtLsTq3GH3a8JEilMNT+l2Bxiu+JswTFhOQ7ygpZZ+R/3OCXkeP5h6QPC0kAAAIABJREFUZ/me/XrD0cEpWXIvbOShqC1Znj2Y3LZqTlLEBNL3TlMrDLPhu3d/YLUUlcDTwwPaMifNFBYr8QxHJyN2ofJQsTG6CYoSkKQ5ZSYq3oPRU4Z+h912ge6I52qbgqrMkfoyRMUKOxhRFTqV5Hg1hYOhlOzXIeFOjLHnDISQgIwDxzFYXFdQOLz+WkCxLTsh3EWMh485k5D49W7OJrli90b84OHohNX2Ds00SffinOLvj7HmXZbThELGxujEotevub4WCInhcExVNlimQa8rDlmrVUiWWRi2RnMv+Z+3BEGAKr3htuEczzvCcYwHj6cia9ltEyzLwrLlOIQthquiSguONNxQ5Rnz3ZKuVE6yrR5JsUWzjukHooqyWywospBQesO1O53z4xesevHDHnZ6+pSsaEiShEiSrPsjiySLcCz5+43KPtzSHwdUuazMmTqKrrDZLoiW93yhdxweHLPd3K9vJftoQxJZjI/upclNOl0fW5fcKbNhvb2mE4xR5QE1ySPyRUHoRCjSruRuu8J3TWYz8S5//LOXRJsdYbYGRZxz76Z7Fsstk4M+ZSOe/XB8xNGjiFpWklbJHkcLWE5Tnr8Qc9ZwK96/W4FSUZVi3X//as3ouMfkWBpvJzX7/Y7JUZ9wI74rK2NaGkzHo2zvBSkCtrsp/YF4l7yqCWchmmbiSEP5ZL9BNzoM+1169yJTccUtsoL7n2j/LC5XcRLyd3//FwD0ekN6vZqnz05YLcXhcXFzQ2/wgb/6M4OB9HfMy4I/+tPPuJZ8J8exePE9g+V2y/FYTNiLNxkj/4j55Z6hhL+cHCost7d87wsBt8kiBYwF0b7k9ECUjHuOx9/+8peMzmru5oJcrRkqZuMQraU/TqPhjvuswxn7hQjU128uKOcehweHLKRp8W9+8Yaf/vxPuZuJg2rVwqMf61zdfeDlqVThqbt8/c0tg7GOthIDbDYbnr2wKBsxkG9evUMjQ2sMMik0oFATb3M8Y4hqiwX3D1+/5uXLE3R5WS2LkMmoR5bllFJIYbG8ozew6R+Kz9jGEXeLFV98/pj5VEzEKFygNA6jXg8Jq2U06kO6QC8lzEtNCMY6abTDasVkSeIlnz15wu1iiyF9ZzI1J9G2KIk0+fX6bLZ7BoMRmYT3GYZBXbe8e/cOpyO+33YCinLPTm6ovtulKl2qEvah2OjPT46YzxaYlkOci4Usq9c8//ScuTQM3GwL6rpkNHT59rvfAzA+7DFpRuRFSyw3vTiPKJOcSppFXFxeo5QllukSS+PGfbaiSlUqbcugIxbE7SrDdZd4rthwXDdgfrcjj7e0kkC/3W7poHF2/Cl5Jvrhw9Uto8MDTFsaKasLTMPCMUwc+QxpmnN4OGG2mrORfCpUBVUz6Q7FBaVo9szmS44nT7m8EXDU7kAjz1tU1UGRkD/FVOn3HJYLyc/TJxRFQbhPUKWi36Dfp1U09tvwQR3QtWzWyzXSyoy6rdjuV/S6wQPva7st6fS6VJXOThJ9W6XmxafHXF+JzdmwLFynS57kvPpWcpKUnMODDuVWbJbZrMAJKjwsXv9ebPwaCocDH9qYT14I4va7b6/J85xAKv40tTCh/PxHJ/zmW/F7y1mEF5wQ7UpG0ixT13x+/90Vbk9CJXSXk1Of1TxlsRSXncOjEZZdksQFz0/FYp6rOfq8pSchCNe3OyoKDvodqKTipaWy2iRUVUW8EZtelua4tkcmoa6e6ZLWMftpiS7haCfHPeoqItpVHEq+Su2v0VSH6wtpZnlyyGY7J9y3KNJM8urqWkBkRh3WC8m/K2uSKsTriZiKwy1FXKJoGrq8lHW7Gq26Ia8NvLFUl3M84mzGciqeezAYgKbS1BqffCYuH0mkss2u6PQDfvtbcWk47Fdss/e0vphDw9Me1/vf8Nnhf0dHqmm9+u6OftBjn2/57EysedE8ZL2pKOWF6Ca6pDvo0vR00mtppGy0WKMztttbBgci1qu8i2saRKmIn8Dp8PbNJcNHIxTpc3Pz7o5nL0coisvddC777wWv/vAdgwPxvrbdoVYCrq6uyFIR2GGyp7FMDENCXdw+i+0Hhv0em634mySuaGho2wa/e+95NmMYHD0IPlSNiloX2I6D44s+r/WKRq/pdYZsZJztdxsG/gRDqpoulrd4wYjBZMBKHnrbGmyzYjfX8Xvy8G86rGY77lUTonhBr3NClNziBuJiY+U1adbiOx57STBvyWmKElsejFFcVE1lMZ0x6IlEFE1Lv9sjSTKRGAA6nS5NbXB4KPbQqoBuMGQ88VEUOf83MUrdkOxTIl3E3sGxwcvvPSWQKoBOd8w+2aAoFmEh+qDrm+iKRRzmKFJxTlV0FA0ORmKPni+nHHbGtHVDKi8WruuzX+84eTQijMR8r7Sap0+f8fqdgO0lVUG/NyaMtyzvxJp39uSUTmBxc7Eis0P5XRq7eEvbSPETTRjJV5VBkksfRlfjaHzMze2aQfdelbZkG8WYtkxs7jZUVcVwMESVpPeibfhw+4pKyfF9MbeTRMWxmoe5vd7kPDoPiMIUzxBJPN9xmd4tcSU5v6xyxpMepydjVov7A3uMN7A57fW4vZFcdNulLPMHIQVLD2irjCIxkV6raG2DoQ0oi5rTQwmprHJGvVMUVbxvXqpYPnQHXa7eiHe5vfvAn/7JT/jVr7aUcq45DpSNQSvFSPxAIUkjOt6ERnJK15s72lZh1D8h3InvD+Mp+7ACeSbo9Eqi0AIlJ03F33QVE9cfM18twRO/NzoecWSePVyS7nZ7ilbHN1zKUPLHjIw43lDkyoPpfFv7lEWLoojzRrRbMxr2sEz1Iek8na2wzAnHkwl3tyI+q6qiKmoaufmZrst0vmE0GhAn4jkNw8L2NeL9jn7/XIyNv2ezm+IGYs6++u4K2zjEcNwH1cZaXxLoJlmWoUj+TrdjEm5X7DMpLNbzuV3coPs9fHmBdgOffLPDMCzKTIpMFSX9oEORi9jsOBbTdcjk8JjNQjznPlxjKAFoFpbVkf2u8v7imjoX+2hQKVhmn5Q9u60UplE0NKVCC0RfRvucqlTRuiaJnI/oGW1
tUFcpByMJ04tLpsvdAyR+sd6jqy212tDIq8ewZ9EPXpDkO+ZL0cejfpdh94hoJ3iZtrsnSVKOP5nwu/f/AMDR5DmNvmUyPsGQHCvvUwer6xMX/3EP20ULKqWilp6rURJiWhW2YVLIMVWxcAObRpWm6WGJoZrouoku9QUM3aFpNKqSB5isIYXP/qn2z+JypSgVQffeNb6iZc5/+9//jL/4M7FIzmYWOyUnCHKOTkUnBZ2a717/FT1XDKSmGnijDcE4AGmE92/+7b/m4u0lXTeg2xOT//pyyOGRRb8vFrG3H15jWhEnxy8ZBGIxb7OCH331YzbpnIUE7fa8Lk+ffslMZtMePXnG13+Yc/ZswDffiKztj7/6I9qwRc3gLhaH+B//8ecYbc3ZY3HQWKRTfvOPX/Onf/LHzG/EAlhXPmZQkxYNrSnFKiaPUQyTV98K2dc6E2bBnuPdq0gz6VrsLkyCE+2BdNoPjihTlVZm2D1zAK1BXuRYlvg/N+igGRlPn34FwK9/+zWW5/P6/QXVw2RtePSoi2LquP59dmJN3+tTlbJaKIUZAu0JF4kgqp898bCdiqqMHsxAp7d3qLXFZCwPjnXN8eEJd3d3ZBIjfP7kjOVqhWXYDIZislx8mNKS0e+Jf7e5wma+p1FMelIQwXV02nZAnFeUskpU1S3baMM9m9DzAryOSeB7bOTGtFotOT4+YTCe8MtfisubYkRolUUszUC9bgcvULB0i+VKVu8sFd1QuZxOMZCfqwM2y/jBfNiy+qiqy2DYfzAkbduaInOpK5WmFQeS09MzkrwhDGVlqWowdB3DMpktxaHT9wOSIqUXuGzWYmzC1MYJqgeJ9aJIsd0BcbbDk3KmNRonx11Mw8KWBMzb2yl56hJHkq9mr1B1n8Aw2UnM9XI9o9cb0HEcNMnfSOOESqkesnK22cGxdKJwQyaz2YblsljM6A8CcgncT8od5aLFccTnoqghCRfsVyG55Df0Owq+YXI5FRuxb7doZYVaGHzxTGaIlxmuW+F4Kqv1Ur7fhrposSWPsDfKGDojfvCjZ/wv/9u/E/Hp2fT6PpZvoWjSSHkXMzkZYsqqg2GkdLod8jyiqGV2crdD13R8PyYpRL9skoi6tDBlRUo3U5LIYLlvOZYufnVdCi5HqRLH9/jtgLoueXwuzSTVhuV7A8dt0Q154fImvH/3Fr8zQlElSTot0U2F8YFY3xbrFbcXe1wnQLfFJuCYHT779JDXb2eE8pJrGAX+oEMojdSbNkc3DDzHfOAp2nqfKKnwPIWlNCluE4fnnzxnmot+Kv0QvCFaqHL9ShCwDw77WMYSzx8zGEkDZEXh4nqKNby3kHDpOD1W2wWVlOSd9I9oKjg7M1jO/07EwqbG6ozwAvG+Z8MD1gvYfVjTBpK7dHhIut+T5zlKIWKo7z3id9/8lpEcP71ToTkNVVsxktzC9qDg/MmI//svfk2liBjudweovsNeCv3ML1ccnz/ltDMRvBWgPxiynSe4pujLtg0JzCHT2yWxPGRbRp9KiTgYPWZ+KyrXtWayj0JKqR7aFDH7WEG3YtJ72fz9HftwRRj3SRPxTKoOtuc8XLZ63QPW+xDDgUIetjpuH9dMSc0pWSHWvLLO0ZQQVRoGe9YA3WjYhntsRx7g04i+NyCM5pSy+hL4xzx9fso7qcbYHTq8n33g+PARhqyQdCofyzGp6pxWZnazNEJ3WlayclZmCn/0s09Zb6+YXov9cdAbg5oyGBqMJfctTuccn57z4UokfEzDo9EilvMZpiPGb71I8S2fThCgShW+7X5HnoIiDzGe3SeLCjB5MP68urxj6I/Y7EK+uRDnhK++/y/JG42OrIpH+RLf9WnymLNzKale7fjm2z2mPaBuxHxQ9BTXr0ljcdlBUWgbHddwMKSa5X6dobYVilXS3Bs173KKuiKQHDPbc9nvI5IkwrXFM/R6fZLZJevVmsNTmXQqDRara1K5z7z45AlhcoVS2Shy/9+uQrIoZSBNaF3bZba4Y7G+Y7USnzt//Jy4iFisNygSsfDq1Rv8voYi+c1lEjEaioTEXpag93GDYys4gY5li2fPNjme0yWVe4PdLUnyiGWk0JNquq7T48/++leMey73gpBJWFCVQjwMQFVciiijThcPcTDLb9CwmC/uOJSG2bPlW1Rzj2eLeJ3fbDAVl7ZtGYwE39DzVRQVLKvElkaxb7+7ZNSxHoRqDLdlsy25vV5xfnYuxj17j9dx0eOAu4Xg+wyPAjqBTyqrOJ6vs1nG3Lwr+PIn0pbHHOJ1bNabHZlMpj4+87i6XOJ5Yj5GewVd19juZw+V6yypMQYG588fcXspVTeVnHfTK87OpHVHnpHlLa5XYMjKjmpoNIW47JqSf5dVKUlT48rLj2/Y4CvUVYUnK6tv3rzBMHWiTU6UiLUjyAwG/eOHymMaRxitQx43OFJ4K97FVDTYAaSJWLvWc5+u13mwPekPLaKwoGlLdEWMn6ZpoGjEiRQxcTyuZyW0G7pSZGO32tEbTMjK9uEsmu8VTNVgLPlx233O4eMB7XTHbCZ+/7rK8bw+QdfHlGtlmlxiVF8ym8og00bU1pZ5fkN+n/hefcBUDfTwPVkq1gRTOaBebx+4m8vlElN1UIoWRyZuqqDLYNRjsbmlbqVKdLFH0yGXKrFta1HXLboD+1By0aKIyckQ3bG4kPYIhnPPEf1Pt/+vJsIf28f2sX1sH9vH9rF9bB/bx/axfWwf23+m/f9SC/yv1Tpdu/2f/ud/A0CyzwmTkMPDYy6krv/o0Of96x2DgxxFlvfiBIZDhRefSDWtSOHt1QV522AooiqlNSqPTl3CbUxVicxjEJygGd0HRbj97jWOX5PtBuTS0O6rL8dEe5u3d2+whyJLdDj8jE284M1r8T0vX55Rxzqr2Q3HUsY2TXUsr8SsjpgvRLa3LHN24RRFGsW+m04J+iqfDV7gyErZcnuFoe9xrR5TCct7+dWAb357x0JWyk7OTkhy2G30BwlJTbG4/jBD0xs6EmOdhS2KvmcgJU/jrCZKE8KooKplVSpRicKMbld8Ji6WHB51CbcxE8lbGnfH9A7AMnpcXYvsz/e/95TX3015dC4gJKPBCXmRslrvuViIrIbaW/D+my3ZvqVqRVZcp4+jV5SmyBQ4jsV+u6Mucw4ORNZI13XyPKVRhKkqgGUMOT97xEpWD7VWp8x1vE6XXBEVjHhVEMUxw8MRSEnzb797g67B5y9l5WO/wDR79DoHZBJyYBsedaXgeHApYWsoJmXc8OQT0S/ffXhHbTS4jk0TSshKU9MqOrVW0wukCWWsoDQWvaEtY7MgT0v6fYvrG1GV8oOGZ49/ys31BRUCIjoaPSUpNGpd8kKSAtd2aMqGPLv3XBCqPKatgVRfjLcNlt9gSEnuOF3iOQfstosHqFIYKvQCU2QipcHs5198ynqdkUn/r7LZYdCjVlQymZXybIu6bdgnMT2p6JXsYrI44eS5iNfNOmK/CQl8k44vsk1FUZOmCUfHA2rpCXY1XUNqcXoqsriXd3M0taLjdsnae/8mnarMiN
P7bH5JtCgYjk8oFVFlmF7A8+cdVuGO/kiMw+WbjHRv8ezFsXymNV9+9ozxocWf/fnfyz6o6XYsAt8kk8pVtaHgD0ZoEiJraQ7Z3sSxNUKZubYdndntioOxyXwjxsbWPEzPopAYWdN3WZcZ4XLLJwNpcj3fMRwfcXW3oJR9rhsKJ6cDNmsBYzk6HnA7qzg67HF9JeaVp0/Y7Tf4nQ4lUu62N0TRIJMc0ypVmAwesd3s8TuiD66v39ALHnOzmKNJZbUib6gUG38sspXz+YzTg1P6gY/hib/Z3KzQG4/GzHD9ex+oEsVSQEK60l1MVGfstzVnEg6GPqNtHUzF4tFEcK6Upk9cXNLpizzdu28ynn0yQcdjOJaZ1ve/YHTcwVNULt5JVdHOZ+yyCwpZNUL1UQOhhBfYIs7z0kZvc0JlytAXfRxXOh3PopTwl3q/pLYSzG7AgS2eKZouaTSLi9tLHKlm5Zk2WqGyk2OsORZFG2J4JtMryUnwe5RFSCz96v7Hf/U/8Mtf/oa0zukMxVy37Zwo9miLDrasWOTqDa5hMd9K6WndwXV9StdguxTcMLXWMSwP3XJYrsQzPDk9Qi1rtpLXZ5gOSZ5iuwZZIueHaVDHGVbbYSv5I5OxzX694JNnAsaexi1ff/0HBicQRzKLa/uk4Zb1fP/AsQzcMbZi0xtJOGFU0R/1iJPwQQ1xudihYNKfqJSZ5EXVOhkLKEV15Msvv6Tbcbm9nhFLA/a6ytlHd5ycPoFWjHulhuR1SJaL37cth9vpe6KdxuBQ/F4/cBh2Rlxe3OB3RXVysZ6htvCzn/8QgF/+5lvsUmM8GBC2ou+KosA3OqRlyVUoEATPJp+QhkskfRQMG13XsS0DxRRj/PZmRrK/w7YCDo/E+mZoHT58+MDTp88Bwatpa4doE3MovczycEebG+SNyvGx2CN1PWO1T9GlWqCiKAz6Q969f82gK+ZMrz9hvrvg5naNqkgochCxnO7o96RimoTPNlVKcS+eWyikaYoq7QVMW6NqWlrFoCs5Qqg5ZWaQZBssTVbiLQvV3BFJ2PxmWvDo9ISgZzCfyS83G4aHPXbbnFLCunxHY9CZkBfiXepOSRLtScKCji7WEq0qWO0X6GnA0VOxb6+nIXWT4Eue3WaVczTosllfo2vi/aKkwnU0dKNl2Bcw64PDAd++/VuKRFYi3m0wLR8nMBkNxTyeb37P+fljLi4WNLqU/NZH5J1bnEasBy4OgSeUY6c34pzw5PEZbdFQqDnf/F7Ah0/Px2hmjK6J2AyCPu9ev6PjHjzs22Fc0hnW3N7uKaVCsWlF9DtPGcg581d/92tOT0/RjZpIogWOD59RVhlZltGUIoYLYwWNwX4r1m9b7dBUmTCBdwWdwHa7mG2Lqes0tej3XC24mt3x+aOnAJwcTHizvCJcbRjJNX01zyirFFPp0umJubXfren3jtEVEef7TYZiwjab0zaij4swQdc7qN2EjuwH2zjEtEsWkhflBjqzWUzQOWK1E3tWXanUBSiSB/rkyWOuLnbUTUZnLNb9pgFDV3BcjyQX/TK7S/ny888xbQmxLA5YrN+iaF2QVIWub1CUcwzliFp6pQ3GCmXRMPJlhTiKme4WKJbC089EVfPtN+8IDJez0SOefyqoAn/9//x7Gs0BVcwzUzGo65z319doUqJe0QryNqbFRoJgGPTGmEZJVSYyxmzatqUxa/are38zHbc3QFc1Emm9UqoFd79Y/ZdRC/yv1WzHIEzFpvvq9ZoyUwi8CakkFV58uMKwfGY38P0fiwNzt4w5P/6EKpIHzs0dTRZwc5Wy3gjYw/GxS1P1qTKTqSxD/vCrU978/m8evA063hkd5xnX6+/46kdC9vz3b254/OicJF9z+Y04CPPCIi9qTLlxqKXLercgq3OQ5VNF9Xn66ID5csb2lQjWJJ+hWiO6I7EgjhuVZ4fnJFGCkoggbKKGxV7h+Am8+yBKqjfrFXWTMJQXoLJqmd/WpMWKji+hAp7DYKyw35QspFmtFwTYgYHui0NFlqzYxgtcz4VUPHur55y9HNFK8zUzO8LpOpwcjrEkfyTXU1bTlsnEJ3DEZvLbX13QGfh4gfTHira8ev2eyZn+4P0xf7dGLS1OjlzyRHKuEgXLqejJPtisI3RaPvviM8JQXHbiMJamqjX9gdjQfMcRfkGl+B634+F2CvJkx9VbUZrtDQeYrYOjK0ylTqeVVHzvyzNapDG12kU1DG5vVriuOJiWScFw1OGbr7/BCyS8sZOTNQO20vuj7yskocOoZ7OTvhN+v0OYpDRlwWYnhT3UAOqMdiPGM9mlvPjsU2olpJGy+boyZHZ7RZYn+CPxfrWlUzd7skj0gU8XVTdImz2OJzHQho9pHVAoGVuJYQ+GoFkDUnlJGg8P0Bqb1PIfYE8D3ycvQXFcDEm8fftmxsGBhmWI2A/DEbtiRuCNSCQZeOQ/JUwLUm2JIon9fWeIO/BRuZeHB/ckYL9KaaS0vYrHeOxxc7PAvIci5w1ekLDZSIljXcWyXZIsppGXm7IOMHQfKYdCHE15+fIJTeUQxuL3Xr5siPIEi5IwFIfOTs/g/MkxSS2NOUl5+WzMX/3i90jkAEenNqruEEYFhYSMem5AtN1hBZIzkFeopcmuugZVbF5VqeDbFZ3hY1JJsm2KGN1SaAsZi0qHxjKxDg1iCfM6+PSAOFmh6xHjkUjw6FZCU8Vopvj3Jt7RWBnzZIkruXYHnQmqYeJ6HivJ1Wo1lf3O4PMnL8XY3bxmn0Q01ZZMztG6HHB0csj17IqzR0LSPKoLoihCz0VsHHkaxycBZa1SSu8breMQJzv6is/3ToUZ7/vla0zzCMcUfX4dtbhtTm3lVJro87LR6Lgq+9mKmRz3o4lGQcbFB9Evlqcxi1/z0x9+xXf/KH5vsS3pPzaZLzxUW457OyMtI97eivf9/LMDtqtbdmFFmkkfkWTGi6fP8JtDlnN5eWzA1nxyCalqM42mUgm6Bu8+CCnfvheg2TWHR0ds7sQzTNMVk8MTKnnwLsolMRUOXVrJw1h++MCjH5yR7u/5cQFxHOKf2JSK5IosFOKk5nBckMq5rSYOj4ZPoBKxuIs16qplvrhCRwSjoZr0ukMWqzmWeZ9IUKgbk9VGrPnjwRF1rOI7A/ahtCaYeJRaTlLvMKR32XS6o+d12U7FGGdxyuHRCYU+x5HEmrpOSbIIKo/+WBzefbsmnWVUSLjW+pJGczDaglkk1tO40Qg8levLlK7kXH7+6XM+3GYP86osF/zD387Ic5PuSPyNqdZ4QYfDwQn/8I3w38IpKZJYwIqAvPGwgwGnpwNWCynmsEpJdxdk9YqdzHGZms+jyZChKtYft3LQbJddlSApENh+n6o2uZhdMD4TcyuMM6CgSMTvOd2CQmnQTZe9HNOmBG8wwFWtByuSfZ3R6Y64uxJ9nqcZTken1x1xeSH65fj4kLJdU+5qygfOhYtSpZjGva2DRd40lFRUcq3cbWPmN3PqsiBpRAy5Rgf0nFh6FFVRhRWAqzsspFDT4eEhpt9QSv7IuDtmuY5we
z6LhTTGtVym2ynjkf/gE6q1Q5Jtiq6LMT9/odK0Fa8+XDGQVIiTk0fE0ZZku8V1pZiKZfBh9QG7I0VvZh0sa8TgMOHuUuwNXU3DNwfkrUpaiv9r7RLH1llMRT8N+49o9YThYYc8ErHY6bmE0Zq6qUik39Ddckq3M+HDTNIZBl3m6xVuZ8xye2/mPCFLLaK4ZnIizg5+R+P2UufgqbTkSHTCMKXbPWQkDcMbI2a9rzHNiqPHItb9nkscJ2x3UlgsU4Rf3EBjvhG/p2k+84VIOt97CU7nG1abC3RNHOCfP35KQ0S8T2klVFmrLe7uLiiKDMcW/V7bBXmR0JFQ5dXdmpPDAfNpSiXhmYXWUCYFmWFhuOLMkcYWnmFwOReiHstwjdI6DI7H5G+kmNNowE10S39ko9Zi7eo7h2ySFV1bvG935HFxccmg16FWxefi1iGLNYbGAaol1+F8T7FVUaUBumuYdLoxHc+jLsW7rDeXdDsjanlBmU33HB6OWCxvufeur9oKR7HZrlMaQ3y327FYxDPKhdj/x/2EJlMpmoqTJ+KCOekesF15WK7BaiaFWtQJobphk4ixyjIhuKa0PlYuxsWpbAZdm9U8ZnIi+k61R2TRnsATi0S0q/GDHqcHJVu5ry7jEAWfus7xu+L/8qalzjsMZNLCUFyW0ZKuZbGUUuyWMUIrG/blErcv+oX1fx7498/ictW2DXUuHljRbnnxxRmL1Zq7azFI509P+OwHPf64dycAAAAgAElEQVSvf/cHbi7EZDTtkt+s/5qOIzpkt69YbbfcLV9xMBC3/tG4S9k2NIpC0BUHmQ9XrzEdm9MzkfW7erviH37xC/qD4OH2vlmHTG//lqopMXSRRWlqhdU8pyOJ8evNDDfQub5U+dVO8KIOTw65eJeQbMMHn4sfff9H/Pu/+S1bQxxGBv1PmDcmStjw6lYY/zlWie8P2KclG4mh3S5nGBb0A3lo2rfs9hsMw2Mf3ldadiith6blPJKLyHJZoKuwlrjsKMwYDgekcctoIA6PkXrD2dmITF7uqlpnvZuyyxqsRiz4vaMOit0lrHKOn4rLgDY3qdqcP0gPL1qdxWbJNFEeHL9t26dreex2N2Ti67Ftm1pp8SyZealWjMdd5rPlg2t83ZQEXY8wDNFLMTlmyxW2rWCYYhLEcUWWKgRBwJMXIjMYpwaOW5BUBbH0uXry8hFON2C+SmRMVehKQR7VjKW/QpEV/OIXv+bJs2esViKmNruU55+0JA/qej3sYM/0ZssPfyhUIpeLPXmxwFADLq/FZa43gLZuHg5SQc9mH4WE0Qpkv3R7LlWZMuxYJNJA7+LDHariYOsyE+lmhFFC01r0J6LP1SamYM0qDElSaTppZHhqRCC9RaqyQdGhSkuKRPRBZioMD3p8++Y1I6nEF3RUws2ekSSczncX9MYT8iqm0cSilRUNplJSRSG1XKidfoDbUfnmnfSr0DV0VaVCfXBVbyuFXtNSFgXzhXjnl58csbyKmRxIRTFdZ7HZ4PVMdGMi36/F80ekljiY7nN4/e6SwWhEWUje4MDFq/qstN+Rl52HsTK7SzbygHky9vjFb/+ei+vFf1Rxsz12uw2m5eLcKw+qKmUeYVYiFp1uj7jZUexLDFkRNjoq/uQQ09XQZOb4ky8e892bOd++EhvxT/7IpygyZrMVL4/Eu9iVzSZXSJoGXxqbambNbgu2JCMX2Y66McnWFid9MQ5mAGMnYLFeUclnuLmeohcGyVCMsVqZKEpGrZY0iHj5yc9/wPXsG84fPcNW7z17XNB0okwcXh89OyUtdoS7kmdDEVPfvr+kaTVm+4QrX5rxPvLJ8h23H0R/LpsVSqXguj6WIU2vVyVlovAvfvwn/M0//gaA9e5vqHVPKL4BxwcTosTid7/7Ff2eWAOPApXbt0v6/RqrL+ba8tJgNzM4G4i+2y/fEG89jg97LJayiur2uVvO8NQTulKp8vzJMbd3V5jSO+3rP7xjeGKTFiFVda9u1yPKFoT7GMcTmc7ugYtabnDFmYza9+mYHkY+pmmkP86gi5JmDCT/aLmbE9bg1S77laiODDuHHJx1WaZrxlLasTd+xHV6jSa9hR4d+Ly7e4dv+QzkfrHb7FAo6HW7lNIvLstjsiyiK98ljTO01me13SLv3ZR5ShqVHJ8O2a0lp7TI6XR67FZi/fnsxRfM1yGr2MGTPIDdtkZpAwaTgvFEzKNXv7vhpH9IKi/eCiatphB4PS6/vUdaWLz86iXf/5dfYhqyclVWHBxMSGOx7/z229/TNhqdTo0rD4VN1tDWCh/ufo1tS7Ww5Yqu3WEg1VD36Yr9NkPPa2LpJXZ29hVJGbKfTzGlSqRpl+yjHf/7/yr+xp0E6MqK/TpGl/yxZLal37GgUgilyJQVK3gDg95ExF3TpMR5wcXmEltykI+9HoeTERezC1LJDdVMD9/skWT3lwGVipiqCbFdqXhXq+iaS29UEsl9czYLUdqIUiq7RguoixKvH1BKM6OkjtnlLY+Pnz6s+4vlO1znEROJ2pjO35JvFE5ffMFkLNEs6yWm3qPjivmxXt2x3UbkjU5ZiMm2ijacP35GlMxB8uN0BXq9HoOe+J7L23fQqlSZRS69qdbbirYy6Hg+uuRhV2WFZbgosVQn3M8wun3u3qV0ZaLW13MMFwzTZ74V86HIbfq9CUspDvD6zbd88vyctlKoJXJlsRciIcePOmzXkudWQ1NpvPiBSBS/ef2Bbt1ldJqiIvbounS4uLiiG3i0jdjL724qdGfCXCYyPT9Cay1uLm8IfLHG6o1NnlxiG0NcV2RTomjBsD+mkgiJwcglTVXu7mYE0n8sz2E0GbNeR1SlmNueMyDe5w/ian7gEMcVaZzhSh6WbtS0jUkSp5w9EpeGKJ1ja81DhUZtS5oy59Hp8YOfkqor2F2X2e2MIpfjZ5rQ2Fi+eG5NUWjKiOlNQleTh3pVw1JN8iQllSJXHeOIOmtIFTG381JlMnmMpoKqe/K7ck6OejTsSaSAhWm5BIND9jK5Ol1+wNa6NEmO3op4mYyPWS4zJtKk+frDElM36XY8rq6kyfVRj4PDEarqMlvdi7fFLGZTBnINbqoIxxrg6g19iSh4//oNxwc9PLdDLL235vMp/aMO0V70QZbX2HaH2+mC+Z2oTh4ceJS1RpSE/O1f/jkAhmaioGJ05GVu0uPuZktRRTgd8ex2rJOmMarm4EmvxH2UUZMTS7SQbSWkcYimxJiy7+oqQ7NsVBw0iViQGmD/ZPtnAQscjJz2T/4bsfF//v3nvHu9xgu6FDLLQWthGDmHB2f85V/+BwBefDbBsApuLkTQn5+fs4wv2ad7nj0Speebd9d8+vRzVtsSSyotzRcxZZrweCI69pv3V2i6gqqqaNJx27Yb8jznh1/+nCgWi0GYZtzObh/kGuOoxNYDfvC973N1I4Ipr1LyOMT1OiANbPOm4PX1irXcTKx+hybNcFUXUxGLSFPtePrkgCTdcHMpFg3dSDiYjFiHYrIYxpB9vCbLTFqZRTUN0Nselqk8wLpMqwt6xoW8
[... base64-encoded PNG data of the rendered detection result truncated ...]\n",
+      "text/plain": [
" + ] + }, + "metadata": { + "needs_background": "light" + }, + "output_type": "display_data" + } + ], + "source": [ + "# show the results\n", + "show_result_pyplot(model, img, result)" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.7" + }, + "pycharm": { + "stem_cell": { + "cell_type": "raw", + "metadata": { + "collapsed": false + }, + "source": [] + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/detection_cbnet/docker-build-context/cbnetv2/demo/video_demo.py b/detection_cbnet/docker-build-context/cbnetv2/demo/video_demo.py new file mode 100644 index 0000000000000000000000000000000000000000..661130b42c56f64707c4c79749f10e488be02ef0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/demo/video_demo.py @@ -0,0 +1,60 @@ +import argparse + +import cv2 +import mmcv + +from mmdet.apis import inference_detector, init_detector + + +def parse_args(): + parser = argparse.ArgumentParser(description='MMDetection video demo') + parser.add_argument('video', help='Video file') + parser.add_argument('config', help='Config file') + parser.add_argument('checkpoint', help='Checkpoint file') + parser.add_argument( + '--device', default='cuda:0', help='Device used for inference') + parser.add_argument( + '--score-thr', type=float, default=0.3, help='Bbox score threshold') + parser.add_argument('--out', type=str, help='Output video file') + parser.add_argument('--show', action='store_true', help='Show video') + parser.add_argument( + '--wait-time', + type=float, + default=1, + help='The interval of show (s), 0 is block') + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + assert args.out or args.show, \ + ('Please specify at least one operation (save/show the ' + 'video) with the argument "--out" or "--show"') + + model = init_detector(args.config, args.checkpoint, device=args.device) + + video_reader = mmcv.VideoReader(args.video) + video_writer = None + if args.out: + fourcc = cv2.VideoWriter_fourcc(*'mp4v') + video_writer = cv2.VideoWriter( + args.out, fourcc, video_reader.fps, + (video_reader.width, video_reader.height)) + + for frame in mmcv.track_iter_progress(video_reader): + result = inference_detector(model, frame) + frame = model.show_result(frame, result, score_thr=args.score_thr) + if args.show: + cv2.namedWindow('video', 0) + mmcv.imshow(frame, 'video', args.wait_time) + if args.out: + video_writer.write(frame) + + if video_writer: + video_writer.release() + cv2.destroyAllWindows() + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/demo/webcam_demo.py b/detection_cbnet/docker-build-context/cbnetv2/demo/webcam_demo.py new file mode 100644 index 0000000000000000000000000000000000000000..5bded14ff6c3ca633ba6af1843d5a32a433f2e06 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/demo/webcam_demo.py @@ -0,0 +1,46 @@ +import argparse + +import cv2 +import torch + +from mmdet.apis import inference_detector, init_detector + + +def parse_args(): + parser = argparse.ArgumentParser(description='MMDetection webcam demo') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint', help='checkpoint file') + parser.add_argument( + 
+        '--device', type=str, default='cuda:0', help='CPU/CUDA device option')
+    parser.add_argument(
+        '--camera-id', type=int, default=0, help='camera device id')
+    parser.add_argument(
+        '--score-thr', type=float, default=0.5, help='bbox score threshold')
+    args = parser.parse_args()
+    return args
+
+
+def main():
+    args = parse_args()
+
+    device = torch.device(args.device)
+
+    model = init_detector(args.config, args.checkpoint, device=device)
+
+    camera = cv2.VideoCapture(args.camera_id)
+
+    print('Press "Esc", "q" or "Q" to exit.')
+    while True:
+        ret_val, img = camera.read()
+        result = inference_detector(model, img)
+
+        ch = cv2.waitKey(1)
+        if ch == 27 or ch == ord('q') or ch == ord('Q'):
+            break
+
+        model.show_result(
+            img, result, score_thr=args.score_thr, wait_time=1, show=True)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docker/Dockerfile b/detection_cbnet/docker-build-context/cbnetv2/docker/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..8a2f1d691eba0a0ac0ebd70b205f088c63b81f3b
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docker/Dockerfile
@@ -0,0 +1,24 @@
+ARG PYTORCH="1.6.0"
+ARG CUDA="10.1"
+ARG CUDNN="7"
+
+FROM pytorch/pytorch:${PYTORCH}-cuda${CUDA}-cudnn${CUDNN}-devel
+
+ENV TORCH_CUDA_ARCH_LIST="6.0 6.1 7.0+PTX"
+ENV TORCH_NVCC_FLAGS="-Xfatbin -compress-all"
+ENV CMAKE_PREFIX_PATH="$(dirname $(which conda))/../"
+
+RUN apt-get update && apt-get install -y ffmpeg git ninja-build libglib2.0-0 libsm6 libxrender-dev libxext6 \
+    && apt-get clean \
+    && rm -rf /var/lib/apt/lists/*
+
+# Install MMCV
+RUN pip install mmcv-full==1.3.8 -f https://download.openmmlab.com/mmcv/dist/cu101/torch1.6.0/index.html
+
+# Install MMDetection
+RUN conda clean --all
+RUN git clone https://github.com/open-mmlab/mmdetection.git /mmdetection
+WORKDIR /mmdetection
+ENV FORCE_CUDA="1"
+RUN pip install -r requirements/build.txt
+RUN pip install --no-cache-dir -e .
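+
+# Usage sketch (illustrative, not part of the upstream image; the image tag
+# and the host data path below are placeholders):
+#   docker build -t mmdetection docker/
+#   docker run --gpus all -it -v /path/to/data:/mmdetection/data mmdetection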
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docker/serve/Dockerfile b/detection_cbnet/docker-build-context/cbnetv2/docker/serve/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..f96f21cc4382224bc80f53a665bf4c0d8df3286b
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docker/serve/Dockerfile
@@ -0,0 +1,47 @@
+ARG PYTORCH="1.6.0"
+ARG CUDA="10.1"
+ARG CUDNN="7"
+FROM pytorch/pytorch:${PYTORCH}-cuda${CUDA}-cudnn${CUDNN}-devel
+
+ARG MMCV="1.3.8"
+ARG MMDET="2.14.0"
+
+ENV PYTHONUNBUFFERED TRUE
+
+RUN apt-get update && \
+    DEBIAN_FRONTEND=noninteractive apt-get install --no-install-recommends -y \
+    ca-certificates \
+    g++ \
+    openjdk-11-jre-headless \
+    # MMDet Requirements
+    ffmpeg git ninja-build libglib2.0-0 libsm6 libxrender-dev libxext6 \
+    && rm -rf /var/lib/apt/lists/*
+
+ENV PATH="/opt/conda/bin:$PATH"
+ENV FORCE_CUDA="1"
+
+# TORCHSERVE
+RUN pip install torchserve torch-model-archiver
+
+# MMLAB
+RUN pip install mmcv-full==${MMCV} -f https://download.openmmlab.com/mmcv/dist/cu101/torch1.6.0/index.html
+RUN pip install mmdet==${MMDET}
+
+RUN useradd -m model-server \
+    && mkdir -p /home/model-server/tmp
+
+COPY entrypoint.sh /usr/local/bin/entrypoint.sh
+
+RUN chmod +x /usr/local/bin/entrypoint.sh \
+    && chown -R model-server /home/model-server
+
+COPY config.properties /home/model-server/config.properties
+RUN mkdir /home/model-server/model-store && chown -R model-server /home/model-server/model-store
+
+EXPOSE 8080 8081 8082
+
+USER model-server
+WORKDIR /home/model-server
+ENV TEMP=/home/model-server/tmp
+ENTRYPOINT ["/usr/local/bin/entrypoint.sh"]
+CMD ["serve"]
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docker/serve/config.properties b/detection_cbnet/docker-build-context/cbnetv2/docker/serve/config.properties
new file mode 100644
index 0000000000000000000000000000000000000000..efb9c47e40ab550bac765611e6c6c6f2a7152f11
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docker/serve/config.properties
@@ -0,0 +1,5 @@
+inference_address=http://0.0.0.0:8080
+management_address=http://0.0.0.0:8081
+metrics_address=http://0.0.0.0:8082
+model_store=/home/model-server/model-store
+load_models=all
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docker/serve/entrypoint.sh b/detection_cbnet/docker-build-context/cbnetv2/docker/serve/entrypoint.sh
new file mode 100644
index 0000000000000000000000000000000000000000..41ba00b048aed84b45c5a8015a016ff148e97d86
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docker/serve/entrypoint.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+set -e
+
+if [[ "$1" = "serve" ]]; then
+    shift 1
+    torchserve --start --ts-config /home/model-server/config.properties
+else
+    eval "$@"
+fi
+
+# prevent docker exit
+tail -f /dev/null
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/1_exist_data_model.md b/detection_cbnet/docker-build-context/cbnetv2/docs/1_exist_data_model.md
new file mode 100644
index 0000000000000000000000000000000000000000..a2a0b718b0c9d16fe428669e7c0f0b33713468ff
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/1_exist_data_model.md
@@ -0,0 +1,569 @@
+# 1: Inference and train with existing models and standard datasets
+
+MMDetection provides hundreds of existing detection models in [Model Zoo](https://mmdetection.readthedocs.io/en/latest/model_zoo.html), and supports multiple standard datasets, including Pascal VOC, COCO, CityScapes, LVIS, etc.
+This note will show how to perform common tasks on these existing models and standard datasets, including:
+
+- Use existing models to run inference on given images.
+- Test existing models on standard datasets.
+- Train predefined models on standard datasets.
+
+## Inference with existing models
+
+By inference, we mean using trained models to detect objects on images. In MMDetection, a model is defined by a configuration file, and existing model parameters are saved in a checkpoint file.
+
+To start with, we recommend [Faster RCNN](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn) with this [configuration file](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py) and this [checkpoint file](https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth). It is recommended to download the checkpoint file to the `checkpoints` directory.
+
+### High-level APIs for inference
+
+MMDetection provides high-level Python APIs for inference on images. Here is an example of building the model and running inference on given images or videos.
+
+```python
+from mmdet.apis import init_detector, inference_detector
+import mmcv
+
+# Specify the path to model config and checkpoint file
+config_file = 'configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py'
+checkpoint_file = 'checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth'
+
+# build the model from a config file and a checkpoint file
+model = init_detector(config_file, checkpoint_file, device='cuda:0')
+
+# test a single image and show the results
+img = 'test.jpg'  # or img = mmcv.imread(img), which will only load it once
+result = inference_detector(model, img)
+# visualize the results in a new window
+model.show_result(img, result)
+# or save the visualization results to image files
+model.show_result(img, result, out_file='result.jpg')
+
+# test a video and show the results
+video = mmcv.VideoReader('video.mp4')
+for frame in video:
+    result = inference_detector(model, frame)
+    model.show_result(frame, result, wait_time=1)
+```
+
+A notebook demo can be found in [demo/inference_demo.ipynb](https://github.com/open-mmlab/mmdetection/blob/master/demo/inference_demo.ipynb).
+
+Note: `inference_detector` only supports single-image inference for now.
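+
+For a pure bounding-box detector such as Faster R-CNN, `result` is a list with one `(n, 5)` NumPy array per class, where the columns are `x1, y1, x2, y2, score` (mask models return a tuple instead). The sketch below is an illustrative addition, not part of the upstream demo; it assumes `result` from the snippet above, and the threshold value is arbitrary:
+
+```python
+score_thr = 0.3
+for class_id, dets in enumerate(result):
+    # dets has shape (n, 5); column 4 holds the confidence score
+    keep = dets[:, 4] >= score_thr
+    for x1, y1, x2, y2, score in dets[keep]:
+        print(f'class {class_id}: ({x1:.0f}, {y1:.0f}, {x2:.0f}, {y2:.0f}) '
+              f'score={score:.2f}')
+```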
+
+### Asynchronous interface - supported for Python 3.7+
+
+For Python 3.7+, MMDetection also supports asynchronous interfaces.
+By utilizing CUDA streams, they avoid blocking the CPU on GPU-bound inference code and enable better CPU/GPU utilization for single-threaded applications. Inference can be run concurrently, either between different input data samples or between different models of an inference pipeline.
+
+See `tests/async_benchmark.py` to compare the speed of synchronous and asynchronous interfaces.
+
+```python
+import asyncio
+import torch
+from mmdet.apis import init_detector, async_inference_detector
+from mmdet.utils.contextmanagers import concurrent
+
+async def main():
+    config_file = 'configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py'
+    checkpoint_file = 'checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth'
+    device = 'cuda:0'
+    model = init_detector(config_file, checkpoint=checkpoint_file, device=device)
+
+    # queue is used for concurrent inference of multiple images
+    streamqueue = asyncio.Queue()
+    # queue size defines concurrency level
+    streamqueue_size = 3
+
+    for _ in range(streamqueue_size):
+        streamqueue.put_nowait(torch.cuda.Stream(device=device))
+
+    # test a single image and show the results
+    img = 'test.jpg'  # or img = mmcv.imread(img), which will only load it once
+
+    async with concurrent(streamqueue):
+        result = await async_inference_detector(model, img)
+
+    # visualize the results in a new window
+    model.show_result(img, result)
+    # or save the visualization results to image files
+    model.show_result(img, result, out_file='result.jpg')
+
+
+asyncio.run(main())
+```
+
+### Demos
+
+We also provide three demo scripts, implemented with the high-level APIs and supporting functionality code.
+Source code is available [here](https://github.com/open-mmlab/mmdetection/tree/master/demo).
+
+#### Image demo
+
+This script performs inference on a single image.
+
+```shell
+python demo/image_demo.py \
+    ${IMAGE_FILE} \
+    ${CONFIG_FILE} \
+    ${CHECKPOINT_FILE} \
+    [--device ${GPU_ID}] \
+    [--score-thr ${SCORE_THR}]
+```
+
+Examples:
+
+```shell
+python demo/image_demo.py demo/demo.jpg \
+    configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py \
+    checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth \
+    --device cpu
+```
+
+#### Webcam demo
+
+This is a live demo from a webcam.
+
+```shell
+python demo/webcam_demo.py \
+    ${CONFIG_FILE} \
+    ${CHECKPOINT_FILE} \
+    [--device ${GPU_ID}] \
+    [--camera-id ${CAMERA-ID}] \
+    [--score-thr ${SCORE_THR}]
+```
+
+Examples:
+
+```shell
+python demo/webcam_demo.py \
+    configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py \
+    checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth
+```
+
+#### Video demo
+
+This script performs inference on a video.
+
+```shell
+python demo/video_demo.py \
+    ${VIDEO_FILE} \
+    ${CONFIG_FILE} \
+    ${CHECKPOINT_FILE} \
+    [--device ${GPU_ID}] \
+    [--score-thr ${SCORE_THR}] \
+    [--out ${OUT_FILE}] \
+    [--show] \
+    [--wait-time ${WAIT_TIME}]
+```
+
+Examples:
+
+```shell
+python demo/video_demo.py demo/demo.mp4 \
+    configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py \
+    checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth \
+    --out result.mp4
+```
+
+## Test existing models on standard datasets
+
+To evaluate a model's accuracy, one usually tests the model on some standard datasets.
+MMDetection supports multiple public datasets including COCO, Pascal VOC, CityScapes, and [more](https://github.com/open-mmlab/mmdetection/tree/master/configs/_base_/datasets).
+This section will show how to test existing models on supported datasets.
+
+### Prepare datasets
+
+Public datasets like [Pascal VOC](http://host.robots.ox.ac.uk/pascal/VOC/index.html) and [COCO](https://cocodataset.org/#download) are available from their official websites or mirrors. Note: in the detection task, Pascal VOC 2012 is an extension of Pascal VOC 2007 without overlap, and we usually use them together.
+It is recommended to download and extract the dataset somewhere outside the project directory and symlink the dataset root to `$MMDETECTION/data`, as in the example and layout below.
+If your folder structure is different, you may need to change the corresponding paths in config files.
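+
+For instance, assuming COCO was extracted to `/data/coco` (a placeholder path; adjust it to your machine), the symlink could be created like this:
+
+```shell
+mkdir -p data
+ln -s /data/coco data/coco
+```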
+
+```plain
+mmdetection
+├── mmdet
+├── tools
+├── configs
+├── data
+│   ├── coco
+│   │   ├── annotations
+│   │   ├── train2017
+│   │   ├── val2017
+│   │   ├── test2017
+│   ├── cityscapes
+│   │   ├── annotations
+│   │   ├── leftImg8bit
+│   │   │   ├── train
+│   │   │   ├── val
+│   │   ├── gtFine
+│   │   │   ├── train
+│   │   │   ├── val
+│   ├── VOCdevkit
+│   │   ├── VOC2007
+│   │   ├── VOC2012
+```
+
+Some models, such as HTC, DetectoRS and SCNet, require the additional [COCO-stuff](http://calvin.inf.ed.ac.uk/wp-content/uploads/data/cocostuffdataset/stuffthingmaps_trainval2017.zip) dataset; download and unzip it, then move it into the coco folder. The directory structure should look like this:
+
+```plain
+mmdetection
+├── data
+│   ├── coco
+│   │   ├── annotations
+│   │   ├── train2017
+│   │   ├── val2017
+│   │   ├── test2017
+│   │   ├── stuffthingmaps
+```
+
+The [cityscapes](https://www.cityscapes-dataset.com/) annotations need to be converted into the COCO format using `tools/dataset_converters/cityscapes.py`:
+
+```shell
+pip install cityscapesscripts
+
+python tools/dataset_converters/cityscapes.py \
+    ./data/cityscapes \
+    --nproc 8 \
+    --out-dir ./data/cityscapes/annotations
+```
+
+TODO: CHANGE TO THE NEW PATH
+
+### Test existing models
+
+We provide testing scripts for evaluating an existing model on the whole dataset (COCO, PASCAL VOC, Cityscapes, etc.).
+The following testing environments are supported:
+
+- single GPU
+- single node, multiple GPUs
+- multiple nodes
+
+Choose the proper script to perform testing depending on the testing environment.
+
+```shell
+# single-gpu testing
+python tools/test.py \
+    ${CONFIG_FILE} \
+    ${CHECKPOINT_FILE} \
+    [--out ${RESULT_FILE}] \
+    [--eval ${EVAL_METRICS}] \
+    [--show]
+
+# multi-gpu testing
+bash tools/dist_test.sh \
+    ${CONFIG_FILE} \
+    ${CHECKPOINT_FILE} \
+    ${GPU_NUM} \
+    [--out ${RESULT_FILE}] \
+    [--eval ${EVAL_METRICS}]
+```
+
+`tools/dist_test.sh` also supports multi-node testing, but it relies on PyTorch's [launch utility](https://pytorch.org/docs/stable/distributed.html#launch-utility).
+
+Optional arguments:
+
+- `RESULT_FILE`: Filename of the output results in pickle format. If not specified, the results will not be saved to a file.
+- `EVAL_METRICS`: Items to be evaluated on the results. Allowed values depend on the dataset, e.g., `proposal_fast`, `proposal`, `bbox`, `segm` are available for COCO, and `mAP`, `recall` for PASCAL VOC. Cityscapes can be evaluated with `cityscapes` as well as all COCO metrics.
+- `--show`: If specified, detection results will be plotted on the images and shown in a new window. It is only applicable to single-GPU testing and is used for debugging and visualization. Please make sure that a GUI is available in your environment. Otherwise, you may encounter an error like `cannot connect to X server`.
+- `--show-dir`: If specified, detection results will be plotted on the images and saved to the specified directory. It is only applicable to single-GPU testing and is used for debugging and visualization. You do NOT need a GUI available in your environment for using this option.
+- `--show-score-thr`: If specified, detections with scores below this threshold will be removed.
+- `--cfg-options`: If specified, the given key-value pairs will be merged into the config file.
+- `--eval-options`: If specified, the given key-value pairs will be passed as kwargs to the dataset's `evaluate()` function; it is only used for evaluation.
+- `--cfg-options`: If specified, the key-value pair config options will be merged into the config file.
+- `--eval-options`: If specified, the key-value pair eval options will be passed as kwargs to the `dataset.evaluate()` function; it is only used for evaluation.
+
+### Examples
+
+Assume that you have already downloaded the checkpoints to the directory `checkpoints/`.
+
+1. Test Faster R-CNN and visualize the results. Press any key for the next image.
+   Config and checkpoint files are available [here](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn).
+
+   ```shell
+   python tools/test.py \
+       configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py \
+       checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth \
+       --show
+   ```
+
+2. Test Faster R-CNN and save the painted images for future visualization.
+   Config and checkpoint files are available [here](https://github.com/open-mmlab/mmdetection/tree/master/configs/faster_rcnn).
+
+   ```shell
+   python tools/test.py \
+       configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py \
+       checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth \
+       --show-dir faster_rcnn_r50_fpn_1x_results
+   ```
+
+3. Test Faster R-CNN on PASCAL VOC (without saving the test results) and evaluate the mAP.
+   Config and checkpoint files are available [here](https://github.com/open-mmlab/mmdetection/tree/master/configs/pascal_voc).
+
+   ```shell
+   python tools/test.py \
+       configs/pascal_voc/faster_rcnn_r50_fpn_1x_voc0712.py \
+       checkpoints/faster_rcnn_r50_fpn_1x_voc0712_20200624-c9895d40.pth \
+       --eval mAP
+   ```
+
+4. Test Mask R-CNN with 8 GPUs, and evaluate the bbox and mask AP.
+   Config and checkpoint files are available [here](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn).
+
+   ```shell
+   ./tools/dist_test.sh \
+       configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py \
+       checkpoints/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth \
+       8 \
+       --out results.pkl \
+       --eval bbox segm
+   ```
+
+5. Test Mask R-CNN with 8 GPUs, and evaluate the **classwise** bbox and mask AP.
+   Config and checkpoint files are available [here](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn).
+
+   ```shell
+   ./tools/dist_test.sh \
+       configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py \
+       checkpoints/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth \
+       8 \
+       --out results.pkl \
+       --eval bbox segm \
+       --options "classwise=True"
+   ```
+
+6. Test Mask R-CNN on COCO test-dev with 8 GPUs, and generate JSON files for submitting to the official evaluation server.
+   Config and checkpoint files are available [here](https://github.com/open-mmlab/mmdetection/tree/master/configs/mask_rcnn).
+
+   ```shell
+   ./tools/dist_test.sh \
+       configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py \
+       checkpoints/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth \
+       8 \
+       --format-only \
+       --options "jsonfile_prefix=./mask_rcnn_test-dev_results"
+   ```
+
+   This command generates two JSON files, `mask_rcnn_test-dev_results.bbox.json` and `mask_rcnn_test-dev_results.segm.json`.
+
+7. Test Mask R-CNN on Cityscapes test with 8 GPUs, and generate txt and png files for submitting to the official evaluation server.
+   Config and checkpoint files are available [here](https://github.com/open-mmlab/mmdetection/tree/master/configs/cityscapes).
+
+   ```shell
+   ./tools/dist_test.sh \
+       configs/cityscapes/mask_rcnn_r50_fpn_1x_cityscapes.py \
+       checkpoints/mask_rcnn_r50_fpn_1x_cityscapes_20200227-afe51d5a.pth \
+       8 \
+       --format-only \
+       --options "txtfile_prefix=./mask_rcnn_cityscapes_test_results"
+   ```
+
+   The generated png and txt files would be under the `./mask_rcnn_cityscapes_test_results` directory.
+
+### Test without Ground Truth Annotations
+
+MMDetection supports testing models without ground-truth annotations using `CocoDataset`. If your dataset is not in COCO format, please convert it to COCO format first. For example, if your dataset is in VOC format, you can convert it directly to COCO format with the [script in tools](https://github.com/open-mmlab/mmdetection/tree/master/tools/dataset_converters/pascal_voc.py).
+
+```shell
+# single-gpu testing
+python tools/test.py \
+    ${CONFIG_FILE} \
+    ${CHECKPOINT_FILE} \
+    --format-only \
+    --options ${JSONFILE_PREFIX} \
+    [--show]
+
+# multi-gpu testing
+bash tools/dist_test.sh \
+    ${CONFIG_FILE} \
+    ${CHECKPOINT_FILE} \
+    ${GPU_NUM} \
+    --format-only \
+    --options ${JSONFILE_PREFIX} \
+    [--show]
+```
+
+Assuming that the checkpoints in the [model zoo](https://mmdetection.readthedocs.io/en/latest/modelzoo_statistics.html) have been downloaded to the directory `checkpoints/`, we can test Mask R-CNN on COCO test-dev with 8 GPUs and generate JSON files using the following command.
+
+```shell
+./tools/dist_test.sh \
+    configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py \
+    checkpoints/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth \
+    8 \
+    --format-only \
+    --options "jsonfile_prefix=./mask_rcnn_test-dev_results"
+```
+
+This command generates two JSON files, `mask_rcnn_test-dev_results.bbox.json` and `mask_rcnn_test-dev_results.segm.json`.
+
+### Batch Inference
+
+MMDetection supports inference with a single image or batched images in test mode. By default, we use single-image inference; you can enable batch inference by modifying `samples_per_gpu` in the config of the test data. You can either modify the config as below,
+
+```python
+data = dict(train=dict(...), val=dict(...), test=dict(samples_per_gpu=2, ...))
+```
+
+or set it through `--cfg-options` as `--cfg-options data.test.samples_per_gpu=2`.
+
+### Deprecated ImageToTensor
+
+In test mode, the `ImageToTensor` pipeline is deprecated and replaced by `DefaultFormatBundle`; it is recommended to manually replace it in the test data pipeline of your config file. Examples:
+
+```python
+# use ImageToTensor (deprecated)
+pipelines = [
+    dict(type='LoadImageFromFile'),
+    dict(
+        type='MultiScaleFlipAug',
+        img_scale=(1333, 800),
+        flip=False,
+        transforms=[
+            dict(type='Resize', keep_ratio=True),
+            dict(type='RandomFlip'),
+            dict(type='Normalize', mean=[0, 0, 0], std=[1, 1, 1]),
+            dict(type='Pad', size_divisor=32),
+            dict(type='ImageToTensor', keys=['img']),
+            dict(type='Collect', keys=['img']),
+        ])
+]
+
+# manually replace ImageToTensor with DefaultFormatBundle (recommended)
+pipelines = [
+    dict(type='LoadImageFromFile'),
+    dict(
+        type='MultiScaleFlipAug',
+        img_scale=(1333, 800),
+        flip=False,
+        transforms=[
+            dict(type='Resize', keep_ratio=True),
+            dict(type='RandomFlip'),
+            dict(type='Normalize', mean=[0, 0, 0], std=[1, 1, 1]),
+            dict(type='Pad', size_divisor=32),
+            dict(type='DefaultFormatBundle'),
+            dict(type='Collect', keys=['img']),
+        ])
+]
+```
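+
+If you have many configs to update, the same rewrite can also be done programmatically. Below is a small sketch using `replace_ImageToTensor`, the helper that `tools/test.py` applies to legacy test pipelines; the config path is just an example:
+
+```python
+from mmcv import Config
+from mmdet.datasets import replace_ImageToTensor
+
+# load any config whose test pipeline still uses the deprecated ImageToTensor
+cfg = Config.fromfile('configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py')
+# rewrite ImageToTensor steps into DefaultFormatBundle instead of editing by hand
+cfg.data.test.pipeline = replace_ImageToTensor(cfg.data.test.pipeline)
+```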
+## Train predefined models on standard datasets
+
+MMDetection also provides out-of-the-box tools for training detection models.
+This section will show how to train _predefined_ models (under [configs](https://github.com/open-mmlab/mmdetection/tree/master/configs)) on standard datasets, e.g., COCO.
+
+**Important**: The default learning rate in config files is for 8 GPUs and 2 img/gpu (batch size = 8\*2 = 16).
+According to the [linear scaling rule](https://arxiv.org/abs/1706.02677), you need to set the learning rate proportional to the batch size if you use a different number of GPUs or images per GPU, e.g., `lr=0.01` for 4 GPUs \* 2 imgs/gpu and `lr=0.08` for 16 GPUs \* 4 imgs/gpu.
+
+### Prepare datasets
+
+Training requires preparing datasets too. See section [Prepare datasets](#prepare-datasets) above for details.
+
+**Note**:
+Currently, the config files under `configs/cityscapes` use COCO pretrained weights to initialize.
+You may want to download the pretrained models in advance if your network connection is slow or unavailable; otherwise, errors will occur at the beginning of training.
+
+### Training on a single GPU
+
+We provide `tools/train.py` to launch training jobs on a single GPU.
+The basic usage is as follows.
+
+```shell
+python tools/train.py \
+    ${CONFIG_FILE} \
+    [optional arguments]
+```
+
+During training, log files and checkpoints will be saved to the working directory, which is specified by `work_dir` in the config file or via the CLI argument `--work-dir`.
+
+By default, the model is evaluated on the validation set every epoch; the evaluation interval can be specified in the config file as shown below.
+
+```python
+# evaluate the model every 12 epochs.
+evaluation = dict(interval=12)
+```
+
+This tool accepts several optional arguments, including:
+
+- `--no-validate` (**not suggested**): Disable evaluation during training.
+- `--work-dir ${WORK_DIR}`: Override the working directory.
+- `--resume-from ${CHECKPOINT_FILE}`: Resume from a previous checkpoint file.
+- `--options 'Key=value'`: Override other settings in the used config.
+
+**Note**:
+
+Difference between `resume-from` and `load-from`:
+
+`resume-from` loads both the model weights and the optimizer status, and the epoch is also inherited from the specified checkpoint. It is usually used for resuming a training process that was interrupted accidentally.
+`load-from` only loads the model weights, and the training epoch starts from 0. It is usually used for finetuning.
+
+### Training on multiple GPUs
+
+We provide `tools/dist_train.sh` to launch training on multiple GPUs.
+The basic usage is as follows.
+
+```shell
+bash ./tools/dist_train.sh \
+    ${CONFIG_FILE} \
+    ${GPU_NUM} \
+    [optional arguments]
+```
+
+Optional arguments remain the same as stated [above](#training-on-a-single-gpu).
+
+#### Launch multiple jobs simultaneously
+
+If you would like to launch multiple jobs on a single machine, e.g., 2 jobs of 4-GPU training on a machine with 8 GPUs,
+you need to specify different ports (29500 by default) for each job to avoid communication conflicts.
+
+If you use `dist_train.sh` to launch training jobs, you can set the ports in the commands.
+
+```shell
+CUDA_VISIBLE_DEVICES=0,1,2,3 PORT=29500 ./tools/dist_train.sh ${CONFIG_FILE} 4
+CUDA_VISIBLE_DEVICES=4,5,6,7 PORT=29501 ./tools/dist_train.sh ${CONFIG_FILE} 4
+```
+
+### Training on multiple nodes
+
+MMDetection relies on the `torch.distributed` package for distributed training.
+Thus, as a basic usage, one can launch distributed training via PyTorch's [launch utility](https://pytorch.org/docs/stable/distributed.html#launch-utility).
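+
+If you prefer to drive training from Python rather than the shell launchers above, a minimal single-GPU sketch along the following lines should work. Treat it as illustrative only: `tools/train.py` remains the reference implementation and handles more corner cases (logging, seeds, resuming), and the config path here is just an example.
+
+```python
+from mmcv import Config
+from mmdet.apis import train_detector
+from mmdet.datasets import build_dataset
+from mmdet.models import build_detector
+
+cfg = Config.fromfile('configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py')
+cfg.work_dir = 'work_dirs/faster_rcnn_r50_fpn_1x_coco'  # where logs and checkpoints go
+cfg.gpu_ids = range(1)  # single GPU; tools/train.py sets this the same way
+cfg.seed = None
+
+model = build_detector(cfg.model)
+model.init_weights()  # explicit initialization is required since v2.12.0
+
+datasets = [build_dataset(cfg.data.train)]
+model.CLASSES = datasets[0].CLASSES  # so saved checkpoints carry the class names
+
+train_detector(model, datasets, cfg, distributed=False, validate=True)
+```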
+
+### Manage jobs with Slurm
+
+[Slurm](https://slurm.schedmd.com/) is a good job scheduling system for computing clusters.
+On a cluster managed by Slurm, you can use `slurm_train.sh` to spawn training jobs. It supports both single-node and multi-node training.
+
+The basic usage is as follows.
+
+```shell
+[GPUS=${GPUS}] ./tools/slurm_train.sh ${PARTITION} ${JOB_NAME} ${CONFIG_FILE} ${WORK_DIR}
+```
+
+Below is an example of using 16 GPUs to train Mask R-CNN on a Slurm partition named _dev_, setting the work-dir to a shared file system.
+
+```shell
+GPUS=16 ./tools/slurm_train.sh dev mask_r50_1x configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py /nfs/xxxx/mask_rcnn_r50_fpn_1x
+```
+
+You can check [the source code](https://github.com/open-mmlab/mmdetection/blob/master/tools/slurm_train.sh) to review the full arguments and environment variables.
+
+When using Slurm, the port option needs to be set in one of the following ways:
+
+1. Set the port through `--options`. This is recommended since it does not change the original configs.
+
+   ```shell
+   CUDA_VISIBLE_DEVICES=0,1,2,3 GPUS=4 ./tools/slurm_train.sh ${PARTITION} ${JOB_NAME} config1.py ${WORK_DIR} --options 'dist_params.port=29500'
+   CUDA_VISIBLE_DEVICES=4,5,6,7 GPUS=4 ./tools/slurm_train.sh ${PARTITION} ${JOB_NAME} config2.py ${WORK_DIR} --options 'dist_params.port=29501'
+   ```
+
+2. Modify the config files to set different communication ports.
+
+   In `config1.py`, set
+
+   ```python
+   dist_params = dict(backend='nccl', port=29500)
+   ```
+
+   In `config2.py`, set
+
+   ```python
+   dist_params = dict(backend='nccl', port=29501)
+   ```
+
+   Then you can launch two jobs with `config1.py` and `config2.py`.
+
+   ```shell
+   CUDA_VISIBLE_DEVICES=0,1,2,3 GPUS=4 ./tools/slurm_train.sh ${PARTITION} ${JOB_NAME} config1.py ${WORK_DIR}
+   CUDA_VISIBLE_DEVICES=4,5,6,7 GPUS=4 ./tools/slurm_train.sh ${PARTITION} ${JOB_NAME} config2.py ${WORK_DIR}
+   ```
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/2_new_data_model.md b/detection_cbnet/docker-build-context/cbnetv2/docs/2_new_data_model.md
new file mode 100644
index 0000000000000000000000000000000000000000..a9736e7ebed3746a79035a3f654342e0a7dc4a4c
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/2_new_data_model.md
@@ -0,0 +1,263 @@
+# 2: Train with customized datasets
+
+In this note, you will learn how to run inference, test, and train predefined models with customized datasets. We use the [balloon dataset](https://github.com/matterport/Mask_RCNN/tree/master/samples/balloon) as an example to describe the whole process.
+
+The basic steps are as below:
+
+1. Prepare the customized dataset
+2. Prepare a config
+3. Train, test, and run inference on the customized dataset.
+
+## Prepare the customized dataset
+
+There are three ways to support a new dataset in MMDetection:
+
+1. reorganize the dataset into COCO format.
+2. reorganize the dataset into a middle format.
+3. implement a new dataset.
+
+We usually recommend the first two methods, which are easier than the third.
+
+In this note, we give an example of converting the data into COCO format.
+
+**Note**: MMDetection only supports evaluating mask AP for datasets in COCO format for now.
+So users working on instance segmentation should convert their data into COCO format.
+
+### COCO annotation format
+
+The necessary keys of the COCO format for instance segmentation are shown below; for complete details, please refer [here](https://cocodataset.org/#format-data).
+
+```json
+{
+    "images": [image],
+    "annotations": [annotation],
+    "categories": [category]
+}
+
+
+image = {
+    "id": int,
+    "width": int,
+    "height": int,
+    "file_name": str,
+}
+
+annotation = {
+    "id": int,
+    "image_id": int,
+    "category_id": int,
+    "segmentation": RLE or [polygon],
+    "area": float,
+    "bbox": [x,y,width,height],
+    "iscrowd": 0 or 1,
+}
+
+categories = [{
+    "id": int,
+    "name": str,
+    "supercategory": str,
+}]
+```
+
+Assume we use the balloon dataset.
+After downloading the data, we need to implement a function to convert the annotation format into the COCO format. Then we can use the implemented `CocoDataset` to load the data and perform training and evaluation.
+
+If you take a look at the dataset, you will find the dataset format is as below (the annotations are a Python-style dict dump):
+
+```python
+{'base64_img_data': '',
+ 'file_attributes': {},
+ 'filename': '34020010494_e5cb88e1c4_k.jpg',
+ 'fileref': '',
+ 'regions': {'0': {'region_attributes': {},
+                   'shape_attributes': {'all_points_x': [1020, 1000, 994, 1003, 1023, 1050, 1089,
+                                                         1134, 1190, 1265, 1321, 1361, 1403, 1428,
+                                                         1442, 1445, 1441, 1427, 1400, 1361, 1316,
+                                                         1269, 1228, 1198, 1207, 1210, 1190, 1177,
+                                                         1172, 1174, 1170, 1153, 1127, 1104, 1061,
+                                                         1032, 1020],
+                                        'all_points_y': [963, 899, 841, 787, 738, 700, 663,
+                                                         638, 621, 619, 643, 672, 720, 765,
+                                                         800, 860, 896, 942, 990, 1035, 1079,
+                                                         1112, 1129, 1134, 1144, 1153, 1166, 1166,
+                                                         1150, 1136, 1129, 1122, 1112, 1084, 1037,
+                                                         989, 963],
+                                        'name': 'polygon'}}},
+ 'size': 1115004}
+```
+
+The annotation is a JSON file in which each key corresponds to all annotations of one image.
+The code to convert the balloon dataset into COCO format is as below.
+
+```python
+import os.path as osp
+
+import mmcv
+
+
+def convert_balloon_to_coco(ann_file, out_file, image_prefix):
+    data_infos = mmcv.load(ann_file)
+
+    annotations = []
+    images = []
+    obj_count = 0
+    for idx, v in enumerate(mmcv.track_iter_progress(data_infos.values())):
+        filename = v['filename']
+        img_path = osp.join(image_prefix, filename)
+        height, width = mmcv.imread(img_path).shape[:2]
+
+        images.append(dict(
+            id=idx,
+            file_name=filename,
+            height=height,
+            width=width))
+
+        for _, obj in v['regions'].items():
+            assert not obj['region_attributes']
+            obj = obj['shape_attributes']
+            px = obj['all_points_x']
+            py = obj['all_points_y']
+            # flatten [(x0, y0), (x1, y1), ...] into [x0, y0, x1, y1, ...]
+            # as required by the COCO polygon format
+            poly = [(x + 0.5, y + 0.5) for x, y in zip(px, py)]
+            poly = [p for x in poly for p in x]
+
+            x_min, y_min, x_max, y_max = (
+                min(px), min(py), max(px), max(py))
+
+            data_anno = dict(
+                image_id=idx,
+                id=obj_count,
+                category_id=0,
+                bbox=[x_min, y_min, x_max - x_min, y_max - y_min],
+                area=(x_max - x_min) * (y_max - y_min),
+                segmentation=[poly],
+                iscrowd=0)
+            annotations.append(data_anno)
+            obj_count += 1
+
+    coco_format_json = dict(
+        images=images,
+        annotations=annotations,
+        categories=[{'id': 0, 'name': 'balloon'}])
+    mmcv.dump(coco_format_json, out_file)
+```
+
+Using the function above, users can convert the annotation file into COCO JSON format and then use `CocoDataset` to train and evaluate the model.
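+
+For example, assuming the balloon data has been extracted to `balloon/`, the conversion can be run once per split. The `via_region_data.json` filename comes from the balloon dataset release; adjust it if yours differs:
+
+```python
+convert_balloon_to_coco(
+    ann_file='balloon/train/via_region_data.json',
+    out_file='balloon/train/annotation_coco.json',
+    image_prefix='balloon/train')
+convert_balloon_to_coco(
+    ann_file='balloon/val/via_region_data.json',
+    out_file='balloon/val/annotation_coco.json',
+    image_prefix='balloon/val')
+```
+
+The output paths match the `ann_file` entries used in the config below.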
+## Prepare a config
+
+The second step is to prepare a config so that the dataset can be successfully loaded. Assume that we want to use Mask R-CNN with FPN, and that the config is placed under the directory `configs/balloon/` and named `mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_balloon.py`. The config to train the detector on the balloon dataset is as below.
+
+```python
+# The new config inherits a base config to highlight the necessary modification
+_base_ = '../mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py'
+
+# We also need to change the num_classes in head to match the dataset's annotation
+model = dict(
+    roi_head=dict(
+        bbox_head=dict(num_classes=1),
+        mask_head=dict(num_classes=1)))
+
+# Modify dataset related settings
+dataset_type = 'CocoDataset'
+classes = ('balloon',)
+data = dict(
+    train=dict(
+        img_prefix='balloon/train/',
+        classes=classes,
+        ann_file='balloon/train/annotation_coco.json'),
+    val=dict(
+        img_prefix='balloon/val/',
+        classes=classes,
+        ann_file='balloon/val/annotation_coco.json'),
+    test=dict(
+        img_prefix='balloon/val/',
+        classes=classes,
+        ann_file='balloon/val/annotation_coco.json'))
+
+# We can use the pre-trained Mask R-CNN model to obtain higher performance
+load_from = 'checkpoints/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth'
+```
+
+## Train a new model
+
+To train a model with the new config, you can simply run
+
+```shell
+python tools/train.py configs/balloon/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_balloon.py
+```
+
+For more detailed usages, please refer to [Case 1](1_exist_data_model.md).
+
+## Test and inference
+
+To test the trained model, you can simply run
+
+```shell
+python tools/test.py configs/balloon/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_balloon.py work_dirs/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_balloon/latest.pth --eval bbox segm
+```
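+
+You can also run inference with the resulting checkpoint directly from Python using the high-level APIs. A minimal sketch; the image path here is hypothetical:
+
+```python
+from mmdet.apis import inference_detector, init_detector
+
+config_file = 'configs/balloon/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_balloon.py'
+checkpoint_file = 'work_dirs/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_balloon/latest.pth'
+model = init_detector(config_file, checkpoint_file, device='cuda:0')
+
+result = inference_detector(model, 'balloon/val/sample.jpg')  # hypothetical image
+model.show_result('balloon/val/sample.jpg', result, out_file='balloon_result.jpg')
+```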
+
+For more detailed usages, please refer to [Case 1](1_exist_data_model.md).
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/3_exist_data_new_model.md b/detection_cbnet/docker-build-context/cbnetv2/docs/3_exist_data_new_model.md
new file mode 100644
index 0000000000000000000000000000000000000000..2ea0e812c32ba1ae1f6bde38c46dc7c1585d95f5
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/3_exist_data_new_model.md
@@ -0,0 +1,275 @@
+# 3: Train with customized models and standard datasets
+
+In this note, you will learn how to train, test, and run inference with your own customized models on standard datasets. We use the cityscapes dataset to train a customized Cascade Mask R-CNN R50 model as an example to demonstrate the whole process, which uses [`AugFPN`](https://github.com/Gus-Guo/AugFPN) to replace the default `FPN` as the neck and adds `Rotate` or `Translate` as training-time auto augmentation.
+
+The basic steps are as below:
+
+1. Prepare the standard dataset
+2. Prepare your own customized model
+3. Prepare a config
+4. Train, test, and run inference on the standard dataset.
+
+## Prepare the standard dataset
+
+In this note, we use the standard cityscapes dataset as an example.
+
+It is recommended to symlink the dataset root to `$MMDETECTION/data`.
+If your folder structure is different, you may need to change the corresponding paths in config files.
+
+```none
+mmdetection
+├── mmdet
+├── tools
+├── configs
+├── data
+│   ├── coco
+│   │   ├── annotations
+│   │   ├── train2017
+│   │   ├── val2017
+│   │   ├── test2017
+│   ├── cityscapes
+│   │   ├── annotations
+│   │   ├── leftImg8bit
+│   │   │   ├── train
+│   │   │   ├── val
+│   │   ├── gtFine
+│   │   │   ├── train
+│   │   │   ├── val
+│   ├── VOCdevkit
+│   │   ├── VOC2007
+│   │   ├── VOC2012
+
+```
+
+The cityscapes annotations have to be converted into COCO format using `tools/dataset_converters/cityscapes.py`:
+
+```shell
+pip install cityscapesscripts
+python tools/dataset_converters/cityscapes.py ./data/cityscapes --nproc 8 --out-dir ./data/cityscapes/annotations
+```
+
+Currently the config files in `cityscapes` use COCO pre-trained weights to initialize.
+You may want to download the pre-trained models in advance if your network is unavailable or slow; otherwise errors will occur at the beginning of training.
+
+## Prepare your own customized model
+
+The second step is to use your own module or training setting. Assume that we want to implement a new neck called `AugFPN` to replace the default `FPN` under the existing detector Cascade Mask R-CNN R50. The following implements `AugFPN` under MMDetection.
+
+### 1. Define a new neck (e.g. AugFPN)
+
+First, create a new file `mmdet/models/necks/augfpn.py`.
+
+```python
+import torch.nn as nn
+
+from ..builder import NECKS
+
+
+@NECKS.register_module()
+class AugFPN(nn.Module):
+
+    def __init__(self,
+                 in_channels,
+                 out_channels,
+                 num_outs,
+                 start_level=0,
+                 end_level=-1,
+                 add_extra_convs=False):
+        pass
+
+    def forward(self, inputs):
+        # implementation is ignored
+        pass
+```
+
+### 2. Import the module
+
+You can either add the following line to `mmdet/models/necks/__init__.py`,
+
+```python
+from .augfpn import AugFPN
+```
+
+or alternatively add
+
+```python
+custom_imports = dict(
+    imports=['mmdet.models.necks.augfpn'],
+    allow_failed_imports=False)
+```
+
+to the config file and avoid modifying the original code.
+
+### 3. Modify the config file
+
+```python
+neck=dict(
+    type='AugFPN',
+    in_channels=[256, 512, 1024, 2048],
+    out_channels=256,
+    num_outs=5)
+```
+
+For more detailed usages about customizing your own models (e.g. implementing a new backbone, head, or loss) and runtime training settings (e.g. defining a new optimizer, using gradient clipping, customizing training schedules and hooks), please refer to the guidelines [Customize Models](tutorials/customize_models.md) and [Customize Runtime Settings](tutorials/customize_runtime.md) respectively.
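+
+As a quick sanity check that the registration worked, the new neck can be built from a plain config dict once the module is imported. A sketch; it assumes the stub above has been fleshed out into a real implementation:
+
+```python
+from mmdet.models.builder import build_neck
+
+# importing the package triggers registration via mmdet/models/necks/__init__.py
+neck = build_neck(dict(
+    type='AugFPN',
+    in_channels=[256, 512, 1024, 2048],
+    out_channels=256,
+    num_outs=5))
+```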
+## Prepare a config
+
+The third step is to prepare a config for your own training setting. Assume that we want to add `AugFPN` and `Rotate` or `Translate` augmentation to the existing Cascade Mask R-CNN R50 to train on the cityscapes dataset, and that the config is placed under the directory `configs/cityscapes/` and named `cascade_mask_rcnn_r50_augfpn_autoaug_10e_cityscapes.py`. The config is as below.
+
+```python
+# The new config inherits the base configs to highlight the necessary modification
+_base_ = [
+    '../_base_/models/cascade_mask_rcnn_r50_fpn.py',
+    '../_base_/datasets/cityscapes_instance.py', '../_base_/default_runtime.py'
+]
+
+model = dict(
+    # set pretrained to None to avoid loading the ImageNet pretrained backbone;
+    # instead we set `load_from` below to load from a COCO pretrained detector.
+    pretrained=None,
+    # replace the default `FPN` with our newly implemented module `AugFPN`
+    neck=dict(
+        type='AugFPN',
+        in_channels=[256, 512, 1024, 2048],
+        out_channels=256,
+        num_outs=5),
+    # We also need to change the num_classes in head from 80 to 8, to match the
+    # cityscapes dataset's annotation. This modification involves `bbox_head` and `mask_head`.
+    roi_head=dict(
+        bbox_head=[
+            dict(
+                type='Shared2FCBBoxHead',
+                in_channels=256,
+                fc_out_channels=1024,
+                roi_feat_size=7,
+                # change the number of classes from the COCO default (80) to 8
+                num_classes=8,
+                bbox_coder=dict(
+                    type='DeltaXYWHBBoxCoder',
+                    target_means=[0., 0., 0., 0.],
+                    target_stds=[0.1, 0.1, 0.2, 0.2]),
+                reg_class_agnostic=True,
+                loss_cls=dict(
+                    type='CrossEntropyLoss',
+                    use_sigmoid=False,
+                    loss_weight=1.0),
+                loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
+                               loss_weight=1.0)),
+            dict(
+                type='Shared2FCBBoxHead',
+                in_channels=256,
+                fc_out_channels=1024,
+                roi_feat_size=7,
+                # change the number of classes from the COCO default (80) to 8
+                num_classes=8,
+                bbox_coder=dict(
+                    type='DeltaXYWHBBoxCoder',
+                    target_means=[0., 0., 0., 0.],
+                    target_stds=[0.05, 0.05, 0.1, 0.1]),
+                reg_class_agnostic=True,
+                loss_cls=dict(
+                    type='CrossEntropyLoss',
+                    use_sigmoid=False,
+                    loss_weight=1.0),
+                loss_bbox=dict(type='SmoothL1Loss', beta=1.0,
+                               loss_weight=1.0)),
+            dict(
+                type='Shared2FCBBoxHead',
+                in_channels=256,
+                fc_out_channels=1024,
+                roi_feat_size=7,
+                # change the number of classes from the COCO default (80) to 8
+                num_classes=8,
+                bbox_coder=dict(
+                    type='DeltaXYWHBBoxCoder',
+                    target_means=[0., 0., 0., 0.],
+                    target_stds=[0.033, 0.033, 0.067, 0.067]),
+                reg_class_agnostic=True,
+                loss_cls=dict(
+                    type='CrossEntropyLoss',
+                    use_sigmoid=False,
+                    loss_weight=1.0),
+                loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
+        ],
+        mask_head=dict(
+            type='FCNMaskHead',
+            num_convs=4,
+            in_channels=256,
+            conv_out_channels=256,
+            # change the number of classes from the COCO default (80) to 8
+            num_classes=8,
+            loss_mask=dict(
+                type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))))
+
+# over-write `train_pipeline` for the newly added `AutoAugment` training setting
+img_norm_cfg = dict(
+    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
+train_pipeline = [
+    dict(type='LoadImageFromFile'),
+    dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
+    dict(
+        type='AutoAugment',
+        policies=[
+            [dict(
+                 type='Rotate',
+                 level=5,
+                 img_fill_val=(124, 116, 104),
+                 prob=0.5,
+                 scale=1)
+             ],
+            [dict(type='Rotate', level=7, img_fill_val=(124, 116, 104)),
+             dict(
+                 type='Translate',
+                 level=5,
+                 prob=0.5,
+                 img_fill_val=(124, 116, 104))
+             ],
+        ]),
+    dict(
+        type='Resize', img_scale=[(2048, 800), (2048, 1024)], keep_ratio=True),
+    dict(type='RandomFlip', flip_ratio=0.5),
+    dict(type='Normalize', **img_norm_cfg),
+    dict(type='Pad', size_divisor=32),
+    dict(type='DefaultFormatBundle'),
+    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
+]
+
+# set batch size per gpu, and set the new training pipeline
+data = dict(
+    samples_per_gpu=1,
+    workers_per_gpu=3,
+    # over-write `pipeline` with the new training pipeline setting
+    train=dict(dataset=dict(pipeline=train_pipeline)))
+
+# Set optimizer
+optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
+optimizer_config = dict(grad_clip=None)
+# Set customized learning policy
+lr_config = dict(
+    policy='step',
+    warmup='linear',
+    warmup_iters=500,
+    warmup_ratio=0.001,
+    step=[8])
+total_epochs = 10
+
+# We can use the COCO pretrained Cascade Mask R-CNN R50 model for a more stable initialization
+load_from = 'https://download.openmmlab.com/mmdetection/v2.0/cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco/cascade_mask_rcnn_r50_fpn_1x_coco_20200203-9d4dcb24.pth'
+```
+
+## Train a new model
+
+To train a model with the new config, you can simply run
+
+```shell
+python tools/train.py configs/cityscapes/cascade_mask_rcnn_r50_augfpn_autoaug_10e_cityscapes.py
+```
+
+For more detailed usages, please refer to [Case 1](1_exist_data_model.md).
+
+## Test and inference
+
+To test the trained model, you can simply run
+
+```shell
+python tools/test.py configs/cityscapes/cascade_mask_rcnn_r50_augfpn_autoaug_10e_cityscapes.py work_dirs/cascade_mask_rcnn_r50_augfpn_autoaug_10e_cityscapes/latest.pth --eval bbox segm
+```
+
+For more detailed usages, please refer to [Case 1](1_exist_data_model.md).
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/Makefile b/detection_cbnet/docker-build-context/cbnetv2/docs/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..d4bb2cbb9eddb1bb1b4f366623044af8e4830919
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS    ?=
+SPHINXBUILD   ?= sphinx-build
+SOURCEDIR     = .
+BUILDDIR      = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option.  $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/api.rst b/detection_cbnet/docker-build-context/cbnetv2/docs/api.rst
new file mode 100644
index 0000000000000000000000000000000000000000..04406303ebafd54920490647714c446ede53b833
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/api.rst
@@ -0,0 +1,101 @@
+API Reference
+=================
+
+mmdet.apis
+--------------
+.. automodule:: mmdet.apis
+    :members:
+
+mmdet.core
+--------------
+
+anchor
+^^^^^^^^^^
+.. automodule:: mmdet.core.anchor
+    :members:
+
+bbox
+^^^^^^^^^^
+.. automodule:: mmdet.core.bbox
+    :members:
+
+export
+^^^^^^^^^^
+.. automodule:: mmdet.core.export
+    :members:
+
+mask
+^^^^^^^^^^
+.. automodule:: mmdet.core.mask
+    :members:
+
+evaluation
+^^^^^^^^^^
+.. automodule:: mmdet.core.evaluation
+    :members:
+
+post_processing
+^^^^^^^^^^^^^^^
+.. automodule:: mmdet.core.post_processing
+    :members:
+
+optimizer
+^^^^^^^^^^
+.. automodule:: mmdet.core.optimizer
+    :members:
+
+utils
+^^^^^^^^^^
+.. automodule:: mmdet.core.utils
+    :members:
+
+mmdet.datasets
+--------------
+
+datasets
+^^^^^^^^^^
+.. automodule:: mmdet.datasets
+    :members:
+
+pipelines
+^^^^^^^^^^
+.. automodule:: mmdet.datasets.pipelines
+    :members:
+
+mmdet.models
+--------------
+
+detectors
+^^^^^^^^^^
+.. automodule:: mmdet.models.detectors
+    :members:
+
+backbones
+^^^^^^^^^^
+.. automodule:: mmdet.models.backbones
+    :members:
+
+necks
+^^^^^^^^^^^^
+.. automodule:: mmdet.models.necks
+    :members:
+
+dense_heads
+^^^^^^^^^^^^
+.. automodule:: mmdet.models.dense_heads
+    :members:
+
+roi_heads
+^^^^^^^^^^
+.. automodule:: mmdet.models.roi_heads
+    :members:
+
+losses
+^^^^^^^^^^
+.. automodule:: mmdet.models.losses
+    :members:
+
+utils
+^^^^^^^^^^
+.. automodule:: mmdet.models.utils
+    :members:
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/changelog.md b/detection_cbnet/docker-build-context/cbnetv2/docs/changelog.md
new file mode 100644
index 0000000000000000000000000000000000000000..f3a1e7a6174abca844d09edba3380687e4b94a49
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/changelog.md
@@ -0,0 +1,927 @@
+## Changelog
+
+### v2.14.0 (29/6/2021)
+
+#### Highlights
+
+- Add `simple_test` to dense heads to improve the consistency of single-stage and two-stage detectors
+- Revert the `test_mixins` to single image test to improve efficiency and readability
+- Add Faster R-CNN and Mask R-CNN config using multi-scale training with 3x schedule
+
+#### New Features
+
+- Support pretrained models from MoCo v2 and SwAV (#5286)
+- Add Faster R-CNN and Mask R-CNN config using multi-scale training with 3x schedule (#5179, #5233)
+- Add `reduction_override` in MSELoss (#5437)
+- Stable support of exporting DETR to ONNX with dynamic shapes and batch inference (#5168)
+- Stable support of exporting PointRend to ONNX with dynamic shapes and batch inference (#5440)
+
+#### Bug Fixes
+
+- Fix size mismatch bug in `multiclass_nms` (#4980)
+- Fix the import path of `MultiScaleDeformableAttention` (#5338)
+- Fix errors in config of GCNet ResNext101 models (#5360)
+- Fix Grid-RCNN error when there is no bbox result (#5357)
+- Fix errors in `onnx_export` of bbox_head when setting reg_class_agnostic (#5468)
+- Fix type error of AutoAssign in the document (#5478)
+- Fix web links ending with `.md` (#5315)
+
+#### Improvements
+
+- Add `simple_test` to dense heads to improve the consistency of single-stage and two-stage detectors (#5264)
+- Add support for mask diagonal flip in TTA (#5403)
+- Revert the `test_mixins` to single image test to improve efficiency and readability (#5249)
+- Make YOLOv3 Neck more flexible (#5218)
+- Refactor SSD to make it more general (#5291)
+- Refactor `anchor_generator` and `point_generator` (#5349)
+- Allow to configure out the `mask_head` of the HTC algorithm (#5389)
+- Delete deprecated warning in FPN (#5311)
+- Move `model.pretrained` to `model.backbone.init_cfg` (#5370)
+- Make deployment tools more friendly to use (#5280)
+- Clarify installation documentation (#5316)
+- Add ImageNet Pretrained Models docs (#5268)
+- Add FAQ about training loss=nan solution and COCO AP or AR =-1 (#5312, #5313)
+- Change all weight links of http to https (#5328)
+
+### v2.13.0 (01/6/2021)
+
+#### Highlights
+
+- Support new methods: [CenterNet](https://arxiv.org/abs/1904.07850), [Seesaw Loss](https://arxiv.org/abs/2008.10032), [MobileNetV2](https://arxiv.org/abs/1801.04381)
+
+#### New Features
+
+- Support paper [Objects as Points](https://arxiv.org/abs/1904.07850) (#4602)
+- Support paper [Seesaw Loss for Long-Tailed Instance Segmentation (CVPR 2021)](https://arxiv.org/abs/2008.10032) (#5128)
+- Support [MobileNetV2](https://arxiv.org/abs/1801.04381) backbone and inverted residual block (#5122)
+- Support [MIM](https://github.com/open-mmlab/mim) (#5143)
+- ONNX exportation with dynamic shapes of CornerNet (#5136)
+- Add `mask_soft` config option to allow non-binary masks (#4615)
+- Add PWC metafile (#5135)
+
+#### Bug Fixes
+
+- Fix YOLOv3 FP16 training error (#5172)
+- Fix Cascade R-CNN TTA test error when `det_bboxes` length is 0 (#5221)
+- Fix `iou_thr` variable naming errors in VOC recall calculation function (#5195)
+- Fix Faster R-CNN performance drop in ONNX Runtime (#5197)
+- Fix DETR dict-changed error during iteration when using Python 3.8 (#5226)
+
+#### Improvements
+
+- Refactor ONNX export of two-stage detectors (#5205)
+- Replace MMDetection's EvalHook with MMCV's EvalHook for consistency (#4806)
+- Update RoI extractor for ONNX (#5194)
+- Use better parameter initialization in YOLOv3 head for higher performance (#5181)
+- Release new DCN models of Mask R-CNN trained with mixed precision (#5201)
+- Update YOLOv3 model weights (#5229)
+- Add DetectoRS ResNet-101 model weights (#4960)
+- Discard bboxes with sizes equal to `min_bbox_size` (#5011)
+- Remove duplicated code in DETR head (#5129)
+- Remove unnecessary object in class definition (#5180)
+- Fix doc link (#5192)
+
+### v2.12.0 (01/5/2021)
+
+#### Highlights
+
+- Support new methods: [AutoAssign](https://arxiv.org/abs/2007.03496), [YOLOF](https://arxiv.org/abs/2103.09460), and [Deformable DETR](https://arxiv.org/abs/2010.04159)
+- Stable support of exporting models to ONNX with batched images and dynamic shape (#5039)
+
+#### Backwards Incompatible Changes
+
+MMDetection is going through a big refactoring for more general and convenient usage during the releases from v2.12.0 to v2.15.0 (maybe longer).
+In v2.12.0 MMDetection inevitably brings some BC-breakings, including the MMCV dependency, model initialization, model registry, and mask AP evaluation.
+
+- MMCV version. MMDetection v2.12.0 relies on the newest features in MMCV 1.3.3, including `BaseModule` for unified parameter initialization, the model registry, and the CUDA operator `MultiScaleDeformableAttn` for [Deformable DETR](https://arxiv.org/abs/2010.04159). Note that MMCV 1.3.2 already contains all the features used by MMDet but has known issues. Therefore, we recommend users skip MMCV v1.3.2 and use v1.3.3, though v1.3.2 might work for most cases.
+- Unified model initialization (#4750). To unify the parameter initialization in OpenMMLab projects, MMCV supports `BaseModule`, which accepts `init_cfg` to allow the modules' parameters to be initialized in a flexible and unified manner. Now the users need to explicitly call `model.init_weights()` in the training script to initialize the model (as in [here](https://github.com/open-mmlab/mmdetection/blob/master/tools/train.py#L162)); previously this was handled by the detector. The models in MMDetection have been re-benchmarked to ensure accuracy based on PR #4750. **The downstream projects should update their code accordingly to use MMDetection v2.12.0**.
+- Unified model registry (#5059). To easily use backbones implemented in other OpenMMLab projects, MMDetection migrates to inherit the model registry created in MMCV (#760). In this way, as long as the backbone is supported in an OpenMMLab project and that project also uses the registry in MMCV, users can use that backbone in MMDetection by simply modifying the config without copying the code of that backbone into MMDetection.
+- Mask AP evaluation (#4898). Previous versions calculate the areas of masks through the bounding boxes when calculating the mask AP of small, medium, and large instances. To indeed use the areas of masks, we pop the key `bbox` during mask AP calculation. This change does not affect the overall mask AP evaluation and aligns the mask AP of similar models in other projects like Detectron2.
+
+#### New Features
+
+- Support paper [AutoAssign: Differentiable Label Assignment for Dense Object Detection](https://arxiv.org/abs/2007.03496) (#4295)
+- Support paper [You Only Look One-level Feature](https://arxiv.org/abs/2103.09460) (#4295)
+- Support paper [Deformable DETR: Deformable Transformers for End-to-End Object Detection](https://arxiv.org/abs/2010.04159) (#4778)
+- Support calculating IoU with FP16 tensor in `bbox_overlaps` to save memory and keep speed (#4889)
+- Add `__repr__` in custom dataset to count the number of instances (#4756)
+- Add windows support by updating requirements.txt (#5052)
+- Stable support of exporting models to ONNX with batched images and dynamic shape, including SSD, FSAF, FCOS, YOLOv3, RetinaNet, Faster R-CNN, and Mask R-CNN (#5039)
+
+#### Improvements
+
+- Use MMCV `MODEL_REGISTRY` (#5059)
+- Unified parameter initialization for more flexible usage (#4750)
+- Rename variable names and fix docstring in anchor head (#4883)
+- Support training with empty GT in Cascade RPN (#4928)
+- Add more details of usage of `test_robustness` in documentation (#4917)
+- Change to use `pycocotools` instead of `mmpycocotools` to fully support Detectron2 and MMDetection in one environment (#4939)
+- Update torch serve dockerfile to support dockers of more versions (#4954)
+- Add check for training with single class dataset (#4973)
+- Refactor transformer and DETR Head (#4763)
+- Update FPG model zoo (#5079)
+- More accurate mask AP of small/medium/large instances (#4898)
+
+#### Bug Fixes
+
+- Fix bug in mean_ap.py when calculating mAP by 11 points (#4875)
+- Fix error when key `meta` is not in old checkpoints (#4936)
+- Fix hanging bug when training with empty GT in VFNet, GFL, and FCOS by changing the place of `reduce_mean` (#4923, #4978, #5058)
+- Fix asynchronous inference error and provide related demo (#4941)
+- Fix IoU losses dimensionality mismatch error (#4982)
+- Fix torch.randperm when using PyTorch 1.8 (#5014)
+- Fix empty bbox error in `mask_head` when using CARAFE (#5062)
+- Fix `supplement_mask` bug when there are zero-size RoIs (#5065)
+- Fix testing with empty rois in RoI Heads (#5081)
+
+### v2.11.0 (01/4/2021)
+
+**Highlights**
+
+- Support new method: [Localization Distillation for Object Detection](https://arxiv.org/pdf/2102.12252.pdf)
+- Support Pytorch2ONNX with batch inference and dynamic shape
+
+**New Features**
+
+- Support [Localization Distillation for Object Detection](https://arxiv.org/pdf/2102.12252.pdf) (#4758)
+- Support Pytorch2ONNX with batch inference and dynamic shape for Faster-RCNN and mainstream one-stage detectors (#4796)
+
+**Improvements**
+
+- Support batch inference in head of RetinaNet (#4699)
+- Add batch dimension in second stage of Faster-RCNN (#4785)
+- Support batch inference in bbox coder (#4721)
+- Add check for `ann_ids` in `COCODataset` to ensure it is unique (#4789)
+- Support showing the FPN results (#4716)
+- Support dynamic shape for grid_anchor (#4684)
+- Move pycocotools version check to when it is used (#4880)
+
+**Bug Fixes**
+
+- Fix a bug of TridentNet when doing batch inference (#4717)
+- Fix a bug of Pytorch2ONNX in FSAF (#4735)
+- Fix a bug when showing images of float type (#4732)
+
+### v2.10.0 (01/03/2021)
+
+#### Highlights
+
+- Support new method: [FPG](https://arxiv.org/abs/2004.03580)
+- Support ONNX2TensorRT for SSD, FSAF, FCOS, YOLOv3, and Faster R-CNN.
+
+#### New Features
+
+- Support ONNX2TensorRT for SSD, FSAF, FCOS, YOLOv3, and Faster R-CNN (#4569)
+- Support [Feature Pyramid Grids (FPG)](https://arxiv.org/abs/2004.03580) (#4645)
+- Support video demo (#4420)
+- Add seed option for sampler (#4665)
+- Support customizing the type of runner (#4570, #4669)
+- Support synchronizing BN buffer in `EvalHook` (#4582)
+- Add script for GIF demo (#4573)
+
+#### Bug Fixes
+
+- Fix ConfigDict AttributeError and add Colab link (#4643)
+- Avoid crash in empty gt training of GFL head (#4631)
+- Fix `iou_thrs` bug in RPN evaluation (#4581)
+- Fix syntax error of config when upgrading model version (#4584)
+
+#### Improvements
+
+- Refactor unit test file structures (#4600)
+- Refactor nms config (#4636)
+- Get loading pipeline by checking the class directly rather than through config strings (#4619)
+- Add doctests for mask target generation and mask structures (#4614)
+- Use deep copy when copying pipeline arguments (#4621)
+- Update documentation (#4642, #4650, #4620, #4630)
+- Remove redundant code calling `import_modules_from_strings` (#4601)
+- Clean deprecated FP16 API (#4571)
+- Check whether `CLASSES` is correctly initialized in the initialization of `XMLDataset` (#4555)
+- Support batch inference in the inference API (#4462, #4526)
+- Clean deprecated warning and fix 'meta' error (#4695)
+
+### v2.9.0 (01/02/2021)
+
+#### Highlights
+
+- Support new methods: [SCNet](https://arxiv.org/abs/2012.10150), [Sparse R-CNN](https://arxiv.org/abs/2011.12450)
+- Move `train_cfg` and `test_cfg` into model in configs
+- Support visualizing results based on prediction quality
+
+#### New Features
+
+- Support [SCNet](https://arxiv.org/abs/2012.10150) (#4356)
+- Support [Sparse R-CNN](https://arxiv.org/abs/2011.12450) (#4219)
+- Support evaluating mAP at multiple IoUs (#4398)
+- Support concatenated datasets for testing (#4452)
+- Support visualizing results based on prediction quality (#4441)
+- Add ONNX simplify option to Pytorch2ONNX script (#4468)
+- Add hook for checking compatibility of class numbers in heads and datasets (#4508)
+
+#### Bug Fixes
+
+- Fix CPU inference bug of Cascade RPN (#4410)
+- Fix NMS error of CornerNet when there is no prediction box (#4409)
+- Fix TypeError in CornerNet inference (#4411)
+- Fix bug of PAA when training with background images (#4391)
+- Fix the error that the window data is not destroyed when `out_file` is not None and `show==False` (#4442)
+- Fix order of NMS `score_factor` that would decrease the performance of YOLOv3 (#4473)
+- Fix bug in HTC TTA when the number of detection boxes is 0 (#4516)
+- Fix resize error in mask data structures (#4520)
+
+#### Improvements
+
+- Allow to customize classes in LVIS dataset (#4382)
+- Add tutorials for building new models with existing datasets (#4396)
+- Add CPU compatibility information in documentation (#4405)
+- Add documentation of deprecated `ImageToTensor` for batch inference (#4408)
+- Add more details in documentation for customizing datasets (#4430)
+- Switch `imshow_det_bboxes` visualization backend from OpenCV to Matplotlib (#4389)
+- Deprecate `ImageToTensor` in `image_demo.py` (#4400)
+- Move train_cfg/test_cfg into model (#4347, #4489)
+- Update docstring for `reg_decoded_bbox` option in bbox heads (#4467)
+- Update dataset information in documentation (#4525)
+- Release pre-trained R50 and R101 PAA detectors with multi-scale 3x training schedules (#4495)
+- Add guidance for speed benchmark (#4537)
+
+### v2.8.0 (04/01/2021)
+
+#### Highlights
+
+- Support new methods: [Cascade RPN](https://arxiv.org/abs/1909.06720), [TridentNet](https://arxiv.org/abs/1901.01892)
+
+#### New Features
+
+- Support [Cascade RPN](https://arxiv.org/abs/1909.06720) (#1900)
+- Support [TridentNet](https://arxiv.org/abs/1901.01892) (#3313)
+
+#### Bug Fixes
+
+- Fix bug of show result in async_benchmark (#4367)
+- Fix scale factor in MaskTestMixin (#4366)
+- Fix bug when returning indices in `multiclass_nms` (#4362)
+- Fix bug of empirical attention in the ResNeXt backbone (#4300)
+- Fix bug of `img_norm_cfg` in FCOS-HRNet models with updated performance and models (#4250)
+- Fix invalid checkpoint and log in Mask R-CNN models on Cityscapes dataset (#4287)
+- Fix bug in distributed sampler when dataset is too small (#4257)
+- Fix bug of 'PAFPN has no attribute extra_convs_on_inputs' (#4235)
+
+#### Improvements
+
+- Update model url from aws to aliyun (#4349)
+- Update ATSS for PyTorch 1.6+ (#4359)
+- Update script to install ruby in pre-commit installation (#4360)
+- Delete deprecated `mmdet.ops` (#4325)
+- Refactor hungarian assigner for more general usage in Sparse R-CNN (#4259)
+- Handle scipy import in DETR to reduce package dependencies (#4339)
+- Update documentation of usages for config options after MMCV (1.2.3) supports overriding list in config (#4326)
+- Update pre-train models of faster rcnn trained on COCO subsets (#4307)
+- Avoid zero or too small value for beta in Dynamic R-CNN (#4303)
+- Add documentation for Pytorch2ONNX (#4271)
+- Add deprecation warning for FPN arguments (#4264)
+- Support returning indices of kept bboxes when using nms (#4251)
+- Update type and device requirements when creating tensors in `GFLHead` (#4210)
+- Update device requirements when creating tensors in `CrossEntropyLoss` (#4224)
+
+### v2.7.0 (30/11/2020)
+
+- Support new methods: [DETR](https://arxiv.org/abs/2005.12872), [ResNeSt](https://arxiv.org/abs/2004.08955), Faster R-CNN DC5.
+- Support YOLO, Mask R-CNN, and Cascade R-CNN models exportable to ONNX.
+
+#### New Features
+
+- Support [DETR](https://arxiv.org/abs/2005.12872) (#4201, #4206)
+- Support linking the best checkpoint in training (#3773)
+- Support overriding config through options in inference.py (#4175)
+- Support YOLO, Mask R-CNN, and Cascade R-CNN models exportable to ONNX (#4087, #4083)
+- Support [ResNeSt](https://arxiv.org/abs/2004.08955) backbone (#2959)
+- Support unclip border bbox regression (#4076)
+- Add tpfp function for evaluating AP (#4069)
+- Support mixed precision training of SSD detector with other backbones (#4081)
+- Add Faster R-CNN DC5 models (#4043)
+
+#### Bug Fixes
+
+- Fix bug of `gpu_id` in distributed training mode (#4163)
+- Support Albumentations with version higher than 0.5 (#4032)
+- Fix num_classes bug in faster rcnn config (#4088)
+- Update code in docs/2_new_data_model.md (#4041)
+
+#### Improvements
+
+- Ensure DCN offset to have similar type as features in VFNet (#4198)
+- Add config links in README files of models (#4190)
+- Add tutorials for loss conventions (#3818)
+- Add solution to installation issues in 30-series GPUs (#4176)
+- Update docker version in get_started.md (#4145)
+- Add model statistics and polish some titles in configs README (#4140)
+- Clamp neg probability in FreeAnchor (#4082)
+- Speed up expanding large images (#4089)
+- Fix Pytorch 1.7 incompatibility issues (#4103)
+- Update troubleshooting page to resolve segmentation fault (#4055)
+- Update aLRP-Loss in project page (#4078)
+- Clean duplicated `reduce_mean` function (#4056)
+- Refactor Q&A (#4045)
+
+### v2.6.0 (1/11/2020)
+
+- Support new method: [VarifocalNet](https://arxiv.org/abs/2008.13367).
+- Refactored documentation with more tutorials.
+
+#### New Features
+
+- Support GIoU calculation in `BboxOverlaps2D`, and re-implement `giou_loss` using `bbox_overlaps` (#3936)
+- Support random sampling in CPU mode (#3948)
+- Support VarifocalNet (#3666, #4024)
+
+#### Bug Fixes
+
+- Fix SABL validation bug in Cascade R-CNN (#3913)
+- Avoid division by zero in PAA head when num_pos=0 (#3938)
+- Fix temporary directory bug of multi-node testing error (#4034, #4017)
+- Fix `--show-dir` option in test script (#4025)
+- Fix GA-RetinaNet r50 model url (#3983)
+- Update code in docs and fix broken urls (#3947)
+
+#### Improvements
+
+- Refactor pytorch2onnx API into `mmdet.core.export` and use `generate_inputs_and_wrap_model` for pytorch2onnx (#3857, #3912)
+- Update RPN upgrade scripts for v2.5.0 compatibility (#3986)
+- Use mmcv `tensor2imgs` (#4010)
+- Update test robustness (#4000)
+- Update troubleshooting page (#3994)
+- Accelerate PAA training speed (#3985)
+- Support batch_size > 1 in validation (#3966)
+- Use RoIAlign implemented in MMCV for inference in CPU mode (#3930)
+- Documentation refactoring (#4031)
+
+### v2.5.0 (5/10/2020)
+
+#### Highlights
+
+- Support new methods: [YOLACT](https://arxiv.org/abs/1904.02689), [CentripetalNet](https://arxiv.org/abs/2003.09119).
+- Add more documentation for easier and clearer usage.
+
+#### Backwards Incompatible Changes
+
+**FP16 related methods are imported from mmcv instead of mmdet. (#3766, #3822)**
+Mixed precision training utils in `mmdet.core.fp16` are moved to `mmcv.runner`, including `force_fp32`, `auto_fp16`, `wrap_fp16_model`, and `Fp16OptimizerHook`. A deprecation warning will be raised if users attempt to import those methods from `mmdet.core.fp16`, and they will be finally removed in V2.10.0.
+
+**[0, N-1] represents foreground classes and N indicates background classes for all models. (#3221)**
+Before v2.5.0, the background label for RPN is 0, and N for other heads. Now the behavior is consistent for all models. Thus `self.background_labels` in `dense_heads` is removed and all heads use `self.num_classes` to indicate the class index of background labels.
+This change has no effect on the pre-trained models in the v2.x model zoo, but will affect the training of all models with RPN heads. Two-stage detectors whose RPN head uses softmax will be affected because the order of categories is changed.
+
+**Only call `get_subset_by_classes` when `test_mode=True` and `self.filter_empty_gt=True` (#3695)**
+Function `get_subset_by_classes` in dataset is refactored and only filters out images when `test_mode=True` and `self.filter_empty_gt=True`.
+In the original implementation, `get_subset_by_classes` is not related to the flag `self.filter_empty_gt` and will only be called when the classes are set during initialization, no matter whether `test_mode` is `True` or `False`. This brings ambiguous behavior and potential bugs in many cases. After v2.5.0, if `filter_empty_gt=False`, no matter whether the classes are specified in a dataset, the dataset will use all the images in the annotations. If `filter_empty_gt=True` and `test_mode=True`, no matter whether the classes are specified, the dataset will call `get_subset_by_classes` to check the images and filter out images containing no GT boxes. Therefore, the users should be responsible for the data filtering/cleaning process for the test dataset.
+
+#### New Features
+
+- Test time augmentation for single stage detectors (#3844, #3638)
+- Support showing the name of experiments during training (#3764)
+- Add `Shear`, `Rotate`, `Translate` Augmentation (#3656, #3619, #3687)
+- Add image-only transformations including `Contrast`, `Equalize`, `Color`, and `Brightness`. (#3643)
+- Support [YOLACT](https://arxiv.org/abs/1904.02689) (#3456)
+- Support [CentripetalNet](https://arxiv.org/abs/2003.09119) (#3390)
+- Support PyTorch 1.6 in docker (#3905)
+
+#### Bug Fixes
+
+- Fix the bug of training ATSS when there are no ground truth boxes (#3702)
+- Fix the bug of using Focal Loss when `num_pos` is 0 (#3702)
+- Fix the label index mapping in dataset browser (#3708)
+- Fix Mask R-CNN training stuck problem when there are no positive rois (#3713)
+- Fix the bug of `self.rpn_head.test_cfg` in `RPNTestMixin` by using `self.rpn_head` in rpn head (#3808)
+- Fix deprecated `Conv2d` from mmcv.ops (#3791)
+- Fix device bug in RepPoints (#3836)
+- Fix SABL validation bug (#3849)
+- Use `https://download.openmmlab.com/mmcv/dist/index.html` for installing MMCV (#3840)
+- Fix nonzero in NMS for PyTorch 1.6.0 (#3867)
+- Fix the API change bug of PAA (#3883)
+- Fix typo in bbox_flip (#3886)
+- Fix cv2 import error of libGL.so.1 in Dockerfile (#3891)
+
+#### Improvements
+
+- Change to use `mmcv.utils.collect_env` for collecting environment information to avoid duplicate code (#3779)
+- Update checkpoint file names to v2.0 models in documentation (#3795)
+- Update tutorials for changing runtime settings (#3778) and modifying loss (#3777)
+- Improve the function of `simple_test_bboxes` in SABL (#3853)
+- Convert mask to bool before using it as image index for robustness and speedup (#3870)
+- Improve documentation of modules and dataset customization (#3821)
+
+### v2.4.0 (5/9/2020)
+
+**Highlights**
+
+- Fix lots of issues/bugs and reorganize the troubleshooting page
+- Support new methods [SABL](https://arxiv.org/abs/1912.04260), [YOLOv3](https://arxiv.org/abs/1804.02767), and [PAA Assign](https://arxiv.org/abs/2007.08103)
+- Support Batch Inference
+- Start to publish `mmdet` package to PyPI since v2.3.0
+- Switch model zoo to download.openmmlab.com
+
+**Backwards Incompatible Changes**
+
+- Support Batch Inference (#3564, #3686, #3705): Since v2.4.0, MMDetection can run inference on multiple images in a single GPU.
+  This change influences all the test APIs in MMDetection and downstream codebases. To help users migrate their code, we use `replace_ImageToTensor` (#3686) to convert legacy test data pipelines during dataset initialization.
+- Support RandomFlip with horizontal/vertical/diagonal direction (#3608): Since v2.4.0, MMDetection supports horizontal/vertical/diagonal flip in the data augmentation. This influences bounding box, mask, and image transformations in the data augmentation process and the process that maps those data back to the original format.
+- Migrate to use `mmlvis` and `mmpycocotools` for COCO and LVIS dataset (#3727). The APIs are fully compatible with the original `lvis` and `pycocotools`. Users need to uninstall the existing pycocotools and lvis packages in their environment first and install `mmlvis` & `mmpycocotools`.
+
+**Bug Fixes**
+
+- Fix default mean/std for onnx (#3491)
+- Fix coco evaluation and add metric items (#3497)
+- Fix typo for install.md (#3516)
+- Fix ATSS when samples per gpu is 1 (#3528)
+- Fix import of fuse_conv_bn (#3529)
+- Fix bug of gaussian_target, update unittest of heatmap (#3543)
+- Fix VOC2012 evaluation (#3553)
+- Fix scale factor bug of rescale (#3566)
+- Fix with_xxx_attributes in base detector (#3567)
+- Fix boxes scaling when number is 0 (#3575)
+- Fix rfp check when neck config is a list (#3591)
+- Fix import of fuse conv bn in benchmark.py (#3606)
+- Fix webcam demo (#3634)
+- Fix typo and itemize issues in tutorial (#3658)
+- Fix error in distributed training when some levels of FPN are not assigned with bounding boxes (#3670)
+- Fix the width and height orders of stride in valid flag generation (#3685)
+- Fix weight initialization bug in Res2Net DCN (#3714)
+- Fix bug in OHEMSampler (#3677)
+
+**New Features**
+
+- Support Cutout augmentation (#3521)
+- Support evaluation on multiple datasets through ConcatDataset (#3522)
+- Support [PAA assign](https://arxiv.org/abs/2007.08103) (#3547)
+- Support eval metric with pickle results (#3607)
+- Support [YOLOv3](https://arxiv.org/abs/1804.02767) (#3083)
+- Support [SABL](https://arxiv.org/abs/1912.04260) (#3603)
+- Support publishing to PyPI in github-action (#3510)
+- Support custom imports (#3641)
+
+**Improvements**
+
+- Refactor common issues in documentation (#3530)
+- Add pytorch 1.6 to CI config (#3532)
+- Add config to runner meta (#3534)
+- Add eval-option flag for testing (#3537)
+- Add init_eval to evaluation hook (#3550)
+- Add include_bkg in ClassBalancedDataset (#3577)
+- Use config's loading in inference_detector (#3611)
+- Add ATSS ResNet-101 models in model zoo (#3639)
+- Update urls to download.openmmlab.com (#3665)
+- Support non-mask training for CocoDataset (#3711)
+
+### v2.3.0 (5/8/2020)
+
+**Highlights**
+
+- The CUDA/C++ operators have been moved to `mmcv.ops`. For backward compatibility `mmdet.ops` is kept as wrappers of `mmcv.ops`.
+- Support new methods [CornerNet](https://arxiv.org/abs/1808.01244), [DIOU](https://arxiv.org/abs/1911.08287)/[CIOU](https://arxiv.org/abs/2005.03572) loss, and new dataset: [LVIS V1](https://arxiv.org/abs/1908.03195)
+- Provide more detailed colab training tutorials and more complete documentation.
+- Support converting RetinaNet from Pytorch to ONNX.
+ +**Bug Fixes** + +- Fix the model initialization bug of DetectoRS (#3187) +- Fix the bug of module names in NASFCOSHead (#3205) +- Fix the filename bug in publish_model.py (#3237) +- Fix the dimensionality bug when `inside_flags.any()` is `False` in dense heads (#3242) +- Fix the bug of forgetting to pass flip directions in `MultiScaleFlipAug` (#3262) +- Fix the bug caused by the default value of `stem_channels` (#3333) +- Fix the bug of model checkpoint loading for CPU inference (#3318, #3316) +- Fix topk bug when box number is smaller than the expected topk number in ATSSAssigner (#3361) +- Fix the gt priority bug in center_region_assigner.py (#3208) +- Fix NaN issue of iou calculation in iou_loss.py (#3394) +- Fix the bug that `iou_thrs` is not actually used during evaluation in coco.py (#3407) +- Fix test-time augmentation of RepPoints (#3435) +- Fix RuntimeError caused by non-contiguous tensor in Res2Net+DCN (#3412) + +**New Features** + +- Support [CornerNet](https://arxiv.org/abs/1808.01244) (#3036) +- Support [DIOU](https://arxiv.org/abs/1911.08287)/[CIOU](https://arxiv.org/abs/2005.03572) loss (#3151) +- Support [LVIS V1](https://arxiv.org/abs/1908.03195) dataset (#) +- Support customized hooks in training (#3395) +- Support fp16 training of generalized focal loss (#3410) +- Support converting RetinaNet from PyTorch to ONNX (#3075) + +**Improvements** + +- Support processing ignore boxes in ATSS assigner (#3082) +- Allow cropping images without ground truth in `RandomCrop` (#3153) +- Enable the `Accuracy` module to set threshold (#3155) +- Refactor unit tests (#3206) +- Unify the training settings of `to_float32` and `norm_cfg` in RegNets configs (#3210) +- Add Colab training tutorials for beginners (#3213, #3273) +- Move CUDA/C++ operators into `mmcv.ops` and keep `mmdet.ops` as wrappers for backward compatibility (#3232, #3457) +- Update installation scripts in documentation (#3290) and dockerfile (#3320) +- Support setting the image resize backend (#3392) +- Remove git hash in version file (#3466) +- Check mmcv version to force version compatibility (#3460) + +### v2.2.0 (1/7/2020) + +**Highlights** + +- Support new methods: [DetectoRS](https://arxiv.org/abs/2006.02334), [PointRend](https://arxiv.org/abs/1912.08193), [Generalized Focal Loss](https://arxiv.org/abs/2006.04388), [Dynamic R-CNN](https://arxiv.org/abs/2004.06002) + +**Bug Fixes** + +- Fix FreeAnchor when no gt in image (#3176) +- Clean up deprecated usage of `register_module()` (#3092, #3161) +- Fix pretrain bug in NAS FCOS (#3145) +- Fix `num_classes` in SSD (#3142) +- Fix FCOS warmup (#3119) +- Fix `rstrip` in `tools/publish_model.py` +- Fix `flip_ratio` default value in RandomFlip pipeline (#3106) +- Fix cityscapes eval with ms_rcnn (#3112) +- Fix RPN softmax (#3056) +- Fix filename of LVIS@v0.5 (#2998) +- Fix nan loss by filtering out-of-frame gt_bboxes in COCO (#2999) +- Fix bug in FSAF (#3018) +- Add FocalLoss `num_classes` check (#2964) +- Fix PISA Loss when there are no gts (#2992) +- Avoid nan in `iou_calculator` (#2975) +- Prevent possible bugs in loading and transforms caused by shallow copy (#2967) + +**New Features** + +- Add DetectoRS (#3064) +- Support Generalized Focal Loss (#3097) +- Support PointRend (#2752) +- Support Dynamic R-CNN (#3040) +- Add DeepFashion dataset (#2968) +- Implement FCOS training tricks (#2935) +- Use BaseDenseHead as base class for anchor-based heads (#2963) +- Add `with_cp` for BasicBlock (#2891) +- Add `stem_channels` argument for ResNet (#2954) + +**Improvements** + +- Add 
anchor-free base head (#2867) +- Migrate to GitHub Actions (#3137) +- Add docstring for datasets, pipelines, core modules and methods (#3130, #3125, #3120) +- Add VOC benchmark (#3060) +- Add `concat` mode in GRoI (#3098) +- Remove cmd arg `autorescale-lr` (#3080) +- Use `len(data['img_metas'])` to indicate `num_samples` (#3073, #3053) +- Switch to EpochBasedRunner (#2976) + +### v2.1.0 (8/6/2020) + +**Highlights** + +- Support new backbones: [RegNetX](https://arxiv.org/abs/2003.13678), [Res2Net](https://arxiv.org/abs/1904.01169) +- Support new methods: [NASFCOS](https://arxiv.org/abs/1906.04423), [PISA](https://arxiv.org/abs/1904.04821), [GRoIE](https://arxiv.org/abs/2004.13665) +- Support new dataset: [LVIS](https://arxiv.org/abs/1908.03195) + +**Bug Fixes** + +- Change the CLI argument `--validate` to `--no-validate` to enable validation after training epochs by default. (#2651) +- Add missing cython to docker file (#2713) +- Fix bug in the NMS CPU implementation (#2754) +- Fix bug when showing mask results (#2763) +- Fix GCC requirement (#2806) +- Fix bug in async test (#2820) +- Fix mask encoding-decoding bugs in test API (#2824) +- Fix bug in test time augmentation (#2858, #2921, #2944) +- Fix a typo in comment of apis/train (#2877) +- Fix the bug of returning None when no gt bboxes are in the original image in `RandomCrop`. Fix the bug that fails to handle `gt_bboxes_ignore`, `gt_label_ignore`, and `gt_masks_ignore` in `RandomCrop`, `MinIoURandomCrop` and `Expand` modules. (#2810) +- Fix bug of `base_channels` of RegNet (#2917) +- Fix the bug of logger when loading pre-trained weights in base detector (#2936) + +**New Features** + +- Add IoU models (#2666) +- Add Colab demo for inference +- Support class-agnostic NMS (#2553) +- Add benchmark gathering scripts for development only (#2676) +- Add mmdet-based project links (#2736, #2767, #2895) +- Add config dump in training (#2779) +- Add ClassBalancedDataset (#2721) +- Add res2net backbone (#2237) +- Support RegNetX models (#2710) +- Use `mmcv.FileClient` to support different storage backends (#2712) +- Code Release: Prime Sample Attention in Object Detection (CVPR 2020) (#2626) +- Implement NASFCOS (#2682) +- Add class weight in CrossEntropyLoss (#2797) +- Support LVIS dataset (#2088) +- Support GRoIE (#2584) + +**Improvements** + +- Allow different x and y strides in anchor heads. (#2629) +- Make FSAF loss more robust to no gt (#2680) +- Compute pure inference time instead (#2657) and update inference speed (#2730) +- Avoid cropping patches with zero area. (#2704) +- Add warnings when deprecated `imgs_per_gpu` is used. (#2700) +- Add a Mask R-CNN config example (#2645) +- Update model zoo (#2762, #2866, #2876, #2879, #2831) +- Add `ori_filename` to img_metas and use it in test show-dir (#2612) +- Use `img_fields` to handle multiple images during image transform (#2800) +- Add upsample_cfg support in FPN (#2787) +- Add `['img']` as default `img_fields` for backward compatibility (#2809) +- Rename the pretrained model from `open-mmlab://resnet50_caffe` and `open-mmlab://resnet50_caffe_bgr` to `open-mmlab://detectron/resnet50_caffe` and `open-mmlab://detectron2/resnet50_caffe`.
(#2832) +- Add `sleep(2)` in test.py to reduce the hanging problem (#2847) +- Support `c10::half` in CARAFE (#2890) +- Improve documentation (#2918, #2714) +- Use optimizer constructor in mmcv and clean the original implementation in `mmdet.core.optimizer` (#2947) + +### v2.0.0 (6/5/2020) + +In this release, we made lots of major refactoring and modifications. + +1. **Faster speed**. We optimize the training and inference speed for common models, achieving up to 30% speedup for training and 25% for inference. Please refer to [model zoo](model_zoo.md#comparison-with-detectron2) for details. + +2. **Higher performance**. We change some default hyperparameters with no additional cost, which leads to a gain of performance for most models. Please refer to [compatibility](compatibility.md#training-hyperparameters) for details. + +3. **More documentation and tutorials**. We add a bunch of documentation and tutorials to help users get started more smoothly. Read it [here](https://mmdetection.readthedocs.io/en/latest/). + +4. **Support PyTorch 1.5**. The support for 1.1 and 1.2 is dropped, and we switch to some new APIs. + +5. **Better configuration system**. Inheritance is supported to reduce the redundancy of configs. + +6. **Better modular design**. Towards the goal of simplicity and flexibility, we simplify some encapsulation while adding more configurable modules like BBoxCoder, IoUCalculator, OptimizerConstructor, RoIHead. Target computation is also included in heads and the call hierarchy is simpler. + +7. Support new methods: [FSAF](https://arxiv.org/abs/1903.00621) and PAFPN (part of [PANet](https://arxiv.org/abs/1803.01534)). + +**Breaking Changes** +Models trained with MMDetection 1.x are not fully compatible with 2.0; please refer to the [compatibility doc](compatibility.md) for the details and how to migrate to the new version. + +**Improvements** + +- Unify CUDA and C++ APIs for custom ops. (#2277) +- New config files with inheritance. (#2216) +- Encapsulate the second stage into RoI heads. (#1999) +- Refactor GCNet/EmpiricalAttention into plugins. (#2345) +- Set low quality match as an option in IoU-based bbox assigners. (#2375) +- Change the codebase's coordinate system. (#2380) +- Refactor the category order in heads. 0 means the first positive class instead of background now. (#2374) +- Add bbox sampler and assigner registry. (#2419) +- Speed up the inference of RPN. (#2420) +- Add `train_cfg` and `test_cfg` as class members in all anchor heads. (#2422) +- Merge target computation methods into heads. (#2429) +- Add bbox coder to support different bbox encoding and losses. (#2480) +- Unify the API for regression loss. (#2156) +- Refactor Anchor Generator. (#2474) +- Make `lr` an optional argument for optimizers. (#2509) +- Migrate to modules and methods in MMCV. (#2502, #2511, #2569, #2572) +- Support PyTorch 1.5. (#2524) +- Drop the support for Python 3.5 and use F-string in the codebase. (#2531) + +**Bug Fixes** + +- Fix the scale factors for resized images without keeping the aspect ratio. (#2039) +- Check if max_num > 0 before slicing in NMS. (#2486) +- Fix Deformable RoIPool when there is no instance. (#2490) +- Fix the default value of assigned labels. (#2536) +- Fix the evaluation of Cityscapes. (#2578) + +**New Features** + +- Add deep_stem and avg_down option to ResNet, i.e., support ResNetV1d. (#2252) +- Add L1 loss. (#2376) +- Support both polygon and bitmap for instance masks. (#2353, #2540) +- Support CPU mode for inference.
(#2385) +- Add optimizer constructor for complicated configuration of optimizers. (#2397, #2488) +- Implement PAFPN. (#2392) +- Support empty tensor input for some modules. (#2280) +- Support custom dataset classes without overriding built-in ones. (#2408, #2443) +- Support training on subsets of the COCO dataset. (#2340) +- Add iou_calculator to potentially support more IoU calculation methods. (#2405) +- Support class-wise mean AP (was removed in the last version). (#2459) +- Add option to save the testing result images. (#2414) +- Support MomentumUpdaterHook. (#2571) +- Add a demo to run inference on a single image. (#2605) + +### v1.1.0 (24/2/2020) + +**Highlights** + +- Dataset evaluation is rewritten with a unified API, which is used by both evaluation hooks and test scripts. +- Support new methods: [CARAFE](https://arxiv.org/abs/1905.02188). + +**Breaking Changes** + +- The new MMDDP inherits from the official DDP, thus the `__init__` API is changed to be the same as the official DDP. +- The `mask_head` field in HTC config files is modified. +- The evaluation and testing script is updated. +- In all transforms, instance masks are stored as a numpy array shaped (n, h, w) instead of a list of (h, w) arrays, where n is the number of instances. + +**Bug Fixes** + +- Fix IOU assigners when ignore_iof_thr > 0 and there are no pred boxes. (#2135) +- Fix mAP evaluation when there are no ignored boxes. (#2116) +- Fix the empty RoI input for Deformable RoI Pooling. (#2099) +- Fix the dataset settings for multiple workflows. (#2103) +- Fix the warning related to `torch.uint8` in PyTorch 1.4. (#2105) +- Fix the inference demo on devices other than gpu:0. (#2098) +- Fix Dockerfile. (#2097) +- Fix the bug that `pad_val` is unused in Pad transform. (#2093) +- Fix the albumentation transform when there is no ground truth bbox. (#2032) + +**Improvements** + +- Use torch instead of numpy for random sampling. (#2094) +- Migrate to the new MMDDP implementation in MMCV v0.3. (#2090) +- Add meta information in logs. (#2086) +- Rewrite Soft-NMS as a PyTorch extension and remove Cython as a dependency. (#2056) +- Rewrite dataset evaluation. (#2042, #2087, #2114, #2128) +- Use numpy array for masks in transforms. (#2030) + +**New Features** + +- Implement "CARAFE: Content-Aware ReAssembly of FEatures". (#1583) +- Add `worker_init_fn()` in data_loader when seed is set. (#2066, #2111) +- Add logging utils. (#2035) + +### v1.0.0 (30/1/2020) + +This release mainly improves the code quality and adds more docstrings. + +**Highlights** + +- Documentation is online now: https://mmdetection.readthedocs.io. +- Support new models: [ATSS](https://arxiv.org/abs/1912.02424). +- DCN is now available with the APIs `build_conv_layer` and `ConvModule`, like a normal conv layer. +- A tool to collect environment information is available for troubleshooting. + +**Bug Fixes** + +- Fix the incompatibility of the latest numpy and pycocotools. (#2024) +- Fix the case when the distributed package is unavailable, e.g., on Windows. (#1985) +- Fix the dimension issue for `refine_bboxes()`. (#1962) +- Fix the typo when `seg_prefix` is a list. (#1906) +- Add segmentation map cropping to RandomCrop. (#1880) +- Fix the return value of `ga_shape_target_single()`. (#1853) +- Fix the loaded shape of empty proposals. (#1819) +- Fix the mask data type when using albumentation. (#1818) + +**Improvements** + +- Enhance AssignResult and SamplingResult. (#1995) +- Add ability to overwrite existing module in Registry.
(#1982) +- Reorganize requirements and make albumentations and imagecorruptions optional. (#1969) +- Check NaN in `SSDHead`. (#1935) +- Encapsulate the DCN in ResNe(X)t into a ConvModule & Conv_layers. (#1894) +- Refactor mAP evaluation and support multiprocessing and logging. (#1889) +- Init the root logger before constructing Runner to log more information. (#1865) +- Split `SegResizeFlipPadRescale` into different existing transforms. (#1852) +- Move `init_dist()` to MMCV. (#1851) +- Documentation and docstring improvements. (#1971, #1938, #1869, #1838) +- Fix the color of the same class for mask visualization. (#1834) +- Remove the option `keep_all_stages` in HTC and Cascade R-CNN. (#1806) + +**New Features** + +- Add two test-time options `crop_mask` and `rle_mask_encode` for mask heads. (#2013) +- Support loading grayscale images as single channel. (#1975) +- Implement "Bridging the Gap Between Anchor-based and Anchor-free Detection via Adaptive Training Sample Selection". (#1872) +- Add Sphinx-generated docs. (#1859, #1864) +- Add GN support for flops computation. (#1850) +- Collect env info for troubleshooting. (#1812) + +### v1.0rc1 (13/12/2019) + +The RC1 release mainly focuses on improving the user experience and fixing bugs. + +**Highlights** + +- Support new models: [FoveaBox](https://arxiv.org/abs/1904.03797), [RepPoints](https://arxiv.org/abs/1904.11490) and [FreeAnchor](https://arxiv.org/abs/1909.02466). +- Add a Dockerfile. +- Add a Jupyter notebook demo and a webcam demo. +- Set up the code style and CI. +- Add lots of docstrings and unit tests. +- Fix lots of bugs. + +**Breaking Changes** + +- There was a bug for computing COCO-style mAP w.r.t. different scales (AP_s, AP_m, AP_l), introduced by #621. (#1679) + +**Bug Fixes** + +- Fix a sampling interval bug in Libra R-CNN. (#1800) +- Fix the learning rate in SSD300 WIDER FACE. (#1781) +- Fix the scaling issue when `keep_ratio=False`. (#1730) +- Fix typos. (#1721, #1492, #1242, #1108, #1107) +- Fix the shuffle argument in `build_dataloader`. (#1693) +- Clip the proposal when computing mask targets. (#1688) +- Fix the "index out of range" bug for samplers in some corner cases. (#1610, #1404) +- Fix the NMS issue on devices other than GPU:0. (#1603) +- Fix SSD Head and GHM Loss on CPU. (#1578) +- Fix the OOM error when there are too many gt bboxes. (#1575) +- Fix the wrong keyword argument `nms_cfg` in HTC. (#1573) +- Process masks and semantic segmentation in Expand and MinIoUCrop transforms. (#1550, #1361) +- Fix a scale bug in the Non Local op. (#1528) +- Fix a bug in transforms when `gt_bboxes_ignore` is None. (#1498) +- Fix a bug when `img_prefix` is None. (#1497) +- Pass the device argument to `grid_anchors` and `valid_flags`. (#1478) +- Fix the data pipeline for test_robustness. (#1476) +- Fix the argument type of deformable pooling. (#1390) +- Fix the coco_eval when there are only two classes. (#1376) +- Fix a bug in Modulated DeformableConv when deformable_group>1. (#1359) +- Fix the mask cropping in RandomCrop. (#1333) +- Fix zero outputs in DeformConv when not running on cuda:0. (#1326) +- Fix the type issue in Expand. (#1288) +- Fix the inference API. (#1255) +- Fix the inplace operation in Expand. (#1249) +- Fix the from-scratch training config. (#1196) +- Fix inplace add in RoIExtractor which causes an error in PyTorch 1.2. (#1160) +- Fix FCOS when input images have no positive samples. (#1136) +- Fix recursive imports. (#1099) + +**Improvements** + +- Print the config file and mmdet version in the log.
(#1721) +- Lint the code before compiling in Travis CI. (#1715) +- Add a probability argument for the `Expand` transform. (#1651) +- Update the PyTorch and CUDA version in the docker file. (#1615) +- Raise a warning when specifying `--validate` in non-distributed training. (#1624, #1651) +- Beautify the mAP printing. (#1614) +- Add pre-commit hook. (#1536) +- Add the argument `in_channels` to backbones. (#1475) +- Add lots of docstrings and unit tests, thanks to [@Erotemic](https://github.com/Erotemic). (#1603, #1517, #1506, #1505, #1491, #1479, #1477, #1475, #1474) +- Add support for multi-node distributed test when there is no shared storage. (#1399) +- Optimize Dockerfile to reduce the image size. (#1306) +- Update new results of HRNet. (#1284, #1182) +- Add an argument `no_norm_on_lateral` in FPN. (#1240) +- Test the compiling in CI. (#1235) +- Move docs to a separate folder. (#1233) +- Add a Jupyter notebook demo. (#1158) +- Support different types of datasets for training. (#1133) +- Use int64_t instead of long in CUDA kernels. (#1131) +- Support unsquare RoIs for bbox and mask heads. (#1128) +- Manually add type promotion to maintain compatibility with PyTorch 1.2. (#1114) +- Allow a validation dataset for computing validation loss. (#1093) +- Use `.scalar_type()` instead of `.type()` to suppress some warnings. (#1070) + +**New Features** + +- Add an option `--with_ap` to compute the AP for each class. (#1549) +- Implement "FreeAnchor: Learning to Match Anchors for Visual Object Detection". (#1391) +- Support [Albumentations](https://github.com/albumentations-team/albumentations) for augmentations in the data pipeline. (#1354) +- Implement "FoveaBox: Beyond Anchor-based Object Detector". (#1339) +- Support horizontal and vertical flipping. (#1273, #1115) +- Implement "RepPoints: Point Set Representation for Object Detection". (#1265) +- Add test-time augmentation to HTC and Cascade R-CNN. (#1251) +- Add a COCO result analysis tool. (#1228) +- Add Dockerfile. (#1168) +- Add a webcam demo. (#1155, #1150) +- Add FLOPs counter. (#1127) +- Allow arbitrary layer order for ConvModule. (#1078) + +### v1.0rc0 (27/07/2019) + +- Implement lots of new methods and components (Mixed Precision Training, HTC, Libra R-CNN, Guided Anchoring, Empirical Attention, Mask Scoring R-CNN, Grid R-CNN (Plus), GHM, GCNet, FCOS, HRNet, Weight Standardization, etc.). Thank all collaborators! +- Support two additional datasets: WIDER FACE and Cityscapes. +- Refactor loss APIs to make it more flexible to adopt different losses and related hyper-parameters. +- Speed up multi-GPU testing. +- Integrate all compiling and installing in a single script. + +### v0.6.0 (14/04/2019) + +- Up to 30% speedup compared to the model zoo. +- Support both PyTorch stable and nightly versions. +- Replace NMS and SigmoidFocalLoss with PyTorch CUDA extensions. + +### v0.6rc0 (06/02/2019) + +- Migrate to PyTorch 1.0. + +### v0.5.7 (06/02/2019) + +- Add support for Deformable ConvNet v2. (Many thanks to the authors and [@chengdazhi](https://github.com/chengdazhi)) +- This is the last release based on PyTorch 0.4.1. + +### v0.5.6 (17/01/2019) + +- Add support for Group Normalization. +- Unify RPNHead and single stage heads (RetinaHead, SSDHead) with AnchorHead. + +### v0.5.5 (22/12/2018) + +- Add SSD for COCO and PASCAL VOC. +- Add ResNeXt backbones and detection models. +- Refactor Samplers/Assigners and add OHEM. +- Add VOC dataset and evaluation scripts. + +### v0.5.4 (27/11/2018) + +- Add SingleStageDetector and RetinaNet.
+ +### v0.5.3 (26/11/2018) + +- Add Cascade R-CNN and Cascade Mask R-CNN. +- Add support for Soft-NMS in config files. + +### v0.5.2 (21/10/2018) + +- Add support for custom datasets. +- Add a script to convert PASCAL VOC annotations to the expected format. + +### v0.5.1 (20/10/2018) + +- Add BBoxAssigner and BBoxSampler; the `train_cfg` field in config files is restructured. +- `ConvFCRoIHead` / `SharedFCRoIHead` are renamed to `ConvFCBBoxHead` / `SharedFCBBoxHead` for consistency. diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/compatibility.md b/detection_cbnet/docker-build-context/cbnetv2/docs/compatibility.md new file mode 100644 index 0000000000000000000000000000000000000000..ae97eefd1478dca90a7c03de309f6a7ef097483f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/docs/compatibility.md @@ -0,0 +1,130 @@ +# Compatibility of MMDetection 2.x + +## MMDetection 2.14.0 + +### MMCV Version +To fix the problem that the priority of EvalHook was too low, all hook priorities were re-adjusted in MMCV 1.3.8, so MMDetection 2.14.0 relies on MMCV 1.3.8 or later. For related information, please refer to [#1120](https://github.com/open-mmlab/mmcv/pull/1120); for related issues, please refer to [#5343](https://github.com/open-mmlab/mmdetection/issues/5343). + +### SSD compatibility + +In v2.14.0, to make SSD more flexible to use, [PR5291](https://github.com/open-mmlab/mmdetection/pull/5291) refactored its backbone, neck and head. Users can use the script `tools/model_converters/upgrade_ssd_version.py` to convert their models. + +```bash +python tools/model_converters/upgrade_ssd_version.py ${OLD_MODEL_PATH} ${NEW_MODEL_PATH} +``` + +- OLD_MODEL_PATH: the path to load the old-version SSD model. +- NEW_MODEL_PATH: the path to save the converted model weights. + +## MMDetection 2.12.0 + +MMDetection is going through a big refactoring for more general and convenient usage during the releases from v2.12.0 to v2.15.0 (maybe longer). +In v2.12.0 MMDetection inevitably brings some backward-incompatible changes, including the MMCV dependency, model initialization, model registry, and mask AP evaluation. + +### MMCV Version + +MMDetection v2.12.0 relies on the newest features in MMCV 1.3.3, including `BaseModule` for unified parameter initialization, the model registry, and the CUDA operator `MultiScaleDeformableAttn` for [Deformable DETR](https://arxiv.org/abs/2010.04159). Note that MMCV 1.3.2 already contains all the features used by MMDet but has known issues. Therefore, we recommend users skip MMCV v1.3.2 and use v1.3.3, though v1.3.2 might work for most cases. + +### Unified model initialization + +To unify the parameter initialization in OpenMMLab projects, MMCV supports `BaseModule` that accepts `init_cfg` to allow modules' parameters to be initialized in a flexible and unified manner. Now the users need to explicitly call `model.init_weights()` in the training script to initialize the model (as in [here](https://github.com/open-mmlab/mmdetection/blob/master/tools/train.py#L162)); previously this was handled by the detector. **The downstream projects must update their model initialization accordingly to use MMDetection v2.12.0**. Please refer to PR #4750 for details. + +### Unified model registry + +To easily use backbones implemented in other OpenMMLab projects, MMDetection v2.12.0 inherits the model registry created in MMCV (#760).
In this way, as long as the backbone is supported in an OpenMMLab project and that project also uses the registry in MMCV, users can use that backbone in MMDetection by simply modifying the config without copying the code of that backbone into MMDetection. Please refer to PR #5059 for more details. + +### Mask AP evaluation + +Before [PR 4898](https://github.com/open-mmlab/mmdetection/pull/4898) and V2.12.0, the mask AP of small, medium, and large instances was calculated based on the bounding box area rather than the real mask area. This leads to higher `APs` and `APm` but lower `APl`, though it does not affect the overall mask AP. [PR 4898](https://github.com/open-mmlab/mmdetection/pull/4898) changes it to use mask areas by deleting `bbox` in the mask AP calculation. +The new calculation does not affect the overall mask AP evaluation and is consistent with [Detectron2](https://github.com/facebookresearch/detectron2/). + +## Compatibility with MMDetection 1.x + +MMDetection 2.0 goes through a big refactoring and addresses many legacy issues. It is not compatible with the 1.x version, i.e., running inference with the same model weights in these two versions will produce different results. Thus, MMDetection 2.0 re-benchmarks all the models and provides their links and logs in the model zoo. + +The major differences are four-fold: coordinate system, codebase conventions, training hyperparameters, and modular design. + +### Coordinate System + +The new coordinate system is consistent with [Detectron2](https://github.com/facebookresearch/detectron2/) and treats the center of the top-left pixel as (0, 0) rather than the top-left corner of that pixel. +Accordingly, the system interprets the coordinates in COCO bounding box and segmentation annotations as coordinates in range `[0, width]` or `[0, height]`. +This modification affects all the computation related to the bbox and pixel selection, +which is more natural and accurate. + +- The height and width of a box with corners (x1, y1) and (x2, y2) in the new coordinate system are computed as `width = x2 - x1` and `height = y2 - y1`. + In MMDetection 1.x and previous versions, a "+ 1" was added to both height and width. + This modification affects three aspects: + + 1. Box transformation and encoding/decoding in regression. + 2. IoU calculation. This affects the matching process between ground truth and bounding box and the NMS process. The effect on compatibility is negligible, though. + 3. The corners of bounding boxes are floats and no longer quantized. This should provide more accurate bounding box results. This also means bounding boxes and RoIs are no longer required to have a minimum size of 1, whose effect is small, though. + +- The anchors are center-aligned to feature grid points and are floats. + In MMDetection 1.x and previous versions, the anchors are of `int` type and not center-aligned. + This affects the anchor generation in RPN and all the anchor-based methods. + +- ROIAlign is better aligned with the image coordinate system. The new implementation is adopted from [Detectron2](https://github.com/facebookresearch/detectron2/tree/master/detectron2/layers/csrc/ROIAlign). + The RoIs are shifted by half a pixel by default when they are used to crop RoI features, compared to MMDetection 1.x. + The old behavior is still available by setting `aligned=False` instead of `aligned=True`. + +- Mask cropping and pasting are more accurate. + + 1. We use the new RoIAlign to crop mask targets.
In MMDetection 1.x, the bounding box is quantized before it is used to crop the mask target, and the crop process is implemented in numpy. In the new implementation, the bounding box for the crop is not quantized and is sent directly to RoIAlign. This implementation accelerates training by a large margin (~0.1s per iter, ~2 hours when training Mask R-CNN R-50 with the 1x schedule) and should be more accurate. + + 2. In MMDetection 2.0, the "`paste_mask()`" function is different and should be more accurate than that in previous versions. This change follows the modification in [Detectron2](https://github.com/facebookresearch/detectron2/blob/master/detectron2/structures/masks.py) and can improve mask AP on COCO by ~0.5% absolute. + +### Codebase Conventions + +- MMDetection 2.0 changes the order of class labels to reduce unused parameters in the regression and mask branches more naturally (without +1 and -1). + This causes all the classification layers of the model to have a different ordering of class labels. The final layers of the regression branch and mask head no longer keep K+1 channels for K categories, and their class orders are consistent with the classification branch. + + - In MMDetection 2.0, label "K" means background, and labels [0, K-1] correspond to the K = num_categories object categories. + + - In MMDetection 1.x and previous versions, label "0" means background, and labels [1, K] correspond to the K categories. + + - **Note**: The class order of the softmax RPN is still the same as that in 1.x in versions <= 2.4.0, while the sigmoid RPN is not affected. The class orders in all heads are unified since MMDetection v2.5.0. + +- Low quality matching in R-CNN is not used. In MMDetection 1.x and previous versions, the `max_iou_assigner` will match low quality boxes for each ground truth box in both RPN and R-CNN training. We observe this sometimes does not assign the best GT box to some bounding boxes, + thus MMDetection 2.0 does not allow low quality matching by default in R-CNN training in the new system. This sometimes may slightly improve the box AP (~0.1% absolute). + +- Separate scale factors for width and height. In MMDetection 1.x and previous versions, the scale factor is a single float in mode `keep_ratio=True`. This is slightly inaccurate because the scale factors for width and height differ slightly. MMDetection 2.0 adopts separate scale factors for width and height, which improves AP by ~0.1% absolute. + +- Config name conventions are changed. MMDetection V2.0 adopts the new name convention to maintain the gradually growing model zoo as the following: + + ```shell + [model]_(model setting)_[backbone]_[neck]_(norm setting)_(misc)_(gpu x batch)_[schedule]_[dataset].py, + ``` + + where the (`misc`) includes DCN and GCBlock, etc. More details are illustrated in the [documentation for config](config.md). + +- MMDetection V2.0 uses new ResNet Caffe backbones to reduce warnings when loading pre-trained models. Most of the new backbones' weights are the same as the former ones but do not have `conv.bias`, and they use a different `img_norm_cfg`. Thus, the new backbones will not cause warnings about unexpected keys. + +### Training Hyperparameters + +The change in training hyperparameters does not affect +model-level compatibility but slightly improves the performance. The major ones are: + +- The number of proposals after NMS is changed from 2000 to 1000 by setting `nms_post=1000` and `max_num=1000`. + This slightly improves both mask AP and bbox AP by ~0.2% absolute.
+ +- The default box regression losses for Mask R-CNN, Faster R-CNN and RetinaNet are changed from smooth L1 loss to L1 loss. This leads to an overall improvement in box AP (~0.6% absolute). However, using L1 loss for other methods such as Cascade R-CNN and HTC does not improve the performance, so we keep the original settings for these methods. + +- The sample number of the RoIAlign layer is set to 0 for simplicity. This leads to a slight improvement in mask AP (~0.2% absolute). + +- The default setting does not use gradient clipping anymore during training for faster training speed. This does not degrade performance for most models. For some models such as RepPoints we keep using gradient clipping to stabilize the training process and to obtain better performance. + +- The default warmup ratio is changed from 1/3 to 0.001 for a smoother warmup process since gradient clipping is usually not used. The effect is found negligible during our re-benchmarking, though. + +### Upgrade Models from 1.x to 2.0 + +To convert the models trained by MMDetection V1.x to MMDetection V2.0, users can use the script `tools/model_converters/upgrade_model_version.py` to convert +their models. The converted models can be run in MMDetection V2.0 with slightly dropped performance (less than 1% AP absolute). +Details can be found in `configs/legacy`. + +## pycocotools compatibility + +`mmpycocotools` is OpenMMLab's fork of the official `pycocotools`, which works for both MMDetection and Detectron2. +Before [PR 4939](https://github.com/open-mmlab/mmdetection/pull/4939), since `pycocotools` and `mmpycocotools` install the same package name, if users had already installed `pycocotools` (e.g., by installing Detectron2 first under the same environment), then the setup of MMDetection would skip installing `mmpycocotools`. Thus MMDetection fails due to the missing `mmpycocotools`. +If MMDetection is installed before Detectron2, they could work under the same environment. +[PR 4939](https://github.com/open-mmlab/mmdetection/pull/4939) deprecates mmpycocotools in favor of the official pycocotools. +Users may install MMDetection and Detectron2 under the same environment after [PR 4939](https://github.com/open-mmlab/mmdetection/pull/4939), no matter what the installation order is. diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/conf.py b/detection_cbnet/docker-build-context/cbnetv2/docs/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..1c60d9c5b332ad630a53cadd413b4860aca12713 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/docs/conf.py @@ -0,0 +1,90 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here.
+# +import os +import subprocess +import sys + +sys.path.insert(0, os.path.abspath('..')) + +# -- Project information ----------------------------------------------------- + +project = 'MMDetection' +copyright = '2018-2020, OpenMMLab' +author = 'MMDetection Authors' +version_file = '../mmdet/version.py' + + +def get_version(): + with open(version_file, 'r') as f: + exec(compile(f.read(), version_file, 'exec')) + return locals()['__version__'] + + +# The full version, including alpha/beta/rc tags +release = get_version() + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.napoleon', + 'sphinx.ext.viewcode', + 'recommonmark', + 'sphinx_markdown_tables', +] + +autodoc_mock_imports = [ + 'matplotlib', 'pycocotools', 'terminaltables', 'mmdet.version', 'mmcv.ops' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +source_suffix = { + '.rst': 'restructuredtext', + '.md': 'markdown', +} + +# The master toctree document. +master_doc = 'index' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_rtd_theme' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + + +def builder_inited_handler(app): + subprocess.run(['./stat.py']) + + +def setup(app): + app.connect('builder-inited', builder_inited_handler) diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/conventions.md b/detection_cbnet/docker-build-context/cbnetv2/docs/conventions.md new file mode 100644 index 0000000000000000000000000000000000000000..86e8cb721c7501ccd7e8a20b7d1aabb362f69f9d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/docs/conventions.md @@ -0,0 +1,31 @@ +# Conventions + +Please check the following conventions if you would like to modify MMDetection as your own project. + +## Loss + +In MMDetection, a `dict` containing losses and metrics will be returned by `model(**data)`. + +For example, in bbox head, + +```python +class BBoxHead(nn.Module): + ... + def loss(self, ...): + losses = dict() + # classification loss + losses['loss_cls'] = self.loss_cls(...) + # classification accuracy + losses['acc'] = accuracy(...) + # bbox regression loss + losses['loss_bbox'] = self.loss_bbox(...) + return losses +``` + +`bbox_head.loss()` will be called during model forward. +The returned dict contains `'loss_bbox'`, `'loss_cls'`, `'acc'` . +Only `'loss_bbox'`, `'loss_cls'` will be used during back propagation, +`'acc'` will only be used as a metric to monitor training process. + +By default, only values whose keys contain `'loss'` will be back propagated. 
+This behavior can be changed by modifying `BaseDetector.train_step()`. diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/faq.md b/detection_cbnet/docker-build-context/cbnetv2/docs/faq.md new file mode 100644 index 0000000000000000000000000000000000000000..ba2ed6b89bedcea4ccdbfb484eb729bd7fab7724 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/docs/faq.md @@ -0,0 +1,88 @@ +We list some common problems many users face and their corresponding solutions here. Feel free to enrich the list if you find any frequent issues and have ways to help others solve them. If the contents here do not cover your issue, please create an issue using the [provided templates](https://github.com/open-mmlab/mmdetection/blob/master/.github/ISSUE_TEMPLATE/error-report.md/) and make sure you fill in all required information in the template. + +## MMCV Installation + +- Compatibility issue between MMCV and MMDetection; "ConvWS is already registered in conv layer"; "AssertionError: MMCV==xxx is used but incompatible. Please install mmcv>=xxx, <=xxx." + + Please install the correct version of MMCV for the version of your MMDetection following the [installation instruction](https://mmdetection.readthedocs.io/en/latest/get_started.html#installation). + +- "No module named 'mmcv.ops'"; "No module named 'mmcv._ext'". + + 1. Uninstall existing mmcv in the environment using `pip uninstall mmcv`. + 2. Install mmcv-full following the [installation instruction](https://mmcv.readthedocs.io/en/latest/#installation). + +## PyTorch/CUDA Environment + +- "RTX 30 series card fails when building MMCV or MMDet" + + 1. Temporary work-around: do `MMCV_WITH_OPS=1 MMCV_CUDA_ARGS='-gencode=arch=compute_80,code=sm_80' pip install -e .`. + The common issue is `nvcc fatal : Unsupported gpu architecture 'compute_86'`. This means that the compiler should optimize for sm_86, i.e., the NVIDIA 30-series cards, but such optimizations are not supported by CUDA Toolkit 11.0. + This work-around modifies the compile flag by adding `MMCV_CUDA_ARGS='-gencode=arch=compute_80,code=sm_80'`, which tells `nvcc` to optimize for **sm_80**, i.e., the NVIDIA A100. Although the A100 is different from the 30-series cards, they use a similar Ampere architecture. This may hurt the performance, but it works. + 2. PyTorch developers have indicated that the default compiler flags should be fixed by [pytorch/pytorch#47585](https://github.com/pytorch/pytorch/pull/47585), so using PyTorch-nightly may also be able to solve the problem, though we have not tested it yet. + +- "invalid device function" or "no kernel image is available for execution". + + 1. Check if your CUDA runtime version (under `/usr/local/`), `nvcc --version` and `conda list cudatoolkit` version match. + 2. Run `python mmdet/utils/collect_env.py` to check whether PyTorch, torchvision, and MMCV are built for the correct GPU architecture. + You may need to set `TORCH_CUDA_ARCH_LIST` to reinstall MMCV. + The GPU arch table could be found [here](https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list), + e.g., run `TORCH_CUDA_ARCH_LIST=7.0 pip install mmcv-full` to build MMCV for Volta GPUs. + The compatibility issue could happen when using old GPUs, e.g., Tesla K80 (3.7) on Colab. + 3. Check whether the running environment is the same as the one used when mmcv/mmdet was compiled. + For example, you may compile mmcv using CUDA 10.0 but run it on CUDA 9.0 environments. + +- "undefined symbol" or "cannot open xxx.so". + + 1. 
If those symbols are CUDA/C++ symbols (e.g., libcudart.so or GLIBCXX), check whether the CUDA/GCC runtimes are the same as those used for compiling mmcv, + i.e., run `python mmdet/utils/collect_env.py` to see if `"MMCV Compiler"`/`"MMCV CUDA Compiler"` is the same as `"GCC"`/`"CUDA_HOME"`. + 2. If those symbols are PyTorch symbols (e.g., symbols containing caffe, aten, and TH), check whether the PyTorch version is the same as that used for compiling mmcv. + 3. Run `python mmdet/utils/collect_env.py` to check whether PyTorch, torchvision, and MMCV are built by and running on the same environment. + +- setuptools.sandbox.UnpickleableException: DistutilsSetupError("each element of 'ext_modules' option must be an Extension instance or 2-tuple") + + 1. If you are using miniconda rather than anaconda, check whether Cython is installed as indicated in [#3379](https://github.com/open-mmlab/mmdetection/issues/3379). + You need to manually install Cython first and then run the command `pip install -r requirements.txt`. + 2. You may also need to check the compatibility between the `setuptools`, `Cython`, and `PyTorch` in your environment. + +- "Segmentation fault". + 1. Check your GCC version and use GCC 5.4. This is usually caused by incompatibility between PyTorch and the environment (e.g., GCC < 4.9 for PyTorch). We also recommend users avoid GCC 5.5 because many reports indicate that GCC 5.5 will cause "segmentation fault", and simply changing it to GCC 5.4 could solve the problem. + + 2. Check whether PyTorch is correctly installed and can use CUDA ops, e.g., type the following command in your terminal. + + ```shell + python -c 'import torch; print(torch.cuda.is_available())' + ``` + + and see whether it outputs `True`. + + 3. If PyTorch is correctly installed, check whether MMCV is correctly installed. + + ```shell + python -c 'import mmcv; import mmcv.ops' + ``` + + If MMCV is correctly installed, the above two commands will run without errors. + + 4. If MMCV and PyTorch are correctly installed, you may use `ipdb` or `pdb` to set breakpoints, or directly add `print` statements in the MMDetection code, to see which part leads to the segmentation fault. + +## Training + +- "Loss goes NaN" + 1. Check if the dataset annotations are valid: zero-size bounding boxes will cause the regression loss to be NaN due to the commonly used transformation for box regression. Some small (width or height smaller than 1) boxes will also cause this problem after data augmentation (e.g., instaboost). So check the data and try to filter out those zero-size boxes and skip some risky augmentations on the small-size boxes when you face the problem. + 2. Reduce the learning rate: the learning rate might be too large due to some reasons, e.g., a change of batch size. You can rescale it to a value that trains the model stably. + 3. Extend the warmup iterations: some models are sensitive to the learning rate at the start of the training. You can extend the warmup iterations, e.g., change `warmup_iters` from 500 to 1000 or 2000. + 4. Add gradient clipping: some models require gradient clipping to stabilize the training process. The default of `grad_clip` is `None`; you can add gradient clipping to avoid gradients that are too large, i.e., set `optimizer_config=dict(_delete_=True, grad_clip=dict(max_norm=35, norm_type=2))` in your config file.
If your config does not inherit from any basic config that contains `optimizer_config=dict(grad_clip=None)`, you can simply add `optimizer_config=dict(grad_clip=dict(max_norm=35, norm_type=2))`. +- "GPU out of memory" + 1. There are some scenarios with a large number of ground truth boxes, which may cause OOM during target assignment. You can set `gpu_assign_thr=N` in the config of the assigner so that the assigner calculates box overlaps on CPU when there are more than N GT boxes. + 2. Set `with_cp=True` in the backbone. This uses the sublinear strategy in PyTorch to reduce GPU memory cost in the backbone. + 3. Try mixed precision training following the examples in `config/fp16`. The `loss_scale` might need further tuning for different models. + +- "RuntimeError: Expected to have finished reduction in the prior iteration before starting a new one" + 1. This error indicates that your module has parameters that were not used in producing loss. This phenomenon may be caused by running different branches in your code in DDP mode. + 2. You can set `find_unused_parameters = True` in the config to solve the above problem, or find those unused parameters manually. + +## Evaluation + +- COCO Dataset, AP or AR = -1 + 1. According to the COCO dataset definition, the small and medium areas in an image are those less than 1024 (32\*32) and 9216 (96\*96) pixels, respectively. + 2. If the corresponding area has no object, the result of AP and AR will be set to -1. diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/get_started.md b/detection_cbnet/docker-build-context/cbnetv2/docs/get_started.md new file mode 100644 index 0000000000000000000000000000000000000000..f10b987bdc54984f6e59496af8c7af3a18c80ec0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/docs/get_started.md @@ -0,0 +1,240 @@ +## Prerequisites + +- Linux or macOS (Windows has experimental support) +- Python 3.6+ +- PyTorch 1.3+ +- CUDA 9.2+ (If you build PyTorch from source, CUDA 9.0 is also compatible) +- GCC 5+ +- [MMCV](https://mmcv.readthedocs.io/en/latest/#installation) + +The compatible MMDetection and MMCV versions are listed below. Please install the correct version of MMCV to avoid installation issues. + +| MMDetection version | MMCV version | +|:-------------------:|:-------------------:| +| master | mmcv-full>=1.3.8, <1.4.0 | +| 2.14.0 | mmcv-full>=1.3.8, <1.4.0 | +| 2.13.0 | mmcv-full>=1.3.3, <1.4.0 | +| 2.12.0 | mmcv-full>=1.3.3, <1.4.0 | +| 2.11.0 | mmcv-full>=1.2.4, <1.4.0 | +| 2.10.0 | mmcv-full>=1.2.4, <1.4.0 | +| 2.9.0 | mmcv-full>=1.2.4, <1.4.0 | +| 2.8.0 | mmcv-full>=1.2.4, <1.4.0 | +| 2.7.0 | mmcv-full>=1.1.5, <1.4.0 | +| 2.6.0 | mmcv-full>=1.1.5, <1.4.0 | +| 2.5.0 | mmcv-full>=1.1.5, <1.4.0 | +| 2.4.0 | mmcv-full>=1.1.1, <1.4.0 | +| 2.3.0 | mmcv-full==1.0.5 | +| 2.3.0rc0 | mmcv-full>=1.0.2 | +| 2.2.1 | mmcv==0.6.2 | +| 2.2.0 | mmcv==0.6.2 | +| 2.1.0 | mmcv>=0.5.9, <=0.6.1| +| 2.0.0 | mmcv>=0.5.1, <=0.5.8| + +Note: You need to run `pip uninstall mmcv` first if you have mmcv installed. +If mmcv and mmcv-full are both installed, there will be a `ModuleNotFoundError`. + +## Installation + +### Prepare environment + +1. Create a conda virtual environment and activate it. + + ```shell + conda create -n open-mmlab python=3.7 -y + conda activate open-mmlab + ``` + +2. 
Install PyTorch and torchvision following the [official instructions](https://pytorch.org/), e.g., + + ```shell + conda install pytorch torchvision -c pytorch + ``` + + Note: Make sure that your compilation CUDA version and runtime CUDA version match. + You can check the supported CUDA version for precompiled packages on the [PyTorch website](https://pytorch.org/). + + `E.g.1` If you have CUDA 10.1 installed under `/usr/local/cuda` and would like to install + PyTorch 1.5, you need to install the prebuilt PyTorch with CUDA 10.1. + + ```shell + conda install pytorch cudatoolkit=10.1 torchvision -c pytorch + ``` + + `E.g. 2` If you have CUDA 9.2 installed under `/usr/local/cuda` and would like to install + PyTorch 1.3.1, you need to install the prebuilt PyTorch with CUDA 9.2. + + ```shell + conda install pytorch=1.3.1 cudatoolkit=9.2 torchvision=0.4.2 -c pytorch + ``` + + If you build PyTorch from source instead of installing the prebuilt package, + you can use more CUDA versions such as 9.0. + + +### Install MMDetection + +We recommend installing MMDetection with [MIM](https://github.com/open-mmlab/mim). + +```shell +pip install openmim +mim install mmdet +``` + +MIM can automatically install OpenMMLab projects and their requirements. + +Or, you can install MMDetection manually: + +1. Install mmcv-full; we recommend installing the pre-built package as below. + + ```shell + pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/{cu_version}/{torch_version}/index.html + ``` + + Please replace `{cu_version}` and `{torch_version}` in the URL with your desired versions. For example, to install the latest `mmcv-full` with `CUDA 11` and `PyTorch 1.7.0`, use the following command: + + ```shell + pip install mmcv-full -f https://download.openmmlab.com/mmcv/dist/cu110/torch1.7.0/index.html + ``` + + See [here](https://github.com/open-mmlab/mmcv#install-with-pip) for different versions of MMCV compatible with different PyTorch and CUDA versions. + Optionally you can choose to compile mmcv from source with the following commands + + ```shell + git clone https://github.com/open-mmlab/mmcv.git + cd mmcv + MMCV_WITH_OPS=1 pip install -e . # package mmcv-full will be installed after this step + cd .. + ``` + + Or directly run + + ```shell + pip install mmcv-full + ``` + +2. Clone the MMDetection repository. + + ```shell + git clone https://github.com/open-mmlab/mmdetection.git + cd mmdetection + ``` + +3. Install build requirements and then install MMDetection. + + ```shell + pip install -r requirements/build.txt + pip install -v -e . # or "python setup.py develop" + ``` + + Or, you can simply install MMDetection with the following command: + + ```shell + pip install mmdet + ``` + +Note: + +a. Following the above instructions, MMDetection is installed in `dev` mode; any local modifications made to the code will take effect without the need to reinstall it. + +b. If you would like to use `opencv-python-headless` instead of `opencv-python`, +you can install it before installing MMCV. + +c. Some dependencies are optional. Simply running `pip install -v -e .` will + only install the minimum runtime requirements. To use optional dependencies like `albumentations` and `imagecorruptions` either install them manually with `pip install -r requirements/optional.txt` or specify desired extras when calling `pip` (e.g. `pip install -v -e .[optional]`). Valid keys for the extras field are: `all`, `tests`, `build`, and `optional`.
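+ +As a quick sanity check after any of the above installation routes (a minimal sketch; `mmdet` exposes `__version__` like other OpenMMLab packages): + +```shell +python -c 'import mmdet; print(mmdet.__version__)' +```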
+ +### Install with CPU only + +The code can be built for a CPU-only environment (where CUDA isn't available). + +In CPU mode you can run demo/webcam_demo.py, for example. +However, some functionality is unavailable in this mode: + +- Deformable Convolution +- Modulated Deformable Convolution +- ROI pooling +- Deformable ROI pooling +- CARAFE: Content-Aware ReAssembly of FEatures +- SyncBatchNorm +- CrissCrossAttention: Criss-Cross Attention +- MaskedConv2d +- Temporal Interlace Shift +- nms_cuda +- sigmoid_focal_loss_cuda +- bbox_overlaps + +So if you try to run inference with a model containing the above ops, you will get an error. The following table lists the related methods that cannot run inference on CPU due to their dependency on these operators. + +| Operator | Model | +| :-----------------------------------------------------: | :----------------------------------------------------------: | +| Deformable Convolution/Modulated Deformable Convolution | DCN, Guided Anchoring, RepPoints, CentripetalNet, VFNet, CascadeRPN, NAS-FCOS, DetectoRS | +| MaskedConv2d | Guided Anchoring | +| CARAFE | CARAFE | +| SyncBatchNorm | ResNeSt | + +**Notice**: MMDetection does not support training with CPU for now. + +### Another option: Docker Image + +We provide a [Dockerfile](https://github.com/open-mmlab/mmdetection/blob/master/docker/Dockerfile) to build an image. Ensure that you are using [docker version](https://docs.docker.com/engine/install/) >=19.03. + +```shell +# build an image with PyTorch 1.6, CUDA 10.1 +docker build -t mmdetection docker/ +``` + +Run it with + +```shell +docker run --gpus all --shm-size=8g -it -v {DATA_DIR}:/mmdetection/data mmdetection +``` + +### A from-scratch setup script + +Assuming that you already have CUDA 10.1 installed, here is a full script for setting up MMDetection with conda. + +```shell +conda create -n open-mmlab python=3.7 -y +conda activate open-mmlab + +conda install pytorch==1.6.0 torchvision==0.7.0 cudatoolkit=10.1 -c pytorch -y + +# install the latest mmcv +pip install mmcv-full==latest+torch1.6.0+cu101 -f https://download.openmmlab.com/mmcv/dist/index.html + +# install mmdetection +git clone https://github.com/open-mmlab/mmdetection.git +cd mmdetection +pip install -r requirements/build.txt +pip install -v -e . +``` + +### Developing with multiple MMDetection versions + +The train and test scripts already modify the `PYTHONPATH` to ensure the scripts use the MMDetection in the current directory.
+ +To use the default MMDetection installed in the environment rather than the one you are working with, you can remove the following line from those scripts + +```shell +PYTHONPATH="$(dirname $0)/..":$PYTHONPATH +``` + +## Verification + +To verify whether MMDetection and the required environment are installed correctly, we can run sample Python code to initialize a detector and run inference on a demo image: + +```python +from mmdet.apis import init_detector, inference_detector + +config_file = 'configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py' +# download the checkpoint from model zoo and put it in `checkpoints/` +# url: https://download.openmmlab.com/mmdetection/v2.0/faster_rcnn/faster_rcnn_r50_fpn_1x_coco/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth +checkpoint_file = 'checkpoints/faster_rcnn_r50_fpn_1x_coco_20200130-047c8118.pth' +device = 'cuda:0' +# init a detector +model = init_detector(config_file, checkpoint_file, device=device) +# inference the demo image +inference_detector(model, 'demo/demo.jpg') +``` + +The above code is supposed to run successfully once you finish the installation. diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/index.rst b/detection_cbnet/docker-build-context/cbnetv2/docs/index.rst new file mode 100644 index 0000000000000000000000000000000000000000..5b30e24133956f4e46a00deb07c74e57ebff0154 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/docs/index.rst @@ -0,0 +1,50 @@ +Welcome to MMDetection's documentation! +======================================= + +.. toctree:: + :maxdepth: 2 + :caption: Get Started + + get_started.md + modelzoo_statistics.md + model_zoo.md + +.. toctree:: + :maxdepth: 2 + :caption: Quick Run + + 1_exist_data_model.md + 2_new_data_model.md + +.. toctree:: + :maxdepth: 2 + :caption: Tutorials + + tutorials/index.rst + +.. toctree:: + :maxdepth: 2 + :caption: Useful Tools and Scripts + + useful_tools.md + +.. toctree:: + :maxdepth: 2 + :caption: Notes + + conventions.md + compatibility.md + projects.md + changelog.md + faq.md + +.. toctree:: + :caption: API Reference + + api.rst + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`search` diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/make.bat b/detection_cbnet/docker-build-context/cbnetv2/docs/make.bat new file mode 100644 index 0000000000000000000000000000000000000000..922152e96a04a242e6fc40f124261d74890617d8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. 
+	echo.If you don't have Sphinx installed, grab it from
+	echo.http://sphinx-doc.org/
+	exit /b 1
+)
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/model_zoo.md b/detection_cbnet/docker-build-context/cbnetv2/docs/model_zoo.md
new file mode 100644
index 0000000000000000000000000000000000000000..307acb591cec1a078a34b973823b33c9ff6f40c9
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/model_zoo.md
@@ -0,0 +1,326 @@
+# Benchmark and Model Zoo
+
+## Mirror sites
+
+We use AWS as the main site to host our model zoo, and maintain a mirror on aliyun.
+You can replace `https://s3.ap-northeast-2.amazonaws.com/open-mmlab` with `https://open-mmlab.oss-cn-beijing.aliyuncs.com` in model urls.
+
+## Common settings
+
+- All models were trained on `coco_2017_train` and tested on `coco_2017_val`.
+- We use distributed training.
+- All pytorch-style pretrained backbones on ImageNet are from the PyTorch model zoo; caffe-style pretrained backbones are converted from the newly released models from detectron2.
+- For fair comparison with other codebases, we report the GPU memory as the maximum value of `torch.cuda.max_memory_allocated()` for all 8 GPUs. Note that this value is usually less than what `nvidia-smi` shows.
+- We report the inference time as the total time of network forwarding and post-processing, excluding the data loading time. Results are obtained with the script [benchmark.py](https://github.com/open-mmlab/mmdetection/blob/master/tools/analysis_tools/benchmark.py), which computes the average time on 2000 images.
+
+## ImageNet Pretrained Models
+
+It is common to initialize from backbone models pre-trained on the ImageNet classification task. All pre-trained model links can be found at [open_mmlab](https://github.com/open-mmlab/mmcv/blob/master/mmcv/model_zoo/open_mmlab.json). According to `img_norm_cfg` and the source of the weights, all the ImageNet pre-trained model weights fall into the following cases:
+
+- TorchVision: Corresponding to torchvision weights, including ResNet50, ResNet101. The `img_norm_cfg` is `dict(mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)`.
+- Pycls: Corresponding to [pycls](https://github.com/facebookresearch/pycls) weights, including RegNetX. The `img_norm_cfg` is `dict(mean=[103.530, 116.280, 123.675], std=[57.375, 57.12, 58.395], to_rgb=False)`.
+- MSRA styles: Corresponding to [MSRA](https://github.com/KaimingHe/deep-residual-networks) weights, including ResNet50_Caffe and ResNet101_Caffe. The `img_norm_cfg` is `dict(mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False)`.
+- Caffe2 styles: Currently only contains ResNext101_32x8d. The `img_norm_cfg` is `dict(mean=[103.530, 116.280, 123.675], std=[57.375, 57.120, 58.395], to_rgb=False)`.
+- Other styles: e.g. SSD, whose `img_norm_cfg` is `dict(mean=[123.675, 116.28, 103.53], std=[1, 1, 1], to_rgb=True)`, and YOLOv3, whose `img_norm_cfg` is `dict(mean=[0, 0, 0], std=[255., 255., 255.], to_rgb=True)`.
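+The chosen style shows up in a config through the `Normalize` step of the data pipeline. Below is a minimal sketch for an MSRA/caffe-style backbone such as ResNet50_Caffe, using the values listed above; the surrounding pipeline steps are illustrative:
+
+```python
+# normalization matching a caffe-style pre-trained backbone
+img_norm_cfg = dict(
+    mean=[103.530, 116.280, 123.675], std=[1.0, 1.0, 1.0], to_rgb=False)
+
+train_pipeline = [
+    dict(type='LoadImageFromFile'),
+    dict(type='LoadAnnotations', with_bbox=True),
+    # the dict is unpacked into the Normalize transform
+    dict(type='Normalize', **img_norm_cfg),
+]
+```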
+
+The detailed table of the commonly used backbone models in MMDetection is listed below:
+
+| model | source | link | description |
+| ---------------- | ----------- | ------------------------------------------------------------ | ------------------------------------------------------------ |
+| ResNet50 | TorchVision | [torchvision's ResNet-50](https://download.pytorch.org/models/resnet50-19c8e357.pth) | From [torchvision's ResNet-50](https://download.pytorch.org/models/resnet50-19c8e357.pth). |
+| ResNet101 | TorchVision | [torchvision's ResNet-101](https://download.pytorch.org/models/resnet101-5d3b4d8f.pth) | From [torchvision's ResNet-101](https://download.pytorch.org/models/resnet101-5d3b4d8f.pth). |
+| RegNetX | Pycls | [RegNetX_3.2gf](https://download.openmmlab.com/pretrain/third_party/regnetx_3.2gf-c2599b0f.pth), [RegNetX_800mf](https://download.openmmlab.com/pretrain/third_party/regnetx_800mf-1f4be4c7.pth), etc. | From [pycls](https://github.com/facebookresearch/pycls). |
+| ResNet50_Caffe | MSRA | [MSRA's ResNet-50](https://download.openmmlab.com/pretrain/third_party/resnet50_caffe-788b5fa3.pth) | Converted copy of [Detectron2's R-50.pkl](https://dl.fbaipublicfiles.com/detectron2/ImageNetPretrained/MSRA/R-50.pkl) model. The original weight comes from [MSRA's original ResNet-50](https://github.com/KaimingHe/deep-residual-networks). |
+| ResNet101_Caffe | MSRA | [MSRA's ResNet-101](https://download.openmmlab.com/pretrain/third_party/resnet101_caffe-3ad79236.pth) | Converted copy of [Detectron2's R-101.pkl](https://dl.fbaipublicfiles.com/detectron2/ImageNetPretrained/MSRA/R-101.pkl) model. The original weight comes from [MSRA's original ResNet-101](https://github.com/KaimingHe/deep-residual-networks). |
+| ResNext101_32x8d | Caffe2 | [Caffe2 ResNext101_32x8d](https://download.openmmlab.com/pretrain/third_party/resnext101_32x8d-1516f1aa.pth) | Converted copy of [Detectron2's X-101-32x8d.pkl](https://dl.fbaipublicfiles.com/detectron2/ImageNetPretrained/FAIR/X-101-32x8d.pkl) model. The ResNeXt-101-32x8d model trained with Caffe2 at FB. |
+
+## Baselines
+
+### RPN
+
+Please refer to [RPN](https://github.com/open-mmlab/mmdetection/blob/master/configs/rpn) for details.
+
+### Faster R-CNN
+
+Please refer to [Faster R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn) for details.
+
+### Mask R-CNN
+
+Please refer to [Mask R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask_rcnn) for details.
+
+### Fast R-CNN (with pre-computed proposals)
+
+Please refer to [Fast R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/fast_rcnn) for details.
+
+### RetinaNet
+
+Please refer to [RetinaNet](https://github.com/open-mmlab/mmdetection/blob/master/configs/retinanet) for details.
+
+### Cascade R-CNN and Cascade Mask R-CNN
+
+Please refer to [Cascade R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/cascade_rcnn) for details.
+
+### Hybrid Task Cascade (HTC)
+
+Please refer to [HTC](https://github.com/open-mmlab/mmdetection/blob/master/configs/htc) for details.
+
+### SSD
+
+Please refer to [SSD](https://github.com/open-mmlab/mmdetection/blob/master/configs/ssd) for details.
+
+### Group Normalization (GN)
+
+Please refer to [Group Normalization](https://github.com/open-mmlab/mmdetection/blob/master/configs/gn) for details.
+
+### Weight Standardization
+
+Please refer to [Weight Standardization](https://github.com/open-mmlab/mmdetection/blob/master/configs/gn+ws) for details.
+ +### Deformable Convolution v2 + +Please refer to [Deformable Convolutional Networks](https://github.com/open-mmlab/mmdetection/blob/master/configs/dcn) for details. + +### CARAFE: Content-Aware ReAssembly of FEatures + +Please refer to [CARAFE](https://github.com/open-mmlab/mmdetection/blob/master/configs/carafe) for details. + +### Instaboost + +Please refer to [Instaboost](https://github.com/open-mmlab/mmdetection/blob/master/configs/instaboost) for details. + +### Libra R-CNN + +Please refer to [Libra R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/libra_rcnn) for details. + +### Guided Anchoring + +Please refer to [Guided Anchoring](https://github.com/open-mmlab/mmdetection/blob/master/configs/guided_anchoring) for details. + +### FCOS + +Please refer to [FCOS](https://github.com/open-mmlab/mmdetection/blob/master/configs/fcos) for details. + +### FoveaBox + +Please refer to [FoveaBox](https://github.com/open-mmlab/mmdetection/blob/master/configs/foveabox) for details. + +### RepPoints + +Please refer to [RepPoints](https://github.com/open-mmlab/mmdetection/blob/master/configs/reppoints) for details. + +### FreeAnchor + +Please refer to [FreeAnchor](https://github.com/open-mmlab/mmdetection/blob/master/configs/free_anchor) for details. + +### Grid R-CNN (plus) + +Please refer to [Grid R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/grid_rcnn) for details. + +### GHM + +Please refer to [GHM](https://github.com/open-mmlab/mmdetection/blob/master/configs/ghm) for details. + +### GCNet + +Please refer to [GCNet](https://github.com/open-mmlab/mmdetection/blob/master/configs/gcnet) for details. + +### HRNet + +Please refer to [HRNet](https://github.com/open-mmlab/mmdetection/blob/master/configs/hrnet) for details. + +### Mask Scoring R-CNN + +Please refer to [Mask Scoring R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/ms_rcnn) for details. + +### Train from Scratch + +Please refer to [Rethinking ImageNet Pre-training](https://github.com/open-mmlab/mmdetection/blob/master/configs/scratch) for details. + +### NAS-FPN + +Please refer to [NAS-FPN](https://github.com/open-mmlab/mmdetection/blob/master/configs/nas_fpn) for details. + +### ATSS + +Please refer to [ATSS](https://github.com/open-mmlab/mmdetection/blob/master/configs/atss) for details. + +### FSAF + +Please refer to [FSAF](https://github.com/open-mmlab/mmdetection/blob/master/configs/fsaf) for details. + +### RegNetX + +Please refer to [RegNet](https://github.com/open-mmlab/mmdetection/blob/master/configs/regnet) for details. + +### Res2Net + +Please refer to [Res2Net](https://github.com/open-mmlab/mmdetection/blob/master/configs/res2net) for details. + +### GRoIE + +Please refer to [GRoIE](https://github.com/open-mmlab/mmdetection/blob/master/configs/groie) for details. + +### Dynamic R-CNN + +Please refer to [Dynamic R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/dynamic_rcnn) for details. + +### PointRend + +Please refer to [PointRend](https://github.com/open-mmlab/mmdetection/blob/master/configs/point_rend) for details. + +### DetectoRS + +Please refer to [DetectoRS](https://github.com/open-mmlab/mmdetection/blob/master/configs/detectors) for details. + +### Generalized Focal Loss + +Please refer to [Generalized Focal Loss](https://github.com/open-mmlab/mmdetection/blob/master/configs/gfl) for details. + +### CornerNet + +Please refer to [CornerNet](https://github.com/open-mmlab/mmdetection/blob/master/configs/cornernet) for details. 
+
+### YOLOv3
+
+Please refer to [YOLOv3](https://github.com/open-mmlab/mmdetection/blob/master/configs/yolo) for details.
+
+### PAA
+
+Please refer to [PAA](https://github.com/open-mmlab/mmdetection/blob/master/configs/paa) for details.
+
+### SABL
+
+Please refer to [SABL](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl) for details.
+
+### CentripetalNet
+
+Please refer to [CentripetalNet](https://github.com/open-mmlab/mmdetection/blob/master/configs/centripetalnet) for details.
+
+### ResNeSt
+
+Please refer to [ResNeSt](https://github.com/open-mmlab/mmdetection/blob/master/configs/resnest) for details.
+
+### DETR
+
+Please refer to [DETR](https://github.com/open-mmlab/mmdetection/blob/master/configs/detr) for details.
+
+### Deformable DETR
+
+Please refer to [Deformable DETR](https://github.com/open-mmlab/mmdetection/blob/master/configs/deformable_detr) for details.
+
+### AutoAssign
+
+Please refer to [AutoAssign](https://github.com/open-mmlab/mmdetection/blob/master/configs/autoassign) for details.
+
+### YOLOF
+
+Please refer to [YOLOF](https://github.com/open-mmlab/mmdetection/blob/master/configs/yolof) for details.
+
+### Seesaw Loss
+
+Please refer to [Seesaw Loss](https://github.com/open-mmlab/mmdetection/blob/master/configs/seesaw_loss) for details.
+
+### CenterNet
+
+Please refer to [CenterNet](https://github.com/open-mmlab/mmdetection/blob/master/configs/centernet) for details.
+
+### Other datasets
+
+We also benchmark some methods on [PASCAL VOC](https://github.com/open-mmlab/mmdetection/blob/master/configs/pascal_voc), [Cityscapes](https://github.com/open-mmlab/mmdetection/blob/master/configs/cityscapes) and [WIDER FACE](https://github.com/open-mmlab/mmdetection/blob/master/configs/wider_face).
+
+### Pre-trained Models
+
+We also train [Faster R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn) and [Mask R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask_rcnn) using ResNet-50 and [RegNetX-3.2G](https://github.com/open-mmlab/mmdetection/blob/master/configs/regnet) with multi-scale training and longer schedules. These models serve as strong pre-trained models for downstream tasks, for convenience.
+
+## Speed benchmark
+
+### Training Speed Benchmark
+
+We provide [analyze_logs.py](https://github.com/open-mmlab/mmdetection/blob/master/tools/analysis_tools/analyze_logs.py) to get the average iteration time during training. You can find examples in [Log Analysis](https://mmdetection.readthedocs.io/en/latest/useful_tools.html#log-analysis).
+
+We compare the training speed of Mask R-CNN with some other popular frameworks (the data is copied from [detectron2](https://github.com/facebookresearch/detectron2/blob/master/docs/notes/benchmarks.md/)).
+For mmdetection, we benchmark with [mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_poly_1x_coco_v1.py), which should have the same setting as [mask_rcnn_R_50_FPN_noaug_1x.yaml](https://github.com/facebookresearch/detectron2/blob/master/configs/Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x.yaml) of detectron2.
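+The json training logs (one json dict per line) can also be parsed directly to estimate s/iter. Below is a minimal sketch of this computation, assuming train entries carry `mode` and `time` fields; it is an illustration, not a replacement for analyze_logs.py:
+
+```python
+import json
+
+def average_iter_time(log_path, skip_first=50):
+    """Rough average s/iter from an MMDetection json-lines log."""
+    times = []
+    with open(log_path) as f:
+        for line in f:
+            line = line.strip()
+            if not line:
+                continue
+            entry = json.loads(line)
+            # the first line holds env info; train entries carry a 'time' field
+            if entry.get('mode') == 'train' and 'time' in entry:
+                times.append(entry['time'])
+    times = times[skip_first:]  # skip warmup iterations
+    return sum(times) / len(times) if times else None
+```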
+We also provide the [checkpoint](https://download.openmmlab.com/mmdetection/v2.0/benchmark/mask_rcnn_r50_caffe_fpn_poly_1x_coco_no_aug/mask_rcnn_r50_caffe_fpn_poly_1x_coco_no_aug_compare_20200518-10127928.pth) and [training log](https://download.openmmlab.com/mmdetection/v2.0/benchmark/mask_rcnn_r50_caffe_fpn_poly_1x_coco_no_aug/mask_rcnn_r50_caffe_fpn_poly_1x_coco_no_aug_20200518_105755.log.json) for reference. The throughput is computed as the average throughput in iterations 100-500 to skip GPU warmup time.
+
+| Implementation | Throughput (img/s) |
+| -------------------------------------------------------------------------------------- | ------------------ |
+| [Detectron2](https://github.com/facebookresearch/detectron2) | 62 |
+| [MMDetection](https://github.com/open-mmlab/mmdetection) | 61 |
+| [maskrcnn-benchmark](https://github.com/facebookresearch/maskrcnn-benchmark/) | 53 |
+| [tensorpack](https://github.com/tensorpack/tensorpack/tree/master/examples/FasterRCNN) | 50 |
+| [simpledet](https://github.com/TuSimple/simpledet/) | 39 |
+| [Detectron](https://github.com/facebookresearch/Detectron) | 19 |
+| [matterport/Mask_RCNN](https://github.com/matterport/Mask_RCNN/) | 14 |
+
+### Inference Speed Benchmark
+
+We provide [benchmark.py](https://github.com/open-mmlab/mmdetection/blob/master/tools/analysis_tools/benchmark.py) to benchmark the inference latency.
+The script benchmarks the model with 2000 images and calculates the average time, ignoring the first 5 iterations. You can change the output log interval (default: 50) by setting `LOG-INTERVAL`.
+
+```shell
+python tools/analysis_tools/benchmark.py ${CONFIG} ${CHECKPOINT} [--log-interval ${LOG-INTERVAL}] [--fuse-conv-bn]
+```
+
+The latency of all models in our model zoo is benchmarked without setting `fuse-conv-bn`; you can get a lower latency by setting it.
+
+## Comparison with Detectron2
+
+We compare mmdetection with [Detectron2](https://github.com/facebookresearch/detectron2.git) in terms of speed and performance.
+We use the commit id [185c27e](https://github.com/facebookresearch/detectron2/tree/185c27e4b4d2d4c68b5627b3765420c6d7f5a659) (30/4/2020) of detectron2.
+For fair comparison, we install and run both frameworks on the same machine.
+
+### Hardware
+
+- 8 NVIDIA Tesla V100 (32G) GPUs
+- Intel(R) Xeon(R) Gold 6148 CPU @ 2.40GHz
+
+### Software environment
+
+- Python 3.7
+- PyTorch 1.4
+- CUDA 10.1
+- CUDNN 7.6.03
+- NCCL 2.4.08
+
+### Performance
+
+| Type | Lr schd | Detectron2 | mmdetection | Download |
+| ------------ | ------- | ---------- | ----------- | -------- |
+| [Faster R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/faster_rcnn/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco.py) | 1x | [37.9](https://github.com/facebookresearch/detectron2/blob/master/configs/COCO-Detection/faster_rcnn_R_50_FPN_1x.yaml) | 38.0 | [model](https://download.openmmlab.com/mmdetection/v2.0/benchmark/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco-5324cff8.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/benchmark/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco/faster_rcnn_r50_caffe_fpn_mstrain_1x_coco_20200429_234554.log.json) |
+| [Mask R-CNN](https://github.com/open-mmlab/mmdetection/blob/master/configs/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py) | 1x | [38.6 & 35.2](https://github.com/facebookresearch/detectron2/blob/master/configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml) | 38.8 & 35.4 | [model](https://download.openmmlab.com/mmdetection/v2.0/benchmark/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco-dbecf295.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/benchmark/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco_20200430_054239.log.json) |
+| [RetinaNet](https://github.com/open-mmlab/mmdetection/blob/master/configs/retinanet/retinanet_r50_caffe_fpn_mstrain_1x_coco.py) | 1x | [36.5](https://github.com/facebookresearch/detectron2/blob/master/configs/COCO-Detection/retinanet_R_50_FPN_1x.yaml) | 37.0 | [model](https://download.openmmlab.com/mmdetection/v2.0/benchmark/retinanet_r50_caffe_fpn_mstrain_1x_coco/retinanet_r50_caffe_fpn_mstrain_1x_coco-586977a0.pth) | [log](https://download.openmmlab.com/mmdetection/v2.0/benchmark/retinanet_r50_caffe_fpn_mstrain_1x_coco/retinanet_r50_caffe_fpn_mstrain_1x_coco_20200430_014748.log.json) |
+
+### Training Speed
+
+The training speed is measured in s/iter; the lower, the better.
+
+| Type | Detectron2 | mmdetection |
+| ------------ | ---------- | ----------- |
+| Faster R-CNN | 0.210 | 0.216 |
+| Mask R-CNN | 0.261 | 0.265 |
+| RetinaNet | 0.200 | 0.205 |
+
+### Inference Speed
+
+The inference speed is measured in fps (img/s) on a single GPU; the higher, the better.
+To be consistent with Detectron2, we report the pure inference speed (without the time of data loading).
+For Mask R-CNN, we exclude the time of RLE encoding in post-processing.
+We also include the officially reported speed in parentheses, which is slightly higher
+than the results tested on our server due to hardware differences.
+
+| Type | Detectron2 | mmdetection |
+| ------------ | ----------- | ----------- |
+| Faster R-CNN | 25.6 (26.3) | 22.2 |
+| Mask R-CNN | 22.5 (23.3) | 19.6 |
+| RetinaNet | 17.8 (18.2) | 20.6 |
+
+### Training memory (GB)
+
+| Type | Detectron2 | mmdetection |
+| ------------ | ---------- | ----------- |
+| Faster R-CNN | 3.0 | 3.8 |
+| Mask R-CNN | 3.4 | 3.9 |
+| RetinaNet | 3.9 | 3.4 |
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/projects.md b/detection_cbnet/docker-build-context/cbnetv2/docs/projects.md
new file mode 100644
index 0000000000000000000000000000000000000000..5b7c240fd4e7655d2242f875a499a9103e37aeab
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/projects.md
@@ -0,0 +1,46 @@
+# Projects based on MMDetection
+
+There are many projects built upon MMDetection.
+We list some of them as examples of how to extend MMDetection for your own projects.
+Pull requests are also welcome.
+
+## Projects as an extension
+
+Some projects extend the boundary of MMDetection for deployment or other research fields.
+They reveal the potential of what MMDetection can do. We list several of them below.
+
+- [OTEDetection](https://github.com/opencv/mmdetection): OpenVINO training extensions for object detection.
+- [MMDetection3d](https://github.com/open-mmlab/mmdetection3d): OpenMMLab's next-generation platform for general 3D object detection.
+
+## Projects of papers
+
+There are also projects released with papers.
+Some of the papers are published in top-tier conferences (CVPR, ICCV, and ECCV); the others are also highly influential.
+To make this list also a reference for the community to develop and compare new object detection algorithms, we list them in the chronological order of top-tier conferences.
+Methods already supported and maintained by MMDetection are not listed.
+
+- Overcoming Classifier Imbalance for Long-tail Object Detection with Balanced Group Softmax, CVPR2020. [[paper]](http://openaccess.thecvf.com/content_CVPR_2020/papers/Li_Overcoming_Classifier_Imbalance_for_Long-Tail_Object_Detection_With_Balanced_Group_CVPR_2020_paper.pdf)[[github]](https://github.com/FishYuLi/BalancedGroupSoftmax)
+- Coherent Reconstruction of Multiple Humans from a Single Image, CVPR2020. [[paper]](https://jiangwenpl.github.io/multiperson/)[[github]](https://github.com/JiangWenPL/multiperson)
+- Look-into-Object: Self-supervised Structure Modeling for Object Recognition, CVPR 2020. [[paper]](http://openaccess.thecvf.com/content_CVPR_2020/papers/Zhou_Look-Into-Object_Self-Supervised_Structure_Modeling_for_Object_Recognition_CVPR_2020_paper.pdf)[[github]](https://github.com/JDAI-CV/LIO)
+- Video Panoptic Segmentation, CVPR2020. [[paper]](https://arxiv.org/abs/2006.11339)[[github]](https://github.com/mcahny/vps)
+- D2Det: Towards High Quality Object Detection and Instance Segmentation, CVPR2020. [[paper]](http://openaccess.thecvf.com/content_CVPR_2020/html/Cao_D2Det_Towards_High_Quality_Object_Detection_and_Instance_Segmentation_CVPR_2020_paper.html)[[github]](https://github.com/JialeCao001/D2Det)
+- CentripetalNet: Pursuing High-quality Keypoint Pairs for Object Detection, CVPR2020. [[paper]](https://arxiv.org/abs/2003.09119)[[github]](https://github.com/KiveeDong/CentripetalNet)
+- Learning a Unified Sample Weighting Network for Object Detection, CVPR 2020. [[paper]](http://openaccess.thecvf.com/content_CVPR_2020/html/Cai_Learning_a_Unified_Sample_Weighting_Network_for_Object_Detection_CVPR_2020_paper.html)[[github]](https://github.com/caiqi/sample-weighting-network)
+- Scale-equalizing Pyramid Convolution for Object Detection, CVPR2020. [[paper]](https://arxiv.org/abs/2005.03101) [[github]](https://github.com/jshilong/SEPC)
+- Revisiting the Sibling Head in Object Detector, CVPR2020. [[paper]](https://arxiv.org/abs/2003.07540)[[github]](https://github.com/Sense-X/TSD)
+- PolarMask: Single Shot Instance Segmentation with Polar Representation, CVPR2020. [[paper]](https://arxiv.org/abs/1909.13226)[[github]](https://github.com/xieenze/PolarMask)
+- Hit-Detector: Hierarchical Trinity Architecture Search for Object Detection, CVPR2020. [[paper]](https://arxiv.org/abs/2003.11818)[[github]](https://github.com/ggjy/HitDet.pytorch)
+- ZeroQ: A Novel Zero Shot Quantization Framework, CVPR2020. [[paper]](https://arxiv.org/abs/2001.00281)[[github]](https://github.com/amirgholami/ZeroQ)
+- CBNet: A Novel Composite Backbone Network Architecture for Object Detection, AAAI2020. [[paper]](https://aaai.org/Papers/AAAI/2020GB/AAAI-LiuY.1833.pdf)[[github]](https://github.com/VDIGPKU/CBNet)
+- RDSNet: A New Deep Architecture for Reciprocal Object Detection and Instance Segmentation, AAAI2020. [[paper]](https://arxiv.org/abs/1912.05070)[[github]](https://github.com/wangsr126/RDSNet)
+- Training-Time-Friendly Network for Real-Time Object Detection, AAAI2020. [[paper]](https://arxiv.org/abs/1909.00700)[[github]](https://github.com/ZJULearning/ttfnet)
+- Cascade RPN: Delving into High-Quality Region Proposal Network with Adaptive Convolution, NeurIPS 2019. [[paper]](https://arxiv.org/abs/1909.06720)[[github]](https://github.com/thangvubk/Cascade-RPN)
+- Reasoning R-CNN: Unifying Adaptive Global Reasoning into Large-scale Object Detection, CVPR2019. [[paper]](http://openaccess.thecvf.com/content_CVPR_2019/papers/Xu_Reasoning-RCNN_Unifying_Adaptive_Global_Reasoning_Into_Large-Scale_Object_Detection_CVPR_2019_paper.pdf)[[github]](https://github.com/chanyn/Reasoning-RCNN)
+- Learning RoI Transformer for Oriented Object Detection in Aerial Images, CVPR2019. [[paper]](https://arxiv.org/abs/1812.00155)[[github]](https://github.com/dingjiansw101/AerialDetection)
+- SOLO: Segmenting Objects by Locations. [[paper]](https://arxiv.org/abs/1912.04488)[[github]](https://github.com/WXinlong/SOLO)
+- SOLOv2: Dynamic, Faster and Stronger. [[paper]](https://arxiv.org/abs/2003.10152)[[github]](https://github.com/WXinlong/SOLO)
+- Dense RepPoints: Representing Visual Objects with Dense Point Sets. [[paper]](https://arxiv.org/abs/1912.11473)[[github]](https://github.com/justimyhxu/Dense-RepPoints)
+- IterDet: Iterative Scheme for Object Detection in Crowded Environments. [[paper]](https://arxiv.org/abs/2005.05708)[[github]](https://github.com/saic-vul/iterdet)
+- Cross-Iteration Batch Normalization. [[paper]](https://arxiv.org/abs/2002.05712)[[github]](https://github.com/Howal/Cross-iterationBatchNorm)
+- Generalizable Pedestrian Detection: The Elephant In The Room, CVPR2021. [[paper]](https://arxiv.org/abs/2003.08799)[[github]](https://github.com/hasanirtiza/Pedestron)
+- A Ranking-based, Balanced Loss Function Unifying Classification and Localisation in Object Detection, NeurIPS2020. [[paper]](https://arxiv.org/abs/2009.13592)[[github]](https://github.com/kemaloksuz/aLRPLoss)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/robustness_benchmarking.md b/detection_cbnet/docker-build-context/cbnetv2/docs/robustness_benchmarking.md
new file mode 100644
index 0000000000000000000000000000000000000000..5be16dfae2ebd42c75b0f886efa5459ab97afe26
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/robustness_benchmarking.md
@@ -0,0 +1,110 @@
+# Corruption Benchmarking
+
+## Introduction
+
+We provide tools to test object detection and instance segmentation models on the image corruption benchmark defined in [Benchmarking Robustness in Object Detection: Autonomous Driving when Winter is Coming](https://arxiv.org/abs/1907.07484).
+This page provides basic tutorials on how to use the benchmark.
+
+```latex
+@article{michaelis2019winter,
+  title={Benchmarking Robustness in Object Detection:
+    Autonomous Driving when Winter is Coming},
+  author={Michaelis, Claudio and Mitzkus, Benjamin and
+    Geirhos, Robert and Rusak, Evgenia and
+    Bringmann, Oliver and Ecker, Alexander S. and
+    Bethge, Matthias and Brendel, Wieland},
+  journal={arXiv:1907.07484},
+  year={2019}
+}
+```
+
+![image corruption example](../resources/corruptions_sev_3.png)
+
+## About the benchmark
+
+To submit results to the benchmark, please visit the [benchmark homepage](https://github.com/bethgelab/robust-detection-benchmark).
+
+The benchmark is modelled after the [imagenet-c benchmark](https://github.com/hendrycks/robustness), which was originally
+published in [Benchmarking Neural Network Robustness to Common Corruptions and Perturbations](https://arxiv.org/abs/1903.12261) (ICLR 2019) by Dan Hendrycks and Thomas Dietterich.
+
+The image corruption functions are included in this library but can be installed separately using:
+
+```shell
+pip install imagecorruptions
+```
+
+Compared to imagenet-c, a few changes had to be made to handle images of arbitrary size and greyscale images.
+We also modified the 'motion blur' and 'snow' corruptions to remove the dependency on a Linux-specific library,
+which would otherwise have to be installed separately. For details please refer to the [imagecorruptions repository](https://github.com/bethgelab/imagecorruptions).
+
+## Inference with pretrained models
+
+We provide a testing script to evaluate a model's performance on any combination of the corruptions provided in the benchmark.
+
+### Test a dataset
+
+- [x] single GPU testing
+- [ ] multiple GPU testing
+- [ ] visualize detection results
+
+You can use the following commands to test a model's performance under the 15 corruptions used in the benchmark.
+
+```shell
+# single-gpu testing
+python tools/analysis_tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}]
+```
+
+Alternatively, different groups of corruptions can be selected.
+
+```shell
+# noise
+python tools/analysis_tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] --corruptions noise
+
+# blur
+python tools/analysis_tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] --corruptions blur
+
+# weather
+python tools/analysis_tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] --corruptions weather
+
+# digital
+python tools/analysis_tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] --corruptions digital
+```
+
+Or a custom set of corruptions, e.g.:
+
+```shell
+# gaussian noise, zoom blur and snow
+python tools/analysis_tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] --corruptions gaussian_noise zoom_blur snow
+```
+
+Finally, the corruption severities to evaluate can be chosen.
+Severity 0 corresponds to clean data and the effect increases from 1 to 5.
+
+```shell
+# severity 1
+python tools/analysis_tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] --severities 1
+
+# severities 0,2,4
+python tools/analysis_tools/test_robustness.py ${CONFIG_FILE} ${CHECKPOINT_FILE} [--out ${RESULT_FILE}] [--eval ${EVAL_METRICS}] --severities 0 2 4
+```
+
+## Results for modelzoo models
+
+The results on COCO 2017val are shown in the table below.
+
+Model | Backbone | Style | Lr schd | box AP clean | box AP corr. | box % | mask AP clean | mask AP corr. | mask % |
+:-----:|:---------:|:-------:|:-------:|:------------:|:------------:|:-----:|:-------------:|:-------------:|:------:|
+Faster R-CNN | R-50-FPN | pytorch | 1x | 36.3 | 18.2 | 50.2 | - | - | - |
+Faster R-CNN | R-101-FPN | pytorch | 1x | 38.5 | 20.9 | 54.2 | - | - | - |
+Faster R-CNN | X-101-32x4d-FPN | pytorch | 1x | 40.1 | 22.3 | 55.5 | - | - | - |
+Faster R-CNN | X-101-64x4d-FPN | pytorch | 1x | 41.3 | 23.4 | 56.6 | - | - | - |
+Faster R-CNN | R-50-FPN-DCN | pytorch | 1x | 40.0 | 22.4 | 56.1 | - | - | - |
+Faster R-CNN | X-101-32x4d-FPN-DCN | pytorch | 1x | 43.4 | 26.7 | 61.6 | - | - | - |
+Mask R-CNN | R-50-FPN | pytorch | 1x | 37.3 | 18.7 | 50.1 | 34.2 | 16.8 | 49.1 |
+Mask R-CNN | R-50-FPN-DCN | pytorch | 1x | 41.1 | 23.3 | 56.7 | 37.2 | 20.7 | 55.7 |
+Cascade R-CNN | R-50-FPN | pytorch | 1x | 40.4 | 20.1 | 49.7 | - | - | - |
+Cascade Mask R-CNN | R-50-FPN | pytorch | 1x | 41.2 | 20.7 | 50.2 | 35.7 | 17.6 | 49.3 |
+RetinaNet | R-50-FPN | pytorch | 1x | 35.6 | 17.8 | 50.1 | - | - | - |
+Hybrid Task Cascade | X-101-64x4d-FPN-DCN | pytorch | 1x | 50.6 | 32.7 | 64.7 | 43.8 | 28.1 | 64.0 |
+
+Results may vary slightly due to the stochastic application of the corruptions.
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/stat.py b/detection_cbnet/docker-build-context/cbnetv2/docs/stat.py
new file mode 100755
index 0000000000000000000000000000000000000000..9625c62efcb1be17671ba011cd999da70e86204a
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/stat.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python
+import functools as func
+import glob
+import os.path as osp
+import re
+
+import numpy as np
+
+url_prefix = 'https://github.com/open-mmlab/mmdetection/blob/master/'
+
+files = sorted(glob.glob('../configs/*/README.md'))
+
+stats = []
+titles = []
+num_ckpts = 0
+
+for f in files:
+    url = osp.dirname(f.replace('../', url_prefix))
+
+    with open(f, 'r') as content_file:
+        content = content_file.read()
+
+    title = content.split('\n')[0].replace('# ', '').strip()
+    ckpts = set(x.lower().strip()
+                for x in re.findall(r'\[model\]\((https?.*)\)', content))
+
+    if len(ckpts) == 0:
+        continue
+
+    _papertype = [x for x in re.findall(r'\[([A-Z]+)\]', content)]
+    assert len(_papertype) > 0
+    papertype = _papertype[0]
+
+    paper = set([(papertype, title)])
+
+    titles.append(title)
+    num_ckpts += len(ckpts)
+
+    statsmsg = f"""
+\t* [{papertype}] [{title}]({url}) ({len(ckpts)} ckpts)
+"""
+    stats.append((paper, ckpts, statsmsg))
+
+allpapers = func.reduce(lambda a, b: a.union(b), [p for p, _, _ in stats])
+msglist = '\n'.join(x for _, _, x in stats)
+
+papertypes, papercounts = np.unique([t for t, _ in allpapers],
+                                    return_counts=True)
+countstr = '\n'.join(
+    [f' - {t}: {c}' for t, c in zip(papertypes, papercounts)])
+
+modelzoo = f"""
+# Model Zoo Statistics
+
+* Number of papers: {len(set(titles))}
+{countstr}
+
+* Number of checkpoints: {num_ckpts}
+
+{msglist}
+"""
+
+with open('modelzoo_statistics.md', 'w') as f:
+    f.write(modelzoo)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/config.md b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/config.md
new file mode 100644
index 0000000000000000000000000000000000000000..449a42ca128f2c1b791cbf2454474db449f0f11e
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/config.md
@@ -0,0 +1,532 @@
+# Tutorial 1: Learn about Configs
+
+We incorporate modular and inheritance design into our config system, which makes it convenient to conduct various experiments.
+If you wish to inspect the config file, you may run `python tools/misc/print_config.py /PATH/TO/CONFIG` to see the complete config.
+
+## Modify config through script arguments
+
+When submitting jobs using "tools/train.py" or "tools/test.py", you may specify `--cfg-options` to modify the config in place.
+
+- Update config keys of dict chains.
+
+  The config options can be specified following the order of the dict keys in the original config.
+  For example, `--cfg-options model.backbone.norm_eval=False` changes all the BN modules in model backbones to `train` mode.
+
+- Update keys inside a list of configs.
+
+  Some config dicts are composed as a list in your config. For example, the training pipeline `data.train.pipeline` is normally a list,
+  e.g. `[dict(type='LoadImageFromFile'), ...]`. If you want to change `'LoadImageFromFile'` to `'LoadImageFromWebcam'` in the pipeline,
+  you may specify `--cfg-options data.train.pipeline.0.type=LoadImageFromWebcam`.
+
+- Update values of lists/tuples.
+
+  If the value to be updated is a list or a tuple: for example, the config file normally sets `workflow=[('train', 1)]`.
+  If you want to change this key, you may specify `--cfg-options workflow="[(train,1),(val,1)]"`. Note that the quotation mark \" is necessary to
+  support list/tuple data types, and that **NO** white space is allowed inside the quotation marks in the specified value.
+
+## Config File Structure
+
+There are 4 basic component types under `config/_base_`: dataset, model, schedule, default_runtime.
+Many methods, such as Faster R-CNN, Mask R-CNN, Cascade R-CNN, RPN, and SSD, can be easily constructed with one of each.
+The configs that are composed by components from `_base_` are called _primitive_.
+
+For all configs under the same folder, it is recommended to have only **one** _primitive_ config. All other configs should inherit from the _primitive_ config. In this way, the maximum inheritance level is 3.
+
+For easy understanding, we recommend that contributors inherit from existing methods.
+For example, if some modification is made based on Faster R-CNN, users may first inherit the basic Faster R-CNN structure by specifying `_base_ = ../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py`, then modify the necessary fields in the config files.
+
+If you are building an entirely new method that does not share the structure with any of the existing methods, you may create a folder `xxx_rcnn` under `configs`.
+
+Please refer to [mmcv](https://mmcv.readthedocs.io/en/latest/utils.html#config) for detailed documentation.
+
+## Config Name Style
+
+We follow the style below to name config files. Contributors are advised to follow the same style.
+
+```
+{model}_[model setting]_{backbone}_{neck}_[norm setting]_[misc]_[gpu x batch_per_gpu]_{schedule}_{dataset}
+```
+
+`{xxx}` is a required field and `[yyy]` is optional.
+
+- `{model}`: model type like `faster_rcnn`, `mask_rcnn`, etc.
+- `[model setting]`: specific setting for some models, like `without_semantic` for `htc`, `moment` for `reppoints`, etc.
+- `{backbone}`: backbone type like `r50` (ResNet-50), `x101` (ResNeXt-101).
+- `{neck}`: neck type like `fpn`, `pafpn`, `nasfpn`, `c4`.
+- `[norm_setting]`: `bn` (Batch Normalization) is used unless specified; other norm layer types could be `gn` (Group Normalization), `syncbn` (Synchronized Batch Normalization).
+  `gn-head`/`gn-neck` indicates GN is applied in the head/neck only, while `gn-all` means GN is applied in the entire model, e.g. backbone, neck, head.
+- `[misc]`: miscellaneous settings/plugins of the model, e.g. `dconv`, `gcb`, `attention`, `albu`, `mstrain`.
+- `[gpu x batch_per_gpu]`: GPUs and samples per GPU; `8x2` is used by default.
+- `{schedule}`: training schedule, options are `1x`, `2x`, `20e`, etc.
+  `1x` and `2x` mean 12 epochs and 24 epochs respectively.
+  `20e` is adopted in cascade models, which denotes 20 epochs.
+  For `1x`/`2x`, the initial learning rate decays by a factor of 10 at the 8th/16th and 11th/22nd epochs.
+  For `20e`, the initial learning rate decays by a factor of 10 at the 16th and 19th epochs.
+- `{dataset}`: dataset like `coco`, `cityscapes`, `voc_0712`, `wider_face`.
+
+## Deprecated train_cfg/test_cfg
+
+`train_cfg` and `test_cfg` are deprecated at the top level of the config file; please specify them inside the model config. The original config structure is as below.
+
+```python
+# deprecated
+model = dict(
+   type=...,
+   ...
+)
+train_cfg=dict(...)
+test_cfg=dict(...)
+```
+
+The migration example is as below.
+
+```python
+# recommended
+model = dict(
+   type=...,
+   ...
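+   # train_cfg and test_cfg now live inside the model dict rather than at the top level: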
+   train_cfg=dict(...),
+   test_cfg=dict(...),
+)
+```
+
+## An Example of Mask R-CNN
+
+To help the users have a basic idea of a complete config and the modules in a modern detection system,
+we make brief comments on the config of Mask R-CNN using ResNet50 and FPN as follows.
+For more detailed usage and the corresponding alternatives for each module, please refer to the API documentation.
+
+```python
+model = dict(
+    type='MaskRCNN', # The name of detector
+    pretrained='torchvision://resnet50', # The ImageNet pretrained backbone to be loaded
+    backbone=dict( # The config of backbone
+        type='ResNet', # The type of the backbone, refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/backbones/resnet.py#L288 for more details.
+        depth=50, # The depth of backbone, usually it is 50 or 101 for ResNet and ResNext backbones.
+        num_stages=4, # Number of stages of the backbone.
+        out_indices=(0, 1, 2, 3), # The index of output feature maps produced in each stage
+        frozen_stages=1, # The weights in the first stage are frozen
+        norm_cfg=dict( # The config of normalization layers.
+            type='BN', # Type of norm layer, usually it is BN or GN
+            requires_grad=True), # Whether to train the gamma and beta in BN
+        norm_eval=True, # Whether to freeze the statistics in BN
+        style='pytorch'), # The style of backbone, 'pytorch' means that stride 2 layers are in 3x3 convs, 'caffe' means stride 2 layers are in 1x1 convs.
+    neck=dict(
+        type='FPN', # The neck of detector is FPN. We also support 'NASFPN', 'PAFPN', etc. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/necks/fpn.py#L10 for more details.
+        in_channels=[256, 512, 1024, 2048], # The input channels, this is consistent with the output channels of backbone
+        out_channels=256, # The output channels of each level of the pyramid feature map
+        num_outs=5), # The number of output scales
+    rpn_head=dict(
+        type='RPNHead', # The type of RPN head is 'RPNHead', we also support 'GARPNHead', etc. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/dense_heads/rpn_head.py#L12 for more details.
+        in_channels=256, # The input channels of each input feature map, this is consistent with the output channels of neck
+        feat_channels=256, # Feature channels of convolutional layers in the head.
+        anchor_generator=dict( # The config of anchor generator
+            type='AnchorGenerator', # Most methods use AnchorGenerator; SSD detectors use `SSDAnchorGenerator`. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/anchor/anchor_generator.py#L10 for more details
+            scales=[8], # Basic scale of the anchor, the area of the anchor in one position of a feature map will be scale * base_sizes
+            ratios=[0.5, 1.0, 2.0], # The ratio between height and width.
+            strides=[4, 8, 16, 32, 64]), # The strides of the anchor generator. This is consistent with the FPN feature strides. The strides will be taken as base_sizes if base_sizes is not set.
+        bbox_coder=dict( # Config of box coder to encode and decode the boxes during training and testing
+            type='DeltaXYWHBBoxCoder', # Type of box coder. 'DeltaXYWHBBoxCoder' is applied for most of the methods. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/bbox/coder/delta_xywh_bbox_coder.py#L9 for more details.
+            target_means=[0.0, 0.0, 0.0, 0.0], # The target means used to encode and decode boxes
+            target_stds=[1.0, 1.0, 1.0, 1.0]), # The standard deviations used to encode and decode boxes
+        loss_cls=dict( # Config of loss function for the classification branch
+            type='CrossEntropyLoss', # Type of loss for classification branch, we also support FocalLoss etc.
+            use_sigmoid=True, # RPN usually performs two-class classification, so it usually uses the sigmoid function.
+            loss_weight=1.0), # Loss weight of the classification branch.
+        loss_bbox=dict( # Config of loss function for the regression branch.
+            type='L1Loss', # Type of loss, we also support many IoU Losses and smooth L1-loss, etc. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/losses/smooth_l1_loss.py#L56 for implementation.
+            loss_weight=1.0)), # Loss weight of the regression branch.
+    roi_head=dict( # RoIHead encapsulates the second stage of two-stage/cascade detectors.
+        type='StandardRoIHead', # Type of the RoI head. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/roi_heads/standard_roi_head.py#L10 for implementation.
+        bbox_roi_extractor=dict( # RoI feature extractor for bbox regression.
+            type='SingleRoIExtractor', # Type of the RoI feature extractor, most methods use SingleRoIExtractor. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/roi_heads/roi_extractors/single_level.py#L10 for details.
+            roi_layer=dict( # Config of RoI Layer
+                type='RoIAlign', # Type of RoI Layer, DeformRoIPoolingPack and ModulatedDeformRoIPoolingPack are also supported. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/ops/roi_align/roi_align.py#L79 for details.
+                output_size=7, # The output size of feature maps.
+                sampling_ratio=0), # Sampling ratio when extracting the RoI features. 0 means adaptive ratio.
+            out_channels=256, # Output channels of the extracted feature.
+            featmap_strides=[4, 8, 16, 32]), # Strides of multi-scale feature maps. It should be consistent with the architecture of the backbone.
+        bbox_head=dict( # Config of box head in the RoIHead.
+            type='Shared2FCBBoxHead', # Type of the bbox head; refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/roi_heads/bbox_heads/convfc_bbox_head.py#L177 for implementation details.
+            in_channels=256, # Input channels for bbox head. This is consistent with the out_channels in roi_extractor
+            fc_out_channels=1024, # Output feature channels of FC layers.
+            roi_feat_size=7, # Size of RoI features
+            num_classes=80, # Number of classes for classification
+            bbox_coder=dict( # Box coder used in the second stage.
+                type='DeltaXYWHBBoxCoder', # Type of box coder. 'DeltaXYWHBBoxCoder' is applied for most of the methods.
+                target_means=[0.0, 0.0, 0.0, 0.0], # Means used to encode and decode boxes
+                target_stds=[0.1, 0.1, 0.2, 0.2]), # Standard deviations for encoding and decoding. They are smaller since the boxes are more accurate. [0.1, 0.1, 0.2, 0.2] is a conventional setting.
+            reg_class_agnostic=False, # Whether the regression is class agnostic.
+            loss_cls=dict( # Config of loss function for the classification branch
+                type='CrossEntropyLoss', # Type of loss for classification branch, we also support FocalLoss etc.
+                use_sigmoid=False, # Whether to use sigmoid.
+                loss_weight=1.0), # Loss weight of the classification branch.
+            loss_bbox=dict( # Config of loss function for the regression branch.
+                type='L1Loss', # Type of loss, we also support many IoU Losses and smooth L1-loss, etc.
+                loss_weight=1.0)), # Loss weight of the regression branch.
+        mask_roi_extractor=dict( # RoI feature extractor for mask generation.
+            type='SingleRoIExtractor', # Type of the RoI feature extractor, most methods use SingleRoIExtractor.
+            roi_layer=dict( # Config of RoI Layer that extracts features for instance segmentation
+                type='RoIAlign', # Type of RoI Layer, DeformRoIPoolingPack and ModulatedDeformRoIPoolingPack are also supported
+                output_size=14, # The output size of feature maps.
+                sampling_ratio=0), # Sampling ratio when extracting the RoI features.
+            out_channels=256, # Output channels of the extracted feature.
+            featmap_strides=[4, 8, 16, 32]), # Strides of multi-scale feature maps.
+        mask_head=dict( # Mask prediction head
+            type='FCNMaskHead', # Type of mask head, refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/roi_heads/mask_heads/fcn_mask_head.py#L21 for implementation details.
+            num_convs=4, # Number of convolutional layers in mask head.
+            in_channels=256, # Input channels, should be consistent with the output channels of mask roi extractor.
+            conv_out_channels=256, # Output channels of the convolutional layer.
+            num_classes=80, # Number of classes to be segmented.
+            loss_mask=dict( # Config of loss function for the mask branch.
+                type='CrossEntropyLoss', # Type of loss used for segmentation
+                use_mask=True, # Whether to only train the mask in the correct class.
+                loss_weight=1.0)))) # Loss weight of mask branch.
+train_cfg = dict( # Config of training hyperparameters for rpn and rcnn
+    rpn=dict( # Training config of rpn
+        assigner=dict( # Config of assigner
+            type='MaxIoUAssigner', # Type of assigner, MaxIoUAssigner is used for many common detectors. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/bbox/assigners/max_iou_assigner.py#L10 for more details.
+            pos_iou_thr=0.7, # IoU >= threshold 0.7 will be taken as positive samples
+            neg_iou_thr=0.3, # IoU < threshold 0.3 will be taken as negative samples
+            min_pos_iou=0.3, # The minimal IoU threshold to take boxes as positive samples
+            match_low_quality=True, # Whether to match the boxes under low quality (see API doc for more details).
+            ignore_iof_thr=-1), # IoF threshold for ignoring bboxes
+        sampler=dict( # Config of positive/negative sampler
+            type='RandomSampler', # Type of sampler, PseudoSampler and other samplers are also supported. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/bbox/samplers/random_sampler.py#L8 for implementation details.
+            num=256, # Number of samples
+            pos_fraction=0.5, # The ratio of positive samples in the total samples.
+            neg_pos_ub=-1, # The upper bound of negative samples based on the number of positive samples.
+            add_gt_as_proposals=False), # Whether to add GT as proposals after sampling.
+        allowed_border=-1, # The border allowed after padding for valid anchors.
+        pos_weight=-1, # The weight of positive samples during training.
+        debug=False), # Whether to set the debug mode
+    rpn_proposal=dict( # The config to generate proposals during training
+        nms_across_levels=False, # Whether to do NMS for boxes across levels. Only works in `GARPNHead`; naive rpn does not support NMS across levels.
+        nms_pre=2000, # The number of boxes before NMS
+        nms_post=1000, # The number of boxes to be kept by NMS. Only works in `GARPNHead`.
+        max_per_img=1000, # The number of boxes to be kept after NMS.
+        nms=dict( # Config of nms
+            type='nms', # Type of nms
+            iou_threshold=0.7 # NMS threshold
+        ),
+        min_bbox_size=0), # The allowed minimal box size
+    rcnn=dict( # The config for the roi heads.
+        assigner=dict( # Config of assigner for the second stage; this is different from that in rpn
+            type='MaxIoUAssigner', # Type of assigner, MaxIoUAssigner is used for all roi_heads for now. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/bbox/assigners/max_iou_assigner.py#L10 for more details.
+            pos_iou_thr=0.5, # IoU >= threshold 0.5 will be taken as positive samples
+            neg_iou_thr=0.5, # IoU < threshold 0.5 will be taken as negative samples
+            min_pos_iou=0.5, # The minimal IoU threshold to take boxes as positive samples
+            match_low_quality=False, # Whether to match the boxes under low quality (see API doc for more details).
+            ignore_iof_thr=-1), # IoF threshold for ignoring bboxes
+        sampler=dict(
+            type='RandomSampler', # Type of sampler, PseudoSampler and other samplers are also supported. Refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/bbox/samplers/random_sampler.py#L8 for implementation details.
+            num=512, # Number of samples
+            pos_fraction=0.25, # The ratio of positive samples in the total samples.
+            neg_pos_ub=-1, # The upper bound of negative samples based on the number of positive samples.
+            add_gt_as_proposals=True), # Whether to add GT as proposals after sampling.
+        mask_size=28, # Size of mask
+        pos_weight=-1, # The weight of positive samples during training.
+        debug=False)) # Whether to set the debug mode
+test_cfg = dict( # Config for testing hyperparameters for rpn and rcnn
+    rpn=dict( # The config to generate proposals during testing
+        nms_across_levels=False, # Whether to do NMS for boxes across levels. Only works in `GARPNHead`; naive rpn does not support NMS across levels.
+        nms_pre=1000, # The number of boxes before NMS
+        nms_post=1000, # The number of boxes to be kept by NMS. Only works in `GARPNHead`.
+        max_per_img=1000, # The number of boxes to be kept after NMS.
+        nms=dict( # Config of nms
+            type='nms', # Type of nms
+            iou_threshold=0.7 # NMS threshold
+        ),
+        min_bbox_size=0), # The allowed minimal box size
+    rcnn=dict( # The config for the roi heads.
+        score_thr=0.05, # Threshold to filter out boxes
+        nms=dict( # Config of nms in the second stage
+            type='nms', # Type of nms
+            iou_thr=0.5), # NMS threshold
+        max_per_img=100, # Max number of detections in each image
+        mask_thr_binary=0.5)) # Threshold of mask prediction
+dataset_type = 'CocoDataset' # Dataset type, this will be used to define the dataset
+data_root = 'data/coco/' # Root path of data
+img_norm_cfg = dict( # Image normalization config to normalize the input images
+    mean=[123.675, 116.28, 103.53], # Mean values used to pre-train the pre-trained backbone models
+    std=[58.395, 57.12, 57.375], # Standard deviations used to pre-train the pre-trained backbone models
+    to_rgb=True
+) # The channel order of images used to pre-train the pre-trained backbone models
+train_pipeline = [ # Training pipeline
+    dict(type='LoadImageFromFile'), # First pipeline to load images from file path
+    dict(
+        type='LoadAnnotations', # Second pipeline to load annotations for current image
+        with_bbox=True, # Whether to use bounding box, True for detection
+        with_mask=True, # Whether to use instance mask, True for instance segmentation
+        poly2mask=False), # Whether to convert the polygon mask to instance mask, set False for acceleration and to save memory
+    dict(
+        type='Resize', # Augmentation pipeline that resizes the images and their annotations
+        img_scale=(1333, 800), # The largest scale of image
+        keep_ratio=True), # Whether to keep the ratio between height and width.
+    dict(
+        type='RandomFlip', # Augmentation pipeline that flips the images and their annotations
+        flip_ratio=0.5), # The ratio or probability to flip
+    dict(
+        type='Normalize', # Augmentation pipeline that normalizes the input images
+        mean=[123.675, 116.28, 103.53], # These values are the same as in img_norm_cfg since the
+        std=[58.395, 57.12, 57.375], # keys of img_norm_cfg are used here as arguments
+        to_rgb=True),
+    dict(
+        type='Pad', # Padding config
+        size_divisor=32), # The number the padded images should be divisible by
+    dict(type='DefaultFormatBundle'), # Default format bundle to gather data in the pipeline
+    dict(
+        type='Collect', # Pipeline that decides which keys in the data should be passed to the detector
+        keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks'])
+]
+test_pipeline = [
+    dict(type='LoadImageFromFile'), # First pipeline to load images from file path
+    dict(
+        type='MultiScaleFlipAug', # A wrapper that encapsulates the testing augmentations
+        img_scale=(1333, 800), # Decides the largest scale for testing, used for the Resize pipeline
+        flip=False, # Whether to flip images during testing
+        transforms=[
+            dict(type='Resize', # Use resize augmentation
+                 keep_ratio=True), # Whether to keep the ratio between height and width; the img_scale set here will be overridden by the img_scale set above.
+            dict(type='RandomFlip'), # Though RandomFlip is added in the pipeline, it is not used because flip=False
+            dict(
+                type='Normalize', # Normalization config, the values are from img_norm_cfg
+                mean=[123.675, 116.28, 103.53],
+                std=[58.395, 57.12, 57.375],
+                to_rgb=True),
+            dict(
+                type='Pad', # Padding config to pad images divisible by 32.
+                size_divisor=32),
+            dict(
+                type='ImageToTensor', # Convert image to tensor
+                keys=['img']),
+            dict(
+                type='Collect', # Collect pipeline that collects necessary keys for testing.
+                keys=['img'])
+        ])
+]
+data = dict(
+    samples_per_gpu=2, # Batch size of a single GPU
+    workers_per_gpu=2, # Workers to pre-fetch data for each single GPU
+    train=dict( # Train dataset config
+        type='CocoDataset', # Type of dataset, refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/datasets/coco.py#L19 for details.
+        ann_file='data/coco/annotations/instances_train2017.json', # Path of annotation file
+        img_prefix='data/coco/train2017/', # Prefix of image path
+        pipeline=[ # Pipeline, this is passed from the train_pipeline created before.
+            dict(type='LoadImageFromFile'),
+            dict(
+                type='LoadAnnotations',
+                with_bbox=True,
+                with_mask=True,
+                poly2mask=False),
+            dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
+            dict(type='RandomFlip', flip_ratio=0.5),
+            dict(
+                type='Normalize',
+                mean=[123.675, 116.28, 103.53],
+                std=[58.395, 57.12, 57.375],
+                to_rgb=True),
+            dict(type='Pad', size_divisor=32),
+            dict(type='DefaultFormatBundle'),
+            dict(
+                type='Collect',
+                keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks'])
+        ]),
+    val=dict( # Validation dataset config
+        type='CocoDataset',
+        ann_file='data/coco/annotations/instances_val2017.json',
+        img_prefix='data/coco/val2017/',
+        pipeline=[ # Pipeline is passed from the test_pipeline created before
+            dict(type='LoadImageFromFile'),
+            dict(
+                type='MultiScaleFlipAug',
+                img_scale=(1333, 800),
+                flip=False,
+                transforms=[
+                    dict(type='Resize', keep_ratio=True),
+                    dict(type='RandomFlip'),
+                    dict(
+                        type='Normalize',
+                        mean=[123.675, 116.28, 103.53],
+                        std=[58.395, 57.12, 57.375],
+                        to_rgb=True),
+                    dict(type='Pad', size_divisor=32),
+                    dict(type='ImageToTensor', keys=['img']),
+                    dict(type='Collect', keys=['img'])
+                ])
+        ]),
+    test=dict( # Test dataset config, modify the ann_file for test-dev/test submission
+        type='CocoDataset',
+        ann_file='data/coco/annotations/instances_val2017.json',
+        img_prefix='data/coco/val2017/',
+        pipeline=[ # Pipeline is passed from the test_pipeline created before
+            dict(type='LoadImageFromFile'),
+            dict(
+                type='MultiScaleFlipAug',
+                img_scale=(1333, 800),
+                flip=False,
+                transforms=[
+                    dict(type='Resize', keep_ratio=True),
+                    dict(type='RandomFlip'),
+                    dict(
+                        type='Normalize',
+                        mean=[123.675, 116.28, 103.53],
+                        std=[58.395, 57.12, 57.375],
+                        to_rgb=True),
+                    dict(type='Pad', size_divisor=32),
+                    dict(type='ImageToTensor', keys=['img']),
+                    dict(type='Collect', keys=['img'])
+                ])
+        ],
+        samples_per_gpu=2 # Batch size of a single GPU used in testing
+    ))
+evaluation = dict( # The config to build the evaluation hook, refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/evaluation/eval_hooks.py#L7 for more details.
+    interval=1, # Evaluation interval
+    metric=['bbox', 'segm']) # Metrics used during evaluation
+optimizer = dict( # Config used to build optimizer, supports all the optimizers in PyTorch with the same arguments as in PyTorch
+    type='SGD', # Type of optimizer, refer to https://github.com/open-mmlab/mmdetection/blob/master/mmdet/core/optimizer/default_constructor.py#L13 for more details
+    lr=0.02, # Learning rate of the optimizer, see detailed usage of the parameters in the documentation of PyTorch
+    momentum=0.9, # Momentum
+    weight_decay=0.0001) # Weight decay of SGD
+optimizer_config = dict( # Config used to build the optimizer hook, refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/optimizer.py#L8 for implementation details.
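+    # gradient clipping is disabled below; some configs enable it instead, e.g. grad_clip=dict(max_norm=35, norm_type=2)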
+    grad_clip=None)  # Most of the methods do not use gradient clipping
+lr_config = dict(  # Learning rate scheduler config used to register the LrUpdater hook
+    policy='step',  # The policy of the scheduler, also supports CosineAnnealing, Cyclic, etc. Refer to details of supported LrUpdaters at https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py#L9.
+    warmup='linear',  # The warmup policy, also supports `exp` and `constant`.
+    warmup_iters=500,  # The number of iterations for warmup
+    warmup_ratio=0.001,  # The ratio of the starting learning rate used for warmup
+    step=[8, 11])  # Steps to decay the learning rate
+runner = dict(type='EpochBasedRunner', max_epochs=12)  # Runner that runs the workflow for max_epochs in total
+checkpoint_config = dict(  # Config to set the checkpoint hook, refer to https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/checkpoint.py for implementation.
+    interval=1)  # The save interval is 1
+log_config = dict(  # Config to register the logger hook
+    interval=50,  # Interval to print the log
+    hooks=[
+        # dict(type='TensorboardLoggerHook')  # The Tensorboard logger is also supported
+        dict(type='TextLoggerHook')
+    ])  # The logger used to record the training process.
+dist_params = dict(backend='nccl')  # Parameters to set up distributed training, the port can also be set.
+log_level = 'INFO'  # The level of logging.
+load_from = None  # Load a model as a pre-trained model from a given path. This will not resume training.
+resume_from = None  # Resume checkpoints from a given path, the training will be resumed from the epoch when the checkpoint was saved.
+workflow = [('train', 1)]  # Workflow for the runner. [('train', 1)] means there is only one workflow and the workflow named 'train' is executed once. The workflow trains the model for 12 epochs according to max_epochs.
+work_dir = 'work_dir'  # Directory to save the model checkpoints and logs for the current experiment.
+```
+
+## FAQ
+
+### Ignore some fields in the base configs
+
+Sometimes, you may set `_delete_=True` to ignore some of the fields in base configs.
+You may refer to [mmcv](https://mmcv.readthedocs.io/en/latest/utils.html#inherit-from-base-config-with-ignored-fields) for a simple illustration.
+
+For example, in MMDetection, suppose you want to change the backbone of Mask R-CNN, which uses the following config.
+
+```python
+model = dict(
+    type='MaskRCNN',
+    pretrained='torchvision://resnet50',
+    backbone=dict(
+        type='ResNet',
+        depth=50,
+        num_stages=4,
+        out_indices=(0, 1, 2, 3),
+        frozen_stages=1,
+        norm_cfg=dict(type='BN', requires_grad=True),
+        norm_eval=True,
+        style='pytorch'),
+    neck=dict(...),
+    rpn_head=dict(...),
+    roi_head=dict(...))
+```
+
+`ResNet` and `HRNet` use different keywords for construction, so simply overriding the `backbone` field would leave stale ResNet-specific keys behind. The child config therefore sets `_delete_=True`.
+
+```python
+_base_ = '../mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py'
+model = dict(
+    pretrained='open-mmlab://msra/hrnetv2_w32',
+    backbone=dict(
+        _delete_=True,
+        type='HRNet',
+        extra=dict(
+            stage1=dict(
+                num_modules=1,
+                num_branches=1,
+                block='BOTTLENECK',
+                num_blocks=(4, ),
+                num_channels=(64, )),
+            stage2=dict(
+                num_modules=1,
+                num_branches=2,
+                block='BASIC',
+                num_blocks=(4, 4),
+                num_channels=(32, 64)),
+            stage3=dict(
+                num_modules=4,
+                num_branches=3,
+                block='BASIC',
+                num_blocks=(4, 4, 4),
+                num_channels=(32, 64, 128)),
+            stage4=dict(
+                num_modules=3,
+                num_branches=4,
+                block='BASIC',
+                num_blocks=(4, 4, 4, 4),
+                num_channels=(32, 64, 128, 256)))),
+    neck=dict(...))
+```
+
+With `_delete_=True`, all old keys in the `backbone` field are replaced with the new keys.
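+
+To check which keys survive the merge, a quick sanity check is to load the child config and print the resolved result. This is a minimal sketch, assuming `mmcv` is installed and the path (which is illustrative) points at the HRNet-based child config above:
+
+```python
+from mmcv import Config
+
+# _base_ files are resolved and _delete_ is applied while loading, so cfg
+# already reflects the final merged dictionary.
+cfg = Config.fromfile('configs/hrnet/mask_rcnn_hrnetv2p_w32_1x_coco.py')
+
+print(cfg.model.backbone.type)  # expected: 'HRNet', with no leftover ResNet keys
+print(cfg.pretty_text)          # the full merged config for inspection
+```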
+
+### Use intermediate variables in configs
+
+Some intermediate variables are used in the config files, like `train_pipeline`/`test_pipeline` in datasets.
+It's worth noting that when modifying intermediate variables in the child configs, users need to pass the intermediate variables into the corresponding fields again.
+For example, suppose we would like to use a multi-scale strategy to train a Mask R-CNN. `train_pipeline`/`test_pipeline` are the intermediate variables we would like to modify.
+
+```python
+_base_ = './mask_rcnn_r50_fpn_1x_coco.py'
+img_norm_cfg = dict(
+    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
+train_pipeline = [
+    dict(type='LoadImageFromFile'),
+    dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
+    dict(
+        type='Resize',
+        img_scale=[(1333, 640), (1333, 672), (1333, 704), (1333, 736),
+                   (1333, 768), (1333, 800)],
+        multiscale_mode="value",
+        keep_ratio=True),
+    dict(type='RandomFlip', flip_ratio=0.5),
+    dict(type='Normalize', **img_norm_cfg),
+    dict(type='Pad', size_divisor=32),
+    dict(type='DefaultFormatBundle'),
+    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
+]
+test_pipeline = [
+    dict(type='LoadImageFromFile'),
+    dict(
+        type='MultiScaleFlipAug',
+        img_scale=(1333, 800),
+        flip=False,
+        transforms=[
+            dict(type='Resize', keep_ratio=True),
+            dict(type='RandomFlip'),
+            dict(type='Normalize', **img_norm_cfg),
+            dict(type='Pad', size_divisor=32),
+            dict(type='ImageToTensor', keys=['img']),
+            dict(type='Collect', keys=['img']),
+        ])
+]
+data = dict(
+    train=dict(pipeline=train_pipeline),
+    val=dict(pipeline=test_pipeline),
+    test=dict(pipeline=test_pipeline))
+```
+
+We first define the new `train_pipeline`/`test_pipeline` and pass them into `data`.
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/customize_dataset.md b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/customize_dataset.md
new file mode 100644
index 0000000000000000000000000000000000000000..d1e956d4abae00a32359ee7136f3998caffc796a
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/customize_dataset.md
@@ -0,0 +1,487 @@
+# Tutorial 2: Customize Datasets
+
+## Support new data format
+
+To support a new data format, you can either convert it to an existing format (COCO format or PASCAL format) or directly convert it to the middle format. You could also choose to do the conversion offline (before training, by a script) or online (implement a new dataset and do the conversion at training). In MMDetection, we recommend converting the data into COCO format and doing the conversion offline; then you only need to modify the config's data annotation paths and classes after the conversion of your data.
+
+### Reorganize new data formats to existing format
+
+The simplest way is to convert your dataset to an existing dataset format (COCO or PASCAL VOC).
+
+The annotation json files in COCO format have the following necessary keys:
+
+```python
+'images': [
+    {
+        'file_name': 'COCO_val2014_000000001268.jpg',
+        'height': 427,
+        'width': 640,
+        'id': 1268
+    },
+    ...
+],
+
+'annotations': [
+    {
+        'segmentation': [[192.81,
+            247.09,
+            ...
+            219.03,
+            249.06]],  # if you have mask labels
+        'area': 1035.749,
+        'iscrowd': 0,
+        'image_id': 1268,
+        'bbox': [192.81, 224.8, 74.73, 33.43],
+        'category_id': 16,
+        'id': 42986
+    },
+    ...
+],
+
+'categories': [
+    {'id': 0, 'name': 'car'},
+ ]
+```
+
+There are three necessary keys in the json file:
+
+- `images`: contains a list of images with their information such as `file_name`, `height`, `width`, and `id`.
+- `annotations`: contains the list of instance annotations.
+- `categories`: contains the list of category names and their IDs.
+
+After the data pre-processing, there are two steps for users to train the customized new dataset with an existing format (e.g. COCO format):
+
+1. Modify the config file for using the customized dataset.
+2. Check the annotations of the customized dataset.
+
+Here we give an example to show the above two steps, which uses a customized dataset of 5 classes in COCO format to train an existing Cascade Mask R-CNN R50 FPN detector.
+
+#### 1. Modify the config file for using the customized dataset
+
+There are two aspects involved in the modification of the config file:
+
+1. The `data` field. Specifically, you need to explicitly add the `classes` fields in `data.train`, `data.val` and `data.test`.
+2. The `num_classes` field in the `model` part. Explicitly overwrite all the `num_classes` fields from the default value (e.g. 80 in COCO) to the number of your classes.
+
+In `configs/my_custom_config.py`:
+
+```python
+
+# the new config inherits the base configs to highlight the necessary modification
+_base_ = './cascade_mask_rcnn_r50_fpn_1x_coco.py'
+
+# 1. dataset settings
+dataset_type = 'CocoDataset'
+classes = ('a', 'b', 'c', 'd', 'e')
+data = dict(
+    samples_per_gpu=2,
+    workers_per_gpu=2,
+    train=dict(
+        type=dataset_type,
+        # explicitly add your class names to the field `classes`
+        classes=classes,
+        ann_file='path/to/your/train/annotation_data',
+        img_prefix='path/to/your/train/image_data'),
+    val=dict(
+        type=dataset_type,
+        # explicitly add your class names to the field `classes`
+        classes=classes,
+        ann_file='path/to/your/val/annotation_data',
+        img_prefix='path/to/your/val/image_data'),
+    test=dict(
+        type=dataset_type,
+        # explicitly add your class names to the field `classes`
+        classes=classes,
+        ann_file='path/to/your/test/annotation_data',
+        img_prefix='path/to/your/test/image_data'))
+
+# 2. model settings
+
+# explicitly overwrite all the `num_classes` fields from default 80 to 5.
+model = dict(
+    roi_head=dict(
+        bbox_head=[
+            dict(
+                type='Shared2FCBBoxHead',
+                # explicitly overwrite all the `num_classes` fields from default 80 to 5.
+                num_classes=5),
+            dict(
+                type='Shared2FCBBoxHead',
+                # explicitly overwrite all the `num_classes` fields from default 80 to 5.
+                num_classes=5),
+            dict(
+                type='Shared2FCBBoxHead',
+                # explicitly overwrite all the `num_classes` fields from default 80 to 5.
+                num_classes=5)],
+        # explicitly overwrite all the `num_classes` fields from default 80 to 5.
+        mask_head=dict(num_classes=5)))
+```
+
+#### 2. Check the annotations of the customized dataset
+
+Assuming your customized dataset is in COCO format, make sure you have the correct annotations in the customized dataset:
+
+1. The length of the `categories` field in annotations should exactly equal the tuple length of the `classes` field in your config, meaning the number of classes (e.g. 5 in this example).
+2. The `classes` field in your config file should have exactly the same elements and the same order as the `name` in `categories` of the annotations. MMDetection automatically maps the non-continuous `id` in `categories` to continuous label indices, so the string order of `name` in the `categories` field affects the order of label indices.
Meanwhile, the string order of `classes` in the config affects the label text during visualization of predicted bounding boxes.
+3. The `category_id` in the `annotations` field should be valid, i.e., all values in `category_id` should belong to `id` in `categories`.
+
+Here is a valid example of annotations:
+
+```python
+
+'annotations': [
+    {
+        'segmentation': [[192.81,
+            247.09,
+            ...
+            219.03,
+            249.06]],  # if you have mask labels
+        'area': 1035.749,
+        'iscrowd': 0,
+        'image_id': 1268,
+        'bbox': [192.81, 224.8, 74.73, 33.43],
+        'category_id': 16,
+        'id': 42986
+    },
+    ...
+],
+
+# MMDetection automatically maps the non-continuous `id` to continuous label indices.
+'categories': [
+    {'id': 1, 'name': 'a'}, {'id': 3, 'name': 'b'}, {'id': 4, 'name': 'c'}, {'id': 16, 'name': 'd'}, {'id': 17, 'name': 'e'},
+ ]
+```
+
+We use this way to support the CityScapes dataset. The script is in [cityscapes.py](https://github.com/open-mmlab/mmdetection/blob/master/tools/dataset_converters/cityscapes.py) and we also provide the finetuning [configs](https://github.com/open-mmlab/mmdetection/blob/master/configs/cityscapes).
+
+**Note**
+
+1. For instance segmentation datasets, **MMDetection only supports evaluating mask AP of datasets in COCO format for now**.
+2. It is recommended to convert the data offline before training; then you can still use `CocoDataset` and only need to modify the path of annotations and the training classes.
+
+### Reorganize new data format to middle format
+
+It is also fine if you do not want to convert the annotation format to COCO or PASCAL format.
+Actually, we define a simple annotation format and all existing datasets are
+processed to be compatible with it, either online or offline.
+
+The annotation of a dataset is a list of dicts; each dict corresponds to an image.
+There are 3 fields, `filename` (relative path), `width` and `height`, for testing,
+and an additional field `ann` for training. `ann` is also a dict containing at least 2 fields:
+`bboxes` and `labels`, both of which are numpy arrays. Some datasets may provide
+annotations like crowd/difficult/ignored bboxes; we use `bboxes_ignore` and `labels_ignore`
+to cover them.
+
+Here is an example.
+
+```python
+
+[
+    {
+        'filename': 'a.jpg',
+        'width': 1280,
+        'height': 720,
+        'ann': {
+            'bboxes': (n, 4),
+            'labels': (n, ),
+            'bboxes_ignore': (k, 4),
+            'labels_ignore': (k, ) (optional field)
+        }
+    },
+    ...
+]
+```
+
+There are two ways to work with custom datasets.
+
+- online conversion
+
+  You can write a new Dataset class inherited from `CustomDataset`, and overwrite two methods,
+  `load_annotations(self, ann_file)` and `get_ann_info(self, idx)`,
+  like [CocoDataset](https://github.com/open-mmlab/mmdetection/blob/master/mmdet/datasets/coco.py) and [VOCDataset](https://github.com/open-mmlab/mmdetection/blob/master/mmdet/datasets/voc.py).
+
+- offline conversion
+
+  You can convert the annotation format to the expected format above and save it to
+  a pickle or json file, like [pascal_voc.py](https://github.com/open-mmlab/mmdetection/blob/master/tools/dataset_converters/pascal_voc.py).
+  Then you can simply use `CustomDataset`.
+
+### An example of customized dataset
+
+Assume the annotations are in a new format in text files.
+The bounding box annotations are stored in the text file `annotation.txt` as follows:
+
+```
+#
+000001.jpg
+1280 720
+2
+10 20 40 60 1
+20 40 50 60 2
+#
+000002.jpg
+1280 720
+3
+50 20 40 60 2
+20 40 30 45 2
+30 40 50 60 3
+```
+
+We can create a new dataset in `mmdet/datasets/my_dataset.py` to load the data.
+
+```python
+import mmcv
+import numpy as np
+
+from .builder import DATASETS
+from .custom import CustomDataset
+
+
+@DATASETS.register_module()
+class MyDataset(CustomDataset):
+
+    CLASSES = ('person', 'bicycle', 'car', 'motorcycle')
+
+    def load_annotations(self, ann_file):
+        ann_list = mmcv.list_from_file(ann_file)
+
+        data_infos = []
+        for i, ann_line in enumerate(ann_list):
+            if ann_line != '#':
+                continue
+
+            img_shape = ann_list[i + 2].split(' ')
+            width = int(img_shape[0])
+            height = int(img_shape[1])
+            bbox_number = int(ann_list[i + 3])
+
+            bboxes = []
+            labels = []
+            for anns in ann_list[i + 4:i + 4 + bbox_number]:
+                anns = anns.split(' ')  # split each bbox line into its fields
+                bboxes.append([float(ann) for ann in anns[:4]])
+                labels.append(int(anns[4]))
+
+            data_infos.append(
+                dict(
+                    filename=ann_list[i + 1],
+                    width=width,
+                    height=height,
+                    ann=dict(
+                        bboxes=np.array(bboxes).astype(np.float32),
+                        labels=np.array(labels).astype(np.int64))
+                ))
+
+        return data_infos
+
+    def get_ann_info(self, idx):
+        return self.data_infos[idx]['ann']
+
+```
+
+Then in the config, to use `MyDataset` you can modify the config as follows:
+
+```python
+dataset_A_train = dict(
+    type='MyDataset',
+    ann_file = 'image_list.txt',
+    pipeline=train_pipeline
+)
+```
+
+## Customize datasets by dataset wrappers
+
+MMDetection also supports many dataset wrappers to mix datasets or modify the dataset distribution for training.
+Currently it supports three dataset wrappers, as below:
+
+- `RepeatDataset`: simply repeat the whole dataset.
+- `ClassBalancedDataset`: repeat the dataset in a class-balanced manner.
+- `ConcatDataset`: concatenate datasets.
+
+### Repeat dataset
+
+We use `RepeatDataset` as a wrapper to repeat the dataset. For example, suppose the original dataset is `Dataset_A`; to repeat it, the config looks like the following
+
+```python
+dataset_A_train = dict(
+    type='RepeatDataset',
+    times=N,
+    dataset=dict(  # This is the original config of Dataset_A
+        type='Dataset_A',
+        ...
+        pipeline=train_pipeline
+    )
+)
+```
+
+### Class balanced dataset
+
+We use `ClassBalancedDataset` as a wrapper to repeat the dataset based on category
+frequency. The dataset to repeat needs to implement the method `self.get_cat_ids(idx)`
+to support `ClassBalancedDataset`.
+For example, to repeat `Dataset_A` with `oversample_thr=1e-3`, the config looks like the following
+
+```python
+dataset_A_train = dict(
+    type='ClassBalancedDataset',
+    oversample_thr=1e-3,
+    dataset=dict(  # This is the original config of Dataset_A
+        type='Dataset_A',
+        ...
+        pipeline=train_pipeline
+    )
+)
+```
+
+You may refer to the [source code](../../mmdet/datasets/dataset_wrappers.py) for details.
+
+### Concatenate dataset
+
+There are three ways to concatenate datasets.
+
+1. If the datasets you want to concatenate are of the same type and have different annotation files, you can concatenate the dataset configs like the following.
+
+   ```python
+   dataset_A_train = dict(
+       type='Dataset_A',
+       ann_file = ['anno_file_1', 'anno_file_2'],
+       pipeline=train_pipeline
+   )
+   ```
+
+   If the concatenated dataset is used for testing or evaluation, this manner supports evaluating each dataset separately.
To test the concatenated datasets as a whole, you can set `separate_eval=False` as below.
+
+   ```python
+   dataset_A_train = dict(
+       type='Dataset_A',
+       ann_file = ['anno_file_1', 'anno_file_2'],
+       separate_eval=False,
+       pipeline=train_pipeline
+   )
+   ```
+
+2. In case the datasets you want to concatenate are different, you can concatenate the dataset configs like the following.
+
+   ```python
+   dataset_A_train = dict()
+   dataset_B_train = dict()
+
+   data = dict(
+       imgs_per_gpu=2,
+       workers_per_gpu=2,
+       train = [
+           dataset_A_train,
+           dataset_B_train
+       ],
+       val = dataset_A_val,
+       test = dataset_A_test
+   )
+   ```
+
+   If the concatenated dataset is used for testing or evaluation, this manner also supports evaluating each dataset separately.
+
+3. We also support defining `ConcatDataset` explicitly as follows.
+
+   ```python
+   dataset_A_val = dict()
+   dataset_B_val = dict()
+
+   data = dict(
+       imgs_per_gpu=2,
+       workers_per_gpu=2,
+       train=dataset_A_train,
+       val=dict(
+           type='ConcatDataset',
+           datasets=[dataset_A_val, dataset_B_val],
+           separate_eval=False))
+   ```
+
+   This manner allows users to evaluate all the datasets as a single one by setting `separate_eval=False`.
+
+**Note:**
+
+1. The option `separate_eval=False` assumes the datasets use `self.data_infos` during evaluation. Therefore, COCO datasets do not support this behavior since COCO datasets do not fully rely on `self.data_infos` for evaluation. Combining different types of datasets and evaluating them as a whole is not tested, thus it is not suggested.
+2. Evaluating `ClassBalancedDataset` and `RepeatDataset` is not supported, thus evaluating concatenated datasets of these types is also not supported.
+
+A more complex example that repeats `Dataset_A` and `Dataset_B` N and M times, respectively, and then concatenates the repeated datasets is as follows.
+
+```python
+dataset_A_train = dict(
+    type='RepeatDataset',
+    times=N,
+    dataset=dict(
+        type='Dataset_A',
+        ...
+        pipeline=train_pipeline
+    )
+)
+dataset_A_val = dict(
+    ...
+    pipeline=test_pipeline
+)
+dataset_A_test = dict(
+    ...
+    pipeline=test_pipeline
+)
+dataset_B_train = dict(
+    type='RepeatDataset',
+    times=M,
+    dataset=dict(
+        type='Dataset_B',
+        ...
+        pipeline=train_pipeline
+    )
+)
+data = dict(
+    imgs_per_gpu=2,
+    workers_per_gpu=2,
+    train = [
+        dataset_A_train,
+        dataset_B_train
+    ],
+    val = dataset_A_val,
+    test = dataset_A_test
+)
+
+```
+
+## Modify Dataset Classes
+
+With existing dataset types, we can modify their class names to train on a subset of the annotations.
+For example, if you want to train only three classes of the current dataset,
+you can modify the classes of the dataset.
+The dataset will filter out the ground truth boxes of the other classes automatically.
+
+```python
+classes = ('person', 'bicycle', 'car')
+data = dict(
+    train=dict(classes=classes),
+    val=dict(classes=classes),
+    test=dict(classes=classes))
+```
+
+MMDetection V2.0 also supports reading the classes from a file, which is common in real applications.
+For example, assume `classes.txt` contains the class names as follows.
+
+```
+person
+bicycle
+car
+```
+
+Users can set the classes as a file path; the dataset will load it and convert it to a list automatically.
+
+```python
+classes = 'path/to/classes.txt'
+data = dict(
+    train=dict(classes=classes),
+    val=dict(classes=classes),
+    test=dict(classes=classes))
+```
+
+**Note**:
+
+- Before MMDetection v2.5.0, the dataset would filter out empty GT images automatically if the classes were set, and there was no way to disable that through the config. This was undesirable behavior and introduced confusion, because if the classes were not set, the dataset only filtered the empty GT images when `filter_empty_gt=True` and `test_mode=False`. After MMDetection v2.5.0, we decoupled the image filtering process from the classes modification, i.e., the dataset will only filter empty GT images when `filter_empty_gt=True` and `test_mode=False`, no matter whether the classes are set. Thus, setting the classes only influences the annotations of the classes used for training, and users can decide whether to filter empty GT images by themselves.
+- Since the middle format only has box labels and does not contain the class names, when using `CustomDataset`, users cannot filter out empty GT images through configs but can only do this offline.
+- Please remember to modify the `num_classes` in the head when specifying `classes` in the dataset. We implemented [NumClassCheckHook](https://github.com/open-mmlab/mmdetection/blob/master/mmdet/datasets/utils.py) to check whether the numbers are consistent since v2.9.0 (after PR #4508).
+- The features for setting dataset classes and dataset filtering will be refactored to be more user-friendly in the future (depending on progress).
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/customize_losses.md b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/customize_losses.md
new file mode 100644
index 0000000000000000000000000000000000000000..c3e1ddd8900a2a295a1f78df37408f2ea14d7214
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/customize_losses.md
@@ -0,0 +1,105 @@
+# Tutorial 6: Customize Losses
+
+MMDetection provides users with different loss functions. But the default configuration may not be applicable to different datasets or models, so users may want to modify a specific loss to adapt it to a new situation.
+
+This tutorial first elaborates the computation pipeline of losses, then gives some instructions about how to modify each step. The modifications can be categorized as tweaking and weighting.
+
+## Computation pipeline of a loss
+
+Given the input prediction and target, as well as the weights, a loss function maps the input tensor to the final loss scalar. The mapping can be divided into four steps:
+
+1. Get the **element-wise** or sample-wise loss by the loss kernel function.
+
+2. Weight the loss with a weight tensor **element-wise**.
+
+3. Reduce the loss tensor to a **scalar**.
+
+4. Weight the loss with a **scalar**.
+
+## Tweaking loss
+
+Tweaking a loss is more related to steps 1, 3 and 4, and most modifications can be specified in the config.
+Here we take [Focal Loss (FL)](https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/losses/focal_loss.py) as an example.
+The following code snippets are the construction method and config of FL respectively; they are in one-to-one correspondence.
+
+```python
+@LOSSES.register_module()
+class FocalLoss(nn.Module):
+
+    def __init__(self,
+                 use_sigmoid=True,
+                 gamma=2.0,
+                 alpha=0.25,
+                 reduction='mean',
+                 loss_weight=1.0):
+```
+
+```python
+loss_cls=dict(
+    type='FocalLoss',
+    use_sigmoid=True,
+    gamma=2.0,
+    alpha=0.25,
+    loss_weight=1.0)
+```
+
+### Tweaking hyper-parameters (step 1)
+
+`gamma` and `alpha` are two hyper-parameters of Focal Loss. Say we want to change the value of `gamma` to 1.5 and `alpha` to 0.5; we can then specify them in the config as follows:
+
+```python
+loss_cls=dict(
+    type='FocalLoss',
+    use_sigmoid=True,
+    gamma=1.5,
+    alpha=0.5,
+    loss_weight=1.0)
+```
+
+### Tweaking the way of reduction (step 3)
+
+The default way of reduction is `mean` for FL. Say we want to change the reduction from `mean` to `sum`; we can specify it in the config as follows:
+
+```python
+loss_cls=dict(
+    type='FocalLoss',
+    use_sigmoid=True,
+    gamma=2.0,
+    alpha=0.25,
+    loss_weight=1.0,
+    reduction='sum')
+```
+
+### Tweaking loss weight (step 4)
+
+The loss weight here is a scalar which controls the weight of different losses in multi-task learning, e.g. classification loss and regression loss. Say we want to change the loss weight of the classification loss to 0.5; we can specify it in the config as follows:
+
+```python
+loss_cls=dict(
+    type='FocalLoss',
+    use_sigmoid=True,
+    gamma=2.0,
+    alpha=0.25,
+    loss_weight=0.5)
+```
+
+## Weighting loss (step 2)
+
+Weighting a loss means we re-weight the loss element-wise. To be more specific, we multiply the loss tensor with a weight tensor of the same shape. As a result, different entries of the loss can be scaled differently, hence the name element-wise.
+The loss weight varies across different models and is highly context-dependent, but overall there are two kinds of loss weights: `label_weights` for classification loss and `bbox_weights` for bbox regression loss. You can find them in the `get_targets` method of the corresponding head. Here we take [ATSSHead](https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/dense_heads/atss_head.py#L530) as an example, which inherits [AnchorHead](https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/dense_heads/anchor_head.py) but overwrites its `get_targets` method to yield different `label_weights` and `bbox_weights`.
+
+```
+class ATSSHead(AnchorHead):
+
+    ...
+
+    def get_targets(self,
+                    anchor_list,
+                    valid_flag_list,
+                    gt_bboxes_list,
+                    img_metas,
+                    gt_bboxes_ignore_list=None,
+                    gt_labels_list=None,
+                    label_channels=1,
+                    unmap_outputs=True):
+```
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/customize_models.md b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/customize_models.md
new file mode 100644
index 0000000000000000000000000000000000000000..81c391258d2942dc3ab9f39d0c8aac218a935eea
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/customize_models.md
@@ -0,0 +1,363 @@
+# Tutorial 4: Customize Models
+
+We basically categorize model components into 5 types; the schematic config after this list shows where each type appears.
+
+- backbone: usually an FCN network to extract feature maps, e.g., ResNet, MobileNet.
+- neck: the component between backbones and heads, e.g., FPN, PAFPN.
+- head: the component for specific tasks, e.g., bbox prediction and mask prediction.
+- roi extractor: the part for extracting RoI features from feature maps, e.g., RoI Align.
+- loss: the component in the head for calculating losses, e.g., FocalLoss, L1Loss, and GHMLoss.
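+
+To make the division concrete, the sketch below marks where each component type lives in a typical two-stage model config. It is abridged and illustrative (e.g., the RPN head and the train/test settings are omitted), not a complete runnable training config:
+
+```python
+model = dict(
+    type='FasterRCNN',
+    backbone=dict(  # backbone: extracts multi-level feature maps
+        type='ResNet', depth=50, num_stages=4, out_indices=(0, 1, 2, 3)),
+    neck=dict(  # neck: connects the backbone to the heads
+        type='FPN', in_channels=[256, 512, 1024, 2048], out_channels=256,
+        num_outs=5),
+    roi_head=dict(
+        type='StandardRoIHead',
+        bbox_roi_extractor=dict(  # roi extractor: pools RoI features
+            type='SingleRoIExtractor',
+            roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0),
+            out_channels=256,
+            featmap_strides=[4, 8, 16, 32]),
+        bbox_head=dict(  # head: task-specific prediction
+            type='Shared2FCBBoxHead',
+            in_channels=256,
+            num_classes=80,
+            loss_cls=dict(  # loss: configured inside the head
+                type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
+            loss_bbox=dict(type='L1Loss', loss_weight=1.0))))
+```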
+
+## Develop new components
+
+### Add a new backbone
+
+Here we show how to develop new components with an example of MobileNet.
+
+#### 1. Define a new backbone (e.g. MobileNet)
+
+Create a new file `mmdet/models/backbones/mobilenet.py`.
+
+```python
+import torch.nn as nn
+
+from ..builder import BACKBONES
+
+
+@BACKBONES.register_module()
+class MobileNet(nn.Module):
+
+    def __init__(self, arg1, arg2):
+        pass
+
+    def forward(self, x):  # should return a tuple
+        pass
+```
+
+#### 2. Import the module
+
+You can either add the following line to `mmdet/models/backbones/__init__.py`
+
+```python
+from .mobilenet import MobileNet
+```
+
+or alternatively add
+
+```python
+custom_imports = dict(
+    imports=['mmdet.models.backbones.mobilenet'],
+    allow_failed_imports=False)
+```
+
+to the config file to avoid modifying the original code.
+
+#### 3. Use the backbone in your config file
+
+```python
+model = dict(
+    ...
+    backbone=dict(
+        type='MobileNet',
+        arg1=xxx,
+        arg2=xxx),
+    ...
+```
+
+### Add new necks
+
+#### 1. Define a neck (e.g. PAFPN)
+
+Create a new file `mmdet/models/necks/pafpn.py`.
+
+```python
+import torch.nn as nn
+
+from ..builder import NECKS
+
+@NECKS.register_module()
+class PAFPN(nn.Module):
+
+    def __init__(self,
+                 in_channels,
+                 out_channels,
+                 num_outs,
+                 start_level=0,
+                 end_level=-1,
+                 add_extra_convs=False):
+        pass
+
+    def forward(self, inputs):
+        # implementation is ignored
+        pass
+```
+
+#### 2. Import the module
+
+You can either add the following line to `mmdet/models/necks/__init__.py`,
+
+```python
+from .pafpn import PAFPN
+```
+
+or alternatively add
+
+```python
+custom_imports = dict(
+    imports=['mmdet.models.necks.pafpn'],
+    allow_failed_imports=False)
+```
+
+to the config file and avoid modifying the original code.
+
+#### 3. Modify the config file
+
+```python
+neck=dict(
+    type='PAFPN',
+    in_channels=[256, 512, 1024, 2048],
+    out_channels=256,
+    num_outs=5)
+```
+
+### Add new heads
+
+Here we show how to develop a new head with the example of [Double Head R-CNN](https://arxiv.org/abs/1904.06493) as follows.
+
+First, add a new bbox head in `mmdet/models/roi_heads/bbox_heads/double_bbox_head.py`.
+Double Head R-CNN implements a new bbox head for object detection.
+To implement a bbox head, we basically need to implement three functions of the new module as follows.
+
+```python
+from mmdet.models.builder import HEADS
+from .bbox_head import BBoxHead
+
+@HEADS.register_module()
+class DoubleConvFCBBoxHead(BBoxHead):
+    r"""Bbox head used in Double-Head R-CNN
+
+                                      /-> cls
+                  /-> shared convs ->
+                                      \-> reg
+    roi features
+                                      /-> cls
+                  \-> shared fc    ->
+                                      \-> reg
+    """  # noqa: W605
+
+    def __init__(self,
+                 num_convs=0,
+                 num_fcs=0,
+                 conv_out_channels=1024,
+                 fc_out_channels=1024,
+                 conv_cfg=None,
+                 norm_cfg=dict(type='BN'),
+                 **kwargs):
+        kwargs.setdefault('with_avg_pool', True)
+        super(DoubleConvFCBBoxHead, self).__init__(**kwargs)
+
+
+    def forward(self, x_cls, x_reg):
+
+```
+
+Second, implement a new RoI head if it is necessary. We plan to inherit the new `DoubleHeadRoIHead` from `StandardRoIHead`. We can find that `StandardRoIHead` already implements the following functions.
+
+```python
+import torch
+
+from mmdet.core import bbox2result, bbox2roi, build_assigner, build_sampler
+from ..builder import HEADS, build_head, build_roi_extractor
+from .base_roi_head import BaseRoIHead
+from .test_mixins import BBoxTestMixin, MaskTestMixin
+
+
+@HEADS.register_module()
+class StandardRoIHead(BaseRoIHead, BBoxTestMixin, MaskTestMixin):
+    """Simplest base roi head including one bbox head and one mask head.
+    """
+
+    def init_assigner_sampler(self):
+
+    def init_bbox_head(self, bbox_roi_extractor, bbox_head):
+
+    def init_mask_head(self, mask_roi_extractor, mask_head):
+
+
+    def forward_dummy(self, x, proposals):
+
+
+    def forward_train(self,
+                      x,
+                      img_metas,
+                      proposal_list,
+                      gt_bboxes,
+                      gt_labels,
+                      gt_bboxes_ignore=None,
+                      gt_masks=None):
+
+    def _bbox_forward(self, x, rois):
+
+    def _bbox_forward_train(self, x, sampling_results, gt_bboxes, gt_labels,
+                            img_metas):
+
+    def _mask_forward_train(self, x, sampling_results, bbox_feats, gt_masks,
+                            img_metas):
+
+    def _mask_forward(self, x, rois=None, pos_inds=None, bbox_feats=None):
+
+
+    def simple_test(self,
+                    x,
+                    proposal_list,
+                    img_metas,
+                    proposals=None,
+                    rescale=False):
+        """Test without augmentation."""
+
+```
+
+Double Head's modification is mainly in the bbox_forward logic, and it inherits other logic from `StandardRoIHead`.
+In `mmdet/models/roi_heads/double_roi_head.py`, we implement the new RoI head as follows:
+
+```python
+from ..builder import HEADS
+from .standard_roi_head import StandardRoIHead
+
+
+@HEADS.register_module()
+class DoubleHeadRoIHead(StandardRoIHead):
+    """RoI head for Double Head RCNN
+
+    https://arxiv.org/abs/1904.06493
+    """
+
+    def __init__(self, reg_roi_scale_factor, **kwargs):
+        super(DoubleHeadRoIHead, self).__init__(**kwargs)
+        self.reg_roi_scale_factor = reg_roi_scale_factor
+
+    def _bbox_forward(self, x, rois):
+        bbox_cls_feats = self.bbox_roi_extractor(
+            x[:self.bbox_roi_extractor.num_inputs], rois)
+        bbox_reg_feats = self.bbox_roi_extractor(
+            x[:self.bbox_roi_extractor.num_inputs],
+            rois,
+            roi_scale_factor=self.reg_roi_scale_factor)
+        if self.with_shared_head:
+            bbox_cls_feats = self.shared_head(bbox_cls_feats)
+            bbox_reg_feats = self.shared_head(bbox_reg_feats)
+        cls_score, bbox_pred = self.bbox_head(bbox_cls_feats, bbox_reg_feats)
+
+        bbox_results = dict(
+            cls_score=cls_score,
+            bbox_pred=bbox_pred,
+            bbox_feats=bbox_cls_feats)
+        return bbox_results
+```
+
+Last, the users need to add the module in
+`mmdet/models/roi_heads/bbox_heads/__init__.py` and `mmdet/models/roi_heads/__init__.py` so that the corresponding registries can find and load them.
+
+Alternatively, the users can add
+
+```python
+custom_imports=dict(
+    imports=['mmdet.models.roi_heads.double_roi_head', 'mmdet.models.roi_heads.bbox_heads.double_bbox_head'])
+```
+
+to the config file and achieve the same goal.
+
+The config file of Double Head R-CNN is as follows
+
+```python
+_base_ = '../faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py'
+model = dict(
+    roi_head=dict(
+        type='DoubleHeadRoIHead',
+        reg_roi_scale_factor=1.3,
+        bbox_head=dict(
+            _delete_=True,
+            type='DoubleConvFCBBoxHead',
+            num_convs=4,
+            num_fcs=2,
+            in_channels=256,
+            conv_out_channels=1024,
+            fc_out_channels=1024,
+            roi_feat_size=7,
+            num_classes=80,
+            bbox_coder=dict(
+                type='DeltaXYWHBBoxCoder',
+                target_means=[0., 0., 0., 0.],
+                target_stds=[0.1, 0.1, 0.2, 0.2]),
+            reg_class_agnostic=False,
+            loss_cls=dict(
+                type='CrossEntropyLoss', use_sigmoid=False, loss_weight=2.0),
+            loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=2.0))))
+
+```
+
+Since MMDetection 2.0, the config system supports inheriting configs so that users can focus on their modifications.
+Double Head R-CNN mainly uses a new `DoubleHeadRoIHead` and a new
+`DoubleConvFCBBoxHead`; the arguments are set according to the `__init__` function of each module.
+
+### Add new loss
+
+Assume you want to add a new loss called `MyLoss` for bounding box regression.
+To add a new loss function, the users need to implement it in `mmdet/models/losses/my_loss.py`.
+The decorator `weighted_loss` enables the loss to be weighted for each element.
+
+```python
+import torch
+import torch.nn as nn
+
+from ..builder import LOSSES
+from .utils import weighted_loss
+
+@weighted_loss
+def my_loss(pred, target):
+    assert pred.size() == target.size() and target.numel() > 0
+    loss = torch.abs(pred - target)
+    return loss
+
+@LOSSES.register_module()
+class MyLoss(nn.Module):
+
+    def __init__(self, reduction='mean', loss_weight=1.0):
+        super(MyLoss, self).__init__()
+        self.reduction = reduction
+        self.loss_weight = loss_weight
+
+    def forward(self,
+                pred,
+                target,
+                weight=None,
+                avg_factor=None,
+                reduction_override=None):
+        assert reduction_override in (None, 'none', 'mean', 'sum')
+        reduction = (
+            reduction_override if reduction_override else self.reduction)
+        loss_bbox = self.loss_weight * my_loss(
+            pred, target, weight, reduction=reduction, avg_factor=avg_factor)
+        return loss_bbox
+```
+
+Then the users need to add it in `mmdet/models/losses/__init__.py`.
+
+```python
+from .my_loss import MyLoss, my_loss
+
+```
+
+Alternatively, you can add
+
+```python
+custom_imports=dict(
+    imports=['mmdet.models.losses.my_loss'])
+```
+
+to the config file and achieve the same goal.
+
+To use it, modify the `loss_xxx` field.
+Since MyLoss is for regression, you need to modify the `loss_bbox` field in the head.
+
+```python
+loss_bbox=dict(type='MyLoss', loss_weight=1.0)
+```
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/customize_runtime.md b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/customize_runtime.md
new file mode 100644
index 0000000000000000000000000000000000000000..616ce508aa2bec61c7fc23cd381b64e670b3b96e
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/customize_runtime.md
@@ -0,0 +1,323 @@
+# Tutorial 5: Customize Runtime Settings
+
+## Customize optimization settings
+
+### Customize optimizer supported by PyTorch
+
+We already support using all the optimizers implemented by PyTorch; the only modification is to change the `optimizer` field of the config files.
+For example, if you want to use `Adam` (note that the performance could drop a lot), the modification could be as follows.
+
+```python
+optimizer = dict(type='Adam', lr=0.0003, weight_decay=0.0001)
+```
+
+To modify the learning rate of the model, the users only need to modify the `lr` in the config of the optimizer. The users can directly set the arguments following the [API doc](https://pytorch.org/docs/stable/optim.html?highlight=optim#module-torch.optim) of PyTorch.
+
+### Customize self-implemented optimizer
+
+#### 1. Define a new optimizer
+
+A customized optimizer could be defined as follows.
+
+Assume you want to add an optimizer named `MyOptimizer`, which has arguments `a`, `b`, and `c`.
+You need to create a new directory named `mmdet/core/optimizer`.
+And then implement the new optimizer in a file, e.g., in `mmdet/core/optimizer/my_optimizer.py`:
+
+```python
+from .registry import OPTIMIZERS
+from torch.optim import Optimizer
+
+
+@OPTIMIZERS.register_module()
+class MyOptimizer(Optimizer):
+
+    def __init__(self, a, b, c):
+
+```
+
+#### 2. Add the optimizer to the registry
+
+For the module defined above to be found, it should first be imported into the main namespace. There are two options to achieve this.
+
+- Modify `mmdet/core/optimizer/__init__.py` to import it.
+
+  The newly defined module should be imported in `mmdet/core/optimizer/__init__.py` so that the registry will
+  find the new module and add it:
+
+```python
+from .my_optimizer import MyOptimizer
+```
+
+- Use `custom_imports` in the config to manually import it
+
+```python
+custom_imports = dict(imports=['mmdet.core.optimizer.my_optimizer'], allow_failed_imports=False)
+```
+
+The module `mmdet.core.optimizer.my_optimizer` will be imported at the beginning of the program and the class `MyOptimizer` is then automatically registered.
+Note that only the package containing the class `MyOptimizer` should be imported;
+`mmdet.core.optimizer.my_optimizer.MyOptimizer` **cannot** be imported directly.
+
+Actually, users can use a totally different file directory structure with this importing method, as long as the module root can be located in `PYTHONPATH`.
+
+#### 3. Specify the optimizer in the config file
+
+Then you can use `MyOptimizer` in the `optimizer` field of the config files.
+In the configs, the optimizers are defined by the field `optimizer` like the following:
+
+```python
+optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
+```
+
+To use your own optimizer, the field can be changed to
+
+```python
+optimizer = dict(type='MyOptimizer', a=a_value, b=b_value, c=c_value)
+```
+
+### Customize optimizer constructor
+
+Some models may have parameter-specific settings for optimization, e.g. weight decay for BatchNorm layers.
+The users can do this kind of fine-grained parameter tuning by customizing the optimizer constructor.
+
+```python
+from mmcv.utils import build_from_cfg
+
+from mmcv.runner.optimizer import OPTIMIZER_BUILDERS, OPTIMIZERS
+from mmdet.utils import get_root_logger
+from .my_optimizer import MyOptimizer
+
+
+@OPTIMIZER_BUILDERS.register_module()
+class MyOptimizerConstructor(object):
+
+    def __init__(self, optimizer_cfg, paramwise_cfg=None):
+
+    def __call__(self, model):
+
+        return my_optimizer
+
+```
+
+The default optimizer constructor is implemented [here](https://github.com/open-mmlab/mmcv/blob/9ecd6b0d5ff9d2172c49a182eaa669e9f27bb8e7/mmcv/runner/optimizer/default_constructor.py#L11), which could also serve as a template for a new optimizer constructor.
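+
+As a concrete illustration, a constructor can assign the backbone a smaller learning rate than the rest of the model. The following is a minimal sketch under assumed conventions: the class name and the `backbone_lr_mult` key in `paramwise_cfg` are hypothetical, and only parameters whose names start with `backbone` are treated specially:
+
+```python
+from mmcv.utils import build_from_cfg
+from mmcv.runner.optimizer import OPTIMIZER_BUILDERS, OPTIMIZERS
+
+
+@OPTIMIZER_BUILDERS.register_module()
+class BackboneLROptimizerConstructor:
+    """Hypothetical constructor giving the backbone a scaled-down lr."""
+
+    def __init__(self, optimizer_cfg, paramwise_cfg=None):
+        self.optimizer_cfg = optimizer_cfg
+        self.paramwise_cfg = paramwise_cfg or {}
+
+    def __call__(self, model):
+        base_lr = self.optimizer_cfg['lr']
+        lr_mult = self.paramwise_cfg.get('backbone_lr_mult', 0.1)
+        backbone_params, other_params = [], []
+        for name, param in model.named_parameters():
+            if not param.requires_grad:
+                continue  # frozen parameters need no optimizer entry
+            if name.startswith('backbone'):
+                backbone_params.append(param)
+            else:
+                other_params.append(param)
+        optimizer_cfg = self.optimizer_cfg.copy()
+        # Hand PyTorch two parameter groups with different learning rates.
+        optimizer_cfg['params'] = [
+            dict(params=backbone_params, lr=base_lr * lr_mult),
+            dict(params=other_params),
+        ]
+        return build_from_cfg(optimizer_cfg, OPTIMIZERS)
+```
+
+Assuming the class above is importable, it could then be selected in the config via something like `optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001, constructor='BackboneLROptimizerConstructor', paramwise_cfg=dict(backbone_lr_mult=0.1))`.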
+
+### Additional settings
+
+Tricks not implemented by the optimizer should be implemented through the optimizer constructor (e.g., setting parameter-wise learning rates) or hooks. We list some common settings that could stabilize or accelerate training. Feel free to create a PR or an issue for more settings.
+
+- __Use gradient clipping to stabilize training__:
+  Some models need gradient clipping to stabilize the training process. An example is as below:
+
+  ```python
+  optimizer_config = dict(
+      _delete_=True, grad_clip=dict(max_norm=35, norm_type=2))
+  ```
+
+  If your config inherits a base config which already sets `optimizer_config`, you might need `_delete_=True` to override the unnecessary settings. See the [config documentation](https://mmdetection.readthedocs.io/en/latest/config.html) for more details.
+
+- __Use a momentum schedule to accelerate model convergence__:
+  We support a momentum scheduler that modifies the model's momentum according to the learning rate, which could make the model converge faster.
+  The momentum scheduler is usually used together with the LR scheduler; for example, the following config is used in 3D detection to accelerate convergence.
+  For more details, please refer to the implementation of [CyclicLrUpdater](https://github.com/open-mmlab/mmcv/blob/f48241a65aebfe07db122e9db320c31b685dc674/mmcv/runner/hooks/lr_updater.py#L327) and [CyclicMomentumUpdater](https://github.com/open-mmlab/mmcv/blob/f48241a65aebfe07db122e9db320c31b685dc674/mmcv/runner/hooks/momentum_updater.py#L130).
+
+  ```python
+  lr_config = dict(
+      policy='cyclic',
+      target_ratio=(10, 1e-4),
+      cyclic_times=1,
+      step_ratio_up=0.4,
+  )
+  momentum_config = dict(
+      policy='cyclic',
+      target_ratio=(0.85 / 0.95, 1),
+      cyclic_times=1,
+      step_ratio_up=0.4,
+  )
+  ```
+
+## Customize training schedules
+
+By default we use a step learning rate with the 1x schedule; this calls [`StepLRHook`](https://github.com/open-mmlab/mmcv/blob/f48241a65aebfe07db122e9db320c31b685dc674/mmcv/runner/hooks/lr_updater.py#L153) in MMCV.
+We support many other learning rate schedules [here](https://github.com/open-mmlab/mmcv/blob/master/mmcv/runner/hooks/lr_updater.py), such as the `CosineAnnealing` and `Poly` schedules. Here are some examples
+
+- Poly schedule:
+
+  ```python
+  lr_config = dict(policy='poly', power=0.9, min_lr=1e-4, by_epoch=False)
+  ```
+
+- CosineAnnealing schedule:
+
+  ```python
+  lr_config = dict(
+      policy='CosineAnnealing',
+      warmup='linear',
+      warmup_iters=1000,
+      warmup_ratio=1.0 / 10,
+      min_lr_ratio=1e-5)
+  ```
+
+## Customize workflow
+
+Workflow is a list of (phase, epochs) pairs to specify the running order and epochs.
+By default it is set to
+
+```python
+workflow = [('train', 1)]
+```
+
+which means running 1 epoch for training.
+Sometimes users may want to check some metrics (e.g. loss, accuracy) of the model on the validation set.
+In such a case, we can set the workflow as
+
+```python
+[('train', 1), ('val', 1)]
+```
+
+so that 1 epoch for training and 1 epoch for validation will be run iteratively.
+
+**Note**:
+
+1. The parameters of the model will not be updated during a val epoch.
+2. The keyword `total_epochs` in the config only controls the number of training epochs and will not affect the validation workflow.
+3. Workflows `[('train', 1), ('val', 1)]` and `[('train', 1)]` will not change the behavior of `EvalHook` because `EvalHook` is called by `after_train_epoch` and the validation workflow only affects hooks that are called through `after_val_epoch`.
Therefore, the only difference between `[('train', 1), ('val', 1)]` and `[('train', 1)]` is that the runner will calculate losses on the validation set after each training epoch.
+
+## Customize hooks
+
+### Customize self-implemented hooks
+
+#### 1. Implement a new hook
+
+There are some occasions when the users might need to implement a new hook. MMDetection supports customized hooks in training (#3395) since v2.3.0. Thus the users can implement a hook directly in mmdet or their mmdet-based codebases and use the hook by only modifying the config in training.
+Before v2.3.0, the users needed to modify the code to get the hook registered before training starts.
+Here we give an example of creating a new hook in mmdet and using it in training.
+
+```python
+from mmcv.runner import HOOKS, Hook
+
+
+@HOOKS.register_module()
+class MyHook(Hook):
+
+    def __init__(self, a, b):
+        pass
+
+    def before_run(self, runner):
+        pass
+
+    def after_run(self, runner):
+        pass
+
+    def before_epoch(self, runner):
+        pass
+
+    def after_epoch(self, runner):
+        pass
+
+    def before_iter(self, runner):
+        pass
+
+    def after_iter(self, runner):
+        pass
+```
+
+Depending on the functionality of the hook, the users need to specify what the hook will do at each stage of the training in `before_run`, `after_run`, `before_epoch`, `after_epoch`, `before_iter`, and `after_iter`.
+
+#### 2. Register the new hook
+
+Then we need to make `MyHook` imported. Assuming the file is in `mmdet/core/utils/my_hook.py`, there are two ways to do that:
+
+- Modify `mmdet/core/utils/__init__.py` to import it.
+
+  The newly defined module should be imported in `mmdet/core/utils/__init__.py` so that the registry will
+  find the new module and add it:
+
+```python
+from .my_hook import MyHook
+```
+
+- Use `custom_imports` in the config to manually import it
+
+```python
+custom_imports = dict(imports=['mmdet.core.utils.my_hook'], allow_failed_imports=False)
+```
+
+#### 3. Modify the config
+
+```python
+custom_hooks = [
+    dict(type='MyHook', a=a_value, b=b_value)
+]
+```
+
+You can also set the priority of the hook by setting the key `priority` to `'NORMAL'` or `'HIGHEST'` as below
+
+```python
+custom_hooks = [
+    dict(type='MyHook', a=a_value, b=b_value, priority='NORMAL')
+]
+```
+
+By default the hook's priority is set to `NORMAL` during registration.
+
+### Use hooks implemented in MMCV
+
+If the hook is already implemented in MMCV, you can directly modify the config to use the hook as below
+
+#### Example: `NumClassCheckHook`
+
+We implement a customized hook named [NumClassCheckHook](https://github.com/open-mmlab/mmdetection/blob/master/mmdet/datasets/utils.py) to check whether the `num_classes` in the head matches the length of `CLASSES` in `dataset`.
+
+We set it in [default_runtime.py](https://github.com/open-mmlab/mmdetection/blob/master/configs/_base_/default_runtime.py).
+
+```python
+custom_hooks = [dict(type='NumClassCheckHook')]
+```
+
+### Modify default runtime hooks
+
+There are some common hooks that are not registered through `custom_hooks`; they are
+
+- log_config
+- checkpoint_config
+- evaluation
+- lr_config
+- optimizer_config
+- momentum_config
+
+Among those hooks, only the logger hook has `VERY_LOW` priority; the others have `NORMAL` priority.
+The above-mentioned tutorials already cover how to modify `optimizer_config`, `momentum_config`, and `lr_config`.
+Here we show what we can do with `log_config`, `checkpoint_config`, and `evaluation`.
+
+#### Checkpoint config
+
+The MMCV runner will use `checkpoint_config` to initialize [`CheckpointHook`](https://github.com/open-mmlab/mmcv/blob/9ecd6b0d5ff9d2172c49a182eaa669e9f27bb8e7/mmcv/runner/hooks/checkpoint.py#L9).
+
+```python
+checkpoint_config = dict(interval=1)
+```
+
+The users could set `max_keep_ckpts` to save only a small number of checkpoints, or decide whether to store the state dict of the optimizer via `save_optimizer`. More details of the arguments are [here](https://mmcv.readthedocs.io/en/latest/api.html#mmcv.runner.CheckpointHook)
+
+#### Log config
+
+The `log_config` wraps multiple logger hooks and enables setting intervals. Now MMCV supports `WandbLoggerHook`, `MlflowLoggerHook`, and `TensorboardLoggerHook`.
+Detailed usage can be found in the [doc](https://mmcv.readthedocs.io/en/latest/api.html#mmcv.runner.LoggerHook).
+
+```python
+log_config = dict(
+    interval=50,
+    hooks=[
+        dict(type='TextLoggerHook'),
+        dict(type='TensorboardLoggerHook')
+    ])
+```
+
+#### Evaluation config
+
+The config of `evaluation` will be used to initialize the [`EvalHook`](https://github.com/open-mmlab/mmdetection/blob/7a404a2c000620d52156774a5025070d9e00d918/mmdet/core/evaluation/eval_hooks.py#L8).
+Except for the key `interval`, other arguments such as `metric` will be passed to `dataset.evaluate()`.
+
+```python
+evaluation = dict(interval=1, metric='bbox')
+```
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/data_pipeline.md b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/data_pipeline.md
new file mode 100644
index 0000000000000000000000000000000000000000..7ea5665f3d927efdde0b2c2328a919b83d3ac51e
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/data_pipeline.md
@@ -0,0 +1,184 @@
+# Tutorial 3: Customize Data Pipelines
+
+## Design of Data pipelines
+
+Following typical conventions, we use `Dataset` and `DataLoader` for data loading
+with multiple workers. `Dataset` returns a dict of data items corresponding
+to the arguments of the model's forward method.
+Since the data in object detection may not be the same size (image size, gt bbox size, etc.),
+we introduce a new `DataContainer` type in MMCV to help collect and distribute
+data of different sizes.
+See [here](https://github.com/open-mmlab/mmcv/blob/master/mmcv/parallel/data_container.py) for more details.
+
+The data preparation pipeline and the dataset are decoupled. Usually a dataset
+defines how to process the annotations and a data pipeline defines all the steps to prepare a data dict.
+A pipeline consists of a sequence of operations. Each operation takes a dict as input and also outputs a dict for the next transform.
+
+We present a classical pipeline in the following figure. The blue blocks are pipeline operations. As the pipeline proceeds, each operator can add new keys (marked as green) to the result dict or update the existing keys (marked as orange).
+![pipeline figure](../../resources/data_pipeline.png)
+
+The operations are categorized into data loading, pre-processing, formatting and test-time augmentation.
+
+Here is a pipeline example for Faster R-CNN.
+
+```python
+img_norm_cfg = dict(
+    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
+train_pipeline = [
+    dict(type='LoadImageFromFile'),
+    dict(type='LoadAnnotations', with_bbox=True),
+    dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
+    dict(type='RandomFlip', flip_ratio=0.5),
+    dict(type='Normalize', **img_norm_cfg),
+    dict(type='Pad', size_divisor=32),
+    dict(type='DefaultFormatBundle'),
+    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
+]
+test_pipeline = [
+    dict(type='LoadImageFromFile'),
+    dict(
+        type='MultiScaleFlipAug',
+        img_scale=(1333, 800),
+        flip=False,
+        transforms=[
+            dict(type='Resize', keep_ratio=True),
+            dict(type='RandomFlip'),
+            dict(type='Normalize', **img_norm_cfg),
+            dict(type='Pad', size_divisor=32),
+            dict(type='ImageToTensor', keys=['img']),
+            dict(type='Collect', keys=['img']),
+        ])
+]
+```
+
+For each operation, we list the related dict fields that are added/updated/removed.
+
+### Data loading
+
+`LoadImageFromFile`
+
+- add: img, img_shape, ori_shape
+
+`LoadAnnotations`
+
+- add: gt_bboxes, gt_bboxes_ignore, gt_labels, gt_masks, gt_semantic_seg, bbox_fields, mask_fields
+
+`LoadProposals`
+
+- add: proposals
+
+### Pre-processing
+
+`Resize`
+
+- add: scale, scale_idx, pad_shape, scale_factor, keep_ratio
+- update: img, img_shape, *bbox_fields, *mask_fields, *seg_fields
+
+`RandomFlip`
+
+- add: flip
+- update: img, *bbox_fields, *mask_fields, *seg_fields
+
+`Pad`
+
+- add: pad_fixed_size, pad_size_divisor
+- update: img, pad_shape, *mask_fields, *seg_fields
+
+`RandomCrop`
+
+- update: img, pad_shape, gt_bboxes, gt_labels, gt_masks, *bbox_fields
+
+`Normalize`
+
+- add: img_norm_cfg
+- update: img
+
+`SegRescale`
+
+- update: gt_semantic_seg
+
+`PhotoMetricDistortion`
+
+- update: img
+
+`Expand`
+
+- update: img, gt_bboxes
+
+`MinIoURandomCrop`
+
+- update: img, gt_bboxes, gt_labels
+
+`Corrupt`
+
+- update: img
+
+### Formatting
+
+`ToTensor`
+
+- update: specified by `keys`.
+
+`ImageToTensor`
+
+- update: specified by `keys`.
+
+`Transpose`
+
+- update: specified by `keys`.
+
+`ToDataContainer`
+
+- update: specified by `fields`.
+
+`DefaultFormatBundle`
+
+- update: img, proposals, gt_bboxes, gt_bboxes_ignore, gt_labels, gt_masks, gt_semantic_seg
+
+`Collect`
+
+- add: img_meta (the keys of img_meta are specified by `meta_keys`)
+- remove: all other keys except for those specified by `keys`
+
+### Test time augmentation
+
+`MultiScaleFlipAug`
+
+## Extend and use custom pipelines
+
+1. Write a new pipeline in any file, e.g., `my_pipeline.py`. It takes a dict as input and returns a dict.
+
+   ```python
+   from mmdet.datasets import PIPELINES
+
+   @PIPELINES.register_module()
+   class MyTransform:
+
+       def __call__(self, results):
+           results['dummy'] = True
+           return results
+   ```
+
+2. Import the new class.
+
+   ```python
+   from .my_pipeline import MyTransform
+   ```
+
+3. Use it in config files.
+
+   ```python
+   img_norm_cfg = dict(
+       mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
+   train_pipeline = [
+       dict(type='LoadImageFromFile'),
+       dict(type='LoadAnnotations', with_bbox=True),
+       dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
+       dict(type='RandomFlip', flip_ratio=0.5),
+       dict(type='Normalize', **img_norm_cfg),
+       dict(type='Pad', size_divisor=32),
+       dict(type='MyTransform'),
+       dict(type='DefaultFormatBundle'),
+       dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
+   ]
+   ```
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/finetune.md b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/finetune.md
new file mode 100644
index 0000000000000000000000000000000000000000..afa5021a8c88abd425262357108cad35f2ec4081
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/finetune.md
@@ -0,0 +1,89 @@
+# Tutorial 7: Finetuning Models
+
+Detectors pre-trained on the COCO dataset can serve as good pre-trained models for other datasets, e.g., the CityScapes and KITTI datasets.
+This tutorial provides instructions for users to use the models provided in the [Model Zoo](../model_zoo.md) for other datasets to obtain better performance.
+
+There are two steps to finetune a model on a new dataset.
+
+- Add support for the new dataset following [Tutorial 2: Customize Datasets](customize_dataset.md).
+- Modify the configs as will be discussed in this tutorial.
+
+Taking the finetuning process on the Cityscapes dataset as an example, the users need to modify five parts of the config.
+
+## Inherit base configs
+
+To release the burden and reduce bugs in writing whole configs, MMDetection V2.0 supports inheriting configs from multiple existing configs. To finetune a Mask R-CNN model, the new config needs to inherit
+`_base_/models/mask_rcnn_r50_fpn.py` to build the basic structure of the model. To use the Cityscapes dataset, the new config can also simply inherit `_base_/datasets/cityscapes_instance.py`. For runtime settings such as training schedules, the new config needs to inherit `_base_/default_runtime.py`. These configs are in the `configs` directory and the users can also choose to write the whole contents rather than use inheritance.
+
+```python
+_base_ = [
+    '../_base_/models/mask_rcnn_r50_fpn.py',
+    '../_base_/datasets/cityscapes_instance.py', '../_base_/default_runtime.py'
+]
+```
+
+## Modify head
+
+Then the new config needs to modify the head according to the class number of the new dataset. By only changing `num_classes` in the roi_head, the weights of the pre-trained model are mostly reused except for the final prediction head.
+
+```python
+model = dict(
+    pretrained=None,
+    roi_head=dict(
+        bbox_head=dict(
+            type='Shared2FCBBoxHead',
+            in_channels=256,
+            fc_out_channels=1024,
+            roi_feat_size=7,
+            num_classes=8,
+            bbox_coder=dict(
+                type='DeltaXYWHBBoxCoder',
+                target_means=[0., 0., 0., 0.],
+                target_stds=[0.1, 0.1, 0.2, 0.2]),
+            reg_class_agnostic=False,
+            loss_cls=dict(
+                type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
+            loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)),
+        mask_head=dict(
+            type='FCNMaskHead',
+            num_convs=4,
+            in_channels=256,
+            conv_out_channels=256,
+            num_classes=8,
+            loss_mask=dict(
+                type='CrossEntropyLoss', use_mask=True, loss_weight=1.0))))
+```
+
+## Modify dataset
+
+The users may also need to prepare the dataset and write the dataset configs. MMDetection V2.0 already supports the VOC, WIDER FACE, COCO and Cityscapes datasets; a sketch of the typical path overrides follows.
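+
+When the inherited dataset config does not match the local layout, the dataset paths can be overridden in the child config. This is a minimal sketch; the data root and annotation file names below are illustrative and must match your own data preparation:
+
+```python
+data_root = 'data/cityscapes/'
+data = dict(
+    samples_per_gpu=2,
+    workers_per_gpu=2,
+    train=dict(
+        ann_file=data_root + 'annotations/instancesonly_filtered_gtFine_train.json',
+        img_prefix=data_root + 'leftImg8bit/train/'),
+    val=dict(
+        ann_file=data_root + 'annotations/instancesonly_filtered_gtFine_val.json',
+        img_prefix=data_root + 'leftImg8bit/val/'),
+    test=dict(
+        ann_file=data_root + 'annotations/instancesonly_filtered_gtFine_val.json',
+        img_prefix=data_root + 'leftImg8bit/val/'))
+```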
+
+## Modify training schedule
+
+The finetuning hyperparameters differ from the default schedule. Finetuning usually requires a smaller learning rate and fewer training epochs.
+
+```python
+# optimizer
+# lr is set for a batch size of 8
+optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
+optimizer_config = dict(grad_clip=None)
+# learning policy
+lr_config = dict(
+    policy='step',
+    warmup='linear',
+    warmup_iters=500,
+    warmup_ratio=0.001,
+    step=[7])
+# the max_epochs and step in lr_config need to be specifically tuned for the customized dataset
+runner = dict(max_epochs=8)
+log_config = dict(interval=100)
+```
+
+## Use pre-trained model
+
+To use the pre-trained model, the new config adds the link of the pre-trained model in `load_from`. The users might want to download the model weights before training to avoid the download time during training.
+
+```python
+load_from = 'https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco/mask_rcnn_r50_caffe_fpn_mstrain-poly_3x_coco_bbox_mAP-0.408__segm_mAP-0.37_20200504_163245-42aa3d00.pth'  # noqa
+
+```
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/index.rst b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..659a5cb6da681fea07662690b74630c558d767d4
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/index.rst
@@ -0,0 +1,12 @@
+.. toctree::
+   :maxdepth: 2
+
+   config.md
+   customize_dataset.md
+   data_pipeline.md
+   customize_models.md
+   customize_runtime.md
+   customize_losses.md
+   finetune.md
+   pytorch2onnx.md
+   onnx2tensorrt.md
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/onnx2tensorrt.md b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/onnx2tensorrt.md
new file mode 100644
index 0000000000000000000000000000000000000000..46f2e6debe7be8de04105f5dc4ddb1f63a4206a8
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/onnx2tensorrt.md
@@ -0,0 +1,102 @@
+# Tutorial 9: ONNX to TensorRT (Experimental)
+
+
+
+- [Tutorial 9: ONNX to TensorRT (Experimental)](#tutorial-9-onnx-to-tensorrt-experimental)
+  - [How to convert models from ONNX to TensorRT](#how-to-convert-models-from-onnx-to-tensorrt)
+    - [Prerequisite](#prerequisite)
+    - [Usage](#usage)
+  - [How to evaluate the exported models](#how-to-evaluate-the-exported-models)
+  - [List of supported models convertible to TensorRT](#list-of-supported-models-convertible-to-tensorrt)
+  - [Reminders](#reminders)
+  - [FAQs](#faqs)
+
+
+
+## How to convert models from ONNX to TensorRT
+
+### Prerequisite
+
+1. Please refer to [get_started.md](https://mmdetection.readthedocs.io/en/latest/get_started.html) for installation of MMCV and MMDetection from source.
+2. Please refer to [ONNXRuntime in mmcv](https://mmcv.readthedocs.io/en/latest/onnxruntime_op.html) and [TensorRT plugin in mmcv](https://github.com/open-mmlab/mmcv/blob/master/docs/tensorrt_plugin.md/) to install `mmcv-full` with ONNXRuntime custom ops and TensorRT plugins.
+3. Use our tool [pytorch2onnx](https://mmdetection.readthedocs.io/en/latest/tutorials/pytorch2onnx.html) to convert the model from PyTorch to ONNX.
+
+### Usage
+
+```bash
+python tools/deployment/onnx2tensorrt.py \
+    ${CONFIG} \
+    ${MODEL} \
+    --trt-file ${TRT_FILE} \
+    --input-img ${INPUT_IMAGE_PATH} \
+    --shape ${INPUT_IMAGE_SHAPE} \
+    --min-shape ${MIN_IMAGE_SHAPE} \
+    --max-shape ${MAX_IMAGE_SHAPE} \
+    --workspace-size ${WORKSPACE_SIZE} \
+    --show \
+    --verify
+```
+
+Description of all arguments:
+
+- `config` : The path of a model config file.
+- `model` : The path of an ONNX model file.
+- `--trt-file`: The path of the output TensorRT engine file. If not specified, it will be set to `tmp.trt`.
+- `--input-img` : The path of an input image for tracing and conversion. By default, it will be set to `demo/demo.jpg`.
+- `--shape`: The height and width of the model input. If not specified, it will be set to `400 600`.
+- `--min-shape`: The minimum height and width of the model input. If not specified, it will be set to the same as `--shape`.
+- `--max-shape`: The maximum height and width of the model input. If not specified, it will be set to the same as `--shape`.
+- `--workspace-size` : The required GPU workspace size in GiB to build the TensorRT engine. If not specified, it will be set to `1` GiB.
+- `--show`: Determines whether to show the outputs of the model. If not specified, it will be set to `False`.
+- `--verify`: Determines whether to verify the correctness of the model between ONNXRuntime and TensorRT. If not specified, it will be set to `False`.
+- `--verbose`: Determines whether to print logging messages. It is useful for debugging. If not specified, it will be set to `False`.
+
+Example:
+
+```bash
+python tools/deployment/onnx2tensorrt.py \
+    configs/retinanet/retinanet_r50_fpn_1x_coco.py \
+    checkpoints/retinanet_r50_fpn_1x_coco.onnx \
+    --trt-file checkpoints/retinanet_r50_fpn_1x_coco.trt \
+    --input-img demo/demo.jpg \
+    --shape 400 600 \
+    --show \
+    --verify
+```
+
+## How to evaluate the exported models
+
+We provide a tool, `tools/deployment/test.py`, to evaluate TensorRT models.
+
+Please refer to the following links for more information.
+
+- [how-to-evaluate-the-exported-models](pytorch2onnx.md#how-to-evaluate-the-exported-models)
+- [results-and-models](pytorch2onnx.md#results-and-models)
+
+## List of supported models convertible to TensorRT
+
+The table below lists the models that are guaranteed to be convertible to TensorRT.
+
+| Model        | Config                                               | Dynamic Shape | Batch Inference | Note  |
+| :----------: | :--------------------------------------------------: | :-----------: | :-------------: | :---: |
+| SSD          | `configs/ssd/ssd300_coco.py`                         | Y             | Y               |       |
+| FSAF         | `configs/fsaf/fsaf_r50_fpn_1x_coco.py`               | Y             | Y               |       |
+| FCOS         | `configs/fcos/fcos_r50_caffe_fpn_4x4_1x_coco.py`     | Y             | Y               |       |
+| YOLOv3       | `configs/yolo/yolov3_d53_mstrain-608_273e_coco.py`   | Y             | Y               |       |
+| RetinaNet    | `configs/retinanet/retinanet_r50_fpn_1x_coco.py`     | Y             | Y               |       |
+| Faster R-CNN | `configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py` | Y             | Y               |       |
+| Mask R-CNN   | `configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py`     | Y             | Y               |       |
+| PointRend    | `configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py` | Y | Y            |       |
+
+Notes:
+
+- *All models above are tested with Pytorch==1.6.0, onnx==1.7.0 and TensorRT-7.2.1.6.Ubuntu-16.04.x86_64-gnu.cuda-10.2.cudnn8.0*
+
+## Reminders
+
+- If you meet any problem with the listed models above, please create an issue and it will be taken care of soon. For models not included in the list, we may not be able to provide much help here due to limited resources. Please try to dig a little deeper and debug by yourself.
+- Because this feature is experimental and may change fast, please always try with the latest `mmcv` and `mmdetection`.
+
+## FAQs
+
+- None
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/pytorch2onnx.md b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/pytorch2onnx.md
new file mode 100644
index 0000000000000000000000000000000000000000..1202ec9c18fe1bb8da4085fee8334a83b2f2d3d3
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/tutorials/pytorch2onnx.md
@@ -0,0 +1,304 @@
+# Tutorial 8: Pytorch to ONNX (Experimental)
+
+- [Tutorial 8: Pytorch to ONNX (Experimental)](#tutorial-8-pytorch-to-onnx-experimental)
+  - [How to convert models from Pytorch to ONNX](#how-to-convert-models-from-pytorch-to-onnx)
+    - [Prerequisite](#prerequisite)
+    - [Usage](#usage)
+    - [Description of all arguments](#description-of-all-arguments)
+  - [How to evaluate the exported models](#how-to-evaluate-the-exported-models)
+    - [Prerequisite](#prerequisite-1)
+    - [Usage](#usage-1)
+    - [Description of all arguments](#description-of-all-arguments-1)
+    - [Results and Models](#results-and-models)
+  - [List of supported models exportable to ONNX](#list-of-supported-models-exportable-to-onnx)
+  - [The Parameters of Non-Maximum Suppression in ONNX Export](#the-parameters-of-non-maximum-suppression-in-onnx-export)
+  - [Reminders](#reminders)
+  - [FAQs](#faqs)
+
+## How to convert models from Pytorch to ONNX
+
+### Prerequisite
+
+1. Install the prerequisites following [get_started.md/Prepare environment](../get_started.md).
+2. Build custom operators for ONNX Runtime and install MMCV manually following [How to build custom operators for ONNX Runtime](https://github.com/open-mmlab/mmcv/blob/master/docs/onnxruntime_op.md/#how-to-build-custom-operators-for-onnx-runtime).
+3. Install MMDetection manually following steps 2-3 in [get_started.md/Install MMdetection](../get_started.md).
+
+### Usage
+
+```bash
+python tools/deployment/pytorch2onnx.py \
+    ${CONFIG_FILE} \
+    ${CHECKPOINT_FILE} \
+    --output-file ${OUTPUT_FILE} \
+    --input-img ${INPUT_IMAGE_PATH} \
+    --shape ${IMAGE_SHAPE} \
+    --test-img ${TEST_IMAGE_PATH} \
+    --opset-version ${OPSET_VERSION} \
+    --cfg-options ${CFG_OPTIONS} \
+    --dynamic-export \
+    --show \
+    --verify \
+    --simplify
+```
+
+### Description of all arguments
+
+- `config` : The path of a model config file.
+- `checkpoint` : The path of a model checkpoint file.
+- `--output-file`: The path of the output ONNX model. If not specified, it will be set to `tmp.onnx`.
+- `--input-img`: The path of an input image for tracing and conversion. By default, it will be set to `tests/data/color.jpg`.
+- `--shape`: The height and width of the input tensor to the model. If not specified, it will be set to `800 1216`.
+- `--test-img` : The path of an image to verify the exported ONNX model. By default, it will be set to `None`, meaning it will use `--input-img` for verification.
+- `--opset-version` : The opset version of ONNX. If not specified, it will be set to `11`.
+- `--dynamic-export`: Determines whether to export an ONNX model with dynamic input and output shapes. If not specified, it will be set to `False`.
+- `--show`: Determines whether to print the architecture of the exported model and whether to show detection outputs when `--verify` is set to `True`. If not specified, it will be set to `False`.
+- `--verify`: Determines whether to verify the correctness of an exported model. If not specified, it will be set to `False`.
+- `--simplify`: Determines whether to simplify the exported ONNX model. If not specified, it will be set to `False`.
+- `--cfg-options`: Override some settings in the used config file; key-value pairs in `xxx=yyy` format will be merged into the config file.
+
+Example:
+
+```bash
+python tools/deployment/pytorch2onnx.py \
+    configs/yolo/yolov3_d53_mstrain-608_273e_coco.py \
+    checkpoints/yolo/yolov3_d53_mstrain-608_273e_coco.pth \
+    --output-file checkpoints/yolo/yolov3_d53_mstrain-608_273e_coco.onnx \
+    --input-img demo/demo.jpg \
+    --test-img tests/data/color.jpg \
+    --shape 608 608 \
+    --show \
+    --verify \
+    --dynamic-export \
+    --cfg-options \
+      model.test_cfg.deploy_nms_pre=-1
+```
+
+## How to evaluate the exported models
+
+We provide a tool, `tools/deployment/test.py`, to evaluate ONNX models with ONNXRuntime and TensorRT.
+
+### Prerequisite
+
+- Install onnx and onnxruntime (CPU version):
+
+  ```shell
+  pip install onnx onnxruntime==1.5.1
+  ```
+
+- If you want to run the model on GPU, please remove the CPU version before installing the GPU version.
+
+  ```shell
+  pip uninstall onnxruntime
+  pip install onnxruntime-gpu
+  ```
+
+  Note: onnxruntime-gpu is version-dependent on CUDA and CUDNN; please ensure that your
+  environment meets the requirements.
+
+- Build custom operators for ONNX Runtime following [How to build custom operators for ONNX Runtime](https://github.com/open-mmlab/mmcv/blob/master/docs/onnxruntime_op.md/#how-to-build-custom-operators-for-onnx-runtime)
+
+- Install TensorRT by referring to [How to build TensorRT plugins in MMCV](https://mmcv.readthedocs.io/en/latest/tensorrt_plugin.html#how-to-build-tensorrt-plugins-in-mmcv) (optional)
+
+### Usage
+
+```bash
+python tools/deployment/test.py \
+    ${CONFIG_FILE} \
+    ${MODEL_FILE} \
+    --out ${OUTPUT_FILE} \
+    --backend ${BACKEND} \
+    --format-only ${FORMAT_ONLY} \
+    --eval ${EVALUATION_METRICS} \
+    --show-dir ${SHOW_DIRECTORY} \
+    --show-score-thr ${SHOW_SCORE_THRESHOLD} \
+    --cfg-options ${CFG_OPTIONS} \
+    --eval-options ${EVALUATION_OPTIONS}
+```
+
+### Description of all arguments
+
+- `config`: The path of a model config file.
+- `model`: The path of an input model file.
+- `--out`: The path of the output result file in pickle format.
+- `--backend`: The backend for the input model to run on; should be `onnxruntime` or `tensorrt`.
+- `--format-only` : Format the output results without performing evaluation. It is useful when you want to format the results into a specific format and submit them to the test server. If not specified, it will be set to `False`.
+- `--eval`: Evaluation metrics, which depend on the dataset, e.g., "bbox", "segm", "proposal" for COCO, and "mAP", "recall" for PASCAL VOC.
+- `--show-dir`: Directory where painted images will be saved.
+- `--show-score-thr`: Score threshold. The default is set to `0.3`.
+- `--cfg-options`: Override some settings in the used config file; key-value pairs in `xxx=yyy` format will be merged into the config file.
+- `--eval-options`: Custom options for evaluation; key-value pairs in `xxx=yyy` format will be passed as kwargs to the `dataset.evaluate()` function.
+
+Notes:
+
+- If the deployed backend platform is TensorRT, please add environment variables before running the file:
+
+  ```bash
+  export ONNX_BACKEND=MMCVTensorRT
+  ```
+
+- If you want to use the `--dynamic-export` parameter in the TensorRT backend to export ONNX, please remove the `--simplify` parameter, and vice versa.
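+
+Independent of `tools/deployment/test.py`, a quick way to confirm that the exported graph runs at all is to call it directly with ONNX Runtime. A minimal sketch (the file name, input name and shape below are illustrative and depend on how you exported):
+
+```python
+import numpy as np
+import onnxruntime as ort
+
+# open the exported model with the default execution provider
+sess = ort.InferenceSession('tmp.onnx')
+input_name = sess.get_inputs()[0].name
+# dummy preprocessed batch; real inputs must follow the config's test pipeline
+dummy = np.random.randn(1, 3, 800, 1216).astype(np.float32)
+outputs = sess.run(None, {input_name: dummy})
+print([o.shape for o in outputs])
+```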
+
+### Results and Models
+
+| Model        | Config                                                              | Metric  | PyTorch | ONNX Runtime | TensorRT |
+| :----------: | :-----------------------------------------------------------------: | :-----: | :-----: | :----------: | :------: |
+| FCOS         | `configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py`            | Box AP  | 36.6    | 36.5         | 36.3     |
+| FSAF         | `configs/fsaf/fsaf_r50_fpn_1x_coco.py`                              | Box AP  | 36.0    | 36.0         | 35.9     |
+| RetinaNet    | `configs/retinanet/retinanet_r50_fpn_1x_coco.py`                    | Box AP  | 36.5    | 36.4         | 36.3     |
+| SSD          | `configs/ssd/ssd300_coco.py`                                        | Box AP  | 25.6    | 25.6         | 25.6     |
+| YOLOv3       | `configs/yolo/yolov3_d53_mstrain-608_273e_coco.py`                  | Box AP  | 33.5    | 33.5         | 33.5     |
+| Faster R-CNN | `configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py`                | Box AP  | 37.4    | 37.4         | 37.0     |
+| Mask R-CNN   | `configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py`                    | Box AP  | 38.2    | 38.1         | 37.7     |
+| Mask R-CNN   | `configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py`                    | Mask AP | 34.7    | 33.7         | 33.3     |
+| CornerNet    | `configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py` | Box AP  | 40.6    | 40.4         | -        |
+| DETR         | `configs/detr/detr_r50_8x2_150e_coco.py`                            | Box AP  | 40.1    | 40.1         | -        |
+| PointRend    | `configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py`    | Box AP  | 38.4    | 38.4         | -        |
+| PointRend    | `configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py`    | Mask AP | 36.3    | 35.2         | -        |
+
+Notes:
+
+- All ONNX models are evaluated with dynamic shapes on the COCO dataset, and images are preprocessed according to the original config file. Note that CornerNet is evaluated without test-time flip, since currently only single-scale evaluation is supported with ONNX Runtime.
+
+- The Mask AP of Mask R-CNN drops by 1% for ONNXRuntime. The main reason is that the predicted masks are directly interpolated to the original image in PyTorch, while in the other backends they are first interpolated to the preprocessed input image of the model and then to the original image.
+
+## List of supported models exportable to ONNX
+
+The table below lists the models that are guaranteed to be exportable to ONNX and runnable in ONNX Runtime.
+
+| Model        | Config                                                              | Dynamic Shape | Batch Inference | Note                                                                           |
+| :----------: | :-----------------------------------------------------------------: | :-----------: | :-------------: | :----------------------------------------------------------------------------: |
+| FCOS         | `configs/fcos/fcos_r50_caffe_fpn_gn-head_4x4_1x_coco.py`            | Y             | Y               |                                                                                |
+| FSAF         | `configs/fsaf/fsaf_r50_fpn_1x_coco.py`                              | Y             | Y               |                                                                                |
+| RetinaNet    | `configs/retinanet/retinanet_r50_fpn_1x_coco.py`                    | Y             | Y               |                                                                                |
+| SSD          | `configs/ssd/ssd300_coco.py`                                        | Y             | Y               |                                                                                |
+| YOLOv3       | `configs/yolo/yolov3_d53_mstrain-608_273e_coco.py`                  | Y             | Y               |                                                                                |
+| Faster R-CNN | `configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py`                | Y             | Y               |                                                                                |
+| Mask R-CNN   | `configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py`                    | Y             | Y               |                                                                                |
+| CornerNet    | `configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py` | Y             | N               | no flip, no batch inference, tested with torch==1.7.0 and onnxruntime==1.5.1. |
+| DETR         | `configs/detr/detr_r50_8x2_150e_coco.py`                            | Y             | Y               | batch inference is *not recommended*                                           |
+| PointRend    | `configs/point_rend/point_rend_r50_caffe_fpn_mstrain_1x_coco.py`    | Y             | Y               |                                                                                |
+
+Notes:
+
+- The minimum required version of MMCV is `1.3.5`.
+
+- *All models above are tested with Pytorch==1.6.0 and onnxruntime==1.5.1*, except for CornerNet. For more details about the
+  torch version when exporting CornerNet to ONNX, which involves `mmcv::cummax`, please refer to the [Known Issues](https://github.com/open-mmlab/mmcv/blob/master/docs/onnxruntime_op.md#known-issues) in mmcv.
+
+- Though supported, it is *not recommended* to use batch inference in onnxruntime for `DETR`, because there is a huge performance gap between the ONNX and torch models (e.g., 33.5 vs 39.9 mAP on COCO for onnxruntime and torch respectively, with a batch size of 2). The main reason for the gap is that batch inference has a non-negligible effect on the predicted regressions in ONNX: the predicted coordinates are normalized by `img_shape` (without padding) and should be converted to absolute format, but `img_shape` is not dynamically traceable, so the padded `img_shape_for_onnx` is used instead.
+
+- Currently only single-scale evaluation is supported with ONNX Runtime; also, `mmcv::SoftNonMaxSuppression` is only supported for a single image for now.
+
+## The Parameters of Non-Maximum Suppression in ONNX Export
+
+In the process of exporting the ONNX model, we set some parameters for the NMS op to control the number of output bounding boxes. The following introduces the parameter settings of the NMS op in the supported models. You can set these parameters through `--cfg-options`.
+
+- `nms_pre`: The number of boxes before NMS. The default setting is `1000`.
+
+- `deploy_nms_pre`: The number of boxes before NMS when exporting to an ONNX model. The default setting is `0`.
+
+- `max_per_img`: The number of boxes to be kept after NMS. The default setting is `100`.
+
+- `max_output_boxes_per_class`: The maximum number of output boxes per class for NMS. The default setting is `200`.
+
+## Reminders
+
+- When the input model has a custom op such as `RoIAlign` and you want to verify the exported ONNX model, you may have to build `mmcv` with [ONNXRuntime](https://mmcv.readthedocs.io/en/latest/onnxruntime_op.html) from source.
+- The `mmcv.onnx.simplify` feature is based on [onnx-simplifier](https://github.com/daquexian/onnx-simplifier). If you want to try it, please refer to [onnx in `mmcv`](https://mmcv.readthedocs.io/en/latest/onnx.html) and [onnxruntime op in `mmcv`](https://mmcv.readthedocs.io/en/latest/onnxruntime_op.html) for more information.
+- If you meet any problem with the listed models above, please create an issue and it will be taken care of soon. For models not included in the list, please try to dig a little deeper and debug by yourself, and hopefully solve them on your own.
+- Because this feature is experimental and may change fast, please always try with the latest `mmcv` and `mmdetection`.
+
+## FAQs
+
+- None
diff --git a/detection_cbnet/docker-build-context/cbnetv2/docs/useful_tools.md b/detection_cbnet/docker-build-context/cbnetv2/docs/useful_tools.md
new file mode 100644
index 0000000000000000000000000000000000000000..2306e26f7410e49defa8d8a8c3a4491b034251c4
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/docs/useful_tools.md
@@ -0,0 +1,384 @@
+Apart from training/testing scripts, we provide lots of useful tools under the
+`tools/` directory.
+
+## Log Analysis
+
+`tools/analysis_tools/analyze_logs.py` plots loss/mAP curves given a training
+log file. Run `pip install seaborn` first to install the dependency.
+
+```shell
+python tools/analysis_tools/analyze_logs.py plot_curve [--keys ${KEYS}] [--title ${TITLE}] [--legend ${LEGEND}] [--backend ${BACKEND}] [--style ${STYLE}] [--out ${OUT_FILE}]
+```
+
+![loss curve image](../resources/loss_curve.png)
+
+Examples:
+
+- Plot the classification loss of some run.
+
+  ```shell
+  python tools/analysis_tools/analyze_logs.py plot_curve log.json --keys loss_cls --legend loss_cls
+  ```
+
+- Plot the classification and regression loss of some run, and save the figure to a PDF.
+
+  ```shell
+  python tools/analysis_tools/analyze_logs.py plot_curve log.json --keys loss_cls loss_bbox --out losses.pdf
+  ```
+
+- Compare the bbox mAP of two runs in the same figure.
+
+  ```shell
+  python tools/analysis_tools/analyze_logs.py plot_curve log1.json log2.json --keys bbox_mAP --legend run1 run2
+  ```
+
+- Compute the average training speed.
+
+  ```shell
+  python tools/analysis_tools/analyze_logs.py cal_train_time log.json [--include-outliers]
+  ```
+
+  The output is expected to look like the following.
+
+  ```text
+  -----Analyze train time of work_dirs/some_exp/20190611_192040.log.json-----
+  slowest epoch 11, average time is 1.2024
+  fastest epoch 1, average time is 1.1909
+  time std over epochs is 0.0028
+  average iter time: 1.1959 s/iter
+  ```
+
+## Result Analysis
+
+`tools/analysis_tools/analyze_results.py` calculates single-image mAP and saves or shows the topk images with the highest and lowest scores based on prediction results.
+
+**Usage**
+
+```shell
+python tools/analysis_tools/analyze_results.py \
+    ${CONFIG} \
+    ${PREDICTION_PATH} \
+    ${SHOW_DIR} \
+    [--show] \
+    [--wait-time ${WAIT_TIME}] \
+    [--topk ${TOPK}] \
+    [--show-score-thr ${SHOW_SCORE_THR}] \
+    [--cfg-options ${CFG_OPTIONS}]
+```
+
+Description of all arguments:
+
+- `config` : The path of a model config file.
+- `prediction_path`: The output result file in pickle format from `tools/test.py`.
+- `show_dir`: Directory where painted GT and detection images will be saved.
+- `--show`: Determines whether to show painted images. If not specified, it will be set to `False`.
+- `--wait-time`: The interval of show in seconds; 0 means blocking.
+- `--topk`: The number of saved images with the highest and lowest `topk` scores after sorting. If not specified, it will be set to `20`.
+- `--show-score-thr`: The show score threshold. If not specified, it will be set to `0`.
+- `--cfg-options`: If specified, the key-value pair optional cfg will be merged into the config file.
+
+**Examples**:
+
+Assume that you have got a result file in pickle format from `tools/test.py` at the path './result.pkl'.
+
+1. Test Faster R-CNN and visualize the results, saving images to the directory `results/`
+
+```shell
+python tools/analysis_tools/analyze_results.py \
+    configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py \
+    result.pkl \
+    results \
+    --show
+```
+
+2. Test Faster R-CNN with `topk` set to 50, saving images to the directory `results/`
+
+```shell
+python tools/analysis_tools/analyze_results.py \
+    configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py \
+    result.pkl \
+    results \
+    --topk 50
+```
+
+3. If you want to filter out low-score prediction results, you can specify the `show-score-thr` parameter
+
+```shell
+python tools/analysis_tools/analyze_results.py \
+    configs/faster_rcnn/faster_rcnn_r50_fpn_1x_coco.py \
+    result.pkl \
+    results \
+    --show-score-thr 0.3
+```
+
+## Visualization
+
+### Visualize Datasets
+
+`tools/misc/browse_dataset.py` helps the user browse a detection dataset (both
+images and bounding box annotations) visually, or save the images to a
+designated directory.
+
+```shell
+python tools/misc/browse_dataset.py ${CONFIG} [-h] [--skip-type ${SKIP_TYPE[SKIP_TYPE...]}] [--output-dir ${OUTPUT_DIR}] [--not-show] [--show-interval ${SHOW_INTERVAL}]
+```
+
+### Visualize Models
+
+First, convert the model to ONNX as described
+[here](#convert-mmdetection-model-to-onnx-experimental).
+Note that currently only RetinaNet is supported; support for other models
+will come in later versions.
+The converted model can be visualized by tools like [Netron](https://github.com/lutzroeder/netron).
+
+### Visualize Predictions
+
+If you need a lightweight GUI for visualizing the detection results, you can refer to the [DetVisGUI project](https://github.com/Chien-Hung/DetVisGUI/tree/mmdetection).
+
+## Error Analysis
+
+`tools/analysis_tools/coco_error_analysis.py` analyzes COCO results per category and by
+different criteria. It can also make a plot to provide useful information.
+
+```shell
+python tools/analysis_tools/coco_error_analysis.py ${RESULT} ${OUT_DIR} [-h] [--ann ${ANN}] [--types ${TYPES[TYPES...]}]
+```
+
+Example:
+
+Assume that you have got the [Mask R-CNN checkpoint file](https://download.openmmlab.com/mmdetection/v2.0/mask_rcnn/mask_rcnn_r50_fpn_1x_coco/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth) in the path 'checkpoint'. For other checkpoints, please refer to our [model zoo](./model_zoo.md).
+You can use the following command to get the bbox and segmentation JSON result files.
+
+```shell
+# out: results.bbox.json and results.segm.json
+python tools/test.py \
+    configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py \
+    checkpoint/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth \
+    --format-only \
+    --options "jsonfile_prefix=./results"
+```
+
+1. Get COCO bbox error results per category, saving the analysis result images to the directory `results/`
+
+```shell
+python tools/analysis_tools/coco_error_analysis.py \
+    results.bbox.json \
+    results \
+    --ann=data/coco/annotations/instances_val2017.json
+```
+
+2. Get COCO segmentation error results per category, saving the analysis result images to the directory `results/`
+
+```shell
+python tools/analysis_tools/coco_error_analysis.py \
+    results.segm.json \
+    results \
+    --ann=data/coco/annotations/instances_val2017.json \
+    --types='segm'
+```
+
+## Model Serving
+
+In order to serve an `MMDetection` model with [`TorchServe`](https://pytorch.org/serve/), you can follow these steps:
+
+### 1. Convert the model from MMDetection to TorchServe
+
+```shell
+python tools/deployment/mmdet2torchserve.py ${CONFIG_FILE} ${CHECKPOINT_FILE} \
+--output-folder ${MODEL_STORE} \
+--model-name ${MODEL_NAME}
+```
+
+**Note**: ${MODEL_STORE} needs to be an absolute path to a folder.
+
+### 2. Build the `mmdet-serve` docker image
+
+```shell
+docker build -t mmdet-serve:latest docker/serve/
+```
+
+### 3. Run `mmdet-serve`
+
+Check the official docs for [running TorchServe with docker](https://github.com/pytorch/serve/blob/master/docker/README.md#running-torchserve-in-a-production-docker-environment).
+
+In order to run on GPU, you need to install [nvidia-docker](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html). You can omit the `--gpus` argument in order to run on CPU.
+
+Example:
+
+```shell
+docker run --rm \
+--cpus 8 \
+--gpus device=0 \
+-p8080:8080 -p8081:8081 -p8082:8082 \
+--mount type=bind,source=$MODEL_STORE,target=/home/model-server/model-store \
+mmdet-serve:latest
+```
+
+[Read the docs](https://github.com/pytorch/serve/blob/072f5d088cce9bb64b2a18af065886c9b01b317b/docs/rest_api.md/) about the Inference (8080), Management (8081) and Metrics (8082) APIs.
+
+### 4. Test deployment
+
+```shell
+curl -O https://raw.githubusercontent.com/pytorch/serve/master/docs/images/3dogs.jpg
+curl http://127.0.0.1:8080/predictions/${MODEL_NAME} -T 3dogs.jpg
+```
+
+You should obtain a response similar to:
+
+```json
+[
+  {
+    "dog": [
+      402.9117736816406,
+      124.19664001464844,
+      571.7910766601562,
+      292.6463623046875
+    ],
+    "score": 0.9561963081359863
+  },
+  {
+    "dog": [
+      293.90057373046875,
+      196.2908477783203,
+      417.4869079589844,
+      286.2522277832031
+    ],
+    "score": 0.9179860353469849
+  },
+  {
+    "dog": [
+      202.178466796875,
+      86.3709487915039,
+      311.9863586425781,
+      276.28411865234375
+    ],
+    "score": 0.8933767080307007
+  }
+]
+```
+
+## Model Complexity
+
+`tools/analysis_tools/get_flops.py` is a script adapted from [flops-counter.pytorch](https://github.com/sovrasov/flops-counter.pytorch) to compute the FLOPs and params of a given model.
+
+```shell
+python tools/analysis_tools/get_flops.py ${CONFIG_FILE} [--shape ${INPUT_SHAPE}]
+```
+
+You will get results like this.
+
+```text
+==============================
+Input shape: (3, 1280, 800)
+Flops: 239.32 GFLOPs
+Params: 37.74 M
+==============================
+```
+
+**Note**: This tool is still experimental and we do not guarantee that the
+number is absolutely correct.
+You may well use the result for simple comparisons, but double-check it before you adopt it in technical reports or papers.
+
+1. FLOPs are related to the input shape while parameters are not. The default
+   input shape is (1, 3, 1280, 800).
+2. Some operators, such as GN and custom operators, are not counted in FLOPs. Refer to [`mmcv.cnn.get_model_complexity_info()`](https://github.com/open-mmlab/mmcv/blob/master/mmcv/cnn/utils/flops_counter.py) for details.
+3. The FLOPs of two-stage detectors are dependent on the number of proposals.
+
+## Model conversion
+
+### MMDetection model to ONNX (experimental)
+
+We provide a script to convert a model to the [ONNX](https://github.com/onnx/onnx) format. We also support comparing the output results between the Pytorch and ONNX models for verification.
+
+```shell
+python tools/deployment/pytorch2onnx.py ${CONFIG_FILE} ${CHECKPOINT_FILE} --output_file ${ONNX_FILE} [--shape ${INPUT_SHAPE} --verify]
+```
+
+**Note**: This tool is still experimental. Some customized operators are not supported for now. For a detailed description of the usage and the list of supported models, please refer to [pytorch2onnx](tutorials/pytorch2onnx.md).
+
+### MMDetection 1.x model to MMDetection 2.x
+
+`tools/model_converters/upgrade_model_version.py` upgrades a previous MMDetection checkpoint
+to the new version. Note that this script is not guaranteed to work, as some
+breaking changes were introduced in the new version. It is recommended to
+use the new checkpoints directly.
+
+```shell
+python tools/model_converters/upgrade_model_version.py ${IN_FILE} ${OUT_FILE} [-h] [--num-classes NUM_CLASSES]
+```
+
+### RegNet model to MMDetection
+
+`tools/model_converters/regnet2mmdet.py` converts keys in pycls pretrained RegNet models to
+MMDetection style.
+
+```shell
+python tools/model_converters/regnet2mmdet.py ${SRC} ${DST} [-h]
+```
+
+### Detectron ResNet to Pytorch
+
+`tools/model_converters/detectron2pytorch.py` converts keys in the original detectron pretrained
+ResNet models to PyTorch style.
+
+```shell
+python tools/model_converters/detectron2pytorch.py ${SRC} ${DST} ${DEPTH} [-h]
+```
+
+### Prepare a model for publishing
+
+`tools/model_converters/publish_model.py` helps users prepare their models for publishing.
+
+Before you upload a model to AWS, you may want to:
+
+1. convert the model weights to CPU tensors,
+2. delete the optimizer states, and
+3. compute the hash of the checkpoint file and append the hash id to the
+   filename.
+
+```shell
+python tools/model_converters/publish_model.py ${INPUT_FILENAME} ${OUTPUT_FILENAME}
+```
+
+E.g.,
+
+```shell
+python tools/model_converters/publish_model.py work_dirs/faster_rcnn/latest.pth faster_rcnn_r50_fpn_1x_20190801.pth
+```
+
+The final output filename will be `faster_rcnn_r50_fpn_1x_20190801-{hash id}.pth`.
+
+## Dataset Conversion
+
+`tools/data_converters/` contains tools to convert the Cityscapes dataset
+and the Pascal VOC dataset to the COCO format.
+
+```shell
+python tools/dataset_converters/cityscapes.py ${CITYSCAPES_PATH} [-h] [--img-dir ${IMG_DIR}] [--gt-dir ${GT_DIR}] [-o ${OUT_DIR}] [--nproc ${NPROC}]
+python tools/dataset_converters/pascal_voc.py ${DEVKIT_PATH} [-h] [-o ${OUT_DIR}]
+```
+
+## Robust Detection Benchmark
+
+`tools/analysis_tools/test_robustness.py` and `tools/analysis_tools/robustness_eval.py` help users evaluate model robustness. The core idea comes from [Benchmarking Robustness in Object Detection: Autonomous Driving when Winter is Coming](https://arxiv.org/abs/1907.07484).
+For more information on how to evaluate models on corrupted images, and for results on a set of standard models, please refer to [robustness_benchmarking.md](robustness_benchmarking.md).
+
+## Miscellaneous
+
+### Evaluating a metric
+
+`tools/analysis_tools/eval_metric.py` evaluates certain metrics of a pkl result file
+according to a config file.
+
+```shell
+python tools/analysis_tools/eval_metric.py ${CONFIG} ${PKL_RESULTS} [-h] [--format-only] [--eval ${EVAL[EVAL ...]}]
+                      [--cfg-options ${CFG_OPTIONS [CFG_OPTIONS ...]}]
+                      [--eval-options ${EVAL_OPTIONS [EVAL_OPTIONS ...]}]
+```
+
+### Print the entire config
+
+`tools/misc/print_config.py` prints the whole config verbatim, expanding all its
+imports.
+
+```shell
+python tools/misc/print_config.py ${CONFIG} [-h] [--options ${OPTIONS [OPTIONS...]}]
+```
diff --git a/detection_cbnet/docker-build-context/cbnetv2/figures/cbnetv2.png b/detection_cbnet/docker-build-context/cbnetv2/figures/cbnetv2.png
new file mode 100644
index 0000000000000000000000000000000000000000..6cc46d1efc6dd938b9aef13917a011c5c26aff69
Binary files /dev/null and b/detection_cbnet/docker-build-context/cbnetv2/figures/cbnetv2.png differ
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..7e0e39b03e2a149c33c372472b2b814a872ec55c
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/__init__.py
@@ -0,0 +1,5 @@
+# -*- coding: utf-8 -*-
+
+from .checkpoint import load_checkpoint
+
+__all__ = ['load_checkpoint']
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/checkpoint.py b/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/checkpoint.py
new file mode 100644
index 0000000000000000000000000000000000000000..51322c1c3802f357481065a70dc5152469d80eb8
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/checkpoint.py
@@ -0,0 +1,500 @@
+# Copyright (c) Open-MMLab. All rights reserved.
+import io
+import os
+import os.path as osp
+import pkgutil
+import time
+import warnings
+from collections import OrderedDict
+from importlib import import_module
+from tempfile import TemporaryDirectory
+
+import torch
+import torchvision
+from torch.optim import Optimizer
+from torch.utils import model_zoo
+from torch.nn import functional as F
+
+import mmcv
+from mmcv.fileio import FileClient
+from mmcv.fileio import load as load_file
+from mmcv.parallel import is_module_wrapper
+from mmcv.utils import mkdir_or_exist
+from mmcv.runner import get_dist_info
+
+ENV_MMCV_HOME = 'MMCV_HOME'
+ENV_XDG_CACHE_HOME = 'XDG_CACHE_HOME'
+DEFAULT_CACHE_DIR = '~/.cache'
+
+
+def _get_mmcv_home():
+    mmcv_home = os.path.expanduser(
+        os.getenv(
+            ENV_MMCV_HOME,
+            os.path.join(
+                os.getenv(ENV_XDG_CACHE_HOME, DEFAULT_CACHE_DIR), 'mmcv')))
+
+    mkdir_or_exist(mmcv_home)
+    return mmcv_home
+
+
+def load_state_dict(module, state_dict, strict=False, logger=None):
+    """Load state_dict to a module.
+
+    This method is modified from :meth:`torch.nn.Module.load_state_dict`.
+    Default value for ``strict`` is set to ``False`` and the message for
+    param mismatch will be shown even if strict is False.
+
+    Args:
+        module (Module): Module that receives the state_dict.
+        state_dict (OrderedDict): Weights.
+        strict (bool): whether to strictly enforce that the keys
+            in :attr:`state_dict` match the keys returned by this module's
+            :meth:`~torch.nn.Module.state_dict` function. Default: ``False``.
+ logger (:obj:`logging.Logger`, optional): Logger to log the error + message. If not specified, print function will be used. + """ + unexpected_keys = [] + all_missing_keys = [] + err_msg = [] + + metadata = getattr(state_dict, '_metadata', None) + state_dict = state_dict.copy() + if metadata is not None: + state_dict._metadata = metadata + + # use _load_from_state_dict to enable checkpoint version control + def load(module, prefix=''): + # recursively check parallel module in case that the model has a + # complicated structure, e.g., nn.Module(nn.Module(DDP)) + if is_module_wrapper(module): + module = module.module + local_metadata = {} if metadata is None else metadata.get( + prefix[:-1], {}) + module._load_from_state_dict(state_dict, prefix, local_metadata, True, + all_missing_keys, unexpected_keys, + err_msg) + for name, child in module._modules.items(): + if child is not None: + load(child, prefix + name + '.') + + load(module) + load = None # break load->load reference cycle + + # ignore "num_batches_tracked" of BN layers + missing_keys = [ + key for key in all_missing_keys if 'num_batches_tracked' not in key + ] + + if unexpected_keys: + err_msg.append('unexpected key in source ' + f'state_dict: {", ".join(unexpected_keys)}\n') + if missing_keys: + err_msg.append( + f'missing keys in source state_dict: {", ".join(missing_keys)}\n') + + rank, _ = get_dist_info() + if len(err_msg) > 0 and rank == 0: + err_msg.insert( + 0, 'The model and loaded state dict do not match exactly\n') + err_msg = '\n'.join(err_msg) + if strict: + raise RuntimeError(err_msg) + elif logger is not None: + logger.warning(err_msg) + else: + print(err_msg) + + +def load_url_dist(url, model_dir=None): + """In distributed setting, this function only download checkpoint at local + rank 0.""" + rank, world_size = get_dist_info() + rank = int(os.environ.get('LOCAL_RANK', rank)) + if rank == 0: + checkpoint = model_zoo.load_url(url, model_dir=model_dir) + if world_size > 1: + torch.distributed.barrier() + if rank > 0: + checkpoint = model_zoo.load_url(url, model_dir=model_dir) + return checkpoint + + +def load_pavimodel_dist(model_path, map_location=None): + """In distributed setting, this function only download checkpoint at local + rank 0.""" + try: + from pavi import modelcloud + except ImportError: + raise ImportError( + 'Please install pavi to load checkpoint from modelcloud.') + rank, world_size = get_dist_info() + rank = int(os.environ.get('LOCAL_RANK', rank)) + if rank == 0: + model = modelcloud.get(model_path) + with TemporaryDirectory() as tmp_dir: + downloaded_file = osp.join(tmp_dir, model.name) + model.download(downloaded_file) + checkpoint = torch.load(downloaded_file, map_location=map_location) + if world_size > 1: + torch.distributed.barrier() + if rank > 0: + model = modelcloud.get(model_path) + with TemporaryDirectory() as tmp_dir: + downloaded_file = osp.join(tmp_dir, model.name) + model.download(downloaded_file) + checkpoint = torch.load( + downloaded_file, map_location=map_location) + return checkpoint + + +def load_fileclient_dist(filename, backend, map_location): + """In distributed setting, this function only download checkpoint at local + rank 0.""" + rank, world_size = get_dist_info() + rank = int(os.environ.get('LOCAL_RANK', rank)) + allowed_backends = ['ceph'] + if backend not in allowed_backends: + raise ValueError(f'Load from Backend {backend} is not supported.') + if rank == 0: + fileclient = FileClient(backend=backend) + buffer = io.BytesIO(fileclient.get(filename)) + checkpoint = 
torch.load(buffer, map_location=map_location) + if world_size > 1: + torch.distributed.barrier() + if rank > 0: + fileclient = FileClient(backend=backend) + buffer = io.BytesIO(fileclient.get(filename)) + checkpoint = torch.load(buffer, map_location=map_location) + return checkpoint + + +def get_torchvision_models(): + model_urls = dict() + for _, name, ispkg in pkgutil.walk_packages(torchvision.models.__path__): + if ispkg: + continue + _zoo = import_module(f'torchvision.models.{name}') + if hasattr(_zoo, 'model_urls'): + _urls = getattr(_zoo, 'model_urls') + model_urls.update(_urls) + return model_urls + + +def get_external_models(): + mmcv_home = _get_mmcv_home() + default_json_path = osp.join(mmcv.__path__[0], 'model_zoo/open_mmlab.json') + default_urls = load_file(default_json_path) + assert isinstance(default_urls, dict) + external_json_path = osp.join(mmcv_home, 'open_mmlab.json') + if osp.exists(external_json_path): + external_urls = load_file(external_json_path) + assert isinstance(external_urls, dict) + default_urls.update(external_urls) + + return default_urls + + +def get_mmcls_models(): + mmcls_json_path = osp.join(mmcv.__path__[0], 'model_zoo/mmcls.json') + mmcls_urls = load_file(mmcls_json_path) + + return mmcls_urls + + +def get_deprecated_model_names(): + deprecate_json_path = osp.join(mmcv.__path__[0], + 'model_zoo/deprecated.json') + deprecate_urls = load_file(deprecate_json_path) + assert isinstance(deprecate_urls, dict) + + return deprecate_urls + + +def _process_mmcls_checkpoint(checkpoint): + state_dict = checkpoint['state_dict'] + new_state_dict = OrderedDict() + for k, v in state_dict.items(): + if k.startswith('backbone.'): + new_state_dict[k[9:]] = v + new_checkpoint = dict(state_dict=new_state_dict) + + return new_checkpoint + + +def _load_checkpoint(filename, map_location=None): + """Load checkpoint from somewhere (modelzoo, file, url). + + Args: + filename (str): Accept local filepath, URL, ``torchvision://xxx``, + ``open-mmlab://xxx``. Please refer to ``docs/model_zoo.md`` for + details. + map_location (str | None): Same as :func:`torch.load`. Default: None. + + Returns: + dict | OrderedDict: The loaded checkpoint. It can be either an + OrderedDict storing model weights or a dict containing other + information, which depends on the checkpoint. 
+ """ + if filename.startswith('modelzoo://'): + warnings.warn('The URL scheme of "modelzoo://" is deprecated, please ' + 'use "torchvision://" instead') + model_urls = get_torchvision_models() + model_name = filename[11:] + checkpoint = load_url_dist(model_urls[model_name]) + elif filename.startswith('torchvision://'): + model_urls = get_torchvision_models() + model_name = filename[14:] + checkpoint = load_url_dist(model_urls[model_name]) + elif filename.startswith('open-mmlab://'): + model_urls = get_external_models() + model_name = filename[13:] + deprecated_urls = get_deprecated_model_names() + if model_name in deprecated_urls: + warnings.warn(f'open-mmlab://{model_name} is deprecated in favor ' + f'of open-mmlab://{deprecated_urls[model_name]}') + model_name = deprecated_urls[model_name] + model_url = model_urls[model_name] + # check if is url + if model_url.startswith(('http://', 'https://')): + checkpoint = load_url_dist(model_url) + else: + filename = osp.join(_get_mmcv_home(), model_url) + if not osp.isfile(filename): + raise IOError(f'{filename} is not a checkpoint file') + checkpoint = torch.load(filename, map_location=map_location) + elif filename.startswith('mmcls://'): + model_urls = get_mmcls_models() + model_name = filename[8:] + checkpoint = load_url_dist(model_urls[model_name]) + checkpoint = _process_mmcls_checkpoint(checkpoint) + elif filename.startswith(('http://', 'https://')): + checkpoint = load_url_dist(filename) + elif filename.startswith('pavi://'): + model_path = filename[7:] + checkpoint = load_pavimodel_dist(model_path, map_location=map_location) + elif filename.startswith('s3://'): + checkpoint = load_fileclient_dist( + filename, backend='ceph', map_location=map_location) + else: + if not osp.isfile(filename): + raise IOError(f'{filename} is not a checkpoint file') + checkpoint = torch.load(filename, map_location=map_location) + return checkpoint + + +def load_checkpoint(model, + filename, + map_location='cpu', + strict=False, + logger=None): + """Load checkpoint from a file or URI. + + Args: + model (Module): Module to load checkpoint. + filename (str): Accept local filepath, URL, ``torchvision://xxx``, + ``open-mmlab://xxx``. Please refer to ``docs/model_zoo.md`` for + details. + map_location (str): Same as :func:`torch.load`. + strict (bool): Whether to allow different params for the model and + checkpoint. + logger (:mod:`logging.Logger` or None): The logger for error message. + + Returns: + dict or OrderedDict: The loaded checkpoint. 
+    """
+    checkpoint = _load_checkpoint(filename, map_location)
+    # OrderedDict is a subclass of dict
+    if not isinstance(checkpoint, dict):
+        raise RuntimeError(
+            f'No state_dict found in checkpoint file {filename}')
+    # get state_dict from checkpoint
+    if 'state_dict' in checkpoint:
+        state_dict = checkpoint['state_dict']
+    elif 'model' in checkpoint:
+        state_dict = checkpoint['model']
+    else:
+        state_dict = checkpoint
+    # strip prefix of state_dict
+    if list(state_dict.keys())[0].startswith('module.'):
+        state_dict = {k[7:]: v for k, v in state_dict.items()}
+
+    # for MoBY, load model of online branch
+    if sorted(list(state_dict.keys()))[0].startswith('encoder'):
+        state_dict = {k.replace('encoder.', ''): v for k, v in state_dict.items() if k.startswith('encoder.')}
+
+    # reshape absolute position embedding
+    if state_dict.get('absolute_pos_embed') is not None:
+        absolute_pos_embed = state_dict['absolute_pos_embed']
+        N1, L, C1 = absolute_pos_embed.size()
+        N2, C2, H, W = model.absolute_pos_embed.size()
+        if N1 != N2 or C1 != C2 or L != H*W:
+            logger.warning("Error in loading absolute_pos_embed, pass")
+        else:
+            state_dict['absolute_pos_embed'] = absolute_pos_embed.view(N2, H, W, C2).permute(0, 3, 1, 2)
+
+    # interpolate position bias table if needed
+    relative_position_bias_table_keys = [k for k in state_dict.keys() if "relative_position_bias_table" in k]
+    for table_key in relative_position_bias_table_keys:
+        table_pretrained = state_dict[table_key]
+        table_current = model.state_dict()[table_key]
+        L1, nH1 = table_pretrained.size()
+        L2, nH2 = table_current.size()
+        if nH1 != nH2:
+            logger.warning(f"Error in loading {table_key}, pass")
+        else:
+            if L1 != L2:
+                S1 = int(L1 ** 0.5)
+                S2 = int(L2 ** 0.5)
+                table_pretrained_resized = F.interpolate(
+                    table_pretrained.permute(1, 0).view(1, nH1, S1, S1),
+                    size=(S2, S2), mode='bicubic')
+                state_dict[table_key] = table_pretrained_resized.view(nH2, L2).permute(1, 0)
+
+    # load state_dict
+    load_state_dict(model, state_dict, strict, logger)
+    return checkpoint
+
+
+def weights_to_cpu(state_dict):
+    """Copy a model state_dict to cpu.
+
+    Args:
+        state_dict (OrderedDict): Model weights on GPU.
+
+    Returns:
+        OrderedDict: Model weights on CPU.
+    """
+    state_dict_cpu = OrderedDict()
+    for key, val in state_dict.items():
+        state_dict_cpu[key] = val.cpu()
+    return state_dict_cpu
+
+
+def _save_to_state_dict(module, destination, prefix, keep_vars):
+    """Saves module state to `destination` dictionary.
+
+    This method is modified from :meth:`torch.nn.Module._save_to_state_dict`.
+
+    Args:
+        module (nn.Module): The module to generate state_dict.
+        destination (dict): A dict where state will be stored.
+        prefix (str): The prefix for parameters and buffers used in this
+            module.
+        keep_vars (bool): Whether to keep the variable property of the
+            parameters; if ``False``, parameters and buffers are detached.
+    """
+    for name, param in module._parameters.items():
+        if param is not None:
+            destination[prefix + name] = param if keep_vars else param.detach()
+    for name, buf in module._buffers.items():
+        # remove check of _non_persistent_buffers_set to allow nn.BatchNorm2d
+        if buf is not None:
+            destination[prefix + name] = buf if keep_vars else buf.detach()
+
+
+def get_state_dict(module, destination=None, prefix='', keep_vars=False):
+    """Returns a dictionary containing a whole state of the module.
+
+    Both parameters and persistent buffers (e.g. running averages) are
+    included. Keys are corresponding parameter and buffer names.
+ + This method is modified from :meth:`torch.nn.Module.state_dict` to + recursively check parallel module in case that the model has a complicated + structure, e.g., nn.Module(nn.Module(DDP)). + + Args: + module (nn.Module): The module to generate state_dict. + destination (OrderedDict): Returned dict for the state of the + module. + prefix (str): Prefix of the key. + keep_vars (bool): Whether to keep the variable property of the + parameters. Default: False. + + Returns: + dict: A dictionary containing a whole state of the module. + """ + # recursively check parallel module in case that the model has a + # complicated structure, e.g., nn.Module(nn.Module(DDP)) + if is_module_wrapper(module): + module = module.module + + # below is the same as torch.nn.Module.state_dict() + if destination is None: + destination = OrderedDict() + destination._metadata = OrderedDict() + destination._metadata[prefix[:-1]] = local_metadata = dict( + version=module._version) + _save_to_state_dict(module, destination, prefix, keep_vars) + for name, child in module._modules.items(): + if child is not None: + get_state_dict( + child, destination, prefix + name + '.', keep_vars=keep_vars) + for hook in module._state_dict_hooks.values(): + hook_result = hook(module, destination, prefix, local_metadata) + if hook_result is not None: + destination = hook_result + return destination + + +def save_checkpoint(model, filename, optimizer=None, meta=None): + """Save checkpoint to file. + + The checkpoint will have 3 fields: ``meta``, ``state_dict`` and + ``optimizer``. By default ``meta`` will contain version and time info. + + Args: + model (Module): Module whose params are to be saved. + filename (str): Checkpoint filename. + optimizer (:obj:`Optimizer`, optional): Optimizer to be saved. + meta (dict, optional): Metadata to be saved in checkpoint. 
+ """ + if meta is None: + meta = {} + elif not isinstance(meta, dict): + raise TypeError(f'meta must be a dict or None, but got {type(meta)}') + meta.update(mmcv_version=mmcv.__version__, time=time.asctime()) + + if is_module_wrapper(model): + model = model.module + + if hasattr(model, 'CLASSES') and model.CLASSES is not None: + # save class name to the meta + meta.update(CLASSES=model.CLASSES) + + checkpoint = { + 'meta': meta, + 'state_dict': weights_to_cpu(get_state_dict(model)) + } + # save optimizer state dict in the checkpoint + if isinstance(optimizer, Optimizer): + checkpoint['optimizer'] = optimizer.state_dict() + elif isinstance(optimizer, dict): + checkpoint['optimizer'] = {} + for name, optim in optimizer.items(): + checkpoint['optimizer'][name] = optim.state_dict() + + if filename.startswith('pavi://'): + try: + from pavi import modelcloud + from pavi.exception import NodeNotFoundError + except ImportError: + raise ImportError( + 'Please install pavi to load checkpoint from modelcloud.') + model_path = filename[7:] + root = modelcloud.Folder() + model_dir, model_name = osp.split(model_path) + try: + model = modelcloud.get(model_dir) + except NodeNotFoundError: + model = root.create_training_model(model_dir) + with TemporaryDirectory() as tmp_dir: + checkpoint_file = osp.join(tmp_dir, model_name) + with open(checkpoint_file, 'wb') as f: + torch.save(checkpoint, f) + f.flush() + model.create_file(checkpoint_file, name=model_name) + else: + mmcv.mkdir_or_exist(osp.dirname(filename)) + # immediately flush buffer + with open(filename, 'wb') as f: + torch.save(checkpoint, f) + f.flush() diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/runner/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/runner/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c701cb016abe470611830dc960999970738352bb --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/runner/__init__.py @@ -0,0 +1,8 @@ +# Copyright (c) Open-MMLab. All rights reserved. +from .checkpoint import save_checkpoint +from .epoch_based_runner import EpochBasedRunnerAmp + + +__all__ = [ + 'EpochBasedRunnerAmp', 'save_checkpoint' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/runner/checkpoint.py b/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/runner/checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..b04167e0fc5f16bc33e793830ebb9c4ef15ef1ed --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/runner/checkpoint.py @@ -0,0 +1,85 @@ +# Copyright (c) Open-MMLab. All rights reserved. +import os.path as osp +import time +from tempfile import TemporaryDirectory + +import torch +from torch.optim import Optimizer + +import mmcv +from mmcv.parallel import is_module_wrapper +from mmcv.runner.checkpoint import weights_to_cpu, get_state_dict + +try: + import apex +except: + print('apex is not installed') + + +def save_checkpoint(model, filename, optimizer=None, meta=None): + """Save checkpoint to file. + + The checkpoint will have 4 fields: ``meta``, ``state_dict`` and + ``optimizer``, ``amp``. By default ``meta`` will contain version + and time info. + + Args: + model (Module): Module whose params are to be saved. + filename (str): Checkpoint filename. + optimizer (:obj:`Optimizer`, optional): Optimizer to be saved. + meta (dict, optional): Metadata to be saved in checkpoint. 
+ """ + if meta is None: + meta = {} + elif not isinstance(meta, dict): + raise TypeError(f'meta must be a dict or None, but got {type(meta)}') + meta.update(mmcv_version=mmcv.__version__, time=time.asctime()) + + if is_module_wrapper(model): + model = model.module + + if hasattr(model, 'CLASSES') and model.CLASSES is not None: + # save class name to the meta + meta.update(CLASSES=model.CLASSES) + + checkpoint = { + 'meta': meta, + 'state_dict': weights_to_cpu(get_state_dict(model)) + } + # save optimizer state dict in the checkpoint + if isinstance(optimizer, Optimizer): + checkpoint['optimizer'] = optimizer.state_dict() + elif isinstance(optimizer, dict): + checkpoint['optimizer'] = {} + for name, optim in optimizer.items(): + checkpoint['optimizer'][name] = optim.state_dict() + + # save amp state dict in the checkpoint + checkpoint['amp'] = apex.amp.state_dict() + + if filename.startswith('pavi://'): + try: + from pavi import modelcloud + from pavi.exception import NodeNotFoundError + except ImportError: + raise ImportError( + 'Please install pavi to load checkpoint from modelcloud.') + model_path = filename[7:] + root = modelcloud.Folder() + model_dir, model_name = osp.split(model_path) + try: + model = modelcloud.get(model_dir) + except NodeNotFoundError: + model = root.create_training_model(model_dir) + with TemporaryDirectory() as tmp_dir: + checkpoint_file = osp.join(tmp_dir, model_name) + with open(checkpoint_file, 'wb') as f: + torch.save(checkpoint, f) + f.flush() + model.create_file(checkpoint_file, name=model_name) + else: + mmcv.mkdir_or_exist(osp.dirname(filename)) + # immediately flush buffer + with open(filename, 'wb') as f: + torch.save(checkpoint, f) + f.flush() diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/runner/epoch_based_runner.py b/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/runner/epoch_based_runner.py new file mode 100644 index 0000000000000000000000000000000000000000..7cdf3fa05639f7fde652090be9dbf78b48790744 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmcv_custom/runner/epoch_based_runner.py @@ -0,0 +1,104 @@ +# Copyright (c) Open-MMLab. All rights reserved. +import os.path as osp +import platform +import shutil + +import torch +from torch.optim import Optimizer + +import mmcv +from mmcv.runner import RUNNERS, EpochBasedRunner +from .checkpoint import save_checkpoint + +try: + import apex +except: + print('apex is not installed') + + +@RUNNERS.register_module() +class EpochBasedRunnerAmp(EpochBasedRunner): + """Epoch-based Runner with AMP support. + + This runner train models epoch by epoch. + """ + + def save_checkpoint(self, + out_dir, + filename_tmpl='epoch_{}.pth', + save_optimizer=True, + meta=None, + create_symlink=True): + """Save the checkpoint. + + Args: + out_dir (str): The directory that checkpoints are saved. + filename_tmpl (str, optional): The checkpoint filename template, + which contains a placeholder for the epoch number. + Defaults to 'epoch_{}.pth'. + save_optimizer (bool, optional): Whether to save the optimizer to + the checkpoint. Defaults to True. + meta (dict, optional): The meta information to be saved in the + checkpoint. Defaults to None. + create_symlink (bool, optional): Whether to create a symlink + "latest.pth" to point to the latest checkpoint. + Defaults to True. 
+ """ + if meta is None: + meta = dict(epoch=self.epoch + 1, iter=self.iter) + elif isinstance(meta, dict): + meta.update(epoch=self.epoch + 1, iter=self.iter) + else: + raise TypeError( + f'meta should be a dict or None, but got {type(meta)}') + if self.meta is not None: + meta.update(self.meta) + + filename = filename_tmpl.format(self.epoch + 1) + filepath = osp.join(out_dir, filename) + optimizer = self.optimizer if save_optimizer else None + save_checkpoint(self.model, filepath, optimizer=optimizer, meta=meta) + # in some environments, `os.symlink` is not supported, you may need to + # set `create_symlink` to False + if create_symlink: + dst_file = osp.join(out_dir, 'latest.pth') + if platform.system() != 'Windows': + mmcv.symlink(filename, dst_file) + else: + shutil.copy(filepath, dst_file) + + def resume(self, + checkpoint, + resume_optimizer=True, + map_location='default'): + if map_location == 'default': + if torch.cuda.is_available(): + device_id = torch.cuda.current_device() + checkpoint = self.load_checkpoint( + checkpoint, + map_location=lambda storage, loc: storage.cuda(device_id)) + else: + checkpoint = self.load_checkpoint(checkpoint) + else: + checkpoint = self.load_checkpoint( + checkpoint, map_location=map_location) + + self._epoch = checkpoint['meta']['epoch'] + self._iter = checkpoint['meta']['iter'] + if 'optimizer' in checkpoint and resume_optimizer: + if isinstance(self.optimizer, Optimizer): + self.optimizer.load_state_dict(checkpoint['optimizer']) + elif isinstance(self.optimizer, dict): + for k in self.optimizer.keys(): + self.optimizer[k].load_state_dict( + checkpoint['optimizer'][k]) + else: + raise TypeError( + 'Optimizer should be dict or torch.optim.Optimizer ' + f'but got {type(self.optimizer)}') + + if 'amp' in checkpoint: + apex.amp.load_state_dict(checkpoint['amp']) + self.logger.info('load amp state dict') + + self.logger.info('resumed epoch %d, iter %d', self.epoch, self.iter) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..646ee84e3182dc70b519a3add6904c28514f4f51 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/__init__.py @@ -0,0 +1,28 @@ +import mmcv + +from .version import __version__, short_version + + +def digit_version(version_str): + digit_version = [] + for x in version_str.split('.'): + if x.isdigit(): + digit_version.append(int(x)) + elif x.find('rc') != -1: + patch_version = x.split('rc') + digit_version.append(int(patch_version[0]) - 1) + digit_version.append(int(patch_version[1])) + return digit_version + + +mmcv_minimum_version = '1.3.8' +mmcv_maximum_version = '1.4.0' +mmcv_version = digit_version(mmcv.__version__) + + +assert (mmcv_version >= digit_version(mmcv_minimum_version) + and mmcv_version <= digit_version(mmcv_maximum_version)), \ + f'MMCV=={mmcv.__version__} is used but incompatible. ' \ + f'Please install mmcv>={mmcv_minimum_version}, <={mmcv_maximum_version}.' 
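+
+# Illustrative note on the check above: digit_version('1.3.8') -> [1, 3, 8],
+# while digit_version('1.4.0rc1') -> [1, 4, -1, 1], so a release candidate
+# compares below the corresponding final release.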
+ +__all__ = ['__version__', 'short_version'] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/apis/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/apis/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1d8035b74877fdeccaa41cbc10a9f1f9924eac85 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/apis/__init__.py @@ -0,0 +1,10 @@ +from .inference import (async_inference_detector, inference_detector, + init_detector, show_result_pyplot) +from .test import multi_gpu_test, single_gpu_test +from .train import get_root_logger, set_random_seed, train_detector + +__all__ = [ + 'get_root_logger', 'set_random_seed', 'train_detector', 'init_detector', + 'async_inference_detector', 'inference_detector', 'show_result_pyplot', + 'multi_gpu_test', 'single_gpu_test' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/apis/inference.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/apis/inference.py new file mode 100644 index 0000000000000000000000000000000000000000..c257c7f28280b6746d053b2d5bcac4ddd5220ed2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/apis/inference.py @@ -0,0 +1,240 @@ +import warnings + +import mmcv +import numpy as np +import torch +from mmcv.ops import RoIPool +from mmcv.parallel import collate, scatter +from mmcv.runner import load_checkpoint + +from mmdet.core import get_classes +from mmdet.datasets import replace_ImageToTensor +from mmdet.datasets.pipelines import Compose +from mmdet.models import build_detector + + +def init_detector(config, checkpoint=None, device='cuda:0', cfg_options=None): + """Initialize a detector from config file. + + Args: + config (str or :obj:`mmcv.Config`): Config file path or the config + object. + checkpoint (str, optional): Checkpoint path. If left as None, the model + will not load any weights. + cfg_options (dict): Options to override some settings in the used + config. + + Returns: + nn.Module: The constructed detector. + """ + if isinstance(config, str): + config = mmcv.Config.fromfile(config) + elif not isinstance(config, mmcv.Config): + raise TypeError('config must be a filename or Config object, ' + f'but got {type(config)}') + if cfg_options is not None: + config.merge_from_dict(cfg_options) + config.model.pretrained = None + config.model.train_cfg = None + model = build_detector(config.model, test_cfg=config.get('test_cfg')) + if checkpoint is not None: + map_loc = 'cpu' if device == 'cpu' else None + checkpoint = load_checkpoint(model, checkpoint, map_location=map_loc) + if 'CLASSES' in checkpoint.get('meta', {}): + model.CLASSES = checkpoint['meta']['CLASSES'] + else: + warnings.simplefilter('once') + warnings.warn('Class names are not saved in the checkpoint\'s ' + 'meta data, use COCO classes by default.') + model.CLASSES = get_classes('coco') + model.cfg = config # save the config in the model for convenience + model.to(device) + model.eval() + return model + + +class LoadImage: + """Deprecated. + + A simple pipeline to load image. + """ + + def __call__(self, results): + """Call function to load images into results. + + Args: + results (dict): A result dict contains the file name + of the image to be read. + Returns: + dict: ``results`` will be returned containing loaded image. + """ + warnings.simplefilter('once') + warnings.warn('`LoadImage` is deprecated and will be removed in ' + 'future releases. 
You may use `LoadImageFromWebcam` ' + 'from `mmdet.datasets.pipelines.` instead.') + if isinstance(results['img'], str): + results['filename'] = results['img'] + results['ori_filename'] = results['img'] + else: + results['filename'] = None + results['ori_filename'] = None + img = mmcv.imread(results['img']) + results['img'] = img + results['img_fields'] = ['img'] + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + return results + + +def inference_detector(model, imgs): + """Inference image(s) with the detector. + + Args: + model (nn.Module): The loaded detector. + imgs (str/ndarray or list[str/ndarray] or tuple[str/ndarray]): + Either image files or loaded images. + + Returns: + If imgs is a list or tuple, the same length list type results + will be returned, otherwise return the detection results directly. + """ + + if isinstance(imgs, (list, tuple)): + is_batch = True + else: + imgs = [imgs] + is_batch = False + + cfg = model.cfg + device = next(model.parameters()).device # model device + + if isinstance(imgs[0], np.ndarray): + cfg = cfg.copy() + # set loading pipeline type + cfg.data.test.pipeline[0].type = 'LoadImageFromWebcam' + + cfg.data.test.pipeline = replace_ImageToTensor(cfg.data.test.pipeline) + test_pipeline = Compose(cfg.data.test.pipeline) + + datas = [] + for img in imgs: + # prepare data + if isinstance(img, np.ndarray): + # directly add img + data = dict(img=img) + else: + # add information into dict + data = dict(img_info=dict(filename=img), img_prefix=None) + # build the data pipeline + data = test_pipeline(data) + datas.append(data) + + data = collate(datas, samples_per_gpu=len(imgs)) + # just get the actual data from DataContainer + data['img_metas'] = [img_metas.data[0] for img_metas in data['img_metas']] + data['img'] = [img.data[0] for img in data['img']] + if next(model.parameters()).is_cuda: + # scatter to specified GPU + data = scatter(data, [device])[0] + else: + for m in model.modules(): + assert not isinstance( + m, RoIPool + ), 'CPU inference with RoIPool is not supported currently.' + + # forward the model + with torch.no_grad(): + results = model(return_loss=False, rescale=True, **data) + + if not is_batch: + return results[0] + else: + return results + + +async def async_inference_detector(model, imgs): + """Async inference image(s) with the detector. + + Args: + model (nn.Module): The loaded detector. + img (str | ndarray): Either image files or loaded images. + + Returns: + Awaitable detection results. 
+ """ + if not isinstance(imgs, (list, tuple)): + imgs = [imgs] + + cfg = model.cfg + device = next(model.parameters()).device # model device + + if isinstance(imgs[0], np.ndarray): + cfg = cfg.copy() + # set loading pipeline type + cfg.data.test.pipeline[0].type = 'LoadImageFromWebcam' + + cfg.data.test.pipeline = replace_ImageToTensor(cfg.data.test.pipeline) + test_pipeline = Compose(cfg.data.test.pipeline) + + datas = [] + for img in imgs: + # prepare data + if isinstance(img, np.ndarray): + # directly add img + data = dict(img=img) + else: + # add information into dict + data = dict(img_info=dict(filename=img), img_prefix=None) + # build the data pipeline + data = test_pipeline(data) + datas.append(data) + + data = collate(datas, samples_per_gpu=len(imgs)) + # just get the actual data from DataContainer + data['img_metas'] = [img_metas.data[0] for img_metas in data['img_metas']] + data['img'] = [img.data[0] for img in data['img']] + if next(model.parameters()).is_cuda: + # scatter to specified GPU + data = scatter(data, [device])[0] + else: + for m in model.modules(): + assert not isinstance( + m, RoIPool + ), 'CPU inference with RoIPool is not supported currently.' + + # We don't restore `torch.is_grad_enabled()` value during concurrent + # inference since execution can overlap + torch.set_grad_enabled(False) + results = await model.aforward_test(rescale=True, **data) + return results + + +def show_result_pyplot(model, + img, + result, + score_thr=0.3, + title='result', + wait_time=0): + """Visualize the detection results on the image. + + Args: + model (nn.Module): The loaded detector. + img (str or np.ndarray): Image filename or loaded image. + result (tuple[list] or list): The detection result, can be either + (bbox, segm) or just bbox. + score_thr (float): The threshold to visualize the bboxes and masks. + title (str): Title of the pyplot figure. + wait_time (float): Value of waitKey param. + Default: 0. 
+ """ + if hasattr(model, 'module'): + model = model.module + model.show_result( + img, + result, + score_thr=score_thr, + show=True, + wait_time=wait_time, + win_name=title, + bbox_color=(72, 101, 241), + text_color=(72, 101, 241)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/apis/test.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/apis/test.py new file mode 100644 index 0000000000000000000000000000000000000000..e54b1b8c24efc448972c31ee5da63041d7f97a47 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/apis/test.py @@ -0,0 +1,190 @@ +import os.path as osp +import pickle +import shutil +import tempfile +import time + +import mmcv +import torch +import torch.distributed as dist +from mmcv.image import tensor2imgs +from mmcv.runner import get_dist_info + +from mmdet.core import encode_mask_results + + +def single_gpu_test(model, + data_loader, + show=False, + out_dir=None, + show_score_thr=0.3): + model.eval() + results = [] + dataset = data_loader.dataset + prog_bar = mmcv.ProgressBar(len(dataset)) + for i, data in enumerate(data_loader): + with torch.no_grad(): + result = model(return_loss=False, rescale=True, **data) + + batch_size = len(result) + if show or out_dir: + if batch_size == 1 and isinstance(data['img'][0], torch.Tensor): + img_tensor = data['img'][0] + else: + img_tensor = data['img'][0].data[0] + img_metas = data['img_metas'][0].data[0] + imgs = tensor2imgs(img_tensor, **img_metas[0]['img_norm_cfg']) + assert len(imgs) == len(img_metas) + + for i, (img, img_meta) in enumerate(zip(imgs, img_metas)): + h, w, _ = img_meta['img_shape'] + img_show = img[:h, :w, :] + + ori_h, ori_w = img_meta['ori_shape'][:-1] + img_show = mmcv.imresize(img_show, (ori_w, ori_h)) + + if out_dir: + out_file = osp.join(out_dir, img_meta['ori_filename']) + else: + out_file = None + + model.module.show_result( + img_show, + result[i], + show=show, + out_file=out_file, + score_thr=show_score_thr) + + # encode mask results + if isinstance(result[0], tuple): + result = [(bbox_results, encode_mask_results(mask_results)) + for bbox_results, mask_results in result] + results.extend(result) + + for _ in range(batch_size): + prog_bar.update() + return results + + +def multi_gpu_test(model, data_loader, tmpdir=None, gpu_collect=False): + """Test model with multiple gpus. + + This method tests model with multiple gpus and collects the results + under two different modes: gpu and cpu modes. By setting 'gpu_collect=True' + it encodes results to gpu tensors and use gpu communication for results + collection. On cpu mode it saves the results on different gpus to 'tmpdir' + and collects them by the rank 0 worker. + + Args: + model (nn.Module): Model to be tested. + data_loader (nn.Dataloader): Pytorch data loader. + tmpdir (str): Path of directory to save the temporary results from + different gpus under cpu mode. + gpu_collect (bool): Option to use either gpu or cpu to collect results. + + Returns: + list: The prediction results. + """ + model.eval() + results = [] + dataset = data_loader.dataset + rank, world_size = get_dist_info() + if rank == 0: + prog_bar = mmcv.ProgressBar(len(dataset)) + time.sleep(2) # This line can prevent deadlock problem in some cases. 
+ for i, data in enumerate(data_loader): + with torch.no_grad(): + result = model(return_loss=False, rescale=True, **data) + # encode mask results + if isinstance(result[0], tuple): + result = [(bbox_results, encode_mask_results(mask_results)) + for bbox_results, mask_results in result] + results.extend(result) + + if rank == 0: + batch_size = len(result) + for _ in range(batch_size * world_size): + prog_bar.update() + + # collect results from all ranks + if gpu_collect: + results = collect_results_gpu(results, len(dataset)) + else: + results = collect_results_cpu(results, len(dataset), tmpdir) + return results + + +def collect_results_cpu(result_part, size, tmpdir=None): + rank, world_size = get_dist_info() + # create a tmp dir if it is not specified + if tmpdir is None: + MAX_LEN = 512 + # 32 is whitespace + dir_tensor = torch.full((MAX_LEN, ), + 32, + dtype=torch.uint8, + device='cuda') + if rank == 0: + mmcv.mkdir_or_exist('.dist_test') + tmpdir = tempfile.mkdtemp(dir='.dist_test') + tmpdir = torch.tensor( + bytearray(tmpdir.encode()), dtype=torch.uint8, device='cuda') + dir_tensor[:len(tmpdir)] = tmpdir + dist.broadcast(dir_tensor, 0) + tmpdir = dir_tensor.cpu().numpy().tobytes().decode().rstrip() + else: + mmcv.mkdir_or_exist(tmpdir) + # dump the part result to the dir + mmcv.dump(result_part, osp.join(tmpdir, f'part_{rank}.pkl')) + dist.barrier() + # collect all parts + if rank != 0: + return None + else: + # load results of all parts from tmp dir + part_list = [] + for i in range(world_size): + part_file = osp.join(tmpdir, f'part_{i}.pkl') + part_list.append(mmcv.load(part_file)) + # sort the results + ordered_results = [] + for res in zip(*part_list): + ordered_results.extend(list(res)) + # the dataloader may pad some samples + ordered_results = ordered_results[:size] + # remove tmp dir + shutil.rmtree(tmpdir) + return ordered_results + + +def collect_results_gpu(result_part, size): + rank, world_size = get_dist_info() + # dump result part to tensor with pickle + part_tensor = torch.tensor( + bytearray(pickle.dumps(result_part)), dtype=torch.uint8, device='cuda') + # gather all result part tensor shape + shape_tensor = torch.tensor(part_tensor.shape, device='cuda') + shape_list = [shape_tensor.clone() for _ in range(world_size)] + dist.all_gather(shape_list, shape_tensor) + # padding result part tensor to max length + shape_max = torch.tensor(shape_list).max() + part_send = torch.zeros(shape_max, dtype=torch.uint8, device='cuda') + part_send[:shape_tensor[0]] = part_tensor + part_recv_list = [ + part_tensor.new_zeros(shape_max) for _ in range(world_size) + ] + # gather all result part + dist.all_gather(part_recv_list, part_send) + + if rank == 0: + part_list = [] + for recv, shape in zip(part_recv_list, shape_list): + part_list.append( + pickle.loads(recv[:shape[0]].cpu().numpy().tobytes())) + # sort the results + ordered_results = [] + for res in zip(*part_list): + ordered_results.extend(list(res)) + # the dataloader may pad some samples + ordered_results = ordered_results[:size] + return ordered_results diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/apis/train.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/apis/train.py new file mode 100644 index 0000000000000000000000000000000000000000..7f2f1f95c0a8e7c9232f7aa490e8104f8e37c4f5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/apis/train.py @@ -0,0 +1,185 @@ +import random +import warnings + +import numpy as np +import torch +from mmcv.parallel import MMDataParallel, 
MMDistributedDataParallel
+from mmcv.runner import (HOOKS, DistSamplerSeedHook, EpochBasedRunner,
+                         Fp16OptimizerHook, OptimizerHook, build_optimizer,
+                         build_runner)
+from mmcv.utils import build_from_cfg
+
+from mmdet.core import DistEvalHook, EvalHook
+from mmdet.datasets import (build_dataloader, build_dataset,
+                            replace_ImageToTensor)
+from mmdet.utils import get_root_logger
+from mmcv_custom.runner import EpochBasedRunnerAmp  # noqa: F401 (registers the runner)
+try:
+    import apex
+except ImportError:
+    print('apex is not installed')
+
+
+def set_random_seed(seed, deterministic=False):
+    """Set random seed.
+
+    Args:
+        seed (int): Seed to be used.
+        deterministic (bool): Whether to set the deterministic option for
+            CUDNN backend, i.e., set `torch.backends.cudnn.deterministic`
+            to True and `torch.backends.cudnn.benchmark` to False.
+            Default: False.
+    """
+    random.seed(seed)
+    np.random.seed(seed)
+    torch.manual_seed(seed)
+    torch.cuda.manual_seed_all(seed)
+    if deterministic:
+        torch.backends.cudnn.deterministic = True
+        torch.backends.cudnn.benchmark = False
+
+
+def train_detector(model,
+                   dataset,
+                   cfg,
+                   distributed=False,
+                   validate=False,
+                   timestamp=None,
+                   meta=None):
+    logger = get_root_logger(cfg.log_level)
+
+    # prepare data loaders
+    dataset = dataset if isinstance(dataset, (list, tuple)) else [dataset]
+    if 'imgs_per_gpu' in cfg.data:
+        logger.warning('"imgs_per_gpu" is deprecated in MMDet V2.0. '
+                       'Please use "samples_per_gpu" instead')
+        if 'samples_per_gpu' in cfg.data:
+            logger.warning(
+                f'Got "imgs_per_gpu"={cfg.data.imgs_per_gpu} and '
+                f'"samples_per_gpu"={cfg.data.samples_per_gpu}, "imgs_per_gpu"'
+                f'={cfg.data.imgs_per_gpu} will be used in this experiment')
+        else:
+            logger.warning(
+                'Automatically set "samples_per_gpu"="imgs_per_gpu"='
+                f'{cfg.data.imgs_per_gpu} in this experiment')
+            cfg.data.samples_per_gpu = cfg.data.imgs_per_gpu
+
+    data_loaders = [
+        build_dataloader(
+            ds,
+            cfg.data.samples_per_gpu,
+            cfg.data.workers_per_gpu,
+            # cfg.gpus will be ignored if distributed
+            len(cfg.gpu_ids),
+            dist=distributed,
+            seed=cfg.seed) for ds in dataset
+    ]
+
+    # build optimizer
+    optimizer = build_optimizer(model, cfg.optimizer)
+
+    # use apex fp16 optimizer
+    if cfg.optimizer_config.get("type", None) == "DistOptimizerHook":
+        if cfg.optimizer_config.get("use_fp16", False):
+            model, optimizer = apex.amp.initialize(
+                model.cuda(), optimizer, opt_level="O1")
+            for m in model.modules():
+                if hasattr(m, "fp16_enabled"):
+                    m.fp16_enabled = True
+
+    # put model on gpus
+    if distributed:
+        find_unused_parameters = cfg.get('find_unused_parameters', False)
+        # Sets the `find_unused_parameters` parameter in
+        # torch.nn.parallel.DistributedDataParallel
+        model = MMDistributedDataParallel(
+            model.cuda(),
+            device_ids=[torch.cuda.current_device()],
+            broadcast_buffers=False,
+            find_unused_parameters=find_unused_parameters)
+    else:
+        model = MMDataParallel(
+            model.cuda(cfg.gpu_ids[0]), device_ids=cfg.gpu_ids)
+
+    if 'runner' not in cfg:
+        cfg.runner = {
+            'type': 'EpochBasedRunner',
+            'max_epochs': cfg.total_epochs
+        }
+        warnings.warn(
+            'config is now expected to have a `runner` section, '
+            'please set `runner` in your config.', UserWarning)
+    else:
+        if 'total_epochs' in cfg:
+            assert cfg.total_epochs == cfg.runner.max_epochs
+
+    # build runner
+    runner = build_runner(
+        cfg.runner,
+        default_args=dict(
+            model=model,
+            optimizer=optimizer,
+            work_dir=cfg.work_dir,
+            logger=logger,
+            meta=meta))
+
+    # an ugly workaround to make .log and .log.json filenames the same
runner.timestamp = timestamp + + # fp16 setting + fp16_cfg = cfg.get('fp16', None) + if fp16_cfg is not None: + optimizer_config = Fp16OptimizerHook( + **cfg.optimizer_config, **fp16_cfg, distributed=distributed) + elif distributed and 'type' not in cfg.optimizer_config: + optimizer_config = OptimizerHook(**cfg.optimizer_config) + else: + optimizer_config = cfg.optimizer_config + + # register hooks + runner.register_training_hooks(cfg.lr_config, optimizer_config, + cfg.checkpoint_config, cfg.log_config, + cfg.get('momentum_config', None)) + if distributed: + if isinstance(runner, EpochBasedRunner): + runner.register_hook(DistSamplerSeedHook()) + + # register eval hooks + if validate: + # Support batch_size > 1 in validation + val_samples_per_gpu = cfg.data.val.pop('samples_per_gpu', 1) + if val_samples_per_gpu > 1: + # Replace 'ImageToTensor' to 'DefaultFormatBundle' + cfg.data.val.pipeline = replace_ImageToTensor( + cfg.data.val.pipeline) + val_dataset = build_dataset(cfg.data.val, dict(test_mode=True)) + val_dataloader = build_dataloader( + val_dataset, + samples_per_gpu=val_samples_per_gpu, + workers_per_gpu=cfg.data.workers_per_gpu, + dist=distributed, + shuffle=False) + eval_cfg = cfg.get('evaluation', {}) + eval_cfg['by_epoch'] = cfg.runner['type'] != 'IterBasedRunner' + eval_hook = DistEvalHook if distributed else EvalHook + runner.register_hook(eval_hook(val_dataloader, **eval_cfg)) + + # user-defined hooks + if cfg.get('custom_hooks', None): + custom_hooks = cfg.custom_hooks + assert isinstance(custom_hooks, list), \ + f'custom_hooks expect list type, but got {type(custom_hooks)}' + for hook_cfg in cfg.custom_hooks: + assert isinstance(hook_cfg, dict), \ + 'Each item in custom_hooks expects dict type, but got ' \ + f'{type(hook_cfg)}' + hook_cfg = hook_cfg.copy() + priority = hook_cfg.pop('priority', 'NORMAL') + hook = build_from_cfg(hook_cfg, HOOKS) + runner.register_hook(hook, priority=priority) + + if cfg.resume_from: + runner.resume(cfg.resume_from) + elif cfg.load_from: + runner.load_checkpoint(cfg.load_from) + runner.run(data_loaders, cfg.workflow) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..00a54e2b86e72a6d8309a6bc0ee5acb34ef07d68 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/__init__.py @@ -0,0 +1,6 @@ +from .anchor import * # noqa: F401, F403 +from .bbox import * # noqa: F401, F403 +from .evaluation import * # noqa: F401, F403 +from .mask import * # noqa: F401, F403 +from .post_processing import * # noqa: F401, F403 +from .utils import * # noqa: F401, F403 diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f14dc1749ab1533f4481494e2ccc70004a74c163 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/__init__.py @@ -0,0 +1,13 @@ +from .anchor_generator import (AnchorGenerator, LegacyAnchorGenerator, + YOLOAnchorGenerator) +from .builder import (ANCHOR_GENERATORS, PRIOR_GENERATORS, + build_anchor_generator, build_prior_generator) +from .point_generator import MlvlPointGenerator, PointGenerator +from .utils import anchor_inside_flags, calc_region, images_to_levels + +__all__ = [ + 'AnchorGenerator', 'LegacyAnchorGenerator', 'anchor_inside_flags', + 
'PointGenerator', 'images_to_levels', 'calc_region', + 'build_anchor_generator', 'ANCHOR_GENERATORS', 'YOLOAnchorGenerator', + 'build_prior_generator', 'PRIOR_GENERATORS', 'MlvlPointGenerator' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/anchor_generator.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/anchor_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..b39fd585d5d82f4eaccb948be4ddfcbaac57ccc6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/anchor_generator.py @@ -0,0 +1,838 @@ +import warnings + +import mmcv +import numpy as np +import torch +from torch.nn.modules.utils import _pair + +from .builder import PRIOR_GENERATORS + + +@PRIOR_GENERATORS.register_module() +class AnchorGenerator: + """Standard anchor generator for 2D anchor-based detectors. + + Args: + strides (list[int] | list[tuple[int, int]]): Strides of anchors + in multiple feature levels in order (w, h). + ratios (list[float]): The list of ratios between the height and width + of anchors in a single level. + scales (list[int] | None): Anchor scales for anchors in a single level. + It cannot be set at the same time if `octave_base_scale` and + `scales_per_octave` are set. + base_sizes (list[int] | None): The basic sizes + of anchors in multiple levels. + If None is given, strides will be used as base_sizes. + (If strides are non square, the shortest stride is taken.) + scale_major (bool): Whether to multiply scales first when generating + base anchors. If true, the anchors in the same row will have the + same scales. By default it is True in V2.0 + octave_base_scale (int): The base scale of octave. + scales_per_octave (int): Number of scales for each octave. + `octave_base_scale` and `scales_per_octave` are usually used in + retinanet and the `scales` should be None when they are set. + centers (list[tuple[float, float]] | None): The centers of the anchor + relative to the feature grid center in multiple feature levels. + By default it is set to be None and not used. If a list of tuple of + float is given, they will be used to shift the centers of anchors. + center_offset (float): The offset of center in proportion to anchors' + width and height. By default it is 0 in V2.0. + + Examples: + >>> from mmdet.core import AnchorGenerator + >>> self = AnchorGenerator([16], [1.], [1.], [9]) + >>> all_anchors = self.grid_anchors([(2, 2)], device='cpu') + >>> print(all_anchors) + [tensor([[-4.5000, -4.5000, 4.5000, 4.5000], + [11.5000, -4.5000, 20.5000, 4.5000], + [-4.5000, 11.5000, 4.5000, 20.5000], + [11.5000, 11.5000, 20.5000, 20.5000]])] + >>> self = AnchorGenerator([16, 32], [1.], [1.], [9, 18]) + >>> all_anchors = self.grid_anchors([(2, 2), (1, 1)], device='cpu') + >>> print(all_anchors) + [tensor([[-4.5000, -4.5000, 4.5000, 4.5000], + [11.5000, -4.5000, 20.5000, 4.5000], + [-4.5000, 11.5000, 4.5000, 20.5000], + [11.5000, 11.5000, 20.5000, 20.5000]]), \ + tensor([[-9., -9., 9., 9.]])] + """ + + def __init__(self, + strides, + ratios, + scales=None, + base_sizes=None, + scale_major=True, + octave_base_scale=None, + scales_per_octave=None, + centers=None, + center_offset=0.): + # check center and center_offset + if center_offset != 0: + assert centers is None, 'center cannot be set when center_offset' \ + f'!=0, {centers} is given.' 
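+        # center_offset is a fraction of the anchor's width/height, so only
+        # values in [0, 1] keep the anchor center inside its own grid cell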
+        if not (0 <= center_offset <= 1):
+            raise ValueError('center_offset should be in range [0, 1], '
+                             f'{center_offset} is given.')
+        if centers is not None:
+            assert len(centers) == len(strides), \
+                'The number of strides should be the same as centers, got ' \
+                f'{strides} and {centers}'
+
+        # calculate base sizes of anchors
+        self.strides = [_pair(stride) for stride in strides]
+        self.base_sizes = [min(stride) for stride in self.strides
+                           ] if base_sizes is None else base_sizes
+        assert len(self.base_sizes) == len(self.strides), \
+            'The number of strides should be the same as base sizes, got ' \
+            f'{self.strides} and {self.base_sizes}'
+
+        # calculate scales of anchors
+        assert ((octave_base_scale is not None
+                 and scales_per_octave is not None) ^ (scales is not None)), \
+            'scales and octave_base_scale with scales_per_octave cannot' \
+            ' be set at the same time'
+        if scales is not None:
+            self.scales = torch.Tensor(scales)
+        elif octave_base_scale is not None and scales_per_octave is not None:
+            octave_scales = np.array(
+                [2**(i / scales_per_octave) for i in range(scales_per_octave)])
+            scales = octave_scales * octave_base_scale
+            self.scales = torch.Tensor(scales)
+        else:
+            raise ValueError('Either scales or octave_base_scale with '
+                             'scales_per_octave should be set')
+
+        self.octave_base_scale = octave_base_scale
+        self.scales_per_octave = scales_per_octave
+        self.ratios = torch.Tensor(ratios)
+        self.scale_major = scale_major
+        self.centers = centers
+        self.center_offset = center_offset
+        self.base_anchors = self.gen_base_anchors()
+
+    @property
+    def num_base_anchors(self):
+        """list[int]: total number of base anchors in a feature grid"""
+        return self.num_base_priors
+
+    @property
+    def num_base_priors(self):
+        """list[int]: The number of priors (anchors) at a point
+        on the feature grid"""
+        return [base_anchors.size(0) for base_anchors in self.base_anchors]
+
+    @property
+    def num_levels(self):
+        """int: number of feature levels that the generator will be applied"""
+        return len(self.strides)
+
+    def gen_base_anchors(self):
+        """Generate base anchors.
+
+        Returns:
+            list(torch.Tensor): Base anchors of a feature grid in multiple \
+                feature levels.
+        """
+        multi_level_base_anchors = []
+        for i, base_size in enumerate(self.base_sizes):
+            center = None
+            if self.centers is not None:
+                center = self.centers[i]
+            multi_level_base_anchors.append(
+                self.gen_single_level_base_anchors(
+                    base_size,
+                    scales=self.scales,
+                    ratios=self.ratios,
+                    center=center))
+        return multi_level_base_anchors
+
+    def gen_single_level_base_anchors(self,
+                                      base_size,
+                                      scales,
+                                      ratios,
+                                      center=None):
+        """Generate base anchors of a single level.
+
+        Args:
+            base_size (int | float): Basic size of an anchor.
+            scales (torch.Tensor): Scales of the anchor.
+            ratios (torch.Tensor): The ratio between the height
+                and width of anchors in a single level.
+            center (tuple[float], optional): The center of the base anchor
+                related to a single feature grid. Defaults to None.
+
+        Returns:
+            torch.Tensor: Anchors in a single-level feature map.
+ """ + w = base_size + h = base_size + if center is None: + x_center = self.center_offset * w + y_center = self.center_offset * h + else: + x_center, y_center = center + + h_ratios = torch.sqrt(ratios) + w_ratios = 1 / h_ratios + if self.scale_major: + ws = (w * w_ratios[:, None] * scales[None, :]).view(-1) + hs = (h * h_ratios[:, None] * scales[None, :]).view(-1) + else: + ws = (w * scales[:, None] * w_ratios[None, :]).view(-1) + hs = (h * scales[:, None] * h_ratios[None, :]).view(-1) + + # use float anchor and the anchor's center is aligned with the + # pixel center + base_anchors = [ + x_center - 0.5 * ws, y_center - 0.5 * hs, x_center + 0.5 * ws, + y_center + 0.5 * hs + ] + base_anchors = torch.stack(base_anchors, dim=-1) + + return base_anchors + + def _meshgrid(self, x, y, row_major=True): + """Generate mesh grid of x and y. + + Args: + x (torch.Tensor): Grids of x dimension. + y (torch.Tensor): Grids of y dimension. + row_major (bool, optional): Whether to return y grids first. + Defaults to True. + + Returns: + tuple[torch.Tensor]: The mesh grids of x and y. + """ + # use shape instead of len to keep tracing while exporting to onnx + xx = x.repeat(y.shape[0]) + yy = y.view(-1, 1).repeat(1, x.shape[0]).view(-1) + if row_major: + return xx, yy + else: + return yy, xx + + def grid_priors(self, featmap_sizes, device='cuda'): + """Generate grid anchors in multiple feature levels. + + Args: + featmap_sizes (list[tuple]): List of feature map sizes in + multiple feature levels. + device (str): The device where the anchors will be put on. + + Return: + list[torch.Tensor]: Anchors in multiple feature levels. \ + The sizes of each tensor should be [N, 4], where \ + N = width * height * num_base_anchors, width and height \ + are the sizes of the corresponding feature level, \ + num_base_anchors is the number of anchors for that level. + """ + assert self.num_levels == len(featmap_sizes) + multi_level_anchors = [] + for i in range(self.num_levels): + anchors = self.single_level_grid_priors( + featmap_sizes[i], level_idx=i, device=device) + multi_level_anchors.append(anchors) + return multi_level_anchors + + def single_level_grid_priors(self, featmap_size, level_idx, device='cuda'): + """Generate grid anchors of a single level. + + Note: + This function is usually called by method ``self.grid_priors``. + + Args: + featmap_size (tuple[int]): Size of the feature maps. + level_idx (int): The index of corresponding feature map level. + device (str, optional): The device the tensor will be put on. + Defaults to 'cuda'. + + Returns: + torch.Tensor: Anchors in the overall feature maps. + """ + + base_anchors = self.base_anchors[level_idx].to(device) + feat_h, feat_w = featmap_size + stride_w, stride_h = self.strides[level_idx] + shift_x = torch.arange(0, feat_w, device=device) * stride_w + shift_y = torch.arange(0, feat_h, device=device) * stride_h + + shift_xx, shift_yy = self._meshgrid(shift_x, shift_y) + shifts = torch.stack([shift_xx, shift_yy, shift_xx, shift_yy], dim=-1) + shifts = shifts.type_as(base_anchors) + # first feat_w elements correspond to the first row of shifts + # add A anchors (1, A, 4) to K shifts (K, 1, 4) to get + # shifted anchors (K, A, 4), reshape to (K*A, 4) + + all_anchors = base_anchors[None, :, :] + shifts[:, None, :] + all_anchors = all_anchors.view(-1, 4) + # first A rows correspond to A anchors of (0, 0) in feature map, + # then (0, 1), (0, 2), ... 
+ return all_anchors + + def sparse_priors(self, + prior_idxs, + featmap_size, + level_idx, + dtype=torch.float32, + device='cuda'): + """Generate sparse anchors according to the ``prior_idxs``. + + Args: + prior_idxs (Tensor): The index of corresponding anchors + in the feature map. + featmap_size (tuple[int]): feature map size arrange as (h, w). + level_idx (int): The level index of corresponding feature + map. + dtype (obj:`torch.dtype`): Date type of points.Defaults to + ``torch.float32``. + device (obj:`torch.device`): The device where the points is + located. + Returns: + Tensor: Anchor with shape (N, 4), N should be equal to + the length of ``prior_idxs``. + """ + + height, width = featmap_size + num_base_anchors = self.num_base_anchors[level_idx] + base_anchor_id = prior_idxs % num_base_anchors + x = (prior_idxs // + num_base_anchors) % width * self.strides[level_idx][0] + y = (prior_idxs // width // + num_base_anchors) % height * self.strides[level_idx][1] + priors = torch.stack([x, y, x, y], 1).to(dtype).to(device) + \ + self.base_anchors[level_idx][base_anchor_id, :].to(device) + + return priors + + def grid_anchors(self, featmap_sizes, device='cuda'): + """Generate grid anchors in multiple feature levels. + + Args: + featmap_sizes (list[tuple]): List of feature map sizes in + multiple feature levels. + device (str): Device where the anchors will be put on. + + Return: + list[torch.Tensor]: Anchors in multiple feature levels. \ + The sizes of each tensor should be [N, 4], where \ + N = width * height * num_base_anchors, width and height \ + are the sizes of the corresponding feature level, \ + num_base_anchors is the number of anchors for that level. + """ + warnings.warn('``grid_anchors`` would be deprecated soon. ' + 'Please use ``grid_priors`` ') + + assert self.num_levels == len(featmap_sizes) + multi_level_anchors = [] + for i in range(self.num_levels): + anchors = self.single_level_grid_anchors( + self.base_anchors[i].to(device), + featmap_sizes[i], + self.strides[i], + device=device) + multi_level_anchors.append(anchors) + return multi_level_anchors + + def single_level_grid_anchors(self, + base_anchors, + featmap_size, + stride=(16, 16), + device='cuda'): + """Generate grid anchors of a single level. + + Note: + This function is usually called by method ``self.grid_anchors``. + + Args: + base_anchors (torch.Tensor): The base anchors of a feature grid. + featmap_size (tuple[int]): Size of the feature maps. + stride (tuple[int], optional): Stride of the feature map in order + (w, h). Defaults to (16, 16). + device (str, optional): Device the tensor will be put on. + Defaults to 'cuda'. + + Returns: + torch.Tensor: Anchors in the overall feature maps. + """ + + warnings.warn( + '``single_level_grid_anchors`` would be deprecated soon. 
' + 'Please use ``single_level_grid_priors`` ') + + # keep featmap_size as Tensor instead of int, so that we + # can covert to ONNX correctly + feat_h, feat_w = featmap_size + shift_x = torch.arange(0, feat_w, device=device) * stride[0] + shift_y = torch.arange(0, feat_h, device=device) * stride[1] + + shift_xx, shift_yy = self._meshgrid(shift_x, shift_y) + shifts = torch.stack([shift_xx, shift_yy, shift_xx, shift_yy], dim=-1) + shifts = shifts.type_as(base_anchors) + # first feat_w elements correspond to the first row of shifts + # add A anchors (1, A, 4) to K shifts (K, 1, 4) to get + # shifted anchors (K, A, 4), reshape to (K*A, 4) + + all_anchors = base_anchors[None, :, :] + shifts[:, None, :] + all_anchors = all_anchors.view(-1, 4) + # first A rows correspond to A anchors of (0, 0) in feature map, + # then (0, 1), (0, 2), ... + return all_anchors + + def valid_flags(self, featmap_sizes, pad_shape, device='cuda'): + """Generate valid flags of anchors in multiple feature levels. + + Args: + featmap_sizes (list(tuple)): List of feature map sizes in + multiple feature levels. + pad_shape (tuple): The padded shape of the image. + device (str): Device where the anchors will be put on. + + Return: + list(torch.Tensor): Valid flags of anchors in multiple levels. + """ + assert self.num_levels == len(featmap_sizes) + multi_level_flags = [] + for i in range(self.num_levels): + anchor_stride = self.strides[i] + feat_h, feat_w = featmap_sizes[i] + h, w = pad_shape[:2] + valid_feat_h = min(int(np.ceil(h / anchor_stride[1])), feat_h) + valid_feat_w = min(int(np.ceil(w / anchor_stride[0])), feat_w) + flags = self.single_level_valid_flags((feat_h, feat_w), + (valid_feat_h, valid_feat_w), + self.num_base_anchors[i], + device=device) + multi_level_flags.append(flags) + return multi_level_flags + + def single_level_valid_flags(self, + featmap_size, + valid_size, + num_base_anchors, + device='cuda'): + """Generate the valid flags of anchor in a single feature map. + + Args: + featmap_size (tuple[int]): The size of feature maps, arrange + as (h, w). + valid_size (tuple[int]): The valid size of the feature maps. + num_base_anchors (int): The number of base anchors. + device (str, optional): Device where the flags will be put on. + Defaults to 'cuda'. + + Returns: + torch.Tensor: The valid flags of each anchor in a single level \ + feature map. 
+ """ + feat_h, feat_w = featmap_size + valid_h, valid_w = valid_size + assert valid_h <= feat_h and valid_w <= feat_w + valid_x = torch.zeros(feat_w, dtype=torch.bool, device=device) + valid_y = torch.zeros(feat_h, dtype=torch.bool, device=device) + valid_x[:valid_w] = 1 + valid_y[:valid_h] = 1 + valid_xx, valid_yy = self._meshgrid(valid_x, valid_y) + valid = valid_xx & valid_yy + valid = valid[:, None].expand(valid.size(0), + num_base_anchors).contiguous().view(-1) + return valid + + def __repr__(self): + """str: a string that describes the module""" + indent_str = ' ' + repr_str = self.__class__.__name__ + '(\n' + repr_str += f'{indent_str}strides={self.strides},\n' + repr_str += f'{indent_str}ratios={self.ratios},\n' + repr_str += f'{indent_str}scales={self.scales},\n' + repr_str += f'{indent_str}base_sizes={self.base_sizes},\n' + repr_str += f'{indent_str}scale_major={self.scale_major},\n' + repr_str += f'{indent_str}octave_base_scale=' + repr_str += f'{self.octave_base_scale},\n' + repr_str += f'{indent_str}scales_per_octave=' + repr_str += f'{self.scales_per_octave},\n' + repr_str += f'{indent_str}num_levels={self.num_levels}\n' + repr_str += f'{indent_str}centers={self.centers},\n' + repr_str += f'{indent_str}center_offset={self.center_offset})' + return repr_str + + +@PRIOR_GENERATORS.register_module() +class SSDAnchorGenerator(AnchorGenerator): + """Anchor generator for SSD. + + Args: + strides (list[int] | list[tuple[int, int]]): Strides of anchors + in multiple feature levels. + ratios (list[float]): The list of ratios between the height and width + of anchors in a single level. + basesize_ratio_range (tuple(float)): Ratio range of anchors. + input_size (int): Size of feature map, 300 for SSD300, + 512 for SSD512. + scale_major (bool): Whether to multiply scales first when generating + base anchors. If true, the anchors in the same row will have the + same scales. It is always set to be False in SSD. + """ + + def __init__(self, + strides, + ratios, + basesize_ratio_range, + input_size=300, + scale_major=True): + assert len(strides) == len(ratios) + assert mmcv.is_tuple_of(basesize_ratio_range, float) + + self.strides = [_pair(stride) for stride in strides] + self.input_size = input_size + self.centers = [(stride[0] / 2., stride[1] / 2.) 
+ for stride in self.strides] + self.basesize_ratio_range = basesize_ratio_range + + # calculate anchor ratios and sizes + min_ratio, max_ratio = basesize_ratio_range + min_ratio = int(min_ratio * 100) + max_ratio = int(max_ratio * 100) + step = int(np.floor(max_ratio - min_ratio) / (self.num_levels - 2)) + min_sizes = [] + max_sizes = [] + for ratio in range(int(min_ratio), int(max_ratio) + 1, step): + min_sizes.append(int(self.input_size * ratio / 100)) + max_sizes.append(int(self.input_size * (ratio + step) / 100)) + if self.input_size == 300: + if basesize_ratio_range[0] == 0.15: # SSD300 COCO + min_sizes.insert(0, int(self.input_size * 7 / 100)) + max_sizes.insert(0, int(self.input_size * 15 / 100)) + elif basesize_ratio_range[0] == 0.2: # SSD300 VOC + min_sizes.insert(0, int(self.input_size * 10 / 100)) + max_sizes.insert(0, int(self.input_size * 20 / 100)) + else: + raise ValueError( + 'basesize_ratio_range[0] should be either 0.15' + 'or 0.2 when input_size is 300, got ' + f'{basesize_ratio_range[0]}.') + elif self.input_size == 512: + if basesize_ratio_range[0] == 0.1: # SSD512 COCO + min_sizes.insert(0, int(self.input_size * 4 / 100)) + max_sizes.insert(0, int(self.input_size * 10 / 100)) + elif basesize_ratio_range[0] == 0.15: # SSD512 VOC + min_sizes.insert(0, int(self.input_size * 7 / 100)) + max_sizes.insert(0, int(self.input_size * 15 / 100)) + else: + raise ValueError('basesize_ratio_range[0] should be either 0.1' + 'or 0.15 when input_size is 512, got' + f' {basesize_ratio_range[0]}.') + else: + raise ValueError('Only support 300 or 512 in SSDAnchorGenerator' + f', got {self.input_size}.') + + anchor_ratios = [] + anchor_scales = [] + for k in range(len(self.strides)): + scales = [1., np.sqrt(max_sizes[k] / min_sizes[k])] + anchor_ratio = [1.] + for r in ratios[k]: + anchor_ratio += [1 / r, r] # 4 or 6 ratio + anchor_ratios.append(torch.Tensor(anchor_ratio)) + anchor_scales.append(torch.Tensor(scales)) + + self.base_sizes = min_sizes + self.scales = anchor_scales + self.ratios = anchor_ratios + self.scale_major = scale_major + self.center_offset = 0 + self.base_anchors = self.gen_base_anchors() + + def gen_base_anchors(self): + """Generate base anchors. + + Returns: + list(torch.Tensor): Base anchors of a feature grid in multiple \ + feature levels. 
+ """ + multi_level_base_anchors = [] + for i, base_size in enumerate(self.base_sizes): + base_anchors = self.gen_single_level_base_anchors( + base_size, + scales=self.scales[i], + ratios=self.ratios[i], + center=self.centers[i]) + indices = list(range(len(self.ratios[i]))) + indices.insert(1, len(indices)) + base_anchors = torch.index_select(base_anchors, 0, + torch.LongTensor(indices)) + multi_level_base_anchors.append(base_anchors) + return multi_level_base_anchors + + def __repr__(self): + """str: a string that describes the module""" + indent_str = ' ' + repr_str = self.__class__.__name__ + '(\n' + repr_str += f'{indent_str}strides={self.strides},\n' + repr_str += f'{indent_str}scales={self.scales},\n' + repr_str += f'{indent_str}scale_major={self.scale_major},\n' + repr_str += f'{indent_str}input_size={self.input_size},\n' + repr_str += f'{indent_str}scales={self.scales},\n' + repr_str += f'{indent_str}ratios={self.ratios},\n' + repr_str += f'{indent_str}num_levels={self.num_levels},\n' + repr_str += f'{indent_str}base_sizes={self.base_sizes},\n' + repr_str += f'{indent_str}basesize_ratio_range=' + repr_str += f'{self.basesize_ratio_range})' + return repr_str + + +@PRIOR_GENERATORS.register_module() +class LegacyAnchorGenerator(AnchorGenerator): + """Legacy anchor generator used in MMDetection V1.x. + + Note: + Difference to the V2.0 anchor generator: + + 1. The center offset of V1.x anchors are set to be 0.5 rather than 0. + 2. The width/height are minused by 1 when calculating the anchors' \ + centers and corners to meet the V1.x coordinate system. + 3. The anchors' corners are quantized. + + Args: + strides (list[int] | list[tuple[int]]): Strides of anchors + in multiple feature levels. + ratios (list[float]): The list of ratios between the height and width + of anchors in a single level. + scales (list[int] | None): Anchor scales for anchors in a single level. + It cannot be set at the same time if `octave_base_scale` and + `scales_per_octave` are set. + base_sizes (list[int]): The basic sizes of anchors in multiple levels. + If None is given, strides will be used to generate base_sizes. + scale_major (bool): Whether to multiply scales first when generating + base anchors. If true, the anchors in the same row will have the + same scales. By default it is True in V2.0 + octave_base_scale (int): The base scale of octave. + scales_per_octave (int): Number of scales for each octave. + `octave_base_scale` and `scales_per_octave` are usually used in + retinanet and the `scales` should be None when they are set. + centers (list[tuple[float, float]] | None): The centers of the anchor + relative to the feature grid center in multiple feature levels. + By default it is set to be None and not used. It a list of float + is given, this list will be used to shift the centers of anchors. + center_offset (float): The offset of center in propotion to anchors' + width and height. By default it is 0.5 in V2.0 but it should be 0.5 + in v1.x models. + + Examples: + >>> from mmdet.core import LegacyAnchorGenerator + >>> self = LegacyAnchorGenerator( + >>> [16], [1.], [1.], [9], center_offset=0.5) + >>> all_anchors = self.grid_anchors(((2, 2),), device='cpu') + >>> print(all_anchors) + [tensor([[ 0., 0., 8., 8.], + [16., 0., 24., 8.], + [ 0., 16., 8., 24.], + [16., 16., 24., 24.]])] + """ + + def gen_single_level_base_anchors(self, + base_size, + scales, + ratios, + center=None): + """Generate base anchors of a single level. 
+
+        Note:
+            The width/height of anchors are reduced by 1 when calculating \
+                the centers and corners to meet the V1.x coordinate system.
+
+        Args:
+            base_size (int | float): Basic size of an anchor.
+            scales (torch.Tensor): Scales of the anchor.
+            ratios (torch.Tensor): The ratio between the height
+                and width of anchors in a single level.
+            center (tuple[float], optional): The center of the base anchor
+                related to a single feature grid. Defaults to None.
+
+        Returns:
+            torch.Tensor: Anchors in a single-level feature map.
+        """
+        w = base_size
+        h = base_size
+        if center is None:
+            x_center = self.center_offset * (w - 1)
+            y_center = self.center_offset * (h - 1)
+        else:
+            x_center, y_center = center
+
+        h_ratios = torch.sqrt(ratios)
+        w_ratios = 1 / h_ratios
+        if self.scale_major:
+            ws = (w * w_ratios[:, None] * scales[None, :]).view(-1)
+            hs = (h * h_ratios[:, None] * scales[None, :]).view(-1)
+        else:
+            ws = (w * scales[:, None] * w_ratios[None, :]).view(-1)
+            hs = (h * scales[:, None] * h_ratios[None, :]).view(-1)
+
+        # use float anchor and the anchor's center is aligned with the
+        # pixel center
+        base_anchors = [
+            x_center - 0.5 * (ws - 1), y_center - 0.5 * (hs - 1),
+            x_center + 0.5 * (ws - 1), y_center + 0.5 * (hs - 1)
+        ]
+        base_anchors = torch.stack(base_anchors, dim=-1).round()
+
+        return base_anchors
+
+
+@PRIOR_GENERATORS.register_module()
+class LegacySSDAnchorGenerator(SSDAnchorGenerator, LegacyAnchorGenerator):
+    """Legacy anchor generator used in MMDetection V1.x.
+
+    The difference between `LegacySSDAnchorGenerator` and `SSDAnchorGenerator`
+    can be found in `LegacyAnchorGenerator`.
+    """
+
+    def __init__(self,
+                 strides,
+                 ratios,
+                 basesize_ratio_range,
+                 input_size=300,
+                 scale_major=True):
+        super(LegacySSDAnchorGenerator,
+              self).__init__(strides, ratios, basesize_ratio_range, input_size,
+                             scale_major)
+        self.centers = [((stride - 1) / 2., (stride - 1) / 2.)
+                        for stride in strides]
+        self.base_anchors = self.gen_base_anchors()
+
+
+@PRIOR_GENERATORS.register_module()
+class YOLOAnchorGenerator(AnchorGenerator):
+    """Anchor generator for YOLO.
+
+    Args:
+        strides (list[int] | list[tuple[int, int]]): Strides of anchors
+            in multiple feature levels.
+        base_sizes (list[list[tuple[int, int]]]): The basic sizes
+            of anchors in multiple levels.
+    """
+
+    def __init__(self, strides, base_sizes):
+        self.strides = [_pair(stride) for stride in strides]
+        self.centers = [(stride[0] / 2., stride[1] / 2.)
+                        for stride in self.strides]
+        self.base_sizes = []
+        num_anchor_per_level = len(base_sizes[0])
+        for base_sizes_per_level in base_sizes:
+            assert num_anchor_per_level == len(base_sizes_per_level)
+            self.base_sizes.append(
+                [_pair(base_size) for base_size in base_sizes_per_level])
+        self.base_anchors = self.gen_base_anchors()
+
+    @property
+    def num_levels(self):
+        """int: number of feature levels that the generator will be applied"""
+        return len(self.base_sizes)
+
+    def gen_base_anchors(self):
+        """Generate base anchors.
+
+        Returns:
+            list(torch.Tensor): Base anchors of a feature grid in multiple \
+                feature levels.
+        """
+        multi_level_base_anchors = []
+        for i, base_sizes_per_level in enumerate(self.base_sizes):
+            center = None
+            if self.centers is not None:
+                center = self.centers[i]
+            multi_level_base_anchors.append(
+                self.gen_single_level_base_anchors(base_sizes_per_level,
+                                                   center))
+        return multi_level_base_anchors
+
+    def gen_single_level_base_anchors(self, base_sizes_per_level, center=None):
+        """Generate base anchors of a single level.
+ + Args: + base_sizes_per_level (list[tuple[int, int]]): Basic sizes of + anchors. + center (tuple[float], optional): The center of the base anchor + related to a single feature grid. Defaults to None. + + Returns: + torch.Tensor: Anchors in a single-level feature maps. + """ + x_center, y_center = center + base_anchors = [] + for base_size in base_sizes_per_level: + w, h = base_size + + # use float anchor and the anchor's center is aligned with the + # pixel center + base_anchor = torch.Tensor([ + x_center - 0.5 * w, y_center - 0.5 * h, x_center + 0.5 * w, + y_center + 0.5 * h + ]) + base_anchors.append(base_anchor) + base_anchors = torch.stack(base_anchors, dim=0) + + return base_anchors + + def responsible_flags(self, featmap_sizes, gt_bboxes, device='cuda'): + """Generate responsible anchor flags of grid cells in multiple scales. + + Args: + featmap_sizes (list(tuple)): List of feature map sizes in multiple + feature levels. + gt_bboxes (Tensor): Ground truth boxes, shape (n, 4). + device (str): Device where the anchors will be put on. + + Return: + list(torch.Tensor): responsible flags of anchors in multiple level + """ + assert self.num_levels == len(featmap_sizes) + multi_level_responsible_flags = [] + for i in range(self.num_levels): + anchor_stride = self.strides[i] + flags = self.single_level_responsible_flags( + featmap_sizes[i], + gt_bboxes, + anchor_stride, + self.num_base_anchors[i], + device=device) + multi_level_responsible_flags.append(flags) + return multi_level_responsible_flags + + def single_level_responsible_flags(self, + featmap_size, + gt_bboxes, + stride, + num_base_anchors, + device='cuda'): + """Generate the responsible flags of anchor in a single feature map. + + Args: + featmap_size (tuple[int]): The size of feature maps. + gt_bboxes (Tensor): Ground truth boxes, shape (n, 4). + stride (tuple(int)): stride of current level + num_base_anchors (int): The number of base anchors. + device (str, optional): Device where the flags will be put on. + Defaults to 'cuda'. + + Returns: + torch.Tensor: The valid flags of each anchor in a single level \ + feature map. 
+ """ + feat_h, feat_w = featmap_size + gt_bboxes_cx = ((gt_bboxes[:, 0] + gt_bboxes[:, 2]) * 0.5).to(device) + gt_bboxes_cy = ((gt_bboxes[:, 1] + gt_bboxes[:, 3]) * 0.5).to(device) + gt_bboxes_grid_x = torch.floor(gt_bboxes_cx / stride[0]).long() + gt_bboxes_grid_y = torch.floor(gt_bboxes_cy / stride[1]).long() + + # row major indexing + gt_bboxes_grid_idx = gt_bboxes_grid_y * feat_w + gt_bboxes_grid_x + + responsible_grid = torch.zeros( + feat_h * feat_w, dtype=torch.uint8, device=device) + responsible_grid[gt_bboxes_grid_idx] = 1 + + responsible_grid = responsible_grid[:, None].expand( + responsible_grid.size(0), num_base_anchors).contiguous().view(-1) + return responsible_grid diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/builder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..d53a62429688caa1025a8db5b64cc535dab1245c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/builder.py @@ -0,0 +1,18 @@ +import warnings + +from mmcv.utils import Registry, build_from_cfg + +PRIOR_GENERATORS = Registry('Generator for anchors and points') + +ANCHOR_GENERATORS = PRIOR_GENERATORS + + +def build_prior_generator(cfg, default_args=None): + return build_from_cfg(cfg, PRIOR_GENERATORS, default_args) + + +def build_anchor_generator(cfg, default_args=None): + warnings.warn( + '``build_anchor_generator`` would be deprecated soon, please use ' + '``build_prior_generator`` ') + return build_prior_generator(cfg, default_args=default_args) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/point_generator.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/point_generator.py new file mode 100644 index 0000000000000000000000000000000000000000..7b11a855fbd7448d2b490911547c7e3f04546d45 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/point_generator.py @@ -0,0 +1,241 @@ +import numpy as np +import torch +from torch.nn.modules.utils import _pair + +from .builder import PRIOR_GENERATORS + + +@PRIOR_GENERATORS.register_module() +class PointGenerator: + + def _meshgrid(self, x, y, row_major=True): + xx = x.repeat(len(y)) + yy = y.view(-1, 1).repeat(1, len(x)).view(-1) + if row_major: + return xx, yy + else: + return yy, xx + + def grid_points(self, featmap_size, stride=16, device='cuda'): + feat_h, feat_w = featmap_size + shift_x = torch.arange(0., feat_w, device=device) * stride + shift_y = torch.arange(0., feat_h, device=device) * stride + shift_xx, shift_yy = self._meshgrid(shift_x, shift_y) + stride = shift_x.new_full((shift_xx.shape[0], ), stride) + shifts = torch.stack([shift_xx, shift_yy, stride], dim=-1) + all_points = shifts.to(device) + return all_points + + def valid_flags(self, featmap_size, valid_size, device='cuda'): + feat_h, feat_w = featmap_size + valid_h, valid_w = valid_size + assert valid_h <= feat_h and valid_w <= feat_w + valid_x = torch.zeros(feat_w, dtype=torch.bool, device=device) + valid_y = torch.zeros(feat_h, dtype=torch.bool, device=device) + valid_x[:valid_w] = 1 + valid_y[:valid_h] = 1 + valid_xx, valid_yy = self._meshgrid(valid_x, valid_y) + valid = valid_xx & valid_yy + return valid + + +@PRIOR_GENERATORS.register_module() +class MlvlPointGenerator: + """Standard points generator for multi-level (Mlvl) feature maps in 2D + points-based detectors. 
+ + Args: + strides (list[int] | list[tuple[int, int]]): Strides of anchors + in multiple feature levels in order (w, h). + offset (float): The offset of points, the value is normalized with + corresponding stride. Defaults to 0.5. + """ + + def __init__(self, strides, offset=0.5): + self.strides = [_pair(stride) for stride in strides] + self.offset = offset + + @property + def num_levels(self): + """int: number of feature levels that the generator will be applied""" + return len(self.strides) + + @property + def num_base_priors(self): + """list[int]: The number of priors (points) at a point + on the feature grid""" + return [1 for _ in range(len(self.strides))] + + def _meshgrid(self, x, y, row_major=True): + xx = x.repeat(len(y)) + yy = y.view(-1, 1).repeat(1, len(x)).view(-1) + if row_major: + return xx, yy + else: + return yy, xx + + def grid_priors(self, featmap_sizes, device='cuda', with_stride=False): + """Generate grid points of multiple feature levels. + + Args: + featmap_sizes (list[tuple]): List of feature map sizes in + multiple feature levels, each size arrange as + as (h, w). + device (str): The device where the anchors will be put on. + with_stride (bool): Whether to concatenate the stride to + the last dimension of points. + + Return: + list[torch.Tensor]: Points of multiple feature levels. + The sizes of each tensor should be (N, 2) when with stride is + ``False``, where N = width * height, width and height + are the sizes of the corresponding feature level, + and the last dimension 2 represent (coord_x, coord_y), + otherwise the shape should be (N, 4), + and the last dimension 4 represent + (coord_x, coord_y, stride_w, stride_h). + """ + assert self.num_levels == len(featmap_sizes) + multi_level_priors = [] + for i in range(self.num_levels): + priors = self.single_level_grid_priors( + featmap_sizes[i], + level_idx=i, + device=device, + with_stride=with_stride) + multi_level_priors.append(priors) + return multi_level_priors + + def single_level_grid_priors(self, + featmap_size, + level_idx, + device='cuda', + with_stride=False): + """Generate grid Points of a single level. + + Note: + This function is usually called by method ``self.grid_priors``. + + Args: + featmap_size (tuple[int]): Size of the feature maps, arrange as + (h, w). + level_idx (int): The index of corresponding feature map level. + device (str, optional): The device the tensor will be put on. + Defaults to 'cuda'. + with_stride (bool): Concatenate the stride to the last dimension + of points. + + Return: + Tensor: Points of single feature levels. + The shape of tensor should be (N, 2) when with stride is + ``False``, where N = width * height, width and height + are the sizes of the corresponding feature level, + and the last dimension 2 represent (coord_x, coord_y), + otherwise the shape should be (N, 4), + and the last dimension 4 represent + (coord_x, coord_y, stride_w, stride_h). 
+ """ + feat_h, feat_w = featmap_size + stride_w, stride_h = self.strides[level_idx] + shift_x = (torch.arange(0., feat_w, device=device) + + self.offset) * stride_w + shift_y = (torch.arange(0., feat_h, device=device) + + self.offset) * stride_h + shift_xx, shift_yy = self._meshgrid(shift_x, shift_y) + if not with_stride: + shifts = torch.stack([shift_xx, shift_yy], dim=-1) + else: + stride_w = shift_xx.new_full((len(shift_xx), ), stride_w) + stride_h = shift_xx.new_full((len(shift_yy), ), stride_h) + shifts = torch.stack([shift_xx, shift_yy, stride_w, stride_h], + dim=-1) + all_points = shifts.to(device) + return all_points + + def valid_flags(self, featmap_sizes, pad_shape, device='cuda'): + """Generate valid flags of points of multiple feature levels. + + Args: + featmap_sizes (list(tuple)): List of feature map sizes in + multiple feature levels, each size arrange as + as (h, w). + pad_shape (tuple(int)): The padded shape of the image, + arrange as (h, w). + device (str): The device where the anchors will be put on. + + Return: + list(torch.Tensor): Valid flags of points of multiple levels. + """ + assert self.num_levels == len(featmap_sizes) + multi_level_flags = [] + for i in range(self.num_levels): + point_stride = self.strides[i] + feat_h, feat_w = featmap_sizes[i] + h, w = pad_shape[:2] + valid_feat_h = min(int(np.ceil(h / point_stride[1])), feat_h) + valid_feat_w = min(int(np.ceil(w / point_stride[0])), feat_w) + flags = self.single_level_valid_flags((feat_h, feat_w), + (valid_feat_h, valid_feat_w), + device=device) + multi_level_flags.append(flags) + return multi_level_flags + + def single_level_valid_flags(self, + featmap_size, + valid_size, + device='cuda'): + """Generate the valid flags of points of a single feature map. + + Args: + featmap_size (tuple[int]): The size of feature maps, arrange as + as (h, w). + valid_size (tuple[int]): The valid size of the feature maps. + The size arrange as as (h, w). + device (str, optional): The device where the flags will be put on. + Defaults to 'cuda'. + + Returns: + torch.Tensor: The valid flags of each points in a single level \ + feature map. + """ + feat_h, feat_w = featmap_size + valid_h, valid_w = valid_size + assert valid_h <= feat_h and valid_w <= feat_w + valid_x = torch.zeros(feat_w, dtype=torch.bool, device=device) + valid_y = torch.zeros(feat_h, dtype=torch.bool, device=device) + valid_x[:valid_w] = 1 + valid_y[:valid_h] = 1 + valid_xx, valid_yy = self._meshgrid(valid_x, valid_y) + valid = valid_xx & valid_yy + return valid + + def sparse_priors(self, + prior_idxs, + featmap_size, + level_idx, + dtype=torch.float32, + device='cuda'): + """Generate sparse points according to the ``prior_idxs``. + + Args: + prior_idxs (Tensor): The index of corresponding anchors + in the feature map. + featmap_size (tuple[int]): feature map size arrange as (w, h). + level_idx (int): The level index of corresponding feature + map. + dtype (obj:`torch.dtype`): Date type of points. Defaults to + ``torch.float32``. + device (obj:`torch.device`): The device where the points is + located. + Returns: + Tensor: Anchor with shape (N, 2), N should be equal to + the length of ``prior_idxs``. And last dimension + 2 represent (coord_x, coord_y). 
+ """ + height, width = featmap_size + x = (prior_idxs % width + self.offset) * self.strides[level_idx][0] + y = ((prior_idxs // width) % height + + self.offset) * self.strides[level_idx][1] + prioris = torch.stack([x, y], 1).to(dtype) + prioris = prioris.to(device) + return prioris diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/utils.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ab9b53f37f7be1f52fe63c5e53df64ac1303b9e0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/anchor/utils.py @@ -0,0 +1,71 @@ +import torch + + +def images_to_levels(target, num_levels): + """Convert targets by image to targets by feature level. + + [target_img0, target_img1] -> [target_level0, target_level1, ...] + """ + target = torch.stack(target, 0) + level_targets = [] + start = 0 + for n in num_levels: + end = start + n + # level_targets.append(target[:, start:end].squeeze(0)) + level_targets.append(target[:, start:end]) + start = end + return level_targets + + +def anchor_inside_flags(flat_anchors, + valid_flags, + img_shape, + allowed_border=0): + """Check whether the anchors are inside the border. + + Args: + flat_anchors (torch.Tensor): Flatten anchors, shape (n, 4). + valid_flags (torch.Tensor): An existing valid flags of anchors. + img_shape (tuple(int)): Shape of current image. + allowed_border (int, optional): The border to allow the valid anchor. + Defaults to 0. + + Returns: + torch.Tensor: Flags indicating whether the anchors are inside a \ + valid range. + """ + img_h, img_w = img_shape[:2] + if allowed_border >= 0: + inside_flags = valid_flags & \ + (flat_anchors[:, 0] >= -allowed_border) & \ + (flat_anchors[:, 1] >= -allowed_border) & \ + (flat_anchors[:, 2] < img_w + allowed_border) & \ + (flat_anchors[:, 3] < img_h + allowed_border) + else: + inside_flags = valid_flags + return inside_flags + + +def calc_region(bbox, ratio, featmap_size=None): + """Calculate a proportional bbox region. + + The bbox center are fixed and the new h' and w' is h * ratio and w * ratio. + + Args: + bbox (Tensor): Bboxes to calculate regions, shape (n, 4). + ratio (float): Ratio of the output region. + featmap_size (tuple): Feature map size used for clipping the boundary. 
+
+
+def calc_region(bbox, ratio, featmap_size=None):
+    """Calculate a proportional bbox region.
+
+    The bbox center is fixed and the new h' and w' are h * ratio and
+    w * ratio.
+
+    Args:
+        bbox (Tensor): Bboxes to calculate regions, shape (n, 4).
+        ratio (float): Ratio of the output region.
+        featmap_size (tuple): Feature map size used for clipping the
+            boundary.
+
+    Returns:
+        tuple: x1, y1, x2, y2
+    """
+    x1 = torch.round((1 - ratio) * bbox[0] + ratio * bbox[2]).long()
+    y1 = torch.round((1 - ratio) * bbox[1] + ratio * bbox[3]).long()
+    x2 = torch.round(ratio * bbox[0] + (1 - ratio) * bbox[2]).long()
+    y2 = torch.round(ratio * bbox[1] + (1 - ratio) * bbox[3]).long()
+    if featmap_size is not None:
+        x1 = x1.clamp(min=0, max=featmap_size[1])
+        y1 = y1.clamp(min=0, max=featmap_size[0])
+        x2 = x2.clamp(min=0, max=featmap_size[1])
+        y2 = y2.clamp(min=0, max=featmap_size[0])
+    return (x1, y1, x2, y2)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..a3537297f57e4c3670afdb97b5fcb1b2d775e5f3
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/__init__.py
@@ -0,0 +1,27 @@
+from .assigners import (AssignResult, BaseAssigner, CenterRegionAssigner,
+                        MaxIoUAssigner, RegionAssigner)
+from .builder import build_assigner, build_bbox_coder, build_sampler
+from .coder import (BaseBBoxCoder, DeltaXYWHBBoxCoder, PseudoBBoxCoder,
+                    TBLRBBoxCoder)
+from .iou_calculators import BboxOverlaps2D, bbox_overlaps
+from .samplers import (BaseSampler, CombinedSampler,
+                       InstanceBalancedPosSampler, IoUBalancedNegSampler,
+                       OHEMSampler, PseudoSampler, RandomSampler,
+                       SamplingResult, ScoreHLRSampler)
+from .transforms import (bbox2distance, bbox2result, bbox2roi,
+                         bbox_cxcywh_to_xyxy, bbox_flip, bbox_mapping,
+                         bbox_mapping_back, bbox_rescale, bbox_xyxy_to_cxcywh,
+                         distance2bbox, roi2bbox)
+
+__all__ = [
+    'bbox_overlaps', 'BboxOverlaps2D', 'BaseAssigner', 'MaxIoUAssigner',
+    'AssignResult', 'BaseSampler', 'PseudoSampler', 'RandomSampler',
+    'InstanceBalancedPosSampler', 'IoUBalancedNegSampler', 'CombinedSampler',
+    'OHEMSampler', 'SamplingResult', 'ScoreHLRSampler', 'build_assigner',
+    'build_sampler', 'bbox_flip', 'bbox_mapping', 'bbox_mapping_back',
+    'bbox2roi', 'roi2bbox', 'bbox2result', 'distance2bbox', 'bbox2distance',
+    'build_bbox_coder', 'BaseBBoxCoder', 'PseudoBBoxCoder',
+    'DeltaXYWHBBoxCoder', 'TBLRBBoxCoder', 'CenterRegionAssigner',
+    'bbox_rescale', 'bbox_cxcywh_to_xyxy', 'bbox_xyxy_to_cxcywh',
+    'RegionAssigner'
+]
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..891e6237c537daf5b445eeffc160747ff78f695d
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/__init__.py
@@ -0,0 +1,17 @@
+from .approx_max_iou_assigner import ApproxMaxIoUAssigner
+from .assign_result import AssignResult
+from .atss_assigner import ATSSAssigner
+from .base_assigner import BaseAssigner
+from .center_region_assigner import CenterRegionAssigner
+from .grid_assigner import GridAssigner
+from .hungarian_assigner import HungarianAssigner
+from .max_iou_assigner import MaxIoUAssigner
+from .point_assigner import PointAssigner
+from .region_assigner import RegionAssigner
+from .uniform_assigner import UniformAssigner
+
+__all__ = [
+    'BaseAssigner', 'MaxIoUAssigner', 'ApproxMaxIoUAssigner', 'AssignResult',
+    'PointAssigner', 'ATSSAssigner', 'CenterRegionAssigner', 'GridAssigner',
+    'HungarianAssigner', 'RegionAssigner', 'UniformAssigner'
+]
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/approx_max_iou_assigner.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/approx_max_iou_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d07656d173744426795c81c14c6bcdb4e63a406
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/approx_max_iou_assigner.py
@@ -0,0 +1,145 @@
+import torch
+
+from ..builder import BBOX_ASSIGNERS
+from ..iou_calculators import build_iou_calculator
+from .max_iou_assigner import MaxIoUAssigner
+
+
+@BBOX_ASSIGNERS.register_module()
+class ApproxMaxIoUAssigner(MaxIoUAssigner):
+    """Assign a corresponding gt bbox or background to each bbox.
+
+    Each proposal will be assigned with an integer indicating the ground
+    truth index (semi-positive index: gt index (0-based); -1: background).
+
+    - -1: negative sample, no assigned gt
+    - semi-positive integer: positive sample, index (0-based) of assigned gt
+
+    Args:
+        pos_iou_thr (float): IoU threshold for positive bboxes.
+        neg_iou_thr (float or tuple): IoU threshold for negative bboxes.
+        min_pos_iou (float): Minimum IoU for a bbox to be considered as a
+            positive bbox. Positive samples can have smaller IoU than
+            pos_iou_thr due to the last step (assign max IoU sample to each
+            gt).
+        gt_max_assign_all (bool): Whether to assign all bboxes with the same
+            highest overlap with some gt to that gt.
+        ignore_iof_thr (float): IoF threshold for ignoring bboxes (if
+            `gt_bboxes_ignore` is specified). Negative values mean not
+            ignoring any bboxes.
+        ignore_wrt_candidates (bool): Whether to compute the iof between
+            `bboxes` and `gt_bboxes_ignore`, or the contrary.
+        match_low_quality (bool): Whether to allow low quality matches. This
+            is usually allowed for RPN and single stage detectors, but not
+            allowed in the second stage.
+        gpu_assign_thr (int): The upper bound of the number of GT for GPU
+            assign. When the number of gt is above this threshold, will assign
+            on CPU device. Negative values mean not assign on CPU.
+    """
+
+    def __init__(self,
+                 pos_iou_thr,
+                 neg_iou_thr,
+                 min_pos_iou=.0,
+                 gt_max_assign_all=True,
+                 ignore_iof_thr=-1,
+                 ignore_wrt_candidates=True,
+                 match_low_quality=True,
+                 gpu_assign_thr=-1,
+                 iou_calculator=dict(type='BboxOverlaps2D')):
+        self.pos_iou_thr = pos_iou_thr
+        self.neg_iou_thr = neg_iou_thr
+        self.min_pos_iou = min_pos_iou
+        self.gt_max_assign_all = gt_max_assign_all
+        self.ignore_iof_thr = ignore_iof_thr
+        self.ignore_wrt_candidates = ignore_wrt_candidates
+        self.gpu_assign_thr = gpu_assign_thr
+        self.match_low_quality = match_low_quality
+        self.iou_calculator = build_iou_calculator(iou_calculator)
+
+    def assign(self,
+               approxs,
+               squares,
+               approxs_per_octave,
+               gt_bboxes,
+               gt_bboxes_ignore=None,
+               gt_labels=None):
+        """Assign gt to approxs.
+
+        This method assigns a gt bbox to each group of approxs (bboxes);
+        each group of approxs is represented by a base approx (bbox) and
+        will be assigned with -1, or a semi-positive number.
+        background_label (-1) means negative sample,
+        semi-positive number is the index (0-based) of assigned gt.
+        The assignment is done in following steps, the order matters.
+
+        1. assign every bbox to background_label (-1)
+        2. use the max IoU of each group of approxs to assign
+        3. assign proposals whose iou with all gts < neg_iou_thr to background
+        4. for each bbox, if the iou with its nearest gt >= pos_iou_thr,
+           assign it to that bbox
+        5. for each gt bbox, assign its nearest proposals (may be more than
+           one) to itself
+
+        Args:
+            approxs (Tensor): Bounding boxes to be assigned,
+                shape (approxs_per_octave*n, 4).
+            squares (Tensor): Base bounding boxes to be assigned,
+                shape (n, 4).
+            approxs_per_octave (int): Number of approxs per octave.
+            gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4).
+            gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are
+                labelled as `ignored`, e.g., crowd boxes in COCO.
+            gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ).
+
+        Returns:
+            :obj:`AssignResult`: The assign result.
+        """
+        num_squares = squares.size(0)
+        num_gts = gt_bboxes.size(0)
+
+        if num_squares == 0 or num_gts == 0:
+            # No predictions and/or truth, return empty assignment
+            overlaps = approxs.new(num_gts, num_squares)
+            assign_result = self.assign_wrt_overlaps(overlaps, gt_labels)
+            return assign_result
+
+        # re-organize anchors by approxs_per_octave x num_squares
+        approxs = torch.transpose(
+            approxs.view(num_squares, approxs_per_octave, 4), 0,
+            1).contiguous().view(-1, 4)
+        assign_on_cpu = True if (self.gpu_assign_thr > 0) and (
+            num_gts > self.gpu_assign_thr) else False
+        # compute overlap and assign gt on CPU when number of GT is large
+        if assign_on_cpu:
+            device = approxs.device
+            approxs = approxs.cpu()
+            gt_bboxes = gt_bboxes.cpu()
+            if gt_bboxes_ignore is not None:
+                gt_bboxes_ignore = gt_bboxes_ignore.cpu()
+            if gt_labels is not None:
+                gt_labels = gt_labels.cpu()
+        all_overlaps = self.iou_calculator(approxs, gt_bboxes)
+
+        overlaps, _ = all_overlaps.view(approxs_per_octave, num_squares,
+                                        num_gts).max(dim=0)
+        overlaps = torch.transpose(overlaps, 0, 1)
+
+        if (self.ignore_iof_thr > 0 and gt_bboxes_ignore is not None
+                and gt_bboxes_ignore.numel() > 0 and squares.numel() > 0):
+            if self.ignore_wrt_candidates:
+                ignore_overlaps = self.iou_calculator(
+                    squares, gt_bboxes_ignore, mode='iof')
+                ignore_max_overlaps, _ = ignore_overlaps.max(dim=1)
+            else:
+                ignore_overlaps = self.iou_calculator(
+                    gt_bboxes_ignore, squares, mode='iof')
+                ignore_max_overlaps, _ = ignore_overlaps.max(dim=0)
+            overlaps[:, ignore_max_overlaps > self.ignore_iof_thr] = -1
+
+        assign_result = self.assign_wrt_overlaps(overlaps, gt_labels)
+        if assign_on_cpu:
+            assign_result.gt_inds = assign_result.gt_inds.to(device)
+            assign_result.max_overlaps = assign_result.max_overlaps.to(device)
+            if assign_result.labels is not None:
+                assign_result.labels = assign_result.labels.to(device)
+        return assign_result
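Illustrative aside, not part of the diff: the key step above collapses each group of octave approxs to its best IoU before reusing the inherited MaxIoU logic. A standalone sketch of that reduction with random IoUs (it assumes rows are already grouped octave-major, as the transpose in the code arranges):

import torch

approxs_per_octave, num_squares, num_gts = 3, 4, 2
# one IoU row per (approx, gt) pair, as produced by the iou calculator
all_overlaps = torch.rand(approxs_per_octave * num_squares, num_gts)
# keep, for every base square, the best IoU among its octave approxs
overlaps, _ = all_overlaps.view(approxs_per_octave, num_squares,
                                num_gts).max(dim=0)
overlaps = torch.transpose(overlaps, 0, 1)
print(overlaps.shape)  # torch.Size([2, 4]): the (k, n) layout assign_wrt_overlaps expects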
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/assign_result.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/assign_result.py
new file mode 100644
index 0000000000000000000000000000000000000000..6a16ca205064df87b9cfe0b3cf07b545c562a6c3
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/assign_result.py
@@ -0,0 +1,204 @@
+import torch
+
+from mmdet.utils import util_mixins
+
+
+class AssignResult(util_mixins.NiceRepr):
+    """Stores assignments between predicted and truth boxes.
+
+    Attributes:
+        num_gts (int): the number of truth boxes considered when computing
+            this assignment
+
+        gt_inds (LongTensor): for each predicted box indicates the 1-based
+            index of the assigned truth box. 0 means unassigned and -1 means
+            ignore.
+
+        max_overlaps (FloatTensor): the iou between the predicted box and its
+            assigned truth box.
+
+        labels (None | LongTensor): If specified, for each predicted box
+            indicates the category label of the assigned truth box.
+
+    Example:
+        >>> # An assign result between 4 predicted boxes and 9 true boxes
+        >>> # where only two boxes were assigned.
+        >>> num_gts = 9
+        >>> max_overlaps = torch.FloatTensor([0, .5, .9, 0])
+        >>> gt_inds = torch.LongTensor([-1, 1, 2, 0])
+        >>> labels = torch.LongTensor([0, 3, 4, 0])
+        >>> self = AssignResult(num_gts, gt_inds, max_overlaps, labels)
+        >>> print(str(self))  # xdoctest: +IGNORE_WANT
+        <AssignResult(num_gts=9, gt_inds.shape=(4,), max_overlaps.shape=(4,),
+                      labels.shape=(4,))>
+        >>> # Force addition of gt labels (when adding gt as proposals)
+        >>> new_labels = torch.LongTensor([3, 4, 5])
+        >>> self.add_gt_(new_labels)
+        >>> print(str(self))  # xdoctest: +IGNORE_WANT
+        <AssignResult(num_gts=9, gt_inds.shape=(7,), max_overlaps.shape=(7,),
+                      labels.shape=(7,))>
+    """
+
+    def __init__(self, num_gts, gt_inds, max_overlaps, labels=None):
+        self.num_gts = num_gts
+        self.gt_inds = gt_inds
+        self.max_overlaps = max_overlaps
+        self.labels = labels
+        # Interface for possible user-defined properties
+        self._extra_properties = {}
+
+    @property
+    def num_preds(self):
+        """int: the number of predictions in this assignment"""
+        return len(self.gt_inds)
+
+    def set_extra_property(self, key, value):
+        """Set user-defined new property."""
+        assert key not in self.info
+        self._extra_properties[key] = value
+
+    def get_extra_property(self, key):
+        """Get user-defined property."""
+        return self._extra_properties.get(key, None)
+
+    @property
+    def info(self):
+        """dict: a dictionary of info about the object"""
+        basic_info = {
+            'num_gts': self.num_gts,
+            'num_preds': self.num_preds,
+            'gt_inds': self.gt_inds,
+            'max_overlaps': self.max_overlaps,
+            'labels': self.labels,
+        }
+        basic_info.update(self._extra_properties)
+        return basic_info
+
+    def __nice__(self):
+        """str: a "nice" summary string describing this assign result"""
+        parts = []
+        parts.append(f'num_gts={self.num_gts!r}')
+        if self.gt_inds is None:
+            parts.append(f'gt_inds={self.gt_inds!r}')
+        else:
+            parts.append(f'gt_inds.shape={tuple(self.gt_inds.shape)!r}')
+        if self.max_overlaps is None:
+            parts.append(f'max_overlaps={self.max_overlaps!r}')
+        else:
+            parts.append('max_overlaps.shape='
+                         f'{tuple(self.max_overlaps.shape)!r}')
+        if self.labels is None:
+            parts.append(f'labels={self.labels!r}')
+        else:
+            parts.append(f'labels.shape={tuple(self.labels.shape)!r}')
+        return ', '.join(parts)
+
+    @classmethod
+    def random(cls, **kwargs):
+        """Create random AssignResult for tests or debugging.
+
+        Args:
+            num_preds: number of predicted boxes
+            num_gts: number of true boxes
+            p_ignore (float): probability of a predicted box assigned to an
+                ignored truth
+            p_assigned (float): probability of a predicted box being assigned
+            p_use_label (float | bool): with labels or not
+            rng (None | int | numpy.random.RandomState): seed or state
+
+        Returns:
+            :obj:`AssignResult`: Randomly generated assign results.
+
+        Example:
+            >>> from mmdet.core.bbox.assigners.assign_result import *  # NOQA
+            >>> self = AssignResult.random()
+            >>> print(self.info)
+        """
+        from mmdet.core.bbox import demodata
+        rng = demodata.ensure_rng(kwargs.get('rng', None))
+
+        num_gts = kwargs.get('num_gts', None)
+        num_preds = kwargs.get('num_preds', None)
+        p_ignore = kwargs.get('p_ignore', 0.3)
+        p_assigned = kwargs.get('p_assigned', 0.7)
+        p_use_label = kwargs.get('p_use_label', 0.5)
+        num_classes = kwargs.get('num_classes', 3)
+
+        if num_gts is None:
+            num_gts = rng.randint(0, 8)
+        if num_preds is None:
+            num_preds = rng.randint(0, 16)
+
+        if num_gts == 0:
+            max_overlaps = torch.zeros(num_preds, dtype=torch.float32)
+            gt_inds = torch.zeros(num_preds, dtype=torch.int64)
+            if p_use_label is True or p_use_label < rng.rand():
+                labels = torch.zeros(num_preds, dtype=torch.int64)
+            else:
+                labels = None
+        else:
+            import numpy as np
+
+            # Create an overlap for each predicted box
+            max_overlaps = torch.from_numpy(rng.rand(num_preds))
+
+            # Construct gt_inds for each predicted box
+            is_assigned = torch.from_numpy(rng.rand(num_preds) < p_assigned)
+            # maximum number of assignments constraints
+            n_assigned = min(num_preds, min(num_gts, is_assigned.sum()))
+
+            assigned_idxs = np.where(is_assigned)[0]
+            rng.shuffle(assigned_idxs)
+            assigned_idxs = assigned_idxs[0:n_assigned]
+            assigned_idxs.sort()
+
+            is_assigned[:] = 0
+            is_assigned[assigned_idxs] = True
+
+            is_ignore = torch.from_numpy(
+                rng.rand(num_preds) < p_ignore) & is_assigned
+
+            gt_inds = torch.zeros(num_preds, dtype=torch.int64)
+
+            true_idxs = np.arange(num_gts)
+            rng.shuffle(true_idxs)
+            true_idxs = torch.from_numpy(true_idxs)
+            gt_inds[is_assigned] = true_idxs[:n_assigned]
+
+            gt_inds = torch.from_numpy(
+                rng.randint(1, num_gts + 1, size=num_preds))
+            gt_inds[is_ignore] = -1
+            gt_inds[~is_assigned] = 0
+            max_overlaps[~is_assigned] = 0
+
+            if p_use_label is True or p_use_label < rng.rand():
+                if num_classes == 0:
+                    labels = torch.zeros(num_preds, dtype=torch.int64)
+                else:
+                    labels = torch.from_numpy(
+                        # remind that we set FG labels to [0, num_class-1]
+                        # since mmdet v2.0
+                        # BG cat_id: num_class
+                        rng.randint(0, num_classes, size=num_preds))
+                    labels[~is_assigned] = 0
+            else:
+                labels = None
+
+        self = cls(num_gts, gt_inds, max_overlaps, labels)
+        return self
+
+    def add_gt_(self, gt_labels):
+        """Add ground truth as assigned results.
+
+        Args:
+            gt_labels (torch.Tensor): Labels of gt boxes
+        """
+        self_inds = torch.arange(
+            1, len(gt_labels) + 1, dtype=torch.long, device=gt_labels.device)
+        self.gt_inds = torch.cat([self_inds, self.gt_inds])
+
+        self.max_overlaps = torch.cat(
+            [self.max_overlaps.new_ones(len(gt_labels)), self.max_overlaps])
+
+        if self.labels is not None:
+            self.labels = torch.cat([gt_labels, self.labels])
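Illustrative aside, not part of the diff: add_gt_ prepends the gts themselves as perfectly-assigned proposals (IoU 1.0, 1-based self indices). A hedged usage sketch, assuming the mmdet package built from this tree is importable:

import torch
from mmdet.core.bbox.assigners.assign_result import AssignResult

gt_inds = torch.LongTensor([0, 2])            # one unassigned box, one assigned to gt 2
max_overlaps = torch.FloatTensor([0.1, 0.8])
labels = torch.LongTensor([-1, 5])
result = AssignResult(3, gt_inds, max_overlaps, labels=labels)

result.add_gt_(torch.LongTensor([7, 8, 9]))   # three gts become proposals 1..3
print(result.gt_inds)       # tensor([1, 2, 3, 0, 2])
print(result.max_overlaps)  # tensor([1.0000, 1.0000, 1.0000, 0.1000, 0.8000])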
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/atss_assigner.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/atss_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..d4fe9d0e3c8704bd780d493eff20a5505dbe9580
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/atss_assigner.py
@@ -0,0 +1,178 @@
+import torch
+
+from ..builder import BBOX_ASSIGNERS
+from ..iou_calculators import build_iou_calculator
+from .assign_result import AssignResult
+from .base_assigner import BaseAssigner
+
+
+@BBOX_ASSIGNERS.register_module()
+class ATSSAssigner(BaseAssigner):
+    """Assign a corresponding gt bbox or background to each bbox.
+
+    Each proposal will be assigned with `0` or a positive integer
+    indicating the ground truth index.
+
+    - 0: negative sample, no assigned gt
+    - positive integer: positive sample, index (1-based) of assigned gt
+
+    Args:
+        topk (int): number of bboxes selected in each level
+    """
+
+    def __init__(self,
+                 topk,
+                 iou_calculator=dict(type='BboxOverlaps2D'),
+                 ignore_iof_thr=-1):
+        self.topk = topk
+        self.iou_calculator = build_iou_calculator(iou_calculator)
+        self.ignore_iof_thr = ignore_iof_thr
+
+    # https://github.com/sfzhang15/ATSS/blob/master/atss_core/modeling/rpn/atss/loss.py
+
+    def assign(self,
+               bboxes,
+               num_level_bboxes,
+               gt_bboxes,
+               gt_bboxes_ignore=None,
+               gt_labels=None):
+        """Assign gt to bboxes.
+
+        The assignment is done in following steps
+
+        1. compute iou between all bbox (bbox of all pyramid levels) and gt
+        2. compute center distance between all bbox and gt
+        3. on each pyramid level, for each gt, select k bboxes whose centers
+           are closest to the gt center, so we select k*l bboxes in total as
+           candidates for each gt
+        4. get corresponding iou for these candidates, and compute the
+           mean and std, set mean + std as the iou threshold
+        5. select these candidates whose iou are greater than or equal to
+           the threshold as positive
+        6. limit the positive sample's center in gt
+
+        Args:
+            bboxes (Tensor): Bounding boxes to be assigned, shape (n, 4).
+            num_level_bboxes (List): num of bboxes in each level
+            gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4).
+            gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are
+                labelled as `ignored`, e.g., crowd boxes in COCO.
+            gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ).
+
+        Returns:
+            :obj:`AssignResult`: The assign result.
+        """
+        INF = 100000000
+        bboxes = bboxes[:, :4]
+        num_gt, num_bboxes = gt_bboxes.size(0), bboxes.size(0)
+
+        # compute iou between all bbox and gt
+        overlaps = self.iou_calculator(bboxes, gt_bboxes)
+
+        # assign 0 by default
+        assigned_gt_inds = overlaps.new_full((num_bboxes, ),
+                                             0,
+                                             dtype=torch.long)
+
+        if num_gt == 0 or num_bboxes == 0:
+            # No ground truth or boxes, return empty assignment
+            max_overlaps = overlaps.new_zeros((num_bboxes, ))
+            if num_gt == 0:
+                # No truth, assign everything to background
+                assigned_gt_inds[:] = 0
+            if gt_labels is None:
+                assigned_labels = None
+            else:
+                assigned_labels = overlaps.new_full((num_bboxes, ),
+                                                    -1,
+                                                    dtype=torch.long)
+            return AssignResult(
+                num_gt, assigned_gt_inds, max_overlaps, labels=assigned_labels)
+
+        # compute center distance between all bbox and gt
+        gt_cx = (gt_bboxes[:, 0] + gt_bboxes[:, 2]) / 2.0
+        gt_cy = (gt_bboxes[:, 1] + gt_bboxes[:, 3]) / 2.0
+        gt_points = torch.stack((gt_cx, gt_cy), dim=1)
+
+        bboxes_cx = (bboxes[:, 0] + bboxes[:, 2]) / 2.0
+        bboxes_cy = (bboxes[:, 1] + bboxes[:, 3]) / 2.0
+        bboxes_points = torch.stack((bboxes_cx, bboxes_cy), dim=1)
+
+        distances = (bboxes_points[:, None, :] -
+                     gt_points[None, :, :]).pow(2).sum(-1).sqrt()
+
+        if (self.ignore_iof_thr > 0 and gt_bboxes_ignore is not None
+                and gt_bboxes_ignore.numel() > 0 and bboxes.numel() > 0):
+            ignore_overlaps = self.iou_calculator(
+                bboxes, gt_bboxes_ignore, mode='iof')
+            ignore_max_overlaps, _ = ignore_overlaps.max(dim=1)
+            ignore_idxs = ignore_max_overlaps > self.ignore_iof_thr
+            distances[ignore_idxs, :] = INF
+            assigned_gt_inds[ignore_idxs] = -1
+
+        # Selecting candidates based on the center distance
+        candidate_idxs = []
+        start_idx = 0
+        for level, bboxes_per_level in enumerate(num_level_bboxes):
+            # on each pyramid level, for each gt,
+            # select k bboxes whose centers are closest to the gt center
+            end_idx = start_idx + bboxes_per_level
+            distances_per_level = distances[start_idx:end_idx, :]
+            selectable_k = min(self.topk, bboxes_per_level)
+            _, topk_idxs_per_level = distances_per_level.topk(
+                selectable_k, dim=0, largest=False)
+            candidate_idxs.append(topk_idxs_per_level + start_idx)
+            start_idx = end_idx
+        candidate_idxs = torch.cat(candidate_idxs, dim=0)
+
+        # get corresponding iou for these candidates, and compute the
+        # mean and std, set mean + std as the iou threshold
+        candidate_overlaps = overlaps[candidate_idxs, torch.arange(num_gt)]
+        overlaps_mean_per_gt = candidate_overlaps.mean(0)
+        overlaps_std_per_gt = candidate_overlaps.std(0)
+        overlaps_thr_per_gt = overlaps_mean_per_gt + overlaps_std_per_gt
+
+        is_pos = candidate_overlaps >= overlaps_thr_per_gt[None, :]
+
+        # limit the positive sample's center in gt
+        for gt_idx in range(num_gt):
+            candidate_idxs[:, gt_idx] += gt_idx * num_bboxes
+        ep_bboxes_cx = bboxes_cx.view(1, -1).expand(
+            num_gt, num_bboxes).contiguous().view(-1)
+        ep_bboxes_cy = bboxes_cy.view(1, -1).expand(
+            num_gt, num_bboxes).contiguous().view(-1)
+        candidate_idxs = candidate_idxs.view(-1)
+
+        # calculate the left, top, right, bottom distance between positive
+        # bbox center and gt side
+        l_ = ep_bboxes_cx[candidate_idxs].view(-1, num_gt) - gt_bboxes[:, 0]
+        t_ = ep_bboxes_cy[candidate_idxs].view(-1, num_gt) - gt_bboxes[:, 1]
+        r_ = gt_bboxes[:, 2] - ep_bboxes_cx[candidate_idxs].view(-1, num_gt)
+        b_ = gt_bboxes[:, 3] - ep_bboxes_cy[candidate_idxs].view(-1, num_gt)
+        is_in_gts = torch.stack([l_, t_, r_, b_], dim=1).min(dim=1)[0] > 0.01
+        is_pos = is_pos & is_in_gts
+
+        # if an anchor box is assigned to multiple gts,
+        # the one with the highest IoU will be selected.
+        overlaps_inf = torch.full_like(overlaps,
+                                       -INF).t().contiguous().view(-1)
+        index = candidate_idxs.view(-1)[is_pos.view(-1)]
+        overlaps_inf[index] = overlaps.t().contiguous().view(-1)[index]
+        overlaps_inf = overlaps_inf.view(num_gt, -1).t()
+
+        max_overlaps, argmax_overlaps = overlaps_inf.max(dim=1)
+        assigned_gt_inds[
+            max_overlaps != -INF] = argmax_overlaps[max_overlaps != -INF] + 1
+
+        if gt_labels is not None:
+            assigned_labels = assigned_gt_inds.new_full((num_bboxes, ), -1)
+            pos_inds = torch.nonzero(
+                assigned_gt_inds > 0, as_tuple=False).squeeze()
+            if pos_inds.numel() > 0:
+                assigned_labels[pos_inds] = gt_labels[
+                    assigned_gt_inds[pos_inds] - 1]
+        else:
+            assigned_labels = None
+        return AssignResult(
+            num_gt, assigned_gt_inds, max_overlaps, labels=assigned_labels)
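Illustrative aside, not part of the diff: the adaptive part of ATSS is step 4 above, where the per-gt IoU threshold is the mean plus standard deviation over the k*l candidates. A standalone sketch with made-up IoUs:

import torch

# 3 candidates (rows) for each of 2 gts (columns)
candidate_overlaps = torch.tensor([[0.1, 0.5],
                                   [0.2, 0.6],
                                   [0.6, 0.7]])
thr = candidate_overlaps.mean(0) + candidate_overlaps.std(0)
is_pos = candidate_overlaps >= thr[None, :]
print(thr)     # per-gt thresholds, here ~[0.565, 0.700]
print(is_pos)  # only the strongest candidate of each gt survives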
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/base_assigner.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/base_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..1ff0160dbb4bfbf53cb40d1d5cb29bcc3d197a59
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/base_assigner.py
@@ -0,0 +1,9 @@
+from abc import ABCMeta, abstractmethod
+
+
+class BaseAssigner(metaclass=ABCMeta):
+    """Base assigner that assigns boxes to ground truth boxes."""
+
+    @abstractmethod
+    def assign(self, bboxes, gt_bboxes, gt_bboxes_ignore=None, gt_labels=None):
+        """Assign boxes to either a ground truth box or a negative sample."""
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/center_region_assigner.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/center_region_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..488e3b615318787751cab3211e38dd9471c666be
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/center_region_assigner.py
@@ -0,0 +1,335 @@
+import torch
+
+from ..builder import BBOX_ASSIGNERS
+from ..iou_calculators import build_iou_calculator
+from .assign_result import AssignResult
+from .base_assigner import BaseAssigner
+
+
+def scale_boxes(bboxes, scale):
+    """Expand an array of boxes by a given scale.
+
+    Args:
+        bboxes (Tensor): Shape (m, 4)
+        scale (float): The scale factor of bboxes
+
+    Returns:
+        (Tensor): Shape (m, 4). Scaled bboxes
+    """
+    assert bboxes.size(1) == 4
+    w_half = (bboxes[:, 2] - bboxes[:, 0]) * .5
+    h_half = (bboxes[:, 3] - bboxes[:, 1]) * .5
+    x_c = (bboxes[:, 2] + bboxes[:, 0]) * .5
+    y_c = (bboxes[:, 3] + bboxes[:, 1]) * .5
+
+    w_half *= scale
+    h_half *= scale
+
+    boxes_scaled = torch.zeros_like(bboxes)
+    boxes_scaled[:, 0] = x_c - w_half
+    boxes_scaled[:, 2] = x_c + w_half
+    boxes_scaled[:, 1] = y_c - h_half
+    boxes_scaled[:, 3] = y_c + h_half
+    return boxes_scaled
+
+
+def is_located_in(points, bboxes):
+    """Are points located in bboxes.
+
+    Args:
+        points (Tensor): Points, shape: (m, 2).
+        bboxes (Tensor): Bounding boxes, shape: (n, 4).
+
+    Returns:
+        Tensor: Flags indicating if points are located in bboxes,
+            shape: (m, n).
+    """
+    assert points.size(1) == 2
+    assert bboxes.size(1) == 4
+    return (points[:, 0].unsqueeze(1) > bboxes[:, 0].unsqueeze(0)) & \
+        (points[:, 0].unsqueeze(1) < bboxes[:, 2].unsqueeze(0)) & \
+        (points[:, 1].unsqueeze(1) > bboxes[:, 1].unsqueeze(0)) & \
+        (points[:, 1].unsqueeze(1) < bboxes[:, 3].unsqueeze(0))
+
+
+def bboxes_area(bboxes):
+    """Compute the area of an array of bboxes.
+
+    Args:
+        bboxes (Tensor): The coordinates of bboxes. Shape: (m, 4)
+
+    Returns:
+        Tensor: Area of the bboxes. Shape: (m, )
+    """
+    assert bboxes.size(1) == 4
+    w = (bboxes[:, 2] - bboxes[:, 0])
+    h = (bboxes[:, 3] - bboxes[:, 1])
+    areas = w * h
+    return areas
+
+
+@BBOX_ASSIGNERS.register_module()
+class CenterRegionAssigner(BaseAssigner):
+    """Assign pixels at the center region of a bbox as positive.
+
+    Each proposal will be assigned with `-1` or a semi-positive integer
+    indicating the ground truth index.
+    - -1: negative samples
+    - semi-positive numbers: positive sample, index (0-based) of assigned gt
+
+    Args:
+        pos_scale (float): Threshold within which pixels are
+            labelled as positive.
+        neg_scale (float): Threshold above which pixels are
+            labelled as negative.
+        min_pos_iof (float): Minimum iof of a pixel with a gt to be
+            labelled as positive. Default: 1e-2
+        ignore_gt_scale (float): Threshold within which the pixels
+            are ignored when the gt is labelled as shadowed. Default: 0.5
+        foreground_dominate (bool): If True, the bbox will be assigned as
+            positive when a gt's kernel region overlaps with another's
+            shadowed (ignored) region, otherwise it is set as ignored.
+            Default to False.
+    """
+
+    def __init__(self,
+                 pos_scale,
+                 neg_scale,
+                 min_pos_iof=1e-2,
+                 ignore_gt_scale=0.5,
+                 foreground_dominate=False,
+                 iou_calculator=dict(type='BboxOverlaps2D')):
+        self.pos_scale = pos_scale
+        self.neg_scale = neg_scale
+        self.min_pos_iof = min_pos_iof
+        self.ignore_gt_scale = ignore_gt_scale
+        self.foreground_dominate = foreground_dominate
+        self.iou_calculator = build_iou_calculator(iou_calculator)
+
+    def get_gt_priorities(self, gt_bboxes):
+        """Get gt priorities according to their areas.
+
+        Smaller gt has higher priority.
+
+        Args:
+            gt_bboxes (Tensor): Ground truth boxes, shape (k, 4).
+
+        Returns:
+            Tensor: The priority of gts so that gts with larger priority are \
+                more likely to be assigned. Shape (k, )
+        """
+        gt_areas = bboxes_area(gt_bboxes)
+        # Rank all gt bbox areas. Smaller objects have larger priority
+        _, sort_idx = gt_areas.sort(descending=True)
+        sort_idx = sort_idx.argsort()
+        return sort_idx
+
+    def assign(self, bboxes, gt_bboxes, gt_bboxes_ignore=None, gt_labels=None):
+        """Assign gt to bboxes.
+
+        This method assigns gts to every bbox (proposal/anchor); each bbox \
+        will be assigned with -1, or a semi-positive number. -1 means \
+        negative sample, semi-positive number is the index (0-based) of \
+        assigned gt.
+
+        Args:
+            bboxes (Tensor): Bounding boxes to be assigned, shape (n, 4).
+            gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4).
+            gt_bboxes_ignore (tensor, optional): Ground truth bboxes that are
+                labelled as `ignored`, e.g., crowd boxes in COCO.
+            gt_labels (tensor, optional): Label of gt_bboxes, shape (num_gts,).
+
+        Returns:
+            :obj:`AssignResult`: The assigned result. Note that \
+                shadowed_labels of shape (N, 2) is also added as an \
+                `assign_result` attribute. `shadowed_labels` is a tensor \
+                composed of N pairs of [anchor_ind, class_label], where N \
+                is the number of anchors that lie in the outer region of a \
+                gt, anchor_ind is the shadowed anchor index and class_label \
+                is the shadowed class label.
+
+        Example:
+            >>> self = CenterRegionAssigner(0.2, 0.2)
+            >>> bboxes = torch.Tensor([[0, 0, 10, 10], [10, 10, 20, 20]])
+            >>> gt_bboxes = torch.Tensor([[0, 0, 10, 10]])
+            >>> assign_result = self.assign(bboxes, gt_bboxes)
+            >>> expected_gt_inds = torch.LongTensor([1, 0])
+            >>> assert torch.all(assign_result.gt_inds == expected_gt_inds)
+        """
+        # There are in total 5 steps in the pixel assignment
+        # 1. Find core (the center region, say inner 0.2)
+        #    and shadow (the relatively outer part, say inner 0.2-0.5)
+        #    regions of every gt.
+        # 2. Find all prior bboxes that lie in gt_core and gt_shadow regions
+        # 3. Assign prior bboxes in gt_core with a one-hot id of the gt in
+        #    the image.
+        #    3.1. For overlapping objects, the prior bboxes in gt_core is
+        #         assigned with the object with smallest area
+        # 4. Assign prior bboxes with class label according to its gt id.
+        #    4.1. Assign -1 to prior bboxes lying in shadowed gts
+        #    4.2. Assign positive prior boxes with the corresponding label
+        # 5. Find pixels lying in the shadow of an object and assign them with
+        #    background label, but set the loss weight of its corresponding
+        #    gt to zero.
+        assert bboxes.size(1) == 4, 'bboxes must have size of 4'
+        # 1. Find core positive and shadow region of every gt
+        gt_core = scale_boxes(gt_bboxes, self.pos_scale)
+        gt_shadow = scale_boxes(gt_bboxes, self.neg_scale)
+
+        # 2. Find prior bboxes that lie in gt_core and gt_shadow regions
+        bbox_centers = (bboxes[:, 2:4] + bboxes[:, 0:2]) / 2
+        # The center points lie within the gt boxes
+        is_bbox_in_gt = is_located_in(bbox_centers, gt_bboxes)
+        # Only calculate bbox and gt_core IoF. This enables small prior bboxes
+        #   to match large gts
+        bbox_and_gt_core_overlaps = self.iou_calculator(
+            bboxes, gt_core, mode='iof')
+        # The center point of effective priors should be within the gt box
+        is_bbox_in_gt_core = is_bbox_in_gt & (
+            bbox_and_gt_core_overlaps > self.min_pos_iof)  # shape (n, k)
+
+        is_bbox_in_gt_shadow = (
+            self.iou_calculator(bboxes, gt_shadow, mode='iof') >
+            self.min_pos_iof)
+        # Rule out center effective positive pixels
+        is_bbox_in_gt_shadow &= (~is_bbox_in_gt_core)
+
+        num_gts, num_bboxes = gt_bboxes.size(0), bboxes.size(0)
+        if num_gts == 0 or num_bboxes == 0:
+            # If no gts exist, assign all pixels to negative
+            assigned_gt_ids = \
+                is_bbox_in_gt_core.new_zeros((num_bboxes,),
+                                             dtype=torch.long)
+            pixels_in_gt_shadow = assigned_gt_ids.new_empty((0, 2))
+        else:
+            # Step 3: assign a one-hot gt id to each pixel, and smaller
+            #   objects have high priority to assign the pixel.
+            sort_idx = self.get_gt_priorities(gt_bboxes)
+            assigned_gt_ids, pixels_in_gt_shadow = \
+                self.assign_one_hot_gt_indices(is_bbox_in_gt_core,
+                                               is_bbox_in_gt_shadow,
+                                               gt_priority=sort_idx)
+
+        if gt_bboxes_ignore is not None and gt_bboxes_ignore.numel() > 0:
+            # Mark bboxes whose centers fall in (scaled) ignored gts as
+            #   ignored (-1)
+            gt_bboxes_ignore = scale_boxes(
+                gt_bboxes_ignore, scale=self.ignore_gt_scale)
+            is_bbox_in_ignored_gts = is_located_in(bbox_centers,
+                                                   gt_bboxes_ignore)
+            is_bbox_in_ignored_gts = is_bbox_in_ignored_gts.any(dim=1)
+            assigned_gt_ids[is_bbox_in_ignored_gts] = -1
+
+        # 4. Assign prior bboxes with class label according to its gt id.
+        assigned_labels = None
+        shadowed_pixel_labels = None
+        if gt_labels is not None:
+            # Default assigned label is the background (-1)
+            assigned_labels = assigned_gt_ids.new_full((num_bboxes, ), -1)
+            pos_inds = torch.nonzero(
+                assigned_gt_ids > 0, as_tuple=False).squeeze()
+            if pos_inds.numel() > 0:
+                assigned_labels[pos_inds] = gt_labels[assigned_gt_ids[pos_inds]
+                                                      - 1]
+            # 5. Find pixels lying in the shadow of an object
+            shadowed_pixel_labels = pixels_in_gt_shadow.clone()
+            if pixels_in_gt_shadow.numel() > 0:
+                pixel_idx, gt_idx = \
+                    pixels_in_gt_shadow[:, 0], pixels_in_gt_shadow[:, 1]
+                assert (assigned_gt_ids[pixel_idx] != gt_idx).all(), \
+                    'Some pixels are dually assigned to ignore and gt!'
+                shadowed_pixel_labels[:, 1] = gt_labels[gt_idx - 1]
+                override = (
+                    assigned_labels[pixel_idx] == shadowed_pixel_labels[:, 1])
+                if self.foreground_dominate:
+                    # When a pixel is both positive and shadowed, set it as pos
+                    shadowed_pixel_labels = shadowed_pixel_labels[~override]
+                else:
+                    # When a pixel is both pos and shadowed, set it as shadowed
+                    assigned_labels[pixel_idx[override]] = -1
+                    assigned_gt_ids[pixel_idx[override]] = 0
+
+        assign_result = AssignResult(
+            num_gts, assigned_gt_ids, None, labels=assigned_labels)
+        # Add shadowed_labels as assign_result property. Shape: (num_shadow, 2)
+        assign_result.set_extra_property('shadowed_labels',
+                                         shadowed_pixel_labels)
+        return assign_result
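Illustrative aside, not part of the diff: scale_boxes shrinks each gt about its center to produce the core and shadow regions used in step 1. A standalone sketch that mirrors the module-level function, with an assumed pos_scale of 0.2:

import torch

def scale_boxes(bboxes, scale):
    # Shrink/expand boxes about their centers by `scale`.
    w_half = (bboxes[:, 2] - bboxes[:, 0]) * 0.5 * scale
    h_half = (bboxes[:, 3] - bboxes[:, 1]) * 0.5 * scale
    x_c = (bboxes[:, 2] + bboxes[:, 0]) * 0.5
    y_c = (bboxes[:, 3] + bboxes[:, 1]) * 0.5
    return torch.stack([x_c - w_half, y_c - h_half,
                        x_c + w_half, y_c + h_half], dim=1)

gt = torch.tensor([[0., 0., 10., 10.]])
print(scale_boxes(gt, 0.2))  # tensor([[4., 4., 6., 6.]]): a 2x2 core of a 10x10 gt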
+
+    def assign_one_hot_gt_indices(self,
+                                  is_bbox_in_gt_core,
+                                  is_bbox_in_gt_shadow,
+                                  gt_priority=None):
+        """Assign only one gt index to each prior box.
+
+        Gts with large gt_priority are more likely to be assigned.
+
+        Args:
+            is_bbox_in_gt_core (Tensor): Bool tensor indicating the bbox
+                center is in the core area of a gt (e.g. 0-0.2).
+                Shape: (num_prior, num_gt).
+            is_bbox_in_gt_shadow (Tensor): Bool tensor indicating the bbox
+                center is in the shadowed area of a gt (e.g. 0.2-0.5).
+                Shape: (num_prior, num_gt).
+            gt_priority (Tensor): Priorities of gts. The gt with a higher
+                priority is more likely to be assigned to the bbox when the
+                bbox matches with multiple gts. Shape: (num_gt, ).
+
+        Returns:
+            tuple: Returns (assigned_gt_inds, shadowed_gt_inds).
+
+            - assigned_gt_inds: The assigned gt index of each prior bbox \
+                (i.e. index from 1 to num_gts). Shape: (num_prior, ).
+            - shadowed_gt_inds: shadowed gt indices. It is a tensor of \
+                shape (num_ignore, 2) with first column being the \
+                shadowed prior bbox indices and the second column the \
+                shadowed gt indices (1-based).
+        """
+        num_bboxes, num_gts = is_bbox_in_gt_core.shape
+
+        if gt_priority is None:
+            gt_priority = torch.arange(
+                num_gts, device=is_bbox_in_gt_core.device)
+        assert gt_priority.size(0) == num_gts
+        # The bigger gt_priority, the more preferable to be assigned
+        # The assigned inds are by default 0 (background)
+        assigned_gt_inds = is_bbox_in_gt_core.new_zeros((num_bboxes, ),
+                                                        dtype=torch.long)
+        # Shadowed bboxes are assigned to be background. But the corresponding
+        #   label is ignored during loss calculation, which is done through
+        #   shadowed_gt_inds
+        shadowed_gt_inds = torch.nonzero(is_bbox_in_gt_shadow, as_tuple=False)
+        if is_bbox_in_gt_core.sum() == 0:  # No gt match
+            shadowed_gt_inds[:, 1] += 1  # 1-based. For consistency issue
+            return assigned_gt_inds, shadowed_gt_inds
+
+        # The priority of each prior box and gt pair. If one prior box is
+        #   matched to multiple gts, only the pair with the highest priority
+        #   is saved
+        pair_priority = is_bbox_in_gt_core.new_full((num_bboxes, num_gts),
+                                                    -1,
+                                                    dtype=torch.long)
+
+        # Each bbox could match with multiple gts.
+        # The following codes deal with this situation
+        # Matched bboxes (to any gt). Shape: (num_pos_anchor, )
+        inds_of_match = torch.any(is_bbox_in_gt_core, dim=1)
+        # The matched gt index of each positive bbox. Length >= num_pos_anchor
+        #   , since one bbox could match multiple gts
+        matched_bbox_gt_inds = torch.nonzero(
+            is_bbox_in_gt_core, as_tuple=False)[:, 1]
+        # Assign priority to each bbox-gt pair.
+        pair_priority[is_bbox_in_gt_core] = gt_priority[matched_bbox_gt_inds]
+        _, argmax_priority = pair_priority[inds_of_match].max(dim=1)
+        assigned_gt_inds[inds_of_match] = argmax_priority + 1  # 1-based
+        # Zero-out the assigned anchor box to filter the shadowed gt indices
+        is_bbox_in_gt_core[inds_of_match, argmax_priority] = 0
+        # Concat the shadowed indices due to overlapping with that outside of
+        #   the effective scale. shape: (total_num_ignore, 2)
+        shadowed_gt_inds = torch.cat(
+            (shadowed_gt_inds, torch.nonzero(
+                is_bbox_in_gt_core, as_tuple=False)),
+            dim=0)
+        # `is_bbox_in_gt_core` should be changed back to keep arguments intact.
+        is_bbox_in_gt_core[inds_of_match, argmax_priority] = 1
+        # 1-based shadowed gt indices, to be consistent with `assigned_gt_inds`
+        if shadowed_gt_inds.numel() > 0:
+            shadowed_gt_inds[:, 1] += 1
+        return assigned_gt_inds, shadowed_gt_inds
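Illustrative aside, not part of the diff: the double sort in get_gt_priorities maps areas to ranks so that the smallest gt wins when a pixel sits in several cores. A standalone sketch with made-up areas:

import torch

gt_areas = torch.tensor([100., 25., 400.])
_, sort_idx = gt_areas.sort(descending=True)
priority = sort_idx.argsort()
print(priority)  # tensor([1, 2, 0]): the 25-area gt gets the largest priority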
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/grid_assigner.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/grid_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..7390ea6370639c939d578c6ebf0f9268499161bc
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/grid_assigner.py
@@ -0,0 +1,155 @@
+import torch
+
+from ..builder import BBOX_ASSIGNERS
+from ..iou_calculators import build_iou_calculator
+from .assign_result import AssignResult
+from .base_assigner import BaseAssigner
+
+
+@BBOX_ASSIGNERS.register_module()
+class GridAssigner(BaseAssigner):
+    """Assign a corresponding gt bbox or background to each bbox.
+
+    Each proposal will be assigned with `-1`, `0`, or a positive integer
+    indicating the ground truth index.
+
+    - -1: don't care
+    - 0: negative sample, no assigned gt
+    - positive integer: positive sample, index (1-based) of assigned gt
+
+    Args:
+        pos_iou_thr (float): IoU threshold for positive bboxes.
+        neg_iou_thr (float or tuple): IoU threshold for negative bboxes.
+        min_pos_iou (float): Minimum IoU for a bbox to be considered as a
+            positive bbox. Positive samples can have smaller IoU than
+            pos_iou_thr due to the 4th step (assign max IoU sample to each gt).
+        gt_max_assign_all (bool): Whether to assign all bboxes with the same
+            highest overlap with some gt to that gt.
+    """
+
+    def __init__(self,
+                 pos_iou_thr,
+                 neg_iou_thr,
+                 min_pos_iou=.0,
+                 gt_max_assign_all=True,
+                 iou_calculator=dict(type='BboxOverlaps2D')):
+        self.pos_iou_thr = pos_iou_thr
+        self.neg_iou_thr = neg_iou_thr
+        self.min_pos_iou = min_pos_iou
+        self.gt_max_assign_all = gt_max_assign_all
+        self.iou_calculator = build_iou_calculator(iou_calculator)
+
+    def assign(self, bboxes, box_responsible_flags, gt_bboxes, gt_labels=None):
+        """Assign gt to bboxes. The process is very much like the max iou
+        assigner, except that positive samples are constrained within the cell
+        that the gt boxes fell in.
+
+        This method assigns a gt bbox to every bbox (proposal/anchor); each
+        bbox will be assigned with -1, 0, or a positive number. -1 means don't
+        care, 0 means negative sample, positive number is the index (1-based)
+        of assigned gt.
+        The assignment is done in following steps, the order matters.
+
+        1. assign every bbox to -1
+        2. assign proposals whose iou with all gts <= neg_iou_thr to 0
+        3. for each bbox within a cell, if the iou with its nearest gt >
+           pos_iou_thr and the center of that gt falls inside the cell,
+           assign it to that bbox
+        4. for each gt bbox, assign its nearest proposals within the cell the
+           gt bbox falls in to itself.
+
+        Args:
+            bboxes (Tensor): Bounding boxes to be assigned, shape (n, 4).
+            box_responsible_flags (Tensor): flag to indicate whether box is
+                responsible for prediction, shape (n, )
+            gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4).
+            gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ).
+
+        Returns:
+            :obj:`AssignResult`: The assign result.
+        """
+        num_gts, num_bboxes = gt_bboxes.size(0), bboxes.size(0)
+
+        # compute iou between all gt and bboxes
+        overlaps = self.iou_calculator(gt_bboxes, bboxes)
+
+        # 1. assign -1 by default
+        assigned_gt_inds = overlaps.new_full((num_bboxes, ),
+                                             -1,
+                                             dtype=torch.long)
+
+        if num_gts == 0 or num_bboxes == 0:
+            # No ground truth or boxes, return empty assignment
+            max_overlaps = overlaps.new_zeros((num_bboxes, ))
+            if num_gts == 0:
+                # No truth, assign everything to background
+                assigned_gt_inds[:] = 0
+            if gt_labels is None:
+                assigned_labels = None
+            else:
+                assigned_labels = overlaps.new_full((num_bboxes, ),
+                                                    -1,
+                                                    dtype=torch.long)
+            return AssignResult(
+                num_gts,
+                assigned_gt_inds,
+                max_overlaps,
+                labels=assigned_labels)
+
+        # 2. assign negative: below
+        # for each anchor, which gt best overlaps with it
+        # for each anchor, the max iou of all gts
+        # shape of max_overlaps == argmax_overlaps == num_bboxes
+        max_overlaps, argmax_overlaps = overlaps.max(dim=0)
+
+        if isinstance(self.neg_iou_thr, float):
+            assigned_gt_inds[(max_overlaps >= 0)
+                             & (max_overlaps <= self.neg_iou_thr)] = 0
+        elif isinstance(self.neg_iou_thr, (tuple, list)):
+            assert len(self.neg_iou_thr) == 2
+            assigned_gt_inds[(max_overlaps > self.neg_iou_thr[0])
+                             & (max_overlaps <= self.neg_iou_thr[1])] = 0
+
+        # 3. assign positive: falls into responsible cell and above
+        # positive IOU threshold, the order matters.
+        # the prior condition of comparison is to filter out all
+        # unrelated anchors, i.e. not box_responsible_flags
+        overlaps[:, ~box_responsible_flags.type(torch.bool)] = -1.
+
+        # calculate max_overlaps again, but this time we only consider IOUs
+        # for anchors responsible for prediction
+        max_overlaps, argmax_overlaps = overlaps.max(dim=0)
+
+        # for each gt, which anchor best overlaps with it
+        # for each gt, the max iou of all proposals
+        # shape of gt_max_overlaps == gt_argmax_overlaps == num_gts
+        gt_max_overlaps, gt_argmax_overlaps = overlaps.max(dim=1)
+
+        pos_inds = (max_overlaps >
+                    self.pos_iou_thr) & box_responsible_flags.type(torch.bool)
+        assigned_gt_inds[pos_inds] = argmax_overlaps[pos_inds] + 1
+
+        # 4. assign positive to max overlapped anchors within responsible cell
+        for i in range(num_gts):
+            if gt_max_overlaps[i] > self.min_pos_iou:
+                if self.gt_max_assign_all:
+                    max_iou_inds = (overlaps[i, :] == gt_max_overlaps[i]) & \
+                        box_responsible_flags.type(torch.bool)
+                    assigned_gt_inds[max_iou_inds] = i + 1
+                elif box_responsible_flags[gt_argmax_overlaps[i]]:
+                    assigned_gt_inds[gt_argmax_overlaps[i]] = i + 1
+
+        # assign labels of positive anchors
+        if gt_labels is not None:
+            assigned_labels = assigned_gt_inds.new_full((num_bboxes, ), -1)
+            pos_inds = torch.nonzero(
+                assigned_gt_inds > 0, as_tuple=False).squeeze()
+            if pos_inds.numel() > 0:
+                assigned_labels[pos_inds] = gt_labels[
+                    assigned_gt_inds[pos_inds] - 1]
+
+        else:
+            assigned_labels = None
+
+        return AssignResult(
+            num_gts, assigned_gt_inds, max_overlaps, labels=assigned_labels)
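Illustrative aside, not part of the diff: masking the IoUs of non-responsible anchors to -1 before step 3 is what confines positives to the gt's cell. A standalone sketch with made-up values:

import torch

overlaps = torch.tensor([[0.6, 0.7, 0.2]])       # (num_gt=1, num_anchors=3)
responsible = torch.tensor([False, True, True])
overlaps[:, ~responsible] = -1.
max_overlaps, argmax = overlaps.max(dim=0)
print(max_overlaps)  # tensor([-1.0000,  0.7000,  0.2000]): anchor 0 can no longer win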
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/hungarian_assigner.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/hungarian_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..e10cc14afac4ddfcb9395c1a250ece1fbfe3263c
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/hungarian_assigner.py
@@ -0,0 +1,145 @@
+import torch
+
+from ..builder import BBOX_ASSIGNERS
+from ..match_costs import build_match_cost
+from ..transforms import bbox_cxcywh_to_xyxy
+from .assign_result import AssignResult
+from .base_assigner import BaseAssigner
+
+try:
+    from scipy.optimize import linear_sum_assignment
+except ImportError:
+    linear_sum_assignment = None
+
+
+@BBOX_ASSIGNERS.register_module()
+class HungarianAssigner(BaseAssigner):
+    """Computes one-to-one matching between predictions and ground truth.
+
+    This class computes an assignment between the targets and the predictions
+    based on the costs. The costs are weighted sum of three components:
+    classification cost, regression L1 cost and regression iou cost. The
+    targets don't include the no_object, so generally there are more
+    predictions than targets. After the one-to-one matching, the un-matched
+    are treated as backgrounds. Thus each query prediction will be assigned
+    with `0` or a positive integer indicating the ground truth index:
+
+    - 0: negative sample, no assigned gt
+    - positive integer: positive sample, index (1-based) of assigned gt
+
+    Args:
+        cls_cost (dict, optional): The config for the classification cost.
+            Default dict(type='ClassificationCost', weight=1.).
+        reg_cost (dict, optional): The config for the regression L1 cost.
+            Default dict(type='BBoxL1Cost', weight=1.0).
+        iou_cost (dict, optional): The config for the regression iou cost,
+            where iou_mode is "iou" (intersection over union), "iof"
+            (intersection over foreground), or "giou" (generalized
+            intersection over union). Default
+            dict(type='IoUCost', iou_mode='giou', weight=1.0).
+    """
+
+    def __init__(self,
+                 cls_cost=dict(type='ClassificationCost', weight=1.),
+                 reg_cost=dict(type='BBoxL1Cost', weight=1.0),
+                 iou_cost=dict(type='IoUCost', iou_mode='giou', weight=1.0)):
+        self.cls_cost = build_match_cost(cls_cost)
+        self.reg_cost = build_match_cost(reg_cost)
+        self.iou_cost = build_match_cost(iou_cost)
+
+    def assign(self,
+               bbox_pred,
+               cls_pred,
+               gt_bboxes,
+               gt_labels,
+               img_meta,
+               gt_bboxes_ignore=None,
+               eps=1e-7):
+        """Computes one-to-one matching based on the weighted costs.
+
+        This method assigns each query prediction to a ground truth or
+        background. The `assigned_gt_inds` with -1 means don't care,
+        0 means negative sample, and positive number is the index (1-based)
+        of assigned gt.
+        The assignment is done in the following steps, the order matters.
+
+        1. assign every prediction to -1
+        2. compute the weighted costs
+        3. do Hungarian matching on CPU based on the costs
+        4. assign all to 0 (background) first, then for each matched pair
+           between predictions and gts, treat this prediction as foreground
+           and assign the corresponding gt index (plus 1) to it.
+
+        Args:
+            bbox_pred (Tensor): Predicted boxes with normalized coordinates
+                (cx, cy, w, h), which are all in range [0, 1]. Shape
+                [num_query, 4].
+            cls_pred (Tensor): Predicted classification logits, shape
+                [num_query, num_class].
+            gt_bboxes (Tensor): Ground truth boxes with unnormalized
+                coordinates (x1, y1, x2, y2). Shape [num_gt, 4].
+            gt_labels (Tensor): Label of `gt_bboxes`, shape (num_gt,).
+            img_meta (dict): Meta information for current image.
+            gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are
+                labelled as `ignored`. Default None.
+            eps (int | float, optional): A value added to the denominator for
+                numerical stability. Default 1e-7.
+
+        Returns:
+            :obj:`AssignResult`: The assigned result.
+        """
+        assert gt_bboxes_ignore is None, \
+            'Only case when gt_bboxes_ignore is None is supported.'
+        num_gts, num_bboxes = gt_bboxes.size(0), bbox_pred.size(0)
+
+        # 1. assign -1 by default
+        assigned_gt_inds = bbox_pred.new_full((num_bboxes, ),
+                                              -1,
+                                              dtype=torch.long)
+        assigned_labels = bbox_pred.new_full((num_bboxes, ),
+                                             -1,
+                                             dtype=torch.long)
+        if num_gts == 0 or num_bboxes == 0:
+            # No ground truth or boxes, return empty assignment
+            if num_gts == 0:
+                # No ground truth, assign all to background
+                assigned_gt_inds[:] = 0
+            return AssignResult(
+                num_gts, assigned_gt_inds, None, labels=assigned_labels)
+        img_h, img_w, _ = img_meta['img_shape']
+        factor = gt_bboxes.new_tensor([img_w, img_h, img_w,
+                                       img_h]).unsqueeze(0)
+
+        # 2. compute the weighted costs
+        # classification and bbox cost
+        cls_cost = self.cls_cost(cls_pred, gt_labels)
+        # regression L1 cost
+        normalize_gt_bboxes = gt_bboxes / factor
+        reg_cost = self.reg_cost(bbox_pred, normalize_gt_bboxes)
+        # regression iou cost; GIoU is used by default in official DETR
+        bboxes = bbox_cxcywh_to_xyxy(bbox_pred) * factor
+        iou_cost = self.iou_cost(bboxes, gt_bboxes)
+        # weighted sum of above three costs
+        cost = cls_cost + reg_cost + iou_cost
+
+        # 3. do Hungarian matching on CPU using linear_sum_assignment
+        cost = cost.detach().cpu()
+        if linear_sum_assignment is None:
+            raise ImportError('Please run "pip install scipy" '
+                              'to install scipy first.')
+        matched_row_inds, matched_col_inds = linear_sum_assignment(cost)
+        matched_row_inds = torch.from_numpy(matched_row_inds).to(
+            bbox_pred.device)
+        matched_col_inds = torch.from_numpy(matched_col_inds).to(
+            bbox_pred.device)
+
+        # 4. assign backgrounds and foregrounds
+        # assign all indices to backgrounds first
+        assigned_gt_inds[:] = 0
+        # assign foregrounds based on matching results
+        assigned_gt_inds[matched_row_inds] = matched_col_inds + 1
+        assigned_labels[matched_row_inds] = gt_labels[matched_col_inds]
+        return AssignResult(
+            num_gts, assigned_gt_inds, None, labels=assigned_labels)
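Illustrative aside, not part of the diff: step 3 above is plain scipy.optimize.linear_sum_assignment on the summed cost matrix (hence the guarded import). A standalone sketch on a toy cost matrix:

import numpy as np
from scipy.optimize import linear_sum_assignment

cost = np.array([[4., 1., 3.],
                 [2., 0., 5.],
                 [3., 2., 2.]])
rows, cols = linear_sum_assignment(cost)
print(list(zip(rows, cols)))  # [(0, 1), (1, 0), (2, 2)]: minimal total cost 5.0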
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/max_iou_assigner.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/max_iou_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..5cf4c4b4b450f87dfb99c3d33d8ed83d3e5cfcb3
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/max_iou_assigner.py
@@ -0,0 +1,212 @@
+import torch
+
+from ..builder import BBOX_ASSIGNERS
+from ..iou_calculators import build_iou_calculator
+from .assign_result import AssignResult
+from .base_assigner import BaseAssigner
+
+
+@BBOX_ASSIGNERS.register_module()
+class MaxIoUAssigner(BaseAssigner):
+    """Assign a corresponding gt bbox or background to each bbox.
+
+    Each proposal will be assigned with `-1`, or a semi-positive integer
+    indicating the ground truth index.
+
+    - -1: negative sample, no assigned gt
+    - semi-positive integer: positive sample, index (0-based) of assigned gt
+
+    Args:
+        pos_iou_thr (float): IoU threshold for positive bboxes.
+        neg_iou_thr (float or tuple): IoU threshold for negative bboxes.
+        min_pos_iou (float): Minimum IoU for a bbox to be considered as a
+            positive bbox. Positive samples can have smaller IoU than
+            pos_iou_thr due to the 4th step (assign max IoU sample to each gt).
+        gt_max_assign_all (bool): Whether to assign all bboxes with the same
+            highest overlap with some gt to that gt.
+        ignore_iof_thr (float): IoF threshold for ignoring bboxes (if
+            `gt_bboxes_ignore` is specified). Negative values mean not
+            ignoring any bboxes.
+        ignore_wrt_candidates (bool): Whether to compute the iof between
+            `bboxes` and `gt_bboxes_ignore`, or the contrary.
+        match_low_quality (bool): Whether to allow low quality matches. This
+            is usually allowed for RPN and single stage detectors, but not
+            allowed in the second stage. Details are demonstrated in Step 4.
+        gpu_assign_thr (int): The upper bound of the number of GT for GPU
+            assign. When the number of gt is above this threshold, will assign
+            on CPU device. Negative values mean not assign on CPU.
+    """
+
+    def __init__(self,
+                 pos_iou_thr,
+                 neg_iou_thr,
+                 min_pos_iou=.0,
+                 gt_max_assign_all=True,
+                 ignore_iof_thr=-1,
+                 ignore_wrt_candidates=True,
+                 match_low_quality=True,
+                 gpu_assign_thr=-1,
+                 iou_calculator=dict(type='BboxOverlaps2D')):
+        self.pos_iou_thr = pos_iou_thr
+        self.neg_iou_thr = neg_iou_thr
+        self.min_pos_iou = min_pos_iou
+        self.gt_max_assign_all = gt_max_assign_all
+        self.ignore_iof_thr = ignore_iof_thr
+        self.ignore_wrt_candidates = ignore_wrt_candidates
+        self.gpu_assign_thr = gpu_assign_thr
+        self.match_low_quality = match_low_quality
+        self.iou_calculator = build_iou_calculator(iou_calculator)
+
+    def assign(self, bboxes, gt_bboxes, gt_bboxes_ignore=None, gt_labels=None):
+        """Assign gt to bboxes.
+
+        This method assigns a gt bbox to every bbox (proposal/anchor); each
+        bbox will be assigned with -1, or a semi-positive number. -1 means
+        negative sample, semi-positive number is the index (0-based) of
+        assigned gt.
+        The assignment is done in following steps, the order matters.
+
+        1. assign every bbox to the background
+        2. assign proposals whose iou with all gts < neg_iou_thr to 0
+        3. for each bbox, if the iou with its nearest gt >= pos_iou_thr,
+           assign it to that bbox
+        4. for each gt bbox, assign its nearest proposals (may be more than
+           one) to itself
+
+        Args:
+            bboxes (Tensor): Bounding boxes to be assigned, shape (n, 4).
+            gt_bboxes (Tensor): Groundtruth boxes, shape (k, 4).
+            gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are
+                labelled as `ignored`, e.g., crowd boxes in COCO.
+            gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ).
+
+        Returns:
+            :obj:`AssignResult`: The assign result.
+
+        Example:
+            >>> self = MaxIoUAssigner(0.5, 0.5)
+            >>> bboxes = torch.Tensor([[0, 0, 10, 10], [10, 10, 20, 20]])
+            >>> gt_bboxes = torch.Tensor([[0, 0, 10, 9]])
+            >>> assign_result = self.assign(bboxes, gt_bboxes)
+            >>> expected_gt_inds = torch.LongTensor([1, 0])
+            >>> assert torch.all(assign_result.gt_inds == expected_gt_inds)
+        """
+        assign_on_cpu = True if (self.gpu_assign_thr > 0) and (
+            gt_bboxes.shape[0] > self.gpu_assign_thr) else False
+        # compute overlap and assign gt on CPU when number of GT is large
+        if assign_on_cpu:
+            device = bboxes.device
+            bboxes = bboxes.cpu()
+            gt_bboxes = gt_bboxes.cpu()
+            if gt_bboxes_ignore is not None:
+                gt_bboxes_ignore = gt_bboxes_ignore.cpu()
+            if gt_labels is not None:
+                gt_labels = gt_labels.cpu()
+
+        overlaps = self.iou_calculator(gt_bboxes, bboxes)
+
+        if (self.ignore_iof_thr > 0 and gt_bboxes_ignore is not None
+                and gt_bboxes_ignore.numel() > 0 and bboxes.numel() > 0):
+            if self.ignore_wrt_candidates:
+                ignore_overlaps = self.iou_calculator(
+                    bboxes, gt_bboxes_ignore, mode='iof')
+                ignore_max_overlaps, _ = ignore_overlaps.max(dim=1)
+            else:
+                ignore_overlaps = self.iou_calculator(
+                    gt_bboxes_ignore, bboxes, mode='iof')
+                ignore_max_overlaps, _ = ignore_overlaps.max(dim=0)
+            overlaps[:, ignore_max_overlaps > self.ignore_iof_thr] = -1
+
+        assign_result = self.assign_wrt_overlaps(overlaps, gt_labels)
+        if assign_on_cpu:
+            assign_result.gt_inds = assign_result.gt_inds.to(device)
+            assign_result.max_overlaps = assign_result.max_overlaps.to(device)
+            if assign_result.labels is not None:
+                assign_result.labels = assign_result.labels.to(device)
+        return assign_result
+
+    def assign_wrt_overlaps(self, overlaps, gt_labels=None):
+        """Assign w.r.t. the overlaps of bboxes with gts.
+
+        Args:
+            overlaps (Tensor): Overlaps between k gt_bboxes and n bboxes,
+                shape (k, n).
+            gt_labels (Tensor, optional): Labels of k gt_bboxes, shape (k, ).
+
+        Returns:
+            :obj:`AssignResult`: The assign result.
+        """
+        num_gts, num_bboxes = overlaps.size(0), overlaps.size(1)
+
+        # 1. assign -1 by default
+        assigned_gt_inds = overlaps.new_full((num_bboxes, ),
+                                             -1,
+                                             dtype=torch.long)
+
+        if num_gts == 0 or num_bboxes == 0:
+            # No ground truth or boxes, return empty assignment
+            max_overlaps = overlaps.new_zeros((num_bboxes, ))
+            if num_gts == 0:
+                # No truth, assign everything to background
+                assigned_gt_inds[:] = 0
+            if gt_labels is None:
+                assigned_labels = None
+            else:
+                assigned_labels = overlaps.new_full((num_bboxes, ),
+                                                    -1,
+                                                    dtype=torch.long)
+            return AssignResult(
+                num_gts,
+                assigned_gt_inds,
+                max_overlaps,
+                labels=assigned_labels)
+
+        # for each anchor, which gt best overlaps with it
+        # for each anchor, the max iou of all gts
+        max_overlaps, argmax_overlaps = overlaps.max(dim=0)
+        # for each gt, which anchor best overlaps with it
+        # for each gt, the max iou of all proposals
+        gt_max_overlaps, gt_argmax_overlaps = overlaps.max(dim=1)
+
+        # 2. assign negative: below
+        # the negative inds are set to be 0
+        if isinstance(self.neg_iou_thr, float):
+            assigned_gt_inds[(max_overlaps >= 0)
+                             & (max_overlaps < self.neg_iou_thr)] = 0
+        elif isinstance(self.neg_iou_thr, tuple):
+            assert len(self.neg_iou_thr) == 2
+            assigned_gt_inds[(max_overlaps >= self.neg_iou_thr[0])
+                             & (max_overlaps < self.neg_iou_thr[1])] = 0
+
+        # 3. assign positive: above positive IoU threshold
+        pos_inds = max_overlaps >= self.pos_iou_thr
+        assigned_gt_inds[pos_inds] = argmax_overlaps[pos_inds] + 1
+
+        if self.match_low_quality:
+            # Low-quality matching will overwrite the assigned_gt_inds
+            # assigned in Step 3. Thus, the assigned gt might not be the best
+            # one for prediction.
+            # For example, if bbox A has 0.9 and 0.8 iou with GT bbox 1 & 2,
+            # GT bbox 1 will be assigned as the best target for bbox A in
+            # step 3. However, if GT bbox 2's gt_argmax_overlaps = A, bbox A's
+            # assigned_gt_inds will be overwritten to be GT bbox 2.
+            # This might be the reason that it is not used in ROI Heads.
+            for i in range(num_gts):
+                if gt_max_overlaps[i] >= self.min_pos_iou:
+                    if self.gt_max_assign_all:
+                        max_iou_inds = overlaps[i, :] == gt_max_overlaps[i]
+                        assigned_gt_inds[max_iou_inds] = i + 1
+                    else:
+                        assigned_gt_inds[gt_argmax_overlaps[i]] = i + 1
+
+        if gt_labels is not None:
+            assigned_labels = assigned_gt_inds.new_full((num_bboxes, ), -1)
+            pos_inds = torch.nonzero(
+                assigned_gt_inds > 0, as_tuple=False).squeeze()
+            if pos_inds.numel() > 0:
+                assigned_labels[pos_inds] = gt_labels[
+                    assigned_gt_inds[pos_inds] - 1]
+        else:
+            assigned_labels = None
+
+        return AssignResult(
+            num_gts, assigned_gt_inds, max_overlaps, labels=assigned_labels)
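Illustrative aside, not part of the diff: when neg_iou_thr is a tuple, only anchors whose best IoU falls in the [lo, hi) band become negatives in step 2; the rest keep the don't-care default. A standalone sketch with made-up thresholds:

import torch

max_overlaps = torch.tensor([0.05, 0.2, 0.45, 0.8])
assigned = torch.full((4,), -1, dtype=torch.long)  # step-1 default
lo, hi = 0.1, 0.3
assigned[(max_overlaps >= lo) & (max_overlaps < hi)] = 0
print(assigned)  # tensor([-1,  0, -1, -1]): only the 0.2 anchor is negative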
+            gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are
+                labelled as `ignored`, e.g., crowd boxes in COCO.
+                NOTE: currently unused.
+            gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ).
+
+        Returns:
+            :obj:`AssignResult`: The assign result.
+        """
+        num_points = points.shape[0]
+        num_gts = gt_bboxes.shape[0]
+
+        if num_gts == 0 or num_points == 0:
+            # No ground truth or points, assign everything to the background
+            assigned_gt_inds = points.new_full((num_points, ),
+                                               0,
+                                               dtype=torch.long)
+            if gt_labels is None:
+                assigned_labels = None
+            else:
+                assigned_labels = points.new_full((num_points, ),
+                                                  -1,
+                                                  dtype=torch.long)
+            return AssignResult(
+                num_gts, assigned_gt_inds, None, labels=assigned_labels)
+
+        points_xy = points[:, :2]
+        points_stride = points[:, 2]
+        points_lvl = torch.log2(
+            points_stride).int()  # [3...,4...,5...,6...,7...]
+        lvl_min, lvl_max = points_lvl.min(), points_lvl.max()
+
+        # assign gt box
+        gt_bboxes_xy = (gt_bboxes[:, :2] + gt_bboxes[:, 2:]) / 2
+        gt_bboxes_wh = (gt_bboxes[:, 2:] - gt_bboxes[:, :2]).clamp(min=1e-6)
+        scale = self.scale
+        gt_bboxes_lvl = ((torch.log2(gt_bboxes_wh[:, 0] / scale) +
+                          torch.log2(gt_bboxes_wh[:, 1] / scale)) / 2).int()
+        gt_bboxes_lvl = torch.clamp(gt_bboxes_lvl, min=lvl_min, max=lvl_max)
+
+        # stores the assigned gt index of each point
+        assigned_gt_inds = points.new_zeros((num_points, ), dtype=torch.long)
+        # stores the assigned gt dist (to this point) of each point
+        assigned_gt_dist = points.new_full((num_points, ), float('inf'))
+        points_range = torch.arange(points.shape[0])
+
+        for idx in range(num_gts):
+            gt_lvl = gt_bboxes_lvl[idx]
+            # get the index of points in this level
+            lvl_idx = gt_lvl == points_lvl
+            points_index = points_range[lvl_idx]
+            # get the points in this level
+            lvl_points = points_xy[lvl_idx, :]
+            # get the center point of gt
+            gt_point = gt_bboxes_xy[[idx], :]
+            # get width and height of gt
+            gt_wh = gt_bboxes_wh[[idx], :]
+            # compute the distance between gt center and
+            #   all points in this level
+            points_gt_dist = ((lvl_points - gt_point) / gt_wh).norm(dim=1)
+            # find the nearest k points to gt center in this level
+            min_dist, min_dist_index = torch.topk(
+                points_gt_dist, self.pos_num, largest=False)
+            # the index of nearest k points to gt center in this level
+            min_dist_points_index = points_index[min_dist_index]
+            # less_than_recorded_index stores the indices of min_dist that
+            #   are less than the recorded assigned_gt_dist, where
+            #   assigned_gt_dist stores the dist from the previously
+            #   assigned gt (if any) to each point.
+            less_than_recorded_index = min_dist < assigned_gt_dist[
+                min_dist_points_index]
+            # min_dist_points_index stores the indices of points that
+            #   satisfy: (1) among the k nearest to the current gt center
+            #   in this level; (2) closer to the current gt center than to
+            #   any other gt center.
+            min_dist_points_index = min_dist_points_index[
+                less_than_recorded_index]
+            # assign the result
+            assigned_gt_inds[min_dist_points_index] = idx + 1
+            assigned_gt_dist[min_dist_points_index] = min_dist[
+                less_than_recorded_index]
+
+        if gt_labels is not None:
+            assigned_labels = assigned_gt_inds.new_full((num_points, ), -1)
+            pos_inds = torch.nonzero(
+                assigned_gt_inds > 0, as_tuple=False).squeeze()
+            if pos_inds.numel() > 0:
+                assigned_labels[pos_inds] = gt_labels[
+                    assigned_gt_inds[pos_inds] - 1]
+        else:
+            assigned_labels = None
+
+        return AssignResult(
+            num_gts, assigned_gt_inds, None, labels=assigned_labels)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/region_assigner.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/region_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..2e8464b97c8d8f44488d7bb781ca2e733a258e55
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/region_assigner.py
@@ -0,0 +1,221 @@
+import torch
+
+from mmdet.core import anchor_inside_flags
+from ..builder import BBOX_ASSIGNERS
+from .assign_result import AssignResult
+from .base_assigner import BaseAssigner
+
+
+def calc_region(bbox, ratio, stride, featmap_size=None):
+    """Calculate the region of the box defined by the ratio; the ratio
+    measures from the center of the box to every edge."""
+    # project bbox on the feature
+    f_bbox = bbox / stride
+    x1 = torch.round((1 - ratio) * f_bbox[0] + ratio * f_bbox[2])
+    y1 = torch.round((1 - ratio) * f_bbox[1] + ratio * f_bbox[3])
+    x2 = torch.round(ratio * f_bbox[0] + (1 - ratio) * f_bbox[2])
+    y2 = torch.round(ratio * f_bbox[1] + (1 - ratio) * f_bbox[3])
+    if featmap_size is not None:
+        x1 = x1.clamp(min=0, max=featmap_size[1])
+        y1 = y1.clamp(min=0, max=featmap_size[0])
+        x2 = x2.clamp(min=0, max=featmap_size[1])
+        y2 = y2.clamp(min=0, max=featmap_size[0])
+    return (x1, y1, x2, y2)
+
+
+def anchor_ctr_inside_region_flags(anchors, stride, region):
+    """Get flags indicating whether anchor centers are inside regions."""
+    x1, y1, x2, y2 = region
+    f_anchors = anchors / stride
+    x = (f_anchors[:, 0] + f_anchors[:, 2]) * 0.5
+    y = (f_anchors[:, 1] + f_anchors[:, 3]) * 0.5
+    flags = (x >= x1) & (x <= x2) & (y >= y1) & (y <= y2)
+    return flags
+
+
+@BBOX_ASSIGNERS.register_module()
+class RegionAssigner(BaseAssigner):
+    """Assign a corresponding gt bbox or background to each bbox.
+
+    Each proposal will be assigned with `-1`, `0`, or a positive integer
+    indicating the ground truth index.
+
+    - -1: don't care
+    - 0: negative sample, no assigned gt
+    - positive integer: positive sample, index (1-based) of assigned gt
+
+    Args:
+        center_ratio: ratio of the region in the center of the bbox to
+            define positive sample.
+        ignore_ratio: ratio of the region to define ignore samples.
+    """
+
+    def __init__(self, center_ratio=0.2, ignore_ratio=0.5):
+        self.center_ratio = center_ratio
+        self.ignore_ratio = ignore_ratio
+
+    def assign(self,
+               mlvl_anchors,
+               mlvl_valid_flags,
+               gt_bboxes,
+               img_meta,
+               featmap_sizes,
+               anchor_scale,
+               anchor_strides,
+               gt_bboxes_ignore=None,
+               gt_labels=None,
+               allowed_border=0):
+        """Assign gt to anchors.
+
+        This method assigns a gt bbox to every bbox (proposal/anchor); each
+        bbox will be assigned with -1, 0, or a positive number. -1 means
+        don't care, 0 means negative sample, positive number is the index
+        (1-based) of assigned gt.
+        The assignment is done in the following steps; the order matters.
+
+        1.
Assign every anchor to 0 (negative)
+          For each gt bbox:
+            2. Compute ignore flags based on ignore_region then
+               assign -1 to anchors w.r.t. ignore flags
+            3. Compute pos flags based on center_region then
+               assign gt_bboxes to anchors w.r.t. pos flags
+            4. Compute ignore flags based on adjacent anchor lvl then
+               assign -1 to anchors w.r.t. ignore flags
+            5. Assign anchor outside of image to -1
+
+        Args:
+            mlvl_anchors (list[Tensor]): Multi level anchors.
+            mlvl_valid_flags (list[Tensor]): Multi level valid flags.
+            gt_bboxes (Tensor): Ground truth bboxes of the image,
+                shape (k, 4).
+            img_meta (dict): Meta info of image.
+            featmap_sizes (list[Tensor]): Feature map sizes of each level.
+            anchor_scale (int): Scale of the anchor.
+            anchor_strides (list[int]): Stride of the anchor.
+            gt_bboxes_ignore (Tensor, optional): Ground truth bboxes that are
+                labelled as `ignored`, e.g., crowd boxes in COCO.
+            gt_labels (Tensor, optional): Label of gt_bboxes, shape (k, ).
+            allowed_border (int, optional): The border to allow the valid
+                anchor. Defaults to 0.
+
+        Returns:
+            :obj:`AssignResult`: The assign result.
+        """
+        if gt_bboxes_ignore is not None:
+            raise NotImplementedError
+
+        num_gts = gt_bboxes.shape[0]
+        num_bboxes = sum(x.shape[0] for x in mlvl_anchors)
+
+        if num_gts == 0 or num_bboxes == 0:
+            # No ground truth or boxes, return empty assignment
+            max_overlaps = gt_bboxes.new_zeros((num_bboxes, ))
+            assigned_gt_inds = gt_bboxes.new_zeros((num_bboxes, ),
+                                                   dtype=torch.long)
+            if gt_labels is None:
+                assigned_labels = None
+            else:
+                assigned_labels = gt_bboxes.new_full((num_bboxes, ),
+                                                     -1,
+                                                     dtype=torch.long)
+            return AssignResult(
+                num_gts,
+                assigned_gt_inds,
+                max_overlaps,
+                labels=assigned_labels)
+
+        num_lvls = len(mlvl_anchors)
+        r1 = (1 - self.center_ratio) / 2
+        r2 = (1 - self.ignore_ratio) / 2
+
+        scale = torch.sqrt((gt_bboxes[:, 2] - gt_bboxes[:, 0]) *
+                           (gt_bboxes[:, 3] - gt_bboxes[:, 1]))
+        min_anchor_size = scale.new_full(
+            (1, ), float(anchor_scale * anchor_strides[0]))
+        target_lvls = torch.floor(
+            torch.log2(scale) - torch.log2(min_anchor_size) + 0.5)
+        target_lvls = target_lvls.clamp(min=0, max=num_lvls - 1).long()
+
+        # 1. assign 0 (negative) by default
+        mlvl_assigned_gt_inds = []
+        mlvl_ignore_flags = []
+        for lvl in range(num_lvls):
+            h, w = featmap_sizes[lvl]
+            assert h * w == mlvl_anchors[lvl].shape[0]
+            assigned_gt_inds = gt_bboxes.new_full((h * w, ),
+                                                  0,
+                                                  dtype=torch.long)
+            ignore_flags = torch.zeros_like(assigned_gt_inds)
+            mlvl_assigned_gt_inds.append(assigned_gt_inds)
+            mlvl_ignore_flags.append(ignore_flags)
+
+        for gt_id in range(num_gts):
+            lvl = target_lvls[gt_id].item()
+            featmap_size = featmap_sizes[lvl]
+            stride = anchor_strides[lvl]
+            anchors = mlvl_anchors[lvl]
+            gt_bbox = gt_bboxes[gt_id, :4]
+
+            # Compute regions
+            ignore_region = calc_region(gt_bbox, r2, stride, featmap_size)
+            ctr_region = calc_region(gt_bbox, r1, stride, featmap_size)
+
+            # 2. Assign -1 to ignore flags
+            ignore_flags = anchor_ctr_inside_region_flags(
+                anchors, stride, ignore_region)
+            mlvl_assigned_gt_inds[lvl][ignore_flags] = -1
+
+            # 3. Assign gt_bboxes to pos flags
+            pos_flags = anchor_ctr_inside_region_flags(anchors, stride,
+                                                       ctr_region)
+            mlvl_assigned_gt_inds[lvl][pos_flags] = gt_id + 1
+
+            # 4.
Assign -1 to ignore adjacent lvl
+            if lvl > 0:
+                d_lvl = lvl - 1
+                d_anchors = mlvl_anchors[d_lvl]
+                d_featmap_size = featmap_sizes[d_lvl]
+                d_stride = anchor_strides[d_lvl]
+                d_ignore_region = calc_region(gt_bbox, r2, d_stride,
+                                              d_featmap_size)
+                ignore_flags = anchor_ctr_inside_region_flags(
+                    d_anchors, d_stride, d_ignore_region)
+                mlvl_ignore_flags[d_lvl][ignore_flags] = 1
+            if lvl < num_lvls - 1:
+                u_lvl = lvl + 1
+                u_anchors = mlvl_anchors[u_lvl]
+                u_featmap_size = featmap_sizes[u_lvl]
+                u_stride = anchor_strides[u_lvl]
+                u_ignore_region = calc_region(gt_bbox, r2, u_stride,
+                                              u_featmap_size)
+                ignore_flags = anchor_ctr_inside_region_flags(
+                    u_anchors, u_stride, u_ignore_region)
+                mlvl_ignore_flags[u_lvl][ignore_flags] = 1
+
+        # 4. (cont.) Assign -1 to ignore adjacent lvl
+        for lvl in range(num_lvls):
+            ignore_flags = mlvl_ignore_flags[lvl]
+            mlvl_assigned_gt_inds[lvl][ignore_flags] = -1
+
+        # 5. Assign -1 to anchor outside of image
+        flat_assigned_gt_inds = torch.cat(mlvl_assigned_gt_inds)
+        flat_anchors = torch.cat(mlvl_anchors)
+        flat_valid_flags = torch.cat(mlvl_valid_flags)
+        assert (flat_assigned_gt_inds.shape[0] == flat_anchors.shape[0] ==
+                flat_valid_flags.shape[0])
+        inside_flags = anchor_inside_flags(flat_anchors, flat_valid_flags,
+                                           img_meta['img_shape'],
+                                           allowed_border)
+        outside_flags = ~inside_flags
+        flat_assigned_gt_inds[outside_flags] = -1
+
+        if gt_labels is not None:
+            assigned_labels = torch.zeros_like(flat_assigned_gt_inds)
+            pos_flags = flat_assigned_gt_inds > 0
+            assigned_labels[pos_flags] = gt_labels[
+                flat_assigned_gt_inds[pos_flags] - 1]
+        else:
+            assigned_labels = None
+
+        return AssignResult(
+            num_gts, flat_assigned_gt_inds, None, labels=assigned_labels)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/uniform_assigner.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/uniform_assigner.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d606dee94f53c179eaf18cf671ebd39ee6b8203
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/assigners/uniform_assigner.py
@@ -0,0 +1,134 @@
+import torch
+
+from ..builder import BBOX_ASSIGNERS
+from ..iou_calculators import build_iou_calculator
+from ..transforms import bbox_xyxy_to_cxcywh
+from .assign_result import AssignResult
+from .base_assigner import BaseAssigner
+
+
+@BBOX_ASSIGNERS.register_module()
+class UniformAssigner(BaseAssigner):
+    """Uniform Matching between anchors and gt boxes, which balances the
+    number of positive anchors across gt boxes. gt_bboxes_ignore is not
+    considered for now.
+
+    Args:
+        pos_ignore_thr (float): the threshold to ignore positive anchors
+        neg_ignore_thr (float): the threshold to ignore negative anchors
+        match_times (int): Number of positive anchors for each gt box.
+            Default 4.
+        iou_calculator (dict): iou_calculator config
+    """
+
+    def __init__(self,
+                 pos_ignore_thr,
+                 neg_ignore_thr,
+                 match_times=4,
+                 iou_calculator=dict(type='BboxOverlaps2D')):
+        self.match_times = match_times
+        self.pos_ignore_thr = pos_ignore_thr
+        self.neg_ignore_thr = neg_ignore_thr
+        self.iou_calculator = build_iou_calculator(iou_calculator)
+
+    def assign(self,
+               bbox_pred,
+               anchor,
+               gt_bboxes,
+               gt_bboxes_ignore=None,
+               gt_labels=None):
+        num_gts, num_bboxes = gt_bboxes.size(0), bbox_pred.size(0)
+
+        # 1.
assign 0 by default
+        assigned_gt_inds = bbox_pred.new_full((num_bboxes, ),
+                                              0,
+                                              dtype=torch.long)
+        assigned_labels = bbox_pred.new_full((num_bboxes, ),
+                                             -1,
+                                             dtype=torch.long)
+        if num_gts == 0 or num_bboxes == 0:
+            # No ground truth or boxes, return empty assignment
+            if num_gts == 0:
+                # No ground truth, assign all to background
+                assigned_gt_inds[:] = 0
+            assign_result = AssignResult(
+                num_gts, assigned_gt_inds, None, labels=assigned_labels)
+            assign_result.set_extra_property(
+                'pos_idx', bbox_pred.new_empty(0, dtype=torch.bool))
+            assign_result.set_extra_property('pos_predicted_boxes',
+                                             bbox_pred.new_empty((0, 4)))
+            assign_result.set_extra_property('target_boxes',
+                                             bbox_pred.new_empty((0, 4)))
+            return assign_result
+
+        # 2. Compute the L1 cost between boxes
+        # Note that we use both anchors and predicted boxes
+        cost_bbox = torch.cdist(
+            bbox_xyxy_to_cxcywh(bbox_pred),
+            bbox_xyxy_to_cxcywh(gt_bboxes),
+            p=1)
+        cost_bbox_anchors = torch.cdist(
+            bbox_xyxy_to_cxcywh(anchor), bbox_xyxy_to_cxcywh(gt_bboxes), p=1)
+
+        # The topk function returns different results in CPU and CUDA
+        # modes. To stay consistent with the source code, we also use the
+        # CPU mode here.
+        # TODO: Check whether the performance of cpu and cuda are the same.
+        C = cost_bbox.cpu()
+        C1 = cost_bbox_anchors.cpu()
+
+        # self.match_times x n
+        index = torch.topk(
+            C,  # C has shape (num_bboxes, num_gts)
+            k=self.match_times,
+            dim=0,
+            largest=False)[1]
+
+        # self.match_times x n
+        index1 = torch.topk(C1, k=self.match_times, dim=0, largest=False)[1]
+        # (self.match_times*2) x n
+        indexes = torch.cat((index, index1),
+                            dim=1).reshape(-1).to(bbox_pred.device)
+
+        pred_overlaps = self.iou_calculator(bbox_pred, gt_bboxes)
+        anchor_overlaps = self.iou_calculator(anchor, gt_bboxes)
+        pred_max_overlaps, _ = pred_overlaps.max(dim=1)
+        anchor_max_overlaps, _ = anchor_overlaps.max(dim=0)
+
+        # 3. Compute the ignore indexes using gt_bboxes and predicted boxes
+        ignore_idx = pred_max_overlaps > self.neg_ignore_thr
+        assigned_gt_inds[ignore_idx] = -1
+
+        # 4.
Compute the ignore indexes of positive samples using anchors
+        # and predicted boxes
+        pos_gt_index = torch.arange(
+            0, C1.size(1),
+            device=bbox_pred.device).repeat(self.match_times * 2)
+        pos_ious = anchor_overlaps[indexes, pos_gt_index]
+        pos_ignore_idx = pos_ious < self.pos_ignore_thr
+
+        pos_gt_index_with_ignore = pos_gt_index + 1
+        pos_gt_index_with_ignore[pos_ignore_idx] = -1
+        assigned_gt_inds[indexes] = pos_gt_index_with_ignore
+
+        if gt_labels is not None:
+            assigned_labels = assigned_gt_inds.new_full((num_bboxes, ), -1)
+            pos_inds = torch.nonzero(
+                assigned_gt_inds > 0, as_tuple=False).squeeze()
+            if pos_inds.numel() > 0:
+                assigned_labels[pos_inds] = gt_labels[
+                    assigned_gt_inds[pos_inds] - 1]
+        else:
+            assigned_labels = None
+
+        assign_result = AssignResult(
+            num_gts,
+            assigned_gt_inds,
+            anchor_max_overlaps,
+            labels=assigned_labels)
+        assign_result.set_extra_property('pos_idx', ~pos_ignore_idx)
+        assign_result.set_extra_property('pos_predicted_boxes',
+                                         bbox_pred[indexes])
+        assign_result.set_extra_property('target_boxes',
+                                         gt_bboxes[pos_gt_index])
+        return assign_result
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/builder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/builder.py
new file mode 100644
index 0000000000000000000000000000000000000000..682683b62ae55396f24e9f9eea0f8193e2e88de6
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/builder.py
@@ -0,0 +1,20 @@
+from mmcv.utils import Registry, build_from_cfg
+
+BBOX_ASSIGNERS = Registry('bbox_assigner')
+BBOX_SAMPLERS = Registry('bbox_sampler')
+BBOX_CODERS = Registry('bbox_coder')
+
+
+def build_assigner(cfg, **default_args):
+    """Builder of box assigner."""
+    return build_from_cfg(cfg, BBOX_ASSIGNERS, default_args)
+
+
+def build_sampler(cfg, **default_args):
+    """Builder of box sampler."""
+    return build_from_cfg(cfg, BBOX_SAMPLERS, default_args)
+
+
+def build_bbox_coder(cfg, **default_args):
+    """Builder of box coder."""
+    return build_from_cfg(cfg, BBOX_CODERS, default_args)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ae455ba8fc0e0727e2d581cdc8f20fceededf99a
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/__init__.py
@@ -0,0 +1,13 @@
+from .base_bbox_coder import BaseBBoxCoder
+from .bucketing_bbox_coder import BucketingBBoxCoder
+from .delta_xywh_bbox_coder import DeltaXYWHBBoxCoder
+from .legacy_delta_xywh_bbox_coder import LegacyDeltaXYWHBBoxCoder
+from .pseudo_bbox_coder import PseudoBBoxCoder
+from .tblr_bbox_coder import TBLRBBoxCoder
+from .yolo_bbox_coder import YOLOBBoxCoder
+
+__all__ = [
+    'BaseBBoxCoder', 'PseudoBBoxCoder', 'DeltaXYWHBBoxCoder',
+    'LegacyDeltaXYWHBBoxCoder', 'TBLRBBoxCoder', 'YOLOBBoxCoder',
+    'BucketingBBoxCoder'
+]
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/base_bbox_coder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/base_bbox_coder.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf0b34c7cc2fe561718b0c884990beb40a993643
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/base_bbox_coder.py
@@ -0,0 +1,17 @@
+from abc import ABCMeta, abstractmethod
+
+
+class BaseBBoxCoder(metaclass=ABCMeta):
+    """Base bounding box coder."""
+
+    def __init__(self,
**kwargs): + pass + + @abstractmethod + def encode(self, bboxes, gt_bboxes): + """Encode deltas between bboxes and ground truth boxes.""" + + @abstractmethod + def decode(self, bboxes, bboxes_pred): + """Decode the predicted bboxes according to prediction and base + boxes.""" diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/bucketing_bbox_coder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/bucketing_bbox_coder.py new file mode 100644 index 0000000000000000000000000000000000000000..92d24b4519edece7a4af8f5cfa9af025b25f2dad --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/bucketing_bbox_coder.py @@ -0,0 +1,350 @@ +import mmcv +import numpy as np +import torch +import torch.nn.functional as F + +from ..builder import BBOX_CODERS +from ..transforms import bbox_rescale +from .base_bbox_coder import BaseBBoxCoder + + +@BBOX_CODERS.register_module() +class BucketingBBoxCoder(BaseBBoxCoder): + """Bucketing BBox Coder for Side-Aware Boundary Localization (SABL). + + Boundary Localization with Bucketing and Bucketing Guided Rescoring + are implemented here. + + Please refer to https://arxiv.org/abs/1912.04260 for more details. + + Args: + num_buckets (int): Number of buckets. + scale_factor (int): Scale factor of proposals to generate buckets. + offset_topk (int): Topk buckets are used to generate + bucket fine regression targets. Defaults to 2. + offset_upperbound (float): Offset upperbound to generate + bucket fine regression targets. + To avoid too large offset displacements. Defaults to 1.0. + cls_ignore_neighbor (bool): Ignore second nearest bucket or Not. + Defaults to True. + clip_border (bool, optional): Whether clip the objects outside the + border of the image. Defaults to True. + """ + + def __init__(self, + num_buckets, + scale_factor, + offset_topk=2, + offset_upperbound=1.0, + cls_ignore_neighbor=True, + clip_border=True): + super(BucketingBBoxCoder, self).__init__() + self.num_buckets = num_buckets + self.scale_factor = scale_factor + self.offset_topk = offset_topk + self.offset_upperbound = offset_upperbound + self.cls_ignore_neighbor = cls_ignore_neighbor + self.clip_border = clip_border + + def encode(self, bboxes, gt_bboxes): + """Get bucketing estimation and fine regression targets during + training. + + Args: + bboxes (torch.Tensor): source boxes, e.g., object proposals. + gt_bboxes (torch.Tensor): target of the transformation, e.g., + ground truth boxes. + + Returns: + encoded_bboxes(tuple[Tensor]): bucketing estimation + and fine regression targets and weights + """ + + assert bboxes.size(0) == gt_bboxes.size(0) + assert bboxes.size(-1) == gt_bboxes.size(-1) == 4 + encoded_bboxes = bbox2bucket(bboxes, gt_bboxes, self.num_buckets, + self.scale_factor, self.offset_topk, + self.offset_upperbound, + self.cls_ignore_neighbor) + return encoded_bboxes + + def decode(self, bboxes, pred_bboxes, max_shape=None): + """Apply transformation `pred_bboxes` to `boxes`. + Args: + boxes (torch.Tensor): Basic boxes. + pred_bboxes (torch.Tensor): Predictions for bucketing estimation + and fine regression + max_shape (tuple[int], optional): Maximum shape of boxes. + Defaults to None. + + Returns: + torch.Tensor: Decoded boxes. 
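+
+        Example:
+            >>> # Illustrative sketch only; the shapes follow the Args
+            >>> # above and are not taken from an upstream test.
+            >>> self = BucketingBBoxCoder(num_buckets=8, scale_factor=3.0)
+            >>> bboxes = torch.Tensor([[0., 0., 32., 32.]])
+            >>> cls_preds = torch.rand(1, 16)  # (n, num_buckets*2)
+            >>> offset_preds = torch.rand(1, 16)
+            >>> out, conf = self.decode(bboxes, (cls_preds, offset_preds),
+            ...                         max_shape=(32, 32))
+            >>> out.shape, conf.shape
+            (torch.Size([1, 4]), torch.Size([1]))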
+ """ + assert len(pred_bboxes) == 2 + cls_preds, offset_preds = pred_bboxes + assert cls_preds.size(0) == bboxes.size(0) and offset_preds.size( + 0) == bboxes.size(0) + decoded_bboxes = bucket2bbox(bboxes, cls_preds, offset_preds, + self.num_buckets, self.scale_factor, + max_shape, self.clip_border) + + return decoded_bboxes + + +@mmcv.jit(coderize=True) +def generat_buckets(proposals, num_buckets, scale_factor=1.0): + """Generate buckets w.r.t bucket number and scale factor of proposals. + + Args: + proposals (Tensor): Shape (n, 4) + num_buckets (int): Number of buckets. + scale_factor (float): Scale factor to rescale proposals. + + Returns: + tuple[Tensor]: (bucket_w, bucket_h, l_buckets, r_buckets, + t_buckets, d_buckets) + + - bucket_w: Width of buckets on x-axis. Shape (n, ). + - bucket_h: Height of buckets on y-axis. Shape (n, ). + - l_buckets: Left buckets. Shape (n, ceil(side_num/2)). + - r_buckets: Right buckets. Shape (n, ceil(side_num/2)). + - t_buckets: Top buckets. Shape (n, ceil(side_num/2)). + - d_buckets: Down buckets. Shape (n, ceil(side_num/2)). + """ + proposals = bbox_rescale(proposals, scale_factor) + + # number of buckets in each side + side_num = int(np.ceil(num_buckets / 2.0)) + pw = proposals[..., 2] - proposals[..., 0] + ph = proposals[..., 3] - proposals[..., 1] + px1 = proposals[..., 0] + py1 = proposals[..., 1] + px2 = proposals[..., 2] + py2 = proposals[..., 3] + + bucket_w = pw / num_buckets + bucket_h = ph / num_buckets + + # left buckets + l_buckets = px1[:, None] + (0.5 + torch.arange( + 0, side_num).to(proposals).float())[None, :] * bucket_w[:, None] + # right buckets + r_buckets = px2[:, None] - (0.5 + torch.arange( + 0, side_num).to(proposals).float())[None, :] * bucket_w[:, None] + # top buckets + t_buckets = py1[:, None] + (0.5 + torch.arange( + 0, side_num).to(proposals).float())[None, :] * bucket_h[:, None] + # down buckets + d_buckets = py2[:, None] - (0.5 + torch.arange( + 0, side_num).to(proposals).float())[None, :] * bucket_h[:, None] + return bucket_w, bucket_h, l_buckets, r_buckets, t_buckets, d_buckets + + +@mmcv.jit(coderize=True) +def bbox2bucket(proposals, + gt, + num_buckets, + scale_factor, + offset_topk=2, + offset_upperbound=1.0, + cls_ignore_neighbor=True): + """Generate buckets estimation and fine regression targets. + + Args: + proposals (Tensor): Shape (n, 4) + gt (Tensor): Shape (n, 4) + num_buckets (int): Number of buckets. + scale_factor (float): Scale factor to rescale proposals. + offset_topk (int): Topk buckets are used to generate + bucket fine regression targets. Defaults to 2. + offset_upperbound (float): Offset allowance to generate + bucket fine regression targets. + To avoid too large offset displacements. Defaults to 1.0. + cls_ignore_neighbor (bool): Ignore second nearest bucket or Not. + Defaults to True. + + Returns: + tuple[Tensor]: (offsets, offsets_weights, bucket_labels, cls_weights). + + - offsets: Fine regression targets. \ + Shape (n, num_buckets*2). + - offsets_weights: Fine regression weights. \ + Shape (n, num_buckets*2). + - bucket_labels: Bucketing estimation labels. \ + Shape (n, num_buckets*2). + - cls_weights: Bucketing estimation weights. \ + Shape (n, num_buckets*2). 
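+
+    Example:
+        >>> # Illustrative sketch only; inputs are assumed, not taken
+        >>> # from an upstream test. All four outputs have shape
+        >>> # (n, num_buckets*2).
+        >>> proposals = torch.Tensor([[0., 0., 32., 32.]])
+        >>> gt = torch.Tensor([[2., 2., 30., 30.]])
+        >>> out = bbox2bucket(proposals, gt, num_buckets=8, scale_factor=3.0)
+        >>> [o.shape for o in out]
+        [torch.Size([1, 16]), torch.Size([1, 16]), torch.Size([1, 16]), torch.Size([1, 16])]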
+    """
+    assert proposals.size() == gt.size()
+
+    # generate buckets
+    proposals = proposals.float()
+    gt = gt.float()
+    (bucket_w, bucket_h, l_buckets, r_buckets, t_buckets,
+     d_buckets) = generat_buckets(proposals, num_buckets, scale_factor)
+
+    gx1 = gt[..., 0]
+    gy1 = gt[..., 1]
+    gx2 = gt[..., 2]
+    gy2 = gt[..., 3]
+
+    # generate offset targets and weights
+    # offsets from buckets to gts
+    l_offsets = (l_buckets - gx1[:, None]) / bucket_w[:, None]
+    r_offsets = (r_buckets - gx2[:, None]) / bucket_w[:, None]
+    t_offsets = (t_buckets - gy1[:, None]) / bucket_h[:, None]
+    d_offsets = (d_buckets - gy2[:, None]) / bucket_h[:, None]
+
+    # select top-k nearest buckets
+    l_topk, l_label = l_offsets.abs().topk(
+        offset_topk, dim=1, largest=False, sorted=True)
+    r_topk, r_label = r_offsets.abs().topk(
+        offset_topk, dim=1, largest=False, sorted=True)
+    t_topk, t_label = t_offsets.abs().topk(
+        offset_topk, dim=1, largest=False, sorted=True)
+    d_topk, d_label = d_offsets.abs().topk(
+        offset_topk, dim=1, largest=False, sorted=True)
+
+    offset_l_weights = l_offsets.new_zeros(l_offsets.size())
+    offset_r_weights = r_offsets.new_zeros(r_offsets.size())
+    offset_t_weights = t_offsets.new_zeros(t_offsets.size())
+    offset_d_weights = d_offsets.new_zeros(d_offsets.size())
+    inds = torch.arange(0, proposals.size(0)).to(proposals).long()
+
+    # generate offset weights of top-k nearest buckets
+    for k in range(offset_topk):
+        if k >= 1:
+            offset_l_weights[inds, l_label[:,
+                                           k]] = (l_topk[:, k] <
+                                                  offset_upperbound).float()
+            offset_r_weights[inds, r_label[:,
+                                           k]] = (r_topk[:, k] <
+                                                  offset_upperbound).float()
+            offset_t_weights[inds, t_label[:,
+                                           k]] = (t_topk[:, k] <
+                                                  offset_upperbound).float()
+            offset_d_weights[inds, d_label[:,
+                                           k]] = (d_topk[:, k] <
+                                                  offset_upperbound).float()
+        else:
+            offset_l_weights[inds, l_label[:, k]] = 1.0
+            offset_r_weights[inds, r_label[:, k]] = 1.0
+            offset_t_weights[inds, t_label[:, k]] = 1.0
+            offset_d_weights[inds, d_label[:, k]] = 1.0
+
+    offsets = torch.cat([l_offsets, r_offsets, t_offsets, d_offsets], dim=-1)
+    offsets_weights = torch.cat([
+        offset_l_weights, offset_r_weights, offset_t_weights, offset_d_weights
+    ],
+                                dim=-1)
+
+    # generate bucket labels and weight
+    side_num = int(np.ceil(num_buckets / 2.0))
+    labels = torch.stack(
+        [l_label[:, 0], r_label[:, 0], t_label[:, 0], d_label[:, 0]], dim=-1)
+
+    batch_size = labels.size(0)
+    bucket_labels = F.one_hot(labels.view(-1), side_num).view(batch_size,
+                                                              -1).float()
+    bucket_cls_l_weights = (l_offsets.abs() < 1).float()
+    bucket_cls_r_weights = (r_offsets.abs() < 1).float()
+    bucket_cls_t_weights = (t_offsets.abs() < 1).float()
+    bucket_cls_d_weights = (d_offsets.abs() < 1).float()
+    bucket_cls_weights = torch.cat([
+        bucket_cls_l_weights, bucket_cls_r_weights, bucket_cls_t_weights,
+        bucket_cls_d_weights
+    ],
+                                   dim=-1)
+    # ignore second nearest buckets for cls if necessary
+    if cls_ignore_neighbor:
+        bucket_cls_weights = (~((bucket_cls_weights == 1) &
+                                (bucket_labels == 0))).float()
+    else:
+        bucket_cls_weights[:] = 1.0
+    return offsets, offsets_weights, bucket_labels, bucket_cls_weights
+
+
+@mmcv.jit(coderize=True)
+def bucket2bbox(proposals,
+                cls_preds,
+                offset_preds,
+                num_buckets,
+                scale_factor=1.0,
+                max_shape=None,
+                clip_border=True):
+    """Apply bucketing estimation (cls preds) and fine regression (offset
+    preds) to generate det bboxes.
+
+    Args:
+        proposals (Tensor): Boxes to be transformed. Shape (n, 4)
+        cls_preds (Tensor): bucketing estimation. Shape (n, num_buckets*2).
+        offset_preds (Tensor): fine regression. Shape (n, num_buckets*2).
+        num_buckets (int): Number of buckets.
+        scale_factor (float): Scale factor to rescale proposals.
+        max_shape (tuple[int, int]): Maximum bounds for boxes, specifies
+            (H, W).
+        clip_border (bool, optional): Whether clip the objects outside the
+            border of the image. Defaults to True.
+
+    Returns:
+        tuple[Tensor]: (bboxes, loc_confidence).
+
+            - bboxes: predicted bboxes. Shape (n, 4)
+            - loc_confidence: localization confidence of predicted bboxes.
+                Shape (n,).
+    """
+
+    side_num = int(np.ceil(num_buckets / 2.0))
+    cls_preds = cls_preds.view(-1, side_num)
+    offset_preds = offset_preds.view(-1, side_num)
+
+    scores = F.softmax(cls_preds, dim=1)
+    score_topk, score_label = scores.topk(2, dim=1, largest=True, sorted=True)
+
+    rescaled_proposals = bbox_rescale(proposals, scale_factor)
+
+    pw = rescaled_proposals[..., 2] - rescaled_proposals[..., 0]
+    ph = rescaled_proposals[..., 3] - rescaled_proposals[..., 1]
+    px1 = rescaled_proposals[..., 0]
+    py1 = rescaled_proposals[..., 1]
+    px2 = rescaled_proposals[..., 2]
+    py2 = rescaled_proposals[..., 3]
+
+    bucket_w = pw / num_buckets
+    bucket_h = ph / num_buckets
+
+    score_inds_l = score_label[0::4, 0]
+    score_inds_r = score_label[1::4, 0]
+    score_inds_t = score_label[2::4, 0]
+    score_inds_d = score_label[3::4, 0]
+    l_buckets = px1 + (0.5 + score_inds_l.float()) * bucket_w
+    r_buckets = px2 - (0.5 + score_inds_r.float()) * bucket_w
+    t_buckets = py1 + (0.5 + score_inds_t.float()) * bucket_h
+    d_buckets = py2 - (0.5 + score_inds_d.float()) * bucket_h
+
+    offsets = offset_preds.view(-1, 4, side_num)
+    inds = torch.arange(proposals.size(0)).to(proposals).long()
+    l_offsets = offsets[:, 0, :][inds, score_inds_l]
+    r_offsets = offsets[:, 1, :][inds, score_inds_r]
+    t_offsets = offsets[:, 2, :][inds, score_inds_t]
+    d_offsets = offsets[:, 3, :][inds, score_inds_d]
+
+    x1 = l_buckets - l_offsets * bucket_w
+    x2 = r_buckets - r_offsets * bucket_w
+    y1 = t_buckets - t_offsets * bucket_h
+    y2 = d_buckets - d_offsets * bucket_h
+
+    if clip_border and max_shape is not None:
+        x1 = x1.clamp(min=0, max=max_shape[1] - 1)
+        y1 = y1.clamp(min=0, max=max_shape[0] - 1)
+        x2 = x2.clamp(min=0, max=max_shape[1] - 1)
+        y2 = y2.clamp(min=0, max=max_shape[0] - 1)
+    bboxes = torch.cat([x1[:, None], y1[:, None], x2[:, None], y2[:, None]],
+                       dim=-1)
+
+    # bucketing guided rescoring
+    loc_confidence = score_topk[:, 0]
+    top2_neighbor_inds = (score_label[:, 0] - score_label[:, 1]).abs() == 1
+    loc_confidence += score_topk[:, 1] * top2_neighbor_inds.float()
+    loc_confidence = loc_confidence.view(-1, 4).mean(dim=1)
+
+    return bboxes, loc_confidence
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/delta_xywh_bbox_coder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/delta_xywh_bbox_coder.py
new file mode 100644
index 0000000000000000000000000000000000000000..98d30906d2c9e617194f1a1ece6d45d2cd000b6a
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/delta_xywh_bbox_coder.py
@@ -0,0 +1,271 @@
+import mmcv
+import numpy as np
+import torch
+
+from ..builder import BBOX_CODERS
+from .base_bbox_coder import BaseBBoxCoder
+
+
+@BBOX_CODERS.register_module()
+class DeltaXYWHBBoxCoder(BaseBBoxCoder):
+    """Delta XYWH BBox coder.
+
+    Following the practice in `R-CNN <https://arxiv.org/abs/1311.2524>`_,
+    this coder encodes bbox (x1, y1, x2, y2) into delta (dx, dy, dw, dh) and
+    decodes delta (dx, dy, dw, dh) back to original bbox (x1, y1, x2, y2).
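+
+    Concretely (mirroring :func:`bbox2delta` below), the encoding is
+    ``dx = (gx - px) / pw``, ``dy = (gy - py) / ph``, ``dw = log(gw / pw)``,
+    ``dh = log(gh / ph)``, followed by normalization with ``target_means``
+    and ``target_stds``.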
+
+    Args:
+        target_means (Sequence[float]): Denormalizing means of target for
+            delta coordinates
+        target_stds (Sequence[float]): Denormalizing standard deviation of
+            target for delta coordinates
+        clip_border (bool, optional): Whether clip the objects outside the
+            border of the image. Defaults to True.
+        add_ctr_clamp (bool): Whether to add center clamp; when added, the
+            predicted box is clamped if its center is too far away from
+            the original anchor's center. Only used by YOLOF. Default False.
+        ctr_clamp (int): the maximum pixel shift to clamp. Only used by YOLOF.
+            Default 32.
+    """
+
+    def __init__(self,
+                 target_means=(0., 0., 0., 0.),
+                 target_stds=(1., 1., 1., 1.),
+                 clip_border=True,
+                 add_ctr_clamp=False,
+                 ctr_clamp=32):
+        super(BaseBBoxCoder, self).__init__()
+        self.means = target_means
+        self.stds = target_stds
+        self.clip_border = clip_border
+        self.add_ctr_clamp = add_ctr_clamp
+        self.ctr_clamp = ctr_clamp
+
+    def encode(self, bboxes, gt_bboxes):
+        """Get box regression transformation deltas that can be used to
+        transform the ``bboxes`` into the ``gt_bboxes``.
+
+        Args:
+            bboxes (torch.Tensor): Source boxes, e.g., object proposals.
+            gt_bboxes (torch.Tensor): Target of the transformation, e.g.,
+                ground-truth boxes.
+
+        Returns:
+            torch.Tensor: Box transformation deltas
+        """
+
+        assert bboxes.size(0) == gt_bboxes.size(0)
+        assert bboxes.size(-1) == gt_bboxes.size(-1) == 4
+        encoded_bboxes = bbox2delta(bboxes, gt_bboxes, self.means, self.stds)
+        return encoded_bboxes
+
+    def decode(self,
+               bboxes,
+               pred_bboxes,
+               max_shape=None,
+               wh_ratio_clip=16 / 1000):
+        """Apply transformation `pred_bboxes` to `boxes`.
+
+        Args:
+            bboxes (torch.Tensor): Basic boxes. Shape (B, N, 4) or (N, 4)
+            pred_bboxes (Tensor): Encoded offsets with respect to each roi.
+                Has shape (B, N, num_classes * 4) or (B, N, 4) or
+                (N, num_classes * 4) or (N, 4). Note N = num_anchors * W * H
+                when rois is a grid of anchors. Offset encoding follows [1]_.
+            max_shape (Sequence[int] or torch.Tensor or Sequence[
+                Sequence[int]], optional): Maximum bounds for boxes, specifies
+                (H, W, C) or (H, W). If bboxes shape is (B, N, 4), then
+                the max_shape should be a Sequence[Sequence[int]]
+                and the length of max_shape should also be B.
+            wh_ratio_clip (float, optional): The allowed ratio between
+                width and height.
+
+        Returns:
+            torch.Tensor: Decoded boxes.
+        """
+
+        assert pred_bboxes.size(0) == bboxes.size(0)
+        if pred_bboxes.ndim == 3:
+            assert pred_bboxes.size(1) == bboxes.size(1)
+        decoded_bboxes = delta2bbox(bboxes, pred_bboxes, self.means, self.stds,
+                                    max_shape, wh_ratio_clip, self.clip_border,
+                                    self.add_ctr_clamp, self.ctr_clamp)
+
+        return decoded_bboxes
+
+
+@mmcv.jit(coderize=True)
+def bbox2delta(proposals, gt, means=(0., 0., 0., 0.), stds=(1., 1., 1., 1.)):
+    """Compute deltas of proposals w.r.t. gt.
+
+    We usually compute the deltas of x, y, w, h of proposals w.r.t ground
+    truth bboxes to get regression target.
+    This is the inverse function of :func:`delta2bbox`.
+
+    Args:
+        proposals (Tensor): Boxes to be transformed, shape (N, ..., 4)
+        gt (Tensor): Gt bboxes to be used as base, shape (N, ..., 4)
+        means (Sequence[float]): Denormalizing means for delta coordinates
+        stds (Sequence[float]): Denormalizing standard deviation for delta
+            coordinates
+
+    Returns:
+        Tensor: deltas with shape (N, 4), where columns represent dx, dy,
+            dw, dh.
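+
+    Example:
+        >>> # Illustrative sketch: identical boxes encode to zero deltas
+        >>> # (with the default means/stds); values follow the formulas
+        >>> # in this function.
+        >>> proposals = torch.Tensor([[0., 0., 10., 10.]])
+        >>> gt = torch.Tensor([[0., 0., 10., 10.]])
+        >>> bbox2delta(proposals, gt)
+        tensor([[0., 0., 0., 0.]])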
+    """
+    assert proposals.size() == gt.size()
+
+    proposals = proposals.float()
+    gt = gt.float()
+    px = (proposals[..., 0] + proposals[..., 2]) * 0.5
+    py = (proposals[..., 1] + proposals[..., 3]) * 0.5
+    pw = proposals[..., 2] - proposals[..., 0]
+    ph = proposals[..., 3] - proposals[..., 1]
+
+    gx = (gt[..., 0] + gt[..., 2]) * 0.5
+    gy = (gt[..., 1] + gt[..., 3]) * 0.5
+    gw = gt[..., 2] - gt[..., 0]
+    gh = gt[..., 3] - gt[..., 1]
+
+    dx = (gx - px) / pw
+    dy = (gy - py) / ph
+    dw = torch.log(gw / pw)
+    dh = torch.log(gh / ph)
+    deltas = torch.stack([dx, dy, dw, dh], dim=-1)
+
+    means = deltas.new_tensor(means).unsqueeze(0)
+    stds = deltas.new_tensor(stds).unsqueeze(0)
+    deltas = deltas.sub_(means).div_(stds)
+
+    return deltas
+
+
+@mmcv.jit(coderize=True)
+def delta2bbox(rois,
+               deltas,
+               means=(0., 0., 0., 0.),
+               stds=(1., 1., 1., 1.),
+               max_shape=None,
+               wh_ratio_clip=16 / 1000,
+               clip_border=True,
+               add_ctr_clamp=False,
+               ctr_clamp=32):
+    """Apply deltas to shift/scale base boxes.
+
+    Typically the rois are anchor or proposed bounding boxes and the deltas
+    are network outputs used to shift/scale those boxes.
+    This is the inverse function of :func:`bbox2delta`.
+
+    Args:
+        rois (Tensor): Boxes to be transformed. Has shape (N, 4) or (B, N, 4)
+        deltas (Tensor): Encoded offsets with respect to each roi.
+            Has shape (B, N, num_classes * 4) or (B, N, 4) or
+            (N, num_classes * 4) or (N, 4). Note N = num_anchors * W * H
+            when rois is a grid of anchors. Offset encoding follows [1]_.
+        means (Sequence[float]): Denormalizing means for delta coordinates
+        stds (Sequence[float]): Denormalizing standard deviation for delta
+            coordinates
+        max_shape (Sequence[int] or torch.Tensor or Sequence[
+            Sequence[int]], optional): Maximum bounds for boxes, specifies
+            (H, W, C) or (H, W). If rois shape is (B, N, 4), then
+            the max_shape should be a Sequence[Sequence[int]]
+            and the length of max_shape should also be B.
+        wh_ratio_clip (float): Maximum aspect ratio for boxes.
+        clip_border (bool, optional): Whether clip the objects outside the
+            border of the image. Defaults to True.
+        add_ctr_clamp (bool): Whether to add center clamp; when added, the
+            predicted box is clamped if its center is too far away from
+            the original anchor's center. Only used by YOLOF. Default False.
+        ctr_clamp (int): the maximum pixel shift to clamp. Only used by YOLOF.
+            Default 32.
+
+    Returns:
+        Tensor: Boxes with shape (B, N, num_classes * 4) or (B, N, 4) or
+            (N, num_classes * 4) or (N, 4), where 4 represent
+            tl_x, tl_y, br_x, br_y.
+
+    References:
+        ..
[1] https://arxiv.org/abs/1311.2524 + + Example: + >>> rois = torch.Tensor([[ 0., 0., 1., 1.], + >>> [ 0., 0., 1., 1.], + >>> [ 0., 0., 1., 1.], + >>> [ 5., 5., 5., 5.]]) + >>> deltas = torch.Tensor([[ 0., 0., 0., 0.], + >>> [ 1., 1., 1., 1.], + >>> [ 0., 0., 2., -1.], + >>> [ 0.7, -1.9, -0.5, 0.3]]) + >>> delta2bbox(rois, deltas, max_shape=(32, 32, 3)) + tensor([[0.0000, 0.0000, 1.0000, 1.0000], + [0.1409, 0.1409, 2.8591, 2.8591], + [0.0000, 0.3161, 4.1945, 0.6839], + [5.0000, 5.0000, 5.0000, 5.0000]]) + """ + means = deltas.new_tensor(means).view(1, + -1).repeat(1, + deltas.size(-1) // 4) + stds = deltas.new_tensor(stds).view(1, -1).repeat(1, deltas.size(-1) // 4) + denorm_deltas = deltas * stds + means + dx = denorm_deltas[..., 0::4] + dy = denorm_deltas[..., 1::4] + dw = denorm_deltas[..., 2::4] + dh = denorm_deltas[..., 3::4] + + x1, y1 = rois[..., 0], rois[..., 1] + x2, y2 = rois[..., 2], rois[..., 3] + # Compute center of each roi + px = ((x1 + x2) * 0.5).unsqueeze(-1).expand_as(dx) + py = ((y1 + y2) * 0.5).unsqueeze(-1).expand_as(dy) + # Compute width/height of each roi + pw = (x2 - x1).unsqueeze(-1).expand_as(dw) + ph = (y2 - y1).unsqueeze(-1).expand_as(dh) + + dx_width = pw * dx + dy_height = ph * dy + + max_ratio = np.abs(np.log(wh_ratio_clip)) + if add_ctr_clamp: + dx_width = torch.clamp(dx_width, max=ctr_clamp, min=-ctr_clamp) + dy_height = torch.clamp(dy_height, max=ctr_clamp, min=-ctr_clamp) + dw = torch.clamp(dw, max=max_ratio) + dh = torch.clamp(dh, max=max_ratio) + else: + dw = dw.clamp(min=-max_ratio, max=max_ratio) + dh = dh.clamp(min=-max_ratio, max=max_ratio) + # Use exp(network energy) to enlarge/shrink each roi + gw = pw * dw.exp() + gh = ph * dh.exp() + # Use network energy to shift the center of each roi + gx = px + dx_width + gy = py + dy_height + # Convert center-xy/width/height to top-left, bottom-right + x1 = gx - gw * 0.5 + y1 = gy - gh * 0.5 + x2 = gx + gw * 0.5 + y2 = gy + gh * 0.5 + + bboxes = torch.stack([x1, y1, x2, y2], dim=-1).view(deltas.size()) + + if clip_border and max_shape is not None: + # clip bboxes with dynamic `min` and `max` for onnx + if torch.onnx.is_in_onnx_export(): + from mmdet.core.export import dynamic_clip_for_onnx + x1, y1, x2, y2 = dynamic_clip_for_onnx(x1, y1, x2, y2, max_shape) + bboxes = torch.stack([x1, y1, x2, y2], dim=-1).view(deltas.size()) + return bboxes + if not isinstance(max_shape, torch.Tensor): + max_shape = x1.new_tensor(max_shape) + max_shape = max_shape[..., :2].type_as(x1) + if max_shape.ndim == 2: + assert bboxes.ndim == 3 + assert max_shape.size(0) == bboxes.size(0) + + min_xy = x1.new_tensor(0) + max_xy = torch.cat( + [max_shape] * (deltas.size(-1) // 2), + dim=-1).flip(-1).unsqueeze(-2) + bboxes = torch.where(bboxes < min_xy, min_xy, bboxes) + bboxes = torch.where(bboxes > max_xy, max_xy, bboxes) + + return bboxes diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/legacy_delta_xywh_bbox_coder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/legacy_delta_xywh_bbox_coder.py new file mode 100644 index 0000000000000000000000000000000000000000..190309fd42a1b76c12c82fc1acf0511494be5ac3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/legacy_delta_xywh_bbox_coder.py @@ -0,0 +1,215 @@ +import mmcv +import numpy as np +import torch + +from ..builder import BBOX_CODERS +from .base_bbox_coder import BaseBBoxCoder + + +@BBOX_CODERS.register_module() +class LegacyDeltaXYWHBBoxCoder(BaseBBoxCoder): + """Legacy Delta XYWH BBox coder 
used in MMDet V1.x.
+
+    Following the practice in R-CNN [1]_, this coder encodes bbox (x1, y1, x2,
+    y2) into delta (dx, dy, dw, dh) and decodes delta (dx, dy, dw, dh)
+    back to original bbox (x1, y1, x2, y2).
+
+    Note:
+        The main difference between :class:`LegacyDeltaXYWHBBoxCoder` and
+        :class:`DeltaXYWHBBoxCoder` is whether ``+ 1`` is used during width and
+        height calculation. We suggest using this coder only when testing with
+        MMDet V1.x models.
+
+    References:
+        .. [1] https://arxiv.org/abs/1311.2524
+
+    Args:
+        target_means (Sequence[float]): denormalizing means of target for
+            delta coordinates
+        target_stds (Sequence[float]): denormalizing standard deviation of
+            target for delta coordinates
+    """
+
+    def __init__(self,
+                 target_means=(0., 0., 0., 0.),
+                 target_stds=(1., 1., 1., 1.)):
+        super(BaseBBoxCoder, self).__init__()
+        self.means = target_means
+        self.stds = target_stds
+
+    def encode(self, bboxes, gt_bboxes):
+        """Get box regression transformation deltas that can be used to
+        transform the ``bboxes`` into the ``gt_bboxes``.
+
+        Args:
+            bboxes (torch.Tensor): source boxes, e.g., object proposals.
+            gt_bboxes (torch.Tensor): target of the transformation, e.g.,
+                ground-truth boxes.
+
+        Returns:
+            torch.Tensor: Box transformation deltas
+        """
+        assert bboxes.size(0) == gt_bboxes.size(0)
+        assert bboxes.size(-1) == gt_bboxes.size(-1) == 4
+        encoded_bboxes = legacy_bbox2delta(bboxes, gt_bboxes, self.means,
+                                           self.stds)
+        return encoded_bboxes
+
+    def decode(self,
+               bboxes,
+               pred_bboxes,
+               max_shape=None,
+               wh_ratio_clip=16 / 1000):
+        """Apply transformation `pred_bboxes` to `boxes`.
+
+        Args:
+            bboxes (torch.Tensor): Basic boxes.
+            pred_bboxes (torch.Tensor): Encoded boxes with shape (N, 4) or
+                (N, 4 * num_classes).
+            max_shape (tuple[int], optional): Maximum shape of boxes.
+                Defaults to None.
+            wh_ratio_clip (float, optional): The allowed ratio between
+                width and height.
+
+        Returns:
+            torch.Tensor: Decoded boxes.
+        """
+        assert pred_bboxes.size(0) == bboxes.size(0)
+        decoded_bboxes = legacy_delta2bbox(bboxes, pred_bboxes, self.means,
+                                           self.stds, max_shape, wh_ratio_clip)
+
+        return decoded_bboxes
+
+
+@mmcv.jit(coderize=True)
+def legacy_bbox2delta(proposals,
+                      gt,
+                      means=(0., 0., 0., 0.),
+                      stds=(1., 1., 1., 1.)):
+    """Compute deltas of proposals w.r.t. gt in the MMDet V1.x manner.
+
+    We usually compute the deltas of x, y, w, h of proposals w.r.t ground
+    truth bboxes to get regression target.
+    This is the inverse function of `delta2bbox()`
+
+    Args:
+        proposals (Tensor): Boxes to be transformed, shape (N, ..., 4)
+        gt (Tensor): Gt bboxes to be used as base, shape (N, ..., 4)
+        means (Sequence[float]): Denormalizing means for delta coordinates
+        stds (Sequence[float]): Denormalizing standard deviation for delta
+            coordinates
+
+    Returns:
+        Tensor: deltas with shape (N, 4), where columns represent dx, dy,
+            dw, dh.
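+
+    Example:
+        >>> # Illustrative sketch of the V1.x convention: widths and
+        >>> # heights are computed with ``+ 1``, so an exact match still
+        >>> # yields zero deltas.
+        >>> proposals = torch.Tensor([[0., 0., 9., 9.]])
+        >>> gt = torch.Tensor([[0., 0., 9., 9.]])
+        >>> legacy_bbox2delta(proposals, gt)
+        tensor([[0., 0., 0., 0.]])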
+ """ + assert proposals.size() == gt.size() + + proposals = proposals.float() + gt = gt.float() + px = (proposals[..., 0] + proposals[..., 2]) * 0.5 + py = (proposals[..., 1] + proposals[..., 3]) * 0.5 + pw = proposals[..., 2] - proposals[..., 0] + 1.0 + ph = proposals[..., 3] - proposals[..., 1] + 1.0 + + gx = (gt[..., 0] + gt[..., 2]) * 0.5 + gy = (gt[..., 1] + gt[..., 3]) * 0.5 + gw = gt[..., 2] - gt[..., 0] + 1.0 + gh = gt[..., 3] - gt[..., 1] + 1.0 + + dx = (gx - px) / pw + dy = (gy - py) / ph + dw = torch.log(gw / pw) + dh = torch.log(gh / ph) + deltas = torch.stack([dx, dy, dw, dh], dim=-1) + + means = deltas.new_tensor(means).unsqueeze(0) + stds = deltas.new_tensor(stds).unsqueeze(0) + deltas = deltas.sub_(means).div_(stds) + + return deltas + + +@mmcv.jit(coderize=True) +def legacy_delta2bbox(rois, + deltas, + means=(0., 0., 0., 0.), + stds=(1., 1., 1., 1.), + max_shape=None, + wh_ratio_clip=16 / 1000): + """Apply deltas to shift/scale base boxes in the MMDet V1.x manner. + + Typically the rois are anchor or proposed bounding boxes and the deltas are + network outputs used to shift/scale those boxes. + This is the inverse function of `bbox2delta()` + + Args: + rois (Tensor): Boxes to be transformed. Has shape (N, 4) + deltas (Tensor): Encoded offsets with respect to each roi. + Has shape (N, 4 * num_classes). Note N = num_anchors * W * H when + rois is a grid of anchors. Offset encoding follows [1]_. + means (Sequence[float]): Denormalizing means for delta coordinates + stds (Sequence[float]): Denormalizing standard deviation for delta + coordinates + max_shape (tuple[int, int]): Maximum bounds for boxes. specifies (H, W) + wh_ratio_clip (float): Maximum aspect ratio for boxes. + + Returns: + Tensor: Boxes with shape (N, 4), where columns represent + tl_x, tl_y, br_x, br_y. + + References: + .. [1] https://arxiv.org/abs/1311.2524 + + Example: + >>> rois = torch.Tensor([[ 0., 0., 1., 1.], + >>> [ 0., 0., 1., 1.], + >>> [ 0., 0., 1., 1.], + >>> [ 5., 5., 5., 5.]]) + >>> deltas = torch.Tensor([[ 0., 0., 0., 0.], + >>> [ 1., 1., 1., 1.], + >>> [ 0., 0., 2., -1.], + >>> [ 0.7, -1.9, -0.5, 0.3]]) + >>> legacy_delta2bbox(rois, deltas, max_shape=(32, 32)) + tensor([[0.0000, 0.0000, 1.5000, 1.5000], + [0.0000, 0.0000, 5.2183, 5.2183], + [0.0000, 0.1321, 7.8891, 0.8679], + [5.3967, 2.4251, 6.0033, 3.7749]]) + """ + means = deltas.new_tensor(means).repeat(1, deltas.size(1) // 4) + stds = deltas.new_tensor(stds).repeat(1, deltas.size(1) // 4) + denorm_deltas = deltas * stds + means + dx = denorm_deltas[:, 0::4] + dy = denorm_deltas[:, 1::4] + dw = denorm_deltas[:, 2::4] + dh = denorm_deltas[:, 3::4] + max_ratio = np.abs(np.log(wh_ratio_clip)) + dw = dw.clamp(min=-max_ratio, max=max_ratio) + dh = dh.clamp(min=-max_ratio, max=max_ratio) + # Compute center of each roi + px = ((rois[:, 0] + rois[:, 2]) * 0.5).unsqueeze(1).expand_as(dx) + py = ((rois[:, 1] + rois[:, 3]) * 0.5).unsqueeze(1).expand_as(dy) + # Compute width/height of each roi + pw = (rois[:, 2] - rois[:, 0] + 1.0).unsqueeze(1).expand_as(dw) + ph = (rois[:, 3] - rois[:, 1] + 1.0).unsqueeze(1).expand_as(dh) + # Use exp(network energy) to enlarge/shrink each roi + gw = pw * dw.exp() + gh = ph * dh.exp() + # Use network energy to shift the center of each roi + gx = px + pw * dx + gy = py + ph * dy + # Convert center-xy/width/height to top-left, bottom-right + + # The true legacy box coder should +- 0.5 here. 
+    # However, current implementation improves the performance when testing
+    # the models trained in MMDetection 1.X (~0.5 bbox AP, 0.2 mask AP)
+    x1 = gx - gw * 0.5
+    y1 = gy - gh * 0.5
+    x2 = gx + gw * 0.5
+    y2 = gy + gh * 0.5
+    if max_shape is not None:
+        x1 = x1.clamp(min=0, max=max_shape[1] - 1)
+        y1 = y1.clamp(min=0, max=max_shape[0] - 1)
+        x2 = x2.clamp(min=0, max=max_shape[1] - 1)
+        y2 = y2.clamp(min=0, max=max_shape[0] - 1)
+    bboxes = torch.stack([x1, y1, x2, y2], dim=-1).view_as(deltas)
+    return bboxes
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/pseudo_bbox_coder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/pseudo_bbox_coder.py
new file mode 100644
index 0000000000000000000000000000000000000000..1c8346f4ae2c7db9719a70c7dc0244e088a9965b
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/pseudo_bbox_coder.py
@@ -0,0 +1,18 @@
+from ..builder import BBOX_CODERS
+from .base_bbox_coder import BaseBBoxCoder
+
+
+@BBOX_CODERS.register_module()
+class PseudoBBoxCoder(BaseBBoxCoder):
+    """Pseudo bounding box coder."""
+
+    def __init__(self, **kwargs):
+        super(BaseBBoxCoder, self).__init__(**kwargs)
+
+    def encode(self, bboxes, gt_bboxes):
+        """torch.Tensor: return the given ``gt_bboxes``"""
+        return gt_bboxes
+
+    def decode(self, bboxes, pred_bboxes):
+        """torch.Tensor: return the given ``pred_bboxes``"""
+        return pred_bboxes
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/tblr_bbox_coder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/tblr_bbox_coder.py
new file mode 100644
index 0000000000000000000000000000000000000000..c45c61678291cd1611c8e9b09e082b0142a4720c
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/tblr_bbox_coder.py
@@ -0,0 +1,205 @@
+import mmcv
+import torch
+
+from ..builder import BBOX_CODERS
+from .base_bbox_coder import BaseBBoxCoder
+
+
+@BBOX_CODERS.register_module()
+class TBLRBBoxCoder(BaseBBoxCoder):
+    """TBLR BBox coder.
+
+    Following the practice in `FSAF <https://arxiv.org/abs/1903.00621>`_,
+    this coder encodes gt bboxes (x1, y1, x2, y2) into (top, bottom, left,
+    right) and decodes it back to the original.
+
+    Args:
+        normalizer (list | float): Normalization factor to be
+            divided with when coding the coordinates. If it is a list, it
+            should have length of 4 indicating normalization factor in tblr
+            dims. Otherwise it is a unified float factor for all dims.
+            Default: 4.0
+        clip_border (bool, optional): Whether clip the objects outside the
+            border of the image. Defaults to True.
+    """
+
+    def __init__(self, normalizer=4.0, clip_border=True):
+        super(BaseBBoxCoder, self).__init__()
+        self.normalizer = normalizer
+        self.clip_border = clip_border
+
+    def encode(self, bboxes, gt_bboxes):
+        """Get box regression transformation deltas that can be used to
+        transform the ``bboxes`` into the ``gt_bboxes`` in the (top, bottom,
+        left, right) order.
+
+        Args:
+            bboxes (torch.Tensor): source boxes, e.g., object proposals.
+            gt_bboxes (torch.Tensor): target of the transformation, e.g.,
+                ground truth boxes.
+
+        Returns:
+            torch.Tensor: Box transformation deltas
+        """
+        assert bboxes.size(0) == gt_bboxes.size(0)
+        assert bboxes.size(-1) == gt_bboxes.size(-1) == 4
+        encoded_bboxes = bboxes2tblr(
+            bboxes, gt_bboxes, normalizer=self.normalizer)
+        return encoded_bboxes
+
+    def decode(self, bboxes, pred_bboxes, max_shape=None):
+        """Apply transformation `pred_bboxes` to `boxes`.
+
+        Args:
+            bboxes (torch.Tensor): Basic boxes. Shape (B, N, 4) or (N, 4)
+            pred_bboxes (torch.Tensor): Encoded boxes with shape
+                (B, N, 4) or (N, 4)
+            max_shape (Sequence[int] or torch.Tensor or Sequence[
+                Sequence[int]], optional): Maximum bounds for boxes, specifies
+                (H, W, C) or (H, W). If bboxes shape is (B, N, 4), then
+                the max_shape should be a Sequence[Sequence[int]]
+                and the length of max_shape should also be B.
+
+        Returns:
+            torch.Tensor: Decoded boxes.
+        """
+        decoded_bboxes = tblr2bboxes(
+            bboxes,
+            pred_bboxes,
+            normalizer=self.normalizer,
+            max_shape=max_shape,
+            clip_border=self.clip_border)
+
+        return decoded_bboxes
+
+
+@mmcv.jit(coderize=True)
+def bboxes2tblr(priors, gts, normalizer=4.0, normalize_by_wh=True):
+    """Encode ground truth boxes to tblr coordinate.
+
+    It first converts the gt coordinates to the tblr format,
+    (top, bottom, left, right), relative to prior box centers.
+    The tblr coordinate may be normalized by the side length of prior bboxes
+    if `normalize_by_wh` is specified as True, and it is then normalized by
+    the `normalizer` factor.
+
+    Args:
+        priors (Tensor): Prior boxes in point form
+            Shape: (num_proposals, 4).
+        gts (Tensor): Coords of ground truth for each prior in point-form
+            Shape: (num_proposals, 4).
+        normalizer (Sequence[float] | float): normalization parameter of
+            encoded boxes. If it is a list, it has to have length = 4.
+            Default: 4.0
+        normalize_by_wh (bool): Whether to normalize tblr coordinate by the
+            side length (wh) of prior bboxes.
+
+    Return:
+        encoded boxes (Tensor), Shape: (num_proposals, 4)
+    """
+
+    # dist b/t match center and prior's center
+    if not isinstance(normalizer, float):
+        normalizer = torch.tensor(normalizer, device=priors.device)
+        assert len(normalizer) == 4, 'Normalizer must have length = 4'
+    assert priors.size(0) == gts.size(0)
+    prior_centers = (priors[:, 0:2] + priors[:, 2:4]) / 2
+    xmin, ymin, xmax, ymax = gts.split(1, dim=1)
+    top = prior_centers[:, 1].unsqueeze(1) - ymin
+    bottom = ymax - prior_centers[:, 1].unsqueeze(1)
+    left = prior_centers[:, 0].unsqueeze(1) - xmin
+    right = xmax - prior_centers[:, 0].unsqueeze(1)
+    loc = torch.cat((top, bottom, left, right), dim=1)
+    if normalize_by_wh:
+        # Normalize tblr by anchor width and height
+        wh = priors[:, 2:4] - priors[:, 0:2]
+        w, h = torch.split(wh, 1, dim=1)
+        loc[:, :2] /= h  # tb is normalized by h
+        loc[:, 2:] /= w  # lr is normalized by w
+    # Normalize tblr by the given normalization factor
+    return loc / normalizer
+
+
+@mmcv.jit(coderize=True)
+def tblr2bboxes(priors,
+                tblr,
+                normalizer=4.0,
+                normalize_by_wh=True,
+                max_shape=None,
+                clip_border=True):
+    """Decode tblr outputs to prediction boxes.
+
+    The process includes 3 steps: 1) De-normalize tblr coordinates by
+    multiplying it with `normalizer`; 2) De-normalize tblr coordinates by the
+    prior bbox width and height if `normalize_by_wh` is `True`; 3) Convert
+    tblr (top, bottom, left, right) pair relative to the center of priors back
+    to (xmin, ymin, xmax, ymax) coordinate.
+
+    Args:
+        priors (Tensor): Prior boxes in point form (x0, y0, x1, y1)
+            Shape: (N, 4) or (B, N, 4).
+        tblr (Tensor): Coords of network output in tblr form
+            Shape: (N, 4) or (B, N, 4).
+        normalizer (Sequence[float] | float): Normalization parameter of
+            encoded boxes. By list, it represents the normalization factors at
+            tblr dims. By float, it is the unified normalization factor at all
+            dims.
Default: 4.0
+        normalize_by_wh (bool): Whether the tblr coordinates have been
+            normalized by the side length (wh) of prior bboxes.
+        max_shape (Sequence[int] or torch.Tensor or Sequence[
+            Sequence[int]], optional): Maximum bounds for boxes, specifies
+            (H, W, C) or (H, W). If priors shape is (B, N, 4), then
+            the max_shape should be a Sequence[Sequence[int]]
+            and the length of max_shape should also be B.
+        clip_border (bool, optional): Whether clip the objects outside the
+            border of the image. Defaults to True.
+
+    Return:
+        encoded boxes (Tensor): Boxes with shape (N, 4) or (B, N, 4)
+    """
+    if not isinstance(normalizer, float):
+        normalizer = torch.tensor(normalizer, device=priors.device)
+        assert len(normalizer) == 4, 'Normalizer must have length = 4'
+    assert priors.size(0) == tblr.size(0)
+    if priors.ndim == 3:
+        assert priors.size(1) == tblr.size(1)
+
+    loc_decode = tblr * normalizer
+    prior_centers = (priors[..., 0:2] + priors[..., 2:4]) / 2
+    if normalize_by_wh:
+        wh = priors[..., 2:4] - priors[..., 0:2]
+        w, h = torch.split(wh, 1, dim=-1)
+        # Inplace operations on slices would fail when exporting to ONNX
+        th = h * loc_decode[..., :2]  # tb
+        tw = w * loc_decode[..., 2:]  # lr
+        loc_decode = torch.cat([th, tw], dim=-1)
+    # loc_decode.split(1, dim=-1) cannot be exported to ONNX
+    top, bottom, left, right = loc_decode.split((1, 1, 1, 1), dim=-1)
+    xmin = prior_centers[..., 0].unsqueeze(-1) - left
+    xmax = prior_centers[..., 0].unsqueeze(-1) + right
+    ymin = prior_centers[..., 1].unsqueeze(-1) - top
+    ymax = prior_centers[..., 1].unsqueeze(-1) + bottom
+
+    bboxes = torch.cat((xmin, ymin, xmax, ymax), dim=-1)
+
+    if clip_border and max_shape is not None:
+        # clip bboxes with dynamic `min` and `max` for onnx
+        if torch.onnx.is_in_onnx_export():
+            from mmdet.core.export import dynamic_clip_for_onnx
+            xmin, ymin, xmax, ymax = dynamic_clip_for_onnx(
+                xmin, ymin, xmax, ymax, max_shape)
+            bboxes = torch.cat([xmin, ymin, xmax, ymax], dim=-1)
+            return bboxes
+        if not isinstance(max_shape, torch.Tensor):
+            max_shape = priors.new_tensor(max_shape)
+        max_shape = max_shape[..., :2].type_as(priors)
+        if max_shape.ndim == 2:
+            assert bboxes.ndim == 3
+            assert max_shape.size(0) == bboxes.size(0)
+
+        min_xy = priors.new_tensor(0)
+        max_xy = torch.cat([max_shape, max_shape],
+                           dim=-1).flip(-1).unsqueeze(-2)
+        bboxes = torch.where(bboxes < min_xy, min_xy, bboxes)
+        bboxes = torch.where(bboxes > max_xy, max_xy, bboxes)
+
+    return bboxes
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/yolo_bbox_coder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/yolo_bbox_coder.py
new file mode 100644
index 0000000000000000000000000000000000000000..d6d0e82ac780820952938d8751ac9776ea31588a
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/yolo_bbox_coder.py
@@ -0,0 +1,89 @@
+import mmcv
+import torch
+
+from ..builder import BBOX_CODERS
+from .base_bbox_coder import BaseBBoxCoder
+
+
+@BBOX_CODERS.register_module()
+class YOLOBBoxCoder(BaseBBoxCoder):
+    """YOLO BBox coder.
+
+    Following `YOLO <https://arxiv.org/abs/1506.02640>`_, this coder divides
+    the image into grids and encodes bbox (x1, y1, x2, y2) into (cx, cy, dw,
+    dh). cx, cy in [0., 1.] denote the relative center position w.r.t. the
+    center of bboxes. dw, dh are the same as :obj:`DeltaXYWHBBoxCoder`.
+
+    Args:
+        eps (float): Min value of cx, cy when encoding.
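+
+    Example:
+        >>> # Illustrative sketch; the stride value is assumed, not taken
+        >>> # from a config. Encoding then decoding recovers the gt box.
+        >>> self = YOLOBBoxCoder(eps=1e-6)
+        >>> anchors = torch.Tensor([[0., 0., 32., 32.]])
+        >>> gt = torch.Tensor([[4., 4., 28., 28.]])
+        >>> deltas = self.encode(anchors, gt, stride=32)
+        >>> torch.allclose(self.decode(anchors, deltas, stride=32), gt)
+        True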
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/yolo_bbox_coder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/yolo_bbox_coder.py
new file mode 100644
index 0000000000000000000000000000000000000000..d6d0e82ac780820952938d8751ac9776ea31588a
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/coder/yolo_bbox_coder.py
@@ -0,0 +1,89 @@
+import mmcv
+import torch
+
+from ..builder import BBOX_CODERS
+from .base_bbox_coder import BaseBBoxCoder
+
+
+@BBOX_CODERS.register_module()
+class YOLOBBoxCoder(BaseBBoxCoder):
+    """YOLO BBox coder.
+
+    Following `YOLO <https://arxiv.org/abs/1506.02640>`_, this coder divides
+    the image into grids, and encodes bbox (x1, y1, x2, y2) into
+    (cx, cy, dw, dh). cx, cy in [0., 1.] denote the relative center position
+    w.r.t. the center of bboxes. dw, dh are the same as
+    :obj:`DeltaXYWHBBoxCoder`.
+
+    Args:
+        eps (float): Min value of cx, cy when encoding.
+    """
+
+    def __init__(self, eps=1e-6):
+        super(YOLOBBoxCoder, self).__init__()
+        self.eps = eps
+
+    @mmcv.jit(coderize=True)
+    def encode(self, bboxes, gt_bboxes, stride):
+        """Get box regression transformation deltas that can be used to
+        transform the ``bboxes`` into the ``gt_bboxes``.
+
+        Args:
+            bboxes (torch.Tensor): Source boxes, e.g., anchors.
+            gt_bboxes (torch.Tensor): Target of the transformation, e.g.,
+                ground-truth boxes.
+            stride (torch.Tensor | int): Stride of bboxes.
+
+        Returns:
+            torch.Tensor: Box transformation deltas
+        """
+
+        assert bboxes.size(0) == gt_bboxes.size(0)
+        assert bboxes.size(-1) == gt_bboxes.size(-1) == 4
+        x_center_gt = (gt_bboxes[..., 0] + gt_bboxes[..., 2]) * 0.5
+        y_center_gt = (gt_bboxes[..., 1] + gt_bboxes[..., 3]) * 0.5
+        w_gt = gt_bboxes[..., 2] - gt_bboxes[..., 0]
+        h_gt = gt_bboxes[..., 3] - gt_bboxes[..., 1]
+        x_center = (bboxes[..., 0] + bboxes[..., 2]) * 0.5
+        y_center = (bboxes[..., 1] + bboxes[..., 3]) * 0.5
+        w = bboxes[..., 2] - bboxes[..., 0]
+        h = bboxes[..., 3] - bboxes[..., 1]
+        w_target = torch.log((w_gt / w).clamp(min=self.eps))
+        h_target = torch.log((h_gt / h).clamp(min=self.eps))
+        x_center_target = ((x_center_gt - x_center) / stride + 0.5).clamp(
+            self.eps, 1 - self.eps)
+        y_center_target = ((y_center_gt - y_center) / stride + 0.5).clamp(
+            self.eps, 1 - self.eps)
+        encoded_bboxes = torch.stack(
+            [x_center_target, y_center_target, w_target, h_target], dim=-1)
+        return encoded_bboxes
+
+    @mmcv.jit(coderize=True)
+    def decode(self, bboxes, pred_bboxes, stride):
+        """Apply transformation `pred_bboxes` to `bboxes`.
+
+        Args:
+            bboxes (torch.Tensor): Basic boxes, e.g. anchors.
+            pred_bboxes (torch.Tensor): Encoded boxes with shape (N, 4).
+            stride (torch.Tensor | int): Strides of bboxes.
+
+        Returns:
+            torch.Tensor: Decoded boxes.
+        """
+        assert pred_bboxes.size(0) == bboxes.size(0)
+        assert pred_bboxes.size(-1) == bboxes.size(-1) == 4
+        x_center = (bboxes[..., 0] + bboxes[..., 2]) * 0.5
+        y_center = (bboxes[..., 1] + bboxes[..., 3]) * 0.5
+        w = bboxes[..., 2] - bboxes[..., 0]
+        h = bboxes[..., 3] - bboxes[..., 1]
+        # Get outputs x, y
+        x_center_pred = (pred_bboxes[..., 0] - 0.5) * stride + x_center
+        y_center_pred = (pred_bboxes[..., 1] - 0.5) * stride + y_center
+        w_pred = torch.exp(pred_bboxes[..., 2]) * w
+        h_pred = torch.exp(pred_bboxes[..., 3]) * h
+
+        decoded_bboxes = torch.stack(
+            (x_center_pred - w_pred / 2, y_center_pred - h_pred / 2,
+             x_center_pred + w_pred / 2, y_center_pred + h_pred / 2),
+            dim=-1)
+
+        return decoded_bboxes
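# --- Editorial sketch (not part of the patch): YOLOBBoxCoder round trip.
# Because encode() clamps cx, cy into [eps, 1 - eps], the round trip is only
# exact when the gt center falls inside the anchor's grid cell, as here.
import torch

coder = YOLOBBoxCoder()
anchors = torch.tensor([[16., 16., 48., 48.]])   # center (32, 32), 32x32
gts = torch.tensor([[20., 12., 52., 60.]])       # center (36, 36), 32x48
stride = 32
deltas = coder.encode(anchors, gts, stride)      # (cx, cy, dw, dh)
restored = coder.decode(anchors, deltas, stride)
assert torch.allclose(restored, gts, atol=1e-3)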
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/demodata.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/demodata.py
new file mode 100644
index 0000000000000000000000000000000000000000..feecb693745a47d9f2bebd8af9a217ff4f5cc92b
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/demodata.py
@@ -0,0 +1,41 @@
+import numpy as np
+import torch
+
+from mmdet.utils.util_random import ensure_rng
+
+
+def random_boxes(num=1, scale=1, rng=None):
+    """Simple version of ``kwimage.Boxes.random``.
+
+    Returns:
+        Tensor: shape (n, 4) in x1, y1, x2, y2 format.
+
+    References:
+        https://gitlab.kitware.com/computer-vision/kwimage/blob/master/kwimage/structs/boxes.py#L1390
+
+    Example:
+        >>> num = 3
+        >>> scale = 512
+        >>> rng = 0
+        >>> boxes = random_boxes(num, scale, rng)
+        >>> print(boxes)
+        tensor([[280.9925, 278.9802, 308.6148, 366.1769],
+                [216.9113, 330.6978, 224.0446, 456.5878],
+                [405.3632, 196.3221, 493.3953, 270.7942]])
+    """
+    rng = ensure_rng(rng)
+
+    tlbr = rng.rand(num, 4).astype(np.float32)
+
+    tl_x = np.minimum(tlbr[:, 0], tlbr[:, 2])
+    tl_y = np.minimum(tlbr[:, 1], tlbr[:, 3])
+    br_x = np.maximum(tlbr[:, 0], tlbr[:, 2])
+    br_y = np.maximum(tlbr[:, 1], tlbr[:, 3])
+
+    tlbr[:, 0] = tl_x * scale
+    tlbr[:, 1] = tl_y * scale
+    tlbr[:, 2] = br_x * scale
+    tlbr[:, 3] = br_y * scale
+
+    boxes = torch.from_numpy(tlbr)
+    return boxes
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/iou_calculators/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/iou_calculators/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e71369a58a05fa25e6a754300875fdbb87cb26a5
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/iou_calculators/__init__.py
@@ -0,0 +1,4 @@
+from .builder import build_iou_calculator
+from .iou2d_calculator import BboxOverlaps2D, bbox_overlaps
+
+__all__ = ['build_iou_calculator', 'BboxOverlaps2D', 'bbox_overlaps']
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/iou_calculators/builder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/iou_calculators/builder.py
new file mode 100644
index 0000000000000000000000000000000000000000..09094d7ece46a9f18a28ed0960feac2afa9331bb
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/iou_calculators/builder.py
@@ -0,0 +1,8 @@
+from mmcv.utils import Registry, build_from_cfg
+
+IOU_CALCULATORS = Registry('IoU calculator')
+
+
+def build_iou_calculator(cfg, default_args=None):
+    """Builder of IoU calculator."""
+    return build_from_cfg(cfg, IOU_CALCULATORS, default_args)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/iou_calculators/iou2d_calculator.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/iou_calculators/iou2d_calculator.py
new file mode 100644
index 0000000000000000000000000000000000000000..25f2b4679292b8568997443651cef6b89c9d2404
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/iou_calculators/iou2d_calculator.py
@@ -0,0 +1,260 @@
+import torch
+
+from .builder import IOU_CALCULATORS
+
+
+def cast_tensor_type(x, scale=1., dtype=None):
+    if dtype == 'fp16':
+        # scale is for preventing overflows
+        x = (x / scale).half()
+    return x
+
+
+def fp16_clamp(x, min=None, max=None):
+    if not x.is_cuda and x.dtype == torch.float16:
+        # clamp for cpu float16, tensor fp16 has no clamp implementation
+        return x.float().clamp(min, max).half()
+
+    return x.clamp(min, max)
+
+
+@IOU_CALCULATORS.register_module()
+class BboxOverlaps2D:
+    """2D Overlaps (e.g. IoUs, GIoUs) Calculator."""
+
+    def __init__(self, scale=1., dtype=None):
+        self.scale = scale
+        self.dtype = dtype
+
+    def __call__(self, bboxes1, bboxes2, mode='iou', is_aligned=False):
+        """Calculate IoU between 2D bboxes.
+
+        Args:
+            bboxes1 (Tensor): bboxes have shape (m, 4) in <x1, y1, x2, y2>
+                format, or shape (m, 5) in <x1, y1, x2, y2, score> format.
+            bboxes2 (Tensor): bboxes have shape (n, 4) in <x1, y1, x2, y2>
+                format, shape (n, 5) in <x1, y1, x2, y2, score> format, or
+                be empty. If ``is_aligned`` is ``True``, then m and n must
+                be equal.
+            mode (str): "iou" (intersection over union), "iof" (intersection
+                over foreground), or "giou" (generalized intersection over
+                union).
+            is_aligned (bool, optional): If True, then m and n must be equal.
+                Default False.
+
+        Returns:
+            Tensor: shape (m, n) if ``is_aligned`` is False else shape (m,)
+        """
+        assert bboxes1.size(-1) in [0, 4, 5]
+        assert bboxes2.size(-1) in [0, 4, 5]
+        if bboxes2.size(-1) == 5:
+            bboxes2 = bboxes2[..., :4]
+        if bboxes1.size(-1) == 5:
+            bboxes1 = bboxes1[..., :4]
+
+        if self.dtype == 'fp16':
+            # change tensor type to save cpu and cuda memory and keep speed
+            bboxes1 = cast_tensor_type(bboxes1, self.scale, self.dtype)
+            bboxes2 = cast_tensor_type(bboxes2, self.scale, self.dtype)
+            overlaps = bbox_overlaps(bboxes1, bboxes2, mode, is_aligned)
+            if not overlaps.is_cuda and overlaps.dtype == torch.float16:
+                # resume cpu float32
+                overlaps = overlaps.float()
+            return overlaps
+
+        return bbox_overlaps(bboxes1, bboxes2, mode, is_aligned)
+
+    def __repr__(self):
+        """str: a string describing the module"""
+        repr_str = self.__class__.__name__ + f'(' \
+            f'scale={self.scale}, dtype={self.dtype})'
+        return repr_str
+
+
+def bbox_overlaps(bboxes1, bboxes2, mode='iou', is_aligned=False, eps=1e-6):
+    """Calculate overlap between two sets of bboxes.
+
+    FP16 contributed by https://github.com/open-mmlab/mmdetection/pull/4889
+    Note:
+        Assume bboxes1 is M x 4 and bboxes2 is N x 4. When mode is 'iou',
+        the following intermediate variables are generated while computing
+        IoU with the bbox_overlaps function:
+
+        1) is_aligned is False
+            area1: M x 1
+            area2: N x 1
+            lt: M x N x 2
+            rb: M x N x 2
+            wh: M x N x 2
+            overlap: M x N x 1
+            union: M x N x 1
+            ious: M x N x 1
+
+            Total memory:
+                S = (9 x N x M + N + M) * 4 Byte
+
+            When using FP16, we can reduce:
+                R = (9 x N x M + N + M) * 4 / 2 Byte
+            R > (N + M) * 4 * 2 always holds when N and M >= 1, since
+            N + M <= N * M < 3 * N * M when N >= 2 and M >= 2, and
+            N + 1 < 3 * N when N or M is 1.
+
+            Given M = 40 (ground truths) and N = 400000 (three anchor boxes
+            per grid location, FPN, R-CNNs), the saving per call is
+                R = 275 MB
+
+            A special case (dense detection) with M = 512 ground truths gives
+                R = 3516 MB = 3.43 GB
+
+            When the batch size is B, the reduction is B x R, so CUDA memory
+            frequently runs out without it.
+
+            Experiments on a GeForce RTX 2080Ti (11019 MiB):
+
+            | dtype | M   | N      | Use      | Real     | Ideal    |
+            |:-----:|:---:|:------:|:--------:|:--------:|:--------:|
+            | FP32  | 512 | 400000 | 8020 MiB | --       | --       |
+            | FP16  | 512 | 400000 | 4504 MiB | 3516 MiB | 3516 MiB |
+            | FP32  | 40  | 400000 | 1540 MiB | --       | --       |
+            | FP16  | 40  | 400000 | 1264 MiB | 276 MiB  | 275 MiB  |
+
+        2) is_aligned is True
+            area1: N x 1
+            area2: N x 1
+            lt: N x 2
+            rb: N x 2
+            wh: N x 2
+            overlap: N x 1
+            union: N x 1
+            ious: N x 1
+
+            Total memory:
+                S = 11 x N * 4 Byte
+
+            When using FP16, we can reduce:
+                R = 11 x N * 4 / 2 Byte
+
+        The same holds for 'giou', which uses more memory than 'iou'.
+
+        Time-wise, FP16 is generally faster than FP32.
+
+        When gpu_assign_thr is not -1, the computation moves to the CPU,
+        which takes more time without reducing GPU memory; with FP16 we can
+        halve the memory while keeping the speed.
+
+    If ``is_aligned`` is ``False``, then calculate the overlaps between each
+    bbox of bboxes1 and bboxes2, otherwise the overlaps between each aligned
+    pair of bboxes1 and bboxes2.
+
+    Args:
+        bboxes1 (Tensor): shape (B, m, 4) in <x1, y1, x2, y2> format or
+            empty.
+        bboxes2 (Tensor): shape (B, n, 4) in <x1, y1, x2, y2> format or
+            empty. B indicates the batch dim, in shape (B1, B2, ..., Bn).
+            If ``is_aligned`` is ``True``, then m and n must be equal.
+ mode (str): "iou" (intersection over union), "iof" (intersection over + foreground) or "giou" (generalized intersection over union). + Default "iou". + is_aligned (bool, optional): If True, then m and n must be equal. + Default False. + eps (float, optional): A value added to the denominator for numerical + stability. Default 1e-6. + + Returns: + Tensor: shape (m, n) if ``is_aligned `` is False else shape (m,) + + Example: + >>> bboxes1 = torch.FloatTensor([ + >>> [0, 0, 10, 10], + >>> [10, 10, 20, 20], + >>> [32, 32, 38, 42], + >>> ]) + >>> bboxes2 = torch.FloatTensor([ + >>> [0, 0, 10, 20], + >>> [0, 10, 10, 19], + >>> [10, 10, 20, 20], + >>> ]) + >>> overlaps = bbox_overlaps(bboxes1, bboxes2) + >>> assert overlaps.shape == (3, 3) + >>> overlaps = bbox_overlaps(bboxes1, bboxes2, is_aligned=True) + >>> assert overlaps.shape == (3, ) + + Example: + >>> empty = torch.empty(0, 4) + >>> nonempty = torch.FloatTensor([[0, 0, 10, 9]]) + >>> assert tuple(bbox_overlaps(empty, nonempty).shape) == (0, 1) + >>> assert tuple(bbox_overlaps(nonempty, empty).shape) == (1, 0) + >>> assert tuple(bbox_overlaps(empty, empty).shape) == (0, 0) + """ + + assert mode in ['iou', 'iof', 'giou'], f'Unsupported mode {mode}' + # Either the boxes are empty or the length of boxes' last dimension is 4 + assert (bboxes1.size(-1) == 4 or bboxes1.size(0) == 0) + assert (bboxes2.size(-1) == 4 or bboxes2.size(0) == 0) + + # Batch dim must be the same + # Batch dim: (B1, B2, ... Bn) + assert bboxes1.shape[:-2] == bboxes2.shape[:-2] + batch_shape = bboxes1.shape[:-2] + + rows = bboxes1.size(-2) + cols = bboxes2.size(-2) + if is_aligned: + assert rows == cols + + if rows * cols == 0: + if is_aligned: + return bboxes1.new(batch_shape + (rows, )) + else: + return bboxes1.new(batch_shape + (rows, cols)) + + area1 = (bboxes1[..., 2] - bboxes1[..., 0]) * ( + bboxes1[..., 3] - bboxes1[..., 1]) + area2 = (bboxes2[..., 2] - bboxes2[..., 0]) * ( + bboxes2[..., 3] - bboxes2[..., 1]) + + if is_aligned: + lt = torch.max(bboxes1[..., :2], bboxes2[..., :2]) # [B, rows, 2] + rb = torch.min(bboxes1[..., 2:], bboxes2[..., 2:]) # [B, rows, 2] + + wh = fp16_clamp(rb - lt, min=0) + overlap = wh[..., 0] * wh[..., 1] + + if mode in ['iou', 'giou']: + union = area1 + area2 - overlap + else: + union = area1 + if mode == 'giou': + enclosed_lt = torch.min(bboxes1[..., :2], bboxes2[..., :2]) + enclosed_rb = torch.max(bboxes1[..., 2:], bboxes2[..., 2:]) + else: + lt = torch.max(bboxes1[..., :, None, :2], + bboxes2[..., None, :, :2]) # [B, rows, cols, 2] + rb = torch.min(bboxes1[..., :, None, 2:], + bboxes2[..., None, :, 2:]) # [B, rows, cols, 2] + + wh = fp16_clamp(rb - lt, min=0) + overlap = wh[..., 0] * wh[..., 1] + + if mode in ['iou', 'giou']: + union = area1[..., None] + area2[..., None, :] - overlap + else: + union = area1[..., None] + if mode == 'giou': + enclosed_lt = torch.min(bboxes1[..., :, None, :2], + bboxes2[..., None, :, :2]) + enclosed_rb = torch.max(bboxes1[..., :, None, 2:], + bboxes2[..., None, :, 2:]) + + eps = union.new_tensor([eps]) + union = torch.max(union, eps) + ious = overlap / union + if mode in ['iou', 'iof']: + return ious + # calculate gious + enclose_wh = fp16_clamp(enclosed_rb - enclosed_lt, min=0) + enclose_area = enclose_wh[..., 0] * enclose_wh[..., 1] + enclose_area = torch.max(enclose_area, eps) + gious = ious - (enclose_area - union) / enclose_area + return gious diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/match_costs/__init__.py 
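# --- Editorial sketch (not part of the patch): pairwise vs. aligned overlaps
# with bbox_overlaps above, plus the fp16 memory-saving path of
# BboxOverlaps2D. The printed values in the comments are easy to check by
# hand; the fp16 result is only approximately equal.
import torch

b1 = torch.tensor([[0., 0., 10., 10.]])
b2 = torch.tensor([[0., 0., 10., 10.], [5., 5., 15., 15.]])
print(bbox_overlaps(b1, b2))           # tensor([[1.0000, 0.1429]])
print(bbox_overlaps(b1, b1, 'giou'))   # tensor([[1.]]) for identical boxes
calc = BboxOverlaps2D(scale=512., dtype='fp16')  # roughly halves peak memory
print(calc(b1, b2))                    # ~same IoUs, computed in half precision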
b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/match_costs/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..add5e0d394034d89b2d47c314ff1938294deb6ea
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/match_costs/__init__.py
@@ -0,0 +1,7 @@
+from .builder import build_match_cost
+from .match_cost import BBoxL1Cost, ClassificationCost, FocalLossCost, IoUCost
+
+__all__ = [
+    'build_match_cost', 'ClassificationCost', 'BBoxL1Cost', 'IoUCost',
+    'FocalLossCost'
+]
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/match_costs/builder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/match_costs/builder.py
new file mode 100644
index 0000000000000000000000000000000000000000..6894017d42eb16ee4a8ae3ed660a71cda3ad9940
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/match_costs/builder.py
@@ -0,0 +1,8 @@
+from mmcv.utils import Registry, build_from_cfg
+
+MATCH_COST = Registry('Match Cost')
+
+
+def build_match_cost(cfg, default_args=None):
+    """Builder of match cost."""
+    return build_from_cfg(cfg, MATCH_COST, default_args)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/match_costs/match_cost.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/match_costs/match_cost.py
new file mode 100644
index 0000000000000000000000000000000000000000..ae852a5dafba2946462904d9b2b110f4cfe7ee8b
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/match_costs/match_cost.py
@@ -0,0 +1,184 @@
+import torch
+
+from mmdet.core.bbox.iou_calculators import bbox_overlaps
+from mmdet.core.bbox.transforms import bbox_cxcywh_to_xyxy, bbox_xyxy_to_cxcywh
+from .builder import MATCH_COST
+
+
+@MATCH_COST.register_module()
+class BBoxL1Cost:
+    """BBoxL1Cost.
+
+    Args:
+        weight (int | float, optional): loss_weight
+        box_format (str, optional): 'xyxy' for DETR, 'xywh' for Sparse_RCNN
+
+    Examples:
+        >>> from mmdet.core.bbox.match_costs.match_cost import BBoxL1Cost
+        >>> import torch
+        >>> self = BBoxL1Cost()
+        >>> bbox_pred = torch.rand(1, 4)
+        >>> gt_bboxes = torch.FloatTensor([[0, 0, 2, 4], [1, 2, 3, 4]])
+        >>> self(bbox_pred, gt_bboxes)
+        tensor([[1.6172, 1.6422]])
+    """
+
+    def __init__(self, weight=1., box_format='xyxy'):
+        self.weight = weight
+        assert box_format in ['xyxy', 'xywh']
+        self.box_format = box_format
+
+    def __call__(self, bbox_pred, gt_bboxes):
+        """
+        Args:
+            bbox_pred (Tensor): Predicted boxes with normalized coordinates
+                (cx, cy, w, h), which are all in range [0, 1]. Shape
+                [num_query, 4].
+            gt_bboxes (Tensor): Ground truth boxes with normalized
+                coordinates (x1, y1, x2, y2). Shape [num_gt, 4].
+
+        Returns:
+            torch.Tensor: bbox_cost value with weight
+        """
+        if self.box_format == 'xywh':
+            gt_bboxes = bbox_xyxy_to_cxcywh(gt_bboxes)
+        elif self.box_format == 'xyxy':
+            bbox_pred = bbox_cxcywh_to_xyxy(bbox_pred)
+        bbox_cost = torch.cdist(bbox_pred, gt_bboxes, p=1)
+        return bbox_cost * self.weight
+
+
+@MATCH_COST.register_module()
+class FocalLossCost:
+    """FocalLossCost.
+ + Args: + weight (int | float, optional): loss_weight + alpha (int | float, optional): focal_loss alpha + gamma (int | float, optional): focal_loss gamma + eps (float, optional): default 1e-12 + + Examples: + >>> from mmdet.core.bbox.match_costs.match_cost import FocalLossCost + >>> import torch + >>> self = FocalLossCost() + >>> cls_pred = torch.rand(4, 3) + >>> gt_labels = torch.tensor([0, 1, 2]) + >>> factor = torch.tensor([10, 8, 10, 8]) + >>> self(cls_pred, gt_labels) + tensor([[-0.3236, -0.3364, -0.2699], + [-0.3439, -0.3209, -0.4807], + [-0.4099, -0.3795, -0.2929], + [-0.1950, -0.1207, -0.2626]]) + """ + + def __init__(self, weight=1., alpha=0.25, gamma=2, eps=1e-12): + self.weight = weight + self.alpha = alpha + self.gamma = gamma + self.eps = eps + + def __call__(self, cls_pred, gt_labels): + """ + Args: + cls_pred (Tensor): Predicted classification logits, shape + [num_query, num_class]. + gt_labels (Tensor): Label of `gt_bboxes`, shape (num_gt,). + + Returns: + torch.Tensor: cls_cost value with weight + """ + cls_pred = cls_pred.sigmoid() + neg_cost = -(1 - cls_pred + self.eps).log() * ( + 1 - self.alpha) * cls_pred.pow(self.gamma) + pos_cost = -(cls_pred + self.eps).log() * self.alpha * ( + 1 - cls_pred).pow(self.gamma) + cls_cost = pos_cost[:, gt_labels] - neg_cost[:, gt_labels] + return cls_cost * self.weight + + +@MATCH_COST.register_module() +class ClassificationCost: + """ClsSoftmaxCost. + + Args: + weight (int | float, optional): loss_weight + + Examples: + >>> from mmdet.core.bbox.match_costs.match_cost import \ + ... ClassificationCost + >>> import torch + >>> self = ClassificationCost() + >>> cls_pred = torch.rand(4, 3) + >>> gt_labels = torch.tensor([0, 1, 2]) + >>> factor = torch.tensor([10, 8, 10, 8]) + >>> self(cls_pred, gt_labels) + tensor([[-0.3430, -0.3525, -0.3045], + [-0.3077, -0.2931, -0.3992], + [-0.3664, -0.3455, -0.2881], + [-0.3343, -0.2701, -0.3956]]) + """ + + def __init__(self, weight=1.): + self.weight = weight + + def __call__(self, cls_pred, gt_labels): + """ + Args: + cls_pred (Tensor): Predicted classification logits, shape + [num_query, num_class]. + gt_labels (Tensor): Label of `gt_bboxes`, shape (num_gt,). + + Returns: + torch.Tensor: cls_cost value with weight + """ + # Following the official DETR repo, contrary to the loss that + # NLL is used, we approximate it in 1 - cls_score[gt_label]. + # The 1 is a constant that doesn't change the matching, + # so it can be omitted. + cls_score = cls_pred.softmax(-1) + cls_cost = -cls_score[:, gt_labels] + return cls_cost * self.weight + + +@MATCH_COST.register_module() +class IoUCost: + """IoUCost. + + Args: + iou_mode (str, optional): iou mode such as 'iou' | 'giou' + weight (int | float, optional): loss weight + + Examples: + >>> from mmdet.core.bbox.match_costs.match_cost import IoUCost + >>> import torch + >>> self = IoUCost() + >>> bboxes = torch.FloatTensor([[1,1, 2, 2], [2, 2, 3, 4]]) + >>> gt_bboxes = torch.FloatTensor([[0, 0, 2, 4], [1, 2, 3, 4]]) + >>> self(bboxes, gt_bboxes) + tensor([[-0.1250, 0.1667], + [ 0.1667, -0.5000]]) + """ + + def __init__(self, iou_mode='giou', weight=1.): + self.weight = weight + self.iou_mode = iou_mode + + def __call__(self, bboxes, gt_bboxes): + """ + Args: + bboxes (Tensor): Predicted boxes with unnormalized coordinates + (x1, y1, x2, y2). Shape [num_query, 4]. + gt_bboxes (Tensor): Ground truth boxes with unnormalized + coordinates (x1, y1, x2, y2). Shape [num_gt, 4]. 
+ + Returns: + torch.Tensor: iou_cost value with weight + """ + # overlaps: [num_bboxes, num_gt] + overlaps = bbox_overlaps( + bboxes, gt_bboxes, mode=self.iou_mode, is_aligned=False) + # The 1 is a constant that doesn't change the matching, so omitted. + iou_cost = -overlaps + return iou_cost * self.weight diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0b06303fe1000e11c5486c40c70606a34a5208e3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/__init__.py @@ -0,0 +1,15 @@ +from .base_sampler import BaseSampler +from .combined_sampler import CombinedSampler +from .instance_balanced_pos_sampler import InstanceBalancedPosSampler +from .iou_balanced_neg_sampler import IoUBalancedNegSampler +from .ohem_sampler import OHEMSampler +from .pseudo_sampler import PseudoSampler +from .random_sampler import RandomSampler +from .sampling_result import SamplingResult +from .score_hlr_sampler import ScoreHLRSampler + +__all__ = [ + 'BaseSampler', 'PseudoSampler', 'RandomSampler', + 'InstanceBalancedPosSampler', 'IoUBalancedNegSampler', 'CombinedSampler', + 'OHEMSampler', 'SamplingResult', 'ScoreHLRSampler' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/base_sampler.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/base_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..9ea35def115b49dfdad8a1f7c040ef3cd983b0d1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/base_sampler.py @@ -0,0 +1,101 @@ +from abc import ABCMeta, abstractmethod + +import torch + +from .sampling_result import SamplingResult + + +class BaseSampler(metaclass=ABCMeta): + """Base class of samplers.""" + + def __init__(self, + num, + pos_fraction, + neg_pos_ub=-1, + add_gt_as_proposals=True, + **kwargs): + self.num = num + self.pos_fraction = pos_fraction + self.neg_pos_ub = neg_pos_ub + self.add_gt_as_proposals = add_gt_as_proposals + self.pos_sampler = self + self.neg_sampler = self + + @abstractmethod + def _sample_pos(self, assign_result, num_expected, **kwargs): + """Sample positive samples.""" + pass + + @abstractmethod + def _sample_neg(self, assign_result, num_expected, **kwargs): + """Sample negative samples.""" + pass + + def sample(self, + assign_result, + bboxes, + gt_bboxes, + gt_labels=None, + **kwargs): + """Sample positive and negative bboxes. + + This is a simple implementation of bbox sampling given candidates, + assigning results and ground truth bboxes. + + Args: + assign_result (:obj:`AssignResult`): Bbox assigning results. + bboxes (Tensor): Boxes to be sampled from. + gt_bboxes (Tensor): Ground truth bboxes. + gt_labels (Tensor, optional): Class labels of ground truth bboxes. + + Returns: + :obj:`SamplingResult`: Sampling result. 
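# --- Editorial sketch (not part of the patch): how the match costs defined
# above are typically combined into one cost matrix and solved with the
# Hungarian algorithm. In mmdet this lives in HungarianAssigner, which is not
# part of this hunk; scipy availability is assumed.
import torch
from scipy.optimize import linear_sum_assignment

cls_pred = torch.rand(5, 3)      # 5 queries, 3 classes (raw logits)
bbox_pred = torch.rand(5, 4)     # normalized (cx, cy, w, h)
gt_labels = torch.tensor([0, 2])
gt_bboxes = torch.tensor([[0.1, 0.1, 0.4, 0.5],   # normalized (x1, y1, x2, y2)
                          [0.5, 0.5, 0.9, 0.8]])

cost = (ClassificationCost(weight=1.)(cls_pred, gt_labels)
        + BBoxL1Cost(weight=5.)(bbox_pred, gt_bboxes))   # shape (5, 2)
rows, cols = linear_sum_assignment(cost.detach().numpy())
# query rows[i] is matched to ground-truth box cols[i] at minimum total cost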
+ + Example: + >>> from mmdet.core.bbox import RandomSampler + >>> from mmdet.core.bbox import AssignResult + >>> from mmdet.core.bbox.demodata import ensure_rng, random_boxes + >>> rng = ensure_rng(None) + >>> assign_result = AssignResult.random(rng=rng) + >>> bboxes = random_boxes(assign_result.num_preds, rng=rng) + >>> gt_bboxes = random_boxes(assign_result.num_gts, rng=rng) + >>> gt_labels = None + >>> self = RandomSampler(num=32, pos_fraction=0.5, neg_pos_ub=-1, + >>> add_gt_as_proposals=False) + >>> self = self.sample(assign_result, bboxes, gt_bboxes, gt_labels) + """ + if len(bboxes.shape) < 2: + bboxes = bboxes[None, :] + + bboxes = bboxes[:, :4] + + gt_flags = bboxes.new_zeros((bboxes.shape[0], ), dtype=torch.uint8) + if self.add_gt_as_proposals and len(gt_bboxes) > 0: + if gt_labels is None: + raise ValueError( + 'gt_labels must be given when add_gt_as_proposals is True') + bboxes = torch.cat([gt_bboxes, bboxes], dim=0) + assign_result.add_gt_(gt_labels) + gt_ones = bboxes.new_ones(gt_bboxes.shape[0], dtype=torch.uint8) + gt_flags = torch.cat([gt_ones, gt_flags]) + + num_expected_pos = int(self.num * self.pos_fraction) + pos_inds = self.pos_sampler._sample_pos( + assign_result, num_expected_pos, bboxes=bboxes, **kwargs) + # We found that sampled indices have duplicated items occasionally. + # (may be a bug of PyTorch) + pos_inds = pos_inds.unique() + num_sampled_pos = pos_inds.numel() + num_expected_neg = self.num - num_sampled_pos + if self.neg_pos_ub >= 0: + _pos = max(1, num_sampled_pos) + neg_upper_bound = int(self.neg_pos_ub * _pos) + if num_expected_neg > neg_upper_bound: + num_expected_neg = neg_upper_bound + neg_inds = self.neg_sampler._sample_neg( + assign_result, num_expected_neg, bboxes=bboxes, **kwargs) + neg_inds = neg_inds.unique() + + sampling_result = SamplingResult(pos_inds, neg_inds, bboxes, gt_bboxes, + assign_result, gt_flags) + return sampling_result diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/combined_sampler.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/combined_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..564729f0895b1863d94c479a67202438af45f996 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/combined_sampler.py @@ -0,0 +1,20 @@ +from ..builder import BBOX_SAMPLERS, build_sampler +from .base_sampler import BaseSampler + + +@BBOX_SAMPLERS.register_module() +class CombinedSampler(BaseSampler): + """A sampler that combines positive sampler and negative sampler.""" + + def __init__(self, pos_sampler, neg_sampler, **kwargs): + super(CombinedSampler, self).__init__(**kwargs) + self.pos_sampler = build_sampler(pos_sampler, **kwargs) + self.neg_sampler = build_sampler(neg_sampler, **kwargs) + + def _sample_pos(self, **kwargs): + """Sample positive samples.""" + raise NotImplementedError + + def _sample_neg(self, **kwargs): + """Sample negative samples.""" + raise NotImplementedError diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/instance_balanced_pos_sampler.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/instance_balanced_pos_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..c735298487e14e4a0ec42913f25673cccb98a8a0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/instance_balanced_pos_sampler.py @@ -0,0 +1,55 @@ +import numpy as np +import torch + +from ..builder import BBOX_SAMPLERS +from 
.random_sampler import RandomSampler + + +@BBOX_SAMPLERS.register_module() +class InstanceBalancedPosSampler(RandomSampler): + """Instance balanced sampler that samples equal number of positive samples + for each instance.""" + + def _sample_pos(self, assign_result, num_expected, **kwargs): + """Sample positive boxes. + + Args: + assign_result (:obj:`AssignResult`): The assigned results of boxes. + num_expected (int): The number of expected positive samples + + Returns: + Tensor or ndarray: sampled indices. + """ + pos_inds = torch.nonzero(assign_result.gt_inds > 0, as_tuple=False) + if pos_inds.numel() != 0: + pos_inds = pos_inds.squeeze(1) + if pos_inds.numel() <= num_expected: + return pos_inds + else: + unique_gt_inds = assign_result.gt_inds[pos_inds].unique() + num_gts = len(unique_gt_inds) + num_per_gt = int(round(num_expected / float(num_gts)) + 1) + sampled_inds = [] + for i in unique_gt_inds: + inds = torch.nonzero( + assign_result.gt_inds == i.item(), as_tuple=False) + if inds.numel() != 0: + inds = inds.squeeze(1) + else: + continue + if len(inds) > num_per_gt: + inds = self.random_choice(inds, num_per_gt) + sampled_inds.append(inds) + sampled_inds = torch.cat(sampled_inds) + if len(sampled_inds) < num_expected: + num_extra = num_expected - len(sampled_inds) + extra_inds = np.array( + list(set(pos_inds.cpu()) - set(sampled_inds.cpu()))) + if len(extra_inds) > num_extra: + extra_inds = self.random_choice(extra_inds, num_extra) + extra_inds = torch.from_numpy(extra_inds).to( + assign_result.gt_inds.device).long() + sampled_inds = torch.cat([sampled_inds, extra_inds]) + elif len(sampled_inds) > num_expected: + sampled_inds = self.random_choice(sampled_inds, num_expected) + return sampled_inds diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/iou_balanced_neg_sampler.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/iou_balanced_neg_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..f275e430d1b57c4d9df57387b8f3ae6f0ff68cf1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/iou_balanced_neg_sampler.py @@ -0,0 +1,157 @@ +import numpy as np +import torch + +from ..builder import BBOX_SAMPLERS +from .random_sampler import RandomSampler + + +@BBOX_SAMPLERS.register_module() +class IoUBalancedNegSampler(RandomSampler): + """IoU Balanced Sampling. + + arXiv: https://arxiv.org/pdf/1904.02701.pdf (CVPR 2019) + + Sampling proposals according to their IoU. `floor_fraction` of needed RoIs + are sampled from proposals whose IoU are lower than `floor_thr` randomly. + The others are sampled from proposals whose IoU are higher than + `floor_thr`. These proposals are sampled from some bins evenly, which are + split by `num_bins` via IoU evenly. + + Args: + num (int): number of proposals. + pos_fraction (float): fraction of positive proposals. + floor_thr (float): threshold (minimum) IoU for IoU balanced sampling, + set to -1 if all using IoU balanced sampling. + floor_fraction (float): sampling fraction of proposals under floor_thr. + num_bins (int): number of bins in IoU balanced sampling. 
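# --- Editorial sketch (not part of the patch): the quota arithmetic behind
# InstanceBalancedPosSampler above and the IoU intervals used by
# IoUBalancedNegSampler, in plain Python.
num_expected, num_gts = 10, 3
num_per_gt = int(round(num_expected / float(num_gts)) + 1)   # 4 per instance
# Shortfalls (instances with fewer positives) are topped up by a random draw
# from the remaining positives; any excess is randomly subsampled.

floor_thr, num_bins, max_iou = 0.1, 3, 0.7
iou_interval = (max_iou - floor_thr) / num_bins
bins = [(floor_thr + i * iou_interval,
         floor_thr + (i + 1) * iou_interval) for i in range(num_bins)]
# bins ~ [(0.1, 0.3), (0.3, 0.5), (0.5, 0.7)]: negatives are drawn evenly
# from each IoU bin rather than uniformly, which emphasizes hard negatives.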
+    """
+
+    def __init__(self,
+                 num,
+                 pos_fraction,
+                 floor_thr=-1,
+                 floor_fraction=0,
+                 num_bins=3,
+                 **kwargs):
+        super(IoUBalancedNegSampler, self).__init__(num, pos_fraction,
+                                                    **kwargs)
+        assert floor_thr >= 0 or floor_thr == -1
+        assert 0 <= floor_fraction <= 1
+        assert num_bins >= 1
+
+        self.floor_thr = floor_thr
+        self.floor_fraction = floor_fraction
+        self.num_bins = num_bins
+
+    def sample_via_interval(self, max_overlaps, full_set, num_expected):
+        """Sample according to the iou interval.
+
+        Args:
+            max_overlaps (torch.Tensor): IoU between bounding boxes and
+                ground truth boxes.
+            full_set (set(int)): A full set of indices of boxes.
+            num_expected (int): Number of expected samples.
+
+        Returns:
+            np.ndarray: Indices of samples
+        """
+        max_iou = max_overlaps.max()
+        iou_interval = (max_iou - self.floor_thr) / self.num_bins
+        per_num_expected = int(num_expected / self.num_bins)
+
+        sampled_inds = []
+        for i in range(self.num_bins):
+            start_iou = self.floor_thr + i * iou_interval
+            end_iou = self.floor_thr + (i + 1) * iou_interval
+            tmp_set = set(
+                np.where(
+                    np.logical_and(max_overlaps >= start_iou,
+                                   max_overlaps < end_iou))[0])
+            tmp_inds = list(tmp_set & full_set)
+            if len(tmp_inds) > per_num_expected:
+                tmp_sampled_set = self.random_choice(tmp_inds,
+                                                     per_num_expected)
+            else:
+                tmp_sampled_set = np.array(tmp_inds, dtype=np.int64)
+            sampled_inds.append(tmp_sampled_set)
+
+        sampled_inds = np.concatenate(sampled_inds)
+        if len(sampled_inds) < num_expected:
+            num_extra = num_expected - len(sampled_inds)
+            extra_inds = np.array(list(full_set - set(sampled_inds)))
+            if len(extra_inds) > num_extra:
+                extra_inds = self.random_choice(extra_inds, num_extra)
+            sampled_inds = np.concatenate([sampled_inds, extra_inds])
+
+        return sampled_inds
+
+    def _sample_neg(self, assign_result, num_expected, **kwargs):
+        """Sample negative boxes.
+
+        Args:
+            assign_result (:obj:`AssignResult`): The assigned results of
+                boxes.
+            num_expected (int): The number of expected negative samples
+
+        Returns:
+            Tensor or ndarray: sampled indices.
+        """
+        neg_inds = torch.nonzero(assign_result.gt_inds == 0, as_tuple=False)
+        if neg_inds.numel() != 0:
+            neg_inds = neg_inds.squeeze(1)
+        if len(neg_inds) <= num_expected:
+            return neg_inds
+        else:
+            max_overlaps = assign_result.max_overlaps.cpu().numpy()
+            # balance sampling for negative samples
+            neg_set = set(neg_inds.cpu().numpy())
+
+            if self.floor_thr > 0:
+                floor_set = set(
+                    np.where(
+                        np.logical_and(max_overlaps >= 0,
+                                       max_overlaps < self.floor_thr))[0])
+                iou_sampling_set = set(
+                    np.where(max_overlaps >= self.floor_thr)[0])
+            elif self.floor_thr == 0:
+                floor_set = set(np.where(max_overlaps == 0)[0])
+                iou_sampling_set = set(
+                    np.where(max_overlaps > self.floor_thr)[0])
+            else:
+                floor_set = set()
+                iou_sampling_set = set(
+                    np.where(max_overlaps > self.floor_thr)[0])
+                # for sampling interval calculation
+                self.floor_thr = 0
+
+            floor_neg_inds = list(floor_set & neg_set)
+            iou_sampling_neg_inds = list(iou_sampling_set & neg_set)
+            num_expected_iou_sampling = int(num_expected *
+                                            (1 - self.floor_fraction))
+            if len(iou_sampling_neg_inds) > num_expected_iou_sampling:
+                if self.num_bins >= 2:
+                    iou_sampled_inds = self.sample_via_interval(
+                        max_overlaps, set(iou_sampling_neg_inds),
+                        num_expected_iou_sampling)
+                else:
+                    iou_sampled_inds = self.random_choice(
+                        iou_sampling_neg_inds, num_expected_iou_sampling)
+            else:
+                iou_sampled_inds = np.array(
+                    iou_sampling_neg_inds, dtype=np.int64)
+            num_expected_floor = num_expected - len(iou_sampled_inds)
+            if len(floor_neg_inds) > num_expected_floor:
+                sampled_floor_inds = self.random_choice(
+                    floor_neg_inds, num_expected_floor)
+            else:
+                sampled_floor_inds = np.array(floor_neg_inds, dtype=np.int64)
+            sampled_inds = np.concatenate(
+                (sampled_floor_inds, iou_sampled_inds))
+            if len(sampled_inds) < num_expected:
+                num_extra = num_expected - len(sampled_inds)
+                extra_inds = np.array(list(neg_set - set(sampled_inds)))
+                if len(extra_inds) > num_extra:
+                    extra_inds = self.random_choice(extra_inds, num_extra)
+                sampled_inds = np.concatenate((sampled_inds, extra_inds))
+            sampled_inds = torch.from_numpy(sampled_inds).long().to(
+                assign_result.gt_inds.device)
+            return sampled_inds
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/ohem_sampler.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/ohem_sampler.py
new file mode 100644
index 0000000000000000000000000000000000000000..8b99f60ef0176f1b7a56665fb0f59272f65b84cd
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/ohem_sampler.py
@@ -0,0 +1,107 @@
+import torch
+
+from ..builder import BBOX_SAMPLERS
+from ..transforms import bbox2roi
+from .base_sampler import BaseSampler
+
+
+@BBOX_SAMPLERS.register_module()
+class OHEMSampler(BaseSampler):
+    r"""Online Hard Example Mining Sampler described in `Training Region-based
+    Object Detectors with Online Hard Example Mining
+    <https://arxiv.org/abs/1604.03540>`_.
+ """ + + def __init__(self, + num, + pos_fraction, + context, + neg_pos_ub=-1, + add_gt_as_proposals=True, + **kwargs): + super(OHEMSampler, self).__init__(num, pos_fraction, neg_pos_ub, + add_gt_as_proposals) + self.context = context + if not hasattr(self.context, 'num_stages'): + self.bbox_head = self.context.bbox_head + else: + self.bbox_head = self.context.bbox_head[self.context.current_stage] + + def hard_mining(self, inds, num_expected, bboxes, labels, feats): + with torch.no_grad(): + rois = bbox2roi([bboxes]) + if not hasattr(self.context, 'num_stages'): + bbox_results = self.context._bbox_forward(feats, rois) + else: + bbox_results = self.context._bbox_forward( + self.context.current_stage, feats, rois) + cls_score = bbox_results['cls_score'] + loss = self.bbox_head.loss( + cls_score=cls_score, + bbox_pred=None, + rois=rois, + labels=labels, + label_weights=cls_score.new_ones(cls_score.size(0)), + bbox_targets=None, + bbox_weights=None, + reduction_override='none')['loss_cls'] + _, topk_loss_inds = loss.topk(num_expected) + return inds[topk_loss_inds] + + def _sample_pos(self, + assign_result, + num_expected, + bboxes=None, + feats=None, + **kwargs): + """Sample positive boxes. + + Args: + assign_result (:obj:`AssignResult`): Assigned results + num_expected (int): Number of expected positive samples + bboxes (torch.Tensor, optional): Boxes. Defaults to None. + feats (list[torch.Tensor], optional): Multi-level features. + Defaults to None. + + Returns: + torch.Tensor: Indices of positive samples + """ + # Sample some hard positive samples + pos_inds = torch.nonzero(assign_result.gt_inds > 0, as_tuple=False) + if pos_inds.numel() != 0: + pos_inds = pos_inds.squeeze(1) + if pos_inds.numel() <= num_expected: + return pos_inds + else: + return self.hard_mining(pos_inds, num_expected, bboxes[pos_inds], + assign_result.labels[pos_inds], feats) + + def _sample_neg(self, + assign_result, + num_expected, + bboxes=None, + feats=None, + **kwargs): + """Sample negative boxes. + + Args: + assign_result (:obj:`AssignResult`): Assigned results + num_expected (int): Number of expected negative samples + bboxes (torch.Tensor, optional): Boxes. Defaults to None. + feats (list[torch.Tensor], optional): Multi-level features. + Defaults to None. 
+ + Returns: + torch.Tensor: Indices of negative samples + """ + # Sample some hard negative samples + neg_inds = torch.nonzero(assign_result.gt_inds == 0, as_tuple=False) + if neg_inds.numel() != 0: + neg_inds = neg_inds.squeeze(1) + if len(neg_inds) <= num_expected: + return neg_inds + else: + neg_labels = assign_result.labels.new_empty( + neg_inds.size(0)).fill_(self.bbox_head.num_classes) + return self.hard_mining(neg_inds, num_expected, bboxes[neg_inds], + neg_labels, feats) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/pseudo_sampler.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/pseudo_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..2bd81abcdc62debc14772659d7a171f20bf33364 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/pseudo_sampler.py @@ -0,0 +1,41 @@ +import torch + +from ..builder import BBOX_SAMPLERS +from .base_sampler import BaseSampler +from .sampling_result import SamplingResult + + +@BBOX_SAMPLERS.register_module() +class PseudoSampler(BaseSampler): + """A pseudo sampler that does not do sampling actually.""" + + def __init__(self, **kwargs): + pass + + def _sample_pos(self, **kwargs): + """Sample positive samples.""" + raise NotImplementedError + + def _sample_neg(self, **kwargs): + """Sample negative samples.""" + raise NotImplementedError + + def sample(self, assign_result, bboxes, gt_bboxes, **kwargs): + """Directly returns the positive and negative indices of samples. + + Args: + assign_result (:obj:`AssignResult`): Assigned results + bboxes (torch.Tensor): Bounding boxes + gt_bboxes (torch.Tensor): Ground truth boxes + + Returns: + :obj:`SamplingResult`: sampler results + """ + pos_inds = torch.nonzero( + assign_result.gt_inds > 0, as_tuple=False).squeeze(-1).unique() + neg_inds = torch.nonzero( + assign_result.gt_inds == 0, as_tuple=False).squeeze(-1).unique() + gt_flags = bboxes.new_zeros(bboxes.shape[0], dtype=torch.uint8) + sampling_result = SamplingResult(pos_inds, neg_inds, bboxes, gt_bboxes, + assign_result, gt_flags) + return sampling_result diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/random_sampler.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/random_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..c23a7a1f04ae6e09a4122c2fb4e1b037c238f387 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/random_sampler.py @@ -0,0 +1,81 @@ +import torch + +from ..builder import BBOX_SAMPLERS +from .base_sampler import BaseSampler + + +@BBOX_SAMPLERS.register_module() +class RandomSampler(BaseSampler): + """Random sampler. + + Args: + num (int): Number of samples + pos_fraction (float): Fraction of positive samples + neg_pos_up (int, optional): Upper bound number of negative and + positive samples. Defaults to -1. + add_gt_as_proposals (bool, optional): Whether to add ground truth + boxes as proposals. Defaults to True. + """ + + def __init__(self, + num, + pos_fraction, + neg_pos_ub=-1, + add_gt_as_proposals=True, + **kwargs): + from mmdet.core.bbox import demodata + super(RandomSampler, self).__init__(num, pos_fraction, neg_pos_ub, + add_gt_as_proposals) + self.rng = demodata.ensure_rng(kwargs.get('rng', None)) + + def random_choice(self, gallery, num): + """Random select some elements from the gallery. 
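# --- Editorial sketch (not part of the patch): the core of OHEMSampler's
# hard_mining above is a top-k selection on per-sample classification loss.
# A self-contained imitation with made-up candidate indices and losses:
import torch

cand_inds = torch.tensor([3, 7, 11, 42, 56])
losses = torch.tensor([0.2, 1.5, 0.1, 0.9, 2.3])   # per-candidate loss_cls
num_expected = 2
_, topk = losses.topk(num_expected)    # indices of the two largest losses
hard_inds = cand_inds[topk]            # tensor([56, 7]): the hardest examples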
+ + If `gallery` is a Tensor, the returned indices will be a Tensor; + If `gallery` is a ndarray or list, the returned indices will be a + ndarray. + + Args: + gallery (Tensor | ndarray | list): indices pool. + num (int): expected sample num. + + Returns: + Tensor or ndarray: sampled indices. + """ + assert len(gallery) >= num + + is_tensor = isinstance(gallery, torch.Tensor) + if not is_tensor: + if torch.cuda.is_available(): + device = torch.cuda.current_device() + else: + device = 'cpu' + gallery = torch.tensor(gallery, dtype=torch.long, device=device) + # This is a temporary fix. We can revert the following code + # when PyTorch fixes the abnormal return of torch.randperm. + # See: https://github.com/open-mmlab/mmdetection/pull/5014 + perm = torch.randperm(gallery.numel())[:num].to(device=gallery.device) + rand_inds = gallery[perm] + if not is_tensor: + rand_inds = rand_inds.cpu().numpy() + return rand_inds + + def _sample_pos(self, assign_result, num_expected, **kwargs): + """Randomly sample some positive samples.""" + pos_inds = torch.nonzero(assign_result.gt_inds > 0, as_tuple=False) + if pos_inds.numel() != 0: + pos_inds = pos_inds.squeeze(1) + if pos_inds.numel() <= num_expected: + return pos_inds + else: + return self.random_choice(pos_inds, num_expected) + + def _sample_neg(self, assign_result, num_expected, **kwargs): + """Randomly sample some negative samples.""" + neg_inds = torch.nonzero(assign_result.gt_inds == 0, as_tuple=False) + if neg_inds.numel() != 0: + neg_inds = neg_inds.squeeze(1) + if len(neg_inds) <= num_expected: + return neg_inds + else: + return self.random_choice(neg_inds, num_expected) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/sampling_result.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/sampling_result.py new file mode 100644 index 0000000000000000000000000000000000000000..1ca2d29cafeb4b731ca4f7eb406666afb592a163 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/sampling_result.py @@ -0,0 +1,152 @@ +import torch + +from mmdet.utils import util_mixins + + +class SamplingResult(util_mixins.NiceRepr): + """Bbox sampling result. + + Example: + >>> # xdoctest: +IGNORE_WANT + >>> from mmdet.core.bbox.samplers.sampling_result import * # NOQA + >>> self = SamplingResult.random(rng=10) + >>> print(f'self = {self}') + self = + """ + + def __init__(self, pos_inds, neg_inds, bboxes, gt_bboxes, assign_result, + gt_flags): + self.pos_inds = pos_inds + self.neg_inds = neg_inds + self.pos_bboxes = bboxes[pos_inds] + self.neg_bboxes = bboxes[neg_inds] + self.pos_is_gt = gt_flags[pos_inds] + + self.num_gts = gt_bboxes.shape[0] + self.pos_assigned_gt_inds = assign_result.gt_inds[pos_inds] - 1 + + if gt_bboxes.numel() == 0: + # hack for index error case + assert self.pos_assigned_gt_inds.numel() == 0 + self.pos_gt_bboxes = torch.empty_like(gt_bboxes).view(-1, 4) + else: + if len(gt_bboxes.shape) < 2: + gt_bboxes = gt_bboxes.view(-1, 4) + + self.pos_gt_bboxes = gt_bboxes[self.pos_assigned_gt_inds, :] + + if assign_result.labels is not None: + self.pos_gt_labels = assign_result.labels[pos_inds] + else: + self.pos_gt_labels = None + + @property + def bboxes(self): + """torch.Tensor: concatenated positive and negative boxes""" + return torch.cat([self.pos_bboxes, self.neg_bboxes]) + + def to(self, device): + """Change the device of the data inplace. 
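# --- Editorial sketch (not part of the patch): RandomSampler.random_choice
# above preserves the container type of its input, and the randperm-based
# draw is without replacement.
import torch

sampler = RandomSampler(num=8, pos_fraction=0.5)
picked = sampler.random_choice(torch.arange(100), 8)    # Tensor of 8 indices
picked_np = sampler.random_choice(list(range(100)), 8)  # ndarray of 8 indices
assert len(set(picked.tolist())) == 8                   # no duplicates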
+
+        Example:
+            >>> self = SamplingResult.random()
+            >>> print(f'self = {self.to(None)}')
+            >>> # xdoctest: +REQUIRES(--gpu)
+            >>> print(f'self = {self.to(0)}')
+        """
+        _dict = self.__dict__
+        for key, value in _dict.items():
+            if isinstance(value, torch.Tensor):
+                _dict[key] = value.to(device)
+        return self
+
+    def __nice__(self):
+        data = self.info.copy()
+        data['pos_bboxes'] = data.pop('pos_bboxes').shape
+        data['neg_bboxes'] = data.pop('neg_bboxes').shape
+        parts = [f"'{k}': {v!r}" for k, v in sorted(data.items())]
+        body = '    ' + ',\n    '.join(parts)
+        return '{\n' + body + '\n}'
+
+    @property
+    def info(self):
+        """Returns a dictionary of info about the object."""
+        return {
+            'pos_inds': self.pos_inds,
+            'neg_inds': self.neg_inds,
+            'pos_bboxes': self.pos_bboxes,
+            'neg_bboxes': self.neg_bboxes,
+            'pos_is_gt': self.pos_is_gt,
+            'num_gts': self.num_gts,
+            'pos_assigned_gt_inds': self.pos_assigned_gt_inds,
+        }
+
+    @classmethod
+    def random(cls, rng=None, **kwargs):
+        """
+        Args:
+            rng (None | int | numpy.random.RandomState): seed or state.
+            kwargs (keyword arguments):
+                - num_preds: number of predicted boxes
+                - num_gts: number of true boxes
+                - p_ignore (float): probability of a predicted box assigned \
+                    to an ignored truth.
+                - p_assigned (float): probability of a predicted box not \
+                    being assigned.
+                - p_use_label (float | bool): with labels or not.
+
+        Returns:
+            :obj:`SamplingResult`: Randomly generated sampling result.
+
+        Example:
+            >>> from mmdet.core.bbox.samplers.sampling_result import *  # NOQA
+            >>> self = SamplingResult.random()
+            >>> print(self.__dict__)
+        """
+        from mmdet.core.bbox.samplers.random_sampler import RandomSampler
+        from mmdet.core.bbox.assigners.assign_result import AssignResult
+        from mmdet.core.bbox import demodata
+        rng = demodata.ensure_rng(rng)
+
+        # make probabilistic?
+        num = 32
+        pos_fraction = 0.5
+        neg_pos_ub = -1
+
+        assign_result = AssignResult.random(rng=rng, **kwargs)
+
+        # Note we could just compute an assignment
+        bboxes = demodata.random_boxes(assign_result.num_preds, rng=rng)
+        gt_bboxes = demodata.random_boxes(assign_result.num_gts, rng=rng)
+
+        if rng.rand() > 0.2:
+            # sometimes algorithms squeeze their data, be robust to that
+            gt_bboxes = gt_bboxes.squeeze()
+            bboxes = bboxes.squeeze()
+
+        if assign_result.labels is None:
+            gt_labels = None
+        else:
+            gt_labels = None  # todo
+
+        if gt_labels is None:
+            add_gt_as_proposals = False
+        else:
+            add_gt_as_proposals = True  # make probabilistic?
+
+        sampler = RandomSampler(
+            num,
+            pos_fraction,
+            neg_pos_ub=neg_pos_ub,
+            add_gt_as_proposals=add_gt_as_proposals,
+            rng=rng)
+        self = sampler.sample(assign_result, bboxes, gt_bboxes, gt_labels)
+        return self
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/score_hlr_sampler.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/score_hlr_sampler.py
new file mode 100644
index 0000000000000000000000000000000000000000..83244ed569c7fbc684ae088a9d6460cbfd5dc645
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/samplers/score_hlr_sampler.py
@@ -0,0 +1,264 @@
+import torch
+from mmcv.ops import nms_match
+
+from ..builder import BBOX_SAMPLERS
+from ..transforms import bbox2roi
+from .base_sampler import BaseSampler
+from .sampling_result import SamplingResult
+
+
+@BBOX_SAMPLERS.register_module()
+class ScoreHLRSampler(BaseSampler):
+    r"""Importance-based Sample Reweighting (ISR_N), described in `Prime
+    Sample Attention in Object Detection
+    <https://arxiv.org/abs/1904.04821>`_.
+
+    Score hierarchical local rank (HLR) differs from RandomSampler in its
+    negative part. It first computes Score-HLR in a two-step way, then
+    linearly maps the Score-HLR to the loss weights.
+
+    Args:
+        num (int): Total number of sampled RoIs.
+        pos_fraction (float): Fraction of positive samples.
+        context (:class:`BaseRoIHead`): RoI head that the sampler belongs to.
+        neg_pos_ub (int): Upper bound of the ratio of num negative to num
+            positive, -1 means no upper bound.
+        add_gt_as_proposals (bool): Whether to add ground truth as proposals.
+        k (float): Power of the non-linear mapping.
+        bias (float): Shift of the non-linear mapping.
+        score_thr (float): Minimum score for a negative sample to be
+            considered as a valid bbox.
+    """
+
+    def __init__(self,
+                 num,
+                 pos_fraction,
+                 context,
+                 neg_pos_ub=-1,
+                 add_gt_as_proposals=True,
+                 k=0.5,
+                 bias=0,
+                 score_thr=0.05,
+                 iou_thr=0.5,
+                 **kwargs):
+        super().__init__(num, pos_fraction, neg_pos_ub, add_gt_as_proposals)
+        self.k = k
+        self.bias = bias
+        self.score_thr = score_thr
+        self.iou_thr = iou_thr
+        self.context = context
+        # context of cascade detectors is a list, so distinguish them here.
+        if not hasattr(context, 'num_stages'):
+            self.bbox_roi_extractor = context.bbox_roi_extractor
+            self.bbox_head = context.bbox_head
+            self.with_shared_head = context.with_shared_head
+            if self.with_shared_head:
+                self.shared_head = context.shared_head
+        else:
+            self.bbox_roi_extractor = context.bbox_roi_extractor[
+                context.current_stage]
+            self.bbox_head = context.bbox_head[context.current_stage]
+
+    @staticmethod
+    def random_choice(gallery, num):
+        """Randomly select some elements from the gallery.
+
+        If `gallery` is a Tensor, the returned indices will be a Tensor;
+        If `gallery` is a ndarray or list, the returned indices will be a
+        ndarray.
+
+        Args:
+            gallery (Tensor | ndarray | list): indices pool.
+            num (int): expected sample num.
+
+        Returns:
+            Tensor or ndarray: sampled indices.
+        """
+        assert len(gallery) >= num
+
+        is_tensor = isinstance(gallery, torch.Tensor)
+        if not is_tensor:
+            if torch.cuda.is_available():
+                device = torch.cuda.current_device()
+            else:
+                device = 'cpu'
+            gallery = torch.tensor(gallery, dtype=torch.long, device=device)
+        perm = torch.randperm(gallery.numel(), device=gallery.device)[:num]
+        rand_inds = gallery[perm]
+        if not is_tensor:
+            rand_inds = rand_inds.cpu().numpy()
+        return rand_inds
+
+    def _sample_pos(self, assign_result, num_expected, **kwargs):
+        """Randomly sample some positive samples."""
+        pos_inds = torch.nonzero(assign_result.gt_inds > 0).flatten()
+        if pos_inds.numel() <= num_expected:
+            return pos_inds
+        else:
+            return self.random_choice(pos_inds, num_expected)
+
+    def _sample_neg(self,
+                    assign_result,
+                    num_expected,
+                    bboxes,
+                    feats=None,
+                    img_meta=None,
+                    **kwargs):
+        """Sample negative samples.
+
+        Score-HLR sampling is done in the following steps:
+
+        1. Take the maximum positive score prediction of each negative
+           sample as s_i.
+        2. Filter out negative samples whose s_i <= score_thr; the remaining
+           samples are called valid samples.
+        3. Use NMS-Match to divide valid samples into different groups;
+           samples in the same group greatly overlap with each other.
+        4. Rank the matched samples in two steps to get Score-HLR:
+           (1) within the same group, rank samples by their scores;
+           (2) within the same score rank across different groups, rank
+               samples by their scores again.
+        5. Linearly map Score-HLR to the final label weights.
+
+        Args:
+            assign_result (:obj:`AssignResult`): result of assigner.
+ num_expected (int): Expected number of samples. + bboxes (Tensor): bbox to be sampled. + feats (Tensor): Features come from FPN. + img_meta (dict): Meta information dictionary. + """ + neg_inds = torch.nonzero(assign_result.gt_inds == 0).flatten() + num_neg = neg_inds.size(0) + if num_neg == 0: + return neg_inds, None + with torch.no_grad(): + neg_bboxes = bboxes[neg_inds] + neg_rois = bbox2roi([neg_bboxes]) + bbox_result = self.context._bbox_forward(feats, neg_rois) + cls_score, bbox_pred = bbox_result['cls_score'], bbox_result[ + 'bbox_pred'] + + ori_loss = self.bbox_head.loss( + cls_score=cls_score, + bbox_pred=None, + rois=None, + labels=neg_inds.new_full((num_neg, ), + self.bbox_head.num_classes), + label_weights=cls_score.new_ones(num_neg), + bbox_targets=None, + bbox_weights=None, + reduction_override='none')['loss_cls'] + + # filter out samples with the max score lower than score_thr + max_score, argmax_score = cls_score.softmax(-1)[:, :-1].max(-1) + valid_inds = (max_score > self.score_thr).nonzero().view(-1) + invalid_inds = (max_score <= self.score_thr).nonzero().view(-1) + num_valid = valid_inds.size(0) + num_invalid = invalid_inds.size(0) + + num_expected = min(num_neg, num_expected) + num_hlr = min(num_valid, num_expected) + num_rand = num_expected - num_hlr + if num_valid > 0: + valid_rois = neg_rois[valid_inds] + valid_max_score = max_score[valid_inds] + valid_argmax_score = argmax_score[valid_inds] + valid_bbox_pred = bbox_pred[valid_inds] + + # valid_bbox_pred shape: [num_valid, #num_classes, 4] + valid_bbox_pred = valid_bbox_pred.view( + valid_bbox_pred.size(0), -1, 4) + selected_bbox_pred = valid_bbox_pred[range(num_valid), + valid_argmax_score] + pred_bboxes = self.bbox_head.bbox_coder.decode( + valid_rois[:, 1:], selected_bbox_pred) + pred_bboxes_with_score = torch.cat( + [pred_bboxes, valid_max_score[:, None]], -1) + group = nms_match(pred_bboxes_with_score, self.iou_thr) + + # imp: importance + imp = cls_score.new_zeros(num_valid) + for g in group: + g_score = valid_max_score[g] + # g_score has already sorted + rank = g_score.new_tensor(range(g_score.size(0))) + imp[g] = num_valid - rank + g_score + _, imp_rank_inds = imp.sort(descending=True) + _, imp_rank = imp_rank_inds.sort() + hlr_inds = imp_rank_inds[:num_expected] + + if num_rand > 0: + rand_inds = torch.randperm(num_invalid)[:num_rand] + select_inds = torch.cat( + [valid_inds[hlr_inds], invalid_inds[rand_inds]]) + else: + select_inds = valid_inds[hlr_inds] + + neg_label_weights = cls_score.new_ones(num_expected) + + up_bound = max(num_expected, num_valid) + imp_weights = (up_bound - + imp_rank[hlr_inds].float()) / up_bound + neg_label_weights[:num_hlr] = imp_weights + neg_label_weights[num_hlr:] = imp_weights.min() + neg_label_weights = (self.bias + + (1 - self.bias) * neg_label_weights).pow( + self.k) + ori_selected_loss = ori_loss[select_inds] + new_loss = ori_selected_loss * neg_label_weights + norm_ratio = ori_selected_loss.sum() / new_loss.sum() + neg_label_weights *= norm_ratio + else: + neg_label_weights = cls_score.new_ones(num_expected) + select_inds = torch.randperm(num_neg)[:num_expected] + + return neg_inds[select_inds], neg_label_weights + + def sample(self, + assign_result, + bboxes, + gt_bboxes, + gt_labels=None, + img_meta=None, + **kwargs): + """Sample positive and negative bboxes. + + This is a simple implementation of bbox sampling given candidates, + assigning results and ground truth bboxes. + + Args: + assign_result (:obj:`AssignResult`): Bbox assigning results. 
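# --- Editorial sketch (not part of the patch): the final non-linear mapping
# ScoreHLRSampler applies to the rank-derived weights above,
# w <- (bias + (1 - bias) * w) ** k, shown with the defaults k=0.5, bias=0.
import torch

k, bias = 0.5, 0.0
rank_weights = torch.tensor([1.0, 0.64, 0.25, 0.04])   # from Score-HLR ranks
label_weights = (bias + (1 - bias) * rank_weights).pow(k)
# tensor([1.0000, 0.8000, 0.5000, 0.2000]): high Score-HLR negatives keep a
# weight near 1, low-ranked ones are down-weighted rather than discarded.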
+ bboxes (Tensor): Boxes to be sampled from. + gt_bboxes (Tensor): Ground truth bboxes. + gt_labels (Tensor, optional): Class labels of ground truth bboxes. + + Returns: + tuple[:obj:`SamplingResult`, Tensor]: Sampling result and negative + label weights. + """ + bboxes = bboxes[:, :4] + + gt_flags = bboxes.new_zeros((bboxes.shape[0], ), dtype=torch.uint8) + if self.add_gt_as_proposals: + bboxes = torch.cat([gt_bboxes, bboxes], dim=0) + assign_result.add_gt_(gt_labels) + gt_ones = bboxes.new_ones(gt_bboxes.shape[0], dtype=torch.uint8) + gt_flags = torch.cat([gt_ones, gt_flags]) + + num_expected_pos = int(self.num * self.pos_fraction) + pos_inds = self.pos_sampler._sample_pos( + assign_result, num_expected_pos, bboxes=bboxes, **kwargs) + num_sampled_pos = pos_inds.numel() + num_expected_neg = self.num - num_sampled_pos + if self.neg_pos_ub >= 0: + _pos = max(1, num_sampled_pos) + neg_upper_bound = int(self.neg_pos_ub * _pos) + if num_expected_neg > neg_upper_bound: + num_expected_neg = neg_upper_bound + neg_inds, neg_label_weights = self.neg_sampler._sample_neg( + assign_result, + num_expected_neg, + bboxes, + img_meta=img_meta, + **kwargs) + + return SamplingResult(pos_inds, neg_inds, bboxes, gt_bboxes, + assign_result, gt_flags), neg_label_weights diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/transforms.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/transforms.py new file mode 100644 index 0000000000000000000000000000000000000000..fb141f4735e6c18925d72691597e6ccc2ba45096 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/bbox/transforms.py @@ -0,0 +1,246 @@ +import numpy as np +import torch + + +def bbox_flip(bboxes, img_shape, direction='horizontal'): + """Flip bboxes horizontally or vertically. + + Args: + bboxes (Tensor): Shape (..., 4*k) + img_shape (tuple): Image shape. + direction (str): Flip direction, options are "horizontal", "vertical", + "diagonal". Default: "horizontal" + + Returns: + Tensor: Flipped bboxes. + """ + assert bboxes.shape[-1] % 4 == 0 + assert direction in ['horizontal', 'vertical', 'diagonal'] + flipped = bboxes.clone() + if direction == 'horizontal': + flipped[..., 0::4] = img_shape[1] - bboxes[..., 2::4] + flipped[..., 2::4] = img_shape[1] - bboxes[..., 0::4] + elif direction == 'vertical': + flipped[..., 1::4] = img_shape[0] - bboxes[..., 3::4] + flipped[..., 3::4] = img_shape[0] - bboxes[..., 1::4] + else: + flipped[..., 0::4] = img_shape[1] - bboxes[..., 2::4] + flipped[..., 1::4] = img_shape[0] - bboxes[..., 3::4] + flipped[..., 2::4] = img_shape[1] - bboxes[..., 0::4] + flipped[..., 3::4] = img_shape[0] - bboxes[..., 1::4] + return flipped + + +def bbox_mapping(bboxes, + img_shape, + scale_factor, + flip, + flip_direction='horizontal'): + """Map bboxes from the original image scale to testing scale.""" + new_bboxes = bboxes * bboxes.new_tensor(scale_factor) + if flip: + new_bboxes = bbox_flip(new_bboxes, img_shape, flip_direction) + return new_bboxes + + +def bbox_mapping_back(bboxes, + img_shape, + scale_factor, + flip, + flip_direction='horizontal'): + """Map bboxes from testing scale to original image scale.""" + new_bboxes = bbox_flip(bboxes, img_shape, + flip_direction) if flip else bboxes + new_bboxes = new_bboxes.view(-1, 4) / new_bboxes.new_tensor(scale_factor) + return new_bboxes.view(bboxes.shape) + + +def bbox2roi(bbox_list): + """Convert a list of bboxes to roi format. + + Args: + bbox_list (list[Tensor]): a list of bboxes corresponding to a batch + of images. 
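# --- Editorial sketch (not part of the patch): bbox_flip above is an
# involution, and bbox_mapping / bbox_mapping_back invert each other when
# given the same img_shape, scale_factor and flip arguments.
import torch

boxes = torch.tensor([[10., 20., 30., 60.]])
img_shape = (100, 200)                      # (H, W)
flipped = bbox_flip(boxes, img_shape)       # tensor([[170., 20., 190., 60.]])
assert torch.equal(bbox_flip(flipped, img_shape), boxes)

mapped = bbox_mapping(boxes, img_shape, scale_factor=2.0, flip=True)
assert torch.allclose(
    bbox_mapping_back(mapped, img_shape, scale_factor=2.0, flip=True), boxes)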
+ + Returns: + Tensor: shape (n, 5), [batch_ind, x1, y1, x2, y2] + """ + rois_list = [] + for img_id, bboxes in enumerate(bbox_list): + if bboxes.size(0) > 0: + img_inds = bboxes.new_full((bboxes.size(0), 1), img_id) + rois = torch.cat([img_inds, bboxes[:, :4]], dim=-1) + else: + rois = bboxes.new_zeros((0, 5)) + rois_list.append(rois) + rois = torch.cat(rois_list, 0) + return rois + + +def roi2bbox(rois): + """Convert rois to bounding box format. + + Args: + rois (torch.Tensor): RoIs with the shape (n, 5) where the first + column indicates batch id of each RoI. + + Returns: + list[torch.Tensor]: Converted boxes of corresponding rois. + """ + bbox_list = [] + img_ids = torch.unique(rois[:, 0].cpu(), sorted=True) + for img_id in img_ids: + inds = (rois[:, 0] == img_id.item()) + bbox = rois[inds, 1:] + bbox_list.append(bbox) + return bbox_list + + +def bbox2result(bboxes, labels, num_classes): + """Convert detection results to a list of numpy arrays. + + Args: + bboxes (torch.Tensor | np.ndarray): shape (n, 5) + labels (torch.Tensor | np.ndarray): shape (n, ) + num_classes (int): class number, including background class + + Returns: + list(ndarray): bbox results of each class + """ + if bboxes.shape[0] == 0: + return [np.zeros((0, 5), dtype=np.float32) for i in range(num_classes)] + else: + if isinstance(bboxes, torch.Tensor): + bboxes = bboxes.detach().cpu().numpy() + labels = labels.detach().cpu().numpy() + return [bboxes[labels == i, :] for i in range(num_classes)] + + +def distance2bbox(points, distance, max_shape=None): + """Decode distance prediction to bounding box. + + Args: + points (Tensor): Shape (B, N, 2) or (N, 2). + distance (Tensor): Distance from the given point to 4 + boundaries (left, top, right, bottom). Shape (B, N, 4) or (N, 4) + max_shape (Sequence[int] or torch.Tensor or Sequence[ + Sequence[int]],optional): Maximum bounds for boxes, specifies + (H, W, C) or (H, W). If priors shape is (B, N, 4), then + the max_shape should be a Sequence[Sequence[int]] + and the length of max_shape should also be B. + + Returns: + Tensor: Boxes with shape (N, 4) or (B, N, 4) + """ + x1 = points[..., 0] - distance[..., 0] + y1 = points[..., 1] - distance[..., 1] + x2 = points[..., 0] + distance[..., 2] + y2 = points[..., 1] + distance[..., 3] + + bboxes = torch.stack([x1, y1, x2, y2], -1) + + if max_shape is not None: + # clip bboxes with dynamic `min` and `max` for onnx + if torch.onnx.is_in_onnx_export(): + from mmdet.core.export import dynamic_clip_for_onnx + x1, y1, x2, y2 = dynamic_clip_for_onnx(x1, y1, x2, y2, max_shape) + bboxes = torch.stack([x1, y1, x2, y2], dim=-1) + return bboxes + if not isinstance(max_shape, torch.Tensor): + max_shape = x1.new_tensor(max_shape) + max_shape = max_shape[..., :2].type_as(x1) + if max_shape.ndim == 2: + assert bboxes.ndim == 3 + assert max_shape.size(0) == bboxes.size(0) + + min_xy = x1.new_tensor(0) + max_xy = torch.cat([max_shape, max_shape], + dim=-1).flip(-1).unsqueeze(-2) + bboxes = torch.where(bboxes < min_xy, min_xy, bboxes) + bboxes = torch.where(bboxes > max_xy, max_xy, bboxes) + + return bboxes + + +def bbox2distance(points, bbox, max_dis=None, eps=0.1): + """Decode bounding box based on distances. + + Args: + points (Tensor): Shape (n, 2), [x, y]. + bbox (Tensor): Shape (n, 4), "xyxy" format + max_dis (float): Upper bound of the distance. + eps (float): a small value to ensure target < max_dis, instead <= + + Returns: + Tensor: Decoded distances. 
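+
+    Example (a worked sketch with invented values; not part of the
+    upstream file):
+        >>> import torch
+        >>> points = torch.tensor([[5., 5.]])
+        >>> bbox = torch.tensor([[2., 3., 9., 8.]])
+        >>> bbox2distance(points, bbox)
+        tensor([[3., 2., 4., 3.]])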
+ """ + left = points[:, 0] - bbox[:, 0] + top = points[:, 1] - bbox[:, 1] + right = bbox[:, 2] - points[:, 0] + bottom = bbox[:, 3] - points[:, 1] + if max_dis is not None: + left = left.clamp(min=0, max=max_dis - eps) + top = top.clamp(min=0, max=max_dis - eps) + right = right.clamp(min=0, max=max_dis - eps) + bottom = bottom.clamp(min=0, max=max_dis - eps) + return torch.stack([left, top, right, bottom], -1) + + +def bbox_rescale(bboxes, scale_factor=1.0): + """Rescale bounding box w.r.t. scale_factor. + + Args: + bboxes (Tensor): Shape (n, 4) for bboxes or (n, 5) for rois + scale_factor (float): rescale factor + + Returns: + Tensor: Rescaled bboxes. + """ + if bboxes.size(1) == 5: + bboxes_ = bboxes[:, 1:] + inds_ = bboxes[:, 0] + else: + bboxes_ = bboxes + cx = (bboxes_[:, 0] + bboxes_[:, 2]) * 0.5 + cy = (bboxes_[:, 1] + bboxes_[:, 3]) * 0.5 + w = bboxes_[:, 2] - bboxes_[:, 0] + h = bboxes_[:, 3] - bboxes_[:, 1] + w = w * scale_factor + h = h * scale_factor + x1 = cx - 0.5 * w + x2 = cx + 0.5 * w + y1 = cy - 0.5 * h + y2 = cy + 0.5 * h + if bboxes.size(1) == 5: + rescaled_bboxes = torch.stack([inds_, x1, y1, x2, y2], dim=-1) + else: + rescaled_bboxes = torch.stack([x1, y1, x2, y2], dim=-1) + return rescaled_bboxes + + +def bbox_cxcywh_to_xyxy(bbox): + """Convert bbox coordinates from (cx, cy, w, h) to (x1, y1, x2, y2). + + Args: + bbox (Tensor): Shape (n, 4) for bboxes. + + Returns: + Tensor: Converted bboxes. + """ + cx, cy, w, h = bbox.split((1, 1, 1, 1), dim=-1) + bbox_new = [(cx - 0.5 * w), (cy - 0.5 * h), (cx + 0.5 * w), (cy + 0.5 * h)] + return torch.cat(bbox_new, dim=-1) + + +def bbox_xyxy_to_cxcywh(bbox): + """Convert bbox coordinates from (x1, y1, x2, y2) to (cx, cy, w, h). + + Args: + bbox (Tensor): Shape (n, 4) for bboxes. + + Returns: + Tensor: Converted bboxes. 
+ """ + x1, y1, x2, y2 = bbox.split((1, 1, 1, 1), dim=-1) + bbox_new = [(x1 + x2) / 2, (y1 + y2) / 2, (x2 - x1), (y2 - y1)] + return torch.cat(bbox_new, dim=-1) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d11ef15b9db95166b4427ad4d08debbd0630a741 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/__init__.py @@ -0,0 +1,15 @@ +from .class_names import (cityscapes_classes, coco_classes, dataset_aliases, + get_classes, imagenet_det_classes, + imagenet_vid_classes, voc_classes) +from .eval_hooks import DistEvalHook, EvalHook +from .mean_ap import average_precision, eval_map, print_map_summary +from .recall import (eval_recalls, plot_iou_recall, plot_num_recall, + print_recall_summary) + +__all__ = [ + 'voc_classes', 'imagenet_det_classes', 'imagenet_vid_classes', + 'coco_classes', 'cityscapes_classes', 'dataset_aliases', 'get_classes', + 'DistEvalHook', 'EvalHook', 'average_precision', 'eval_map', + 'print_map_summary', 'eval_recalls', 'print_recall_summary', + 'plot_num_recall', 'plot_iou_recall' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/bbox_overlaps.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/bbox_overlaps.py new file mode 100644 index 0000000000000000000000000000000000000000..93559ea0f25369d552a5365312fa32b9ffec9226 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/bbox_overlaps.py @@ -0,0 +1,48 @@ +import numpy as np + + +def bbox_overlaps(bboxes1, bboxes2, mode='iou', eps=1e-6): + """Calculate the ious between each bbox of bboxes1 and bboxes2. 
+ + Args: + bboxes1(ndarray): shape (n, 4) + bboxes2(ndarray): shape (k, 4) + mode(str): iou (intersection over union) or iof (intersection + over foreground) + + Returns: + ious(ndarray): shape (n, k) + """ + + assert mode in ['iou', 'iof'] + + bboxes1 = bboxes1.astype(np.float32) + bboxes2 = bboxes2.astype(np.float32) + rows = bboxes1.shape[0] + cols = bboxes2.shape[0] + ious = np.zeros((rows, cols), dtype=np.float32) + if rows * cols == 0: + return ious + exchange = False + if bboxes1.shape[0] > bboxes2.shape[0]: + bboxes1, bboxes2 = bboxes2, bboxes1 + ious = np.zeros((cols, rows), dtype=np.float32) + exchange = True + area1 = (bboxes1[:, 2] - bboxes1[:, 0]) * (bboxes1[:, 3] - bboxes1[:, 1]) + area2 = (bboxes2[:, 2] - bboxes2[:, 0]) * (bboxes2[:, 3] - bboxes2[:, 1]) + for i in range(bboxes1.shape[0]): + x_start = np.maximum(bboxes1[i, 0], bboxes2[:, 0]) + y_start = np.maximum(bboxes1[i, 1], bboxes2[:, 1]) + x_end = np.minimum(bboxes1[i, 2], bboxes2[:, 2]) + y_end = np.minimum(bboxes1[i, 3], bboxes2[:, 3]) + overlap = np.maximum(x_end - x_start, 0) * np.maximum( + y_end - y_start, 0) + if mode == 'iou': + union = area1[i] + area2 - overlap + else: + union = area1[i] if not exchange else area2 + union = np.maximum(union, eps) + ious[i, :] = overlap / union + if exchange: + ious = ious.T + return ious diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/class_names.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/class_names.py new file mode 100644 index 0000000000000000000000000000000000000000..c2487c2ee2d010c40db0e1c2b51c91b194e84dc7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/class_names.py @@ -0,0 +1,116 @@ +import mmcv + + +def wider_face_classes(): + return ['face'] + + +def voc_classes(): + return [ + 'aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat', + 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person', + 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor' + ] + + +def imagenet_det_classes(): + return [ + 'accordion', 'airplane', 'ant', 'antelope', 'apple', 'armadillo', + 'artichoke', 'axe', 'baby_bed', 'backpack', 'bagel', 'balance_beam', + 'banana', 'band_aid', 'banjo', 'baseball', 'basketball', 'bathing_cap', + 'beaker', 'bear', 'bee', 'bell_pepper', 'bench', 'bicycle', 'binder', + 'bird', 'bookshelf', 'bow_tie', 'bow', 'bowl', 'brassiere', 'burrito', + 'bus', 'butterfly', 'camel', 'can_opener', 'car', 'cart', 'cattle', + 'cello', 'centipede', 'chain_saw', 'chair', 'chime', 'cocktail_shaker', + 'coffee_maker', 'computer_keyboard', 'computer_mouse', 'corkscrew', + 'cream', 'croquet_ball', 'crutch', 'cucumber', 'cup_or_mug', 'diaper', + 'digital_clock', 'dishwasher', 'dog', 'domestic_cat', 'dragonfly', + 'drum', 'dumbbell', 'electric_fan', 'elephant', 'face_powder', 'fig', + 'filing_cabinet', 'flower_pot', 'flute', 'fox', 'french_horn', 'frog', + 'frying_pan', 'giant_panda', 'goldfish', 'golf_ball', 'golfcart', + 'guacamole', 'guitar', 'hair_dryer', 'hair_spray', 'hamburger', + 'hammer', 'hamster', 'harmonica', 'harp', 'hat_with_a_wide_brim', + 'head_cabbage', 'helmet', 'hippopotamus', 'horizontal_bar', 'horse', + 'hotdog', 'iPod', 'isopod', 'jellyfish', 'koala_bear', 'ladle', + 'ladybug', 'lamp', 'laptop', 'lemon', 'lion', 'lipstick', 'lizard', + 'lobster', 'maillot', 'maraca', 'microphone', 'microwave', 'milk_can', + 'miniskirt', 'monkey', 'motorcycle', 'mushroom', 'nail', 'neck_brace', + 'oboe', 'orange', 'otter', 'pencil_box', 'pencil_sharpener', 
'perfume', + 'person', 'piano', 'pineapple', 'ping-pong_ball', 'pitcher', 'pizza', + 'plastic_bag', 'plate_rack', 'pomegranate', 'popsicle', 'porcupine', + 'power_drill', 'pretzel', 'printer', 'puck', 'punching_bag', 'purse', + 'rabbit', 'racket', 'ray', 'red_panda', 'refrigerator', + 'remote_control', 'rubber_eraser', 'rugby_ball', 'ruler', + 'salt_or_pepper_shaker', 'saxophone', 'scorpion', 'screwdriver', + 'seal', 'sheep', 'ski', 'skunk', 'snail', 'snake', 'snowmobile', + 'snowplow', 'soap_dispenser', 'soccer_ball', 'sofa', 'spatula', + 'squirrel', 'starfish', 'stethoscope', 'stove', 'strainer', + 'strawberry', 'stretcher', 'sunglasses', 'swimming_trunks', 'swine', + 'syringe', 'table', 'tape_player', 'tennis_ball', 'tick', 'tie', + 'tiger', 'toaster', 'traffic_light', 'train', 'trombone', 'trumpet', + 'turtle', 'tv_or_monitor', 'unicycle', 'vacuum', 'violin', + 'volleyball', 'waffle_iron', 'washer', 'water_bottle', 'watercraft', + 'whale', 'wine_bottle', 'zebra' + ] + + +def imagenet_vid_classes(): + return [ + 'airplane', 'antelope', 'bear', 'bicycle', 'bird', 'bus', 'car', + 'cattle', 'dog', 'domestic_cat', 'elephant', 'fox', 'giant_panda', + 'hamster', 'horse', 'lion', 'lizard', 'monkey', 'motorcycle', 'rabbit', + 'red_panda', 'sheep', 'snake', 'squirrel', 'tiger', 'train', 'turtle', + 'watercraft', 'whale', 'zebra' + ] + + +def coco_classes(): + return [ + 'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train', + 'truck', 'boat', 'traffic_light', 'fire_hydrant', 'stop_sign', + 'parking_meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', + 'cow', 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', + 'handbag', 'tie', 'suitcase', 'frisbee', 'skis', 'snowboard', + 'sports_ball', 'kite', 'baseball_bat', 'baseball_glove', 'skateboard', + 'surfboard', 'tennis_racket', 'bottle', 'wine_glass', 'cup', 'fork', + 'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich', 'orange', + 'broccoli', 'carrot', 'hot_dog', 'pizza', 'donut', 'cake', 'chair', + 'couch', 'potted_plant', 'bed', 'dining_table', 'toilet', 'tv', + 'laptop', 'mouse', 'remote', 'keyboard', 'cell_phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase', + 'scissors', 'teddy_bear', 'hair_drier', 'toothbrush' + ] + + +def cityscapes_classes(): + return [ + 'person', 'rider', 'car', 'truck', 'bus', 'train', 'motorcycle', + 'bicycle' + ] + + +dataset_aliases = { + 'voc': ['voc', 'pascal_voc', 'voc07', 'voc12'], + 'imagenet_det': ['det', 'imagenet_det', 'ilsvrc_det'], + 'imagenet_vid': ['vid', 'imagenet_vid', 'ilsvrc_vid'], + 'coco': ['coco', 'mscoco', 'ms_coco'], + 'wider_face': ['WIDERFaceDataset', 'wider_face', 'WIDERFace'], + 'cityscapes': ['cityscapes'] +} + + +def get_classes(dataset): + """Get class names of a dataset.""" + alias2name = {} + for name, aliases in dataset_aliases.items(): + for alias in aliases: + alias2name[alias] = name + + if mmcv.is_str(dataset): + if dataset in alias2name: + labels = eval(alias2name[dataset] + '_classes()') + else: + raise ValueError(f'Unrecognized dataset: {dataset}') + else: + raise TypeError(f'dataset must a str, but got {type(dataset)}') + return labels diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/eval_hooks.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/eval_hooks.py new file mode 100644 index 0000000000000000000000000000000000000000..68dc92f23b8ee108f87f2d06275fbf738149acbd --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/eval_hooks.py @@ -0,0 +1,60 @@ +import os.path as osp + +import torch.distributed as dist +from mmcv.runner import DistEvalHook as BaseDistEvalHook +from mmcv.runner import EvalHook as BaseEvalHook +from torch.nn.modules.batchnorm import _BatchNorm + + +class EvalHook(BaseEvalHook): + + def _do_evaluate(self, runner): + """perform evaluation and save ckpt.""" + if not self._should_evaluate(runner): + return + + from mmdet.apis import single_gpu_test + results = single_gpu_test(runner.model, self.dataloader, show=False) + runner.log_buffer.output['eval_iter_num'] = len(self.dataloader) + key_score = self.evaluate(runner, results) + if self.save_best: + self._save_ckpt(runner, key_score) + + +class DistEvalHook(BaseDistEvalHook): + + def _do_evaluate(self, runner): + """perform evaluation and save ckpt.""" + # Synchronization of BatchNorm's buffer (running_mean + # and running_var) is not supported in the DDP of pytorch, + # which may cause the inconsistent performance of models in + # different ranks, so we broadcast BatchNorm's buffers + # of rank 0 to other ranks to avoid this. + if self.broadcast_bn_buffer: + model = runner.model + for name, module in model.named_modules(): + if isinstance(module, + _BatchNorm) and module.track_running_stats: + dist.broadcast(module.running_var, 0) + dist.broadcast(module.running_mean, 0) + + if not self._should_evaluate(runner): + return + + tmpdir = self.tmpdir + if tmpdir is None: + tmpdir = osp.join(runner.work_dir, '.eval_hook') + + from mmdet.apis import multi_gpu_test + results = multi_gpu_test( + runner.model, + self.dataloader, + tmpdir=tmpdir, + gpu_collect=self.gpu_collect) + if runner.rank == 0: + print('\n') + runner.log_buffer.output['eval_iter_num'] = len(self.dataloader) + key_score = self.evaluate(runner, results) + + if self.save_best: + self._save_ckpt(runner, key_score) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/mean_ap.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/mean_ap.py new file mode 100644 index 0000000000000000000000000000000000000000..1d653a35497f6a0135c4374a09eb7c11399e3244 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/mean_ap.py @@ -0,0 +1,469 @@ +from multiprocessing import Pool + +import mmcv +import numpy as np +from mmcv.utils import print_log +from terminaltables import AsciiTable + +from .bbox_overlaps import bbox_overlaps +from .class_names import get_classes + + +def average_precision(recalls, precisions, mode='area'): + """Calculate average precision (for single or multiple scales). 
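+
+    A quick sanity check in 'area' mode with invented values (the numbers
+    are illustrative, not from the upstream tests):
+        >>> import numpy as np
+        >>> recalls = np.array([0.5, 1.0])
+        >>> precisions = np.array([1.0, 0.5])
+        >>> float(average_precision(recalls, precisions, mode='area'))
+        0.75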
+
+    Args:
+        recalls (ndarray): shape (num_scales, num_dets) or (num_dets, )
+        precisions (ndarray): shape (num_scales, num_dets) or (num_dets, )
+        mode (str): 'area' or '11points', 'area' means calculating the area
+            under precision-recall curve, '11points' means calculating
+            the average precision of recalls at [0, 0.1, ..., 1]
+
+    Returns:
+        float or ndarray: calculated average precision
+    """
+    no_scale = False
+    if recalls.ndim == 1:
+        no_scale = True
+        recalls = recalls[np.newaxis, :]
+        precisions = precisions[np.newaxis, :]
+    assert recalls.shape == precisions.shape and recalls.ndim == 2
+    num_scales = recalls.shape[0]
+    ap = np.zeros(num_scales, dtype=np.float32)
+    if mode == 'area':
+        zeros = np.zeros((num_scales, 1), dtype=recalls.dtype)
+        ones = np.ones((num_scales, 1), dtype=recalls.dtype)
+        mrec = np.hstack((zeros, recalls, ones))
+        mpre = np.hstack((zeros, precisions, zeros))
+        for i in range(mpre.shape[1] - 1, 0, -1):
+            mpre[:, i - 1] = np.maximum(mpre[:, i - 1], mpre[:, i])
+        for i in range(num_scales):
+            ind = np.where(mrec[i, 1:] != mrec[i, :-1])[0]
+            ap[i] = np.sum(
+                (mrec[i, ind + 1] - mrec[i, ind]) * mpre[i, ind + 1])
+    elif mode == '11points':
+        for i in range(num_scales):
+            for thr in np.arange(0, 1 + 1e-3, 0.1):
+                precs = precisions[i, recalls[i, :] >= thr]
+                prec = precs.max() if precs.size > 0 else 0
+                ap[i] += prec
+        ap /= 11
+    else:
+        raise ValueError(
+            'Unrecognized mode, only "area" and "11points" are supported')
+    if no_scale:
+        ap = ap[0]
+    return ap
+
+
+def tpfp_imagenet(det_bboxes,
+                  gt_bboxes,
+                  gt_bboxes_ignore=None,
+                  default_iou_thr=0.5,
+                  area_ranges=None):
+    """Check if detected bboxes are true positive or false positive.
+
+    Args:
+        det_bboxes (ndarray): Detected bboxes of this image, of shape (m, 5).
+        gt_bboxes (ndarray): GT bboxes of this image, of shape (n, 4).
+        gt_bboxes_ignore (ndarray): Ignored gt bboxes of this image,
+            of shape (k, 4). Default: None
+        default_iou_thr (float): IoU threshold to be considered as matched for
+            medium and large bboxes (small ones have special rules).
+            Default: 0.5.
+        area_ranges (list[tuple] | None): Range of bbox areas to be evaluated,
+            in the format [(min1, max1), (min2, max2), ...]. Default: None.
+
+    Returns:
+        tuple[np.ndarray]: (tp, fp) whose elements are 0 and 1. The shape of
+            each array is (num_scales, m).
+    """
+    # an indicator of ignored gts
+    gt_ignore_inds = np.concatenate(
+        (np.zeros(gt_bboxes.shape[0], dtype=bool),
+         np.ones(gt_bboxes_ignore.shape[0], dtype=bool)))
+    # stack gt_bboxes and gt_bboxes_ignore for convenience
+    gt_bboxes = np.vstack((gt_bboxes, gt_bboxes_ignore))
+
+    num_dets = det_bboxes.shape[0]
+    num_gts = gt_bboxes.shape[0]
+    if area_ranges is None:
+        area_ranges = [(None, None)]
+    num_scales = len(area_ranges)
+    # tp and fp are of shape (num_scales, num_dets), each row is tp or fp
+    # of a certain scale.
+    tp = np.zeros((num_scales, num_dets), dtype=np.float32)
+    fp = np.zeros((num_scales, num_dets), dtype=np.float32)
+    if gt_bboxes.shape[0] == 0:
+        if area_ranges == [(None, None)]:
+            fp[...] = 1
+        else:
+            det_areas = (det_bboxes[:, 2] - det_bboxes[:, 0]) * (
+                det_bboxes[:, 3] - det_bboxes[:, 1])
+            for i, (min_area, max_area) in enumerate(area_ranges):
+                fp[i, (det_areas >= min_area) & (det_areas < max_area)] = 1
+        return tp, fp
+    ious = bbox_overlaps(det_bboxes, gt_bboxes - 1)
+    gt_w = gt_bboxes[:, 2] - gt_bboxes[:, 0]
+    gt_h = gt_bboxes[:, 3] - gt_bboxes[:, 1]
+    iou_thrs = np.minimum((gt_w * gt_h) / ((gt_w + 10.0) * (gt_h + 10.0)),
+                          default_iou_thr)
+    # sort all detections by scores in descending order
+    sort_inds = np.argsort(-det_bboxes[:, -1])
+    for k, (min_area, max_area) in enumerate(area_ranges):
+        gt_covered = np.zeros(num_gts, dtype=bool)
+        # if no area range is specified, gt_area_ignore is all False
+        if min_area is None:
+            gt_area_ignore = np.zeros_like(gt_ignore_inds, dtype=bool)
+        else:
+            gt_areas = gt_w * gt_h
+            gt_area_ignore = (gt_areas < min_area) | (gt_areas >= max_area)
+        for i in sort_inds:
+            max_iou = -1
+            matched_gt = -1
+            # find best overlapped available gt
+            for j in range(num_gts):
+                # different from PASCAL VOC: allow finding other gts if the
+                # best overlapped ones are already matched by other det bboxes
+                if gt_covered[j]:
+                    continue
+                elif ious[i, j] >= iou_thrs[j] and ious[i, j] > max_iou:
+                    max_iou = ious[i, j]
+                    matched_gt = j
+            # there are 4 cases for a det bbox:
+            # 1. it matches a gt, tp = 1, fp = 0
+            # 2. it matches an ignored gt, tp = 0, fp = 0
+            # 3. it matches no gt and within area range, tp = 0, fp = 1
+            # 4. it matches no gt but is beyond area range, tp = 0, fp = 0
+            if matched_gt >= 0:
+                gt_covered[matched_gt] = 1
+                if not (gt_ignore_inds[matched_gt]
+                        or gt_area_ignore[matched_gt]):
+                    tp[k, i] = 1
+            elif min_area is None:
+                fp[k, i] = 1
+            else:
+                bbox = det_bboxes[i, :4]
+                area = (bbox[2] - bbox[0]) * (bbox[3] - bbox[1])
+                if area >= min_area and area < max_area:
+                    fp[k, i] = 1
+    return tp, fp
+
+
+def tpfp_default(det_bboxes,
+                 gt_bboxes,
+                 gt_bboxes_ignore=None,
+                 iou_thr=0.5,
+                 area_ranges=None):
+    """Check if detected bboxes are true positive or false positive.
+
+    Args:
+        det_bboxes (ndarray): Detected bboxes of this image, of shape (m, 5).
+        gt_bboxes (ndarray): GT bboxes of this image, of shape (n, 4).
+        gt_bboxes_ignore (ndarray): Ignored gt bboxes of this image,
+            of shape (k, 4). Default: None
+        iou_thr (float): IoU threshold to be considered as matched.
+            Default: 0.5.
+        area_ranges (list[tuple] | None): Range of bbox areas to be evaluated,
+            in the format [(min1, max1), (min2, max2), ...]. Default: None.
+
+    Returns:
+        tuple[np.ndarray]: (tp, fp) whose elements are 0 and 1. The shape of
+            each array is (num_scales, m).
+    """
+    # an indicator of ignored gts
+    gt_ignore_inds = np.concatenate(
+        (np.zeros(gt_bboxes.shape[0], dtype=bool),
+         np.ones(gt_bboxes_ignore.shape[0], dtype=bool)))
+    # stack gt_bboxes and gt_bboxes_ignore for convenience
+    gt_bboxes = np.vstack((gt_bboxes, gt_bboxes_ignore))
+
+    num_dets = det_bboxes.shape[0]
+    num_gts = gt_bboxes.shape[0]
+    if area_ranges is None:
+        area_ranges = [(None, None)]
+    num_scales = len(area_ranges)
+    # tp and fp are of shape (num_scales, num_dets), each row is tp or fp of
+    # a certain scale
+    tp = np.zeros((num_scales, num_dets), dtype=np.float32)
+    fp = np.zeros((num_scales, num_dets), dtype=np.float32)
+
+    # if there are no gt bboxes in this image, then all det bboxes
+    # within area range are false positives
+    if gt_bboxes.shape[0] == 0:
+        if area_ranges == [(None, None)]:
+            fp[...]
= 1 + else: + det_areas = (det_bboxes[:, 2] - det_bboxes[:, 0]) * ( + det_bboxes[:, 3] - det_bboxes[:, 1]) + for i, (min_area, max_area) in enumerate(area_ranges): + fp[i, (det_areas >= min_area) & (det_areas < max_area)] = 1 + return tp, fp + + ious = bbox_overlaps(det_bboxes, gt_bboxes) + # for each det, the max iou with all gts + ious_max = ious.max(axis=1) + # for each det, which gt overlaps most with it + ious_argmax = ious.argmax(axis=1) + # sort all dets in descending order by scores + sort_inds = np.argsort(-det_bboxes[:, -1]) + for k, (min_area, max_area) in enumerate(area_ranges): + gt_covered = np.zeros(num_gts, dtype=bool) + # if no area range is specified, gt_area_ignore is all False + if min_area is None: + gt_area_ignore = np.zeros_like(gt_ignore_inds, dtype=bool) + else: + gt_areas = (gt_bboxes[:, 2] - gt_bboxes[:, 0]) * ( + gt_bboxes[:, 3] - gt_bboxes[:, 1]) + gt_area_ignore = (gt_areas < min_area) | (gt_areas >= max_area) + for i in sort_inds: + if ious_max[i] >= iou_thr: + matched_gt = ious_argmax[i] + if not (gt_ignore_inds[matched_gt] + or gt_area_ignore[matched_gt]): + if not gt_covered[matched_gt]: + gt_covered[matched_gt] = True + tp[k, i] = 1 + else: + fp[k, i] = 1 + # otherwise ignore this detected bbox, tp = 0, fp = 0 + elif min_area is None: + fp[k, i] = 1 + else: + bbox = det_bboxes[i, :4] + area = (bbox[2] - bbox[0]) * (bbox[3] - bbox[1]) + if area >= min_area and area < max_area: + fp[k, i] = 1 + return tp, fp + + +def get_cls_results(det_results, annotations, class_id): + """Get det results and gt information of a certain class. + + Args: + det_results (list[list]): Same as `eval_map()`. + annotations (list[dict]): Same as `eval_map()`. + class_id (int): ID of a specific class. + + Returns: + tuple[list[np.ndarray]]: detected bboxes, gt bboxes, ignored gt bboxes + """ + cls_dets = [img_res[class_id] for img_res in det_results] + cls_gts = [] + cls_gts_ignore = [] + for ann in annotations: + gt_inds = ann['labels'] == class_id + cls_gts.append(ann['bboxes'][gt_inds, :]) + + if ann.get('labels_ignore', None) is not None: + ignore_inds = ann['labels_ignore'] == class_id + cls_gts_ignore.append(ann['bboxes_ignore'][ignore_inds, :]) + else: + cls_gts_ignore.append(np.empty((0, 4), dtype=np.float32)) + + return cls_dets, cls_gts, cls_gts_ignore + + +def eval_map(det_results, + annotations, + scale_ranges=None, + iou_thr=0.5, + dataset=None, + logger=None, + tpfp_fn=None, + nproc=4): + """Evaluate mAP of a dataset. + + Args: + det_results (list[list]): [[cls1_det, cls2_det, ...], ...]. + The outer list indicates images, and the inner list indicates + per-class detected bboxes. + annotations (list[dict]): Ground truth annotations where each item of + the list indicates an image. Keys of annotations are: + + - `bboxes`: numpy array of shape (n, 4) + - `labels`: numpy array of shape (n, ) + - `bboxes_ignore` (optional): numpy array of shape (k, 4) + - `labels_ignore` (optional): numpy array of shape (k, ) + scale_ranges (list[tuple] | None): Range of scales to be evaluated, + in the format [(min1, max1), (min2, max2), ...]. A range of + (32, 64) means the area range between (32**2, 64**2). + Default: None. + iou_thr (float): IoU threshold to be considered as matched. + Default: 0.5. + dataset (list[str] | str | None): Dataset name or dataset classes, + there are minor differences in metrics for different datsets, e.g. + "voc07", "imagenet_det", etc. Default: None. + logger (logging.Logger | str | None): The way to print the mAP + summary. 
See `mmcv.utils.print_log()` for details. Default: None. + tpfp_fn (callable | None): The function used to determine true/ + false positives. If None, :func:`tpfp_default` is used as default + unless dataset is 'det' or 'vid' (:func:`tpfp_imagenet` in this + case). If it is given as a function, then this function is used + to evaluate tp & fp. Default None. + nproc (int): Processes used for computing TP and FP. + Default: 4. + + Returns: + tuple: (mAP, [dict, dict, ...]) + """ + assert len(det_results) == len(annotations) + + num_imgs = len(det_results) + num_scales = len(scale_ranges) if scale_ranges is not None else 1 + num_classes = len(det_results[0]) # positive class num + area_ranges = ([(rg[0]**2, rg[1]**2) for rg in scale_ranges] + if scale_ranges is not None else None) + + pool = Pool(nproc) + eval_results = [] + for i in range(num_classes): + # get gt and det bboxes of this class + cls_dets, cls_gts, cls_gts_ignore = get_cls_results( + det_results, annotations, i) + # choose proper function according to datasets to compute tp and fp + if tpfp_fn is None: + if dataset in ['det', 'vid']: + tpfp_fn = tpfp_imagenet + else: + tpfp_fn = tpfp_default + if not callable(tpfp_fn): + raise ValueError( + f'tpfp_fn has to be a function or None, but got {tpfp_fn}') + + # compute tp and fp for each image with multiple processes + tpfp = pool.starmap( + tpfp_fn, + zip(cls_dets, cls_gts, cls_gts_ignore, + [iou_thr for _ in range(num_imgs)], + [area_ranges for _ in range(num_imgs)])) + tp, fp = tuple(zip(*tpfp)) + # calculate gt number of each scale + # ignored gts or gts beyond the specific scale are not counted + num_gts = np.zeros(num_scales, dtype=int) + for j, bbox in enumerate(cls_gts): + if area_ranges is None: + num_gts[0] += bbox.shape[0] + else: + gt_areas = (bbox[:, 2] - bbox[:, 0]) * ( + bbox[:, 3] - bbox[:, 1]) + for k, (min_area, max_area) in enumerate(area_ranges): + num_gts[k] += np.sum((gt_areas >= min_area) + & (gt_areas < max_area)) + # sort all det bboxes by score, also sort tp and fp + cls_dets = np.vstack(cls_dets) + num_dets = cls_dets.shape[0] + sort_inds = np.argsort(-cls_dets[:, -1]) + tp = np.hstack(tp)[:, sort_inds] + fp = np.hstack(fp)[:, sort_inds] + # calculate recall and precision with tp and fp + tp = np.cumsum(tp, axis=1) + fp = np.cumsum(fp, axis=1) + eps = np.finfo(np.float32).eps + recalls = tp / np.maximum(num_gts[:, np.newaxis], eps) + precisions = tp / np.maximum((tp + fp), eps) + # calculate AP + if scale_ranges is None: + recalls = recalls[0, :] + precisions = precisions[0, :] + num_gts = num_gts.item() + mode = 'area' if dataset != 'voc07' else '11points' + ap = average_precision(recalls, precisions, mode) + eval_results.append({ + 'num_gts': num_gts, + 'num_dets': num_dets, + 'recall': recalls, + 'precision': precisions, + 'ap': ap + }) + pool.close() + if scale_ranges is not None: + # shape (num_classes, num_scales) + all_ap = np.vstack([cls_result['ap'] for cls_result in eval_results]) + all_num_gts = np.vstack( + [cls_result['num_gts'] for cls_result in eval_results]) + mean_ap = [] + for i in range(num_scales): + if np.any(all_num_gts[:, i] > 0): + mean_ap.append(all_ap[all_num_gts[:, i] > 0, i].mean()) + else: + mean_ap.append(0.0) + else: + aps = [] + for cls_result in eval_results: + if cls_result['num_gts'] > 0: + aps.append(cls_result['ap']) + mean_ap = np.array(aps).mean().item() if aps else 0.0 + + print_map_summary( + mean_ap, eval_results, dataset, area_ranges, logger=logger) + + return mean_ap, eval_results + + +def 
print_map_summary(mean_ap, + results, + dataset=None, + scale_ranges=None, + logger=None): + """Print mAP and results of each class. + + A table will be printed to show the gts/dets/recall/AP of each class and + the mAP. + + Args: + mean_ap (float): Calculated from `eval_map()`. + results (list[dict]): Calculated from `eval_map()`. + dataset (list[str] | str | None): Dataset name or dataset classes. + scale_ranges (list[tuple] | None): Range of scales to be evaluated. + logger (logging.Logger | str | None): The way to print the mAP + summary. See `mmcv.utils.print_log()` for details. Default: None. + """ + + if logger == 'silent': + return + + if isinstance(results[0]['ap'], np.ndarray): + num_scales = len(results[0]['ap']) + else: + num_scales = 1 + + if scale_ranges is not None: + assert len(scale_ranges) == num_scales + + num_classes = len(results) + + recalls = np.zeros((num_scales, num_classes), dtype=np.float32) + aps = np.zeros((num_scales, num_classes), dtype=np.float32) + num_gts = np.zeros((num_scales, num_classes), dtype=int) + for i, cls_result in enumerate(results): + if cls_result['recall'].size > 0: + recalls[:, i] = np.array(cls_result['recall'], ndmin=2)[:, -1] + aps[:, i] = cls_result['ap'] + num_gts[:, i] = cls_result['num_gts'] + + if dataset is None: + label_names = [str(i) for i in range(num_classes)] + elif mmcv.is_str(dataset): + label_names = get_classes(dataset) + else: + label_names = dataset + + if not isinstance(mean_ap, list): + mean_ap = [mean_ap] + + header = ['class', 'gts', 'dets', 'recall', 'ap'] + for i in range(num_scales): + if scale_ranges is not None: + print_log(f'Scale range {scale_ranges[i]}', logger=logger) + table_data = [header] + for j in range(num_classes): + row_data = [ + label_names[j], num_gts[i, j], results[j]['num_dets'], + f'{recalls[i, j]:.3f}', f'{aps[i, j]:.3f}' + ] + table_data.append(row_data) + table_data.append(['mAP', '', '', '', f'{mean_ap[i]:.3f}']) + table = AsciiTable(table_data) + table.inner_footing_row_border = True + print_log('\n' + table.table, logger=logger) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/recall.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/recall.py new file mode 100644 index 0000000000000000000000000000000000000000..23ec744f552db1a4a76bfa63b7cc8b357deb3140 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/evaluation/recall.py @@ -0,0 +1,189 @@ +from collections.abc import Sequence + +import numpy as np +from mmcv.utils import print_log +from terminaltables import AsciiTable + +from .bbox_overlaps import bbox_overlaps + + +def _recalls(all_ious, proposal_nums, thrs): + + img_num = all_ious.shape[0] + total_gt_num = sum([ious.shape[0] for ious in all_ious]) + + _ious = np.zeros((proposal_nums.size, total_gt_num), dtype=np.float32) + for k, proposal_num in enumerate(proposal_nums): + tmp_ious = np.zeros(0) + for i in range(img_num): + ious = all_ious[i][:, :proposal_num].copy() + gt_ious = np.zeros((ious.shape[0])) + if ious.size == 0: + tmp_ious = np.hstack((tmp_ious, gt_ious)) + continue + for j in range(ious.shape[0]): + gt_max_overlaps = ious.argmax(axis=1) + max_ious = ious[np.arange(0, ious.shape[0]), gt_max_overlaps] + gt_idx = max_ious.argmax() + gt_ious[j] = max_ious[gt_idx] + box_idx = gt_max_overlaps[gt_idx] + ious[gt_idx, :] = -1 + ious[:, box_idx] = -1 + tmp_ious = np.hstack((tmp_ious, gt_ious)) + _ious[k, :] = tmp_ious + + _ious = np.fliplr(np.sort(_ious, axis=1)) + recalls = np.zeros((proposal_nums.size, 
thrs.size)) + for i, thr in enumerate(thrs): + recalls[:, i] = (_ious >= thr).sum(axis=1) / float(total_gt_num) + + return recalls + + +def set_recall_param(proposal_nums, iou_thrs): + """Check proposal_nums and iou_thrs and set correct format.""" + if isinstance(proposal_nums, Sequence): + _proposal_nums = np.array(proposal_nums) + elif isinstance(proposal_nums, int): + _proposal_nums = np.array([proposal_nums]) + else: + _proposal_nums = proposal_nums + + if iou_thrs is None: + _iou_thrs = np.array([0.5]) + elif isinstance(iou_thrs, Sequence): + _iou_thrs = np.array(iou_thrs) + elif isinstance(iou_thrs, float): + _iou_thrs = np.array([iou_thrs]) + else: + _iou_thrs = iou_thrs + + return _proposal_nums, _iou_thrs + + +def eval_recalls(gts, + proposals, + proposal_nums=None, + iou_thrs=0.5, + logger=None): + """Calculate recalls. + + Args: + gts (list[ndarray]): a list of arrays of shape (n, 4) + proposals (list[ndarray]): a list of arrays of shape (k, 4) or (k, 5) + proposal_nums (int | Sequence[int]): Top N proposals to be evaluated. + iou_thrs (float | Sequence[float]): IoU thresholds. Default: 0.5. + logger (logging.Logger | str | None): The way to print the recall + summary. See `mmcv.utils.print_log()` for details. Default: None. + + Returns: + ndarray: recalls of different ious and proposal nums + """ + + img_num = len(gts) + assert img_num == len(proposals) + + proposal_nums, iou_thrs = set_recall_param(proposal_nums, iou_thrs) + + all_ious = [] + for i in range(img_num): + if proposals[i].ndim == 2 and proposals[i].shape[1] == 5: + scores = proposals[i][:, 4] + sort_idx = np.argsort(scores)[::-1] + img_proposal = proposals[i][sort_idx, :] + else: + img_proposal = proposals[i] + prop_num = min(img_proposal.shape[0], proposal_nums[-1]) + if gts[i] is None or gts[i].shape[0] == 0: + ious = np.zeros((0, img_proposal.shape[0]), dtype=np.float32) + else: + ious = bbox_overlaps(gts[i], img_proposal[:prop_num, :4]) + all_ious.append(ious) + all_ious = np.array(all_ious) + recalls = _recalls(all_ious, proposal_nums, iou_thrs) + + print_recall_summary(recalls, proposal_nums, iou_thrs, logger=logger) + return recalls + + +def print_recall_summary(recalls, + proposal_nums, + iou_thrs, + row_idxs=None, + col_idxs=None, + logger=None): + """Print recalls in a table. + + Args: + recalls (ndarray): calculated from `bbox_recalls` + proposal_nums (ndarray or list): top N proposals + iou_thrs (ndarray or list): iou thresholds + row_idxs (ndarray): which rows(proposal nums) to print + col_idxs (ndarray): which cols(iou thresholds) to print + logger (logging.Logger | str | None): The way to print the recall + summary. See `mmcv.utils.print_log()` for details. Default: None. + """ + proposal_nums = np.array(proposal_nums, dtype=np.int32) + iou_thrs = np.array(iou_thrs) + if row_idxs is None: + row_idxs = np.arange(proposal_nums.size) + if col_idxs is None: + col_idxs = np.arange(iou_thrs.size) + row_header = [''] + iou_thrs[col_idxs].tolist() + table_data = [row_header] + for i, num in enumerate(proposal_nums[row_idxs]): + row = [f'{val:.3f}' for val in recalls[row_idxs[i], col_idxs].tolist()] + row.insert(0, num) + table_data.append(row) + table = AsciiTable(table_data) + print_log('\n' + table.table, logger=logger) + + +def plot_num_recall(recalls, proposal_nums): + """Plot Proposal_num-Recalls curve. 
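+
+    A minimal usage sketch (arrays invented for illustration). Note that
+    the ``plt.axis`` call below uses ``proposal_nums.max()``, so despite
+    the Args below, an ndarray is the safe input type here:
+        >>> import numpy as np
+        >>> plot_num_recall(np.array([0.3, 0.6, 0.8]),
+        ...                 np.array([100, 300, 1000]))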
+ + Args: + recalls(ndarray or list): shape (k,) + proposal_nums(ndarray or list): same shape as `recalls` + """ + if isinstance(proposal_nums, np.ndarray): + _proposal_nums = proposal_nums.tolist() + else: + _proposal_nums = proposal_nums + if isinstance(recalls, np.ndarray): + _recalls = recalls.tolist() + else: + _recalls = recalls + + import matplotlib.pyplot as plt + f = plt.figure() + plt.plot([0] + _proposal_nums, [0] + _recalls) + plt.xlabel('Proposal num') + plt.ylabel('Recall') + plt.axis([0, proposal_nums.max(), 0, 1]) + f.show() + + +def plot_iou_recall(recalls, iou_thrs): + """Plot IoU-Recalls curve. + + Args: + recalls(ndarray or list): shape (k,) + iou_thrs(ndarray or list): same shape as `recalls` + """ + if isinstance(iou_thrs, np.ndarray): + _iou_thrs = iou_thrs.tolist() + else: + _iou_thrs = iou_thrs + if isinstance(recalls, np.ndarray): + _recalls = recalls.tolist() + else: + _recalls = recalls + + import matplotlib.pyplot as plt + f = plt.figure() + plt.plot(_iou_thrs + [1.0], _recalls + [0.]) + plt.xlabel('IoU') + plt.ylabel('Recall') + plt.axis([iou_thrs.min(), 1, 0, 1]) + f.show() diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/export/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/export/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..91685619b92d0994061000764fdaf387c48ff163 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/export/__init__.py @@ -0,0 +1,11 @@ +from .onnx_helper import (add_dummy_nms_for_onnx, dynamic_clip_for_onnx, + get_k_for_topk) +from .pytorch2onnx import (build_model_from_cfg, + generate_inputs_and_wrap_model, + preprocess_example_input) + +__all__ = [ + 'build_model_from_cfg', 'generate_inputs_and_wrap_model', + 'preprocess_example_input', 'get_k_for_topk', 'add_dummy_nms_for_onnx', + 'dynamic_clip_for_onnx' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/export/model_wrappers.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/export/model_wrappers.py new file mode 100644 index 0000000000000000000000000000000000000000..4f4b35b9742679a09875546eb5da5ae5a03b1ab4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/export/model_wrappers.py @@ -0,0 +1,182 @@ +import os.path as osp +import warnings + +import numpy as np +import torch + +from mmdet.core import bbox2result +from mmdet.models import BaseDetector + + +class DeployBaseDetector(BaseDetector): + """DeployBaseDetector.""" + + def __init__(self, class_names, device_id): + super(DeployBaseDetector, self).__init__() + self.CLASSES = class_names + self.device_id = device_id + + def simple_test(self, img, img_metas, **kwargs): + raise NotImplementedError('This method is not implemented.') + + def aug_test(self, imgs, img_metas, **kwargs): + raise NotImplementedError('This method is not implemented.') + + def extract_feat(self, imgs): + raise NotImplementedError('This method is not implemented.') + + def forward_train(self, imgs, img_metas, **kwargs): + raise NotImplementedError('This method is not implemented.') + + def val_step(self, data, optimizer): + raise NotImplementedError('This method is not implemented.') + + def train_step(self, data, optimizer): + raise NotImplementedError('This method is not implemented.') + + def aforward_test(self, *, img, img_metas, **kwargs): + raise NotImplementedError('This method is not implemented.') + + def async_simple_test(self, img, img_metas, **kwargs): + raise NotImplementedError('This method is 
not implemented.')
+
+    def forward(self, img, img_metas, return_loss=True, **kwargs):
+        outputs = self.forward_test(img, img_metas, **kwargs)
+        batch_dets, batch_labels = outputs[:2]
+        batch_masks = outputs[2] if len(outputs) == 3 else None
+        batch_size = img[0].shape[0]
+        img_metas = img_metas[0]
+        results = []
+        rescale = kwargs.get('rescale', True)
+        for i in range(batch_size):
+            dets, labels = batch_dets[i], batch_labels[i]
+            if rescale:
+                scale_factor = img_metas[i]['scale_factor']
+
+                if isinstance(scale_factor, (list, tuple, np.ndarray)):
+                    assert len(scale_factor) == 4
+                    scale_factor = np.array(scale_factor)[None, :]  # [1,4]
+                dets[:, :4] /= scale_factor
+
+            if 'border' in img_metas[i]:
+                # offset pixel of the top-left corners between original image
+                # and padded/enlarged image, 'border' is used when exporting
+                # CornerNet and CentripetalNet to onnx
+                x_off = img_metas[i]['border'][2]
+                y_off = img_metas[i]['border'][0]
+                dets[:, [0, 2]] -= x_off
+                dets[:, [1, 3]] -= y_off
+                dets[:, :4] *= (dets[:, :4] > 0).astype(dets.dtype)
+
+            dets_results = bbox2result(dets, labels, len(self.CLASSES))
+
+            if batch_masks is not None:
+                masks = batch_masks[i]
+                img_h, img_w = img_metas[i]['img_shape'][:2]
+                ori_h, ori_w = img_metas[i]['ori_shape'][:2]
+                masks = masks[:, :img_h, :img_w]
+                if rescale:
+                    masks = masks.astype(np.float32)
+                    masks = torch.from_numpy(masks)
+                    masks = torch.nn.functional.interpolate(
+                        masks.unsqueeze(0), size=(ori_h, ori_w))
+                    masks = masks.squeeze(0).detach().numpy()
+                if masks.dtype != bool:
+                    masks = masks >= 0.5
+                segms_results = [[] for _ in range(len(self.CLASSES))]
+                for j in range(len(dets)):
+                    segms_results[labels[j]].append(masks[j])
+                results.append((dets_results, segms_results))
+            else:
+                results.append(dets_results)
+        return results
+
+
+class ONNXRuntimeDetector(DeployBaseDetector):
+    """Wrapper for detector's inference with ONNXRuntime."""
+
+    def __init__(self, onnx_file, class_names, device_id):
+        super(ONNXRuntimeDetector, self).__init__(class_names, device_id)
+        import onnxruntime as ort
+
+        # get the custom op path
+        ort_custom_op_path = ''
+        try:
+            from mmcv.ops import get_onnxruntime_op_path
+            ort_custom_op_path = get_onnxruntime_op_path()
+        except (ImportError, ModuleNotFoundError):
+            warnings.warn('If input model has custom op from mmcv, \
+                you may have to build mmcv with ONNXRuntime from source.')
+        session_options = ort.SessionOptions()
+        # register custom op for onnxruntime
+        if osp.exists(ort_custom_op_path):
+            session_options.register_custom_ops_library(ort_custom_op_path)
+        sess = ort.InferenceSession(onnx_file, session_options)
+        providers = ['CPUExecutionProvider']
+        options = [{}]
+        is_cuda_available = ort.get_device() == 'GPU'
+        if is_cuda_available:
+            providers.insert(0, 'CUDAExecutionProvider')
+            options.insert(0, {'device_id': device_id})
+
+        sess.set_providers(providers, options)
+
+        self.sess = sess
+        self.io_binding = sess.io_binding()
+        self.output_names = [_.name for _ in sess.get_outputs()]
+        self.is_cuda_available = is_cuda_available
+
+    def forward_test(self, imgs, img_metas, **kwargs):
+        input_data = imgs[0]
+        # set io binding for inputs/outputs
+        device_type = 'cuda' if self.is_cuda_available else 'cpu'
+        if not self.is_cuda_available:
+            input_data = input_data.cpu()
+        self.io_binding.bind_input(
+            name='input',
+            device_type=device_type,
+            device_id=self.device_id,
+            element_type=np.float32,
+            shape=input_data.shape,
+            buffer_ptr=input_data.data_ptr())
+
+        for name in self.output_names:
self.io_binding.bind_output(name) + # run session to get outputs + self.sess.run_with_iobinding(self.io_binding) + ort_outputs = self.io_binding.copy_outputs_to_cpu() + return ort_outputs + + +class TensorRTDetector(DeployBaseDetector): + """Wrapper for detector's inference with TensorRT.""" + + def __init__(self, engine_file, class_names, device_id, output_names=None): + super(TensorRTDetector, self).__init__(class_names, device_id) + warnings.warn('`output_names` is deprecated and will be removed in ' + 'future releases.') + from mmcv.tensorrt import TRTWraper, load_tensorrt_plugin + try: + load_tensorrt_plugin() + except (ImportError, ModuleNotFoundError): + warnings.warn('If input model has custom op from mmcv, \ + you may have to build mmcv with TensorRT from source.') + + output_names = ['dets', 'labels'] + model = TRTWraper(engine_file, ['input'], output_names) + with_masks = False + # if TensorRT has totally 4 inputs/outputs, then + # the detector should have `mask` output. + if len(model.engine) == 4: + model.output_names = output_names + ['masks'] + with_masks = True + self.model = model + self.with_masks = with_masks + + def forward_test(self, imgs, img_metas, **kwargs): + input_data = imgs[0].contiguous() + with torch.cuda.device(self.device_id), torch.no_grad(): + outputs = self.model({'input': input_data}) + outputs = [outputs[name] for name in self.model.output_names] + outputs = [out.detach().cpu().numpy() for out in outputs] + return outputs diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/export/onnx_helper.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/export/onnx_helper.py new file mode 100644 index 0000000000000000000000000000000000000000..4bab842dedaa80ae164b0e071b21bb34f5382aeb --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/export/onnx_helper.py @@ -0,0 +1,222 @@ +import os + +import torch + + +def dynamic_clip_for_onnx(x1, y1, x2, y2, max_shape): + """Clip boxes dynamically for onnx. + + Since torch.clamp cannot have dynamic `min` and `max`, we scale the + boxes by 1/max_shape and clamp in the range [0, 1]. + + Args: + x1 (Tensor): The x1 for bounding boxes. + y1 (Tensor): The y1 for bounding boxes. + x2 (Tensor): The x2 for bounding boxes. + y2 (Tensor): The y2 for bounding boxes. + max_shape (Tensor or torch.Size): The (H,W) of original image. + Returns: + tuple(Tensor): The clipped x1, y1, x2, y2. + """ + assert isinstance( + max_shape, + torch.Tensor), '`max_shape` should be tensor of (h,w) for onnx' + + # scale by 1/max_shape + x1 = x1 / max_shape[1] + y1 = y1 / max_shape[0] + x2 = x2 / max_shape[1] + y2 = y2 / max_shape[0] + + # clamp [0, 1] + x1 = torch.clamp(x1, 0, 1) + y1 = torch.clamp(y1, 0, 1) + x2 = torch.clamp(x2, 0, 1) + y2 = torch.clamp(y2, 0, 1) + + # scale back + x1 = x1 * max_shape[1] + y1 = y1 * max_shape[0] + x2 = x2 * max_shape[1] + y2 = y2 * max_shape[0] + return x1, y1, x2, y2 + + +def get_k_for_topk(k, size): + """Get k of TopK for onnx exporting. + + The K of TopK in TensorRT should not be a Tensor, while in ONNX Runtime + it could be a Tensor.Due to dynamic shape feature, we have to decide + whether to do TopK and what K it should be while exporting to ONNX. + If returned K is less than zero, it means we do not have to do + TopK operation. + + Args: + k (int or Tensor): The set k value for nms from config file. + size (Tensor or torch.Size): The number of elements of \ + TopK's input tensor + Returns: + tuple: (int or Tensor): The final K for TopK. 
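+
+    Example (eager mode, i.e. outside ONNX export; invented values):
+        >>> get_k_for_topk(5, 10)   # k is valid, keep it
+        5
+        >>> get_k_for_topk(20, 10)  # k >= size, TopK can be skipped
+        -1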
+ """ + ret_k = -1 + if k <= 0 or size <= 0: + return ret_k + if torch.onnx.is_in_onnx_export(): + is_trt_backend = os.environ.get('ONNX_BACKEND') == 'MMCVTensorRT' + if is_trt_backend: + # TensorRT does not support dynamic K with TopK op + if 0 < k < size: + ret_k = k + else: + # Always keep topk op for dynamic input in onnx for ONNX Runtime + ret_k = torch.where(k < size, k, size) + elif k < size: + ret_k = k + else: + # ret_k is -1 + pass + return ret_k + + +def add_dummy_nms_for_onnx(boxes, + scores, + max_output_boxes_per_class=1000, + iou_threshold=0.5, + score_threshold=0.05, + pre_top_k=-1, + after_top_k=-1, + labels=None): + """Create a dummy onnx::NonMaxSuppression op while exporting to ONNX. + + This function helps exporting to onnx with batch and multiclass NMS op. + It only supports class-agnostic detection results. That is, the scores + is of shape (N, num_bboxes, num_classes) and the boxes is of shape + (N, num_boxes, 4). + + Args: + boxes (Tensor): The bounding boxes of shape [N, num_boxes, 4] + scores (Tensor): The detection scores of shape + [N, num_boxes, num_classes] + max_output_boxes_per_class (int): Maximum number of output + boxes per class of nms. Defaults to 1000. + iou_threshold (float): IOU threshold of nms. Defaults to 0.5 + score_threshold (float): score threshold of nms. + Defaults to 0.05. + pre_top_k (bool): Number of top K boxes to keep before nms. + Defaults to -1. + after_top_k (int): Number of top K boxes to keep after nms. + Defaults to -1. + labels (Tensor, optional): It not None, explicit labels would be used. + Otherwise, labels would be automatically generated using + num_classed. Defaults to None. + + Returns: + tuple[Tensor, Tensor]: dets of shape [N, num_det, 5] and class labels + of shape [N, num_det]. + """ + max_output_boxes_per_class = torch.LongTensor([max_output_boxes_per_class]) + iou_threshold = torch.tensor([iou_threshold], dtype=torch.float32) + score_threshold = torch.tensor([score_threshold], dtype=torch.float32) + batch_size = scores.shape[0] + num_class = scores.shape[2] + + nms_pre = torch.tensor(pre_top_k, device=scores.device, dtype=torch.long) + nms_pre = get_k_for_topk(nms_pre, boxes.shape[1]) + + if nms_pre > 0: + max_scores, _ = scores.max(-1) + _, topk_inds = max_scores.topk(nms_pre) + batch_inds = torch.arange(batch_size).view( + -1, 1).expand_as(topk_inds).long() + # Avoid onnx2tensorrt issue in https://github.com/NVIDIA/TensorRT/issues/1134 # noqa: E501 + transformed_inds = boxes.shape[1] * batch_inds + topk_inds + boxes = boxes.reshape(-1, 4)[transformed_inds, :].reshape( + batch_size, -1, 4) + scores = scores.reshape(-1, num_class)[transformed_inds, :].reshape( + batch_size, -1, num_class) + if labels is not None: + labels = labels.reshape(-1, 1)[transformed_inds].reshape( + batch_size, -1) + + scores = scores.permute(0, 2, 1) + num_box = boxes.shape[1] + # turn off tracing to create a dummy output of nms + state = torch._C._get_tracing_state() + # dummy indices of nms's output + num_fake_det = 2 + batch_inds = torch.randint(batch_size, (num_fake_det, 1)) + cls_inds = torch.randint(num_class, (num_fake_det, 1)) + box_inds = torch.randint(num_box, (num_fake_det, 1)) + indices = torch.cat([batch_inds, cls_inds, box_inds], dim=1) + output = indices + setattr(DummyONNXNMSop, 'output', output) + + # open tracing + torch._C._set_tracing_state(state) + selected_indices = DummyONNXNMSop.apply(boxes, scores, + max_output_boxes_per_class, + iou_threshold, score_threshold) + + batch_inds, cls_inds = selected_indices[:, 0], 
selected_indices[:, 1] + box_inds = selected_indices[:, 2] + if labels is None: + labels = torch.arange(num_class, dtype=torch.long).to(scores.device) + labels = labels.view(1, num_class, 1).expand_as(scores) + scores = scores.reshape(-1, 1) + boxes = boxes.reshape(batch_size, -1).repeat(1, num_class).reshape(-1, 4) + pos_inds = (num_class * batch_inds + cls_inds) * num_box + box_inds + mask = scores.new_zeros(scores.shape) + # Avoid onnx2tensorrt issue in https://github.com/NVIDIA/TensorRT/issues/1134 # noqa: E501 + # PyTorch style code: mask[batch_inds, box_inds] += 1 + mask[pos_inds, :] += 1 + scores = scores * mask + boxes = boxes * mask + + scores = scores.reshape(batch_size, -1) + boxes = boxes.reshape(batch_size, -1, 4) + labels = labels.reshape(batch_size, -1) + + nms_after = torch.tensor( + after_top_k, device=scores.device, dtype=torch.long) + nms_after = get_k_for_topk(nms_after, num_box * num_class) + + if nms_after > 0: + _, topk_inds = scores.topk(nms_after) + batch_inds = torch.arange(batch_size).view(-1, 1).expand_as(topk_inds) + # Avoid onnx2tensorrt issue in https://github.com/NVIDIA/TensorRT/issues/1134 # noqa: E501 + transformed_inds = scores.shape[1] * batch_inds + topk_inds + scores = scores.reshape(-1, 1)[transformed_inds, :].reshape( + batch_size, -1) + boxes = boxes.reshape(-1, 4)[transformed_inds, :].reshape( + batch_size, -1, 4) + labels = labels.reshape(-1, 1)[transformed_inds, :].reshape( + batch_size, -1) + + scores = scores.unsqueeze(2) + dets = torch.cat([boxes, scores], dim=2) + return dets, labels + + +class DummyONNXNMSop(torch.autograd.Function): + """DummyONNXNMSop. + + This class is only for creating onnx::NonMaxSuppression. + """ + + @staticmethod + def forward(ctx, boxes, scores, max_output_boxes_per_class, iou_threshold, + score_threshold): + + return DummyONNXNMSop.output + + @staticmethod + def symbolic(g, boxes, scores, max_output_boxes_per_class, iou_threshold, + score_threshold): + return g.op( + 'NonMaxSuppression', + boxes, + scores, + max_output_boxes_per_class, + iou_threshold, + score_threshold, + outputs=1) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/export/pytorch2onnx.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/export/pytorch2onnx.py new file mode 100644 index 0000000000000000000000000000000000000000..b9384a858c74e394d40cad9c66e0eec689f0d35f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/export/pytorch2onnx.py @@ -0,0 +1,161 @@ +from functools import partial + +import mmcv +import numpy as np +import torch +from mmcv.runner import load_checkpoint + + +def generate_inputs_and_wrap_model(config_path, + checkpoint_path, + input_config, + cfg_options=None): + """Prepare sample input and wrap model for ONNX export. + + The ONNX export API only accept args, and all inputs should be + torch.Tensor or corresponding types (such as tuple of tensor). + So we should call this function before exporting. This function will: + + 1. generate corresponding inputs which are used to execute the model. + 2. Wrap the model's forward function. + + For example, the MMDet models' forward function has a parameter + ``return_loss:bool``. As we want to set it as False while export API + supports neither bool type or kwargs. 
So we have to replace the forward + like: ``model.forward = partial(model.forward, return_loss=False)`` + + Args: + config_path (str): the OpenMMLab config for the model we want to + export to ONNX + checkpoint_path (str): Path to the corresponding checkpoint + input_config (dict): the exactly data in this dict depends on the + framework. For MMSeg, we can just declare the input shape, + and generate the dummy data accordingly. However, for MMDet, + we may pass the real img path, or the NMS will return None + as there is no legal bbox. + + Returns: + tuple: (model, tensor_data) wrapped model which can be called by \ + model(*tensor_data) and a list of inputs which are used to execute \ + the model while exporting. + """ + + model = build_model_from_cfg( + config_path, checkpoint_path, cfg_options=cfg_options) + one_img, one_meta = preprocess_example_input(input_config) + tensor_data = [one_img] + model.forward = partial( + model.forward, img_metas=[[one_meta]], return_loss=False) + + # pytorch has some bug in pytorch1.3, we have to fix it + # by replacing these existing op + opset_version = 11 + # put the import within the function thus it will not cause import error + # when not using this function + try: + from mmcv.onnx.symbolic import register_extra_symbolics + except ModuleNotFoundError: + raise NotImplementedError('please update mmcv to version>=v1.0.4') + register_extra_symbolics(opset_version) + + return model, tensor_data + + +def build_model_from_cfg(config_path, checkpoint_path, cfg_options=None): + """Build a model from config and load the given checkpoint. + + Args: + config_path (str): the OpenMMLab config for the model we want to + export to ONNX + checkpoint_path (str): Path to the corresponding checkpoint + + Returns: + torch.nn.Module: the built model + """ + from mmdet.models import build_detector + + cfg = mmcv.Config.fromfile(config_path) + if cfg_options is not None: + cfg.merge_from_dict(cfg_options) + # import modules from string list. + if cfg.get('custom_imports', None): + from mmcv.utils import import_modules_from_strings + import_modules_from_strings(**cfg['custom_imports']) + # set cudnn_benchmark + if cfg.get('cudnn_benchmark', False): + torch.backends.cudnn.benchmark = True + cfg.model.pretrained = None + cfg.data.test.test_mode = True + + # build the model + cfg.model.train_cfg = None + model = build_detector(cfg.model, test_cfg=cfg.get('test_cfg')) + checkpoint = load_checkpoint(model, checkpoint_path, map_location='cpu') + if 'CLASSES' in checkpoint.get('meta', {}): + model.CLASSES = checkpoint['meta']['CLASSES'] + else: + from mmdet.datasets import DATASETS + dataset = DATASETS.get(cfg.data.test['type']) + assert (dataset is not None) + model.CLASSES = dataset.CLASSES + model.cpu().eval() + return model + + +def preprocess_example_input(input_config): + """Prepare an example input image for ``generate_inputs_and_wrap_model``. + + Args: + input_config (dict): customized config describing the example input. + + Returns: + tuple: (one_img, one_meta), tensor of the example input image and \ + meta information for the example input image. 
+ + Examples: + >>> from mmdet.core.export import preprocess_example_input + >>> input_config = { + >>> 'input_shape': (1,3,224,224), + >>> 'input_path': 'demo/demo.jpg', + >>> 'normalize_cfg': { + >>> 'mean': (123.675, 116.28, 103.53), + >>> 'std': (58.395, 57.12, 57.375) + >>> } + >>> } + >>> one_img, one_meta = preprocess_example_input(input_config) + >>> print(one_img.shape) + torch.Size([1, 3, 224, 224]) + >>> print(one_meta) + {'img_shape': (224, 224, 3), + 'ori_shape': (224, 224, 3), + 'pad_shape': (224, 224, 3), + 'filename': '.png', + 'scale_factor': 1.0, + 'flip': False} + """ + input_path = input_config['input_path'] + input_shape = input_config['input_shape'] + one_img = mmcv.imread(input_path) + one_img = mmcv.imresize(one_img, input_shape[2:][::-1]) + show_img = one_img.copy() + if 'normalize_cfg' in input_config.keys(): + normalize_cfg = input_config['normalize_cfg'] + mean = np.array(normalize_cfg['mean'], dtype=np.float32) + std = np.array(normalize_cfg['std'], dtype=np.float32) + to_rgb = normalize_cfg.get('to_rgb', True) + one_img = mmcv.imnormalize(one_img, mean, std, to_rgb=to_rgb) + one_img = one_img.transpose(2, 0, 1) + one_img = torch.from_numpy(one_img).unsqueeze(0).float().requires_grad_( + True) + (_, C, H, W) = input_shape + one_meta = { + 'img_shape': (H, W, C), + 'ori_shape': (H, W, C), + 'pad_shape': (H, W, C), + 'filename': '.png', + 'scale_factor': np.ones(4, dtype=np.float32), + 'flip': False, + 'show_img': show_img, + } + + return one_img, one_meta diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/mask/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/mask/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ab1e88bc686d5c2fe72b3114cb2b3e372e73a0f8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/mask/__init__.py @@ -0,0 +1,8 @@ +from .mask_target import mask_target +from .structures import BaseInstanceMasks, BitmapMasks, PolygonMasks +from .utils import encode_mask_results, split_combined_polys + +__all__ = [ + 'split_combined_polys', 'mask_target', 'BaseInstanceMasks', 'BitmapMasks', + 'PolygonMasks', 'encode_mask_results' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/mask/mask_target.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/mask/mask_target.py new file mode 100644 index 0000000000000000000000000000000000000000..cfccd77c67ee9867b4c8b1e5a88da16f1af99366 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/mask/mask_target.py @@ -0,0 +1,126 @@ +import numpy as np +import torch +from torch.nn.modules.utils import _pair + + +def mask_target(pos_proposals_list, pos_assigned_gt_inds_list, gt_masks_list, + cfg): + """Compute mask target for positive proposals in multiple images. + + Args: + pos_proposals_list (list[Tensor]): Positive proposals in multiple + images. + pos_assigned_gt_inds_list (list[Tensor]): Assigned GT indices for each + positive proposals. + gt_masks_list (list[:obj:`BaseInstanceMasks`]): Ground truth masks of + each image. + cfg (dict): Config dict that specifies the mask size. + + Returns: + list[Tensor]: Mask target of each image. 
+ + Example: + >>> import mmcv + >>> import mmdet + >>> from mmdet.core.mask import BitmapMasks + >>> from mmdet.core.mask.mask_target import * + >>> H, W = 17, 18 + >>> cfg = mmcv.Config({'mask_size': (13, 14)}) + >>> rng = np.random.RandomState(0) + >>> # Positive proposals (tl_x, tl_y, br_x, br_y) for each image + >>> pos_proposals_list = [ + >>> torch.Tensor([ + >>> [ 7.2425, 5.5929, 13.9414, 14.9541], + >>> [ 7.3241, 3.6170, 16.3850, 15.3102], + >>> ]), + >>> torch.Tensor([ + >>> [ 4.8448, 6.4010, 7.0314, 9.7681], + >>> [ 5.9790, 2.6989, 7.4416, 4.8580], + >>> [ 0.0000, 0.0000, 0.1398, 9.8232], + >>> ]), + >>> ] + >>> # Corresponding class index for each proposal for each image + >>> pos_assigned_gt_inds_list = [ + >>> torch.LongTensor([7, 0]), + >>> torch.LongTensor([5, 4, 1]), + >>> ] + >>> # Ground truth mask for each true object for each image + >>> gt_masks_list = [ + >>> BitmapMasks(rng.rand(8, H, W), height=H, width=W), + >>> BitmapMasks(rng.rand(6, H, W), height=H, width=W), + >>> ] + >>> mask_targets = mask_target( + >>> pos_proposals_list, pos_assigned_gt_inds_list, + >>> gt_masks_list, cfg) + >>> assert mask_targets.shape == (5,) + cfg['mask_size'] + """ + cfg_list = [cfg for _ in range(len(pos_proposals_list))] + mask_targets = map(mask_target_single, pos_proposals_list, + pos_assigned_gt_inds_list, gt_masks_list, cfg_list) + mask_targets = list(mask_targets) + if len(mask_targets) > 0: + mask_targets = torch.cat(mask_targets) + return mask_targets + + +def mask_target_single(pos_proposals, pos_assigned_gt_inds, gt_masks, cfg): + """Compute mask target for each positive proposal in the image. + + Args: + pos_proposals (Tensor): Positive proposals. + pos_assigned_gt_inds (Tensor): Assigned GT inds of positive proposals. + gt_masks (:obj:`BaseInstanceMasks`): GT masks in the format of Bitmap + or Polygon. + cfg (dict): Config dict that indicate the mask size. + + Returns: + Tensor: Mask target of each positive proposals in the image. 
+ + Example: + >>> import mmcv + >>> import mmdet + >>> from mmdet.core.mask import BitmapMasks + >>> from mmdet.core.mask.mask_target import * # NOQA + >>> H, W = 32, 32 + >>> cfg = mmcv.Config({'mask_size': (7, 11)}) + >>> rng = np.random.RandomState(0) + >>> # Masks for each ground truth box (relative to the image) + >>> gt_masks_data = rng.rand(3, H, W) + >>> gt_masks = BitmapMasks(gt_masks_data, height=H, width=W) + >>> # Predicted positive boxes in one image + >>> pos_proposals = torch.FloatTensor([ + >>> [ 16.2, 5.5, 19.9, 20.9], + >>> [ 17.3, 13.6, 19.3, 19.3], + >>> [ 14.8, 16.4, 17.0, 23.7], + >>> [ 0.0, 0.0, 16.0, 16.0], + >>> [ 4.0, 0.0, 20.0, 16.0], + >>> ]) + >>> # For each predicted proposal, its assignment to a gt mask + >>> pos_assigned_gt_inds = torch.LongTensor([0, 1, 2, 1, 1]) + >>> mask_targets = mask_target_single( + >>> pos_proposals, pos_assigned_gt_inds, gt_masks, cfg) + >>> assert mask_targets.shape == (5,) + cfg['mask_size'] + """ + device = pos_proposals.device + mask_size = _pair(cfg.mask_size) + binarize = not cfg.get('soft_mask_target', False) + num_pos = pos_proposals.size(0) + if num_pos > 0: + proposals_np = pos_proposals.cpu().numpy() + maxh, maxw = gt_masks.height, gt_masks.width + proposals_np[:, [0, 2]] = np.clip(proposals_np[:, [0, 2]], 0, maxw) + proposals_np[:, [1, 3]] = np.clip(proposals_np[:, [1, 3]], 0, maxh) + pos_assigned_gt_inds = pos_assigned_gt_inds.cpu().numpy() + + mask_targets = gt_masks.crop_and_resize( + proposals_np, + mask_size, + device=device, + inds=pos_assigned_gt_inds, + binarize=binarize).to_ndarray() + + mask_targets = torch.from_numpy(mask_targets).float().to(device) + else: + mask_targets = pos_proposals.new_zeros((0, ) + mask_size) + + return mask_targets diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/mask/structures.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/mask/structures.py new file mode 100644 index 0000000000000000000000000000000000000000..6f5a62aea8e611408fb4e726312fef9934675df9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/mask/structures.py @@ -0,0 +1,1037 @@ +from abc import ABCMeta, abstractmethod + +import cv2 +import mmcv +import numpy as np +import pycocotools.mask as maskUtils +import torch +from mmcv.ops.roi_align import roi_align + + +class BaseInstanceMasks(metaclass=ABCMeta): + """Base class for instance masks.""" + + @abstractmethod + def rescale(self, scale, interpolation='nearest'): + """Rescale masks as large as possible while keeping the aspect ratio. + For details can refer to `mmcv.imrescale`. + + Args: + scale (tuple[int]): The maximum size (h, w) of rescaled mask. + interpolation (str): Same as :func:`mmcv.imrescale`. + + Returns: + BaseInstanceMasks: The rescaled masks. + """ + + @abstractmethod + def resize(self, out_shape, interpolation='nearest'): + """Resize masks to the given out_shape. + + Args: + out_shape: Target (h, w) of resized mask. + interpolation (str): See :func:`mmcv.imresize`. + + Returns: + BaseInstanceMasks: The resized masks. + """ + + @abstractmethod + def flip(self, flip_direction='horizontal'): + """Flip masks alone the given direction. + + Args: + flip_direction (str): Either 'horizontal' or 'vertical'. + + Returns: + BaseInstanceMasks: The flipped masks. + """ + + @abstractmethod + def pad(self, out_shape, pad_val): + """Pad masks to the given size of (h, w). + + Args: + out_shape (tuple[int]): Target (h, w) of padded mask. + pad_val (int): The padded value. 
+ + Returns: + BaseInstanceMasks: The padded masks. + """ + + @abstractmethod + def crop(self, bbox): + """Crop each mask by the given bbox. + + Args: + bbox (ndarray): Bbox in format [x1, y1, x2, y2], shape (4, ). + + Return: + BaseInstanceMasks: The cropped masks. + """ + + @abstractmethod + def crop_and_resize(self, + bboxes, + out_shape, + inds, + device, + interpolation='bilinear', + binarize=True): + """Crop and resize masks by the given bboxes. + + This function is mainly used in mask targets computation. + It firstly align mask to bboxes by assigned_inds, then crop mask by the + assigned bbox and resize to the size of (mask_h, mask_w) + + Args: + bboxes (Tensor): Bboxes in format [x1, y1, x2, y2], shape (N, 4) + out_shape (tuple[int]): Target (h, w) of resized mask + inds (ndarray): Indexes to assign masks to each bbox, + shape (N,) and values should be between [0, num_masks - 1]. + device (str): Device of bboxes + interpolation (str): See `mmcv.imresize` + binarize (bool): if True fractional values are rounded to 0 or 1 + after the resize operation. if False and unsupported an error + will be raised. Defaults to True. + + Return: + BaseInstanceMasks: the cropped and resized masks. + """ + + @abstractmethod + def expand(self, expanded_h, expanded_w, top, left): + """see :class:`Expand`.""" + + @property + @abstractmethod + def areas(self): + """ndarray: areas of each instance.""" + + @abstractmethod + def to_ndarray(self): + """Convert masks to the format of ndarray. + + Return: + ndarray: Converted masks in the format of ndarray. + """ + + @abstractmethod + def to_tensor(self, dtype, device): + """Convert masks to the format of Tensor. + + Args: + dtype (str): Dtype of converted mask. + device (torch.device): Device of converted masks. + + Returns: + Tensor: Converted masks in the format of Tensor. + """ + + @abstractmethod + def translate(self, + out_shape, + offset, + direction='horizontal', + fill_val=0, + interpolation='bilinear'): + """Translate the masks. + + Args: + out_shape (tuple[int]): Shape for output mask, format (h, w). + offset (int | float): The offset for translate. + direction (str): The translate direction, either "horizontal" + or "vertical". + fill_val (int | float): Border value. Default 0. + interpolation (str): Same as :func:`mmcv.imtranslate`. + + Returns: + Translated masks. + """ + + def shear(self, + out_shape, + magnitude, + direction='horizontal', + border_value=0, + interpolation='bilinear'): + """Shear the masks. + + Args: + out_shape (tuple[int]): Shape for output mask, format (h, w). + magnitude (int | float): The magnitude used for shear. + direction (str): The shear direction, either "horizontal" + or "vertical". + border_value (int | tuple[int]): Value used in case of a + constant border. Default 0. + interpolation (str): Same as in :func:`mmcv.imshear`. + + Returns: + ndarray: Sheared masks. + """ + + @abstractmethod + def rotate(self, out_shape, angle, center=None, scale=1.0, fill_val=0): + """Rotate the masks. + + Args: + out_shape (tuple[int]): Shape for output mask, format (h, w). + angle (int | float): Rotation angle in degrees. Positive values + mean counter-clockwise rotation. + center (tuple[float], optional): Center point (w, h) of the + rotation in source image. If not specified, the center of + the image will be used. + scale (int | float): Isotropic scale factor. + fill_val (int | float): Border value. Default 0 for masks. + + Returns: + Rotated masks. 
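+
+        Example (an illustrative sketch via the :class:`BitmapMasks`
+        subclass; ``random`` generates dummy masks, so exact pixel
+        values are not meaningful):
+            >>> from mmdet.core.mask.structures import BitmapMasks
+            >>> masks = BitmapMasks.random(num_masks=2, height=32, width=32)
+            >>> rotated = masks.rotate((32, 32), angle=30.)
+            >>> assert rotated.height == 32 and rotated.width == 32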
+ """ + + +class BitmapMasks(BaseInstanceMasks): + """This class represents masks in the form of bitmaps. + + Args: + masks (ndarray): ndarray of masks in shape (N, H, W), where N is + the number of objects. + height (int): height of masks + width (int): width of masks + + Example: + >>> from mmdet.core.mask.structures import * # NOQA + >>> num_masks, H, W = 3, 32, 32 + >>> rng = np.random.RandomState(0) + >>> masks = (rng.rand(num_masks, H, W) > 0.1).astype(np.int) + >>> self = BitmapMasks(masks, height=H, width=W) + + >>> # demo crop_and_resize + >>> num_boxes = 5 + >>> bboxes = np.array([[0, 0, 30, 10.0]] * num_boxes) + >>> out_shape = (14, 14) + >>> inds = torch.randint(0, len(self), size=(num_boxes,)) + >>> device = 'cpu' + >>> interpolation = 'bilinear' + >>> new = self.crop_and_resize( + ... bboxes, out_shape, inds, device, interpolation) + >>> assert len(new) == num_boxes + >>> assert new.height, new.width == out_shape + """ + + def __init__(self, masks, height, width): + self.height = height + self.width = width + if len(masks) == 0: + self.masks = np.empty((0, self.height, self.width), dtype=np.uint8) + else: + assert isinstance(masks, (list, np.ndarray)) + if isinstance(masks, list): + assert isinstance(masks[0], np.ndarray) + assert masks[0].ndim == 2 # (H, W) + else: + assert masks.ndim == 3 # (N, H, W) + + self.masks = np.stack(masks).reshape(-1, height, width) + assert self.masks.shape[1] == self.height + assert self.masks.shape[2] == self.width + + def __getitem__(self, index): + """Index the BitmapMask. + + Args: + index (int | ndarray): Indices in the format of integer or ndarray. + + Returns: + :obj:`BitmapMasks`: Indexed bitmap masks. + """ + masks = self.masks[index].reshape(-1, self.height, self.width) + return BitmapMasks(masks, self.height, self.width) + + def __iter__(self): + return iter(self.masks) + + def __repr__(self): + s = self.__class__.__name__ + '(' + s += f'num_masks={len(self.masks)}, ' + s += f'height={self.height}, ' + s += f'width={self.width})' + return s + + def __len__(self): + """Number of masks.""" + return len(self.masks) + + def rescale(self, scale, interpolation='nearest'): + """See :func:`BaseInstanceMasks.rescale`.""" + if len(self.masks) == 0: + new_w, new_h = mmcv.rescale_size((self.width, self.height), scale) + rescaled_masks = np.empty((0, new_h, new_w), dtype=np.uint8) + else: + rescaled_masks = np.stack([ + mmcv.imrescale(mask, scale, interpolation=interpolation) + for mask in self.masks + ]) + height, width = rescaled_masks.shape[1:] + return BitmapMasks(rescaled_masks, height, width) + + def resize(self, out_shape, interpolation='nearest'): + """See :func:`BaseInstanceMasks.resize`.""" + if len(self.masks) == 0: + resized_masks = np.empty((0, *out_shape), dtype=np.uint8) + else: + resized_masks = np.stack([ + mmcv.imresize( + mask, out_shape[::-1], interpolation=interpolation) + for mask in self.masks + ]) + return BitmapMasks(resized_masks, *out_shape) + + def flip(self, flip_direction='horizontal'): + """See :func:`BaseInstanceMasks.flip`.""" + assert flip_direction in ('horizontal', 'vertical', 'diagonal') + + if len(self.masks) == 0: + flipped_masks = self.masks + else: + flipped_masks = np.stack([ + mmcv.imflip(mask, direction=flip_direction) + for mask in self.masks + ]) + return BitmapMasks(flipped_masks, self.height, self.width) + + def pad(self, out_shape, pad_val=0): + """See :func:`BaseInstanceMasks.pad`.""" + if len(self.masks) == 0: + padded_masks = np.empty((0, *out_shape), dtype=np.uint8) + else: + padded_masks = 
np.stack([ + mmcv.impad(mask, shape=out_shape, pad_val=pad_val) + for mask in self.masks + ]) + return BitmapMasks(padded_masks, *out_shape) + + def crop(self, bbox): + """See :func:`BaseInstanceMasks.crop`.""" + assert isinstance(bbox, np.ndarray) + assert bbox.ndim == 1 + + # clip the boundary + bbox = bbox.copy() + bbox[0::2] = np.clip(bbox[0::2], 0, self.width) + bbox[1::2] = np.clip(bbox[1::2], 0, self.height) + x1, y1, x2, y2 = bbox + w = np.maximum(x2 - x1, 1) + h = np.maximum(y2 - y1, 1) + + if len(self.masks) == 0: + cropped_masks = np.empty((0, h, w), dtype=np.uint8) + else: + cropped_masks = self.masks[:, y1:y1 + h, x1:x1 + w] + return BitmapMasks(cropped_masks, h, w) + + def crop_and_resize(self, + bboxes, + out_shape, + inds, + device='cpu', + interpolation='bilinear', + binarize=True): + """See :func:`BaseInstanceMasks.crop_and_resize`.""" + if len(self.masks) == 0: + empty_masks = np.empty((0, *out_shape), dtype=np.uint8) + return BitmapMasks(empty_masks, *out_shape) + + # convert bboxes to tensor + if isinstance(bboxes, np.ndarray): + bboxes = torch.from_numpy(bboxes).to(device=device) + if isinstance(inds, np.ndarray): + inds = torch.from_numpy(inds).to(device=device) + + num_bbox = bboxes.shape[0] + fake_inds = torch.arange( + num_bbox, device=device).to(dtype=bboxes.dtype)[:, None] + rois = torch.cat([fake_inds, bboxes], dim=1) # Nx5 + rois = rois.to(device=device) + if num_bbox > 0: + gt_masks_th = torch.from_numpy(self.masks).to(device).index_select( + 0, inds).to(dtype=rois.dtype) + targets = roi_align(gt_masks_th[:, None, :, :], rois, out_shape, + 1.0, 0, 'avg', True).squeeze(1) + if binarize: + resized_masks = (targets >= 0.5).cpu().numpy() + else: + resized_masks = targets.cpu().numpy() + else: + resized_masks = [] + return BitmapMasks(resized_masks, *out_shape) + + def expand(self, expanded_h, expanded_w, top, left): + """See :func:`BaseInstanceMasks.expand`.""" + if len(self.masks) == 0: + expanded_mask = np.empty((0, expanded_h, expanded_w), + dtype=np.uint8) + else: + expanded_mask = np.zeros((len(self), expanded_h, expanded_w), + dtype=np.uint8) + expanded_mask[:, top:top + self.height, + left:left + self.width] = self.masks + return BitmapMasks(expanded_mask, expanded_h, expanded_w) + + def translate(self, + out_shape, + offset, + direction='horizontal', + fill_val=0, + interpolation='bilinear'): + """Translate the BitmapMasks. + + Args: + out_shape (tuple[int]): Shape for output mask, format (h, w). + offset (int | float): The offset for translate. + direction (str): The translate direction, either "horizontal" + or "vertical". + fill_val (int | float): Border value. Default 0 for masks. + interpolation (str): Same as :func:`mmcv.imtranslate`. + + Returns: + BitmapMasks: Translated BitmapMasks. 
+ + Example: + >>> from mmdet.core.mask.structures import BitmapMasks + >>> self = BitmapMasks.random(dtype=np.uint8) + >>> out_shape = (32, 32) + >>> offset = 4 + >>> direction = 'horizontal' + >>> fill_val = 0 + >>> interpolation = 'bilinear' + >>> # Note, There seem to be issues when: + >>> # * out_shape is different than self's shape + >>> # * the mask dtype is not supported by cv2.AffineWarp + >>> new = self.translate(out_shape, offset, direction, fill_val, + >>> interpolation) + >>> assert len(new) == len(self) + >>> assert new.height, new.width == out_shape + """ + if len(self.masks) == 0: + translated_masks = np.empty((0, *out_shape), dtype=np.uint8) + else: + translated_masks = mmcv.imtranslate( + self.masks.transpose((1, 2, 0)), + offset, + direction, + border_value=fill_val, + interpolation=interpolation) + if translated_masks.ndim == 2: + translated_masks = translated_masks[:, :, None] + translated_masks = translated_masks.transpose( + (2, 0, 1)).astype(self.masks.dtype) + return BitmapMasks(translated_masks, *out_shape) + + def shear(self, + out_shape, + magnitude, + direction='horizontal', + border_value=0, + interpolation='bilinear'): + """Shear the BitmapMasks. + + Args: + out_shape (tuple[int]): Shape for output mask, format (h, w). + magnitude (int | float): The magnitude used for shear. + direction (str): The shear direction, either "horizontal" + or "vertical". + border_value (int | tuple[int]): Value used in case of a + constant border. + interpolation (str): Same as in :func:`mmcv.imshear`. + + Returns: + BitmapMasks: The sheared masks. + """ + if len(self.masks) == 0: + sheared_masks = np.empty((0, *out_shape), dtype=np.uint8) + else: + sheared_masks = mmcv.imshear( + self.masks.transpose((1, 2, 0)), + magnitude, + direction, + border_value=border_value, + interpolation=interpolation) + if sheared_masks.ndim == 2: + sheared_masks = sheared_masks[:, :, None] + sheared_masks = sheared_masks.transpose( + (2, 0, 1)).astype(self.masks.dtype) + return BitmapMasks(sheared_masks, *out_shape) + + def rotate(self, out_shape, angle, center=None, scale=1.0, fill_val=0): + """Rotate the BitmapMasks. + + Args: + out_shape (tuple[int]): Shape for output mask, format (h, w). + angle (int | float): Rotation angle in degrees. Positive values + mean counter-clockwise rotation. + center (tuple[float], optional): Center point (w, h) of the + rotation in source image. If not specified, the center of + the image will be used. + scale (int | float): Isotropic scale factor. + fill_val (int | float): Border value. Default 0 for masks. + + Returns: + BitmapMasks: Rotated BitmapMasks. 
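+
+        Example (illustrative only; output shape follows ``out_shape``):
+            >>> from mmdet.core.mask.structures import BitmapMasks
+            >>> self = BitmapMasks.random()
+            >>> new = self.rotate((32, 32), 45.)
+            >>> assert len(new) == len(self)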
+ """ + if len(self.masks) == 0: + rotated_masks = np.empty((0, *out_shape), dtype=self.masks.dtype) + else: + rotated_masks = mmcv.imrotate( + self.masks.transpose((1, 2, 0)), + angle, + center=center, + scale=scale, + border_value=fill_val) + if rotated_masks.ndim == 2: + # case when only one mask, (h, w) + rotated_masks = rotated_masks[:, :, None] # (h, w, 1) + rotated_masks = rotated_masks.transpose( + (2, 0, 1)).astype(self.masks.dtype) + return BitmapMasks(rotated_masks, *out_shape) + + @property + def areas(self): + """See :py:attr:`BaseInstanceMasks.areas`.""" + return self.masks.sum((1, 2)) + + def to_ndarray(self): + """See :func:`BaseInstanceMasks.to_ndarray`.""" + return self.masks + + def to_tensor(self, dtype, device): + """See :func:`BaseInstanceMasks.to_tensor`.""" + return torch.tensor(self.masks, dtype=dtype, device=device) + + @classmethod + def random(cls, + num_masks=3, + height=32, + width=32, + dtype=np.uint8, + rng=None): + """Generate random bitmap masks for demo / testing purposes. + + Example: + >>> from mmdet.core.mask.structures import BitmapMasks + >>> self = BitmapMasks.random() + >>> print('self = {}'.format(self)) + self = BitmapMasks(num_masks=3, height=32, width=32) + """ + from mmdet.utils.util_random import ensure_rng + rng = ensure_rng(rng) + masks = (rng.rand(num_masks, height, width) > 0.1).astype(dtype) + self = cls(masks, height=height, width=width) + return self + + +class PolygonMasks(BaseInstanceMasks): + """This class represents masks in the form of polygons. + + Polygons is a list of three levels. The first level of the list + corresponds to objects, the second level to the polys that compose the + object, the third level to the poly coordinates + + Args: + masks (list[list[ndarray]]): The first level of the list + corresponds to objects, the second level to the polys that + compose the object, the third level to the poly coordinates + height (int): height of masks + width (int): width of masks + + Example: + >>> from mmdet.core.mask.structures import * # NOQA + >>> masks = [ + >>> [ np.array([0, 0, 10, 0, 10, 10., 0, 10, 0, 0]) ] + >>> ] + >>> height, width = 16, 16 + >>> self = PolygonMasks(masks, height, width) + + >>> # demo translate + >>> new = self.translate((16, 16), 4., direction='horizontal') + >>> assert np.all(new.masks[0][0][1::2] == masks[0][0][1::2]) + >>> assert np.all(new.masks[0][0][0::2] == masks[0][0][0::2] + 4) + + >>> # demo crop_and_resize + >>> num_boxes = 3 + >>> bboxes = np.array([[0, 0, 30, 10.0]] * num_boxes) + >>> out_shape = (16, 16) + >>> inds = torch.randint(0, len(self), size=(num_boxes,)) + >>> device = 'cpu' + >>> interpolation = 'bilinear' + >>> new = self.crop_and_resize( + ... bboxes, out_shape, inds, device, interpolation) + >>> assert len(new) == num_boxes + >>> assert new.height, new.width == out_shape + """ + + def __init__(self, masks, height, width): + assert isinstance(masks, list) + if len(masks) > 0: + assert isinstance(masks[0], list) + assert isinstance(masks[0][0], np.ndarray) + + self.height = height + self.width = width + self.masks = masks + + def __getitem__(self, index): + """Index the polygon masks. + + Args: + index (ndarray | List): The indices. + + Returns: + :obj:`PolygonMasks`: The indexed polygon masks. 
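+
+        Example (illustrative; a single square polygon, indexed by int
+        and by list):
+            >>> masks = [[np.array([0, 0, 10, 0, 10, 10., 0, 10])]]
+            >>> self = PolygonMasks(masks, 16, 16)
+            >>> assert len(self[0]) == 1
+            >>> assert len(self[[0]]) == 1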
+ """ + if isinstance(index, np.ndarray): + index = index.tolist() + if isinstance(index, list): + masks = [self.masks[i] for i in index] + else: + try: + masks = self.masks[index] + except Exception: + raise ValueError( + f'Unsupported input of type {type(index)} for indexing!') + if len(masks) and isinstance(masks[0], np.ndarray): + masks = [masks] # ensure a list of three levels + return PolygonMasks(masks, self.height, self.width) + + def __iter__(self): + return iter(self.masks) + + def __repr__(self): + s = self.__class__.__name__ + '(' + s += f'num_masks={len(self.masks)}, ' + s += f'height={self.height}, ' + s += f'width={self.width})' + return s + + def __len__(self): + """Number of masks.""" + return len(self.masks) + + def rescale(self, scale, interpolation=None): + """see :func:`BaseInstanceMasks.rescale`""" + new_w, new_h = mmcv.rescale_size((self.width, self.height), scale) + if len(self.masks) == 0: + rescaled_masks = PolygonMasks([], new_h, new_w) + else: + rescaled_masks = self.resize((new_h, new_w)) + return rescaled_masks + + def resize(self, out_shape, interpolation=None): + """see :func:`BaseInstanceMasks.resize`""" + if len(self.masks) == 0: + resized_masks = PolygonMasks([], *out_shape) + else: + h_scale = out_shape[0] / self.height + w_scale = out_shape[1] / self.width + resized_masks = [] + for poly_per_obj in self.masks: + resized_poly = [] + for p in poly_per_obj: + p = p.copy() + p[0::2] *= w_scale + p[1::2] *= h_scale + resized_poly.append(p) + resized_masks.append(resized_poly) + resized_masks = PolygonMasks(resized_masks, *out_shape) + return resized_masks + + def flip(self, flip_direction='horizontal'): + """see :func:`BaseInstanceMasks.flip`""" + assert flip_direction in ('horizontal', 'vertical', 'diagonal') + if len(self.masks) == 0: + flipped_masks = PolygonMasks([], self.height, self.width) + else: + flipped_masks = [] + for poly_per_obj in self.masks: + flipped_poly_per_obj = [] + for p in poly_per_obj: + p = p.copy() + if flip_direction == 'horizontal': + p[0::2] = self.width - p[0::2] + elif flip_direction == 'vertical': + p[1::2] = self.height - p[1::2] + else: + p[0::2] = self.width - p[0::2] + p[1::2] = self.height - p[1::2] + flipped_poly_per_obj.append(p) + flipped_masks.append(flipped_poly_per_obj) + flipped_masks = PolygonMasks(flipped_masks, self.height, + self.width) + return flipped_masks + + def crop(self, bbox): + """see :func:`BaseInstanceMasks.crop`""" + assert isinstance(bbox, np.ndarray) + assert bbox.ndim == 1 + + # clip the boundary + bbox = bbox.copy() + bbox[0::2] = np.clip(bbox[0::2], 0, self.width) + bbox[1::2] = np.clip(bbox[1::2], 0, self.height) + x1, y1, x2, y2 = bbox + w = np.maximum(x2 - x1, 1) + h = np.maximum(y2 - y1, 1) + + if len(self.masks) == 0: + cropped_masks = PolygonMasks([], h, w) + else: + cropped_masks = [] + for poly_per_obj in self.masks: + cropped_poly_per_obj = [] + for p in poly_per_obj: + # pycocotools will clip the boundary + p = p.copy() + p[0::2] -= bbox[0] + p[1::2] -= bbox[1] + cropped_poly_per_obj.append(p) + cropped_masks.append(cropped_poly_per_obj) + cropped_masks = PolygonMasks(cropped_masks, h, w) + return cropped_masks + + def pad(self, out_shape, pad_val=0): + """padding has no effect on polygons`""" + return PolygonMasks(self.masks, *out_shape) + + def expand(self, *args, **kwargs): + """TODO: Add expand for polygon""" + raise NotImplementedError + + def crop_and_resize(self, + bboxes, + out_shape, + inds, + device='cpu', + interpolation='bilinear', + binarize=True): + """see 
:func:`BaseInstanceMasks.crop_and_resize`""" + out_h, out_w = out_shape + if len(self.masks) == 0: + return PolygonMasks([], out_h, out_w) + + if not binarize: + raise ValueError('Polygons are always binary, ' + 'setting binarize=False is unsupported') + + resized_masks = [] + for i in range(len(bboxes)): + mask = self.masks[inds[i]] + bbox = bboxes[i, :] + x1, y1, x2, y2 = bbox + w = np.maximum(x2 - x1, 1) + h = np.maximum(y2 - y1, 1) + h_scale = out_h / max(h, 0.1) # avoid too large scale + w_scale = out_w / max(w, 0.1) + + resized_mask = [] + for p in mask: + p = p.copy() + # crop + # pycocotools will clip the boundary + p[0::2] -= bbox[0] + p[1::2] -= bbox[1] + + # resize + p[0::2] *= w_scale + p[1::2] *= h_scale + resized_mask.append(p) + resized_masks.append(resized_mask) + return PolygonMasks(resized_masks, *out_shape) + + def translate(self, + out_shape, + offset, + direction='horizontal', + fill_val=None, + interpolation=None): + """Translate the PolygonMasks. + + Example: + >>> self = PolygonMasks.random(dtype=np.int) + >>> out_shape = (self.height, self.width) + >>> new = self.translate(out_shape, 4., direction='horizontal') + >>> assert np.all(new.masks[0][0][1::2] == self.masks[0][0][1::2]) + >>> assert np.all(new.masks[0][0][0::2] == self.masks[0][0][0::2] + 4) # noqa: E501 + """ + assert fill_val is None or fill_val == 0, 'Here fill_val is not '\ + f'used, and defaultly should be None or 0. got {fill_val}.' + if len(self.masks) == 0: + translated_masks = PolygonMasks([], *out_shape) + else: + translated_masks = [] + for poly_per_obj in self.masks: + translated_poly_per_obj = [] + for p in poly_per_obj: + p = p.copy() + if direction == 'horizontal': + p[0::2] = np.clip(p[0::2] + offset, 0, out_shape[1]) + elif direction == 'vertical': + p[1::2] = np.clip(p[1::2] + offset, 0, out_shape[0]) + translated_poly_per_obj.append(p) + translated_masks.append(translated_poly_per_obj) + translated_masks = PolygonMasks(translated_masks, *out_shape) + return translated_masks + + def shear(self, + out_shape, + magnitude, + direction='horizontal', + border_value=0, + interpolation='bilinear'): + """See :func:`BaseInstanceMasks.shear`.""" + if len(self.masks) == 0: + sheared_masks = PolygonMasks([], *out_shape) + else: + sheared_masks = [] + if direction == 'horizontal': + shear_matrix = np.stack([[1, magnitude], + [0, 1]]).astype(np.float32) + elif direction == 'vertical': + shear_matrix = np.stack([[1, 0], [magnitude, + 1]]).astype(np.float32) + for poly_per_obj in self.masks: + sheared_poly = [] + for p in poly_per_obj: + p = np.stack([p[0::2], p[1::2]], axis=0) # [2, n] + new_coords = np.matmul(shear_matrix, p) # [2, n] + new_coords[0, :] = np.clip(new_coords[0, :], 0, + out_shape[1]) + new_coords[1, :] = np.clip(new_coords[1, :], 0, + out_shape[0]) + sheared_poly.append( + new_coords.transpose((1, 0)).reshape(-1)) + sheared_masks.append(sheared_poly) + sheared_masks = PolygonMasks(sheared_masks, *out_shape) + return sheared_masks + + def rotate(self, out_shape, angle, center=None, scale=1.0, fill_val=0): + """See :func:`BaseInstanceMasks.rotate`.""" + if len(self.masks) == 0: + rotated_masks = PolygonMasks([], *out_shape) + else: + rotated_masks = [] + rotate_matrix = cv2.getRotationMatrix2D(center, -angle, scale) + for poly_per_obj in self.masks: + rotated_poly = [] + for p in poly_per_obj: + p = p.copy() + coords = np.stack([p[0::2], p[1::2]], axis=1) # [n, 2] + # pad 1 to convert from format [x, y] to homogeneous + # coordinates format [x, y, 1] + coords = np.concatenate( + (coords, 
np.ones((coords.shape[0], 1), coords.dtype)), + axis=1) # [n, 3] + rotated_coords = np.matmul( + rotate_matrix[None, :, :], + coords[:, :, None])[..., 0] # [n, 2, 1] -> [n, 2] + rotated_coords[:, 0] = np.clip(rotated_coords[:, 0], 0, + out_shape[1]) + rotated_coords[:, 1] = np.clip(rotated_coords[:, 1], 0, + out_shape[0]) + rotated_poly.append(rotated_coords.reshape(-1)) + rotated_masks.append(rotated_poly) + rotated_masks = PolygonMasks(rotated_masks, *out_shape) + return rotated_masks + + def to_bitmap(self): + """convert polygon masks to bitmap masks.""" + bitmap_masks = self.to_ndarray() + return BitmapMasks(bitmap_masks, self.height, self.width) + + @property + def areas(self): + """Compute areas of masks. + + This func is modified from `detectron2 + `_. + The function only works with Polygons using the shoelace formula. + + Return: + ndarray: areas of each instance + """ # noqa: W501 + area = [] + for polygons_per_obj in self.masks: + area_per_obj = 0 + for p in polygons_per_obj: + area_per_obj += self._polygon_area(p[0::2], p[1::2]) + area.append(area_per_obj) + return np.asarray(area) + + def _polygon_area(self, x, y): + """Compute the area of a component of a polygon. + + Using the shoelace formula: + https://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates + + Args: + x (ndarray): x coordinates of the component + y (ndarray): y coordinates of the component + + Return: + float: the are of the component + """ # noqa: 501 + return 0.5 * np.abs( + np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1))) + + def to_ndarray(self): + """Convert masks to the format of ndarray.""" + if len(self.masks) == 0: + return np.empty((0, self.height, self.width), dtype=np.uint8) + bitmap_masks = [] + for poly_per_obj in self.masks: + bitmap_masks.append( + polygon_to_bitmap(poly_per_obj, self.height, self.width)) + return np.stack(bitmap_masks) + + def to_tensor(self, dtype, device): + """See :func:`BaseInstanceMasks.to_tensor`.""" + if len(self.masks) == 0: + return torch.empty((0, self.height, self.width), + dtype=dtype, + device=device) + ndarray_masks = self.to_ndarray() + return torch.tensor(ndarray_masks, dtype=dtype, device=device) + + @classmethod + def random(cls, + num_masks=3, + height=32, + width=32, + n_verts=5, + dtype=np.float32, + rng=None): + """Generate random polygon masks for demo / testing purposes. + + Adapted from [1]_ + + References: + .. [1] https://gitlab.kitware.com/computer-vision/kwimage/-/blob/928cae35ca8/kwimage/structs/polygon.py#L379 # noqa: E501 + + Example: + >>> from mmdet.core.mask.structures import PolygonMasks + >>> self = PolygonMasks.random() + >>> print('self = {}'.format(self)) + """ + from mmdet.utils.util_random import ensure_rng + rng = ensure_rng(rng) + + def _gen_polygon(n, irregularity, spikeyness): + """Creates the polygon by sampling points on a circle around the + centre. Random noise is added by varying the angular spacing + between sequential points, and by varying the radial distance of + each point from the centre. + + Based on original code by Mike Ounsworth + + Args: + n (int): number of vertices + irregularity (float): [0,1] indicating how much variance there + is in the angular spacing of vertices. [0,1] will map to + [0, 2pi/numberOfVerts] + spikeyness (float): [0,1] indicating how much variance there is + in each vertex from the circle of radius aveRadius. [0,1] + will map to [0, aveRadius] + + Returns: + a list of vertices, in CCW order. 
+ """ + from scipy.stats import truncnorm + # Generate around the unit circle + cx, cy = (0.0, 0.0) + radius = 1 + + tau = np.pi * 2 + + irregularity = np.clip(irregularity, 0, 1) * 2 * np.pi / n + spikeyness = np.clip(spikeyness, 1e-9, 1) + + # generate n angle steps + lower = (tau / n) - irregularity + upper = (tau / n) + irregularity + angle_steps = rng.uniform(lower, upper, n) + + # normalize the steps so that point 0 and point n+1 are the same + k = angle_steps.sum() / (2 * np.pi) + angles = (angle_steps / k).cumsum() + rng.uniform(0, tau) + + # Convert high and low values to be wrt the standard normal range + # https://docs.scipy.org/doc/scipy/reference/generated/scipy.stats.truncnorm.html + low = 0 + high = 2 * radius + mean = radius + std = spikeyness + a = (low - mean) / std + b = (high - mean) / std + tnorm = truncnorm(a=a, b=b, loc=mean, scale=std) + + # now generate the points + radii = tnorm.rvs(n, random_state=rng) + x_pts = cx + radii * np.cos(angles) + y_pts = cy + radii * np.sin(angles) + + points = np.hstack([x_pts[:, None], y_pts[:, None]]) + + # Scale to 0-1 space + points = points - points.min(axis=0) + points = points / points.max(axis=0) + + # Randomly place within 0-1 space + points = points * (rng.rand() * .8 + .2) + min_pt = points.min(axis=0) + max_pt = points.max(axis=0) + + high = (1 - max_pt) + low = (0 - min_pt) + offset = (rng.rand(2) * (high - low)) + low + points = points + offset + return points + + def _order_vertices(verts): + """ + References: + https://stackoverflow.com/questions/1709283/how-can-i-sort-a-coordinate-list-for-a-rectangle-counterclockwise + """ + mlat = verts.T[0].sum() / len(verts) + mlng = verts.T[1].sum() / len(verts) + + tau = np.pi * 2 + angle = (np.arctan2(mlat - verts.T[0], verts.T[1] - mlng) + + tau) % tau + sortx = angle.argsort() + verts = verts.take(sortx, axis=0) + return verts + + # Generate a random exterior for each requested mask + masks = [] + for _ in range(num_masks): + exterior = _order_vertices(_gen_polygon(n_verts, 0.9, 0.9)) + exterior = (exterior * [(width, height)]).astype(dtype) + masks.append([exterior.ravel()]) + + self = cls(masks, height, width) + return self + + +def polygon_to_bitmap(polygons, height, width): + """Convert masks from the form of polygons to bitmaps. + + Args: + polygons (list[ndarray]): masks in polygon representation + height (int): mask height + width (int): mask width + + Return: + ndarray: the converted masks in bitmap representation + """ + rles = maskUtils.frPyObjects(polygons, height, width) + rle = maskUtils.merge(rles) + bitmap_mask = maskUtils.decode(rle).astype(np.bool) + return bitmap_mask diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/mask/utils.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/mask/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..c88208291ab2a605bee9fe6c1a28a443b74c6372 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/mask/utils.py @@ -0,0 +1,63 @@ +import mmcv +import numpy as np +import pycocotools.mask as mask_util + + +def split_combined_polys(polys, poly_lens, polys_per_mask): + """Split the combined 1-D polys into masks. + + A mask is represented as a list of polys, and a poly is represented as + a 1-D array. In dataset, all masks are concatenated into a single 1-D + tensor. Here we need to split the tensor into original representations. 
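+
+    For example (illustrative numbers), ``poly_lens[i] = [4, 6, 8]`` with
+    ``polys_per_mask[i] = [2, 1]`` means the first mask in image ``i`` is
+    composed of the first two polys (of lengths 4 and 6), and the second
+    mask of the remaining poly of length 8.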
+ + Args: + polys (list): a list (length = image num) of 1-D tensors + poly_lens (list): a list (length = image num) of poly length + polys_per_mask (list): a list (length = image num) of poly number + of each mask + + Returns: + list: a list (length = image num) of list (length = mask num) of \ + list (length = poly num) of numpy array. + """ + mask_polys_list = [] + for img_id in range(len(polys)): + polys_single = polys[img_id] + polys_lens_single = poly_lens[img_id].tolist() + polys_per_mask_single = polys_per_mask[img_id].tolist() + + split_polys = mmcv.slice_list(polys_single, polys_lens_single) + mask_polys = mmcv.slice_list(split_polys, polys_per_mask_single) + mask_polys_list.append(mask_polys) + return mask_polys_list + + +# TODO: move this function to more proper place +def encode_mask_results(mask_results): + """Encode bitmap mask to RLE code. + + Args: + mask_results (list | tuple[list]): bitmap mask results. + In mask scoring rcnn, mask_results is a tuple of (segm_results, + segm_cls_score). + + Returns: + list | tuple: RLE encoded mask. + """ + if isinstance(mask_results, tuple): # mask scoring + cls_segms, cls_mask_scores = mask_results + else: + cls_segms = mask_results + num_classes = len(cls_segms) + encoded_mask_results = [[] for _ in range(num_classes)] + for i in range(len(cls_segms)): + for cls_segm in cls_segms[i]: + encoded_mask_results[i].append( + mask_util.encode( + np.array( + cls_segm[:, :, np.newaxis], order='F', + dtype='uint8'))[0]) # encoded with RLE + if isinstance(mask_results, tuple): + return encoded_mask_results, cls_mask_scores + else: + return encoded_mask_results diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/post_processing/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/post_processing/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..880b3f06609b050aae163b2e38088c1ee4aa0998 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/post_processing/__init__.py @@ -0,0 +1,8 @@ +from .bbox_nms import fast_nms, multiclass_nms +from .merge_augs import (merge_aug_bboxes, merge_aug_masks, + merge_aug_proposals, merge_aug_scores) + +__all__ = [ + 'multiclass_nms', 'merge_aug_proposals', 'merge_aug_bboxes', + 'merge_aug_scores', 'merge_aug_masks', 'fast_nms' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/post_processing/bbox_nms.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/post_processing/bbox_nms.py new file mode 100644 index 0000000000000000000000000000000000000000..9d95db880fc4f510c60f1d590b34b84c785b1bc1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/post_processing/bbox_nms.py @@ -0,0 +1,170 @@ +import torch +from mmcv.ops.nms import batched_nms + +from mmdet.core.bbox.iou_calculators import bbox_overlaps + + +def multiclass_nms(multi_bboxes, + multi_scores, + score_thr, + nms_cfg, + max_num=-1, + score_factors=None, + return_inds=False): + """NMS for multi-class bboxes. + + Args: + multi_bboxes (Tensor): shape (n, #class*4) or (n, 4) + multi_scores (Tensor): shape (n, #class), where the last column + contains scores of the background class, but this will be ignored. + score_thr (float): bbox threshold, bboxes with scores lower than it + will not be considered. + nms_thr (float): NMS IoU threshold + max_num (int, optional): if there are more than max_num bboxes after + NMS, only top max_num will be kept. Default to -1. 
+ score_factors (Tensor, optional): The factors multiplied to scores + before applying NMS. Default to None. + return_inds (bool, optional): Whether return the indices of kept + bboxes. Default to False. + + Returns: + tuple: (dets, labels, indices (optional)), tensors of shape (k, 5), + (k), and (k). Dets are boxes with scores. Labels are 0-based. + """ + num_classes = multi_scores.size(1) - 1 + # exclude background category + if multi_bboxes.shape[1] > 4: + bboxes = multi_bboxes.view(multi_scores.size(0), -1, 4) + else: + bboxes = multi_bboxes[:, None].expand( + multi_scores.size(0), num_classes, 4) + + scores = multi_scores[:, :-1] + + labels = torch.arange(num_classes, dtype=torch.long) + labels = labels.view(1, -1).expand_as(scores) + + bboxes = bboxes.reshape(-1, 4) + scores = scores.reshape(-1) + labels = labels.reshape(-1) + + if not torch.onnx.is_in_onnx_export(): + # NonZero not supported in TensorRT + # remove low scoring boxes + valid_mask = scores > score_thr + # multiply score_factor after threshold to preserve more bboxes, improve + # mAP by 1% for YOLOv3 + if score_factors is not None: + # expand the shape to match original shape of score + score_factors = score_factors.view(-1, 1).expand( + multi_scores.size(0), num_classes) + score_factors = score_factors.reshape(-1) + scores = scores * score_factors + + if not torch.onnx.is_in_onnx_export(): + # NonZero not supported in TensorRT + inds = valid_mask.nonzero(as_tuple=False).squeeze(1) + bboxes, scores, labels = bboxes[inds], scores[inds], labels[inds] + else: + # TensorRT NMS plugin has invalid output filled with -1 + # add dummy data to make detection output correct. + bboxes = torch.cat([bboxes, bboxes.new_zeros(1, 4)], dim=0) + scores = torch.cat([scores, scores.new_zeros(1)], dim=0) + labels = torch.cat([labels, labels.new_zeros(1)], dim=0) + + if bboxes.numel() == 0: + if torch.onnx.is_in_onnx_export(): + raise RuntimeError('[ONNX Error] Can not record NMS ' + 'as it has not been executed this time') + dets = torch.cat([bboxes, scores[:, None]], -1) + if return_inds: + return dets, labels, inds + else: + return dets, labels + + dets, keep = batched_nms(bboxes, scores, labels, nms_cfg) + + if max_num > 0: + dets = dets[:max_num] + keep = keep[:max_num] + + if return_inds: + return dets, labels[keep], keep + else: + return dets, labels[keep] + + +def fast_nms(multi_bboxes, + multi_scores, + multi_coeffs, + score_thr, + iou_thr, + top_k, + max_num=-1): + """Fast NMS in `YOLACT `_. + + Fast NMS allows already-removed detections to suppress other detections so + that every instance can be decided to be kept or discarded in parallel, + which is not possible in traditional NMS. This relaxation allows us to + implement Fast NMS entirely in standard GPU-accelerated matrix operations. + + Args: + multi_bboxes (Tensor): shape (n, #class*4) or (n, 4) + multi_scores (Tensor): shape (n, #class+1), where the last column + contains scores of the background class, but this will be ignored. + multi_coeffs (Tensor): shape (n, #class*coeffs_dim). + score_thr (float): bbox threshold, bboxes with scores lower than it + will not be considered. + iou_thr (float): IoU threshold to be considered as conflicted. + top_k (int): if there are more than top_k bboxes before NMS, + only top top_k will be kept. + max_num (int): if there are more than max_num bboxes after NMS, + only top max_num will be kept. If -1, keep all the bboxes. + Default: -1. 
+ + Returns: + tuple: (dets, labels, coefficients), tensors of shape (k, 5), (k, 1), + and (k, coeffs_dim). Dets are boxes with scores. + Labels are 0-based. + """ + + scores = multi_scores[:, :-1].t() # [#class, n] + scores, idx = scores.sort(1, descending=True) + + idx = idx[:, :top_k].contiguous() + scores = scores[:, :top_k] # [#class, topk] + num_classes, num_dets = idx.size() + boxes = multi_bboxes[idx.view(-1), :].view(num_classes, num_dets, 4) + coeffs = multi_coeffs[idx.view(-1), :].view(num_classes, num_dets, -1) + + iou = bbox_overlaps(boxes, boxes) # [#class, topk, topk] + iou.triu_(diagonal=1) + iou_max, _ = iou.max(dim=1) + + # Now just filter out the ones higher than the threshold + keep = iou_max <= iou_thr + + # Second thresholding introduces 0.2 mAP gain at negligible time cost + keep *= scores > score_thr + + # Assign each kept detection to its corresponding class + classes = torch.arange( + num_classes, device=boxes.device)[:, None].expand_as(keep) + classes = classes[keep] + + boxes = boxes[keep] + coeffs = coeffs[keep] + scores = scores[keep] + + # Only keep the top max_num highest scores across all classes + scores, idx = scores.sort(0, descending=True) + if max_num > 0: + idx = idx[:max_num] + scores = scores[:max_num] + + classes = classes[idx] + boxes = boxes[idx] + coeffs = coeffs[idx] + + cls_dets = torch.cat([boxes, scores[:, None]], dim=1) + return cls_dets, classes, coeffs diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/post_processing/merge_augs.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/post_processing/merge_augs.py new file mode 100644 index 0000000000000000000000000000000000000000..79e1cc59c136ee7fe92dca50f2de9d5045c3de9b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/post_processing/merge_augs.py @@ -0,0 +1,153 @@ +import copy +import warnings + +import numpy as np +import torch +from mmcv import ConfigDict +from mmcv.ops import nms + +from ..bbox import bbox_mapping_back + + +def merge_aug_proposals(aug_proposals, img_metas, cfg): + """Merge augmented proposals (multiscale, flip, etc.) + + Args: + aug_proposals (list[Tensor]): proposals from different testing + schemes, shape (n, 5). Note that they are not rescaled to the + original image size. + + img_metas (list[dict]): list of image info dict where each dict has: + 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + + cfg (dict): rpn test config. + + Returns: + Tensor: shape (n, 4), proposals corresponding to original image scale. + """ + + cfg = copy.deepcopy(cfg) + + # deprecate arguments warning + if 'nms' not in cfg or 'max_num' in cfg or 'nms_thr' in cfg: + warnings.warn( + 'In rpn_proposal or test_cfg, ' + 'nms_thr has been moved to a dict named nms as ' + 'iou_threshold, max_num has been renamed as max_per_img, ' + 'name of original arguments and the way to specify ' + 'iou_threshold of NMS will be deprecated.') + if 'nms' not in cfg: + cfg.nms = ConfigDict(dict(type='nms', iou_threshold=cfg.nms_thr)) + if 'max_num' in cfg: + if 'max_per_img' in cfg: + assert cfg.max_num == cfg.max_per_img, f'You set max_num and ' \ + f'max_per_img at the same time, but get {cfg.max_num} ' \ + f'and {cfg.max_per_img} respectively' \ + f'Please delete max_num which will be deprecated.' 
+ else: + cfg.max_per_img = cfg.max_num + if 'nms_thr' in cfg: + assert cfg.nms.iou_threshold == cfg.nms_thr, f'You set ' \ + f'iou_threshold in nms and ' \ + f'nms_thr at the same time, but get ' \ + f'{cfg.nms.iou_threshold} and {cfg.nms_thr}' \ + f' respectively. Please delete the nms_thr ' \ + f'which will be deprecated.' + + recovered_proposals = [] + for proposals, img_info in zip(aug_proposals, img_metas): + img_shape = img_info['img_shape'] + scale_factor = img_info['scale_factor'] + flip = img_info['flip'] + flip_direction = img_info['flip_direction'] + _proposals = proposals.clone() + _proposals[:, :4] = bbox_mapping_back(_proposals[:, :4], img_shape, + scale_factor, flip, + flip_direction) + recovered_proposals.append(_proposals) + aug_proposals = torch.cat(recovered_proposals, dim=0) + merged_proposals, _ = nms(aug_proposals[:, :4].contiguous(), + aug_proposals[:, -1].contiguous(), + cfg.nms.iou_threshold) + scores = merged_proposals[:, 4] + _, order = scores.sort(0, descending=True) + num = min(cfg.max_per_img, merged_proposals.shape[0]) + order = order[:num] + merged_proposals = merged_proposals[order, :] + return merged_proposals + + +def merge_aug_bboxes(aug_bboxes, aug_scores, img_metas, rcnn_test_cfg): + """Merge augmented detection bboxes and scores. + + Args: + aug_bboxes (list[Tensor]): shape (n, 4*#class) + aug_scores (list[Tensor] or None): shape (n, #class) + img_shapes (list[Tensor]): shape (3, ). + rcnn_test_cfg (dict): rcnn test config. + + Returns: + tuple: (bboxes, scores) + """ + recovered_bboxes = [] + for bboxes, img_info in zip(aug_bboxes, img_metas): + img_shape = img_info[0]['img_shape'] + scale_factor = img_info[0]['scale_factor'] + flip = img_info[0]['flip'] + flip_direction = img_info[0]['flip_direction'] + bboxes = bbox_mapping_back(bboxes, img_shape, scale_factor, flip, + flip_direction) + recovered_bboxes.append(bboxes) + bboxes = torch.stack(recovered_bboxes).mean(dim=0) + if aug_scores is None: + return bboxes + else: + scores = torch.stack(aug_scores).mean(dim=0) + return bboxes, scores + + +def merge_aug_scores(aug_scores): + """Merge augmented bbox scores.""" + if isinstance(aug_scores[0], torch.Tensor): + return torch.mean(torch.stack(aug_scores), dim=0) + else: + return np.mean(aug_scores, axis=0) + + +def merge_aug_masks(aug_masks, img_metas, rcnn_test_cfg, weights=None): + """Merge augmented mask prediction. + + Args: + aug_masks (list[ndarray]): shape (n, #class, h, w) + img_shapes (list[ndarray]): shape (3, ). + rcnn_test_cfg (dict): rcnn test config. 
+ + Returns: + tuple: (bboxes, scores) + """ + recovered_masks = [] + for mask, img_info in zip(aug_masks, img_metas): + flip = img_info[0]['flip'] + flip_direction = img_info[0]['flip_direction'] + if flip: + if flip_direction == 'horizontal': + mask = mask[:, :, :, ::-1] + elif flip_direction == 'vertical': + mask = mask[:, :, ::-1, :] + elif flip_direction == 'diagonal': + mask = mask[:, :, :, ::-1] + mask = mask[:, :, ::-1, :] + else: + raise ValueError( + f"Invalid flipping direction '{flip_direction}'") + recovered_masks.append(mask) + + if weights is None: + merged_masks = np.mean(recovered_masks, axis=0) + else: + merged_masks = np.average( + np.array(recovered_masks), axis=0, weights=np.array(weights)) + return merged_masks diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/utils/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..37ed26f0ee419d09ad3d5494598505fcbda631b9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/utils/__init__.py @@ -0,0 +1,7 @@ +from .dist_utils import DistOptimizerHook, allreduce_grads, reduce_mean +from .misc import flip_tensor, mask2ndarray, multi_apply, unmap + +__all__ = [ + 'allreduce_grads', 'DistOptimizerHook', 'reduce_mean', 'multi_apply', + 'unmap', 'mask2ndarray', 'flip_tensor' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/utils/dist_utils.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/utils/dist_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..5fe77753313783f95bd7111038ef8b58ee4e4bc5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/utils/dist_utils.py @@ -0,0 +1,69 @@ +import warnings +from collections import OrderedDict + +import torch.distributed as dist +from mmcv.runner import OptimizerHook +from torch._utils import (_flatten_dense_tensors, _take_tensors, + _unflatten_dense_tensors) + + +def _allreduce_coalesced(tensors, world_size, bucket_size_mb=-1): + if bucket_size_mb > 0: + bucket_size_bytes = bucket_size_mb * 1024 * 1024 + buckets = _take_tensors(tensors, bucket_size_bytes) + else: + buckets = OrderedDict() + for tensor in tensors: + tp = tensor.type() + if tp not in buckets: + buckets[tp] = [] + buckets[tp].append(tensor) + buckets = buckets.values() + + for bucket in buckets: + flat_tensors = _flatten_dense_tensors(bucket) + dist.all_reduce(flat_tensors) + flat_tensors.div_(world_size) + for tensor, synced in zip( + bucket, _unflatten_dense_tensors(flat_tensors, bucket)): + tensor.copy_(synced) + + +def allreduce_grads(params, coalesce=True, bucket_size_mb=-1): + """Allreduce gradients. + + Args: + params (list[torch.Parameters]): List of parameters of a model + coalesce (bool, optional): Whether allreduce parameters as a whole. + Defaults to True. + bucket_size_mb (int, optional): Size of bucket, the unit is MB. + Defaults to -1. 
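+
+    Example (an illustrative sketch, not runnable as-is; assumes an
+    initialized default process group and a ``model`` variable, neither
+    of which is provided here):
+        >>> # gradients of all parameters are averaged across workers
+        >>> allreduce_grads(list(model.parameters()), coalesce=True)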
+    """
+    grads = [
+        param.grad.data for param in params
+        if param.requires_grad and param.grad is not None
+    ]
+    world_size = dist.get_world_size()
+    if coalesce:
+        _allreduce_coalesced(grads, world_size, bucket_size_mb)
+    else:
+        for tensor in grads:
+            dist.all_reduce(tensor.div_(world_size))
+
+
+class DistOptimizerHook(OptimizerHook):
+    """Deprecated optimizer hook for distributed training."""
+
+    def __init__(self, *args, **kwargs):
+        warnings.warn('"DistOptimizerHook" is deprecated, please switch to'
+                      '"mmcv.runner.OptimizerHook".')
+        super().__init__(*args, **kwargs)
+
+
+def reduce_mean(tensor):
+    """Obtain the mean of a tensor across different GPUs."""
+    if not (dist.is_available() and dist.is_initialized()):
+        return tensor
+    tensor = tensor.clone()
+    dist.all_reduce(tensor.div_(dist.get_world_size()), op=dist.ReduceOp.SUM)
+    return tensor
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/utils/misc.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/utils/misc.py
new file mode 100644
index 0000000000000000000000000000000000000000..e1f40d8c5ec33e649a87bd6f2259be76cf038e05
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/utils/misc.py
@@ -0,0 +1,84 @@
+from functools import partial
+
+import numpy as np
+import torch
+from six.moves import map, zip
+
+from ..mask.structures import BitmapMasks, PolygonMasks
+
+
+def multi_apply(func, *args, **kwargs):
+    """Apply function to a list of arguments.
+
+    Note:
+        This function applies ``func`` to multiple inputs and
+        maps the multiple outputs of ``func`` into different
+        lists. Each list contains the same type of outputs corresponding
+        to different inputs.
+
+    Args:
+        func (Function): A function that will be applied to a list of
+            arguments
+
+    Returns:
+        tuple(list): A tuple containing multiple lists, each list contains \
+            a kind of returned results by the function
+    """
+    pfunc = partial(func, **kwargs) if kwargs else func
+    map_results = map(pfunc, *args)
+    return tuple(map(list, zip(*map_results)))
+
+
+def unmap(data, count, inds, fill=0):
+    """Unmap a subset of items (data) back to the original set of items (of
+    size count)."""
+    if data.dim() == 1:
+        ret = data.new_full((count, ), fill)
+        ret[inds.type(torch.bool)] = data
+    else:
+        new_size = (count, ) + data.size()[1:]
+        ret = data.new_full(new_size, fill)
+        ret[inds.type(torch.bool), :] = data
+    return ret
+
+
+def mask2ndarray(mask):
+    """Convert a mask to ndarray.
+
+    Args:
+        mask (:obj:`BitmapMasks` or :obj:`PolygonMasks` or
+            torch.Tensor or np.ndarray): The mask to be converted.
+
+    Returns:
+        np.ndarray: Ndarray mask of shape (n, h, w) that has been converted
+    """
+    if isinstance(mask, (BitmapMasks, PolygonMasks)):
+        mask = mask.to_ndarray()
+    elif isinstance(mask, torch.Tensor):
+        mask = mask.detach().cpu().numpy()
+    elif not isinstance(mask, np.ndarray):
+        raise TypeError(f'Unsupported {type(mask)} data type')
+    return mask
+
+
+def flip_tensor(src_tensor, flip_direction):
+    """Flip a tensor based on flip_direction.
+
+    Args:
+        src_tensor (Tensor): input feature map, shape (B, C, H, W).
+        flip_direction (str): The flipping direction. Options are
+            'horizontal', 'vertical', 'diagonal'.
+
+    Returns:
+        out_tensor (Tensor): Flipped tensor.
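+
+    Example (illustrative; a 2x2 map flipped along the width axis):
+        >>> x = torch.arange(4.).reshape(1, 1, 2, 2)
+        >>> flip_tensor(x, 'horizontal')[0, 0].tolist()
+        [[1.0, 0.0], [3.0, 2.0]]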
+ """ + assert src_tensor.ndim == 4 + valid_directions = ['horizontal', 'vertical', 'diagonal'] + assert flip_direction in valid_directions + if flip_direction == 'horizontal': + out_tensor = torch.flip(src_tensor, [3]) + elif flip_direction == 'vertical': + out_tensor = torch.flip(src_tensor, [2]) + else: + out_tensor = torch.flip(src_tensor, [2, 3]) + return out_tensor diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/visualization/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/visualization/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4ff995c0861490941f8cfc19ebbd41a2ee7e2d65 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/visualization/__init__.py @@ -0,0 +1,4 @@ +from .image import (color_val_matplotlib, imshow_det_bboxes, + imshow_gt_det_bboxes) + +__all__ = ['imshow_det_bboxes', 'imshow_gt_det_bboxes', 'color_val_matplotlib'] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/visualization/image.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/visualization/image.py new file mode 100644 index 0000000000000000000000000000000000000000..5a148384d7a77c4d9849c54570e85740eaff8235 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/core/visualization/image.py @@ -0,0 +1,303 @@ +import matplotlib.pyplot as plt +import mmcv +import numpy as np +import pycocotools.mask as mask_util +from matplotlib.collections import PatchCollection +from matplotlib.patches import Polygon + +from ..utils import mask2ndarray + +EPS = 1e-2 + + +def color_val_matplotlib(color): + """Convert various input in BGR order to normalized RGB matplotlib color + tuples, + + Args: + color (:obj:`Color`/str/tuple/int/ndarray): Color inputs + + Returns: + tuple[float]: A tuple of 3 normalized floats indicating RGB channels. + """ + color = mmcv.color_val(color) + color = [color / 255 for color in color[::-1]] + return tuple(color) + + +def imshow_det_bboxes(img, + bboxes, + labels, + segms=None, + class_names=None, + score_thr=0, + bbox_color='green', + text_color='green', + mask_color=None, + thickness=2, + font_size=13, + win_name='', + show=True, + wait_time=0, + out_file=None): + """Draw bboxes and class labels (with scores) on an image. + + Args: + img (str or ndarray): The image to be displayed. + bboxes (ndarray): Bounding boxes (with scores), shaped (n, 4) or + (n, 5). + labels (ndarray): Labels of bboxes. + segms (ndarray or None): Masks, shaped (n,h,w) or None + class_names (list[str]): Names of each classes. + score_thr (float): Minimum score of bboxes to be shown. Default: 0 + bbox_color (str or tuple(int) or :obj:`Color`):Color of bbox lines. + The tuple of color should be in BGR order. Default: 'green' + text_color (str or tuple(int) or :obj:`Color`):Color of texts. + The tuple of color should be in BGR order. Default: 'green' + mask_color (str or tuple(int) or :obj:`Color`, optional): + Color of masks. The tuple of color should be in BGR order. + Default: None + thickness (int): Thickness of lines. Default: 2 + font_size (int): Font size of texts. Default: 13 + show (bool): Whether to show the image. Default: True + win_name (str): The window name. Default: '' + wait_time (float): Value of waitKey param. Default: 0. + out_file (str, optional): The filename to write the image. + Default: None + + Returns: + ndarray: The image with bboxes drawn on it. + """ + assert bboxes.ndim == 2, \ + f' bboxes ndim should be 2, but its ndim is {bboxes.ndim}.' 
+ assert labels.ndim == 1, \ + f' labels ndim should be 1, but its ndim is {labels.ndim}.' + assert bboxes.shape[0] == labels.shape[0], \ + 'bboxes.shape[0] and labels.shape[0] should have the same length.' + assert bboxes.shape[1] == 4 or bboxes.shape[1] == 5, \ + f' bboxes.shape[1] should be 4 or 5, but its {bboxes.shape[1]}.' + img = mmcv.imread(img).astype(np.uint8) + + if score_thr > 0: + assert bboxes.shape[1] == 5 + scores = bboxes[:, -1] + inds = scores > score_thr + bboxes = bboxes[inds, :] + labels = labels[inds] + if segms is not None: + segms = segms[inds, ...] + + mask_colors = [] + if labels.shape[0] > 0: + if mask_color is None: + # random color + np.random.seed(42) + mask_colors = [ + np.random.randint(0, 256, (1, 3), dtype=np.uint8) + for _ in range(max(labels) + 1) + ] + else: + # specify color + mask_colors = [ + np.array(mmcv.color_val(mask_color)[::-1], dtype=np.uint8) + ] * ( + max(labels) + 1) + + bbox_color = color_val_matplotlib(bbox_color) + text_color = color_val_matplotlib(text_color) + + img = mmcv.bgr2rgb(img) + width, height = img.shape[1], img.shape[0] + img = np.ascontiguousarray(img) + + fig = plt.figure(win_name, frameon=False) + plt.title(win_name) + canvas = fig.canvas + dpi = fig.get_dpi() + # add a small EPS to avoid precision lost due to matplotlib's truncation + # (https://github.com/matplotlib/matplotlib/issues/15363) + fig.set_size_inches((width + EPS) / dpi, (height + EPS) / dpi) + + # remove white edges by set subplot margin + plt.subplots_adjust(left=0, right=1, bottom=0, top=1) + ax = plt.gca() + ax.axis('off') + + polygons = [] + color = [] + for i, (bbox, label) in enumerate(zip(bboxes, labels)): + bbox_int = bbox.astype(np.int32) + poly = [[bbox_int[0], bbox_int[1]], [bbox_int[0], bbox_int[3]], + [bbox_int[2], bbox_int[3]], [bbox_int[2], bbox_int[1]]] + np_poly = np.array(poly).reshape((4, 2)) + polygons.append(Polygon(np_poly)) + color.append(bbox_color) + label_text = class_names[ + label] if class_names is not None else f'class {label}' + if len(bbox) > 4: + label_text += f'|{bbox[-1]:.02f}' + ax.text( + bbox_int[0], + bbox_int[1], + f'{label_text}', + bbox={ + 'facecolor': 'black', + 'alpha': 0.8, + 'pad': 0.7, + 'edgecolor': 'none' + }, + color=text_color, + fontsize=font_size, + verticalalignment='top', + horizontalalignment='left') + if segms is not None: + color_mask = mask_colors[labels[i]] + mask = segms[i].astype(bool) + img[mask] = img[mask] * 0.5 + color_mask * 0.5 + + plt.imshow(img) + + p = PatchCollection( + polygons, facecolor='none', edgecolors=color, linewidths=thickness) + ax.add_collection(p) + + stream, _ = canvas.print_to_buffer() + buffer = np.frombuffer(stream, dtype='uint8') + img_rgba = buffer.reshape(height, width, 4) + rgb, alpha = np.split(img_rgba, [3], axis=2) + img = rgb.astype('uint8') + img = mmcv.rgb2bgr(img) + + if show: + # We do not use cv2 for display because in some cases, opencv will + # conflict with Qt, it will output a warning: Current thread + # is not the object's thread. 
You can refer to
+        # https://github.com/opencv/opencv-python/issues/46 for details.
+        if wait_time == 0:
+            plt.show()
+        else:
+            plt.show(block=False)
+            plt.pause(wait_time)
+    if out_file is not None:
+        mmcv.imwrite(img, out_file)
+
+    plt.close()
+
+    return img
+
+
+def imshow_gt_det_bboxes(img,
+                         annotation,
+                         result,
+                         class_names=None,
+                         score_thr=0,
+                         gt_bbox_color=(255, 102, 61),
+                         gt_text_color=(255, 102, 61),
+                         gt_mask_color=(255, 102, 61),
+                         det_bbox_color=(72, 101, 241),
+                         det_text_color=(72, 101, 241),
+                         det_mask_color=(72, 101, 241),
+                         thickness=2,
+                         font_size=13,
+                         win_name='',
+                         show=True,
+                         wait_time=0,
+                         out_file=None):
+    """General visualization of GT and detection results.
+
+    Args:
+        img (str or ndarray): The image to be displayed.
+        annotation (dict): Ground truth annotations which contain keys
+            'gt_bboxes' and 'gt_labels', and optionally 'gt_masks'.
+        result (tuple[list] or list): The detection result, can be either
+            (bbox, segm) or just bbox.
+        class_names (list[str]): Names of each class.
+        score_thr (float): Minimum score of bboxes to be shown. Default: 0
+        gt_bbox_color (str or tuple(int) or :obj:`Color`): Color of bbox
+            lines. The tuple of color should be in BGR order.
+            Default: (255, 102, 61)
+        gt_text_color (str or tuple(int) or :obj:`Color`): Color of texts.
+            The tuple of color should be in BGR order. Default: (255, 102, 61)
+        gt_mask_color (str or tuple(int) or :obj:`Color`, optional):
+            Color of masks. The tuple of color should be in BGR order.
+            Default: (255, 102, 61)
+        det_bbox_color (str or tuple(int) or :obj:`Color`): Color of bbox
+            lines. The tuple of color should be in BGR order.
+            Default: (72, 101, 241)
+        det_text_color (str or tuple(int) or :obj:`Color`): Color of texts.
+            The tuple of color should be in BGR order. Default: (72, 101, 241)
+        det_mask_color (str or tuple(int) or :obj:`Color`, optional):
+            Color of masks. The tuple of color should be in BGR order.
+            Default: (72, 101, 241)
+        thickness (int): Thickness of lines. Default: 2
+        font_size (int): Font size of texts. Default: 13
+        win_name (str): The window name. Default: ''
+        show (bool): Whether to show the image. Default: True
+        wait_time (float): Value of waitKey param. Default: 0.
+        out_file (str, optional): The filename to write the image.
+            Default: None
+
+    Returns:
+        ndarray: The image with bboxes or masks drawn on it.
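A concrete usage sketch for `imshow_det_bboxes` above (all inputs invented; any `(n, 5)` score-augmented boxes work):

```python
import numpy as np
from mmdet.core.visualization import imshow_det_bboxes

img = np.zeros((480, 640, 3), dtype=np.uint8)           # dummy BGR image
bboxes = np.array([[20., 30., 200., 220., 0.92],        # (x1, y1, x2, y2, score)
                   [300., 100., 430., 280., 0.25]], dtype=np.float32)
labels = np.array([0, 1])
# The 0.25 box is dropped by score_thr; the result is written, not shown.
imshow_det_bboxes(img, bboxes, labels, class_names=['person', 'car'],
                  score_thr=0.3, show=False, out_file='/tmp/vis.jpg')
```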
+ """ + assert 'gt_bboxes' in annotation + assert 'gt_labels' in annotation + assert isinstance( + result, + (tuple, list)), f'Expected tuple or list, but get {type(result)}' + + gt_masks = annotation.get('gt_masks', None) + if gt_masks is not None: + gt_masks = mask2ndarray(gt_masks) + + img = mmcv.imread(img) + + img = imshow_det_bboxes( + img, + annotation['gt_bboxes'], + annotation['gt_labels'], + gt_masks, + class_names=class_names, + bbox_color=gt_bbox_color, + text_color=gt_text_color, + mask_color=gt_mask_color, + thickness=thickness, + font_size=font_size, + win_name=win_name, + show=False) + + if isinstance(result, tuple): + bbox_result, segm_result = result + if isinstance(segm_result, tuple): + segm_result = segm_result[0] # ms rcnn + else: + bbox_result, segm_result = result, None + + bboxes = np.vstack(bbox_result) + labels = [ + np.full(bbox.shape[0], i, dtype=np.int32) + for i, bbox in enumerate(bbox_result) + ] + labels = np.concatenate(labels) + + segms = None + if segm_result is not None and len(labels) > 0: # non empty + segms = mmcv.concat_list(segm_result) + segms = mask_util.decode(segms) + segms = segms.transpose(2, 0, 1) + + img = imshow_det_bboxes( + img, + bboxes, + labels, + segms=segms, + class_names=class_names, + score_thr=score_thr, + bbox_color=det_bbox_color, + text_color=det_text_color, + mask_color=det_mask_color, + thickness=thickness, + font_size=font_size, + win_name=win_name, + show=show, + wait_time=wait_time, + out_file=out_file) + return img diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9b18b30a258c32283cbfc03ba01781a19fd993c1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/__init__.py @@ -0,0 +1,24 @@ +from .builder import DATASETS, PIPELINES, build_dataloader, build_dataset +from .cityscapes import CityscapesDataset +from .coco import CocoDataset +from .custom import CustomDataset +from .dataset_wrappers import (ClassBalancedDataset, ConcatDataset, + RepeatDataset) +from .deepfashion import DeepFashionDataset +from .lvis import LVISDataset, LVISV1Dataset, LVISV05Dataset +from .samplers import DistributedGroupSampler, DistributedSampler, GroupSampler +from .utils import (NumClassCheckHook, get_loading_pipeline, + replace_ImageToTensor) +from .voc import VOCDataset +from .wider_face import WIDERFaceDataset +from .xml_style import XMLDataset + +__all__ = [ + 'CustomDataset', 'XMLDataset', 'CocoDataset', 'DeepFashionDataset', + 'VOCDataset', 'CityscapesDataset', 'LVISDataset', 'LVISV05Dataset', + 'LVISV1Dataset', 'GroupSampler', 'DistributedGroupSampler', + 'DistributedSampler', 'build_dataloader', 'ConcatDataset', 'RepeatDataset', + 'ClassBalancedDataset', 'WIDERFaceDataset', 'DATASETS', 'PIPELINES', + 'build_dataset', 'replace_ImageToTensor', 'get_loading_pipeline', + 'NumClassCheckHook' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/api_wrappers/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/api_wrappers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..05f95c94ae6dee2f29fae9f9b0e369cebb9ae210 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/api_wrappers/__init__.py @@ -0,0 +1,3 @@ +from .coco_api import COCO, COCOeval + +__all__ = ['COCO', 'COCOeval'] diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/api_wrappers/coco_api.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/api_wrappers/coco_api.py new file mode 100644 index 0000000000000000000000000000000000000000..57077f9ba15afd35ef4bfca388b547bf6ae7b59d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/api_wrappers/coco_api.py @@ -0,0 +1,46 @@ +# This file add snake case alias for coco api + +import warnings + +import pycocotools +from pycocotools.coco import COCO as _COCO +from pycocotools.cocoeval import COCOeval as _COCOeval + + +class COCO(_COCO): + """This class is almost the same as official pycocotools package. + + It implements some snake case function aliases. So that the COCO class has + the same interface as LVIS class. + """ + + def __init__(self, annotation_file=None): + if getattr(pycocotools, '__version__', '0') >= '12.0.2': + warnings.warn( + 'mmpycocotools is deprecated. Please install official pycocotools by "pip install pycocotools"', # noqa: E501 + UserWarning) + super().__init__(annotation_file=annotation_file) + self.img_ann_map = self.imgToAnns + self.cat_img_map = self.catToImgs + + def get_ann_ids(self, img_ids=[], cat_ids=[], area_rng=[], iscrowd=None): + return self.getAnnIds(img_ids, cat_ids, area_rng, iscrowd) + + def get_cat_ids(self, cat_names=[], sup_names=[], cat_ids=[]): + return self.getCatIds(cat_names, sup_names, cat_ids) + + def get_img_ids(self, img_ids=[], cat_ids=[]): + return self.getImgIds(img_ids, cat_ids) + + def load_anns(self, ids): + return self.loadAnns(ids) + + def load_cats(self, ids): + return self.loadCats(ids) + + def load_imgs(self, ids): + return self.loadImgs(ids) + + +# just for the ease of import +COCOeval = _COCOeval diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/builder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..c9466a517dee746a6677b27a19713f2e89ed7194 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/builder.py @@ -0,0 +1,143 @@ +import copy +import platform +import random +from functools import partial + +import numpy as np +from mmcv.parallel import collate +from mmcv.runner import get_dist_info +from mmcv.utils import Registry, build_from_cfg +from torch.utils.data import DataLoader + +from .samplers import DistributedGroupSampler, DistributedSampler, GroupSampler + +if platform.system() != 'Windows': + # https://github.com/pytorch/pytorch/issues/973 + import resource + rlimit = resource.getrlimit(resource.RLIMIT_NOFILE) + hard_limit = rlimit[1] + soft_limit = min(4096, hard_limit) + resource.setrlimit(resource.RLIMIT_NOFILE, (soft_limit, hard_limit)) + +DATASETS = Registry('dataset') +PIPELINES = Registry('pipeline') + + +def _concat_dataset(cfg, default_args=None): + from .dataset_wrappers import ConcatDataset + ann_files = cfg['ann_file'] + img_prefixes = cfg.get('img_prefix', None) + seg_prefixes = cfg.get('seg_prefix', None) + proposal_files = cfg.get('proposal_file', None) + separate_eval = cfg.get('separate_eval', True) + + datasets = [] + num_dset = len(ann_files) + for i in range(num_dset): + data_cfg = copy.deepcopy(cfg) + # pop 'separate_eval' since it is not a valid key for common datasets. 
+ if 'separate_eval' in data_cfg: + data_cfg.pop('separate_eval') + data_cfg['ann_file'] = ann_files[i] + if isinstance(img_prefixes, (list, tuple)): + data_cfg['img_prefix'] = img_prefixes[i] + if isinstance(seg_prefixes, (list, tuple)): + data_cfg['seg_prefix'] = seg_prefixes[i] + if isinstance(proposal_files, (list, tuple)): + data_cfg['proposal_file'] = proposal_files[i] + datasets.append(build_dataset(data_cfg, default_args)) + + return ConcatDataset(datasets, separate_eval) + + +def build_dataset(cfg, default_args=None): + from .dataset_wrappers import (ConcatDataset, RepeatDataset, + ClassBalancedDataset) + if isinstance(cfg, (list, tuple)): + dataset = ConcatDataset([build_dataset(c, default_args) for c in cfg]) + elif cfg['type'] == 'ConcatDataset': + dataset = ConcatDataset( + [build_dataset(c, default_args) for c in cfg['datasets']], + cfg.get('separate_eval', True)) + elif cfg['type'] == 'RepeatDataset': + dataset = RepeatDataset( + build_dataset(cfg['dataset'], default_args), cfg['times']) + elif cfg['type'] == 'ClassBalancedDataset': + dataset = ClassBalancedDataset( + build_dataset(cfg['dataset'], default_args), cfg['oversample_thr']) + elif isinstance(cfg.get('ann_file'), (list, tuple)): + dataset = _concat_dataset(cfg, default_args) + else: + dataset = build_from_cfg(cfg, DATASETS, default_args) + + return dataset + + +def build_dataloader(dataset, + samples_per_gpu, + workers_per_gpu, + num_gpus=1, + dist=True, + shuffle=True, + seed=None, + **kwargs): + """Build PyTorch DataLoader. + + In distributed training, each GPU/process has a dataloader. + In non-distributed training, there is only one dataloader for all GPUs. + + Args: + dataset (Dataset): A PyTorch dataset. + samples_per_gpu (int): Number of training samples on each GPU, i.e., + batch size of each GPU. + workers_per_gpu (int): How many subprocesses to use for data loading + for each GPU. + num_gpus (int): Number of GPUs. Only used in non-distributed training. + dist (bool): Distributed training/test or not. Default: True. + shuffle (bool): Whether to shuffle the data at every epoch. + Default: True. + kwargs: any keyword argument to be used to initialize DataLoader + + Returns: + DataLoader: A PyTorch dataloader. 
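A sketch of how the two builders in this file compose (paths are placeholders and the pipeline is left empty for brevity; a real config lists transforms such as `LoadImageFromFile`):

```python
from mmdet.datasets import build_dataloader, build_dataset

dataset = build_dataset(
    dict(
        type='CocoDataset',
        ann_file='data/coco/annotations/instances_train2017.json',
        img_prefix='data/coco/train2017/',
        pipeline=[]))  # placeholder; normally LoadImageFromFile etc.
# Non-distributed path: GroupSampler batches images of similar aspect ratio.
loader = build_dataloader(
    dataset, samples_per_gpu=2, workers_per_gpu=2, num_gpus=1,
    dist=False, shuffle=True, seed=0)
```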
+ """ + rank, world_size = get_dist_info() + if dist: + # DistributedGroupSampler will definitely shuffle the data to satisfy + # that images on each GPU are in the same group + if shuffle: + sampler = DistributedGroupSampler( + dataset, samples_per_gpu, world_size, rank, seed=seed) + else: + sampler = DistributedSampler( + dataset, world_size, rank, shuffle=False, seed=seed) + batch_size = samples_per_gpu + num_workers = workers_per_gpu + else: + sampler = GroupSampler(dataset, samples_per_gpu) if shuffle else None + batch_size = num_gpus * samples_per_gpu + num_workers = num_gpus * workers_per_gpu + + init_fn = partial( + worker_init_fn, num_workers=num_workers, rank=rank, + seed=seed) if seed is not None else None + + data_loader = DataLoader( + dataset, + batch_size=batch_size, + sampler=sampler, + num_workers=num_workers, + collate_fn=partial(collate, samples_per_gpu=samples_per_gpu), + pin_memory=False, + worker_init_fn=init_fn, + **kwargs) + + return data_loader + + +def worker_init_fn(worker_id, num_workers, rank, seed): + # The seed of each worker equals to + # num_worker * rank + worker_id + user_seed + worker_seed = num_workers * rank + worker_id + seed + np.random.seed(worker_seed) + random.seed(worker_seed) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/cityscapes.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/cityscapes.py new file mode 100644 index 0000000000000000000000000000000000000000..71eead87e7f4e511c0cb59e69c3a599832ada0e4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/cityscapes.py @@ -0,0 +1,334 @@ +# Modified from https://github.com/facebookresearch/detectron2/blob/master/detectron2/data/datasets/cityscapes.py # noqa +# and https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalInstanceLevelSemanticLabeling.py # noqa + +import glob +import os +import os.path as osp +import tempfile +from collections import OrderedDict + +import mmcv +import numpy as np +import pycocotools.mask as maskUtils +from mmcv.utils import print_log + +from .builder import DATASETS +from .coco import CocoDataset + + +@DATASETS.register_module() +class CityscapesDataset(CocoDataset): + + CLASSES = ('person', 'rider', 'car', 'truck', 'bus', 'train', 'motorcycle', + 'bicycle') + + def _filter_imgs(self, min_size=32): + """Filter images too small or without ground truths.""" + valid_inds = [] + # obtain images that contain annotation + ids_with_ann = set(_['image_id'] for _ in self.coco.anns.values()) + # obtain images that contain annotations of the required categories + ids_in_cat = set() + for i, class_id in enumerate(self.cat_ids): + ids_in_cat |= set(self.coco.cat_img_map[class_id]) + # merge the image id sets of the two conditions and use the merged set + # to filter out images if self.filter_empty_gt=True + ids_in_cat &= ids_with_ann + + valid_img_ids = [] + for i, img_info in enumerate(self.data_infos): + img_id = img_info['id'] + ann_ids = self.coco.getAnnIds(imgIds=[img_id]) + ann_info = self.coco.loadAnns(ann_ids) + all_iscrowd = all([_['iscrowd'] for _ in ann_info]) + if self.filter_empty_gt and (self.img_ids[i] not in ids_in_cat + or all_iscrowd): + continue + if min(img_info['width'], img_info['height']) >= min_size: + valid_inds.append(i) + valid_img_ids.append(img_id) + self.img_ids = valid_img_ids + return valid_inds + + def _parse_ann_info(self, img_info, ann_info): + """Parse bbox and mask annotation. + + Args: + img_info (dict): Image info of an image. 
+            ann_info (list[dict]): Annotation info of an image.
+
+        Returns:
+            dict: A dict containing the following keys: bboxes, \
+                bboxes_ignore, labels, masks, seg_map. \
+                "masks" are already decoded into binary masks.
+        """
+        gt_bboxes = []
+        gt_labels = []
+        gt_bboxes_ignore = []
+        gt_masks_ann = []
+
+        for i, ann in enumerate(ann_info):
+            if ann.get('ignore', False):
+                continue
+            x1, y1, w, h = ann['bbox']
+            if ann['area'] <= 0 or w < 1 or h < 1:
+                continue
+            if ann['category_id'] not in self.cat_ids:
+                continue
+            bbox = [x1, y1, x1 + w, y1 + h]
+            if ann.get('iscrowd', False):
+                gt_bboxes_ignore.append(bbox)
+            else:
+                gt_bboxes.append(bbox)
+                gt_labels.append(self.cat2label[ann['category_id']])
+                gt_masks_ann.append(ann['segmentation'])
+
+        if gt_bboxes:
+            gt_bboxes = np.array(gt_bboxes, dtype=np.float32)
+            gt_labels = np.array(gt_labels, dtype=np.int64)
+        else:
+            gt_bboxes = np.zeros((0, 4), dtype=np.float32)
+            gt_labels = np.array([], dtype=np.int64)
+
+        if gt_bboxes_ignore:
+            gt_bboxes_ignore = np.array(gt_bboxes_ignore, dtype=np.float32)
+        else:
+            gt_bboxes_ignore = np.zeros((0, 4), dtype=np.float32)
+
+        ann = dict(
+            bboxes=gt_bboxes,
+            labels=gt_labels,
+            bboxes_ignore=gt_bboxes_ignore,
+            masks=gt_masks_ann,
+            seg_map=img_info['segm_file'])
+
+        return ann
+
+    def results2txt(self, results, outfile_prefix):
+        """Dump the detection results to a txt file.
+
+        Args:
+            results (list[list | tuple]): Testing results of the
+                dataset.
+            outfile_prefix (str): The filename prefix of the txt files.
+                If the prefix is "somepath/xxx",
+                the txt files will be named "somepath/xxx.txt".
+
+        Returns:
+            list[str]: Result txt files which contain corresponding \
+                instance segmentation images.
+        """
+        try:
+            import cityscapesscripts.helpers.labels as CSLabels
+        except ImportError:
+            raise ImportError('Please run "pip install cityscapesscripts" to '
+                              'install cityscapesscripts first.')
+        result_files = []
+        os.makedirs(outfile_prefix, exist_ok=True)
+        prog_bar = mmcv.ProgressBar(len(self))
+        for idx in range(len(self)):
+            result = results[idx]
+            filename = self.data_infos[idx]['filename']
+            basename = osp.splitext(osp.basename(filename))[0]
+            pred_txt = osp.join(outfile_prefix, basename + '_pred.txt')
+
+            bbox_result, segm_result = result
+            bboxes = np.vstack(bbox_result)
+            # segm results
+            if isinstance(segm_result, tuple):
+                # Some detectors use different scores for bbox and mask,
+                # like Mask Scoring R-CNN. Score of segm will be used instead
+                # of bbox score.
+                segms = mmcv.concat_list(segm_result[0])
+                mask_score = segm_result[1]
+            else:
+                # use bbox score for mask score
+                segms = mmcv.concat_list(segm_result)
+                mask_score = [bbox[-1] for bbox in bboxes]
+            labels = [
+                np.full(bbox.shape[0], i, dtype=np.int32)
+                for i, bbox in enumerate(bbox_result)
+            ]
+            labels = np.concatenate(labels)
+
+            assert len(bboxes) == len(segms) == len(labels)
+            num_instances = len(bboxes)
+            prog_bar.update()
+            with open(pred_txt, 'w') as fout:
+                for i in range(num_instances):
+                    pred_class = labels[i]
+                    classes = self.CLASSES[pred_class]
+                    class_id = CSLabels.name2label[classes].id
+                    score = mask_score[i]
+                    mask = maskUtils.decode(segms[i]).astype(np.uint8)
+                    png_filename = osp.join(outfile_prefix,
+                                            basename + f'_{i}_{classes}.png')
+                    mmcv.imwrite(mask, png_filename)
+                    fout.write(f'{osp.basename(png_filename)} {class_id} '
+                               f'{score}\n')
+            result_files.append(pred_txt)
+
+        return result_files
+
+    def format_results(self, results, txtfile_prefix=None):
+        """Format the results to txt (standard format for Cityscapes
+        evaluation).
+
+        Args:
+            results (list): Testing results of the dataset.
+            txtfile_prefix (str | None): The prefix of txt files. It includes
+                the file path and the prefix of filename, e.g., "a/b/prefix".
+                If not specified, a temp file will be created. Default: None.
+
+        Returns:
+            tuple: (result_files, tmp_dir), result_files is a list containing \
+                the txt filepaths, tmp_dir is the temporary directory created \
+                for saving txt/png files when txtfile_prefix is not specified.
+        """
+        assert isinstance(results, list), 'results must be a list'
+        assert len(results) == len(self), (
+            'The length of results is not equal to the dataset len: {} != {}'.
+            format(len(results), len(self)))
+
+        if txtfile_prefix is None:
+            tmp_dir = tempfile.TemporaryDirectory()
+            txtfile_prefix = osp.join(tmp_dir.name, 'results')
+        else:
+            tmp_dir = None
+        result_files = self.results2txt(results, txtfile_prefix)
+
+        return result_files, tmp_dir
+
+    def evaluate(self,
+                 results,
+                 metric='bbox',
+                 logger=None,
+                 outfile_prefix=None,
+                 classwise=False,
+                 proposal_nums=(100, 300, 1000),
+                 iou_thrs=np.arange(0.5, 0.96, 0.05)):
+        """Evaluation in Cityscapes/COCO protocol.
+
+        Args:
+            results (list[list | tuple]): Testing results of the dataset.
+            metric (str | list[str]): Metrics to be evaluated. Options are
+                'bbox', 'segm', 'proposal', 'proposal_fast'.
+            logger (logging.Logger | str | None): Logger used for printing
+                related information during evaluation. Default: None.
+            outfile_prefix (str | None): The prefix of output file. It includes
+                the file path and the prefix of filename, e.g., "a/b/prefix".
+                If results are evaluated with COCO protocol, it would be the
+                prefix of output json file. For example, the metric is 'bbox'
+                and 'segm', then json files would be "a/b/prefix.bbox.json" and
+                "a/b/prefix.segm.json".
+                If results are evaluated with cityscapes protocol, it would be
+                the prefix of output txt/png files. The output files would be
+                png images under folder "a/b/prefix/xxx/" and the file name of
+                images would be written into a txt file
+                "a/b/prefix/xxx_pred.txt", where "xxx" is the video name of
+                cityscapes. If not specified, a temp file will be created.
+                Default: None.
+            classwise (bool): Whether to evaluate the AP for each class.
+            proposal_nums (Sequence[int]): Proposal number used for evaluating
+                recalls, such as recall@100, recall@1000.
+                Default: (100, 300, 1000).
+            iou_thrs (Sequence[float]): IoU thresholds used for evaluating
+                recalls. If set to a list, the average recall of all IoUs
+                will also be computed. Default: np.arange(0.5, 0.96, 0.05).
+
+        Returns:
+            dict[str, float]: COCO style evaluation metric or cityscapes mAP \
+                and AP@50.
+        """
+        eval_results = dict()
+
+        metrics = metric.copy() if isinstance(metric, list) else [metric]
+
+        if 'cityscapes' in metrics:
+            eval_results.update(
+                self._evaluate_cityscapes(results, outfile_prefix, logger))
+            metrics.remove('cityscapes')
+
+        # left metrics are all coco metric
+        if len(metrics) > 0:
+            # create CocoDataset with CityscapesDataset annotation
+            self_coco = CocoDataset(self.ann_file, self.pipeline.transforms,
+                                    None, self.data_root, self.img_prefix,
+                                    self.seg_prefix, self.proposal_file,
+                                    self.test_mode, self.filter_empty_gt)
+            # TODO: remove this in the future
+            # reload annotations of correct class
+            self_coco.CLASSES = self.CLASSES
+            self_coco.data_infos = self_coco.load_annotations(self.ann_file)
+            eval_results.update(
+                self_coco.evaluate(results, metrics, logger, outfile_prefix,
+                                   classwise, proposal_nums, iou_thrs))
+
+        return eval_results
+
+    def _evaluate_cityscapes(self, results, txtfile_prefix, logger):
+        """Evaluation in Cityscapes protocol.
+
+        Args:
+            results (list): Testing results of the dataset.
+            txtfile_prefix (str | None): The prefix of output txt file.
+            logger (logging.Logger | str | None): Logger used for printing
+                related information during evaluation. Default: None.
+
+        Returns:
+            dict[str: float]: Cityscapes evaluation results, contains 'mAP' \
+                and 'AP@50'.
+        """
+
+        try:
+            import cityscapesscripts.evaluation.evalInstanceLevelSemanticLabeling as CSEval  # noqa
+        except ImportError:
+            raise ImportError('Please run "pip install cityscapesscripts" to '
+                              'install cityscapesscripts first.')
+        msg = 'Evaluating in Cityscapes style'
+        if logger is None:
+            msg = '\n' + msg
+        print_log(msg, logger=logger)
+
+        result_files, tmp_dir = self.format_results(results, txtfile_prefix)
+
+        if tmp_dir is None:
+            result_dir = osp.join(txtfile_prefix, 'results')
+        else:
+            result_dir = osp.join(tmp_dir.name, 'results')
+
+        eval_results = OrderedDict()
+        print_log(f'Evaluating results under {result_dir} ...', logger=logger)
+
+        # set global states in cityscapes evaluation API
+        CSEval.args.cityscapesPath = os.path.join(self.img_prefix, '../..')
+        CSEval.args.predictionPath = os.path.abspath(result_dir)
+        CSEval.args.predictionWalk = None
+        CSEval.args.JSONOutput = False
+        CSEval.args.colorized = False
+        CSEval.args.gtInstancesFile = os.path.join(result_dir,
+                                                   'gtInstances.json')
+        CSEval.args.groundTruthSearch = os.path.join(
+            self.img_prefix.replace('leftImg8bit', 'gtFine'),
+            '*/*_gtFine_instanceIds.png')
+
+        groundTruthImgList = glob.glob(CSEval.args.groundTruthSearch)
+        assert len(groundTruthImgList), 'Cannot find ground truth images' \
+            f' in {CSEval.args.groundTruthSearch}.'
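Aside: a typical call that exercises both branches of `evaluate` above (the `dataset` and `results` objects are assumed to already exist from a test run):

```python
# 'cityscapes' triggers the instance-level evaluator configured above;
# 'bbox' falls through to COCO-style evaluation of the same results.
metrics = dataset.evaluate(results, metric=['bbox', 'cityscapes'],
                           outfile_prefix='work_dirs/cs_eval/prefix')
print(metrics['mAP'], metrics['AP@50'])  # Cityscapes instance AP numbers
```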
+ predictionImgList = [] + for gt in groundTruthImgList: + predictionImgList.append(CSEval.getPrediction(gt, CSEval.args)) + CSEval_results = CSEval.evaluateImgLists(predictionImgList, + groundTruthImgList, + CSEval.args)['averages'] + + eval_results['mAP'] = CSEval_results['allAp'] + eval_results['AP@50'] = CSEval_results['allAp50%'] + if tmp_dir is not None: + tmp_dir.cleanup() + return eval_results diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/coco.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/coco.py new file mode 100644 index 0000000000000000000000000000000000000000..f8a779eba5aedd7f553e30f5277fb422121a764e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/coco.py @@ -0,0 +1,558 @@ +import itertools +import logging +import os.path as osp +import tempfile +import warnings +from collections import OrderedDict + +import mmcv +import numpy as np +from mmcv.utils import print_log +from terminaltables import AsciiTable + +from mmdet.core import eval_recalls +from .api_wrappers import COCO, COCOeval +from .builder import DATASETS +from .custom import CustomDataset + + +@DATASETS.register_module() +class CocoDataset(CustomDataset): + + CLASSES = ('person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', + 'train', 'truck', 'boat', 'traffic light', 'fire hydrant', + 'stop sign', 'parking meter', 'bench', 'bird', 'cat', 'dog', + 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe', + 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', + 'skis', 'snowboard', 'sports ball', 'kite', 'baseball bat', + 'baseball glove', 'skateboard', 'surfboard', 'tennis racket', + 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon', 'bowl', + 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot', + 'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', + 'potted plant', 'bed', 'dining table', 'toilet', 'tv', 'laptop', + 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave', + 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', + 'vase', 'scissors', 'teddy bear', 'hair drier', 'toothbrush') + + def load_annotations(self, ann_file): + """Load annotation from COCO style annotation file. + + Args: + ann_file (str): Path of annotation file. + + Returns: + list[dict]: Annotation info from COCO api. + """ + + self.coco = COCO(ann_file) + # The order of returned `cat_ids` will not + # change with the order of the CLASSES + self.cat_ids = self.coco.get_cat_ids(cat_names=self.CLASSES) + + self.cat2label = {cat_id: i for i, cat_id in enumerate(self.cat_ids)} + self.img_ids = self.coco.get_img_ids() + data_infos = [] + total_ann_ids = [] + for i in self.img_ids: + info = self.coco.load_imgs([i])[0] + info['filename'] = info['file_name'] + data_infos.append(info) + ann_ids = self.coco.get_ann_ids(img_ids=[i]) + total_ann_ids.extend(ann_ids) + assert len(set(total_ann_ids)) == len( + total_ann_ids), f"Annotation ids in '{ann_file}' are not unique!" + return data_infos + + def get_ann_info(self, idx): + """Get COCO annotation by index. + + Args: + idx (int): Index of data. + + Returns: + dict: Annotation info of specified index. + """ + + img_id = self.data_infos[idx]['id'] + ann_ids = self.coco.get_ann_ids(img_ids=[img_id]) + ann_info = self.coco.load_anns(ann_ids) + return self._parse_ann_info(self.data_infos[idx], ann_info) + + def get_cat_ids(self, idx): + """Get COCO category ids by index. + + Args: + idx (int): Index of data. + + Returns: + list[int]: All categories in the image of specified index. 
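An aside on the label mapping set up in `load_annotations` above: COCO category ids are sparse (1 to 90 for the 80 classes), while training labels must be contiguous starting at 0, hence `cat2label`. A self-contained illustration with made-up ids:

```python
cat_ids = [1, 3, 18, 90]  # sparse COCO category ids
cat2label = {cat_id: i for i, cat_id in enumerate(cat_ids)}
assert cat2label == {1: 0, 3: 1, 18: 2, 90: 3}
# Predictions map back the other way: label -> cat_ids[label] (see _det2json).
```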
+ """ + + img_id = self.data_infos[idx]['id'] + ann_ids = self.coco.get_ann_ids(img_ids=[img_id]) + ann_info = self.coco.load_anns(ann_ids) + return [ann['category_id'] for ann in ann_info] + + def _filter_imgs(self, min_size=32): + """Filter images too small or without ground truths.""" + valid_inds = [] + # obtain images that contain annotation + ids_with_ann = set(_['image_id'] for _ in self.coco.anns.values()) + # obtain images that contain annotations of the required categories + ids_in_cat = set() + for i, class_id in enumerate(self.cat_ids): + ids_in_cat |= set(self.coco.cat_img_map[class_id]) + # merge the image id sets of the two conditions and use the merged set + # to filter out images if self.filter_empty_gt=True + ids_in_cat &= ids_with_ann + + valid_img_ids = [] + for i, img_info in enumerate(self.data_infos): + img_id = self.img_ids[i] + if self.filter_empty_gt and img_id not in ids_in_cat: + continue + if min(img_info['width'], img_info['height']) >= min_size: + valid_inds.append(i) + valid_img_ids.append(img_id) + self.img_ids = valid_img_ids + return valid_inds + + def _parse_ann_info(self, img_info, ann_info): + """Parse bbox and mask annotation. + + Args: + ann_info (list[dict]): Annotation info of an image. + with_mask (bool): Whether to parse mask annotations. + + Returns: + dict: A dict containing the following keys: bboxes, bboxes_ignore,\ + labels, masks, seg_map. "masks" are raw annotations and not \ + decoded into binary masks. + """ + gt_bboxes = [] + gt_labels = [] + gt_bboxes_ignore = [] + gt_masks_ann = [] + for i, ann in enumerate(ann_info): + if ann.get('ignore', False): + continue + x1, y1, w, h = ann['bbox'] + inter_w = max(0, min(x1 + w, img_info['width']) - max(x1, 0)) + inter_h = max(0, min(y1 + h, img_info['height']) - max(y1, 0)) + if inter_w * inter_h == 0: + continue + if ann['area'] <= 0 or w < 1 or h < 1: + continue + if ann['category_id'] not in self.cat_ids: + continue + bbox = [x1, y1, x1 + w, y1 + h] + if ann.get('iscrowd', False): + gt_bboxes_ignore.append(bbox) + else: + gt_bboxes.append(bbox) + gt_labels.append(self.cat2label[ann['category_id']]) + gt_masks_ann.append(ann.get('segmentation', None)) + + if gt_bboxes: + gt_bboxes = np.array(gt_bboxes, dtype=np.float32) + gt_labels = np.array(gt_labels, dtype=np.int64) + else: + gt_bboxes = np.zeros((0, 4), dtype=np.float32) + gt_labels = np.array([], dtype=np.int64) + + if gt_bboxes_ignore: + gt_bboxes_ignore = np.array(gt_bboxes_ignore, dtype=np.float32) + else: + gt_bboxes_ignore = np.zeros((0, 4), dtype=np.float32) + + seg_map = img_info['filename'].replace('jpg', 'png') + + ann = dict( + bboxes=gt_bboxes, + labels=gt_labels, + bboxes_ignore=gt_bboxes_ignore, + masks=gt_masks_ann, + seg_map=seg_map) + + return ann + + def xyxy2xywh(self, bbox): + """Convert ``xyxy`` style bounding boxes to ``xywh`` style for COCO + evaluation. + + Args: + bbox (numpy.ndarray): The bounding boxes, shape (4, ), in + ``xyxy`` order. + + Returns: + list[float]: The converted bounding boxes, in ``xywh`` order. 
+ """ + + _bbox = bbox.tolist() + return [ + _bbox[0], + _bbox[1], + _bbox[2] - _bbox[0], + _bbox[3] - _bbox[1], + ] + + def _proposal2json(self, results): + """Convert proposal results to COCO json style.""" + json_results = [] + for idx in range(len(self)): + img_id = self.img_ids[idx] + bboxes = results[idx] + for i in range(bboxes.shape[0]): + data = dict() + data['image_id'] = img_id + data['bbox'] = self.xyxy2xywh(bboxes[i]) + data['score'] = float(bboxes[i][4]) + data['category_id'] = 1 + json_results.append(data) + return json_results + + def _det2json(self, results): + """Convert detection results to COCO json style.""" + json_results = [] + for idx in range(len(self)): + img_id = self.img_ids[idx] + result = results[idx] + for label in range(len(result)): + bboxes = result[label] + for i in range(bboxes.shape[0]): + data = dict() + data['image_id'] = img_id + data['bbox'] = self.xyxy2xywh(bboxes[i]) + data['score'] = float(bboxes[i][4]) + data['category_id'] = self.cat_ids[label] + json_results.append(data) + return json_results + + def _segm2json(self, results): + """Convert instance segmentation results to COCO json style.""" + bbox_json_results = [] + segm_json_results = [] + for idx in range(len(self)): + img_id = self.img_ids[idx] + det, seg = results[idx] + for label in range(len(det)): + # bbox results + bboxes = det[label] + for i in range(bboxes.shape[0]): + data = dict() + data['image_id'] = img_id + data['bbox'] = self.xyxy2xywh(bboxes[i]) + data['score'] = float(bboxes[i][4]) + data['category_id'] = self.cat_ids[label] + bbox_json_results.append(data) + + # segm results + # some detectors use different scores for bbox and mask + if isinstance(seg, tuple): + segms = seg[0][label] + mask_score = seg[1][label] + else: + segms = seg[label] + mask_score = [bbox[4] for bbox in bboxes] + for i in range(bboxes.shape[0]): + data = dict() + data['image_id'] = img_id + data['bbox'] = self.xyxy2xywh(bboxes[i]) + data['score'] = float(mask_score[i]) + data['category_id'] = self.cat_ids[label] + if isinstance(segms[i]['counts'], bytes): + segms[i]['counts'] = segms[i]['counts'].decode() + data['segmentation'] = segms[i] + segm_json_results.append(data) + return bbox_json_results, segm_json_results + + def results2json(self, results, outfile_prefix): + """Dump the detection results to a COCO style json file. + + There are 3 types of results: proposals, bbox predictions, mask + predictions, and they have different data types. This method will + automatically recognize the type, and dump them to json files. + + Args: + results (list[list | tuple | ndarray]): Testing results of the + dataset. + outfile_prefix (str): The filename prefix of the json files. If the + prefix is "somepath/xxx", the json files will be named + "somepath/xxx.bbox.json", "somepath/xxx.segm.json", + "somepath/xxx.proposal.json". + + Returns: + dict[str: str]: Possible keys are "bbox", "segm", "proposal", and \ + values are corresponding filenames. 
+ """ + result_files = dict() + if isinstance(results[0], list): + json_results = self._det2json(results) + result_files['bbox'] = f'{outfile_prefix}.bbox.json' + result_files['proposal'] = f'{outfile_prefix}.bbox.json' + mmcv.dump(json_results, result_files['bbox']) + elif isinstance(results[0], tuple): + json_results = self._segm2json(results) + result_files['bbox'] = f'{outfile_prefix}.bbox.json' + result_files['proposal'] = f'{outfile_prefix}.bbox.json' + result_files['segm'] = f'{outfile_prefix}.segm.json' + mmcv.dump(json_results[0], result_files['bbox']) + mmcv.dump(json_results[1], result_files['segm']) + elif isinstance(results[0], np.ndarray): + json_results = self._proposal2json(results) + result_files['proposal'] = f'{outfile_prefix}.proposal.json' + mmcv.dump(json_results, result_files['proposal']) + else: + raise TypeError('invalid type of results') + return result_files + + def fast_eval_recall(self, results, proposal_nums, iou_thrs, logger=None): + gt_bboxes = [] + for i in range(len(self.img_ids)): + ann_ids = self.coco.get_ann_ids(img_ids=self.img_ids[i]) + ann_info = self.coco.load_anns(ann_ids) + if len(ann_info) == 0: + gt_bboxes.append(np.zeros((0, 4))) + continue + bboxes = [] + for ann in ann_info: + if ann.get('ignore', False) or ann['iscrowd']: + continue + x1, y1, w, h = ann['bbox'] + bboxes.append([x1, y1, x1 + w, y1 + h]) + bboxes = np.array(bboxes, dtype=np.float32) + if bboxes.shape[0] == 0: + bboxes = np.zeros((0, 4)) + gt_bboxes.append(bboxes) + + recalls = eval_recalls( + gt_bboxes, results, proposal_nums, iou_thrs, logger=logger) + ar = recalls.mean(axis=1) + return ar + + def format_results(self, results, jsonfile_prefix=None, **kwargs): + """Format the results to json (standard format for COCO evaluation). + + Args: + results (list[tuple | numpy.ndarray]): Testing results of the + dataset. + jsonfile_prefix (str | None): The prefix of json files. It includes + the file path and the prefix of filename, e.g., "a/b/prefix". + If not specified, a temp file will be created. Default: None. + + Returns: + tuple: (result_files, tmp_dir), result_files is a dict containing \ + the json filepaths, tmp_dir is the temporal directory created \ + for saving json files when jsonfile_prefix is not specified. + """ + assert isinstance(results, list), 'results must be a list' + assert len(results) == len(self), ( + 'The length of results is not equal to the dataset len: {} != {}'. + format(len(results), len(self))) + + if jsonfile_prefix is None: + tmp_dir = tempfile.TemporaryDirectory() + jsonfile_prefix = osp.join(tmp_dir.name, 'results') + else: + tmp_dir = None + result_files = self.results2json(results, jsonfile_prefix) + return result_files, tmp_dir + + def evaluate(self, + results, + metric='bbox', + logger=None, + jsonfile_prefix=None, + classwise=False, + proposal_nums=(100, 300, 1000), + iou_thrs=None, + metric_items=None): + """Evaluation in COCO protocol. + + Args: + results (list[list | tuple]): Testing results of the dataset. + metric (str | list[str]): Metrics to be evaluated. Options are + 'bbox', 'segm', 'proposal', 'proposal_fast'. + logger (logging.Logger | str | None): Logger used for printing + related information during evaluation. Default: None. + jsonfile_prefix (str | None): The prefix of json files. It includes + the file path and the prefix of filename, e.g., "a/b/prefix". + If not specified, a temp file will be created. Default: None. + classwise (bool): Whether to evaluating the AP for each class. 
+ proposal_nums (Sequence[int]): Proposal number used for evaluating + recalls, such as recall@100, recall@1000. + Default: (100, 300, 1000). + iou_thrs (Sequence[float], optional): IoU threshold used for + evaluating recalls/mAPs. If set to a list, the average of all + IoUs will also be computed. If not specified, [0.50, 0.55, + 0.60, 0.65, 0.70, 0.75, 0.80, 0.85, 0.90, 0.95] will be used. + Default: None. + metric_items (list[str] | str, optional): Metric items that will + be returned. If not specified, ``['AR@100', 'AR@300', + 'AR@1000', 'AR_s@1000', 'AR_m@1000', 'AR_l@1000' ]`` will be + used when ``metric=='proposal'``, ``['mAP', 'mAP_50', 'mAP_75', + 'mAP_s', 'mAP_m', 'mAP_l']`` will be used when + ``metric=='bbox' or metric=='segm'``. + + Returns: + dict[str, float]: COCO style evaluation metric. + """ + + metrics = metric if isinstance(metric, list) else [metric] + allowed_metrics = ['bbox', 'segm', 'proposal', 'proposal_fast'] + for metric in metrics: + if metric not in allowed_metrics: + raise KeyError(f'metric {metric} is not supported') + if iou_thrs is None: + iou_thrs = np.linspace( + .5, 0.95, int(np.round((0.95 - .5) / .05)) + 1, endpoint=True) + if metric_items is not None: + if not isinstance(metric_items, list): + metric_items = [metric_items] + + result_files, tmp_dir = self.format_results(results, jsonfile_prefix) + + eval_results = OrderedDict() + cocoGt = self.coco + for metric in metrics: + msg = f'Evaluating {metric}...' + if logger is None: + msg = '\n' + msg + print_log(msg, logger=logger) + + if metric == 'proposal_fast': + ar = self.fast_eval_recall( + results, proposal_nums, iou_thrs, logger='silent') + log_msg = [] + for i, num in enumerate(proposal_nums): + eval_results[f'AR@{num}'] = ar[i] + log_msg.append(f'\nAR@{num}\t{ar[i]:.4f}') + log_msg = ''.join(log_msg) + print_log(log_msg, logger=logger) + continue + + iou_type = 'bbox' if metric == 'proposal' else metric + if metric not in result_files: + raise KeyError(f'{metric} is not in results') + try: + predictions = mmcv.load(result_files[metric]) + if iou_type == 'segm': + # Refer to https://github.com/cocodataset/cocoapi/blob/master/PythonAPI/pycocotools/coco.py#L331 # noqa + # When evaluating mask AP, if the results contain bbox, + # cocoapi will use the box area instead of the mask area + # for calculating the instance area. Though the overall AP + # is not affected, this leads to different + # small/medium/large mask AP results. + for x in predictions: + x.pop('bbox') + warnings.simplefilter('once') + warnings.warn( + 'The key "bbox" is deleted for more accurate mask AP ' + 'of small/medium/large instances since v2.12.0. 
This ' + 'does not change the overall mAP calculation.', + UserWarning) + cocoDt = cocoGt.loadRes(predictions) + except IndexError: + print_log( + 'The testing results of the whole dataset is empty.', + logger=logger, + level=logging.ERROR) + break + + cocoEval = COCOeval(cocoGt, cocoDt, iou_type) + cocoEval.params.catIds = self.cat_ids + cocoEval.params.imgIds = self.img_ids + cocoEval.params.maxDets = list(proposal_nums) + cocoEval.params.iouThrs = iou_thrs + # mapping of cocoEval.stats + coco_metric_names = { + 'mAP': 0, + 'mAP_50': 1, + 'mAP_75': 2, + 'mAP_s': 3, + 'mAP_m': 4, + 'mAP_l': 5, + 'AR@100': 6, + 'AR@300': 7, + 'AR@1000': 8, + 'AR_s@1000': 9, + 'AR_m@1000': 10, + 'AR_l@1000': 11 + } + if metric_items is not None: + for metric_item in metric_items: + if metric_item not in coco_metric_names: + raise KeyError( + f'metric item {metric_item} is not supported') + + if metric == 'proposal': + cocoEval.params.useCats = 0 + cocoEval.evaluate() + cocoEval.accumulate() + cocoEval.summarize() + if metric_items is None: + metric_items = [ + 'AR@100', 'AR@300', 'AR@1000', 'AR_s@1000', + 'AR_m@1000', 'AR_l@1000' + ] + + for item in metric_items: + val = float( + f'{cocoEval.stats[coco_metric_names[item]]:.3f}') + eval_results[item] = val + else: + cocoEval.evaluate() + cocoEval.accumulate() + cocoEval.summarize() + if classwise: # Compute per-category AP + # Compute per-category AP + # from https://github.com/facebookresearch/detectron2/ + precisions = cocoEval.eval['precision'] + # precision: (iou, recall, cls, area range, max dets) + assert len(self.cat_ids) == precisions.shape[2] + + results_per_category = [] + for idx, catId in enumerate(self.cat_ids): + # area range index 0: all area ranges + # max dets index -1: typically 100 per image + nm = self.coco.loadCats(catId)[0] + precision = precisions[:, :, idx, 0, -1] + precision = precision[precision > -1] + if precision.size: + ap = np.mean(precision) + else: + ap = float('nan') + results_per_category.append( + (f'{nm["name"]}', f'{float(ap):0.3f}')) + + num_columns = min(6, len(results_per_category) * 2) + results_flatten = list( + itertools.chain(*results_per_category)) + headers = ['category', 'AP'] * (num_columns // 2) + results_2d = itertools.zip_longest(*[ + results_flatten[i::num_columns] + for i in range(num_columns) + ]) + table_data = [headers] + table_data += [result for result in results_2d] + table = AsciiTable(table_data) + print_log('\n' + table.table, logger=logger) + + if metric_items is None: + metric_items = [ + 'mAP', 'mAP_50', 'mAP_75', 'mAP_s', 'mAP_m', 'mAP_l' + ] + + for metric_item in metric_items: + key = f'{metric}_{metric_item}' + val = float( + f'{cocoEval.stats[coco_metric_names[metric_item]]:.3f}' + ) + eval_results[key] = val + ap = cocoEval.stats[:6] + eval_results[f'{metric}_mAP_copypaste'] = ( + f'{ap[0]:.3f} {ap[1]:.3f} {ap[2]:.3f} {ap[3]:.3f} ' + f'{ap[4]:.3f} {ap[5]:.3f}') + if tmp_dir is not None: + tmp_dir.cleanup() + return eval_results diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/custom.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/custom.py new file mode 100644 index 0000000000000000000000000000000000000000..2942eccb59a3ec536b2cfcc05ac3e1f379ec1618 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/custom.py @@ -0,0 +1,361 @@ +import os.path as osp +import warnings +from collections import OrderedDict + +import mmcv +import numpy as np +from mmcv.utils import print_log +from terminaltables import AsciiTable +from 
torch.utils.data import Dataset + +from mmdet.core import eval_map, eval_recalls +from .builder import DATASETS +from .pipelines import Compose + + +@DATASETS.register_module() +class CustomDataset(Dataset): + """Custom dataset for detection. + + The annotation format is shown as follows. The `ann` field is optional for + testing. + + .. code-block:: none + + [ + { + 'filename': 'a.jpg', + 'width': 1280, + 'height': 720, + 'ann': { + 'bboxes': (n, 4) in (x1, y1, x2, y2) order. + 'labels': (n, ), + 'bboxes_ignore': (k, 4), (optional field) + 'labels_ignore': (k, 4) (optional field) + } + }, + ... + ] + + Args: + ann_file (str): Annotation file path. + pipeline (list[dict]): Processing pipeline. + classes (str | Sequence[str], optional): Specify classes to load. + If is None, ``cls.CLASSES`` will be used. Default: None. + data_root (str, optional): Data root for ``ann_file``, + ``img_prefix``, ``seg_prefix``, ``proposal_file`` if specified. + test_mode (bool, optional): If set True, annotation will not be loaded. + filter_empty_gt (bool, optional): If set true, images without bounding + boxes of the dataset's classes will be filtered out. This option + only works when `test_mode=False`, i.e., we never filter images + during tests. + """ + + CLASSES = None + + def __init__(self, + ann_file, + pipeline, + classes=None, + data_root=None, + img_prefix='', + seg_prefix=None, + proposal_file=None, + test_mode=False, + filter_empty_gt=True): + self.ann_file = ann_file + self.data_root = data_root + self.img_prefix = img_prefix + self.seg_prefix = seg_prefix + self.proposal_file = proposal_file + self.test_mode = test_mode + self.filter_empty_gt = filter_empty_gt + self.CLASSES = self.get_classes(classes) + + # join paths if data_root is specified + if self.data_root is not None: + if not osp.isabs(self.ann_file): + self.ann_file = osp.join(self.data_root, self.ann_file) + if not (self.img_prefix is None or osp.isabs(self.img_prefix)): + self.img_prefix = osp.join(self.data_root, self.img_prefix) + if not (self.seg_prefix is None or osp.isabs(self.seg_prefix)): + self.seg_prefix = osp.join(self.data_root, self.seg_prefix) + if not (self.proposal_file is None + or osp.isabs(self.proposal_file)): + self.proposal_file = osp.join(self.data_root, + self.proposal_file) + # load annotations (and proposals) + self.data_infos = self.load_annotations(self.ann_file) + + if self.proposal_file is not None: + self.proposals = self.load_proposals(self.proposal_file) + else: + self.proposals = None + + # filter images too small and containing no annotations + if not test_mode: + valid_inds = self._filter_imgs() + self.data_infos = [self.data_infos[i] for i in valid_inds] + if self.proposals is not None: + self.proposals = [self.proposals[i] for i in valid_inds] + # set group flag for the sampler + self._set_group_flag() + + # processing pipeline + self.pipeline = Compose(pipeline) + + def __len__(self): + """Total number of samples of data.""" + return len(self.data_infos) + + def load_annotations(self, ann_file): + """Load annotation from annotation file.""" + return mmcv.load(ann_file) + + def load_proposals(self, proposal_file): + """Load proposal from proposal file.""" + return mmcv.load(proposal_file) + + def get_ann_info(self, idx): + """Get annotation by index. + + Args: + idx (int): Index of data. + + Returns: + dict: Annotation info of specified index. + """ + + return self.data_infos[idx]['ann'] + + def get_cat_ids(self, idx): + """Get category ids by index. + + Args: + idx (int): Index of data. 
+ + Returns: + list[int]: All categories in the image of specified index. + """ + + return self.data_infos[idx]['ann']['labels'].astype(np.int).tolist() + + def pre_pipeline(self, results): + """Prepare results dict for pipeline.""" + results['img_prefix'] = self.img_prefix + results['seg_prefix'] = self.seg_prefix + results['proposal_file'] = self.proposal_file + results['bbox_fields'] = [] + results['mask_fields'] = [] + results['seg_fields'] = [] + + def _filter_imgs(self, min_size=32): + """Filter images too small.""" + if self.filter_empty_gt: + warnings.warn( + 'CustomDataset does not support filtering empty gt images.') + valid_inds = [] + for i, img_info in enumerate(self.data_infos): + if min(img_info['width'], img_info['height']) >= min_size: + valid_inds.append(i) + return valid_inds + + def _set_group_flag(self): + """Set flag according to image aspect ratio. + + Images with aspect ratio greater than 1 will be set as group 1, + otherwise group 0. + """ + self.flag = np.zeros(len(self), dtype=np.uint8) + for i in range(len(self)): + img_info = self.data_infos[i] + if img_info['width'] / img_info['height'] > 1: + self.flag[i] = 1 + + def _rand_another(self, idx): + """Get another random index from the same group as the given index.""" + pool = np.where(self.flag == self.flag[idx])[0] + return np.random.choice(pool) + + def __getitem__(self, idx): + """Get training/test data after pipeline. + + Args: + idx (int): Index of data. + + Returns: + dict: Training/test data (with annotation if `test_mode` is set \ + True). + """ + + if self.test_mode: + return self.prepare_test_img(idx) + while True: + data = self.prepare_train_img(idx) + if data is None: + idx = self._rand_another(idx) + continue + return data + + def prepare_train_img(self, idx): + """Get training data and annotations after pipeline. + + Args: + idx (int): Index of data. + + Returns: + dict: Training data and annotation after pipeline with new keys \ + introduced by pipeline. + """ + + img_info = self.data_infos[idx] + ann_info = self.get_ann_info(idx) + results = dict(img_info=img_info, ann_info=ann_info) + if self.proposals is not None: + results['proposals'] = self.proposals[idx] + self.pre_pipeline(results) + return self.pipeline(results) + + def prepare_test_img(self, idx): + """Get testing data after pipeline. + + Args: + idx (int): Index of data. + + Returns: + dict: Testing data after pipeline with new keys introduced by \ + pipeline. + """ + + img_info = self.data_infos[idx] + results = dict(img_info=img_info) + if self.proposals is not None: + results['proposals'] = self.proposals[idx] + self.pre_pipeline(results) + return self.pipeline(results) + + @classmethod + def get_classes(cls, classes=None): + """Get class names of current dataset. + + Args: + classes (Sequence[str] | str | None): If classes is None, use + default CLASSES defined by builtin dataset. If classes is a + string, take it as a file name. The file contains the name of + classes where each line contains one class name. If classes is + a tuple or list, override the CLASSES defined by the dataset. + + Returns: + tuple[str] or list[str]: Names of categories of the dataset. 
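The three accepted forms, per the docstring above (the txt path is hypothetical; such a file holds one class name per line):

```python
CustomDataset.get_classes(None)              # -> cls.CLASSES (dataset default)
CustomDataset.get_classes(('cat', 'dog'))    # -> ('cat', 'dog') as given
CustomDataset.get_classes('my_classes.txt')  # -> names read from the file
```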
+ """ + if classes is None: + return cls.CLASSES + + if isinstance(classes, str): + # take it as a file path + class_names = mmcv.list_from_file(classes) + elif isinstance(classes, (tuple, list)): + class_names = classes + else: + raise ValueError(f'Unsupported type {type(classes)} of classes.') + + return class_names + + def format_results(self, results, **kwargs): + """Place holder to format result to dataset specific output.""" + + def evaluate(self, + results, + metric='mAP', + logger=None, + proposal_nums=(100, 300, 1000), + iou_thr=0.5, + scale_ranges=None): + """Evaluate the dataset. + + Args: + results (list): Testing results of the dataset. + metric (str | list[str]): Metrics to be evaluated. + logger (logging.Logger | None | str): Logger used for printing + related information during evaluation. Default: None. + proposal_nums (Sequence[int]): Proposal number used for evaluating + recalls, such as recall@100, recall@1000. + Default: (100, 300, 1000). + iou_thr (float | list[float]): IoU threshold. Default: 0.5. + scale_ranges (list[tuple] | None): Scale ranges for evaluating mAP. + Default: None. + """ + + if not isinstance(metric, str): + assert len(metric) == 1 + metric = metric[0] + allowed_metrics = ['mAP', 'recall'] + if metric not in allowed_metrics: + raise KeyError(f'metric {metric} is not supported') + annotations = [self.get_ann_info(i) for i in range(len(self))] + eval_results = OrderedDict() + iou_thrs = [iou_thr] if isinstance(iou_thr, float) else iou_thr + if metric == 'mAP': + assert isinstance(iou_thrs, list) + mean_aps = [] + for iou_thr in iou_thrs: + print_log(f'\n{"-" * 15}iou_thr: {iou_thr}{"-" * 15}') + mean_ap, _ = eval_map( + results, + annotations, + scale_ranges=scale_ranges, + iou_thr=iou_thr, + dataset=self.CLASSES, + logger=logger) + mean_aps.append(mean_ap) + eval_results[f'AP{int(iou_thr * 100):02d}'] = round(mean_ap, 3) + eval_results['mAP'] = sum(mean_aps) / len(mean_aps) + elif metric == 'recall': + gt_bboxes = [ann['bboxes'] for ann in annotations] + recalls = eval_recalls( + gt_bboxes, results, proposal_nums, iou_thr, logger=logger) + for i, num in enumerate(proposal_nums): + for j, iou in enumerate(iou_thrs): + eval_results[f'recall@{num}@{iou}'] = recalls[i, j] + if recalls.shape[1] > 1: + ar = recalls.mean(axis=1) + for i, num in enumerate(proposal_nums): + eval_results[f'AR@{num}'] = ar[i] + return eval_results + + def __repr__(self): + """Print the number of instance number.""" + dataset_type = 'Test' if self.test_mode else 'Train' + result = (f'\n{self.__class__.__name__} {dataset_type} dataset ' + f'with number of images {len(self)}, ' + f'and instance counts: \n') + if self.CLASSES is None: + result += 'Category names are not provided. 
\n' + return result + instance_count = np.zeros(len(self.CLASSES) + 1).astype(int) + # count the instance number in each image + for idx in range(len(self)): + label = self.get_ann_info(idx)['labels'] + unique, counts = np.unique(label, return_counts=True) + if len(unique) > 0: + # add the occurrence number to each class + instance_count[unique] += counts + else: + # background is the last index + instance_count[-1] += 1 + # create a table with category count + table_data = [['category', 'count'] * 5] + row_data = [] + for cls, count in enumerate(instance_count): + if cls < len(self.CLASSES): + row_data += [f'{cls} [{self.CLASSES[cls]}]', f'{count}'] + else: + # add the background number + row_data += ['-1 background', f'{count}'] + if len(row_data) == 10: + table_data.append(row_data) + row_data = [] + + table = AsciiTable(table_data) + result += table.table + return result diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/dataset_wrappers.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/dataset_wrappers.py new file mode 100644 index 0000000000000000000000000000000000000000..4276385eea521918f1db3b9c377ec2b6d9210f5d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/dataset_wrappers.py @@ -0,0 +1,282 @@ +import bisect +import math +from collections import defaultdict + +import numpy as np +from mmcv.utils import print_log +from torch.utils.data.dataset import ConcatDataset as _ConcatDataset + +from .builder import DATASETS +from .coco import CocoDataset + + +@DATASETS.register_module() +class ConcatDataset(_ConcatDataset): + """A wrapper of concatenated dataset. + + Same as :obj:`torch.utils.data.dataset.ConcatDataset`, but + concat the group flag for image aspect ratio. + + Args: + datasets (list[:obj:`Dataset`]): A list of datasets. + separate_eval (bool): Whether to evaluate the results + separately if it is used as validation dataset. + Defaults to True. + """ + + def __init__(self, datasets, separate_eval=True): + super(ConcatDataset, self).__init__(datasets) + self.CLASSES = datasets[0].CLASSES + self.separate_eval = separate_eval + if not separate_eval: + if any([isinstance(ds, CocoDataset) for ds in datasets]): + raise NotImplementedError( + 'Evaluating concatenated CocoDataset as a whole is not' + ' supported! Please set "separate_eval=True"') + elif len(set([type(ds) for ds in datasets])) != 1: + raise NotImplementedError( + 'All the datasets should have same types') + + if hasattr(datasets[0], 'flag'): + flags = [] + for i in range(0, len(datasets)): + flags.append(datasets[i].flag) + self.flag = np.concatenate(flags) + + def get_cat_ids(self, idx): + """Get category ids of concatenated dataset by index. + + Args: + idx (int): Index of data. + + Returns: + list[int]: All categories in the image of specified index. + """ + + if idx < 0: + if -idx > len(self): + raise ValueError( + 'absolute value of index should not exceed dataset length') + idx = len(self) + idx + dataset_idx = bisect.bisect_right(self.cumulative_sizes, idx) + if dataset_idx == 0: + sample_idx = idx + else: + sample_idx = idx - self.cumulative_sizes[dataset_idx - 1] + return self.datasets[dataset_idx].get_cat_ids(sample_idx) + + def evaluate(self, results, logger=None, **kwargs): + """Evaluate the results. + + Args: + results (list[list | tuple]): Testing results of the dataset. + logger (logging.Logger | str | None): Logger used for printing + related information during evaluation. Default: None. 
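+            **kwargs: Other keyword arguments that are passed through to
+                the ``evaluate`` call of every wrapped dataset
+                (e.g. ``metric``).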
+
+        Returns:
+            dict[str, float]: AP results of the total dataset or each separate
+            dataset if `self.separate_eval=True`.
+        """
+        assert len(results) == self.cumulative_sizes[-1], \
+            ('Dataset and results have different sizes: '
+             f'{self.cumulative_sizes[-1]} vs. {len(results)}')
+
+        # Check whether all the datasets support evaluation
+        for dataset in self.datasets:
+            assert hasattr(dataset, 'evaluate'), \
+                f'{type(dataset)} does not implement evaluate function'
+
+        if self.separate_eval:
+            dataset_idx = -1
+            total_eval_results = dict()
+            for size, dataset in zip(self.cumulative_sizes, self.datasets):
+                start_idx = 0 if dataset_idx == -1 else \
+                    self.cumulative_sizes[dataset_idx]
+                end_idx = self.cumulative_sizes[dataset_idx + 1]
+
+                results_per_dataset = results[start_idx:end_idx]
+                print_log(
+                    f'\nEvaluating {dataset.ann_file} with '
+                    f'{len(results_per_dataset)} images now',
+                    logger=logger)
+
+                eval_results_per_dataset = dataset.evaluate(
+                    results_per_dataset, logger=logger, **kwargs)
+                dataset_idx += 1
+                for k, v in eval_results_per_dataset.items():
+                    total_eval_results.update({f'{dataset_idx}_{k}': v})
+
+            return total_eval_results
+        elif any([isinstance(ds, CocoDataset) for ds in self.datasets]):
+            raise NotImplementedError(
+                'Evaluating concatenated CocoDataset as a whole is not'
+                ' supported! Please set "separate_eval=True"')
+        elif len(set([type(ds) for ds in self.datasets])) != 1:
+            raise NotImplementedError(
+                'All the datasets should have same types')
+        else:
+            original_data_infos = self.datasets[0].data_infos
+            self.datasets[0].data_infos = sum(
+                [dataset.data_infos for dataset in self.datasets], [])
+            eval_results = self.datasets[0].evaluate(
+                results, logger=logger, **kwargs)
+            self.datasets[0].data_infos = original_data_infos
+            return eval_results
+
+
+@DATASETS.register_module()
+class RepeatDataset:
+    """A wrapper of repeated dataset.
+
+    The length of the repeated dataset will be `times` times the length of
+    the original dataset. This is useful when the data loading time is long
+    but the dataset is small. Using RepeatDataset can reduce the data loading
+    time between epochs.
+
+    Args:
+        dataset (:obj:`Dataset`): The dataset to be repeated.
+        times (int): Repeat times.
+    """
+
+    def __init__(self, dataset, times):
+        self.dataset = dataset
+        self.times = times
+        self.CLASSES = dataset.CLASSES
+        if hasattr(self.dataset, 'flag'):
+            self.flag = np.tile(self.dataset.flag, times)
+
+        self._ori_len = len(self.dataset)
+
+    def __getitem__(self, idx):
+        return self.dataset[idx % self._ori_len]
+
+    def get_cat_ids(self, idx):
+        """Get category ids of repeat dataset by index.
+
+        Args:
+            idx (int): Index of data.
+
+        Returns:
+            list[int]: All categories in the image of specified index.
+        """
+
+        return self.dataset.get_cat_ids(idx % self._ori_len)
+
+    def __len__(self):
+        """Length after repetition."""
+        return self.times * self._ori_len
+
+
+# Modified from https://github.com/facebookresearch/detectron2/blob/41d475b75a230221e21d9cac5d69655e3415e3a4/detectron2/data/samplers/distributed_sampler.py#L57 # noqa
+@DATASETS.register_module()
+class ClassBalancedDataset:
+    """A wrapper of repeated dataset with repeat factor.
+
+    Suitable for training on class imbalanced datasets like LVIS. Following
+    the sampling strategy in the `paper <https://arxiv.org/abs/1908.03195>`_,
+    in each epoch, an image may appear multiple times based on its
+    "repeat factor".
+    The repeat factor for an image is a function of the frequency of the
+    rarest category labeled in that image. The "frequency of category c" in
+    [0, 1] is defined by the fraction of images in the training set (without
+    repeats) in which category c appears.
+    The dataset needs to instantiate :func:`self.get_cat_ids` to support
+    ClassBalancedDataset.
+
+    The repeat factor is computed as follows.
+
+    1. For each category c, compute the fraction of images
+       that contain it: :math:`f(c)`
+    2. For each category c, compute the category-level repeat factor:
+       :math:`r(c) = max(1, sqrt(t/f(c)))`
+    3. For each image I, compute the image-level repeat factor:
+       :math:`r(I) = max_{c in I} r(c)`
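+
+    For example, with ``oversample_thr = 0.001``, a category appearing in
+    0.01% of the images (f(c) = 0.0001) gets
+    r(c) = sqrt(0.001/0.0001) = 3.16..., so every image containing it is
+    repeated ceil(3.16) = 4 times, while any category with f(c) >= 0.001
+    keeps r(c) = 1.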
The "frequency of category c" in [0, 1] + is defined by the fraction of images in the training set (without repeats) + in which category c appears. + The dataset needs to instantiate :func:`self.get_cat_ids` to support + ClassBalancedDataset. + + The repeat factor is computed as followed. + + 1. For each category c, compute the fraction # of images + that contain it: :math:`f(c)` + 2. For each category c, compute the category-level repeat factor: + :math:`r(c) = max(1, sqrt(t/f(c)))` + 3. For each image I, compute the image-level repeat factor: + :math:`r(I) = max_{c in I} r(c)` + + Args: + dataset (:obj:`CustomDataset`): The dataset to be repeated. + oversample_thr (float): frequency threshold below which data is + repeated. For categories with ``f_c >= oversample_thr``, there is + no oversampling. For categories with ``f_c < oversample_thr``, the + degree of oversampling following the square-root inverse frequency + heuristic above. + filter_empty_gt (bool, optional): If set true, images without bounding + boxes will not be oversampled. Otherwise, they will be categorized + as the pure background class and involved into the oversampling. + Default: True. + """ + + def __init__(self, dataset, oversample_thr, filter_empty_gt=True): + self.dataset = dataset + self.oversample_thr = oversample_thr + self.filter_empty_gt = filter_empty_gt + self.CLASSES = dataset.CLASSES + + repeat_factors = self._get_repeat_factors(dataset, oversample_thr) + repeat_indices = [] + for dataset_idx, repeat_factor in enumerate(repeat_factors): + repeat_indices.extend([dataset_idx] * math.ceil(repeat_factor)) + self.repeat_indices = repeat_indices + + flags = [] + if hasattr(self.dataset, 'flag'): + for flag, repeat_factor in zip(self.dataset.flag, repeat_factors): + flags.extend([flag] * int(math.ceil(repeat_factor))) + assert len(flags) == len(repeat_indices) + self.flag = np.asarray(flags, dtype=np.uint8) + + def _get_repeat_factors(self, dataset, repeat_thr): + """Get repeat factor for each images in the dataset. + + Args: + dataset (:obj:`CustomDataset`): The dataset + repeat_thr (float): The threshold of frequency. If an image + contains the categories whose frequency below the threshold, + it would be repeated. + + Returns: + list[float]: The repeat factors for each images in the dataset. + """ + + # 1. For each category c, compute the fraction # of images + # that contain it: f(c) + category_freq = defaultdict(int) + num_images = len(dataset) + for idx in range(num_images): + cat_ids = set(self.dataset.get_cat_ids(idx)) + if len(cat_ids) == 0 and not self.filter_empty_gt: + cat_ids = set([len(self.CLASSES)]) + for cat_id in cat_ids: + category_freq[cat_id] += 1 + for k, v in category_freq.items(): + category_freq[k] = v / num_images + + # 2. For each category c, compute the category-level repeat factor: + # r(c) = max(1, sqrt(t/f(c))) + category_repeat = { + cat_id: max(1.0, math.sqrt(repeat_thr / cat_freq)) + for cat_id, cat_freq in category_freq.items() + } + + # 3. 
For each image I, compute the image-level repeat factor: + # r(I) = max_{c in I} r(c) + repeat_factors = [] + for idx in range(num_images): + cat_ids = set(self.dataset.get_cat_ids(idx)) + if len(cat_ids) == 0 and not self.filter_empty_gt: + cat_ids = set([len(self.CLASSES)]) + repeat_factor = 1 + if len(cat_ids) > 0: + repeat_factor = max( + {category_repeat[cat_id] + for cat_id in cat_ids}) + repeat_factors.append(repeat_factor) + + return repeat_factors + + def __getitem__(self, idx): + ori_index = self.repeat_indices[idx] + return self.dataset[ori_index] + + def __len__(self): + """Length after repetition.""" + return len(self.repeat_indices) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/deepfashion.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/deepfashion.py new file mode 100644 index 0000000000000000000000000000000000000000..1125376091f2d4ee6843ae4f2156b3b0453be369 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/deepfashion.py @@ -0,0 +1,10 @@ +from .builder import DATASETS +from .coco import CocoDataset + + +@DATASETS.register_module() +class DeepFashionDataset(CocoDataset): + + CLASSES = ('top', 'skirt', 'leggings', 'dress', 'outer', 'pants', 'bag', + 'neckwear', 'headwear', 'eyeglass', 'belt', 'footwear', 'hair', + 'skin', 'face') diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/lvis.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/lvis.py new file mode 100644 index 0000000000000000000000000000000000000000..3cf489302fabeabf4fa4eb6ee3e4102dc110594f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/lvis.py @@ -0,0 +1,737 @@ +import itertools +import logging +import os.path as osp +import tempfile +import warnings +from collections import OrderedDict + +import numpy as np +from mmcv.utils import print_log +from terminaltables import AsciiTable + +from .builder import DATASETS +from .coco import CocoDataset + + +@DATASETS.register_module() +class LVISV05Dataset(CocoDataset): + + CLASSES = ( + 'acorn', 'aerosol_can', 'air_conditioner', 'airplane', 'alarm_clock', + 'alcohol', 'alligator', 'almond', 'ambulance', 'amplifier', 'anklet', + 'antenna', 'apple', 'apple_juice', 'applesauce', 'apricot', 'apron', + 'aquarium', 'armband', 'armchair', 'armoire', 'armor', 'artichoke', + 'trash_can', 'ashtray', 'asparagus', 'atomizer', 'avocado', 'award', + 'awning', 'ax', 'baby_buggy', 'basketball_backboard', 'backpack', + 'handbag', 'suitcase', 'bagel', 'bagpipe', 'baguet', 'bait', 'ball', + 'ballet_skirt', 'balloon', 'bamboo', 'banana', 'Band_Aid', 'bandage', + 'bandanna', 'banjo', 'banner', 'barbell', 'barge', 'barrel', + 'barrette', 'barrow', 'baseball_base', 'baseball', 'baseball_bat', + 'baseball_cap', 'baseball_glove', 'basket', 'basketball_hoop', + 'basketball', 'bass_horn', 'bat_(animal)', 'bath_mat', 'bath_towel', + 'bathrobe', 'bathtub', 'batter_(food)', 'battery', 'beachball', 'bead', + 'beaker', 'bean_curd', 'beanbag', 'beanie', 'bear', 'bed', + 'bedspread', 'cow', 'beef_(food)', 'beeper', 'beer_bottle', 'beer_can', + 'beetle', 'bell', 'bell_pepper', 'belt', 'belt_buckle', 'bench', + 'beret', 'bib', 'Bible', 'bicycle', 'visor', 'binder', 'binoculars', + 'bird', 'birdfeeder', 'birdbath', 'birdcage', 'birdhouse', + 'birthday_cake', 'birthday_card', 'biscuit_(bread)', 'pirate_flag', + 'black_sheep', 'blackboard', 'blanket', 'blazer', 'blender', 'blimp', + 'blinker', 'blueberry', 'boar', 'gameboard', 'boat', 'bobbin', + 'bobby_pin', 
'boiled_egg', 'bolo_tie', 'deadbolt', 'bolt', 'bonnet', + 'book', 'book_bag', 'bookcase', 'booklet', 'bookmark', + 'boom_microphone', 'boot', 'bottle', 'bottle_opener', 'bouquet', + 'bow_(weapon)', 'bow_(decorative_ribbons)', 'bow-tie', 'bowl', + 'pipe_bowl', 'bowler_hat', 'bowling_ball', 'bowling_pin', + 'boxing_glove', 'suspenders', 'bracelet', 'brass_plaque', 'brassiere', + 'bread-bin', 'breechcloth', 'bridal_gown', 'briefcase', + 'bristle_brush', 'broccoli', 'broach', 'broom', 'brownie', + 'brussels_sprouts', 'bubble_gum', 'bucket', 'horse_buggy', 'bull', + 'bulldog', 'bulldozer', 'bullet_train', 'bulletin_board', + 'bulletproof_vest', 'bullhorn', 'corned_beef', 'bun', 'bunk_bed', + 'buoy', 'burrito', 'bus_(vehicle)', 'business_card', 'butcher_knife', + 'butter', 'butterfly', 'button', 'cab_(taxi)', 'cabana', 'cabin_car', + 'cabinet', 'locker', 'cake', 'calculator', 'calendar', 'calf', + 'camcorder', 'camel', 'camera', 'camera_lens', 'camper_(vehicle)', + 'can', 'can_opener', 'candelabrum', 'candle', 'candle_holder', + 'candy_bar', 'candy_cane', 'walking_cane', 'canister', 'cannon', + 'canoe', 'cantaloup', 'canteen', 'cap_(headwear)', 'bottle_cap', + 'cape', 'cappuccino', 'car_(automobile)', 'railcar_(part_of_a_train)', + 'elevator_car', 'car_battery', 'identity_card', 'card', 'cardigan', + 'cargo_ship', 'carnation', 'horse_carriage', 'carrot', 'tote_bag', + 'cart', 'carton', 'cash_register', 'casserole', 'cassette', 'cast', + 'cat', 'cauliflower', 'caviar', 'cayenne_(spice)', 'CD_player', + 'celery', 'cellular_telephone', 'chain_mail', 'chair', 'chaise_longue', + 'champagne', 'chandelier', 'chap', 'checkbook', 'checkerboard', + 'cherry', 'chessboard', 'chest_of_drawers_(furniture)', + 'chicken_(animal)', 'chicken_wire', 'chickpea', 'Chihuahua', + 'chili_(vegetable)', 'chime', 'chinaware', 'crisp_(potato_chip)', + 'poker_chip', 'chocolate_bar', 'chocolate_cake', 'chocolate_milk', + 'chocolate_mousse', 'choker', 'chopping_board', 'chopstick', + 'Christmas_tree', 'slide', 'cider', 'cigar_box', 'cigarette', + 'cigarette_case', 'cistern', 'clarinet', 'clasp', 'cleansing_agent', + 'clementine', 'clip', 'clipboard', 'clock', 'clock_tower', + 'clothes_hamper', 'clothespin', 'clutch_bag', 'coaster', 'coat', + 'coat_hanger', 'coatrack', 'cock', 'coconut', 'coffee_filter', + 'coffee_maker', 'coffee_table', 'coffeepot', 'coil', 'coin', + 'colander', 'coleslaw', 'coloring_material', 'combination_lock', + 'pacifier', 'comic_book', 'computer_keyboard', 'concrete_mixer', + 'cone', 'control', 'convertible_(automobile)', 'sofa_bed', 'cookie', + 'cookie_jar', 'cooking_utensil', 'cooler_(for_food)', + 'cork_(bottle_plug)', 'corkboard', 'corkscrew', 'edible_corn', + 'cornbread', 'cornet', 'cornice', 'cornmeal', 'corset', + 'romaine_lettuce', 'costume', 'cougar', 'coverall', 'cowbell', + 'cowboy_hat', 'crab_(animal)', 'cracker', 'crape', 'crate', 'crayon', + 'cream_pitcher', 'credit_card', 'crescent_roll', 'crib', 'crock_pot', + 'crossbar', 'crouton', 'crow', 'crown', 'crucifix', 'cruise_ship', + 'police_cruiser', 'crumb', 'crutch', 'cub_(animal)', 'cube', + 'cucumber', 'cufflink', 'cup', 'trophy_cup', 'cupcake', 'hair_curler', + 'curling_iron', 'curtain', 'cushion', 'custard', 'cutting_tool', + 'cylinder', 'cymbal', 'dachshund', 'dagger', 'dartboard', + 'date_(fruit)', 'deck_chair', 'deer', 'dental_floss', 'desk', + 'detergent', 'diaper', 'diary', 'die', 'dinghy', 'dining_table', 'tux', + 'dish', 'dish_antenna', 'dishrag', 'dishtowel', 'dishwasher', + 'dishwasher_detergent', 'diskette', 'dispenser', 
'Dixie_cup', 'dog', + 'dog_collar', 'doll', 'dollar', 'dolphin', 'domestic_ass', 'eye_mask', + 'doorbell', 'doorknob', 'doormat', 'doughnut', 'dove', 'dragonfly', + 'drawer', 'underdrawers', 'dress', 'dress_hat', 'dress_suit', + 'dresser', 'drill', 'drinking_fountain', 'drone', 'dropper', + 'drum_(musical_instrument)', 'drumstick', 'duck', 'duckling', + 'duct_tape', 'duffel_bag', 'dumbbell', 'dumpster', 'dustpan', + 'Dutch_oven', 'eagle', 'earphone', 'earplug', 'earring', 'easel', + 'eclair', 'eel', 'egg', 'egg_roll', 'egg_yolk', 'eggbeater', + 'eggplant', 'electric_chair', 'refrigerator', 'elephant', 'elk', + 'envelope', 'eraser', 'escargot', 'eyepatch', 'falcon', 'fan', + 'faucet', 'fedora', 'ferret', 'Ferris_wheel', 'ferry', 'fig_(fruit)', + 'fighter_jet', 'figurine', 'file_cabinet', 'file_(tool)', 'fire_alarm', + 'fire_engine', 'fire_extinguisher', 'fire_hose', 'fireplace', + 'fireplug', 'fish', 'fish_(food)', 'fishbowl', 'fishing_boat', + 'fishing_rod', 'flag', 'flagpole', 'flamingo', 'flannel', 'flash', + 'flashlight', 'fleece', 'flip-flop_(sandal)', 'flipper_(footwear)', + 'flower_arrangement', 'flute_glass', 'foal', 'folding_chair', + 'food_processor', 'football_(American)', 'football_helmet', + 'footstool', 'fork', 'forklift', 'freight_car', 'French_toast', + 'freshener', 'frisbee', 'frog', 'fruit_juice', 'fruit_salad', + 'frying_pan', 'fudge', 'funnel', 'futon', 'gag', 'garbage', + 'garbage_truck', 'garden_hose', 'gargle', 'gargoyle', 'garlic', + 'gasmask', 'gazelle', 'gelatin', 'gemstone', 'giant_panda', + 'gift_wrap', 'ginger', 'giraffe', 'cincture', + 'glass_(drink_container)', 'globe', 'glove', 'goat', 'goggles', + 'goldfish', 'golf_club', 'golfcart', 'gondola_(boat)', 'goose', + 'gorilla', 'gourd', 'surgical_gown', 'grape', 'grasshopper', 'grater', + 'gravestone', 'gravy_boat', 'green_bean', 'green_onion', 'griddle', + 'grillroom', 'grinder_(tool)', 'grits', 'grizzly', 'grocery_bag', + 'guacamole', 'guitar', 'gull', 'gun', 'hair_spray', 'hairbrush', + 'hairnet', 'hairpin', 'ham', 'hamburger', 'hammer', 'hammock', + 'hamper', 'hamster', 'hair_dryer', 'hand_glass', 'hand_towel', + 'handcart', 'handcuff', 'handkerchief', 'handle', 'handsaw', + 'hardback_book', 'harmonium', 'hat', 'hatbox', 'hatch', 'veil', + 'headband', 'headboard', 'headlight', 'headscarf', 'headset', + 'headstall_(for_horses)', 'hearing_aid', 'heart', 'heater', + 'helicopter', 'helmet', 'heron', 'highchair', 'hinge', 'hippopotamus', + 'hockey_stick', 'hog', 'home_plate_(baseball)', 'honey', 'fume_hood', + 'hook', 'horse', 'hose', 'hot-air_balloon', 'hotplate', 'hot_sauce', + 'hourglass', 'houseboat', 'hummingbird', 'hummus', 'polar_bear', + 'icecream', 'popsicle', 'ice_maker', 'ice_pack', 'ice_skate', + 'ice_tea', 'igniter', 'incense', 'inhaler', 'iPod', + 'iron_(for_clothing)', 'ironing_board', 'jacket', 'jam', 'jean', + 'jeep', 'jelly_bean', 'jersey', 'jet_plane', 'jewelry', 'joystick', + 'jumpsuit', 'kayak', 'keg', 'kennel', 'kettle', 'key', 'keycard', + 'kilt', 'kimono', 'kitchen_sink', 'kitchen_table', 'kite', 'kitten', + 'kiwi_fruit', 'knee_pad', 'knife', 'knight_(chess_piece)', + 'knitting_needle', 'knob', 'knocker_(on_a_door)', 'koala', 'lab_coat', + 'ladder', 'ladle', 'ladybug', 'lamb_(animal)', 'lamb-chop', 'lamp', + 'lamppost', 'lampshade', 'lantern', 'lanyard', 'laptop_computer', + 'lasagna', 'latch', 'lawn_mower', 'leather', 'legging_(clothing)', + 'Lego', 'lemon', 'lemonade', 'lettuce', 'license_plate', 'life_buoy', + 'life_jacket', 'lightbulb', 'lightning_rod', 'lime', 'limousine', + 
'linen_paper', 'lion', 'lip_balm', 'lipstick', 'liquor', 'lizard', + 'Loafer_(type_of_shoe)', 'log', 'lollipop', 'lotion', + 'speaker_(stereo_equipment)', 'loveseat', 'machine_gun', 'magazine', + 'magnet', 'mail_slot', 'mailbox_(at_home)', 'mallet', 'mammoth', + 'mandarin_orange', 'manger', 'manhole', 'map', 'marker', 'martini', + 'mascot', 'mashed_potato', 'masher', 'mask', 'mast', + 'mat_(gym_equipment)', 'matchbox', 'mattress', 'measuring_cup', + 'measuring_stick', 'meatball', 'medicine', 'melon', 'microphone', + 'microscope', 'microwave_oven', 'milestone', 'milk', 'minivan', + 'mint_candy', 'mirror', 'mitten', 'mixer_(kitchen_tool)', 'money', + 'monitor_(computer_equipment) computer_monitor', 'monkey', 'motor', + 'motor_scooter', 'motor_vehicle', 'motorboat', 'motorcycle', + 'mound_(baseball)', 'mouse_(animal_rodent)', + 'mouse_(computer_equipment)', 'mousepad', 'muffin', 'mug', 'mushroom', + 'music_stool', 'musical_instrument', 'nailfile', 'nameplate', 'napkin', + 'neckerchief', 'necklace', 'necktie', 'needle', 'nest', 'newsstand', + 'nightshirt', 'nosebag_(for_animals)', 'noseband_(for_animals)', + 'notebook', 'notepad', 'nut', 'nutcracker', 'oar', 'octopus_(food)', + 'octopus_(animal)', 'oil_lamp', 'olive_oil', 'omelet', 'onion', + 'orange_(fruit)', 'orange_juice', 'oregano', 'ostrich', 'ottoman', + 'overalls_(clothing)', 'owl', 'packet', 'inkpad', 'pad', 'paddle', + 'padlock', 'paintbox', 'paintbrush', 'painting', 'pajamas', 'palette', + 'pan_(for_cooking)', 'pan_(metal_container)', 'pancake', 'pantyhose', + 'papaya', 'paperclip', 'paper_plate', 'paper_towel', 'paperback_book', + 'paperweight', 'parachute', 'parakeet', 'parasail_(sports)', + 'parchment', 'parka', 'parking_meter', 'parrot', + 'passenger_car_(part_of_a_train)', 'passenger_ship', 'passport', + 'pastry', 'patty_(food)', 'pea_(food)', 'peach', 'peanut_butter', + 'pear', 'peeler_(tool_for_fruit_and_vegetables)', 'pegboard', + 'pelican', 'pen', 'pencil', 'pencil_box', 'pencil_sharpener', + 'pendulum', 'penguin', 'pennant', 'penny_(coin)', 'pepper', + 'pepper_mill', 'perfume', 'persimmon', 'baby', 'pet', 'petfood', + 'pew_(church_bench)', 'phonebook', 'phonograph_record', 'piano', + 'pickle', 'pickup_truck', 'pie', 'pigeon', 'piggy_bank', 'pillow', + 'pin_(non_jewelry)', 'pineapple', 'pinecone', 'ping-pong_ball', + 'pinwheel', 'tobacco_pipe', 'pipe', 'pistol', 'pita_(bread)', + 'pitcher_(vessel_for_liquid)', 'pitchfork', 'pizza', 'place_mat', + 'plate', 'platter', 'playing_card', 'playpen', 'pliers', + 'plow_(farm_equipment)', 'pocket_watch', 'pocketknife', + 'poker_(fire_stirring_tool)', 'pole', 'police_van', 'polo_shirt', + 'poncho', 'pony', 'pool_table', 'pop_(soda)', 'portrait', + 'postbox_(public)', 'postcard', 'poster', 'pot', 'flowerpot', 'potato', + 'potholder', 'pottery', 'pouch', 'power_shovel', 'prawn', 'printer', + 'projectile_(weapon)', 'projector', 'propeller', 'prune', 'pudding', + 'puffer_(fish)', 'puffin', 'pug-dog', 'pumpkin', 'puncher', 'puppet', + 'puppy', 'quesadilla', 'quiche', 'quilt', 'rabbit', 'race_car', + 'racket', 'radar', 'radiator', 'radio_receiver', 'radish', 'raft', + 'rag_doll', 'raincoat', 'ram_(animal)', 'raspberry', 'rat', + 'razorblade', 'reamer_(juicer)', 'rearview_mirror', 'receipt', + 'recliner', 'record_player', 'red_cabbage', 'reflector', + 'remote_control', 'rhinoceros', 'rib_(food)', 'rifle', 'ring', + 'river_boat', 'road_map', 'robe', 'rocking_chair', 'roller_skate', + 'Rollerblade', 'rolling_pin', 'root_beer', + 'router_(computer_equipment)', 'rubber_band', 'runner_(carpet)', + 
'plastic_bag', 'saddle_(on_an_animal)', 'saddle_blanket', 'saddlebag', + 'safety_pin', 'sail', 'salad', 'salad_plate', 'salami', + 'salmon_(fish)', 'salmon_(food)', 'salsa', 'saltshaker', + 'sandal_(type_of_shoe)', 'sandwich', 'satchel', 'saucepan', 'saucer', + 'sausage', 'sawhorse', 'saxophone', 'scale_(measuring_instrument)', + 'scarecrow', 'scarf', 'school_bus', 'scissors', 'scoreboard', + 'scrambled_eggs', 'scraper', 'scratcher', 'screwdriver', + 'scrubbing_brush', 'sculpture', 'seabird', 'seahorse', 'seaplane', + 'seashell', 'seedling', 'serving_dish', 'sewing_machine', 'shaker', + 'shampoo', 'shark', 'sharpener', 'Sharpie', 'shaver_(electric)', + 'shaving_cream', 'shawl', 'shears', 'sheep', 'shepherd_dog', + 'sherbert', 'shield', 'shirt', 'shoe', 'shopping_bag', 'shopping_cart', + 'short_pants', 'shot_glass', 'shoulder_bag', 'shovel', 'shower_head', + 'shower_curtain', 'shredder_(for_paper)', 'sieve', 'signboard', 'silo', + 'sink', 'skateboard', 'skewer', 'ski', 'ski_boot', 'ski_parka', + 'ski_pole', 'skirt', 'sled', 'sleeping_bag', 'sling_(bandage)', + 'slipper_(footwear)', 'smoothie', 'snake', 'snowboard', 'snowman', + 'snowmobile', 'soap', 'soccer_ball', 'sock', 'soda_fountain', + 'carbonated_water', 'sofa', 'softball', 'solar_array', 'sombrero', + 'soup', 'soup_bowl', 'soupspoon', 'sour_cream', 'soya_milk', + 'space_shuttle', 'sparkler_(fireworks)', 'spatula', 'spear', + 'spectacles', 'spice_rack', 'spider', 'sponge', 'spoon', 'sportswear', + 'spotlight', 'squirrel', 'stapler_(stapling_machine)', 'starfish', + 'statue_(sculpture)', 'steak_(food)', 'steak_knife', + 'steamer_(kitchen_appliance)', 'steering_wheel', 'stencil', + 'stepladder', 'step_stool', 'stereo_(sound_system)', 'stew', 'stirrer', + 'stirrup', 'stockings_(leg_wear)', 'stool', 'stop_sign', 'brake_light', + 'stove', 'strainer', 'strap', 'straw_(for_drinking)', 'strawberry', + 'street_sign', 'streetlight', 'string_cheese', 'stylus', 'subwoofer', + 'sugar_bowl', 'sugarcane_(plant)', 'suit_(clothing)', 'sunflower', + 'sunglasses', 'sunhat', 'sunscreen', 'surfboard', 'sushi', 'mop', + 'sweat_pants', 'sweatband', 'sweater', 'sweatshirt', 'sweet_potato', + 'swimsuit', 'sword', 'syringe', 'Tabasco_sauce', 'table-tennis_table', + 'table', 'table_lamp', 'tablecloth', 'tachometer', 'taco', 'tag', + 'taillight', 'tambourine', 'army_tank', 'tank_(storage_vessel)', + 'tank_top_(clothing)', 'tape_(sticky_cloth_or_paper)', 'tape_measure', + 'tapestry', 'tarp', 'tartan', 'tassel', 'tea_bag', 'teacup', + 'teakettle', 'teapot', 'teddy_bear', 'telephone', 'telephone_booth', + 'telephone_pole', 'telephoto_lens', 'television_camera', + 'television_set', 'tennis_ball', 'tennis_racket', 'tequila', + 'thermometer', 'thermos_bottle', 'thermostat', 'thimble', 'thread', + 'thumbtack', 'tiara', 'tiger', 'tights_(clothing)', 'timer', 'tinfoil', + 'tinsel', 'tissue_paper', 'toast_(food)', 'toaster', 'toaster_oven', + 'toilet', 'toilet_tissue', 'tomato', 'tongs', 'toolbox', 'toothbrush', + 'toothpaste', 'toothpick', 'cover', 'tortilla', 'tow_truck', 'towel', + 'towel_rack', 'toy', 'tractor_(farm_equipment)', 'traffic_light', + 'dirt_bike', 'trailer_truck', 'train_(railroad_vehicle)', 'trampoline', + 'tray', 'tree_house', 'trench_coat', 'triangle_(musical_instrument)', + 'tricycle', 'tripod', 'trousers', 'truck', 'truffle_(chocolate)', + 'trunk', 'vat', 'turban', 'turkey_(bird)', 'turkey_(food)', 'turnip', + 'turtle', 'turtleneck_(clothing)', 'typewriter', 'umbrella', + 'underwear', 'unicycle', 'urinal', 'urn', 'vacuum_cleaner', 'valve', + 'vase', 
'vending_machine', 'vent', 'videotape', 'vinegar', 'violin',
+        'vodka', 'volleyball', 'vulture', 'waffle', 'waffle_iron', 'wagon',
+        'wagon_wheel', 'walking_stick', 'wall_clock', 'wall_socket', 'wallet',
+        'walrus', 'wardrobe', 'wasabi', 'automatic_washer', 'watch',
+        'water_bottle', 'water_cooler', 'water_faucet', 'water_filter',
+        'water_heater', 'water_jug', 'water_gun', 'water_scooter', 'water_ski',
+        'water_tower', 'watering_can', 'watermelon', 'weathervane', 'webcam',
+        'wedding_cake', 'wedding_ring', 'wet_suit', 'wheel', 'wheelchair',
+        'whipped_cream', 'whiskey', 'whistle', 'wick', 'wig', 'wind_chime',
+        'windmill', 'window_box_(for_plants)', 'windshield_wiper', 'windsock',
+        'wine_bottle', 'wine_bucket', 'wineglass', 'wing_chair',
+        'blinder_(for_horses)', 'wok', 'wolf', 'wooden_spoon', 'wreath',
+        'wrench', 'wristband', 'wristlet', 'yacht', 'yak', 'yogurt',
+        'yoke_(animal_equipment)', 'zebra', 'zucchini')
+
+    def load_annotations(self, ann_file):
+        """Load annotation from lvis style annotation file.
+
+        Args:
+            ann_file (str): Path of annotation file.
+
+        Returns:
+            list[dict]: Annotation info from LVIS api.
+        """
+
+        try:
+            import lvis
+            if getattr(lvis, '__version__', '0') >= '10.5.3':
+                warnings.warn(
+                    'mmlvis is deprecated, please install official lvis-api by "pip install git+https://github.com/lvis-dataset/lvis-api.git"',  # noqa: E501
+                    UserWarning)
+            from lvis import LVIS
+        except ImportError:
+            raise ImportError(
+                'Package lvis is not installed. Please run "pip install git+https://github.com/lvis-dataset/lvis-api.git".'  # noqa: E501
+            )
+        self.coco = LVIS(ann_file)
+        self.cat_ids = self.coco.get_cat_ids()
+        self.cat2label = {cat_id: i for i, cat_id in enumerate(self.cat_ids)}
+        self.img_ids = self.coco.get_img_ids()
+        data_infos = []
+        for i in self.img_ids:
+            info = self.coco.load_imgs([i])[0]
+            if info['file_name'].startswith('COCO'):
+                # Convert from the COCO 2014 file naming convention of
+                # COCO_[train/val/test]2014_000000000000.jpg to the 2017
+                # naming convention of 000000000000.jpg
+                # (LVIS v1 will fix this naming issue)
+                info['filename'] = info['file_name'][-16:]
+            else:
+                info['filename'] = info['file_name']
+            data_infos.append(info)
+        return data_infos
+
+    def evaluate(self,
+                 results,
+                 metric='bbox',
+                 logger=None,
+                 jsonfile_prefix=None,
+                 classwise=False,
+                 proposal_nums=(100, 300, 1000),
+                 iou_thrs=np.arange(0.5, 0.96, 0.05)):
+        """Evaluation in LVIS protocol.
+
+        Args:
+            results (list[list | tuple]): Testing results of the dataset.
+            metric (str | list[str]): Metrics to be evaluated. Options are
+                'bbox', 'segm', 'proposal', 'proposal_fast'.
+            logger (logging.Logger | str | None): Logger used for printing
+                related information during evaluation. Default: None.
+            jsonfile_prefix (str | None): The prefix of json files. It
+                includes the file path and the prefix of filename, e.g.,
+                "a/b/prefix". If not specified, a temp file will be created.
+                Default: None.
+            classwise (bool): Whether to evaluate the AP for each class.
+            proposal_nums (Sequence[int]): Proposal number used for evaluating
+                recalls, such as recall@100, recall@1000.
+                Default: (100, 300, 1000).
+            iou_thrs (Sequence[float]): IoU threshold used for evaluating
+                recalls. If set to a list, the average recall of all IoUs will
+                also be computed. Default: ``np.arange(0.5, 0.96, 0.05)``.
+
+        Returns:
+            dict[str, float]: LVIS style metrics.
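+
+        Example (illustrative; assumes ``dataset`` is a constructed
+        ``LVISV05Dataset`` and ``results`` comes from an inference run):
+            >>> metrics = dataset.evaluate(results, metric='bbox')
+            >>> metrics['bbox_AP']  # doctest: +SKIP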
+ """ + + try: + import lvis + if getattr(lvis, '__version__', '0') >= '10.5.3': + warnings.warn( + 'mmlvis is deprecated, please install official lvis-api by "pip install git+https://github.com/lvis-dataset/lvis-api.git"', # noqa: E501 + UserWarning) + from lvis import LVISResults, LVISEval + except ImportError: + raise ImportError( + 'Package lvis is not installed. Please run "pip install git+https://github.com/lvis-dataset/lvis-api.git".' # noqa: E501 + ) + assert isinstance(results, list), 'results must be a list' + assert len(results) == len(self), ( + 'The length of results is not equal to the dataset len: {} != {}'. + format(len(results), len(self))) + + metrics = metric if isinstance(metric, list) else [metric] + allowed_metrics = ['bbox', 'segm', 'proposal', 'proposal_fast'] + for metric in metrics: + if metric not in allowed_metrics: + raise KeyError('metric {} is not supported'.format(metric)) + + if jsonfile_prefix is None: + tmp_dir = tempfile.TemporaryDirectory() + jsonfile_prefix = osp.join(tmp_dir.name, 'results') + else: + tmp_dir = None + result_files = self.results2json(results, jsonfile_prefix) + + eval_results = OrderedDict() + # get original api + lvis_gt = self.coco + for metric in metrics: + msg = 'Evaluating {}...'.format(metric) + if logger is None: + msg = '\n' + msg + print_log(msg, logger=logger) + + if metric == 'proposal_fast': + ar = self.fast_eval_recall( + results, proposal_nums, iou_thrs, logger='silent') + log_msg = [] + for i, num in enumerate(proposal_nums): + eval_results['AR@{}'.format(num)] = ar[i] + log_msg.append('\nAR@{}\t{:.4f}'.format(num, ar[i])) + log_msg = ''.join(log_msg) + print_log(log_msg, logger=logger) + continue + + if metric not in result_files: + raise KeyError('{} is not in results'.format(metric)) + try: + lvis_dt = LVISResults(lvis_gt, result_files[metric]) + except IndexError: + print_log( + 'The testing results of the whole dataset is empty.', + logger=logger, + level=logging.ERROR) + break + + iou_type = 'bbox' if metric == 'proposal' else metric + lvis_eval = LVISEval(lvis_gt, lvis_dt, iou_type) + lvis_eval.params.imgIds = self.img_ids + if metric == 'proposal': + lvis_eval.params.useCats = 0 + lvis_eval.params.maxDets = list(proposal_nums) + lvis_eval.evaluate() + lvis_eval.accumulate() + lvis_eval.summarize() + for k, v in lvis_eval.get_results().items(): + if k.startswith('AR'): + val = float('{:.3f}'.format(float(v))) + eval_results[k] = val + else: + lvis_eval.evaluate() + lvis_eval.accumulate() + lvis_eval.summarize() + lvis_results = lvis_eval.get_results() + if classwise: # Compute per-category AP + # Compute per-category AP + # from https://github.com/facebookresearch/detectron2/ + precisions = lvis_eval.eval['precision'] + # precision: (iou, recall, cls, area range, max dets) + assert len(self.cat_ids) == precisions.shape[2] + + results_per_category = [] + for idx, catId in enumerate(self.cat_ids): + # area range index 0: all area ranges + # max dets index -1: typically 100 per image + nm = self.coco.load_cats(catId)[0] + precision = precisions[:, :, idx, 0, -1] + precision = precision[precision > -1] + if precision.size: + ap = np.mean(precision) + else: + ap = float('nan') + results_per_category.append( + (f'{nm["name"]}', f'{float(ap):0.3f}')) + + num_columns = min(6, len(results_per_category) * 2) + results_flatten = list( + itertools.chain(*results_per_category)) + headers = ['category', 'AP'] * (num_columns // 2) + results_2d = itertools.zip_longest(*[ + results_flatten[i::num_columns] + for i in 
range(num_columns) + ]) + table_data = [headers] + table_data += [result for result in results_2d] + table = AsciiTable(table_data) + print_log('\n' + table.table, logger=logger) + + for k, v in lvis_results.items(): + if k.startswith('AP'): + key = '{}_{}'.format(metric, k) + val = float('{:.3f}'.format(float(v))) + eval_results[key] = val + ap_summary = ' '.join([ + '{}:{:.3f}'.format(k, float(v)) + for k, v in lvis_results.items() if k.startswith('AP') + ]) + eval_results['{}_mAP_copypaste'.format(metric)] = ap_summary + lvis_eval.print_results() + if tmp_dir is not None: + tmp_dir.cleanup() + return eval_results + + +LVISDataset = LVISV05Dataset +DATASETS.register_module(name='LVISDataset', module=LVISDataset) + + +@DATASETS.register_module() +class LVISV1Dataset(LVISDataset): + + CLASSES = ( + 'aerosol_can', 'air_conditioner', 'airplane', 'alarm_clock', 'alcohol', + 'alligator', 'almond', 'ambulance', 'amplifier', 'anklet', 'antenna', + 'apple', 'applesauce', 'apricot', 'apron', 'aquarium', + 'arctic_(type_of_shoe)', 'armband', 'armchair', 'armoire', 'armor', + 'artichoke', 'trash_can', 'ashtray', 'asparagus', 'atomizer', + 'avocado', 'award', 'awning', 'ax', 'baboon', 'baby_buggy', + 'basketball_backboard', 'backpack', 'handbag', 'suitcase', 'bagel', + 'bagpipe', 'baguet', 'bait', 'ball', 'ballet_skirt', 'balloon', + 'bamboo', 'banana', 'Band_Aid', 'bandage', 'bandanna', 'banjo', + 'banner', 'barbell', 'barge', 'barrel', 'barrette', 'barrow', + 'baseball_base', 'baseball', 'baseball_bat', 'baseball_cap', + 'baseball_glove', 'basket', 'basketball', 'bass_horn', 'bat_(animal)', + 'bath_mat', 'bath_towel', 'bathrobe', 'bathtub', 'batter_(food)', + 'battery', 'beachball', 'bead', 'bean_curd', 'beanbag', 'beanie', + 'bear', 'bed', 'bedpan', 'bedspread', 'cow', 'beef_(food)', 'beeper', + 'beer_bottle', 'beer_can', 'beetle', 'bell', 'bell_pepper', 'belt', + 'belt_buckle', 'bench', 'beret', 'bib', 'Bible', 'bicycle', 'visor', + 'billboard', 'binder', 'binoculars', 'bird', 'birdfeeder', 'birdbath', + 'birdcage', 'birdhouse', 'birthday_cake', 'birthday_card', + 'pirate_flag', 'black_sheep', 'blackberry', 'blackboard', 'blanket', + 'blazer', 'blender', 'blimp', 'blinker', 'blouse', 'blueberry', + 'gameboard', 'boat', 'bob', 'bobbin', 'bobby_pin', 'boiled_egg', + 'bolo_tie', 'deadbolt', 'bolt', 'bonnet', 'book', 'bookcase', + 'booklet', 'bookmark', 'boom_microphone', 'boot', 'bottle', + 'bottle_opener', 'bouquet', 'bow_(weapon)', 'bow_(decorative_ribbons)', + 'bow-tie', 'bowl', 'pipe_bowl', 'bowler_hat', 'bowling_ball', 'box', + 'boxing_glove', 'suspenders', 'bracelet', 'brass_plaque', 'brassiere', + 'bread-bin', 'bread', 'breechcloth', 'bridal_gown', 'briefcase', + 'broccoli', 'broach', 'broom', 'brownie', 'brussels_sprouts', + 'bubble_gum', 'bucket', 'horse_buggy', 'bull', 'bulldog', 'bulldozer', + 'bullet_train', 'bulletin_board', 'bulletproof_vest', 'bullhorn', + 'bun', 'bunk_bed', 'buoy', 'burrito', 'bus_(vehicle)', 'business_card', + 'butter', 'butterfly', 'button', 'cab_(taxi)', 'cabana', 'cabin_car', + 'cabinet', 'locker', 'cake', 'calculator', 'calendar', 'calf', + 'camcorder', 'camel', 'camera', 'camera_lens', 'camper_(vehicle)', + 'can', 'can_opener', 'candle', 'candle_holder', 'candy_bar', + 'candy_cane', 'walking_cane', 'canister', 'canoe', 'cantaloup', + 'canteen', 'cap_(headwear)', 'bottle_cap', 'cape', 'cappuccino', + 'car_(automobile)', 'railcar_(part_of_a_train)', 'elevator_car', + 'car_battery', 'identity_card', 'card', 'cardigan', 'cargo_ship', + 'carnation', 
'horse_carriage', 'carrot', 'tote_bag', 'cart', 'carton', + 'cash_register', 'casserole', 'cassette', 'cast', 'cat', 'cauliflower', + 'cayenne_(spice)', 'CD_player', 'celery', 'cellular_telephone', + 'chain_mail', 'chair', 'chaise_longue', 'chalice', 'chandelier', + 'chap', 'checkbook', 'checkerboard', 'cherry', 'chessboard', + 'chicken_(animal)', 'chickpea', 'chili_(vegetable)', 'chime', + 'chinaware', 'crisp_(potato_chip)', 'poker_chip', 'chocolate_bar', + 'chocolate_cake', 'chocolate_milk', 'chocolate_mousse', 'choker', + 'chopping_board', 'chopstick', 'Christmas_tree', 'slide', 'cider', + 'cigar_box', 'cigarette', 'cigarette_case', 'cistern', 'clarinet', + 'clasp', 'cleansing_agent', 'cleat_(for_securing_rope)', 'clementine', + 'clip', 'clipboard', 'clippers_(for_plants)', 'cloak', 'clock', + 'clock_tower', 'clothes_hamper', 'clothespin', 'clutch_bag', 'coaster', + 'coat', 'coat_hanger', 'coatrack', 'cock', 'cockroach', + 'cocoa_(beverage)', 'coconut', 'coffee_maker', 'coffee_table', + 'coffeepot', 'coil', 'coin', 'colander', 'coleslaw', + 'coloring_material', 'combination_lock', 'pacifier', 'comic_book', + 'compass', 'computer_keyboard', 'condiment', 'cone', 'control', + 'convertible_(automobile)', 'sofa_bed', 'cooker', 'cookie', + 'cooking_utensil', 'cooler_(for_food)', 'cork_(bottle_plug)', + 'corkboard', 'corkscrew', 'edible_corn', 'cornbread', 'cornet', + 'cornice', 'cornmeal', 'corset', 'costume', 'cougar', 'coverall', + 'cowbell', 'cowboy_hat', 'crab_(animal)', 'crabmeat', 'cracker', + 'crape', 'crate', 'crayon', 'cream_pitcher', 'crescent_roll', 'crib', + 'crock_pot', 'crossbar', 'crouton', 'crow', 'crowbar', 'crown', + 'crucifix', 'cruise_ship', 'police_cruiser', 'crumb', 'crutch', + 'cub_(animal)', 'cube', 'cucumber', 'cufflink', 'cup', 'trophy_cup', + 'cupboard', 'cupcake', 'hair_curler', 'curling_iron', 'curtain', + 'cushion', 'cylinder', 'cymbal', 'dagger', 'dalmatian', 'dartboard', + 'date_(fruit)', 'deck_chair', 'deer', 'dental_floss', 'desk', + 'detergent', 'diaper', 'diary', 'die', 'dinghy', 'dining_table', 'tux', + 'dish', 'dish_antenna', 'dishrag', 'dishtowel', 'dishwasher', + 'dishwasher_detergent', 'dispenser', 'diving_board', 'Dixie_cup', + 'dog', 'dog_collar', 'doll', 'dollar', 'dollhouse', 'dolphin', + 'domestic_ass', 'doorknob', 'doormat', 'doughnut', 'dove', 'dragonfly', + 'drawer', 'underdrawers', 'dress', 'dress_hat', 'dress_suit', + 'dresser', 'drill', 'drone', 'dropper', 'drum_(musical_instrument)', + 'drumstick', 'duck', 'duckling', 'duct_tape', 'duffel_bag', 'dumbbell', + 'dumpster', 'dustpan', 'eagle', 'earphone', 'earplug', 'earring', + 'easel', 'eclair', 'eel', 'egg', 'egg_roll', 'egg_yolk', 'eggbeater', + 'eggplant', 'electric_chair', 'refrigerator', 'elephant', 'elk', + 'envelope', 'eraser', 'escargot', 'eyepatch', 'falcon', 'fan', + 'faucet', 'fedora', 'ferret', 'Ferris_wheel', 'ferry', 'fig_(fruit)', + 'fighter_jet', 'figurine', 'file_cabinet', 'file_(tool)', 'fire_alarm', + 'fire_engine', 'fire_extinguisher', 'fire_hose', 'fireplace', + 'fireplug', 'first-aid_kit', 'fish', 'fish_(food)', 'fishbowl', + 'fishing_rod', 'flag', 'flagpole', 'flamingo', 'flannel', 'flap', + 'flash', 'flashlight', 'fleece', 'flip-flop_(sandal)', + 'flipper_(footwear)', 'flower_arrangement', 'flute_glass', 'foal', + 'folding_chair', 'food_processor', 'football_(American)', + 'football_helmet', 'footstool', 'fork', 'forklift', 'freight_car', + 'French_toast', 'freshener', 'frisbee', 'frog', 'fruit_juice', + 'frying_pan', 'fudge', 'funnel', 'futon', 'gag', 'garbage', + 
'garbage_truck', 'garden_hose', 'gargle', 'gargoyle', 'garlic', + 'gasmask', 'gazelle', 'gelatin', 'gemstone', 'generator', + 'giant_panda', 'gift_wrap', 'ginger', 'giraffe', 'cincture', + 'glass_(drink_container)', 'globe', 'glove', 'goat', 'goggles', + 'goldfish', 'golf_club', 'golfcart', 'gondola_(boat)', 'goose', + 'gorilla', 'gourd', 'grape', 'grater', 'gravestone', 'gravy_boat', + 'green_bean', 'green_onion', 'griddle', 'grill', 'grits', 'grizzly', + 'grocery_bag', 'guitar', 'gull', 'gun', 'hairbrush', 'hairnet', + 'hairpin', 'halter_top', 'ham', 'hamburger', 'hammer', 'hammock', + 'hamper', 'hamster', 'hair_dryer', 'hand_glass', 'hand_towel', + 'handcart', 'handcuff', 'handkerchief', 'handle', 'handsaw', + 'hardback_book', 'harmonium', 'hat', 'hatbox', 'veil', 'headband', + 'headboard', 'headlight', 'headscarf', 'headset', + 'headstall_(for_horses)', 'heart', 'heater', 'helicopter', 'helmet', + 'heron', 'highchair', 'hinge', 'hippopotamus', 'hockey_stick', 'hog', + 'home_plate_(baseball)', 'honey', 'fume_hood', 'hook', 'hookah', + 'hornet', 'horse', 'hose', 'hot-air_balloon', 'hotplate', 'hot_sauce', + 'hourglass', 'houseboat', 'hummingbird', 'hummus', 'polar_bear', + 'icecream', 'popsicle', 'ice_maker', 'ice_pack', 'ice_skate', + 'igniter', 'inhaler', 'iPod', 'iron_(for_clothing)', 'ironing_board', + 'jacket', 'jam', 'jar', 'jean', 'jeep', 'jelly_bean', 'jersey', + 'jet_plane', 'jewel', 'jewelry', 'joystick', 'jumpsuit', 'kayak', + 'keg', 'kennel', 'kettle', 'key', 'keycard', 'kilt', 'kimono', + 'kitchen_sink', 'kitchen_table', 'kite', 'kitten', 'kiwi_fruit', + 'knee_pad', 'knife', 'knitting_needle', 'knob', 'knocker_(on_a_door)', + 'koala', 'lab_coat', 'ladder', 'ladle', 'ladybug', 'lamb_(animal)', + 'lamb-chop', 'lamp', 'lamppost', 'lampshade', 'lantern', 'lanyard', + 'laptop_computer', 'lasagna', 'latch', 'lawn_mower', 'leather', + 'legging_(clothing)', 'Lego', 'legume', 'lemon', 'lemonade', 'lettuce', + 'license_plate', 'life_buoy', 'life_jacket', 'lightbulb', + 'lightning_rod', 'lime', 'limousine', 'lion', 'lip_balm', 'liquor', + 'lizard', 'log', 'lollipop', 'speaker_(stereo_equipment)', 'loveseat', + 'machine_gun', 'magazine', 'magnet', 'mail_slot', 'mailbox_(at_home)', + 'mallard', 'mallet', 'mammoth', 'manatee', 'mandarin_orange', 'manger', + 'manhole', 'map', 'marker', 'martini', 'mascot', 'mashed_potato', + 'masher', 'mask', 'mast', 'mat_(gym_equipment)', 'matchbox', + 'mattress', 'measuring_cup', 'measuring_stick', 'meatball', 'medicine', + 'melon', 'microphone', 'microscope', 'microwave_oven', 'milestone', + 'milk', 'milk_can', 'milkshake', 'minivan', 'mint_candy', 'mirror', + 'mitten', 'mixer_(kitchen_tool)', 'money', + 'monitor_(computer_equipment) computer_monitor', 'monkey', 'motor', + 'motor_scooter', 'motor_vehicle', 'motorcycle', 'mound_(baseball)', + 'mouse_(computer_equipment)', 'mousepad', 'muffin', 'mug', 'mushroom', + 'music_stool', 'musical_instrument', 'nailfile', 'napkin', + 'neckerchief', 'necklace', 'necktie', 'needle', 'nest', 'newspaper', + 'newsstand', 'nightshirt', 'nosebag_(for_animals)', + 'noseband_(for_animals)', 'notebook', 'notepad', 'nut', 'nutcracker', + 'oar', 'octopus_(food)', 'octopus_(animal)', 'oil_lamp', 'olive_oil', + 'omelet', 'onion', 'orange_(fruit)', 'orange_juice', 'ostrich', + 'ottoman', 'oven', 'overalls_(clothing)', 'owl', 'packet', 'inkpad', + 'pad', 'paddle', 'padlock', 'paintbrush', 'painting', 'pajamas', + 'palette', 'pan_(for_cooking)', 'pan_(metal_container)', 'pancake', + 'pantyhose', 'papaya', 'paper_plate', 
'paper_towel', 'paperback_book', + 'paperweight', 'parachute', 'parakeet', 'parasail_(sports)', 'parasol', + 'parchment', 'parka', 'parking_meter', 'parrot', + 'passenger_car_(part_of_a_train)', 'passenger_ship', 'passport', + 'pastry', 'patty_(food)', 'pea_(food)', 'peach', 'peanut_butter', + 'pear', 'peeler_(tool_for_fruit_and_vegetables)', 'wooden_leg', + 'pegboard', 'pelican', 'pen', 'pencil', 'pencil_box', + 'pencil_sharpener', 'pendulum', 'penguin', 'pennant', 'penny_(coin)', + 'pepper', 'pepper_mill', 'perfume', 'persimmon', 'person', 'pet', + 'pew_(church_bench)', 'phonebook', 'phonograph_record', 'piano', + 'pickle', 'pickup_truck', 'pie', 'pigeon', 'piggy_bank', 'pillow', + 'pin_(non_jewelry)', 'pineapple', 'pinecone', 'ping-pong_ball', + 'pinwheel', 'tobacco_pipe', 'pipe', 'pistol', 'pita_(bread)', + 'pitcher_(vessel_for_liquid)', 'pitchfork', 'pizza', 'place_mat', + 'plate', 'platter', 'playpen', 'pliers', 'plow_(farm_equipment)', + 'plume', 'pocket_watch', 'pocketknife', 'poker_(fire_stirring_tool)', + 'pole', 'polo_shirt', 'poncho', 'pony', 'pool_table', 'pop_(soda)', + 'postbox_(public)', 'postcard', 'poster', 'pot', 'flowerpot', 'potato', + 'potholder', 'pottery', 'pouch', 'power_shovel', 'prawn', 'pretzel', + 'printer', 'projectile_(weapon)', 'projector', 'propeller', 'prune', + 'pudding', 'puffer_(fish)', 'puffin', 'pug-dog', 'pumpkin', 'puncher', + 'puppet', 'puppy', 'quesadilla', 'quiche', 'quilt', 'rabbit', + 'race_car', 'racket', 'radar', 'radiator', 'radio_receiver', 'radish', + 'raft', 'rag_doll', 'raincoat', 'ram_(animal)', 'raspberry', 'rat', + 'razorblade', 'reamer_(juicer)', 'rearview_mirror', 'receipt', + 'recliner', 'record_player', 'reflector', 'remote_control', + 'rhinoceros', 'rib_(food)', 'rifle', 'ring', 'river_boat', 'road_map', + 'robe', 'rocking_chair', 'rodent', 'roller_skate', 'Rollerblade', + 'rolling_pin', 'root_beer', 'router_(computer_equipment)', + 'rubber_band', 'runner_(carpet)', 'plastic_bag', + 'saddle_(on_an_animal)', 'saddle_blanket', 'saddlebag', 'safety_pin', + 'sail', 'salad', 'salad_plate', 'salami', 'salmon_(fish)', + 'salmon_(food)', 'salsa', 'saltshaker', 'sandal_(type_of_shoe)', + 'sandwich', 'satchel', 'saucepan', 'saucer', 'sausage', 'sawhorse', + 'saxophone', 'scale_(measuring_instrument)', 'scarecrow', 'scarf', + 'school_bus', 'scissors', 'scoreboard', 'scraper', 'screwdriver', + 'scrubbing_brush', 'sculpture', 'seabird', 'seahorse', 'seaplane', + 'seashell', 'sewing_machine', 'shaker', 'shampoo', 'shark', + 'sharpener', 'Sharpie', 'shaver_(electric)', 'shaving_cream', 'shawl', + 'shears', 'sheep', 'shepherd_dog', 'sherbert', 'shield', 'shirt', + 'shoe', 'shopping_bag', 'shopping_cart', 'short_pants', 'shot_glass', + 'shoulder_bag', 'shovel', 'shower_head', 'shower_cap', + 'shower_curtain', 'shredder_(for_paper)', 'signboard', 'silo', 'sink', + 'skateboard', 'skewer', 'ski', 'ski_boot', 'ski_parka', 'ski_pole', + 'skirt', 'skullcap', 'sled', 'sleeping_bag', 'sling_(bandage)', + 'slipper_(footwear)', 'smoothie', 'snake', 'snowboard', 'snowman', + 'snowmobile', 'soap', 'soccer_ball', 'sock', 'sofa', 'softball', + 'solar_array', 'sombrero', 'soup', 'soup_bowl', 'soupspoon', + 'sour_cream', 'soya_milk', 'space_shuttle', 'sparkler_(fireworks)', + 'spatula', 'spear', 'spectacles', 'spice_rack', 'spider', 'crawfish', + 'sponge', 'spoon', 'sportswear', 'spotlight', 'squid_(food)', + 'squirrel', 'stagecoach', 'stapler_(stapling_machine)', 'starfish', + 'statue_(sculpture)', 'steak_(food)', 'steak_knife', 'steering_wheel', + 
'stepladder', 'step_stool', 'stereo_(sound_system)', 'stew', 'stirrer', + 'stirrup', 'stool', 'stop_sign', 'brake_light', 'stove', 'strainer', + 'strap', 'straw_(for_drinking)', 'strawberry', 'street_sign', + 'streetlight', 'string_cheese', 'stylus', 'subwoofer', 'sugar_bowl', + 'sugarcane_(plant)', 'suit_(clothing)', 'sunflower', 'sunglasses', + 'sunhat', 'surfboard', 'sushi', 'mop', 'sweat_pants', 'sweatband', + 'sweater', 'sweatshirt', 'sweet_potato', 'swimsuit', 'sword', + 'syringe', 'Tabasco_sauce', 'table-tennis_table', 'table', + 'table_lamp', 'tablecloth', 'tachometer', 'taco', 'tag', 'taillight', + 'tambourine', 'army_tank', 'tank_(storage_vessel)', + 'tank_top_(clothing)', 'tape_(sticky_cloth_or_paper)', 'tape_measure', + 'tapestry', 'tarp', 'tartan', 'tassel', 'tea_bag', 'teacup', + 'teakettle', 'teapot', 'teddy_bear', 'telephone', 'telephone_booth', + 'telephone_pole', 'telephoto_lens', 'television_camera', + 'television_set', 'tennis_ball', 'tennis_racket', 'tequila', + 'thermometer', 'thermos_bottle', 'thermostat', 'thimble', 'thread', + 'thumbtack', 'tiara', 'tiger', 'tights_(clothing)', 'timer', 'tinfoil', + 'tinsel', 'tissue_paper', 'toast_(food)', 'toaster', 'toaster_oven', + 'toilet', 'toilet_tissue', 'tomato', 'tongs', 'toolbox', 'toothbrush', + 'toothpaste', 'toothpick', 'cover', 'tortilla', 'tow_truck', 'towel', + 'towel_rack', 'toy', 'tractor_(farm_equipment)', 'traffic_light', + 'dirt_bike', 'trailer_truck', 'train_(railroad_vehicle)', 'trampoline', + 'tray', 'trench_coat', 'triangle_(musical_instrument)', 'tricycle', + 'tripod', 'trousers', 'truck', 'truffle_(chocolate)', 'trunk', 'vat', + 'turban', 'turkey_(food)', 'turnip', 'turtle', 'turtleneck_(clothing)', + 'typewriter', 'umbrella', 'underwear', 'unicycle', 'urinal', 'urn', + 'vacuum_cleaner', 'vase', 'vending_machine', 'vent', 'vest', + 'videotape', 'vinegar', 'violin', 'vodka', 'volleyball', 'vulture', + 'waffle', 'waffle_iron', 'wagon', 'wagon_wheel', 'walking_stick', + 'wall_clock', 'wall_socket', 'wallet', 'walrus', 'wardrobe', + 'washbasin', 'automatic_washer', 'watch', 'water_bottle', + 'water_cooler', 'water_faucet', 'water_heater', 'water_jug', + 'water_gun', 'water_scooter', 'water_ski', 'water_tower', + 'watering_can', 'watermelon', 'weathervane', 'webcam', 'wedding_cake', + 'wedding_ring', 'wet_suit', 'wheel', 'wheelchair', 'whipped_cream', + 'whistle', 'wig', 'wind_chime', 'windmill', 'window_box_(for_plants)', + 'windshield_wiper', 'windsock', 'wine_bottle', 'wine_bucket', + 'wineglass', 'blinder_(for_horses)', 'wok', 'wolf', 'wooden_spoon', + 'wreath', 'wrench', 'wristband', 'wristlet', 'yacht', 'yogurt', + 'yoke_(animal_equipment)', 'zebra', 'zucchini') + + def load_annotations(self, ann_file): + try: + import lvis + if getattr(lvis, '__version__', '0') >= '10.5.3': + warnings.warn( + 'mmlvis is deprecated, please install official lvis-api by "pip install git+https://github.com/lvis-dataset/lvis-api.git"', # noqa: E501 + UserWarning) + from lvis import LVIS + except ImportError: + raise ImportError( + 'Package lvis is not installed. Please run "pip install git+https://github.com/lvis-dataset/lvis-api.git".' # noqa: E501 + ) + self.coco = LVIS(ann_file) + self.cat_ids = self.coco.get_cat_ids() + self.cat2label = {cat_id: i for i, cat_id in enumerate(self.cat_ids)} + self.img_ids = self.coco.get_img_ids() + data_infos = [] + for i in self.img_ids: + info = self.coco.load_imgs([i])[0] + # coco_url is used in LVISv1 instead of file_name + # e.g. 
http://images.cocodataset.org/train2017/000000391895.jpg
+            # train/val split is specified in url
+            info['filename'] = info['coco_url'].replace(
+                'http://images.cocodataset.org/', '')
+            data_infos.append(info)
+        return data_infos
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..9559969a25be617fb588f5f01a7235a58c6a63cd
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/__init__.py
@@ -0,0 +1,26 @@
+from .auto_augment import (AutoAugment, BrightnessTransform, ColorTransform,
+                           ContrastTransform, EqualizeTransform, Rotate, Shear,
+                           Translate)
+from .compose import Compose
+from .formating import (Collect, DefaultFormatBundle, ImageToTensor,
+                        ToDataContainer, ToTensor, Transpose, to_tensor)
+from .instaboost import InstaBoost
+from .loading import (LoadAnnotations, LoadImageFromFile, LoadImageFromWebcam,
+                      LoadMultiChannelImageFromFiles, LoadProposals)
+from .test_time_aug import MultiScaleFlipAug
+from .transforms import (Albu, CutOut, Expand, MinIoURandomCrop, Normalize,
+                         Pad, PhotoMetricDistortion, RandomCenterCropPad,
+                         RandomCrop, RandomFlip, RandomShift, Resize,
+                         SegRescale)
+
+__all__ = [
+    'Compose', 'to_tensor', 'ToTensor', 'ImageToTensor', 'ToDataContainer',
+    'Transpose', 'Collect', 'DefaultFormatBundle', 'LoadAnnotations',
+    'LoadImageFromFile', 'LoadImageFromWebcam',
+    'LoadMultiChannelImageFromFiles', 'LoadProposals', 'MultiScaleFlipAug',
+    'Resize', 'RandomFlip', 'Pad', 'RandomCrop', 'Normalize', 'SegRescale',
+    'MinIoURandomCrop', 'Expand', 'PhotoMetricDistortion', 'Albu',
+    'InstaBoost', 'RandomCenterCropPad', 'AutoAugment', 'CutOut', 'Shear',
+    'Rotate', 'ColorTransform', 'EqualizeTransform', 'BrightnessTransform',
+    'ContrastTransform', 'Translate', 'RandomShift'
+]
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/auto_augment.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/auto_augment.py
new file mode 100644
index 0000000000000000000000000000000000000000..3e0b1e16c05872869d6f8445eda67e7e945c3f39
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/auto_augment.py
@@ -0,0 +1,890 @@
+import copy
+
+import cv2
+import mmcv
+import numpy as np
+
+from ..builder import PIPELINES
+from .compose import Compose
+
+_MAX_LEVEL = 10
+
+
+def level_to_value(level, max_value):
+    """Map from level to values based on max_value."""
+    return (level / _MAX_LEVEL) * max_value
+
+
+def enhance_level_to_value(level, a=1.8, b=0.1):
+    """Map from level to values."""
+    return (level / _MAX_LEVEL) * a + b
+
+
+def random_negative(value, random_negative_prob):
+    """Randomly negate value based on random_negative_prob."""
+    return -value if np.random.rand() < random_negative_prob else value
+
+
+def bbox2fields():
+    """The key correspondence from bboxes to labels, masks and
+    segmentations."""
+    bbox2label = {
+        'gt_bboxes': 'gt_labels',
+        'gt_bboxes_ignore': 'gt_labels_ignore'
+    }
+    bbox2mask = {
+        'gt_bboxes': 'gt_masks',
+        'gt_bboxes_ignore': 'gt_masks_ignore'
+    }
+    bbox2seg = {
+        'gt_bboxes': 'gt_semantic_seg',
+    }
+    return bbox2label, bbox2mask, bbox2seg
+
+
+@PIPELINES.register_module()
+class AutoAugment:
+    """Auto augmentation.
+
+    This data augmentation is proposed in `Learning Data Augmentation
+    Strategies for Object Detection <https://arxiv.org/abs/1906.11172>`_.
+
+    TODO: Implement the 'Sharpness' transform
+
+    Args:
+        policies (list[list[dict]]): The policies of auto augmentation. Each
+            policy in ``policies`` is a specific augmentation policy, and is
+            composed of several augmentations (dict). When AutoAugment is
+            called, a random policy in ``policies`` will be selected to
+            augment images.
+
+    Examples:
+        >>> replace = (104, 116, 124)
+        >>> policies = [
+        >>>     [
+        >>>         dict(type='Sharpness', prob=0.0, level=8),
+        >>>         dict(
+        >>>             type='Shear',
+        >>>             prob=0.4,
+        >>>             level=0,
+        >>>             replace=replace,
+        >>>             axis='x')
+        >>>     ],
+        >>>     [
+        >>>         dict(
+        >>>             type='Rotate',
+        >>>             prob=0.6,
+        >>>             level=10,
+        >>>             replace=replace),
+        >>>         dict(type='Color', prob=1.0, level=6)
+        >>>     ]
+        >>> ]
+        >>> augmentation = AutoAugment(policies)
+        >>> img = np.ones((100, 100, 3))
+        >>> gt_bboxes = np.ones((10, 4))
+        >>> results = dict(img=img, gt_bboxes=gt_bboxes)
+        >>> results = augmentation(results)
+    """
+
+    def __init__(self, policies):
+        assert isinstance(policies, list) and len(policies) > 0, \
+            'Policies must be a non-empty list.'
+        for policy in policies:
+            assert isinstance(policy, list) and len(policy) > 0, \
+                'Each policy in policies must be a non-empty list.'
+            for augment in policy:
+                assert isinstance(augment, dict) and 'type' in augment, \
+                    'Each specific augmentation must be a dict with key' \
+                    ' "type".'
+
+        self.policies = copy.deepcopy(policies)
+        self.transforms = [Compose(policy) for policy in self.policies]
+
+    def __call__(self, results):
+        transform = np.random.choice(self.transforms)
+        return transform(results)
+
+    def __repr__(self):
+        return f'{self.__class__.__name__}(policies={self.policies})'
+
+
+@PIPELINES.register_module()
+class Shear:
+    """Apply Shear Transformation to image (and its corresponding bbox, mask,
+    segmentation).
+
+    Args:
+        level (int | float): The level should be in range [0,_MAX_LEVEL].
+        img_fill_val (int | float | tuple): The filled values for image border.
+            If float, the same fill value will be used for all the three
+            channels of image. If tuple, it should have 3 elements.
+        seg_ignore_label (int): The fill value used for segmentation map.
+            Note this value must equal ``ignore_label`` in ``semantic_head``
+            of the corresponding config. Default 255.
+        prob (float): The probability for performing Shear and should be in
+            range [0, 1].
+        direction (str): The direction for shear, either "horizontal"
+            or "vertical".
+        max_shear_magnitude (float): The maximum magnitude for Shear
+            transformation.
+        random_negative_prob (float): The probability that turns the
+            offset negative. Should be in range [0, 1].
+        interpolation (str): Same as in :func:`mmcv.imshear`.
+    """
+
+    def __init__(self,
+                 level,
+                 img_fill_val=128,
+                 seg_ignore_label=255,
+                 prob=0.5,
+                 direction='horizontal',
+                 max_shear_magnitude=0.3,
+                 random_negative_prob=0.5,
+                 interpolation='bilinear'):
+        assert isinstance(level, (int, float)), 'The level must be type ' \
+            f'int or float, got {type(level)}.'
+        assert 0 <= level <= _MAX_LEVEL, 'The level should be in range ' \
+            f'[0,{_MAX_LEVEL}], got {level}.'
+        if isinstance(img_fill_val, (float, int)):
+            img_fill_val = tuple([float(img_fill_val)] * 3)
+        elif isinstance(img_fill_val, tuple):
+            assert len(img_fill_val) == 3, 'img_fill_val as tuple must ' \
+                f'have 3 elements. got {len(img_fill_val)}.'
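+            # cast each element to float so the border fill value passed to
+            # mmcv.imshear below has a uniform numeric type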
+ img_fill_val = tuple([float(val) for val in img_fill_val]) + else: + raise ValueError( + 'img_fill_val must be float or tuple with 3 elements.') + assert np.all([0 <= val <= 255 for val in img_fill_val]), 'all ' \ + 'elements of img_fill_val should be within range [0,255]. ' \ + f'got {img_fill_val}.' + assert 0 <= prob <= 1.0, 'The probability of shear should be in ' \ + f'range [0,1]. got {prob}.' + assert direction in ('horizontal', 'vertical'), 'direction must ' \ + f'be either "horizontal" or "vertical". got {direction}.' + assert isinstance(max_shear_magnitude, float), 'max_shear_magnitude ' \ + f'should be type float. got {type(max_shear_magnitude)}.' + assert 0. <= max_shear_magnitude <= 1., \ + 'max_shear_magnitude should be in range [0,1]. ' \ + f'got {max_shear_magnitude}.' + self.level = level + self.magnitude = level_to_value(level, max_shear_magnitude) + self.img_fill_val = img_fill_val + self.seg_ignore_label = seg_ignore_label + self.prob = prob + self.direction = direction + self.max_shear_magnitude = max_shear_magnitude + self.random_negative_prob = random_negative_prob + self.interpolation = interpolation + + def _shear_img(self, + results, + magnitude, + direction='horizontal', + interpolation='bilinear'): + """Shear the image. + + Args: + results (dict): Result dict from loading pipeline. + magnitude (int | float): The magnitude used for shear. + direction (str): The direction for shear, either "horizontal" + or "vertical". + interpolation (str): Same as in :func:`mmcv.imshear`. + """ + for key in results.get('img_fields', ['img']): + img = results[key] + img_sheared = mmcv.imshear( + img, + magnitude, + direction, + border_value=self.img_fill_val, + interpolation=interpolation) + results[key] = img_sheared.astype(img.dtype) + + def _shear_bboxes(self, results, magnitude): + """Shear the bboxes.""" + h, w, c = results['img_shape'] + if self.direction == 'horizontal': + shear_matrix = np.stack([[1, magnitude], + [0, 1]]).astype(np.float32) # [2, 2] + else: + shear_matrix = np.stack([[1, 0], [magnitude, + 1]]).astype(np.float32) + for key in results.get('bbox_fields', []): + min_x, min_y, max_x, max_y = np.split( + results[key], results[key].shape[-1], axis=-1) + coordinates = np.stack([[min_x, min_y], [max_x, min_y], + [min_x, max_y], + [max_x, max_y]]) # [4, 2, nb_box, 1] + coordinates = coordinates[..., 0].transpose( + (2, 1, 0)).astype(np.float32) # [nb_box, 2, 4] + new_coords = np.matmul(shear_matrix[None, :, :], + coordinates) # [nb_box, 2, 4] + min_x = np.min(new_coords[:, 0, :], axis=-1) + min_y = np.min(new_coords[:, 1, :], axis=-1) + max_x = np.max(new_coords[:, 0, :], axis=-1) + max_y = np.max(new_coords[:, 1, :], axis=-1) + min_x = np.clip(min_x, a_min=0, a_max=w) + min_y = np.clip(min_y, a_min=0, a_max=h) + max_x = np.clip(max_x, a_min=min_x, a_max=w) + max_y = np.clip(max_y, a_min=min_y, a_max=h) + results[key] = np.stack([min_x, min_y, max_x, max_y], + axis=-1).astype(results[key].dtype) + + def _shear_masks(self, + results, + magnitude, + direction='horizontal', + fill_val=0, + interpolation='bilinear'): + """Shear the masks.""" + h, w, c = results['img_shape'] + for key in results.get('mask_fields', []): + masks = results[key] + results[key] = masks.shear((h, w), + magnitude, + direction, + border_value=fill_val, + interpolation=interpolation) + + def _shear_seg(self, + results, + magnitude, + direction='horizontal', + fill_val=255, + interpolation='bilinear'): + """Shear the segmentation maps.""" + for key in results.get('seg_fields',
[]): + seg = results[key] + results[key] = mmcv.imshear( + seg, + magnitude, + direction, + border_value=fill_val, + interpolation=interpolation).astype(seg.dtype) + + def _filter_invalid(self, results, min_bbox_size=0): + """Filter bboxes and corresponding masks too small after shear + augmentation.""" + bbox2label, bbox2mask, _ = bbox2fields() + for key in results.get('bbox_fields', []): + bbox_w = results[key][:, 2] - results[key][:, 0] + bbox_h = results[key][:, 3] - results[key][:, 1] + valid_inds = (bbox_w > min_bbox_size) & (bbox_h > min_bbox_size) + valid_inds = np.nonzero(valid_inds)[0] + results[key] = results[key][valid_inds] + # label fields. e.g. gt_labels and gt_labels_ignore + label_key = bbox2label.get(key) + if label_key in results: + results[label_key] = results[label_key][valid_inds] + # mask fields, e.g. gt_masks and gt_masks_ignore + mask_key = bbox2mask.get(key) + if mask_key in results: + results[mask_key] = results[mask_key][valid_inds] + + def __call__(self, results): + """Call function to shear images, bounding boxes, masks and semantic + segmentation maps. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Sheared results. + """ + if np.random.rand() > self.prob: + return results + magnitude = random_negative(self.magnitude, self.random_negative_prob) + self._shear_img(results, magnitude, self.direction, self.interpolation) + self._shear_bboxes(results, magnitude) + # fill_val set to 0 for background of mask. + self._shear_masks( + results, + magnitude, + self.direction, + fill_val=0, + interpolation=self.interpolation) + self._shear_seg( + results, + magnitude, + self.direction, + fill_val=self.seg_ignore_label, + interpolation=self.interpolation) + self._filter_invalid(results) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(level={self.level}, ' + repr_str += f'img_fill_val={self.img_fill_val}, ' + repr_str += f'seg_ignore_label={self.seg_ignore_label}, ' + repr_str += f'prob={self.prob}, ' + repr_str += f'direction={self.direction}, ' + repr_str += f'max_shear_magnitude={self.max_shear_magnitude}, ' + repr_str += f'random_negative_prob={self.random_negative_prob}, ' + repr_str += f'interpolation={self.interpolation})' + return repr_str + + +@PIPELINES.register_module() +class Rotate: + """Apply Rotate Transformation to image (and its corresponding bbox, mask, + segmentation). + + Args: + level (int | float): The level should be in range (0,_MAX_LEVEL]. + scale (int | float): Isotropic scale factor. Same in + ``mmcv.imrotate``. + center (int | float | tuple[float]): Center point (w, h) of the + rotation in the source image. If None, the center of the + image will be used. Same in ``mmcv.imrotate``. + img_fill_val (int | float | tuple): The fill value for image border. + If float, the same value will be used for all the three + channels of image. If tuple, the should be 3 elements (e.g. + equals the number of channels for image). + seg_ignore_label (int): The fill value used for segmentation map. + Note this value must equals ``ignore_label`` in ``semantic_head`` + of the corresponding config. Default 255. + prob (float): The probability for perform transformation and + should be in range 0 to 1. + max_rotate_angle (int | float): The maximum angles for rotate + transformation. + random_negative_prob (float): The probability that turns the + offset negative. 
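To make the `_shear_bboxes` arithmetic above concrete, here is a small standalone sketch (plain NumPy, no mmdet needed) of how one horizontal shear with magnitude 0.3 moves a box's four corners and re-fits the axis-aligned box:

```python
import numpy as np

magnitude, w, h = 0.3, 100, 100                         # shear factor, image width/height
shear = np.array([[1, magnitude], [0, 1]], np.float32)  # horizontal shear: x' = x + 0.3*y

# Four corners of the box [x1, y1, x2, y2] = [10, 20, 40, 60]
corners = np.array([[10, 20], [40, 20], [10, 60], [40, 60]], np.float32)
new = corners @ shear.T                                 # sheared corners
x1, y1 = new.min(axis=0)
x2, y2 = new.max(axis=0)
# Re-fit an axis-aligned box and clip to the image, as _shear_bboxes does
x1, x2 = np.clip([x1, x2], 0, w)
y1, y2 = np.clip([y1, y2], 0, h)
print([x1, y1, x2, y2])                                 # [16.0, 20.0, 58.0, 60.0]
```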
+ """ + + def __init__(self, + level, + scale=1, + center=None, + img_fill_val=128, + seg_ignore_label=255, + prob=0.5, + max_rotate_angle=30, + random_negative_prob=0.5): + assert isinstance(level, (int, float)), \ + f'The level must be type int or float. got {type(level)}.' + assert 0 <= level <= _MAX_LEVEL, \ + f'The level should be in range (0,{_MAX_LEVEL}]. got {level}.' + assert isinstance(scale, (int, float)), \ + f'The scale must be type int or float. got type {type(scale)}.' + if isinstance(center, (int, float)): + center = (center, center) + elif isinstance(center, tuple): + assert len(center) == 2, 'center with type tuple must have '\ + f'2 elements. got {len(center)} elements.' + else: + assert center is None, 'center must be None or type int, '\ + f'float or tuple, got type {type(center)}.' + if isinstance(img_fill_val, (float, int)): + img_fill_val = tuple([float(img_fill_val)] * 3) + elif isinstance(img_fill_val, tuple): + assert len(img_fill_val) == 3, 'img_fill_val as tuple must '\ + f'have 3 elements. got {len(img_fill_val)}.' + img_fill_val = tuple([float(val) for val in img_fill_val]) + else: + raise ValueError( + 'img_fill_val must be float or tuple with 3 elements.') + assert np.all([0 <= val <= 255 for val in img_fill_val]), \ + 'all elements of img_fill_val should between range [0,255]. '\ + f'got {img_fill_val}.' + assert 0 <= prob <= 1.0, 'The probability should be in range [0,1]. '\ + 'got {prob}.' + assert isinstance(max_rotate_angle, (int, float)), 'max_rotate_angle '\ + f'should be type int or float. got type {type(max_rotate_angle)}.' + self.level = level + self.scale = scale + # Rotation angle in degrees. Positive values mean + # clockwise rotation. + self.angle = level_to_value(level, max_rotate_angle) + self.center = center + self.img_fill_val = img_fill_val + self.seg_ignore_label = seg_ignore_label + self.prob = prob + self.max_rotate_angle = max_rotate_angle + self.random_negative_prob = random_negative_prob + + def _rotate_img(self, results, angle, center=None, scale=1.0): + """Rotate the image. + + Args: + results (dict): Result dict from loading pipeline. + angle (float): Rotation angle in degrees, positive values + mean clockwise rotation. Same in ``mmcv.imrotate``. + center (tuple[float], optional): Center point (w, h) of the + rotation. Same in ``mmcv.imrotate``. + scale (int | float): Isotropic scale factor. Same in + ``mmcv.imrotate``. 
+ """ + for key in results.get('img_fields', ['img']): + img = results[key].copy() + img_rotated = mmcv.imrotate( + img, angle, center, scale, border_value=self.img_fill_val) + results[key] = img_rotated.astype(img.dtype) + + def _rotate_bboxes(self, results, rotate_matrix): + """Rotate the bboxes.""" + h, w, c = results['img_shape'] + for key in results.get('bbox_fields', []): + min_x, min_y, max_x, max_y = np.split( + results[key], results[key].shape[-1], axis=-1) + coordinates = np.stack([[min_x, min_y], [max_x, min_y], + [min_x, max_y], + [max_x, max_y]]) # [4, 2, nb_bbox, 1] + # pad 1 to convert from format [x, y] to homogeneous + # coordinates format [x, y, 1] + coordinates = np.concatenate( + (coordinates, + np.ones((4, 1, coordinates.shape[2], 1), coordinates.dtype)), + axis=1) # [4, 3, nb_bbox, 1] + coordinates = coordinates.transpose( + (2, 0, 1, 3)) # [nb_bbox, 4, 3, 1] + rotated_coords = np.matmul(rotate_matrix, + coordinates) # [nb_bbox, 4, 2, 1] + rotated_coords = rotated_coords[..., 0] # [nb_bbox, 4, 2] + min_x, min_y = np.min( + rotated_coords[:, :, 0], axis=1), np.min( + rotated_coords[:, :, 1], axis=1) + max_x, max_y = np.max( + rotated_coords[:, :, 0], axis=1), np.max( + rotated_coords[:, :, 1], axis=1) + min_x, min_y = np.clip( + min_x, a_min=0, a_max=w), np.clip( + min_y, a_min=0, a_max=h) + max_x, max_y = np.clip( + max_x, a_min=min_x, a_max=w), np.clip( + max_y, a_min=min_y, a_max=h) + results[key] = np.stack([min_x, min_y, max_x, max_y], + axis=-1).astype(results[key].dtype) + + def _rotate_masks(self, + results, + angle, + center=None, + scale=1.0, + fill_val=0): + """Rotate the masks.""" + h, w, c = results['img_shape'] + for key in results.get('mask_fields', []): + masks = results[key] + results[key] = masks.rotate((h, w), angle, center, scale, fill_val) + + def _rotate_seg(self, + results, + angle, + center=None, + scale=1.0, + fill_val=255): + """Rotate the segmentation map.""" + for key in results.get('seg_fields', []): + seg = results[key].copy() + results[key] = mmcv.imrotate( + seg, angle, center, scale, + border_value=fill_val).astype(seg.dtype) + + def _filter_invalid(self, results, min_bbox_size=0): + """Filter bboxes and corresponding masks too small after rotate + augmentation.""" + bbox2label, bbox2mask, _ = bbox2fields() + for key in results.get('bbox_fields', []): + bbox_w = results[key][:, 2] - results[key][:, 0] + bbox_h = results[key][:, 3] - results[key][:, 1] + valid_inds = (bbox_w > min_bbox_size) & (bbox_h > min_bbox_size) + valid_inds = np.nonzero(valid_inds)[0] + results[key] = results[key][valid_inds] + # label fields. e.g. gt_labels and gt_labels_ignore + label_key = bbox2label.get(key) + if label_key in results: + results[label_key] = results[label_key][valid_inds] + # mask fields, e.g. gt_masks and gt_masks_ignore + mask_key = bbox2mask.get(key) + if mask_key in results: + results[mask_key] = results[mask_key][valid_inds] + + def __call__(self, results): + """Call function to rotate images, bounding boxes, masks and semantic + segmentation maps. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Rotated results. 
+ """ + if np.random.rand() > self.prob: + return results + h, w = results['img'].shape[:2] + center = self.center + if center is None: + center = ((w - 1) * 0.5, (h - 1) * 0.5) + angle = random_negative(self.angle, self.random_negative_prob) + self._rotate_img(results, angle, center, self.scale) + rotate_matrix = cv2.getRotationMatrix2D(center, -angle, self.scale) + self._rotate_bboxes(results, rotate_matrix) + self._rotate_masks(results, angle, center, self.scale, fill_val=0) + self._rotate_seg( + results, angle, center, self.scale, fill_val=self.seg_ignore_label) + self._filter_invalid(results) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(level={self.level}, ' + repr_str += f'scale={self.scale}, ' + repr_str += f'center={self.center}, ' + repr_str += f'img_fill_val={self.img_fill_val}, ' + repr_str += f'seg_ignore_label={self.seg_ignore_label}, ' + repr_str += f'prob={self.prob}, ' + repr_str += f'max_rotate_angle={self.max_rotate_angle}, ' + repr_str += f'random_negative_prob={self.random_negative_prob})' + return repr_str + + +@PIPELINES.register_module() +class Translate: + """Translate the images, bboxes, masks and segmentation maps horizontally + or vertically. + + Args: + level (int | float): The level for Translate and should be in + range [0,_MAX_LEVEL]. + prob (float): The probability for performing translation and + should be in range [0, 1]. + img_fill_val (int | float | tuple): The filled value for image + border. If float, the same fill value will be used for all + the three channels of image. If tuple, the should be 3 + elements (e.g. equals the number of channels for image). + seg_ignore_label (int): The fill value used for segmentation map. + Note this value must equals ``ignore_label`` in ``semantic_head`` + of the corresponding config. Default 255. + direction (str): The translate direction, either "horizontal" + or "vertical". + max_translate_offset (int | float): The maximum pixel's offset for + Translate. + random_negative_prob (float): The probability that turns the + offset negative. + min_size (int | float): The minimum pixel for filtering + invalid bboxes after the translation. + """ + + def __init__(self, + level, + prob=0.5, + img_fill_val=128, + seg_ignore_label=255, + direction='horizontal', + max_translate_offset=250., + random_negative_prob=0.5, + min_size=0): + assert isinstance(level, (int, float)), \ + 'The level must be type int or float.' + assert 0 <= level <= _MAX_LEVEL, \ + 'The level used for calculating Translate\'s offset should be ' \ + 'in range [0,_MAX_LEVEL]' + assert 0 <= prob <= 1.0, \ + 'The probability of translation should be in range [0, 1].' + if isinstance(img_fill_val, (float, int)): + img_fill_val = tuple([float(img_fill_val)] * 3) + elif isinstance(img_fill_val, tuple): + assert len(img_fill_val) == 3, \ + 'img_fill_val as tuple must have 3 elements.' + img_fill_val = tuple([float(val) for val in img_fill_val]) + else: + raise ValueError('img_fill_val must be type float or tuple.') + assert np.all([0 <= val <= 255 for val in img_fill_val]), \ + 'all elements of img_fill_val should between range [0,255].' + assert direction in ('horizontal', 'vertical'), \ + 'direction should be "horizontal" or "vertical".' + assert isinstance(max_translate_offset, (int, float)), \ + 'The max_translate_offset must be type int or float.' 
+ # the offset used for translation + self.offset = int(level_to_value(level, max_translate_offset)) + self.level = level + self.prob = prob + self.img_fill_val = img_fill_val + self.seg_ignore_label = seg_ignore_label + self.direction = direction + self.max_translate_offset = max_translate_offset + self.random_negative_prob = random_negative_prob + self.min_size = min_size + + def _translate_img(self, results, offset, direction='horizontal'): + """Translate the image. + + Args: + results (dict): Result dict from loading pipeline. + offset (int | float): The offset for translate. + direction (str): The translate direction, either "horizontal" + or "vertical". + """ + for key in results.get('img_fields', ['img']): + img = results[key].copy() + results[key] = mmcv.imtranslate( + img, offset, direction, self.img_fill_val).astype(img.dtype) + + def _translate_bboxes(self, results, offset): + """Shift bboxes horizontally or vertically, according to offset.""" + h, w, c = results['img_shape'] + for key in results.get('bbox_fields', []): + min_x, min_y, max_x, max_y = np.split( + results[key], results[key].shape[-1], axis=-1) + if self.direction == 'horizontal': + min_x = np.maximum(0, min_x + offset) + max_x = np.minimum(w, max_x + offset) + elif self.direction == 'vertical': + min_y = np.maximum(0, min_y + offset) + max_y = np.minimum(h, max_y + offset) + + # the boxes translated outside of image will be filtered along with + # the corresponding masks, by invoking ``_filter_invalid``. + results[key] = np.concatenate([min_x, min_y, max_x, max_y], + axis=-1) + + def _translate_masks(self, + results, + offset, + direction='horizontal', + fill_val=0): + """Translate masks horizontally or vertically.""" + h, w, c = results['img_shape'] + for key in results.get('mask_fields', []): + masks = results[key] + results[key] = masks.translate((h, w), offset, direction, fill_val) + + def _translate_seg(self, + results, + offset, + direction='horizontal', + fill_val=255): + """Translate segmentation maps horizontally or vertically.""" + for key in results.get('seg_fields', []): + seg = results[key].copy() + results[key] = mmcv.imtranslate(seg, offset, direction, + fill_val).astype(seg.dtype) + + def _filter_invalid(self, results, min_size=0): + """Filter bboxes and masks too small or translated out of image.""" + bbox2label, bbox2mask, _ = bbox2fields() + for key in results.get('bbox_fields', []): + bbox_w = results[key][:, 2] - results[key][:, 0] + bbox_h = results[key][:, 3] - results[key][:, 1] + valid_inds = (bbox_w > min_size) & (bbox_h > min_size) + valid_inds = np.nonzero(valid_inds)[0] + results[key] = results[key][valid_inds] + # label fields. e.g. gt_labels and gt_labels_ignore + label_key = bbox2label.get(key) + if label_key in results: + results[label_key] = results[label_key][valid_inds] + # mask fields, e.g. gt_masks and gt_masks_ignore + mask_key = bbox2mask.get(key) + if mask_key in results: + results[mask_key] = results[mask_key][valid_inds] + return results + + def __call__(self, results): + """Call function to translate images, bounding boxes, masks and + semantic segmentation maps. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Translated results. + """ + if np.random.rand() > self.prob: + return results + offset = random_negative(self.offset, self.random_negative_prob) + self._translate_img(results, offset, self.direction) + self._translate_bboxes(results, offset) + # fill_val defaultly 0 for BitmapMasks and None for PolygonMasks. 
+ self._translate_masks(results, offset, self.direction) + # fill_val set to ``seg_ignore_label`` for the ignored value + # of segmentation map. + self._translate_seg( + results, offset, self.direction, fill_val=self.seg_ignore_label) + self._filter_invalid(results, min_size=self.min_size) + return results + + +@PIPELINES.register_module() +class ColorTransform: + """Apply Color transformation to image. The bboxes, masks, and + segmentations are not modified. + + Args: + level (int | float): Should be in range [0,_MAX_LEVEL]. + prob (float): The probability for performing Color transformation. + """ + + def __init__(self, level, prob=0.5): + assert isinstance(level, (int, float)), \ + 'The level must be type int or float.' + assert 0 <= level <= _MAX_LEVEL, \ + 'The level should be in range [0,_MAX_LEVEL].' + assert 0 <= prob <= 1.0, \ + 'The probability should be in range [0,1].' + self.level = level + self.prob = prob + self.factor = enhance_level_to_value(level) + + def _adjust_color_img(self, results, factor=1.0): + """Apply Color transformation to image.""" + for key in results.get('img_fields', ['img']): + # NOTE by default the image is assumed to be in BGR format + img = results[key] + results[key] = mmcv.adjust_color(img, factor).astype(img.dtype) + + def __call__(self, results): + """Call function for Color transformation. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Colored results. + """ + if np.random.rand() > self.prob: + return results + self._adjust_color_img(results, self.factor) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(level={self.level}, ' + repr_str += f'prob={self.prob})' + return repr_str + + +@PIPELINES.register_module() +class EqualizeTransform: + """Apply Equalize transformation to image. The bboxes, masks and + segmentations are not modified. + + Args: + prob (float): The probability for performing Equalize transformation. + """ + + def __init__(self, prob=0.5): + assert 0 <= prob <= 1.0, \ + 'The probability should be in range [0,1].' + self.prob = prob + + def _imequalize(self, results): + """Equalizes the histogram of one image.""" + for key in results.get('img_fields', ['img']): + img = results[key] + results[key] = mmcv.imequalize(img).astype(img.dtype) + + def __call__(self, results): + """Call function for Equalize transformation. + + Args: + results (dict): Results dict from loading pipeline. + + Returns: + dict: Results after the transformation. + """ + if np.random.rand() > self.prob: + return results + self._imequalize(results) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(prob={self.prob})' + return repr_str + + +@PIPELINES.register_module() +class BrightnessTransform: + """Apply Brightness transformation to image. The bboxes, masks and + segmentations are not modified. + + Args: + level (int | float): Should be in range [0,_MAX_LEVEL]. + prob (float): The probability for performing Brightness transformation. + """ + + def __init__(self, level, prob=0.5): + assert isinstance(level, (int, float)), \ + 'The level must be type int or float.' + assert 0 <= level <= _MAX_LEVEL, \ + 'The level should be in range [0,_MAX_LEVEL].' + assert 0 <= prob <= 1.0, \ + 'The probability should be in range [0,1].'
+ self.level = level + self.prob = prob + self.factor = enhance_level_to_value(level) + + def _adjust_brightness_img(self, results, factor=1.0): + """Adjust the brightness of image.""" + for key in results.get('img_fields', ['img']): + img = results[key] + results[key] = mmcv.adjust_brightness(img, + factor).astype(img.dtype) + + def __call__(self, results): + """Call function for Brightness transformation. + + Args: + results (dict): Results dict from loading pipeline. + + Returns: + dict: Results after the transformation. + """ + if np.random.rand() > self.prob: + return results + self._adjust_brightness_img(results, self.factor) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(level={self.level}, ' + repr_str += f'prob={self.prob})' + return repr_str + + +@PIPELINES.register_module() +class ContrastTransform: + """Apply Contrast transformation to image. The bboxes, masks and + segmentations are not modified. + + Args: + level (int | float): Should be in range [0,_MAX_LEVEL]. + prob (float): The probability for performing Contrast transformation. + """ + + def __init__(self, level, prob=0.5): + assert isinstance(level, (int, float)), \ + 'The level must be type int or float.' + assert 0 <= level <= _MAX_LEVEL, \ + 'The level should be in range [0,_MAX_LEVEL].' + assert 0 <= prob <= 1.0, \ + 'The probability should be in range [0,1].' + self.level = level + self.prob = prob + self.factor = enhance_level_to_value(level) + + def _adjust_contrast_img(self, results, factor=1.0): + """Adjust the image contrast.""" + for key in results.get('img_fields', ['img']): + img = results[key] + results[key] = mmcv.adjust_contrast(img, factor).astype(img.dtype) + + def __call__(self, results): + """Call function for Contrast transformation. + + Args: + results (dict): Results dict from loading pipeline. + + Returns: + dict: Results after the transformation. + """ + if np.random.rand() > self.prob: + return results + self._adjust_contrast_img(results, self.factor) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(level={self.level}, ' + repr_str += f'prob={self.prob})' + return repr_str diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/compose.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/compose.py new file mode 100644 index 0000000000000000000000000000000000000000..15675305a051eff04c93753c60b732d25a1fc303 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/compose.py @@ -0,0 +1,51 @@ +import collections + +from mmcv.utils import build_from_cfg + +from ..builder import PIPELINES + + +@PIPELINES.register_module() +class Compose: + """Compose multiple transforms sequentially. + + Args: + transforms (Sequence[dict | callable]): Sequence of transform object or + config dict to be composed. + """ + + def __init__(self, transforms): + assert isinstance(transforms, collections.abc.Sequence) + self.transforms = [] + for transform in transforms: + if isinstance(transform, dict): + transform = build_from_cfg(transform, PIPELINES) + self.transforms.append(transform) + elif callable(transform): + self.transforms.append(transform) + else: + raise TypeError('transform must be callable or a dict') + + def __call__(self, data): + """Call function to apply transforms sequentially. + + Args: + data (dict): A result dict contains the data to transform. + + Returns: + dict: Transformed data. 
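Before leaving the auto-augment transforms: `ColorTransform`, `BrightnessTransform` and `ContrastTransform` all derive their PIL-style enhancement factor from the same `enhance_level_to_value` mapping, so `level=5` corresponds (approximately) to the identity transform. A tiny sketch of the mapping:

```python
def enhance_level_to_value(level, a=1.8, b=0.1):  # copied from auto_augment.py
    return (level / 10) * a + b

for level in (0, 5, 10):
    print(level, enhance_level_to_value(level))
# 0 -> 0.1 (strong attenuation), 5 -> 1.0 (identity), 10 -> 1.9 (strong boost)
```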
+ """ + + for t in self.transforms: + data = t(data) + if data is None: + return None + return data + + def __repr__(self): + format_string = self.__class__.__name__ + '(' + for t in self.transforms: + format_string += '\n' + format_string += f' {t}' + format_string += '\n)' + return format_string diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/formating.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/formating.py new file mode 100644 index 0000000000000000000000000000000000000000..f71bca13cf7f1910de2246a9b822851a12529735 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/formating.py @@ -0,0 +1,364 @@ +from collections.abc import Sequence + +import mmcv +import numpy as np +import torch +from mmcv.parallel import DataContainer as DC + +from ..builder import PIPELINES + + +def to_tensor(data): + """Convert objects of various python types to :obj:`torch.Tensor`. + + Supported types are: :class:`numpy.ndarray`, :class:`torch.Tensor`, + :class:`Sequence`, :class:`int` and :class:`float`. + + Args: + data (torch.Tensor | numpy.ndarray | Sequence | int | float): Data to + be converted. + """ + + if isinstance(data, torch.Tensor): + return data + elif isinstance(data, np.ndarray): + return torch.from_numpy(data) + elif isinstance(data, Sequence) and not mmcv.is_str(data): + return torch.tensor(data) + elif isinstance(data, int): + return torch.LongTensor([data]) + elif isinstance(data, float): + return torch.FloatTensor([data]) + else: + raise TypeError(f'type {type(data)} cannot be converted to tensor.') + + +@PIPELINES.register_module() +class ToTensor: + """Convert some results to :obj:`torch.Tensor` by given keys. + + Args: + keys (Sequence[str]): Keys that need to be converted to Tensor. + """ + + def __init__(self, keys): + self.keys = keys + + def __call__(self, results): + """Call function to convert data in results to :obj:`torch.Tensor`. + + Args: + results (dict): Result dict contains the data to convert. + + Returns: + dict: The result dict contains the data converted + to :obj:`torch.Tensor`. + """ + for key in self.keys: + results[key] = to_tensor(results[key]) + return results + + def __repr__(self): + return self.__class__.__name__ + f'(keys={self.keys})' + + +@PIPELINES.register_module() +class ImageToTensor: + """Convert image to :obj:`torch.Tensor` by given keys. + + The dimension order of input image is (H, W, C). The pipeline will convert + it to (C, H, W). If only 2 dimension (H, W) is given, the output would be + (1, H, W). + + Args: + keys (Sequence[str]): Key of images to be converted to Tensor. + """ + + def __init__(self, keys): + self.keys = keys + + def __call__(self, results): + """Call function to convert image in results to :obj:`torch.Tensor` and + transpose the channel order. + + Args: + results (dict): Result dict contains the image data to convert. + + Returns: + dict: The result dict contains the image converted + to :obj:`torch.Tensor` and transposed to (C, H, W) order. + """ + for key in self.keys: + img = results[key] + if len(img.shape) < 3: + img = np.expand_dims(img, -1) + results[key] = to_tensor(img.transpose(2, 0, 1)) + return results + + def __repr__(self): + return self.__class__.__name__ + f'(keys={self.keys})' + + +@PIPELINES.register_module() +class Transpose: + """Transpose some results by given keys. + + Args: + keys (Sequence[str]): Keys of results to be transposed. + order (Sequence[int]): Order of transpose. 
+ """ + + def __init__(self, keys, order): + self.keys = keys + self.order = order + + def __call__(self, results): + """Call function to transpose the channel order of data in results. + + Args: + results (dict): Result dict contains the data to transpose. + + Returns: + dict: The result dict contains the data transposed to \ + ``self.order``. + """ + for key in self.keys: + results[key] = results[key].transpose(self.order) + return results + + def __repr__(self): + return self.__class__.__name__ + \ + f'(keys={self.keys}, order={self.order})' + + +@PIPELINES.register_module() +class ToDataContainer: + """Convert results to :obj:`mmcv.DataContainer` by given fields. + + Args: + fields (Sequence[dict]): Each field is a dict like + ``dict(key='xxx', **kwargs)``. The ``key`` in result will + be converted to :obj:`mmcv.DataContainer` with ``**kwargs``. + Default: ``(dict(key='img', stack=True), dict(key='gt_bboxes'), + dict(key='gt_labels'))``. + """ + + def __init__(self, + fields=(dict(key='img', stack=True), dict(key='gt_bboxes'), + dict(key='gt_labels'))): + self.fields = fields + + def __call__(self, results): + """Call function to convert data in results to + :obj:`mmcv.DataContainer`. + + Args: + results (dict): Result dict contains the data to convert. + + Returns: + dict: The result dict contains the data converted to \ + :obj:`mmcv.DataContainer`. + """ + + for field in self.fields: + field = field.copy() + key = field.pop('key') + results[key] = DC(results[key], **field) + return results + + def __repr__(self): + return self.__class__.__name__ + f'(fields={self.fields})' + + +@PIPELINES.register_module() +class DefaultFormatBundle: + """Default formatting bundle. + + It simplifies the pipeline of formatting common fields, including "img", + "proposals", "gt_bboxes", "gt_labels", "gt_masks" and "gt_semantic_seg". + These fields are formatted as follows. + + - img: (1)transpose, (2)to tensor, (3)to DataContainer (stack=True) + - proposals: (1)to tensor, (2)to DataContainer + - gt_bboxes: (1)to tensor, (2)to DataContainer + - gt_bboxes_ignore: (1)to tensor, (2)to DataContainer + - gt_labels: (1)to tensor, (2)to DataContainer + - gt_masks: (1)to tensor, (2)to DataContainer (cpu_only=True) + - gt_semantic_seg: (1)unsqueeze dim-0 (2)to tensor, \ + (3)to DataContainer (stack=True) + """ + + def __call__(self, results): + """Call function to transform and format common fields in results. + + Args: + results (dict): Result dict contains the data to convert. + + Returns: + dict: The result dict contains the data that is formatted with \ + default bundle. + """ + + if 'img' in results: + img = results['img'] + # add default meta keys + results = self._add_default_meta_keys(results) + if len(img.shape) < 3: + img = np.expand_dims(img, -1) + img = np.ascontiguousarray(img.transpose(2, 0, 1)) + results['img'] = DC(to_tensor(img), stack=True) + for key in ['proposals', 'gt_bboxes', 'gt_bboxes_ignore', 'gt_labels']: + if key not in results: + continue + results[key] = DC(to_tensor(results[key])) + if 'gt_masks' in results: + results['gt_masks'] = DC(results['gt_masks'], cpu_only=True) + if 'gt_semantic_seg' in results: + results['gt_semantic_seg'] = DC( + to_tensor(results['gt_semantic_seg'][None, ...]), stack=True) + return results + + def _add_default_meta_keys(self, results): + """Add default meta keys. + + We set default meta keys including `pad_shape`, `scale_factor` and + `img_norm_cfg` to avoid the case where no `Resize`, `Normalize` and + `Pad` are implemented during the whole pipeline. 
+ + Args: + results (dict): Result dict contains the data to convert. + + Returns: + results (dict): Updated result dict contains the data to convert. + """ + img = results['img'] + results.setdefault('pad_shape', img.shape) + results.setdefault('scale_factor', 1.0) + num_channels = 1 if len(img.shape) < 3 else img.shape[2] + results.setdefault( + 'img_norm_cfg', + dict( + mean=np.zeros(num_channels, dtype=np.float32), + std=np.ones(num_channels, dtype=np.float32), + to_rgb=False)) + return results + + def __repr__(self): + return self.__class__.__name__ + + +@PIPELINES.register_module() +class Collect: + """Collect data from the loader relevant to the specific task. + + This is usually the last stage of the data loader pipeline. Typically keys + is set to some subset of "img", "proposals", "gt_bboxes", + "gt_bboxes_ignore", "gt_labels", and/or "gt_masks". + + The "img_meta" item is always populated. The contents of the "img_meta" + dictionary depends on "meta_keys". By default this includes: + + - "img_shape": shape of the image input to the network as a tuple \ + (h, w, c). Note that images may be zero padded on the \ + bottom/right if the batch tensor is larger than this shape. + + - "scale_factor": a float indicating the preprocessing scale + + - "flip": a boolean indicating if image flip transform was used + + - "filename": path to the image file + + - "ori_shape": original shape of the image as a tuple (h, w, c) + + - "pad_shape": image shape after padding + + - "img_norm_cfg": a dict of normalization information: + + - mean - per channel mean subtraction + - std - per channel std divisor + - to_rgb - bool indicating if bgr was converted to rgb + + Args: + keys (Sequence[str]): Keys of results to be collected in ``data``. + meta_keys (Sequence[str], optional): Meta keys to be converted to + ``mmcv.DataContainer`` and collected in ``data[img_metas]``. + Default: ``('filename', 'ori_filename', 'ori_shape', 'img_shape', + 'pad_shape', 'scale_factor', 'flip', 'flip_direction', + 'img_norm_cfg')`` + """ + + def __init__(self, + keys, + meta_keys=('filename', 'ori_filename', 'ori_shape', + 'img_shape', 'pad_shape', 'scale_factor', 'flip', + 'flip_direction', 'img_norm_cfg')): + self.keys = keys + self.meta_keys = meta_keys + + def __call__(self, results): + """Call function to collect keys in results. The keys in ``meta_keys`` + will be converted to :obj:mmcv.DataContainer. + + Args: + results (dict): Result dict contains the data to collect. + + Returns: + dict: The result dict contains the following keys + + - keys in``self.keys`` + - ``img_metas`` + """ + + data = {} + img_meta = {} + for key in self.meta_keys: + img_meta[key] = results[key] + data['img_metas'] = DC(img_meta, cpu_only=True) + for key in self.keys: + data[key] = results[key] + return data + + def __repr__(self): + return self.__class__.__name__ + \ + f'(keys={self.keys}, meta_keys={self.meta_keys})' + + +@PIPELINES.register_module() +class WrapFieldsToLists: + """Wrap fields of the data dictionary into lists for evaluation. + + This class can be used as a last step of a test or validation + pipeline for single image evaluation or inference. 
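Putting the two formatting stages above together, here is a minimal end-to-end sketch (the metadata values are toy placeholders) of what a training sample looks like after `DefaultFormatBundle` and `Collect`:

```python
import numpy as np
from mmdet.datasets.pipelines import Collect, DefaultFormatBundle

results = dict(
    img=np.zeros((4, 4, 3), dtype=np.float32),
    gt_bboxes=np.array([[0., 0., 2., 2.]], dtype=np.float32),
    gt_labels=np.array([1], dtype=np.int64),
    # toy metadata for the default Collect meta_keys
    filename='demo.jpg', ori_filename='demo.jpg',
    ori_shape=(4, 4, 3), img_shape=(4, 4, 3),
    flip=False, flip_direction=None,
)
results = DefaultFormatBundle()(results)  # also fills pad_shape/scale_factor/img_norm_cfg
data = Collect(keys=['img', 'gt_bboxes', 'gt_labels'])(results)
# data['img'] is a DataContainer around a [3, 4, 4] tensor (stack=True);
# data['img_metas'] is a cpu_only DataContainer holding the meta dict.
```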
+ + Example: + >>> test_pipeline = [ + >>> dict(type='LoadImageFromFile'), + >>> dict(type='Normalize', + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True), + >>> dict(type='Pad', size_divisor=32), + >>> dict(type='ImageToTensor', keys=['img']), + >>> dict(type='Collect', keys=['img']), + >>> dict(type='WrapFieldsToLists') + >>> ] + """ + + def __call__(self, results): + """Call function to wrap fields into lists. + + Args: + results (dict): Result dict contains the data to wrap. + + Returns: + dict: The result dict where value of ``self.keys`` are wrapped \ + into list. + """ + + # Wrap dict fields into lists + for key, val in results.items(): + results[key] = [val] + return results + + def __repr__(self): + return f'{self.__class__.__name__}()' diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/instaboost.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/instaboost.py new file mode 100644 index 0000000000000000000000000000000000000000..6ff4f721f324a165e46a3cde27208db85e3dd54a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/instaboost.py @@ -0,0 +1,98 @@ +import numpy as np + +from ..builder import PIPELINES + + +@PIPELINES.register_module() +class InstaBoost: + r"""Data augmentation method in `InstaBoost: Boosting Instance + Segmentation Via Probability Map Guided Copy-Pasting + `_. + + Refer to https://github.com/GothicAi/Instaboost for implementation details. + """ + + def __init__(self, + action_candidate=('normal', 'horizontal', 'skip'), + action_prob=(1, 0, 0), + scale=(0.8, 1.2), + dx=15, + dy=15, + theta=(-1, 1), + color_prob=0.5, + hflag=False, + aug_ratio=0.5): + try: + import instaboostfast as instaboost + except ImportError: + raise ImportError( + 'Please run "pip install instaboostfast" ' + 'to install instaboostfast first for instaboost augmentation.') + self.cfg = instaboost.InstaBoostConfig(action_candidate, action_prob, + scale, dx, dy, theta, + color_prob, hflag) + self.aug_ratio = aug_ratio + + def _load_anns(self, results): + labels = results['ann_info']['labels'] + masks = results['ann_info']['masks'] + bboxes = results['ann_info']['bboxes'] + n = len(labels) + + anns = [] + for i in range(n): + label = labels[i] + bbox = bboxes[i] + mask = masks[i] + x1, y1, x2, y2 = bbox + # assert (x2 - x1) >= 1 and (y2 - y1) >= 1 + bbox = [x1, y1, x2 - x1, y2 - y1] + anns.append({ + 'category_id': label, + 'segmentation': mask, + 'bbox': bbox + }) + + return anns + + def _parse_anns(self, results, anns, img): + gt_bboxes = [] + gt_labels = [] + gt_masks_ann = [] + for ann in anns: + x1, y1, w, h = ann['bbox'] + # TODO: more essential bug need to be fixed in instaboost + if w <= 0 or h <= 0: + continue + bbox = [x1, y1, x1 + w, y1 + h] + gt_bboxes.append(bbox) + gt_labels.append(ann['category_id']) + gt_masks_ann.append(ann['segmentation']) + gt_bboxes = np.array(gt_bboxes, dtype=np.float32) + gt_labels = np.array(gt_labels, dtype=np.int64) + results['ann_info']['labels'] = gt_labels + results['ann_info']['bboxes'] = gt_bboxes + results['ann_info']['masks'] = gt_masks_ann + results['img'] = img + return results + + def __call__(self, results): + img = results['img'] + orig_type = img.dtype + anns = self._load_anns(results) + if np.random.choice([0, 1], p=[1 - self.aug_ratio, self.aug_ratio]): + try: + import instaboostfast as instaboost + except ImportError: + raise ImportError('Please run "pip install instaboostfast" ' + 'to install instaboostfast first.') 
+ anns, img = instaboost.get_new_data( + anns, img.astype(np.uint8), self.cfg, background=None) + + results = self._parse_anns(results, anns, img.astype(orig_type)) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(cfg={self.cfg}, aug_ratio={self.aug_ratio})' + return repr_str diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/loading.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/loading.py new file mode 100644 index 0000000000000000000000000000000000000000..0ebad8a2a81e8aecec583b147e9fa9ec8081d4ad --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/loading.py @@ -0,0 +1,458 @@ +import os.path as osp + +import mmcv +import numpy as np +import pycocotools.mask as maskUtils + +from mmdet.core import BitmapMasks, PolygonMasks +from ..builder import PIPELINES + + +@PIPELINES.register_module() +class LoadImageFromFile: + """Load an image from file. + + Required keys are "img_prefix" and "img_info" (a dict that must contain the + key "filename"). Added or updated keys are "filename", "img", "img_shape", + "ori_shape" (same as `img_shape`), "pad_shape" (same as `img_shape`), + "scale_factor" (1.0) and "img_norm_cfg" (means=0 and stds=1). + + Args: + to_float32 (bool): Whether to convert the loaded image to a float32 + numpy array. If set to False, the loaded image is an uint8 array. + Defaults to False. + color_type (str): The flag argument for :func:`mmcv.imfrombytes`. + Defaults to 'color'. + file_client_args (dict): Arguments to instantiate a FileClient. + See :class:`mmcv.fileio.FileClient` for details. + Defaults to ``dict(backend='disk')``. + """ + + def __init__(self, + to_float32=False, + color_type='color', + file_client_args=dict(backend='disk')): + self.to_float32 = to_float32 + self.color_type = color_type + self.file_client_args = file_client_args.copy() + self.file_client = None + + def __call__(self, results): + """Call functions to load image and get image meta information. + + Args: + results (dict): Result dict from :obj:`mmdet.CustomDataset`. + + Returns: + dict: The dict contains loaded image and meta information. + """ + + if self.file_client is None: + self.file_client = mmcv.FileClient(**self.file_client_args) + + if results['img_prefix'] is not None: + filename = osp.join(results['img_prefix'], + results['img_info']['filename']) + else: + filename = results['img_info']['filename'] + + img_bytes = self.file_client.get(filename) + img = mmcv.imfrombytes(img_bytes, flag=self.color_type) + if self.to_float32: + img = img.astype(np.float32) + + results['filename'] = filename + results['ori_filename'] = results['img_info']['filename'] + results['img'] = img + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + results['img_fields'] = ['img'] + return results + + def __repr__(self): + repr_str = (f'{self.__class__.__name__}(' + f'to_float32={self.to_float32}, ' + f"color_type='{self.color_type}', " + f'file_client_args={self.file_client_args})') + return repr_str + + +@PIPELINES.register_module() +class LoadImageFromWebcam(LoadImageFromFile): + """Load an image from webcam. + + Similar with :obj:`LoadImageFromFile`, but the image read from webcam is in + ``results['img']``. + """ + + def __call__(self, results): + """Call functions to add image meta information. + + Args: + results (dict): Result dict with Webcam read image in + ``results['img']``. + + Returns: + dict: The dict contains loaded image and meta information. 
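For reference, the minimal input contract of `LoadImageFromFile` above is small; a sketch (the prefix and filename are placeholders for real paths on disk):

```python
from mmdet.datasets.pipelines import LoadImageFromFile

load = LoadImageFromFile(to_float32=True)
results = load(dict(img_prefix='data/',                   # placeholder prefix
                    img_info=dict(filename='demo.jpg')))  # placeholder file
# Afterwards: results['filename'] == 'data/demo.jpg',
# results['img'] is a float32 HxWxC BGR array, img_shape == ori_shape,
# and results['img_fields'] == ['img'].
```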
+ """ + + img = results['img'] + if self.to_float32: + img = img.astype(np.float32) + + results['filename'] = None + results['ori_filename'] = None + results['img'] = img + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + results['img_fields'] = ['img'] + return results + + +@PIPELINES.register_module() +class LoadMultiChannelImageFromFiles: + """Load multi-channel images from a list of separate channel files. + + Required keys are "img_prefix" and "img_info" (a dict that must contain the + key "filename", which is expected to be a list of filenames). + Added or updated keys are "filename", "img", "img_shape", + "ori_shape" (same as `img_shape`), "pad_shape" (same as `img_shape`), + "scale_factor" (1.0) and "img_norm_cfg" (means=0 and stds=1). + + Args: + to_float32 (bool): Whether to convert the loaded image to a float32 + numpy array. If set to False, the loaded image is an uint8 array. + Defaults to False. + color_type (str): The flag argument for :func:`mmcv.imfrombytes`. + Defaults to 'color'. + file_client_args (dict): Arguments to instantiate a FileClient. + See :class:`mmcv.fileio.FileClient` for details. + Defaults to ``dict(backend='disk')``. + """ + + def __init__(self, + to_float32=False, + color_type='unchanged', + file_client_args=dict(backend='disk')): + self.to_float32 = to_float32 + self.color_type = color_type + self.file_client_args = file_client_args.copy() + self.file_client = None + + def __call__(self, results): + """Call functions to load multiple images and get images meta + information. + + Args: + results (dict): Result dict from :obj:`mmdet.CustomDataset`. + + Returns: + dict: The dict contains loaded images and meta information. + """ + + if self.file_client is None: + self.file_client = mmcv.FileClient(**self.file_client_args) + + if results['img_prefix'] is not None: + filename = [ + osp.join(results['img_prefix'], fname) + for fname in results['img_info']['filename'] + ] + else: + filename = results['img_info']['filename'] + + img = [] + for name in filename: + img_bytes = self.file_client.get(name) + img.append(mmcv.imfrombytes(img_bytes, flag=self.color_type)) + img = np.stack(img, axis=-1) + if self.to_float32: + img = img.astype(np.float32) + + results['filename'] = filename + results['ori_filename'] = results['img_info']['filename'] + results['img'] = img + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['scale_factor'] = 1.0 + num_channels = 1 if len(img.shape) < 3 else img.shape[2] + results['img_norm_cfg'] = dict( + mean=np.zeros(num_channels, dtype=np.float32), + std=np.ones(num_channels, dtype=np.float32), + to_rgb=False) + return results + + def __repr__(self): + repr_str = (f'{self.__class__.__name__}(' + f'to_float32={self.to_float32}, ' + f"color_type='{self.color_type}', " + f'file_client_args={self.file_client_args})') + return repr_str + + +@PIPELINES.register_module() +class LoadAnnotations: + """Load multiple types of annotations. + + Args: + with_bbox (bool): Whether to parse and load the bbox annotation. + Default: True. + with_label (bool): Whether to parse and load the label annotation. + Default: True. + with_mask (bool): Whether to parse and load the mask annotation. + Default: False. + with_seg (bool): Whether to parse and load the semantic segmentation + annotation. Default: False. + poly2mask (bool): Whether to convert the instance masks from polygons + to bitmaps. Default: True. 
+ file_client_args (dict): Arguments to instantiate a FileClient. + See :class:`mmcv.fileio.FileClient` for details. + Defaults to ``dict(backend='disk')``. + """ + + def __init__(self, + with_bbox=True, + with_label=True, + with_mask=False, + with_seg=False, + poly2mask=True, + file_client_args=dict(backend='disk')): + self.with_bbox = with_bbox + self.with_label = with_label + self.with_mask = with_mask + self.with_seg = with_seg + self.poly2mask = poly2mask + self.file_client_args = file_client_args.copy() + self.file_client = None + + def _load_bboxes(self, results): + """Private function to load bounding box annotations. + + Args: + results (dict): Result dict from :obj:`mmdet.CustomDataset`. + + Returns: + dict: The dict contains loaded bounding box annotations. + """ + + ann_info = results['ann_info'] + results['gt_bboxes'] = ann_info['bboxes'].copy() + + gt_bboxes_ignore = ann_info.get('bboxes_ignore', None) + if gt_bboxes_ignore is not None: + results['gt_bboxes_ignore'] = gt_bboxes_ignore.copy() + results['bbox_fields'].append('gt_bboxes_ignore') + results['bbox_fields'].append('gt_bboxes') + return results + + def _load_labels(self, results): + """Private function to load label annotations. + + Args: + results (dict): Result dict from :obj:`mmdet.CustomDataset`. + + Returns: + dict: The dict contains loaded label annotations. + """ + + results['gt_labels'] = results['ann_info']['labels'].copy() + return results + + def _poly2mask(self, mask_ann, img_h, img_w): + """Private function to convert masks represented with polygon to + bitmaps. + + Args: + mask_ann (list | dict): Polygon mask annotation input. + img_h (int): The height of output mask. + img_w (int): The width of output mask. + + Returns: + numpy.ndarray: The decode bitmap mask of shape (img_h, img_w). + """ + + if isinstance(mask_ann, list): + # polygon -- a single object might consist of multiple parts + # we merge all parts into one mask rle code + rles = maskUtils.frPyObjects(mask_ann, img_h, img_w) + rle = maskUtils.merge(rles) + elif isinstance(mask_ann['counts'], list): + # uncompressed RLE + rle = maskUtils.frPyObjects(mask_ann, img_h, img_w) + else: + # rle + rle = mask_ann + mask = maskUtils.decode(rle) + return mask + + def process_polygons(self, polygons): + """Convert polygons to list of ndarray and filter invalid polygons. + + Args: + polygons (list[list]): Polygons of one instance. + + Returns: + list[numpy.ndarray]: Processed polygons. + """ + + polygons = [np.array(p) for p in polygons] + valid_polygons = [] + for polygon in polygons: + if len(polygon) % 2 == 0 and len(polygon) >= 6: + valid_polygons.append(polygon) + return valid_polygons + + def _load_masks(self, results): + """Private function to load mask annotations. + + Args: + results (dict): Result dict from :obj:`mmdet.CustomDataset`. + + Returns: + dict: The dict contains loaded mask annotations. + If ``self.poly2mask`` is set ``True``, `gt_mask` will contain + :obj:`PolygonMasks`. Otherwise, :obj:`BitmapMasks` is used. + """ + + h, w = results['img_info']['height'], results['img_info']['width'] + gt_masks = results['ann_info']['masks'] + if self.poly2mask: + gt_masks = BitmapMasks( + [self._poly2mask(mask, h, w) for mask in gt_masks], h, w) + else: + gt_masks = PolygonMasks( + [self.process_polygons(polygons) for polygons in gt_masks], h, + w) + results['gt_masks'] = gt_masks + results['mask_fields'].append('gt_masks') + return results + + def _load_semantic_seg(self, results): + """Private function to load semantic segmentation annotations. 
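The `_poly2mask` branch above is a direct pycocotools round trip (polygon to RLE to bitmap); a self-contained sketch:

```python
import numpy as np
import pycocotools.mask as maskUtils

img_h = img_w = 8
# One instance given as a single polygon part: [x0, y0, x1, y1, ...]
polygon = [[1, 1, 6, 1, 6, 6, 1, 6]]
rles = maskUtils.frPyObjects(polygon, img_h, img_w)  # polygon(s) -> RLE(s)
rle = maskUtils.merge(rles)                          # merge multi-part objects
mask = maskUtils.decode(rle)                         # (img_h, img_w) uint8 bitmap
assert mask.shape == (img_h, img_w) and mask.dtype == np.uint8
```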
+ + Args: + results (dict): Result dict from :obj:`dataset`. + + Returns: + dict: The dict contains loaded semantic segmentation annotations. + """ + + if self.file_client is None: + self.file_client = mmcv.FileClient(**self.file_client_args) + + filename = osp.join(results['seg_prefix'], + results['ann_info']['seg_map']) + img_bytes = self.file_client.get(filename) + results['gt_semantic_seg'] = mmcv.imfrombytes( + img_bytes, flag='unchanged').squeeze() + results['seg_fields'].append('gt_semantic_seg') + return results + + def __call__(self, results): + """Call function to load multiple types of annotations. + + Args: + results (dict): Result dict from :obj:`mmdet.CustomDataset`. + + Returns: + dict: The dict contains loaded bounding box, label, mask and + semantic segmentation annotations. + """ + + if self.with_bbox: + results = self._load_bboxes(results) + if results is None: + return None + if self.with_label: + results = self._load_labels(results) + if self.with_mask: + results = self._load_masks(results) + if self.with_seg: + results = self._load_semantic_seg(results) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(with_bbox={self.with_bbox}, ' + repr_str += f'with_label={self.with_label}, ' + repr_str += f'with_mask={self.with_mask}, ' + repr_str += f'with_seg={self.with_seg}, ' + repr_str += f'poly2mask={self.poly2mask}, ' + repr_str += f'file_client_args={self.file_client_args})' + return repr_str + + +@PIPELINES.register_module() +class LoadProposals: + """Load proposal pipeline. + + Required key is "proposals". Updated keys are "proposals", "bbox_fields". + + Args: + num_max_proposals (int, optional): Maximum number of proposals to load. + If not specified, all proposals will be loaded. + """ + + def __init__(self, num_max_proposals=None): + self.num_max_proposals = num_max_proposals + + def __call__(self, results): + """Call function to load proposals from file. + + Args: + results (dict): Result dict from :obj:`mmdet.CustomDataset`. + + Returns: + dict: The dict contains loaded proposal annotations. + """ + + proposals = results['proposals'] + if proposals.shape[1] not in (4, 5): + raise AssertionError( + 'proposals should have shapes (n, 4) or (n, 5), ' + f'but found {proposals.shape}') + proposals = proposals[:, :4] + + if self.num_max_proposals is not None: + proposals = proposals[:self.num_max_proposals] + + if len(proposals) == 0: + proposals = np.array([[0, 0, 0, 0]], dtype=np.float32) + results['proposals'] = proposals + results['bbox_fields'].append('proposals') + return results + + def __repr__(self): + return self.__class__.__name__ + \ + f'(num_max_proposals={self.num_max_proposals})' + + +@PIPELINES.register_module() +class FilterAnnotations: + """Filter invalid annotations. + + Args: + min_gt_bbox_wh (tuple[int]): Minimum width and height of ground truth + boxes.
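A quick check of the `LoadProposals` semantics above (the score column is stripped and the list truncated); box values are toy data:

```python
import numpy as np
from mmdet.datasets.pipelines import LoadProposals

load = LoadProposals(num_max_proposals=2)
results = load(dict(
    # (n, 5) proposals: [x1, y1, x2, y2, score]; the score column is dropped
    proposals=np.array([[0, 0, 10, 10, 0.9],
                        [5, 5, 20, 20, 0.8],
                        [1, 1, 3, 3, 0.1]], dtype=np.float32),
    bbox_fields=[],
))
assert results['proposals'].shape == (2, 4)  # truncated and score-stripped
assert 'proposals' in results['bbox_fields']
```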
+ """ + + def __init__(self, min_gt_bbox_wh): + # TODO: add more filter options + self.min_gt_bbox_wh = min_gt_bbox_wh + + def __call__(self, results): + assert 'gt_bboxes' in results + gt_bboxes = results['gt_bboxes'] + w = gt_bboxes[:, 2] - gt_bboxes[:, 0] + h = gt_bboxes[:, 3] - gt_bboxes[:, 1] + keep = (w > self.min_gt_bbox_wh[0]) & (h > self.min_gt_bbox_wh[1]) + if not keep.any(): + return None + else: + keys = ('gt_bboxes', 'gt_labels', 'gt_masks', 'gt_semantic_seg') + for key in keys: + if key in results: + results[key] = results[key][keep] + return results diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/test_time_aug.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/test_time_aug.py new file mode 100644 index 0000000000000000000000000000000000000000..fe5175ae0d5472f94171ee1969e26e6ec4b3867c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/test_time_aug.py @@ -0,0 +1,120 @@ +import warnings + +import mmcv + +from ..builder import PIPELINES +from .compose import Compose + + +@PIPELINES.register_module() +class MultiScaleFlipAug: + """Test-time augmentation with multiple scales and flipping. + + An example configuration is as followed: + + .. code-block:: + + img_scale=[(1333, 400), (1333, 800)], + flip=True, + transforms=[ + dict(type='Resize', keep_ratio=True), + dict(type='RandomFlip'), + dict(type='Normalize', **img_norm_cfg), + dict(type='Pad', size_divisor=32), + dict(type='ImageToTensor', keys=['img']), + dict(type='Collect', keys=['img']), + ] + + After MultiScaleFLipAug with above configuration, the results are wrapped + into lists of the same length as followed: + + .. code-block:: + + dict( + img=[...], + img_shape=[...], + scale=[(1333, 400), (1333, 400), (1333, 800), (1333, 800)] + flip=[False, True, False, True] + ... + ) + + Args: + transforms (list[dict]): Transforms to apply in each augmentation. + img_scale (tuple | list[tuple] | None): Images scales for resizing. + scale_factor (float | list[float] | None): Scale factors for resizing. + flip (bool): Whether apply flip augmentation. Default: False. + flip_direction (str | list[str]): Flip augmentation directions, + options are "horizontal", "vertical" and "diagonal". If + flip_direction is a list, multiple flip augmentations will be + applied. It has no effect when flip == False. Default: + "horizontal". 
+ """ + + def __init__(self, + transforms, + img_scale=None, + scale_factor=None, + flip=False, + flip_direction='horizontal'): + self.transforms = Compose(transforms) + assert (img_scale is None) ^ (scale_factor is None), ( + 'Must have but only one variable can be setted') + if img_scale is not None: + self.img_scale = img_scale if isinstance(img_scale, + list) else [img_scale] + self.scale_key = 'scale' + assert mmcv.is_list_of(self.img_scale, tuple) + else: + self.img_scale = scale_factor if isinstance( + scale_factor, list) else [scale_factor] + self.scale_key = 'scale_factor' + + self.flip = flip + self.flip_direction = flip_direction if isinstance( + flip_direction, list) else [flip_direction] + assert mmcv.is_list_of(self.flip_direction, str) + if not self.flip and self.flip_direction != ['horizontal']: + warnings.warn( + 'flip_direction has no effect when flip is set to False') + if (self.flip + and not any([t['type'] == 'RandomFlip' for t in transforms])): + warnings.warn( + 'flip has no effect when RandomFlip is not in transforms') + + def __call__(self, results): + """Call function to apply test time augment transforms on results. + + Args: + results (dict): Result dict contains the data to transform. + + Returns: + dict[str: list]: The augmented data, where each value is wrapped + into a list. + """ + + aug_data = [] + flip_args = [(False, None)] + if self.flip: + flip_args += [(True, direction) + for direction in self.flip_direction] + for scale in self.img_scale: + for flip, direction in flip_args: + _results = results.copy() + _results[self.scale_key] = scale + _results['flip'] = flip + _results['flip_direction'] = direction + data = self.transforms(_results) + aug_data.append(data) + # list of dict to dict of list + aug_data_dict = {key: [] for key in aug_data[0]} + for data in aug_data: + for key, val in data.items(): + aug_data_dict[key].append(val) + return aug_data_dict + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(transforms={self.transforms}, ' + repr_str += f'img_scale={self.img_scale}, flip={self.flip}, ' + repr_str += f'flip_direction={self.flip_direction})' + return repr_str diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/transforms.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/transforms.py new file mode 100644 index 0000000000000000000000000000000000000000..41d33270ee1f436b175e82ffeac1e8047cd35514 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/pipelines/transforms.py @@ -0,0 +1,1905 @@ +import copy +import inspect + +import mmcv +import numpy as np +from numpy import random + +from mmdet.core import PolygonMasks +from mmdet.core.evaluation.bbox_overlaps import bbox_overlaps +from ..builder import PIPELINES + +try: + from imagecorruptions import corrupt +except ImportError: + corrupt = None + +try: + import albumentations + from albumentations import Compose +except ImportError: + albumentations = None + Compose = None + + +@PIPELINES.register_module() +class Resize: + """Resize images & bbox & mask. + + This transform resizes the input image to some scale. Bboxes and masks are + then resized with the same scale factor. If the input dict contains the key + "scale", then the scale in the input dict is used, otherwise the specified + scale in the init method is used. 
If the input dict contains the key + "scale_factor" (if MultiScaleFlipAug does not give img_scale but + scale_factor), the actual scale will be computed by image shape and + scale_factor. + + `img_scale` can either be a tuple (single-scale) or a list of tuple + (multi-scale). There are 3 multiscale modes: + + - ``ratio_range is not None``: randomly sample a ratio from the ratio \ + range and multiply it with the image scale. + - ``ratio_range is None`` and ``multiscale_mode == "range"``: randomly \ + sample a scale from the multiscale range. + - ``ratio_range is None`` and ``multiscale_mode == "value"``: randomly \ + sample a scale from multiple scales. + + Args: + img_scale (tuple or list[tuple]): Images scales for resizing. + multiscale_mode (str): Either "range" or "value". + ratio_range (tuple[float]): (min_ratio, max_ratio) + keep_ratio (bool): Whether to keep the aspect ratio when resizing the + image. + bbox_clip_border (bool, optional): Whether clip the objects outside + the border of the image. Defaults to True. + backend (str): Image resize backend, choices are 'cv2' and 'pillow'. + These two backends generates slightly different results. Defaults + to 'cv2'. + override (bool, optional): Whether to override `scale` and + `scale_factor` so as to call resize twice. Default False. If True, + after the first resizing, the existed `scale` and `scale_factor` + will be ignored so the second resizing can be allowed. + This option is a work-around for multiple times of resize in DETR. + Defaults to False. + """ + + def __init__(self, + img_scale=None, + multiscale_mode='range', + ratio_range=None, + keep_ratio=True, + bbox_clip_border=True, + backend='cv2', + override=False): + if img_scale is None: + self.img_scale = None + else: + if isinstance(img_scale, list): + self.img_scale = img_scale + else: + self.img_scale = [img_scale] + assert mmcv.is_list_of(self.img_scale, tuple) + + if ratio_range is not None: + # mode 1: given a scale and a range of image ratio + assert len(self.img_scale) == 1 + else: + # mode 2: given multiple scales or a range of scales + assert multiscale_mode in ['value', 'range'] + + self.backend = backend + self.multiscale_mode = multiscale_mode + self.ratio_range = ratio_range + self.keep_ratio = keep_ratio + # TODO: refactor the override option in Resize + self.override = override + self.bbox_clip_border = bbox_clip_border + + @staticmethod + def random_select(img_scales): + """Randomly select an img_scale from given candidates. + + Args: + img_scales (list[tuple]): Images scales for selection. + + Returns: + (tuple, int): Returns a tuple ``(img_scale, scale_dix)``, \ + where ``img_scale`` is the selected image scale and \ + ``scale_idx`` is the selected index in the given candidates. + """ + + assert mmcv.is_list_of(img_scales, tuple) + scale_idx = np.random.randint(len(img_scales)) + img_scale = img_scales[scale_idx] + return img_scale, scale_idx + + @staticmethod + def random_sample(img_scales): + """Randomly sample an img_scale when ``multiscale_mode=='range'``. + + Args: + img_scales (list[tuple]): Images scale range for sampling. + There must be two tuples in img_scales, which specify the lower + and upper bound of image scales. + + Returns: + (tuple, None): Returns a tuple ``(img_scale, None)``, where \ + ``img_scale`` is sampled scale and None is just a placeholder \ + to be consistent with :func:`random_select`. 
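# Editor's note: the three multiscale modes described above, written out as
# Resize configs (scale values are illustrative only, not from this repo).
mode_ratio = dict(type='Resize', img_scale=(1333, 800),
                  ratio_range=(0.8, 1.2), keep_ratio=True)
mode_range = dict(type='Resize', img_scale=[(1333, 640), (1333, 800)],
                  multiscale_mode='range', keep_ratio=True)
mode_value = dict(type='Resize', img_scale=[(1333, 640), (1333, 800)],
                  multiscale_mode='value', keep_ratio=True)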
+ """ + + assert mmcv.is_list_of(img_scales, tuple) and len(img_scales) == 2 + img_scale_long = [max(s) for s in img_scales] + img_scale_short = [min(s) for s in img_scales] + long_edge = np.random.randint( + min(img_scale_long), + max(img_scale_long) + 1) + short_edge = np.random.randint( + min(img_scale_short), + max(img_scale_short) + 1) + img_scale = (long_edge, short_edge) + return img_scale, None + + @staticmethod + def random_sample_ratio(img_scale, ratio_range): + """Randomly sample an img_scale when ``ratio_range`` is specified. + + A ratio will be randomly sampled from the range specified by + ``ratio_range``. Then it would be multiplied with ``img_scale`` to + generate sampled scale. + + Args: + img_scale (tuple): Images scale base to multiply with ratio. + ratio_range (tuple[float]): The minimum and maximum ratio to scale + the ``img_scale``. + + Returns: + (tuple, None): Returns a tuple ``(scale, None)``, where \ + ``scale`` is sampled ratio multiplied with ``img_scale`` and \ + None is just a placeholder to be consistent with \ + :func:`random_select`. + """ + + assert isinstance(img_scale, tuple) and len(img_scale) == 2 + min_ratio, max_ratio = ratio_range + assert min_ratio <= max_ratio + ratio = np.random.random_sample() * (max_ratio - min_ratio) + min_ratio + scale = int(img_scale[0] * ratio), int(img_scale[1] * ratio) + return scale, None + + def _random_scale(self, results): + """Randomly sample an img_scale according to ``ratio_range`` and + ``multiscale_mode``. + + If ``ratio_range`` is specified, a ratio will be sampled and be + multiplied with ``img_scale``. + If multiple scales are specified by ``img_scale``, a scale will be + sampled according to ``multiscale_mode``. + Otherwise, single scale will be used. + + Args: + results (dict): Result dict from :obj:`dataset`. + + Returns: + dict: Two new keys 'scale` and 'scale_idx` are added into \ + ``results``, which would be used by subsequent pipelines. 
+ """ + + if self.ratio_range is not None: + scale, scale_idx = self.random_sample_ratio( + self.img_scale[0], self.ratio_range) + elif len(self.img_scale) == 1: + scale, scale_idx = self.img_scale[0], 0 + elif self.multiscale_mode == 'range': + scale, scale_idx = self.random_sample(self.img_scale) + elif self.multiscale_mode == 'value': + scale, scale_idx = self.random_select(self.img_scale) + else: + raise NotImplementedError + + results['scale'] = scale + results['scale_idx'] = scale_idx + + def _resize_img(self, results): + """Resize images with ``results['scale']``.""" + for key in results.get('img_fields', ['img']): + if self.keep_ratio: + img, scale_factor = mmcv.imrescale( + results[key], + results['scale'], + return_scale=True, + backend=self.backend) + # the w_scale and h_scale has minor difference + # a real fix should be done in the mmcv.imrescale in the future + new_h, new_w = img.shape[:2] + h, w = results[key].shape[:2] + w_scale = new_w / w + h_scale = new_h / h + else: + img, w_scale, h_scale = mmcv.imresize( + results[key], + results['scale'], + return_scale=True, + backend=self.backend) + results[key] = img + + scale_factor = np.array([w_scale, h_scale, w_scale, h_scale], + dtype=np.float32) + results['img_shape'] = img.shape + # in case that there is no padding + results['pad_shape'] = img.shape + results['scale_factor'] = scale_factor + results['keep_ratio'] = self.keep_ratio + + def _resize_bboxes(self, results): + """Resize bounding boxes with ``results['scale_factor']``.""" + for key in results.get('bbox_fields', []): + bboxes = results[key] * results['scale_factor'] + if self.bbox_clip_border: + img_shape = results['img_shape'] + bboxes[:, 0::2] = np.clip(bboxes[:, 0::2], 0, img_shape[1]) + bboxes[:, 1::2] = np.clip(bboxes[:, 1::2], 0, img_shape[0]) + results[key] = bboxes + + def _resize_masks(self, results): + """Resize masks with ``results['scale']``""" + for key in results.get('mask_fields', []): + if results[key] is None: + continue + if self.keep_ratio: + results[key] = results[key].rescale(results['scale']) + else: + results[key] = results[key].resize(results['img_shape'][:2]) + + def _resize_seg(self, results): + """Resize semantic segmentation map with ``results['scale']``.""" + for key in results.get('seg_fields', []): + if self.keep_ratio: + gt_seg = mmcv.imrescale( + results[key], + results['scale'], + interpolation='nearest', + backend=self.backend) + else: + gt_seg = mmcv.imresize( + results[key], + results['scale'], + interpolation='nearest', + backend=self.backend) + results['gt_semantic_seg'] = gt_seg + + def __call__(self, results): + """Call function to resize images, bounding boxes, masks, semantic + segmentation map. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Resized results, 'img_shape', 'pad_shape', 'scale_factor', \ + 'keep_ratio' keys are added into result dict. 
+ """ + + if 'scale' not in results: + if 'scale_factor' in results: + img_shape = results['img'].shape[:2] + scale_factor = results['scale_factor'] + assert isinstance(scale_factor, float) + results['scale'] = tuple( + [int(x * scale_factor) for x in img_shape][::-1]) + else: + self._random_scale(results) + else: + if not self.override: + assert 'scale_factor' not in results, ( + 'scale and scale_factor cannot be both set.') + else: + results.pop('scale') + if 'scale_factor' in results: + results.pop('scale_factor') + self._random_scale(results) + + self._resize_img(results) + self._resize_bboxes(results) + self._resize_masks(results) + self._resize_seg(results) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(img_scale={self.img_scale}, ' + repr_str += f'multiscale_mode={self.multiscale_mode}, ' + repr_str += f'ratio_range={self.ratio_range}, ' + repr_str += f'keep_ratio={self.keep_ratio}, ' + repr_str += f'bbox_clip_border={self.bbox_clip_border})' + return repr_str + + +@PIPELINES.register_module() +class RandomFlip: + """Flip the image & bbox & mask. + + If the input dict contains the key "flip", then the flag will be used, + otherwise it will be randomly decided by a ratio specified in the init + method. + + When random flip is enabled, ``flip_ratio``/``direction`` can either be a + float/string or tuple of float/string. There are 3 flip modes: + + - ``flip_ratio`` is float, ``direction`` is string: the image will be + ``direction``ly flipped with probability of ``flip_ratio`` . + E.g., ``flip_ratio=0.5``, ``direction='horizontal'``, + then image will be horizontally flipped with probability of 0.5. + - ``flip_ratio`` is float, ``direction`` is list of string: the image wil + be ``direction[i]``ly flipped with probability of + ``flip_ratio/len(direction)``. + E.g., ``flip_ratio=0.5``, ``direction=['horizontal', 'vertical']``, + then image will be horizontally flipped with probability of 0.25, + vertically with probability of 0.25. + - ``flip_ratio`` is list of float, ``direction`` is list of string: + given ``len(flip_ratio) == len(direction)``, the image wil + be ``direction[i]``ly flipped with probability of ``flip_ratio[i]``. + E.g., ``flip_ratio=[0.3, 0.5]``, ``direction=['horizontal', + 'vertical']``, then image will be horizontally flipped with probability + of 0.3, vertically with probability of 0.5 + + Args: + flip_ratio (float | list[float], optional): The flipping probability. + Default: None. + direction(str | list[str], optional): The flipping direction. Options + are 'horizontal', 'vertical', 'diagonal'. Default: 'horizontal'. + If input is a list, the length must equal ``flip_ratio``. Each + element in ``flip_ratio`` indicates the flip probability of + corresponding direction. 
+ """ + + def __init__(self, flip_ratio=None, direction='horizontal'): + if isinstance(flip_ratio, list): + assert mmcv.is_list_of(flip_ratio, float) + assert 0 <= sum(flip_ratio) <= 1 + elif isinstance(flip_ratio, float): + assert 0 <= flip_ratio <= 1 + elif flip_ratio is None: + pass + else: + raise ValueError('flip_ratios must be None, float, ' + 'or list of float') + self.flip_ratio = flip_ratio + + valid_directions = ['horizontal', 'vertical', 'diagonal'] + if isinstance(direction, str): + assert direction in valid_directions + elif isinstance(direction, list): + assert mmcv.is_list_of(direction, str) + assert set(direction).issubset(set(valid_directions)) + else: + raise ValueError('direction must be either str or list of str') + self.direction = direction + + if isinstance(flip_ratio, list): + assert len(self.flip_ratio) == len(self.direction) + + def bbox_flip(self, bboxes, img_shape, direction): + """Flip bboxes horizontally. + + Args: + bboxes (numpy.ndarray): Bounding boxes, shape (..., 4*k) + img_shape (tuple[int]): Image shape (height, width) + direction (str): Flip direction. Options are 'horizontal', + 'vertical'. + + Returns: + numpy.ndarray: Flipped bounding boxes. + """ + + assert bboxes.shape[-1] % 4 == 0 + flipped = bboxes.copy() + if direction == 'horizontal': + w = img_shape[1] + flipped[..., 0::4] = w - bboxes[..., 2::4] + flipped[..., 2::4] = w - bboxes[..., 0::4] + elif direction == 'vertical': + h = img_shape[0] + flipped[..., 1::4] = h - bboxes[..., 3::4] + flipped[..., 3::4] = h - bboxes[..., 1::4] + elif direction == 'diagonal': + w = img_shape[1] + h = img_shape[0] + flipped[..., 0::4] = w - bboxes[..., 2::4] + flipped[..., 1::4] = h - bboxes[..., 3::4] + flipped[..., 2::4] = w - bboxes[..., 0::4] + flipped[..., 3::4] = h - bboxes[..., 1::4] + else: + raise ValueError(f"Invalid flipping direction '{direction}'") + return flipped + + def __call__(self, results): + """Call function to flip bounding boxes, masks, semantic segmentation + maps. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Flipped results, 'flip', 'flip_direction' keys are added \ + into result dict. 
+ """ + + if 'flip' not in results: + if isinstance(self.direction, list): + # None means non-flip + direction_list = self.direction + [None] + else: + # None means non-flip + direction_list = [self.direction, None] + + if isinstance(self.flip_ratio, list): + non_flip_ratio = 1 - sum(self.flip_ratio) + flip_ratio_list = self.flip_ratio + [non_flip_ratio] + else: + non_flip_ratio = 1 - self.flip_ratio + # exclude non-flip + single_ratio = self.flip_ratio / (len(direction_list) - 1) + flip_ratio_list = [single_ratio] * (len(direction_list) - + 1) + [non_flip_ratio] + + cur_dir = np.random.choice(direction_list, p=flip_ratio_list) + + results['flip'] = cur_dir is not None + if 'flip_direction' not in results: + results['flip_direction'] = cur_dir + if results['flip']: + # flip image + for key in results.get('img_fields', ['img']): + results[key] = mmcv.imflip( + results[key], direction=results['flip_direction']) + # flip bboxes + for key in results.get('bbox_fields', []): + results[key] = self.bbox_flip(results[key], + results['img_shape'], + results['flip_direction']) + # flip masks + for key in results.get('mask_fields', []): + results[key] = results[key].flip(results['flip_direction']) + + # flip segs + for key in results.get('seg_fields', []): + results[key] = mmcv.imflip( + results[key], direction=results['flip_direction']) + return results + + def __repr__(self): + return self.__class__.__name__ + f'(flip_ratio={self.flip_ratio})' + + +@PIPELINES.register_module() +class RandomShift: + """Shift the image and box given shift pixels and probability. + + Args: + shift_ratio (float): Probability of shifts. Default 0.5. + max_shift_px (int): The max pixels for shifting. Default 32. + filter_thr_px (int): The width and height threshold for filtering. + The bbox and the rest of the targets below the width and + height threshold will be filtered. Default 1. + """ + + def __init__(self, shift_ratio=0.5, max_shift_px=32, filter_thr_px=1): + assert 0 <= shift_ratio <= 1 + assert max_shift_px >= 0 + self.shift_ratio = shift_ratio + self.max_shift_px = max_shift_px + self.filter_thr_px = int(filter_thr_px) + # The key correspondence from bboxes to labels. + self.bbox2label = { + 'gt_bboxes': 'gt_labels', + 'gt_bboxes_ignore': 'gt_labels_ignore' + } + + def __call__(self, results): + """Call function to random shift images, bounding boxes. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Shift results. + """ + if random.random() < self.shift_ratio: + img_shape = results['img'].shape[:2] + + random_shift_x = random.randint(-self.max_shift_px, + self.max_shift_px) + random_shift_y = random.randint(-self.max_shift_px, + self.max_shift_px) + new_x = max(0, random_shift_x) + orig_x = max(0, -random_shift_x) + new_y = max(0, random_shift_y) + orig_y = max(0, -random_shift_y) + + # TODO: support mask and semantic segmentation maps. + for key in results.get('bbox_fields', []): + bboxes = results[key].copy() + bboxes[..., 0::2] += random_shift_x + bboxes[..., 1::2] += random_shift_y + + # clip border + bboxes[..., 0::2] = np.clip(bboxes[..., 0::2], 0, img_shape[1]) + bboxes[..., 1::2] = np.clip(bboxes[..., 1::2], 0, img_shape[0]) + + # remove invalid bboxes + bbox_w = bboxes[..., 2] - bboxes[..., 0] + bbox_h = bboxes[..., 3] - bboxes[..., 1] + valid_inds = (bbox_w > self.filter_thr_px) & ( + bbox_h > self.filter_thr_px) + # If the shift does not contain any gt-bbox area, skip this + # image. 
+ if key == 'gt_bboxes' and not valid_inds.any(): + return results + bboxes = bboxes[valid_inds] + results[key] = bboxes + + # label fields. e.g. gt_labels and gt_labels_ignore + label_key = self.bbox2label.get(key) + if label_key in results: + results[label_key] = results[label_key][valid_inds] + + for key in results.get('img_fields', ['img']): + img = results[key] + new_img = np.zeros_like(img) + img_h, img_w = img.shape[:2] + new_h = img_h - np.abs(random_shift_y) + new_w = img_w - np.abs(random_shift_x) + new_img[new_y:new_y + new_h, new_x:new_x + new_w] \ + = img[orig_y:orig_y + new_h, orig_x:orig_x + new_w] + results[key] = new_img + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(max_shift_px={self.max_shift_px}, ' + return repr_str + + +@PIPELINES.register_module() +class Pad: + """Pad the image & mask. + + There are two padding modes: (1) pad to a fixed size and (2) pad to the + minimum size that is divisible by some number. + Added keys are "pad_shape", "pad_fixed_size", "pad_size_divisor", + + Args: + size (tuple, optional): Fixed padding size. + size_divisor (int, optional): The divisor of padded size. + pad_val (float, optional): Padding value, 0 by default. + """ + + def __init__(self, size=None, size_divisor=None, pad_val=0): + self.size = size + self.size_divisor = size_divisor + self.pad_val = pad_val + # only one of size and size_divisor should be valid + assert size is not None or size_divisor is not None + assert size is None or size_divisor is None + + def _pad_img(self, results): + """Pad images according to ``self.size``.""" + for key in results.get('img_fields', ['img']): + if self.size is not None: + padded_img = mmcv.impad( + results[key], shape=self.size, pad_val=self.pad_val) + elif self.size_divisor is not None: + padded_img = mmcv.impad_to_multiple( + results[key], self.size_divisor, pad_val=self.pad_val) + results[key] = padded_img + results['pad_shape'] = padded_img.shape + results['pad_fixed_size'] = self.size + results['pad_size_divisor'] = self.size_divisor + + def _pad_masks(self, results): + """Pad masks according to ``results['pad_shape']``.""" + pad_shape = results['pad_shape'][:2] + for key in results.get('mask_fields', []): + results[key] = results[key].pad(pad_shape, pad_val=self.pad_val) + + def _pad_seg(self, results): + """Pad semantic segmentation map according to + ``results['pad_shape']``.""" + for key in results.get('seg_fields', []): + results[key] = mmcv.impad( + results[key], shape=results['pad_shape'][:2]) + + def __call__(self, results): + """Call function to pad images, masks, semantic segmentation maps. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Updated result dict. + """ + self._pad_img(results) + self._pad_masks(results) + self._pad_seg(results) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(size={self.size}, ' + repr_str += f'size_divisor={self.size_divisor}, ' + repr_str += f'pad_val={self.pad_val})' + return repr_str + + +@PIPELINES.register_module() +class Normalize: + """Normalize the image. + + Added key is "img_norm_cfg". + + Args: + mean (sequence): Mean values of 3 channels. + std (sequence): Std values of 3 channels. + to_rgb (bool): Whether to convert the image from BGR to RGB, + default is true. 
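import math

# Editor's note: the size_divisor arithmetic used by Pad above; a 750x1333
# image padded with size_divisor=32 comes out as 768x1344:
divisor = 32
h, w = 750, 1333
pad_h = int(math.ceil(h / divisor)) * divisor  # 768
pad_w = int(math.ceil(w / divisor)) * divisor  # 1344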
+ """ + + def __init__(self, mean, std, to_rgb=True): + self.mean = np.array(mean, dtype=np.float32) + self.std = np.array(std, dtype=np.float32) + self.to_rgb = to_rgb + + def __call__(self, results): + """Call function to normalize images. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Normalized results, 'img_norm_cfg' key is added into + result dict. + """ + for key in results.get('img_fields', ['img']): + results[key] = mmcv.imnormalize(results[key], self.mean, self.std, + self.to_rgb) + results['img_norm_cfg'] = dict( + mean=self.mean, std=self.std, to_rgb=self.to_rgb) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(mean={self.mean}, std={self.std}, to_rgb={self.to_rgb})' + return repr_str + + +@PIPELINES.register_module() +class RandomCrop: + """Random crop the image & bboxes & masks. + + The absolute `crop_size` is sampled based on `crop_type` and `image_size`, + then the cropped results are generated. + + Args: + crop_size (tuple): The relative ratio or absolute pixels of + height and width. + crop_type (str, optional): one of "relative_range", "relative", + "absolute", "absolute_range". "relative" randomly crops + (h * crop_size[0], w * crop_size[1]) part from an input of size + (h, w). "relative_range" uniformly samples relative crop size from + range [crop_size[0], 1] and [crop_size[1], 1] for height and width + respectively. "absolute" crops from an input with absolute size + (crop_size[0], crop_size[1]). "absolute_range" uniformly samples + crop_h in range [crop_size[0], min(h, crop_size[1])] and crop_w + in range [crop_size[0], min(w, crop_size[1])]. Default "absolute". + allow_negative_crop (bool, optional): Whether to allow a crop that does + not contain any bbox area. Default False. + bbox_clip_border (bool, optional): Whether clip the objects outside + the border of the image. Defaults to True. + + Note: + - If the image is smaller than the absolute crop size, return the + original image. + - The keys for bboxes, labels and masks must be aligned. That is, + `gt_bboxes` corresponds to `gt_labels` and `gt_masks`, and + `gt_bboxes_ignore` corresponds to `gt_labels_ignore` and + `gt_masks_ignore`. + - If the crop does not contain any gt-bbox region and + `allow_negative_crop` is set to False, skip this image. + """ + + def __init__(self, + crop_size, + crop_type='absolute', + allow_negative_crop=False, + bbox_clip_border=True): + if crop_type not in [ + 'relative_range', 'relative', 'absolute', 'absolute_range' + ]: + raise ValueError(f'Invalid crop_type {crop_type}.') + if crop_type in ['absolute', 'absolute_range']: + assert crop_size[0] > 0 and crop_size[1] > 0 + assert isinstance(crop_size[0], int) and isinstance( + crop_size[1], int) + else: + assert 0 < crop_size[0] <= 1 and 0 < crop_size[1] <= 1 + self.crop_size = crop_size + self.crop_type = crop_type + self.allow_negative_crop = allow_negative_crop + self.bbox_clip_border = bbox_clip_border + # The key correspondence from bboxes to labels and masks. + self.bbox2label = { + 'gt_bboxes': 'gt_labels', + 'gt_bboxes_ignore': 'gt_labels_ignore' + } + self.bbox2mask = { + 'gt_bboxes': 'gt_masks', + 'gt_bboxes_ignore': 'gt_masks_ignore' + } + + def _crop_data(self, results, crop_size, allow_negative_crop): + """Function to randomly crop images, bounding boxes, masks, semantic + segmentation maps. + + Args: + results (dict): Result dict from loading pipeline. + crop_size (tuple): Expected absolute size after cropping, (h, w). 
+ allow_negative_crop (bool): Whether to allow a crop that does not + contain any bbox area. Default to False. + + Returns: + dict: Randomly cropped results, 'img_shape' key in result dict is + updated according to crop size. + """ + assert crop_size[0] > 0 and crop_size[1] > 0 + for key in results.get('img_fields', ['img']): + img = results[key] + margin_h = max(img.shape[0] - crop_size[0], 0) + margin_w = max(img.shape[1] - crop_size[1], 0) + offset_h = np.random.randint(0, margin_h + 1) + offset_w = np.random.randint(0, margin_w + 1) + crop_y1, crop_y2 = offset_h, offset_h + crop_size[0] + crop_x1, crop_x2 = offset_w, offset_w + crop_size[1] + + # crop the image + img = img[crop_y1:crop_y2, crop_x1:crop_x2, ...] + img_shape = img.shape + results[key] = img + results['img_shape'] = img_shape + + # crop bboxes accordingly and clip to the image boundary + for key in results.get('bbox_fields', []): + # e.g. gt_bboxes and gt_bboxes_ignore + bbox_offset = np.array([offset_w, offset_h, offset_w, offset_h], + dtype=np.float32) + bboxes = results[key] - bbox_offset + if self.bbox_clip_border: + bboxes[:, 0::2] = np.clip(bboxes[:, 0::2], 0, img_shape[1]) + bboxes[:, 1::2] = np.clip(bboxes[:, 1::2], 0, img_shape[0]) + valid_inds = (bboxes[:, 2] > bboxes[:, 0]) & ( + bboxes[:, 3] > bboxes[:, 1]) + # If the crop does not contain any gt-bbox area and + # allow_negative_crop is False, skip this image. + if (key == 'gt_bboxes' and not valid_inds.any() + and not allow_negative_crop): + return None + results[key] = bboxes[valid_inds, :] + # label fields. e.g. gt_labels and gt_labels_ignore + label_key = self.bbox2label.get(key) + if label_key in results: + results[label_key] = results[label_key][valid_inds] + + # mask fields, e.g. gt_masks and gt_masks_ignore + mask_key = self.bbox2mask.get(key) + if mask_key in results: + results[mask_key] = results[mask_key][ + valid_inds.nonzero()[0]].crop( + np.asarray([crop_x1, crop_y1, crop_x2, crop_y2])) + + # crop semantic seg + for key in results.get('seg_fields', []): + results[key] = results[key][crop_y1:crop_y2, crop_x1:crop_x2] + + return results + + def _get_crop_size(self, image_size): + """Randomly generates the absolute crop size based on `crop_type` and + `image_size`. + + Args: + image_size (tuple): (h, w). + + Returns: + crop_size (tuple): (crop_h, crop_w) in absolute pixels. + """ + h, w = image_size + if self.crop_type == 'absolute': + return (min(self.crop_size[0], h), min(self.crop_size[1], w)) + elif self.crop_type == 'absolute_range': + assert self.crop_size[0] <= self.crop_size[1] + crop_h = np.random.randint( + min(h, self.crop_size[0]), + min(h, self.crop_size[1]) + 1) + crop_w = np.random.randint( + min(w, self.crop_size[0]), + min(w, self.crop_size[1]) + 1) + return crop_h, crop_w + elif self.crop_type == 'relative': + crop_h, crop_w = self.crop_size + return int(h * crop_h + 0.5), int(w * crop_w + 0.5) + elif self.crop_type == 'relative_range': + crop_size = np.asarray(self.crop_size, dtype=np.float32) + crop_h, crop_w = crop_size + np.random.rand(2) * (1 - crop_size) + return int(h * crop_h + 0.5), int(w * crop_w + 0.5) + + def __call__(self, results): + """Call function to randomly crop images, bounding boxes, masks, + semantic segmentation maps. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Randomly cropped results, 'img_shape' key in result dict is + updated according to crop size. 
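# Editor's note: what _get_crop_size above returns for an (h, w) = (600, 900)
# input, per crop_type (numbers illustrative):
#   'absolute'       crop_size=(512, 512) -> (512, 512), clipped to the image
#   'relative'       crop_size=(0.5, 0.5) -> (300, 450)
#   'absolute_range' crop_size=(384, 640) -> h ~ U[384, 600], w ~ U[384, 640]
#   'relative_range' crop_size=(0.5, 0.5) -> per-side ratio ~ U[0.5, 1.0]
h, w = 600, 900
print(int(h * 0.5 + 0.5), int(w * 0.5 + 0.5))  # -> 300 450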
+ """ + image_size = results['img'].shape[:2] + crop_size = self._get_crop_size(image_size) + results = self._crop_data(results, crop_size, self.allow_negative_crop) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(crop_size={self.crop_size}, ' + repr_str += f'crop_type={self.crop_type}, ' + repr_str += f'allow_negative_crop={self.allow_negative_crop}, ' + repr_str += f'bbox_clip_border={self.bbox_clip_border})' + return repr_str + + +@PIPELINES.register_module() +class SegRescale: + """Rescale semantic segmentation maps. + + Args: + scale_factor (float): The scale factor of the final output. + backend (str): Image rescale backend, choices are 'cv2' and 'pillow'. + These two backends generates slightly different results. Defaults + to 'cv2'. + """ + + def __init__(self, scale_factor=1, backend='cv2'): + self.scale_factor = scale_factor + self.backend = backend + + def __call__(self, results): + """Call function to scale the semantic segmentation map. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Result dict with semantic segmentation map scaled. + """ + + for key in results.get('seg_fields', []): + if self.scale_factor != 1: + results[key] = mmcv.imrescale( + results[key], + self.scale_factor, + interpolation='nearest', + backend=self.backend) + return results + + def __repr__(self): + return self.__class__.__name__ + f'(scale_factor={self.scale_factor})' + + +@PIPELINES.register_module() +class PhotoMetricDistortion: + """Apply photometric distortion to image sequentially, every transformation + is applied with a probability of 0.5. The position of random contrast is in + second or second to last. + + 1. random brightness + 2. random contrast (mode 0) + 3. convert color from BGR to HSV + 4. random saturation + 5. random hue + 6. convert color from HSV to BGR + 7. random contrast (mode 1) + 8. randomly swap channels + + Args: + brightness_delta (int): delta of brightness. + contrast_range (tuple): range of contrast. + saturation_range (tuple): range of saturation. + hue_delta (int): delta of hue. + """ + + def __init__(self, + brightness_delta=32, + contrast_range=(0.5, 1.5), + saturation_range=(0.5, 1.5), + hue_delta=18): + self.brightness_delta = brightness_delta + self.contrast_lower, self.contrast_upper = contrast_range + self.saturation_lower, self.saturation_upper = saturation_range + self.hue_delta = hue_delta + + def __call__(self, results): + """Call function to perform photometric distortion on images. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Result dict with images distorted. 
+ """ + + if 'img_fields' in results: + assert results['img_fields'] == ['img'], \ + 'Only single img_fields is allowed' + img = results['img'] + assert img.dtype == np.float32, \ + 'PhotoMetricDistortion needs the input image of dtype np.float32,'\ + ' please set "to_float32=True" in "LoadImageFromFile" pipeline' + # random brightness + if random.randint(2): + delta = random.uniform(-self.brightness_delta, + self.brightness_delta) + img += delta + + # mode == 0 --> do random contrast first + # mode == 1 --> do random contrast last + mode = random.randint(2) + if mode == 1: + if random.randint(2): + alpha = random.uniform(self.contrast_lower, + self.contrast_upper) + img *= alpha + + # convert color from BGR to HSV + img = mmcv.bgr2hsv(img) + + # random saturation + if random.randint(2): + img[..., 1] *= random.uniform(self.saturation_lower, + self.saturation_upper) + + # random hue + if random.randint(2): + img[..., 0] += random.uniform(-self.hue_delta, self.hue_delta) + img[..., 0][img[..., 0] > 360] -= 360 + img[..., 0][img[..., 0] < 0] += 360 + + # convert color from HSV to BGR + img = mmcv.hsv2bgr(img) + + # random contrast + if mode == 0: + if random.randint(2): + alpha = random.uniform(self.contrast_lower, + self.contrast_upper) + img *= alpha + + # randomly swap channels + if random.randint(2): + img = img[..., random.permutation(3)] + + results['img'] = img + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(\nbrightness_delta={self.brightness_delta},\n' + repr_str += 'contrast_range=' + repr_str += f'{(self.contrast_lower, self.contrast_upper)},\n' + repr_str += 'saturation_range=' + repr_str += f'{(self.saturation_lower, self.saturation_upper)},\n' + repr_str += f'hue_delta={self.hue_delta})' + return repr_str + + +@PIPELINES.register_module() +class Expand: + """Random expand the image & bboxes. + + Randomly place the original image on a canvas of 'ratio' x original image + size filled with mean values. The ratio is in the range of ratio_range. + + Args: + mean (tuple): mean value of dataset. + to_rgb (bool): if need to convert the order of mean to align with RGB. + ratio_range (tuple): range of expand ratio. + prob (float): probability of applying this transformation + """ + + def __init__(self, + mean=(0, 0, 0), + to_rgb=True, + ratio_range=(1, 4), + seg_ignore_label=None, + prob=0.5): + self.to_rgb = to_rgb + self.ratio_range = ratio_range + if to_rgb: + self.mean = mean[::-1] + else: + self.mean = mean + self.min_ratio, self.max_ratio = ratio_range + self.seg_ignore_label = seg_ignore_label + self.prob = prob + + def __call__(self, results): + """Call function to expand images, bounding boxes. + + Args: + results (dict): Result dict from loading pipeline. 
+ + Returns: + dict: Result dict with images, bounding boxes expanded + """ + + if random.uniform(0, 1) > self.prob: + return results + + if 'img_fields' in results: + assert results['img_fields'] == ['img'], \ + 'Only single img_fields is allowed' + img = results['img'] + + h, w, c = img.shape + ratio = random.uniform(self.min_ratio, self.max_ratio) + # speedup expand when meets large image + if np.all(self.mean == self.mean[0]): + expand_img = np.empty((int(h * ratio), int(w * ratio), c), + img.dtype) + expand_img.fill(self.mean[0]) + else: + expand_img = np.full((int(h * ratio), int(w * ratio), c), + self.mean, + dtype=img.dtype) + left = int(random.uniform(0, w * ratio - w)) + top = int(random.uniform(0, h * ratio - h)) + expand_img[top:top + h, left:left + w] = img + + results['img'] = expand_img + # expand bboxes + for key in results.get('bbox_fields', []): + results[key] = results[key] + np.tile( + (left, top), 2).astype(results[key].dtype) + + # expand masks + for key in results.get('mask_fields', []): + results[key] = results[key].expand( + int(h * ratio), int(w * ratio), top, left) + + # expand segs + for key in results.get('seg_fields', []): + gt_seg = results[key] + expand_gt_seg = np.full((int(h * ratio), int(w * ratio)), + self.seg_ignore_label, + dtype=gt_seg.dtype) + expand_gt_seg[top:top + h, left:left + w] = gt_seg + results[key] = expand_gt_seg + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(mean={self.mean}, to_rgb={self.to_rgb}, ' + repr_str += f'ratio_range={self.ratio_range}, ' + repr_str += f'seg_ignore_label={self.seg_ignore_label})' + return repr_str + + +@PIPELINES.register_module() +class MinIoURandomCrop: + """Random crop the image & bboxes, the cropped patches have minimum IoU + requirement with original image & bboxes, the IoU threshold is randomly + selected from min_ious. + + Args: + min_ious (tuple): minimum IoU threshold for all intersections with + bounding boxes + min_crop_size (float): minimum crop's size (i.e. h,w := a*h, a*w, + where a >= min_crop_size). + bbox_clip_border (bool, optional): Whether clip the objects outside + the border of the image. Defaults to True. + + Note: + The keys for bboxes, labels and masks should be paired. That is, \ + `gt_bboxes` corresponds to `gt_labels` and `gt_masks`, and \ + `gt_bboxes_ignore` to `gt_labels_ignore` and `gt_masks_ignore`. + """ + + def __init__(self, + min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), + min_crop_size=0.3, + bbox_clip_border=True): + # 1: return ori img + self.min_ious = min_ious + self.sample_mode = (1, *min_ious, 0) + self.min_crop_size = min_crop_size + self.bbox_clip_border = bbox_clip_border + self.bbox2label = { + 'gt_bboxes': 'gt_labels', + 'gt_bboxes_ignore': 'gt_labels_ignore' + } + self.bbox2mask = { + 'gt_bboxes': 'gt_masks', + 'gt_bboxes_ignore': 'gt_masks_ignore' + } + + def __call__(self, results): + """Call function to crop images and bounding boxes with minimum IoU + constraint. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Result dict with images and bounding boxes cropped, \ + 'img_shape' key is updated. 
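import numpy as np

# Editor's note: the Expand geometry from above. The original image is pasted
# at (top, left) on the enlarged canvas, so every box shifts by the same
# (left, top, left, top) offset:
left, top = 50, 30
box = np.array([10., 20., 60., 80.])
print(box + np.tile((left, top), 2))  # -> [ 60.  50. 110. 110.]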
+ """ + + if 'img_fields' in results: + assert results['img_fields'] == ['img'], \ + 'Only single img_fields is allowed' + img = results['img'] + assert 'bbox_fields' in results + boxes = [results[key] for key in results['bbox_fields']] + boxes = np.concatenate(boxes, 0) + h, w, c = img.shape + while True: + mode = random.choice(self.sample_mode) + self.mode = mode + if mode == 1: + return results + + min_iou = mode + for i in range(50): + new_w = random.uniform(self.min_crop_size * w, w) + new_h = random.uniform(self.min_crop_size * h, h) + + # h / w in [0.5, 2] + if new_h / new_w < 0.5 or new_h / new_w > 2: + continue + + left = random.uniform(w - new_w) + top = random.uniform(h - new_h) + + patch = np.array( + (int(left), int(top), int(left + new_w), int(top + new_h))) + # Line or point crop is not allowed + if patch[2] == patch[0] or patch[3] == patch[1]: + continue + overlaps = bbox_overlaps( + patch.reshape(-1, 4), boxes.reshape(-1, 4)).reshape(-1) + if len(overlaps) > 0 and overlaps.min() < min_iou: + continue + + # center of boxes should inside the crop img + # only adjust boxes and instance masks when the gt is not empty + if len(overlaps) > 0: + # adjust boxes + def is_center_of_bboxes_in_patch(boxes, patch): + center = (boxes[:, :2] + boxes[:, 2:]) / 2 + mask = ((center[:, 0] > patch[0]) * + (center[:, 1] > patch[1]) * + (center[:, 0] < patch[2]) * + (center[:, 1] < patch[3])) + return mask + + mask = is_center_of_bboxes_in_patch(boxes, patch) + if not mask.any(): + continue + for key in results.get('bbox_fields', []): + boxes = results[key].copy() + mask = is_center_of_bboxes_in_patch(boxes, patch) + boxes = boxes[mask] + if self.bbox_clip_border: + boxes[:, 2:] = boxes[:, 2:].clip(max=patch[2:]) + boxes[:, :2] = boxes[:, :2].clip(min=patch[:2]) + boxes -= np.tile(patch[:2], 2) + + results[key] = boxes + # labels + label_key = self.bbox2label.get(key) + if label_key in results: + results[label_key] = results[label_key][mask] + + # mask fields + mask_key = self.bbox2mask.get(key) + if mask_key in results: + results[mask_key] = results[mask_key][ + mask.nonzero()[0]].crop(patch) + # adjust the img no matter whether the gt is empty before crop + img = img[patch[1]:patch[3], patch[0]:patch[2]] + results['img'] = img + results['img_shape'] = img.shape + + # seg fields + for key in results.get('seg_fields', []): + results[key] = results[key][patch[1]:patch[3], + patch[0]:patch[2]] + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(min_ious={self.min_ious}, ' + repr_str += f'min_crop_size={self.min_crop_size}, ' + repr_str += f'bbox_clip_border={self.bbox_clip_border})' + return repr_str + + +@PIPELINES.register_module() +class Corrupt: + """Corruption augmentation. + + Corruption transforms implemented based on + `imagecorruptions `_. + + Args: + corruption (str): Corruption name. + severity (int, optional): The severity of corruption. Default: 1. + """ + + def __init__(self, corruption, severity=1): + self.corruption = corruption + self.severity = severity + + def __call__(self, results): + """Call function to corrupt image. + + Args: + results (dict): Result dict from loading pipeline. + + Returns: + dict: Result dict with images corrupted. 
+ """ + + if corrupt is None: + raise RuntimeError('imagecorruptions is not installed') + if 'img_fields' in results: + assert results['img_fields'] == ['img'], \ + 'Only single img_fields is allowed' + results['img'] = corrupt( + results['img'].astype(np.uint8), + corruption_name=self.corruption, + severity=self.severity) + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(corruption={self.corruption}, ' + repr_str += f'severity={self.severity})' + return repr_str + + +@PIPELINES.register_module() +class Albu: + """Albumentation augmentation. + + Adds custom transformations from Albumentations library. + Please, visit `https://albumentations.readthedocs.io` + to get more information. + + An example of ``transforms`` is as followed: + + .. code-block:: + + [ + dict( + type='ShiftScaleRotate', + shift_limit=0.0625, + scale_limit=0.0, + rotate_limit=0, + interpolation=1, + p=0.5), + dict( + type='RandomBrightnessContrast', + brightness_limit=[0.1, 0.3], + contrast_limit=[0.1, 0.3], + p=0.2), + dict(type='ChannelShuffle', p=0.1), + dict( + type='OneOf', + transforms=[ + dict(type='Blur', blur_limit=3, p=1.0), + dict(type='MedianBlur', blur_limit=3, p=1.0) + ], + p=0.1), + ] + + Args: + transforms (list[dict]): A list of albu transformations + bbox_params (dict): Bbox_params for albumentation `Compose` + keymap (dict): Contains {'input key':'albumentation-style key'} + skip_img_without_anno (bool): Whether to skip the image if no ann left + after aug + """ + + def __init__(self, + transforms, + bbox_params=None, + keymap=None, + update_pad_shape=False, + skip_img_without_anno=False): + if Compose is None: + raise RuntimeError('albumentations is not installed') + + # Args will be modified later, copying it will be safer + transforms = copy.deepcopy(transforms) + if bbox_params is not None: + bbox_params = copy.deepcopy(bbox_params) + if keymap is not None: + keymap = copy.deepcopy(keymap) + self.transforms = transforms + self.filter_lost_elements = False + self.update_pad_shape = update_pad_shape + self.skip_img_without_anno = skip_img_without_anno + + # A simple workaround to remove masks without boxes + if (isinstance(bbox_params, dict) and 'label_fields' in bbox_params + and 'filter_lost_elements' in bbox_params): + self.filter_lost_elements = True + self.origin_label_fields = bbox_params['label_fields'] + bbox_params['label_fields'] = ['idx_mapper'] + del bbox_params['filter_lost_elements'] + + self.bbox_params = ( + self.albu_builder(bbox_params) if bbox_params else None) + self.aug = Compose([self.albu_builder(t) for t in self.transforms], + bbox_params=self.bbox_params) + + if not keymap: + self.keymap_to_albu = { + 'img': 'image', + 'gt_masks': 'masks', + 'gt_bboxes': 'bboxes' + } + else: + self.keymap_to_albu = keymap + self.keymap_back = {v: k for k, v in self.keymap_to_albu.items()} + + def albu_builder(self, cfg): + """Import a module from albumentations. + + It inherits some of :func:`build_from_cfg` logic. + + Args: + cfg (dict): Config dict. It should at least contain the key "type". + + Returns: + obj: The constructed object. 
+ """ + + assert isinstance(cfg, dict) and 'type' in cfg + args = cfg.copy() + + obj_type = args.pop('type') + if mmcv.is_str(obj_type): + if albumentations is None: + raise RuntimeError('albumentations is not installed') + obj_cls = getattr(albumentations, obj_type) + elif inspect.isclass(obj_type): + obj_cls = obj_type + else: + raise TypeError( + f'type must be a str or valid type, but got {type(obj_type)}') + + if 'transforms' in args: + args['transforms'] = [ + self.albu_builder(transform) + for transform in args['transforms'] + ] + + return obj_cls(**args) + + @staticmethod + def mapper(d, keymap): + """Dictionary mapper. Renames keys according to keymap provided. + + Args: + d (dict): old dict + keymap (dict): {'old_key':'new_key'} + Returns: + dict: new dict. + """ + + updated_dict = {} + for k, v in zip(d.keys(), d.values()): + new_k = keymap.get(k, k) + updated_dict[new_k] = d[k] + return updated_dict + + def __call__(self, results): + # dict to albumentations format + results = self.mapper(results, self.keymap_to_albu) + # TODO: add bbox_fields + if 'bboxes' in results: + # to list of boxes + if isinstance(results['bboxes'], np.ndarray): + results['bboxes'] = [x for x in results['bboxes']] + # add pseudo-field for filtration + if self.filter_lost_elements: + results['idx_mapper'] = np.arange(len(results['bboxes'])) + + # TODO: Support mask structure in albu + if 'masks' in results: + if isinstance(results['masks'], PolygonMasks): + raise NotImplementedError( + 'Albu only supports BitMap masks now') + ori_masks = results['masks'] + if albumentations.__version__ < '0.5': + results['masks'] = results['masks'].masks + else: + results['masks'] = [mask for mask in results['masks'].masks] + + results = self.aug(**results) + + if 'bboxes' in results: + if isinstance(results['bboxes'], list): + results['bboxes'] = np.array( + results['bboxes'], dtype=np.float32) + results['bboxes'] = results['bboxes'].reshape(-1, 4) + + # filter label_fields + if self.filter_lost_elements: + + for label in self.origin_label_fields: + results[label] = np.array( + [results[label][i] for i in results['idx_mapper']]) + if 'masks' in results: + results['masks'] = np.array( + [results['masks'][i] for i in results['idx_mapper']]) + results['masks'] = ori_masks.__class__( + results['masks'], results['image'].shape[0], + results['image'].shape[1]) + + if (not len(results['idx_mapper']) + and self.skip_img_without_anno): + return None + + if 'gt_labels' in results: + if isinstance(results['gt_labels'], list): + results['gt_labels'] = np.array(results['gt_labels']) + results['gt_labels'] = results['gt_labels'].astype(np.int64) + + # back to the original format + results = self.mapper(results, self.keymap_back) + + # update final shape + if self.update_pad_shape: + results['pad_shape'] = results['img'].shape + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + f'(transforms={self.transforms})' + return repr_str + + +@PIPELINES.register_module() +class RandomCenterCropPad: + """Random center crop and random around padding for CornerNet. + + This operation generates randomly cropped image from the original image and + pads it simultaneously. Different from :class:`RandomCrop`, the output + shape may not equal to ``crop_size`` strictly. We choose a random value + from ``ratios`` and the output shape could be larger or smaller than + ``crop_size``. The padding operation is also different from :class:`Pad`, + here we use around padding instead of right-bottom padding. 
+ + The relation between output image (padding image) and original image: + + .. code:: text + + output image + + +----------------------------+ + | padded area | + +------|----------------------------|----------+ + | | cropped area | | + | | +---------------+ | | + | | | . center | | | original image + | | | range | | | + | | +---------------+ | | + +------|----------------------------|----------+ + | padded area | + +----------------------------+ + + There are 5 main areas in the figure: + + - output image: output image of this operation, also called padding + image in following instruction. + - original image: input image of this operation. + - padded area: non-intersect area of output image and original image. + - cropped area: the overlap of output image and original image. + - center range: a smaller area where random center chosen from. + center range is computed by ``border`` and original image's shape + to avoid our random center is too close to original image's border. + + Also this operation act differently in train and test mode, the summary + pipeline is listed below. + + Train pipeline: + + 1. Choose a ``random_ratio`` from ``ratios``, the shape of padding image + will be ``random_ratio * crop_size``. + 2. Choose a ``random_center`` in center range. + 3. Generate padding image with center matches the ``random_center``. + 4. Initialize the padding image with pixel value equals to ``mean``. + 5. Copy the cropped area to padding image. + 6. Refine annotations. + + Test pipeline: + + 1. Compute output shape according to ``test_pad_mode``. + 2. Generate padding image with center matches the original image + center. + 3. Initialize the padding image with pixel value equals to ``mean``. + 4. Copy the ``cropped area`` to padding image. + + Args: + crop_size (tuple | None): expected size after crop, final size will + computed according to ratio. Requires (h, w) in train mode, and + None in test mode. + ratios (tuple): random select a ratio from tuple and crop image to + (crop_size[0] * ratio) * (crop_size[1] * ratio). + Only available in train mode. + border (int): max distance from center select area to image border. + Only available in train mode. + mean (sequence): Mean values of 3 channels. + std (sequence): Std values of 3 channels. + to_rgb (bool): Whether to convert the image from BGR to RGB. + test_mode (bool): whether involve random variables in transform. + In train mode, crop_size is fixed, center coords and ratio is + random selected from predefined lists. In test mode, crop_size + is image's original shape, center coords and ratio is fixed. + test_pad_mode (tuple): padding method and padding shape value, only + available in test mode. Default is using 'logical_or' with + 127 as padding shape value. + + - 'logical_or': final_shape = input_shape | padding_shape_value + - 'size_divisor': final_shape = int( + ceil(input_shape / padding_shape_value) * padding_shape_value) + test_pad_add_pix (int): Extra padding pixel in test mode. Default 0. + bbox_clip_border (bool, optional): Whether clip the objects outside + the border of the image. Defaults to True. 
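# Editor's note: sketch configs for the two modes documented above. Values
# are illustrative; mean/std/to_rgb must mirror the Normalize step of the
# same pipeline, as the class's own asserts insist.
train_cfg = dict(
    type='RandomCenterCropPad', crop_size=(511, 511),
    ratios=(0.6, 0.8, 1.0, 1.2, 1.4), border=128,
    mean=[0, 0, 0], std=[1, 1, 1], to_rgb=True,
    test_mode=False, test_pad_mode=None)
test_cfg = dict(
    type='RandomCenterCropPad', crop_size=None, ratios=None, border=None,
    mean=[0, 0, 0], std=[1, 1, 1], to_rgb=True,
    test_mode=True, test_pad_mode=('logical_or', 127))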
+ """ + + def __init__(self, + crop_size=None, + ratios=(0.9, 1.0, 1.1), + border=128, + mean=None, + std=None, + to_rgb=None, + test_mode=False, + test_pad_mode=('logical_or', 127), + test_pad_add_pix=0, + bbox_clip_border=True): + if test_mode: + assert crop_size is None, 'crop_size must be None in test mode' + assert ratios is None, 'ratios must be None in test mode' + assert border is None, 'border must be None in test mode' + assert isinstance(test_pad_mode, (list, tuple)) + assert test_pad_mode[0] in ['logical_or', 'size_divisor'] + else: + assert isinstance(crop_size, (list, tuple)) + assert crop_size[0] > 0 and crop_size[1] > 0, ( + 'crop_size must > 0 in train mode') + assert isinstance(ratios, (list, tuple)) + assert test_pad_mode is None, ( + 'test_pad_mode must be None in train mode') + + self.crop_size = crop_size + self.ratios = ratios + self.border = border + # We do not set default value to mean, std and to_rgb because these + # hyper-parameters are easy to forget but could affect the performance. + # Please use the same setting as Normalize for performance assurance. + assert mean is not None and std is not None and to_rgb is not None + self.to_rgb = to_rgb + self.input_mean = mean + self.input_std = std + if to_rgb: + self.mean = mean[::-1] + self.std = std[::-1] + else: + self.mean = mean + self.std = std + self.test_mode = test_mode + self.test_pad_mode = test_pad_mode + self.test_pad_add_pix = test_pad_add_pix + self.bbox_clip_border = bbox_clip_border + + def _get_border(self, border, size): + """Get final border for the target size. + + This function generates a ``final_border`` according to image's shape. + The area between ``final_border`` and ``size - final_border`` is the + ``center range``. We randomly choose center from the ``center range`` + to avoid our random center is too close to original image's border. + Also ``center range`` should be larger than 0. + + Args: + border (int): The initial border, default is 128. + size (int): The width or height of original image. + Returns: + int: The final border. + """ + k = 2 * border / size + i = pow(2, np.ceil(np.log2(np.ceil(k))) + (k == int(k))) + return border // i + + def _filter_boxes(self, patch, boxes): + """Check whether the center of each box is in the patch. + + Args: + patch (list[int]): The cropped area, [left, top, right, bottom]. + boxes (numpy array, (N x 4)): Ground truth boxes. + + Returns: + mask (numpy array, (N,)): Each box is inside or outside the patch. + """ + center = (boxes[:, :2] + boxes[:, 2:]) / 2 + mask = (center[:, 0] > patch[0]) * (center[:, 1] > patch[1]) * ( + center[:, 0] < patch[2]) * ( + center[:, 1] < patch[3]) + return mask + + def _crop_image_and_paste(self, image, center, size): + """Crop image with a given center and size, then paste the cropped + image to a blank image with two centers align. + + This function is equivalent to generating a blank image with ``size`` + as its shape. Then cover it on the original image with two centers ( + the center of blank image and the random center of original image) + aligned. The overlap area is paste from the original image and the + outside area is filled with ``mean pixel``. + + Args: + image (np array, H x W x C): Original image. + center (list[int]): Target crop center coord. + size (list[int]): Target crop size. [target_h, target_w] + + Returns: + cropped_img (np array, target_h x target_w x C): Cropped image. 
+ border (np array, 4): The distance of four border of + ``cropped_img`` to the original image area, [top, bottom, + left, right] + patch (list[int]): The cropped area, [left, top, right, bottom]. + """ + center_y, center_x = center + target_h, target_w = size + img_h, img_w, img_c = image.shape + + x0 = max(0, center_x - target_w // 2) + x1 = min(center_x + target_w // 2, img_w) + y0 = max(0, center_y - target_h // 2) + y1 = min(center_y + target_h // 2, img_h) + patch = np.array((int(x0), int(y0), int(x1), int(y1))) + + left, right = center_x - x0, x1 - center_x + top, bottom = center_y - y0, y1 - center_y + + cropped_center_y, cropped_center_x = target_h // 2, target_w // 2 + cropped_img = np.zeros((target_h, target_w, img_c), dtype=image.dtype) + for i in range(img_c): + cropped_img[:, :, i] += self.mean[i] + y_slice = slice(cropped_center_y - top, cropped_center_y + bottom) + x_slice = slice(cropped_center_x - left, cropped_center_x + right) + cropped_img[y_slice, x_slice, :] = image[y0:y1, x0:x1, :] + + border = np.array([ + cropped_center_y - top, cropped_center_y + bottom, + cropped_center_x - left, cropped_center_x + right + ], + dtype=np.float32) + + return cropped_img, border, patch + + def _train_aug(self, results): + """Random crop and around padding the original image. + + Args: + results (dict): Image infomations in the augment pipeline. + + Returns: + results (dict): The updated dict. + """ + img = results['img'] + h, w, c = img.shape + boxes = results['gt_bboxes'] + while True: + scale = random.choice(self.ratios) + new_h = int(self.crop_size[0] * scale) + new_w = int(self.crop_size[1] * scale) + h_border = self._get_border(self.border, h) + w_border = self._get_border(self.border, w) + + for i in range(50): + center_x = random.randint(low=w_border, high=w - w_border) + center_y = random.randint(low=h_border, high=h - h_border) + + cropped_img, border, patch = self._crop_image_and_paste( + img, [center_y, center_x], [new_h, new_w]) + + mask = self._filter_boxes(patch, boxes) + # if image do not have valid bbox, any crop patch is valid. + if not mask.any() and len(boxes) > 0: + continue + + results['img'] = cropped_img + results['img_shape'] = cropped_img.shape + results['pad_shape'] = cropped_img.shape + + x0, y0, x1, y1 = patch + + left_w, top_h = center_x - x0, center_y - y0 + cropped_center_x, cropped_center_y = new_w // 2, new_h // 2 + + # crop bboxes accordingly and clip to the image boundary + for key in results.get('bbox_fields', []): + mask = self._filter_boxes(patch, results[key]) + bboxes = results[key][mask] + bboxes[:, 0:4:2] += cropped_center_x - left_w - x0 + bboxes[:, 1:4:2] += cropped_center_y - top_h - y0 + if self.bbox_clip_border: + bboxes[:, 0:4:2] = np.clip(bboxes[:, 0:4:2], 0, new_w) + bboxes[:, 1:4:2] = np.clip(bboxes[:, 1:4:2], 0, new_h) + keep = (bboxes[:, 2] > bboxes[:, 0]) & ( + bboxes[:, 3] > bboxes[:, 1]) + bboxes = bboxes[keep] + results[key] = bboxes + if key in ['gt_bboxes']: + if 'gt_labels' in results: + labels = results['gt_labels'][mask] + labels = labels[keep] + results['gt_labels'] = labels + if 'gt_masks' in results: + raise NotImplementedError( + 'RandomCenterCropPad only supports bbox.') + + # crop semantic seg + for key in results.get('seg_fields', []): + raise NotImplementedError( + 'RandomCenterCropPad only supports bbox.') + return results + + def _test_aug(self, results): + """Around padding the original image without cropping. + + The padding mode and value are from ``test_pad_mode``. 
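# Editor's note: the 'logical_or' padding arithmetic used in _test_aug below.
# OR-ing with 127 rounds each side up to the next multiple of 128, minus 1:
h, w = 500, 700
print(h | 127, w | 127)  # -> 511 767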
+ + Args: + results (dict): Image infomations in the augment pipeline. + + Returns: + results (dict): The updated dict. + """ + img = results['img'] + h, w, c = img.shape + results['img_shape'] = img.shape + if self.test_pad_mode[0] in ['logical_or']: + # self.test_pad_add_pix is only used for centernet + target_h = (h | self.test_pad_mode[1]) + self.test_pad_add_pix + target_w = (w | self.test_pad_mode[1]) + self.test_pad_add_pix + elif self.test_pad_mode[0] in ['size_divisor']: + divisor = self.test_pad_mode[1] + target_h = int(np.ceil(h / divisor)) * divisor + target_w = int(np.ceil(w / divisor)) * divisor + else: + raise NotImplementedError( + 'RandomCenterCropPad only support two testing pad mode:' + 'logical-or and size_divisor.') + + cropped_img, border, _ = self._crop_image_and_paste( + img, [h // 2, w // 2], [target_h, target_w]) + results['img'] = cropped_img + results['pad_shape'] = cropped_img.shape + results['border'] = border + return results + + def __call__(self, results): + img = results['img'] + assert img.dtype == np.float32, ( + 'RandomCenterCropPad needs the input image of dtype np.float32,' + ' please set "to_float32=True" in "LoadImageFromFile" pipeline') + h, w, c = img.shape + assert c == len(self.mean) + if self.test_mode: + return self._test_aug(results) + else: + return self._train_aug(results) + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(crop_size={self.crop_size}, ' + repr_str += f'ratios={self.ratios}, ' + repr_str += f'border={self.border}, ' + repr_str += f'mean={self.input_mean}, ' + repr_str += f'std={self.input_std}, ' + repr_str += f'to_rgb={self.to_rgb}, ' + repr_str += f'test_mode={self.test_mode}, ' + repr_str += f'test_pad_mode={self.test_pad_mode}, ' + repr_str += f'bbox_clip_border={self.bbox_clip_border})' + return repr_str + + +@PIPELINES.register_module() +class CutOut: + """CutOut operation. + + Randomly drop some regions of image used in + `Cutout `_. + + Args: + n_holes (int | tuple[int, int]): Number of regions to be dropped. + If it is given as a list, number of holes will be randomly + selected from the closed interval [`n_holes[0]`, `n_holes[1]`]. + cutout_shape (tuple[int, int] | list[tuple[int, int]]): The candidate + shape of dropped regions. It can be `tuple[int, int]` to use a + fixed cutout shape, or `list[tuple[int, int]]` to randomly choose + shape from the list. + cutout_ratio (tuple[float, float] | list[tuple[float, float]]): The + candidate ratio of dropped regions. It can be `tuple[float, float]` + to use a fixed ratio or `list[tuple[float, float]]` to randomly + choose ratio from the list. Please note that `cutout_shape` + and `cutout_ratio` cannot be both given at the same time. + fill_in (tuple[float, float, float] | tuple[int, int, int]): The value + of pixel to fill in the dropped regions. Default: (0, 0, 0). + """ + + def __init__(self, + n_holes, + cutout_shape=None, + cutout_ratio=None, + fill_in=(0, 0, 0)): + + assert (cutout_shape is None) ^ (cutout_ratio is None), \ + 'Either cutout_shape or cutout_ratio should be specified.' 
+ assert (isinstance(cutout_shape, (list, tuple)) + or isinstance(cutout_ratio, (list, tuple))) + if isinstance(n_holes, tuple): + assert len(n_holes) == 2 and 0 <= n_holes[0] < n_holes[1] + else: + n_holes = (n_holes, n_holes) + self.n_holes = n_holes + self.fill_in = fill_in + self.with_ratio = cutout_ratio is not None + self.candidates = cutout_ratio if self.with_ratio else cutout_shape + if not isinstance(self.candidates, list): + self.candidates = [self.candidates] + + def __call__(self, results): + """Call function to drop some regions of image.""" + h, w, c = results['img'].shape + n_holes = np.random.randint(self.n_holes[0], self.n_holes[1] + 1) + for _ in range(n_holes): + x1 = np.random.randint(0, w) + y1 = np.random.randint(0, h) + index = np.random.randint(0, len(self.candidates)) + if not self.with_ratio: + cutout_w, cutout_h = self.candidates[index] + else: + cutout_w = int(self.candidates[index][0] * w) + cutout_h = int(self.candidates[index][1] * h) + + x2 = np.clip(x1 + cutout_w, 0, w) + y2 = np.clip(y1 + cutout_h, 0, h) + results['img'][y1:y2, x1:x2, :] = self.fill_in + + return results + + def __repr__(self): + repr_str = self.__class__.__name__ + repr_str += f'(n_holes={self.n_holes}, ' + repr_str += (f'cutout_ratio={self.candidates}, ' if self.with_ratio + else f'cutout_shape={self.candidates}, ') + repr_str += f'fill_in={self.fill_in})' + return repr_str diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/samplers/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/samplers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..2596aeb2ccfc85b58624713c04453d34e94a4062 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/samplers/__init__.py @@ -0,0 +1,4 @@ +from .distributed_sampler import DistributedSampler +from .group_sampler import DistributedGroupSampler, GroupSampler + +__all__ = ['DistributedSampler', 'DistributedGroupSampler', 'GroupSampler'] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/samplers/distributed_sampler.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/samplers/distributed_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..cc61019484655ee2829f7908dc442caa20cf1d54 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/samplers/distributed_sampler.py @@ -0,0 +1,39 @@ +import math + +import torch +from torch.utils.data import DistributedSampler as _DistributedSampler + + +class DistributedSampler(_DistributedSampler): + + def __init__(self, + dataset, + num_replicas=None, + rank=None, + shuffle=True, + seed=0): + super().__init__( + dataset, num_replicas=num_replicas, rank=rank, shuffle=shuffle) + # for the compatibility from PyTorch 1.3+ + self.seed = seed if seed is not None else 0 + + def __iter__(self): + # deterministically shuffle based on epoch + if self.shuffle: + g = torch.Generator() + g.manual_seed(self.epoch + self.seed) + indices = torch.randperm(len(self.dataset), generator=g).tolist() + else: + indices = torch.arange(len(self.dataset)).tolist() + + # add extra samples to make it evenly divisible + # in case that indices is shorter than half of total_size + indices = (indices * + math.ceil(self.total_size / len(indices)))[:self.total_size] + assert len(indices) == self.total_size + + # subsample + indices = indices[self.rank:self.total_size:self.num_replicas] + assert len(indices) == self.num_samples + + return iter(indices) diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/samplers/group_sampler.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/samplers/group_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..f88cf3439446a2eb7d8656388ddbe93196315f5b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/samplers/group_sampler.py @@ -0,0 +1,148 @@ +from __future__ import division +import math + +import numpy as np +import torch +from mmcv.runner import get_dist_info +from torch.utils.data import Sampler + + +class GroupSampler(Sampler): + + def __init__(self, dataset, samples_per_gpu=1): + assert hasattr(dataset, 'flag') + self.dataset = dataset + self.samples_per_gpu = samples_per_gpu + self.flag = dataset.flag.astype(np.int64) + self.group_sizes = np.bincount(self.flag) + self.num_samples = 0 + for i, size in enumerate(self.group_sizes): + self.num_samples += int(np.ceil( + size / self.samples_per_gpu)) * self.samples_per_gpu + + def __iter__(self): + indices = [] + for i, size in enumerate(self.group_sizes): + if size == 0: + continue + indice = np.where(self.flag == i)[0] + assert len(indice) == size + np.random.shuffle(indice) + num_extra = int(np.ceil(size / self.samples_per_gpu) + ) * self.samples_per_gpu - len(indice) + indice = np.concatenate( + [indice, np.random.choice(indice, num_extra)]) + indices.append(indice) + indices = np.concatenate(indices) + indices = [ + indices[i * self.samples_per_gpu:(i + 1) * self.samples_per_gpu] + for i in np.random.permutation( + range(len(indices) // self.samples_per_gpu)) + ] + indices = np.concatenate(indices) + indices = indices.astype(np.int64).tolist() + assert len(indices) == self.num_samples + return iter(indices) + + def __len__(self): + return self.num_samples + + +class DistributedGroupSampler(Sampler): + """Sampler that restricts data loading to a subset of the dataset. + + It is especially useful in conjunction with + :class:`torch.nn.parallel.DistributedDataParallel`. In such case, each + process can pass a DistributedSampler instance as a DataLoader sampler, + and load a subset of the original dataset that is exclusive to it. + + .. note:: + Dataset is assumed to be of constant size. + + Arguments: + dataset: Dataset used for sampling. + num_replicas (optional): Number of processes participating in + distributed training. + rank (optional): Rank of the current process within num_replicas. + seed (int, optional): random seed used to shuffle the sampler if + ``shuffle=True``. This number should be identical across all + processes in the distributed group. Default: 0. 
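A condensed, runnable sketch of the epoch-seeded shuffling that `DistributedSampler.__iter__` above implements (the function name and toy sizes are illustrative): every rank draws the same permutation because the generator seed is `seed + epoch`, the index list is padded until it divides evenly, and each rank keeps a disjoint strided slice.

import torch

def rank_indices(n, rank, num_replicas, epoch, seed=0):
    g = torch.Generator()
    g.manual_seed(seed + epoch)                 # identical on every rank
    perm = torch.randperm(n, generator=g).tolist()
    total = ((n + num_replicas - 1) // num_replicas) * num_replicas
    perm = (perm * (total // n + 1))[:total]    # pad to an even split
    return perm[rank:total:num_replicas]        # disjoint strided slice

parts = [rank_indices(10, r, num_replicas=4, epoch=3) for r in range(4)]
assert all(len(p) == 3 for p in parts)          # equal shards per rank
assert set(sum(parts, [])) == set(range(10))    # shards cover the dataset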
+ """ + + def __init__(self, + dataset, + samples_per_gpu=1, + num_replicas=None, + rank=None, + seed=0): + _rank, _num_replicas = get_dist_info() + if num_replicas is None: + num_replicas = _num_replicas + if rank is None: + rank = _rank + self.dataset = dataset + self.samples_per_gpu = samples_per_gpu + self.num_replicas = num_replicas + self.rank = rank + self.epoch = 0 + self.seed = seed if seed is not None else 0 + + assert hasattr(self.dataset, 'flag') + self.flag = self.dataset.flag + self.group_sizes = np.bincount(self.flag) + + self.num_samples = 0 + for i, j in enumerate(self.group_sizes): + self.num_samples += int( + math.ceil(self.group_sizes[i] * 1.0 / self.samples_per_gpu / + self.num_replicas)) * self.samples_per_gpu + self.total_size = self.num_samples * self.num_replicas + + def __iter__(self): + # deterministically shuffle based on epoch + g = torch.Generator() + g.manual_seed(self.epoch + self.seed) + + indices = [] + for i, size in enumerate(self.group_sizes): + if size > 0: + indice = np.where(self.flag == i)[0] + assert len(indice) == size + # add .numpy() to avoid bug when selecting indice in parrots. + # TODO: check whether torch.randperm() can be replaced by + # numpy.random.permutation(). + indice = indice[list( + torch.randperm(int(size), generator=g).numpy())].tolist() + extra = int( + math.ceil( + size * 1.0 / self.samples_per_gpu / self.num_replicas) + ) * self.samples_per_gpu * self.num_replicas - len(indice) + # pad indice + tmp = indice.copy() + for _ in range(extra // size): + indice.extend(tmp) + indice.extend(tmp[:extra % size]) + indices.extend(indice) + + assert len(indices) == self.total_size + + indices = [ + indices[j] for i in list( + torch.randperm( + len(indices) // self.samples_per_gpu, generator=g)) + for j in range(i * self.samples_per_gpu, (i + 1) * + self.samples_per_gpu) + ] + + # subsample + offset = self.num_samples * self.rank + indices = indices[offset:offset + self.num_samples] + assert len(indices) == self.num_samples + + return iter(indices) + + def __len__(self): + return self.num_samples + + def set_epoch(self, epoch): + self.epoch = epoch diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/utils.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..4eb6423190f517b1d70564b5e87e50b54e949ca1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/utils.py @@ -0,0 +1,163 @@ +import copy +import warnings + +from mmcv.cnn import VGG +from mmcv.runner.hooks import HOOKS, Hook + +from mmdet.datasets.builder import PIPELINES +from mmdet.datasets.pipelines import LoadAnnotations, LoadImageFromFile +from mmdet.models.dense_heads import GARPNHead, RPNHead +from mmdet.models.roi_heads.mask_heads import FusedSemanticHead + + +def replace_ImageToTensor(pipelines): + """Replace the ImageToTensor transform in a data pipeline to + DefaultFormatBundle, which is normally useful in batch inference. + + Args: + pipelines (list[dict]): Data pipeline configs. + + Returns: + list: The new pipeline list with all ImageToTensor replaced by + DefaultFormatBundle. + + Examples: + >>> pipelines = [ + ... dict(type='LoadImageFromFile'), + ... dict( + ... type='MultiScaleFlipAug', + ... img_scale=(1333, 800), + ... flip=False, + ... transforms=[ + ... dict(type='Resize', keep_ratio=True), + ... dict(type='RandomFlip'), + ... dict(type='Normalize', mean=[0, 0, 0], std=[1, 1, 1]), + ... dict(type='Pad', size_divisor=32), + ... 
dict(type='ImageToTensor', keys=['img']), + ... dict(type='Collect', keys=['img']), + ... ]) + ... ] + >>> expected_pipelines = [ + ... dict(type='LoadImageFromFile'), + ... dict( + ... type='MultiScaleFlipAug', + ... img_scale=(1333, 800), + ... flip=False, + ... transforms=[ + ... dict(type='Resize', keep_ratio=True), + ... dict(type='RandomFlip'), + ... dict(type='Normalize', mean=[0, 0, 0], std=[1, 1, 1]), + ... dict(type='Pad', size_divisor=32), + ... dict(type='DefaultFormatBundle'), + ... dict(type='Collect', keys=['img']), + ... ]) + ... ] + >>> assert expected_pipelines == replace_ImageToTensor(pipelines) + """ + pipelines = copy.deepcopy(pipelines) + for i, pipeline in enumerate(pipelines): + if pipeline['type'] == 'MultiScaleFlipAug': + assert 'transforms' in pipeline + pipeline['transforms'] = replace_ImageToTensor( + pipeline['transforms']) + elif pipeline['type'] == 'ImageToTensor': + warnings.warn( + '"ImageToTensor" pipeline is replaced by ' + '"DefaultFormatBundle" for batch inference. It is ' + 'recommended to manually replace it in the test ' + 'data pipeline in your config file.', UserWarning) + pipelines[i] = {'type': 'DefaultFormatBundle'} + return pipelines + + +def get_loading_pipeline(pipeline): + """Only keep loading image and annotations related configuration. + + Args: + pipeline (list[dict]): Data pipeline configs. + + Returns: + list[dict]: The new pipeline list with only keep + loading image and annotations related configuration. + + Examples: + >>> pipelines = [ + ... dict(type='LoadImageFromFile'), + ... dict(type='LoadAnnotations', with_bbox=True), + ... dict(type='Resize', img_scale=(1333, 800), keep_ratio=True), + ... dict(type='RandomFlip', flip_ratio=0.5), + ... dict(type='Normalize', **img_norm_cfg), + ... dict(type='Pad', size_divisor=32), + ... dict(type='DefaultFormatBundle'), + ... dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']) + ... ] + >>> expected_pipelines = [ + ... dict(type='LoadImageFromFile'), + ... dict(type='LoadAnnotations', with_bbox=True) + ... ] + >>> assert expected_pipelines ==\ + ... get_loading_pipeline(pipelines) + """ + loading_pipeline_cfg = [] + for cfg in pipeline: + obj_cls = PIPELINES.get(cfg['type']) + # TODO:use more elegant way to distinguish loading modules + if obj_cls is not None and obj_cls in (LoadImageFromFile, + LoadAnnotations): + loading_pipeline_cfg.append(cfg) + assert len(loading_pipeline_cfg) == 2, \ + 'The data pipeline in your config file must include ' \ + 'loading image and annotations related pipeline.' + return loading_pipeline_cfg + + +@HOOKS.register_module() +class NumClassCheckHook(Hook): + + def _check_head(self, runner): + """Check whether the `num_classes` in head matches the length of + `CLASSSES` in `dataset`. + + Args: + runner (obj:`EpochBasedRunner`): Epoch based Runner. + """ + model = runner.model + dataset = runner.data_loader.dataset + if dataset.CLASSES is None: + runner.logger.warning( + f'Please set `CLASSES` ' + f'in the {dataset.__class__.__name__} and' + f'check if it is consistent with the `num_classes` ' + f'of head') + else: + assert type(dataset.CLASSES) is not str, \ + (f'`CLASSES` in {dataset.__class__.__name__}' + f'should be a tuple of str.' 
+                 f' Add comma if there is only one class, e.g. '
+                 f'CLASSES = ({dataset.CLASSES},)')
+            for name, module in model.named_modules():
+                if hasattr(module, 'num_classes') and not isinstance(
+                        module, (RPNHead, VGG, FusedSemanticHead, GARPNHead)):
+                    assert module.num_classes == len(dataset.CLASSES), \
+                        (f'The `num_classes` ({module.num_classes}) in '
+                         f'{module.__class__.__name__} of '
+                         f'{model.__class__.__name__} does not match '
+                         f'the length of `CLASSES` '
+                         f'({len(dataset.CLASSES)}) in '
+                         f'{dataset.__class__.__name__}')
+
+    def before_train_epoch(self, runner):
+        """Check whether the training dataset is compatible with head.
+
+        Args:
+            runner (obj:`EpochBasedRunner`): Epoch based Runner.
+        """
+        self._check_head(runner)
+
+    def before_val_epoch(self, runner):
+        """Check whether the dataset in val epoch is compatible with head.
+
+        Args:
+            runner (obj:`EpochBasedRunner`): Epoch based Runner.
+        """
+        self._check_head(runner)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/voc.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/voc.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a74695567e358e52a27e6bcfb8c7e70a27cf092
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/voc.py
@@ -0,0 +1,93 @@
+from collections import OrderedDict
+
+from mmcv.utils import print_log
+
+from mmdet.core import eval_map, eval_recalls
+from .builder import DATASETS
+from .xml_style import XMLDataset
+
+
+@DATASETS.register_module()
+class VOCDataset(XMLDataset):
+
+    CLASSES = ('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car',
+               'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse',
+               'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train',
+               'tvmonitor')
+
+    def __init__(self, **kwargs):
+        super(VOCDataset, self).__init__(**kwargs)
+        if 'VOC2007' in self.img_prefix:
+            self.year = 2007
+        elif 'VOC2012' in self.img_prefix:
+            self.year = 2012
+        else:
+            raise ValueError('Cannot infer dataset year from img_prefix')
+
+    def evaluate(self,
+                 results,
+                 metric='mAP',
+                 logger=None,
+                 proposal_nums=(100, 300, 1000),
+                 iou_thr=0.5,
+                 scale_ranges=None):
+        """Evaluate in VOC protocol.
+
+        Args:
+            results (list[list | tuple]): Testing results of the dataset.
+            metric (str | list[str]): Metrics to be evaluated. Options are
+                'mAP', 'recall'.
+            logger (logging.Logger | str, optional): Logger used for printing
+                related information during evaluation. Default: None.
+            proposal_nums (Sequence[int]): Proposal number used for evaluating
+                recalls, such as recall@100, recall@1000.
+                Default: (100, 300, 1000).
+            iou_thr (float | list[float]): IoU threshold. Default: 0.5.
+            scale_ranges (list[tuple], optional): Scale ranges for evaluating
+                mAP. If not specified, all bounding boxes would be included in
+                evaluation. Default: None.
+
+        Returns:
+            dict[str, float]: AP/recall metrics.
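Backing up to `NumClassCheckHook` above, a toy illustration of the invariant it enforces; all names below are stand-ins, not mmdet API:

class ToyHead:                               # stands in for a bbox head
    num_classes = 20

CLASSES = ('aeroplane', 'bicycle', 'bird')   # truncated class tuple

def check_head(head, classes):
    assert not isinstance(classes, str), \
        'CLASSES must be a tuple of str; write ("person",) for one class'
    assert head.num_classes == len(classes), (
        f'num_classes ({head.num_classes}) does not match '
        f'len(CLASSES) ({len(classes)})')

# check_head(ToyHead(), CLASSES) would raise here: 20 != 3.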
+ """ + + if not isinstance(metric, str): + assert len(metric) == 1 + metric = metric[0] + allowed_metrics = ['mAP', 'recall'] + if metric not in allowed_metrics: + raise KeyError(f'metric {metric} is not supported') + annotations = [self.get_ann_info(i) for i in range(len(self))] + eval_results = OrderedDict() + iou_thrs = [iou_thr] if isinstance(iou_thr, float) else iou_thr + if metric == 'mAP': + assert isinstance(iou_thrs, list) + if self.year == 2007: + ds_name = 'voc07' + else: + ds_name = self.CLASSES + mean_aps = [] + for iou_thr in iou_thrs: + print_log(f'\n{"-" * 15}iou_thr: {iou_thr}{"-" * 15}') + mean_ap, _ = eval_map( + results, + annotations, + scale_ranges=None, + iou_thr=iou_thr, + dataset=ds_name, + logger=logger) + mean_aps.append(mean_ap) + eval_results[f'AP{int(iou_thr * 100):02d}'] = round(mean_ap, 3) + eval_results['mAP'] = sum(mean_aps) / len(mean_aps) + elif metric == 'recall': + gt_bboxes = [ann['bboxes'] for ann in annotations] + recalls = eval_recalls( + gt_bboxes, results, proposal_nums, iou_thrs, logger=logger) + for i, num in enumerate(proposal_nums): + for j, iou_thr in enumerate(iou_thrs): + eval_results[f'recall@{num}@{iou_thr}'] = recalls[i, j] + if recalls.shape[1] > 1: + ar = recalls.mean(axis=1) + for i, num in enumerate(proposal_nums): + eval_results[f'AR@{num}'] = ar[i] + return eval_results diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/wider_face.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/wider_face.py new file mode 100644 index 0000000000000000000000000000000000000000..3a13907db87a9986a7d701837259a0b712fc9dca --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/wider_face.py @@ -0,0 +1,51 @@ +import os.path as osp +import xml.etree.ElementTree as ET + +import mmcv + +from .builder import DATASETS +from .xml_style import XMLDataset + + +@DATASETS.register_module() +class WIDERFaceDataset(XMLDataset): + """Reader for the WIDER Face dataset in PASCAL VOC format. + + Conversion scripts can be found in + https://github.com/sovrasov/wider-face-pascal-voc-annotations + """ + CLASSES = ('face', ) + + def __init__(self, **kwargs): + super(WIDERFaceDataset, self).__init__(**kwargs) + + def load_annotations(self, ann_file): + """Load annotation from WIDERFace XML style annotation file. + + Args: + ann_file (str): Path of XML file. + + Returns: + list[dict]: Annotation info from XML file. 
+ """ + + data_infos = [] + img_ids = mmcv.list_from_file(ann_file) + for img_id in img_ids: + filename = f'{img_id}.jpg' + xml_path = osp.join(self.img_prefix, 'Annotations', + f'{img_id}.xml') + tree = ET.parse(xml_path) + root = tree.getroot() + size = root.find('size') + width = int(size.find('width').text) + height = int(size.find('height').text) + folder = root.find('folder').text + data_infos.append( + dict( + id=img_id, + filename=osp.join(folder, filename), + width=width, + height=height)) + + return data_infos diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/xml_style.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/xml_style.py new file mode 100644 index 0000000000000000000000000000000000000000..71069488b0f6da3b37e588228f44460ce5f00679 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/datasets/xml_style.py @@ -0,0 +1,170 @@ +import os.path as osp +import xml.etree.ElementTree as ET + +import mmcv +import numpy as np +from PIL import Image + +from .builder import DATASETS +from .custom import CustomDataset + + +@DATASETS.register_module() +class XMLDataset(CustomDataset): + """XML dataset for detection. + + Args: + min_size (int | float, optional): The minimum size of bounding + boxes in the images. If the size of a bounding box is less than + ``min_size``, it would be add to ignored field. + """ + + def __init__(self, min_size=None, **kwargs): + assert self.CLASSES or kwargs.get( + 'classes', None), 'CLASSES in `XMLDataset` can not be None.' + super(XMLDataset, self).__init__(**kwargs) + self.cat2label = {cat: i for i, cat in enumerate(self.CLASSES)} + self.min_size = min_size + + def load_annotations(self, ann_file): + """Load annotation from XML style ann_file. + + Args: + ann_file (str): Path of XML file. + + Returns: + list[dict]: Annotation info from XML file. + """ + + data_infos = [] + img_ids = mmcv.list_from_file(ann_file) + for img_id in img_ids: + filename = f'JPEGImages/{img_id}.jpg' + xml_path = osp.join(self.img_prefix, 'Annotations', + f'{img_id}.xml') + tree = ET.parse(xml_path) + root = tree.getroot() + size = root.find('size') + if size is not None: + width = int(size.find('width').text) + height = int(size.find('height').text) + else: + img_path = osp.join(self.img_prefix, 'JPEGImages', + '{}.jpg'.format(img_id)) + img = Image.open(img_path) + width, height = img.size + data_infos.append( + dict(id=img_id, filename=filename, width=width, height=height)) + + return data_infos + + def _filter_imgs(self, min_size=32): + """Filter images too small or without annotation.""" + valid_inds = [] + for i, img_info in enumerate(self.data_infos): + if min(img_info['width'], img_info['height']) < min_size: + continue + if self.filter_empty_gt: + img_id = img_info['id'] + xml_path = osp.join(self.img_prefix, 'Annotations', + f'{img_id}.xml') + tree = ET.parse(xml_path) + root = tree.getroot() + for obj in root.findall('object'): + name = obj.find('name').text + if name in self.CLASSES: + valid_inds.append(i) + break + else: + valid_inds.append(i) + return valid_inds + + def get_ann_info(self, idx): + """Get annotation from XML file by index. + + Args: + idx (int): Index of data. + + Returns: + dict: Annotation info of specified index. 
+ """ + + img_id = self.data_infos[idx]['id'] + xml_path = osp.join(self.img_prefix, 'Annotations', f'{img_id}.xml') + tree = ET.parse(xml_path) + root = tree.getroot() + bboxes = [] + labels = [] + bboxes_ignore = [] + labels_ignore = [] + for obj in root.findall('object'): + name = obj.find('name').text + if name not in self.CLASSES: + continue + label = self.cat2label[name] + difficult = obj.find('difficult') + difficult = 0 if difficult is None else int(difficult.text) + bnd_box = obj.find('bndbox') + # TODO: check whether it is necessary to use int + # Coordinates may be float type + bbox = [ + int(float(bnd_box.find('xmin').text)), + int(float(bnd_box.find('ymin').text)), + int(float(bnd_box.find('xmax').text)), + int(float(bnd_box.find('ymax').text)) + ] + ignore = False + if self.min_size: + assert not self.test_mode + w = bbox[2] - bbox[0] + h = bbox[3] - bbox[1] + if w < self.min_size or h < self.min_size: + ignore = True + if difficult or ignore: + bboxes_ignore.append(bbox) + labels_ignore.append(label) + else: + bboxes.append(bbox) + labels.append(label) + if not bboxes: + bboxes = np.zeros((0, 4)) + labels = np.zeros((0, )) + else: + bboxes = np.array(bboxes, ndmin=2) - 1 + labels = np.array(labels) + if not bboxes_ignore: + bboxes_ignore = np.zeros((0, 4)) + labels_ignore = np.zeros((0, )) + else: + bboxes_ignore = np.array(bboxes_ignore, ndmin=2) - 1 + labels_ignore = np.array(labels_ignore) + ann = dict( + bboxes=bboxes.astype(np.float32), + labels=labels.astype(np.int64), + bboxes_ignore=bboxes_ignore.astype(np.float32), + labels_ignore=labels_ignore.astype(np.int64)) + return ann + + def get_cat_ids(self, idx): + """Get category ids in XML file by index. + + Args: + idx (int): Index of data. + + Returns: + list[int]: All categories in the image of specified index. 
+ """ + + cat_ids = [] + img_id = self.data_infos[idx]['id'] + xml_path = osp.join(self.img_prefix, 'Annotations', f'{img_id}.xml') + tree = ET.parse(xml_path) + root = tree.getroot() + for obj in root.findall('object'): + name = obj.find('name').text + if name not in self.CLASSES: + continue + label = self.cat2label[name] + cat_ids.append(label) + + return cat_ids diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..44ac99855ae52101c91be167fa78d8219fc47259 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/__init__.py @@ -0,0 +1,16 @@ +from .backbones import * # noqa: F401,F403 +from .builder import (BACKBONES, DETECTORS, HEADS, LOSSES, NECKS, + ROI_EXTRACTORS, SHARED_HEADS, build_backbone, + build_detector, build_head, build_loss, build_neck, + build_roi_extractor, build_shared_head) +from .dense_heads import * # noqa: F401,F403 +from .detectors import * # noqa: F401,F403 +from .losses import * # noqa: F401,F403 +from .necks import * # noqa: F401,F403 +from .roi_heads import * # noqa: F401,F403 + +__all__ = [ + 'BACKBONES', 'NECKS', 'ROI_EXTRACTORS', 'SHARED_HEADS', 'HEADS', 'LOSSES', + 'DETECTORS', 'build_backbone', 'build_neck', 'build_roi_extractor', + 'build_shared_head', 'build_head', 'build_loss', 'build_detector' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a28cedc8835e2eca07694701cf5665f6df98ff74 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/__init__.py @@ -0,0 +1,21 @@ +from .darknet import Darknet +from .detectors_resnet import DetectoRS_ResNet +from .detectors_resnext import DetectoRS_ResNeXt +from .hourglass import HourglassNet +from .hrnet import HRNet +from .mobilenet_v2 import MobileNetV2 +from .regnet import RegNet +from .res2net import Res2Net +from .resnest import ResNeSt +from .resnet import ResNet, ResNetV1d +from .resnext import ResNeXt +from .ssd_vgg import SSDVGG +from .trident_resnet import TridentResNet +from .swin_transformer import SwinTransformer +from .cbnet import CBResNet, CBRes2Net, CBSwinTransformer + +__all__ = [ + 'RegNet', 'ResNet', 'ResNetV1d', 'ResNeXt', 'SSDVGG', 'HRNet', 'Res2Net', + 'HourglassNet', 'DetectoRS_ResNet', 'DetectoRS_ResNeXt', 'Darknet', + 'ResNeSt', 'TridentResNet', 'SwinTransformer', 'CBResNet', 'CBRes2Net', 'CBSwinTransformer' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/cbnet.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/cbnet.py new file mode 100644 index 0000000000000000000000000000000000000000..51041a8bb9288a5bae2c31a36c0bef907ed7df00 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/cbnet.py @@ -0,0 +1,383 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from mmcv.cnn import constant_init +from mmdet.utils import get_root_logger +from ..builder import BACKBONES +from .resnet import ResNet, build_norm_layer, _BatchNorm +from .res2net import Res2Net +from .swin_transformer import SwinTransformer + +from mmcv.runner import BaseModule +''' +For CNN +''' +class _CBSubnet(BaseModule): + def _freeze_stages(self): + if self.frozen_stages >= 0: + if self.deep_stem and 
hasattr(self, 'stem'): + self.stem.eval() + for param in self.stem.parameters(): + param.requires_grad = False + elif hasattr(self, 'conv1'): + self.norm1.eval() + for m in [self.conv1, self.norm1]: + for param in m.parameters(): + param.requires_grad = False + + for i in range(1, self.frozen_stages + 1): + if not hasattr(self, f'layer{i}'): + continue + m = getattr(self, f'layer{i}') + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def del_layers(self, del_stages): + self.del_stages = del_stages + if self.del_stages>=0: + if self.deep_stem: + del self.stem + else: + del self.conv1 + + for i in range(1, self.del_stages+1): + delattr(self, f'layer{i}') + + def forward(self, x, cb_feats=None, pre_outs=None): + """Forward function.""" + spatial_info = [] + outs = [] + + if self.deep_stem and hasattr(self, 'stem'): + x = self.stem(x) + x = self.maxpool(x) + elif hasattr(self, 'conv1'): + x = self.conv1(x) + x = self.norm1(x) + x = self.relu(x) + x = self.maxpool(x) + else: + x = pre_outs[0] + outs.append(x) + + for i, layer_name in enumerate(self.res_layers): + if hasattr(self, layer_name): + res_layer = getattr(self, layer_name) + spatial_info.append(x.shape[2:]) + if cb_feats is not None: + x = x + cb_feats[i] + x = res_layer(x) + else: + x = pre_outs[i+1] + outs.append(x) + return tuple(outs), spatial_info + + def train(self, mode=True): + """Convert the model into training mode while keep layers freezed.""" + super().train(mode) + self._freeze_stages() + +class _ResNet(_CBSubnet, ResNet): + def __init__(self, **kwargs): + _CBSubnet.__init__(self) + ResNet.__init__(self, **kwargs) + +class _Res2Net(_CBSubnet, Res2Net): + def __init__(self, **kwargs): + _CBSubnet.__init__(self) + Res2Net.__init__(self, **kwargs) + +class _CBNet(BaseModule): + def _freeze_stages(self): + for m in self.cb_modules: + m._freeze_stages() + + def init_cb_weights(self): + raise NotImplementedError + + def init_weights(self): + self.init_cb_weights() + for m in self.cb_modules: + m.init_weights() + + def _get_cb_feats(self, feats, spatial_info): + raise NotImplementedError + + def forward(self, x): + outs_list = [] + for i, module in enumerate(self.cb_modules): + if i == 0: + pre_outs, spatial_info = module(x) + else: + pre_outs, spatial_info = module(x, cb_feats, pre_outs) + + outs = [pre_outs[i+1] for i in self.out_indices] + outs_list.append(tuple(outs)) + + if i < len(self.cb_modules)-1: + cb_feats = self._get_cb_feats(pre_outs, spatial_info) + return tuple(outs_list) + + def train(self, mode=True): + """Convert the model into training mode while keep layers freezed.""" + super().train(mode) + for m in self.cb_modules: + m.train(mode=mode) + self._freeze_stages() + for m in self.cb_linears.modules(): + # trick: eval have effect on BatchNorm only + if isinstance(m, _BatchNorm): + m.eval() + +class _CBResNet(_CBNet): + def __init__(self, net, cb_inplanes, cb_zero_init=True, cb_del_stages=0, **kwargs): + super(_CBResNet, self).__init__() + self.cb_zero_init = cb_zero_init + self.cb_del_stages = cb_del_stages + + self.cb_modules = nn.ModuleList() + for cb_idx in range(2): + cb_module = net(**kwargs) + if cb_idx > 0: + cb_module.del_layers(self.cb_del_stages) + self.cb_modules.append(cb_module) + self.out_indices = self.cb_modules[0].out_indices + + self.cb_linears = nn.ModuleList() + self.num_layers = len(self.cb_modules[0].stage_blocks) + norm_cfg = self.cb_modules[0].norm_cfg + for i in range(self.num_layers): + linears = nn.ModuleList() + if i >= self.cb_del_stages: + jrange = 4 - i + 
for j in range(jrange): + linears.append( + nn.Sequential( + nn.Conv2d(cb_inplanes[i + j + 1], cb_inplanes[i], 1, bias=False), + build_norm_layer(norm_cfg, cb_inplanes[i])[1] + ) + ) + + self.cb_linears.append(linears) + + def init_cb_weights(self): + if self.cb_zero_init: + for ls in self.cb_linears: + for m in ls: + if isinstance(m, nn.Sequential): + constant_init(m[-1], 0) + else: + constant_init(m, 0) + + def _get_cb_feats(self, feats, spatial_info): + cb_feats = [] + for i in range(self.num_layers): + if i >= self.cb_del_stages: + h, w = spatial_info[i] + feeds = [] + jrange = 4 - i + for j in range(jrange): + tmp = self.cb_linears[i][j](feats[j + i + 1]) + tmp = F.interpolate(tmp, size=(h, w), mode='nearest') + feeds.append(tmp) + feed = torch.sum(torch.stack(feeds,dim=-1), dim=-1) + else: + feed = 0 + cb_feats.append(feed) + + return cb_feats + + +@BACKBONES.register_module() +class CBResNet(_CBResNet): + def __init__(self, **kwargs): + super().__init__(net=_ResNet, **kwargs) + +@BACKBONES.register_module() +class CBRes2Net(_CBResNet): + def __init__(self, **kwargs): + super().__init__(net=_Res2Net, **kwargs) + + +''' +For Swin Transformer +''' +class _SwinTransformer(SwinTransformer): + def _freeze_stages(self): + if self.frozen_stages >= 0 and hasattr(self, 'patch_embed'): + self.patch_embed.eval() + for param in self.patch_embed.parameters(): + param.requires_grad = False + + if self.frozen_stages >= 1 and self.ape: + self.absolute_pos_embed.requires_grad = False + + if self.frozen_stages >= 2: + self.pos_drop.eval() + for i in range(0, self.frozen_stages - 1): + m = self.layers[i] + if m is None: + continue + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def del_layers(self, del_stages): + self.del_stages = del_stages + if self.del_stages>=0: + del self.patch_embed + + if self.del_stages >=1 and self.ape: + del self.absolute_pos_embed + + for i in range(0, self.del_stages - 1): + self.layers[i] = None + + def forward(self, x, cb_feats=None, pre_tmps=None): + """Forward function.""" + outs = [] + tmps = [] + if hasattr(self, 'patch_embed'): + x = self.patch_embed(x) + + Wh, Ww = x.size(2), x.size(3) + if self.ape: + # interpolate the position embedding to the corresponding size + absolute_pos_embed = F.interpolate( + self.absolute_pos_embed, size=(Wh, Ww), mode='bicubic') + x = (x + absolute_pos_embed).flatten(2).transpose(1, 2) # B Wh*Ww C + else: + x = x.flatten(2).transpose(1, 2) + x = self.pos_drop(x) + + tmps.append((x, Wh, Ww)) + else: + x, Wh, Ww = pre_tmps[0] + + for i in range(self.num_layers): + layer = self.layers[i] + if layer is None: + x_out, H, W, x, Wh, Ww = pre_tmps[i+1] + else: + if cb_feats is not None: + x = x + cb_feats[i] + x_out, H, W, x, Wh, Ww = layer(x, Wh, Ww) + tmps.append((x_out, H, W, x, Wh, Ww)) + + if i in self.out_indices: + norm_layer = getattr(self, f'norm{i}') + x_out = norm_layer(x_out) + + out = x_out.view(-1, H, W, + self.num_features[i]).permute(0, 3, 1, 2).contiguous() + outs.append(out) + + return tuple(outs), tmps + + def train(self, mode=True): + """Convert the model into training mode while keep layers freezed.""" + super(_SwinTransformer, self).train(mode) + self._freeze_stages() + + +@BACKBONES.register_module() +class CBSwinTransformer(BaseModule): + def __init__(self, embed_dim=96, cb_zero_init=True, cb_del_stages=1, **kwargs): + super(CBSwinTransformer, self).__init__() + self.cb_zero_init = cb_zero_init + self.cb_del_stages = cb_del_stages + self.cb_modules = nn.ModuleList() + for cb_idx in range(2): + 
cb_module = _SwinTransformer(embed_dim=embed_dim, **kwargs) + if cb_idx > 0: + cb_module.del_layers(cb_del_stages) + self.cb_modules.append(cb_module) + + self.num_layers = self.cb_modules[0].num_layers + + cb_inplanes = [embed_dim * 2 ** i for i in range(self.num_layers)] + + self.cb_linears = nn.ModuleList() + for i in range(self.num_layers): + linears = nn.ModuleList() + if i >= self.cb_del_stages-1: + jrange = 4 - i + for j in range(jrange): + if cb_inplanes[i + j] != cb_inplanes[i]: + layer = nn.Conv2d(cb_inplanes[i + j], cb_inplanes[i], 1) + else: + layer = nn.Identity() + linears.append(layer) + self.cb_linears.append(linears) + + def _freeze_stages(self): + for m in self.cb_modules: + m._freeze_stages() + + def init_weights(self): + """Initialize the weights in backbone. + + Args: + pretrained (str, optional): Path to pre-trained weights. + Defaults to None. + """ + # constant_init(self.cb_linears, 0) + if self.cb_zero_init: + for ls in self.cb_linears: + for m in ls: + constant_init(m, 0) + + for m in self.cb_modules: + m.init_weights() + + def spatial_interpolate(self, x, H, W): + B, C = x.shape[:2] + if H != x.shape[2] or W != x.shape[3]: + # B, C, size[0], size[1] + x = F.interpolate(x, size=(H, W), mode='nearest') + x = x.view(B, C, -1).permute(0, 2, 1).contiguous() # B, T, C + return x + + def _get_cb_feats(self, feats, tmps): + cb_feats = [] + Wh, Ww = tmps[0][-2:] + for i in range(self.num_layers): + feed = 0 + if i >= self.cb_del_stages-1: + jrange = 4 - i + for j in range(jrange): + tmp = self.cb_linears[i][j](feats[j + i]) + tmp = self.spatial_interpolate(tmp, Wh, Ww) + feed += tmp + cb_feats.append(feed) + Wh, Ww = tmps[i+1][-2:] + + return cb_feats + + def forward(self, x): + outs = [] + for i, module in enumerate(self.cb_modules): + if i == 0: + feats, tmps = module(x) + else: + feats, tmps = module(x, cb_feats, tmps) + + outs.append(feats) + + if i < len(self.cb_modules)-1: + cb_feats = self._get_cb_feats(outs[-1], tmps) + return tuple(outs) + + def train(self, mode=True): + """Convert the model into training mode while keep layers freezed.""" + super(CBSwinTransformer, self).train(mode) + for m in self.cb_modules: + m.train(mode=mode) + self._freeze_stages() + for m in self.cb_linears.modules(): + # trick: eval have effect on BatchNorm only + if isinstance(m, _BatchNorm): + m.eval() + diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/darknet.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/darknet.py new file mode 100644 index 0000000000000000000000000000000000000000..9e07ba08e5f9ae4f3e04ee0948ed96a5ecd5483e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/darknet.py @@ -0,0 +1,212 @@ +# Copyright (c) 2019 Western Digital Corporation or its affiliates. + +import warnings + +import torch.nn as nn +from mmcv.cnn import ConvModule +from mmcv.runner import BaseModule +from torch.nn.modules.batchnorm import _BatchNorm + +from ..builder import BACKBONES + + +class ResBlock(BaseModule): + """The basic residual block used in Darknet. Each ResBlock consists of two + ConvModules and the input is added to the final output. Each ConvModule is + composed of Conv, BN, and LeakyReLU. In YoloV3 paper, the first convLayer + has half of the number of the filters as much as the second convLayer. The + first convLayer has filter size of 1x1 and the second one has the filter + size of 3x3. + + Args: + in_channels (int): The input channels. Must be even. 
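A toy shape-level sketch of the composite-backbone wiring in `cbnet.py` above: an assisting backbone runs first, and a zero-initialized 1x1 projection feeds its features into each lead-backbone stage. Only the same-stage connection is shown here, whereas the real `_get_cb_feats` sums projections from all deeper stages; every name below is illustrative.

import torch
import torch.nn as nn
import torch.nn.functional as F

class ToyCB(nn.Module):
    def __init__(self, chans=(8, 16, 32)):
        super().__init__()
        def stage(cin, cout):                    # one "stage" = one conv
            return nn.Conv2d(cin, cout, 3, stride=2, padding=1)
        ins = (3,) + chans[:-1]
        self.assist = nn.ModuleList(stage(i, o) for i, o in zip(ins, chans))
        self.lead = nn.ModuleList(stage(i, o) for i, o in zip(ins, chans))
        # 1x1 projections (the cb_linears); zero-init so training starts
        # as if the lead backbone were running alone
        self.linears = nn.ModuleList(nn.Conv2d(d, s, 1) for d, s
                                     in zip(chans[1:], chans[:-1]))
        for m in self.linears:
            nn.init.zeros_(m.weight)
            nn.init.zeros_(m.bias)

    def forward(self, x):
        feats, y = [], x
        for conv in self.assist:                 # assisting backbone first
            y = conv(y)
            feats.append(y)
        z = x
        for i, conv in enumerate(self.lead):     # lead backbone second
            if i > 0:                            # inject assisting features
                fb = self.linears[i - 1](feats[i])
                z = z + F.interpolate(fb, size=z.shape[2:], mode='nearest')
            z = conv(z)
        return z

print(ToyCB()(torch.randn(1, 3, 64, 64)).shape)  # torch.Size([1, 32, 8, 8])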
+ conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True) + act_cfg (dict): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + in_channels, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1), + init_cfg=None): + super(ResBlock, self).__init__(init_cfg) + assert in_channels % 2 == 0 # ensure the in_channels is even + half_in_channels = in_channels // 2 + + # shortcut + cfg = dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg, act_cfg=act_cfg) + + self.conv1 = ConvModule(in_channels, half_in_channels, 1, **cfg) + self.conv2 = ConvModule( + half_in_channels, in_channels, 3, padding=1, **cfg) + + def forward(self, x): + residual = x + out = self.conv1(x) + out = self.conv2(out) + out = out + residual + + return out + + +@BACKBONES.register_module() +class Darknet(BaseModule): + """Darknet backbone. + + Args: + depth (int): Depth of Darknet. Currently only support 53. + out_indices (Sequence[int]): Output from which stages. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. Default: -1. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True) + act_cfg (dict): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. + pretrained (str, optional): model pretrained path. Default: None + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + + Example: + >>> from mmdet.models import Darknet + >>> import torch + >>> self = Darknet(depth=53) + >>> self.eval() + >>> inputs = torch.rand(1, 3, 416, 416) + >>> level_outputs = self.forward(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + ... 
+ (1, 256, 52, 52) + (1, 512, 26, 26) + (1, 1024, 13, 13) + """ + + # Dict(depth: (layers, channels)) + arch_settings = { + 53: ((1, 2, 8, 8, 4), ((32, 64), (64, 128), (128, 256), (256, 512), + (512, 1024))) + } + + def __init__(self, + depth=53, + out_indices=(3, 4, 5), + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1), + norm_eval=True, + pretrained=None, + init_cfg=None): + super(Darknet, self).__init__(init_cfg) + if depth not in self.arch_settings: + raise KeyError(f'invalid depth {depth} for darknet') + + self.depth = depth + self.out_indices = out_indices + self.frozen_stages = frozen_stages + self.layers, self.channels = self.arch_settings[depth] + + cfg = dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg, act_cfg=act_cfg) + + self.conv1 = ConvModule(3, 32, 3, padding=1, **cfg) + + self.cr_blocks = ['conv1'] + for i, n_layers in enumerate(self.layers): + layer_name = f'conv_res_block{i + 1}' + in_c, out_c = self.channels[i] + self.add_module( + layer_name, + self.make_conv_res_block(in_c, out_c, n_layers, **cfg)) + self.cr_blocks.append(layer_name) + + self.norm_eval = norm_eval + + assert not (init_cfg and pretrained), \ + 'init_cfg and pretrained cannot be setting at the same time' + if isinstance(pretrained, str): + warnings.warn('DeprecationWarning: pretrained is deprecated, ' + 'please use "init_cfg" instead') + self.init_cfg = dict(type='Pretrained', checkpoint=pretrained) + elif pretrained is None: + if init_cfg is None: + self.init_cfg = [ + dict(type='Kaiming', layer='Conv2d'), + dict( + type='Constant', + val=1, + layer=['_BatchNorm', 'GroupNorm']) + ] + else: + raise TypeError('pretrained must be a str or None') + + def forward(self, x): + outs = [] + for i, layer_name in enumerate(self.cr_blocks): + cr_block = getattr(self, layer_name) + x = cr_block(x) + if i in self.out_indices: + outs.append(x) + + return tuple(outs) + + def _freeze_stages(self): + if self.frozen_stages >= 0: + for i in range(self.frozen_stages): + m = getattr(self, self.cr_blocks[i]) + m.eval() + for param in m.parameters(): + param.requires_grad = False + + def train(self, mode=True): + super(Darknet, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + if isinstance(m, _BatchNorm): + m.eval() + + @staticmethod + def make_conv_res_block(in_channels, + out_channels, + res_repeat, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + act_cfg=dict(type='LeakyReLU', + negative_slope=0.1)): + """In Darknet backbone, ConvLayer is usually followed by ResBlock. This + function will make that. The Conv layers always have 3x3 filters with + stride=2. The number of the filters in Conv layer is the same as the + out channels of the ResBlock. + + Args: + in_channels (int): The number of input channels. + out_channels (int): The number of output channels. + res_repeat (int): The number of ResBlocks. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True) + act_cfg (dict): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). 
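Before the body of `make_conv_res_block` below, an equivalent plain-PyTorch rendering of the same pattern, with mmcv's ConvModule expanded into Conv + BN + LeakyReLU; the `Residual` wrapper is our illustrative addition, not mmdet API:

import torch
import torch.nn as nn

class Residual(nn.Module):
    """Adds the skip connection that nn.Sequential alone cannot express."""
    def __init__(self, fn):
        super().__init__()
        self.fn = fn

    def forward(self, x):
        return x + self.fn(x)

def cbl(cin, cout, k, stride=1):
    return nn.Sequential(
        nn.Conv2d(cin, cout, k, stride=stride, padding=k // 2, bias=False),
        nn.BatchNorm2d(cout), nn.LeakyReLU(0.1))

def conv_res_block(cin, cout, res_repeat):
    blocks = [cbl(cin, cout, 3, stride=2)]       # stride-2 conv halves H, W
    blocks += [Residual(nn.Sequential(cbl(cout, cout // 2, 1),
                                      cbl(cout // 2, cout, 3)))
               for _ in range(res_repeat)]
    return nn.Sequential(*blocks)

x = torch.randn(1, 32, 64, 64)
print(conv_res_block(32, 64, res_repeat=1)(x).shape)  # (1, 64, 32, 32)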
+ """ + + cfg = dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg, act_cfg=act_cfg) + + model = nn.Sequential() + model.add_module( + 'conv', + ConvModule( + in_channels, out_channels, 3, stride=2, padding=1, **cfg)) + for idx in range(res_repeat): + model.add_module('res{}'.format(idx), + ResBlock(out_channels, **cfg)) + return model diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/detectors_resnet.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/detectors_resnet.py new file mode 100644 index 0000000000000000000000000000000000000000..c7ec491d8d96ac70fbfd610efbee0008ea626519 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/detectors_resnet.py @@ -0,0 +1,352 @@ +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import (build_conv_layer, build_norm_layer, constant_init, + kaiming_init) +from mmcv.runner import Sequential, load_checkpoint +from torch.nn.modules.batchnorm import _BatchNorm + +from mmdet.utils import get_root_logger +from ..builder import BACKBONES +from .resnet import BasicBlock +from .resnet import Bottleneck as _Bottleneck +from .resnet import ResNet + + +class Bottleneck(_Bottleneck): + r"""Bottleneck for the ResNet backbone in `DetectoRS + `_. + + This bottleneck allows the users to specify whether to use + SAC (Switchable Atrous Convolution) and RFP (Recursive Feature Pyramid). + + Args: + inplanes (int): The number of input channels. + planes (int): The number of output channels before expansion. + rfp_inplanes (int, optional): The number of channels from RFP. + Default: None. If specified, an additional conv layer will be + added for ``rfp_feat``. Otherwise, the structure is the same as + base class. + sac (dict, optional): Dictionary to construct SAC. Default: None. + init_cfg (dict or list[dict], optional): Initialization config dict. 
+ Default: None + """ + expansion = 4 + + def __init__(self, + inplanes, + planes, + rfp_inplanes=None, + sac=None, + init_cfg=None, + **kwargs): + super(Bottleneck, self).__init__( + inplanes, planes, init_cfg=init_cfg, **kwargs) + + assert sac is None or isinstance(sac, dict) + self.sac = sac + self.with_sac = sac is not None + if self.with_sac: + self.conv2 = build_conv_layer( + self.sac, + planes, + planes, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + bias=False) + + self.rfp_inplanes = rfp_inplanes + if self.rfp_inplanes: + self.rfp_conv = build_conv_layer( + None, + self.rfp_inplanes, + planes * self.expansion, + 1, + stride=1, + bias=True) + if init_cfg is None: + self.init_cfg = dict( + type='Constant', val=0, override=dict(name='rfp_conv')) + + def rfp_forward(self, x, rfp_feat): + """The forward function that also takes the RFP features as input.""" + + def _inner_forward(x): + identity = x + + out = self.conv1(x) + out = self.norm1(out) + out = self.relu(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv1_plugin_names) + + out = self.conv2(out) + out = self.norm2(out) + out = self.relu(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv2_plugin_names) + + out = self.conv3(out) + out = self.norm3(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv3_plugin_names) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + if self.rfp_inplanes: + rfp_feat = self.rfp_conv(rfp_feat) + out = out + rfp_feat + + out = self.relu(out) + + return out + + +class ResLayer(Sequential): + """ResLayer to build ResNet style backbone for RPF in detectoRS. + + The difference between this module and base class is that we pass + ``rfp_inplanes`` to the first block. + + Args: + block (nn.Module): block used to build ResLayer. + inplanes (int): inplanes of block. + planes (int): planes of block. + num_blocks (int): number of blocks. + stride (int): stride of the first block. Default: 1 + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottleneck. Default: False + conv_cfg (dict): dictionary to construct and config conv layer. + Default: None + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN') + downsample_first (bool): Downsample at the first block or last block. + False for Hourglass, True for ResNet. Default: True + rfp_inplanes (int, optional): The number of channels from RFP. + Default: None. If specified, an additional conv layer will be + added for ``rfp_feat``. Otherwise, the structure is the same as + base class. 
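A sketch of the RFP feedback step in `rfp_forward` above: a 1x1 conv (zero-initialized in the real code, so training starts as a plain ResNet) projects the fed-back pyramid feature and adds it to the block output before the final ReLU. Shapes below are arbitrary examples:

import torch
import torch.nn as nn

rfp_inplanes, planes, expansion = 64, 64, 4
rfp_conv = nn.Conv2d(rfp_inplanes, planes * expansion, 1, bias=True)
nn.init.zeros_(rfp_conv.weight)
nn.init.zeros_(rfp_conv.bias)

out = torch.randn(1, planes * expansion, 32, 32)   # bottleneck output
rfp_feat = torch.randn(1, rfp_inplanes, 32, 32)    # feedback feature
fused = torch.relu(out + rfp_conv(rfp_feat))
assert torch.equal(fused, torch.relu(out))         # zero-init => no-op at start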
+ """ + + def __init__(self, + block, + inplanes, + planes, + num_blocks, + stride=1, + avg_down=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + downsample_first=True, + rfp_inplanes=None, + **kwargs): + self.block = block + assert downsample_first, f'downsample_first={downsample_first} is ' \ + 'not supported in DetectoRS' + + downsample = None + if stride != 1 or inplanes != planes * block.expansion: + downsample = [] + conv_stride = stride + if avg_down and stride != 1: + conv_stride = 1 + downsample.append( + nn.AvgPool2d( + kernel_size=stride, + stride=stride, + ceil_mode=True, + count_include_pad=False)) + downsample.extend([ + build_conv_layer( + conv_cfg, + inplanes, + planes * block.expansion, + kernel_size=1, + stride=conv_stride, + bias=False), + build_norm_layer(norm_cfg, planes * block.expansion)[1] + ]) + downsample = nn.Sequential(*downsample) + + layers = [] + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=stride, + downsample=downsample, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + rfp_inplanes=rfp_inplanes, + **kwargs)) + inplanes = planes * block.expansion + for _ in range(1, num_blocks): + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + **kwargs)) + + super(ResLayer, self).__init__(*layers) + + +@BACKBONES.register_module() +class DetectoRS_ResNet(ResNet): + """ResNet backbone for DetectoRS. + + Args: + sac (dict, optional): Dictionary to construct SAC (Switchable Atrous + Convolution). Default: None. + stage_with_sac (list): Which stage to use sac. Default: (False, False, + False, False). + rfp_inplanes (int, optional): The number of channels from RFP. + Default: None. If specified, an additional conv layer will be + added for ``rfp_feat``. Otherwise, the structure is the same as + base class. + output_img (bool): If ``True``, the input image will be inserted into + the starting position of output. Default: False. 
+ """ + + arch_settings = { + 50: (Bottleneck, (3, 4, 6, 3)), + 101: (Bottleneck, (3, 4, 23, 3)), + 152: (Bottleneck, (3, 8, 36, 3)) + } + + def __init__(self, + sac=None, + stage_with_sac=(False, False, False, False), + rfp_inplanes=None, + output_img=False, + pretrained=None, + init_cfg=None, + **kwargs): + assert not (init_cfg and pretrained), \ + 'init_cfg and pretrained cannot be setting at the same time' + self.pretrained = pretrained + if init_cfg is not None: + assert isinstance(init_cfg, dict), \ + f'init_cfg must be a dict, but got {type(init_cfg)}' + if 'type' in init_cfg: + assert init_cfg.get('type') == 'Pretrained', \ + 'Only can initialize module by loading a pretrained model' + else: + raise KeyError('`init_cfg` must contain the key "type"') + self.pretrained = init_cfg.get('checkpoint') + self.sac = sac + self.stage_with_sac = stage_with_sac + self.rfp_inplanes = rfp_inplanes + self.output_img = output_img + super(DetectoRS_ResNet, self).__init__(**kwargs) + + self.inplanes = self.stem_channels + self.res_layers = [] + for i, num_blocks in enumerate(self.stage_blocks): + stride = self.strides[i] + dilation = self.dilations[i] + dcn = self.dcn if self.stage_with_dcn[i] else None + sac = self.sac if self.stage_with_sac[i] else None + if self.plugins is not None: + stage_plugins = self.make_stage_plugins(self.plugins, i) + else: + stage_plugins = None + planes = self.base_channels * 2**i + res_layer = self.make_res_layer( + block=self.block, + inplanes=self.inplanes, + planes=planes, + num_blocks=num_blocks, + stride=stride, + dilation=dilation, + style=self.style, + avg_down=self.avg_down, + with_cp=self.with_cp, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + dcn=dcn, + sac=sac, + rfp_inplanes=rfp_inplanes if i > 0 else None, + plugins=stage_plugins) + self.inplanes = planes * self.block.expansion + layer_name = f'layer{i + 1}' + self.add_module(layer_name, res_layer) + self.res_layers.append(layer_name) + + self._freeze_stages() + + # In order to be properly initialized by RFP + def init_weights(self): + # Calling this method will cause parameter initialization exception + # super(DetectoRS_ResNet, self).init_weights() + + if isinstance(self.pretrained, str): + logger = get_root_logger() + load_checkpoint(self, self.pretrained, strict=False, logger=logger) + elif self.pretrained is None: + for m in self.modules(): + if isinstance(m, nn.Conv2d): + kaiming_init(m) + elif isinstance(m, (_BatchNorm, nn.GroupNorm)): + constant_init(m, 1) + + if self.dcn is not None: + for m in self.modules(): + if isinstance(m, Bottleneck) and hasattr( + m.conv2, 'conv_offset'): + constant_init(m.conv2.conv_offset, 0) + + if self.zero_init_residual: + for m in self.modules(): + if isinstance(m, Bottleneck): + constant_init(m.norm3, 0) + elif isinstance(m, BasicBlock): + constant_init(m.norm2, 0) + else: + raise TypeError('pretrained must be a str or None') + + def make_res_layer(self, **kwargs): + """Pack all blocks in a stage into a ``ResLayer`` for DetectoRS.""" + return ResLayer(**kwargs) + + def forward(self, x): + """Forward function.""" + outs = list(super(DetectoRS_ResNet, self).forward(x)) + if self.output_img: + outs.insert(0, x) + return tuple(outs) + + def rfp_forward(self, x, rfp_feats): + """Forward function for RFP.""" + if self.deep_stem: + x = self.stem(x) + else: + x = self.conv1(x) + x = self.norm1(x) + x = self.relu(x) + x = self.maxpool(x) + outs = [] + for i, layer_name in enumerate(self.res_layers): + res_layer = getattr(self, layer_name) + rfp_feat = rfp_feats[i] if 
i > 0 else None + for layer in res_layer: + x = layer.rfp_forward(x, rfp_feat) + if i in self.out_indices: + outs.append(x) + return tuple(outs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/detectors_resnext.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/detectors_resnext.py new file mode 100644 index 0000000000000000000000000000000000000000..57d032fe37ed82d5ba24e761bdc014cc0ee5ac64 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/detectors_resnext.py @@ -0,0 +1,122 @@ +import math + +from mmcv.cnn import build_conv_layer, build_norm_layer + +from ..builder import BACKBONES +from .detectors_resnet import Bottleneck as _Bottleneck +from .detectors_resnet import DetectoRS_ResNet + + +class Bottleneck(_Bottleneck): + expansion = 4 + + def __init__(self, + inplanes, + planes, + groups=1, + base_width=4, + base_channels=64, + **kwargs): + """Bottleneck block for ResNeXt. + + If style is "pytorch", the stride-two layer is the 3x3 conv layer, if + it is "caffe", the stride-two layer is the first 1x1 conv layer. + """ + super(Bottleneck, self).__init__(inplanes, planes, **kwargs) + + if groups == 1: + width = self.planes + else: + width = math.floor(self.planes * + (base_width / base_channels)) * groups + + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, width, postfix=1) + self.norm2_name, norm2 = build_norm_layer( + self.norm_cfg, width, postfix=2) + self.norm3_name, norm3 = build_norm_layer( + self.norm_cfg, self.planes * self.expansion, postfix=3) + + self.conv1 = build_conv_layer( + self.conv_cfg, + self.inplanes, + width, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + fallback_on_stride = False + self.with_modulated_dcn = False + if self.with_dcn: + fallback_on_stride = self.dcn.pop('fallback_on_stride', False) + if self.with_sac: + self.conv2 = build_conv_layer( + self.sac, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + bias=False) + elif not self.with_dcn or fallback_on_stride: + self.conv2 = build_conv_layer( + self.conv_cfg, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + bias=False) + else: + assert self.conv_cfg is None, 'conv_cfg must be None for DCN' + self.conv2 = build_conv_layer( + self.dcn, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + bias=False) + + self.add_module(self.norm2_name, norm2) + self.conv3 = build_conv_layer( + self.conv_cfg, + width, + self.planes * self.expansion, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + +@BACKBONES.register_module() +class DetectoRS_ResNeXt(DetectoRS_ResNet): + """ResNeXt backbone for DetectoRS. + + Args: + groups (int): The number of groups in ResNeXt. + base_width (int): The base width of ResNeXt. 
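A worked example of the grouped-conv width formula in the `Bottleneck` above, using the common ResNeXt "32x4d" setting:

import math

planes, groups, base_width, base_channels = 256, 32, 4, 64
width = math.floor(planes * (base_width / base_channels)) * groups
print(width)  # 512: the 3x3 conv runs 32 groups of 16 channels each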
+ """ + + arch_settings = { + 50: (Bottleneck, (3, 4, 6, 3)), + 101: (Bottleneck, (3, 4, 23, 3)), + 152: (Bottleneck, (3, 8, 36, 3)) + } + + def __init__(self, groups=1, base_width=4, **kwargs): + self.groups = groups + self.base_width = base_width + super(DetectoRS_ResNeXt, self).__init__(**kwargs) + + def make_res_layer(self, **kwargs): + return super().make_res_layer( + groups=self.groups, + base_width=self.base_width, + base_channels=self.base_channels, + **kwargs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/hourglass.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/hourglass.py new file mode 100644 index 0000000000000000000000000000000000000000..d9e16e6759f804467cd15a78b11cabde8da7672a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/hourglass.py @@ -0,0 +1,213 @@ +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule +from mmcv.runner import BaseModule + +from ..builder import BACKBONES +from ..utils import ResLayer +from .resnet import BasicBlock + + +class HourglassModule(BaseModule): + """Hourglass Module for HourglassNet backbone. + + Generate module recursively and use BasicBlock as the base unit. + + Args: + depth (int): Depth of current HourglassModule. + stage_channels (list[int]): Feature channels of sub-modules in current + and follow-up HourglassModule. + stage_blocks (list[int]): Number of sub-modules stacked in current and + follow-up HourglassModule. + norm_cfg (dict): Dictionary to construct and config norm layer. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + upsample_cfg (dict, optional): Config dict for interpolate layer. + Default: `dict(mode='nearest')` + """ + + def __init__(self, + depth, + stage_channels, + stage_blocks, + norm_cfg=dict(type='BN', requires_grad=True), + init_cfg=None, + upsample_cfg=dict(mode='nearest')): + super(HourglassModule, self).__init__(init_cfg) + + self.depth = depth + + cur_block = stage_blocks[0] + next_block = stage_blocks[1] + + cur_channel = stage_channels[0] + next_channel = stage_channels[1] + + self.up1 = ResLayer( + BasicBlock, cur_channel, cur_channel, cur_block, norm_cfg=norm_cfg) + + self.low1 = ResLayer( + BasicBlock, + cur_channel, + next_channel, + cur_block, + stride=2, + norm_cfg=norm_cfg) + + if self.depth > 1: + self.low2 = HourglassModule(depth - 1, stage_channels[1:], + stage_blocks[1:]) + else: + self.low2 = ResLayer( + BasicBlock, + next_channel, + next_channel, + next_block, + norm_cfg=norm_cfg) + + self.low3 = ResLayer( + BasicBlock, + next_channel, + cur_channel, + cur_block, + norm_cfg=norm_cfg, + downsample_first=False) + + self.up2 = F.interpolate + self.upsample_cfg = upsample_cfg + + def forward(self, x): + """Forward function.""" + up1 = self.up1(x) + low1 = self.low1(x) + low2 = self.low2(low1) + low3 = self.low3(low2) + # Fixing `scale factor` (e.g. 2) is common for upsampling, but + # in some cases the spatial size is mismatched and error will arise. + if 'scale_factor' in self.upsample_cfg: + up2 = self.up2(low3, **self.upsample_cfg) + else: + shape = up1.shape[2:] + up2 = self.up2(low3, size=shape, **self.upsample_cfg) + return up1 + up2 + + +@BACKBONES.register_module() +class HourglassNet(BaseModule): + """HourglassNet backbone. + + Stacked Hourglass Networks for Human Pose Estimation. + More details can be found in the `paper + `_ . + + Args: + downsample_times (int): Downsample times in a HourglassModule. 
+ num_stacks (int): Number of HourglassModule modules stacked, + 1 for Hourglass-52, 2 for Hourglass-104. + stage_channels (list[int]): Feature channel of each sub-module in a + HourglassModule. + stage_blocks (list[int]): Number of sub-modules stacked in a + HourglassModule. + feat_channel (int): Feature channel of conv after a HourglassModule. + norm_cfg (dict): Dictionary to construct and config norm layer. + pretrained (str, optional): model pretrained path. Default: None + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + + Example: + >>> from mmdet.models import HourglassNet + >>> import torch + >>> self = HourglassNet() + >>> self.eval() + >>> inputs = torch.rand(1, 3, 511, 511) + >>> level_outputs = self.forward(inputs) + >>> for level_output in level_outputs: + ... print(tuple(level_output.shape)) + (1, 256, 128, 128) + (1, 256, 128, 128) + """ + + def __init__(self, + downsample_times=5, + num_stacks=2, + stage_channels=(256, 256, 384, 384, 384, 512), + stage_blocks=(2, 2, 2, 2, 2, 4), + feat_channel=256, + norm_cfg=dict(type='BN', requires_grad=True), + pretrained=None, + init_cfg=None): + assert init_cfg is None, 'To prevent abnormal initialization ' \ + 'behavior, init_cfg is not allowed to be set' + super(HourglassNet, self).__init__(init_cfg) + + self.num_stacks = num_stacks + assert self.num_stacks >= 1 + assert len(stage_channels) == len(stage_blocks) + assert len(stage_channels) > downsample_times + + cur_channel = stage_channels[0] + + self.stem = nn.Sequential( + ConvModule(3, 128, 7, padding=3, stride=2, norm_cfg=norm_cfg), + ResLayer(BasicBlock, 128, 256, 1, stride=2, norm_cfg=norm_cfg)) + + self.hourglass_modules = nn.ModuleList([ + HourglassModule(downsample_times, stage_channels, stage_blocks) + for _ in range(num_stacks) + ]) + + self.inters = ResLayer( + BasicBlock, + cur_channel, + cur_channel, + num_stacks - 1, + norm_cfg=norm_cfg) + + self.conv1x1s = nn.ModuleList([ + ConvModule( + cur_channel, cur_channel, 1, norm_cfg=norm_cfg, act_cfg=None) + for _ in range(num_stacks - 1) + ]) + + self.out_convs = nn.ModuleList([ + ConvModule( + cur_channel, feat_channel, 3, padding=1, norm_cfg=norm_cfg) + for _ in range(num_stacks) + ]) + + self.remap_convs = nn.ModuleList([ + ConvModule( + feat_channel, cur_channel, 1, norm_cfg=norm_cfg, act_cfg=None) + for _ in range(num_stacks - 1) + ]) + + self.relu = nn.ReLU(inplace=True) + + def init_weights(self): + """Init module weights.""" + # Training Centripetal Model needs to reset parameters for Conv2d + super(HourglassNet, self).init_weights() + for m in self.modules(): + if isinstance(m, nn.Conv2d): + m.reset_parameters() + + def forward(self, x): + """Forward function.""" + inter_feat = self.stem(x) + out_feats = [] + + for ind in range(self.num_stacks): + single_hourglass = self.hourglass_modules[ind] + out_conv = self.out_convs[ind] + + hourglass_feat = single_hourglass(inter_feat) + out_feat = out_conv(hourglass_feat) + out_feats.append(out_feat) + + if ind < self.num_stacks - 1: + inter_feat = self.conv1x1s[ind]( + inter_feat) + self.remap_convs[ind]( + out_feat) + inter_feat = self.inters[ind](self.relu(inter_feat)) + + return out_feats diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/hrnet.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/hrnet.py new file mode 100644 index 0000000000000000000000000000000000000000..a3b99558a10192dce236f7d67ba24389c57c5bcc --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/hrnet.py @@ -0,0 +1,564 @@ +import warnings + +import torch.nn as nn +from mmcv.cnn import build_conv_layer, build_norm_layer +from mmcv.runner import BaseModule, ModuleList, Sequential +from torch.nn.modules.batchnorm import _BatchNorm + +from ..builder import BACKBONES +from .resnet import BasicBlock, Bottleneck + + +class HRModule(BaseModule): + """High-Resolution Module for HRNet. + + In this module, every branch has 4 BasicBlocks/Bottlenecks. Fusion/Exchange + is in this module. + """ + + def __init__(self, + num_branches, + blocks, + num_blocks, + in_channels, + num_channels, + multiscale_output=True, + with_cp=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + block_init_cfg=None, + init_cfg=None): + super(HRModule, self).__init__(init_cfg) + self.block_init_cfg = block_init_cfg + self._check_branches(num_branches, num_blocks, in_channels, + num_channels) + + self.in_channels = in_channels + self.num_branches = num_branches + + self.multiscale_output = multiscale_output + self.norm_cfg = norm_cfg + self.conv_cfg = conv_cfg + self.with_cp = with_cp + self.branches = self._make_branches(num_branches, blocks, num_blocks, + num_channels) + self.fuse_layers = self._make_fuse_layers() + self.relu = nn.ReLU(inplace=False) + + def _check_branches(self, num_branches, num_blocks, in_channels, + num_channels): + if num_branches != len(num_blocks): + error_msg = f'NUM_BRANCHES({num_branches}) ' \ + f'!= NUM_BLOCKS({len(num_blocks)})' + raise ValueError(error_msg) + + if num_branches != len(num_channels): + error_msg = f'NUM_BRANCHES({num_branches}) ' \ + f'!= NUM_CHANNELS({len(num_channels)})' + raise ValueError(error_msg) + + if num_branches != len(in_channels): + error_msg = f'NUM_BRANCHES({num_branches}) ' \ + f'!= NUM_INCHANNELS({len(in_channels)})' + raise ValueError(error_msg) + + def _make_one_branch(self, + branch_index, + block, + num_blocks, + num_channels, + stride=1): + downsample = None + if stride != 1 or \ + self.in_channels[branch_index] != \ + num_channels[branch_index] * block.expansion: + downsample = nn.Sequential( + build_conv_layer( + self.conv_cfg, + self.in_channels[branch_index], + num_channels[branch_index] * block.expansion, + kernel_size=1, + stride=stride, + bias=False), + build_norm_layer(self.norm_cfg, num_channels[branch_index] * + block.expansion)[1]) + + layers = [] + layers.append( + block( + self.in_channels[branch_index], + num_channels[branch_index], + stride, + downsample=downsample, + with_cp=self.with_cp, + norm_cfg=self.norm_cfg, + conv_cfg=self.conv_cfg, + init_cfg=self.block_init_cfg)) + self.in_channels[branch_index] = \ + num_channels[branch_index] * block.expansion + for i in range(1, num_blocks[branch_index]): + layers.append( + block( + self.in_channels[branch_index], + num_channels[branch_index], + with_cp=self.with_cp, + norm_cfg=self.norm_cfg, + conv_cfg=self.conv_cfg, + init_cfg=self.block_init_cfg)) + + return Sequential(*layers) + + def _make_branches(self, num_branches, block, num_blocks, num_channels): + branches = [] + + for i in range(num_branches): + branches.append( + self._make_one_branch(i, block, num_blocks, num_channels)) + + return ModuleList(branches) + + def _make_fuse_layers(self): + if self.num_branches == 1: + return None + + num_branches = self.num_branches + in_channels = self.in_channels + fuse_layers = [] + num_out_branches = num_branches if self.multiscale_output else 1 + for i in range(num_out_branches): + fuse_layer = [] + for j in 
range(num_branches): + if j > i: + fuse_layer.append( + nn.Sequential( + build_conv_layer( + self.conv_cfg, + in_channels[j], + in_channels[i], + kernel_size=1, + stride=1, + padding=0, + bias=False), + build_norm_layer(self.norm_cfg, in_channels[i])[1], + nn.Upsample( + scale_factor=2**(j - i), mode='nearest'))) + elif j == i: + fuse_layer.append(None) + else: + conv_downsamples = [] + for k in range(i - j): + if k == i - j - 1: + conv_downsamples.append( + nn.Sequential( + build_conv_layer( + self.conv_cfg, + in_channels[j], + in_channels[i], + kernel_size=3, + stride=2, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, + in_channels[i])[1])) + else: + conv_downsamples.append( + nn.Sequential( + build_conv_layer( + self.conv_cfg, + in_channels[j], + in_channels[j], + kernel_size=3, + stride=2, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, + in_channels[j])[1], + nn.ReLU(inplace=False))) + fuse_layer.append(nn.Sequential(*conv_downsamples)) + fuse_layers.append(nn.ModuleList(fuse_layer)) + + return nn.ModuleList(fuse_layers) + + def forward(self, x): + """Forward function.""" + if self.num_branches == 1: + return [self.branches[0](x[0])] + + for i in range(self.num_branches): + x[i] = self.branches[i](x[i]) + + x_fuse = [] + for i in range(len(self.fuse_layers)): + y = 0 + for j in range(self.num_branches): + if i == j: + y += x[j] + else: + y += self.fuse_layers[i][j](x[j]) + x_fuse.append(self.relu(y)) + return x_fuse + + +@BACKBONES.register_module() +class HRNet(BaseModule): + """HRNet backbone. + + High-Resolution Representations for Labeling Pixels and Regions + arXiv: https://arxiv.org/abs/1904.04514 + + Args: + extra (dict): detailed configuration for each stage of HRNet. + in_channels (int): Number of input image channels. Default: 3. + conv_cfg (dict): dictionary to construct and config conv layer. + norm_cfg (dict): dictionary to construct and config norm layer. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. + zero_init_residual (bool): whether to use zero init for last norm layer + in resblocks to let them behave as identity. + pretrained (str, optional): model pretrained path. Default: None + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + + Example: + >>> from mmdet.models import HRNet + >>> import torch + >>> extra = dict( + >>> stage1=dict( + >>> num_modules=1, + >>> num_branches=1, + >>> block='BOTTLENECK', + >>> num_blocks=(4, ), + >>> num_channels=(64, )), + >>> stage2=dict( + >>> num_modules=1, + >>> num_branches=2, + >>> block='BASIC', + >>> num_blocks=(4, 4), + >>> num_channels=(32, 64)), + >>> stage3=dict( + >>> num_modules=4, + >>> num_branches=3, + >>> block='BASIC', + >>> num_blocks=(4, 4, 4), + >>> num_channels=(32, 64, 128)), + >>> stage4=dict( + >>> num_modules=3, + >>> num_branches=4, + >>> block='BASIC', + >>> num_blocks=(4, 4, 4, 4), + >>> num_channels=(32, 64, 128, 256))) + >>> self = HRNet(extra, in_channels=1) + >>> self.eval() + >>> inputs = torch.rand(1, 1, 32, 32) + >>> level_outputs = self.forward(inputs) + >>> for level_out in level_outputs: + ... 
print(tuple(level_out.shape)) + (1, 32, 8, 8) + (1, 64, 4, 4) + (1, 128, 2, 2) + (1, 256, 1, 1) + """ + + blocks_dict = {'BASIC': BasicBlock, 'BOTTLENECK': Bottleneck} + + def __init__(self, + extra, + in_channels=3, + conv_cfg=None, + norm_cfg=dict(type='BN'), + norm_eval=True, + with_cp=False, + zero_init_residual=False, + pretrained=None, + init_cfg=None): + super(HRNet, self).__init__(init_cfg) + + self.pretrained = pretrained + assert not (init_cfg and pretrained), \ + 'init_cfg and pretrained cannot be setting at the same time' + if isinstance(pretrained, str): + warnings.warn('DeprecationWarning: pretrained is deprecated, ' + 'please use "init_cfg" instead') + self.init_cfg = dict(type='Pretrained', checkpoint=pretrained) + elif pretrained is None: + if init_cfg is None: + self.init_cfg = [ + dict(type='Kaiming', layer='Conv2d'), + dict( + type='Constant', + val=1, + layer=['_BatchNorm', 'GroupNorm']) + ] + else: + raise TypeError('pretrained must be a str or None') + + self.extra = extra + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.norm_eval = norm_eval + self.with_cp = with_cp + self.zero_init_residual = zero_init_residual + + # stem net + self.norm1_name, norm1 = build_norm_layer(self.norm_cfg, 64, postfix=1) + self.norm2_name, norm2 = build_norm_layer(self.norm_cfg, 64, postfix=2) + + self.conv1 = build_conv_layer( + self.conv_cfg, + in_channels, + 64, + kernel_size=3, + stride=2, + padding=1, + bias=False) + + self.add_module(self.norm1_name, norm1) + self.conv2 = build_conv_layer( + self.conv_cfg, + 64, + 64, + kernel_size=3, + stride=2, + padding=1, + bias=False) + + self.add_module(self.norm2_name, norm2) + self.relu = nn.ReLU(inplace=True) + + # stage 1 + self.stage1_cfg = self.extra['stage1'] + num_channels = self.stage1_cfg['num_channels'][0] + block_type = self.stage1_cfg['block'] + num_blocks = self.stage1_cfg['num_blocks'][0] + + block = self.blocks_dict[block_type] + stage1_out_channels = num_channels * block.expansion + self.layer1 = self._make_layer(block, 64, num_channels, num_blocks) + + # stage 2 + self.stage2_cfg = self.extra['stage2'] + num_channels = self.stage2_cfg['num_channels'] + block_type = self.stage2_cfg['block'] + + block = self.blocks_dict[block_type] + num_channels = [channel * block.expansion for channel in num_channels] + self.transition1 = self._make_transition_layer([stage1_out_channels], + num_channels) + self.stage2, pre_stage_channels = self._make_stage( + self.stage2_cfg, num_channels) + + # stage 3 + self.stage3_cfg = self.extra['stage3'] + num_channels = self.stage3_cfg['num_channels'] + block_type = self.stage3_cfg['block'] + + block = self.blocks_dict[block_type] + num_channels = [channel * block.expansion for channel in num_channels] + self.transition2 = self._make_transition_layer(pre_stage_channels, + num_channels) + self.stage3, pre_stage_channels = self._make_stage( + self.stage3_cfg, num_channels) + + # stage 4 + self.stage4_cfg = self.extra['stage4'] + num_channels = self.stage4_cfg['num_channels'] + block_type = self.stage4_cfg['block'] + + block = self.blocks_dict[block_type] + num_channels = [channel * block.expansion for channel in num_channels] + self.transition3 = self._make_transition_layer(pre_stage_channels, + num_channels) + self.stage4, pre_stage_channels = self._make_stage( + self.stage4_cfg, num_channels) + + @property + def norm1(self): + """nn.Module: the normalization layer named "norm1" """ + return getattr(self, self.norm1_name) + + @property + def norm2(self): + """nn.Module: the normalization 
layer named "norm2" """ + return getattr(self, self.norm2_name) + + def _make_transition_layer(self, num_channels_pre_layer, + num_channels_cur_layer): + num_branches_cur = len(num_channels_cur_layer) + num_branches_pre = len(num_channels_pre_layer) + + transition_layers = [] + for i in range(num_branches_cur): + if i < num_branches_pre: + if num_channels_cur_layer[i] != num_channels_pre_layer[i]: + transition_layers.append( + nn.Sequential( + build_conv_layer( + self.conv_cfg, + num_channels_pre_layer[i], + num_channels_cur_layer[i], + kernel_size=3, + stride=1, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, + num_channels_cur_layer[i])[1], + nn.ReLU(inplace=True))) + else: + transition_layers.append(None) + else: + conv_downsamples = [] + for j in range(i + 1 - num_branches_pre): + in_channels = num_channels_pre_layer[-1] + out_channels = num_channels_cur_layer[i] \ + if j == i - num_branches_pre else in_channels + conv_downsamples.append( + nn.Sequential( + build_conv_layer( + self.conv_cfg, + in_channels, + out_channels, + kernel_size=3, + stride=2, + padding=1, + bias=False), + build_norm_layer(self.norm_cfg, out_channels)[1], + nn.ReLU(inplace=True))) + transition_layers.append(nn.Sequential(*conv_downsamples)) + + return nn.ModuleList(transition_layers) + + def _make_layer(self, block, inplanes, planes, blocks, stride=1): + downsample = None + if stride != 1 or inplanes != planes * block.expansion: + downsample = nn.Sequential( + build_conv_layer( + self.conv_cfg, + inplanes, + planes * block.expansion, + kernel_size=1, + stride=stride, + bias=False), + build_norm_layer(self.norm_cfg, planes * block.expansion)[1]) + + layers = [] + block_init_cfg = None + if self.pretrained is None and not hasattr( + self, 'init_cfg') and self.zero_init_residual: + if block is BasicBlock: + block_init_cfg = dict( + type='Constant', val=0, override=dict(name='norm2')) + elif block is Bottleneck: + block_init_cfg = dict( + type='Constant', val=0, override=dict(name='norm3')) + layers.append( + block( + inplanes, + planes, + stride, + downsample=downsample, + with_cp=self.with_cp, + norm_cfg=self.norm_cfg, + conv_cfg=self.conv_cfg, + init_cfg=block_init_cfg, + )) + inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append( + block( + inplanes, + planes, + with_cp=self.with_cp, + norm_cfg=self.norm_cfg, + conv_cfg=self.conv_cfg, + init_cfg=block_init_cfg)) + + return Sequential(*layers) + + def _make_stage(self, layer_config, in_channels, multiscale_output=True): + num_modules = layer_config['num_modules'] + num_branches = layer_config['num_branches'] + num_blocks = layer_config['num_blocks'] + num_channels = layer_config['num_channels'] + block = self.blocks_dict[layer_config['block']] + + hr_modules = [] + block_init_cfg = None + if self.pretrained is None and not hasattr( + self, 'init_cfg') and self.zero_init_residual: + if block is BasicBlock: + block_init_cfg = dict( + type='Constant', val=0, override=dict(name='norm2')) + elif block is Bottleneck: + block_init_cfg = dict( + type='Constant', val=0, override=dict(name='norm3')) + + for i in range(num_modules): + # multi_scale_output is only used for the last module + if not multiscale_output and i == num_modules - 1: + reset_multiscale_output = False + else: + reset_multiscale_output = True + + hr_modules.append( + HRModule( + num_branches, + block, + num_blocks, + in_channels, + num_channels, + reset_multiscale_output, + with_cp=self.with_cp, + norm_cfg=self.norm_cfg, + conv_cfg=self.conv_cfg, + 
                    block_init_cfg=block_init_cfg))
+
+        return Sequential(*hr_modules), in_channels
+
+    def forward(self, x):
+        """Forward function."""
+        x = self.conv1(x)
+        x = self.norm1(x)
+        x = self.relu(x)
+        x = self.conv2(x)
+        x = self.norm2(x)
+        x = self.relu(x)
+        x = self.layer1(x)
+
+        x_list = []
+        for i in range(self.stage2_cfg['num_branches']):
+            if self.transition1[i] is not None:
+                x_list.append(self.transition1[i](x))
+            else:
+                x_list.append(x)
+        y_list = self.stage2(x_list)
+
+        x_list = []
+        for i in range(self.stage3_cfg['num_branches']):
+            if self.transition2[i] is not None:
+                x_list.append(self.transition2[i](y_list[-1]))
+            else:
+                x_list.append(y_list[i])
+        y_list = self.stage3(x_list)
+
+        x_list = []
+        for i in range(self.stage4_cfg['num_branches']):
+            if self.transition3[i] is not None:
+                x_list.append(self.transition3[i](y_list[-1]))
+            else:
+                x_list.append(y_list[i])
+        y_list = self.stage4(x_list)
+
+        return y_list
+
+    def train(self, mode=True):
+        """Convert the model into training mode while keeping the
+        normalization layers frozen."""
+        super(HRNet, self).train(mode)
+        if mode and self.norm_eval:
+            for m in self.modules():
+                # trick: eval() has an effect on BatchNorm only
+                if isinstance(m, _BatchNorm):
+                    m.eval()
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/mobilenet_v2.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/mobilenet_v2.py
new file mode 100644
index 0000000000000000000000000000000000000000..0905a473694e0066c2270ed3e16c91e32ba42d3a
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/mobilenet_v2.py
@@ -0,0 +1,196 @@
+import warnings
+
+import torch.nn as nn
+from mmcv.cnn import ConvModule
+from mmcv.runner import BaseModule
+from torch.nn.modules.batchnorm import _BatchNorm
+
+from ..builder import BACKBONES
+from ..utils import InvertedResidual, make_divisible
+
+
+@BACKBONES.register_module()
+class MobileNetV2(BaseModule):
+    """MobileNetV2 backbone.
+
+    Args:
+        widen_factor (float): Width multiplier, multiply number of
+            channels in each layer by this amount. Default: 1.0.
+        out_indices (Sequence[int], optional): Output from which stages.
+            Default: (1, 2, 4, 7).
+        frozen_stages (int): Stages to be frozen (all param fixed).
+            Default: -1, which means not freezing any parameters.
+        conv_cfg (dict, optional): Config dict for convolution layer.
+            Default: None, which means using conv2d.
+        norm_cfg (dict): Config dict for normalization layer.
+            Default: dict(type='BN').
+        act_cfg (dict): Config dict for activation layer.
+            Default: dict(type='ReLU6').
+        norm_eval (bool): Whether to set norm layers to eval mode, namely,
+            freeze running stats (mean and var). Note: Effect on Batch Norm
+            and its variants only. Default: False.
+        with_cp (bool): Use checkpoint or not. Using checkpoint will save some
+            memory while slowing down the training speed. Default: False.
+        pretrained (str, optional): model pretrained path. Default: None
+        init_cfg (dict or list[dict], optional): Initialization config dict.
+            Default: None
+    """
+
+    # Parameters to build layers. 4 parameters are needed to construct a
+    # layer, from left to right: expand_ratio, channel, num_blocks, stride.
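+    # For example, the entry [6, 24, 2, 2] below builds a stage of two
+    # InvertedResidual blocks with expand_ratio=6 and 24 output channels,
+    # where only the first block uses stride 2 (an illustration of the
+    # convention described above).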
+ arch_settings = [[1, 16, 1, 1], [6, 24, 2, 2], [6, 32, 3, 2], + [6, 64, 4, 2], [6, 96, 3, 1], [6, 160, 3, 2], + [6, 320, 1, 1]] + + def __init__(self, + widen_factor=1., + out_indices=(1, 2, 4, 7), + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU6'), + norm_eval=False, + with_cp=False, + pretrained=None, + init_cfg=None): + super(MobileNetV2, self).__init__(init_cfg) + + self.pretrained = pretrained + assert not (init_cfg and pretrained), \ + 'init_cfg and pretrained cannot be setting at the same time' + if isinstance(pretrained, str): + warnings.warn('DeprecationWarning: pretrained is deprecated, ' + 'please use "init_cfg" instead') + self.init_cfg = dict(type='Pretrained', checkpoint=pretrained) + elif pretrained is None: + if init_cfg is None: + self.init_cfg = [ + dict(type='Kaiming', layer='Conv2d'), + dict( + type='Constant', + val=1, + layer=['_BatchNorm', 'GroupNorm']) + ] + else: + raise TypeError('pretrained must be a str or None') + + self.widen_factor = widen_factor + self.out_indices = out_indices + if not set(out_indices).issubset(set(range(0, 8))): + raise ValueError('out_indices must be a subset of range' + f'(0, 8). But received {out_indices}') + + if frozen_stages not in range(-1, 8): + raise ValueError('frozen_stages must be in range(-1, 8). ' + f'But received {frozen_stages}') + self.out_indices = out_indices + self.frozen_stages = frozen_stages + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + self.norm_eval = norm_eval + self.with_cp = with_cp + + self.in_channels = make_divisible(32 * widen_factor, 8) + + self.conv1 = ConvModule( + in_channels=3, + out_channels=self.in_channels, + kernel_size=3, + stride=2, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg) + + self.layers = [] + + for i, layer_cfg in enumerate(self.arch_settings): + expand_ratio, channel, num_blocks, stride = layer_cfg + out_channels = make_divisible(channel * widen_factor, 8) + inverted_res_layer = self.make_layer( + out_channels=out_channels, + num_blocks=num_blocks, + stride=stride, + expand_ratio=expand_ratio) + layer_name = f'layer{i + 1}' + self.add_module(layer_name, inverted_res_layer) + self.layers.append(layer_name) + + if widen_factor > 1.0: + self.out_channel = int(1280 * widen_factor) + else: + self.out_channel = 1280 + + layer = ConvModule( + in_channels=self.in_channels, + out_channels=self.out_channel, + kernel_size=1, + stride=1, + padding=0, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg) + self.add_module('conv2', layer) + self.layers.append('conv2') + + def make_layer(self, out_channels, num_blocks, stride, expand_ratio): + """Stack InvertedResidual blocks to build a layer for MobileNetV2. + + Args: + out_channels (int): out_channels of block. + num_blocks (int): number of blocks. + stride (int): stride of the first block. Default: 1 + expand_ratio (int): Expand the number of channels of the + hidden layer in InvertedResidual by this ratio. Default: 6. 
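+
+        For example, with ``self.in_channels=24`` and ``expand_ratio=6``,
+        each block's hidden layer gets ``int(round(24 * 6)) = 144`` channels
+        (a worked instance of the ratio described above).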
+ """ + layers = [] + for i in range(num_blocks): + if i >= 1: + stride = 1 + layers.append( + InvertedResidual( + self.in_channels, + out_channels, + mid_channels=int(round(self.in_channels * expand_ratio)), + stride=stride, + with_expand_conv=expand_ratio != 1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg, + with_cp=self.with_cp)) + self.in_channels = out_channels + + return nn.Sequential(*layers) + + def _freeze_stages(self): + if self.frozen_stages >= 0: + for param in self.conv1.parameters(): + param.requires_grad = False + for i in range(1, self.frozen_stages + 1): + layer = getattr(self, f'layer{i}') + layer.eval() + for param in layer.parameters(): + param.requires_grad = False + + def forward(self, x): + """Forward function.""" + x = self.conv1(x) + outs = [] + for i, layer_name in enumerate(self.layers): + layer = getattr(self, layer_name) + x = layer(x) + if i in self.out_indices: + outs.append(x) + return tuple(outs) + + def train(self, mode=True): + """Convert the model into training mode while keep normalization layer + frozen.""" + super(MobileNetV2, self).train(mode) + self._freeze_stages() + if mode and self.norm_eval: + for m in self.modules(): + # trick: eval have effect on BatchNorm only + if isinstance(m, _BatchNorm): + m.eval() diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/regnet.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/regnet.py new file mode 100644 index 0000000000000000000000000000000000000000..024d4323fc088cd79fe07d79c7b08186d30ff0b4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/regnet.py @@ -0,0 +1,355 @@ +import warnings + +import numpy as np +import torch.nn as nn +from mmcv.cnn import build_conv_layer, build_norm_layer + +from ..builder import BACKBONES +from .resnet import ResNet +from .resnext import Bottleneck + + +@BACKBONES.register_module() +class RegNet(ResNet): + """RegNet backbone. + + More details can be found in `paper `_ . + + Args: + arch (dict): The parameter of RegNets. + + - w0 (int): initial width + - wa (float): slope of width + - wm (float): quantization parameter to quantize the width + - depth (int): depth of the backbone + - group_w (int): width of group + - bot_mul (float): bottleneck ratio, i.e. expansion of bottleneck. + strides (Sequence[int]): Strides of the first block of each stage. + base_channels (int): Base channels after stem layer. + in_channels (int): Number of input image channels. Default: 3. + dilations (Sequence[int]): Dilation of each stage. + out_indices (Sequence[int]): Output from which stages. + style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. + frozen_stages (int): Stages to be frozen (all param fixed). -1 means + not freezing any parameters. + norm_cfg (dict): dictionary to construct and config norm layer. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. + zero_init_residual (bool): whether to use zero init for last norm layer + in resblocks to let them behave as identity. + pretrained (str, optional): model pretrained path. Default: None + init_cfg (dict or list[dict], optional): Initialization config dict. 
+ Default: None + + Example: + >>> from mmdet.models import RegNet + >>> import torch + >>> self = RegNet( + arch=dict( + w0=88, + wa=26.31, + wm=2.25, + group_w=48, + depth=25, + bot_mul=1.0)) + >>> self.eval() + >>> inputs = torch.rand(1, 3, 32, 32) + >>> level_outputs = self.forward(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + (1, 96, 8, 8) + (1, 192, 4, 4) + (1, 432, 2, 2) + (1, 1008, 1, 1) + """ + arch_settings = { + 'regnetx_400mf': + dict(w0=24, wa=24.48, wm=2.54, group_w=16, depth=22, bot_mul=1.0), + 'regnetx_800mf': + dict(w0=56, wa=35.73, wm=2.28, group_w=16, depth=16, bot_mul=1.0), + 'regnetx_1.6gf': + dict(w0=80, wa=34.01, wm=2.25, group_w=24, depth=18, bot_mul=1.0), + 'regnetx_3.2gf': + dict(w0=88, wa=26.31, wm=2.25, group_w=48, depth=25, bot_mul=1.0), + 'regnetx_4.0gf': + dict(w0=96, wa=38.65, wm=2.43, group_w=40, depth=23, bot_mul=1.0), + 'regnetx_6.4gf': + dict(w0=184, wa=60.83, wm=2.07, group_w=56, depth=17, bot_mul=1.0), + 'regnetx_8.0gf': + dict(w0=80, wa=49.56, wm=2.88, group_w=120, depth=23, bot_mul=1.0), + 'regnetx_12gf': + dict(w0=168, wa=73.36, wm=2.37, group_w=112, depth=19, bot_mul=1.0), + } + + def __init__(self, + arch, + in_channels=3, + stem_channels=32, + base_channels=32, + strides=(2, 2, 2, 2), + dilations=(1, 1, 1, 1), + out_indices=(0, 1, 2, 3), + style='pytorch', + deep_stem=False, + avg_down=False, + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + dcn=None, + stage_with_dcn=(False, False, False, False), + plugins=None, + with_cp=False, + zero_init_residual=True, + pretrained=None, + init_cfg=None): + super(ResNet, self).__init__(init_cfg) + + # Generate RegNet parameters first + if isinstance(arch, str): + assert arch in self.arch_settings, \ + f'"arch": "{arch}" is not one of the' \ + ' arch_settings' + arch = self.arch_settings[arch] + elif not isinstance(arch, dict): + raise ValueError('Expect "arch" to be either a string ' + f'or a dict, got {type(arch)}') + + widths, num_stages = self.generate_regnet( + arch['w0'], + arch['wa'], + arch['wm'], + arch['depth'], + ) + # Convert to per stage format + stage_widths, stage_blocks = self.get_stages_from_blocks(widths) + # Generate group widths and bot muls + group_widths = [arch['group_w'] for _ in range(num_stages)] + self.bottleneck_ratio = [arch['bot_mul'] for _ in range(num_stages)] + # Adjust the compatibility of stage_widths and group_widths + stage_widths, group_widths = self.adjust_width_group( + stage_widths, self.bottleneck_ratio, group_widths) + + # Group params by stage + self.stage_widths = stage_widths + self.group_widths = group_widths + self.depth = sum(stage_blocks) + self.stem_channels = stem_channels + self.base_channels = base_channels + self.num_stages = num_stages + assert num_stages >= 1 and num_stages <= 4 + self.strides = strides + self.dilations = dilations + assert len(strides) == len(dilations) == num_stages + self.out_indices = out_indices + assert max(out_indices) < num_stages + self.style = style + self.deep_stem = deep_stem + self.avg_down = avg_down + self.frozen_stages = frozen_stages + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.with_cp = with_cp + self.norm_eval = norm_eval + self.dcn = dcn + self.stage_with_dcn = stage_with_dcn + if dcn is not None: + assert len(stage_with_dcn) == num_stages + self.plugins = plugins + self.zero_init_residual = zero_init_residual + self.block = Bottleneck + expansion_bak = self.block.expansion + self.block.expansion = 1 + 
self.stage_blocks = stage_blocks[:num_stages] + + self._make_stem_layer(in_channels, stem_channels) + + block_init_cfg = None + assert not (init_cfg and pretrained), \ + 'init_cfg and pretrained cannot be setting at the same time' + if isinstance(pretrained, str): + warnings.warn('DeprecationWarning: pretrained is deprecated, ' + 'please use "init_cfg" instead') + self.init_cfg = dict(type='Pretrained', checkpoint=pretrained) + elif pretrained is None: + if init_cfg is None: + self.init_cfg = [ + dict(type='Kaiming', layer='Conv2d'), + dict( + type='Constant', + val=1, + layer=['_BatchNorm', 'GroupNorm']) + ] + if self.zero_init_residual: + block_init_cfg = dict( + type='Constant', val=0, override=dict(name='norm3')) + else: + raise TypeError('pretrained must be a str or None') + + self.inplanes = stem_channels + self.res_layers = [] + for i, num_blocks in enumerate(self.stage_blocks): + stride = self.strides[i] + dilation = self.dilations[i] + group_width = self.group_widths[i] + width = int(round(self.stage_widths[i] * self.bottleneck_ratio[i])) + stage_groups = width // group_width + + dcn = self.dcn if self.stage_with_dcn[i] else None + if self.plugins is not None: + stage_plugins = self.make_stage_plugins(self.plugins, i) + else: + stage_plugins = None + + res_layer = self.make_res_layer( + block=self.block, + inplanes=self.inplanes, + planes=self.stage_widths[i], + num_blocks=num_blocks, + stride=stride, + dilation=dilation, + style=self.style, + avg_down=self.avg_down, + with_cp=self.with_cp, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + dcn=dcn, + plugins=stage_plugins, + groups=stage_groups, + base_width=group_width, + base_channels=self.stage_widths[i], + init_cfg=block_init_cfg) + self.inplanes = self.stage_widths[i] + layer_name = f'layer{i + 1}' + self.add_module(layer_name, res_layer) + self.res_layers.append(layer_name) + + self._freeze_stages() + + self.feat_dim = stage_widths[-1] + self.block.expansion = expansion_bak + + def _make_stem_layer(self, in_channels, base_channels): + self.conv1 = build_conv_layer( + self.conv_cfg, + in_channels, + base_channels, + kernel_size=3, + stride=2, + padding=1, + bias=False) + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, base_channels, postfix=1) + self.add_module(self.norm1_name, norm1) + self.relu = nn.ReLU(inplace=True) + + def generate_regnet(self, + initial_width, + width_slope, + width_parameter, + depth, + divisor=8): + """Generates per block width from RegNet parameters. + + Args: + initial_width ([int]): Initial width of the backbone + width_slope ([float]): Slope of the quantized linear function + width_parameter ([int]): Parameter used to quantize the width. + depth ([int]): Depth of the backbone. + divisor (int, optional): The divisor of channels. Defaults to 8. + + Returns: + list, int: return a list of widths of each stage and the number \ + of stages + """ + assert width_slope >= 0 + assert initial_width > 0 + assert width_parameter > 1 + assert initial_width % divisor == 0 + widths_cont = np.arange(depth) * width_slope + initial_width + ks = np.round( + np.log(widths_cont / initial_width) / np.log(width_parameter)) + widths = initial_width * np.power(width_parameter, ks) + widths = np.round(np.divide(widths, divisor)) * divisor + num_stages = len(np.unique(widths)) + widths, widths_cont = widths.astype(int).tolist(), widths_cont.tolist() + return widths, num_stages + + @staticmethod + def quantize_float(number, divisor): + """Converts a float to closest non-zero int divisible by divisor. 
+
+        Args:
+            number (int): Original number to be quantized.
+            divisor (int): Divisor used to quantize the number.
+
+        Returns:
+            int: quantized number that is divisible by divisor.
+        """
+        return int(round(number / divisor) * divisor)
+
+    def adjust_width_group(self, widths, bottleneck_ratio, groups):
+        """Adjusts the compatibility of widths and groups.
+
+        Args:
+            widths (list[int]): Width of each stage.
+            bottleneck_ratio (float): Bottleneck ratio.
+            groups (int): number of groups in each stage
+
+        Returns:
+            tuple(list): The adjusted widths and groups of each stage.
+        """
+        bottleneck_width = [
+            int(w * b) for w, b in zip(widths, bottleneck_ratio)
+        ]
+        groups = [min(g, w_bot) for g, w_bot in zip(groups, bottleneck_width)]
+        bottleneck_width = [
+            self.quantize_float(w_bot, g)
+            for w_bot, g in zip(bottleneck_width, groups)
+        ]
+        widths = [
+            int(w_bot / b)
+            for w_bot, b in zip(bottleneck_width, bottleneck_ratio)
+        ]
+        return widths, groups
+
+    def get_stages_from_blocks(self, widths):
+        """Gets widths/stage_blocks of network at each stage.
+
+        Args:
+            widths (list[int]): Width in each stage.
+
+        Returns:
+            tuple(list): width and depth of each stage
+        """
+        width_diff = [
+            width != width_prev
+            for width, width_prev in zip(widths + [0], [0] + widths)
+        ]
+        stage_widths = [
+            width for width, diff in zip(widths, width_diff[:-1]) if diff
+        ]
+        stage_blocks = np.diff([
+            depth for depth, diff in zip(range(len(width_diff)), width_diff)
+            if diff
+        ]).tolist()
+        return stage_widths, stage_blocks
+
+    def forward(self, x):
+        """Forward function."""
+        x = self.conv1(x)
+        x = self.norm1(x)
+        x = self.relu(x)
+
+        outs = []
+        for i, layer_name in enumerate(self.res_layers):
+            res_layer = getattr(self, layer_name)
+            x = res_layer(x)
+            if i in self.out_indices:
+                outs.append(x)
+        return tuple(outs)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/res2net.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/res2net.py
new file mode 100644
index 0000000000000000000000000000000000000000..84951f008db3e2bac7537a3bb44bab10d9cb5a4a
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/res2net.py
@@ -0,0 +1,326 @@
+import math
+
+import torch
+import torch.nn as nn
+import torch.utils.checkpoint as cp
+from mmcv.cnn import build_conv_layer, build_norm_layer
+from mmcv.runner import Sequential
+
+from ..builder import BACKBONES
+from .resnet import Bottleneck as _Bottleneck
+from .resnet import ResNet
+
+
+class Bottle2neck(_Bottleneck):
+    expansion = 4
+
+    def __init__(self,
+                 inplanes,
+                 planes,
+                 scales=4,
+                 base_width=26,
+                 base_channels=64,
+                 stage_type='normal',
+                 **kwargs):
+        """Bottle2neck block for Res2Net.
+
+        If style is "pytorch", the stride-two layer is the 3x3 conv layer, if
+        it is "caffe", the stride-two layer is the first 1x1 conv layer.
+        """
+        super(Bottle2neck, self).__init__(inplanes, planes, **kwargs)
+        assert scales > 1, 'Res2Net degenerates to ResNet when scales = 1.'
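+        # Illustrative numbers for the width computed below: with planes=64
+        # and the defaults base_width=26, base_channels=64, each of the
+        # `scales` branches operates on width = 26 channels.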
+ width = int(math.floor(self.planes * (base_width / base_channels))) + + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, width * scales, postfix=1) + self.norm3_name, norm3 = build_norm_layer( + self.norm_cfg, self.planes * self.expansion, postfix=3) + + self.conv1 = build_conv_layer( + self.conv_cfg, + self.inplanes, + width * scales, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + + if stage_type == 'stage' and self.conv2_stride != 1: + self.pool = nn.AvgPool2d( + kernel_size=3, stride=self.conv2_stride, padding=1) + convs = [] + bns = [] + + fallback_on_stride = False + if self.with_dcn: + fallback_on_stride = self.dcn.pop('fallback_on_stride', False) + if not self.with_dcn or fallback_on_stride: + for i in range(scales - 1): + convs.append( + build_conv_layer( + self.conv_cfg, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + bias=False)) + bns.append( + build_norm_layer(self.norm_cfg, width, postfix=i + 1)[1]) + self.convs = nn.ModuleList(convs) + self.bns = nn.ModuleList(bns) + else: + assert self.conv_cfg is None, 'conv_cfg must be None for DCN' + for i in range(scales - 1): + convs.append( + build_conv_layer( + self.dcn, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + bias=False)) + bns.append( + build_norm_layer(self.norm_cfg, width, postfix=i + 1)[1]) + self.convs = nn.ModuleList(convs) + self.bns = nn.ModuleList(bns) + + self.conv3 = build_conv_layer( + self.conv_cfg, + width * scales, + self.planes * self.expansion, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + self.stage_type = stage_type + self.scales = scales + self.width = width + delattr(self, 'conv2') + delattr(self, self.norm2_name) + + def forward(self, x): + """Forward function.""" + + def _inner_forward(x): + identity = x + + out = self.conv1(x) + out = self.norm1(out) + out = self.relu(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv1_plugin_names) + + spx = torch.split(out, self.width, 1) + sp = self.convs[0](spx[0].contiguous()) + sp = self.relu(self.bns[0](sp)) + out = sp + for i in range(1, self.scales - 1): + if self.stage_type == 'stage': + sp = spx[i] + else: + sp = sp + spx[i] + sp = self.convs[i](sp.contiguous()) + sp = self.relu(self.bns[i](sp)) + out = torch.cat((out, sp), 1) + + if self.stage_type == 'normal' or self.conv2_stride == 1: + out = torch.cat((out, spx[self.scales - 1]), 1) + elif self.stage_type == 'stage': + out = torch.cat((out, self.pool(spx[self.scales - 1])), 1) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv2_plugin_names) + + out = self.conv3(out) + out = self.norm3(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv3_plugin_names) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + out = self.relu(out) + + return out + + +class Res2Layer(Sequential): + """Res2Layer to build Res2Net style backbone. + + Args: + block (nn.Module): block used to build ResLayer. + inplanes (int): inplanes of block. + planes (int): planes of block. + num_blocks (int): number of blocks. + stride (int): stride of the first block. 
Default: 1 + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottle2neck. Default: False + conv_cfg (dict): dictionary to construct and config conv layer. + Default: None + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN') + scales (int): Scales used in Res2Net. Default: 4 + base_width (int): Basic width of each scale. Default: 26 + """ + + def __init__(self, + block, + inplanes, + planes, + num_blocks, + stride=1, + avg_down=True, + conv_cfg=None, + norm_cfg=dict(type='BN'), + scales=4, + base_width=26, + **kwargs): + self.block = block + + downsample = None + if stride != 1 or inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.AvgPool2d( + kernel_size=stride, + stride=stride, + ceil_mode=True, + count_include_pad=False), + build_conv_layer( + conv_cfg, + inplanes, + planes * block.expansion, + kernel_size=1, + stride=1, + bias=False), + build_norm_layer(norm_cfg, planes * block.expansion)[1], + ) + + layers = [] + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=stride, + downsample=downsample, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + scales=scales, + base_width=base_width, + stage_type='stage', + **kwargs)) + inplanes = planes * block.expansion + for i in range(1, num_blocks): + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + scales=scales, + base_width=base_width, + **kwargs)) + super(Res2Layer, self).__init__(*layers) + + +@BACKBONES.register_module() +class Res2Net(ResNet): + """Res2Net backbone. + + Args: + scales (int): Scales used in Res2Net. Default: 4 + base_width (int): Basic width of each scale. Default: 26 + depth (int): Depth of res2net, from {50, 101, 152}. + in_channels (int): Number of input image channels. Default: 3. + num_stages (int): Res2net stages. Default: 4. + strides (Sequence[int]): Strides of the first block of each stage. + dilations (Sequence[int]): Dilation of each stage. + out_indices (Sequence[int]): Output from which stages. + style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. + deep_stem (bool): Replace 7x7 conv in input stem with 3 3x3 conv + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottle2neck. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. + norm_cfg (dict): Dictionary to construct and config norm layer. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. + plugins (list[dict]): List of plugins for stages, each dict contains: + + - cfg (dict, required): Cfg dict to build plugin. + - position (str, required): Position inside block to insert + plugin, options are 'after_conv1', 'after_conv2', 'after_conv3'. + - stages (tuple[bool], optional): Stages to apply plugin, length + should be same as 'num_stages'. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. + zero_init_residual (bool): Whether to use zero init for last norm layer + in resblocks to let them behave as identity. + pretrained (str, optional): model pretrained path. Default: None + init_cfg (dict or list[dict], optional): Initialization config dict. 
+            Default: None
+
+    Example:
+        >>> from mmdet.models import Res2Net
+        >>> import torch
+        >>> self = Res2Net(depth=50, scales=4, base_width=26)
+        >>> self.eval()
+        >>> inputs = torch.rand(1, 3, 32, 32)
+        >>> level_outputs = self.forward(inputs)
+        >>> for level_out in level_outputs:
+        ...     print(tuple(level_out.shape))
+        (1, 256, 8, 8)
+        (1, 512, 4, 4)
+        (1, 1024, 2, 2)
+        (1, 2048, 1, 1)
+    """
+
+    arch_settings = {
+        50: (Bottle2neck, (3, 4, 6, 3)),
+        101: (Bottle2neck, (3, 4, 23, 3)),
+        152: (Bottle2neck, (3, 8, 36, 3))
+    }
+
+    def __init__(self,
+                 scales=4,
+                 base_width=26,
+                 style='pytorch',
+                 deep_stem=True,
+                 avg_down=True,
+                 pretrained=None,
+                 init_cfg=None,
+                 **kwargs):
+        self.scales = scales
+        self.base_width = base_width
+        super(Res2Net, self).__init__(
+            style='pytorch',
+            deep_stem=True,
+            avg_down=True,
+            pretrained=pretrained,
+            init_cfg=init_cfg,
+            **kwargs)
+
+    def make_res_layer(self, **kwargs):
+        return Res2Layer(
+            scales=self.scales,
+            base_width=self.base_width,
+            base_channels=self.base_channels,
+            **kwargs)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/resnest.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/resnest.py
new file mode 100644
index 0000000000000000000000000000000000000000..0fd65aeb67ed4d62e034eb23fb3965fb3c6a0ce0
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/resnest.py
@@ -0,0 +1,321 @@
+import math
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+import torch.utils.checkpoint as cp
+from mmcv.cnn import build_conv_layer, build_norm_layer
+from mmcv.runner import BaseModule
+
+from ..builder import BACKBONES
+from ..utils import ResLayer
+from .resnet import Bottleneck as _Bottleneck
+from .resnet import ResNetV1d
+
+
+class RSoftmax(nn.Module):
+    """Radix Softmax module in ``SplitAttentionConv2d``.
+
+    Args:
+        radix (int): Radix of input.
+        groups (int): Groups of input.
+    """
+
+    def __init__(self, radix, groups):
+        super().__init__()
+        self.radix = radix
+        self.groups = groups
+
+    def forward(self, x):
+        batch = x.size(0)
+        if self.radix > 1:
+            x = x.view(batch, self.groups, self.radix, -1).transpose(1, 2)
+            x = F.softmax(x, dim=1)
+            x = x.reshape(batch, -1)
+        else:
+            x = torch.sigmoid(x)
+        return x
+
+
+class SplitAttentionConv2d(BaseModule):
+    """Split-Attention Conv2d in ResNeSt.
+
+    Args:
+        in_channels (int): Number of channels in the input feature map.
+        channels (int): Number of intermediate channels.
+        kernel_size (int | tuple[int]): Size of the convolution kernel.
+        stride (int | tuple[int]): Stride of the convolution.
+        padding (int | tuple[int]): Zero-padding added to both sides of
+            the input.
+        dilation (int | tuple[int]): Spacing between kernel elements.
+        groups (int): Number of blocked connections from input channels to
+            output channels. Same as nn.Conv2d.
+        radix (int): Radix of SplitAttentionConv2d. Default: 2
+        reduction_factor (int): Reduction factor of inter_channels. Default: 4.
+        conv_cfg (dict): Config dict for convolution layer. Default: None,
+            which means using conv2d.
+        norm_cfg (dict): Config dict for normalization layer.
+            Default: dict(type='BN').
+        dcn (dict): Config dict for DCN. Default: None.
+        init_cfg (dict or list[dict], optional): Initialization config dict.
+ Default: None + """ + + def __init__(self, + in_channels, + channels, + kernel_size, + stride=1, + padding=0, + dilation=1, + groups=1, + radix=2, + reduction_factor=4, + conv_cfg=None, + norm_cfg=dict(type='BN'), + dcn=None, + init_cfg=None): + super(SplitAttentionConv2d, self).__init__(init_cfg) + inter_channels = max(in_channels * radix // reduction_factor, 32) + self.radix = radix + self.groups = groups + self.channels = channels + self.with_dcn = dcn is not None + self.dcn = dcn + fallback_on_stride = False + if self.with_dcn: + fallback_on_stride = self.dcn.pop('fallback_on_stride', False) + if self.with_dcn and not fallback_on_stride: + assert conv_cfg is None, 'conv_cfg must be None for DCN' + conv_cfg = dcn + self.conv = build_conv_layer( + conv_cfg, + in_channels, + channels * radix, + kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + groups=groups * radix, + bias=False) + # To be consistent with original implementation, starting from 0 + self.norm0_name, norm0 = build_norm_layer( + norm_cfg, channels * radix, postfix=0) + self.add_module(self.norm0_name, norm0) + self.relu = nn.ReLU(inplace=True) + self.fc1 = build_conv_layer( + None, channels, inter_channels, 1, groups=self.groups) + self.norm1_name, norm1 = build_norm_layer( + norm_cfg, inter_channels, postfix=1) + self.add_module(self.norm1_name, norm1) + self.fc2 = build_conv_layer( + None, inter_channels, channels * radix, 1, groups=self.groups) + self.rsoftmax = RSoftmax(radix, groups) + + @property + def norm0(self): + """nn.Module: the normalization layer named "norm0" """ + return getattr(self, self.norm0_name) + + @property + def norm1(self): + """nn.Module: the normalization layer named "norm1" """ + return getattr(self, self.norm1_name) + + def forward(self, x): + x = self.conv(x) + x = self.norm0(x) + x = self.relu(x) + + batch, rchannel = x.shape[:2] + batch = x.size(0) + if self.radix > 1: + splits = x.view(batch, self.radix, -1, *x.shape[2:]) + gap = splits.sum(dim=1) + else: + gap = x + gap = F.adaptive_avg_pool2d(gap, 1) + gap = self.fc1(gap) + + gap = self.norm1(gap) + gap = self.relu(gap) + + atten = self.fc2(gap) + atten = self.rsoftmax(atten).view(batch, -1, 1, 1) + + if self.radix > 1: + attens = atten.view(batch, self.radix, -1, *atten.shape[2:]) + out = torch.sum(attens * splits, dim=1) + else: + out = atten * x + return out.contiguous() + + +class Bottleneck(_Bottleneck): + """Bottleneck block for ResNeSt. + + Args: + inplane (int): Input planes of this block. + planes (int): Middle planes of this block. + groups (int): Groups of conv2. + base_width (int): Base of width in terms of base channels. Default: 4. + base_channels (int): Base of channels for calculating width. + Default: 64. + radix (int): Radix of SpltAtConv2d. Default: 2 + reduction_factor (int): Reduction factor of inter_channels in + SplitAttentionConv2d. Default: 4. + avg_down_stride (bool): Whether to use average pool for stride in + Bottleneck. Default: True. + kwargs (dict): Key word arguments for base class. 
+ """ + expansion = 4 + + def __init__(self, + inplanes, + planes, + groups=1, + base_width=4, + base_channels=64, + radix=2, + reduction_factor=4, + avg_down_stride=True, + **kwargs): + """Bottleneck block for ResNeSt.""" + super(Bottleneck, self).__init__(inplanes, planes, **kwargs) + + if groups == 1: + width = self.planes + else: + width = math.floor(self.planes * + (base_width / base_channels)) * groups + + self.avg_down_stride = avg_down_stride and self.conv2_stride > 1 + + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, width, postfix=1) + self.norm3_name, norm3 = build_norm_layer( + self.norm_cfg, self.planes * self.expansion, postfix=3) + + self.conv1 = build_conv_layer( + self.conv_cfg, + self.inplanes, + width, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + self.with_modulated_dcn = False + self.conv2 = SplitAttentionConv2d( + width, + width, + kernel_size=3, + stride=1 if self.avg_down_stride else self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + radix=radix, + reduction_factor=reduction_factor, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + dcn=self.dcn) + delattr(self, self.norm2_name) + + if self.avg_down_stride: + self.avd_layer = nn.AvgPool2d(3, self.conv2_stride, padding=1) + + self.conv3 = build_conv_layer( + self.conv_cfg, + width, + self.planes * self.expansion, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + def forward(self, x): + + def _inner_forward(x): + identity = x + + out = self.conv1(x) + out = self.norm1(out) + out = self.relu(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv1_plugin_names) + + out = self.conv2(out) + + if self.avg_down_stride: + out = self.avd_layer(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv2_plugin_names) + + out = self.conv3(out) + out = self.norm3(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv3_plugin_names) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + out = self.relu(out) + + return out + + +@BACKBONES.register_module() +class ResNeSt(ResNetV1d): + """ResNeSt backbone. + + Args: + groups (int): Number of groups of Bottleneck. Default: 1 + base_width (int): Base width of Bottleneck. Default: 4 + radix (int): Radix of SplitAttentionConv2d. Default: 2 + reduction_factor (int): Reduction factor of inter_channels in + SplitAttentionConv2d. Default: 4. + avg_down_stride (bool): Whether to use average pool for stride in + Bottleneck. Default: True. + kwargs (dict): Keyword arguments for ResNet. 
+ """ + + arch_settings = { + 50: (Bottleneck, (3, 4, 6, 3)), + 101: (Bottleneck, (3, 4, 23, 3)), + 152: (Bottleneck, (3, 8, 36, 3)), + 200: (Bottleneck, (3, 24, 36, 3)) + } + + def __init__(self, + groups=1, + base_width=4, + radix=2, + reduction_factor=4, + avg_down_stride=True, + **kwargs): + self.groups = groups + self.base_width = base_width + self.radix = radix + self.reduction_factor = reduction_factor + self.avg_down_stride = avg_down_stride + super(ResNeSt, self).__init__(**kwargs) + + def make_res_layer(self, **kwargs): + """Pack all blocks in a stage into a ``ResLayer``.""" + return ResLayer( + groups=self.groups, + base_width=self.base_width, + base_channels=self.base_channels, + radix=self.radix, + reduction_factor=self.reduction_factor, + avg_down_stride=self.avg_down_stride, + **kwargs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/resnet.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/resnet.py new file mode 100644 index 0000000000000000000000000000000000000000..a61fa488004b7730e4dea2be79787c5ed1a207d0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/resnet.py @@ -0,0 +1,671 @@ +import warnings + +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import build_conv_layer, build_norm_layer, build_plugin_layer +from mmcv.runner import BaseModule +from torch.nn.modules.batchnorm import _BatchNorm + +from ..builder import BACKBONES +from ..utils import ResLayer + + +class BasicBlock(BaseModule): + expansion = 1 + + def __init__(self, + inplanes, + planes, + stride=1, + dilation=1, + downsample=None, + style='pytorch', + with_cp=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + dcn=None, + plugins=None, + init_cfg=None): + super(BasicBlock, self).__init__(init_cfg) + assert dcn is None, 'Not implemented yet.' + assert plugins is None, 'Not implemented yet.' 
+ + self.norm1_name, norm1 = build_norm_layer(norm_cfg, planes, postfix=1) + self.norm2_name, norm2 = build_norm_layer(norm_cfg, planes, postfix=2) + + self.conv1 = build_conv_layer( + conv_cfg, + inplanes, + planes, + 3, + stride=stride, + padding=dilation, + dilation=dilation, + bias=False) + self.add_module(self.norm1_name, norm1) + self.conv2 = build_conv_layer( + conv_cfg, planes, planes, 3, padding=1, bias=False) + self.add_module(self.norm2_name, norm2) + + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + self.dilation = dilation + self.with_cp = with_cp + + @property + def norm1(self): + """nn.Module: normalization layer after the first convolution layer""" + return getattr(self, self.norm1_name) + + @property + def norm2(self): + """nn.Module: normalization layer after the second convolution layer""" + return getattr(self, self.norm2_name) + + def forward(self, x): + """Forward function.""" + + def _inner_forward(x): + identity = x + + out = self.conv1(x) + out = self.norm1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.norm2(out) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + out = self.relu(out) + + return out + + +class Bottleneck(BaseModule): + expansion = 4 + + def __init__(self, + inplanes, + planes, + stride=1, + dilation=1, + downsample=None, + style='pytorch', + with_cp=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + dcn=None, + plugins=None, + init_cfg=None): + """Bottleneck block for ResNet. + + If style is "pytorch", the stride-two layer is the 3x3 conv layer, if + it is "caffe", the stride-two layer is the first 1x1 conv layer. 
+ """ + super(Bottleneck, self).__init__(init_cfg) + assert style in ['pytorch', 'caffe'] + assert dcn is None or isinstance(dcn, dict) + assert plugins is None or isinstance(plugins, list) + if plugins is not None: + allowed_position = ['after_conv1', 'after_conv2', 'after_conv3'] + assert all(p['position'] in allowed_position for p in plugins) + + self.inplanes = inplanes + self.planes = planes + self.stride = stride + self.dilation = dilation + self.style = style + self.with_cp = with_cp + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.dcn = dcn + self.with_dcn = dcn is not None + self.plugins = plugins + self.with_plugins = plugins is not None + + if self.with_plugins: + # collect plugins for conv1/conv2/conv3 + self.after_conv1_plugins = [ + plugin['cfg'] for plugin in plugins + if plugin['position'] == 'after_conv1' + ] + self.after_conv2_plugins = [ + plugin['cfg'] for plugin in plugins + if plugin['position'] == 'after_conv2' + ] + self.after_conv3_plugins = [ + plugin['cfg'] for plugin in plugins + if plugin['position'] == 'after_conv3' + ] + + if self.style == 'pytorch': + self.conv1_stride = 1 + self.conv2_stride = stride + else: + self.conv1_stride = stride + self.conv2_stride = 1 + + self.norm1_name, norm1 = build_norm_layer(norm_cfg, planes, postfix=1) + self.norm2_name, norm2 = build_norm_layer(norm_cfg, planes, postfix=2) + self.norm3_name, norm3 = build_norm_layer( + norm_cfg, planes * self.expansion, postfix=3) + + self.conv1 = build_conv_layer( + conv_cfg, + inplanes, + planes, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + fallback_on_stride = False + if self.with_dcn: + fallback_on_stride = dcn.pop('fallback_on_stride', False) + if not self.with_dcn or fallback_on_stride: + self.conv2 = build_conv_layer( + conv_cfg, + planes, + planes, + kernel_size=3, + stride=self.conv2_stride, + padding=dilation, + dilation=dilation, + bias=False) + else: + assert self.conv_cfg is None, 'conv_cfg must be None for DCN' + self.conv2 = build_conv_layer( + dcn, + planes, + planes, + kernel_size=3, + stride=self.conv2_stride, + padding=dilation, + dilation=dilation, + bias=False) + + self.add_module(self.norm2_name, norm2) + self.conv3 = build_conv_layer( + conv_cfg, + planes, + planes * self.expansion, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + + if self.with_plugins: + self.after_conv1_plugin_names = self.make_block_plugins( + planes, self.after_conv1_plugins) + self.after_conv2_plugin_names = self.make_block_plugins( + planes, self.after_conv2_plugins) + self.after_conv3_plugin_names = self.make_block_plugins( + planes * self.expansion, self.after_conv3_plugins) + + def make_block_plugins(self, in_channels, plugins): + """make plugins for block. + + Args: + in_channels (int): Input channels of plugin. + plugins (list[dict]): List of plugins cfg to build. + + Returns: + list[str]: List of the names of plugin. 
+ """ + assert isinstance(plugins, list) + plugin_names = [] + for plugin in plugins: + plugin = plugin.copy() + name, layer = build_plugin_layer( + plugin, + in_channels=in_channels, + postfix=plugin.pop('postfix', '')) + assert not hasattr(self, name), f'duplicate plugin {name}' + self.add_module(name, layer) + plugin_names.append(name) + return plugin_names + + def forward_plugin(self, x, plugin_names): + out = x + for name in plugin_names: + out = getattr(self, name)(x) + return out + + @property + def norm1(self): + """nn.Module: normalization layer after the first convolution layer""" + return getattr(self, self.norm1_name) + + @property + def norm2(self): + """nn.Module: normalization layer after the second convolution layer""" + return getattr(self, self.norm2_name) + + @property + def norm3(self): + """nn.Module: normalization layer after the third convolution layer""" + return getattr(self, self.norm3_name) + + def forward(self, x): + """Forward function.""" + + def _inner_forward(x): + identity = x + out = self.conv1(x) + out = self.norm1(out) + out = self.relu(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv1_plugin_names) + + out = self.conv2(out) + out = self.norm2(out) + out = self.relu(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv2_plugin_names) + + out = self.conv3(out) + out = self.norm3(out) + + if self.with_plugins: + out = self.forward_plugin(out, self.after_conv3_plugin_names) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + out = self.relu(out) + + return out + + +@BACKBONES.register_module() +class ResNet(BaseModule): + """ResNet backbone. + + Args: + depth (int): Depth of resnet, from {18, 34, 50, 101, 152}. + stem_channels (int | None): Number of stem channels. If not specified, + it will be the same as `base_channels`. Default: None. + base_channels (int): Number of base channels of res layer. Default: 64. + in_channels (int): Number of input image channels. Default: 3. + num_stages (int): Resnet stages. Default: 4. + strides (Sequence[int]): Strides of the first block of each stage. + dilations (Sequence[int]): Dilation of each stage. + out_indices (Sequence[int]): Output from which stages. + style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two + layer is the 3x3 conv layer, otherwise the stride-two layer is + the first 1x1 conv layer. + deep_stem (bool): Replace 7x7 conv in input stem with 3 3x3 conv + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottleneck. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. + norm_cfg (dict): Dictionary to construct and config norm layer. + norm_eval (bool): Whether to set norm layers to eval mode, namely, + freeze running stats (mean and var). Note: Effect on Batch Norm + and its variants only. + plugins (list[dict]): List of plugins for stages, each dict contains: + + - cfg (dict, required): Cfg dict to build plugin. + - position (str, required): Position inside block to insert + plugin, options are 'after_conv1', 'after_conv2', 'after_conv3'. + - stages (tuple[bool], optional): Stages to apply plugin, length + should be same as 'num_stages'. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. 
+ zero_init_residual (bool): Whether to use zero init for last norm layer + in resblocks to let them behave as identity. + pretrained (str, optional): model pretrained path. Default: None + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + + Example: + >>> from mmdet.models import ResNet + >>> import torch + >>> self = ResNet(depth=18) + >>> self.eval() + >>> inputs = torch.rand(1, 3, 32, 32) + >>> level_outputs = self.forward(inputs) + >>> for level_out in level_outputs: + ... print(tuple(level_out.shape)) + (1, 64, 8, 8) + (1, 128, 4, 4) + (1, 256, 2, 2) + (1, 512, 1, 1) + """ + + arch_settings = { + 18: (BasicBlock, (2, 2, 2, 2)), + 34: (BasicBlock, (3, 4, 6, 3)), + 50: (Bottleneck, (3, 4, 6, 3)), + 101: (Bottleneck, (3, 4, 23, 3)), + 152: (Bottleneck, (3, 8, 36, 3)) + } + + def __init__(self, + depth, + in_channels=3, + stem_channels=None, + base_channels=64, + num_stages=4, + strides=(1, 2, 2, 2), + dilations=(1, 1, 1, 1), + out_indices=(0, 1, 2, 3), + style='pytorch', + deep_stem=False, + avg_down=False, + frozen_stages=-1, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + norm_eval=True, + dcn=None, + stage_with_dcn=(False, False, False, False), + plugins=None, + with_cp=False, + zero_init_residual=True, + pretrained=None, + init_cfg=None): + super(ResNet, self).__init__(init_cfg) + self.zero_init_residual = zero_init_residual + if depth not in self.arch_settings: + raise KeyError(f'invalid depth {depth} for resnet') + + block_init_cfg = None + assert not (init_cfg and pretrained), \ + 'init_cfg and pretrained cannot be setting at the same time' + if isinstance(pretrained, str): + warnings.warn('DeprecationWarning: pretrained is deprecated, ' + 'please use "init_cfg" instead') + self.init_cfg = dict(type='Pretrained', checkpoint=pretrained) + elif pretrained is None: + if init_cfg is None: + self.init_cfg = [ + dict(type='Kaiming', layer='Conv2d'), + dict( + type='Constant', + val=1, + layer=['_BatchNorm', 'GroupNorm']) + ] + block = self.arch_settings[depth][0] + if self.zero_init_residual: + if block is BasicBlock: + block_init_cfg = dict( + type='Constant', + val=0, + override=dict(name='norm2')) + elif block is Bottleneck: + block_init_cfg = dict( + type='Constant', + val=0, + override=dict(name='norm3')) + else: + raise TypeError('pretrained must be a str or None') + + self.depth = depth + if stem_channels is None: + stem_channels = base_channels + self.stem_channels = stem_channels + self.base_channels = base_channels + self.num_stages = num_stages + assert num_stages >= 1 and num_stages <= 4 + self.strides = strides + self.dilations = dilations + assert len(strides) == len(dilations) == num_stages + self.out_indices = out_indices + assert max(out_indices) < num_stages + self.style = style + self.deep_stem = deep_stem + self.avg_down = avg_down + self.frozen_stages = frozen_stages + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.with_cp = with_cp + self.norm_eval = norm_eval + self.dcn = dcn + self.stage_with_dcn = stage_with_dcn + if dcn is not None: + assert len(stage_with_dcn) == num_stages + self.plugins = plugins + self.block, stage_blocks = self.arch_settings[depth] + self.stage_blocks = stage_blocks[:num_stages] + self.inplanes = stem_channels + + self._make_stem_layer(in_channels, stem_channels) + + self.res_layers = [] + for i, num_blocks in enumerate(self.stage_blocks): + stride = strides[i] + dilation = dilations[i] + dcn = self.dcn if self.stage_with_dcn[i] else None + if plugins is not None: + 
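+                # keep only the plugins whose ``stages`` flag enables them for
+                # stage i (see ``make_stage_plugins`` below)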
stage_plugins = self.make_stage_plugins(plugins, i) + else: + stage_plugins = None + planes = base_channels * 2**i + res_layer = self.make_res_layer( + block=self.block, + inplanes=self.inplanes, + planes=planes, + num_blocks=num_blocks, + stride=stride, + dilation=dilation, + style=self.style, + avg_down=self.avg_down, + with_cp=with_cp, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + dcn=dcn, + plugins=stage_plugins, + init_cfg=block_init_cfg) + self.inplanes = planes * self.block.expansion + layer_name = f'layer{i + 1}' + self.add_module(layer_name, res_layer) + self.res_layers.append(layer_name) + + self._freeze_stages() + + self.feat_dim = self.block.expansion * base_channels * 2**( + len(self.stage_blocks) - 1) + + def make_stage_plugins(self, plugins, stage_idx): + """Make plugins for ResNet ``stage_idx`` th stage. + + Currently we support to insert ``context_block``, + ``empirical_attention_block``, ``nonlocal_block`` into the backbone + like ResNet/ResNeXt. They could be inserted after conv1/conv2/conv3 of + Bottleneck. + + An example of plugins format could be: + + Examples: + >>> plugins=[ + ... dict(cfg=dict(type='xxx', arg1='xxx'), + ... stages=(False, True, True, True), + ... position='after_conv2'), + ... dict(cfg=dict(type='yyy'), + ... stages=(True, True, True, True), + ... position='after_conv3'), + ... dict(cfg=dict(type='zzz', postfix='1'), + ... stages=(True, True, True, True), + ... position='after_conv3'), + ... dict(cfg=dict(type='zzz', postfix='2'), + ... stages=(True, True, True, True), + ... position='after_conv3') + ... ] + >>> self = ResNet(depth=18) + >>> stage_plugins = self.make_stage_plugins(plugins, 0) + >>> assert len(stage_plugins) == 3 + + Suppose ``stage_idx=0``, the structure of blocks in the stage would be: + + .. code-block:: none + + conv1-> conv2->conv3->yyy->zzz1->zzz2 + + Suppose 'stage_idx=1', the structure of blocks in the stage would be: + + .. code-block:: none + + conv1-> conv2->xxx->conv3->yyy->zzz1->zzz2 + + If stages is missing, the plugin would be applied to all stages. + + Args: + plugins (list[dict]): List of plugins cfg to build. The postfix is + required if multiple same type plugins are inserted. 
+            stage_idx (int): Index of stage to build
+
+        Returns:
+            list[dict]: Plugins for current stage
+        """
+        stage_plugins = []
+        for plugin in plugins:
+            plugin = plugin.copy()
+            stages = plugin.pop('stages', None)
+            assert stages is None or len(stages) == self.num_stages
+            # whether to insert plugin into current stage
+            if stages is None or stages[stage_idx]:
+                stage_plugins.append(plugin)
+
+        return stage_plugins
+
+    def make_res_layer(self, **kwargs):
+        """Pack all blocks in a stage into a ``ResLayer``."""
+        return ResLayer(**kwargs)
+
+    @property
+    def norm1(self):
+        """nn.Module: the normalization layer named "norm1" """
+        return getattr(self, self.norm1_name)
+
+    def _make_stem_layer(self, in_channels, stem_channels):
+        if self.deep_stem:
+            self.stem = nn.Sequential(
+                build_conv_layer(
+                    self.conv_cfg,
+                    in_channels,
+                    stem_channels // 2,
+                    kernel_size=3,
+                    stride=2,
+                    padding=1,
+                    bias=False),
+                build_norm_layer(self.norm_cfg, stem_channels // 2)[1],
+                nn.ReLU(inplace=True),
+                build_conv_layer(
+                    self.conv_cfg,
+                    stem_channels // 2,
+                    stem_channels // 2,
+                    kernel_size=3,
+                    stride=1,
+                    padding=1,
+                    bias=False),
+                build_norm_layer(self.norm_cfg, stem_channels // 2)[1],
+                nn.ReLU(inplace=True),
+                build_conv_layer(
+                    self.conv_cfg,
+                    stem_channels // 2,
+                    stem_channels,
+                    kernel_size=3,
+                    stride=1,
+                    padding=1,
+                    bias=False),
+                build_norm_layer(self.norm_cfg, stem_channels)[1],
+                nn.ReLU(inplace=True))
+        else:
+            self.conv1 = build_conv_layer(
+                self.conv_cfg,
+                in_channels,
+                stem_channels,
+                kernel_size=7,
+                stride=2,
+                padding=3,
+                bias=False)
+            self.norm1_name, norm1 = build_norm_layer(
+                self.norm_cfg, stem_channels, postfix=1)
+            self.add_module(self.norm1_name, norm1)
+            self.relu = nn.ReLU(inplace=True)
+        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
+
+    def _freeze_stages(self):
+        if self.frozen_stages >= 0:
+            if self.deep_stem:
+                self.stem.eval()
+                for param in self.stem.parameters():
+                    param.requires_grad = False
+            else:
+                self.norm1.eval()
+                for m in [self.conv1, self.norm1]:
+                    for param in m.parameters():
+                        param.requires_grad = False
+
+        for i in range(1, self.frozen_stages + 1):
+            m = getattr(self, f'layer{i}')
+            m.eval()
+            for param in m.parameters():
+                param.requires_grad = False
+
+    def forward(self, x):
+        """Forward function."""
+        if self.deep_stem:
+            x = self.stem(x)
+        else:
+            x = self.conv1(x)
+            x = self.norm1(x)
+            x = self.relu(x)
+        x = self.maxpool(x)
+        outs = []
+        for i, layer_name in enumerate(self.res_layers):
+            res_layer = getattr(self, layer_name)
+            x = res_layer(x)
+            if i in self.out_indices:
+                outs.append(x)
+        return tuple(outs)
+
+    def train(self, mode=True):
+        """Convert the model into training mode while keeping the
+        normalization layers frozen."""
+        super(ResNet, self).train(mode)
+        self._freeze_stages()
+        if mode and self.norm_eval:
+            for m in self.modules():
+                # trick: eval has an effect on BatchNorm only
+                if isinstance(m, _BatchNorm):
+                    m.eval()
+
+
+@BACKBONES.register_module()
+class ResNetV1d(ResNet):
+    r"""ResNetV1d variant described in `Bag of Tricks
+    <https://arxiv.org/abs/1812.01187>`_.
+
+    Compared with default ResNet(ResNetV1b), ResNetV1d replaces the 7x7 conv in
+    the input stem with three 3x3 convs. And in the downsampling block, a 2x2
+    avg_pool with stride 2 is added before conv, whose stride is changed to 1.
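+
+    A minimal usage sketch (illustrative; mirrors the ``ResNet`` example
+    above):
+
+        >>> import torch
+        >>> self = ResNetV1d(depth=50)
+        >>> self.eval()
+        >>> inputs = torch.rand(1, 3, 64, 64)
+        >>> level_outputs = self.forward(inputs)
+        >>> for level_out in level_outputs:
+        ...     print(tuple(level_out.shape))
+        (1, 256, 16, 16)
+        (1, 512, 8, 8)
+        (1, 1024, 4, 4)
+        (1, 2048, 2, 2)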
+ """ + + def __init__(self, **kwargs): + super(ResNetV1d, self).__init__( + deep_stem=True, avg_down=True, **kwargs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/resnext.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/resnext.py new file mode 100644 index 0000000000000000000000000000000000000000..6dbcbd516fd308b1d703eecb83ab275f6b159516 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/resnext.py @@ -0,0 +1,153 @@ +import math + +from mmcv.cnn import build_conv_layer, build_norm_layer + +from ..builder import BACKBONES +from ..utils import ResLayer +from .resnet import Bottleneck as _Bottleneck +from .resnet import ResNet + + +class Bottleneck(_Bottleneck): + expansion = 4 + + def __init__(self, + inplanes, + planes, + groups=1, + base_width=4, + base_channels=64, + **kwargs): + """Bottleneck block for ResNeXt. + + If style is "pytorch", the stride-two layer is the 3x3 conv layer, if + it is "caffe", the stride-two layer is the first 1x1 conv layer. + """ + super(Bottleneck, self).__init__(inplanes, planes, **kwargs) + + if groups == 1: + width = self.planes + else: + width = math.floor(self.planes * + (base_width / base_channels)) * groups + + self.norm1_name, norm1 = build_norm_layer( + self.norm_cfg, width, postfix=1) + self.norm2_name, norm2 = build_norm_layer( + self.norm_cfg, width, postfix=2) + self.norm3_name, norm3 = build_norm_layer( + self.norm_cfg, self.planes * self.expansion, postfix=3) + + self.conv1 = build_conv_layer( + self.conv_cfg, + self.inplanes, + width, + kernel_size=1, + stride=self.conv1_stride, + bias=False) + self.add_module(self.norm1_name, norm1) + fallback_on_stride = False + self.with_modulated_dcn = False + if self.with_dcn: + fallback_on_stride = self.dcn.pop('fallback_on_stride', False) + if not self.with_dcn or fallback_on_stride: + self.conv2 = build_conv_layer( + self.conv_cfg, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + bias=False) + else: + assert self.conv_cfg is None, 'conv_cfg must be None for DCN' + self.conv2 = build_conv_layer( + self.dcn, + width, + width, + kernel_size=3, + stride=self.conv2_stride, + padding=self.dilation, + dilation=self.dilation, + groups=groups, + bias=False) + + self.add_module(self.norm2_name, norm2) + self.conv3 = build_conv_layer( + self.conv_cfg, + width, + self.planes * self.expansion, + kernel_size=1, + bias=False) + self.add_module(self.norm3_name, norm3) + + if self.with_plugins: + self._del_block_plugins(self.after_conv1_plugin_names + + self.after_conv2_plugin_names + + self.after_conv3_plugin_names) + self.after_conv1_plugin_names = self.make_block_plugins( + width, self.after_conv1_plugins) + self.after_conv2_plugin_names = self.make_block_plugins( + width, self.after_conv2_plugins) + self.after_conv3_plugin_names = self.make_block_plugins( + self.planes * self.expansion, self.after_conv3_plugins) + + def _del_block_plugins(self, plugin_names): + """delete plugins for block if exist. + + Args: + plugin_names (list[str]): List of plugins name to delete. + """ + assert isinstance(plugin_names, list) + for plugin_name in plugin_names: + del self._modules[plugin_name] + + +@BACKBONES.register_module() +class ResNeXt(ResNet): + """ResNeXt backbone. + + Args: + depth (int): Depth of resnet, from {18, 34, 50, 101, 152}. + in_channels (int): Number of input image channels. Default: 3. + num_stages (int): Resnet stages. Default: 4. 
+        groups (int): Group of resnext.
+        base_width (int): Base width of resnext.
+        strides (Sequence[int]): Strides of the first block of each stage.
+        dilations (Sequence[int]): Dilation of each stage.
+        out_indices (Sequence[int]): Output from which stages.
+        style (str): `pytorch` or `caffe`. If set to "pytorch", the stride-two
+            layer is the 3x3 conv layer, otherwise the stride-two layer is
+            the first 1x1 conv layer.
+        frozen_stages (int): Stages to be frozen (all param fixed). -1 means
+            not freezing any parameters.
+        norm_cfg (dict): dictionary to construct and config norm layer.
+        norm_eval (bool): Whether to set norm layers to eval mode, namely,
+            freeze running stats (mean and var). Note: Effect on Batch Norm
+            and its variants only.
+        with_cp (bool): Use checkpoint or not. Using checkpoint will save some
+            memory while slowing down the training speed.
+        zero_init_residual (bool): whether to use zero init for last norm
+            layer in resblocks to let them behave as identity.
+    """
+
+    arch_settings = {
+        50: (Bottleneck, (3, 4, 6, 3)),
+        101: (Bottleneck, (3, 4, 23, 3)),
+        152: (Bottleneck, (3, 8, 36, 3))
+    }
+
+    def __init__(self, groups=1, base_width=4, **kwargs):
+        self.groups = groups
+        self.base_width = base_width
+        super(ResNeXt, self).__init__(**kwargs)
+
+    def make_res_layer(self, **kwargs):
+        """Pack all blocks in a stage into a ``ResLayer``."""
+        return ResLayer(
+            groups=self.groups,
+            base_width=self.base_width,
+            base_channels=self.base_channels,
+            **kwargs)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/ssd_vgg.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/ssd_vgg.py
new file mode 100644
index 0000000000000000000000000000000000000000..e8a6689a32a6802cf778309417d9edd30a47663b
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/ssd_vgg.py
@@ -0,0 +1,127 @@
+import warnings
+
+import torch.nn as nn
+from mmcv.cnn import VGG
+from mmcv.runner import BaseModule
+
+from ..builder import BACKBONES
+from ..necks import ssd_neck
+
+
+@BACKBONES.register_module()
+class SSDVGG(VGG, BaseModule):
+    """VGG Backbone network for single-shot-detection.
+
+    Args:
+        depth (int): Depth of vgg, from {11, 13, 16, 19}.
+        with_last_pool (bool): Whether to add a pooling layer at the last
+            of the model
+        ceil_mode (bool): When True, will use `ceil` instead of `floor`
+            to compute the output shape.
+        out_indices (Sequence[int]): Output from which stages.
+        out_feature_indices (Sequence[int]): Output from which feature map.
+        pretrained (str, optional): model pretrained path. Default: None
+        init_cfg (dict or list[dict], optional): Initialization config dict.
+            Default: None
+        input_size (int, optional): Deprecated argument.
+            Width and height of input, from {300, 512}.
+        l2_norm_scale (float, optional): Deprecated argument.
+            L2 normalization layer init scale.
+
+    Example:
+        >>> import torch
+        >>> self = SSDVGG(depth=11)
+        >>> self.eval()
+        >>> inputs = torch.rand(1, 3, 300, 300)
+        >>> level_outputs = self.forward(inputs)
+        >>> for level_out in level_outputs:
+        ...     print(tuple(level_out.shape))
+        (1, 1024, 19, 19)
+        (1, 512, 10, 10)
+        (1, 256, 5, 5)
+        (1, 256, 3, 3)
+        (1, 256, 1, 1)
+    """
+    extra_setting = {
+        300: (256, 'S', 512, 128, 'S', 256, 128, 256, 128, 256),
+        512: (256, 'S', 512, 128, 'S', 256, 128, 'S', 256, 128, 'S', 256, 128),
+    }
+
+    def __init__(self,
+                 depth,
+                 with_last_pool=False,
+                 ceil_mode=True,
+                 out_indices=(3, 4),
+                 out_feature_indices=(22, 34),
+                 pretrained=None,
+                 init_cfg=None,
+                 input_size=None,
+                 l2_norm_scale=None):
+        # TODO: in_channels for mmcv.VGG
+        super(SSDVGG, self).__init__(
+            depth,
+            with_last_pool=with_last_pool,
+            ceil_mode=ceil_mode,
+            out_indices=out_indices)
+
+        self.features.add_module(
+            str(len(self.features)),
+            nn.MaxPool2d(kernel_size=3, stride=1, padding=1))
+        self.features.add_module(
+            str(len(self.features)),
+            nn.Conv2d(512, 1024, kernel_size=3, padding=6, dilation=6))
+        self.features.add_module(
+            str(len(self.features)), nn.ReLU(inplace=True))
+        self.features.add_module(
+            str(len(self.features)), nn.Conv2d(1024, 1024, kernel_size=1))
+        self.features.add_module(
+            str(len(self.features)), nn.ReLU(inplace=True))
+        self.out_feature_indices = out_feature_indices
+
+        assert not (init_cfg and pretrained), \
+            'init_cfg and pretrained cannot be set at the same time'
+
+        if init_cfg is not None:
+            self.init_cfg = init_cfg
+        elif isinstance(pretrained, str):
+            warnings.warn('DeprecationWarning: pretrained is deprecated, '
+                          'please use "init_cfg" instead')
+            self.init_cfg = dict(type='Pretrained', checkpoint=pretrained)
+        elif pretrained is None:
+            self.init_cfg = [
+                dict(type='Kaiming', layer='Conv2d'),
+                dict(type='Constant', val=1, layer='BatchNorm2d'),
+                dict(type='Normal', std=0.01, layer='Linear'),
+            ]
+        else:
+            raise TypeError('pretrained must be a str or None')
+
+        if input_size is not None:
+            warnings.warn('DeprecationWarning: input_size is deprecated')
+        if l2_norm_scale is not None:
+            warnings.warn('DeprecationWarning: l2_norm_scale in VGG is '
+                          'deprecated, it has been moved to SSDNeck.')
+
+    def init_weights(self, pretrained=None):
+        super(VGG, self).init_weights()
+
+    def forward(self, x):
+        """Forward function."""
+        outs = []
+        for i, layer in enumerate(self.features):
+            x = layer(x)
+            if i in self.out_feature_indices:
+                outs.append(x)
+
+        if len(outs) == 1:
+            return outs[0]
+        else:
+            return tuple(outs)
+
+
+class L2Norm(ssd_neck.L2Norm):
+
+    def __init__(self, **kwargs):
+        super(L2Norm, self).__init__(**kwargs)
+        warnings.warn('DeprecationWarning: L2Norm in ssd_vgg.py '
+                      'is deprecated, please use L2Norm in '
+                      'mmdet/models/necks/ssd_neck.py instead')
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/swin_transformer.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/swin_transformer.py
new file mode 100644
index 0000000000000000000000000000000000000000..67a58e2520592df8539da9818e2d15ff9d52be20
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/swin_transformer.py
@@ -0,0 +1,660 @@
+# --------------------------------------------------------
+# Swin Transformer
+# Copyright (c) 2021 Microsoft
+# Licensed under The MIT License [see LICENSE for details]
+# Written by Ze Liu, Yutong Lin, Yixuan Wei
+# --------------------------------------------------------
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+import torch.utils.checkpoint as checkpoint
+import numpy as np
+from timm.models.layers import DropPath, to_2tuple, trunc_normal_
+
+from mmcv_custom import
load_checkpoint +from mmdet.utils import get_root_logger +from ..builder import BACKBONES + +from mmcv.runner import BaseModule + +class Mlp(nn.Module): + """ Multilayer perceptron.""" + + def __init__(self, in_features, hidden_features=None, out_features=None, act_layer=nn.GELU, drop=0.): + super().__init__() + out_features = out_features or in_features + hidden_features = hidden_features or in_features + self.fc1 = nn.Linear(in_features, hidden_features) + self.act = act_layer() + self.fc2 = nn.Linear(hidden_features, out_features) + self.drop = nn.Dropout(drop) + + def forward(self, x): + x = self.fc1(x) + x = self.act(x) + x = self.drop(x) + x = self.fc2(x) + x = self.drop(x) + return x + + +def window_partition(x, window_size): + """ + Args: + x: (B, H, W, C) + window_size (int): window size + + Returns: + windows: (num_windows*B, window_size, window_size, C) + """ + B, H, W, C = x.shape + x = x.view(B, H // window_size, window_size, W // window_size, window_size, C) + windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C) + return windows + + +def window_reverse(windows, window_size, H, W): + """ + Args: + windows: (num_windows*B, window_size, window_size, C) + window_size (int): Window size + H (int): Height of image + W (int): Width of image + + Returns: + x: (B, H, W, C) + """ + B = int(windows.shape[0] / (H * W / window_size / window_size)) + x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1) + x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1) + return x + + +class WindowAttention(nn.Module): + """ Window based multi-head self attention (W-MSA) module with relative position bias. + It supports both of shifted and non-shifted window. + + Args: + dim (int): Number of input channels. + window_size (tuple[int]): The height and width of the window. + num_heads (int): Number of attention heads. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set + attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0 + proj_drop (float, optional): Dropout ratio of output. 
Default: 0.0 + """ + + def __init__(self, dim, window_size, num_heads, qkv_bias=True, qk_scale=None, attn_drop=0., proj_drop=0.): + + super().__init__() + self.dim = dim + self.window_size = window_size # Wh, Ww + self.num_heads = num_heads + head_dim = dim // num_heads + self.scale = qk_scale or head_dim ** -0.5 + + # define a parameter table of relative position bias + self.relative_position_bias_table = nn.Parameter( + torch.zeros((2 * window_size[0] - 1) * (2 * window_size[1] - 1), num_heads)) # 2*Wh-1 * 2*Ww-1, nH + + # get pair-wise relative position index for each token inside the window + coords_h = torch.arange(self.window_size[0]) + coords_w = torch.arange(self.window_size[1]) + coords = torch.stack(torch.meshgrid([coords_h, coords_w])) # 2, Wh, Ww + coords_flatten = torch.flatten(coords, 1) # 2, Wh*Ww + relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :] # 2, Wh*Ww, Wh*Ww + relative_coords = relative_coords.permute(1, 2, 0).contiguous() # Wh*Ww, Wh*Ww, 2 + relative_coords[:, :, 0] += self.window_size[0] - 1 # shift to start from 0 + relative_coords[:, :, 1] += self.window_size[1] - 1 + relative_coords[:, :, 0] *= 2 * self.window_size[1] - 1 + relative_position_index = relative_coords.sum(-1) # Wh*Ww, Wh*Ww + self.register_buffer("relative_position_index", relative_position_index) + + self.qkv = nn.Linear(dim, dim * 3, bias=qkv_bias) + self.attn_drop = nn.Dropout(attn_drop) + self.proj = nn.Linear(dim, dim) + self.proj_drop = nn.Dropout(proj_drop) + + trunc_normal_(self.relative_position_bias_table, std=.02) + self.softmax = nn.Softmax(dim=-1) + + def forward(self, x, mask=None): + """ Forward function. + + Args: + x: input features with shape of (num_windows*B, N, C) + mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None + """ + B_, N, C = x.shape + qkv = self.qkv(x).reshape(B_, N, 3, self.num_heads, C // self.num_heads).permute(2, 0, 3, 1, 4) + q, k, v = qkv[0], qkv[1], qkv[2] # make torchscript happy (cannot use tensor as tuple) + + q = q * self.scale + attn = (q @ k.transpose(-2, -1)) + + relative_position_bias = self.relative_position_bias_table[self.relative_position_index.view(-1)].view( + self.window_size[0] * self.window_size[1], self.window_size[0] * self.window_size[1], -1) # Wh*Ww,Wh*Ww,nH + relative_position_bias = relative_position_bias.permute(2, 0, 1).contiguous() # nH, Wh*Ww, Wh*Ww + attn = attn + relative_position_bias.unsqueeze(0) + + if mask is not None: + nW = mask.shape[0] + attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze(1).unsqueeze(0) + attn = attn.view(-1, self.num_heads, N, N) + attn = self.softmax(attn) + else: + attn = self.softmax(attn) + + attn = self.attn_drop(attn) + + x = (attn @ v).transpose(1, 2).reshape(B_, N, C) + x = self.proj(x) + x = self.proj_drop(x) + return x + + +class SwinTransformerBlock(nn.Module): + """ Swin Transformer Block. + + Args: + dim (int): Number of input channels. + num_heads (int): Number of attention heads. + window_size (int): Window size. + shift_size (int): Shift size for SW-MSA. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set. + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float, optional): Stochastic depth rate. 
+            Default: 0.0
+        act_layer (nn.Module, optional): Activation layer. Default: nn.GELU
+        norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
+    """
+
+    def __init__(self, dim, num_heads, window_size=7, shift_size=0,
+                 mlp_ratio=4., qkv_bias=True, qk_scale=None, drop=0., attn_drop=0., drop_path=0.,
+                 act_layer=nn.GELU, norm_layer=nn.LayerNorm):
+        super().__init__()
+        self.dim = dim
+        self.num_heads = num_heads
+        self.window_size = window_size
+        self.shift_size = shift_size
+        self.mlp_ratio = mlp_ratio
+        assert 0 <= self.shift_size < self.window_size, \
+            'shift_size must be in the range [0, window_size)'
+
+        self.norm1 = norm_layer(dim)
+        self.attn = WindowAttention(
+            dim, window_size=to_2tuple(self.window_size), num_heads=num_heads,
+            qkv_bias=qkv_bias, qk_scale=qk_scale, attn_drop=attn_drop, proj_drop=drop)
+
+        self.drop_path = DropPath(drop_path) if drop_path > 0. else nn.Identity()
+        self.norm2 = norm_layer(dim)
+        mlp_hidden_dim = int(dim * mlp_ratio)
+        self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim, act_layer=act_layer, drop=drop)
+
+        self.H = None
+        self.W = None
+
+    def forward(self, x, mask_matrix):
+        """ Forward function.
+
+        Args:
+            x: Input feature, tensor size (B, H*W, C). The spatial resolution
+                is read from ``self.H`` and ``self.W``, which the caller must
+                set beforehand.
+            mask_matrix: Attention mask for cyclic shift.
+        """
+        B, L, C = x.shape
+        H, W = self.H, self.W
+        assert L == H * W, "input feature has wrong size"
+
+        shortcut = x
+        x = self.norm1(x)
+        x = x.view(B, H, W, C)
+
+        # pad feature maps to multiples of window size
+        pad_l = pad_t = 0
+        pad_r = (self.window_size - W % self.window_size) % self.window_size
+        pad_b = (self.window_size - H % self.window_size) % self.window_size
+        x = F.pad(x, (0, 0, pad_l, pad_r, pad_t, pad_b))
+        _, Hp, Wp, _ = x.shape
+
+        # cyclic shift
+        if self.shift_size > 0:
+            shifted_x = torch.roll(x, shifts=(-self.shift_size, -self.shift_size), dims=(1, 2))
+            attn_mask = mask_matrix
+        else:
+            shifted_x = x
+            attn_mask = None
+
+        # partition windows
+        x_windows = window_partition(shifted_x, self.window_size)  # nW*B, window_size, window_size, C
+        x_windows = x_windows.view(-1, self.window_size * self.window_size, C)  # nW*B, window_size*window_size, C
+
+        # W-MSA/SW-MSA
+        attn_windows = self.attn(x_windows, mask=attn_mask)  # nW*B, window_size*window_size, C
+
+        # merge windows
+        attn_windows = attn_windows.view(-1, self.window_size, self.window_size, C)
+        shifted_x = window_reverse(attn_windows, self.window_size, Hp, Wp)  # B H' W' C
+
+        # reverse cyclic shift
+        if self.shift_size > 0:
+            x = torch.roll(shifted_x, shifts=(self.shift_size, self.shift_size), dims=(1, 2))
+        else:
+            x = shifted_x
+
+        if pad_r > 0 or pad_b > 0:
+            x = x[:, :H, :W, :].contiguous()
+
+        x = x.view(B, H * W, C)
+
+        # FFN
+        x = shortcut + self.drop_path(x)
+        x = x + self.drop_path(self.mlp(self.norm2(x)))
+
+        return x
+
+
+class PatchMerging(nn.Module):
+    """ Patch Merging Layer
+
+    Args:
+        dim (int): Number of input channels.
+        norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm
+    """
+    def __init__(self, dim, norm_layer=nn.LayerNorm):
+        super().__init__()
+        self.dim = dim
+        self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False)
+        self.norm = norm_layer(4 * dim)
+
+    def forward(self, x, H, W):
+        """ Forward function.
+
+        Args:
+            x: Input feature, tensor size (B, H*W, C).
+            H, W: Spatial resolution of the input feature.
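+
+        A shape sketch (illustrative, assuming the defaults above):
+
+            >>> import torch
+            >>> merge = PatchMerging(dim=96)
+            >>> merge(torch.rand(1, 56 * 56, 96), 56, 56).shape
+            torch.Size([1, 784, 192])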
+ """ + B, L, C = x.shape + assert L == H * W, "input feature has wrong size" + + x = x.view(B, H, W, C) + + # padding + pad_input = (H % 2 == 1) or (W % 2 == 1) + if pad_input: + x = F.pad(x, (0, 0, 0, W % 2, 0, H % 2)) + + x0 = x[:, 0::2, 0::2, :] # B H/2 W/2 C + x1 = x[:, 1::2, 0::2, :] # B H/2 W/2 C + x2 = x[:, 0::2, 1::2, :] # B H/2 W/2 C + x3 = x[:, 1::2, 1::2, :] # B H/2 W/2 C + x = torch.cat([x0, x1, x2, x3], -1) # B H/2 W/2 4*C + x = x.view(B, -1, 4 * C) # B H/2*W/2 4*C + + x = self.norm(x) + x = self.reduction(x) + + return x + + +class BasicLayer(nn.Module): + """ A basic Swin Transformer layer for one stage. + + Args: + dim (int): Number of feature channels + depth (int): Depths of this stage. + num_heads (int): Number of attention head. + window_size (int): Local window size. Default: 7. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4. + qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float | None, optional): Override default qk scale of head_dim ** -0.5 if set. + drop (float, optional): Dropout rate. Default: 0.0 + attn_drop (float, optional): Attention dropout rate. Default: 0.0 + drop_path (float | tuple[float], optional): Stochastic depth rate. Default: 0.0 + norm_layer (nn.Module, optional): Normalization layer. Default: nn.LayerNorm + downsample (nn.Module | None, optional): Downsample layer at the end of the layer. Default: None + use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False. + """ + + def __init__(self, + dim, + depth, + num_heads, + window_size=7, + mlp_ratio=4., + qkv_bias=True, + qk_scale=None, + drop=0., + attn_drop=0., + drop_path=0., + norm_layer=nn.LayerNorm, + downsample=None, + use_checkpoint=False): + super().__init__() + self.window_size = window_size + self.shift_size = window_size // 2 + self.depth = depth + self.use_checkpoint = use_checkpoint + + # build blocks + self.blocks = nn.ModuleList([ + SwinTransformerBlock( + dim=dim, + num_heads=num_heads, + window_size=window_size, + shift_size=0 if (i % 2 == 0) else window_size // 2, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop, + attn_drop=attn_drop, + drop_path=drop_path[i] if isinstance(drop_path, list) else drop_path, + norm_layer=norm_layer) + for i in range(depth)]) + + # patch merging layer + if downsample is not None: + self.downsample = downsample(dim=dim, norm_layer=norm_layer) + else: + self.downsample = None + + def forward(self, x, H, W): + """ Forward function. + + Args: + x: Input feature, tensor size (B, H*W, C). + H, W: Spatial resolution of the input feature. 
+ """ + + # calculate attention mask for SW-MSA + Hp = int(np.ceil(H / self.window_size)) * self.window_size + Wp = int(np.ceil(W / self.window_size)) * self.window_size + img_mask = torch.zeros((1, Hp, Wp, 1), device=x.device) # 1 Hp Wp 1 + h_slices = (slice(0, -self.window_size), + slice(-self.window_size, -self.shift_size), + slice(-self.shift_size, None)) + w_slices = (slice(0, -self.window_size), + slice(-self.window_size, -self.shift_size), + slice(-self.shift_size, None)) + cnt = 0 + for h in h_slices: + for w in w_slices: + img_mask[:, h, w, :] = cnt + cnt += 1 + + mask_windows = window_partition(img_mask, self.window_size) # nW, window_size, window_size, 1 + mask_windows = mask_windows.view(-1, self.window_size * self.window_size) + attn_mask = mask_windows.unsqueeze(1) - mask_windows.unsqueeze(2) + attn_mask = attn_mask.masked_fill(attn_mask != 0, float(-100.0)).masked_fill(attn_mask == 0, float(0.0)) + + for blk in self.blocks: + blk.H, blk.W = H, W + if self.use_checkpoint: + x = checkpoint.checkpoint(blk, x, attn_mask) + else: + x = blk(x, attn_mask) + if self.downsample is not None: + x_down = self.downsample(x, H, W) + Wh, Ww = (H + 1) // 2, (W + 1) // 2 + return x, H, W, x_down, Wh, Ww + else: + return x, H, W, x, H, W + + +class PatchEmbed(nn.Module): + """ Image to Patch Embedding + + Args: + patch_size (int): Patch token size. Default: 4. + in_chans (int): Number of input image channels. Default: 3. + embed_dim (int): Number of linear projection output channels. Default: 96. + norm_layer (nn.Module, optional): Normalization layer. Default: None + """ + + def __init__(self, patch_size=4, in_chans=3, embed_dim=96, norm_layer=None): + super().__init__() + patch_size = to_2tuple(patch_size) + self.patch_size = patch_size + + self.in_chans = in_chans + self.embed_dim = embed_dim + + self.proj = nn.Conv2d(in_chans, embed_dim, kernel_size=patch_size, stride=patch_size) + if norm_layer is not None: + self.norm = norm_layer(embed_dim) + else: + self.norm = None + + def forward(self, x): + """Forward function.""" + # padding + _, _, H, W = x.size() + if W % self.patch_size[1] != 0: + x = F.pad(x, (0, self.patch_size[1] - W % self.patch_size[1])) + if H % self.patch_size[0] != 0: + x = F.pad(x, (0, 0, 0, self.patch_size[0] - H % self.patch_size[0])) + + x = self.proj(x) # B C Wh Ww + if self.norm is not None: + Wh, Ww = x.size(2), x.size(3) + x = x.flatten(2).transpose(1, 2) + x = self.norm(x) + x = x.transpose(1, 2).view(-1, self.embed_dim, Wh, Ww) + + return x + + +@BACKBONES.register_module() +class SwinTransformer(BaseModule): + """ Swin Transformer backbone. + A PyTorch impl of : `Swin Transformer: Hierarchical Vision Transformer using Shifted Windows` - + https://arxiv.org/pdf/2103.14030 + + Args: + pretrain_img_size (int): Input image size for training the pretrained model, + used in absolute postion embedding. Default 224. + patch_size (int | tuple(int)): Patch size. Default: 4. + in_chans (int): Number of input image channels. Default: 3. + embed_dim (int): Number of linear projection output channels. Default: 96. + depths (tuple[int]): Depths of each Swin Transformer stage. + num_heads (tuple[int]): Number of attention head of each stage. + window_size (int): Window size. Default: 7. + mlp_ratio (float): Ratio of mlp hidden dim to embedding dim. Default: 4. + qkv_bias (bool): If True, add a learnable bias to query, key, value. Default: True + qk_scale (float): Override default qk scale of head_dim ** -0.5 if set. + drop_rate (float): Dropout rate. 
+ attn_drop_rate (float): Attention dropout rate. Default: 0. + drop_path_rate (float): Stochastic depth rate. Default: 0.2. + norm_layer (nn.Module): Normalization layer. Default: nn.LayerNorm. + ape (bool): If True, add absolute position embedding to the patch embedding. Default: False. + patch_norm (bool): If True, add normalization after patch embedding. Default: True. + out_indices (Sequence[int]): Output from which stages. + frozen_stages (int): Stages to be frozen (stop grad and set eval mode). + -1 means not freezing any parameters. + use_checkpoint (bool): Whether to use checkpointing to save memory. Default: False. + pretrained (str, optional): model pretrained path. Default: None. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None. + """ + + def __init__(self, + pretrain_img_size=224, + patch_size=4, + in_chans=3, + embed_dim=96, + depths=[2, 2, 6, 2], + num_heads=[3, 6, 12, 24], + window_size=7, + mlp_ratio=4., + qkv_bias=True, + qk_scale=None, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0.2, + norm_layer=nn.LayerNorm, + ape=False, + patch_norm=True, + out_indices=(0, 1, 2, 3), + frozen_stages=-1, + use_checkpoint=False, + pretrained=None, + init_cfg=None): + assert init_cfg is None, 'To prevent abnormal initialization ' \ + 'behavior, init_cfg is not allowed to be set' + super().__init__(init_cfg=init_cfg) + + self.pretrain_img_size = pretrain_img_size + self.num_layers = len(depths) + self.embed_dim = embed_dim + self.ape = ape + self.patch_norm = patch_norm + self.out_indices = out_indices + self.frozen_stages = frozen_stages + self.pretrained = pretrained + + # split image into non-overlapping patches + self.patch_embed = PatchEmbed( + patch_size=patch_size, in_chans=in_chans, embed_dim=embed_dim, + norm_layer=norm_layer if self.patch_norm else None) + + # absolute position embedding + if self.ape: + pretrain_img_size = to_2tuple(pretrain_img_size) + patch_size = to_2tuple(patch_size) + patches_resolution = [pretrain_img_size[0] // patch_size[0], pretrain_img_size[1] // patch_size[1]] + + self.absolute_pos_embed = nn.Parameter(torch.zeros(1, embed_dim, patches_resolution[0], patches_resolution[1])) + trunc_normal_(self.absolute_pos_embed, std=.02) + + self.pos_drop = nn.Dropout(p=drop_rate) + + # stochastic depth + dpr = [x.item() for x in torch.linspace(0, drop_path_rate, sum(depths))] # stochastic depth decay rule + + # build layers + self.layers = nn.ModuleList() + for i_layer in range(self.num_layers): + layer = BasicLayer( + dim=int(embed_dim * 2 ** i_layer), + depth=depths[i_layer], + num_heads=num_heads[i_layer], + window_size=window_size, + mlp_ratio=mlp_ratio, + qkv_bias=qkv_bias, + qk_scale=qk_scale, + drop=drop_rate, + attn_drop=attn_drop_rate, + drop_path=dpr[sum(depths[:i_layer]):sum(depths[:i_layer + 1])], + norm_layer=norm_layer, + downsample=PatchMerging if (i_layer < self.num_layers - 1) else None, + use_checkpoint=use_checkpoint) + self.layers.append(layer) + + num_features = [int(embed_dim * 2 ** i) for i in range(self.num_layers)] + self.num_features = num_features + + # add a norm layer for each output + for i_layer in out_indices: + layer = norm_layer(num_features[i_layer]) + layer_name = f'norm{i_layer}' + self.add_module(layer_name, layer) + + self._freeze_stages() + + def _freeze_stages(self): + if self.frozen_stages >= 0: + self.patch_embed.eval() + for param in self.patch_embed.parameters(): + param.requires_grad = False + + if self.frozen_stages >= 1 and self.ape: + 
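+            # the absolute position embedding is shared by every stage, so it
+            # is frozen as soon as any transformer stage is frozen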
+            self.absolute_pos_embed.requires_grad = False
+
+        if self.frozen_stages >= 2:
+            self.pos_drop.eval()
+            for i in range(0, self.frozen_stages - 1):
+                m = self.layers[i]
+                m.eval()
+                for param in m.parameters():
+                    param.requires_grad = False
+
+    def init_weights(self):
+        """Initialize the weights in backbone."""
+
+        def _init_weights(m):
+            if isinstance(m, nn.Linear):
+                trunc_normal_(m.weight, std=.02)
+                if isinstance(m, nn.Linear) and m.bias is not None:
+                    nn.init.constant_(m.bias, 0)
+            elif isinstance(m, nn.LayerNorm):
+                nn.init.constant_(m.bias, 0)
+                nn.init.constant_(m.weight, 1.0)
+
+        if isinstance(self.pretrained, str):
+            self.apply(_init_weights)
+            logger = get_root_logger()
+            load_checkpoint(self, self.pretrained, strict=False, logger=logger)
+        elif self.pretrained is None:
+            self.apply(_init_weights)
+        else:
+            raise TypeError('pretrained must be a str or None')
+
+    def forward(self, x):
+        """Forward function."""
+        x = self.patch_embed(x)
+
+        Wh, Ww = x.size(2), x.size(3)
+        if self.ape:
+            # interpolate the position embedding to the corresponding size
+            absolute_pos_embed = F.interpolate(self.absolute_pos_embed, size=(Wh, Ww), mode='bicubic')
+            x = (x + absolute_pos_embed).flatten(2).transpose(1, 2)  # B Wh*Ww C
+        else:
+            x = x.flatten(2).transpose(1, 2)
+        x = self.pos_drop(x)
+
+        outs = []
+        for i in range(self.num_layers):
+            layer = self.layers[i]
+            x_out, H, W, x, Wh, Ww = layer(x, Wh, Ww)
+
+            if i in self.out_indices:
+                norm_layer = getattr(self, f'norm{i}')
+                x_out = norm_layer(x_out)
+
+                out = x_out.view(-1, H, W, self.num_features[i]).permute(0, 3, 1, 2).contiguous()
+                outs.append(out)
+
+        return tuple(outs)
+
+    def train(self, mode=True):
+        """Convert the model into training mode while keeping the frozen
+        stages frozen."""
+        super(SwinTransformer, self).train(mode)
+        self._freeze_stages()
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/trident_resnet.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/trident_resnet.py
new file mode 100644
index 0000000000000000000000000000000000000000..44d8c96c6d0a761b3bd896d9f4b5b871b04dc539
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/backbones/trident_resnet.py
@@ -0,0 +1,297 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+import torch.utils.checkpoint as cp
+from mmcv.cnn import build_conv_layer, build_norm_layer
+from mmcv.runner import BaseModule
+from torch.nn.modules.utils import _pair
+
+from mmdet.models.backbones.resnet import Bottleneck, ResNet
+from mmdet.models.builder import BACKBONES
+
+
+class TridentConv(BaseModule):
+    """Trident Convolution Module.
+
+    Args:
+        in_channels (int): Number of channels in input.
+        out_channels (int): Number of channels in output.
+ kernel_size (int): Size of convolution kernel. + stride (int, optional): Convolution stride. Default: 1. + trident_dilations (tuple[int, int, int], optional): Dilations of + different trident branch. Default: (1, 2, 3). + test_branch_idx (int, optional): In inference, all 3 branches will + be used if `test_branch_idx==-1`, otherwise only branch with + index `test_branch_idx` will be used. Default: 1. + bias (bool, optional): Whether to use bias in convolution or not. + Default: False. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + in_channels, + out_channels, + kernel_size, + stride=1, + trident_dilations=(1, 2, 3), + test_branch_idx=1, + bias=False, + init_cfg=None): + super(TridentConv, self).__init__(init_cfg) + self.num_branch = len(trident_dilations) + self.with_bias = bias + self.test_branch_idx = test_branch_idx + self.stride = _pair(stride) + self.kernel_size = _pair(kernel_size) + self.paddings = _pair(trident_dilations) + self.dilations = trident_dilations + self.in_channels = in_channels + self.out_channels = out_channels + self.bias = bias + + self.weight = nn.Parameter( + torch.Tensor(out_channels, in_channels, *self.kernel_size)) + if bias: + self.bias = nn.Parameter(torch.Tensor(out_channels)) + else: + self.bias = None + + def extra_repr(self): + tmpstr = f'in_channels={self.in_channels}' + tmpstr += f', out_channels={self.out_channels}' + tmpstr += f', kernel_size={self.kernel_size}' + tmpstr += f', num_branch={self.num_branch}' + tmpstr += f', test_branch_idx={self.test_branch_idx}' + tmpstr += f', stride={self.stride}' + tmpstr += f', paddings={self.paddings}' + tmpstr += f', dilations={self.dilations}' + tmpstr += f', bias={self.bias}' + return tmpstr + + def forward(self, inputs): + if self.training or self.test_branch_idx == -1: + outputs = [ + F.conv2d(input, self.weight, self.bias, self.stride, padding, + dilation) for input, dilation, padding in zip( + inputs, self.dilations, self.paddings) + ] + else: + assert len(inputs) == 1 + outputs = [ + F.conv2d(inputs[0], self.weight, self.bias, self.stride, + self.paddings[self.test_branch_idx], + self.dilations[self.test_branch_idx]) + ] + + return outputs + + +# Since TridentNet is defined over ResNet50 and ResNet101, here we +# only support TridentBottleneckBlock. +class TridentBottleneck(Bottleneck): + """BottleBlock for TridentResNet. + + Args: + trident_dilations (tuple[int, int, int]): Dilations of different + trident branch. + test_branch_idx (int): In inference, all 3 branches will be used + if `test_branch_idx==-1`, otherwise only branch with index + `test_branch_idx` will be used. + concat_output (bool): Whether to concat the output list to a Tensor. + `True` only in the last Block. 
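+
+    A construction sketch (illustrative; the kwargs are forwarded to the
+    parent ``Bottleneck``):
+
+        >>> block = TridentBottleneck((1, 2, 3), test_branch_idx=1,
+        ...                           concat_output=True, inplanes=1024,
+        ...                           planes=256)
+        >>> block.num_branch
+        3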
+ """ + + def __init__(self, trident_dilations, test_branch_idx, concat_output, + **kwargs): + + super(TridentBottleneck, self).__init__(**kwargs) + self.trident_dilations = trident_dilations + self.num_branch = len(trident_dilations) + self.concat_output = concat_output + self.test_branch_idx = test_branch_idx + self.conv2 = TridentConv( + self.planes, + self.planes, + kernel_size=3, + stride=self.conv2_stride, + bias=False, + trident_dilations=self.trident_dilations, + test_branch_idx=test_branch_idx, + init_cfg=dict( + type='Kaiming', + distribution='uniform', + mode='fan_in', + override=dict(name='conv2'))) + + def forward(self, x): + + def _inner_forward(x): + num_branch = ( + self.num_branch + if self.training or self.test_branch_idx == -1 else 1) + identity = x + if not isinstance(x, list): + x = (x, ) * num_branch + identity = x + if self.downsample is not None: + identity = [self.downsample(b) for b in x] + + out = [self.conv1(b) for b in x] + out = [self.norm1(b) for b in out] + out = [self.relu(b) for b in out] + + if self.with_plugins: + for k in range(len(out)): + out[k] = self.forward_plugin(out[k], + self.after_conv1_plugin_names) + + out = self.conv2(out) + out = [self.norm2(b) for b in out] + out = [self.relu(b) for b in out] + if self.with_plugins: + for k in range(len(out)): + out[k] = self.forward_plugin(out[k], + self.after_conv2_plugin_names) + + out = [self.conv3(b) for b in out] + out = [self.norm3(b) for b in out] + + if self.with_plugins: + for k in range(len(out)): + out[k] = self.forward_plugin(out[k], + self.after_conv3_plugin_names) + + out = [ + out_b + identity_b for out_b, identity_b in zip(out, identity) + ] + return out + + if self.with_cp and x.requires_grad: + out = cp.checkpoint(_inner_forward, x) + else: + out = _inner_forward(x) + + out = [self.relu(b) for b in out] + if self.concat_output: + out = torch.cat(out, dim=0) + return out + + +def make_trident_res_layer(block, + inplanes, + planes, + num_blocks, + stride=1, + trident_dilations=(1, 2, 3), + style='pytorch', + with_cp=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + dcn=None, + plugins=None, + test_branch_idx=-1): + """Build Trident Res Layers.""" + + downsample = None + if stride != 1 or inplanes != planes * block.expansion: + downsample = [] + conv_stride = stride + downsample.extend([ + build_conv_layer( + conv_cfg, + inplanes, + planes * block.expansion, + kernel_size=1, + stride=conv_stride, + bias=False), + build_norm_layer(norm_cfg, planes * block.expansion)[1] + ]) + downsample = nn.Sequential(*downsample) + + layers = [] + for i in range(num_blocks): + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=stride if i == 0 else 1, + trident_dilations=trident_dilations, + downsample=downsample if i == 0 else None, + style=style, + with_cp=with_cp, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + dcn=dcn, + plugins=plugins, + test_branch_idx=test_branch_idx, + concat_output=True if i == num_blocks - 1 else False)) + inplanes = planes * block.expansion + return nn.Sequential(*layers) + + +@BACKBONES.register_module() +class TridentResNet(ResNet): + """The stem layer, stage 1 and stage 2 in Trident ResNet are identical to + ResNet, while in stage 3, Trident BottleBlock is utilized to replace the + normal BottleBlock to yield trident output. Different branch shares the + convolution weight but uses different dilations to achieve multi-scale + output. 
+ + / stage3(b0) \ + x - stem - stage1 - stage2 - stage3(b1) - output + \ stage3(b2) / + + Args: + depth (int): Depth of resnet, from {50, 101, 152}. + num_branch (int): Number of branches in TridentNet. + test_branch_idx (int): In inference, all 3 branches will be used + if `test_branch_idx==-1`, otherwise only branch with index + `test_branch_idx` will be used. + trident_dilations (tuple[int]): Dilations of different trident branch. + len(trident_dilations) should be equal to num_branch. + """ # noqa + + def __init__(self, depth, num_branch, test_branch_idx, trident_dilations, + **kwargs): + + assert num_branch == len(trident_dilations) + assert depth in (50, 101, 152) + super(TridentResNet, self).__init__(depth, **kwargs) + assert self.num_stages == 3 + self.test_branch_idx = test_branch_idx + self.num_branch = num_branch + + last_stage_idx = self.num_stages - 1 + stride = self.strides[last_stage_idx] + dilation = trident_dilations + dcn = self.dcn if self.stage_with_dcn[last_stage_idx] else None + if self.plugins is not None: + stage_plugins = self.make_stage_plugins(self.plugins, + last_stage_idx) + else: + stage_plugins = None + planes = self.base_channels * 2**last_stage_idx + res_layer = make_trident_res_layer( + TridentBottleneck, + inplanes=(self.block.expansion * self.base_channels * + 2**(last_stage_idx - 1)), + planes=planes, + num_blocks=self.stage_blocks[last_stage_idx], + stride=stride, + trident_dilations=dilation, + style=self.style, + with_cp=self.with_cp, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + dcn=dcn, + plugins=stage_plugins, + test_branch_idx=self.test_branch_idx) + + layer_name = f'layer{last_stage_idx + 1}' + + self.__setattr__(layer_name, res_layer) + self.res_layers.pop(last_stage_idx) + self.res_layers.insert(last_stage_idx, layer_name) + + self._freeze_stages() diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/builder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..85dc2562da1e70d68a80619b3f16abd2fb666407 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/builder.py @@ -0,0 +1,58 @@ +import warnings + +from mmcv.cnn import MODELS as MMCV_MODELS +from mmcv.utils import Registry + +MODELS = Registry('models', parent=MMCV_MODELS) + +BACKBONES = MODELS +NECKS = MODELS +ROI_EXTRACTORS = MODELS +SHARED_HEADS = MODELS +HEADS = MODELS +LOSSES = MODELS +DETECTORS = MODELS + + +def build_backbone(cfg): + """Build backbone.""" + return BACKBONES.build(cfg) + + +def build_neck(cfg): + """Build neck.""" + return NECKS.build(cfg) + + +def build_roi_extractor(cfg): + """Build roi extractor.""" + return ROI_EXTRACTORS.build(cfg) + + +def build_shared_head(cfg): + """Build shared head.""" + return SHARED_HEADS.build(cfg) + + +def build_head(cfg): + """Build head.""" + return HEADS.build(cfg) + + +def build_loss(cfg): + """Build loss.""" + return LOSSES.build(cfg) + + +def build_detector(cfg, train_cfg=None, test_cfg=None): + """Build detector.""" + if train_cfg is not None or test_cfg is not None: + warnings.warn( + 'train_cfg and test_cfg is deprecated, ' + 'please specify them in model', UserWarning) + assert cfg.get('train_cfg') is None or train_cfg is None, \ + 'train_cfg specified in both outer field and model field ' + assert cfg.get('test_cfg') is None or test_cfg is None, \ + 'test_cfg specified in both outer field and model field ' + return DETECTORS.build( + cfg, default_args=dict(train_cfg=train_cfg, 
test_cfg=test_cfg)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3cbed578dc5009d9b1dd3d0e4f991e3ac08af2fa --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/__init__.py @@ -0,0 +1,47 @@ +from .anchor_free_head import AnchorFreeHead +from .anchor_head import AnchorHead +from .atss_head import ATSSHead +from .autoassign_head import AutoAssignHead +from .cascade_rpn_head import CascadeRPNHead, StageCascadeRPNHead +from .centernet_head import CenterNetHead +from .centripetal_head import CentripetalHead +from .corner_head import CornerHead +from .deformable_detr_head import DeformableDETRHead +from .detr_head import DETRHead +from .embedding_rpn_head import EmbeddingRPNHead +from .fcos_head import FCOSHead +from .fovea_head import FoveaHead +from .free_anchor_retina_head import FreeAnchorRetinaHead +from .fsaf_head import FSAFHead +from .ga_retina_head import GARetinaHead +from .ga_rpn_head import GARPNHead +from .gfl_head import GFLHead +from .guided_anchor_head import FeatureAdaption, GuidedAnchorHead +from .ld_head import LDHead +from .nasfcos_head import NASFCOSHead +from .paa_head import PAAHead +from .pisa_retinanet_head import PISARetinaHead +from .pisa_ssd_head import PISASSDHead +from .reppoints_head import RepPointsHead +from .retina_head import RetinaHead +from .retina_sepbn_head import RetinaSepBNHead +from .rpn_head import RPNHead +from .sabl_retina_head import SABLRetinaHead +from .ssd_head import SSDHead +from .vfnet_head import VFNetHead +from .yolact_head import YOLACTHead, YOLACTProtonet, YOLACTSegmHead +from .yolo_head import YOLOV3Head +from .yolof_head import YOLOFHead + +__all__ = [ + 'AnchorFreeHead', 'AnchorHead', 'GuidedAnchorHead', 'FeatureAdaption', + 'RPNHead', 'GARPNHead', 'RetinaHead', 'RetinaSepBNHead', 'GARetinaHead', + 'SSDHead', 'FCOSHead', 'RepPointsHead', 'FoveaHead', + 'FreeAnchorRetinaHead', 'ATSSHead', 'FSAFHead', 'NASFCOSHead', + 'PISARetinaHead', 'PISASSDHead', 'GFLHead', 'CornerHead', 'YOLACTHead', + 'YOLACTSegmHead', 'YOLACTProtonet', 'YOLOV3Head', 'PAAHead', + 'SABLRetinaHead', 'CentripetalHead', 'VFNetHead', 'StageCascadeRPNHead', + 'CascadeRPNHead', 'EmbeddingRPNHead', 'LDHead', 'CascadeRPNHead', + 'AutoAssignHead', 'DETRHead', 'YOLOFHead', 'DeformableDETRHead', + 'CenterNetHead' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/anchor_free_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/anchor_free_head.py new file mode 100644 index 0000000000000000000000000000000000000000..e399bee90201c250c3677bf29273d3a19bb51ac1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/anchor_free_head.py @@ -0,0 +1,340 @@ +from abc import abstractmethod + +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule +from mmcv.runner import force_fp32 + +from mmdet.core import multi_apply +from ..builder import HEADS, build_loss +from .base_dense_head import BaseDenseHead +from .dense_test_mixins import BBoxTestMixin + + +@HEADS.register_module() +class AnchorFreeHead(BaseDenseHead, BBoxTestMixin): + """Anchor-free head (FCOS, Fovea, RepPoints, etc.). + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. 
+ feat_channels (int): Number of hidden channels. Used in child classes. + stacked_convs (int): Number of stacking convs of the head. + strides (tuple): Downsample factor of each feature map. + dcn_on_last_conv (bool): If true, use dcn in the last layer of + towers. Default: False. + conv_bias (bool | str): If specified as `auto`, it will be decided by + the norm_cfg. Bias of conv will be set as True if `norm_cfg` is + None, otherwise False. Default: "auto". + loss_cls (dict): Config of classification loss. + loss_bbox (dict): Config of localization loss. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Config dict for normalization layer. Default: None. + train_cfg (dict): Training config of anchor head. + test_cfg (dict): Testing config of anchor head. + init_cfg (dict or list[dict], optional): Initialization config dict. + """ # noqa: W605 + + _version = 1 + + def __init__(self, + num_classes, + in_channels, + feat_channels=256, + stacked_convs=4, + strides=(4, 8, 16, 32, 64), + dcn_on_last_conv=False, + conv_bias='auto', + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + conv_cfg=None, + norm_cfg=None, + train_cfg=None, + test_cfg=None, + init_cfg=dict( + type='Normal', + layer='Conv2d', + std=0.01, + override=dict( + type='Normal', + name='conv_cls', + std=0.01, + bias_prob=0.01))): + super(AnchorFreeHead, self).__init__(init_cfg) + self.num_classes = num_classes + self.cls_out_channels = num_classes + self.in_channels = in_channels + self.feat_channels = feat_channels + self.stacked_convs = stacked_convs + self.strides = strides + self.dcn_on_last_conv = dcn_on_last_conv + assert conv_bias == 'auto' or isinstance(conv_bias, bool) + self.conv_bias = conv_bias + self.loss_cls = build_loss(loss_cls) + self.loss_bbox = build_loss(loss_bbox) + self.train_cfg = train_cfg + self.test_cfg = test_cfg + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.fp16_enabled = False + + self._init_layers() + + def _init_layers(self): + """Initialize layers of the head.""" + self._init_cls_convs() + self._init_reg_convs() + self._init_predictor() + + def _init_cls_convs(self): + """Initialize classification conv layers of the head.""" + self.cls_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + if self.dcn_on_last_conv and i == self.stacked_convs - 1: + conv_cfg = dict(type='DCNv2') + else: + conv_cfg = self.conv_cfg + self.cls_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=self.norm_cfg, + bias=self.conv_bias)) + + def _init_reg_convs(self): + """Initialize bbox regression conv layers of the head.""" + self.reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + if self.dcn_on_last_conv and i == self.stacked_convs - 1: + conv_cfg = dict(type='DCNv2') + else: + conv_cfg = self.conv_cfg + self.reg_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=self.norm_cfg, + bias=self.conv_bias)) + + def _init_predictor(self): + """Initialize predictor layers of the head.""" + self.conv_cls = nn.Conv2d( + self.feat_channels, self.cls_out_channels, 3, padding=1) + self.conv_reg = nn.Conv2d(self.feat_channels, 4, 3, padding=1) + + def _load_from_state_dict(self, state_dict, prefix, 
local_metadata, strict, + missing_keys, unexpected_keys, error_msgs): + """Hack some keys of the model state dict so that can load checkpoints + of previous version.""" + version = local_metadata.get('version', None) + if version is None: + # the key is different in early versions + # for example, 'fcos_cls' become 'conv_cls' now + bbox_head_keys = [ + k for k in state_dict.keys() if k.startswith(prefix) + ] + ori_predictor_keys = [] + new_predictor_keys = [] + # e.g. 'fcos_cls' or 'fcos_reg' + for key in bbox_head_keys: + ori_predictor_keys.append(key) + key = key.split('.') + conv_name = None + if key[1].endswith('cls'): + conv_name = 'conv_cls' + elif key[1].endswith('reg'): + conv_name = 'conv_reg' + elif key[1].endswith('centerness'): + conv_name = 'conv_centerness' + else: + assert NotImplementedError + if conv_name is not None: + key[1] = conv_name + new_predictor_keys.append('.'.join(key)) + else: + ori_predictor_keys.pop(-1) + for i in range(len(new_predictor_keys)): + state_dict[new_predictor_keys[i]] = state_dict.pop( + ori_predictor_keys[i]) + super()._load_from_state_dict(state_dict, prefix, local_metadata, + strict, missing_keys, unexpected_keys, + error_msgs) + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple: Usually contain classification scores and bbox predictions. + cls_scores (list[Tensor]): Box scores for each scale level, + each is a 4D-tensor, the channel number is + num_points * num_classes. + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level, each is a 4D-tensor, the channel number is + num_points * 4. + """ + return multi_apply(self.forward_single, feats)[:2] + + def forward_single(self, x): + """Forward features of a single scale level. + + Args: + x (Tensor): FPN feature maps of the specified stride. + + Returns: + tuple: Scores for each class, bbox predictions, features + after classification and regression conv layers, some + models needs these features like FCOS. + """ + cls_feat = x + reg_feat = x + + for cls_layer in self.cls_convs: + cls_feat = cls_layer(cls_feat) + cls_score = self.conv_cls(cls_feat) + + for reg_layer in self.reg_convs: + reg_feat = reg_layer(reg_feat) + bbox_pred = self.conv_reg(reg_feat) + return cls_score, bbox_pred, cls_feat, reg_feat + + @abstractmethod + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute loss of the head. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level, + each is a 4D-tensor, the channel number is + num_points * num_classes. + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level, each is a 4D-tensor, the channel number is + num_points * 4. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. 
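+
+        Returns:
+            dict[str, Tensor]: A dictionary of loss components, to be
+                provided by concrete subclasses (e.g. ``FCOSHead``).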
+ """ + + raise NotImplementedError + + @abstractmethod + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def get_bboxes(self, + cls_scores, + bbox_preds, + img_metas, + cfg=None, + rescale=None): + """Transform network output for a batch into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_points * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_points * 4, H, W) + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + cfg (mmcv.Config): Test / postprocessing configuration, + if None, test_cfg would be used + rescale (bool): If True, return boxes in original image space + """ + + raise NotImplementedError + + @abstractmethod + def get_targets(self, points, gt_bboxes_list, gt_labels_list): + """Compute regression, classification and centerness targets for points + in multiple images. + + Args: + points (list[Tensor]): Points of each fpn level, each has shape + (num_points, 2). + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image, + each has shape (num_gt, 4). + gt_labels_list (list[Tensor]): Ground truth labels of each box, + each has shape (num_gt,). + """ + raise NotImplementedError + + def _get_points_single(self, + featmap_size, + stride, + dtype, + device, + flatten=False): + """Get points of a single scale level.""" + h, w = featmap_size + # First create Range with the default dtype, than convert to + # target `dtype` for onnx exporting. + x_range = torch.arange(w, device=device).to(dtype) + y_range = torch.arange(h, device=device).to(dtype) + y, x = torch.meshgrid(y_range, x_range) + if flatten: + y = y.flatten() + x = x.flatten() + return y, x + + def get_points(self, featmap_sizes, dtype, device, flatten=False): + """Get points according to feature map sizes. + + Args: + featmap_sizes (list[tuple]): Multi-level feature map sizes. + dtype (torch.dtype): Type of points. + device (torch.device): Device of points. + + Returns: + tuple: points of each image. + """ + mlvl_points = [] + for i in range(len(featmap_sizes)): + mlvl_points.append( + self._get_points_single(featmap_sizes[i], self.strides[i], + dtype, device, flatten)) + return mlvl_points + + def aug_test(self, feats, img_metas, rescale=False): + """Test function with test time augmentation. + + Args: + feats (list[Tensor]): the outer list indicates test-time + augmentations and inner Tensor should have a shape NxCxHxW, + which contains features for all images in the batch. + img_metas (list[list[dict]]): the outer list indicates test-time + augs (multiscale, flip, etc.) and the inner list indicates + images in a batch. each dict has image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. 
+ + Returns: + list[ndarray]: bbox results of each class + """ + return self.aug_test_bboxes(feats, img_metas, rescale=rescale) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/anchor_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/anchor_head.py new file mode 100644 index 0000000000000000000000000000000000000000..e7c975f57d03712498602672d9a0540dd9fdddfb --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/anchor_head.py @@ -0,0 +1,745 @@ +import torch +import torch.nn as nn +from mmcv.runner import force_fp32 + +from mmdet.core import (anchor_inside_flags, build_anchor_generator, + build_assigner, build_bbox_coder, build_sampler, + images_to_levels, multi_apply, multiclass_nms, unmap) +from ..builder import HEADS, build_loss +from .base_dense_head import BaseDenseHead +from .dense_test_mixins import BBoxTestMixin + + +@HEADS.register_module() +class AnchorHead(BaseDenseHead, BBoxTestMixin): + """Anchor-based head (RPN, RetinaNet, SSD, etc.). + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + feat_channels (int): Number of hidden channels. Used in child classes. + anchor_generator (dict): Config dict for anchor generator + bbox_coder (dict): Config of bounding box coder. + reg_decoded_bbox (bool): If true, the regression loss would be + applied directly on decoded bounding boxes, converting both + the predicted boxes and regression targets to absolute + coordinates format. Default False. It should be `True` when + using `IoULoss`, `GIoULoss`, or `DIoULoss` in the bbox head. + loss_cls (dict): Config of classification loss. + loss_bbox (dict): Config of localization loss. + train_cfg (dict): Training config of anchor head. + test_cfg (dict): Testing config of anchor head. + init_cfg (dict or list[dict], optional): Initialization config dict. 
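+
+    A minimal forward sketch is shown below (illustrative only; the shapes
+    follow the default ``anchor_generator``, which yields 9 anchors per
+    location):
+
+    Example:
+        >>> import torch
+        >>> self = AnchorHead(num_classes=9, in_channels=1)
+        >>> feats = [torch.rand(1, 1, 8, 8) for _ in self.anchor_generator.strides]
+        >>> cls_scores, bbox_preds = self.forward(feats)
+        >>> assert len(cls_scores) == len(bbox_preds) == len(feats)
+        >>> assert cls_scores[0].shape == (1, self.num_anchors * 9, 8, 8)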
+ """ # noqa: W605 + + def __init__(self, + num_classes, + in_channels, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8, 16, 32], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + clip_border=True, + target_means=(.0, .0, .0, .0), + target_stds=(1.0, 1.0, 1.0, 1.0)), + reg_decoded_bbox=False, + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0), + loss_bbox=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0), + train_cfg=None, + test_cfg=None, + init_cfg=dict(type='Normal', layers='Conv2d', std=0.01)): + super(AnchorHead, self).__init__(init_cfg) + self.in_channels = in_channels + self.num_classes = num_classes + self.feat_channels = feat_channels + self.use_sigmoid_cls = loss_cls.get('use_sigmoid', False) + # TODO better way to determine whether sample or not + self.sampling = loss_cls['type'] not in [ + 'FocalLoss', 'GHMC', 'QualityFocalLoss' + ] + if self.use_sigmoid_cls: + self.cls_out_channels = num_classes + else: + self.cls_out_channels = num_classes + 1 + + if self.cls_out_channels <= 0: + raise ValueError(f'num_classes={num_classes} is too small') + self.reg_decoded_bbox = reg_decoded_bbox + + self.bbox_coder = build_bbox_coder(bbox_coder) + self.loss_cls = build_loss(loss_cls) + self.loss_bbox = build_loss(loss_bbox) + self.train_cfg = train_cfg + self.test_cfg = test_cfg + if self.train_cfg: + self.assigner = build_assigner(self.train_cfg.assigner) + # use PseudoSampler when sampling is False + if self.sampling and hasattr(self.train_cfg, 'sampler'): + sampler_cfg = self.train_cfg.sampler + else: + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + self.fp16_enabled = False + + self.anchor_generator = build_anchor_generator(anchor_generator) + # usually the numbers of anchors for each level are the same + # except SSD detectors + self.num_anchors = self.anchor_generator.num_base_anchors[0] + self._init_layers() + + def _init_layers(self): + """Initialize layers of the head.""" + self.conv_cls = nn.Conv2d(self.in_channels, + self.num_anchors * self.cls_out_channels, 1) + self.conv_reg = nn.Conv2d(self.in_channels, self.num_anchors * 4, 1) + + def forward_single(self, x): + """Forward feature of a single scale level. + + Args: + x (Tensor): Features of a single scale level. + + Returns: + tuple: + cls_score (Tensor): Cls scores for a single scale level \ + the channels number is num_anchors * num_classes. + bbox_pred (Tensor): Box energies / deltas for a single scale \ + level, the channels number is num_anchors * 4. + """ + cls_score = self.conv_cls(x) + bbox_pred = self.conv_reg(x) + return cls_score, bbox_pred + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple: A tuple of classification scores and bbox prediction. + + - cls_scores (list[Tensor]): Classification scores for all \ + scale levels, each is a 4D-tensor, the channels number \ + is num_anchors * num_classes. + - bbox_preds (list[Tensor]): Box energies / deltas for all \ + scale levels, each is a 4D-tensor, the channels number \ + is num_anchors * 4. + """ + return multi_apply(self.forward_single, feats) + + def get_anchors(self, featmap_sizes, img_metas, device='cuda'): + """Get anchors according to feature map sizes. + + Args: + featmap_sizes (list[tuple]): Multi-level feature map sizes. 
+ img_metas (list[dict]): Image meta info. + device (torch.device | str): Device for returned tensors + + Returns: + tuple: + anchor_list (list[Tensor]): Anchors of each image. + valid_flag_list (list[Tensor]): Valid flags of each image. + """ + num_imgs = len(img_metas) + + # since feature map sizes of all images are the same, we only compute + # anchors for one time + multi_level_anchors = self.anchor_generator.grid_anchors( + featmap_sizes, device) + anchor_list = [multi_level_anchors for _ in range(num_imgs)] + + # for each image, we compute valid flags of multi level anchors + valid_flag_list = [] + for img_id, img_meta in enumerate(img_metas): + multi_level_flags = self.anchor_generator.valid_flags( + featmap_sizes, img_meta['pad_shape'], device) + valid_flag_list.append(multi_level_flags) + + return anchor_list, valid_flag_list + + def _get_targets_single(self, + flat_anchors, + valid_flags, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + img_meta, + label_channels=1, + unmap_outputs=True): + """Compute regression and classification targets for anchors in a + single image. + + Args: + flat_anchors (Tensor): Multi-level anchors of the image, which are + concatenated into a single tensor of shape (num_anchors ,4) + valid_flags (Tensor): Multi level valid flags of the image, + which are concatenated into a single tensor of + shape (num_anchors,). + gt_bboxes (Tensor): Ground truth bboxes of the image, + shape (num_gts, 4). + gt_bboxes_ignore (Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). + img_meta (dict): Meta info of the image. + gt_labels (Tensor): Ground truth labels of each box, + shape (num_gts,). + label_channels (int): Channel of label. + unmap_outputs (bool): Whether to map outputs back to the original + set of anchors. 
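+
+        Note:
+            Anchors outside the valid image region (subject to
+            ``train_cfg.allowed_border``) are dropped via
+            ``anchor_inside_flags`` before assignment; when
+            ``unmap_outputs`` is True the computed targets are scattered
+            back onto the full anchor set with ``unmap``, using the
+            background class as the fill label.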
+ + Returns: + tuple: + labels_list (list[Tensor]): Labels of each level + label_weights_list (list[Tensor]): Label weights of each level + bbox_targets_list (list[Tensor]): BBox targets of each level + bbox_weights_list (list[Tensor]): BBox weights of each level + num_total_pos (int): Number of positive samples in all images + num_total_neg (int): Number of negative samples in all images + """ + inside_flags = anchor_inside_flags(flat_anchors, valid_flags, + img_meta['img_shape'][:2], + self.train_cfg.allowed_border) + if not inside_flags.any(): + return (None, ) * 7 + # assign gt and sample anchors + anchors = flat_anchors[inside_flags, :] + + assign_result = self.assigner.assign( + anchors, gt_bboxes, gt_bboxes_ignore, + None if self.sampling else gt_labels) + sampling_result = self.sampler.sample(assign_result, anchors, + gt_bboxes) + + num_valid_anchors = anchors.shape[0] + bbox_targets = torch.zeros_like(anchors) + bbox_weights = torch.zeros_like(anchors) + labels = anchors.new_full((num_valid_anchors, ), + self.num_classes, + dtype=torch.long) + label_weights = anchors.new_zeros(num_valid_anchors, dtype=torch.float) + + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + if len(pos_inds) > 0: + if not self.reg_decoded_bbox: + pos_bbox_targets = self.bbox_coder.encode( + sampling_result.pos_bboxes, sampling_result.pos_gt_bboxes) + else: + pos_bbox_targets = sampling_result.pos_gt_bboxes + bbox_targets[pos_inds, :] = pos_bbox_targets + bbox_weights[pos_inds, :] = 1.0 + if gt_labels is None: + # Only rpn gives gt_labels as None + # Foreground is the first class since v2.5.0 + labels[pos_inds] = 0 + else: + labels[pos_inds] = gt_labels[ + sampling_result.pos_assigned_gt_inds] + if self.train_cfg.pos_weight <= 0: + label_weights[pos_inds] = 1.0 + else: + label_weights[pos_inds] = self.train_cfg.pos_weight + if len(neg_inds) > 0: + label_weights[neg_inds] = 1.0 + + # map up to original set of anchors + if unmap_outputs: + num_total_anchors = flat_anchors.size(0) + labels = unmap( + labels, num_total_anchors, inside_flags, + fill=self.num_classes) # fill bg label + label_weights = unmap(label_weights, num_total_anchors, + inside_flags) + bbox_targets = unmap(bbox_targets, num_total_anchors, inside_flags) + bbox_weights = unmap(bbox_weights, num_total_anchors, inside_flags) + + return (labels, label_weights, bbox_targets, bbox_weights, pos_inds, + neg_inds, sampling_result) + + def get_targets(self, + anchor_list, + valid_flag_list, + gt_bboxes_list, + img_metas, + gt_bboxes_ignore_list=None, + gt_labels_list=None, + label_channels=1, + unmap_outputs=True, + return_sampling_results=False): + """Compute regression and classification targets for anchors in + multiple images. + + Args: + anchor_list (list[list[Tensor]]): Multi level anchors of each + image. The outer list indicates images, and the inner list + corresponds to feature levels of the image. Each element of + the inner list is a tensor of shape (num_anchors, 4). + valid_flag_list (list[list[Tensor]]): Multi level valid flags of + each image. The outer list indicates images, and the inner list + corresponds to feature levels of the image. Each element of + the inner list is a tensor of shape (num_anchors, ) + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image. + img_metas (list[dict]): Meta info of each image. + gt_bboxes_ignore_list (list[Tensor]): Ground truth bboxes to be + ignored. + gt_labels_list (list[Tensor]): Ground truth labels of each box. + label_channels (int): Channel of label. 
+ unmap_outputs (bool): Whether to map outputs back to the original + set of anchors. + + Returns: + tuple: Usually returns a tuple containing learning targets. + + - labels_list (list[Tensor]): Labels of each level. + - label_weights_list (list[Tensor]): Label weights of each \ + level. + - bbox_targets_list (list[Tensor]): BBox targets of each level. + - bbox_weights_list (list[Tensor]): BBox weights of each level. + - num_total_pos (int): Number of positive samples in all \ + images. + - num_total_neg (int): Number of negative samples in all \ + images. + additional_returns: This function enables user-defined returns from + `self._get_targets_single`. These returns are currently refined + to properties at each feature map (i.e. having HxW dimension). + The results will be concatenated after the end + """ + num_imgs = len(img_metas) + assert len(anchor_list) == len(valid_flag_list) == num_imgs + + # anchor number of multi levels + num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]] + # concat all level anchors to a single tensor + concat_anchor_list = [] + concat_valid_flag_list = [] + for i in range(num_imgs): + assert len(anchor_list[i]) == len(valid_flag_list[i]) + concat_anchor_list.append(torch.cat(anchor_list[i])) + concat_valid_flag_list.append(torch.cat(valid_flag_list[i])) + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + if gt_labels_list is None: + gt_labels_list = [None for _ in range(num_imgs)] + results = multi_apply( + self._get_targets_single, + concat_anchor_list, + concat_valid_flag_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + img_metas, + label_channels=label_channels, + unmap_outputs=unmap_outputs) + (all_labels, all_label_weights, all_bbox_targets, all_bbox_weights, + pos_inds_list, neg_inds_list, sampling_results_list) = results[:7] + rest_results = list(results[7:]) # user-added return values + # no valid anchors + if any([labels is None for labels in all_labels]): + return None + # sampled anchors of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + # split targets to a list w.r.t. multiple levels + labels_list = images_to_levels(all_labels, num_level_anchors) + label_weights_list = images_to_levels(all_label_weights, + num_level_anchors) + bbox_targets_list = images_to_levels(all_bbox_targets, + num_level_anchors) + bbox_weights_list = images_to_levels(all_bbox_weights, + num_level_anchors) + res = (labels_list, label_weights_list, bbox_targets_list, + bbox_weights_list, num_total_pos, num_total_neg) + if return_sampling_results: + res = res + (sampling_results_list, ) + for i, r in enumerate(rest_results): # user-added return values + rest_results[i] = images_to_levels(r, num_level_anchors) + + return res + tuple(rest_results) + + def loss_single(self, cls_score, bbox_pred, anchors, labels, label_weights, + bbox_targets, bbox_weights, num_total_samples): + """Compute loss of a single scale level. + + Args: + cls_score (Tensor): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W). + bbox_pred (Tensor): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W). + anchors (Tensor): Box reference for each scale level with shape + (N, num_total_anchors, 4). + labels (Tensor): Labels of each anchors with shape + (N, num_total_anchors). 
+ label_weights (Tensor): Label weights of each anchor with shape + (N, num_total_anchors) + bbox_targets (Tensor): BBox regression targets of each anchor wight + shape (N, num_total_anchors, 4). + bbox_weights (Tensor): BBox regression loss weights of each anchor + with shape (N, num_total_anchors, 4). + num_total_samples (int): If sampling, num total samples equal to + the number of total anchors; Otherwise, it is the number of + positive anchors. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + # classification loss + labels = labels.reshape(-1) + label_weights = label_weights.reshape(-1) + cls_score = cls_score.permute(0, 2, 3, + 1).reshape(-1, self.cls_out_channels) + loss_cls = self.loss_cls( + cls_score, labels, label_weights, avg_factor=num_total_samples) + # regression loss + bbox_targets = bbox_targets.reshape(-1, 4) + bbox_weights = bbox_weights.reshape(-1, 4) + bbox_pred = bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4) + if self.reg_decoded_bbox: + # When the regression loss (e.g. `IouLoss`, `GIouLoss`) + # is applied directly on the decoded bounding boxes, it + # decodes the already encoded coordinates to absolute format. + anchors = anchors.reshape(-1, 4) + bbox_pred = self.bbox_coder.decode(anchors, bbox_pred) + loss_bbox = self.loss_bbox( + bbox_pred, + bbox_targets, + bbox_weights, + avg_factor=num_total_samples) + return loss_cls, loss_bbox + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W) + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. Default: None + + Returns: + dict[str, Tensor]: A dictionary of loss components. 
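+
+        Note:
+            ``loss_cls`` and ``loss_bbox`` in the returned dict are lists
+            holding one tensor per feature-pyramid level (the direct output
+            of ``multi_apply`` over levels), not pre-summed scalars.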
+ """ + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels) + if cls_reg_targets is None: + return None + (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, + num_total_pos, num_total_neg) = cls_reg_targets + num_total_samples = ( + num_total_pos + num_total_neg if self.sampling else num_total_pos) + + # anchor number of multi levels + num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]] + # concat all level anchors and flags to a single tensor + concat_anchor_list = [] + for i in range(len(anchor_list)): + concat_anchor_list.append(torch.cat(anchor_list[i])) + all_anchor_list = images_to_levels(concat_anchor_list, + num_level_anchors) + + losses_cls, losses_bbox = multi_apply( + self.loss_single, + cls_scores, + bbox_preds, + all_anchor_list, + labels_list, + label_weights_list, + bbox_targets_list, + bbox_weights_list, + num_total_samples=num_total_samples) + return dict(loss_cls=losses_cls, loss_bbox=losses_bbox) + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def get_bboxes(self, + cls_scores, + bbox_preds, + img_metas, + cfg=None, + rescale=False, + with_nms=True): + """Transform network output for a batch into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box scores for each level in the + feature pyramid, has shape + (N, num_anchors * num_classes, H, W). + bbox_preds (list[Tensor]): Box energies / deltas for each + level in the feature pyramid, has shape + (N, num_anchors * 4, H, W). + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is an (n, 5) tensor, where 5 represent + (tl_x, tl_y, br_x, br_y, score) and the score between 0 and 1. + The shape of the second tensor in the tuple is (n,), and + each element represents the class label of the corresponding + box. 
+ + Example: + >>> import mmcv + >>> self = AnchorHead( + >>> num_classes=9, + >>> in_channels=1, + >>> anchor_generator=dict( + >>> type='AnchorGenerator', + >>> scales=[8], + >>> ratios=[0.5, 1.0, 2.0], + >>> strides=[4,])) + >>> img_metas = [{'img_shape': (32, 32, 3), 'scale_factor': 1}] + >>> cfg = mmcv.Config(dict( + >>> score_thr=0.00, + >>> nms=dict(type='nms', iou_thr=1.0), + >>> max_per_img=10)) + >>> feat = torch.rand(1, 1, 3, 3) + >>> cls_score, bbox_pred = self.forward_single(feat) + >>> # note the input lists are over different levels, not images + >>> cls_scores, bbox_preds = [cls_score], [bbox_pred] + >>> result_list = self.get_bboxes(cls_scores, bbox_preds, + >>> img_metas, cfg) + >>> det_bboxes, det_labels = result_list[0] + >>> assert len(result_list) == 1 + >>> assert det_bboxes.shape[1] == 5 + >>> assert len(det_bboxes) == len(det_labels) == cfg.max_per_img + """ + assert len(cls_scores) == len(bbox_preds) + num_levels = len(cls_scores) + + device = cls_scores[0].device + featmap_sizes = [cls_scores[i].shape[-2:] for i in range(num_levels)] + mlvl_anchors = self.anchor_generator.grid_anchors( + featmap_sizes, device=device) + + mlvl_cls_scores = [cls_scores[i].detach() for i in range(num_levels)] + mlvl_bbox_preds = [bbox_preds[i].detach() for i in range(num_levels)] + + if torch.onnx.is_in_onnx_export(): + assert len( + img_metas + ) == 1, 'Only support one input image while in exporting to ONNX' + img_shapes = img_metas[0]['img_shape_for_onnx'] + else: + img_shapes = [ + img_metas[i]['img_shape'] + for i in range(cls_scores[0].shape[0]) + ] + scale_factors = [ + img_metas[i]['scale_factor'] for i in range(cls_scores[0].shape[0]) + ] + + if with_nms: + # some heads don't support with_nms argument + result_list = self._get_bboxes(mlvl_cls_scores, mlvl_bbox_preds, + mlvl_anchors, img_shapes, + scale_factors, cfg, rescale) + else: + result_list = self._get_bboxes(mlvl_cls_scores, mlvl_bbox_preds, + mlvl_anchors, img_shapes, + scale_factors, cfg, rescale, + with_nms) + return result_list + + def _get_bboxes(self, + mlvl_cls_scores, + mlvl_bbox_preds, + mlvl_anchors, + img_shapes, + scale_factors, + cfg, + rescale=False, + with_nms=True): + """Transform outputs for a batch item into bbox predictions. + + Args: + mlvl_cls_scores (list[Tensor]): Each element in the list is + the scores of bboxes of single level in the feature pyramid, + has shape (N, num_anchors * num_classes, H, W). + mlvl_bbox_preds (list[Tensor]): Each element in the list is the + bboxes predictions of single level in the feature pyramid, + has shape (N, num_anchors * 4, H, W). + mlvl_anchors (list[Tensor]): Each element in the list is + the anchors of single level in feature pyramid, has shape + (num_anchors, 4). + img_shapes (list[tuple[int]]): Each tuple in the list represent + the shape(height, width, 3) of single image in the batch. + scale_factors (list[ndarray]): Scale factor of the batch + image arange as list[(w_scale, h_scale, w_scale, h_scale)]. + cfg (mmcv.Config): Test / postprocessing configuration, + if None, test_cfg would be used. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is an (n, 5) tensor, where 5 represent + (tl_x, tl_y, br_x, br_y, score) and the score between 0 and 1. 
+ The shape of the second tensor in the tuple is (n,), and + each element represents the class label of the corresponding + box. + """ + cfg = self.test_cfg if cfg is None else cfg + assert len(mlvl_cls_scores) == len(mlvl_bbox_preds) == len( + mlvl_anchors) + batch_size = mlvl_cls_scores[0].shape[0] + # convert to tensor to keep tracing + nms_pre_tensor = torch.tensor( + cfg.get('nms_pre', -1), + device=mlvl_cls_scores[0].device, + dtype=torch.long) + + mlvl_bboxes = [] + mlvl_scores = [] + for cls_score, bbox_pred, anchors in zip(mlvl_cls_scores, + mlvl_bbox_preds, + mlvl_anchors): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + cls_score = cls_score.permute(0, 2, 3, + 1).reshape(batch_size, -1, + self.cls_out_channels) + if self.use_sigmoid_cls: + scores = cls_score.sigmoid() + else: + scores = cls_score.softmax(-1) + bbox_pred = bbox_pred.permute(0, 2, 3, + 1).reshape(batch_size, -1, 4) + anchors = anchors.expand_as(bbox_pred) + # Always keep topk op for dynamic input in onnx + from mmdet.core.export import get_k_for_topk + nms_pre = get_k_for_topk(nms_pre_tensor, bbox_pred.shape[1]) + if nms_pre > 0: + # Get maximum scores for foreground classes. + if self.use_sigmoid_cls: + max_scores, _ = scores.max(-1) + else: + # remind that we set FG labels to [0, num_class-1] + # since mmdet v2.0 + # BG cat_id: num_class + max_scores, _ = scores[..., :-1].max(-1) + + _, topk_inds = max_scores.topk(nms_pre) + batch_inds = torch.arange(batch_size).view( + -1, 1).expand_as(topk_inds) + anchors = anchors[batch_inds, topk_inds, :] + bbox_pred = bbox_pred[batch_inds, topk_inds, :] + scores = scores[batch_inds, topk_inds, :] + + bboxes = self.bbox_coder.decode( + anchors, bbox_pred, max_shape=img_shapes) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + + batch_mlvl_bboxes = torch.cat(mlvl_bboxes, dim=1) + if rescale: + batch_mlvl_bboxes /= batch_mlvl_bboxes.new_tensor( + scale_factors).unsqueeze(1) + batch_mlvl_scores = torch.cat(mlvl_scores, dim=1) + + # Replace multiclass_nms with ONNX::NonMaxSuppression in deployment + if torch.onnx.is_in_onnx_export() and with_nms: + from mmdet.core.export import add_dummy_nms_for_onnx + # ignore background class + if not self.use_sigmoid_cls: + num_classes = batch_mlvl_scores.shape[2] - 1 + batch_mlvl_scores = batch_mlvl_scores[..., :num_classes] + max_output_boxes_per_class = cfg.nms.get( + 'max_output_boxes_per_class', 200) + iou_threshold = cfg.nms.get('iou_threshold', 0.5) + score_threshold = cfg.score_thr + nms_pre = cfg.get('deploy_nms_pre', -1) + return add_dummy_nms_for_onnx(batch_mlvl_bboxes, batch_mlvl_scores, + max_output_boxes_per_class, + iou_threshold, score_threshold, + nms_pre, cfg.max_per_img) + if self.use_sigmoid_cls: + # Add a dummy background class to the backend when using sigmoid + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + padding = batch_mlvl_scores.new_zeros(batch_size, + batch_mlvl_scores.shape[1], + 1) + batch_mlvl_scores = torch.cat([batch_mlvl_scores, padding], dim=-1) + + if with_nms: + det_results = [] + for (mlvl_bboxes, mlvl_scores) in zip(batch_mlvl_bboxes, + batch_mlvl_scores): + det_bbox, det_label = multiclass_nms(mlvl_bboxes, mlvl_scores, + cfg.score_thr, cfg.nms, + cfg.max_per_img) + det_results.append(tuple([det_bbox, det_label])) + else: + det_results = [ + tuple(mlvl_bs) + for mlvl_bs in zip(batch_mlvl_bboxes, batch_mlvl_scores) + ] + return det_results + + def aug_test(self, feats, img_metas, rescale=False): + """Test function with test time 
augmentation.
+
+        Args:
+            feats (list[Tensor]): the outer list indicates test-time
+                augmentations and inner Tensor should have a shape NxCxHxW,
+                which contains features for all images in the batch.
+            img_metas (list[list[dict]]): the outer list indicates test-time
+                augs (multiscale, flip, etc.) and the inner list indicates
+                images in a batch. Each dict has image information.
+            rescale (bool, optional): Whether to rescale the results.
+                Defaults to False.
+
+        Returns:
+            list[tuple[Tensor, Tensor]]: Each item in result_list is a
+                2-tuple. The first item is ``bboxes`` with shape (n, 5),
+                where 5 represent (tl_x, tl_y, br_x, br_y, score).
+                The shape of the second tensor in the tuple is ``labels``
+                with shape (n,). The length of the list should always be 1.
+        """
+        return self.aug_test_bboxes(feats, img_metas, rescale=rescale)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/atss_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/atss_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..17dd39560fe9985bb794257869e0ef52b7d53338
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/atss_head.py
@@ -0,0 +1,684 @@
+import torch
+import torch.nn as nn
+from mmcv.cnn import ConvModule, Scale
+from mmcv.runner import force_fp32
+
+from mmdet.core import (anchor_inside_flags, build_assigner, build_sampler,
+                        images_to_levels, multi_apply, multiclass_nms,
+                        reduce_mean, unmap)
+from ..builder import HEADS, build_loss
+from .anchor_head import AnchorHead
+
+
+@HEADS.register_module()
+class ATSSHead(AnchorHead):
+    """Bridging the Gap Between Anchor-based and Anchor-free Detection via
+    Adaptive Training Sample Selection.
+
+    The ATSS head structure is similar to that of FCOS; however, ATSS uses
+    anchor boxes and assigns labels with Adaptive Training Sample Selection
+    instead of max-IoU.
+ + https://arxiv.org/abs/1912.02424 + """ + + def __init__(self, + num_classes, + in_channels, + stacked_convs=4, + conv_cfg=None, + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True), + loss_centerness=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0), + init_cfg=dict( + type='Normal', + layer='Conv2d', + std=0.01, + override=dict( + type='Normal', + name='atss_cls', + std=0.01, + bias_prob=0.01)), + **kwargs): + self.stacked_convs = stacked_convs + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + super(ATSSHead, self).__init__( + num_classes, in_channels, init_cfg=init_cfg, **kwargs) + + self.sampling = False + if self.train_cfg: + self.assigner = build_assigner(self.train_cfg.assigner) + # SSD sampling=False so use PseudoSampler + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + self.loss_centerness = build_loss(loss_centerness) + + def _init_layers(self): + """Initialize layers of the head.""" + self.relu = nn.ReLU(inplace=True) + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + self.cls_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.reg_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.atss_cls = nn.Conv2d( + self.feat_channels, + self.num_anchors * self.cls_out_channels, + 3, + padding=1) + self.atss_reg = nn.Conv2d( + self.feat_channels, self.num_anchors * 4, 3, padding=1) + self.atss_centerness = nn.Conv2d( + self.feat_channels, self.num_anchors * 1, 3, padding=1) + self.scales = nn.ModuleList( + [Scale(1.0) for _ in self.anchor_generator.strides]) + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple: Usually a tuple of classification scores and bbox prediction + cls_scores (list[Tensor]): Classification scores for all scale + levels, each is a 4D-tensor, the channels number is + num_anchors * num_classes. + bbox_preds (list[Tensor]): Box energies / deltas for all scale + levels, each is a 4D-tensor, the channels number is + num_anchors * 4. + """ + return multi_apply(self.forward_single, feats, self.scales) + + def forward_single(self, x, scale): + """Forward feature of a single scale level. + + Args: + x (Tensor): Features of a single scale level. + scale (:obj: `mmcv.cnn.Scale`): Learnable scale module to resize + the bbox prediction. + + Returns: + tuple: + cls_score (Tensor): Cls scores for a single scale level + the channels number is num_anchors * num_classes. + bbox_pred (Tensor): Box energies / deltas for a single scale + level, the channels number is num_anchors * 4. + centerness (Tensor): Centerness for a single scale level, the + channel number is (N, num_anchors * 1, H, W). 
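+
+        A minimal sketch (illustrative only; the default anchor generator
+        inherited from ``AnchorHead`` yields 9 anchors per location, whereas
+        real ATSS configs typically configure a single anchor):
+
+        Example:
+            >>> import torch
+            >>> self = ATSSHead(num_classes=2, in_channels=8)
+            >>> feat = torch.rand(1, 8, 16, 16)
+            >>> cls_score, bbox_pred, centerness = self.forward_single(
+            >>>     feat, self.scales[0])
+            >>> assert cls_score.shape == (1, self.num_anchors * 2, 16, 16)
+            >>> assert bbox_pred.shape == (1, self.num_anchors * 4, 16, 16)
+            >>> assert centerness.shape == (1, self.num_anchors, 16, 16)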
+ """ + cls_feat = x + reg_feat = x + for cls_conv in self.cls_convs: + cls_feat = cls_conv(cls_feat) + for reg_conv in self.reg_convs: + reg_feat = reg_conv(reg_feat) + cls_score = self.atss_cls(cls_feat) + # we just follow atss, not apply exp in bbox_pred + bbox_pred = scale(self.atss_reg(reg_feat)).float() + centerness = self.atss_centerness(reg_feat) + return cls_score, bbox_pred, centerness + + def loss_single(self, anchors, cls_score, bbox_pred, centerness, labels, + label_weights, bbox_targets, num_total_samples): + """Compute loss of a single scale level. + + Args: + cls_score (Tensor): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W). + bbox_pred (Tensor): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W). + anchors (Tensor): Box reference for each scale level with shape + (N, num_total_anchors, 4). + labels (Tensor): Labels of each anchors with shape + (N, num_total_anchors). + label_weights (Tensor): Label weights of each anchor with shape + (N, num_total_anchors) + bbox_targets (Tensor): BBox regression targets of each anchor wight + shape (N, num_total_anchors, 4). + num_total_samples (int): Number os positive samples that is + reduced over all GPUs. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + + anchors = anchors.reshape(-1, 4) + cls_score = cls_score.permute(0, 2, 3, 1).reshape( + -1, self.cls_out_channels).contiguous() + bbox_pred = bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4) + centerness = centerness.permute(0, 2, 3, 1).reshape(-1) + bbox_targets = bbox_targets.reshape(-1, 4) + labels = labels.reshape(-1) + label_weights = label_weights.reshape(-1) + + # classification loss + loss_cls = self.loss_cls( + cls_score, labels, label_weights, avg_factor=num_total_samples) + + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + bg_class_ind = self.num_classes + pos_inds = ((labels >= 0) + & (labels < bg_class_ind)).nonzero().squeeze(1) + + if len(pos_inds) > 0: + pos_bbox_targets = bbox_targets[pos_inds] + pos_bbox_pred = bbox_pred[pos_inds] + pos_anchors = anchors[pos_inds] + pos_centerness = centerness[pos_inds] + + centerness_targets = self.centerness_target( + pos_anchors, pos_bbox_targets) + pos_decode_bbox_pred = self.bbox_coder.decode( + pos_anchors, pos_bbox_pred) + pos_decode_bbox_targets = self.bbox_coder.decode( + pos_anchors, pos_bbox_targets) + + # regression loss + loss_bbox = self.loss_bbox( + pos_decode_bbox_pred, + pos_decode_bbox_targets, + weight=centerness_targets, + avg_factor=1.0) + + # centerness loss + loss_centerness = self.loss_centerness( + pos_centerness, + centerness_targets, + avg_factor=num_total_samples) + + else: + loss_bbox = bbox_pred.sum() * 0 + loss_centerness = centerness.sum() * 0 + centerness_targets = bbox_targets.new_tensor(0.) + + return loss_cls, loss_bbox, loss_centerness, centerness_targets.sum() + + @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'centernesses')) + def loss(self, + cls_scores, + bbox_preds, + centernesses, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. 
+ + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W) + centernesses (list[Tensor]): Centerness for each scale + level with shape (N, num_anchors * 1, H, W) + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (list[Tensor] | None): specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels) + if cls_reg_targets is None: + return None + + (anchor_list, labels_list, label_weights_list, bbox_targets_list, + bbox_weights_list, num_total_pos, num_total_neg) = cls_reg_targets + + num_total_samples = reduce_mean( + torch.tensor(num_total_pos, dtype=torch.float, + device=device)).item() + num_total_samples = max(num_total_samples, 1.0) + + losses_cls, losses_bbox, loss_centerness,\ + bbox_avg_factor = multi_apply( + self.loss_single, + anchor_list, + cls_scores, + bbox_preds, + centernesses, + labels_list, + label_weights_list, + bbox_targets_list, + num_total_samples=num_total_samples) + + bbox_avg_factor = sum(bbox_avg_factor) + bbox_avg_factor = reduce_mean(bbox_avg_factor).clamp_(min=1).item() + losses_bbox = list(map(lambda x: x / bbox_avg_factor, losses_bbox)) + return dict( + loss_cls=losses_cls, + loss_bbox=losses_bbox, + loss_centerness=loss_centerness) + + def centerness_target(self, anchors, bbox_targets): + # only calculate pos centerness targets, otherwise there may be nan + gts = self.bbox_coder.decode(anchors, bbox_targets) + anchors_cx = (anchors[:, 2] + anchors[:, 0]) / 2 + anchors_cy = (anchors[:, 3] + anchors[:, 1]) / 2 + l_ = anchors_cx - gts[:, 0] + t_ = anchors_cy - gts[:, 1] + r_ = gts[:, 2] - anchors_cx + b_ = gts[:, 3] - anchors_cy + + left_right = torch.stack([l_, r_], dim=1) + top_bottom = torch.stack([t_, b_], dim=1) + centerness = torch.sqrt( + (left_right.min(dim=-1)[0] / left_right.max(dim=-1)[0]) * + (top_bottom.min(dim=-1)[0] / top_bottom.max(dim=-1)[0])) + assert not torch.isnan(centerness).any() + return centerness + + @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'centernesses')) + def get_bboxes(self, + cls_scores, + bbox_preds, + centernesses, + img_metas, + cfg=None, + rescale=False, + with_nms=True): + """Transform network output for a batch into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + with shape (N, num_anchors * num_classes, H, W). + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W). + centernesses (list[Tensor]): Centerness for each scale level with + shape (N, num_anchors * 1, H, W). 
+ img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used. Default: None. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is an (n, 5) tensor, where 5 represent + (tl_x, tl_y, br_x, br_y, score) and the score between 0 and 1. + The shape of the second tensor in the tuple is (n,), and + each element represents the class label of the corresponding + box. + """ + cfg = self.test_cfg if cfg is None else cfg + assert len(cls_scores) == len(bbox_preds) + num_levels = len(cls_scores) + device = cls_scores[0].device + featmap_sizes = [cls_scores[i].shape[-2:] for i in range(num_levels)] + mlvl_anchors = self.anchor_generator.grid_anchors( + featmap_sizes, device=device) + + cls_score_list = [cls_scores[i].detach() for i in range(num_levels)] + bbox_pred_list = [bbox_preds[i].detach() for i in range(num_levels)] + centerness_pred_list = [ + centernesses[i].detach() for i in range(num_levels) + ] + img_shapes = [ + img_metas[i]['img_shape'] for i in range(cls_scores[0].shape[0]) + ] + scale_factors = [ + img_metas[i]['scale_factor'] for i in range(cls_scores[0].shape[0]) + ] + result_list = self._get_bboxes(cls_score_list, bbox_pred_list, + centerness_pred_list, mlvl_anchors, + img_shapes, scale_factors, cfg, rescale, + with_nms) + return result_list + + def _get_bboxes(self, + cls_scores, + bbox_preds, + centernesses, + mlvl_anchors, + img_shapes, + scale_factors, + cfg, + rescale=False, + with_nms=True): + """Transform outputs for a single batch item into labeled boxes. + + Args: + cls_scores (list[Tensor]): Box scores for a single scale level + with shape (N, num_anchors * num_classes, H, W). + bbox_preds (list[Tensor]): Box energies / deltas for a single + scale level with shape (N, num_anchors * 4, H, W). + centernesses (list[Tensor]): Centerness for a single scale level + with shape (N, num_anchors * 1, H, W). + mlvl_anchors (list[Tensor]): Box reference for a single scale level + with shape (num_total_anchors, 4). + img_shapes (list[tuple[int]]): Shape of the input image, + list[(height, width, 3)]. + scale_factors (list[ndarray]): Scale factor of the image arrange as + (w_scale, h_scale, w_scale, h_scale). + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is an (n, 5) tensor, where 5 represent + (tl_x, tl_y, br_x, br_y, score) and the score between 0 and 1. + The shape of the second tensor in the tuple is (n,), and + each element represents the class label of the corresponding + box. 
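+
+        Note:
+            Candidate ranking uses the product ``scores * centerness``: it
+            drives the pre-NMS top-k selection and is applied again inside
+            ``multiclass_nms`` via ``score_factors``, so the final detection
+            score is the classification score scaled by the predicted
+            centerness.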
+ """ + assert len(cls_scores) == len(bbox_preds) == len(mlvl_anchors) + device = cls_scores[0].device + batch_size = cls_scores[0].shape[0] + # convert to tensor to keep tracing + nms_pre_tensor = torch.tensor( + cfg.get('nms_pre', -1), device=device, dtype=torch.long) + mlvl_bboxes = [] + mlvl_scores = [] + mlvl_centerness = [] + for cls_score, bbox_pred, centerness, anchors in zip( + cls_scores, bbox_preds, centernesses, mlvl_anchors): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + scores = cls_score.permute(0, 2, 3, 1).reshape( + batch_size, -1, self.cls_out_channels).sigmoid() + centerness = centerness.permute(0, 2, 3, + 1).reshape(batch_size, + -1).sigmoid() + bbox_pred = bbox_pred.permute(0, 2, 3, + 1).reshape(batch_size, -1, 4) + + # Always keep topk op for dynamic input in onnx + if nms_pre_tensor > 0 and (torch.onnx.is_in_onnx_export() + or scores.shape[-2] > nms_pre_tensor): + from torch import _shape_as_tensor + # keep shape as tensor and get k + num_anchor = _shape_as_tensor(scores)[-2].to(device) + nms_pre = torch.where(nms_pre_tensor < num_anchor, + nms_pre_tensor, num_anchor) + + max_scores, _ = (scores * centerness[..., None]).max(-1) + _, topk_inds = max_scores.topk(nms_pre) + anchors = anchors[topk_inds, :] + batch_inds = torch.arange(batch_size).view( + -1, 1).expand_as(topk_inds).long() + bbox_pred = bbox_pred[batch_inds, topk_inds, :] + scores = scores[batch_inds, topk_inds, :] + centerness = centerness[batch_inds, topk_inds] + else: + anchors = anchors.expand_as(bbox_pred) + + bboxes = self.bbox_coder.decode( + anchors, bbox_pred, max_shape=img_shapes) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + mlvl_centerness.append(centerness) + + batch_mlvl_bboxes = torch.cat(mlvl_bboxes, dim=1) + if rescale: + batch_mlvl_bboxes /= batch_mlvl_bboxes.new_tensor( + scale_factors).unsqueeze(1) + batch_mlvl_scores = torch.cat(mlvl_scores, dim=1) + batch_mlvl_centerness = torch.cat(mlvl_centerness, dim=1) + + # Set max number of box to be feed into nms in deployment + deploy_nms_pre = cfg.get('deploy_nms_pre', -1) + if deploy_nms_pre > 0 and torch.onnx.is_in_onnx_export(): + batch_mlvl_scores, _ = ( + batch_mlvl_scores * + batch_mlvl_centerness.unsqueeze(2).expand_as(batch_mlvl_scores) + ).max(-1) + _, topk_inds = batch_mlvl_scores.topk(deploy_nms_pre) + batch_inds = torch.arange(batch_size).view(-1, + 1).expand_as(topk_inds) + batch_mlvl_scores = batch_mlvl_scores[batch_inds, topk_inds, :] + batch_mlvl_bboxes = batch_mlvl_bboxes[batch_inds, topk_inds, :] + batch_mlvl_centerness = batch_mlvl_centerness[batch_inds, + topk_inds] + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + padding = batch_mlvl_scores.new_zeros(batch_size, + batch_mlvl_scores.shape[1], 1) + batch_mlvl_scores = torch.cat([batch_mlvl_scores, padding], dim=-1) + + if with_nms: + det_results = [] + for (mlvl_bboxes, mlvl_scores, + mlvl_centerness) in zip(batch_mlvl_bboxes, batch_mlvl_scores, + batch_mlvl_centerness): + det_bbox, det_label = multiclass_nms( + mlvl_bboxes, + mlvl_scores, + cfg.score_thr, + cfg.nms, + cfg.max_per_img, + score_factors=mlvl_centerness) + det_results.append(tuple([det_bbox, det_label])) + else: + det_results = [ + tuple(mlvl_bs) + for mlvl_bs in zip(batch_mlvl_bboxes, batch_mlvl_scores, + batch_mlvl_centerness) + ] + return det_results + + def get_targets(self, + anchor_list, + valid_flag_list, + gt_bboxes_list, + img_metas, + gt_bboxes_ignore_list=None, + gt_labels_list=None, + label_channels=1, + unmap_outputs=True): 
+ """Get targets for ATSS head. + + This method is almost the same as `AnchorHead.get_targets()`. Besides + returning the targets as the parent method does, it also returns the + anchors as the first element of the returned tuple. + """ + num_imgs = len(img_metas) + assert len(anchor_list) == len(valid_flag_list) == num_imgs + + # anchor number of multi levels + num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]] + num_level_anchors_list = [num_level_anchors] * num_imgs + + # concat all level anchors and flags to a single tensor + for i in range(num_imgs): + assert len(anchor_list[i]) == len(valid_flag_list[i]) + anchor_list[i] = torch.cat(anchor_list[i]) + valid_flag_list[i] = torch.cat(valid_flag_list[i]) + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + if gt_labels_list is None: + gt_labels_list = [None for _ in range(num_imgs)] + (all_anchors, all_labels, all_label_weights, all_bbox_targets, + all_bbox_weights, pos_inds_list, neg_inds_list) = multi_apply( + self._get_target_single, + anchor_list, + valid_flag_list, + num_level_anchors_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + img_metas, + label_channels=label_channels, + unmap_outputs=unmap_outputs) + # no valid anchors + if any([labels is None for labels in all_labels]): + return None + # sampled anchors of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + # split targets to a list w.r.t. multiple levels + anchors_list = images_to_levels(all_anchors, num_level_anchors) + labels_list = images_to_levels(all_labels, num_level_anchors) + label_weights_list = images_to_levels(all_label_weights, + num_level_anchors) + bbox_targets_list = images_to_levels(all_bbox_targets, + num_level_anchors) + bbox_weights_list = images_to_levels(all_bbox_weights, + num_level_anchors) + return (anchors_list, labels_list, label_weights_list, + bbox_targets_list, bbox_weights_list, num_total_pos, + num_total_neg) + + def _get_target_single(self, + flat_anchors, + valid_flags, + num_level_anchors, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + img_meta, + label_channels=1, + unmap_outputs=True): + """Compute regression, classification targets for anchors in a single + image. + + Args: + flat_anchors (Tensor): Multi-level anchors of the image, which are + concatenated into a single tensor of shape (num_anchors ,4) + valid_flags (Tensor): Multi level valid flags of the image, + which are concatenated into a single tensor of + shape (num_anchors,). + num_level_anchors Tensor): Number of anchors of each scale level. + gt_bboxes (Tensor): Ground truth bboxes of the image, + shape (num_gts, 4). + gt_bboxes_ignore (Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). + gt_labels (Tensor): Ground truth labels of each box, + shape (num_gts,). + img_meta (dict): Meta info of the image. + label_channels (int): Channel of label. + unmap_outputs (bool): Whether to map outputs back to the original + set of anchors. + + Returns: + tuple: N is the number of total anchors in the image. + labels (Tensor): Labels of all anchors in the image with shape + (N,). + label_weights (Tensor): Label weights of all anchor in the + image with shape (N,). + bbox_targets (Tensor): BBox targets of all anchors in the + image with shape (N, 4). 
+ bbox_weights (Tensor): BBox weights of all anchors in the + image with shape (N, 4) + pos_inds (Tensor): Indices of positive anchor with shape + (num_pos,). + neg_inds (Tensor): Indices of negative anchor with shape + (num_neg,). + """ + inside_flags = anchor_inside_flags(flat_anchors, valid_flags, + img_meta['img_shape'][:2], + self.train_cfg.allowed_border) + if not inside_flags.any(): + return (None, ) * 7 + # assign gt and sample anchors + anchors = flat_anchors[inside_flags, :] + + num_level_anchors_inside = self.get_num_level_anchors_inside( + num_level_anchors, inside_flags) + assign_result = self.assigner.assign(anchors, num_level_anchors_inside, + gt_bboxes, gt_bboxes_ignore, + gt_labels) + + sampling_result = self.sampler.sample(assign_result, anchors, + gt_bboxes) + + num_valid_anchors = anchors.shape[0] + bbox_targets = torch.zeros_like(anchors) + bbox_weights = torch.zeros_like(anchors) + labels = anchors.new_full((num_valid_anchors, ), + self.num_classes, + dtype=torch.long) + label_weights = anchors.new_zeros(num_valid_anchors, dtype=torch.float) + + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + if len(pos_inds) > 0: + if hasattr(self, 'bbox_coder'): + pos_bbox_targets = self.bbox_coder.encode( + sampling_result.pos_bboxes, sampling_result.pos_gt_bboxes) + else: + # used in VFNetHead + pos_bbox_targets = sampling_result.pos_gt_bboxes + bbox_targets[pos_inds, :] = pos_bbox_targets + bbox_weights[pos_inds, :] = 1.0 + if gt_labels is None: + # Only rpn gives gt_labels as None + # Foreground is the first class since v2.5.0 + labels[pos_inds] = 0 + else: + labels[pos_inds] = gt_labels[ + sampling_result.pos_assigned_gt_inds] + if self.train_cfg.pos_weight <= 0: + label_weights[pos_inds] = 1.0 + else: + label_weights[pos_inds] = self.train_cfg.pos_weight + if len(neg_inds) > 0: + label_weights[neg_inds] = 1.0 + + # map up to original set of anchors + if unmap_outputs: + num_total_anchors = flat_anchors.size(0) + anchors = unmap(anchors, num_total_anchors, inside_flags) + labels = unmap( + labels, num_total_anchors, inside_flags, fill=self.num_classes) + label_weights = unmap(label_weights, num_total_anchors, + inside_flags) + bbox_targets = unmap(bbox_targets, num_total_anchors, inside_flags) + bbox_weights = unmap(bbox_weights, num_total_anchors, inside_flags) + + return (anchors, labels, label_weights, bbox_targets, bbox_weights, + pos_inds, neg_inds) + + def get_num_level_anchors_inside(self, num_level_anchors, inside_flags): + split_inside_flags = torch.split(inside_flags, num_level_anchors) + num_level_anchors_inside = [ + int(flags.sum()) for flags in split_inside_flags + ] + return num_level_anchors_inside diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/autoassign_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/autoassign_head.py new file mode 100644 index 0000000000000000000000000000000000000000..aeb66d3cfc73baa4fbbde8988a55da524eb882d7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/autoassign_head.py @@ -0,0 +1,517 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import bias_init_with_prob, normal_init +from mmcv.runner import force_fp32 + +from mmdet.core import distance2bbox, multi_apply +from mmdet.core.bbox import bbox_overlaps +from mmdet.models import HEADS +from mmdet.models.dense_heads.atss_head import reduce_mean +from mmdet.models.dense_heads.fcos_head import FCOSHead +from 
mmdet.models.dense_heads.paa_head import levels_to_images + +EPS = 1e-12 + + +class CenterPrior(nn.Module): + """Center Weighting module to adjust the category-specific prior + distributions. + + Args: + force_topk (bool): When no point falls into gt_bbox, forcibly + select the k points closest to the center to calculate + the center prior. Defaults to False. + topk (int): The number of points used to calculate the + center prior when no point falls in gt_bbox. Only works when + force_topk is True. Defaults to 9. + num_classes (int): The number of classes in the dataset. Defaults to 80. + strides (tuple[int]): The stride of each input feature map. Defaults + to (8, 16, 32, 64, 128). + """ + + def __init__(self, + force_topk=False, + topk=9, + num_classes=80, + strides=(8, 16, 32, 64, 128)): + super(CenterPrior, self).__init__() + self.mean = nn.Parameter(torch.zeros(num_classes, 2)) + self.sigma = nn.Parameter(torch.ones(num_classes, 2)) + self.strides = strides + self.force_topk = force_topk + self.topk = topk + + def forward(self, anchor_points_list, gt_bboxes, labels, + inside_gt_bbox_mask): + """Get the center prior of each point on the feature map for each + instance. + + Args: + anchor_points_list (list[Tensor]): list of coordinates + of points on the feature map. Each with shape + (num_points, 2). + gt_bboxes (Tensor): The gt_bboxes with shape of + (num_gt, 4). + labels (Tensor): The gt_labels with shape of (num_gt). + inside_gt_bbox_mask (Tensor): Tensor of bool type, + with shape of (num_points, num_gt), each + value is used to mark whether this point falls + within a certain gt. + + Returns: + tuple(Tensor): + + - center_prior_weights(Tensor): Float tensor with shape \ + of (num_points, num_gt). Each value represents \ + the center weighting coefficient. + - inside_gt_bbox_mask (Tensor): Tensor of bool type, \ + with shape of (num_points, num_gt), each \ + value is used to mark whether this point falls \ + within a certain gt or is the topk nearest points for \ + a specific gt_bbox.
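+
+        Example (editor's illustrative sketch; shapes and values are
+        assumed and are not taken from the repository's tests)::
+
+            >>> import torch
+            >>> prior = CenterPrior(num_classes=2, strides=(8, ))
+            >>> points = [torch.rand(16, 2) * 64]  # one FPN level
+            >>> gts = torch.tensor([[0., 0., 32., 32.]])
+            >>> labels = torch.tensor([0])
+            >>> mask = torch.ones(16, 1, dtype=torch.bool)
+            >>> weights, mask = prior(points, gts, labels, mask)
+            >>> weights.shape, mask.shape
+            (torch.Size([16, 1]), torch.Size([16, 1]))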
+ """ + inside_gt_bbox_mask = inside_gt_bbox_mask.clone() + num_gts = len(labels) + num_points = sum([len(item) for item in anchor_points_list]) + if num_gts == 0: + return gt_bboxes.new_zeros(num_points, + num_gts), inside_gt_bbox_mask + center_prior_list = [] + for slvl_points, stride in zip(anchor_points_list, self.strides): + # slvl_points: points from single level in FPN, has shape (h*w, 2) + # single_level_points has shape (h*w, num_gt, 2) + single_level_points = slvl_points[:, None, :].expand( + (slvl_points.size(0), len(gt_bboxes), 2)) + gt_center_x = ((gt_bboxes[:, 0] + gt_bboxes[:, 2]) / 2) + gt_center_y = ((gt_bboxes[:, 1] + gt_bboxes[:, 3]) / 2) + gt_center = torch.stack((gt_center_x, gt_center_y), dim=1) + gt_center = gt_center[None] + # instance_center has shape (1, num_gt, 2) + instance_center = self.mean[labels][None] + # instance_sigma has shape (1, num_gt, 2) + instance_sigma = self.sigma[labels][None] + # distance has shape (num_points, num_gt, 2) + distance = (((single_level_points - gt_center) / float(stride) - + instance_center)**2) + center_prior = torch.exp(-distance / + (2 * instance_sigma**2)).prod(dim=-1) + center_prior_list.append(center_prior) + center_prior_weights = torch.cat(center_prior_list, dim=0) + + if self.force_topk: + gt_inds_no_points_inside = torch.nonzero( + inside_gt_bbox_mask.sum(0) == 0).reshape(-1) + if gt_inds_no_points_inside.numel(): + topk_center_index = \ + center_prior_weights[:, gt_inds_no_points_inside].topk( + self.topk, + dim=0)[1] + temp_mask = inside_gt_bbox_mask[:, gt_inds_no_points_inside] + inside_gt_bbox_mask[:, gt_inds_no_points_inside] = \ + torch.scatter(temp_mask, + dim=0, + index=topk_center_index, + src=torch.ones_like( + topk_center_index, + dtype=torch.bool)) + + center_prior_weights[~inside_gt_bbox_mask] = 0 + return center_prior_weights, inside_gt_bbox_mask + + +@HEADS.register_module() +class AutoAssignHead(FCOSHead): + """AutoAssignHead head used in `AutoAssign. + + `_. + + Args: + force_topk (bool): Used in center prior initialization to + handle extremely small gt. Default is False. + topk (int): The number of points used to calculate the + center prior when no point falls in gt_bbox. Only work when + force_topk if True. Defaults to 9. + pos_loss_weight (float): The loss weight of positive loss + and with default value 0.25. + neg_loss_weight (float): The loss weight of negative loss + and with default value 0.75. + center_loss_weight (float): The loss weight of center prior + loss and with default value 0.75. + """ + + def __init__(self, + *args, + force_topk=False, + topk=9, + pos_loss_weight=0.25, + neg_loss_weight=0.75, + center_loss_weight=0.75, + **kwargs): + super().__init__(*args, conv_bias=True, **kwargs) + self.center_prior = CenterPrior( + force_topk=force_topk, + topk=topk, + num_classes=self.num_classes, + strides=self.strides) + self.pos_loss_weight = pos_loss_weight + self.neg_loss_weight = neg_loss_weight + self.center_loss_weight = center_loss_weight + + def init_weights(self): + """Initialize weights of the head. 
+ + In particular, we have special initialization for the classification conv's and + regression conv's bias. + """ + + super(AutoAssignHead, self).init_weights() + # sigmoid(bias_cls) starts near the assumed 0.02 foreground prior + bias_cls = bias_init_with_prob(0.02) + normal_init(self.conv_cls, std=0.01, bias=bias_cls) + normal_init(self.conv_reg, std=0.01, bias=4.0) + + def _get_points_single(self, + featmap_size, + stride, + dtype, + device, + flatten=False): + """Almost the same as the implementation in fcos, we remove half stride + offset to align with the original implementation.""" + + y, x = super(FCOSHead, + self)._get_points_single(featmap_size, stride, dtype, + device) + points = torch.stack((x.reshape(-1) * stride, y.reshape(-1) * stride), + dim=-1) + return points + + def forward_single(self, x, scale, stride): + """Forward features of a single scale level. + + Args: + x (Tensor): FPN feature maps of the specified stride. + scale (:obj: `mmcv.cnn.Scale`): Learnable scale module to resize + the bbox prediction. + stride (int): The corresponding stride for feature maps, only + used to normalize the bbox prediction when self.norm_on_bbox + is True. + + Returns: + tuple: scores for each class, bbox predictions and centerness \ + predictions of input feature maps. + """ + cls_score, bbox_pred, cls_feat, reg_feat = super( + FCOSHead, self).forward_single(x) + centerness = self.conv_centerness(reg_feat) + # scale the bbox_pred of different level + # float to avoid overflow when enabling FP16 + bbox_pred = scale(bbox_pred).float() + bbox_pred = F.relu(bbox_pred) + bbox_pred *= stride + return cls_score, bbox_pred, centerness + + def get_pos_loss_single(self, cls_score, objectness, reg_loss, gt_labels, + center_prior_weights): + """Calculate the positive loss of all points in gt_bboxes. + + Args: + cls_score (Tensor): All category scores for each point on + the feature map. The shape is (num_points, num_class). + objectness (Tensor): Foreground probability of all points, + has shape (num_points, 1). + reg_loss (Tensor): The regression loss of each gt_bbox and each + prediction box, has shape of (num_points, num_gt). + gt_labels (Tensor): The zero-based gt_labels of all gt + with shape of (num_gt,). + center_prior_weights (Tensor): Float tensor with shape + of (num_points, num_gt). Each value represents + the center weighting coefficient. + + Returns: + tuple[Tensor]: + + - pos_loss (Tensor): The positive loss of all points + in the gt_bboxes. + """ + # p_loc: localization confidence + p_loc = torch.exp(-reg_loss) + # p_cls: classification confidence + p_cls = (cls_score * objectness)[:, gt_labels] + # p_pos: joint confidence indicator + p_pos = p_cls * p_loc + + # 3 is a hyper-parameter to control the contributions of high and + # low confidence locations towards positive losses. + confidence_weight = torch.exp(p_pos * 3) + p_pos_weight = (confidence_weight * center_prior_weights) / ( + (confidence_weight * center_prior_weights).sum( + 0, keepdim=True)).clamp(min=EPS) + reweighted_p_pos = (p_pos * p_pos_weight).sum(0) + pos_loss = F.binary_cross_entropy( + reweighted_p_pos, + torch.ones_like(reweighted_p_pos), + reduction='none') + pos_loss = pos_loss.sum() * self.pos_loss_weight + return pos_loss, + + def get_neg_loss_single(self, cls_score, objectness, gt_labels, ious, + inside_gt_bbox_mask): + """Calculate the negative loss of all points in the feature map. + + Args: + cls_score (Tensor): All category scores for each point on + the feature map. The shape is (num_points, num_class).
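+                Scores are expected to be sigmoid-activated already:
+                ``loss()`` applies ``.sigmoid()`` to both ``cls_score``
+                and ``objectness`` before calling this method (editor's
+                note, grounded in ``loss()`` below).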
+ objectness (Tensor): Foreground probability of all points + with shape (num_points, 1). + gt_labels (Tensor): The zero-based label of all gt with shape of + (num_gt,). + ious (Tensor): Float tensor with shape of (num_points, num_gt). + Each value represents the IoU of pred_bbox and gt_bboxes. + inside_gt_bbox_mask (Tensor): Tensor of bool type, + with shape of (num_points, num_gt), each + value is used to mark whether this point falls + within a certain gt. + + Returns: + tuple[Tensor]: + + - neg_loss (Tensor): The negative loss of all points + in the feature map. + """ + num_gts = len(gt_labels) + joint_conf = (cls_score * objectness) + p_neg_weight = torch.ones_like(joint_conf) + if num_gts > 0: + # the order of dimensions would affect the value of + # p_neg_weight; we strictly follow the original + # implementation. + inside_gt_bbox_mask = inside_gt_bbox_mask.permute(1, 0) + ious = ious.permute(1, 0) + + foreground_idxs = torch.nonzero(inside_gt_bbox_mask, as_tuple=True) + temp_weight = (1 / (1 - ious[foreground_idxs]).clamp_(EPS)) + + def normalize(x): + return (x - x.min() + EPS) / (x.max() - x.min() + EPS) + + for instance_idx in range(num_gts): + idxs = foreground_idxs[0] == instance_idx + if idxs.any(): + temp_weight[idxs] = normalize(temp_weight[idxs]) + + p_neg_weight[foreground_idxs[1], + gt_labels[foreground_idxs[0]]] = 1 - temp_weight + + logits = (joint_conf * p_neg_weight) + neg_loss = ( + logits**2 * F.binary_cross_entropy( + logits, torch.zeros_like(logits), reduction='none')) + neg_loss = neg_loss.sum() * self.neg_loss_weight + return neg_loss, + + @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'objectnesses')) + def loss(self, + cls_scores, + bbox_preds, + objectnesses, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute loss of the head. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level, + each is a 4D-tensor, the channel number is + num_points * num_classes. + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level, each is a 4D-tensor, the channel number is + num_points * 4. + objectnesses (list[Tensor]): objectness for each scale level, each + is a 4D-tensor, the channel number is num_points * 1. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box. + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components.
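+
+        Note (editor's summary of the computation below): for each gt,
+        ``get_pos_loss_single`` re-weights the joint confidences
+        ``p_pos = p_cls * p_loc`` of its candidate points with
+        ``w_i ~ exp(3 * p_pos_i) * center_prior_i`` (normalized to sum
+        to 1 over the points) and applies a BCE loss that pushes the
+        weighted sum ``sum_i(w_i * p_pos_i)`` towards 1.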
+ """ + + assert len(cls_scores) == len(bbox_preds) == len(objectnesses) + all_num_gt = sum([len(item) for item in gt_bboxes]) + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + all_level_points = self.get_points(featmap_sizes, bbox_preds[0].dtype, + bbox_preds[0].device) + inside_gt_bbox_mask_list, bbox_targets_list = self.get_targets( + all_level_points, gt_bboxes) + + center_prior_weight_list = [] + temp_inside_gt_bbox_mask_list = [] + for gt_bboxe, gt_label, inside_gt_bbox_mask in zip( + gt_bboxes, gt_labels, inside_gt_bbox_mask_list): + center_prior_weight, inside_gt_bbox_mask = \ + self.center_prior(all_level_points, gt_bboxe, gt_label, + inside_gt_bbox_mask) + center_prior_weight_list.append(center_prior_weight) + temp_inside_gt_bbox_mask_list.append(inside_gt_bbox_mask) + inside_gt_bbox_mask_list = temp_inside_gt_bbox_mask_list + + mlvl_points = torch.cat(all_level_points, dim=0) + bbox_preds = levels_to_images(bbox_preds) + cls_scores = levels_to_images(cls_scores) + objectnesses = levels_to_images(objectnesses) + + reg_loss_list = [] + ious_list = [] + num_points = len(mlvl_points) + + for bbox_pred, gt_bboxe, inside_gt_bbox_mask in zip( + bbox_preds, bbox_targets_list, inside_gt_bbox_mask_list): + temp_num_gt = gt_bboxe.size(1) + expand_mlvl_points = mlvl_points[:, None, :].expand( + num_points, temp_num_gt, 2).reshape(-1, 2) + gt_bboxe = gt_bboxe.reshape(-1, 4) + expand_bbox_pred = bbox_pred[:, None, :].expand( + num_points, temp_num_gt, 4).reshape(-1, 4) + decoded_bbox_preds = distance2bbox(expand_mlvl_points, + expand_bbox_pred) + decoded_target_preds = distance2bbox(expand_mlvl_points, gt_bboxe) + with torch.no_grad(): + ious = bbox_overlaps( + decoded_bbox_preds, decoded_target_preds, is_aligned=True) + ious = ious.reshape(num_points, temp_num_gt) + if temp_num_gt: + ious = ious.max( + dim=-1, keepdim=True).values.repeat(1, temp_num_gt) + else: + ious = ious.new_zeros(num_points, temp_num_gt) + ious[~inside_gt_bbox_mask] = 0 + ious_list.append(ious) + loss_bbox = self.loss_bbox( + decoded_bbox_preds, + decoded_target_preds, + weight=None, + reduction_override='none') + reg_loss_list.append(loss_bbox.reshape(num_points, temp_num_gt)) + + cls_scores = [item.sigmoid() for item in cls_scores] + objectnesses = [item.sigmoid() for item in objectnesses] + pos_loss_list, = multi_apply(self.get_pos_loss_single, cls_scores, + objectnesses, reg_loss_list, gt_labels, + center_prior_weight_list) + pos_avg_factor = reduce_mean( + bbox_pred.new_tensor(all_num_gt)).clamp_(min=1) + pos_loss = sum(pos_loss_list) / pos_avg_factor + + neg_loss_list, = multi_apply(self.get_neg_loss_single, cls_scores, + objectnesses, gt_labels, ious_list, + inside_gt_bbox_mask_list) + neg_avg_factor = sum(item.data.sum() + for item in center_prior_weight_list) + neg_avg_factor = reduce_mean(neg_avg_factor).clamp_(min=1) + neg_loss = sum(neg_loss_list) / neg_avg_factor + + center_loss = [] + for i in range(len(img_metas)): + + if inside_gt_bbox_mask_list[i].any(): + center_loss.append( + len(gt_bboxes[i]) / + center_prior_weight_list[i].sum().clamp_(min=EPS)) + # when width or height of gt_bbox is smaller than stride of p3 + else: + center_loss.append(center_prior_weight_list[i].sum() * 0) + + center_loss = torch.stack(center_loss).mean() * self.center_loss_weight + + # avoid dead lock in DDP + if all_num_gt == 0: + pos_loss = bbox_preds[0].sum() * 0 + dummy_center_prior_loss = self.center_prior.mean.sum( + ) * 0 + self.center_prior.sigma.sum() * 0 + center_loss = objectnesses[0].sum() * 0 + 
dummy_center_prior_loss + + loss = dict( + loss_pos=pos_loss, loss_neg=neg_loss, loss_center=center_loss) + + return loss + + def get_targets(self, points, gt_bboxes_list): + """Compute regression targets and each point inside or outside gt_bbox + in multiple images. + + Args: + points (list[Tensor]): Points of all fpn level, each has shape + (num_points, 2). + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image, + each has shape (num_gt, 4). + + Returns: + tuple(list[Tensor]): + + - inside_gt_bbox_mask_list (list[Tensor]): Each + Tensor is with bool type and shape of + (num_points, num_gt), each value + is used to mark whether this point falls + within a certain gt. + - concat_lvl_bbox_targets (list[Tensor]): BBox + targets of each level. Each tensor has shape + (num_points, num_gt, 4). + """ + + concat_points = torch.cat(points, dim=0) + # the number of points per img, per lvl + num_points = [center.size(0) for center in points] + inside_gt_bbox_mask_list, bbox_targets_list = multi_apply( + self._get_target_single, gt_bboxes_list, points=concat_points) + bbox_targets_list = [ + list(bbox_targets.split(num_points, 0)) + for bbox_targets in bbox_targets_list + ] + concat_lvl_bbox_targets = [ + torch.cat(item, dim=0) for item in bbox_targets_list + ] + return inside_gt_bbox_mask_list, concat_lvl_bbox_targets + + def _get_target_single(self, gt_bboxes, points): + """Compute regression targets and each point inside or outside gt_bbox + for a single image. + + Args: + gt_bboxes (Tensor): gt_bbox of single image, has shape + (num_gt, 4). + points (Tensor): Points of all fpn level, has shape + (num_points, 2). + + Returns: + tuple[Tensor]: Containing the following Tensors: + + - inside_gt_bbox_mask (Tensor): Bool tensor with shape + (num_points, num_gt), each value is used to mark + whether this point falls within a certain gt. + - bbox_targets (Tensor): BBox targets of each points with + each gt_bboxes, has shape (num_points, num_gt, 4). 
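+
+        Example (editor's sketch with toy values; assumes ``self`` is any
+        constructed head, since the method only reads its arguments)::
+
+            >>> import torch
+            >>> points = torch.tensor([[5., 5.]])
+            >>> gt_bboxes = torch.tensor([[0., 0., 10., 10.]])
+            >>> mask, targets = self._get_target_single(gt_bboxes, points)
+            >>> mask  # the single point lies inside the single box
+            tensor([[True]])
+            >>> targets  # (left, top, right, bottom) distances to the edges
+            tensor([[[5., 5., 5., 5.]]])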
+ """ + num_points = points.size(0) + num_gts = gt_bboxes.size(0) + gt_bboxes = gt_bboxes[None].expand(num_points, num_gts, 4) + xs, ys = points[:, 0], points[:, 1] + xs = xs[:, None] + ys = ys[:, None] + left = xs - gt_bboxes[..., 0] + right = gt_bboxes[..., 2] - xs + top = ys - gt_bboxes[..., 1] + bottom = gt_bboxes[..., 3] - ys + bbox_targets = torch.stack((left, top, right, bottom), -1) + if num_gts: + inside_gt_bbox_mask = bbox_targets.min(-1)[0] > 0 + else: + inside_gt_bbox_mask = bbox_targets.new_zeros((num_points, num_gts), + dtype=torch.bool) + + return inside_gt_bbox_mask, bbox_targets diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/base_dense_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/base_dense_head.py new file mode 100644 index 0000000000000000000000000000000000000000..8b1a1417a142e8e0f04b3a154b5c7ae941f8d83b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/base_dense_head.py @@ -0,0 +1,78 @@ +from abc import ABCMeta, abstractmethod + +from mmcv.runner import BaseModule + + +class BaseDenseHead(BaseModule, metaclass=ABCMeta): + """Base class for DenseHeads.""" + + def __init__(self, init_cfg=None): + super(BaseDenseHead, self).__init__(init_cfg) + + @abstractmethod + def loss(self, **kwargs): + """Compute losses of the head.""" + pass + + @abstractmethod + def get_bboxes(self, **kwargs): + """Transform network output for a batch into bbox predictions.""" + pass + + def forward_train(self, + x, + img_metas, + gt_bboxes, + gt_labels=None, + gt_bboxes_ignore=None, + proposal_cfg=None, + **kwargs): + """ + Args: + x (list[Tensor]): Features from FPN. + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes (Tensor): Ground truth bboxes of the image, + shape (num_gts, 4). + gt_labels (Tensor): Ground truth labels of each box, + shape (num_gts,). + gt_bboxes_ignore (Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). + proposal_cfg (mmcv.Config): Test / postprocessing configuration, + if None, test_cfg would be used + + Returns: + tuple: + losses: (dict[str, Tensor]): A dictionary of loss components. + proposal_list (list[Tensor]): Proposals of each image. + """ + outs = self(x) + if gt_labels is None: + loss_inputs = outs + (gt_bboxes, img_metas) + else: + loss_inputs = outs + (gt_bboxes, gt_labels, img_metas) + losses = self.loss(*loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore) + if proposal_cfg is None: + return losses + else: + proposal_list = self.get_bboxes(*outs, img_metas, cfg=proposal_cfg) + return losses, proposal_list + + def simple_test(self, feats, img_metas, rescale=False): + """Test function without test-time augmentation. + + Args: + feats (tuple[torch.Tensor]): Multi-level features from the + upstream network, each is a 4D-tensor. + img_metas (list[dict]): List of image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is ``bboxes`` with shape (n, 5), + where 5 represent (tl_x, tl_y, br_x, br_y, score). 
+ The shape of the second tensor in the tuple is ``labels`` + with shape (n,) + """ + return self.simple_test_bboxes(feats, img_metas, rescale=rescale) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/cascade_rpn_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/cascade_rpn_head.py new file mode 100644 index 0000000000000000000000000000000000000000..1e525c968d8566c89785d60691ef60f4c02a11e1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/cascade_rpn_head.py @@ -0,0 +1,786 @@ +from __future__ import division +import copy +import warnings + +import torch +import torch.nn as nn +from mmcv import ConfigDict +from mmcv.ops import DeformConv2d, batched_nms +from mmcv.runner import BaseModule, ModuleList + +from mmdet.core import (RegionAssigner, build_assigner, build_sampler, + images_to_levels, multi_apply) +from ..builder import HEADS, build_head +from .base_dense_head import BaseDenseHead +from .rpn_head import RPNHead + + +class AdaptiveConv(BaseModule): + """AdaptiveConv used to adapt the sampling location with the anchors. + + Args: + in_channels (int): Number of channels in the input image + out_channels (int): Number of channels produced by the convolution + kernel_size (int or tuple): Size of the conv kernel. Default: 3 + stride (int or tuple, optional): Stride of the convolution. Default: 1 + padding (int or tuple, optional): Zero-padding added to both sides of + the input. Default: 1 + dilation (int or tuple, optional): Spacing between kernel elements. + Default: 3 + groups (int, optional): Number of blocked connections from input + channels to output channels. Default: 1 + bias (bool, optional): If set True, adds a learnable bias to the + output. Default: False. + type (str, optional): Type of adaptive conv, can be either 'offset' + (arbitrary anchors) or 'dilation' (uniform anchor). + Default: 'dilation'. + init_cfg (dict or list[dict], optional): Initialization config dict. + """ + + def __init__(self, + in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1, + dilation=3, + groups=1, + bias=False, + type='dilation', + init_cfg=dict( + type='Normal', std=0.01, override=dict(name='conv'))): + super(AdaptiveConv, self).__init__(init_cfg) + assert type in ['offset', 'dilation'] + self.adapt_type = type + + assert kernel_size == 3, 'Adaptive conv only supports kernels 3' + if self.adapt_type == 'offset': + assert stride == 1 and padding == 1 and groups == 1, \ + 'Adaptive conv offset mode only supports padding: {1}, ' \ + f'stride: {1}, groups: {1}' + self.conv = DeformConv2d( + in_channels, + out_channels, + kernel_size, + padding=padding, + stride=stride, + groups=groups, + bias=bias) + else: + self.conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size, + padding=dilation, + dilation=dilation) + + def forward(self, x, offset): + """Forward function.""" + if self.adapt_type == 'offset': + N, _, H, W = x.shape + assert offset is not None + assert H * W == offset.shape[1] + # reshape [N, NA, 18] to (N, 18, H, W) + offset = offset.permute(0, 2, 1).reshape(N, -1, H, W) + offset = offset.contiguous() + x = self.conv(x, offset) + else: + assert offset is None + x = self.conv(x) + return x + + +@HEADS.register_module() +class StageCascadeRPNHead(RPNHead): + """Stage of CascadeRPNHead. + + Args: + in_channels (int): Number of channels in the input feature map. + anchor_generator (dict): anchor generator config. + adapt_cfg (dict): adaptation config. 
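+            For illustration (editor's example, mirroring ``AdaptiveConv``
+            above): ``dict(type='dilation', dilation=3)`` keeps a plain
+            3x3 conv with dilation 3, while ``dict(type='offset')``
+            switches to a ``DeformConv2d`` whose offsets are computed from
+            the anchors by ``anchor_offset()``.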
+ bridged_feature (bool, optional): whether to update the rpn feature. + Default: False. + with_cls (bool, optional): whether to use the classification branch. + Default: True. + sampling (bool, optional): whether to use sampling. Default: True. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + in_channels, + anchor_generator=dict( + type='AnchorGenerator', + scales=[8], + ratios=[1.0], + strides=[4, 8, 16, 32, 64]), + adapt_cfg=dict(type='dilation', dilation=3), + bridged_feature=False, + with_cls=True, + sampling=True, + init_cfg=None, + **kwargs): + self.with_cls = with_cls + self.anchor_strides = anchor_generator['strides'] + self.anchor_scales = anchor_generator['scales'] + self.bridged_feature = bridged_feature + self.adapt_cfg = adapt_cfg + super(StageCascadeRPNHead, self).__init__( + in_channels, + anchor_generator=anchor_generator, + init_cfg=init_cfg, + **kwargs) + + # override sampling and sampler + self.sampling = sampling + if self.train_cfg: + self.assigner = build_assigner(self.train_cfg.assigner) + # use PseudoSampler when sampling is False + if self.sampling and hasattr(self.train_cfg, 'sampler'): + sampler_cfg = self.train_cfg.sampler + else: + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + + if init_cfg is None: + self.init_cfg = dict( + type='Normal', std=0.01, override=[dict(name='rpn_reg')]) + if self.with_cls: + self.init_cfg['override'].append(dict(name='rpn_cls')) + + def _init_layers(self): + """Init layers of a CascadeRPN stage.""" + self.rpn_conv = AdaptiveConv(self.in_channels, self.feat_channels, + **self.adapt_cfg) + if self.with_cls: + self.rpn_cls = nn.Conv2d(self.feat_channels, + self.num_anchors * self.cls_out_channels, + 1) + self.rpn_reg = nn.Conv2d(self.feat_channels, self.num_anchors * 4, 1) + self.relu = nn.ReLU(inplace=True) + + def forward_single(self, x, offset): + """Forward function of single scale.""" + bridged_x = x + x = self.relu(self.rpn_conv(x, offset)) + if self.bridged_feature: + bridged_x = x # update feature + cls_score = self.rpn_cls(x) if self.with_cls else None + bbox_pred = self.rpn_reg(x) + return bridged_x, cls_score, bbox_pred + + def forward(self, feats, offset_list=None): + """Forward function.""" + if offset_list is None: + offset_list = [None for _ in range(len(feats))] + return multi_apply(self.forward_single, feats, offset_list) + + def _region_targets_single(self, + anchors, + valid_flags, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + img_meta, + featmap_sizes, + label_channels=1): + """Get anchor targets based on region for single level.""" + assign_result = self.assigner.assign( + anchors, + valid_flags, + gt_bboxes, + img_meta, + featmap_sizes, + self.anchor_scales[0], + self.anchor_strides, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=None, + allowed_border=self.train_cfg.allowed_border) + flat_anchors = torch.cat(anchors) + sampling_result = self.sampler.sample(assign_result, flat_anchors, + gt_bboxes) + + num_anchors = flat_anchors.shape[0] + bbox_targets = torch.zeros_like(flat_anchors) + bbox_weights = torch.zeros_like(flat_anchors) + labels = flat_anchors.new_zeros(num_anchors, dtype=torch.long) + label_weights = flat_anchors.new_zeros(num_anchors, dtype=torch.float) + + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + if len(pos_inds) > 0: + if not self.reg_decoded_bbox: + pos_bbox_targets = self.bbox_coder.encode( + sampling_result.pos_bboxes, sampling_result.pos_gt_bboxes)
+ else: + pos_bbox_targets = sampling_result.pos_gt_bboxes + bbox_targets[pos_inds, :] = pos_bbox_targets + bbox_weights[pos_inds, :] = 1.0 + if gt_labels is None: + labels[pos_inds] = 1 + else: + labels[pos_inds] = gt_labels[ + sampling_result.pos_assigned_gt_inds] + if self.train_cfg.pos_weight <= 0: + label_weights[pos_inds] = 1.0 + else: + label_weights[pos_inds] = self.train_cfg.pos_weight + if len(neg_inds) > 0: + label_weights[neg_inds] = 1.0 + + return (labels, label_weights, bbox_targets, bbox_weights, pos_inds, + neg_inds) + + def region_targets(self, + anchor_list, + valid_flag_list, + gt_bboxes_list, + img_metas, + featmap_sizes, + gt_bboxes_ignore_list=None, + gt_labels_list=None, + label_channels=1, + unmap_outputs=True): + """See :func:`StageCascadeRPNHead.get_targets`.""" + num_imgs = len(img_metas) + assert len(anchor_list) == len(valid_flag_list) == num_imgs + + # anchor number of multi levels + num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]] + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + if gt_labels_list is None: + gt_labels_list = [None for _ in range(num_imgs)] + (all_labels, all_label_weights, all_bbox_targets, all_bbox_weights, + pos_inds_list, neg_inds_list) = multi_apply( + self._region_targets_single, + anchor_list, + valid_flag_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + img_metas, + featmap_sizes=featmap_sizes, + label_channels=label_channels) + # no valid anchors + if any([labels is None for labels in all_labels]): + return None + # sampled anchors of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + # split targets to a list w.r.t. multiple levels + labels_list = images_to_levels(all_labels, num_level_anchors) + label_weights_list = images_to_levels(all_label_weights, + num_level_anchors) + bbox_targets_list = images_to_levels(all_bbox_targets, + num_level_anchors) + bbox_weights_list = images_to_levels(all_bbox_weights, + num_level_anchors) + return (labels_list, label_weights_list, bbox_targets_list, + bbox_weights_list, num_total_pos, num_total_neg) + + def get_targets(self, + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + featmap_sizes, + gt_bboxes_ignore=None, + label_channels=1): + """Compute regression and classification targets for anchors. + + Args: + anchor_list (list[list]): Multi level anchors of each image. + valid_flag_list (list[list]): Multi level valid flags of each + image. + gt_bboxes (list[Tensor]): Ground truth bboxes of each image. + img_metas (list[dict]): Meta info of each image. + featmap_sizes (list[Tensor]): Feature map size of each level. + gt_bboxes_ignore (list[Tensor]): Ignore bboxes of each image. + label_channels (int): Channel of label.
+ + Returns: + cls_reg_targets (tuple) + """ + if isinstance(self.assigner, RegionAssigner): + cls_reg_targets = self.region_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + featmap_sizes, + gt_bboxes_ignore_list=gt_bboxes_ignore, + label_channels=label_channels) + else: + cls_reg_targets = super(StageCascadeRPNHead, self).get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + label_channels=label_channels) + return cls_reg_targets + + def anchor_offset(self, anchor_list, anchor_strides, featmap_sizes): + """Get offset for deformable conv based on anchor shape. + NOTE: currently supports deformable kernel_size=3 and dilation=1 + + Args: + anchor_list (list[list[Tensor]]): [NI, NLVL, NA, 4] list of + multi-level anchors. + anchor_strides (list[int]): anchor stride of each level. + featmap_sizes (list[tuple]): feature map size (h, w) of each level. + + Returns: + offset_list (list[Tensor]): [NLVL, NA, 2, 18]: offset of DeformConv + kernel. + """ + + def _shape_offset(anchors, stride, ks=3, dilation=1): + # currently support kernel_size=3 and dilation=1 + assert ks == 3 and dilation == 1 + pad = (ks - 1) // 2 + idx = torch.arange(-pad, pad + 1, dtype=dtype, device=device) + yy, xx = torch.meshgrid(idx, idx) # return order matters + xx = xx.reshape(-1) + yy = yy.reshape(-1) + w = (anchors[:, 2] - anchors[:, 0]) / stride + h = (anchors[:, 3] - anchors[:, 1]) / stride + w = w / (ks - 1) - dilation + h = h / (ks - 1) - dilation + offset_x = w[:, None] * xx # (NA, ks**2) + offset_y = h[:, None] * yy # (NA, ks**2) + return offset_x, offset_y + + def _ctr_offset(anchors, stride, featmap_size): + feat_h, feat_w = featmap_size + assert len(anchors) == feat_h * feat_w + + x = (anchors[:, 0] + anchors[:, 2]) * 0.5 + y = (anchors[:, 1] + anchors[:, 3]) * 0.5 + # compute centers on feature map + x = x / stride + y = y / stride + # compute predefine centers + xx = torch.arange(0, feat_w, device=anchors.device) + yy = torch.arange(0, feat_h, device=anchors.device) + yy, xx = torch.meshgrid(yy, xx) + xx = xx.reshape(-1).type_as(x) + yy = yy.reshape(-1).type_as(y) + + offset_x = x - xx # (NA, ) + offset_y = y - yy # (NA, ) + return offset_x, offset_y + + num_imgs = len(anchor_list) + num_lvls = len(anchor_list[0]) + dtype = anchor_list[0][0].dtype + device = anchor_list[0][0].device + num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]] + + offset_list = [] + for i in range(num_imgs): + mlvl_offset = [] + for lvl in range(num_lvls): + c_offset_x, c_offset_y = _ctr_offset(anchor_list[i][lvl], + anchor_strides[lvl], + featmap_sizes[lvl]) + s_offset_x, s_offset_y = _shape_offset(anchor_list[i][lvl], + anchor_strides[lvl]) + + # offset = ctr_offset + shape_offset + offset_x = s_offset_x + c_offset_x[:, None] + offset_y = s_offset_y + c_offset_y[:, None] + + # offset order (y0, x0, y1, x1, ..., y8, x8) + offset = torch.stack([offset_y, offset_x], dim=-1) + offset = offset.reshape(offset.size(0), -1) # [NA, 2*ks**2] + mlvl_offset.append(offset) + offset_list.append(torch.cat(mlvl_offset)) # [totalNA, 2*ks**2] + offset_list = images_to_levels(offset_list, num_level_anchors) + return offset_list + + def loss_single(self, cls_score, bbox_pred, anchors, labels, label_weights, + bbox_targets, bbox_weights, num_total_samples): + """Loss function on single scale.""" + # classification loss + if self.with_cls: + labels = labels.reshape(-1) + label_weights = label_weights.reshape(-1) + cls_score = cls_score.permute(0, 2, 3, + 1).reshape(-1, self.cls_out_channels) + loss_cls = self.loss_cls(
+ cls_score, labels, label_weights, avg_factor=num_total_samples) + # regression loss + bbox_targets = bbox_targets.reshape(-1, 4) + bbox_weights = bbox_weights.reshape(-1, 4) + bbox_pred = bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4) + if self.reg_decoded_bbox: + # When the regression loss (e.g. `IouLoss`, `GIouLoss`) + # is applied directly on the decoded bounding boxes, it + # decodes the already encoded coordinates to absolute format. + anchors = anchors.reshape(-1, 4) + bbox_pred = self.bbox_coder.decode(anchors, bbox_pred) + loss_reg = self.loss_bbox( + bbox_pred, + bbox_targets, + bbox_weights, + avg_factor=num_total_samples) + if self.with_cls: + return loss_cls, loss_reg + return None, loss_reg + + def loss(self, + anchor_list, + valid_flag_list, + cls_scores, + bbox_preds, + gt_bboxes, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + anchor_list (list[list]): Multi level anchors of each image. + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W) + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. Default: None + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + featmap_sizes = [featmap.size()[-2:] for featmap in bbox_preds] + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + featmap_sizes, + gt_bboxes_ignore=gt_bboxes_ignore, + label_channels=label_channels) + if cls_reg_targets is None: + return None + (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, + num_total_pos, num_total_neg) = cls_reg_targets + if self.sampling: + num_total_samples = num_total_pos + num_total_neg + else: + # 200 is hard-coded average factor, + # which follows guided anchoring. 
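+            # editor's note: with sampling disabled every anchor keeps a
+            # non-zero weight, so the losses are averaged over
+            # (total number of anchors) / 200 rather than over the count
+            # of sampled positives and negatives.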
+ num_total_samples = sum([label.numel() + for label in labels_list]) / 200.0 + + # change per image, per level anchor_list to per_level, per_image + mlvl_anchor_list = list(zip(*anchor_list)) + # concat mlvl_anchor_list + mlvl_anchor_list = [ + torch.cat(anchors, dim=0) for anchors in mlvl_anchor_list + ] + + losses = multi_apply( + self.loss_single, + cls_scores, + bbox_preds, + mlvl_anchor_list, + labels_list, + label_weights_list, + bbox_targets_list, + bbox_weights_list, + num_total_samples=num_total_samples) + if self.with_cls: + return dict(loss_rpn_cls=losses[0], loss_rpn_reg=losses[1]) + return dict(loss_rpn_reg=losses[1]) + + def get_bboxes(self, + anchor_list, + cls_scores, + bbox_preds, + img_metas, + cfg, + rescale=False): + """Get proposal predict.""" + assert len(cls_scores) == len(bbox_preds) + num_levels = len(cls_scores) + + result_list = [] + for img_id in range(len(img_metas)): + cls_score_list = [ + cls_scores[i][img_id].detach() for i in range(num_levels) + ] + bbox_pred_list = [ + bbox_preds[i][img_id].detach() for i in range(num_levels) + ] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + proposals = self._get_bboxes_single(cls_score_list, bbox_pred_list, + anchor_list[img_id], img_shape, + scale_factor, cfg, rescale) + result_list.append(proposals) + return result_list + + def refine_bboxes(self, anchor_list, bbox_preds, img_metas): + """Refine bboxes through stages.""" + num_levels = len(bbox_preds) + new_anchor_list = [] + for img_id in range(len(img_metas)): + mlvl_anchors = [] + for i in range(num_levels): + bbox_pred = bbox_preds[i][img_id].detach() + bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4) + img_shape = img_metas[img_id]['img_shape'] + bboxes = self.bbox_coder.decode(anchor_list[img_id][i], + bbox_pred, img_shape) + mlvl_anchors.append(bboxes) + new_anchor_list.append(mlvl_anchors) + return new_anchor_list + + # TODO: temporary plan + def _get_bboxes_single(self, + cls_scores, + bbox_preds, + mlvl_anchors, + img_shape, + scale_factor, + cfg, + rescale=False): + """Transform outputs for a single batch item into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (num_anchors * num_classes, H, W). + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (num_anchors * 4, H, W). + mlvl_anchors (list[Tensor]): Box reference for each scale level + with shape (num_total_anchors, 4). + img_shape (tuple[int]): Shape of the input image, + (height, width, 3). + scale_factor (ndarray): Scale factor of the image arange as + (w_scale, h_scale, w_scale, h_scale). + cfg (mmcv.Config): Test / postprocessing configuration, + if None, test_cfg would be used. + rescale (bool): If True, return boxes in original image space. + + Returns: + Tensor: Labeled boxes have the shape of (n,5), where the + first 4 columns are bounding box positions + (tl_x, tl_y, br_x, br_y) and the 5-th column is a score + between 0 and 1. 
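+
+        Note (editor's illustration): a typical ``cfg`` carries the keys
+        used below; the values here are assumptions, not taken from the
+        repository's configs::
+
+            cfg = dict(
+                nms_pre=2000,
+                min_bbox_size=0,
+                nms=dict(type='nms', iou_threshold=0.8),
+                max_per_img=1000)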
+ """ + cfg = self.test_cfg if cfg is None else cfg + cfg = copy.deepcopy(cfg) + # bboxes from different level should be independent during NMS, + # level_ids are used as labels for batched NMS to separate them + level_ids = [] + mlvl_scores = [] + mlvl_bbox_preds = [] + mlvl_valid_anchors = [] + for idx in range(len(cls_scores)): + rpn_cls_score = cls_scores[idx] + rpn_bbox_pred = bbox_preds[idx] + assert rpn_cls_score.size()[-2:] == rpn_bbox_pred.size()[-2:] + rpn_cls_score = rpn_cls_score.permute(1, 2, 0) + if self.use_sigmoid_cls: + rpn_cls_score = rpn_cls_score.reshape(-1) + scores = rpn_cls_score.sigmoid() + else: + rpn_cls_score = rpn_cls_score.reshape(-1, 2) + # We set FG labels to [0, num_class-1] and BG label to + # num_class in RPN head since mmdet v2.5, which is unified to + # be consistent with other head since mmdet v2.0. In mmdet v2.0 + # to v2.4 we keep BG label as 0 and FG label as 1 in rpn head. + scores = rpn_cls_score.softmax(dim=1)[:, 0] + rpn_bbox_pred = rpn_bbox_pred.permute(1, 2, 0).reshape(-1, 4) + anchors = mlvl_anchors[idx] + if cfg.nms_pre > 0 and scores.shape[0] > cfg.nms_pre: + # sort is faster than topk + # _, topk_inds = scores.topk(cfg.nms_pre) + if torch.onnx.is_in_onnx_export(): + # sort op will be converted to TopK in onnx + # and k<=3480 in TensorRT + _, topk_inds = scores.topk(cfg.nms_pre) + scores = scores[topk_inds] + else: + ranked_scores, rank_inds = scores.sort(descending=True) + topk_inds = rank_inds[:cfg.nms_pre] + scores = ranked_scores[:cfg.nms_pre] + rpn_bbox_pred = rpn_bbox_pred[topk_inds, :] + anchors = anchors[topk_inds, :] + mlvl_scores.append(scores) + mlvl_bbox_preds.append(rpn_bbox_pred) + mlvl_valid_anchors.append(anchors) + level_ids.append( + scores.new_full((scores.size(0), ), idx, dtype=torch.long)) + + scores = torch.cat(mlvl_scores) + anchors = torch.cat(mlvl_valid_anchors) + rpn_bbox_pred = torch.cat(mlvl_bbox_preds) + proposals = self.bbox_coder.decode( + anchors, rpn_bbox_pred, max_shape=img_shape) + ids = torch.cat(level_ids) + + # Skip nonzero op while exporting to ONNX + if cfg.min_bbox_size >= 0 and (not torch.onnx.is_in_onnx_export()): + w = proposals[:, 2] - proposals[:, 0] + h = proposals[:, 3] - proposals[:, 1] + valid_inds = torch.nonzero( + (w > cfg.min_bbox_size) + & (h > cfg.min_bbox_size), + as_tuple=False).squeeze() + if valid_inds.sum().item() != len(proposals): + proposals = proposals[valid_inds, :] + scores = scores[valid_inds] + ids = ids[valid_inds] + + # deprecate arguments warning + if 'nms' not in cfg or 'max_num' in cfg or 'nms_thr' in cfg: + warnings.warn( + 'In rpn_proposal or test_cfg, ' + 'nms_thr has been moved to a dict named nms as ' + 'iou_threshold, max_num has been renamed as max_per_img, ' + 'name of original arguments and the way to specify ' + 'iou_threshold of NMS will be deprecated.') + if 'nms' not in cfg: + cfg.nms = ConfigDict(dict(type='nms', iou_threshold=cfg.nms_thr)) + if 'max_num' in cfg: + if 'max_per_img' in cfg: + assert cfg.max_num == cfg.max_per_img, f'You ' \ + f'set max_num and ' \ + f'max_per_img at the same time, but get {cfg.max_num} ' \ + f'and {cfg.max_per_img} respectively' \ + 'Please delete max_num which will be deprecated.' + else: + cfg.max_per_img = cfg.max_num + if 'nms_thr' in cfg: + assert cfg.nms.iou_threshold == cfg.nms_thr, f'You set' \ + f' iou_threshold in nms and ' \ + f'nms_thr at the same time, but get' \ + f' {cfg.nms.iou_threshold} and {cfg.nms_thr}' \ + f' respectively. Please delete the nms_thr ' \ + f'which will be deprecated.' 
+ + dets, keep = batched_nms(proposals, scores, ids, cfg.nms) + return dets[:cfg.max_per_img] + + +@HEADS.register_module() +class CascadeRPNHead(BaseDenseHead): + """The CascadeRPNHead will predict more accurate region proposals, which is + required for two-stage detectors (such as Fast/Faster R-CNN). CascadeRPN + consists of a sequence of RPNStage to progressively improve the accuracy of + the detected proposals. + + More details can be found in ``https://arxiv.org/abs/1909.06720``. + + Args: + num_stages (int): number of CascadeRPN stages. + stages (list[dict]): list of configs to build the stages. + train_cfg (list[dict]): list of configs at training time each stage. + test_cfg (dict): config at testing time. + """ + + def __init__(self, num_stages, stages, train_cfg, test_cfg, init_cfg=None): + super(CascadeRPNHead, self).__init__(init_cfg) + assert num_stages == len(stages) + self.num_stages = num_stages + # Be careful! Pretrained weights cannot be loaded when use + # nn.ModuleList + self.stages = ModuleList() + for i in range(len(stages)): + train_cfg_i = train_cfg[i] if train_cfg is not None else None + stages[i].update(train_cfg=train_cfg_i) + stages[i].update(test_cfg=test_cfg) + self.stages.append(build_head(stages[i])) + self.train_cfg = train_cfg + self.test_cfg = test_cfg + + def loss(self): + """loss() is implemented in StageCascadeRPNHead.""" + pass + + def get_bboxes(self): + """get_bboxes() is implemented in StageCascadeRPNHead.""" + pass + + def forward_train(self, + x, + img_metas, + gt_bboxes, + gt_labels=None, + gt_bboxes_ignore=None, + proposal_cfg=None): + """Forward train function.""" + assert gt_labels is None, 'RPN does not require gt_labels' + + featmap_sizes = [featmap.size()[-2:] for featmap in x] + device = x[0].device + anchor_list, valid_flag_list = self.stages[0].get_anchors( + featmap_sizes, img_metas, device=device) + + losses = dict() + + for i in range(self.num_stages): + stage = self.stages[i] + + if stage.adapt_cfg['type'] == 'offset': + offset_list = stage.anchor_offset(anchor_list, + stage.anchor_strides, + featmap_sizes) + else: + offset_list = None + x, cls_score, bbox_pred = stage(x, offset_list) + rpn_loss_inputs = (anchor_list, valid_flag_list, cls_score, + bbox_pred, gt_bboxes, img_metas) + stage_loss = stage.loss(*rpn_loss_inputs) + for name, value in stage_loss.items(): + losses['s{}.{}'.format(i, name)] = value + + # refine boxes + if i < self.num_stages - 1: + anchor_list = stage.refine_bboxes(anchor_list, bbox_pred, + img_metas) + if proposal_cfg is None: + return losses + else: + proposal_list = self.stages[-1].get_bboxes(anchor_list, cls_score, + bbox_pred, img_metas, + self.test_cfg) + return losses, proposal_list + + def simple_test_rpn(self, x, img_metas): + """Simple forward test function.""" + featmap_sizes = [featmap.size()[-2:] for featmap in x] + device = x[0].device + anchor_list, _ = self.stages[0].get_anchors( + featmap_sizes, img_metas, device=device) + + for i in range(self.num_stages): + stage = self.stages[i] + if stage.adapt_cfg['type'] == 'offset': + offset_list = stage.anchor_offset(anchor_list, + stage.anchor_strides, + featmap_sizes) + else: + offset_list = None + x, cls_score, bbox_pred = stage(x, offset_list) + if i < self.num_stages - 1: + anchor_list = stage.refine_bboxes(anchor_list, bbox_pred, + img_metas) + + proposal_list = self.stages[-1].get_bboxes(anchor_list, cls_score, + bbox_pred, img_metas, + self.test_cfg) + return proposal_list + + def aug_test_rpn(self, x, img_metas): + """Augmented forward test 
function.""" + raise NotImplementedError( + 'CascadeRPNHead does not support test-time augmentation') diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/centernet_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/centernet_head.py new file mode 100644 index 0000000000000000000000000000000000000000..8fb07f16c339ae28939a4f69592c7a4161a2f965 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/centernet_head.py @@ -0,0 +1,380 @@ +import torch +import torch.nn as nn +from mmcv.cnn import bias_init_with_prob, normal_init +from mmcv.ops import batched_nms +from mmcv.runner import force_fp32 + +from mmdet.core import multi_apply +from mmdet.models import HEADS, build_loss +from mmdet.models.utils import gaussian_radius, gen_gaussian_target +from ..utils.gaussian_target import (get_local_maximum, get_topk_from_heatmap, + transpose_and_gather_feat) +from .base_dense_head import BaseDenseHead +from .dense_test_mixins import BBoxTestMixin + + +@HEADS.register_module() +class CenterNetHead(BaseDenseHead, BBoxTestMixin): + """Objects as Points Head. CenterHead use center_point to indicate object's + position. Paper link + + Args: + in_channel (int): Number of channel in the input feature map. + feat_channel (int): Number of channel in the intermediate feature map. + num_classes (int): Number of categories excluding the background + category. + loss_center_heatmap (dict | None): Config of center heatmap loss. + Default: GaussianFocalLoss. + loss_wh (dict | None): Config of wh loss. Default: L1Loss. + loss_offset (dict | None): Config of offset loss. Default: L1Loss. + train_cfg (dict | None): Training config. Useless in CenterNet, + but we keep this variable for SingleStageDetector. Default: None. + test_cfg (dict | None): Testing config of CenterNet. Default: None. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + in_channel, + feat_channel, + num_classes, + loss_center_heatmap=dict( + type='GaussianFocalLoss', loss_weight=1.0), + loss_wh=dict(type='L1Loss', loss_weight=0.1), + loss_offset=dict(type='L1Loss', loss_weight=1.0), + train_cfg=None, + test_cfg=None, + init_cfg=None): + super(CenterNetHead, self).__init__(init_cfg) + self.num_classes = num_classes + self.heatmap_head = self._build_head(in_channel, feat_channel, + num_classes) + self.wh_head = self._build_head(in_channel, feat_channel, 2) + self.offset_head = self._build_head(in_channel, feat_channel, 2) + + self.loss_center_heatmap = build_loss(loss_center_heatmap) + self.loss_wh = build_loss(loss_wh) + self.loss_offset = build_loss(loss_offset) + + self.train_cfg = train_cfg + self.test_cfg = test_cfg + self.fp16_enabled = False + + def _build_head(self, in_channel, feat_channel, out_channel): + """Build head for each branch.""" + layer = nn.Sequential( + nn.Conv2d(in_channel, feat_channel, kernel_size=3, padding=1), + nn.ReLU(inplace=True), + nn.Conv2d(feat_channel, out_channel, kernel_size=1)) + return layer + + def init_weights(self): + """Initialize weights of the head.""" + bias_init = bias_init_with_prob(0.1) + self.heatmap_head[-1].bias.data.fill_(bias_init) + for head in [self.wh_head, self.offset_head]: + for m in head.modules(): + if isinstance(m, nn.Conv2d): + normal_init(m, std=0.001) + + def forward(self, feats): + """Forward features. Notice CenterNet head does not use FPN. 
+ + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + center_heatmap_preds (List[Tensor]): center predict heatmaps for + all levels, the channels number is num_classes. + wh_preds (List[Tensor]): wh predicts for all levels, the channels + number is 2. + offset_preds (List[Tensor]): offset predicts for all levels, the + channels number is 2. + """ + return multi_apply(self.forward_single, feats) + + def forward_single(self, feat): + """Forward feature of a single level. + + Args: + feat (Tensor): Feature of a single level. + + Returns: + center_heatmap_pred (Tensor): center predict heatmaps, the + channels number is num_classes. + wh_pred (Tensor): wh predicts, the channels number is 2. + offset_pred (Tensor): offset predicts, the channels number is 2. + """ + center_heatmap_pred = self.heatmap_head(feat).sigmoid() + wh_pred = self.wh_head(feat) + offset_pred = self.offset_head(feat) + return center_heatmap_pred, wh_pred, offset_pred + + @force_fp32(apply_to=('center_heatmap_preds', 'wh_preds', 'offset_preds')) + def loss(self, + center_heatmap_preds, + wh_preds, + offset_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + center_heatmap_preds (list[Tensor]): center predict heatmaps for + all levels with shape (B, num_classes, H, W). + wh_preds (list[Tensor]): wh predicts for all levels with + shape (B, 2, H, W). + offset_preds (list[Tensor]): offset predicts for all levels + with shape (B, 2, H, W). + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box. + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. Default: None + + Returns: + dict[str, Tensor]: which has components below: + - loss_center_heatmap (Tensor): loss of center heatmap. + - loss_wh (Tensor): loss of hw heatmap + - loss_offset (Tensor): loss of offset heatmap. + """ + assert len(center_heatmap_preds) == len(wh_preds) == len( + offset_preds) == 1 + center_heatmap_pred = center_heatmap_preds[0] + wh_pred = wh_preds[0] + offset_pred = offset_preds[0] + + target_result, avg_factor = self.get_targets(gt_bboxes, gt_labels, + center_heatmap_pred.shape, + img_metas[0]['pad_shape']) + + center_heatmap_target = target_result['center_heatmap_target'] + wh_target = target_result['wh_target'] + offset_target = target_result['offset_target'] + wh_offset_target_weight = target_result['wh_offset_target_weight'] + + # Since the channel of wh_target and offset_target is 2, the avg_factor + # of loss_center_heatmap is always 1/2 of loss_wh and loss_offset. + loss_center_heatmap = self.loss_center_heatmap( + center_heatmap_pred, center_heatmap_target, avg_factor=avg_factor) + loss_wh = self.loss_wh( + wh_pred, + wh_target, + wh_offset_target_weight, + avg_factor=avg_factor * 2) + loss_offset = self.loss_offset( + offset_pred, + offset_target, + wh_offset_target_weight, + avg_factor=avg_factor * 2) + return dict( + loss_center_heatmap=loss_center_heatmap, + loss_wh=loss_wh, + loss_offset=loss_offset) + + def get_targets(self, gt_bboxes, gt_labels, feat_shape, img_shape): + """Compute regression and classification targets in multiple images. 
+ + Args: + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box. + feat_shape (list[int]): feature map shape with value [B, _, H, W] + img_shape (list[int]): image shape in [h, w] format. + + Returns: + tuple[dict,float]: The float value is mean avg_factor, the dict has + components below: + - center_heatmap_target (Tensor): targets of center heatmap, \ + shape (B, num_classes, H, W). + - wh_target (Tensor): targets of wh predict, shape \ + (B, 2, H, W). + - offset_target (Tensor): targets of offset predict, shape \ + (B, 2, H, W). + - wh_offset_target_weight (Tensor): weights of wh and offset \ + predict, shape (B, 2, H, W). + """ + img_h, img_w = img_shape[:2] + bs, _, feat_h, feat_w = feat_shape + + width_ratio = float(feat_w / img_w) + height_ratio = float(feat_h / img_h) + + center_heatmap_target = gt_bboxes[-1].new_zeros( + [bs, self.num_classes, feat_h, feat_w]) + wh_target = gt_bboxes[-1].new_zeros([bs, 2, feat_h, feat_w]) + offset_target = gt_bboxes[-1].new_zeros([bs, 2, feat_h, feat_w]) + wh_offset_target_weight = gt_bboxes[-1].new_zeros( + [bs, 2, feat_h, feat_w]) + + for batch_id in range(bs): + gt_bbox = gt_bboxes[batch_id] + gt_label = gt_labels[batch_id] + center_x = (gt_bbox[:, [0]] + gt_bbox[:, [2]]) * width_ratio / 2 + center_y = (gt_bbox[:, [1]] + gt_bbox[:, [3]]) * height_ratio / 2 + gt_centers = torch.cat((center_x, center_y), dim=1) + + for j, ct in enumerate(gt_centers): + ctx_int, cty_int = ct.int() + ctx, cty = ct + scale_box_h = (gt_bbox[j][3] - gt_bbox[j][1]) * height_ratio + scale_box_w = (gt_bbox[j][2] - gt_bbox[j][0]) * width_ratio + radius = gaussian_radius([scale_box_h, scale_box_w], + min_overlap=0.3) + radius = max(0, int(radius)) + ind = gt_label[j] + gen_gaussian_target(center_heatmap_target[batch_id, ind], + [ctx_int, cty_int], radius) + + wh_target[batch_id, 0, cty_int, ctx_int] = scale_box_w + wh_target[batch_id, 1, cty_int, ctx_int] = scale_box_h + + offset_target[batch_id, 0, cty_int, ctx_int] = ctx - ctx_int + offset_target[batch_id, 1, cty_int, ctx_int] = cty - cty_int + + wh_offset_target_weight[batch_id, :, cty_int, ctx_int] = 1 + + avg_factor = max(1, center_heatmap_target.eq(1).sum()) + target_result = dict( + center_heatmap_target=center_heatmap_target, + wh_target=wh_target, + offset_target=offset_target, + wh_offset_target_weight=wh_offset_target_weight) + return target_result, avg_factor + + def get_bboxes(self, + center_heatmap_preds, + wh_preds, + offset_preds, + img_metas, + rescale=True, + with_nms=False): + """Transform network output for a batch into bbox predictions. + + Args: + center_heatmap_preds (list[Tensor]): center predict heatmaps for + all levels with shape (B, num_classes, H, W). + wh_preds (list[Tensor]): wh predicts for all levels with + shape (B, 2, H, W). + offset_preds (list[Tensor]): offset predicts for all levels + with shape (B, 2, H, W). + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + rescale (bool): If True, return boxes in original image space. + Default: True. + with_nms (bool): If True, do nms before return boxes. + Default: False. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is an (n, 5) tensor, where 5 represent + (tl_x, tl_y, br_x, br_y, score) and the score between 0 and 1. 
+ The shape of the second tensor in the tuple is (n,), and + each element represents the class label of the corresponding + box. + """ + assert len(center_heatmap_preds) == len(wh_preds) == len( + offset_preds) == 1 + scale_factors = [img_meta['scale_factor'] for img_meta in img_metas] + border_pixs = [img_meta['border'] for img_meta in img_metas] + + batch_det_bboxes, batch_labels = self.decode_heatmap( + center_heatmap_preds[0], + wh_preds[0], + offset_preds[0], + img_metas[0]['batch_input_shape'], + k=self.test_cfg.topk, + kernel=self.test_cfg.local_maximum_kernel) + + batch_border = batch_det_bboxes.new_tensor( + border_pixs)[:, [2, 0, 2, 0]].unsqueeze(1) + batch_det_bboxes[..., :4] -= batch_border + + if rescale: + batch_det_bboxes[..., :4] /= batch_det_bboxes.new_tensor( + scale_factors).unsqueeze(1) + + if with_nms: + det_results = [] + for (det_bboxes, det_labels) in zip(batch_det_bboxes, + batch_labels): + det_bbox, det_label = self._bboxes_nms(det_bboxes, det_labels, + self.test_cfg) + det_results.append(tuple([det_bbox, det_label])) + else: + det_results = [ + tuple(bs) for bs in zip(batch_det_bboxes, batch_labels) + ] + return det_results + + def decode_heatmap(self, + center_heatmap_pred, + wh_pred, + offset_pred, + img_shape, + k=100, + kernel=3): + """Transform outputs into detections raw bbox prediction. + + Args: + center_heatmap_pred (Tensor): center predict heatmap, + shape (B, num_classes, H, W). + wh_pred (Tensor): wh predict, shape (B, 2, H, W). + offset_pred (Tensor): offset predict, shape (B, 2, H, W). + img_shape (list[int]): image shape in [h, w] format. + k (int): Get top k center keypoints from heatmap. Default 100. + kernel (int): Max pooling kernel for extract local maximum pixels. + Default 3. + + Returns: + tuple[torch.Tensor]: Decoded output of CenterNetHead, containing + the following Tensors: + + - batch_bboxes (Tensor): Coords of each box with shape (B, k, 5) + - batch_topk_labels (Tensor): Categories of each box with \ + shape (B, k) + """ + height, width = center_heatmap_pred.shape[2:] + inp_h, inp_w = img_shape + + center_heatmap_pred = get_local_maximum( + center_heatmap_pred, kernel=kernel) + + *batch_dets, topk_ys, topk_xs = get_topk_from_heatmap( + center_heatmap_pred, k=k) + batch_scores, batch_index, batch_topk_labels = batch_dets + + wh = transpose_and_gather_feat(wh_pred, batch_index) + offset = transpose_and_gather_feat(offset_pred, batch_index) + topk_xs = topk_xs + offset[..., 0] + topk_ys = topk_ys + offset[..., 1] + tl_x = (topk_xs - wh[..., 0] / 2) * (inp_w / width) + tl_y = (topk_ys - wh[..., 1] / 2) * (inp_h / height) + br_x = (topk_xs + wh[..., 0] / 2) * (inp_w / width) + br_y = (topk_ys + wh[..., 1] / 2) * (inp_h / height) + + batch_bboxes = torch.stack([tl_x, tl_y, br_x, br_y], dim=2) + batch_bboxes = torch.cat((batch_bboxes, batch_scores[..., None]), + dim=-1) + return batch_bboxes, batch_topk_labels + + def _bboxes_nms(self, bboxes, labels, cfg): + if labels.numel() == 0: + return bboxes, labels + + out_bboxes, keep = batched_nms(bboxes[:, :4], bboxes[:, -1], labels, + cfg.nms_cfg) + out_labels = labels[keep] + + if len(out_bboxes) > 0: + idx = torch.argsort(out_bboxes[:, -1], descending=True) + idx = idx[:cfg.max_per_img] + out_bboxes = out_bboxes[idx] + out_labels = out_labels[idx] + + return out_bboxes, out_labels diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/centripetal_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/centripetal_head.py new file mode 100644 
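To make the encode/decode arithmetic of the head above concrete, here is a minimal, self-contained sketch (all sizes and box values are invented for illustration and are not part of the patch): a ground-truth box becomes an integer heatmap peak plus a sub-pixel offset and a width/height pair in feature coordinates, and decode_heatmap inverts exactly that mapping.

import torch

# Assumed toy geometry: a 128x128 input and a stride-4 (32x32) feature map.
img_h = img_w = 128.
feat_h = feat_w = 32
width_ratio = feat_w / img_w    # 0.25
height_ratio = feat_h / img_h   # 0.25

# One ground-truth box [tl_x, tl_y, br_x, br_y] in image coordinates.
box = torch.tensor([10., 20., 50., 100.])
ctx = (box[0] + box[2]) * width_ratio / 2     # 7.5 in feature coords
cty = (box[1] + box[3]) * height_ratio / 2    # 15.0
ctx_int, cty_int = int(ctx), int(cty)         # (7, 15): the heatmap peak cell
offset = (ctx - ctx_int, cty - cty_int)       # (0.5, 0.0): offset target
wh = ((box[2] - box[0]) * width_ratio,        # (10.0, 20.0): wh target
      (box[3] - box[1]) * height_ratio)

# Decoding reverses the mapping back to input coordinates.
tl_x = (ctx_int + offset[0] - wh[0] / 2) * (img_w / feat_w)
tl_y = (cty_int + offset[1] - wh[1] / 2) * (img_h / feat_h)
assert (tl_x.item(), tl_y.item()) == (10., 20.)  # top-left corner recovered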
index 0000000000000000000000000000000000000000..a9d3ddf5bee1a8b42cedb02d4fcd36cc212e42b6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/centripetal_head.py @@ -0,0 +1,426 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule, normal_init +from mmcv.ops import DeformConv2d + +from mmdet.core import multi_apply +from ..builder import HEADS, build_loss +from .corner_head import CornerHead + + +@HEADS.register_module() +class CentripetalHead(CornerHead): + """Head of CentripetalNet: Pursuing High-quality Keypoint Pairs for Object + Detection. + + CentripetalHead inherits from :class:`CornerHead`. It removes the + embedding branch and adds guiding shift and centripetal shift branches. + More details can be found in the `paper + `_ . + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + num_feat_levels (int): Levels of feature from the previous module. 2 + for HourglassNet-104 and 1 for HourglassNet-52. HourglassNet-104 + outputs the final feature and intermediate supervision feature and + HourglassNet-52 only outputs the final feature. Default: 2. + corner_emb_channels (int): Channel of embedding vector. Default: 1. + train_cfg (dict | None): Training config. Useless in CornerHead, + but we keep this variable for SingleStageDetector. Default: None. + test_cfg (dict | None): Testing config of CornerHead. Default: None. + loss_heatmap (dict | None): Config of corner heatmap loss. Default: + GaussianFocalLoss. + loss_embedding (dict | None): Config of corner embedding loss. Default: + AssociativeEmbeddingLoss. + loss_offset (dict | None): Config of corner offset loss. Default: + SmoothL1Loss. + loss_guiding_shift (dict): Config of guiding shift loss. Default: + SmoothL1Loss. + loss_centripetal_shift (dict): Config of centripetal shift loss. + Default: SmoothL1Loss. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + *args, + centripetal_shift_channels=2, + guiding_shift_channels=2, + feat_adaption_conv_kernel=3, + loss_guiding_shift=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=0.05), + loss_centripetal_shift=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=1), + init_cfg=None, + **kwargs): + assert init_cfg is None, 'To prevent abnormal initialization ' \ + 'behavior, init_cfg is not allowed to be set' + assert centripetal_shift_channels == 2, ( + 'CentripetalHead only support centripetal_shift_channels == 2') + self.centripetal_shift_channels = centripetal_shift_channels + assert guiding_shift_channels == 2, ( + 'CentripetalHead only support guiding_shift_channels == 2') + self.guiding_shift_channels = guiding_shift_channels + self.feat_adaption_conv_kernel = feat_adaption_conv_kernel + super(CentripetalHead, self).__init__( + *args, init_cfg=init_cfg, **kwargs) + self.loss_guiding_shift = build_loss(loss_guiding_shift) + self.loss_centripetal_shift = build_loss(loss_centripetal_shift) + + def _init_centripetal_layers(self): + """Initialize centripetal layers. + + Including feature adaption deform convs (feat_adaption), deform offset + prediction convs (dcn_off), guiding shift (guiding_shift) and + centripetal shift ( centripetal_shift). Each branch has two parts: + prefix `tl_` for top-left and `br_` for bottom-right. 
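+
+        Example (illustrative shape check only; the 3x3 kernel and the toy
+        8x8 spatial size are assumed, not taken from a config):
+            >>> import torch
+            >>> from mmcv.cnn import ConvModule
+            >>> # a 2-channel guiding shift yields 3*3*2 = 18 offset channels
+            >>> # for the 3x3 DeformConv2d used in feature adaption
+            >>> dcn_offset = ConvModule(2, 3**2 * 2, 1, bias=False,
+            ...                         act_cfg=None)
+            >>> dcn_offset(torch.rand(1, 2, 8, 8)).shape
+            torch.Size([1, 18, 8, 8])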
+ """ + self.tl_feat_adaption = nn.ModuleList() + self.br_feat_adaption = nn.ModuleList() + self.tl_dcn_offset = nn.ModuleList() + self.br_dcn_offset = nn.ModuleList() + self.tl_guiding_shift = nn.ModuleList() + self.br_guiding_shift = nn.ModuleList() + self.tl_centripetal_shift = nn.ModuleList() + self.br_centripetal_shift = nn.ModuleList() + + for _ in range(self.num_feat_levels): + self.tl_feat_adaption.append( + DeformConv2d(self.in_channels, self.in_channels, + self.feat_adaption_conv_kernel, 1, 1)) + self.br_feat_adaption.append( + DeformConv2d(self.in_channels, self.in_channels, + self.feat_adaption_conv_kernel, 1, 1)) + + self.tl_guiding_shift.append( + self._make_layers( + out_channels=self.guiding_shift_channels, + in_channels=self.in_channels)) + self.br_guiding_shift.append( + self._make_layers( + out_channels=self.guiding_shift_channels, + in_channels=self.in_channels)) + + self.tl_dcn_offset.append( + ConvModule( + self.guiding_shift_channels, + self.feat_adaption_conv_kernel**2 * + self.guiding_shift_channels, + 1, + bias=False, + act_cfg=None)) + self.br_dcn_offset.append( + ConvModule( + self.guiding_shift_channels, + self.feat_adaption_conv_kernel**2 * + self.guiding_shift_channels, + 1, + bias=False, + act_cfg=None)) + + self.tl_centripetal_shift.append( + self._make_layers( + out_channels=self.centripetal_shift_channels, + in_channels=self.in_channels)) + self.br_centripetal_shift.append( + self._make_layers( + out_channels=self.centripetal_shift_channels, + in_channels=self.in_channels)) + + def _init_layers(self): + """Initialize layers for CentripetalHead. + + Including two parts: CornerHead layers and CentripetalHead layers + """ + super()._init_layers() # using _init_layers in CornerHead + self._init_centripetal_layers() + + def init_weights(self): + super(CentripetalHead, self).init_weights() + for i in range(self.num_feat_levels): + normal_init(self.tl_feat_adaption[i], std=0.01) + normal_init(self.br_feat_adaption[i], std=0.01) + normal_init(self.tl_dcn_offset[i].conv, std=0.1) + normal_init(self.br_dcn_offset[i].conv, std=0.1) + _ = [x.conv.reset_parameters() for x in self.tl_guiding_shift[i]] + _ = [x.conv.reset_parameters() for x in self.br_guiding_shift[i]] + _ = [ + x.conv.reset_parameters() for x in self.tl_centripetal_shift[i] + ] + _ = [ + x.conv.reset_parameters() for x in self.br_centripetal_shift[i] + ] + + def forward_single(self, x, lvl_ind): + """Forward feature of a single level. + + Args: + x (Tensor): Feature of a single level. + lvl_ind (int): Level index of current feature. + + Returns: + tuple[Tensor]: A tuple of CentripetalHead's output for current + feature level. Containing the following Tensors: + + - tl_heat (Tensor): Predicted top-left corner heatmap. + - br_heat (Tensor): Predicted bottom-right corner heatmap. + - tl_off (Tensor): Predicted top-left offset heatmap. + - br_off (Tensor): Predicted bottom-right offset heatmap. + - tl_guiding_shift (Tensor): Predicted top-left guiding shift + heatmap. + - br_guiding_shift (Tensor): Predicted bottom-right guiding + shift heatmap. + - tl_centripetal_shift (Tensor): Predicted top-left centripetal + shift heatmap. + - br_centripetal_shift (Tensor): Predicted bottom-right + centripetal shift heatmap. 
+ """ + tl_heat, br_heat, _, _, tl_off, br_off, tl_pool, br_pool = super( + ).forward_single( + x, lvl_ind, return_pool=True) + + tl_guiding_shift = self.tl_guiding_shift[lvl_ind](tl_pool) + br_guiding_shift = self.br_guiding_shift[lvl_ind](br_pool) + + tl_dcn_offset = self.tl_dcn_offset[lvl_ind](tl_guiding_shift.detach()) + br_dcn_offset = self.br_dcn_offset[lvl_ind](br_guiding_shift.detach()) + + tl_feat_adaption = self.tl_feat_adaption[lvl_ind](tl_pool, + tl_dcn_offset) + br_feat_adaption = self.br_feat_adaption[lvl_ind](br_pool, + br_dcn_offset) + + tl_centripetal_shift = self.tl_centripetal_shift[lvl_ind]( + tl_feat_adaption) + br_centripetal_shift = self.br_centripetal_shift[lvl_ind]( + br_feat_adaption) + + result_list = [ + tl_heat, br_heat, tl_off, br_off, tl_guiding_shift, + br_guiding_shift, tl_centripetal_shift, br_centripetal_shift + ] + return result_list + + def loss(self, + tl_heats, + br_heats, + tl_offs, + br_offs, + tl_guiding_shifts, + br_guiding_shifts, + tl_centripetal_shifts, + br_centripetal_shifts, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + tl_heats (list[Tensor]): Top-left corner heatmaps for each level + with shape (N, num_classes, H, W). + br_heats (list[Tensor]): Bottom-right corner heatmaps for each + level with shape (N, num_classes, H, W). + tl_offs (list[Tensor]): Top-left corner offsets for each level + with shape (N, corner_offset_channels, H, W). + br_offs (list[Tensor]): Bottom-right corner offsets for each level + with shape (N, corner_offset_channels, H, W). + tl_guiding_shifts (list[Tensor]): Top-left guiding shifts for each + level with shape (N, guiding_shift_channels, H, W). + br_guiding_shifts (list[Tensor]): Bottom-right guiding shifts for + each level with shape (N, guiding_shift_channels, H, W). + tl_centripetal_shifts (list[Tensor]): Top-left centripetal shifts + for each level with shape (N, centripetal_shift_channels, H, + W). + br_centripetal_shifts (list[Tensor]): Bottom-right centripetal + shifts for each level with shape (N, + centripetal_shift_channels, H, W). + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [left, top, right, bottom] format. + gt_labels (list[Tensor]): Class indices corresponding to each box. + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (list[Tensor] | None): Specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. Containing the + following losses: + + - det_loss (list[Tensor]): Corner keypoint losses of all + feature levels. + - off_loss (list[Tensor]): Corner offset losses of all feature + levels. + - guiding_loss (list[Tensor]): Guiding shift losses of all + feature levels. + - centripetal_loss (list[Tensor]): Centripetal shift losses of + all feature levels. 
+ """ + targets = self.get_targets( + gt_bboxes, + gt_labels, + tl_heats[-1].shape, + img_metas[0]['pad_shape'], + with_corner_emb=self.with_corner_emb, + with_guiding_shift=True, + with_centripetal_shift=True) + mlvl_targets = [targets for _ in range(self.num_feat_levels)] + [det_losses, off_losses, guiding_losses, centripetal_losses + ] = multi_apply(self.loss_single, tl_heats, br_heats, tl_offs, + br_offs, tl_guiding_shifts, br_guiding_shifts, + tl_centripetal_shifts, br_centripetal_shifts, + mlvl_targets) + loss_dict = dict( + det_loss=det_losses, + off_loss=off_losses, + guiding_loss=guiding_losses, + centripetal_loss=centripetal_losses) + return loss_dict + + def loss_single(self, tl_hmp, br_hmp, tl_off, br_off, tl_guiding_shift, + br_guiding_shift, tl_centripetal_shift, + br_centripetal_shift, targets): + """Compute losses for single level. + + Args: + tl_hmp (Tensor): Top-left corner heatmap for current level with + shape (N, num_classes, H, W). + br_hmp (Tensor): Bottom-right corner heatmap for current level with + shape (N, num_classes, H, W). + tl_off (Tensor): Top-left corner offset for current level with + shape (N, corner_offset_channels, H, W). + br_off (Tensor): Bottom-right corner offset for current level with + shape (N, corner_offset_channels, H, W). + tl_guiding_shift (Tensor): Top-left guiding shift for current level + with shape (N, guiding_shift_channels, H, W). + br_guiding_shift (Tensor): Bottom-right guiding shift for current + level with shape (N, guiding_shift_channels, H, W). + tl_centripetal_shift (Tensor): Top-left centripetal shift for + current level with shape (N, centripetal_shift_channels, H, W). + br_centripetal_shift (Tensor): Bottom-right centripetal shift for + current level with shape (N, centripetal_shift_channels, H, W). + targets (dict): Corner target generated by `get_targets`. + + Returns: + tuple[torch.Tensor]: Losses of the head's differnet branches + containing the following losses: + + - det_loss (Tensor): Corner keypoint loss. + - off_loss (Tensor): Corner offset loss. + - guiding_loss (Tensor): Guiding shift loss. + - centripetal_loss (Tensor): Centripetal shift loss. + """ + targets['corner_embedding'] = None + + det_loss, _, _, off_loss = super().loss_single(tl_hmp, br_hmp, None, + None, tl_off, br_off, + targets) + + gt_tl_guiding_shift = targets['topleft_guiding_shift'] + gt_br_guiding_shift = targets['bottomright_guiding_shift'] + gt_tl_centripetal_shift = targets['topleft_centripetal_shift'] + gt_br_centripetal_shift = targets['bottomright_centripetal_shift'] + + gt_tl_heatmap = targets['topleft_heatmap'] + gt_br_heatmap = targets['bottomright_heatmap'] + # We only compute the offset loss at the real corner position. + # The value of real corner would be 1 in heatmap ground truth. + # The mask is computed in class agnostic mode and its shape is + # batch * 1 * width * height. 
+ tl_mask = gt_tl_heatmap.eq(1).sum(1).gt(0).unsqueeze(1).type_as( + gt_tl_heatmap) + br_mask = gt_br_heatmap.eq(1).sum(1).gt(0).unsqueeze(1).type_as( + gt_br_heatmap) + + # Guiding shift loss + tl_guiding_loss = self.loss_guiding_shift( + tl_guiding_shift, + gt_tl_guiding_shift, + tl_mask, + avg_factor=tl_mask.sum()) + br_guiding_loss = self.loss_guiding_shift( + br_guiding_shift, + gt_br_guiding_shift, + br_mask, + avg_factor=br_mask.sum()) + guiding_loss = (tl_guiding_loss + br_guiding_loss) / 2.0 + # Centripetal shift loss + tl_centripetal_loss = self.loss_centripetal_shift( + tl_centripetal_shift, + gt_tl_centripetal_shift, + tl_mask, + avg_factor=tl_mask.sum()) + br_centripetal_loss = self.loss_centripetal_shift( + br_centripetal_shift, + gt_br_centripetal_shift, + br_mask, + avg_factor=br_mask.sum()) + centripetal_loss = (tl_centripetal_loss + br_centripetal_loss) / 2.0 + + return det_loss, off_loss, guiding_loss, centripetal_loss + + def get_bboxes(self, + tl_heats, + br_heats, + tl_offs, + br_offs, + tl_guiding_shifts, + br_guiding_shifts, + tl_centripetal_shifts, + br_centripetal_shifts, + img_metas, + rescale=False, + with_nms=True): + """Transform network output for a batch into bbox predictions. + + Args: + tl_heats (list[Tensor]): Top-left corner heatmaps for each level + with shape (N, num_classes, H, W). + br_heats (list[Tensor]): Bottom-right corner heatmaps for each + level with shape (N, num_classes, H, W). + tl_offs (list[Tensor]): Top-left corner offsets for each level + with shape (N, corner_offset_channels, H, W). + br_offs (list[Tensor]): Bottom-right corner offsets for each level + with shape (N, corner_offset_channels, H, W). + tl_guiding_shifts (list[Tensor]): Top-left guiding shifts for each + level with shape (N, guiding_shift_channels, H, W). Useless in + this function, we keep this arg because it's the raw output + from CentripetalHead. + br_guiding_shifts (list[Tensor]): Bottom-right guiding shifts for + each level with shape (N, guiding_shift_channels, H, W). + Useless in this function, we keep this arg because it's the + raw output from CentripetalHead. + tl_centripetal_shifts (list[Tensor]): Top-left centripetal shifts + for each level with shape (N, centripetal_shift_channels, H, + W). + br_centripetal_shifts (list[Tensor]): Bottom-right centripetal + shifts for each level with shape (N, + centripetal_shift_channels, H, W). + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. 
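+
+        Example (shape illustration only; the sizes are invented):
+            >>> import torch
+            >>> heats = [torch.rand(2, 80, 4, 4)]
+            >>> heats[-1][0:0 + 1].shape  # `img_id:img_id + 1` keeps batch dim
+            torch.Size([1, 80, 4, 4])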
+ """ + assert tl_heats[-1].shape[0] == br_heats[-1].shape[0] == len(img_metas) + result_list = [] + for img_id in range(len(img_metas)): + result_list.append( + self._get_bboxes_single( + tl_heats[-1][img_id:img_id + 1, :], + br_heats[-1][img_id:img_id + 1, :], + tl_offs[-1][img_id:img_id + 1, :], + br_offs[-1][img_id:img_id + 1, :], + img_metas[img_id], + tl_emb=None, + br_emb=None, + tl_centripetal_shift=tl_centripetal_shifts[-1][ + img_id:img_id + 1, :], + br_centripetal_shift=br_centripetal_shifts[-1][ + img_id:img_id + 1, :], + rescale=rescale, + with_nms=with_nms)) + + return result_list diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/corner_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/corner_head.py new file mode 100644 index 0000000000000000000000000000000000000000..634bd4a371e33e79029aa32b955d2eb547ad26f5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/corner_head.py @@ -0,0 +1,1052 @@ +from logging import warning +from math import ceil, log + +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule, bias_init_with_prob +from mmcv.ops import CornerPool, batched_nms +from mmcv.runner import BaseModule + +from mmdet.core import multi_apply +from ..builder import HEADS, build_loss +from ..utils import gaussian_radius, gen_gaussian_target +from ..utils.gaussian_target import (gather_feat, get_local_maximum, + get_topk_from_heatmap, + transpose_and_gather_feat) +from .base_dense_head import BaseDenseHead +from .dense_test_mixins import BBoxTestMixin + + +class BiCornerPool(BaseModule): + """Bidirectional Corner Pooling Module (TopLeft, BottomRight, etc.) + + Args: + in_channels (int): Input channels of module. + out_channels (int): Output channels of module. + feat_channels (int): Feature channels of module. + directions (list[str]): Directions of two CornerPools. + norm_cfg (dict): Dictionary to construct and config norm layer. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + in_channels, + directions, + feat_channels=128, + out_channels=128, + norm_cfg=dict(type='BN', requires_grad=True), + init_cfg=None): + super(BiCornerPool, self).__init__(init_cfg) + self.direction1_conv = ConvModule( + in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg) + self.direction2_conv = ConvModule( + in_channels, feat_channels, 3, padding=1, norm_cfg=norm_cfg) + + self.aftpool_conv = ConvModule( + feat_channels, + out_channels, + 3, + padding=1, + norm_cfg=norm_cfg, + act_cfg=None) + + self.conv1 = ConvModule( + in_channels, out_channels, 1, norm_cfg=norm_cfg, act_cfg=None) + self.conv2 = ConvModule( + in_channels, out_channels, 3, padding=1, norm_cfg=norm_cfg) + + self.direction1_pool = CornerPool(directions[0]) + self.direction2_pool = CornerPool(directions[1]) + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + """Forward features from the upstream network. + + Args: + x (tensor): Input feature of BiCornerPool. + + Returns: + conv2 (tensor): Output feature of BiCornerPool. 
+ """ + direction1_conv = self.direction1_conv(x) + direction2_conv = self.direction2_conv(x) + direction1_feat = self.direction1_pool(direction1_conv) + direction2_feat = self.direction2_pool(direction2_conv) + aftpool_conv = self.aftpool_conv(direction1_feat + direction2_feat) + conv1 = self.conv1(x) + relu = self.relu(aftpool_conv + conv1) + conv2 = self.conv2(relu) + return conv2 + + +@HEADS.register_module() +class CornerHead(BaseDenseHead, BBoxTestMixin): + """Head of CornerNet: Detecting Objects as Paired Keypoints. + + Code is modified from the `official github repo + `_ . + + More details can be found in the `paper + `_ . + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + num_feat_levels (int): Levels of feature from the previous module. 2 + for HourglassNet-104 and 1 for HourglassNet-52. Because + HourglassNet-104 outputs the final feature and intermediate + supervision feature and HourglassNet-52 only outputs the final + feature. Default: 2. + corner_emb_channels (int): Channel of embedding vector. Default: 1. + train_cfg (dict | None): Training config. Useless in CornerHead, + but we keep this variable for SingleStageDetector. Default: None. + test_cfg (dict | None): Testing config of CornerHead. Default: None. + loss_heatmap (dict | None): Config of corner heatmap loss. Default: + GaussianFocalLoss. + loss_embedding (dict | None): Config of corner embedding loss. Default: + AssociativeEmbeddingLoss. + loss_offset (dict | None): Config of corner offset loss. Default: + SmoothL1Loss. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + num_classes, + in_channels, + num_feat_levels=2, + corner_emb_channels=1, + train_cfg=None, + test_cfg=None, + loss_heatmap=dict( + type='GaussianFocalLoss', + alpha=2.0, + gamma=4.0, + loss_weight=1), + loss_embedding=dict( + type='AssociativeEmbeddingLoss', + pull_weight=0.25, + push_weight=0.25), + loss_offset=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=1), + init_cfg=None): + assert init_cfg is None, 'To prevent abnormal initialization ' \ + 'behavior, init_cfg is not allowed to be set' + super(CornerHead, self).__init__(init_cfg) + self.num_classes = num_classes + self.in_channels = in_channels + self.corner_emb_channels = corner_emb_channels + self.with_corner_emb = self.corner_emb_channels > 0 + self.corner_offset_channels = 2 + self.num_feat_levels = num_feat_levels + self.loss_heatmap = build_loss( + loss_heatmap) if loss_heatmap is not None else None + self.loss_embedding = build_loss( + loss_embedding) if loss_embedding is not None else None + self.loss_offset = build_loss( + loss_offset) if loss_offset is not None else None + self.train_cfg = train_cfg + self.test_cfg = test_cfg + + self._init_layers() + + def _make_layers(self, out_channels, in_channels=256, feat_channels=256): + """Initialize conv sequential for CornerHead.""" + return nn.Sequential( + ConvModule(in_channels, feat_channels, 3, padding=1), + ConvModule( + feat_channels, out_channels, 1, norm_cfg=None, act_cfg=None)) + + def _init_corner_kpt_layers(self): + """Initialize corner keypoint layers. + + Including corner heatmap branch and corner offset branch. Each branch + has two parts: prefix `tl_` for top-left and `br_` for bottom-right. 
+ """ + self.tl_pool, self.br_pool = nn.ModuleList(), nn.ModuleList() + self.tl_heat, self.br_heat = nn.ModuleList(), nn.ModuleList() + self.tl_off, self.br_off = nn.ModuleList(), nn.ModuleList() + + for _ in range(self.num_feat_levels): + self.tl_pool.append( + BiCornerPool( + self.in_channels, ['top', 'left'], + out_channels=self.in_channels)) + self.br_pool.append( + BiCornerPool( + self.in_channels, ['bottom', 'right'], + out_channels=self.in_channels)) + + self.tl_heat.append( + self._make_layers( + out_channels=self.num_classes, + in_channels=self.in_channels)) + self.br_heat.append( + self._make_layers( + out_channels=self.num_classes, + in_channels=self.in_channels)) + + self.tl_off.append( + self._make_layers( + out_channels=self.corner_offset_channels, + in_channels=self.in_channels)) + self.br_off.append( + self._make_layers( + out_channels=self.corner_offset_channels, + in_channels=self.in_channels)) + + def _init_corner_emb_layers(self): + """Initialize corner embedding layers. + + Only include corner embedding branch with two parts: prefix `tl_` for + top-left and `br_` for bottom-right. + """ + self.tl_emb, self.br_emb = nn.ModuleList(), nn.ModuleList() + + for _ in range(self.num_feat_levels): + self.tl_emb.append( + self._make_layers( + out_channels=self.corner_emb_channels, + in_channels=self.in_channels)) + self.br_emb.append( + self._make_layers( + out_channels=self.corner_emb_channels, + in_channels=self.in_channels)) + + def _init_layers(self): + """Initialize layers for CornerHead. + + Including two parts: corner keypoint layers and corner embedding layers + """ + self._init_corner_kpt_layers() + if self.with_corner_emb: + self._init_corner_emb_layers() + + def init_weights(self): + super(CornerHead, self).init_weights() + bias_init = bias_init_with_prob(0.1) + for i in range(self.num_feat_levels): + # The initialization of parameters are different between + # nn.Conv2d and ConvModule. Our experiments show that + # using the original initialization of nn.Conv2d increases + # the final mAP by about 0.2% + self.tl_heat[i][-1].conv.reset_parameters() + self.tl_heat[i][-1].conv.bias.data.fill_(bias_init) + self.br_heat[i][-1].conv.reset_parameters() + self.br_heat[i][-1].conv.bias.data.fill_(bias_init) + self.tl_off[i][-1].conv.reset_parameters() + self.br_off[i][-1].conv.reset_parameters() + if self.with_corner_emb: + self.tl_emb[i][-1].conv.reset_parameters() + self.br_emb[i][-1].conv.reset_parameters() + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple: Usually a tuple of corner heatmaps, offset heatmaps and + embedding heatmaps. + - tl_heats (list[Tensor]): Top-left corner heatmaps for all + levels, each is a 4D-tensor, the channels number is + num_classes. + - br_heats (list[Tensor]): Bottom-right corner heatmaps for all + levels, each is a 4D-tensor, the channels number is + num_classes. + - tl_embs (list[Tensor] | list[None]): Top-left embedding + heatmaps for all levels, each is a 4D-tensor or None. + If not None, the channels number is corner_emb_channels. + - br_embs (list[Tensor] | list[None]): Bottom-right embedding + heatmaps for all levels, each is a 4D-tensor or None. + If not None, the channels number is corner_emb_channels. + - tl_offs (list[Tensor]): Top-left offset heatmaps for all + levels, each is a 4D-tensor. The channels number is + corner_offset_channels. 
+ - br_offs (list[Tensor]): Bottom-right offset heatmaps for all + levels, each is a 4D-tensor. The channels number is + corner_offset_channels. + """ + lvl_ind = list(range(self.num_feat_levels)) + return multi_apply(self.forward_single, feats, lvl_ind) + + def forward_single(self, x, lvl_ind, return_pool=False): + """Forward feature of a single level. + + Args: + x (Tensor): Feature of a single level. + lvl_ind (int): Level index of current feature. + return_pool (bool): Return corner pool feature or not. + + Returns: + tuple[Tensor]: A tuple of CornerHead's output for current feature + level. Containing the following Tensors: + + - tl_heat (Tensor): Predicted top-left corner heatmap. + - br_heat (Tensor): Predicted bottom-right corner heatmap. + - tl_emb (Tensor | None): Predicted top-left embedding heatmap. + None for `self.with_corner_emb == False`. + - br_emb (Tensor | None): Predicted bottom-right embedding + heatmap. None for `self.with_corner_emb == False`. + - tl_off (Tensor): Predicted top-left offset heatmap. + - br_off (Tensor): Predicted bottom-right offset heatmap. + - tl_pool (Tensor): Top-left corner pool feature. Not must + have. + - br_pool (Tensor): Bottom-right corner pool feature. Not must + have. + """ + tl_pool = self.tl_pool[lvl_ind](x) + tl_heat = self.tl_heat[lvl_ind](tl_pool) + br_pool = self.br_pool[lvl_ind](x) + br_heat = self.br_heat[lvl_ind](br_pool) + + tl_emb, br_emb = None, None + if self.with_corner_emb: + tl_emb = self.tl_emb[lvl_ind](tl_pool) + br_emb = self.br_emb[lvl_ind](br_pool) + + tl_off = self.tl_off[lvl_ind](tl_pool) + br_off = self.br_off[lvl_ind](br_pool) + + result_list = [tl_heat, br_heat, tl_emb, br_emb, tl_off, br_off] + if return_pool: + result_list.append(tl_pool) + result_list.append(br_pool) + + return result_list + + def get_targets(self, + gt_bboxes, + gt_labels, + feat_shape, + img_shape, + with_corner_emb=False, + with_guiding_shift=False, + with_centripetal_shift=False): + """Generate corner targets. + + Including corner heatmap, corner offset. + + Optional: corner embedding, corner guiding shift, centripetal shift. + + For CornerNet, we generate corner heatmap, corner offset and corner + embedding from this function. + + For CentripetalNet, we generate corner heatmap, corner offset, guiding + shift and centripetal shift from this function. + + Args: + gt_bboxes (list[Tensor]): Ground truth bboxes of each image, each + has shape (num_gt, 4). + gt_labels (list[Tensor]): Ground truth labels of each box, each has + shape (num_gt,). + feat_shape (list[int]): Shape of output feature, + [batch, channel, height, width]. + img_shape (list[int]): Shape of input image, + [height, width, channel]. + with_corner_emb (bool): Generate corner embedding target or not. + Default: False. + with_guiding_shift (bool): Generate guiding shift target or not. + Default: False. + with_centripetal_shift (bool): Generate centripetal shift target or + not. Default: False. + + Returns: + dict: Ground truth of corner heatmap, corner offset, corner + embedding, guiding shift and centripetal shift. Containing the + following keys: + + - topleft_heatmap (Tensor): Ground truth top-left corner + heatmap. + - bottomright_heatmap (Tensor): Ground truth bottom-right + corner heatmap. + - topleft_offset (Tensor): Ground truth top-left corner offset. + - bottomright_offset (Tensor): Ground truth bottom-right corner + offset. + - corner_embedding (list[list[list[int]]]): Ground truth corner + embedding. Not must have. 
+ - topleft_guiding_shift (Tensor): Ground truth top-left corner + guiding shift. Not must have. + - bottomright_guiding_shift (Tensor): Ground truth bottom-right + corner guiding shift. Not must have. + - topleft_centripetal_shift (Tensor): Ground truth top-left + corner centripetal shift. Not must have. + - bottomright_centripetal_shift (Tensor): Ground truth + bottom-right corner centripetal shift. Not must have. + """ + batch_size, _, height, width = feat_shape + img_h, img_w = img_shape[:2] + + width_ratio = float(width / img_w) + height_ratio = float(height / img_h) + + gt_tl_heatmap = gt_bboxes[-1].new_zeros( + [batch_size, self.num_classes, height, width]) + gt_br_heatmap = gt_bboxes[-1].new_zeros( + [batch_size, self.num_classes, height, width]) + gt_tl_offset = gt_bboxes[-1].new_zeros([batch_size, 2, height, width]) + gt_br_offset = gt_bboxes[-1].new_zeros([batch_size, 2, height, width]) + + if with_corner_emb: + match = [] + + # Guiding shift is a kind of offset, from center to corner + if with_guiding_shift: + gt_tl_guiding_shift = gt_bboxes[-1].new_zeros( + [batch_size, 2, height, width]) + gt_br_guiding_shift = gt_bboxes[-1].new_zeros( + [batch_size, 2, height, width]) + # Centripetal shift is also a kind of offset, from center to corner + # and normalized by log. + if with_centripetal_shift: + gt_tl_centripetal_shift = gt_bboxes[-1].new_zeros( + [batch_size, 2, height, width]) + gt_br_centripetal_shift = gt_bboxes[-1].new_zeros( + [batch_size, 2, height, width]) + + for batch_id in range(batch_size): + # Ground truth of corner embedding per image is a list of coord set + corner_match = [] + for box_id in range(len(gt_labels[batch_id])): + left, top, right, bottom = gt_bboxes[batch_id][box_id] + center_x = (left + right) / 2.0 + center_y = (top + bottom) / 2.0 + label = gt_labels[batch_id][box_id] + + # Use coords in the feature level to generate ground truth + scale_left = left * width_ratio + scale_right = right * width_ratio + scale_top = top * height_ratio + scale_bottom = bottom * height_ratio + scale_center_x = center_x * width_ratio + scale_center_y = center_y * height_ratio + + # Int coords on feature map/ground truth tensor + left_idx = int(min(scale_left, width - 1)) + right_idx = int(min(scale_right, width - 1)) + top_idx = int(min(scale_top, height - 1)) + bottom_idx = int(min(scale_bottom, height - 1)) + + # Generate gaussian heatmap + scale_box_width = ceil(scale_right - scale_left) + scale_box_height = ceil(scale_bottom - scale_top) + radius = gaussian_radius((scale_box_height, scale_box_width), + min_overlap=0.3) + radius = max(0, int(radius)) + gt_tl_heatmap[batch_id, label] = gen_gaussian_target( + gt_tl_heatmap[batch_id, label], [left_idx, top_idx], + radius) + gt_br_heatmap[batch_id, label] = gen_gaussian_target( + gt_br_heatmap[batch_id, label], [right_idx, bottom_idx], + radius) + + # Generate corner offset + left_offset = scale_left - left_idx + top_offset = scale_top - top_idx + right_offset = scale_right - right_idx + bottom_offset = scale_bottom - bottom_idx + gt_tl_offset[batch_id, 0, top_idx, left_idx] = left_offset + gt_tl_offset[batch_id, 1, top_idx, left_idx] = top_offset + gt_br_offset[batch_id, 0, bottom_idx, right_idx] = right_offset + gt_br_offset[batch_id, 1, bottom_idx, + right_idx] = bottom_offset + + # Generate corner embedding + if with_corner_emb: + corner_match.append([[top_idx, left_idx], + [bottom_idx, right_idx]]) + # Generate guiding shift + if with_guiding_shift: + gt_tl_guiding_shift[batch_id, 0, top_idx, + left_idx] = 
scale_center_x - left_idx + gt_tl_guiding_shift[batch_id, 1, top_idx, + left_idx] = scale_center_y - top_idx + gt_br_guiding_shift[batch_id, 0, bottom_idx, + right_idx] = right_idx - scale_center_x + gt_br_guiding_shift[ + batch_id, 1, bottom_idx, + right_idx] = bottom_idx - scale_center_y + # Generate centripetal shift + if with_centripetal_shift: + gt_tl_centripetal_shift[batch_id, 0, top_idx, + left_idx] = log(scale_center_x - + scale_left) + gt_tl_centripetal_shift[batch_id, 1, top_idx, + left_idx] = log(scale_center_y - + scale_top) + gt_br_centripetal_shift[batch_id, 0, bottom_idx, + right_idx] = log(scale_right - + scale_center_x) + gt_br_centripetal_shift[batch_id, 1, bottom_idx, + right_idx] = log(scale_bottom - + scale_center_y) + + if with_corner_emb: + match.append(corner_match) + + target_result = dict( + topleft_heatmap=gt_tl_heatmap, + topleft_offset=gt_tl_offset, + bottomright_heatmap=gt_br_heatmap, + bottomright_offset=gt_br_offset) + + if with_corner_emb: + target_result.update(corner_embedding=match) + if with_guiding_shift: + target_result.update( + topleft_guiding_shift=gt_tl_guiding_shift, + bottomright_guiding_shift=gt_br_guiding_shift) + if with_centripetal_shift: + target_result.update( + topleft_centripetal_shift=gt_tl_centripetal_shift, + bottomright_centripetal_shift=gt_br_centripetal_shift) + + return target_result + + def loss(self, + tl_heats, + br_heats, + tl_embs, + br_embs, + tl_offs, + br_offs, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + tl_heats (list[Tensor]): Top-left corner heatmaps for each level + with shape (N, num_classes, H, W). + br_heats (list[Tensor]): Bottom-right corner heatmaps for each + level with shape (N, num_classes, H, W). + tl_embs (list[Tensor]): Top-left corner embeddings for each level + with shape (N, corner_emb_channels, H, W). + br_embs (list[Tensor]): Bottom-right corner embeddings for each + level with shape (N, corner_emb_channels, H, W). + tl_offs (list[Tensor]): Top-left corner offsets for each level + with shape (N, corner_offset_channels, H, W). + br_offs (list[Tensor]): Bottom-right corner offsets for each level + with shape (N, corner_offset_channels, H, W). + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [left, top, right, bottom] format. + gt_labels (list[Tensor]): Class indices corresponding to each box. + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (list[Tensor] | None): Specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. Containing the + following losses: + + - det_loss (list[Tensor]): Corner keypoint losses of all + feature levels. + - pull_loss (list[Tensor]): Part one of AssociativeEmbedding + losses of all feature levels. + - push_loss (list[Tensor]): Part two of AssociativeEmbedding + losses of all feature levels. + - off_loss (list[Tensor]): Corner offset losses of all feature + levels. 
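+
+        Example (illustrative only; the heatmap is invented): the focal-loss
+        normalizer counts real corners and is floored at one, e.g.
+            >>> import torch
+            >>> gt = torch.zeros(1, 80, 4, 4)
+            >>> gt[0, 3, 1, 2] = 1.
+            >>> gt[0, 5, 3, 0] = 1.
+            >>> max(1, gt.eq(1).sum())  # two real corners
+            tensor(2)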
+ """ + targets = self.get_targets( + gt_bboxes, + gt_labels, + tl_heats[-1].shape, + img_metas[0]['pad_shape'], + with_corner_emb=self.with_corner_emb) + mlvl_targets = [targets for _ in range(self.num_feat_levels)] + det_losses, pull_losses, push_losses, off_losses = multi_apply( + self.loss_single, tl_heats, br_heats, tl_embs, br_embs, tl_offs, + br_offs, mlvl_targets) + loss_dict = dict(det_loss=det_losses, off_loss=off_losses) + if self.with_corner_emb: + loss_dict.update(pull_loss=pull_losses, push_loss=push_losses) + return loss_dict + + def loss_single(self, tl_hmp, br_hmp, tl_emb, br_emb, tl_off, br_off, + targets): + """Compute losses for single level. + + Args: + tl_hmp (Tensor): Top-left corner heatmap for current level with + shape (N, num_classes, H, W). + br_hmp (Tensor): Bottom-right corner heatmap for current level with + shape (N, num_classes, H, W). + tl_emb (Tensor): Top-left corner embedding for current level with + shape (N, corner_emb_channels, H, W). + br_emb (Tensor): Bottom-right corner embedding for current level + with shape (N, corner_emb_channels, H, W). + tl_off (Tensor): Top-left corner offset for current level with + shape (N, corner_offset_channels, H, W). + br_off (Tensor): Bottom-right corner offset for current level with + shape (N, corner_offset_channels, H, W). + targets (dict): Corner target generated by `get_targets`. + + Returns: + tuple[torch.Tensor]: Losses of the head's differnet branches + containing the following losses: + + - det_loss (Tensor): Corner keypoint loss. + - pull_loss (Tensor): Part one of AssociativeEmbedding loss. + - push_loss (Tensor): Part two of AssociativeEmbedding loss. + - off_loss (Tensor): Corner offset loss. + """ + gt_tl_hmp = targets['topleft_heatmap'] + gt_br_hmp = targets['bottomright_heatmap'] + gt_tl_off = targets['topleft_offset'] + gt_br_off = targets['bottomright_offset'] + gt_embedding = targets['corner_embedding'] + + # Detection loss + tl_det_loss = self.loss_heatmap( + tl_hmp.sigmoid(), + gt_tl_hmp, + avg_factor=max(1, + gt_tl_hmp.eq(1).sum())) + br_det_loss = self.loss_heatmap( + br_hmp.sigmoid(), + gt_br_hmp, + avg_factor=max(1, + gt_br_hmp.eq(1).sum())) + det_loss = (tl_det_loss + br_det_loss) / 2.0 + + # AssociativeEmbedding loss + if self.with_corner_emb and self.loss_embedding is not None: + pull_loss, push_loss = self.loss_embedding(tl_emb, br_emb, + gt_embedding) + else: + pull_loss, push_loss = None, None + + # Offset loss + # We only compute the offset loss at the real corner position. + # The value of real corner would be 1 in heatmap ground truth. + # The mask is computed in class agnostic mode and its shape is + # batch * 1 * width * height. + tl_off_mask = gt_tl_hmp.eq(1).sum(1).gt(0).unsqueeze(1).type_as( + gt_tl_hmp) + br_off_mask = gt_br_hmp.eq(1).sum(1).gt(0).unsqueeze(1).type_as( + gt_br_hmp) + tl_off_loss = self.loss_offset( + tl_off, + gt_tl_off, + tl_off_mask, + avg_factor=max(1, tl_off_mask.sum())) + br_off_loss = self.loss_offset( + br_off, + gt_br_off, + br_off_mask, + avg_factor=max(1, br_off_mask.sum())) + + off_loss = (tl_off_loss + br_off_loss) / 2.0 + + return det_loss, pull_loss, push_loss, off_loss + + def get_bboxes(self, + tl_heats, + br_heats, + tl_embs, + br_embs, + tl_offs, + br_offs, + img_metas, + rescale=False, + with_nms=True): + """Transform network output for a batch into bbox predictions. + + Args: + tl_heats (list[Tensor]): Top-left corner heatmaps for each level + with shape (N, num_classes, H, W). 
+ br_heats (list[Tensor]): Bottom-right corner heatmaps for each + level with shape (N, num_classes, H, W). + tl_embs (list[Tensor]): Top-left corner embeddings for each level + with shape (N, corner_emb_channels, H, W). + br_embs (list[Tensor]): Bottom-right corner embeddings for each + level with shape (N, corner_emb_channels, H, W). + tl_offs (list[Tensor]): Top-left corner offsets for each level + with shape (N, corner_offset_channels, H, W). + br_offs (list[Tensor]): Bottom-right corner offsets for each level + with shape (N, corner_offset_channels, H, W). + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + """ + assert tl_heats[-1].shape[0] == br_heats[-1].shape[0] == len(img_metas) + result_list = [] + for img_id in range(len(img_metas)): + result_list.append( + self._get_bboxes_single( + tl_heats[-1][img_id:img_id + 1, :], + br_heats[-1][img_id:img_id + 1, :], + tl_offs[-1][img_id:img_id + 1, :], + br_offs[-1][img_id:img_id + 1, :], + img_metas[img_id], + tl_emb=tl_embs[-1][img_id:img_id + 1, :], + br_emb=br_embs[-1][img_id:img_id + 1, :], + rescale=rescale, + with_nms=with_nms)) + + if torch.onnx.is_in_onnx_export(): + assert len( + img_metas + ) == 1, 'Only support one input image while in exporting to ONNX' + + detections, labels = result_list[0] + # batch_size 1 here, [1, num_det, 5], [1, num_det] + return detections.unsqueeze(0), labels.unsqueeze(0) + + return result_list + + def _get_bboxes_single(self, + tl_heat, + br_heat, + tl_off, + br_off, + img_meta, + tl_emb=None, + br_emb=None, + tl_centripetal_shift=None, + br_centripetal_shift=None, + rescale=False, + with_nms=True): + """Transform outputs for a single batch item into bbox predictions. + + Args: + tl_heat (Tensor): Top-left corner heatmap for current level with + shape (N, num_classes, H, W). + br_heat (Tensor): Bottom-right corner heatmap for current level + with shape (N, num_classes, H, W). + tl_off (Tensor): Top-left corner offset for current level with + shape (N, corner_offset_channels, H, W). + br_off (Tensor): Bottom-right corner offset for current level with + shape (N, corner_offset_channels, H, W). + img_meta (dict): Meta information of current image, e.g., + image size, scaling factor, etc. + tl_emb (Tensor): Top-left corner embedding for current level with + shape (N, corner_emb_channels, H, W). + br_emb (Tensor): Bottom-right corner embedding for current level + with shape (N, corner_emb_channels, H, W). + tl_centripetal_shift: Top-left corner's centripetal shift for + current level with shape (N, 2, H, W). + br_centripetal_shift: Bottom-right corner's centripetal shift for + current level with shape (N, 2, H, W). + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. 
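+
+        Example (sketch of the `sort` idiom used below; values invented):
+            >>> import torch
+            >>> scores = torch.tensor([[0.2], [0.9], [0.5]])
+            >>> scores, idx = scores.sort(dim=0, descending=True)
+            >>> idx.view(-1)
+            tensor([1, 2, 0])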
+ """ + if isinstance(img_meta, (list, tuple)): + img_meta = img_meta[0] + + batch_bboxes, batch_scores, batch_clses = self.decode_heatmap( + tl_heat=tl_heat.sigmoid(), + br_heat=br_heat.sigmoid(), + tl_off=tl_off, + br_off=br_off, + tl_emb=tl_emb, + br_emb=br_emb, + tl_centripetal_shift=tl_centripetal_shift, + br_centripetal_shift=br_centripetal_shift, + img_meta=img_meta, + k=self.test_cfg.corner_topk, + kernel=self.test_cfg.local_maximum_kernel, + distance_threshold=self.test_cfg.distance_threshold) + + if rescale: + batch_bboxes /= batch_bboxes.new_tensor(img_meta['scale_factor']) + + bboxes = batch_bboxes.view([-1, 4]) + scores = batch_scores.view([-1, 1]) + clses = batch_clses.view([-1, 1]) + + # use `sort` instead of `argsort` here, since currently exporting + # `argsort` to ONNX opset version 11 is not supported + scores, idx = scores.sort(dim=0, descending=True) + bboxes = bboxes[idx].view([-1, 4]) + scores = scores.view(-1) + clses = clses[idx].view(-1) + + detections = torch.cat([bboxes, scores.unsqueeze(-1)], -1) + keepinds = (detections[:, -1] > -0.1) + detections = detections[keepinds] + labels = clses[keepinds] + + if with_nms: + detections, labels = self._bboxes_nms(detections, labels, + self.test_cfg) + + return detections, labels + + def _bboxes_nms(self, bboxes, labels, cfg): + if labels.numel() == 0: + return bboxes, labels + + if 'nms_cfg' in cfg: + warning.warn('nms_cfg in test_cfg will be deprecated. ' + 'Please rename it as nms') + if 'nms' not in cfg: + cfg.nms = cfg.nms_cfg + + out_bboxes, keep = batched_nms(bboxes[:, :4], bboxes[:, -1], labels, + cfg.nms) + out_labels = labels[keep] + + if len(out_bboxes) > 0: + # use `sort` to replace with `argsort` here + _, idx = torch.sort(out_bboxes[:, -1], descending=True) + max_per_img = out_bboxes.new_tensor(cfg.max_per_img).to(torch.long) + nms_after = max_per_img + if torch.onnx.is_in_onnx_export(): + # Always keep topk op for dynamic input in onnx + from mmdet.core.export import get_k_for_topk + nms_after = get_k_for_topk(max_per_img, out_bboxes.shape[0]) + idx = idx[:nms_after] + out_bboxes = out_bboxes[idx] + out_labels = out_labels[idx] + + return out_bboxes, out_labels + + def decode_heatmap(self, + tl_heat, + br_heat, + tl_off, + br_off, + tl_emb=None, + br_emb=None, + tl_centripetal_shift=None, + br_centripetal_shift=None, + img_meta=None, + k=100, + kernel=3, + distance_threshold=0.5, + num_dets=1000): + """Transform outputs for a single batch item into raw bbox predictions. + + Args: + tl_heat (Tensor): Top-left corner heatmap for current level with + shape (N, num_classes, H, W). + br_heat (Tensor): Bottom-right corner heatmap for current level + with shape (N, num_classes, H, W). + tl_off (Tensor): Top-left corner offset for current level with + shape (N, corner_offset_channels, H, W). + br_off (Tensor): Bottom-right corner offset for current level with + shape (N, corner_offset_channels, H, W). + tl_emb (Tensor | None): Top-left corner embedding for current + level with shape (N, corner_emb_channels, H, W). + br_emb (Tensor | None): Bottom-right corner embedding for current + level with shape (N, corner_emb_channels, H, W). + tl_centripetal_shift (Tensor | None): Top-left centripetal shift + for current level with shape (N, 2, H, W). + br_centripetal_shift (Tensor | None): Bottom-right centripetal + shift for current level with shape (N, 2, H, W). + img_meta (dict): Meta information of current image, e.g., + image size, scaling factor, etc. + k (int): Get top k corner keypoints from heatmap. 
+ kernel (int): Max pooling kernel for extract local maximum pixels. + distance_threshold (float): Distance threshold. Top-left and + bottom-right corner keypoints with feature distance less than + the threshold will be regarded as keypoints from same object. + num_dets (int): Num of raw boxes before doing nms. + + Returns: + tuple[torch.Tensor]: Decoded output of CornerHead, containing the + following Tensors: + + - bboxes (Tensor): Coords of each box. + - scores (Tensor): Scores of each box. + - clses (Tensor): Categories of each box. + """ + with_embedding = tl_emb is not None and br_emb is not None + with_centripetal_shift = ( + tl_centripetal_shift is not None + and br_centripetal_shift is not None) + assert with_embedding + with_centripetal_shift == 1 + batch, _, height, width = tl_heat.size() + if torch.onnx.is_in_onnx_export(): + inp_h, inp_w = img_meta['pad_shape_for_onnx'][:2] + else: + inp_h, inp_w, _ = img_meta['pad_shape'] + + # perform nms on heatmaps + tl_heat = get_local_maximum(tl_heat, kernel=kernel) + br_heat = get_local_maximum(br_heat, kernel=kernel) + + tl_scores, tl_inds, tl_clses, tl_ys, tl_xs = get_topk_from_heatmap( + tl_heat, k=k) + br_scores, br_inds, br_clses, br_ys, br_xs = get_topk_from_heatmap( + br_heat, k=k) + + # We use repeat instead of expand here because expand is a + # shallow-copy function. Thus it could cause unexpected testing result + # sometimes. Using expand will decrease about 10% mAP during testing + # compared to repeat. + tl_ys = tl_ys.view(batch, k, 1).repeat(1, 1, k) + tl_xs = tl_xs.view(batch, k, 1).repeat(1, 1, k) + br_ys = br_ys.view(batch, 1, k).repeat(1, k, 1) + br_xs = br_xs.view(batch, 1, k).repeat(1, k, 1) + + tl_off = transpose_and_gather_feat(tl_off, tl_inds) + tl_off = tl_off.view(batch, k, 1, 2) + br_off = transpose_and_gather_feat(br_off, br_inds) + br_off = br_off.view(batch, 1, k, 2) + + tl_xs = tl_xs + tl_off[..., 0] + tl_ys = tl_ys + tl_off[..., 1] + br_xs = br_xs + br_off[..., 0] + br_ys = br_ys + br_off[..., 1] + + if with_centripetal_shift: + tl_centripetal_shift = transpose_and_gather_feat( + tl_centripetal_shift, tl_inds).view(batch, k, 1, 2).exp() + br_centripetal_shift = transpose_and_gather_feat( + br_centripetal_shift, br_inds).view(batch, 1, k, 2).exp() + + tl_ctxs = tl_xs + tl_centripetal_shift[..., 0] + tl_ctys = tl_ys + tl_centripetal_shift[..., 1] + br_ctxs = br_xs - br_centripetal_shift[..., 0] + br_ctys = br_ys - br_centripetal_shift[..., 1] + + # all possible boxes based on top k corners (ignoring class) + tl_xs *= (inp_w / width) + tl_ys *= (inp_h / height) + br_xs *= (inp_w / width) + br_ys *= (inp_h / height) + + if with_centripetal_shift: + tl_ctxs *= (inp_w / width) + tl_ctys *= (inp_h / height) + br_ctxs *= (inp_w / width) + br_ctys *= (inp_h / height) + + x_off, y_off = 0, 0 # no crop + if not torch.onnx.is_in_onnx_export(): + # since `RandomCenterCropPad` is done on CPU with numpy and it's + # not dynamic traceable when exporting to ONNX, thus 'border' + # does not appears as key in 'img_meta'. As a tmp solution, + # we move this 'border' handle part to the postprocess after + # finished exporting to ONNX, which is handle in + # `mmdet/core/export/model_wrappers.py`. Though difference between + # pytorch and exported onnx model, it might be ignored since + # comparable performance is achieved between them (e.g. 
40.4 vs + # 40.6 on COCO val2017, for CornerNet without test-time flip) + if 'border' in img_meta: + x_off = img_meta['border'][2] + y_off = img_meta['border'][0] + + tl_xs -= x_off + tl_ys -= y_off + br_xs -= x_off + br_ys -= y_off + + zeros = tl_xs.new_zeros(*tl_xs.size()) + tl_xs = torch.where(tl_xs > 0.0, tl_xs, zeros) + tl_ys = torch.where(tl_ys > 0.0, tl_ys, zeros) + br_xs = torch.where(br_xs > 0.0, br_xs, zeros) + br_ys = torch.where(br_ys > 0.0, br_ys, zeros) + + bboxes = torch.stack((tl_xs, tl_ys, br_xs, br_ys), dim=3) + area_bboxes = ((br_xs - tl_xs) * (br_ys - tl_ys)).abs() + + if with_centripetal_shift: + tl_ctxs -= x_off + tl_ctys -= y_off + br_ctxs -= x_off + br_ctys -= y_off + + tl_ctxs *= tl_ctxs.gt(0.0).type_as(tl_ctxs) + tl_ctys *= tl_ctys.gt(0.0).type_as(tl_ctys) + br_ctxs *= br_ctxs.gt(0.0).type_as(br_ctxs) + br_ctys *= br_ctys.gt(0.0).type_as(br_ctys) + + ct_bboxes = torch.stack((tl_ctxs, tl_ctys, br_ctxs, br_ctys), + dim=3) + area_ct_bboxes = ((br_ctxs - tl_ctxs) * (br_ctys - tl_ctys)).abs() + + rcentral = torch.zeros_like(ct_bboxes) + # magic nums from paper section 4.1 + mu = torch.ones_like(area_bboxes) / 2.4 + mu[area_bboxes > 3500] = 1 / 2.1 # large bbox have smaller mu + + bboxes_center_x = (bboxes[..., 0] + bboxes[..., 2]) / 2 + bboxes_center_y = (bboxes[..., 1] + bboxes[..., 3]) / 2 + rcentral[..., 0] = bboxes_center_x - mu * (bboxes[..., 2] - + bboxes[..., 0]) / 2 + rcentral[..., 1] = bboxes_center_y - mu * (bboxes[..., 3] - + bboxes[..., 1]) / 2 + rcentral[..., 2] = bboxes_center_x + mu * (bboxes[..., 2] - + bboxes[..., 0]) / 2 + rcentral[..., 3] = bboxes_center_y + mu * (bboxes[..., 3] - + bboxes[..., 1]) / 2 + area_rcentral = ((rcentral[..., 2] - rcentral[..., 0]) * + (rcentral[..., 3] - rcentral[..., 1])).abs() + dists = area_ct_bboxes / area_rcentral + + tl_ctx_inds = (ct_bboxes[..., 0] <= rcentral[..., 0]) | ( + ct_bboxes[..., 0] >= rcentral[..., 2]) + tl_cty_inds = (ct_bboxes[..., 1] <= rcentral[..., 1]) | ( + ct_bboxes[..., 1] >= rcentral[..., 3]) + br_ctx_inds = (ct_bboxes[..., 2] <= rcentral[..., 0]) | ( + ct_bboxes[..., 2] >= rcentral[..., 2]) + br_cty_inds = (ct_bboxes[..., 3] <= rcentral[..., 1]) | ( + ct_bboxes[..., 3] >= rcentral[..., 3]) + + if with_embedding: + tl_emb = transpose_and_gather_feat(tl_emb, tl_inds) + tl_emb = tl_emb.view(batch, k, 1) + br_emb = transpose_and_gather_feat(br_emb, br_inds) + br_emb = br_emb.view(batch, 1, k) + dists = torch.abs(tl_emb - br_emb) + + tl_scores = tl_scores.view(batch, k, 1).repeat(1, 1, k) + br_scores = br_scores.view(batch, 1, k).repeat(1, k, 1) + + scores = (tl_scores + br_scores) / 2 # scores for all possible boxes + + # tl and br should have same class + tl_clses = tl_clses.view(batch, k, 1).repeat(1, 1, k) + br_clses = br_clses.view(batch, 1, k).repeat(1, k, 1) + cls_inds = (tl_clses != br_clses) + + # reject boxes based on distances + dist_inds = dists > distance_threshold + + # reject boxes based on widths and heights + width_inds = (br_xs <= tl_xs) + height_inds = (br_ys <= tl_ys) + + # No use `scores[cls_inds]`, instead we use `torch.where` here. + # Since only 1-D indices with type 'tensor(bool)' are supported + # when exporting to ONNX, any other bool indices with more dimensions + # (e.g. 
2-D bool tensor) as input parameter in node is invalid + negative_scores = -1 * torch.ones_like(scores) + scores = torch.where(cls_inds, negative_scores, scores) + scores = torch.where(width_inds, negative_scores, scores) + scores = torch.where(height_inds, negative_scores, scores) + scores = torch.where(dist_inds, negative_scores, scores) + + if with_centripetal_shift: + scores[tl_ctx_inds] = -1 + scores[tl_cty_inds] = -1 + scores[br_ctx_inds] = -1 + scores[br_cty_inds] = -1 + + scores = scores.view(batch, -1) + scores, inds = torch.topk(scores, num_dets) + scores = scores.unsqueeze(2) + + bboxes = bboxes.view(batch, -1, 4) + bboxes = gather_feat(bboxes, inds) + + clses = tl_clses.contiguous().view(batch, -1, 1) + clses = gather_feat(clses, inds).float() + + return bboxes, scores, clses diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/deformable_detr_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/deformable_detr_head.py new file mode 100644 index 0000000000000000000000000000000000000000..a7d4332c0e84a4880bae30273b8331807135afad --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/deformable_detr_head.py @@ -0,0 +1,317 @@ +import copy + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import Linear, bias_init_with_prob, constant_init +from mmcv.runner import force_fp32 + +from mmdet.core import multi_apply +from mmdet.models.utils.transformer import inverse_sigmoid +from ..builder import HEADS +from .detr_head import DETRHead + + +@HEADS.register_module() +class DeformableDETRHead(DETRHead): + """Head of DeformDETR: Deformable DETR: Deformable Transformers for End-to- + End Object Detection. + + Code is modified from the `official github repo + `_. + + More details can be found in the `paper + `_ . + + Args: + with_box_refine (bool): Whether to refine the reference points + in the decoder. Defaults to False. + as_two_stage (bool) : Whether to generate the proposal from + the outputs of encoder. + transformer (obj:`ConfigDict`): ConfigDict is used for building + the Encoder and Decoder. + """ + + def __init__(self, + *args, + with_box_refine=False, + as_two_stage=False, + transformer=None, + **kwargs): + self.with_box_refine = with_box_refine + self.as_two_stage = as_two_stage + if self.as_two_stage: + transformer['as_two_stage'] = self.as_two_stage + + super(DeformableDETRHead, self).__init__( + *args, transformer=transformer, **kwargs) + + def _init_layers(self): + """Initialize classification branch and regression branch of head.""" + + fc_cls = Linear(self.embed_dims, self.cls_out_channels) + reg_branch = [] + for _ in range(self.num_reg_fcs): + reg_branch.append(Linear(self.embed_dims, self.embed_dims)) + reg_branch.append(nn.ReLU()) + reg_branch.append(Linear(self.embed_dims, 4)) + reg_branch = nn.Sequential(*reg_branch) + + def _get_clones(module, N): + return nn.ModuleList([copy.deepcopy(module) for i in range(N)]) + + # last reg_branch is used to generate proposal from + # encode feature map when as_two_stage is True. 
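+        # For intuition (assuming the common 6-layer decoder): this gives
+        # num_pred = 7 in the two-stage setting (6 decoder layers plus one
+        # branch for the encoder proposals) and num_pred = 6 otherwise.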
+ num_pred = (self.transformer.decoder.num_layers + 1) if \ + self.as_two_stage else self.transformer.decoder.num_layers + + if self.with_box_refine: + self.cls_branches = _get_clones(fc_cls, num_pred) + self.reg_branches = _get_clones(reg_branch, num_pred) + else: + + self.cls_branches = nn.ModuleList( + [fc_cls for _ in range(num_pred)]) + self.reg_branches = nn.ModuleList( + [reg_branch for _ in range(num_pred)]) + + if not self.as_two_stage: + self.query_embedding = nn.Embedding(self.num_query, + self.embed_dims * 2) + + def init_weights(self): + """Initialize weights of the DeformDETR head.""" + self.transformer.init_weights() + if self.loss_cls.use_sigmoid: + bias_init = bias_init_with_prob(0.01) + for m in self.cls_branches: + nn.init.constant_(m.bias, bias_init) + for m in self.reg_branches: + constant_init(m[-1], 0, bias=0) + nn.init.constant_(self.reg_branches[0][-1].bias.data[2:], -2.0) + if self.as_two_stage: + for m in self.reg_branches: + nn.init.constant_(m[-1].bias.data[2:], 0.0) + + def forward(self, mlvl_feats, img_metas): + """Forward function. + + Args: + mlvl_feats (tuple[Tensor]): Features from the upstream + network, each is a 4D-tensor with shape + (N, C, H, W). + img_metas (list[dict]): List of image information. + + Returns: + all_cls_scores (Tensor): Outputs from the classification head, \ + shape [nb_dec, bs, num_query, cls_out_channels]. Note \ + cls_out_channels should includes background. + all_bbox_preds (Tensor): Sigmoid outputs from the regression \ + head with normalized coordinate format (cx, cy, w, h). \ + Shape [nb_dec, bs, num_query, 4]. + enc_outputs_class (Tensor): The score of each point on encode \ + feature map, has shape (N, h*w, num_class). Only when \ + as_two_stage is True it would be returned, otherwise \ + `None` would be returned. + enc_outputs_coord (Tensor): The proposal generate from the \ + encode feature map, has shape (N, h*w, 4). Only when \ + as_two_stage is True it would be returned, otherwise \ + `None` would be returned. 
+ """ + + batch_size = mlvl_feats[0].size(0) + input_img_h, input_img_w = img_metas[0]['batch_input_shape'] + img_masks = mlvl_feats[0].new_ones( + (batch_size, input_img_h, input_img_w)) + for img_id in range(batch_size): + img_h, img_w, _ = img_metas[img_id]['img_shape'] + img_masks[img_id, :img_h, :img_w] = 0 + + mlvl_masks = [] + mlvl_positional_encodings = [] + for feat in mlvl_feats: + mlvl_masks.append( + F.interpolate(img_masks[None], + size=feat.shape[-2:]).to(torch.bool).squeeze(0)) + mlvl_positional_encodings.append( + self.positional_encoding(mlvl_masks[-1])) + + query_embeds = None + if not self.as_two_stage: + query_embeds = self.query_embedding.weight + hs, init_reference, inter_references, \ + enc_outputs_class, enc_outputs_coord = self.transformer( + mlvl_feats, + mlvl_masks, + query_embeds, + mlvl_positional_encodings, + reg_branches=self.reg_branches if self.with_box_refine else None, # noqa:E501 + cls_branches=self.cls_branches if self.as_two_stage else None # noqa:E501 + ) + hs = hs.permute(0, 2, 1, 3) + outputs_classes = [] + outputs_coords = [] + + for lvl in range(hs.shape[0]): + if lvl == 0: + reference = init_reference + else: + reference = inter_references[lvl - 1] + reference = inverse_sigmoid(reference) + outputs_class = self.cls_branches[lvl](hs[lvl]) + tmp = self.reg_branches[lvl](hs[lvl]) + if reference.shape[-1] == 4: + tmp += reference + else: + assert reference.shape[-1] == 2 + tmp[..., :2] += reference + outputs_coord = tmp.sigmoid() + outputs_classes.append(outputs_class) + outputs_coords.append(outputs_coord) + + outputs_classes = torch.stack(outputs_classes) + outputs_coords = torch.stack(outputs_coords) + if self.as_two_stage: + return outputs_classes, outputs_coords, \ + enc_outputs_class, \ + enc_outputs_coord.sigmoid() + else: + return outputs_classes, outputs_coords, \ + None, None + + @force_fp32(apply_to=('all_cls_scores_list', 'all_bbox_preds_list')) + def loss(self, + all_cls_scores, + all_bbox_preds, + enc_cls_scores, + enc_bbox_preds, + gt_bboxes_list, + gt_labels_list, + img_metas, + gt_bboxes_ignore=None): + """"Loss function. + + Args: + all_cls_scores (Tensor): Classification score of all + decoder layers, has shape + [nb_dec, bs, num_query, cls_out_channels]. + all_bbox_preds (Tensor): Sigmoid regression + outputs of all decode layers. Each is a 4D-tensor with + normalized coordinate format (cx, cy, w, h) and shape + [nb_dec, bs, num_query, 4]. + enc_cls_scores (Tensor): Classification scores of + points on encode feature map , has shape + (N, h*w, num_classes). Only be passed when as_two_stage is + True, otherwise is None. + enc_bbox_preds (Tensor): Regression results of each points + on the encode feature map, has shape (N, h*w, 4). Only be + passed when as_two_stage is True, otherwise is None. + gt_bboxes_list (list[Tensor]): Ground truth bboxes for each image + with shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels_list (list[Tensor]): Ground truth class indices for each + image with shape (num_gts, ). + img_metas (list[dict]): List of image meta information. + gt_bboxes_ignore (list[Tensor], optional): Bounding boxes + which can be ignored for each image. Default None. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + assert gt_bboxes_ignore is None, \ + f'{self.__class__.__name__} only supports ' \ + f'for gt_bboxes_ignore setting to None.' 
+ + num_dec_layers = len(all_cls_scores) + all_gt_bboxes_list = [gt_bboxes_list for _ in range(num_dec_layers)] + all_gt_labels_list = [gt_labels_list for _ in range(num_dec_layers)] + all_gt_bboxes_ignore_list = [ + gt_bboxes_ignore for _ in range(num_dec_layers) + ] + img_metas_list = [img_metas for _ in range(num_dec_layers)] + + losses_cls, losses_bbox, losses_iou = multi_apply( + self.loss_single, all_cls_scores, all_bbox_preds, + all_gt_bboxes_list, all_gt_labels_list, img_metas_list, + all_gt_bboxes_ignore_list) + + loss_dict = dict() + # loss of proposal generated from encode feature map. + if enc_cls_scores is not None: + binary_labels_list = [ + torch.zeros_like(gt_labels_list[i]) + for i in range(len(img_metas)) + ] + enc_loss_cls, enc_losses_bbox, enc_losses_iou = \ + self.loss_single(enc_cls_scores, enc_bbox_preds, + gt_bboxes_list, binary_labels_list, + img_metas, gt_bboxes_ignore) + loss_dict['enc_loss_cls'] = enc_loss_cls + loss_dict['enc_loss_bbox'] = enc_losses_bbox + loss_dict['enc_loss_iou'] = enc_losses_iou + + # loss from the last decoder layer + loss_dict['loss_cls'] = losses_cls[-1] + loss_dict['loss_bbox'] = losses_bbox[-1] + loss_dict['loss_iou'] = losses_iou[-1] + # loss from other decoder layers + num_dec_layer = 0 + for loss_cls_i, loss_bbox_i, loss_iou_i in zip(losses_cls[:-1], + losses_bbox[:-1], + losses_iou[:-1]): + loss_dict[f'd{num_dec_layer}.loss_cls'] = loss_cls_i + loss_dict[f'd{num_dec_layer}.loss_bbox'] = loss_bbox_i + loss_dict[f'd{num_dec_layer}.loss_iou'] = loss_iou_i + num_dec_layer += 1 + return loss_dict + + @force_fp32(apply_to=('all_cls_scores_list', 'all_bbox_preds_list')) + def get_bboxes(self, + all_cls_scores, + all_bbox_preds, + enc_cls_scores, + enc_bbox_preds, + img_metas, + rescale=False): + """Transform network outputs for a batch into bbox predictions. + + Args: + all_cls_scores (Tensor): Classification score of all + decoder layers, has shape + [nb_dec, bs, num_query, cls_out_channels]. + all_bbox_preds (Tensor): Sigmoid regression + outputs of all decode layers. Each is a 4D-tensor with + normalized coordinate format (cx, cy, w, h) and shape + [nb_dec, bs, num_query, 4]. + enc_cls_scores (Tensor): Classification scores of + points on encode feature map , has shape + (N, h*w, num_classes). Only be passed when as_two_stage is + True, otherwise is None. + enc_bbox_preds (Tensor): Regression results of each points + on the encode feature map, has shape (N, h*w, 4). Only be + passed when as_two_stage is True, otherwise is None. + img_metas (list[dict]): Meta information of each image. + rescale (bool, optional): If True, return boxes in original + image space. Default False. + + Returns: + list[list[Tensor, Tensor]]: Each item in result_list is 2-tuple. \ + The first item is an (n, 5) tensor, where the first 4 columns \ + are bounding box positions (tl_x, tl_y, br_x, br_y) and the \ + 5-th column is a score between 0 and 1. The second item is a \ + (n,) tensor where each item is the predicted class label of \ + the corresponding box. 
+ """ + cls_scores = all_cls_scores[-1] + bbox_preds = all_bbox_preds[-1] + + result_list = [] + for img_id in range(len(img_metas)): + cls_score = cls_scores[img_id] + bbox_pred = bbox_preds[img_id] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + proposals = self._get_bboxes_single(cls_score, bbox_pred, + img_shape, scale_factor, + rescale) + result_list.append(proposals) + return result_list diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/dense_test_mixins.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/dense_test_mixins.py new file mode 100644 index 0000000000000000000000000000000000000000..7f136a4aecd81028507b366e23d81b6454ca8f84 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/dense_test_mixins.py @@ -0,0 +1,200 @@ +import sys +from inspect import signature + +import torch + +from mmdet.core import bbox_mapping_back, merge_aug_proposals, multiclass_nms + +if sys.version_info >= (3, 7): + from mmdet.utils.contextmanagers import completed + + +class BBoxTestMixin(object): + """Mixin class for testing det bboxes via DenseHead.""" + + def simple_test_bboxes(self, feats, img_metas, rescale=False): + """Test det bboxes without test-time augmentation, can be applied in + DenseHead except for ``RPNHead`` and its variants, e.g., ``GARPNHead``, + etc. + + Args: + feats (tuple[torch.Tensor]): Multi-level features from the + upstream network, each is a 4D-tensor. + img_metas (list[dict]): List of image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is ``bboxes`` with shape (n, 5), + where 5 represent (tl_x, tl_y, br_x, br_y, score). + The shape of the second tensor in the tuple is ``labels`` + with shape (n,) + """ + outs = self.forward(feats) + results_list = self.get_bboxes(*outs, img_metas, rescale=rescale) + return results_list + + def aug_test_bboxes(self, feats, img_metas, rescale=False): + """Test det bboxes with test time augmentation, can be applied in + DenseHead except for ``RPNHead`` and its variants, e.g., ``GARPNHead``, + etc. + + Args: + feats (list[Tensor]): the outer list indicates test-time + augmentations and inner Tensor should have a shape NxCxHxW, + which contains features for all images in the batch. + img_metas (list[list[dict]]): the outer list indicates test-time + augs (multiscale, flip, etc.) and the inner list indicates + images in a batch. each dict has image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is ``bboxes`` with shape (n, 5), + where 5 represent (tl_x, tl_y, br_x, br_y, score). + The shape of the second tensor in the tuple is ``labels`` + with shape (n,). The length of list should always be 1. 
+ """ + # check with_nms argument + gb_sig = signature(self.get_bboxes) + gb_args = [p.name for p in gb_sig.parameters.values()] + if hasattr(self, '_get_bboxes'): + gbs_sig = signature(self._get_bboxes) + else: + gbs_sig = signature(self._get_bboxes_single) + gbs_args = [p.name for p in gbs_sig.parameters.values()] + assert ('with_nms' in gb_args) and ('with_nms' in gbs_args), \ + f'{self.__class__.__name__}' \ + ' does not support test-time augmentation' + + aug_bboxes = [] + aug_scores = [] + aug_factors = [] # score_factors for NMS + for x, img_meta in zip(feats, img_metas): + # only one image in the batch + outs = self.forward(x) + bbox_inputs = outs + (img_meta, self.test_cfg, False, False) + bbox_outputs = self.get_bboxes(*bbox_inputs)[0] + aug_bboxes.append(bbox_outputs[0]) + aug_scores.append(bbox_outputs[1]) + # bbox_outputs of some detectors (e.g., ATSS, FCOS, YOLOv3) + # contains additional element to adjust scores before NMS + if len(bbox_outputs) >= 3: + aug_factors.append(bbox_outputs[2]) + + # after merging, bboxes will be rescaled to the original image size + merged_bboxes, merged_scores = self.merge_aug_bboxes( + aug_bboxes, aug_scores, img_metas) + merged_factors = torch.cat(aug_factors, dim=0) if aug_factors else None + det_bboxes, det_labels = multiclass_nms( + merged_bboxes, + merged_scores, + self.test_cfg.score_thr, + self.test_cfg.nms, + self.test_cfg.max_per_img, + score_factors=merged_factors) + + if rescale: + _det_bboxes = det_bboxes + else: + _det_bboxes = det_bboxes.clone() + _det_bboxes[:, :4] *= det_bboxes.new_tensor( + img_metas[0][0]['scale_factor']) + + return [ + (_det_bboxes, det_labels), + ] + + def simple_test_rpn(self, x, img_metas): + """Test without augmentation, only for ``RPNHead`` and its variants, + e.g., ``GARPNHead``, etc. + + Args: + x (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + img_metas (list[dict]): Meta info of each image. + + Returns: + list[Tensor]: Proposals of each image, each item has shape (n, 5), + where 5 represent (tl_x, tl_y, br_x, br_y, score). + """ + rpn_outs = self(x) + proposal_list = self.get_bboxes(*rpn_outs, img_metas) + return proposal_list + + def aug_test_rpn(self, feats, img_metas): + """Test with augmentation for only for ``RPNHead`` and its variants, + e.g., ``GARPNHead``, etc. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + img_metas (list[dict]): Meta info of each image. + + Returns: + list[Tensor]: Proposals of each image, each item has shape (n, 5), + where 5 represent (tl_x, tl_y, br_x, br_y, score). 
+ """ + samples_per_gpu = len(img_metas[0]) + aug_proposals = [[] for _ in range(samples_per_gpu)] + for x, img_meta in zip(feats, img_metas): + proposal_list = self.simple_test_rpn(x, img_meta) + for i, proposals in enumerate(proposal_list): + aug_proposals[i].append(proposals) + # reorganize the order of 'img_metas' to match the dimensions + # of 'aug_proposals' + aug_img_metas = [] + for i in range(samples_per_gpu): + aug_img_meta = [] + for j in range(len(img_metas)): + aug_img_meta.append(img_metas[j][i]) + aug_img_metas.append(aug_img_meta) + # after merging, proposals will be rescaled to the original image size + merged_proposals = [ + merge_aug_proposals(proposals, aug_img_meta, self.test_cfg) + for proposals, aug_img_meta in zip(aug_proposals, aug_img_metas) + ] + return merged_proposals + + if sys.version_info >= (3, 7): + + async def async_simple_test_rpn(self, x, img_metas): + sleep_interval = self.test_cfg.pop('async_sleep_interval', 0.025) + async with completed( + __name__, 'rpn_head_forward', + sleep_interval=sleep_interval): + rpn_outs = self(x) + + proposal_list = self.get_bboxes(*rpn_outs, img_metas) + return proposal_list + + def merge_aug_bboxes(self, aug_bboxes, aug_scores, img_metas): + """Merge augmented detection bboxes and scores. + + Args: + aug_bboxes (list[Tensor]): shape (n, 4*#class) + aug_scores (list[Tensor] or None): shape (n, #class) + img_shapes (list[Tensor]): shape (3, ). + + Returns: + tuple[Tensor]: ``bboxes`` with shape (n,4), where + 4 represent (tl_x, tl_y, br_x, br_y) + and ``scores`` with shape (n,). + """ + recovered_bboxes = [] + for bboxes, img_info in zip(aug_bboxes, img_metas): + img_shape = img_info[0]['img_shape'] + scale_factor = img_info[0]['scale_factor'] + flip = img_info[0]['flip'] + flip_direction = img_info[0]['flip_direction'] + bboxes = bbox_mapping_back(bboxes, img_shape, scale_factor, flip, + flip_direction) + recovered_bboxes.append(bboxes) + bboxes = torch.cat(recovered_bboxes, dim=0) + if aug_scores is None: + return bboxes + else: + scores = torch.cat(aug_scores, dim=0) + return bboxes, scores diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/detr_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/detr_head.py new file mode 100644 index 0000000000000000000000000000000000000000..08a2c8246fa4fe14b3ac4f4a42744b1aa4100117 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/detr_head.py @@ -0,0 +1,843 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import Conv2d, Linear, build_activation_layer +from mmcv.cnn.bricks.transformer import FFN, build_positional_encoding +from mmcv.runner import force_fp32 + +from mmdet.core import (bbox_cxcywh_to_xyxy, bbox_xyxy_to_cxcywh, + build_assigner, build_sampler, multi_apply, + reduce_mean) +from mmdet.models.utils import build_transformer +from ..builder import HEADS, build_loss +from .anchor_free_head import AnchorFreeHead + + +@HEADS.register_module() +class DETRHead(AnchorFreeHead): + """Implements the DETR transformer head. + + See `paper: End-to-End Object Detection with Transformers + `_ for details. + + Args: + num_classes (int): Number of categories excluding the background. + in_channels (int): Number of channels in the input feature map. + num_query (int): Number of query in Transformer. + num_reg_fcs (int, optional): Number of fully-connected layers used in + `FFN`, which is then used for the regression head. Default 2. 
+        transformer (obj:`mmcv.ConfigDict`|dict): Config for transformer.
+            Default: None.
+        sync_cls_avg_factor (bool): Whether to sync the avg_factor of
+            all ranks. Defaults to False.
+        positional_encoding (obj:`mmcv.ConfigDict`|dict):
+            Config for position encoding.
+        loss_cls (obj:`mmcv.ConfigDict`|dict): Config of the
+            classification loss. Default `CrossEntropyLoss`.
+        loss_bbox (obj:`mmcv.ConfigDict`|dict): Config of the
+            regression loss. Default `L1Loss`.
+        loss_iou (obj:`mmcv.ConfigDict`|dict): Config of the
+            regression iou loss. Default `GIoULoss`.
+        train_cfg (obj:`mmcv.ConfigDict`|dict): Training config of
+            the transformer head.
+        test_cfg (obj:`mmcv.ConfigDict`|dict): Testing config of
+            the transformer head.
+        init_cfg (dict or list[dict], optional): Initialization config dict.
+            Default: None
+    """
+
+    _version = 2
+
+    def __init__(self,
+                 num_classes,
+                 in_channels,
+                 num_query=100,
+                 num_reg_fcs=2,
+                 transformer=None,
+                 sync_cls_avg_factor=False,
+                 positional_encoding=dict(
+                     type='SinePositionalEncoding',
+                     num_feats=128,
+                     normalize=True),
+                 loss_cls=dict(
+                     type='CrossEntropyLoss',
+                     bg_cls_weight=0.1,
+                     use_sigmoid=False,
+                     loss_weight=1.0,
+                     class_weight=1.0),
+                 loss_bbox=dict(type='L1Loss', loss_weight=5.0),
+                 loss_iou=dict(type='GIoULoss', loss_weight=2.0),
+                 train_cfg=dict(
+                     assigner=dict(
+                         type='HungarianAssigner',
+                         cls_cost=dict(type='ClassificationCost', weight=1.),
+                         reg_cost=dict(type='BBoxL1Cost', weight=5.0),
+                         iou_cost=dict(
+                             type='IoUCost', iou_mode='giou', weight=2.0))),
+                 test_cfg=dict(max_per_img=100),
+                 init_cfg=None,
+                 **kwargs):
+        # NOTE here use `AnchorFreeHead` instead of `TransformerHead`,
+        # since it brings inconvenience when the initialization of
+        # `AnchorFreeHead` is called.
+        super(AnchorFreeHead, self).__init__(init_cfg)
+        self.bg_cls_weight = 0
+        self.sync_cls_avg_factor = sync_cls_avg_factor
+        class_weight = loss_cls.get('class_weight', None)
+        if class_weight is not None and (self.__class__ is DETRHead):
+            assert isinstance(class_weight, float), 'Expected ' \
+                'class_weight to have type float. Found ' \
+                f'{type(class_weight)}.'
+            # NOTE following the official DETR repo, bg_cls_weight means the
+            # relative classification weight of the no-object class.
+            bg_cls_weight = loss_cls.get('bg_cls_weight', class_weight)
+            assert isinstance(bg_cls_weight, float), 'Expected ' \
+                'bg_cls_weight to have type float. Found ' \
+                f'{type(bg_cls_weight)}.'
+            class_weight = torch.ones(num_classes + 1) * class_weight
+            # set the background class as the last index
+            class_weight[num_classes] = bg_cls_weight
+            loss_cls.update({'class_weight': class_weight})
+            if 'bg_cls_weight' in loss_cls:
+                loss_cls.pop('bg_cls_weight')
+            self.bg_cls_weight = bg_cls_weight
+
+        if train_cfg:
+            assert 'assigner' in train_cfg, 'assigner should be provided '\
+                'when train_cfg is set.'
+            assigner = train_cfg['assigner']
+            assert loss_cls['loss_weight'] == assigner['cls_cost']['weight'], \
+                'The classification weight for loss and matcher should be ' \
+                'exactly the same.'
+            assert loss_bbox['loss_weight'] == assigner['reg_cost'][
+                'weight'], 'The regression L1 weight for loss and matcher ' \
+                'should be exactly the same.'
+            assert loss_iou['loss_weight'] == assigner['iou_cost']['weight'], \
+                'The regression iou weight for loss and matcher should be ' \
+                'exactly the same.'
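+            # e.g. with the defaults above, loss_cls.loss_weight (1.0) must
+            # equal cls_cost.weight (1.0), loss_bbox (5.0) must equal
+            # reg_cost (5.0), and loss_iou (2.0) must equal iou_cost (2.0),
+            # so the Hungarian matching cost ranks query-gt pairs
+            # consistently with the training loss.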
+ self.assigner = build_assigner(assigner) + # DETR sampling=False, so use PseudoSampler + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + self.num_query = num_query + self.num_classes = num_classes + self.in_channels = in_channels + self.num_reg_fcs = num_reg_fcs + self.train_cfg = train_cfg + self.test_cfg = test_cfg + self.fp16_enabled = False + self.loss_cls = build_loss(loss_cls) + self.loss_bbox = build_loss(loss_bbox) + self.loss_iou = build_loss(loss_iou) + + if self.loss_cls.use_sigmoid: + self.cls_out_channels = num_classes + else: + self.cls_out_channels = num_classes + 1 + self.act_cfg = transformer.get('act_cfg', + dict(type='ReLU', inplace=True)) + self.activate = build_activation_layer(self.act_cfg) + self.positional_encoding = build_positional_encoding( + positional_encoding) + self.transformer = build_transformer(transformer) + self.embed_dims = self.transformer.embed_dims + assert 'num_feats' in positional_encoding + num_feats = positional_encoding['num_feats'] + assert num_feats * 2 == self.embed_dims, 'embed_dims should' \ + f' be exactly 2 times of num_feats. Found {self.embed_dims}' \ + f' and {num_feats}.' + self._init_layers() + + def _init_layers(self): + """Initialize layers of the transformer head.""" + self.input_proj = Conv2d( + self.in_channels, self.embed_dims, kernel_size=1) + self.fc_cls = Linear(self.embed_dims, self.cls_out_channels) + self.reg_ffn = FFN( + self.embed_dims, + self.embed_dims, + self.num_reg_fcs, + self.act_cfg, + dropout=0.0, + add_residual=False) + self.fc_reg = Linear(self.embed_dims, 4) + self.query_embedding = nn.Embedding(self.num_query, self.embed_dims) + + def init_weights(self): + """Initialize weights of the transformer head.""" + # The initialization for transformer is important + self.transformer.init_weights() + + def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict, + missing_keys, unexpected_keys, error_msgs): + """load checkpoints.""" + # NOTE here use `AnchorFreeHead` instead of `TransformerHead`, + # since `AnchorFreeHead._load_from_state_dict` should not be + # called here. Invoking the default `Module._load_from_state_dict` + # is enough. + + # Names of some parameters in has been changed. + version = local_metadata.get('version', None) + if (version is None or version < 2) and self.__class__ is DETRHead: + convert_dict = { + '.self_attn.': '.attentions.0.', + '.ffn.': '.ffns.0.', + '.multihead_attn.': '.attentions.1.', + '.decoder.norm.': '.decoder.post_norm.' + } + state_dict_keys = list(state_dict.keys()) + for k in state_dict_keys: + for ori_key, convert_key in convert_dict.items(): + if ori_key in k: + convert_key = k.replace(ori_key, convert_key) + state_dict[convert_key] = state_dict[k] + del state_dict[k] + + super(AnchorFreeHead, + self)._load_from_state_dict(state_dict, prefix, local_metadata, + strict, missing_keys, + unexpected_keys, error_msgs) + + def forward(self, feats, img_metas): + """Forward function. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + img_metas (list[dict]): List of image information. + + Returns: + tuple[list[Tensor], list[Tensor]]: Outputs for all scale levels. + + - all_cls_scores_list (list[Tensor]): Classification scores \ + for each scale level. Each is a 4D-tensor with shape \ + [nb_dec, bs, num_query, cls_out_channels]. Note \ + `cls_out_channels` should includes background. 
+ - all_bbox_preds_list (list[Tensor]): Sigmoid regression \ + outputs for each scale level. Each is a 4D-tensor with \ + normalized coordinate format (cx, cy, w, h) and shape \ + [nb_dec, bs, num_query, 4]. + """ + num_levels = len(feats) + img_metas_list = [img_metas for _ in range(num_levels)] + return multi_apply(self.forward_single, feats, img_metas_list) + + def forward_single(self, x, img_metas): + """"Forward function for a single feature level. + + Args: + x (Tensor): Input feature from backbone's single stage, shape + [bs, c, h, w]. + img_metas (list[dict]): List of image information. + + Returns: + all_cls_scores (Tensor): Outputs from the classification head, + shape [nb_dec, bs, num_query, cls_out_channels]. Note + cls_out_channels should includes background. + all_bbox_preds (Tensor): Sigmoid outputs from the regression + head with normalized coordinate format (cx, cy, w, h). + Shape [nb_dec, bs, num_query, 4]. + """ + # construct binary masks which used for the transformer. + # NOTE following the official DETR repo, non-zero values representing + # ignored positions, while zero values means valid positions. + batch_size = x.size(0) + input_img_h, input_img_w = img_metas[0]['batch_input_shape'] + masks = x.new_ones((batch_size, input_img_h, input_img_w)) + for img_id in range(batch_size): + img_h, img_w, _ = img_metas[img_id]['img_shape'] + masks[img_id, :img_h, :img_w] = 0 + + x = self.input_proj(x) + # interpolate masks to have the same spatial shape with x + masks = F.interpolate( + masks.unsqueeze(1), size=x.shape[-2:]).to(torch.bool).squeeze(1) + # position encoding + pos_embed = self.positional_encoding(masks) # [bs, embed_dim, h, w] + # outs_dec: [nb_dec, bs, num_query, embed_dim] + outs_dec, _ = self.transformer(x, masks, self.query_embedding.weight, + pos_embed) + + all_cls_scores = self.fc_cls(outs_dec) + all_bbox_preds = self.fc_reg(self.activate( + self.reg_ffn(outs_dec))).sigmoid() + return all_cls_scores, all_bbox_preds + + @force_fp32(apply_to=('all_cls_scores_list', 'all_bbox_preds_list')) + def loss(self, + all_cls_scores_list, + all_bbox_preds_list, + gt_bboxes_list, + gt_labels_list, + img_metas, + gt_bboxes_ignore=None): + """"Loss function. + + Only outputs from the last feature level are used for computing + losses by default. + + Args: + all_cls_scores_list (list[Tensor]): Classification outputs + for each feature level. Each is a 4D-tensor with shape + [nb_dec, bs, num_query, cls_out_channels]. + all_bbox_preds_list (list[Tensor]): Sigmoid regression + outputs for each feature level. Each is a 4D-tensor with + normalized coordinate format (cx, cy, w, h) and shape + [nb_dec, bs, num_query, 4]. + gt_bboxes_list (list[Tensor]): Ground truth bboxes for each image + with shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels_list (list[Tensor]): Ground truth class indices for each + image with shape (num_gts, ). + img_metas (list[dict]): List of image meta information. + gt_bboxes_ignore (list[Tensor], optional): Bounding boxes + which can be ignored for each image. Default None. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + # NOTE defaultly only the outputs from the last feature scale is used. + all_cls_scores = all_cls_scores_list[-1] + all_bbox_preds = all_bbox_preds_list[-1] + assert gt_bboxes_ignore is None, \ + 'Only supports for gt_bboxes_ignore setting to None.' 
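+        # The per-layer losses computed below are reported as
+        # 'd0.loss_cls', 'd0.loss_bbox', 'd0.loss_iou', ... for the
+        # intermediate decoder layers and as plain 'loss_cls' /
+        # 'loss_bbox' / 'loss_iou' for the last decoder layer.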
+ + num_dec_layers = len(all_cls_scores) + all_gt_bboxes_list = [gt_bboxes_list for _ in range(num_dec_layers)] + all_gt_labels_list = [gt_labels_list for _ in range(num_dec_layers)] + all_gt_bboxes_ignore_list = [ + gt_bboxes_ignore for _ in range(num_dec_layers) + ] + img_metas_list = [img_metas for _ in range(num_dec_layers)] + + losses_cls, losses_bbox, losses_iou = multi_apply( + self.loss_single, all_cls_scores, all_bbox_preds, + all_gt_bboxes_list, all_gt_labels_list, img_metas_list, + all_gt_bboxes_ignore_list) + + loss_dict = dict() + # loss from the last decoder layer + loss_dict['loss_cls'] = losses_cls[-1] + loss_dict['loss_bbox'] = losses_bbox[-1] + loss_dict['loss_iou'] = losses_iou[-1] + # loss from other decoder layers + num_dec_layer = 0 + for loss_cls_i, loss_bbox_i, loss_iou_i in zip(losses_cls[:-1], + losses_bbox[:-1], + losses_iou[:-1]): + loss_dict[f'd{num_dec_layer}.loss_cls'] = loss_cls_i + loss_dict[f'd{num_dec_layer}.loss_bbox'] = loss_bbox_i + loss_dict[f'd{num_dec_layer}.loss_iou'] = loss_iou_i + num_dec_layer += 1 + return loss_dict + + def loss_single(self, + cls_scores, + bbox_preds, + gt_bboxes_list, + gt_labels_list, + img_metas, + gt_bboxes_ignore_list=None): + """"Loss function for outputs from a single decoder layer of a single + feature level. + + Args: + cls_scores (Tensor): Box score logits from a single decoder layer + for all images. Shape [bs, num_query, cls_out_channels]. + bbox_preds (Tensor): Sigmoid outputs from a single decoder layer + for all images, with normalized coordinate (cx, cy, w, h) and + shape [bs, num_query, 4]. + gt_bboxes_list (list[Tensor]): Ground truth bboxes for each image + with shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels_list (list[Tensor]): Ground truth class indices for each + image with shape (num_gts, ). + img_metas (list[dict]): List of image meta information. + gt_bboxes_ignore_list (list[Tensor], optional): Bounding + boxes which can be ignored for each image. Default None. + + Returns: + dict[str, Tensor]: A dictionary of loss components for outputs from + a single decoder layer. 
+ """ + num_imgs = cls_scores.size(0) + cls_scores_list = [cls_scores[i] for i in range(num_imgs)] + bbox_preds_list = [bbox_preds[i] for i in range(num_imgs)] + cls_reg_targets = self.get_targets(cls_scores_list, bbox_preds_list, + gt_bboxes_list, gt_labels_list, + img_metas, gt_bboxes_ignore_list) + (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, + num_total_pos, num_total_neg) = cls_reg_targets + labels = torch.cat(labels_list, 0) + label_weights = torch.cat(label_weights_list, 0) + bbox_targets = torch.cat(bbox_targets_list, 0) + bbox_weights = torch.cat(bbox_weights_list, 0) + + # classification loss + cls_scores = cls_scores.reshape(-1, self.cls_out_channels) + # construct weighted avg_factor to match with the official DETR repo + cls_avg_factor = num_total_pos * 1.0 + \ + num_total_neg * self.bg_cls_weight + if self.sync_cls_avg_factor: + cls_avg_factor = reduce_mean( + cls_scores.new_tensor([cls_avg_factor])) + cls_avg_factor = max(cls_avg_factor, 1) + + loss_cls = self.loss_cls( + cls_scores, labels, label_weights, avg_factor=cls_avg_factor) + + # Compute the average number of gt boxes accross all gpus, for + # normalization purposes + num_total_pos = loss_cls.new_tensor([num_total_pos]) + num_total_pos = torch.clamp(reduce_mean(num_total_pos), min=1).item() + + # construct factors used for rescale bboxes + factors = [] + for img_meta, bbox_pred in zip(img_metas, bbox_preds): + img_h, img_w, _ = img_meta['img_shape'] + factor = bbox_pred.new_tensor([img_w, img_h, img_w, + img_h]).unsqueeze(0).repeat( + bbox_pred.size(0), 1) + factors.append(factor) + factors = torch.cat(factors, 0) + + # DETR regress the relative position of boxes (cxcywh) in the image, + # thus the learning target is normalized by the image size. So here + # we need to re-scale them for calculating IoU loss + bbox_preds = bbox_preds.reshape(-1, 4) + bboxes = bbox_cxcywh_to_xyxy(bbox_preds) * factors + bboxes_gt = bbox_cxcywh_to_xyxy(bbox_targets) * factors + + # regression IoU loss, defaultly GIoU loss + loss_iou = self.loss_iou( + bboxes, bboxes_gt, bbox_weights, avg_factor=num_total_pos) + + # regression L1 loss + loss_bbox = self.loss_bbox( + bbox_preds, bbox_targets, bbox_weights, avg_factor=num_total_pos) + return loss_cls, loss_bbox, loss_iou + + def get_targets(self, + cls_scores_list, + bbox_preds_list, + gt_bboxes_list, + gt_labels_list, + img_metas, + gt_bboxes_ignore_list=None): + """"Compute regression and classification targets for a batch image. + + Outputs from a single decoder layer of a single feature level are used. + + Args: + cls_scores_list (list[Tensor]): Box score logits from a single + decoder layer for each image with shape [num_query, + cls_out_channels]. + bbox_preds_list (list[Tensor]): Sigmoid outputs from a single + decoder layer for each image, with normalized coordinate + (cx, cy, w, h) and shape [num_query, 4]. + gt_bboxes_list (list[Tensor]): Ground truth bboxes for each image + with shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels_list (list[Tensor]): Ground truth class indices for each + image with shape (num_gts, ). + img_metas (list[dict]): List of image meta information. + gt_bboxes_ignore_list (list[Tensor], optional): Bounding + boxes which can be ignored for each image. Default None. + + Returns: + tuple: a tuple containing the following targets. + + - labels_list (list[Tensor]): Labels for all images. + - label_weights_list (list[Tensor]): Label weights for all \ + images. 
+ - bbox_targets_list (list[Tensor]): BBox targets for all \ + images. + - bbox_weights_list (list[Tensor]): BBox weights for all \ + images. + - num_total_pos (int): Number of positive samples in all \ + images. + - num_total_neg (int): Number of negative samples in all \ + images. + """ + assert gt_bboxes_ignore_list is None, \ + 'Only supports for gt_bboxes_ignore setting to None.' + num_imgs = len(cls_scores_list) + gt_bboxes_ignore_list = [ + gt_bboxes_ignore_list for _ in range(num_imgs) + ] + + (labels_list, label_weights_list, bbox_targets_list, + bbox_weights_list, pos_inds_list, neg_inds_list) = multi_apply( + self._get_target_single, cls_scores_list, bbox_preds_list, + gt_bboxes_list, gt_labels_list, img_metas, gt_bboxes_ignore_list) + num_total_pos = sum((inds.numel() for inds in pos_inds_list)) + num_total_neg = sum((inds.numel() for inds in neg_inds_list)) + return (labels_list, label_weights_list, bbox_targets_list, + bbox_weights_list, num_total_pos, num_total_neg) + + def _get_target_single(self, + cls_score, + bbox_pred, + gt_bboxes, + gt_labels, + img_meta, + gt_bboxes_ignore=None): + """"Compute regression and classification targets for one image. + + Outputs from a single decoder layer of a single feature level are used. + + Args: + cls_score (Tensor): Box score logits from a single decoder layer + for one image. Shape [num_query, cls_out_channels]. + bbox_pred (Tensor): Sigmoid outputs from a single decoder layer + for one image, with normalized coordinate (cx, cy, w, h) and + shape [num_query, 4]. + gt_bboxes (Tensor): Ground truth bboxes for one image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (Tensor): Ground truth class indices for one image + with shape (num_gts, ). + img_meta (dict): Meta information for one image. + gt_bboxes_ignore (Tensor, optional): Bounding boxes + which can be ignored. Default None. + + Returns: + tuple[Tensor]: a tuple containing the following for one image. + + - labels (Tensor): Labels of each image. + - label_weights (Tensor]): Label weights of each image. + - bbox_targets (Tensor): BBox targets of each image. + - bbox_weights (Tensor): BBox weights of each image. + - pos_inds (Tensor): Sampled positive indices for each image. + - neg_inds (Tensor): Sampled negative indices for each image. + """ + + num_bboxes = bbox_pred.size(0) + # assigner and sampler + assign_result = self.assigner.assign(bbox_pred, cls_score, gt_bboxes, + gt_labels, img_meta, + gt_bboxes_ignore) + sampling_result = self.sampler.sample(assign_result, bbox_pred, + gt_bboxes) + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + + # label targets + labels = gt_bboxes.new_full((num_bboxes, ), + self.num_classes, + dtype=torch.long) + labels[pos_inds] = gt_labels[sampling_result.pos_assigned_gt_inds] + label_weights = gt_bboxes.new_ones(num_bboxes) + + # bbox targets + bbox_targets = torch.zeros_like(bbox_pred) + bbox_weights = torch.zeros_like(bbox_pred) + bbox_weights[pos_inds] = 1.0 + img_h, img_w, _ = img_meta['img_shape'] + + # DETR regress the relative position of boxes (cxcywh) in the image. + # Thus the learning target should be normalized by the image size, also + # the box format should be converted from defaultly x1y1x2y2 to cxcywh. 
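+        # e.g. for an image with img_shape (800, 1333, 3), the positive gt
+        # boxes are divided by factor [1333, 800, 1333, 800] and converted
+        # to cxcywh, so the targets lie in [0, 1] like the sigmoid outputs.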
+ factor = bbox_pred.new_tensor([img_w, img_h, img_w, + img_h]).unsqueeze(0) + pos_gt_bboxes_normalized = sampling_result.pos_gt_bboxes / factor + pos_gt_bboxes_targets = bbox_xyxy_to_cxcywh(pos_gt_bboxes_normalized) + bbox_targets[pos_inds] = pos_gt_bboxes_targets + return (labels, label_weights, bbox_targets, bbox_weights, pos_inds, + neg_inds) + + # over-write because img_metas are needed as inputs for bbox_head. + def forward_train(self, + x, + img_metas, + gt_bboxes, + gt_labels=None, + gt_bboxes_ignore=None, + proposal_cfg=None, + **kwargs): + """Forward function for training mode. + + Args: + x (list[Tensor]): Features from backbone. + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes (Tensor): Ground truth bboxes of the image, + shape (num_gts, 4). + gt_labels (Tensor): Ground truth labels of each box, + shape (num_gts,). + gt_bboxes_ignore (Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). + proposal_cfg (mmcv.Config): Test / postprocessing configuration, + if None, test_cfg would be used. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + assert proposal_cfg is None, '"proposal_cfg" must be None' + outs = self(x, img_metas) + if gt_labels is None: + loss_inputs = outs + (gt_bboxes, img_metas) + else: + loss_inputs = outs + (gt_bboxes, gt_labels, img_metas) + losses = self.loss(*loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore) + return losses + + @force_fp32(apply_to=('all_cls_scores_list', 'all_bbox_preds_list')) + def get_bboxes(self, + all_cls_scores_list, + all_bbox_preds_list, + img_metas, + rescale=False): + """Transform network outputs for a batch into bbox predictions. + + Args: + all_cls_scores_list (list[Tensor]): Classification outputs + for each feature level. Each is a 4D-tensor with shape + [nb_dec, bs, num_query, cls_out_channels]. + all_bbox_preds_list (list[Tensor]): Sigmoid regression + outputs for each feature level. Each is a 4D-tensor with + normalized coordinate format (cx, cy, w, h) and shape + [nb_dec, bs, num_query, 4]. + img_metas (list[dict]): Meta information of each image. + rescale (bool, optional): If True, return boxes in original + image space. Default False. + + Returns: + list[list[Tensor, Tensor]]: Each item in result_list is 2-tuple. \ + The first item is an (n, 5) tensor, where the first 4 columns \ + are bounding box positions (tl_x, tl_y, br_x, br_y) and the \ + 5-th column is a score between 0 and 1. The second item is a \ + (n,) tensor where each item is the predicted class label of \ + the corresponding box. + """ + # NOTE defaultly only using outputs from the last feature level, + # and only the outputs from the last decoder layer is used. + cls_scores = all_cls_scores_list[-1][-1] + bbox_preds = all_bbox_preds_list[-1][-1] + + result_list = [] + for img_id in range(len(img_metas)): + cls_score = cls_scores[img_id] + bbox_pred = bbox_preds[img_id] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + proposals = self._get_bboxes_single(cls_score, bbox_pred, + img_shape, scale_factor, + rescale) + result_list.append(proposals) + + return result_list + + def _get_bboxes_single(self, + cls_score, + bbox_pred, + img_shape, + scale_factor, + rescale=False): + """Transform outputs from the last decoder layer into bbox predictions + for each image. + + Args: + cls_score (Tensor): Box score logits from the last decoder layer + for each image. Shape [num_query, cls_out_channels]. 
+            bbox_pred (Tensor): Sigmoid outputs from the last decoder layer
+                for each image, with coordinate format (cx, cy, w, h) and
+                shape [num_query, 4].
+            img_shape (tuple[int]): Shape of input image, (height, width, 3).
+            scale_factor (ndarray, optional): Scale factor of the image,
+                arranged as (w_scale, h_scale, w_scale, h_scale).
+            rescale (bool, optional): If True, return boxes in original image
+                space. Default False.
+
+        Returns:
+            tuple[Tensor]: Results of detected bboxes and labels.
+
+                - det_bboxes: Predicted bboxes with shape [num_query, 5], \
+                    where the first 4 columns are bounding box positions \
+                    (tl_x, tl_y, br_x, br_y) and the 5-th column is a score \
+                    between 0 and 1.
+                - det_labels: Predicted labels of the corresponding box with \
+                    shape [num_query].
+        """
+        assert len(cls_score) == len(bbox_pred)
+        max_per_img = self.test_cfg.get('max_per_img', self.num_query)
+        # exclude background
+        if self.loss_cls.use_sigmoid:
+            cls_score = cls_score.sigmoid()
+            scores, indexes = cls_score.view(-1).topk(max_per_img)
+            det_labels = indexes % self.num_classes
+            bbox_index = indexes // self.num_classes
+            bbox_pred = bbox_pred[bbox_index]
+        else:
+            scores, det_labels = F.softmax(cls_score, dim=-1)[..., :-1].max(-1)
+            scores, bbox_index = scores.topk(max_per_img)
+            bbox_pred = bbox_pred[bbox_index]
+            det_labels = det_labels[bbox_index]
+
+        det_bboxes = bbox_cxcywh_to_xyxy(bbox_pred)
+        det_bboxes[:, 0::2] = det_bboxes[:, 0::2] * img_shape[1]
+        det_bboxes[:, 1::2] = det_bboxes[:, 1::2] * img_shape[0]
+        det_bboxes[:, 0::2].clamp_(min=0, max=img_shape[1])
+        det_bboxes[:, 1::2].clamp_(min=0, max=img_shape[0])
+        if rescale:
+            det_bboxes /= det_bboxes.new_tensor(scale_factor)
+        det_bboxes = torch.cat((det_bboxes, scores.unsqueeze(1)), -1)
+
+        return det_bboxes, det_labels
+
+    def simple_test_bboxes(self, feats, img_metas, rescale=False):
+        """Test det bboxes without test-time augmentation.
+
+        Args:
+            feats (tuple[torch.Tensor]): Multi-level features from the
+                upstream network, each is a 4D-tensor.
+            img_metas (list[dict]): List of image information.
+            rescale (bool, optional): Whether to rescale the results.
+                Defaults to False.
+
+        Returns:
+            list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple.
+                The first item is ``bboxes`` with shape (n, 5),
+                where 5 represent (tl_x, tl_y, br_x, br_y, score).
+                The shape of the second tensor in the tuple is ``labels``
+                with shape (n,)
+        """
+        # forward of this head requires img_metas
+        outs = self.forward(feats, img_metas)
+        results_list = self.get_bboxes(*outs, img_metas, rescale=rescale)
+        return results_list
+
+    def forward_onnx(self, feats, img_metas):
+        """Forward function for exporting to ONNX.
+
+        Over-write `forward` because: `masks` is directly created with
+        zero (valid position tag) and has the same spatial size as `x`.
+        Thus the construction of `masks` is different from that in `forward`.
+
+        Args:
+            feats (tuple[Tensor]): Features from the upstream network, each is
+                a 4D-tensor.
+            img_metas (list[dict]): List of image information.
+
+        Returns:
+            tuple[list[Tensor], list[Tensor]]: Outputs for all scale levels.
+
+            - all_cls_scores_list (list[Tensor]): Classification scores \
+                for each scale level. Each is a 4D-tensor with shape \
+                [nb_dec, bs, num_query, cls_out_channels]. Note \
+                `cls_out_channels` should include background.
+            - all_bbox_preds_list (list[Tensor]): Sigmoid regression \
+                outputs for each scale level.
Each is a 4D-tensor with \ + normalized coordinate format (cx, cy, w, h) and shape \ + [nb_dec, bs, num_query, 4]. + """ + num_levels = len(feats) + img_metas_list = [img_metas for _ in range(num_levels)] + return multi_apply(self.forward_single_onnx, feats, img_metas_list) + + def forward_single_onnx(self, x, img_metas): + """"Forward function for a single feature level with ONNX exportation. + + Args: + x (Tensor): Input feature from backbone's single stage, shape + [bs, c, h, w]. + img_metas (list[dict]): List of image information. + + Returns: + all_cls_scores (Tensor): Outputs from the classification head, + shape [nb_dec, bs, num_query, cls_out_channels]. Note + cls_out_channels should includes background. + all_bbox_preds (Tensor): Sigmoid outputs from the regression + head with normalized coordinate format (cx, cy, w, h). + Shape [nb_dec, bs, num_query, 4]. + """ + # Note `img_shape` is not dynamically traceable to ONNX, + # since the related augmentation was done with numpy under + # CPU. Thus `masks` is directly created with zeros (valid tag) + # and the same spatial shape as `x`. + # The difference between torch and exported ONNX model may be + # ignored, since the same performance is achieved (e.g. + # 40.1 vs 40.1 for DETR) + batch_size = x.size(0) + h, w = x.size()[-2:] + masks = x.new_zeros((batch_size, h, w)) # [B,h,w] + + x = self.input_proj(x) + # interpolate masks to have the same spatial shape with x + masks = F.interpolate( + masks.unsqueeze(1), size=x.shape[-2:]).to(torch.bool).squeeze(1) + pos_embed = self.positional_encoding(masks) + outs_dec, _ = self.transformer(x, masks, self.query_embedding.weight, + pos_embed) + + all_cls_scores = self.fc_cls(outs_dec) + all_bbox_preds = self.fc_reg(self.activate( + self.reg_ffn(outs_dec))).sigmoid() + return all_cls_scores, all_bbox_preds + + def onnx_export(self, all_cls_scores_list, all_bbox_preds_list, img_metas): + """Transform network outputs into bbox predictions, with ONNX + exportation. + + Args: + all_cls_scores_list (list[Tensor]): Classification outputs + for each feature level. Each is a 4D-tensor with shape + [nb_dec, bs, num_query, cls_out_channels]. + all_bbox_preds_list (list[Tensor]): Sigmoid regression + outputs for each feature level. Each is a 4D-tensor with + normalized coordinate format (cx, cy, w, h) and shape + [nb_dec, bs, num_query, 4]. + img_metas (list[dict]): Meta information of each image. + + Returns: + tuple[Tensor, Tensor]: dets of shape [N, num_det, 5] + and class labels of shape [N, num_det]. + """ + assert len(img_metas) == 1, \ + 'Only support one input image while in exporting to ONNX' + + cls_scores = all_cls_scores_list[-1][-1] + bbox_preds = all_bbox_preds_list[-1][-1] + + # Note `img_shape` is not dynamically traceable to ONNX, + # here `img_shape_for_onnx` (padded shape of image tensor) + # is used. 
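+        # `img_shape_for_onnx` is expected to be a 2-element (h, w) tensor,
+        # which keeps the rescaling below traceable: flip(0).repeat(2)
+        # turns it into a [w, h, w, h] factor instead of baked-in constants.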
+ img_shape = img_metas[0]['img_shape_for_onnx'] + max_per_img = self.test_cfg.get('max_per_img', self.num_query) + batch_size = cls_scores.size(0) + # `batch_index_offset` is used for the gather of concatenated tensor + batch_index_offset = torch.arange(batch_size).to( + cls_scores.device) * max_per_img + batch_index_offset = batch_index_offset.unsqueeze(1).expand( + batch_size, max_per_img) + + # supports dynamical batch inference + if self.loss_cls.use_sigmoid: + cls_scores = cls_scores.sigmoid() + scores, indexes = cls_scores.view(batch_size, -1).topk( + max_per_img, dim=1) + det_labels = indexes % self.num_classes + bbox_index = indexes // self.num_classes + bbox_index = (bbox_index + batch_index_offset).view(-1) + bbox_preds = bbox_preds.view(-1, 4)[bbox_index] + bbox_preds = bbox_preds.view(batch_size, -1, 4) + else: + scores, det_labels = F.softmax( + cls_scores, dim=-1)[..., :-1].max(-1) + scores, bbox_index = scores.topk(max_per_img, dim=1) + bbox_index = (bbox_index + batch_index_offset).view(-1) + bbox_preds = bbox_preds.view(-1, 4)[bbox_index] + det_labels = det_labels.view(-1)[bbox_index] + bbox_preds = bbox_preds.view(batch_size, -1, 4) + det_labels = det_labels.view(batch_size, -1) + + det_bboxes = bbox_cxcywh_to_xyxy(bbox_preds) + # use `img_shape_tensor` for dynamically exporting to ONNX + img_shape_tensor = img_shape.flip(0).repeat(2) # [w,h,w,h] + img_shape_tensor = img_shape_tensor.unsqueeze(0).unsqueeze(0).expand( + batch_size, det_bboxes.size(1), 4) + det_bboxes = det_bboxes * img_shape_tensor + # dynamically clip bboxes + x1, y1, x2, y2 = det_bboxes.split((1, 1, 1, 1), dim=-1) + from mmdet.core.export import dynamic_clip_for_onnx + x1, y1, x2, y2 = dynamic_clip_for_onnx(x1, y1, x2, y2, img_shape) + det_bboxes = torch.cat([x1, y1, x2, y2], dim=-1) + det_bboxes = torch.cat((det_bboxes, scores.unsqueeze(-1)), -1) + + return det_bboxes, det_labels diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/embedding_rpn_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/embedding_rpn_head.py new file mode 100644 index 0000000000000000000000000000000000000000..88d83fe0ac4fa2ffa18ef42bc27de78297004afb --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/embedding_rpn_head.py @@ -0,0 +1,115 @@ +import torch +import torch.nn as nn +from mmcv.runner import BaseModule + +from mmdet.models.builder import HEADS +from ...core import bbox_cxcywh_to_xyxy + + +@HEADS.register_module() +class EmbeddingRPNHead(BaseModule): + """RPNHead in the `Sparse R-CNN `_ . + + Unlike traditional RPNHead, this module does not need FPN input, but just + decode `init_proposal_bboxes` and expand the first dimension of + `init_proposal_bboxes` and `init_proposal_features` to the batch_size. + + Args: + num_proposals (int): Number of init_proposals. Default 100. + proposal_feature_channel (int): Channel number of + init_proposal_feature. Defaults to 256. + init_cfg (dict or list[dict], optional): Initialization config dict. 
+            Default: None
+    """
+
+    def __init__(self,
+                 num_proposals=100,
+                 proposal_feature_channel=256,
+                 init_cfg=None,
+                 **kwargs):
+        assert init_cfg is None, 'To prevent abnormal initialization ' \
+            'behavior, init_cfg is not allowed to be set'
+        super(EmbeddingRPNHead, self).__init__(init_cfg)
+        self.num_proposals = num_proposals
+        self.proposal_feature_channel = proposal_feature_channel
+        self._init_layers()
+
+    def _init_layers(self):
+        """Initialize a sparse set of proposal boxes and proposal features."""
+        self.init_proposal_bboxes = nn.Embedding(self.num_proposals, 4)
+        self.init_proposal_features = nn.Embedding(
+            self.num_proposals, self.proposal_feature_channel)
+
+    def init_weights(self):
+        """Initialize init_proposal_bboxes as normalized [c_x, c_y, w, h]
+        boxes covering the entire image."""
+        super(EmbeddingRPNHead, self).init_weights()
+        nn.init.constant_(self.init_proposal_bboxes.weight[:, :2], 0.5)
+        nn.init.constant_(self.init_proposal_bboxes.weight[:, 2:], 1)
+
+    def _decode_init_proposals(self, imgs, img_metas):
+        """Decode init_proposal_bboxes according to the size of images and
+        expand the first dimension of init_proposal_features to batch_size.
+
+        Args:
+            imgs (list[Tensor]): List of FPN features.
+            img_metas (list[dict]): List of meta-information of
+                images. The img_shape is needed to decode the init_proposals.
+
+        Returns:
+            Tuple(Tensor):
+
+                - proposals (Tensor): Decoded proposal bboxes,
+                  has shape (batch_size, num_proposals, 4).
+                - init_proposal_features (Tensor): Expanded proposal
+                  features, has shape
+                  (batch_size, num_proposals, proposal_feature_channel).
+                - imgs_whwh (Tensor): Tensor with shape
+                  (batch_size, 1, 4), the last dimension means
+                  [img_width, img_height, img_width, img_height].
+        """
+        proposals = self.init_proposal_bboxes.weight.clone()
+        proposals = bbox_cxcywh_to_xyxy(proposals)
+        num_imgs = len(imgs[0])
+        imgs_whwh = []
+        for meta in img_metas:
+            h, w, _ = meta['img_shape']
+            imgs_whwh.append(imgs[0].new_tensor([[w, h, w, h]]))
+        imgs_whwh = torch.cat(imgs_whwh, dim=0)
+        imgs_whwh = imgs_whwh[:, None, :]
+
+        # imgs_whwh has shape (batch_size, 1, 4)
+        # The shape of proposals changes from (num_proposals, 4)
+        # to (batch_size, num_proposals, 4)
+        proposals = proposals * imgs_whwh
+
+        init_proposal_features = self.init_proposal_features.weight.clone()
+        init_proposal_features = init_proposal_features[None].expand(
+            num_imgs, *init_proposal_features.size())
+        return proposals, init_proposal_features, imgs_whwh
+
+    def forward_dummy(self, img, img_metas):
+        """Dummy forward function.
+
+        Used in flops calculation.
+ """ + return self._decode_init_proposals(img, img_metas) + + def forward_train(self, img, img_metas): + """Forward function in training stage.""" + return self._decode_init_proposals(img, img_metas) + + def simple_test_rpn(self, img, img_metas): + """Forward function in testing stage.""" + return self._decode_init_proposals(img, img_metas) + + def simple_test(self, img, img_metas): + """Forward function in testing stage.""" + raise NotImplementedError + + def aug_test_rpn(self, feats, img_metas): + raise NotImplementedError( + 'EmbeddingRPNHead does not support test-time augmentation') diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/fcos_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/fcos_head.py new file mode 100644 index 0000000000000000000000000000000000000000..323d154b9157e1ac5a8d3915774eb31c848ca973 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/fcos_head.py @@ -0,0 +1,648 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import Scale +from mmcv.runner import force_fp32 + +from mmdet.core import distance2bbox, multi_apply, multiclass_nms, reduce_mean +from ..builder import HEADS, build_loss +from .anchor_free_head import AnchorFreeHead + +INF = 1e8 + + +@HEADS.register_module() +class FCOSHead(AnchorFreeHead): + """Anchor-free head used in `FCOS `_. + + The FCOS head does not use anchor boxes. Instead bounding boxes are + predicted at each pixel and a centerness measure is used to suppress + low-quality predictions. + Here norm_on_bbox, centerness_on_reg, dcn_on_last_conv are training + tricks used in official repo, which will bring remarkable mAP gains + of up to 4.9. Please see https://github.com/tianzhi0549/FCOS for + more detail. + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + strides (list[int] | list[tuple[int, int]]): Strides of points + in multiple feature levels. Default: (4, 8, 16, 32, 64). + regress_ranges (tuple[tuple[int, int]]): Regress range of multiple + level points. + center_sampling (bool): If true, use center sampling. Default: False. + center_sample_radius (float): Radius of center sampling. Default: 1.5. + norm_on_bbox (bool): If true, normalize the regression targets + with FPN strides. Default: False. + centerness_on_reg (bool): If true, position centerness on the + regress branch. Please refer to https://github.com/tianzhi0549/FCOS/issues/89#issuecomment-516877042. + Default: False. + conv_bias (bool | str): If specified as `auto`, it will be decided by the + norm_cfg. Bias of conv will be set as True if `norm_cfg` is None, otherwise + False. Default: "auto". + loss_cls (dict): Config of classification loss. + loss_bbox (dict): Config of localization loss. + loss_centerness (dict): Config of centerness loss. + norm_cfg (dict): dictionary to construct and config norm layer. + Default: norm_cfg=dict(type='GN', num_groups=32, requires_grad=True). + init_cfg (dict or list[dict], optional): Initialization config dict. 
+ + Example: + >>> self = FCOSHead(11, 7) + >>> feats = [torch.rand(1, 7, s, s) for s in [4, 8, 16, 32, 64]] + >>> cls_score, bbox_pred, centerness = self.forward(feats) + >>> assert len(cls_score) == len(self.scales) + """ # noqa: E501 + + def __init__(self, + num_classes, + in_channels, + regress_ranges=((-1, 64), (64, 128), (128, 256), (256, 512), + (512, INF)), + center_sampling=False, + center_sample_radius=1.5, + norm_on_bbox=False, + centerness_on_reg=False, + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox=dict(type='IoULoss', loss_weight=1.0), + loss_centerness=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0), + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True), + init_cfg=dict( + type='Normal', + layer='Conv2d', + std=0.01, + override=dict( + type='Normal', + name='conv_cls', + std=0.01, + bias_prob=0.01)), + **kwargs): + self.regress_ranges = regress_ranges + self.center_sampling = center_sampling + self.center_sample_radius = center_sample_radius + self.norm_on_bbox = norm_on_bbox + self.centerness_on_reg = centerness_on_reg + super().__init__( + num_classes, + in_channels, + loss_cls=loss_cls, + loss_bbox=loss_bbox, + norm_cfg=norm_cfg, + init_cfg=init_cfg, + **kwargs) + self.loss_centerness = build_loss(loss_centerness) + + def _init_layers(self): + """Initialize layers of the head.""" + super()._init_layers() + self.conv_centerness = nn.Conv2d(self.feat_channels, 1, 3, padding=1) + self.scales = nn.ModuleList([Scale(1.0) for _ in self.strides]) + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple: + cls_scores (list[Tensor]): Box scores for each scale level, \ + each is a 4D-tensor, the channel number is \ + num_points * num_classes. + bbox_preds (list[Tensor]): Box energies / deltas for each \ + scale level, each is a 4D-tensor, the channel number is \ + num_points * 4. + centernesses (list[Tensor]): centerness for each scale level, \ + each is a 4D-tensor, the channel number is num_points * 1. + """ + return multi_apply(self.forward_single, feats, self.scales, + self.strides) + + def forward_single(self, x, scale, stride): + """Forward features of a single scale level. + + Args: + x (Tensor): FPN feature maps of the specified stride. + scale (:obj: `mmcv.cnn.Scale`): Learnable scale module to resize + the bbox prediction. + stride (int): The corresponding stride for feature maps, only + used to normalize the bbox prediction when self.norm_on_bbox + is True. + + Returns: + tuple: scores for each class, bbox predictions and centerness \ + predictions of input feature maps. + """ + cls_score, bbox_pred, cls_feat, reg_feat = super().forward_single(x) + if self.centerness_on_reg: + centerness = self.conv_centerness(reg_feat) + else: + centerness = self.conv_centerness(cls_feat) + # scale the bbox_pred of different level + # float to avoid overflow when enabling FP16 + bbox_pred = scale(bbox_pred).float() + if self.norm_on_bbox: + bbox_pred = F.relu(bbox_pred) + if not self.training: + bbox_pred *= stride + else: + bbox_pred = bbox_pred.exp() + return cls_score, bbox_pred, centerness + + @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'centernesses')) + def loss(self, + cls_scores, + bbox_preds, + centernesses, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute loss of the head. 
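+
+        Editor's sketch of the recipe implemented below: predictions from all
+        FPN levels are flattened, per-point targets come from ``get_targets``,
+        and three terms are returned: a focal classification loss over all
+        points, an IoU loss on decoded boxes weighted by the centerness
+        targets, and a binary cross-entropy loss on the predicted centerness.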
+ + Args: + cls_scores (list[Tensor]): Box scores for each scale level, + each is a 4D-tensor, the channel number is + num_points * num_classes. + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level, each is a 4D-tensor, the channel number is + num_points * 4. + centernesses (list[Tensor]): centerness for each scale level, each + is a 4D-tensor, the channel number is num_points * 1. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + assert len(cls_scores) == len(bbox_preds) == len(centernesses) + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + all_level_points = self.get_points(featmap_sizes, bbox_preds[0].dtype, + bbox_preds[0].device) + labels, bbox_targets = self.get_targets(all_level_points, gt_bboxes, + gt_labels) + + num_imgs = cls_scores[0].size(0) + # flatten cls_scores, bbox_preds and centerness + flatten_cls_scores = [ + cls_score.permute(0, 2, 3, 1).reshape(-1, self.cls_out_channels) + for cls_score in cls_scores + ] + flatten_bbox_preds = [ + bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4) + for bbox_pred in bbox_preds + ] + flatten_centerness = [ + centerness.permute(0, 2, 3, 1).reshape(-1) + for centerness in centernesses + ] + flatten_cls_scores = torch.cat(flatten_cls_scores) + flatten_bbox_preds = torch.cat(flatten_bbox_preds) + flatten_centerness = torch.cat(flatten_centerness) + flatten_labels = torch.cat(labels) + flatten_bbox_targets = torch.cat(bbox_targets) + # repeat points to align with bbox_preds + flatten_points = torch.cat( + [points.repeat(num_imgs, 1) for points in all_level_points]) + + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + bg_class_ind = self.num_classes + pos_inds = ((flatten_labels >= 0) + & (flatten_labels < bg_class_ind)).nonzero().reshape(-1) + num_pos = torch.tensor( + len(pos_inds), dtype=torch.float, device=bbox_preds[0].device) + num_pos = max(reduce_mean(num_pos), 1.0) + loss_cls = self.loss_cls( + flatten_cls_scores, flatten_labels, avg_factor=num_pos) + + pos_bbox_preds = flatten_bbox_preds[pos_inds] + pos_centerness = flatten_centerness[pos_inds] + pos_bbox_targets = flatten_bbox_targets[pos_inds] + pos_centerness_targets = self.centerness_target(pos_bbox_targets) + # centerness weighted iou loss + centerness_denorm = max( + reduce_mean(pos_centerness_targets.sum().detach()), 1e-6) + + if len(pos_inds) > 0: + pos_points = flatten_points[pos_inds] + pos_decoded_bbox_preds = distance2bbox(pos_points, pos_bbox_preds) + pos_decoded_target_preds = distance2bbox(pos_points, + pos_bbox_targets) + loss_bbox = self.loss_bbox( + pos_decoded_bbox_preds, + pos_decoded_target_preds, + weight=pos_centerness_targets, + avg_factor=centerness_denorm) + loss_centerness = self.loss_centerness( + pos_centerness, pos_centerness_targets, avg_factor=num_pos) + else: + loss_bbox = pos_bbox_preds.sum() + loss_centerness = pos_centerness.sum() + + return dict( + loss_cls=loss_cls, + loss_bbox=loss_bbox, + loss_centerness=loss_centerness) + + @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'centernesses')) + def get_bboxes(self, + cls_scores, + bbox_preds, + centernesses, + img_metas, + 
cfg=None, + rescale=False, + with_nms=True): + """Transform network output for a batch into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + with shape (N, num_points * num_classes, H, W). + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_points * 4, H, W). + centernesses (list[Tensor]): Centerness for each scale level with + shape (N, num_points * 1, H, W). + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used. Default: None. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is an (n, 5) tensor, where 5 represent + (tl_x, tl_y, br_x, br_y, score) and the score between 0 and 1. + The shape of the second tensor in the tuple is (n,), and + each element represents the class label of the corresponding + box. + """ + assert len(cls_scores) == len(bbox_preds) + num_levels = len(cls_scores) + + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + mlvl_points = self.get_points(featmap_sizes, bbox_preds[0].dtype, + bbox_preds[0].device) + + cls_score_list = [cls_scores[i].detach() for i in range(num_levels)] + bbox_pred_list = [bbox_preds[i].detach() for i in range(num_levels)] + centerness_pred_list = [ + centernesses[i].detach() for i in range(num_levels) + ] + if torch.onnx.is_in_onnx_export(): + assert len( + img_metas + ) == 1, 'Only support one input image while in exporting to ONNX' + img_shapes = img_metas[0]['img_shape_for_onnx'] + else: + img_shapes = [ + img_metas[i]['img_shape'] + for i in range(cls_scores[0].shape[0]) + ] + scale_factors = [ + img_metas[i]['scale_factor'] for i in range(cls_scores[0].shape[0]) + ] + result_list = self._get_bboxes(cls_score_list, bbox_pred_list, + centerness_pred_list, mlvl_points, + img_shapes, scale_factors, cfg, rescale, + with_nms) + return result_list + + def _get_bboxes(self, + cls_scores, + bbox_preds, + centernesses, + mlvl_points, + img_shapes, + scale_factors, + cfg, + rescale=False, + with_nms=True): + """Transform outputs for a single batch item into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box scores for a single scale level + with shape (N, num_points * num_classes, H, W). + bbox_preds (list[Tensor]): Box energies / deltas for a single scale + level with shape (N, num_points * 4, H, W). + centernesses (list[Tensor]): Centerness for a single scale level + with shape (N, num_points, H, W). + mlvl_points (list[Tensor]): Box reference for a single scale level + with shape (num_total_points, 4). + img_shapes (list[tuple[int]]): Shape of the input image, + list[(height, width, 3)]. + scale_factors (list[ndarray]): Scale factor of the image arrange as + (w_scale, h_scale, w_scale, h_scale). + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + tuple(Tensor): + det_bboxes (Tensor): BBox predictions in shape (n, 5), where + the first 4 columns are bounding box positions + (tl_x, tl_y, br_x, br_y) and the 5-th column is a score + between 0 and 1. 
+ det_labels (Tensor): A (n,) tensor where each item is the + predicted class label of the corresponding box. + """ + cfg = self.test_cfg if cfg is None else cfg + assert len(cls_scores) == len(bbox_preds) == len(mlvl_points) + device = cls_scores[0].device + batch_size = cls_scores[0].shape[0] + # convert to tensor to keep tracing + nms_pre_tensor = torch.tensor( + cfg.get('nms_pre', -1), device=device, dtype=torch.long) + mlvl_bboxes = [] + mlvl_scores = [] + mlvl_centerness = [] + for cls_score, bbox_pred, centerness, points in zip( + cls_scores, bbox_preds, centernesses, mlvl_points): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + scores = cls_score.permute(0, 2, 3, 1).reshape( + batch_size, -1, self.cls_out_channels).sigmoid() + centerness = centerness.permute(0, 2, 3, + 1).reshape(batch_size, + -1).sigmoid() + + bbox_pred = bbox_pred.permute(0, 2, 3, + 1).reshape(batch_size, -1, 4) + points = points.expand(batch_size, -1, 2) + # Get top-k prediction + from mmdet.core.export import get_k_for_topk + nms_pre = get_k_for_topk(nms_pre_tensor, bbox_pred.shape[1]) + if nms_pre > 0: + max_scores, _ = (scores * centerness[..., None]).max(-1) + _, topk_inds = max_scores.topk(nms_pre) + batch_inds = torch.arange(batch_size).view( + -1, 1).expand_as(topk_inds).long() + # Avoid onnx2tensorrt issue in https://github.com/NVIDIA/TensorRT/issues/1134 # noqa: E501 + if torch.onnx.is_in_onnx_export(): + transformed_inds = bbox_pred.shape[ + 1] * batch_inds + topk_inds + points = points.reshape(-1, + 2)[transformed_inds, :].reshape( + batch_size, -1, 2) + bbox_pred = bbox_pred.reshape( + -1, 4)[transformed_inds, :].reshape(batch_size, -1, 4) + scores = scores.reshape( + -1, self.num_classes)[transformed_inds, :].reshape( + batch_size, -1, self.num_classes) + centerness = centerness.reshape( + -1, 1)[transformed_inds].reshape(batch_size, -1) + else: + points = points[batch_inds, topk_inds, :] + bbox_pred = bbox_pred[batch_inds, topk_inds, :] + scores = scores[batch_inds, topk_inds, :] + centerness = centerness[batch_inds, topk_inds] + + bboxes = distance2bbox(points, bbox_pred, max_shape=img_shapes) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + mlvl_centerness.append(centerness) + + batch_mlvl_bboxes = torch.cat(mlvl_bboxes, dim=1) + if rescale: + batch_mlvl_bboxes /= batch_mlvl_bboxes.new_tensor( + scale_factors).unsqueeze(1) + batch_mlvl_scores = torch.cat(mlvl_scores, dim=1) + batch_mlvl_centerness = torch.cat(mlvl_centerness, dim=1) + + # Replace multiclass_nms with ONNX::NonMaxSuppression in deployment + if torch.onnx.is_in_onnx_export() and with_nms: + from mmdet.core.export import add_dummy_nms_for_onnx + batch_mlvl_scores = batch_mlvl_scores * ( + batch_mlvl_centerness.unsqueeze(2)) + max_output_boxes_per_class = cfg.nms.get( + 'max_output_boxes_per_class', 200) + iou_threshold = cfg.nms.get('iou_threshold', 0.5) + score_threshold = cfg.score_thr + nms_pre = cfg.get('deploy_nms_pre', -1) + return add_dummy_nms_for_onnx(batch_mlvl_bboxes, batch_mlvl_scores, + max_output_boxes_per_class, + iou_threshold, score_threshold, + nms_pre, cfg.max_per_img) + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + padding = batch_mlvl_scores.new_zeros(batch_size, + batch_mlvl_scores.shape[1], 1) + batch_mlvl_scores = torch.cat([batch_mlvl_scores, padding], dim=-1) + + if with_nms: + det_results = [] + for (mlvl_bboxes, mlvl_scores, + mlvl_centerness) in zip(batch_mlvl_bboxes, batch_mlvl_scores, + batch_mlvl_centerness): + det_bbox, det_label = 
multiclass_nms( + mlvl_bboxes, + mlvl_scores, + cfg.score_thr, + cfg.nms, + cfg.max_per_img, + score_factors=mlvl_centerness) + det_results.append(tuple([det_bbox, det_label])) + else: + det_results = [ + tuple(mlvl_bs) + for mlvl_bs in zip(batch_mlvl_bboxes, batch_mlvl_scores, + batch_mlvl_centerness) + ] + return det_results + + def _get_points_single(self, + featmap_size, + stride, + dtype, + device, + flatten=False): + """Get points according to feature map sizes.""" + y, x = super()._get_points_single(featmap_size, stride, dtype, device) + points = torch.stack((x.reshape(-1) * stride, y.reshape(-1) * stride), + dim=-1) + stride // 2 + return points + + def get_targets(self, points, gt_bboxes_list, gt_labels_list): + """Compute regression, classification and centerness targets for points + in multiple images. + + Args: + points (list[Tensor]): Points of each fpn level, each has shape + (num_points, 2). + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image, + each has shape (num_gt, 4). + gt_labels_list (list[Tensor]): Ground truth labels of each box, + each has shape (num_gt,). + + Returns: + tuple: + concat_lvl_labels (list[Tensor]): Labels of each level. \ + concat_lvl_bbox_targets (list[Tensor]): BBox targets of each \ + level. + """ + assert len(points) == len(self.regress_ranges) + num_levels = len(points) + # expand regress ranges to align with points + expanded_regress_ranges = [ + points[i].new_tensor(self.regress_ranges[i])[None].expand_as( + points[i]) for i in range(num_levels) + ] + # concat all levels points and regress ranges + concat_regress_ranges = torch.cat(expanded_regress_ranges, dim=0) + concat_points = torch.cat(points, dim=0) + + # the number of points per img, per lvl + num_points = [center.size(0) for center in points] + + # get labels and bbox_targets of each image + labels_list, bbox_targets_list = multi_apply( + self._get_target_single, + gt_bboxes_list, + gt_labels_list, + points=concat_points, + regress_ranges=concat_regress_ranges, + num_points_per_lvl=num_points) + + # split to per img, per level + labels_list = [labels.split(num_points, 0) for labels in labels_list] + bbox_targets_list = [ + bbox_targets.split(num_points, 0) + for bbox_targets in bbox_targets_list + ] + + # concat per level image + concat_lvl_labels = [] + concat_lvl_bbox_targets = [] + for i in range(num_levels): + concat_lvl_labels.append( + torch.cat([labels[i] for labels in labels_list])) + bbox_targets = torch.cat( + [bbox_targets[i] for bbox_targets in bbox_targets_list]) + if self.norm_on_bbox: + bbox_targets = bbox_targets / self.strides[i] + concat_lvl_bbox_targets.append(bbox_targets) + return concat_lvl_labels, concat_lvl_bbox_targets + + def _get_target_single(self, gt_bboxes, gt_labels, points, regress_ranges, + num_points_per_lvl): + """Compute regression and classification targets for a single image.""" + num_points = points.size(0) + num_gts = gt_labels.size(0) + if num_gts == 0: + return gt_labels.new_full((num_points,), self.num_classes), \ + gt_bboxes.new_zeros((num_points, 4)) + + areas = (gt_bboxes[:, 2] - gt_bboxes[:, 0]) * ( + gt_bboxes[:, 3] - gt_bboxes[:, 1]) + # TODO: figure out why these two are different + # areas = areas[None].expand(num_points, num_gts) + areas = areas[None].repeat(num_points, 1) + regress_ranges = regress_ranges[:, None, :].expand( + num_points, num_gts, 2) + gt_bboxes = gt_bboxes[None].expand(num_points, num_gts, 4) + xs, ys = points[:, 0], points[:, 1] + xs = xs[:, None].expand(num_points, num_gts) + ys = ys[:, 
None].expand(num_points, num_gts) + + left = xs - gt_bboxes[..., 0] + right = gt_bboxes[..., 2] - xs + top = ys - gt_bboxes[..., 1] + bottom = gt_bboxes[..., 3] - ys + bbox_targets = torch.stack((left, top, right, bottom), -1) + + if self.center_sampling: + # condition1: inside a `center bbox` + radius = self.center_sample_radius + center_xs = (gt_bboxes[..., 0] + gt_bboxes[..., 2]) / 2 + center_ys = (gt_bboxes[..., 1] + gt_bboxes[..., 3]) / 2 + center_gts = torch.zeros_like(gt_bboxes) + stride = center_xs.new_zeros(center_xs.shape) + + # project the points on current lvl back to the `original` sizes + lvl_begin = 0 + for lvl_idx, num_points_lvl in enumerate(num_points_per_lvl): + lvl_end = lvl_begin + num_points_lvl + stride[lvl_begin:lvl_end] = self.strides[lvl_idx] * radius + lvl_begin = lvl_end + + x_mins = center_xs - stride + y_mins = center_ys - stride + x_maxs = center_xs + stride + y_maxs = center_ys + stride + center_gts[..., 0] = torch.where(x_mins > gt_bboxes[..., 0], + x_mins, gt_bboxes[..., 0]) + center_gts[..., 1] = torch.where(y_mins > gt_bboxes[..., 1], + y_mins, gt_bboxes[..., 1]) + center_gts[..., 2] = torch.where(x_maxs > gt_bboxes[..., 2], + gt_bboxes[..., 2], x_maxs) + center_gts[..., 3] = torch.where(y_maxs > gt_bboxes[..., 3], + gt_bboxes[..., 3], y_maxs) + + cb_dist_left = xs - center_gts[..., 0] + cb_dist_right = center_gts[..., 2] - xs + cb_dist_top = ys - center_gts[..., 1] + cb_dist_bottom = center_gts[..., 3] - ys + center_bbox = torch.stack( + (cb_dist_left, cb_dist_top, cb_dist_right, cb_dist_bottom), -1) + inside_gt_bbox_mask = center_bbox.min(-1)[0] > 0 + else: + # condition1: inside a gt bbox + inside_gt_bbox_mask = bbox_targets.min(-1)[0] > 0 + + # condition2: limit the regression range for each location + max_regress_distance = bbox_targets.max(-1)[0] + inside_regress_range = ( + (max_regress_distance >= regress_ranges[..., 0]) + & (max_regress_distance <= regress_ranges[..., 1])) + + # if there are still more than one objects for a location, + # we choose the one with minimal area + areas[inside_gt_bbox_mask == 0] = INF + areas[inside_regress_range == 0] = INF + min_area, min_area_inds = areas.min(dim=1) + + labels = gt_labels[min_area_inds] + labels[min_area == INF] = self.num_classes # set as BG + bbox_targets = bbox_targets[range(num_points), min_area_inds] + + return labels, bbox_targets + + def centerness_target(self, pos_bbox_targets): + """Compute centerness targets. + + Args: + pos_bbox_targets (Tensor): BBox targets of positive bboxes in shape + (num_pos, 4) + + Returns: + Tensor: Centerness target. 
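+
+        Example (editor's illustrative sketch; the target values are made
+        up for demonstration):
+            >>> import torch
+            >>> head = FCOSHead(num_classes=11, in_channels=7)
+            >>> # (l, t, r, b): first point centered, second point offset
+            >>> targets = torch.tensor([[2., 2., 2., 2.], [1., 3., 3., 1.]])
+            >>> head.centerness_target(targets)
+            tensor([1.0000, 0.3333])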
+ """ + # only calculate pos centerness targets, otherwise there may be nan + left_right = pos_bbox_targets[:, [0, 2]] + top_bottom = pos_bbox_targets[:, [1, 3]] + if len(left_right) == 0: + centerness_targets = left_right[..., 0] + else: + centerness_targets = ( + left_right.min(dim=-1)[0] / left_right.max(dim=-1)[0]) * ( + top_bottom.min(dim=-1)[0] / top_bottom.max(dim=-1)[0]) + return torch.sqrt(centerness_targets) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/fovea_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/fovea_head.py new file mode 100644 index 0000000000000000000000000000000000000000..657a8791feb8a60521abd11470a0585301757dd4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/fovea_head.py @@ -0,0 +1,348 @@ +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule +from mmcv.ops import DeformConv2d +from mmcv.runner import BaseModule + +from mmdet.core import multi_apply, multiclass_nms +from ..builder import HEADS +from .anchor_free_head import AnchorFreeHead + +INF = 1e8 + + +class FeatureAlign(BaseModule): + + def __init__(self, + in_channels, + out_channels, + kernel_size=3, + deform_groups=4, + init_cfg=dict( + type='Normal', + layer='Conv2d', + std=0.1, + override=dict( + type='Normal', name='conv_adaption', std=0.01))): + super(FeatureAlign, self).__init__(init_cfg) + offset_channels = kernel_size * kernel_size * 2 + self.conv_offset = nn.Conv2d( + 4, deform_groups * offset_channels, 1, bias=False) + self.conv_adaption = DeformConv2d( + in_channels, + out_channels, + kernel_size=kernel_size, + padding=(kernel_size - 1) // 2, + deform_groups=deform_groups) + self.relu = nn.ReLU(inplace=True) + + def forward(self, x, shape): + offset = self.conv_offset(shape) + x = self.relu(self.conv_adaption(x, offset)) + return x + + +@HEADS.register_module() +class FoveaHead(AnchorFreeHead): + """FoveaBox: Beyond Anchor-based Object Detector + https://arxiv.org/abs/1904.03797 + """ + + def __init__(self, + num_classes, + in_channels, + base_edge_list=(16, 32, 64, 128, 256), + scale_ranges=((8, 32), (16, 64), (32, 128), (64, 256), (128, + 512)), + sigma=0.4, + with_deform=False, + deform_groups=4, + init_cfg=dict( + type='Normal', + layer='Conv2d', + std=0.01, + override=dict( + type='Normal', + name='conv_cls', + std=0.01, + bias_prob=0.01)), + **kwargs): + self.base_edge_list = base_edge_list + self.scale_ranges = scale_ranges + self.sigma = sigma + self.with_deform = with_deform + self.deform_groups = deform_groups + super().__init__(num_classes, in_channels, init_cfg=init_cfg, **kwargs) + + def _init_layers(self): + # box branch + super()._init_reg_convs() + self.conv_reg = nn.Conv2d(self.feat_channels, 4, 3, padding=1) + + # cls branch + if not self.with_deform: + super()._init_cls_convs() + self.conv_cls = nn.Conv2d( + self.feat_channels, self.cls_out_channels, 3, padding=1) + else: + self.cls_convs = nn.ModuleList() + self.cls_convs.append( + ConvModule( + self.feat_channels, (self.feat_channels * 4), + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + bias=self.norm_cfg is None)) + self.cls_convs.append( + ConvModule((self.feat_channels * 4), (self.feat_channels * 4), + 1, + stride=1, + padding=0, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + bias=self.norm_cfg is None)) + self.feature_adaption = FeatureAlign( + self.feat_channels, + self.feat_channels, + kernel_size=3, + deform_groups=self.deform_groups) + self.conv_cls 
= nn.Conv2d( + int(self.feat_channels * 4), + self.cls_out_channels, + 3, + padding=1) + + def forward_single(self, x): + cls_feat = x + reg_feat = x + for reg_layer in self.reg_convs: + reg_feat = reg_layer(reg_feat) + bbox_pred = self.conv_reg(reg_feat) + if self.with_deform: + cls_feat = self.feature_adaption(cls_feat, bbox_pred.exp()) + for cls_layer in self.cls_convs: + cls_feat = cls_layer(cls_feat) + cls_score = self.conv_cls(cls_feat) + return cls_score, bbox_pred + + def _get_points_single(self, *args, **kwargs): + y, x = super()._get_points_single(*args, **kwargs) + return y + 0.5, x + 0.5 + + def loss(self, + cls_scores, + bbox_preds, + gt_bbox_list, + gt_label_list, + img_metas, + gt_bboxes_ignore=None): + assert len(cls_scores) == len(bbox_preds) + + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + points = self.get_points(featmap_sizes, bbox_preds[0].dtype, + bbox_preds[0].device) + num_imgs = cls_scores[0].size(0) + flatten_cls_scores = [ + cls_score.permute(0, 2, 3, 1).reshape(-1, self.cls_out_channels) + for cls_score in cls_scores + ] + flatten_bbox_preds = [ + bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4) + for bbox_pred in bbox_preds + ] + flatten_cls_scores = torch.cat(flatten_cls_scores) + flatten_bbox_preds = torch.cat(flatten_bbox_preds) + flatten_labels, flatten_bbox_targets = self.get_targets( + gt_bbox_list, gt_label_list, featmap_sizes, points) + + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + pos_inds = ((flatten_labels >= 0) + & (flatten_labels < self.num_classes)).nonzero().view(-1) + num_pos = len(pos_inds) + + loss_cls = self.loss_cls( + flatten_cls_scores, flatten_labels, avg_factor=num_pos + num_imgs) + if num_pos > 0: + pos_bbox_preds = flatten_bbox_preds[pos_inds] + pos_bbox_targets = flatten_bbox_targets[pos_inds] + pos_weights = pos_bbox_targets.new_zeros( + pos_bbox_targets.size()) + 1.0 + loss_bbox = self.loss_bbox( + pos_bbox_preds, + pos_bbox_targets, + pos_weights, + avg_factor=num_pos) + else: + loss_bbox = torch.tensor( + 0, + dtype=flatten_bbox_preds.dtype, + device=flatten_bbox_preds.device) + return dict(loss_cls=loss_cls, loss_bbox=loss_bbox) + + def get_targets(self, gt_bbox_list, gt_label_list, featmap_sizes, points): + label_list, bbox_target_list = multi_apply( + self._get_target_single, + gt_bbox_list, + gt_label_list, + featmap_size_list=featmap_sizes, + point_list=points) + flatten_labels = [ + torch.cat([ + labels_level_img.flatten() for labels_level_img in labels_level + ]) for labels_level in zip(*label_list) + ] + flatten_bbox_targets = [ + torch.cat([ + bbox_targets_level_img.reshape(-1, 4) + for bbox_targets_level_img in bbox_targets_level + ]) for bbox_targets_level in zip(*bbox_target_list) + ] + flatten_labels = torch.cat(flatten_labels) + flatten_bbox_targets = torch.cat(flatten_bbox_targets) + return flatten_labels, flatten_bbox_targets + + def _get_target_single(self, + gt_bboxes_raw, + gt_labels_raw, + featmap_size_list=None, + point_list=None): + + gt_areas = torch.sqrt((gt_bboxes_raw[:, 2] - gt_bboxes_raw[:, 0]) * + (gt_bboxes_raw[:, 3] - gt_bboxes_raw[:, 1])) + label_list = [] + bbox_target_list = [] + # for each pyramid, find the cls and box target + for base_len, (lower_bound, upper_bound), stride, featmap_size, \ + (y, x) in zip(self.base_edge_list, self.scale_ranges, + self.strides, featmap_size_list, point_list): + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + labels = gt_labels_raw.new_zeros(featmap_size) + self.num_classes + bbox_targets = 
gt_bboxes_raw.new(featmap_size[0], featmap_size[1], + 4) + 1 + # scale assignment + hit_indices = ((gt_areas >= lower_bound) & + (gt_areas <= upper_bound)).nonzero().flatten() + if len(hit_indices) == 0: + label_list.append(labels) + bbox_target_list.append(torch.log(bbox_targets)) + continue + _, hit_index_order = torch.sort(-gt_areas[hit_indices]) + hit_indices = hit_indices[hit_index_order] + gt_bboxes = gt_bboxes_raw[hit_indices, :] / stride + gt_labels = gt_labels_raw[hit_indices] + half_w = 0.5 * (gt_bboxes[:, 2] - gt_bboxes[:, 0]) + half_h = 0.5 * (gt_bboxes[:, 3] - gt_bboxes[:, 1]) + # valid fovea area: left, right, top, down + pos_left = torch.ceil( + gt_bboxes[:, 0] + (1 - self.sigma) * half_w - 0.5).long(). \ + clamp(0, featmap_size[1] - 1) + pos_right = torch.floor( + gt_bboxes[:, 0] + (1 + self.sigma) * half_w - 0.5).long(). \ + clamp(0, featmap_size[1] - 1) + pos_top = torch.ceil( + gt_bboxes[:, 1] + (1 - self.sigma) * half_h - 0.5).long(). \ + clamp(0, featmap_size[0] - 1) + pos_down = torch.floor( + gt_bboxes[:, 1] + (1 + self.sigma) * half_h - 0.5).long(). \ + clamp(0, featmap_size[0] - 1) + for px1, py1, px2, py2, label, (gt_x1, gt_y1, gt_x2, gt_y2) in \ + zip(pos_left, pos_top, pos_right, pos_down, gt_labels, + gt_bboxes_raw[hit_indices, :]): + labels[py1:py2 + 1, px1:px2 + 1] = label + bbox_targets[py1:py2 + 1, px1:px2 + 1, 0] = \ + (stride * x[py1:py2 + 1, px1:px2 + 1] - gt_x1) / base_len + bbox_targets[py1:py2 + 1, px1:px2 + 1, 1] = \ + (stride * y[py1:py2 + 1, px1:px2 + 1] - gt_y1) / base_len + bbox_targets[py1:py2 + 1, px1:px2 + 1, 2] = \ + (gt_x2 - stride * x[py1:py2 + 1, px1:px2 + 1]) / base_len + bbox_targets[py1:py2 + 1, px1:px2 + 1, 3] = \ + (gt_y2 - stride * y[py1:py2 + 1, px1:px2 + 1]) / base_len + bbox_targets = bbox_targets.clamp(min=1. / 16, max=16.) 
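+            # Editor's note: matched cells hold offsets normalized by
+            # base_len; the clamp above bounds them to [1/16, 16], so the
+            # log below yields regression targets in [-log 16, log 16].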
+ label_list.append(labels) + bbox_target_list.append(torch.log(bbox_targets)) + return label_list, bbox_target_list + + def get_bboxes(self, + cls_scores, + bbox_preds, + img_metas, + cfg=None, + rescale=None): + assert len(cls_scores) == len(bbox_preds) + num_levels = len(cls_scores) + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + points = self.get_points( + featmap_sizes, + bbox_preds[0].dtype, + bbox_preds[0].device, + flatten=True) + result_list = [] + for img_id in range(len(img_metas)): + cls_score_list = [ + cls_scores[i][img_id].detach() for i in range(num_levels) + ] + bbox_pred_list = [ + bbox_preds[i][img_id].detach() for i in range(num_levels) + ] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + det_bboxes = self._get_bboxes_single(cls_score_list, + bbox_pred_list, featmap_sizes, + points, img_shape, + scale_factor, cfg, rescale) + result_list.append(det_bboxes) + return result_list + + def _get_bboxes_single(self, + cls_scores, + bbox_preds, + featmap_sizes, + point_list, + img_shape, + scale_factor, + cfg, + rescale=False): + cfg = self.test_cfg if cfg is None else cfg + assert len(cls_scores) == len(bbox_preds) == len(point_list) + det_bboxes = [] + det_scores = [] + for cls_score, bbox_pred, featmap_size, stride, base_len, (y, x) \ + in zip(cls_scores, bbox_preds, featmap_sizes, self.strides, + self.base_edge_list, point_list): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + scores = cls_score.permute(1, 2, 0).reshape( + -1, self.cls_out_channels).sigmoid() + bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4).exp() + nms_pre = cfg.get('nms_pre', -1) + if (nms_pre > 0) and (scores.shape[0] > nms_pre): + max_scores, _ = scores.max(dim=1) + _, topk_inds = max_scores.topk(nms_pre) + bbox_pred = bbox_pred[topk_inds, :] + scores = scores[topk_inds, :] + y = y[topk_inds] + x = x[topk_inds] + x1 = (stride * x - base_len * bbox_pred[:, 0]). \ + clamp(min=0, max=img_shape[1] - 1) + y1 = (stride * y - base_len * bbox_pred[:, 1]). \ + clamp(min=0, max=img_shape[0] - 1) + x2 = (stride * x + base_len * bbox_pred[:, 2]). \ + clamp(min=0, max=img_shape[1] - 1) + y2 = (stride * y + base_len * bbox_pred[:, 3]). 
\
+                clamp(min=0, max=img_shape[0] - 1)
+            bboxes = torch.stack([x1, y1, x2, y2], -1)
+            det_bboxes.append(bboxes)
+            det_scores.append(scores)
+        det_bboxes = torch.cat(det_bboxes)
+        if rescale:
+            det_bboxes /= det_bboxes.new_tensor(scale_factor)
+        det_scores = torch.cat(det_scores)
+        padding = det_scores.new_zeros(det_scores.shape[0], 1)
+        # Remember that we set FG labels to [0, num_class-1] since mmdet v2.0
+        # BG cat_id: num_class
+        det_scores = torch.cat([det_scores, padding], dim=1)
+        det_bboxes, det_labels = multiclass_nms(det_bboxes, det_scores,
+                                                cfg.score_thr, cfg.nms,
+                                                cfg.max_per_img)
+        return det_bboxes, det_labels
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/free_anchor_retina_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/free_anchor_retina_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..b7f8aa76b33bc14778b68cec32555d4f6b86ad17
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/free_anchor_retina_head.py
@@ -0,0 +1,270 @@
+import torch
+import torch.nn.functional as F
+
+from mmdet.core import bbox_overlaps
+from ..builder import HEADS
+from .retina_head import RetinaHead
+
+EPS = 1e-12
+
+
+@HEADS.register_module()
+class FreeAnchorRetinaHead(RetinaHead):
+    """FreeAnchor RetinaHead used in https://arxiv.org/abs/1909.02466.
+
+    Args:
+        num_classes (int): Number of categories excluding the background
+            category.
+        in_channels (int): Number of channels in the input feature map.
+        stacked_convs (int): Number of conv layers in cls and reg tower.
+            Default: 4.
+        conv_cfg (dict): dictionary to construct and config conv layer.
+            Default: None.
+        norm_cfg (dict): dictionary to construct and config norm layer.
+            Default: norm_cfg=dict(type='GN', num_groups=32,
+            requires_grad=True).
+        pre_anchor_topk (int): Number of anchor boxes taken into each bag.
+        bbox_thr (float): The threshold of the saturated linear function.
+            It is usually the same as the IoU threshold used in NMS.
+        gamma (float): Gamma parameter in focal loss.
+        alpha (float): Alpha parameter in focal loss.
+    """  # noqa: W605
+
+    def __init__(self,
+                 num_classes,
+                 in_channels,
+                 stacked_convs=4,
+                 conv_cfg=None,
+                 norm_cfg=None,
+                 pre_anchor_topk=50,
+                 bbox_thr=0.6,
+                 gamma=2.0,
+                 alpha=0.5,
+                 **kwargs):
+        super(FreeAnchorRetinaHead,
+              self).__init__(num_classes, in_channels, stacked_convs, conv_cfg,
+                             norm_cfg, **kwargs)
+
+        self.pre_anchor_topk = pre_anchor_topk
+        self.bbox_thr = bbox_thr
+        self.gamma = gamma
+        self.alpha = alpha
+
+    def loss(self,
+             cls_scores,
+             bbox_preds,
+             gt_bboxes,
+             gt_labels,
+             img_metas,
+             gt_bboxes_ignore=None):
+        """Compute losses of the head.
+
+        Args:
+            cls_scores (list[Tensor]): Box scores for each scale level,
+                each has shape (N, num_anchors * num_classes, H, W).
+            bbox_preds (list[Tensor]): Box energies / deltas for each scale
+                level, each has shape (N, num_anchors * 4, H, W).
+            gt_bboxes (list[Tensor]): Each item is the ground-truth boxes of
+                one image, in [tl_x, tl_y, br_x, br_y] format.
+            gt_labels (list[Tensor]): Class indices corresponding to each box.
+            img_metas (list[dict]): Meta information of each image, e.g.,
+                image size, scaling factor, etc.
+            gt_bboxes_ignore (None | list[Tensor]): Specify which bounding
+                boxes can be ignored when computing the loss.
+
+        Returns:
+            dict[str, Tensor]: A dictionary of loss components.
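+
+        Note:
+            Editor's sketch of the computation below: for each image, the
+            ``pre_anchor_topk`` anchors with the highest IoU to each gt form
+            a "bag"; the positive loss is -log(mean-max(P_cls * P_loc)) per
+            bag, and the negative loss is a focal term on each anchor's
+            background probability weighted by (1 - P{a_j in A+}).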
+ """ + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == len(self.anchor_generator.base_anchors) + + anchor_list, _ = self.get_anchors(featmap_sizes, img_metas) + anchors = [torch.cat(anchor) for anchor in anchor_list] + + # concatenate each level + cls_scores = [ + cls.permute(0, 2, 3, + 1).reshape(cls.size(0), -1, self.cls_out_channels) + for cls in cls_scores + ] + bbox_preds = [ + bbox_pred.permute(0, 2, 3, 1).reshape(bbox_pred.size(0), -1, 4) + for bbox_pred in bbox_preds + ] + cls_scores = torch.cat(cls_scores, dim=1) + bbox_preds = torch.cat(bbox_preds, dim=1) + + cls_prob = torch.sigmoid(cls_scores) + box_prob = [] + num_pos = 0 + positive_losses = [] + for _, (anchors_, gt_labels_, gt_bboxes_, cls_prob_, + bbox_preds_) in enumerate( + zip(anchors, gt_labels, gt_bboxes, cls_prob, bbox_preds)): + + with torch.no_grad(): + if len(gt_bboxes_) == 0: + image_box_prob = torch.zeros( + anchors_.size(0), + self.cls_out_channels).type_as(bbox_preds_) + else: + # box_localization: a_{j}^{loc}, shape: [j, 4] + pred_boxes = self.bbox_coder.decode(anchors_, bbox_preds_) + + # object_box_iou: IoU_{ij}^{loc}, shape: [i, j] + object_box_iou = bbox_overlaps(gt_bboxes_, pred_boxes) + + # object_box_prob: P{a_{j} -> b_{i}}, shape: [i, j] + t1 = self.bbox_thr + t2 = object_box_iou.max( + dim=1, keepdim=True).values.clamp(min=t1 + 1e-12) + object_box_prob = ((object_box_iou - t1) / + (t2 - t1)).clamp( + min=0, max=1) + + # object_cls_box_prob: P{a_{j} -> b_{i}}, shape: [i, c, j] + num_obj = gt_labels_.size(0) + indices = torch.stack([ + torch.arange(num_obj).type_as(gt_labels_), gt_labels_ + ], + dim=0) + object_cls_box_prob = torch.sparse_coo_tensor( + indices, object_box_prob) + + # image_box_iou: P{a_{j} \in A_{+}}, shape: [c, j] + """ + from "start" to "end" implement: + image_box_iou = torch.sparse.max(object_cls_box_prob, + dim=0).t() + + """ + # start + box_cls_prob = torch.sparse.sum( + object_cls_box_prob, dim=0).to_dense() + + indices = torch.nonzero(box_cls_prob, as_tuple=False).t_() + if indices.numel() == 0: + image_box_prob = torch.zeros( + anchors_.size(0), + self.cls_out_channels).type_as(object_box_prob) + else: + nonzero_box_prob = torch.where( + (gt_labels_.unsqueeze(dim=-1) == indices[0]), + object_box_prob[:, indices[1]], + torch.tensor([ + 0 + ]).type_as(object_box_prob)).max(dim=0).values + + # upmap to shape [j, c] + image_box_prob = torch.sparse_coo_tensor( + indices.flip([0]), + nonzero_box_prob, + size=(anchors_.size(0), + self.cls_out_channels)).to_dense() + # end + + box_prob.append(image_box_prob) + + # construct bags for objects + match_quality_matrix = bbox_overlaps(gt_bboxes_, anchors_) + _, matched = torch.topk( + match_quality_matrix, + self.pre_anchor_topk, + dim=1, + sorted=False) + del match_quality_matrix + + # matched_cls_prob: P_{ij}^{cls} + matched_cls_prob = torch.gather( + cls_prob_[matched], 2, + gt_labels_.view(-1, 1, 1).repeat(1, self.pre_anchor_topk, + 1)).squeeze(2) + + # matched_box_prob: P_{ij}^{loc} + matched_anchors = anchors_[matched] + matched_object_targets = self.bbox_coder.encode( + matched_anchors, + gt_bboxes_.unsqueeze(dim=1).expand_as(matched_anchors)) + loss_bbox = self.loss_bbox( + bbox_preds_[matched], + matched_object_targets, + reduction_override='none').sum(-1) + matched_box_prob = torch.exp(-loss_bbox) + + # positive_losses: {-log( Mean-max(P_{ij}^{cls} * P_{ij}^{loc}) )} + num_pos += len(gt_bboxes_) + positive_losses.append( + self.positive_bag_loss(matched_cls_prob, matched_box_prob)) + 
positive_loss = torch.cat(positive_losses).sum() / max(1, num_pos)
+
+        # box_prob: P{a_{j} \in A_{+}}
+        box_prob = torch.stack(box_prob, dim=0)
+
+        # negative_loss:
+        # \sum_{j}{ FL((1 - P{a_{j} \in A_{+}}) * (1 - P_{j}^{bg})) } / n||B||
+        negative_loss = self.negative_bag_loss(cls_prob, box_prob).sum() / max(
+            1, num_pos * self.pre_anchor_topk)
+
+        # avoid the absence of gradients in the regression subnet
+        # when there is no ground truth in a batch
+        if num_pos == 0:
+            positive_loss = bbox_preds.sum() * 0
+
+        losses = {
+            'positive_bag_loss': positive_loss,
+            'negative_bag_loss': negative_loss
+        }
+        return losses
+
+    def positive_bag_loss(self, matched_cls_prob, matched_box_prob):
+        """Compute positive bag loss.
+
+        :math:`-log( Mean-max(P_{ij}^{cls} * P_{ij}^{loc}) )`.
+
+        :math:`P_{ij}^{cls}`: matched_cls_prob, classification probability of matched samples.
+
+        :math:`P_{ij}^{loc}`: matched_box_prob, box probability of matched samples.
+
+        Args:
+            matched_cls_prob (Tensor): Classification probability of matched
+                samples in shape (num_gt, pre_anchor_topk).
+            matched_box_prob (Tensor): BBox probability of matched samples,
+                in shape (num_gt, pre_anchor_topk).
+
+        Returns:
+            Tensor: Positive bag loss in shape (num_gt,).
+        """  # noqa: E501, W605
+        # bag_prob = Mean-max(matched_prob)
+        matched_prob = matched_cls_prob * matched_box_prob
+        weight = 1 / torch.clamp(1 - matched_prob, 1e-12, None)
+        weight /= weight.sum(dim=1).unsqueeze(dim=-1)
+        bag_prob = (weight * matched_prob).sum(dim=1)
+        # positive_bag_loss = -self.alpha * log(bag_prob)
+        return self.alpha * F.binary_cross_entropy(
+            bag_prob, torch.ones_like(bag_prob), reduction='none')
+
+    def negative_bag_loss(self, cls_prob, box_prob):
+        """Compute negative bag loss.
+
+        :math:`FL((1 - P_{a_{j} \in A_{+}}) * (1 - P_{j}^{bg}))`.
+
+        :math:`P_{a_{j} \in A_{+}}`: Box probability of matched samples.
+
+        :math:`P_{j}^{bg}`: Classification probability of negative samples.
+
+        Args:
+            cls_prob (Tensor): Classification probability, in shape
+                (num_img, num_anchors, num_classes).
+            box_prob (Tensor): Box probability, in shape
+                (num_img, num_anchors, num_classes).
+
+        Returns:
+            Tensor: Negative bag loss in shape
+            (num_img, num_anchors, num_classes).
+        """  # noqa: E501, W605
+        prob = cls_prob * (1 - box_prob)
+        # There are some cases when neg_prob = 0.
+        # This will cause the neg_prob.log() to be inf without the clamp.
+        prob = prob.clamp(min=EPS, max=1 - EPS)
+        negative_bag_loss = prob**self.gamma * F.binary_cross_entropy(
+            prob, torch.zeros_like(prob), reduction='none')
+        return (1 - self.alpha) * negative_bag_loss
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/fsaf_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/fsaf_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..6aa442da4e113a53827ffb016134d576d6192f77
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/fsaf_head.py
@@ -0,0 +1,432 @@
+import numpy as np
+import torch
+from mmcv.runner import force_fp32
+
+from mmdet.core import (anchor_inside_flags, images_to_levels, multi_apply,
+                        unmap)
+from ..builder import HEADS
+from ..losses.accuracy import accuracy
+from ..losses.utils import weight_reduce_loss
+from .retina_head import RetinaHead
+
+
+@HEADS.register_module()
+class FSAFHead(RetinaHead):
+    """Anchor-free head used in `FSAF <https://arxiv.org/abs/1903.00621>`_.
+
+    The head contains two subnetworks.
The first classifies anchor boxes and + the second regresses deltas for the anchors (num_anchors is 1 for anchor- + free methods) + + Args: + *args: Same as its base class in :class:`RetinaHead` + score_threshold (float, optional): The score_threshold to calculate + positive recall. If given, prediction scores lower than this value + is counted as incorrect prediction. Default to None. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + **kwargs: Same as its base class in :class:`RetinaHead` + + Example: + >>> import torch + >>> self = FSAFHead(11, 7) + >>> x = torch.rand(1, 7, 32, 32) + >>> cls_score, bbox_pred = self.forward_single(x) + >>> # Each anchor predicts a score for each class except background + >>> cls_per_anchor = cls_score.shape[1] / self.num_anchors + >>> box_per_anchor = bbox_pred.shape[1] / self.num_anchors + >>> assert cls_per_anchor == self.num_classes + >>> assert box_per_anchor == 4 + """ + + def __init__(self, *args, score_threshold=None, init_cfg=None, **kwargs): + # The positive bias in self.retina_reg conv is to prevent predicted \ + # bbox with 0 area + if init_cfg is None: + init_cfg = dict( + type='Normal', + layer='Conv2d', + std=0.01, + override=[ + dict( + type='Normal', + name='retina_cls', + std=0.01, + bias_prob=0.01), + dict( + type='Normal', name='retina_reg', std=0.01, bias=0.25) + ]) + super().__init__(*args, init_cfg=init_cfg, **kwargs) + self.score_threshold = score_threshold + + def forward_single(self, x): + """Forward feature map of a single scale level. + + Args: + x (Tensor): Feature map of a single scale level. + + Returns: + tuple (Tensor): + cls_score (Tensor): Box scores for each scale level + Has shape (N, num_points * num_classes, H, W). + bbox_pred (Tensor): Box energies / deltas for each scale + level with shape (N, num_points * 4, H, W). + """ + cls_score, bbox_pred = super().forward_single(x) + # relu: TBLR encoder only accepts positive bbox_pred + return cls_score, self.relu(bbox_pred) + + def _get_targets_single(self, + flat_anchors, + valid_flags, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + img_meta, + label_channels=1, + unmap_outputs=True): + """Compute regression and classification targets for anchors in a + single image. + + Most of the codes are the same with the base class + :obj: `AnchorHead`, except that it also collects and returns + the matched gt index in the image (from 0 to num_gt-1). If the + anchor bbox is not matched to any gt, the corresponding value in + pos_gt_inds is -1. 
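+
+        Note (editor's sketch): the extra ``pos_gt_inds`` return value is
+        what allows ``loss`` below to pick, for every gt, the FPN level with
+        the smallest loss and zero out the other levels (online feature
+        selection).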
+ """ + inside_flags = anchor_inside_flags(flat_anchors, valid_flags, + img_meta['img_shape'][:2], + self.train_cfg.allowed_border) + if not inside_flags.any(): + return (None, ) * 7 + # Assign gt and sample anchors + anchors = flat_anchors[inside_flags.type(torch.bool), :] + assign_result = self.assigner.assign( + anchors, gt_bboxes, gt_bboxes_ignore, + None if self.sampling else gt_labels) + + sampling_result = self.sampler.sample(assign_result, anchors, + gt_bboxes) + + num_valid_anchors = anchors.shape[0] + bbox_targets = torch.zeros_like(anchors) + bbox_weights = torch.zeros_like(anchors) + labels = anchors.new_full((num_valid_anchors, ), + self.num_classes, + dtype=torch.long) + label_weights = anchors.new_zeros((num_valid_anchors, label_channels), + dtype=torch.float) + pos_gt_inds = anchors.new_full((num_valid_anchors, ), + -1, + dtype=torch.long) + + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + + if len(pos_inds) > 0: + if not self.reg_decoded_bbox: + pos_bbox_targets = self.bbox_coder.encode( + sampling_result.pos_bboxes, sampling_result.pos_gt_bboxes) + else: + # When the regression loss (e.g. `IouLoss`, `GIouLoss`) + # is applied directly on the decoded bounding boxes, both + # the predicted boxes and regression targets should be with + # absolute coordinate format. + pos_bbox_targets = sampling_result.pos_gt_bboxes + bbox_targets[pos_inds, :] = pos_bbox_targets + bbox_weights[pos_inds, :] = 1.0 + # The assigned gt_index for each anchor. (0-based) + pos_gt_inds[pos_inds] = sampling_result.pos_assigned_gt_inds + if gt_labels is None: + # Only rpn gives gt_labels as None + # Foreground is the first class + labels[pos_inds] = 0 + else: + labels[pos_inds] = gt_labels[ + sampling_result.pos_assigned_gt_inds] + if self.train_cfg.pos_weight <= 0: + label_weights[pos_inds] = 1.0 + else: + label_weights[pos_inds] = self.train_cfg.pos_weight + + if len(neg_inds) > 0: + label_weights[neg_inds] = 1.0 + + # shadowed_labels is a tensor composed of tuples + # (anchor_inds, class_label) that indicate those anchors lying in the + # outer region of a gt or overlapped by another gt with a smaller + # area. + # + # Therefore, only the shadowed labels are ignored for loss calculation. + # the key `shadowed_labels` is defined in :obj:`CenterRegionAssigner` + shadowed_labels = assign_result.get_extra_property('shadowed_labels') + if shadowed_labels is not None and shadowed_labels.numel(): + if len(shadowed_labels.shape) == 2: + idx_, label_ = shadowed_labels[:, 0], shadowed_labels[:, 1] + assert (labels[idx_] != label_).all(), \ + 'One label cannot be both positive and ignored' + label_weights[idx_, label_] = 0 + else: + label_weights[shadowed_labels] = 0 + + # map up to original set of anchors + if unmap_outputs: + num_total_anchors = flat_anchors.size(0) + labels = unmap(labels, num_total_anchors, inside_flags) + label_weights = unmap(label_weights, num_total_anchors, + inside_flags) + bbox_targets = unmap(bbox_targets, num_total_anchors, inside_flags) + bbox_weights = unmap(bbox_weights, num_total_anchors, inside_flags) + pos_gt_inds = unmap( + pos_gt_inds, num_total_anchors, inside_flags, fill=-1) + + return (labels, label_weights, bbox_targets, bbox_weights, pos_inds, + neg_inds, sampling_result, pos_gt_inds) + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute loss of the head. 
+ + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_points * num_classes, H, W). + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_points * 4, H, W). + gt_bboxes (list[Tensor]): each item are the truth boxes for each + image in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + for i in range(len(bbox_preds)): # loop over fpn level + # avoid 0 area of the predicted bbox + bbox_preds[i] = bbox_preds[i].clamp(min=1e-4) + # TODO: It may directly use the base-class loss function. + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + batch_size = len(gt_bboxes) + device = cls_scores[0].device + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels) + if cls_reg_targets is None: + return None + (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, + num_total_pos, num_total_neg, + pos_assigned_gt_inds_list) = cls_reg_targets + + num_gts = np.array(list(map(len, gt_labels))) + num_total_samples = ( + num_total_pos + num_total_neg if self.sampling else num_total_pos) + # anchor number of multi levels + num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]] + # concat all level anchors and flags to a single tensor + concat_anchor_list = [] + for i in range(len(anchor_list)): + concat_anchor_list.append(torch.cat(anchor_list[i])) + all_anchor_list = images_to_levels(concat_anchor_list, + num_level_anchors) + losses_cls, losses_bbox = multi_apply( + self.loss_single, + cls_scores, + bbox_preds, + all_anchor_list, + labels_list, + label_weights_list, + bbox_targets_list, + bbox_weights_list, + num_total_samples=num_total_samples) + + # `pos_assigned_gt_inds_list` (length: fpn_levels) stores the assigned + # gt index of each anchor bbox in each fpn level. + cum_num_gts = list(np.cumsum(num_gts)) # length of batch_size + for i, assign in enumerate(pos_assigned_gt_inds_list): + # loop over fpn levels + for j in range(1, batch_size): + # loop over batch size + # Convert gt indices in each img to those in the batch + assign[j][assign[j] >= 0] += int(cum_num_gts[j - 1]) + pos_assigned_gt_inds_list[i] = assign.flatten() + labels_list[i] = labels_list[i].flatten() + num_gts = sum(map(len, gt_labels)) # total number of gt in the batch + # The unique label index of each gt in the batch + label_sequence = torch.arange(num_gts, device=device) + # Collect the average loss of each gt in each level + with torch.no_grad(): + loss_levels, = multi_apply( + self.collect_loss_level_single, + losses_cls, + losses_bbox, + pos_assigned_gt_inds_list, + labels_seq=label_sequence) + # Shape: (fpn_levels, num_gts). 
Loss of each gt at each fpn level + loss_levels = torch.stack(loss_levels, dim=0) + # Locate the best fpn level for loss back-propagation + if loss_levels.numel() == 0: # zero gt + argmin = loss_levels.new_empty((num_gts, ), dtype=torch.long) + else: + _, argmin = loss_levels.min(dim=0) + + # Reweight the loss of each (anchor, label) pair, so that only those + # at the best gt level are back-propagated. + losses_cls, losses_bbox, pos_inds = multi_apply( + self.reweight_loss_single, + losses_cls, + losses_bbox, + pos_assigned_gt_inds_list, + labels_list, + list(range(len(losses_cls))), + min_levels=argmin) + num_pos = torch.cat(pos_inds, 0).sum().float() + pos_recall = self.calculate_pos_recall(cls_scores, labels_list, + pos_inds) + + if num_pos == 0: # No gt + avg_factor = num_pos + float(num_total_neg) + else: + avg_factor = num_pos + for i in range(len(losses_cls)): + losses_cls[i] /= avg_factor + losses_bbox[i] /= avg_factor + return dict( + loss_cls=losses_cls, + loss_bbox=losses_bbox, + num_pos=num_pos / batch_size, + pos_recall=pos_recall) + + def calculate_pos_recall(self, cls_scores, labels_list, pos_inds): + """Calculate positive recall with score threshold. + + Args: + cls_scores (list[Tensor]): Classification scores at all fpn levels. + Each tensor is in shape (N, num_classes * num_anchors, H, W) + labels_list (list[Tensor]): The label that each anchor is assigned + to. Shape (N * H * W * num_anchors, ) + pos_inds (list[Tensor]): List of bool tensors indicating whether + the anchor is assigned to a positive label. + Shape (N * H * W * num_anchors, ) + + Returns: + Tensor: A single float number indicating the positive recall. + """ + with torch.no_grad(): + num_class = self.num_classes + scores = [ + cls.permute(0, 2, 3, 1).reshape(-1, num_class)[pos] + for cls, pos in zip(cls_scores, pos_inds) + ] + labels = [ + label.reshape(-1)[pos] + for label, pos in zip(labels_list, pos_inds) + ] + scores = torch.cat(scores, dim=0) + labels = torch.cat(labels, dim=0) + if self.use_sigmoid_cls: + scores = scores.sigmoid() + else: + scores = scores.softmax(dim=1) + + return accuracy(scores, labels, thresh=self.score_threshold) + + def collect_loss_level_single(self, cls_loss, reg_loss, assigned_gt_inds, + labels_seq): + """Get the average loss in each FPN level w.r.t. each gt label. + + Args: + cls_loss (Tensor): Classification loss of each feature map pixel, + shape (num_anchor, num_class) + reg_loss (Tensor): Regression loss of each feature map pixel, + shape (num_anchor, 4) + assigned_gt_inds (Tensor): It indicates which gt the prior is + assigned to (0-based, -1: no assignment). shape (num_anchor), + labels_seq: The rank of labels. shape (num_gt) + + Returns: + shape: (num_gt), average loss of each gt in this level + """ + if len(reg_loss.shape) == 2: # iou loss has shape (num_prior, 4) + reg_loss = reg_loss.sum(dim=-1) # sum loss in tblr dims + if len(cls_loss.shape) == 2: + cls_loss = cls_loss.sum(dim=-1) # sum loss in class dims + loss = cls_loss + reg_loss + assert loss.size(0) == assigned_gt_inds.size(0) + # Default loss value is 1e6 for a layer where no anchor is positive + # to ensure it will not be chosen to back-propagate gradient + losses_ = loss.new_full(labels_seq.shape, 1e6) + for i, l in enumerate(labels_seq): + match = assigned_gt_inds == l + if match.any(): + losses_[i] = loss[match].mean() + return losses_, + + def reweight_loss_single(self, cls_loss, reg_loss, assigned_gt_inds, + labels, level, min_levels): + """Reweight loss values at each level. 
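+
+        (Editor's note: ``min_levels`` is the per-gt argmin over the level
+        losses computed in ``loss`` above; positives whose gt selected a
+        different level are masked out here, i.e. their localization loss
+        and the matching classification entry get zero weight.)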
+ + Reassign loss values at each level by masking those where the + pre-calculated loss is too large. Then return the reduced losses. + + Args: + cls_loss (Tensor): Element-wise classification loss. + Shape: (num_anchors, num_classes) + reg_loss (Tensor): Element-wise regression loss. + Shape: (num_anchors, 4) + assigned_gt_inds (Tensor): The gt indices that each anchor bbox + is assigned to. -1 denotes a negative anchor, otherwise it is the + gt index (0-based). Shape: (num_anchors, ), + labels (Tensor): Label assigned to anchors. Shape: (num_anchors, ). + level (int): The current level index in the pyramid + (0-4 for RetinaNet) + min_levels (Tensor): The best-matching level for each gt. + Shape: (num_gts, ), + + Returns: + tuple: + - cls_loss: Reduced corrected classification loss. Scalar. + - reg_loss: Reduced corrected regression loss. Scalar. + - pos_flags (Tensor): Corrected bool tensor indicating the + final positive anchors. Shape: (num_anchors, ). + """ + loc_weight = torch.ones_like(reg_loss) + cls_weight = torch.ones_like(cls_loss) + pos_flags = assigned_gt_inds >= 0 # positive pixel flag + pos_indices = torch.nonzero(pos_flags, as_tuple=False).flatten() + + if pos_flags.any(): # pos pixels exist + pos_assigned_gt_inds = assigned_gt_inds[pos_flags] + zeroing_indices = (min_levels[pos_assigned_gt_inds] != level) + neg_indices = pos_indices[zeroing_indices] + + if neg_indices.numel(): + pos_flags[neg_indices] = 0 + loc_weight[neg_indices] = 0 + # Only the weight corresponding to the label is + # zeroed out if not selected + zeroing_labels = labels[neg_indices] + assert (zeroing_labels >= 0).all() + cls_weight[neg_indices, zeroing_labels] = 0 + + # Weighted loss for both cls and reg loss + cls_loss = weight_reduce_loss(cls_loss, cls_weight, reduction='sum') + reg_loss = weight_reduce_loss(reg_loss, loc_weight, reduction='sum') + + return cls_loss, reg_loss, pos_flags diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/ga_retina_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/ga_retina_head.py new file mode 100644 index 0000000000000000000000000000000000000000..cc83bd51c58acc80724dd5630f96415c5b68616b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/ga_retina_head.py @@ -0,0 +1,112 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule +from mmcv.ops import MaskedConv2d + +from ..builder import HEADS +from .guided_anchor_head import FeatureAdaption, GuidedAnchorHead + + +@HEADS.register_module() +class GARetinaHead(GuidedAnchorHead): + """Guided-Anchor-based RetinaNet head.""" + + def __init__(self, + num_classes, + in_channels, + stacked_convs=4, + conv_cfg=None, + norm_cfg=None, + init_cfg=None, + **kwargs): + if init_cfg is None: + init_cfg = dict( + type='Normal', + layer='Conv2d', + std=0.01, + override=[ + dict( + type='Normal', + name='conv_loc', + std=0.01, + bias_prob=0.01), + dict( + type='Normal', + name='retina_cls', + std=0.01, + bias_prob=0.01) + ]) + self.stacked_convs = stacked_convs + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + super(GARetinaHead, self).__init__( + num_classes, in_channels, init_cfg=init_cfg, **kwargs) + + def _init_layers(self): + """Initialize layers of the head.""" + self.relu = nn.ReLU(inplace=True) + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + self.cls_convs.append( + ConvModule( + chn, + self.feat_channels, 
+ 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.reg_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + + self.conv_loc = nn.Conv2d(self.feat_channels, 1, 1) + self.conv_shape = nn.Conv2d(self.feat_channels, self.num_anchors * 2, + 1) + self.feature_adaption_cls = FeatureAdaption( + self.feat_channels, + self.feat_channels, + kernel_size=3, + deform_groups=self.deform_groups) + self.feature_adaption_reg = FeatureAdaption( + self.feat_channels, + self.feat_channels, + kernel_size=3, + deform_groups=self.deform_groups) + self.retina_cls = MaskedConv2d( + self.feat_channels, + self.num_anchors * self.cls_out_channels, + 3, + padding=1) + self.retina_reg = MaskedConv2d( + self.feat_channels, self.num_anchors * 4, 3, padding=1) + + def forward_single(self, x): + """Forward feature map of a single scale level.""" + cls_feat = x + reg_feat = x + for cls_conv in self.cls_convs: + cls_feat = cls_conv(cls_feat) + for reg_conv in self.reg_convs: + reg_feat = reg_conv(reg_feat) + + loc_pred = self.conv_loc(cls_feat) + shape_pred = self.conv_shape(reg_feat) + + cls_feat = self.feature_adaption_cls(cls_feat, shape_pred) + reg_feat = self.feature_adaption_reg(reg_feat, shape_pred) + + if not self.training: + mask = loc_pred.sigmoid()[0] >= self.loc_filter_thr + else: + mask = None + cls_score = self.retina_cls(cls_feat, mask) + bbox_pred = self.retina_reg(reg_feat, mask) + return cls_score, bbox_pred, shape_pred, loc_pred diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/ga_rpn_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/ga_rpn_head.py new file mode 100644 index 0000000000000000000000000000000000000000..7c739de283e4cf9c48c94f0da1f094b0fa786867 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/ga_rpn_head.py @@ -0,0 +1,176 @@ +import copy +import warnings + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv import ConfigDict +from mmcv.ops import nms + +from ..builder import HEADS +from .guided_anchor_head import GuidedAnchorHead + + +@HEADS.register_module() +class GARPNHead(GuidedAnchorHead): + """Guided-Anchor-based RPN head.""" + + def __init__(self, + in_channels, + init_cfg=dict( + type='Normal', + layer='Conv2d', + std=0.01, + override=dict( + type='Normal', + name='conv_loc', + std=0.01, + bias_prob=0.01)), + **kwargs): + super(GARPNHead, self).__init__( + 1, in_channels, init_cfg=init_cfg, **kwargs) + + def _init_layers(self): + """Initialize layers of the head.""" + self.rpn_conv = nn.Conv2d( + self.in_channels, self.feat_channels, 3, padding=1) + super(GARPNHead, self)._init_layers() + + def forward_single(self, x): + """Forward feature of a single scale level.""" + + x = self.rpn_conv(x) + x = F.relu(x, inplace=True) + (cls_score, bbox_pred, shape_pred, + loc_pred) = super(GARPNHead, self).forward_single(x) + return cls_score, bbox_pred, shape_pred, loc_pred + + def loss(self, + cls_scores, + bbox_preds, + shape_preds, + loc_preds, + gt_bboxes, + img_metas, + gt_bboxes_ignore=None): + losses = super(GARPNHead, self).loss( + cls_scores, + bbox_preds, + shape_preds, + loc_preds, + gt_bboxes, + None, + img_metas, + gt_bboxes_ignore=gt_bboxes_ignore) + return dict( + loss_rpn_cls=losses['loss_cls'], + loss_rpn_bbox=losses['loss_bbox'], + loss_anchor_shape=losses['loss_shape'], + loss_anchor_loc=losses['loss_loc']) + + def 
_get_bboxes_single(self,
+                           cls_scores,
+                           bbox_preds,
+                           mlvl_anchors,
+                           mlvl_masks,
+                           img_shape,
+                           scale_factor,
+                           cfg,
+                           rescale=False):
+        cfg = self.test_cfg if cfg is None else cfg
+
+        cfg = copy.deepcopy(cfg)
+
+        # deprecated arguments warning
+        if 'nms' not in cfg or 'max_num' in cfg or 'nms_thr' in cfg:
+            warnings.warn(
+                'In rpn_proposal or test_cfg, '
+                'nms_thr has been moved into a dict named nms as '
+                'iou_threshold, and max_num has been renamed max_per_img; '
+                'the original arguments and the old way of specifying '
+                'iou_threshold of NMS will be deprecated.')
+        if 'nms' not in cfg:
+            cfg.nms = ConfigDict(dict(type='nms', iou_threshold=cfg.nms_thr))
+        if 'max_num' in cfg:
+            if 'max_per_img' in cfg:
+                assert cfg.max_num == cfg.max_per_img, f'You ' \
+                    f'set max_num and max_per_img at the same time, ' \
+                    f'but get {cfg.max_num} ' \
+                    f'and {cfg.max_per_img} respectively. ' \
+                    'Please delete max_num which will be deprecated.'
+            else:
+                cfg.max_per_img = cfg.max_num
+        if 'nms_thr' in cfg:
+            assert cfg.nms.iou_threshold == cfg.nms_thr, f'You set ' \
+                f'iou_threshold in nms and ' \
+                f'nms_thr at the same time, but get ' \
+                f'{cfg.nms.iou_threshold} and {cfg.nms_thr}' \
+                f' respectively. Please delete the ' \
+                f'nms_thr which will be deprecated.'
+
+        assert cfg.nms.get('type', 'nms') == 'nms', \
+            'GARPNHead only supports naive nms.'
+
+        mlvl_proposals = []
+        for idx in range(len(cls_scores)):
+            rpn_cls_score = cls_scores[idx]
+            rpn_bbox_pred = bbox_preds[idx]
+            anchors = mlvl_anchors[idx]
+            mask = mlvl_masks[idx]
+            assert rpn_cls_score.size()[-2:] == rpn_bbox_pred.size()[-2:]
+            # if no location is kept, end.
+            if mask.sum() == 0:
+                continue
+            rpn_cls_score = rpn_cls_score.permute(1, 2, 0)
+            if self.use_sigmoid_cls:
+                rpn_cls_score = rpn_cls_score.reshape(-1)
+                scores = rpn_cls_score.sigmoid()
+            else:
+                rpn_cls_score = rpn_cls_score.reshape(-1, 2)
+                # remind that we set FG labels to [0, num_class-1]
+                # since mmdet v2.0
+                # BG cat_id: num_class
+                scores = rpn_cls_score.softmax(dim=1)[:, :-1]
+            # filter scores, bbox_pred w.r.t. mask.
+            # anchors are filtered in get_anchors() beforehand.
+            scores = scores[mask]
+            rpn_bbox_pred = rpn_bbox_pred.permute(1, 2, 0).reshape(-1,
+                                                                   4)[mask, :]
+            if scores.dim() == 0:
+                rpn_bbox_pred = rpn_bbox_pred.unsqueeze(0)
+                anchors = anchors.unsqueeze(0)
+                scores = scores.unsqueeze(0)
+            # filter anchors, bbox_pred, scores w.r.t. scores
+            if cfg.nms_pre > 0 and scores.shape[0] > cfg.nms_pre:
+                _, topk_inds = scores.topk(cfg.nms_pre)
+                rpn_bbox_pred = rpn_bbox_pred[topk_inds, :]
+                anchors = anchors[topk_inds, :]
+                scores = scores[topk_inds]
+            # get proposals w.r.t.
anchors and rpn_bbox_pred + proposals = self.bbox_coder.decode( + anchors, rpn_bbox_pred, max_shape=img_shape) + # filter out too small bboxes + if cfg.min_bbox_size >= 0: + w = proposals[:, 2] - proposals[:, 0] + h = proposals[:, 3] - proposals[:, 1] + valid_inds = torch.nonzero( + (w > cfg.min_bbox_size) & (h > cfg.min_bbox_size), + as_tuple=False).squeeze() + proposals = proposals[valid_inds, :] + scores = scores[valid_inds] + # NMS in current level + proposals, _ = nms(proposals, scores, cfg.nms.iou_threshold) + proposals = proposals[:cfg.nms_post, :] + mlvl_proposals.append(proposals) + proposals = torch.cat(mlvl_proposals, 0) + if cfg.get('nms_across_levels', False): + # NMS across multi levels + proposals, _ = nms(proposals[:, :4], proposals[:, -1], + cfg.nms.iou_threshold) + proposals = proposals[:cfg.max_per_img, :] + else: + scores = proposals[:, 4] + num = min(cfg.max_per_img, proposals.shape[0]) + _, topk_inds = scores.topk(num) + proposals = proposals[topk_inds, :] + return proposals diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/gfl_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/gfl_head.py new file mode 100644 index 0000000000000000000000000000000000000000..a62cf7a4f99522111c9d14079154de861a776809 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/gfl_head.py @@ -0,0 +1,648 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule, Scale +from mmcv.runner import force_fp32 + +from mmdet.core import (anchor_inside_flags, bbox2distance, bbox_overlaps, + build_assigner, build_sampler, distance2bbox, + images_to_levels, multi_apply, multiclass_nms, + reduce_mean, unmap) +from ..builder import HEADS, build_loss +from .anchor_head import AnchorHead + + +class Integral(nn.Module): + """A fixed layer for calculating integral result from distribution. + + This layer calculates the target location by :math: `sum{P(y_i) * y_i}`, + P(y_i) denotes the softmax vector that represents the discrete distribution + y_i denotes the discrete set, usually {0, 1, 2, ..., reg_max} + + Args: + reg_max (int): The maximal value of the discrete set. Default: 16. You + may want to reset it according to your new dataset or related + settings. + """ + + def __init__(self, reg_max=16): + super(Integral, self).__init__() + self.reg_max = reg_max + self.register_buffer('project', + torch.linspace(0, self.reg_max, self.reg_max + 1)) + + def forward(self, x): + """Forward feature from the regression head to get integral result of + bounding box location. + + Args: + x (Tensor): Features of the regression head, shape (N, 4*(n+1)), + n is self.reg_max. + + Returns: + x (Tensor): Integral result of box locations, i.e., distance + offsets from the box center in four directions, shape (N, 4). + """ + x = F.softmax(x.reshape(-1, self.reg_max + 1), dim=1) + x = F.linear(x, self.project.type_as(x)).reshape(-1, 4) + return x + + +@HEADS.register_module() +class GFLHead(AnchorHead): + """Generalized Focal Loss: Learning Qualified and Distributed Bounding + Boxes for Dense Object Detection. 
+
+    GFL head structure is similar to ATSS, however GFL uses
+    1) joint representation for classification and localization quality, and
+    2) flexible General distribution for bounding box locations,
+    which are supervised by
+    Quality Focal Loss (QFL) and Distribution Focal Loss (DFL), respectively.
+
+    https://arxiv.org/abs/2006.04388
+
+    Args:
+        num_classes (int): Number of categories excluding the background
+            category.
+        in_channels (int): Number of channels in the input feature map.
+        stacked_convs (int): Number of conv layers in cls and reg tower.
+            Default: 4.
+        conv_cfg (dict): dictionary to construct and config conv layer.
+            Default: None.
+        norm_cfg (dict): dictionary to construct and config norm layer.
+            Default: dict(type='GN', num_groups=32, requires_grad=True).
+        loss_dfl (dict): Config of Distribution Focal Loss (DFL). The Quality
+            Focal Loss (QFL) is configured through ``loss_cls``.
+        reg_max (int): Max value of integral set :math:`{0, ..., reg_max}`
+            in QFL setting. Default: 16.
+        init_cfg (dict or list[dict], optional): Initialization config dict.
+    Example:
+        >>> self = GFLHead(11, 7)
+        >>> feats = [torch.rand(1, 7, s, s) for s in [4, 8, 16, 32, 64]]
+        >>> cls_quality_score, bbox_pred = self.forward(feats)
+        >>> assert len(cls_quality_score) == len(self.scales)
+    """
+
+    def __init__(self,
+                 num_classes,
+                 in_channels,
+                 stacked_convs=4,
+                 conv_cfg=None,
+                 norm_cfg=dict(type='GN', num_groups=32, requires_grad=True),
+                 loss_dfl=dict(type='DistributionFocalLoss', loss_weight=0.25),
+                 reg_max=16,
+                 init_cfg=dict(
+                     type='Normal',
+                     layer='Conv2d',
+                     std=0.01,
+                     override=dict(
+                         type='Normal',
+                         name='gfl_cls',
+                         std=0.01,
+                         bias_prob=0.01)),
+                 **kwargs):
+        self.stacked_convs = stacked_convs
+        self.conv_cfg = conv_cfg
+        self.norm_cfg = norm_cfg
+        self.reg_max = reg_max
+        super(GFLHead, self).__init__(
+            num_classes, in_channels, init_cfg=init_cfg, **kwargs)
+
+        self.sampling = False
+        if self.train_cfg:
+            self.assigner = build_assigner(self.train_cfg.assigner)
+            # sampling=False, so use PseudoSampler
+            sampler_cfg = dict(type='PseudoSampler')
+            self.sampler = build_sampler(sampler_cfg, context=self)
+
+        self.integral = Integral(self.reg_max)
+        self.loss_dfl = build_loss(loss_dfl)
+
+    def _init_layers(self):
+        """Initialize layers of the head."""
+        self.relu = nn.ReLU(inplace=True)
+        self.cls_convs = nn.ModuleList()
+        self.reg_convs = nn.ModuleList()
+        for i in range(self.stacked_convs):
+            chn = self.in_channels if i == 0 else self.feat_channels
+            self.cls_convs.append(
+                ConvModule(
+                    chn,
+                    self.feat_channels,
+                    3,
+                    stride=1,
+                    padding=1,
+                    conv_cfg=self.conv_cfg,
+                    norm_cfg=self.norm_cfg))
+            self.reg_convs.append(
+                ConvModule(
+                    chn,
+                    self.feat_channels,
+                    3,
+                    stride=1,
+                    padding=1,
+                    conv_cfg=self.conv_cfg,
+                    norm_cfg=self.norm_cfg))
+        assert self.num_anchors == 1, 'anchor free version'
+        self.gfl_cls = nn.Conv2d(
+            self.feat_channels, self.cls_out_channels, 3, padding=1)
+        self.gfl_reg = nn.Conv2d(
+            self.feat_channels, 4 * (self.reg_max + 1), 3, padding=1)
+        self.scales = nn.ModuleList(
+            [Scale(1.0) for _ in self.anchor_generator.strides])
+
+    def forward(self, feats):
+        """Forward features from the upstream network.
+
+        Args:
+            feats (tuple[Tensor]): Features from the upstream network, each is
+                a 4D-tensor.
+
+        Returns:
+            tuple: Usually a tuple of classification scores and bbox prediction
+                cls_scores (list[Tensor]): Classification and quality (IoU)
+                    joint scores for all scale levels, each is a 4D-tensor,
+                    the channel number is num_classes.
+
+                bbox_preds (list[Tensor]): Box distribution logits for all
+                    scale levels, each is a 4D-tensor, the channel number is
+                    4*(n+1), n is the max value of the integral set.
+        """
+        return multi_apply(self.forward_single, feats, self.scales)
+
+    def forward_single(self, x, scale):
+        """Forward feature of a single scale level.
+
+        Args:
+            x (Tensor): Features of a single scale level.
+            scale (:obj:`mmcv.cnn.Scale`): Learnable scale module to resize
+                the bbox prediction.
+
+        Returns:
+            tuple:
+                cls_score (Tensor): Cls and quality joint scores for a single
+                    scale level, the channel number is num_classes.
+                bbox_pred (Tensor): Box distribution logits for a single scale
+                    level, the channel number is 4*(n+1), n is the max value
+                    of the integral set.
+        """
+        cls_feat = x
+        reg_feat = x
+        for cls_conv in self.cls_convs:
+            cls_feat = cls_conv(cls_feat)
+        for reg_conv in self.reg_convs:
+            reg_feat = reg_conv(reg_feat)
+        cls_score = self.gfl_cls(cls_feat)
+        bbox_pred = scale(self.gfl_reg(reg_feat)).float()
+        return cls_score, bbox_pred
+
+    def anchor_center(self, anchors):
+        """Get anchor centers from anchors.
+
+        Args:
+            anchors (Tensor): Anchor list with shape (N, 4), "xyxy" format.
+
+        Returns:
+            Tensor: Anchor centers with shape (N, 2), "xy" format.
+        """
+        anchors_cx = (anchors[..., 2] + anchors[..., 0]) / 2
+        anchors_cy = (anchors[..., 3] + anchors[..., 1]) / 2
+        return torch.stack([anchors_cx, anchors_cy], dim=-1)
+
+    def loss_single(self, anchors, cls_score, bbox_pred, labels, label_weights,
+                    bbox_targets, stride, num_total_samples):
+        """Compute loss of a single scale level.
+
+        Args:
+            anchors (Tensor): Box reference for each scale level with shape
+                (N, num_total_anchors, 4).
+            cls_score (Tensor): Cls and quality joint scores for each scale
+                level with shape (N, num_classes, H, W).
+            bbox_pred (Tensor): Box distribution logits for each scale
+                level with shape (N, 4*(n+1), H, W), n is the max value of
+                the integral set.
+            labels (Tensor): Labels of each anchor with shape
+                (N, num_total_anchors).
+            label_weights (Tensor): Label weights of each anchor with shape
+                (N, num_total_anchors).
+            bbox_targets (Tensor): BBox regression targets of each anchor with
+                shape (N, num_total_anchors, 4).
+            stride (tuple): Stride in this scale level.
+            num_total_samples (int): Number of positive samples that is
+                reduced over all GPUs.
+
+        Returns:
+            tuple[Tensor]: loss_cls, loss_bbox, loss_dfl and the summed
+                quality weights used later as the normalization factor.
+        """
+        assert stride[0] == stride[1], 'h stride is not equal to w stride!'
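+        # The steps below, in summary: flatten the per-level tensors, select
+        # positive anchors, decode the predicted distributions into boxes with
+        # the Integral layer, and use the IoU between decoded boxes and their
+        # targets as the soft quality score that QFL (self.loss_cls) regresses
+        # for positive anchors.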
+ anchors = anchors.reshape(-1, 4) + cls_score = cls_score.permute(0, 2, 3, + 1).reshape(-1, self.cls_out_channels) + bbox_pred = bbox_pred.permute(0, 2, 3, + 1).reshape(-1, 4 * (self.reg_max + 1)) + bbox_targets = bbox_targets.reshape(-1, 4) + labels = labels.reshape(-1) + label_weights = label_weights.reshape(-1) + + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + bg_class_ind = self.num_classes + pos_inds = ((labels >= 0) + & (labels < bg_class_ind)).nonzero().squeeze(1) + score = label_weights.new_zeros(labels.shape) + + if len(pos_inds) > 0: + pos_bbox_targets = bbox_targets[pos_inds] + pos_bbox_pred = bbox_pred[pos_inds] + pos_anchors = anchors[pos_inds] + pos_anchor_centers = self.anchor_center(pos_anchors) / stride[0] + + weight_targets = cls_score.detach().sigmoid() + weight_targets = weight_targets.max(dim=1)[0][pos_inds] + pos_bbox_pred_corners = self.integral(pos_bbox_pred) + pos_decode_bbox_pred = distance2bbox(pos_anchor_centers, + pos_bbox_pred_corners) + pos_decode_bbox_targets = pos_bbox_targets / stride[0] + score[pos_inds] = bbox_overlaps( + pos_decode_bbox_pred.detach(), + pos_decode_bbox_targets, + is_aligned=True) + pred_corners = pos_bbox_pred.reshape(-1, self.reg_max + 1) + target_corners = bbox2distance(pos_anchor_centers, + pos_decode_bbox_targets, + self.reg_max).reshape(-1) + + # regression loss + loss_bbox = self.loss_bbox( + pos_decode_bbox_pred, + pos_decode_bbox_targets, + weight=weight_targets, + avg_factor=1.0) + + # dfl loss + loss_dfl = self.loss_dfl( + pred_corners, + target_corners, + weight=weight_targets[:, None].expand(-1, 4).reshape(-1), + avg_factor=4.0) + else: + loss_bbox = bbox_pred.sum() * 0 + loss_dfl = bbox_pred.sum() * 0 + weight_targets = bbox_pred.new_tensor(0) + + # cls (qfl) loss + loss_cls = self.loss_cls( + cls_score, (labels, score), + weight=label_weights, + avg_factor=num_total_samples) + + return loss_cls, loss_bbox, loss_dfl, weight_targets.sum() + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + cls_scores (list[Tensor]): Cls and quality scores for each scale + level has shape (N, num_classes, H, W). + bbox_preds (list[Tensor]): Box distribution logits for each scale + level with shape (N, 4*(n+1), H, W), n is max value of integral + set. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (list[Tensor] | None): specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. 
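+
+        Note:
+            The returned bbox and DFL losses are additionally divided by
+            ``avg_factor``, the per-level quality weights summed in
+            ``loss_single`` and averaged across GPUs with ``reduce_mean``.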
+ """ + + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels) + if cls_reg_targets is None: + return None + + (anchor_list, labels_list, label_weights_list, bbox_targets_list, + bbox_weights_list, num_total_pos, num_total_neg) = cls_reg_targets + + num_total_samples = reduce_mean( + torch.tensor(num_total_pos, dtype=torch.float, + device=device)).item() + num_total_samples = max(num_total_samples, 1.0) + + losses_cls, losses_bbox, losses_dfl,\ + avg_factor = multi_apply( + self.loss_single, + anchor_list, + cls_scores, + bbox_preds, + labels_list, + label_weights_list, + bbox_targets_list, + self.anchor_generator.strides, + num_total_samples=num_total_samples) + + avg_factor = sum(avg_factor) + avg_factor = reduce_mean(avg_factor).clamp_(min=1).item() + losses_bbox = list(map(lambda x: x / avg_factor, losses_bbox)) + losses_dfl = list(map(lambda x: x / avg_factor, losses_dfl)) + return dict( + loss_cls=losses_cls, loss_bbox=losses_bbox, loss_dfl=losses_dfl) + + def _get_bboxes(self, + cls_scores, + bbox_preds, + mlvl_anchors, + img_shapes, + scale_factors, + cfg, + rescale=False, + with_nms=True): + """Transform outputs for a single batch item into labeled boxes. + + Args: + cls_scores (list[Tensor]): Box scores for a single scale level + has shape (N, num_classes, H, W). + bbox_preds (list[Tensor]): Box distribution logits for a single + scale level with shape (N, 4*(n+1), H, W), n is max value of + integral set. + mlvl_anchors (list[Tensor]): Box reference for a single scale level + with shape (num_total_anchors, 4). + img_shapes (list[tuple[int]]): Shape of the input image, + list[(height, width, 3)]. + scale_factors (list[ndarray]): Scale factor of the image arange as + (w_scale, h_scale, w_scale, h_scale). + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is an (n, 5) tensor, where 5 represent + (tl_x, tl_y, br_x, br_y, score) and the score between 0 and 1. + The shape of the second tensor in the tuple is (n,), and + each element represents the class label of the corresponding + box. 
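+
+        Note:
+            The box distribution logits are first converted into expected
+            distance offsets by ``self.integral`` and scaled by the level
+            stride before being decoded against anchor centers with
+            ``distance2bbox``.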
+ """ + cfg = self.test_cfg if cfg is None else cfg + assert len(cls_scores) == len(bbox_preds) == len(mlvl_anchors) + batch_size = cls_scores[0].shape[0] + + mlvl_bboxes = [] + mlvl_scores = [] + for cls_score, bbox_pred, stride, anchors in zip( + cls_scores, bbox_preds, self.anchor_generator.strides, + mlvl_anchors): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + assert stride[0] == stride[1] + scores = cls_score.permute(0, 2, 3, 1).reshape( + batch_size, -1, self.cls_out_channels).sigmoid() + bbox_pred = bbox_pred.permute(0, 2, 3, 1) + + bbox_pred = self.integral(bbox_pred) * stride[0] + bbox_pred = bbox_pred.reshape(batch_size, -1, 4) + + nms_pre = cfg.get('nms_pre', -1) + if nms_pre > 0 and scores.shape[1] > nms_pre: + max_scores, _ = scores.max(-1) + _, topk_inds = max_scores.topk(nms_pre) + batch_inds = torch.arange(batch_size).view( + -1, 1).expand_as(topk_inds).long() + anchors = anchors[topk_inds, :] + bbox_pred = bbox_pred[batch_inds, topk_inds, :] + scores = scores[batch_inds, topk_inds, :] + else: + anchors = anchors.expand_as(bbox_pred) + + bboxes = distance2bbox( + self.anchor_center(anchors), bbox_pred, max_shape=img_shapes) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + + batch_mlvl_bboxes = torch.cat(mlvl_bboxes, dim=1) + if rescale: + batch_mlvl_bboxes /= batch_mlvl_bboxes.new_tensor( + scale_factors).unsqueeze(1) + + batch_mlvl_scores = torch.cat(mlvl_scores, dim=1) + # Add a dummy background class to the backend when using sigmoid + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + padding = batch_mlvl_scores.new_zeros(batch_size, + batch_mlvl_scores.shape[1], 1) + batch_mlvl_scores = torch.cat([batch_mlvl_scores, padding], dim=-1) + + if with_nms: + det_results = [] + for (mlvl_bboxes, mlvl_scores) in zip(batch_mlvl_bboxes, + batch_mlvl_scores): + det_bbox, det_label = multiclass_nms(mlvl_bboxes, mlvl_scores, + cfg.score_thr, cfg.nms, + cfg.max_per_img) + det_results.append(tuple([det_bbox, det_label])) + else: + det_results = [ + tuple(mlvl_bs) + for mlvl_bs in zip(batch_mlvl_bboxes, batch_mlvl_scores) + ] + return det_results + + def get_targets(self, + anchor_list, + valid_flag_list, + gt_bboxes_list, + img_metas, + gt_bboxes_ignore_list=None, + gt_labels_list=None, + label_channels=1, + unmap_outputs=True): + """Get targets for GFL head. + + This method is almost the same as `AnchorHead.get_targets()`. Besides + returning the targets as the parent method does, it also returns the + anchors as the first element of the returned tuple. 
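+
+        Note:
+            The anchors are returned so that ``loss_single`` can derive
+            anchor centers for the distance-based decoding of the predicted
+            box distributions.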
+ """ + num_imgs = len(img_metas) + assert len(anchor_list) == len(valid_flag_list) == num_imgs + + # anchor number of multi levels + num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]] + num_level_anchors_list = [num_level_anchors] * num_imgs + + # concat all level anchors and flags to a single tensor + for i in range(num_imgs): + assert len(anchor_list[i]) == len(valid_flag_list[i]) + anchor_list[i] = torch.cat(anchor_list[i]) + valid_flag_list[i] = torch.cat(valid_flag_list[i]) + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + if gt_labels_list is None: + gt_labels_list = [None for _ in range(num_imgs)] + (all_anchors, all_labels, all_label_weights, all_bbox_targets, + all_bbox_weights, pos_inds_list, neg_inds_list) = multi_apply( + self._get_target_single, + anchor_list, + valid_flag_list, + num_level_anchors_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + img_metas, + label_channels=label_channels, + unmap_outputs=unmap_outputs) + # no valid anchors + if any([labels is None for labels in all_labels]): + return None + # sampled anchors of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + # split targets to a list w.r.t. multiple levels + anchors_list = images_to_levels(all_anchors, num_level_anchors) + labels_list = images_to_levels(all_labels, num_level_anchors) + label_weights_list = images_to_levels(all_label_weights, + num_level_anchors) + bbox_targets_list = images_to_levels(all_bbox_targets, + num_level_anchors) + bbox_weights_list = images_to_levels(all_bbox_weights, + num_level_anchors) + return (anchors_list, labels_list, label_weights_list, + bbox_targets_list, bbox_weights_list, num_total_pos, + num_total_neg) + + def _get_target_single(self, + flat_anchors, + valid_flags, + num_level_anchors, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + img_meta, + label_channels=1, + unmap_outputs=True): + """Compute regression, classification targets for anchors in a single + image. + + Args: + flat_anchors (Tensor): Multi-level anchors of the image, which are + concatenated into a single tensor of shape (num_anchors, 4) + valid_flags (Tensor): Multi level valid flags of the image, + which are concatenated into a single tensor of + shape (num_anchors,). + num_level_anchors Tensor): Number of anchors of each scale level. + gt_bboxes (Tensor): Ground truth bboxes of the image, + shape (num_gts, 4). + gt_bboxes_ignore (Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). + gt_labels (Tensor): Ground truth labels of each box, + shape (num_gts,). + img_meta (dict): Meta info of the image. + label_channels (int): Channel of label. + unmap_outputs (bool): Whether to map outputs back to the original + set of anchors. + + Returns: + tuple: N is the number of total anchors in the image. + anchors (Tensor): All anchors in the image with shape (N, 4). + labels (Tensor): Labels of all anchors in the image with shape + (N,). + label_weights (Tensor): Label weights of all anchor in the + image with shape (N,). + bbox_targets (Tensor): BBox targets of all anchors in the + image with shape (N, 4). + bbox_weights (Tensor): BBox weights of all anchors in the + image with shape (N, 4). + pos_inds (Tensor): Indices of positive anchor with shape + (num_pos,). + neg_inds (Tensor): Indices of negative anchor with shape + (num_neg,). 
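+
+        Note:
+            Compared with ``AnchorHead._get_target_single``, the assigner
+            here additionally receives ``num_level_anchors_inside``, the
+            number of valid anchors per level, as expected by ATSS-style
+            assigners.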
+ """ + inside_flags = anchor_inside_flags(flat_anchors, valid_flags, + img_meta['img_shape'][:2], + self.train_cfg.allowed_border) + if not inside_flags.any(): + return (None, ) * 7 + # assign gt and sample anchors + anchors = flat_anchors[inside_flags, :] + + num_level_anchors_inside = self.get_num_level_anchors_inside( + num_level_anchors, inside_flags) + assign_result = self.assigner.assign(anchors, num_level_anchors_inside, + gt_bboxes, gt_bboxes_ignore, + gt_labels) + + sampling_result = self.sampler.sample(assign_result, anchors, + gt_bboxes) + + num_valid_anchors = anchors.shape[0] + bbox_targets = torch.zeros_like(anchors) + bbox_weights = torch.zeros_like(anchors) + labels = anchors.new_full((num_valid_anchors, ), + self.num_classes, + dtype=torch.long) + label_weights = anchors.new_zeros(num_valid_anchors, dtype=torch.float) + + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + if len(pos_inds) > 0: + pos_bbox_targets = sampling_result.pos_gt_bboxes + bbox_targets[pos_inds, :] = pos_bbox_targets + bbox_weights[pos_inds, :] = 1.0 + if gt_labels is None: + # Only rpn gives gt_labels as None + # Foreground is the first class + labels[pos_inds] = 0 + else: + labels[pos_inds] = gt_labels[ + sampling_result.pos_assigned_gt_inds] + if self.train_cfg.pos_weight <= 0: + label_weights[pos_inds] = 1.0 + else: + label_weights[pos_inds] = self.train_cfg.pos_weight + if len(neg_inds) > 0: + label_weights[neg_inds] = 1.0 + + # map up to original set of anchors + if unmap_outputs: + num_total_anchors = flat_anchors.size(0) + anchors = unmap(anchors, num_total_anchors, inside_flags) + labels = unmap( + labels, num_total_anchors, inside_flags, fill=self.num_classes) + label_weights = unmap(label_weights, num_total_anchors, + inside_flags) + bbox_targets = unmap(bbox_targets, num_total_anchors, inside_flags) + bbox_weights = unmap(bbox_weights, num_total_anchors, inside_flags) + + return (anchors, labels, label_weights, bbox_targets, bbox_weights, + pos_inds, neg_inds) + + def get_num_level_anchors_inside(self, num_level_anchors, inside_flags): + split_inside_flags = torch.split(inside_flags, num_level_anchors) + num_level_anchors_inside = [ + int(flags.sum()) for flags in split_inside_flags + ] + return num_level_anchors_inside diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/guided_anchor_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/guided_anchor_head.py new file mode 100644 index 0000000000000000000000000000000000000000..252e1ea7954225fd23cfec08fb4cbb6308bf882f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/guided_anchor_head.py @@ -0,0 +1,858 @@ +import torch +import torch.nn as nn +from mmcv.ops import DeformConv2d, MaskedConv2d +from mmcv.runner import BaseModule, force_fp32 + +from mmdet.core import (anchor_inside_flags, build_anchor_generator, + build_assigner, build_bbox_coder, build_sampler, + calc_region, images_to_levels, multi_apply, + multiclass_nms, unmap) +from ..builder import HEADS, build_loss +from .anchor_head import AnchorHead + + +class FeatureAdaption(BaseModule): + """Feature Adaption Module. + + Feature Adaption Module is implemented based on DCN v1. + It uses anchor shape prediction rather than feature map to + predict offsets of deform conv layer. + + Args: + in_channels (int): Number of channels in the input feature map. + out_channels (int): Number of channels in the output feature map. 
+        kernel_size (int): Deformable conv kernel size.
+        deform_groups (int): Deformable conv group size.
+        init_cfg (dict or list[dict], optional): Initialization config dict.
+    """
+
+    def __init__(self,
+                 in_channels,
+                 out_channels,
+                 kernel_size=3,
+                 deform_groups=4,
+                 init_cfg=dict(
+                     type='Normal',
+                     layer='Conv2d',
+                     std=0.1,
+                     override=dict(
+                         type='Normal', name='conv_adaption', std=0.01))):
+        super(FeatureAdaption, self).__init__(init_cfg)
+        offset_channels = kernel_size * kernel_size * 2
+        self.conv_offset = nn.Conv2d(
+            2, deform_groups * offset_channels, 1, bias=False)
+        self.conv_adaption = DeformConv2d(
+            in_channels,
+            out_channels,
+            kernel_size=kernel_size,
+            padding=(kernel_size - 1) // 2,
+            deform_groups=deform_groups)
+        self.relu = nn.ReLU(inplace=True)
+
+    def forward(self, x, shape):
+        offset = self.conv_offset(shape.detach())
+        x = self.relu(self.conv_adaption(x, offset))
+        return x
+
+
+@HEADS.register_module()
+class GuidedAnchorHead(AnchorHead):
+    """Guided-Anchor-based head (GA-RPN, GA-RetinaNet, etc.).
+
+    This GuidedAnchorHead will predict high-quality feature guided
+    anchors and locations where anchors will be kept in inference.
+    There are mainly 3 categories of bounding-boxes.
+
+    - Sampled anchors used for target assignment (approxs; 9 per location
+      with the default generator below).
+    - The square boxes that the predicted anchors are based on (squares).
+    - Guided anchors.
+
+    Please refer to https://arxiv.org/abs/1901.03278 for more details.
+
+    Args:
+        num_classes (int): Number of classes.
+        in_channels (int): Number of channels in the input feature map.
+        feat_channels (int): Number of hidden channels.
+        approx_anchor_generator (dict): Config dict for approx generator.
+        square_anchor_generator (dict): Config dict for square generator.
+        anchor_coder (dict): Config dict for anchor coder.
+        bbox_coder (dict): Config dict for bbox coder.
+        reg_decoded_bbox (bool): If true, the regression loss would be
+            applied directly on decoded bounding boxes, converting both
+            the predicted boxes and regression targets to absolute
+            coordinates format. Default False. It should be `True` when
+            using `IoULoss`, `GIoULoss`, or `DIoULoss` in the bbox head.
+        deform_groups (int): Group number of DCN in
+            FeatureAdaption module.
+        loc_filter_thr (float): Threshold to filter out unconcerned regions.
+        loss_loc (dict): Config of location loss.
+        loss_shape (dict): Config of anchor shape loss.
+        loss_cls (dict): Config of classification loss.
+        loss_bbox (dict): Config of bbox regression loss.
+        init_cfg (dict or list[dict], optional): Initialization config dict.
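+
+    Example:
+        >>> # Illustrative sketch only: shapes assume the default generators
+        >>> # above, and DeformConv2d may require a CUDA build of mmcv.
+        >>> import torch
+        >>> self = GuidedAnchorHead(num_classes=9, in_channels=7)
+        >>> feats = [torch.rand(1, 7, s, s) for s in [100, 50, 25, 13, 7]]
+        >>> cls_scores, bbox_preds, shape_preds, loc_preds = self(feats)
+        >>> assert len(cls_scores) == len(feats)
+        >>> # one loc score and 2 shape deltas per location
+        >>> assert loc_preds[0].shape[1] == 1 and shape_preds[0].shape[1] == 2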
+ """ + + def __init__( + self, + num_classes, + in_channels, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=8, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[4, 8, 16, 32, 64]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[8], + strides=[4, 8, 16, 32, 64]), + anchor_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0] + ), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0] + ), + reg_decoded_bbox=False, + deform_groups=4, + loc_filter_thr=0.01, + train_cfg=None, + test_cfg=None, + loss_loc=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_shape=dict(type='BoundedIoULoss', beta=0.2, loss_weight=1.0), + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, + loss_weight=1.0), + init_cfg=dict(type='Normal', layer='Conv2d', std=0.01, + override=dict(type='Normal', + name='conv_loc', + std=0.01, + bias_prob=0.01))): # yapf: disable + super(AnchorHead, self).__init__(init_cfg) + self.in_channels = in_channels + self.num_classes = num_classes + self.feat_channels = feat_channels + self.deform_groups = deform_groups + self.loc_filter_thr = loc_filter_thr + + # build approx_anchor_generator and square_anchor_generator + assert (approx_anchor_generator['octave_base_scale'] == + square_anchor_generator['scales'][0]) + assert (approx_anchor_generator['strides'] == + square_anchor_generator['strides']) + self.approx_anchor_generator = build_anchor_generator( + approx_anchor_generator) + self.square_anchor_generator = build_anchor_generator( + square_anchor_generator) + self.approxs_per_octave = self.approx_anchor_generator \ + .num_base_anchors[0] + + self.reg_decoded_bbox = reg_decoded_bbox + + # one anchor per location + self.num_anchors = 1 + self.use_sigmoid_cls = loss_cls.get('use_sigmoid', False) + self.loc_focal_loss = loss_loc['type'] in ['FocalLoss'] + self.sampling = loss_cls['type'] not in ['FocalLoss'] + self.ga_sampling = train_cfg is not None and hasattr( + train_cfg, 'ga_sampler') + if self.use_sigmoid_cls: + self.cls_out_channels = self.num_classes + else: + self.cls_out_channels = self.num_classes + 1 + + # build bbox_coder + self.anchor_coder = build_bbox_coder(anchor_coder) + self.bbox_coder = build_bbox_coder(bbox_coder) + + # build losses + self.loss_loc = build_loss(loss_loc) + self.loss_shape = build_loss(loss_shape) + self.loss_cls = build_loss(loss_cls) + self.loss_bbox = build_loss(loss_bbox) + + self.train_cfg = train_cfg + self.test_cfg = test_cfg + + if self.train_cfg: + self.assigner = build_assigner(self.train_cfg.assigner) + # use PseudoSampler when sampling is False + if self.sampling and hasattr(self.train_cfg, 'sampler'): + sampler_cfg = self.train_cfg.sampler + else: + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + + self.ga_assigner = build_assigner(self.train_cfg.ga_assigner) + if self.ga_sampling: + ga_sampler_cfg = self.train_cfg.ga_sampler + else: + ga_sampler_cfg = dict(type='PseudoSampler') + self.ga_sampler = build_sampler(ga_sampler_cfg, context=self) + + self.fp16_enabled = False + + self._init_layers() + + def _init_layers(self): + self.relu = nn.ReLU(inplace=True) + self.conv_loc = nn.Conv2d(self.in_channels, 1, 1) + self.conv_shape = 
nn.Conv2d(self.in_channels, self.num_anchors * 2, 1) + self.feature_adaption = FeatureAdaption( + self.in_channels, + self.feat_channels, + kernel_size=3, + deform_groups=self.deform_groups) + self.conv_cls = MaskedConv2d(self.feat_channels, + self.num_anchors * self.cls_out_channels, + 1) + self.conv_reg = MaskedConv2d(self.feat_channels, self.num_anchors * 4, + 1) + + def forward_single(self, x): + loc_pred = self.conv_loc(x) + shape_pred = self.conv_shape(x) + x = self.feature_adaption(x, shape_pred) + # masked conv is only used during inference for speed-up + if not self.training: + mask = loc_pred.sigmoid()[0] >= self.loc_filter_thr + else: + mask = None + cls_score = self.conv_cls(x, mask) + bbox_pred = self.conv_reg(x, mask) + return cls_score, bbox_pred, shape_pred, loc_pred + + def forward(self, feats): + return multi_apply(self.forward_single, feats) + + def get_sampled_approxs(self, featmap_sizes, img_metas, device='cuda'): + """Get sampled approxs and inside flags according to feature map sizes. + + Args: + featmap_sizes (list[tuple]): Multi-level feature map sizes. + img_metas (list[dict]): Image meta info. + device (torch.device | str): device for returned tensors + + Returns: + tuple: approxes of each image, inside flags of each image + """ + num_imgs = len(img_metas) + + # since feature map sizes of all images are the same, we only compute + # approxes for one time + multi_level_approxs = self.approx_anchor_generator.grid_anchors( + featmap_sizes, device=device) + approxs_list = [multi_level_approxs for _ in range(num_imgs)] + + # for each image, we compute inside flags of multi level approxes + inside_flag_list = [] + for img_id, img_meta in enumerate(img_metas): + multi_level_flags = [] + multi_level_approxs = approxs_list[img_id] + + # obtain valid flags for each approx first + multi_level_approx_flags = self.approx_anchor_generator \ + .valid_flags(featmap_sizes, + img_meta['pad_shape'], + device=device) + + for i, flags in enumerate(multi_level_approx_flags): + approxs = multi_level_approxs[i] + inside_flags_list = [] + for i in range(self.approxs_per_octave): + split_valid_flags = flags[i::self.approxs_per_octave] + split_approxs = approxs[i::self.approxs_per_octave, :] + inside_flags = anchor_inside_flags( + split_approxs, split_valid_flags, + img_meta['img_shape'][:2], + self.train_cfg.allowed_border) + inside_flags_list.append(inside_flags) + # inside_flag for a position is true if any anchor in this + # position is true + inside_flags = ( + torch.stack(inside_flags_list, 0).sum(dim=0) > 0) + multi_level_flags.append(inside_flags) + inside_flag_list.append(multi_level_flags) + return approxs_list, inside_flag_list + + def get_anchors(self, + featmap_sizes, + shape_preds, + loc_preds, + img_metas, + use_loc_filter=False, + device='cuda'): + """Get squares according to feature map sizes and guided anchors. + + Args: + featmap_sizes (list[tuple]): Multi-level feature map sizes. + shape_preds (list[tensor]): Multi-level shape predictions. + loc_preds (list[tensor]): Multi-level location predictions. + img_metas (list[dict]): Image meta info. + use_loc_filter (bool): Use loc filter or not. 
+ device (torch.device | str): device for returned tensors + + Returns: + tuple: square approxs of each image, guided anchors of each image, + loc masks of each image + """ + num_imgs = len(img_metas) + num_levels = len(featmap_sizes) + + # since feature map sizes of all images are the same, we only compute + # squares for one time + multi_level_squares = self.square_anchor_generator.grid_anchors( + featmap_sizes, device=device) + squares_list = [multi_level_squares for _ in range(num_imgs)] + + # for each image, we compute multi level guided anchors + guided_anchors_list = [] + loc_mask_list = [] + for img_id, img_meta in enumerate(img_metas): + multi_level_guided_anchors = [] + multi_level_loc_mask = [] + for i in range(num_levels): + squares = squares_list[img_id][i] + shape_pred = shape_preds[i][img_id] + loc_pred = loc_preds[i][img_id] + guided_anchors, loc_mask = self._get_guided_anchors_single( + squares, + shape_pred, + loc_pred, + use_loc_filter=use_loc_filter) + multi_level_guided_anchors.append(guided_anchors) + multi_level_loc_mask.append(loc_mask) + guided_anchors_list.append(multi_level_guided_anchors) + loc_mask_list.append(multi_level_loc_mask) + return squares_list, guided_anchors_list, loc_mask_list + + def _get_guided_anchors_single(self, + squares, + shape_pred, + loc_pred, + use_loc_filter=False): + """Get guided anchors and loc masks for a single level. + + Args: + square (tensor): Squares of a single level. + shape_pred (tensor): Shape predictions of a single level. + loc_pred (tensor): Loc predictions of a single level. + use_loc_filter (list[tensor]): Use loc filter or not. + + Returns: + tuple: guided anchors, location masks + """ + # calculate location filtering mask + loc_pred = loc_pred.sigmoid().detach() + if use_loc_filter: + loc_mask = loc_pred >= self.loc_filter_thr + else: + loc_mask = loc_pred >= 0.0 + mask = loc_mask.permute(1, 2, 0).expand(-1, -1, self.num_anchors) + mask = mask.contiguous().view(-1) + # calculate guided anchors + squares = squares[mask] + anchor_deltas = shape_pred.permute(1, 2, 0).contiguous().view( + -1, 2).detach()[mask] + bbox_deltas = anchor_deltas.new_full(squares.size(), 0) + bbox_deltas[:, 2:] = anchor_deltas + guided_anchors = self.anchor_coder.decode( + squares, bbox_deltas, wh_ratio_clip=1e-6) + return guided_anchors, mask + + def ga_loc_targets(self, gt_bboxes_list, featmap_sizes): + """Compute location targets for guided anchoring. + + Each feature map is divided into positive, negative and ignore regions. + - positive regions: target 1, weight 1 + - ignore regions: target 0, weight 0 + - negative regions: target 0, weight 0.1 + + Args: + gt_bboxes_list (list[Tensor]): Gt bboxes of each image. + featmap_sizes (list[tuple]): Multi level sizes of each feature + maps. + + Returns: + tuple + """ + anchor_scale = self.approx_anchor_generator.octave_base_scale + anchor_strides = self.approx_anchor_generator.strides + # Currently only supports same stride in x and y direction. 
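+        # Region scheme implemented below: each gt is first assigned to one
+        # pyramid level by its scale; its central region, shrunk by
+        # r1 = (1 - center_ratio) / 2, becomes positive (target 1, weight 1);
+        # the larger region shrunk by r2 = (1 - ignore_ratio) / 2 is ignored
+        # outside that center (weight 0, and it is also marked on the two
+        # adjacent levels); all remaining locations stay negative with a
+        # down-weighted 0.1.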
+ for stride in anchor_strides: + assert (stride[0] == stride[1]) + anchor_strides = [stride[0] for stride in anchor_strides] + + center_ratio = self.train_cfg.center_ratio + ignore_ratio = self.train_cfg.ignore_ratio + img_per_gpu = len(gt_bboxes_list) + num_lvls = len(featmap_sizes) + r1 = (1 - center_ratio) / 2 + r2 = (1 - ignore_ratio) / 2 + all_loc_targets = [] + all_loc_weights = [] + all_ignore_map = [] + for lvl_id in range(num_lvls): + h, w = featmap_sizes[lvl_id] + loc_targets = torch.zeros( + img_per_gpu, + 1, + h, + w, + device=gt_bboxes_list[0].device, + dtype=torch.float32) + loc_weights = torch.full_like(loc_targets, -1) + ignore_map = torch.zeros_like(loc_targets) + all_loc_targets.append(loc_targets) + all_loc_weights.append(loc_weights) + all_ignore_map.append(ignore_map) + for img_id in range(img_per_gpu): + gt_bboxes = gt_bboxes_list[img_id] + scale = torch.sqrt((gt_bboxes[:, 2] - gt_bboxes[:, 0]) * + (gt_bboxes[:, 3] - gt_bboxes[:, 1])) + min_anchor_size = scale.new_full( + (1, ), float(anchor_scale * anchor_strides[0])) + # assign gt bboxes to different feature levels w.r.t. their scales + target_lvls = torch.floor( + torch.log2(scale) - torch.log2(min_anchor_size) + 0.5) + target_lvls = target_lvls.clamp(min=0, max=num_lvls - 1).long() + for gt_id in range(gt_bboxes.size(0)): + lvl = target_lvls[gt_id].item() + # rescaled to corresponding feature map + gt_ = gt_bboxes[gt_id, :4] / anchor_strides[lvl] + # calculate ignore regions + ignore_x1, ignore_y1, ignore_x2, ignore_y2 = calc_region( + gt_, r2, featmap_sizes[lvl]) + # calculate positive (center) regions + ctr_x1, ctr_y1, ctr_x2, ctr_y2 = calc_region( + gt_, r1, featmap_sizes[lvl]) + all_loc_targets[lvl][img_id, 0, ctr_y1:ctr_y2 + 1, + ctr_x1:ctr_x2 + 1] = 1 + all_loc_weights[lvl][img_id, 0, ignore_y1:ignore_y2 + 1, + ignore_x1:ignore_x2 + 1] = 0 + all_loc_weights[lvl][img_id, 0, ctr_y1:ctr_y2 + 1, + ctr_x1:ctr_x2 + 1] = 1 + # calculate ignore map on nearby low level feature + if lvl > 0: + d_lvl = lvl - 1 + # rescaled to corresponding feature map + gt_ = gt_bboxes[gt_id, :4] / anchor_strides[d_lvl] + ignore_x1, ignore_y1, ignore_x2, ignore_y2 = calc_region( + gt_, r2, featmap_sizes[d_lvl]) + all_ignore_map[d_lvl][img_id, 0, ignore_y1:ignore_y2 + 1, + ignore_x1:ignore_x2 + 1] = 1 + # calculate ignore map on nearby high level feature + if lvl < num_lvls - 1: + u_lvl = lvl + 1 + # rescaled to corresponding feature map + gt_ = gt_bboxes[gt_id, :4] / anchor_strides[u_lvl] + ignore_x1, ignore_y1, ignore_x2, ignore_y2 = calc_region( + gt_, r2, featmap_sizes[u_lvl]) + all_ignore_map[u_lvl][img_id, 0, ignore_y1:ignore_y2 + 1, + ignore_x1:ignore_x2 + 1] = 1 + for lvl_id in range(num_lvls): + # ignore negative regions w.r.t. ignore map + all_loc_weights[lvl_id][(all_loc_weights[lvl_id] < 0) + & (all_ignore_map[lvl_id] > 0)] = 0 + # set negative regions with weight 0.1 + all_loc_weights[lvl_id][all_loc_weights[lvl_id] < 0] = 0.1 + # loc average factor to balance loss + loc_avg_factor = sum( + [t.size(0) * t.size(-1) * t.size(-2) + for t in all_loc_targets]) / 200 + return all_loc_targets, all_loc_weights, loc_avg_factor + + def _ga_shape_target_single(self, + flat_approxs, + inside_flags, + flat_squares, + gt_bboxes, + gt_bboxes_ignore, + img_meta, + unmap_outputs=True): + """Compute guided anchoring targets. + + This function returns sampled anchors and gt bboxes directly + rather than calculates regression targets. 
+ + Args: + flat_approxs (Tensor): flat approxs of a single image, + shape (n, 4) + inside_flags (Tensor): inside flags of a single image, + shape (n, ). + flat_squares (Tensor): flat squares of a single image, + shape (approxs_per_octave * n, 4) + gt_bboxes (Tensor): Ground truth bboxes of a single image. + img_meta (dict): Meta info of a single image. + approxs_per_octave (int): number of approxs per octave + cfg (dict): RPN train configs. + unmap_outputs (bool): unmap outputs or not. + + Returns: + tuple + """ + if not inside_flags.any(): + return (None, ) * 5 + # assign gt and sample anchors + expand_inside_flags = inside_flags[:, None].expand( + -1, self.approxs_per_octave).reshape(-1) + approxs = flat_approxs[expand_inside_flags, :] + squares = flat_squares[inside_flags, :] + + assign_result = self.ga_assigner.assign(approxs, squares, + self.approxs_per_octave, + gt_bboxes, gt_bboxes_ignore) + sampling_result = self.ga_sampler.sample(assign_result, squares, + gt_bboxes) + + bbox_anchors = torch.zeros_like(squares) + bbox_gts = torch.zeros_like(squares) + bbox_weights = torch.zeros_like(squares) + + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + if len(pos_inds) > 0: + bbox_anchors[pos_inds, :] = sampling_result.pos_bboxes + bbox_gts[pos_inds, :] = sampling_result.pos_gt_bboxes + bbox_weights[pos_inds, :] = 1.0 + + # map up to original set of anchors + if unmap_outputs: + num_total_anchors = flat_squares.size(0) + bbox_anchors = unmap(bbox_anchors, num_total_anchors, inside_flags) + bbox_gts = unmap(bbox_gts, num_total_anchors, inside_flags) + bbox_weights = unmap(bbox_weights, num_total_anchors, inside_flags) + + return (bbox_anchors, bbox_gts, bbox_weights, pos_inds, neg_inds) + + def ga_shape_targets(self, + approx_list, + inside_flag_list, + square_list, + gt_bboxes_list, + img_metas, + gt_bboxes_ignore_list=None, + unmap_outputs=True): + """Compute guided anchoring targets. + + Args: + approx_list (list[list]): Multi level approxs of each image. + inside_flag_list (list[list]): Multi level inside flags of each + image. + square_list (list[list]): Multi level squares of each image. + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image. + img_metas (list[dict]): Meta info of each image. + gt_bboxes_ignore_list (list[Tensor]): ignore list of gt bboxes. + unmap_outputs (bool): unmap outputs or not. 
+ + Returns: + tuple + """ + num_imgs = len(img_metas) + assert len(approx_list) == len(inside_flag_list) == len( + square_list) == num_imgs + # anchor number of multi levels + num_level_squares = [squares.size(0) for squares in square_list[0]] + # concat all level anchors and flags to a single tensor + inside_flag_flat_list = [] + approx_flat_list = [] + square_flat_list = [] + for i in range(num_imgs): + assert len(square_list[i]) == len(inside_flag_list[i]) + inside_flag_flat_list.append(torch.cat(inside_flag_list[i])) + approx_flat_list.append(torch.cat(approx_list[i])) + square_flat_list.append(torch.cat(square_list[i])) + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + (all_bbox_anchors, all_bbox_gts, all_bbox_weights, pos_inds_list, + neg_inds_list) = multi_apply( + self._ga_shape_target_single, + approx_flat_list, + inside_flag_flat_list, + square_flat_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + img_metas, + unmap_outputs=unmap_outputs) + # no valid anchors + if any([bbox_anchors is None for bbox_anchors in all_bbox_anchors]): + return None + # sampled anchors of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + # split targets to a list w.r.t. multiple levels + bbox_anchors_list = images_to_levels(all_bbox_anchors, + num_level_squares) + bbox_gts_list = images_to_levels(all_bbox_gts, num_level_squares) + bbox_weights_list = images_to_levels(all_bbox_weights, + num_level_squares) + return (bbox_anchors_list, bbox_gts_list, bbox_weights_list, + num_total_pos, num_total_neg) + + def loss_shape_single(self, shape_pred, bbox_anchors, bbox_gts, + anchor_weights, anchor_total_num): + shape_pred = shape_pred.permute(0, 2, 3, 1).contiguous().view(-1, 2) + bbox_anchors = bbox_anchors.contiguous().view(-1, 4) + bbox_gts = bbox_gts.contiguous().view(-1, 4) + anchor_weights = anchor_weights.contiguous().view(-1, 4) + bbox_deltas = bbox_anchors.new_full(bbox_anchors.size(), 0) + bbox_deltas[:, 2:] += shape_pred + # filter out negative samples to speed-up weighted_bounded_iou_loss + inds = torch.nonzero( + anchor_weights[:, 0] > 0, as_tuple=False).squeeze(1) + bbox_deltas_ = bbox_deltas[inds] + bbox_anchors_ = bbox_anchors[inds] + bbox_gts_ = bbox_gts[inds] + anchor_weights_ = anchor_weights[inds] + pred_anchors_ = self.anchor_coder.decode( + bbox_anchors_, bbox_deltas_, wh_ratio_clip=1e-6) + loss_shape = self.loss_shape( + pred_anchors_, + bbox_gts_, + anchor_weights_, + avg_factor=anchor_total_num) + return loss_shape + + def loss_loc_single(self, loc_pred, loc_target, loc_weight, + loc_avg_factor): + loss_loc = self.loss_loc( + loc_pred.reshape(-1, 1), + loc_target.reshape(-1).long(), + loc_weight.reshape(-1), + avg_factor=loc_avg_factor) + return loss_loc + + @force_fp32( + apply_to=('cls_scores', 'bbox_preds', 'shape_preds', 'loc_preds')) + def loss(self, + cls_scores, + bbox_preds, + shape_preds, + loc_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.approx_anchor_generator.num_levels + + device = cls_scores[0].device + + # get loc targets + loc_targets, loc_weights, loc_avg_factor = self.ga_loc_targets( + gt_bboxes, featmap_sizes) + + # get sampled approxes + approxs_list, inside_flag_list = self.get_sampled_approxs( + featmap_sizes, img_metas, device=device) + # get 
squares and guided anchors + squares_list, guided_anchors_list, _ = self.get_anchors( + featmap_sizes, shape_preds, loc_preds, img_metas, device=device) + + # get shape targets + shape_targets = self.ga_shape_targets(approxs_list, inside_flag_list, + squares_list, gt_bboxes, + img_metas) + if shape_targets is None: + return None + (bbox_anchors_list, bbox_gts_list, anchor_weights_list, anchor_fg_num, + anchor_bg_num) = shape_targets + anchor_total_num = ( + anchor_fg_num if not self.ga_sampling else anchor_fg_num + + anchor_bg_num) + + # get anchor targets + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + cls_reg_targets = self.get_targets( + guided_anchors_list, + inside_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels) + if cls_reg_targets is None: + return None + (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, + num_total_pos, num_total_neg) = cls_reg_targets + num_total_samples = ( + num_total_pos + num_total_neg if self.sampling else num_total_pos) + + # anchor number of multi levels + num_level_anchors = [ + anchors.size(0) for anchors in guided_anchors_list[0] + ] + # concat all level anchors to a single tensor + concat_anchor_list = [] + for i in range(len(guided_anchors_list)): + concat_anchor_list.append(torch.cat(guided_anchors_list[i])) + all_anchor_list = images_to_levels(concat_anchor_list, + num_level_anchors) + + # get classification and bbox regression losses + losses_cls, losses_bbox = multi_apply( + self.loss_single, + cls_scores, + bbox_preds, + all_anchor_list, + labels_list, + label_weights_list, + bbox_targets_list, + bbox_weights_list, + num_total_samples=num_total_samples) + + # get anchor location loss + losses_loc = [] + for i in range(len(loc_preds)): + loss_loc = self.loss_loc_single( + loc_preds[i], + loc_targets[i], + loc_weights[i], + loc_avg_factor=loc_avg_factor) + losses_loc.append(loss_loc) + + # get anchor shape loss + losses_shape = [] + for i in range(len(shape_preds)): + loss_shape = self.loss_shape_single( + shape_preds[i], + bbox_anchors_list[i], + bbox_gts_list[i], + anchor_weights_list[i], + anchor_total_num=anchor_total_num) + losses_shape.append(loss_shape) + + return dict( + loss_cls=losses_cls, + loss_bbox=losses_bbox, + loss_shape=losses_shape, + loss_loc=losses_loc) + + @force_fp32( + apply_to=('cls_scores', 'bbox_preds', 'shape_preds', 'loc_preds')) + def get_bboxes(self, + cls_scores, + bbox_preds, + shape_preds, + loc_preds, + img_metas, + cfg=None, + rescale=False): + assert len(cls_scores) == len(bbox_preds) == len(shape_preds) == len( + loc_preds) + num_levels = len(cls_scores) + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + device = cls_scores[0].device + # get guided anchors + _, guided_anchors, loc_masks = self.get_anchors( + featmap_sizes, + shape_preds, + loc_preds, + img_metas, + use_loc_filter=not self.training, + device=device) + result_list = [] + for img_id in range(len(img_metas)): + cls_score_list = [ + cls_scores[i][img_id].detach() for i in range(num_levels) + ] + bbox_pred_list = [ + bbox_preds[i][img_id].detach() for i in range(num_levels) + ] + guided_anchor_list = [ + guided_anchors[img_id][i].detach() for i in range(num_levels) + ] + loc_mask_list = [ + loc_masks[img_id][i].detach() for i in range(num_levels) + ] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + proposals = self._get_bboxes_single(cls_score_list, 
bbox_pred_list,
+                                                  guided_anchor_list,
+                                                  loc_mask_list, img_shape,
+                                                  scale_factor, cfg, rescale)
+            result_list.append(proposals)
+        return result_list
+
+    def _get_bboxes_single(self,
+                           cls_scores,
+                           bbox_preds,
+                           mlvl_anchors,
+                           mlvl_masks,
+                           img_shape,
+                           scale_factor,
+                           cfg,
+                           rescale=False):
+        cfg = self.test_cfg if cfg is None else cfg
+        assert len(cls_scores) == len(bbox_preds) == len(mlvl_anchors)
+        mlvl_bboxes = []
+        mlvl_scores = []
+        for cls_score, bbox_pred, anchors, mask in zip(cls_scores, bbox_preds,
+                                                       mlvl_anchors,
+                                                       mlvl_masks):
+            assert cls_score.size()[-2:] == bbox_pred.size()[-2:]
+            # if no location is kept, end.
+            if mask.sum() == 0:
+                continue
+            # reshape scores and bbox_pred
+            cls_score = cls_score.permute(1, 2,
+                                          0).reshape(-1, self.cls_out_channels)
+            if self.use_sigmoid_cls:
+                scores = cls_score.sigmoid()
+            else:
+                scores = cls_score.softmax(-1)
+            bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4)
+            # filter scores, bbox_pred w.r.t. mask.
+            # anchors are filtered in get_anchors() beforehand.
+            scores = scores[mask, :]
+            bbox_pred = bbox_pred[mask, :]
+            if scores.dim() == 0:
+                anchors = anchors.unsqueeze(0)
+                scores = scores.unsqueeze(0)
+                bbox_pred = bbox_pred.unsqueeze(0)
+            # filter anchors, bbox_pred, scores w.r.t. scores
+            nms_pre = cfg.get('nms_pre', -1)
+            if nms_pre > 0 and scores.shape[0] > nms_pre:
+                if self.use_sigmoid_cls:
+                    max_scores, _ = scores.max(dim=1)
+                else:
+                    # remind that we set FG labels to [0, num_class-1]
+                    # since mmdet v2.0
+                    # BG cat_id: num_class
+                    max_scores, _ = scores[:, :-1].max(dim=1)
+                _, topk_inds = max_scores.topk(nms_pre)
+                anchors = anchors[topk_inds, :]
+                bbox_pred = bbox_pred[topk_inds, :]
+                scores = scores[topk_inds, :]
+            bboxes = self.bbox_coder.decode(
+                anchors, bbox_pred, max_shape=img_shape)
+            mlvl_bboxes.append(bboxes)
+            mlvl_scores.append(scores)
+        mlvl_bboxes = torch.cat(mlvl_bboxes)
+        if rescale:
+            mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor)
+        mlvl_scores = torch.cat(mlvl_scores)
+        if self.use_sigmoid_cls:
+            # Add a dummy background class to the backend when using sigmoid
+            # remind that we set FG labels to [0, num_class-1] since mmdet v2.0
+            # BG cat_id: num_class
+            padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1)
+            mlvl_scores = torch.cat([mlvl_scores, padding], dim=1)
+        # multi class NMS
+        det_bboxes, det_labels = multiclass_nms(mlvl_bboxes, mlvl_scores,
+                                                cfg.score_thr, cfg.nms,
+                                                cfg.max_per_img)
+        return det_bboxes, det_labels
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/ld_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/ld_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..501e1f7befa086f0b2f818531807411fc383d7bd
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/ld_head.py
@@ -0,0 +1,261 @@
+import torch
+from mmcv.runner import force_fp32
+
+from mmdet.core import (bbox2distance, bbox_overlaps, distance2bbox,
+                        multi_apply, reduce_mean)
+from ..builder import HEADS, build_loss
+from .gfl_head import GFLHead
+
+
+@HEADS.register_module()
+class LDHead(GFLHead):
+    """Localization Distillation head.
+
+    It utilizes the learned bbox distributions to transfer the localization
+    dark knowledge from teacher to student. Original paper: `Localization
+    Distillation for Object Detection <https://arxiv.org/abs/2102.12252>`_.
+
+    Args:
+        num_classes (int): Number of categories excluding the background
+            category.
+        in_channels (int): Number of channels in the input feature map.
+        loss_ld (dict): Config of Localization Distillation Loss (LD),
+            T is the temperature for distillation.
+    """
+
+    def __init__(self,
+                 num_classes,
+                 in_channels,
+                 loss_ld=dict(
+                     type='LocalizationDistillationLoss',
+                     loss_weight=0.25,
+                     T=10),
+                 **kwargs):
+
+        super(LDHead, self).__init__(num_classes, in_channels, **kwargs)
+        self.loss_ld = build_loss(loss_ld)
+
+    def loss_single(self, anchors, cls_score, bbox_pred, labels, label_weights,
+                    bbox_targets, stride, soft_targets, num_total_samples):
+        """Compute loss of a single scale level.
+
+        Args:
+            anchors (Tensor): Box reference for each scale level with shape
+                (N, num_total_anchors, 4).
+            cls_score (Tensor): Cls and quality joint scores for each scale
+                level, with shape (N, num_classes, H, W).
+            bbox_pred (Tensor): Box distribution logits for each scale
+                level with shape (N, 4*(n+1), H, W), where n is the max value
+                of the integral set.
+            labels (Tensor): Labels of each anchor with shape
+                (N, num_total_anchors).
+            label_weights (Tensor): Label weights of each anchor with shape
+                (N, num_total_anchors).
+            bbox_targets (Tensor): BBox regression targets of each anchor with
+                shape (N, num_total_anchors, 4).
+            stride (tuple): Stride in this scale level.
+            soft_targets (Tensor): Teacher's box distribution logits for the
+                same scale level, with the same shape as ``bbox_pred``.
+            num_total_samples (int): Number of positive samples, reduced
+                over all GPUs.
+
+        Returns:
+            tuple[Tensor]: Loss components and weight targets.
+        """
+        assert stride[0] == stride[1], 'h stride is not equal to w stride!'
+        anchors = anchors.reshape(-1, 4)
+        cls_score = cls_score.permute(0, 2, 3,
+                                      1).reshape(-1, self.cls_out_channels)
+        bbox_pred = bbox_pred.permute(0, 2, 3,
+                                      1).reshape(-1, 4 * (self.reg_max + 1))
+        soft_targets = soft_targets.permute(0, 2, 3,
+                                            1).reshape(-1,
+                                                       4 * (self.reg_max + 1))
+
+        bbox_targets = bbox_targets.reshape(-1, 4)
+        labels = labels.reshape(-1)
+        label_weights = label_weights.reshape(-1)
+
+        # FG cat_id: [0, num_classes - 1], BG cat_id: num_classes
+        bg_class_ind = self.num_classes
+        pos_inds = ((labels >= 0)
+                    & (labels < bg_class_ind)).nonzero().squeeze(1)
+        score = label_weights.new_zeros(labels.shape)
+
+        if len(pos_inds) > 0:
+            pos_bbox_targets = bbox_targets[pos_inds]
+            pos_bbox_pred = bbox_pred[pos_inds]
+            pos_anchors = anchors[pos_inds]
+            pos_anchor_centers = self.anchor_center(pos_anchors) / stride[0]
+
+            weight_targets = cls_score.detach().sigmoid()
+            weight_targets = weight_targets.max(dim=1)[0][pos_inds]
+            pos_bbox_pred_corners = self.integral(pos_bbox_pred)
+            pos_decode_bbox_pred = distance2bbox(pos_anchor_centers,
+                                                 pos_bbox_pred_corners)
+            pos_decode_bbox_targets = pos_bbox_targets / stride[0]
+            score[pos_inds] = bbox_overlaps(
+                pos_decode_bbox_pred.detach(),
+                pos_decode_bbox_targets,
+                is_aligned=True)
+            pred_corners = pos_bbox_pred.reshape(-1, self.reg_max + 1)
+            pos_soft_targets = soft_targets[pos_inds]
+            soft_corners = pos_soft_targets.reshape(-1, self.reg_max + 1)
+
+            target_corners = bbox2distance(pos_anchor_centers,
+                                           pos_decode_bbox_targets,
+                                           self.reg_max).reshape(-1)
+
+            # regression loss
+            loss_bbox = self.loss_bbox(
+                pos_decode_bbox_pred,
+                pos_decode_bbox_targets,
+                weight=weight_targets,
+                avg_factor=1.0)
+
+            # dfl loss
+            loss_dfl = self.loss_dfl(
+                pred_corners,
+                target_corners,
+                weight=weight_targets[:, None].expand(-1, 4).reshape(-1),
+                avg_factor=4.0)
+
+            # ld loss
+            loss_ld = self.loss_ld(
+                pred_corners,
+                soft_corners,
+                weight=weight_targets[:, None].expand(-1, 4).reshape(-1),
+                avg_factor=4.0)
+
+        else:
+            loss_ld = bbox_pred.sum() * 0
+            loss_bbox = bbox_pred.sum() * 0
+            loss_dfl = bbox_pred.sum() * 0
+
weight_targets = bbox_pred.new_tensor(0) + + # cls (qfl) loss + loss_cls = self.loss_cls( + cls_score, (labels, score), + weight=label_weights, + avg_factor=num_total_samples) + + return loss_cls, loss_bbox, loss_dfl, loss_ld, weight_targets.sum() + + def forward_train(self, + x, + out_teacher, + img_metas, + gt_bboxes, + gt_labels=None, + gt_bboxes_ignore=None, + proposal_cfg=None, + **kwargs): + """ + Args: + x (list[Tensor]): Features from FPN. + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes (Tensor): Ground truth bboxes of the image, + shape (num_gts, 4). + gt_labels (Tensor): Ground truth labels of each box, + shape (num_gts,). + gt_bboxes_ignore (Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). + proposal_cfg (mmcv.Config): Test / postprocessing configuration, + if None, test_cfg would be used + + Returns: + tuple[dict, list]: The loss components and proposals of each image. + + - losses (dict[str, Tensor]): A dictionary of loss components. + - proposal_list (list[Tensor]): Proposals of each image. + """ + outs = self(x) + soft_target = out_teacher[1] + if gt_labels is None: + loss_inputs = outs + (gt_bboxes, soft_target, img_metas) + else: + loss_inputs = outs + (gt_bboxes, gt_labels, soft_target, img_metas) + losses = self.loss(*loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore) + if proposal_cfg is None: + return losses + else: + proposal_list = self.get_bboxes(*outs, img_metas, cfg=proposal_cfg) + return losses, proposal_list + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + gt_labels, + soft_target, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + cls_scores (list[Tensor]): Cls and quality scores for each scale + level has shape (N, num_classes, H, W). + bbox_preds (list[Tensor]): Box distribution logits for each scale + level with shape (N, 4*(n+1), H, W), n is max value of integral + set. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (list[Tensor] | None): specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. 
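For intuition about the `loss_ld` term assembled in `loss_single` above: a minimal sketch, assuming `LocalizationDistillationLoss` is the temperature-scaled KL divergence between student and teacher bin distributions described in the LD paper. The registered mmdet loss handles weighting and reduction itself, so this stand-alone function is illustrative only.

```python
import torch
import torch.nn.functional as F

def ld_loss_sketch(pred_corners, soft_corners, T=10.0, loss_weight=0.25):
    # Student/teacher logits over the (reg_max + 1) integral bins,
    # shape (num_pos * 4, reg_max + 1) as prepared in loss_single above.
    p_teacher = F.softmax(soft_corners / T, dim=-1)
    log_p_student = F.log_softmax(pred_corners / T, dim=-1)
    # per-sample KL, summed over bins; T^2 rescaling as usual in KD
    kd = F.kl_div(log_p_student, p_teacher, reduction='none').sum(-1)
    return loss_weight * (T * T) * kd.mean()

student = torch.randn(8, 17)  # e.g. reg_max = 16
teacher = torch.randn(8, 17)
print(ld_loss_sketch(student, teacher))
```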
+ """ + + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels) + if cls_reg_targets is None: + return None + + (anchor_list, labels_list, label_weights_list, bbox_targets_list, + bbox_weights_list, num_total_pos, num_total_neg) = cls_reg_targets + + num_total_samples = reduce_mean( + torch.tensor(num_total_pos, dtype=torch.float, + device=device)).item() + num_total_samples = max(num_total_samples, 1.0) + + losses_cls, losses_bbox, losses_dfl, losses_ld, \ + avg_factor = multi_apply( + self.loss_single, + anchor_list, + cls_scores, + bbox_preds, + labels_list, + label_weights_list, + bbox_targets_list, + self.anchor_generator.strides, + soft_target, + num_total_samples=num_total_samples) + + avg_factor = sum(avg_factor) + 1e-6 + avg_factor = reduce_mean(avg_factor).item() + losses_bbox = [x / avg_factor for x in losses_bbox] + losses_dfl = [x / avg_factor for x in losses_dfl] + return dict( + loss_cls=losses_cls, + loss_bbox=losses_bbox, + loss_dfl=losses_dfl, + loss_ld=losses_ld) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/nasfcos_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/nasfcos_head.py new file mode 100644 index 0000000000000000000000000000000000000000..086ebf868ceb2a7aa8d802d1cab69479d9768f41 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/nasfcos_head.py @@ -0,0 +1,79 @@ +import copy + +import torch.nn as nn +from mmcv.cnn import ConvModule, Scale + +from mmdet.models.dense_heads.fcos_head import FCOSHead +from ..builder import HEADS + + +@HEADS.register_module() +class NASFCOSHead(FCOSHead): + """Anchor-free head used in `NASFCOS `_. + + It is quite similar with FCOS head, except for the searched structure of + classification branch and bbox regression branch, where a structure of + "dconv3x3, conv3x3, dconv3x3, conv1x1" is utilized instead. 
+ """ + + def __init__(self, *args, init_cfg=None, **kwargs): + if init_cfg is None: + init_cfg = [ + dict(type='Caffe2Xavier', layer=['ConvModule', 'Conv2d']), + dict( + type='Normal', + std=0.01, + override=[ + dict(name='conv_reg'), + dict(name='conv_centerness'), + dict( + name='conv_cls', + type='Normal', + std=0.01, + bias_prob=0.01) + ]), + ] + super(NASFCOSHead, self).__init__(*args, init_cfg=init_cfg, **kwargs) + + def _init_layers(self): + """Initialize layers of the head.""" + dconv3x3_config = dict( + type='DCNv2', + kernel_size=3, + use_bias=True, + deform_groups=2, + padding=1) + conv3x3_config = dict(type='Conv', kernel_size=3, padding=1) + conv1x1_config = dict(type='Conv', kernel_size=1) + + self.arch_config = [ + dconv3x3_config, conv3x3_config, dconv3x3_config, conv1x1_config + ] + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for i, op_ in enumerate(self.arch_config): + op = copy.deepcopy(op_) + chn = self.in_channels if i == 0 else self.feat_channels + assert isinstance(op, dict) + use_bias = op.pop('use_bias', False) + padding = op.pop('padding', 0) + kernel_size = op.pop('kernel_size') + module = ConvModule( + chn, + self.feat_channels, + kernel_size, + stride=1, + padding=padding, + norm_cfg=self.norm_cfg, + bias=use_bias, + conv_cfg=op) + + self.cls_convs.append(copy.deepcopy(module)) + self.reg_convs.append(copy.deepcopy(module)) + + self.conv_cls = nn.Conv2d( + self.feat_channels, self.cls_out_channels, 3, padding=1) + self.conv_reg = nn.Conv2d(self.feat_channels, 4, 3, padding=1) + self.conv_centerness = nn.Conv2d(self.feat_channels, 1, 3, padding=1) + + self.scales = nn.ModuleList([Scale(1.0) for _ in self.strides]) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/paa_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/paa_head.py new file mode 100644 index 0000000000000000000000000000000000000000..49b0996d76b5850ffeddace5d3441a05f55bb01b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/paa_head.py @@ -0,0 +1,673 @@ +import numpy as np +import torch +from mmcv.runner import force_fp32 + +from mmdet.core import multi_apply, multiclass_nms +from mmdet.core.bbox.iou_calculators import bbox_overlaps +from mmdet.models import HEADS +from mmdet.models.dense_heads import ATSSHead + +EPS = 1e-12 +try: + import sklearn.mixture as skm +except ImportError: + skm = None + + +def levels_to_images(mlvl_tensor): + """Concat multi-level feature maps by image. + + [feature_level0, feature_level1...] -> [feature_image0, feature_image1...] + Convert the shape of each element in mlvl_tensor from (N, C, H, W) to + (N, H*W , C), then split the element to N elements with shape (H*W, C), and + concat elements in same image of all level along first dimension. + + Args: + mlvl_tensor (list[torch.Tensor]): list of Tensor which collect from + corresponding level. 
Each element is of shape (N, C, H, W) + + Returns: + list[torch.Tensor]: A list that contains N tensors and each tensor is + of shape (num_elements, C) + """ + batch_size = mlvl_tensor[0].size(0) + batch_list = [[] for _ in range(batch_size)] + channels = mlvl_tensor[0].size(1) + for t in mlvl_tensor: + t = t.permute(0, 2, 3, 1) + t = t.view(batch_size, -1, channels).contiguous() + for img in range(batch_size): + batch_list[img].append(t[img]) + return [torch.cat(item, 0) for item in batch_list] + + +@HEADS.register_module() +class PAAHead(ATSSHead): + """Head of PAAAssignment: Probabilistic Anchor Assignment with IoU + Prediction for Object Detection. + + Code is modified from the `official github repo + `_. + + More details can be found in the `paper + `_ . + + Args: + topk (int): Select topk samples with smallest loss in + each level. + score_voting (bool): Whether to use score voting in post-process. + covariance_type : String describing the type of covariance parameters + to be used in :class:`sklearn.mixture.GaussianMixture`. + It must be one of: + + - 'full': each component has its own general covariance matrix + - 'tied': all components share the same general covariance matrix + - 'diag': each component has its own diagonal covariance matrix + - 'spherical': each component has its own single variance + Default: 'diag'. From 'full' to 'spherical', the gmm fitting + process is faster yet the performance could be influenced. For most + cases, 'diag' should be a good choice. + """ + + def __init__(self, + *args, + topk=9, + score_voting=True, + covariance_type='diag', + **kwargs): + # topk used in paa reassign process + self.topk = topk + self.with_score_voting = score_voting + self.covariance_type = covariance_type + super(PAAHead, self).__init__(*args, **kwargs) + + @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'iou_preds')) + def loss(self, + cls_scores, + bbox_preds, + iou_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W) + iou_preds (list[Tensor]): iou_preds for each scale + level with shape (N, num_anchors * 1, H, W) + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (list[Tensor] | None): Specify which bounding + boxes can be ignored when are computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss gmm_assignment. 
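The per-image flattening above relies on the `levels_to_images` helper defined at the top of this file. Its shape contract on dummy data (assuming the helper is in scope):

```python
import torch

# Two FPN levels for a batch of N=2, C=4 feature maps become one
# (H1*W1 + H2*W2, C) tensor per image.
lvl0, lvl1 = torch.zeros(2, 4, 8, 8), torch.zeros(2, 4, 4, 4)
per_image = levels_to_images([lvl0, lvl1])  # helper defined above
assert len(per_image) == 2
assert per_image[0].shape == (8 * 8 + 4 * 4, 4)
```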
+ """ + + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels, + ) + (labels, labels_weight, bboxes_target, bboxes_weight, pos_inds, + pos_gt_index) = cls_reg_targets + cls_scores = levels_to_images(cls_scores) + cls_scores = [ + item.reshape(-1, self.cls_out_channels) for item in cls_scores + ] + bbox_preds = levels_to_images(bbox_preds) + bbox_preds = [item.reshape(-1, 4) for item in bbox_preds] + iou_preds = levels_to_images(iou_preds) + iou_preds = [item.reshape(-1, 1) for item in iou_preds] + pos_losses_list, = multi_apply(self.get_pos_loss, anchor_list, + cls_scores, bbox_preds, labels, + labels_weight, bboxes_target, + bboxes_weight, pos_inds) + + with torch.no_grad(): + reassign_labels, reassign_label_weight, \ + reassign_bbox_weights, num_pos = multi_apply( + self.paa_reassign, + pos_losses_list, + labels, + labels_weight, + bboxes_weight, + pos_inds, + pos_gt_index, + anchor_list) + num_pos = sum(num_pos) + # convert all tensor list to a flatten tensor + cls_scores = torch.cat(cls_scores, 0).view(-1, cls_scores[0].size(-1)) + bbox_preds = torch.cat(bbox_preds, 0).view(-1, bbox_preds[0].size(-1)) + iou_preds = torch.cat(iou_preds, 0).view(-1, iou_preds[0].size(-1)) + labels = torch.cat(reassign_labels, 0).view(-1) + flatten_anchors = torch.cat( + [torch.cat(item, 0) for item in anchor_list]) + labels_weight = torch.cat(reassign_label_weight, 0).view(-1) + bboxes_target = torch.cat(bboxes_target, + 0).view(-1, bboxes_target[0].size(-1)) + + pos_inds_flatten = ((labels >= 0) + & + (labels < self.num_classes)).nonzero().reshape(-1) + + losses_cls = self.loss_cls( + cls_scores, + labels, + labels_weight, + avg_factor=max(num_pos, len(img_metas))) # avoid num_pos=0 + if num_pos: + pos_bbox_pred = self.bbox_coder.decode( + flatten_anchors[pos_inds_flatten], + bbox_preds[pos_inds_flatten]) + pos_bbox_target = bboxes_target[pos_inds_flatten] + iou_target = bbox_overlaps( + pos_bbox_pred.detach(), pos_bbox_target, is_aligned=True) + losses_iou = self.loss_centerness( + iou_preds[pos_inds_flatten], + iou_target.unsqueeze(-1), + avg_factor=num_pos) + losses_bbox = self.loss_bbox( + pos_bbox_pred, + pos_bbox_target, + iou_target.clamp(min=EPS), + avg_factor=iou_target.sum()) + else: + losses_iou = iou_preds.sum() * 0 + losses_bbox = bbox_preds.sum() * 0 + + return dict( + loss_cls=losses_cls, loss_bbox=losses_bbox, loss_iou=losses_iou) + + def get_pos_loss(self, anchors, cls_score, bbox_pred, label, label_weight, + bbox_target, bbox_weight, pos_inds): + """Calculate loss of all potential positive samples obtained from first + match process. + + Args: + anchors (list[Tensor]): Anchors of each scale. + cls_score (Tensor): Box scores of single image with shape + (num_anchors, num_classes) + bbox_pred (Tensor): Box energies / deltas of single image + with shape (num_anchors, 4) + label (Tensor): classification target of each anchor with + shape (num_anchors,) + label_weight (Tensor): Classification loss weight of each + anchor with shape (num_anchors). 
+ bbox_target (dict): Regression target of each anchor with + shape (num_anchors, 4). + bbox_weight (Tensor): Bbox weight of each anchor with shape + (num_anchors, 4). + pos_inds (Tensor): Index of all positive samples got from + first assign process. + + Returns: + Tensor: Losses of all positive samples in single image. + """ + if not len(pos_inds): + return cls_score.new([]), + anchors_all_level = torch.cat(anchors, 0) + pos_scores = cls_score[pos_inds] + pos_bbox_pred = bbox_pred[pos_inds] + pos_label = label[pos_inds] + pos_label_weight = label_weight[pos_inds] + pos_bbox_target = bbox_target[pos_inds] + pos_bbox_weight = bbox_weight[pos_inds] + pos_anchors = anchors_all_level[pos_inds] + pos_bbox_pred = self.bbox_coder.decode(pos_anchors, pos_bbox_pred) + + # to keep loss dimension + loss_cls = self.loss_cls( + pos_scores, + pos_label, + pos_label_weight, + avg_factor=self.loss_cls.loss_weight, + reduction_override='none') + + loss_bbox = self.loss_bbox( + pos_bbox_pred, + pos_bbox_target, + pos_bbox_weight, + avg_factor=self.loss_cls.loss_weight, + reduction_override='none') + + loss_cls = loss_cls.sum(-1) + pos_loss = loss_bbox + loss_cls + return pos_loss, + + def paa_reassign(self, pos_losses, label, label_weight, bbox_weight, + pos_inds, pos_gt_inds, anchors): + """Fit loss to GMM distribution and separate positive, ignore, negative + samples again with GMM model. + + Args: + pos_losses (Tensor): Losses of all positive samples in + single image. + label (Tensor): classification target of each anchor with + shape (num_anchors,) + label_weight (Tensor): Classification loss weight of each + anchor with shape (num_anchors). + bbox_weight (Tensor): Bbox weight of each anchor with shape + (num_anchors, 4). + pos_inds (Tensor): Index of all positive samples got from + first assign process. + pos_gt_inds (Tensor): Gt_index of all positive samples got + from first assign process. + anchors (list[Tensor]): Anchors of each scale. + + Returns: + tuple: Usually returns a tuple containing learning targets. + + - label (Tensor): classification target of each anchor after + paa assign, with shape (num_anchors,) + - label_weight (Tensor): Classification loss weight of each + anchor after paa assign, with shape (num_anchors). + - bbox_weight (Tensor): Bbox weight of each anchor with shape + (num_anchors, 4). + - num_pos (int): The number of positive samples after paa + assign. 
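The method body below fits a two-component 1-D Gaussian mixture to the per-anchor losses of each ground-truth box. A toy reproduction of that fit, with means initialised at the loss min/max so component 0 converges to the low-loss (positive-candidate) cluster, as the head assumes:

```python
import numpy as np
import sklearn.mixture as skm

losses = np.array([0.1, 0.12, 0.15, 0.9, 1.1, 1.3]).reshape(-1, 1)
means_init = np.array([losses.min(), losses.max()]).reshape(2, 1)
gmm = skm.GaussianMixture(
    2,
    weights_init=np.array([0.5, 0.5]),
    means_init=means_init,
    precisions_init=np.array([1.0, 1.0]).reshape(2, 1),
    covariance_type='diag')
gmm.fit(losses)
assignment = gmm.predict(losses)    # 0 -> low-loss component
scores = gmm.score_samples(losses)  # log-likelihood under the mixture
```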
+ """ + if not len(pos_inds): + return label, label_weight, bbox_weight, 0 + label = label.clone() + label_weight = label_weight.clone() + bbox_weight = bbox_weight.clone() + num_gt = pos_gt_inds.max() + 1 + num_level = len(anchors) + num_anchors_each_level = [item.size(0) for item in anchors] + num_anchors_each_level.insert(0, 0) + inds_level_interval = np.cumsum(num_anchors_each_level) + pos_level_mask = [] + for i in range(num_level): + mask = (pos_inds >= inds_level_interval[i]) & ( + pos_inds < inds_level_interval[i + 1]) + pos_level_mask.append(mask) + pos_inds_after_paa = [label.new_tensor([])] + ignore_inds_after_paa = [label.new_tensor([])] + for gt_ind in range(num_gt): + pos_inds_gmm = [] + pos_loss_gmm = [] + gt_mask = pos_gt_inds == gt_ind + for level in range(num_level): + level_mask = pos_level_mask[level] + level_gt_mask = level_mask & gt_mask + value, topk_inds = pos_losses[level_gt_mask].topk( + min(level_gt_mask.sum(), self.topk), largest=False) + pos_inds_gmm.append(pos_inds[level_gt_mask][topk_inds]) + pos_loss_gmm.append(value) + pos_inds_gmm = torch.cat(pos_inds_gmm) + pos_loss_gmm = torch.cat(pos_loss_gmm) + # fix gmm need at least two sample + if len(pos_inds_gmm) < 2: + continue + device = pos_inds_gmm.device + pos_loss_gmm, sort_inds = pos_loss_gmm.sort() + pos_inds_gmm = pos_inds_gmm[sort_inds] + pos_loss_gmm = pos_loss_gmm.view(-1, 1).cpu().numpy() + min_loss, max_loss = pos_loss_gmm.min(), pos_loss_gmm.max() + means_init = np.array([min_loss, max_loss]).reshape(2, 1) + weights_init = np.array([0.5, 0.5]) + precisions_init = np.array([1.0, 1.0]).reshape(2, 1, 1) # full + if self.covariance_type == 'spherical': + precisions_init = precisions_init.reshape(2) + elif self.covariance_type == 'diag': + precisions_init = precisions_init.reshape(2, 1) + elif self.covariance_type == 'tied': + precisions_init = np.array([[1.0]]) + if skm is None: + raise ImportError('Please run "pip install sklearn" ' + 'to install sklearn first.') + gmm = skm.GaussianMixture( + 2, + weights_init=weights_init, + means_init=means_init, + precisions_init=precisions_init, + covariance_type=self.covariance_type) + gmm.fit(pos_loss_gmm) + gmm_assignment = gmm.predict(pos_loss_gmm) + scores = gmm.score_samples(pos_loss_gmm) + gmm_assignment = torch.from_numpy(gmm_assignment).to(device) + scores = torch.from_numpy(scores).to(device) + + pos_inds_temp, ignore_inds_temp = self.gmm_separation_scheme( + gmm_assignment, scores, pos_inds_gmm) + pos_inds_after_paa.append(pos_inds_temp) + ignore_inds_after_paa.append(ignore_inds_temp) + + pos_inds_after_paa = torch.cat(pos_inds_after_paa) + ignore_inds_after_paa = torch.cat(ignore_inds_after_paa) + reassign_mask = (pos_inds.unsqueeze(1) != pos_inds_after_paa).all(1) + reassign_ids = pos_inds[reassign_mask] + label[reassign_ids] = self.num_classes + label_weight[ignore_inds_after_paa] = 0 + bbox_weight[reassign_ids] = 0 + num_pos = len(pos_inds_after_paa) + return label, label_weight, bbox_weight, num_pos + + def gmm_separation_scheme(self, gmm_assignment, scores, pos_inds_gmm): + """A general separation scheme for gmm model. + + It separates a GMM distribution of candidate samples into three + parts, 0 1 and uncertain areas, and you can implement other + separation schemes by rewriting this function. + + Args: + gmm_assignment (Tensor): The prediction of GMM which is of shape + (num_samples,). The 0/1 value indicates the distribution + that each sample comes from. + scores (Tensor): The probability of sample coming from the + fit GMM distribution. 
The tensor is of shape (num_samples,). + pos_inds_gmm (Tensor): All the indexes of samples which are used + to fit GMM model. The tensor is of shape (num_samples,) + + Returns: + tuple[Tensor]: The indices of positive and ignored samples. + + - pos_inds_temp (Tensor): Indices of positive samples. + - ignore_inds_temp (Tensor): Indices of ignore samples. + """ + # The implementation is (c) in Fig.3 in origin paper instead of (b). + # You can refer to issues such as + # https://github.com/kkhoot/PAA/issues/8 and + # https://github.com/kkhoot/PAA/issues/9. + fgs = gmm_assignment == 0 + pos_inds_temp = fgs.new_tensor([], dtype=torch.long) + ignore_inds_temp = fgs.new_tensor([], dtype=torch.long) + if fgs.nonzero().numel(): + _, pos_thr_ind = scores[fgs].topk(1) + pos_inds_temp = pos_inds_gmm[fgs][:pos_thr_ind + 1] + ignore_inds_temp = pos_inds_gmm.new_tensor([]) + return pos_inds_temp, ignore_inds_temp + + def get_targets( + self, + anchor_list, + valid_flag_list, + gt_bboxes_list, + img_metas, + gt_bboxes_ignore_list=None, + gt_labels_list=None, + label_channels=1, + unmap_outputs=True, + ): + """Get targets for PAA head. + + This method is almost the same as `AnchorHead.get_targets()`. We direct + return the results from _get_targets_single instead map it to levels + by images_to_levels function. + + Args: + anchor_list (list[list[Tensor]]): Multi level anchors of each + image. The outer list indicates images, and the inner list + corresponds to feature levels of the image. Each element of + the inner list is a tensor of shape (num_anchors, 4). + valid_flag_list (list[list[Tensor]]): Multi level valid flags of + each image. The outer list indicates images, and the inner list + corresponds to feature levels of the image. Each element of + the inner list is a tensor of shape (num_anchors, ) + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image. + img_metas (list[dict]): Meta info of each image. + gt_bboxes_ignore_list (list[Tensor]): Ground truth bboxes to be + ignored. + gt_labels_list (list[Tensor]): Ground truth labels of each box. + label_channels (int): Channel of label. + unmap_outputs (bool): Whether to map outputs back to the original + set of anchors. + + Returns: + tuple: Usually returns a tuple containing learning targets. + + - labels (list[Tensor]): Labels of all anchors, each with + shape (num_anchors,). + - label_weights (list[Tensor]): Label weights of all anchor. + each with shape (num_anchors,). + - bbox_targets (list[Tensor]): BBox targets of all anchors. + each with shape (num_anchors, 4). + - bbox_weights (list[Tensor]): BBox weights of all anchors. + each with shape (num_anchors, 4). + - pos_inds (list[Tensor]): Contains all index of positive + sample in all anchor. + - gt_inds (list[Tensor]): Contains all gt_index of positive + sample in all anchor. 
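A toy walk-through of `gmm_separation_scheme` above: candidates arrive sorted by loss, component 0 is the low-loss Gaussian, and everything up to the highest-likelihood component-0 sample is kept as positive (scheme (c) from the paper, as the comment there notes):

```python
import torch

gmm_assignment = torch.tensor([0, 0, 0, 1, 1])
scores = torch.tensor([-0.1, -0.05, -0.3, -2.0, -2.5])
pos_inds_gmm = torch.tensor([7, 3, 11, 5, 2])  # anchor ids, loss-sorted

fgs = gmm_assignment == 0
_, pos_thr_ind = scores[fgs].topk(1)           # likelihood peaks at index 1
pos_inds = pos_inds_gmm[fgs][:int(pos_thr_ind) + 1]
print(pos_inds.tolist())                       # -> [7, 3]
```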
+ """ + + num_imgs = len(img_metas) + assert len(anchor_list) == len(valid_flag_list) == num_imgs + concat_anchor_list = [] + concat_valid_flag_list = [] + for i in range(num_imgs): + assert len(anchor_list[i]) == len(valid_flag_list[i]) + concat_anchor_list.append(torch.cat(anchor_list[i])) + concat_valid_flag_list.append(torch.cat(valid_flag_list[i])) + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + if gt_labels_list is None: + gt_labels_list = [None for _ in range(num_imgs)] + results = multi_apply( + self._get_targets_single, + concat_anchor_list, + concat_valid_flag_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + img_metas, + label_channels=label_channels, + unmap_outputs=unmap_outputs) + + (labels, label_weights, bbox_targets, bbox_weights, valid_pos_inds, + valid_neg_inds, sampling_result) = results + + # Due to valid flag of anchors, we have to calculate the real pos_inds + # in origin anchor set. + pos_inds = [] + for i, single_labels in enumerate(labels): + pos_mask = (0 <= single_labels) & ( + single_labels < self.num_classes) + pos_inds.append(pos_mask.nonzero().view(-1)) + + gt_inds = [item.pos_assigned_gt_inds for item in sampling_result] + return (labels, label_weights, bbox_targets, bbox_weights, pos_inds, + gt_inds) + + def _get_targets_single(self, + flat_anchors, + valid_flags, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + img_meta, + label_channels=1, + unmap_outputs=True): + """Compute regression and classification targets for anchors in a + single image. + + This method is same as `AnchorHead._get_targets_single()`. + """ + assert unmap_outputs, 'We must map outputs back to the original' \ + 'set of anchors in PAAhead' + return super(ATSSHead, self)._get_targets_single( + flat_anchors, + valid_flags, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + img_meta, + label_channels=1, + unmap_outputs=True) + + def _get_bboxes(self, + cls_scores, + bbox_preds, + iou_preds, + mlvl_anchors, + img_shapes, + scale_factors, + cfg, + rescale=False, + with_nms=True): + """Transform outputs for a single batch item into labeled boxes. + + This method is almost same as `ATSSHead._get_bboxes()`. + We use sqrt(iou_preds * cls_scores) in NMS process instead of just + cls_scores. Besides, score voting is used when `` score_voting`` + is set to True. 
+ """ + assert with_nms, 'PAA only supports "with_nms=True" now and it ' \ + 'means PAAHead does not support ' \ + 'test-time augmentation' + assert len(cls_scores) == len(bbox_preds) == len(mlvl_anchors) + batch_size = cls_scores[0].shape[0] + + mlvl_bboxes = [] + mlvl_scores = [] + mlvl_iou_preds = [] + for cls_score, bbox_pred, iou_preds, anchors in zip( + cls_scores, bbox_preds, iou_preds, mlvl_anchors): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + + scores = cls_score.permute(0, 2, 3, 1).reshape( + batch_size, -1, self.cls_out_channels).sigmoid() + bbox_pred = bbox_pred.permute(0, 2, 3, + 1).reshape(batch_size, -1, 4) + iou_preds = iou_preds.permute(0, 2, 3, 1).reshape(batch_size, + -1).sigmoid() + + nms_pre = cfg.get('nms_pre', -1) + if nms_pre > 0 and scores.shape[1] > nms_pre: + max_scores, _ = (scores * iou_preds[..., None]).sqrt().max(-1) + _, topk_inds = max_scores.topk(nms_pre) + batch_inds = torch.arange(batch_size).view( + -1, 1).expand_as(topk_inds).long() + anchors = anchors[topk_inds, :] + bbox_pred = bbox_pred[batch_inds, topk_inds, :] + scores = scores[batch_inds, topk_inds, :] + iou_preds = iou_preds[batch_inds, topk_inds] + else: + anchors = anchors.expand_as(bbox_pred) + + bboxes = self.bbox_coder.decode( + anchors, bbox_pred, max_shape=img_shapes) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + mlvl_iou_preds.append(iou_preds) + + batch_mlvl_bboxes = torch.cat(mlvl_bboxes, dim=1) + if rescale: + batch_mlvl_bboxes /= batch_mlvl_bboxes.new_tensor( + scale_factors).unsqueeze(1) + batch_mlvl_scores = torch.cat(mlvl_scores, dim=1) + # Add a dummy background class to the backend when using sigmoid + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + padding = batch_mlvl_scores.new_zeros(batch_size, + batch_mlvl_scores.shape[1], 1) + batch_mlvl_scores = torch.cat([batch_mlvl_scores, padding], dim=-1) + batch_mlvl_iou_preds = torch.cat(mlvl_iou_preds, dim=1) + batch_mlvl_nms_scores = (batch_mlvl_scores * + batch_mlvl_iou_preds[..., None]).sqrt() + + det_results = [] + for (mlvl_bboxes, mlvl_scores) in zip(batch_mlvl_bboxes, + batch_mlvl_nms_scores): + det_bbox, det_label = multiclass_nms( + mlvl_bboxes, + mlvl_scores, + cfg.score_thr, + cfg.nms, + cfg.max_per_img, + score_factors=None) + if self.with_score_voting and len(det_bbox) > 0: + det_bbox, det_label = self.score_voting( + det_bbox, det_label, mlvl_bboxes, mlvl_scores, + cfg.score_thr) + det_results.append(tuple([det_bbox, det_label])) + + return det_results + + def score_voting(self, det_bboxes, det_labels, mlvl_bboxes, + mlvl_nms_scores, score_thr): + """Implementation of score voting method works on each remaining boxes + after NMS procedure. + + Args: + det_bboxes (Tensor): Remaining boxes after NMS procedure, + with shape (k, 5), each dimension means + (x1, y1, x2, y2, score). + det_labels (Tensor): The label of remaining boxes, with shape + (k, 1),Labels are 0-based. + mlvl_bboxes (Tensor): All boxes before the NMS procedure, + with shape (num_anchors,4). + mlvl_nms_scores (Tensor): The scores of all boxes which is used + in the NMS procedure, with shape (num_anchors, num_class) + mlvl_iou_preds (Tensor): The predictions of IOU of all boxes + before the NMS procedure, with shape (num_anchors, 1) + score_thr (float): The score threshold of bboxes. + + Returns: + tuple: Usually returns a tuple containing voting results. 
+ + - det_bboxes_voted (Tensor): Remaining boxes after + score voting procedure, with shape (k, 5), each + dimension means (x1, y1, x2, y2, score). + - det_labels_voted (Tensor): Label of remaining bboxes + after voting, with shape (num_anchors,). + """ + candidate_mask = mlvl_nms_scores > score_thr + candidate_mask_nonzeros = candidate_mask.nonzero() + candidate_inds = candidate_mask_nonzeros[:, 0] + candidate_labels = candidate_mask_nonzeros[:, 1] + candidate_bboxes = mlvl_bboxes[candidate_inds] + candidate_scores = mlvl_nms_scores[candidate_mask] + det_bboxes_voted = [] + det_labels_voted = [] + for cls in range(self.cls_out_channels): + candidate_cls_mask = candidate_labels == cls + if not candidate_cls_mask.any(): + continue + candidate_cls_scores = candidate_scores[candidate_cls_mask] + candidate_cls_bboxes = candidate_bboxes[candidate_cls_mask] + det_cls_mask = det_labels == cls + det_cls_bboxes = det_bboxes[det_cls_mask].view( + -1, det_bboxes.size(-1)) + det_candidate_ious = bbox_overlaps(det_cls_bboxes[:, :4], + candidate_cls_bboxes) + for det_ind in range(len(det_cls_bboxes)): + single_det_ious = det_candidate_ious[det_ind] + pos_ious_mask = single_det_ious > 0.01 + pos_ious = single_det_ious[pos_ious_mask] + pos_bboxes = candidate_cls_bboxes[pos_ious_mask] + pos_scores = candidate_cls_scores[pos_ious_mask] + pis = (torch.exp(-(1 - pos_ious)**2 / 0.025) * + pos_scores)[:, None] + voted_box = torch.sum( + pis * pos_bboxes, dim=0) / torch.sum( + pis, dim=0) + voted_score = det_cls_bboxes[det_ind][-1:][None, :] + det_bboxes_voted.append( + torch.cat((voted_box[None, :], voted_score), dim=1)) + det_labels_voted.append(cls) + + det_bboxes_voted = torch.cat(det_bboxes_voted, dim=0) + det_labels_voted = det_labels.new_tensor(det_labels_voted) + return det_bboxes_voted, det_labels_voted diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/pisa_retinanet_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/pisa_retinanet_head.py new file mode 100644 index 0000000000000000000000000000000000000000..bd87b9aeb07e05ff94b444ac8999eca3f616711a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/pisa_retinanet_head.py @@ -0,0 +1,154 @@ +import torch +from mmcv.runner import force_fp32 + +from mmdet.core import images_to_levels +from ..builder import HEADS +from ..losses import carl_loss, isr_p +from .retina_head import RetinaHead + + +@HEADS.register_module() +class PISARetinaHead(RetinaHead): + """PISA Retinanet Head. + + The head owns the same structure with Retinanet Head, but differs in two + aspects: + 1. Importance-based Sample Reweighting Positive (ISR-P) is applied to + change the positive loss weights. + 2. Classification-aware regression loss is adopted as a third loss. + """ + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W) + gt_bboxes (list[Tensor]): Ground truth bboxes of each image + with shape (num_obj, 4). + gt_labels (list[Tensor]): Ground truth labels of each image + with shape (num_obj, 4). + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. 
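Returning to `score_voting` above: each surviving box is replaced by a weighted average of nearby candidates, where the weight is `exp(-(1 - IoU)^2 / 0.025)` times the candidate's score. On toy values, the far-away third box contributes almost nothing:

```python
import torch

ious = torch.tensor([1.0, 0.8, 0.4])
scores = torch.tensor([0.9, 0.7, 0.6])
boxes = torch.tensor([[10., 10., 50., 50.],
                      [12., 11., 52., 49.],
                      [30., 30., 80., 80.]])
pis = (torch.exp(-(1 - ious) ** 2 / 0.025) * scores)[:, None]
voted_box = (pis * boxes).sum(0) / pis.sum(0)
```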
+ gt_bboxes_ignore (list[Tensor]): Ignored gt bboxes of each image. + Default: None. + + Returns: + dict: Loss dict, comprise classification loss, regression loss and + carl loss. + """ + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels, + return_sampling_results=True) + if cls_reg_targets is None: + return None + (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, + num_total_pos, num_total_neg, sampling_results_list) = cls_reg_targets + num_total_samples = ( + num_total_pos + num_total_neg if self.sampling else num_total_pos) + + # anchor number of multi levels + num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]] + # concat all level anchors and flags to a single tensor + concat_anchor_list = [] + for i in range(len(anchor_list)): + concat_anchor_list.append(torch.cat(anchor_list[i])) + all_anchor_list = images_to_levels(concat_anchor_list, + num_level_anchors) + + num_imgs = len(img_metas) + flatten_cls_scores = [ + cls_score.permute(0, 2, 3, 1).reshape(num_imgs, -1, label_channels) + for cls_score in cls_scores + ] + flatten_cls_scores = torch.cat( + flatten_cls_scores, dim=1).reshape(-1, + flatten_cls_scores[0].size(-1)) + flatten_bbox_preds = [ + bbox_pred.permute(0, 2, 3, 1).reshape(num_imgs, -1, 4) + for bbox_pred in bbox_preds + ] + flatten_bbox_preds = torch.cat( + flatten_bbox_preds, dim=1).view(-1, flatten_bbox_preds[0].size(-1)) + flatten_labels = torch.cat(labels_list, dim=1).reshape(-1) + flatten_label_weights = torch.cat( + label_weights_list, dim=1).reshape(-1) + flatten_anchors = torch.cat(all_anchor_list, dim=1).reshape(-1, 4) + flatten_bbox_targets = torch.cat( + bbox_targets_list, dim=1).reshape(-1, 4) + flatten_bbox_weights = torch.cat( + bbox_weights_list, dim=1).reshape(-1, 4) + + # Apply ISR-P + isr_cfg = self.train_cfg.get('isr', None) + if isr_cfg is not None: + all_targets = (flatten_labels, flatten_label_weights, + flatten_bbox_targets, flatten_bbox_weights) + with torch.no_grad(): + all_targets = isr_p( + flatten_cls_scores, + flatten_bbox_preds, + all_targets, + flatten_anchors, + sampling_results_list, + bbox_coder=self.bbox_coder, + loss_cls=self.loss_cls, + num_class=self.num_classes, + **self.train_cfg.isr) + (flatten_labels, flatten_label_weights, flatten_bbox_targets, + flatten_bbox_weights) = all_targets + + # For convenience we compute loss once instead separating by fpn level, + # so that we don't need to separate the weights by level again. 
+        # The result should be the same.
+        losses_cls = self.loss_cls(
+            flatten_cls_scores,
+            flatten_labels,
+            flatten_label_weights,
+            avg_factor=num_total_samples)
+        losses_bbox = self.loss_bbox(
+            flatten_bbox_preds,
+            flatten_bbox_targets,
+            flatten_bbox_weights,
+            avg_factor=num_total_samples)
+        loss_dict = dict(loss_cls=losses_cls, loss_bbox=losses_bbox)
+
+        # CARL Loss
+        carl_cfg = self.train_cfg.get('carl', None)
+        if carl_cfg is not None:
+            loss_carl = carl_loss(
+                flatten_cls_scores,
+                flatten_labels,
+                flatten_bbox_preds,
+                flatten_bbox_targets,
+                self.loss_bbox,
+                **self.train_cfg.carl,
+                avg_factor=num_total_pos,
+                sigmoid=True,
+                num_class=self.num_classes)
+            loss_dict.update(loss_carl)
+
+        return loss_dict
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/pisa_ssd_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/pisa_ssd_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..90ef3c83ed62d8346c8daef01f18ad7bd236623c
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/pisa_ssd_head.py
@@ -0,0 +1,139 @@
+import torch
+
+from mmdet.core import multi_apply
+from ..builder import HEADS
+from ..losses import CrossEntropyLoss, SmoothL1Loss, carl_loss, isr_p
+from .ssd_head import SSDHead
+
+
+# TODO: add loss evaluator for SSD
+@HEADS.register_module()
+class PISASSDHead(SSDHead):
+
+    def loss(self,
+             cls_scores,
+             bbox_preds,
+             gt_bboxes,
+             gt_labels,
+             img_metas,
+             gt_bboxes_ignore=None):
+        """Compute losses of the head.
+
+        Args:
+            cls_scores (list[Tensor]): Box scores for each scale level.
+                Has shape (N, num_anchors * num_classes, H, W).
+            bbox_preds (list[Tensor]): Box energies / deltas for each scale
+                level with shape (N, num_anchors * 4, H, W).
+            gt_bboxes (list[Tensor]): Ground truth bboxes of each image
+                with shape (num_obj, 4).
+            gt_labels (list[Tensor]): Ground truth labels of each image
+                with shape (num_obj,).
+            img_metas (list[dict]): Meta information of each image, e.g.,
+                image size, scaling factor, etc.
+            gt_bboxes_ignore (list[Tensor]): Ignored gt bboxes of each image.
+                Default: None.
+
+        Returns:
+            dict: Loss dict comprising classification loss, regression loss
+                and CARL loss.
+ """ + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=1, + unmap_outputs=False, + return_sampling_results=True) + if cls_reg_targets is None: + return None + (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, + num_total_pos, num_total_neg, sampling_results_list) = cls_reg_targets + + num_images = len(img_metas) + all_cls_scores = torch.cat([ + s.permute(0, 2, 3, 1).reshape( + num_images, -1, self.cls_out_channels) for s in cls_scores + ], 1) + all_labels = torch.cat(labels_list, -1).view(num_images, -1) + all_label_weights = torch.cat(label_weights_list, + -1).view(num_images, -1) + all_bbox_preds = torch.cat([ + b.permute(0, 2, 3, 1).reshape(num_images, -1, 4) + for b in bbox_preds + ], -2) + all_bbox_targets = torch.cat(bbox_targets_list, + -2).view(num_images, -1, 4) + all_bbox_weights = torch.cat(bbox_weights_list, + -2).view(num_images, -1, 4) + + # concat all level anchors to a single tensor + all_anchors = [] + for i in range(num_images): + all_anchors.append(torch.cat(anchor_list[i])) + + isr_cfg = self.train_cfg.get('isr', None) + all_targets = (all_labels.view(-1), all_label_weights.view(-1), + all_bbox_targets.view(-1, + 4), all_bbox_weights.view(-1, 4)) + # apply ISR-P + if isr_cfg is not None: + all_targets = isr_p( + all_cls_scores.view(-1, all_cls_scores.size(-1)), + all_bbox_preds.view(-1, 4), + all_targets, + torch.cat(all_anchors), + sampling_results_list, + loss_cls=CrossEntropyLoss(), + bbox_coder=self.bbox_coder, + **self.train_cfg.isr, + num_class=self.num_classes) + (new_labels, new_label_weights, new_bbox_targets, + new_bbox_weights) = all_targets + all_labels = new_labels.view(all_labels.shape) + all_label_weights = new_label_weights.view(all_label_weights.shape) + all_bbox_targets = new_bbox_targets.view(all_bbox_targets.shape) + all_bbox_weights = new_bbox_weights.view(all_bbox_weights.shape) + + # add CARL loss + carl_loss_cfg = self.train_cfg.get('carl', None) + if carl_loss_cfg is not None: + loss_carl = carl_loss( + all_cls_scores.view(-1, all_cls_scores.size(-1)), + all_targets[0], + all_bbox_preds.view(-1, 4), + all_targets[2], + SmoothL1Loss(beta=1.), + **self.train_cfg.carl, + avg_factor=num_total_pos, + num_class=self.num_classes) + + # check NaN and Inf + assert torch.isfinite(all_cls_scores).all().item(), \ + 'classification scores become infinite or NaN!' + assert torch.isfinite(all_bbox_preds).all().item(), \ + 'bbox predications become infinite or NaN!' 
+ + losses_cls, losses_bbox = multi_apply( + self.loss_single, + all_cls_scores, + all_bbox_preds, + all_anchors, + all_labels, + all_label_weights, + all_bbox_targets, + all_bbox_weights, + num_total_samples=num_total_pos) + loss_dict = dict(loss_cls=losses_cls, loss_bbox=losses_bbox) + if carl_loss_cfg is not None: + loss_dict.update(loss_carl) + return loss_dict diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/reppoints_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/reppoints_head.py new file mode 100644 index 0000000000000000000000000000000000000000..d05476fb688ebace14ef537252fa8361242372f7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/reppoints_head.py @@ -0,0 +1,749 @@ +import numpy as np +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule +from mmcv.ops import DeformConv2d + +from mmdet.core import (build_assigner, build_sampler, images_to_levels, + multi_apply, multiclass_nms, unmap) +from mmdet.core.anchor.point_generator import MlvlPointGenerator +from ..builder import HEADS, build_loss +from .anchor_free_head import AnchorFreeHead + + +@HEADS.register_module() +class RepPointsHead(AnchorFreeHead): + """RepPoint head. + + Args: + point_feat_channels (int): Number of channels of points features. + gradient_mul (float): The multiplier to gradients from + points refinement and recognition. + point_strides (Iterable): points strides. + point_base_scale (int): bbox scale for assigning labels. + loss_cls (dict): Config of classification loss. + loss_bbox_init (dict): Config of initial points loss. + loss_bbox_refine (dict): Config of points loss in refinement. + use_grid_points (bool): If we use bounding box representation, the + reppoints is represented as grid points on the bounding box. + center_init (bool): Whether to use center point assignment. + transform_method (str): The methods to transform RepPoints to bbox. + init_cfg (dict or list[dict], optional): Initialization config dict. + """ # noqa: W605 + + def __init__(self, + num_classes, + in_channels, + point_feat_channels=256, + num_points=9, + gradient_mul=0.1, + point_strides=[8, 16, 32, 64, 128], + point_base_scale=4, + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_init=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=0.5), + loss_bbox_refine=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0), + use_grid_points=False, + center_init=True, + transform_method='moment', + moment_mul=0.01, + init_cfg=dict( + type='Normal', + layer='Conv2d', + std=0.01, + override=dict( + type='Normal', + name='reppoints_cls_out', + std=0.01, + bias_prob=0.01)), + **kwargs): + self.num_points = num_points + self.point_feat_channels = point_feat_channels + self.use_grid_points = use_grid_points + self.center_init = center_init + + # we use deform conv to extract points features + self.dcn_kernel = int(np.sqrt(num_points)) + self.dcn_pad = int((self.dcn_kernel - 1) / 2) + assert self.dcn_kernel * self.dcn_kernel == num_points, \ + 'The points number should be a square number.' + assert self.dcn_kernel % 2 == 1, \ + 'The points number should be an odd square number.' 
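The base-offset construction just below maps the `num_points` RepPoints onto a square deformable-conv kernel. For the default `num_points=9` it yields the y-first 3x3 grid of kernel taps:

```python
import numpy as np

dcn_kernel, dcn_pad = 3, 1
base = np.arange(-dcn_pad, dcn_pad + 1)   # [-1, 0, 1]
dcn_base_y = np.repeat(base, dcn_kernel)  # [-1 -1 -1  0  0  0  1  1  1]
dcn_base_x = np.tile(base, dcn_kernel)    # [-1  0  1 -1  0  1 -1  0  1]
offsets = np.stack([dcn_base_y, dcn_base_x], 1).reshape(-1)
# (y, x) pairs: (-1,-1) (-1,0) (-1,1) (0,-1) ... (1,1), the 3x3 kernel taps
```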
+ dcn_base = np.arange(-self.dcn_pad, + self.dcn_pad + 1).astype(np.float64) + dcn_base_y = np.repeat(dcn_base, self.dcn_kernel) + dcn_base_x = np.tile(dcn_base, self.dcn_kernel) + dcn_base_offset = np.stack([dcn_base_y, dcn_base_x], axis=1).reshape( + (-1)) + self.dcn_base_offset = torch.tensor(dcn_base_offset).view(1, -1, 1, 1) + + super().__init__( + num_classes, + in_channels, + loss_cls=loss_cls, + init_cfg=init_cfg, + **kwargs) + + self.gradient_mul = gradient_mul + self.point_base_scale = point_base_scale + self.point_strides = point_strides + self.point_generator = MlvlPointGenerator( + self.point_strides, offset=0.) + + self.sampling = loss_cls['type'] not in ['FocalLoss'] + if self.train_cfg: + self.init_assigner = build_assigner(self.train_cfg.init.assigner) + self.refine_assigner = build_assigner( + self.train_cfg.refine.assigner) + # use PseudoSampler when sampling is False + if self.sampling and hasattr(self.train_cfg, 'sampler'): + sampler_cfg = self.train_cfg.sampler + else: + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + self.transform_method = transform_method + if self.transform_method == 'moment': + self.moment_transfer = nn.Parameter( + data=torch.zeros(2), requires_grad=True) + self.moment_mul = moment_mul + + self.use_sigmoid_cls = loss_cls.get('use_sigmoid', False) + if self.use_sigmoid_cls: + self.cls_out_channels = self.num_classes + else: + self.cls_out_channels = self.num_classes + 1 + self.loss_bbox_init = build_loss(loss_bbox_init) + self.loss_bbox_refine = build_loss(loss_bbox_refine) + + def _init_layers(self): + """Initialize layers of the head.""" + self.relu = nn.ReLU(inplace=True) + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + self.cls_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.reg_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + pts_out_dim = 4 if self.use_grid_points else 2 * self.num_points + self.reppoints_cls_conv = DeformConv2d(self.feat_channels, + self.point_feat_channels, + self.dcn_kernel, 1, + self.dcn_pad) + self.reppoints_cls_out = nn.Conv2d(self.point_feat_channels, + self.cls_out_channels, 1, 1, 0) + self.reppoints_pts_init_conv = nn.Conv2d(self.feat_channels, + self.point_feat_channels, 3, + 1, 1) + self.reppoints_pts_init_out = nn.Conv2d(self.point_feat_channels, + pts_out_dim, 1, 1, 0) + self.reppoints_pts_refine_conv = DeformConv2d(self.feat_channels, + self.point_feat_channels, + self.dcn_kernel, 1, + self.dcn_pad) + self.reppoints_pts_refine_out = nn.Conv2d(self.point_feat_channels, + pts_out_dim, 1, 1, 0) + + def points2bbox(self, pts, y_first=True): + """Converting the points set into bounding box. + + :param pts: the input points sets (fields), each points + set (fields) is represented as 2n scalar. + :param y_first: if y_first=True, the point set is represented as + [y1, x1, y2, x2 ... yn, xn], otherwise the point set is + represented as [x1, y1, x2, y2 ... xn, yn]. + :return: each points set is converting to a bbox [x1, y1, x2, y2]. + """ + pts_reshape = pts.view(pts.shape[0], -1, 2, *pts.shape[2:]) + pts_y = pts_reshape[:, :, 0, ...] if y_first else pts_reshape[:, :, 1, + ...] + pts_x = pts_reshape[:, :, 1, ...] if y_first else pts_reshape[:, :, 0, + ...] 
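Of the transform branches handled below, `'minmax'` is the simplest: the tightest axis-aligned box around the point set. A toy version with the spatial dims dropped:

```python
import torch

pts = torch.tensor([[0., 1., 4., 3., 2., 5.]])  # y1,x1,y2,x2,y3,x3 (y_first)
pts = pts.view(1, -1, 2)
pts_y, pts_x = pts[:, :, 0], pts[:, :, 1]
bbox = torch.stack([pts_x.min(1)[0], pts_y.min(1)[0],
                    pts_x.max(1)[0], pts_y.max(1)[0]], dim=1)
# -> [[1., 0., 5., 4.]]
```

The `'moment'` branch instead sizes the box from the points' mean and standard deviation, with a learned per-axis scale (`moment_transfer`).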
+ if self.transform_method == 'minmax': + bbox_left = pts_x.min(dim=1, keepdim=True)[0] + bbox_right = pts_x.max(dim=1, keepdim=True)[0] + bbox_up = pts_y.min(dim=1, keepdim=True)[0] + bbox_bottom = pts_y.max(dim=1, keepdim=True)[0] + bbox = torch.cat([bbox_left, bbox_up, bbox_right, bbox_bottom], + dim=1) + elif self.transform_method == 'partial_minmax': + pts_y = pts_y[:, :4, ...] + pts_x = pts_x[:, :4, ...] + bbox_left = pts_x.min(dim=1, keepdim=True)[0] + bbox_right = pts_x.max(dim=1, keepdim=True)[0] + bbox_up = pts_y.min(dim=1, keepdim=True)[0] + bbox_bottom = pts_y.max(dim=1, keepdim=True)[0] + bbox = torch.cat([bbox_left, bbox_up, bbox_right, bbox_bottom], + dim=1) + elif self.transform_method == 'moment': + pts_y_mean = pts_y.mean(dim=1, keepdim=True) + pts_x_mean = pts_x.mean(dim=1, keepdim=True) + pts_y_std = torch.std(pts_y - pts_y_mean, dim=1, keepdim=True) + pts_x_std = torch.std(pts_x - pts_x_mean, dim=1, keepdim=True) + moment_transfer = (self.moment_transfer * self.moment_mul) + ( + self.moment_transfer.detach() * (1 - self.moment_mul)) + moment_width_transfer = moment_transfer[0] + moment_height_transfer = moment_transfer[1] + half_width = pts_x_std * torch.exp(moment_width_transfer) + half_height = pts_y_std * torch.exp(moment_height_transfer) + bbox = torch.cat([ + pts_x_mean - half_width, pts_y_mean - half_height, + pts_x_mean + half_width, pts_y_mean + half_height + ], + dim=1) + else: + raise NotImplementedError + return bbox + + def gen_grid_from_reg(self, reg, previous_boxes): + """Base on the previous bboxes and regression values, we compute the + regressed bboxes and generate the grids on the bboxes. + + :param reg: the regression value to previous bboxes. + :param previous_boxes: previous bboxes. + :return: generate grids on the regressed bboxes. + """ + b, _, h, w = reg.shape + bxy = (previous_boxes[:, :2, ...] + previous_boxes[:, 2:, ...]) / 2. + bwh = (previous_boxes[:, 2:, ...] - + previous_boxes[:, :2, ...]).clamp(min=1e-6) + grid_topleft = bxy + bwh * reg[:, :2, ...] - 0.5 * bwh * torch.exp( + reg[:, 2:, ...]) + grid_wh = bwh * torch.exp(reg[:, 2:, ...]) + grid_left = grid_topleft[:, [0], ...] + grid_top = grid_topleft[:, [1], ...] + grid_width = grid_wh[:, [0], ...] + grid_height = grid_wh[:, [1], ...] + intervel = torch.linspace(0., 1., self.dcn_kernel).view( + 1, self.dcn_kernel, 1, 1).type_as(reg) + grid_x = grid_left + grid_width * intervel + grid_x = grid_x.unsqueeze(1).repeat(1, self.dcn_kernel, 1, 1, 1) + grid_x = grid_x.view(b, -1, h, w) + grid_y = grid_top + grid_height * intervel + grid_y = grid_y.unsqueeze(2).repeat(1, 1, self.dcn_kernel, 1, 1) + grid_y = grid_y.view(b, -1, h, w) + grid_yx = torch.stack([grid_y, grid_x], dim=2) + grid_yx = grid_yx.view(b, -1, h, w) + regressed_bbox = torch.cat([ + grid_left, grid_top, grid_left + grid_width, grid_top + grid_height + ], 1) + return grid_yx, regressed_bbox + + def forward(self, feats): + return multi_apply(self.forward_single, feats) + + def forward_single(self, x): + """Forward feature map of a single FPN level.""" + dcn_base_offset = self.dcn_base_offset.type_as(x) + # If we use center_init, the initial reppoints is from center points. + # If we use bounding bbox representation, the initial reppoints is + # from regular grid placed on a pre-defined bbox. 
+ if self.use_grid_points or not self.center_init: + scale = self.point_base_scale / 2 + points_init = dcn_base_offset / dcn_base_offset.max() * scale + bbox_init = x.new_tensor([-scale, -scale, scale, + scale]).view(1, 4, 1, 1) + else: + points_init = 0 + cls_feat = x + pts_feat = x + for cls_conv in self.cls_convs: + cls_feat = cls_conv(cls_feat) + for reg_conv in self.reg_convs: + pts_feat = reg_conv(pts_feat) + # initialize reppoints + pts_out_init = self.reppoints_pts_init_out( + self.relu(self.reppoints_pts_init_conv(pts_feat))) + if self.use_grid_points: + pts_out_init, bbox_out_init = self.gen_grid_from_reg( + pts_out_init, bbox_init.detach()) + else: + pts_out_init = pts_out_init + points_init + # refine and classify reppoints + pts_out_init_grad_mul = (1 - self.gradient_mul) * pts_out_init.detach( + ) + self.gradient_mul * pts_out_init + dcn_offset = pts_out_init_grad_mul - dcn_base_offset + cls_out = self.reppoints_cls_out( + self.relu(self.reppoints_cls_conv(cls_feat, dcn_offset))) + pts_out_refine = self.reppoints_pts_refine_out( + self.relu(self.reppoints_pts_refine_conv(pts_feat, dcn_offset))) + if self.use_grid_points: + pts_out_refine, bbox_out_refine = self.gen_grid_from_reg( + pts_out_refine, bbox_out_init.detach()) + else: + pts_out_refine = pts_out_refine + pts_out_init.detach() + return cls_out, pts_out_init, pts_out_refine + + def get_points(self, featmap_sizes, img_metas, device): + """Get points according to feature map sizes. + + Args: + featmap_sizes (list[tuple]): Multi-level feature map sizes. + img_metas (list[dict]): Image meta info. + + Returns: + tuple: points of each image, valid flags of each image + """ + num_imgs = len(img_metas) + + # since feature map sizes of all images are the same, we only compute + # points center for one time + multi_level_points = self.point_generator.grid_priors( + featmap_sizes, device, with_stride=True) + points_list = [[point.clone() for point in multi_level_points] + for _ in range(num_imgs)] + + # for each image, we compute valid flags of multi level grids + valid_flag_list = [] + for img_id, img_meta in enumerate(img_metas): + multi_level_flags = self.point_generator.valid_flags( + featmap_sizes, img_meta['pad_shape']) + valid_flag_list.append(multi_level_flags) + + return points_list, valid_flag_list + + def centers_to_bboxes(self, point_list): + """Get bboxes according to center points. + + Only used in :class:`MaxIoUAssigner`. 
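The loop below places a square pseudo box of side `point_base_scale * stride` around each point centre. The arithmetic on one point:

```python
import torch

# With point_base_scale=4 and stride=8, the half-side is 4 * 8 * 0.5 = 16.
point = torch.tensor([[100., 60.]])
scale = 4 * 8 * 0.5
shift = torch.tensor([[-scale, -scale, scale, scale]])
bbox = torch.cat([point, point], dim=1) + shift  # -> [[84., 44., 116., 76.]]
```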
+ """ + bbox_list = [] + for i_img, point in enumerate(point_list): + bbox = [] + for i_lvl in range(len(self.point_strides)): + scale = self.point_base_scale * self.point_strides[i_lvl] * 0.5 + bbox_shift = torch.Tensor([-scale, -scale, scale, + scale]).view(1, 4).type_as(point[0]) + bbox_center = torch.cat( + [point[i_lvl][:, :2], point[i_lvl][:, :2]], dim=1) + bbox.append(bbox_center + bbox_shift) + bbox_list.append(bbox) + return bbox_list + + def offset_to_pts(self, center_list, pred_list): + """Change from point offset to point coordinate.""" + pts_list = [] + for i_lvl in range(len(self.point_strides)): + pts_lvl = [] + for i_img in range(len(center_list)): + pts_center = center_list[i_img][i_lvl][:, :2].repeat( + 1, self.num_points) + pts_shift = pred_list[i_lvl][i_img] + yx_pts_shift = pts_shift.permute(1, 2, 0).view( + -1, 2 * self.num_points) + y_pts_shift = yx_pts_shift[..., 0::2] + x_pts_shift = yx_pts_shift[..., 1::2] + xy_pts_shift = torch.stack([x_pts_shift, y_pts_shift], -1) + xy_pts_shift = xy_pts_shift.view(*yx_pts_shift.shape[:-1], -1) + pts = xy_pts_shift * self.point_strides[i_lvl] + pts_center + pts_lvl.append(pts) + pts_lvl = torch.stack(pts_lvl, 0) + pts_list.append(pts_lvl) + return pts_list + + def _point_target_single(self, + flat_proposals, + valid_flags, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + stage='init', + unmap_outputs=True): + inside_flags = valid_flags + if not inside_flags.any(): + return (None, ) * 7 + # assign gt and sample proposals + proposals = flat_proposals[inside_flags, :] + + if stage == 'init': + assigner = self.init_assigner + pos_weight = self.train_cfg.init.pos_weight + else: + assigner = self.refine_assigner + pos_weight = self.train_cfg.refine.pos_weight + assign_result = assigner.assign(proposals, gt_bboxes, gt_bboxes_ignore, + None if self.sampling else gt_labels) + sampling_result = self.sampler.sample(assign_result, proposals, + gt_bboxes) + + num_valid_proposals = proposals.shape[0] + bbox_gt = proposals.new_zeros([num_valid_proposals, 4]) + pos_proposals = torch.zeros_like(proposals) + proposals_weights = proposals.new_zeros([num_valid_proposals, 4]) + labels = proposals.new_full((num_valid_proposals, ), + self.num_classes, + dtype=torch.long) + label_weights = proposals.new_zeros( + num_valid_proposals, dtype=torch.float) + + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + if len(pos_inds) > 0: + pos_gt_bboxes = sampling_result.pos_gt_bboxes + bbox_gt[pos_inds, :] = pos_gt_bboxes + pos_proposals[pos_inds, :] = proposals[pos_inds, :] + proposals_weights[pos_inds, :] = 1.0 + if gt_labels is None: + # Only rpn gives gt_labels as None + # Foreground is the first class + labels[pos_inds] = 0 + else: + labels[pos_inds] = gt_labels[ + sampling_result.pos_assigned_gt_inds] + if pos_weight <= 0: + label_weights[pos_inds] = 1.0 + else: + label_weights[pos_inds] = pos_weight + if len(neg_inds) > 0: + label_weights[neg_inds] = 1.0 + + # map up to original set of proposals + if unmap_outputs: + num_total_proposals = flat_proposals.size(0) + labels = unmap(labels, num_total_proposals, inside_flags) + label_weights = unmap(label_weights, num_total_proposals, + inside_flags) + bbox_gt = unmap(bbox_gt, num_total_proposals, inside_flags) + pos_proposals = unmap(pos_proposals, num_total_proposals, + inside_flags) + proposals_weights = unmap(proposals_weights, num_total_proposals, + inside_flags) + + return (labels, label_weights, bbox_gt, pos_proposals, + proposals_weights, pos_inds, neg_inds) + + def 
get_targets(self, + proposals_list, + valid_flag_list, + gt_bboxes_list, + img_metas, + gt_bboxes_ignore_list=None, + gt_labels_list=None, + stage='init', + label_channels=1, + unmap_outputs=True): + """Compute corresponding GT box and classification targets for + proposals. + + Args: + proposals_list (list[list]): Multi level points/bboxes of each + image. + valid_flag_list (list[list]): Multi level valid flags of each + image. + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image. + img_metas (list[dict]): Meta info of each image. + gt_bboxes_ignore_list (list[Tensor]): Ground truth bboxes to be + ignored. + gt_labels_list (list[Tensor]): Ground truth labels of each box. + stage (str): `init` or `refine`. Generate target for init stage or + refine stage. + label_channels (int): Channel of label. + unmap_outputs (bool): Whether to map outputs back to the original + set of anchors. + + Returns: + tuple: + - labels_list (list[Tensor]): Labels of each level. + - label_weights_list (list[Tensor]): Label weights of each level. # noqa: E501 + - bbox_gt_list (list[Tensor]): Ground truth bbox of each level. + - proposal_list (list[Tensor]): Proposals(points/bboxes) of each level. # noqa: E501 + - proposal_weights_list (list[Tensor]): Proposal weights of each level. # noqa: E501 + - num_total_pos (int): Number of positive samples in all images. # noqa: E501 + - num_total_neg (int): Number of negative samples in all images. # noqa: E501 + """ + assert stage in ['init', 'refine'] + num_imgs = len(img_metas) + assert len(proposals_list) == len(valid_flag_list) == num_imgs + + # points number of multi levels + num_level_proposals = [points.size(0) for points in proposals_list[0]] + + # concat all level points and flags to a single tensor + for i in range(num_imgs): + assert len(proposals_list[i]) == len(valid_flag_list[i]) + proposals_list[i] = torch.cat(proposals_list[i]) + valid_flag_list[i] = torch.cat(valid_flag_list[i]) + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + if gt_labels_list is None: + gt_labels_list = [None for _ in range(num_imgs)] + (all_labels, all_label_weights, all_bbox_gt, all_proposals, + all_proposal_weights, pos_inds_list, neg_inds_list) = multi_apply( + self._point_target_single, + proposals_list, + valid_flag_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + stage=stage, + unmap_outputs=unmap_outputs) + # no valid points + if any([labels is None for labels in all_labels]): + return None + # sampled points of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + labels_list = images_to_levels(all_labels, num_level_proposals) + label_weights_list = images_to_levels(all_label_weights, + num_level_proposals) + bbox_gt_list = images_to_levels(all_bbox_gt, num_level_proposals) + proposals_list = images_to_levels(all_proposals, num_level_proposals) + proposal_weights_list = images_to_levels(all_proposal_weights, + num_level_proposals) + return (labels_list, label_weights_list, bbox_gt_list, proposals_list, + proposal_weights_list, num_total_pos, num_total_neg) + + def loss_single(self, cls_score, pts_pred_init, pts_pred_refine, labels, + label_weights, bbox_gt_init, bbox_weights_init, + bbox_gt_refine, bbox_weights_refine, stride, + num_total_samples_init, num_total_samples_refine): + # classification loss + labels = labels.reshape(-1) + label_weights =
label_weights.reshape(-1) + cls_score = cls_score.permute(0, 2, 3, + 1).reshape(-1, self.cls_out_channels) + cls_score = cls_score.contiguous() + loss_cls = self.loss_cls( + cls_score, + labels, + label_weights, + avg_factor=num_total_samples_refine) + + # points loss + bbox_gt_init = bbox_gt_init.reshape(-1, 4) + bbox_weights_init = bbox_weights_init.reshape(-1, 4) + bbox_pred_init = self.points2bbox( + pts_pred_init.reshape(-1, 2 * self.num_points), y_first=False) + bbox_gt_refine = bbox_gt_refine.reshape(-1, 4) + bbox_weights_refine = bbox_weights_refine.reshape(-1, 4) + bbox_pred_refine = self.points2bbox( + pts_pred_refine.reshape(-1, 2 * self.num_points), y_first=False) + normalize_term = self.point_base_scale * stride + loss_pts_init = self.loss_bbox_init( + bbox_pred_init / normalize_term, + bbox_gt_init / normalize_term, + bbox_weights_init, + avg_factor=num_total_samples_init) + loss_pts_refine = self.loss_bbox_refine( + bbox_pred_refine / normalize_term, + bbox_gt_refine / normalize_term, + bbox_weights_refine, + avg_factor=num_total_samples_refine) + return loss_cls, loss_pts_init, loss_pts_refine + + def loss(self, + cls_scores, + pts_preds_init, + pts_preds_refine, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + device = cls_scores[0].device + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + + # target for initial stage + center_list, valid_flag_list = self.get_points(featmap_sizes, + img_metas, device) + pts_coordinate_preds_init = self.offset_to_pts(center_list, + pts_preds_init) + if self.train_cfg.init.assigner['type'] == 'PointAssigner': + # Assign target for center list + candidate_list = center_list + else: + # transform center list to bbox list and + # assign target for bbox list + bbox_list = self.centers_to_bboxes(center_list) + candidate_list = bbox_list + cls_reg_targets_init = self.get_targets( + candidate_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + stage='init', + label_channels=label_channels) + (*_, bbox_gt_list_init, candidate_list_init, bbox_weights_list_init, + num_total_pos_init, num_total_neg_init) = cls_reg_targets_init + num_total_samples_init = ( + num_total_pos_init + + num_total_neg_init if self.sampling else num_total_pos_init) + + # target for refinement stage + center_list, valid_flag_list = self.get_points(featmap_sizes, + img_metas, device) + pts_coordinate_preds_refine = self.offset_to_pts( + center_list, pts_preds_refine) + bbox_list = [] + for i_img, center in enumerate(center_list): + bbox = [] + for i_lvl in range(len(pts_preds_refine)): + bbox_preds_init = self.points2bbox( + pts_preds_init[i_lvl].detach()) + bbox_shift = bbox_preds_init * self.point_strides[i_lvl] + bbox_center = torch.cat( + [center[i_lvl][:, :2], center[i_lvl][:, :2]], dim=1) + bbox.append(bbox_center + + bbox_shift[i_img].permute(1, 2, 0).reshape(-1, 4)) + bbox_list.append(bbox) + cls_reg_targets_refine = self.get_targets( + bbox_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + stage='refine', + label_channels=label_channels) + (labels_list, label_weights_list, bbox_gt_list_refine, + candidate_list_refine, bbox_weights_list_refine, num_total_pos_refine, + num_total_neg_refine) = cls_reg_targets_refine + num_total_samples_refine = ( + num_total_pos_refine + + num_total_neg_refine if self.sampling else 
num_total_pos_refine) + + # compute loss + losses_cls, losses_pts_init, losses_pts_refine = multi_apply( + self.loss_single, + cls_scores, + pts_coordinate_preds_init, + pts_coordinate_preds_refine, + labels_list, + label_weights_list, + bbox_gt_list_init, + bbox_weights_list_init, + bbox_gt_list_refine, + bbox_weights_list_refine, + self.point_strides, + num_total_samples_init=num_total_samples_init, + num_total_samples_refine=num_total_samples_refine) + loss_dict_all = { + 'loss_cls': losses_cls, + 'loss_pts_init': losses_pts_init, + 'loss_pts_refine': losses_pts_refine + } + return loss_dict_all + + def get_bboxes(self, + cls_scores, + pts_preds_init, + pts_preds_refine, + img_metas, + cfg=None, + rescale=False, + with_nms=True): + assert len(cls_scores) == len(pts_preds_refine) + device = cls_scores[0].device + bbox_preds_refine = [ + self.points2bbox(pts_pred_refine) + for pts_pred_refine in pts_preds_refine + ] + num_levels = len(cls_scores) + featmap_sizes = [ + cls_scores[i].size()[-2:] for i in range(len(cls_scores)) + ] + multi_level_points = self.point_generator.grid_priors( + featmap_sizes, device) + + result_list = [] + for img_id in range(len(img_metas)): + cls_score_list = [cls_scores[i][img_id] for i in range(num_levels)] + bbox_pred_list = [ + bbox_preds_refine[i][img_id] for i in range(num_levels) + ] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + proposals = self._get_bboxes_single(cls_score_list, bbox_pred_list, + multi_level_points, img_shape, + scale_factor, cfg, rescale, + with_nms) + result_list.append(proposals) + return result_list + + def _get_bboxes_single(self, + cls_scores, + bbox_preds, + mlvl_points, + img_shape, + scale_factor, + cfg, + rescale=False, + with_nms=True): + cfg = self.test_cfg if cfg is None else cfg + assert len(cls_scores) == len(bbox_preds) == len(mlvl_points) + mlvl_bboxes = [] + mlvl_scores = [] + for i_lvl, (cls_score, bbox_pred, points) in enumerate( + zip(cls_scores, bbox_preds, mlvl_points)): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + cls_score = cls_score.permute(1, 2, + 0).reshape(-1, self.cls_out_channels) + if self.use_sigmoid_cls: + scores = cls_score.sigmoid() + else: + scores = cls_score.softmax(-1) + bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4) + nms_pre = cfg.get('nms_pre', -1) + if nms_pre > 0 and scores.shape[0] > nms_pre: + if self.use_sigmoid_cls: + max_scores, _ = scores.max(dim=1) + else: + # remind that we set FG labels to [0, num_class-1] + # since mmdet v2.0 + # BG cat_id: num_class + max_scores, _ = scores[:, :-1].max(dim=1) + _, topk_inds = max_scores.topk(nms_pre) + points = points[topk_inds, :] + bbox_pred = bbox_pred[topk_inds, :] + scores = scores[topk_inds, :] + bbox_pos_center = torch.cat([points[:, :2], points[:, :2]], dim=1) + bboxes = bbox_pred * self.point_strides[i_lvl] + bbox_pos_center + x1 = bboxes[:, 0].clamp(min=0, max=img_shape[1]) + y1 = bboxes[:, 1].clamp(min=0, max=img_shape[0]) + x2 = bboxes[:, 2].clamp(min=0, max=img_shape[1]) + y2 = bboxes[:, 3].clamp(min=0, max=img_shape[0]) + bboxes = torch.stack([x1, y1, x2, y2], dim=-1) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + mlvl_bboxes = torch.cat(mlvl_bboxes) + if rescale: + mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor) + mlvl_scores = torch.cat(mlvl_scores) + if self.use_sigmoid_cls: + # Add a dummy background class to the backend when using sigmoid + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + padding 
= mlvl_scores.new_zeros(mlvl_scores.shape[0], 1) + mlvl_scores = torch.cat([mlvl_scores, padding], dim=1) + if with_nms: + det_bboxes, det_labels = multiclass_nms(mlvl_bboxes, mlvl_scores, + cfg.score_thr, cfg.nms, + cfg.max_per_img) + return det_bboxes, det_labels + else: + return mlvl_bboxes, mlvl_scores diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/retina_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/retina_head.py new file mode 100644 index 0000000000000000000000000000000000000000..698aec50bdc0d1353c34b49859c533590ab37c68 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/retina_head.py @@ -0,0 +1,114 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule + +from ..builder import HEADS +from .anchor_head import AnchorHead + + +@HEADS.register_module() +class RetinaHead(AnchorHead): + r"""An anchor-based head used in `RetinaNet + <https://arxiv.org/abs/1708.02002>`_. + + The head contains two subnetworks. The first classifies anchor boxes and + the second regresses deltas for the anchors. + + Example: + >>> import torch + >>> self = RetinaHead(11, 7) + >>> x = torch.rand(1, 7, 32, 32) + >>> cls_score, bbox_pred = self.forward_single(x) + >>> # Each anchor predicts a score for each class except background + >>> cls_per_anchor = cls_score.shape[1] / self.num_anchors + >>> box_per_anchor = bbox_pred.shape[1] / self.num_anchors + >>> assert cls_per_anchor == (self.num_classes) + >>> assert box_per_anchor == 4 + """ + + def __init__(self, + num_classes, + in_channels, + stacked_convs=4, + conv_cfg=None, + norm_cfg=None, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + init_cfg=dict( + type='Normal', + layer='Conv2d', + std=0.01, + override=dict( + type='Normal', + name='retina_cls', + std=0.01, + bias_prob=0.01)), + **kwargs): + self.stacked_convs = stacked_convs + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + super(RetinaHead, self).__init__( + num_classes, + in_channels, + anchor_generator=anchor_generator, + init_cfg=init_cfg, + **kwargs) + + def _init_layers(self): + """Initialize layers of the head.""" + self.relu = nn.ReLU(inplace=True) + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + self.cls_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.reg_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.retina_cls = nn.Conv2d( + self.feat_channels, + self.num_anchors * self.cls_out_channels, + 3, + padding=1) + self.retina_reg = nn.Conv2d( + self.feat_channels, self.num_anchors * 4, 3, padding=1) + + def forward_single(self, x): + """Forward feature of a single scale level. + + Args: + x (Tensor): Features of a single scale level. + + Returns: + tuple: + cls_score (Tensor): Cls scores for a single scale level, + the channels number is num_anchors * num_classes. + bbox_pred (Tensor): Box energies / deltas for a single scale + level, the channels number is num_anchors * 4.
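+ + Example (a minimal shape check mirroring the class example above; + assumes the default anchor generator, i.e. ``self.num_anchors == 9``): + + >>> import torch + >>> self = RetinaHead(11, 7) + >>> cls_score, bbox_pred = self.forward_single(torch.rand(1, 7, 32, 32)) + >>> assert cls_score.shape == (1, 9 * 11, 32, 32) + >>> assert bbox_pred.shape == (1, 9 * 4, 32, 32)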
+ """ + cls_feat = x + reg_feat = x + for cls_conv in self.cls_convs: + cls_feat = cls_conv(cls_feat) + for reg_conv in self.reg_convs: + reg_feat = reg_conv(reg_feat) + cls_score = self.retina_cls(cls_feat) + bbox_pred = self.retina_reg(reg_feat) + return cls_score, bbox_pred diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/retina_sepbn_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/retina_sepbn_head.py new file mode 100644 index 0000000000000000000000000000000000000000..6fda7fc82f5244a259579096d1eab6d3fdad9fd4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/retina_sepbn_head.py @@ -0,0 +1,117 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule, bias_init_with_prob, normal_init + +from ..builder import HEADS +from .anchor_head import AnchorHead + + +@HEADS.register_module() +class RetinaSepBNHead(AnchorHead): + """"RetinaHead with separate BN. + + In RetinaHead, conv/norm layers are shared across different FPN levels, + while in RetinaSepBNHead, conv layers are shared across different FPN + levels, but BN layers are separated. + """ + + def __init__(self, + num_classes, + num_ins, + in_channels, + stacked_convs=4, + conv_cfg=None, + norm_cfg=None, + init_cfg=None, + **kwargs): + assert init_cfg is None, 'To prevent abnormal initialization ' \ + 'behavior, init_cfg is not allowed to be set' + self.stacked_convs = stacked_convs + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.num_ins = num_ins + super(RetinaSepBNHead, self).__init__( + num_classes, in_channels, init_cfg=init_cfg, **kwargs) + + def _init_layers(self): + """Initialize layers of the head.""" + self.relu = nn.ReLU(inplace=True) + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for i in range(self.num_ins): + cls_convs = nn.ModuleList() + reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + cls_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + reg_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.cls_convs.append(cls_convs) + self.reg_convs.append(reg_convs) + for i in range(self.stacked_convs): + for j in range(1, self.num_ins): + self.cls_convs[j][i].conv = self.cls_convs[0][i].conv + self.reg_convs[j][i].conv = self.reg_convs[0][i].conv + self.retina_cls = nn.Conv2d( + self.feat_channels, + self.num_anchors * self.cls_out_channels, + 3, + padding=1) + self.retina_reg = nn.Conv2d( + self.feat_channels, self.num_anchors * 4, 3, padding=1) + + def init_weights(self): + """Initialize weights of the head.""" + super(RetinaSepBNHead, self).init_weights() + for m in self.cls_convs[0]: + normal_init(m.conv, std=0.01) + for m in self.reg_convs[0]: + normal_init(m.conv, std=0.01) + bias_cls = bias_init_with_prob(0.01) + normal_init(self.retina_cls, std=0.01, bias=bias_cls) + normal_init(self.retina_reg, std=0.01) + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple: Usually a tuple of classification scores and bbox prediction + cls_scores (list[Tensor]): Classification scores for all scale + levels, each is a 4D-tensor, the channels number is + num_anchors * num_classes. 
+ bbox_preds (list[Tensor]): Box energies / deltas for all scale + levels, each is a 4D-tensor, the channels number is + num_anchors * 4. + """ + cls_scores = [] + bbox_preds = [] + for i, x in enumerate(feats): + cls_feat = feats[i] + reg_feat = feats[i] + for cls_conv in self.cls_convs[i]: + cls_feat = cls_conv(cls_feat) + for reg_conv in self.reg_convs[i]: + reg_feat = reg_conv(reg_feat) + cls_score = self.retina_cls(cls_feat) + bbox_pred = self.retina_reg(reg_feat) + cls_scores.append(cls_score) + bbox_preds.append(bbox_pred) + return cls_scores, bbox_preds diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/rpn_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/rpn_head.py new file mode 100644 index 0000000000000000000000000000000000000000..bff7a2e22f03400f98c066151ca4b46f3a825b7e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/rpn_head.py @@ -0,0 +1,319 @@ +import copy + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.ops import batched_nms +from mmcv.runner import force_fp32 + +from ..builder import HEADS +from .anchor_head import AnchorHead + + +@HEADS.register_module() +class RPNHead(AnchorHead): + """RPN head. + + Args: + in_channels (int): Number of channels in the input feature map. + init_cfg (dict or list[dict], optional): Initialization config dict. + """ # noqa: W605 + + def __init__(self, + in_channels, + init_cfg=dict(type='Normal', layer='Conv2d', std=0.01), + **kwargs): + super(RPNHead, self).__init__( + 1, in_channels, init_cfg=init_cfg, **kwargs) + + def _init_layers(self): + """Initialize layers of the head.""" + self.rpn_conv = nn.Conv2d( + self.in_channels, self.feat_channels, 3, padding=1) + self.rpn_cls = nn.Conv2d(self.feat_channels, + self.num_anchors * self.cls_out_channels, 1) + self.rpn_reg = nn.Conv2d(self.feat_channels, self.num_anchors * 4, 1) + + def forward_single(self, x): + """Forward feature map of a single scale level.""" + x = self.rpn_conv(x) + x = F.relu(x, inplace=True) + rpn_cls_score = self.rpn_cls(x) + rpn_bbox_pred = self.rpn_reg(x) + return rpn_cls_score, rpn_bbox_pred + + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W) + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + losses = super(RPNHead, self).loss( + cls_scores, + bbox_preds, + gt_bboxes, + None, + img_metas, + gt_bboxes_ignore=gt_bboxes_ignore) + return dict( + loss_rpn_cls=losses['loss_cls'], loss_rpn_bbox=losses['loss_bbox']) + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def get_bboxes(self, + cls_scores, + bbox_preds, + img_metas, + cfg=None, + rescale=False, + with_nms=True): + """Transform network output for a batch into bbox predictions. 
+ + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W) + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before returning boxes. + Default: True. + + Returns: + list[Tensor]: Proposals of each image. Each item is an (n, 5) + tensor, where the first 4 columns are bounding box positions + (tl_x, tl_y, br_x, br_y) and the 5-th column is a score + between 0 and 1. + """ + assert with_nms, '``with_nms`` in RPNHead should always be True' + assert len(cls_scores) == len(bbox_preds) + num_levels = len(cls_scores) + device = cls_scores[0].device + featmap_sizes = [cls_scores[i].shape[-2:] for i in range(num_levels)] + mlvl_anchors = self.anchor_generator.grid_anchors( + featmap_sizes, device=device) + + result_list = [] + for img_id in range(len(img_metas)): + cls_score_list = [ + cls_scores[i][img_id].detach() for i in range(num_levels) + ] + bbox_pred_list = [ + bbox_preds[i][img_id].detach() for i in range(num_levels) + ] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + proposals = self._get_bboxes_single(cls_score_list, bbox_pred_list, + mlvl_anchors, img_shape, + scale_factor, cfg, rescale) + result_list.append(proposals) + return result_list + + def _get_bboxes_single(self, + cls_scores, + bbox_preds, + mlvl_anchors, + img_shape, + scale_factor, + cfg, + rescale=False): + """Transform outputs for a single batch item into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box scores of all scale level + each item has shape (num_anchors * num_classes, H, W). + bbox_preds (list[Tensor]): Box energies / deltas of all + scale level, each item has shape (num_anchors * 4, H, W). + mlvl_anchors (list[Tensor]): Anchors of all scale level + each item has shape (num_total_anchors, 4). + img_shape (tuple[int]): Shape of the input image, + (height, width, 3). + scale_factor (ndarray): Scale factor of the image arranged as + (w_scale, h_scale, w_scale, h_scale). + cfg (mmcv.Config): Test / postprocessing configuration, + if None, test_cfg would be used. + rescale (bool): If True, return boxes in original image space. + Default: False. + + Returns: + Tensor: Proposals in shape (n, 5), where the first 4 columns + are bounding box positions (tl_x, tl_y, br_x, br_y) and the + 5-th column is a score between 0 and 1.
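+ + Example (an illustrative sketch of the level-id trick used below: + identical boxes from different FPN levels are both kept because + ``batched_nms`` runs NMS independently per id; toy values): + + >>> import torch + >>> from mmcv.ops import batched_nms + >>> boxes = torch.tensor([[0., 0., 10., 10.], [0., 0., 10., 10.]]) + >>> scores = torch.tensor([0.9, 0.8]) + >>> level_ids = torch.tensor([0, 1]) + >>> dets, keep = batched_nms(boxes, scores, level_ids, + ...                          dict(type='nms', iou_threshold=0.7)) + >>> assert keep.numel() == 2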
+ """ + cfg = self.test_cfg if cfg is None else cfg + cfg = copy.deepcopy(cfg) + # bboxes from different level should be independent during NMS, + # level_ids are used as labels for batched NMS to separate them + level_ids = [] + mlvl_scores = [] + mlvl_bbox_preds = [] + mlvl_valid_anchors = [] + for idx in range(len(cls_scores)): + rpn_cls_score = cls_scores[idx] + rpn_bbox_pred = bbox_preds[idx] + assert rpn_cls_score.size()[-2:] == rpn_bbox_pred.size()[-2:] + rpn_cls_score = rpn_cls_score.permute(1, 2, 0) + if self.use_sigmoid_cls: + rpn_cls_score = rpn_cls_score.reshape(-1) + scores = rpn_cls_score.sigmoid() + else: + rpn_cls_score = rpn_cls_score.reshape(-1, 2) + # We set FG labels to [0, num_class-1] and BG label to + # num_class in RPN head since mmdet v2.5, which is unified to + # be consistent with other head since mmdet v2.0. In mmdet v2.0 + # to v2.4 we keep BG label as 0 and FG label as 1 in rpn head. + scores = rpn_cls_score.softmax(dim=1)[:, 0] + rpn_bbox_pred = rpn_bbox_pred.permute(1, 2, 0).reshape(-1, 4) + anchors = mlvl_anchors[idx] + if cfg.nms_pre > 0 and scores.shape[0] > cfg.nms_pre: + # sort is faster than topk + # _, topk_inds = scores.topk(cfg.nms_pre) + ranked_scores, rank_inds = scores.sort(descending=True) + topk_inds = rank_inds[:cfg.nms_pre] + scores = ranked_scores[:cfg.nms_pre] + rpn_bbox_pred = rpn_bbox_pred[topk_inds, :] + anchors = anchors[topk_inds, :] + mlvl_scores.append(scores) + mlvl_bbox_preds.append(rpn_bbox_pred) + mlvl_valid_anchors.append(anchors) + level_ids.append( + scores.new_full((scores.size(0), ), idx, dtype=torch.long)) + + scores = torch.cat(mlvl_scores) + anchors = torch.cat(mlvl_valid_anchors) + rpn_bbox_pred = torch.cat(mlvl_bbox_preds) + proposals = self.bbox_coder.decode( + anchors, rpn_bbox_pred, max_shape=img_shape) + ids = torch.cat(level_ids) + + if cfg.min_bbox_size > 0: + w = proposals[:, 2] - proposals[:, 0] + h = proposals[:, 3] - proposals[:, 1] + valid_mask = (w >= cfg.min_bbox_size) & (h >= cfg.min_bbox_size) + if not valid_mask.all(): + proposals = proposals[valid_mask] + scores = scores[valid_mask] + ids = ids[valid_mask] + if proposals.numel() > 0: + dets, keep = batched_nms(proposals, scores, ids, cfg.nms) + else: + return proposals.new_zeros(0, 5) + + return dets[:cfg.max_per_img] + + def onnx_export(self, x, img_metas): + """Test without augmentation. + + Args: + x (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + img_metas (list[dict]): Meta info of each image. + + Returns: + tuple[Tensor, Tensor]: dets of shape [N, num_det, 5] + and class labels of shape [N, num_det]. 
+ """ + cls_scores, bbox_preds = self(x) + + assert len(cls_scores) == len(bbox_preds) + num_levels = len(cls_scores) + + device = cls_scores[0].device + featmap_sizes = [cls_scores[i].shape[-2:] for i in range(num_levels)] + mlvl_anchors = self.anchor_generator.grid_anchors( + featmap_sizes, device=device) + + cls_scores = [cls_scores[i].detach() for i in range(num_levels)] + bbox_preds = [bbox_preds[i].detach() for i in range(num_levels)] + + assert len( + img_metas + ) == 1, 'Only support one input image while in exporting to ONNX' + img_shapes = img_metas[0]['img_shape_for_onnx'] + + cfg = copy.deepcopy(self.test_cfg) + + mlvl_scores = [] + mlvl_bbox_preds = [] + mlvl_valid_anchors = [] + batch_size = cls_scores[0].shape[0] + nms_pre_tensor = torch.tensor( + cfg.nms_pre, device=cls_scores[0].device, dtype=torch.long) + for idx in range(len(cls_scores)): + rpn_cls_score = cls_scores[idx] + rpn_bbox_pred = bbox_preds[idx] + assert rpn_cls_score.size()[-2:] == rpn_bbox_pred.size()[-2:] + rpn_cls_score = rpn_cls_score.permute(0, 2, 3, 1) + if self.use_sigmoid_cls: + rpn_cls_score = rpn_cls_score.reshape(batch_size, -1) + scores = rpn_cls_score.sigmoid() + else: + rpn_cls_score = rpn_cls_score.reshape(batch_size, -1, 2) + # We set FG labels to [0, num_class-1] and BG label to + # num_class in RPN head since mmdet v2.5, which is unified to + # be consistent with other head since mmdet v2.0. In mmdet v2.0 + # to v2.4 we keep BG label as 0 and FG label as 1 in rpn head. + scores = rpn_cls_score.softmax(-1)[..., 0] + rpn_bbox_pred = rpn_bbox_pred.permute(0, 2, 3, 1).reshape( + batch_size, -1, 4) + anchors = mlvl_anchors[idx] + anchors = anchors.expand_as(rpn_bbox_pred) + # Get top-k prediction + from mmdet.core.export import get_k_for_topk + nms_pre = get_k_for_topk(nms_pre_tensor, rpn_bbox_pred.shape[1]) + if nms_pre > 0: + _, topk_inds = scores.topk(nms_pre) + batch_inds = torch.arange(batch_size).view( + -1, 1).expand_as(topk_inds) + # Avoid onnx2tensorrt issue in https://github.com/NVIDIA/TensorRT/issues/1134 # noqa: E501 + # Mind k<=3480 in TensorRT for TopK + transformed_inds = scores.shape[1] * batch_inds + topk_inds + scores = scores.reshape(-1, 1)[transformed_inds].reshape( + batch_size, -1) + rpn_bbox_pred = rpn_bbox_pred.reshape( + -1, 4)[transformed_inds, :].reshape(batch_size, -1, 4) + anchors = anchors.reshape(-1, 4)[transformed_inds, :].reshape( + batch_size, -1, 4) + mlvl_scores.append(scores) + mlvl_bbox_preds.append(rpn_bbox_pred) + mlvl_valid_anchors.append(anchors) + + batch_mlvl_scores = torch.cat(mlvl_scores, dim=1) + batch_mlvl_anchors = torch.cat(mlvl_valid_anchors, dim=1) + batch_mlvl_rpn_bbox_pred = torch.cat(mlvl_bbox_preds, dim=1) + batch_mlvl_proposals = self.bbox_coder.decode( + batch_mlvl_anchors, batch_mlvl_rpn_bbox_pred, max_shape=img_shapes) + + # Use ONNX::NonMaxSuppression in deployment + from mmdet.core.export import add_dummy_nms_for_onnx + batch_mlvl_scores = batch_mlvl_scores.unsqueeze(2) + score_threshold = cfg.nms.get('score_thr', 0.0) + nms_pre = cfg.get('deploy_nms_pre', -1) + dets, _ = add_dummy_nms_for_onnx(batch_mlvl_proposals, + batch_mlvl_scores, cfg.max_per_img, + cfg.nms.iou_threshold, + score_threshold, nms_pre, + cfg.max_per_img) + return dets diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/sabl_retina_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/sabl_retina_head.py new file mode 100644 index 0000000000000000000000000000000000000000..6f91c82ba61b3095a42ab74a6435ee08c80ec1ae 
--- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/sabl_retina_head.py @@ -0,0 +1,622 @@ +import numpy as np +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule +from mmcv.runner import force_fp32 + +from mmdet.core import (build_anchor_generator, build_assigner, + build_bbox_coder, build_sampler, images_to_levels, + multi_apply, multiclass_nms, unmap) +from ..builder import HEADS, build_loss +from .base_dense_head import BaseDenseHead +from .dense_test_mixins import BBoxTestMixin +from .guided_anchor_head import GuidedAnchorHead + + +@HEADS.register_module() +class SABLRetinaHead(BaseDenseHead, BBoxTestMixin): + """Side-Aware Boundary Localization (SABL) for RetinaNet. + + The anchor generation, assigning and sampling in SABLRetinaHead + are the same as GuidedAnchorHead for guided anchoring. + + Please refer to https://arxiv.org/abs/1912.04260 for more details. + + Args: + num_classes (int): Number of classes. + in_channels (int): Number of channels in the input feature map. + stacked_convs (int): Number of Convs for classification \ + and regression branches. Defaults to 4. + feat_channels (int): Number of hidden channels. \ + Defaults to 256. + approx_anchor_generator (dict): Config dict for approx generator. + square_anchor_generator (dict): Config dict for square generator. + conv_cfg (dict): Config dict for ConvModule. Defaults to None. + norm_cfg (dict): Config dict for Norm Layer. Defaults to None. + bbox_coder (dict): Config dict for bbox coder. + reg_decoded_bbox (bool): If true, the regression loss would be + applied directly on decoded bounding boxes, converting both + the predicted boxes and regression targets to absolute + coordinates format. Default False. It should be `True` when + using `IoULoss`, `GIoULoss`, or `DIoULoss` in the bbox head. + train_cfg (dict): Training config of SABLRetinaHead. + test_cfg (dict): Testing config of SABLRetinaHead. + loss_cls (dict): Config of classification loss. + loss_bbox_cls (dict): Config of classification loss for bbox branch. + loss_bbox_reg (dict): Config of regression loss for bbox branch. + init_cfg (dict or list[dict], optional): Initialization config dict. 
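+ + Example (a minimal sketch, assuming the default constructor configs + resolve through the mmdet registries; + ``side_num = ceil(num_buckets / 2) = 7``): + + >>> import torch + >>> self = SABLRetinaHead(11, 7) + >>> cls_score, bbox_pred = self.forward_single(torch.rand(1, 7, 32, 32)) + >>> bbox_cls_pred, bbox_reg_pred = bbox_pred + >>> assert cls_score.shape == (1, 11, 32, 32) + >>> assert bbox_cls_pred.shape == bbox_reg_pred.shape == (1, 7 * 4, 32, 32)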
+ """ + + def __init__(self, + num_classes, + in_channels, + stacked_convs=4, + feat_channels=256, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128]), + conv_cfg=None, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', + num_buckets=14, + scale_factor=3.0), + reg_decoded_bbox=False, + train_cfg=None, + test_cfg=None, + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.5), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.5), + init_cfg=dict( + type='Normal', + layer='Conv2d', + std=0.01, + override=dict( + type='Normal', + name='retina_cls', + std=0.01, + bias_prob=0.01))): + super(SABLRetinaHead, self).__init__(init_cfg) + self.in_channels = in_channels + self.num_classes = num_classes + self.feat_channels = feat_channels + self.num_buckets = bbox_coder['num_buckets'] + self.side_num = int(np.ceil(self.num_buckets / 2)) + + assert (approx_anchor_generator['octave_base_scale'] == + square_anchor_generator['scales'][0]) + assert (approx_anchor_generator['strides'] == + square_anchor_generator['strides']) + + self.approx_anchor_generator = build_anchor_generator( + approx_anchor_generator) + self.square_anchor_generator = build_anchor_generator( + square_anchor_generator) + self.approxs_per_octave = ( + self.approx_anchor_generator.num_base_anchors[0]) + + # one anchor per location + self.num_anchors = 1 + self.stacked_convs = stacked_convs + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + + self.reg_decoded_bbox = reg_decoded_bbox + + self.use_sigmoid_cls = loss_cls.get('use_sigmoid', False) + self.sampling = loss_cls['type'] not in [ + 'FocalLoss', 'GHMC', 'QualityFocalLoss' + ] + if self.use_sigmoid_cls: + self.cls_out_channels = num_classes + else: + self.cls_out_channels = num_classes + 1 + + self.bbox_coder = build_bbox_coder(bbox_coder) + self.loss_cls = build_loss(loss_cls) + self.loss_bbox_cls = build_loss(loss_bbox_cls) + self.loss_bbox_reg = build_loss(loss_bbox_reg) + + self.train_cfg = train_cfg + self.test_cfg = test_cfg + + if self.train_cfg: + self.assigner = build_assigner(self.train_cfg.assigner) + # use PseudoSampler when sampling is False + if self.sampling and hasattr(self.train_cfg, 'sampler'): + sampler_cfg = self.train_cfg.sampler + else: + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + + self.fp16_enabled = False + self._init_layers() + + def _init_layers(self): + self.relu = nn.ReLU(inplace=True) + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + for i in range(self.stacked_convs): + chn = self.in_channels if i == 0 else self.feat_channels + self.cls_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.reg_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.retina_cls = nn.Conv2d( + self.feat_channels, self.cls_out_channels, 3, padding=1) + self.retina_bbox_reg = nn.Conv2d( + self.feat_channels, self.side_num * 4, 3, padding=1) + self.retina_bbox_cls = nn.Conv2d( + self.feat_channels, self.side_num * 
4, 3, padding=1) + + def forward_single(self, x): + cls_feat = x + reg_feat = x + for cls_conv in self.cls_convs: + cls_feat = cls_conv(cls_feat) + for reg_conv in self.reg_convs: + reg_feat = reg_conv(reg_feat) + cls_score = self.retina_cls(cls_feat) + bbox_cls_pred = self.retina_bbox_cls(reg_feat) + bbox_reg_pred = self.retina_bbox_reg(reg_feat) + bbox_pred = (bbox_cls_pred, bbox_reg_pred) + return cls_score, bbox_pred + + def forward(self, feats): + return multi_apply(self.forward_single, feats) + + def get_anchors(self, featmap_sizes, img_metas, device='cuda'): + """Get squares according to feature map sizes and guided anchors. + + Args: + featmap_sizes (list[tuple]): Multi-level feature map sizes. + img_metas (list[dict]): Image meta info. + device (torch.device | str): device for returned tensors. + + Returns: + list[list[Tensor]]: square approxs of each image + """ + num_imgs = len(img_metas) + + # since feature map sizes of all images are the same, we only compute + # squares for one time + multi_level_squares = self.square_anchor_generator.grid_anchors( + featmap_sizes, device=device) + squares_list = [multi_level_squares for _ in range(num_imgs)] + + return squares_list + + def get_target(self, + approx_list, + inside_flag_list, + square_list, + gt_bboxes_list, + img_metas, + gt_bboxes_ignore_list=None, + gt_labels_list=None, + label_channels=None, + sampling=True, + unmap_outputs=True): + """Compute bucketing targets. + + Args: + approx_list (list[list]): Multi level approxs of each image. + inside_flag_list (list[list]): Multi level inside flags of each + image. + square_list (list[list]): Multi level squares of each image. + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image. + img_metas (list[dict]): Meta info of each image. + gt_bboxes_ignore_list (list[Tensor]): ignore list of gt bboxes. + gt_labels_list (list[Tensor]): Gt labels of each image. + label_channels (int): Channel of label. + sampling (bool): Sample anchors or not. + unmap_outputs (bool): Unmap outputs or not. + + Returns: + tuple: Returns a tuple containing learning targets. + + - labels_list (list[Tensor]): Labels of each level. + - label_weights_list (list[Tensor]): Label weights of each \ + level. + - bbox_cls_targets_list (list[Tensor]): BBox cls targets of \ + each level. + - bbox_cls_weights_list (list[Tensor]): BBox cls weights of \ + each level. + - bbox_reg_targets_list (list[Tensor]): BBox reg targets of \ + each level. + - bbox_reg_weights_list (list[Tensor]): BBox reg weights of \ + each level. + - num_total_pos (int): Number of positive samples in all \ + images. + - num_total_neg (int): Number of negative samples in all \ + images.
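+ + Example (an illustrative sketch of the per-level split performed by + ``images_to_levels``; toy numbers, 2 images with 4 + 2 anchors): + + >>> import torch + >>> from mmdet.core import images_to_levels + >>> per_image = [torch.arange(6), torch.arange(6)] + >>> levels = images_to_levels(per_image, [4, 2]) + >>> assert [lvl.shape for lvl in levels] == [(2, 4), (2, 2)]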
+ """ + num_imgs = len(img_metas) + assert len(approx_list) == len(inside_flag_list) == len( + square_list) == num_imgs + # anchor number of multi levels + num_level_squares = [squares.size(0) for squares in square_list[0]] + # concat all level anchors and flags to a single tensor + inside_flag_flat_list = [] + approx_flat_list = [] + square_flat_list = [] + for i in range(num_imgs): + assert len(square_list[i]) == len(inside_flag_list[i]) + inside_flag_flat_list.append(torch.cat(inside_flag_list[i])) + approx_flat_list.append(torch.cat(approx_list[i])) + square_flat_list.append(torch.cat(square_list[i])) + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + if gt_labels_list is None: + gt_labels_list = [None for _ in range(num_imgs)] + (all_labels, all_label_weights, all_bbox_cls_targets, + all_bbox_cls_weights, all_bbox_reg_targets, all_bbox_reg_weights, + pos_inds_list, neg_inds_list) = multi_apply( + self._get_target_single, + approx_flat_list, + inside_flag_flat_list, + square_flat_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + img_metas, + label_channels=label_channels, + sampling=sampling, + unmap_outputs=unmap_outputs) + # no valid anchors + if any([labels is None for labels in all_labels]): + return None + # sampled anchors of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + # split targets to a list w.r.t. multiple levels + labels_list = images_to_levels(all_labels, num_level_squares) + label_weights_list = images_to_levels(all_label_weights, + num_level_squares) + bbox_cls_targets_list = images_to_levels(all_bbox_cls_targets, + num_level_squares) + bbox_cls_weights_list = images_to_levels(all_bbox_cls_weights, + num_level_squares) + bbox_reg_targets_list = images_to_levels(all_bbox_reg_targets, + num_level_squares) + bbox_reg_weights_list = images_to_levels(all_bbox_reg_weights, + num_level_squares) + return (labels_list, label_weights_list, bbox_cls_targets_list, + bbox_cls_weights_list, bbox_reg_targets_list, + bbox_reg_weights_list, num_total_pos, num_total_neg) + + def _get_target_single(self, + flat_approxs, + inside_flags, + flat_squares, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + img_meta, + label_channels=None, + sampling=True, + unmap_outputs=True): + """Compute regression and classification targets for anchors in a + single image. + + Args: + flat_approxs (Tensor): flat approxs of a single image, + shape (n, 4) + inside_flags (Tensor): inside flags of a single image, + shape (n, ). + flat_squares (Tensor): flat squares of a single image, + shape (approxs_per_octave * n, 4) + gt_bboxes (Tensor): Ground truth bboxes of a single image, \ + shape (num_gts, 4). + gt_bboxes_ignore (Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). + gt_labels (Tensor): Ground truth labels of each box, + shape (num_gts,). + img_meta (dict): Meta info of the image. + label_channels (int): Channel of label. + sampling (bool): Sample Anchors or not. + unmap_outputs (bool): unmap outputs or not. 
+ + Returns: + tuple: + + - labels (Tensor): Labels in a single image + - label_weights (Tensor): Label weights in a single image + - bbox_cls_targets (Tensor): BBox cls targets in a single image + - bbox_cls_weights (Tensor): BBox cls weights in a single image + - bbox_reg_targets (Tensor): BBox reg targets in a single image + - bbox_reg_weights (Tensor): BBox reg weights in a single image + - pos_inds (Tensor): Indices of positive samples \ + in a single image + - neg_inds (Tensor): Indices of negative samples \ + in a single image + """ + if not inside_flags.any(): + return (None, ) * 8 + # assign gt and sample anchors + expand_inside_flags = inside_flags[:, None].expand( + -1, self.approxs_per_octave).reshape(-1) + approxs = flat_approxs[expand_inside_flags, :] + squares = flat_squares[inside_flags, :] + + assign_result = self.assigner.assign(approxs, squares, + self.approxs_per_octave, + gt_bboxes, gt_bboxes_ignore) + sampling_result = self.sampler.sample(assign_result, squares, + gt_bboxes) + + num_valid_squares = squares.shape[0] + bbox_cls_targets = squares.new_zeros( + (num_valid_squares, self.side_num * 4)) + bbox_cls_weights = squares.new_zeros( + (num_valid_squares, self.side_num * 4)) + bbox_reg_targets = squares.new_zeros( + (num_valid_squares, self.side_num * 4)) + bbox_reg_weights = squares.new_zeros( + (num_valid_squares, self.side_num * 4)) + labels = squares.new_full((num_valid_squares, ), + self.num_classes, + dtype=torch.long) + label_weights = squares.new_zeros(num_valid_squares, dtype=torch.float) + + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + if len(pos_inds) > 0: + (pos_bbox_reg_targets, pos_bbox_reg_weights, pos_bbox_cls_targets, + pos_bbox_cls_weights) = self.bbox_coder.encode( + sampling_result.pos_bboxes, sampling_result.pos_gt_bboxes) + + bbox_cls_targets[pos_inds, :] = pos_bbox_cls_targets + bbox_reg_targets[pos_inds, :] = pos_bbox_reg_targets + bbox_cls_weights[pos_inds, :] = pos_bbox_cls_weights + bbox_reg_weights[pos_inds, :] = pos_bbox_reg_weights + if gt_labels is None: + # Only rpn gives gt_labels as None + # Foreground is the first class + labels[pos_inds] = 0 + else: + labels[pos_inds] = gt_labels[ + sampling_result.pos_assigned_gt_inds] + if self.train_cfg.pos_weight <= 0: + label_weights[pos_inds] = 1.0 + else: + label_weights[pos_inds] = self.train_cfg.pos_weight + if len(neg_inds) > 0: + label_weights[neg_inds] = 1.0 + + # map up to original set of anchors + if unmap_outputs: + num_total_anchors = flat_squares.size(0) + labels = unmap( + labels, num_total_anchors, inside_flags, fill=self.num_classes) + label_weights = unmap(label_weights, num_total_anchors, + inside_flags) + bbox_cls_targets = unmap(bbox_cls_targets, num_total_anchors, + inside_flags) + bbox_cls_weights = unmap(bbox_cls_weights, num_total_anchors, + inside_flags) + bbox_reg_targets = unmap(bbox_reg_targets, num_total_anchors, + inside_flags) + bbox_reg_weights = unmap(bbox_reg_weights, num_total_anchors, + inside_flags) + return (labels, label_weights, bbox_cls_targets, bbox_cls_weights, + bbox_reg_targets, bbox_reg_weights, pos_inds, neg_inds) + + def loss_single(self, cls_score, bbox_pred, labels, label_weights, + bbox_cls_targets, bbox_cls_weights, bbox_reg_targets, + bbox_reg_weights, num_total_samples): + # classification loss + labels = labels.reshape(-1) + label_weights = label_weights.reshape(-1) + cls_score = cls_score.permute(0, 2, 3, + 1).reshape(-1, self.cls_out_channels) + loss_cls = self.loss_cls( + cls_score, labels,
label_weights, avg_factor=num_total_samples) + # regression loss + bbox_cls_targets = bbox_cls_targets.reshape(-1, self.side_num * 4) + bbox_cls_weights = bbox_cls_weights.reshape(-1, self.side_num * 4) + bbox_reg_targets = bbox_reg_targets.reshape(-1, self.side_num * 4) + bbox_reg_weights = bbox_reg_weights.reshape(-1, self.side_num * 4) + (bbox_cls_pred, bbox_reg_pred) = bbox_pred + bbox_cls_pred = bbox_cls_pred.permute(0, 2, 3, 1).reshape( + -1, self.side_num * 4) + bbox_reg_pred = bbox_reg_pred.permute(0, 2, 3, 1).reshape( + -1, self.side_num * 4) + loss_bbox_cls = self.loss_bbox_cls( + bbox_cls_pred, + bbox_cls_targets.long(), + bbox_cls_weights, + avg_factor=num_total_samples * 4 * self.side_num) + loss_bbox_reg = self.loss_bbox_reg( + bbox_reg_pred, + bbox_reg_targets, + bbox_reg_weights, + avg_factor=num_total_samples * 4 * self.bbox_coder.offset_topk) + return loss_cls, loss_bbox_cls, loss_bbox_reg + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.approx_anchor_generator.num_levels + + device = cls_scores[0].device + + # get sampled approxes + approxs_list, inside_flag_list = GuidedAnchorHead.get_sampled_approxs( + self, featmap_sizes, img_metas, device=device) + + square_list = self.get_anchors(featmap_sizes, img_metas, device=device) + + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + + cls_reg_targets = self.get_target( + approxs_list, + inside_flag_list, + square_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels, + sampling=self.sampling) + if cls_reg_targets is None: + return None + (labels_list, label_weights_list, bbox_cls_targets_list, + bbox_cls_weights_list, bbox_reg_targets_list, bbox_reg_weights_list, + num_total_pos, num_total_neg) = cls_reg_targets + num_total_samples = ( + num_total_pos + num_total_neg if self.sampling else num_total_pos) + losses_cls, losses_bbox_cls, losses_bbox_reg = multi_apply( + self.loss_single, + cls_scores, + bbox_preds, + labels_list, + label_weights_list, + bbox_cls_targets_list, + bbox_cls_weights_list, + bbox_reg_targets_list, + bbox_reg_weights_list, + num_total_samples=num_total_samples) + return dict( + loss_cls=losses_cls, + loss_bbox_cls=losses_bbox_cls, + loss_bbox_reg=losses_bbox_reg) + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def get_bboxes(self, + cls_scores, + bbox_preds, + img_metas, + cfg=None, + rescale=False): + assert len(cls_scores) == len(bbox_preds) + num_levels = len(cls_scores) + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + + device = cls_scores[0].device + mlvl_anchors = self.get_anchors( + featmap_sizes, img_metas, device=device) + result_list = [] + for img_id in range(len(img_metas)): + cls_score_list = [ + cls_scores[i][img_id].detach() for i in range(num_levels) + ] + bbox_cls_pred_list = [ + bbox_preds[i][0][img_id].detach() for i in range(num_levels) + ] + bbox_reg_pred_list = [ + bbox_preds[i][1][img_id].detach() for i in range(num_levels) + ] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + proposals = self.get_bboxes_single(cls_score_list, + bbox_cls_pred_list, + bbox_reg_pred_list, + mlvl_anchors[img_id], img_shape, + scale_factor, cfg, rescale) + result_list.append(proposals) + return result_list + + 
def get_bboxes_single(self, + cls_scores, + bbox_cls_preds, + bbox_reg_preds, + mlvl_anchors, + img_shape, + scale_factor, + cfg, + rescale=False): + cfg = self.test_cfg if cfg is None else cfg + mlvl_bboxes = [] + mlvl_scores = [] + mlvl_confids = [] + assert len(cls_scores) == len(bbox_cls_preds) == len( + bbox_reg_preds) == len(mlvl_anchors) + for cls_score, bbox_cls_pred, bbox_reg_pred, anchors in zip( + cls_scores, bbox_cls_preds, bbox_reg_preds, mlvl_anchors): + assert cls_score.size()[-2:] == bbox_cls_pred.size( + )[-2:] == bbox_reg_pred.size()[-2:] + cls_score = cls_score.permute(1, 2, + 0).reshape(-1, self.cls_out_channels) + if self.use_sigmoid_cls: + scores = cls_score.sigmoid() + else: + scores = cls_score.softmax(-1) + bbox_cls_pred = bbox_cls_pred.permute(1, 2, 0).reshape( + -1, self.side_num * 4) + bbox_reg_pred = bbox_reg_pred.permute(1, 2, 0).reshape( + -1, self.side_num * 4) + nms_pre = cfg.get('nms_pre', -1) + if nms_pre > 0 and scores.shape[0] > nms_pre: + if self.use_sigmoid_cls: + max_scores, _ = scores.max(dim=1) + else: + max_scores, _ = scores[:, :-1].max(dim=1) + _, topk_inds = max_scores.topk(nms_pre) + anchors = anchors[topk_inds, :] + bbox_cls_pred = bbox_cls_pred[topk_inds, :] + bbox_reg_pred = bbox_reg_pred[topk_inds, :] + scores = scores[topk_inds, :] + bbox_preds = [ + bbox_cls_pred.contiguous(), + bbox_reg_pred.contiguous() + ] + bboxes, confids = self.bbox_coder.decode( + anchors.contiguous(), bbox_preds, max_shape=img_shape) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + mlvl_confids.append(confids) + mlvl_bboxes = torch.cat(mlvl_bboxes) + if rescale: + mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor) + mlvl_scores = torch.cat(mlvl_scores) + mlvl_confids = torch.cat(mlvl_confids) + if self.use_sigmoid_cls: + padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1) + mlvl_scores = torch.cat([mlvl_scores, padding], dim=1) + det_bboxes, det_labels = multiclass_nms( + mlvl_bboxes, + mlvl_scores, + cfg.score_thr, + cfg.nms, + cfg.max_per_img, + score_factors=mlvl_confids) + return det_bboxes, det_labels diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/ssd_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/ssd_head.py new file mode 100644 index 0000000000000000000000000000000000000000..22dac7414bb47a169c2fc352bf8d254bbf41dfaf --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/ssd_head.py @@ -0,0 +1,345 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule, DepthwiseSeparableConvModule +from mmcv.runner import force_fp32 + +from mmdet.core import (build_anchor_generator, build_assigner, + build_bbox_coder, build_sampler, multi_apply) +from ..builder import HEADS +from ..losses import smooth_l1_loss +from .anchor_head import AnchorHead + + +# TODO: add loss evaluator for SSD +@HEADS.register_module() +class SSDHead(AnchorHead): + """SSD head used in https://arxiv.org/abs/1512.02325. + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + stacked_convs (int): Number of conv layers in cls and reg tower. + Default: 0. + feat_channels (int): Number of hidden channels when stacked_convs + > 0. Default: 256. + use_depthwise (bool): Whether to use DepthwiseSeparableConv. + Default: False. + conv_cfg (dict): Dictionary to construct and config conv layer. + Default: None.
+ norm_cfg (dict): Dictionary to construct and config norm layer. + Default: None. + act_cfg (dict): Dictionary to construct and config activation layer. + Default: None. + anchor_generator (dict): Config dict for anchor generator. + bbox_coder (dict): Config of bounding box coder. + reg_decoded_bbox (bool): If true, the regression loss would be + applied directly on decoded bounding boxes, converting both + the predicted boxes and regression targets to absolute + coordinates format. Default False. It should be `True` when + using `IoULoss`, `GIoULoss`, or `DIoULoss` in the bbox head. + train_cfg (dict): Training config of anchor head. + test_cfg (dict): Testing config of anchor head. + init_cfg (dict or list[dict], optional): Initialization config dict. + """ # noqa: W605 + + def __init__(self, + num_classes=80, + in_channels=(512, 1024, 512, 256, 256, 256), + stacked_convs=0, + feat_channels=256, + use_depthwise=False, + conv_cfg=None, + norm_cfg=None, + act_cfg=None, + anchor_generator=dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=300, + strides=[8, 16, 32, 64, 100, 300], + ratios=([2], [2, 3], [2, 3], [2, 3], [2], [2]), + basesize_ratio_range=(0.1, 0.9)), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + clip_border=True, + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0], + ), + reg_decoded_bbox=False, + train_cfg=None, + test_cfg=None, + init_cfg=dict( + type='Xavier', + layer='Conv2d', + distribution='uniform', + bias=0)): + super(AnchorHead, self).__init__(init_cfg) + self.num_classes = num_classes + self.in_channels = in_channels + self.stacked_convs = stacked_convs + self.feat_channels = feat_channels + self.use_depthwise = use_depthwise + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + + self.cls_out_channels = num_classes + 1 # add background class + self.anchor_generator = build_anchor_generator(anchor_generator) + self.num_anchors = self.anchor_generator.num_base_anchors + + self._init_layers() + + self.bbox_coder = build_bbox_coder(bbox_coder) + self.reg_decoded_bbox = reg_decoded_bbox + self.use_sigmoid_cls = False + self.cls_focal_loss = False + self.train_cfg = train_cfg + self.test_cfg = test_cfg + # set sampling=False for anchor_target + self.sampling = False + if self.train_cfg: + self.assigner = build_assigner(self.train_cfg.assigner) + # SSD sampling=False so use PseudoSampler + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + self.fp16_enabled = False + + def _init_layers(self): + """Initialize layers of the head.""" + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + # TODO: Use registry to choose ConvModule type + conv = DepthwiseSeparableConvModule \ + if self.use_depthwise else ConvModule + + for channel, num_anchors in zip(self.in_channels, self.num_anchors): + cls_layers = [] + reg_layers = [] + in_channel = channel + # build stacked conv tower, not used in default ssd + for i in range(self.stacked_convs): + cls_layers.append( + conv( + in_channel, + self.feat_channels, + 3, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg)) + reg_layers.append( + conv( + in_channel, + self.feat_channels, + 3, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg)) + in_channel = self.feat_channels + # SSD-Lite head + if self.use_depthwise: + cls_layers.append( + ConvModule( + in_channel, + in_channel, + 3, + padding=1, + groups=in_channel, + conv_cfg=self.conv_cfg,
+ norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg)) + reg_layers.append( + ConvModule( + in_channel, + in_channel, + 3, + padding=1, + groups=in_channel, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg)) + cls_layers.append( + nn.Conv2d( + in_channel, + num_anchors * self.cls_out_channels, + kernel_size=1 if self.use_depthwise else 3, + padding=0 if self.use_depthwise else 1)) + reg_layers.append( + nn.Conv2d( + in_channel, + num_anchors * 4, + kernel_size=1 if self.use_depthwise else 3, + padding=0 if self.use_depthwise else 1)) + self.cls_convs.append(nn.Sequential(*cls_layers)) + self.reg_convs.append(nn.Sequential(*reg_layers)) + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple: + cls_scores (list[Tensor]): Classification scores for all scale + levels, each is a 4D-tensor, the channels number is + num_anchors * num_classes. + bbox_preds (list[Tensor]): Box energies / deltas for all scale + levels, each is a 4D-tensor, the channels number is + num_anchors * 4. + """ + cls_scores = [] + bbox_preds = [] + for feat, reg_conv, cls_conv in zip(feats, self.reg_convs, + self.cls_convs): + cls_scores.append(cls_conv(feat)) + bbox_preds.append(reg_conv(feat)) + return cls_scores, bbox_preds + + def loss_single(self, cls_score, bbox_pred, anchor, labels, label_weights, + bbox_targets, bbox_weights, num_total_samples): + """Compute loss of a single image. + + Args: + cls_score (Tensor): Box scores for each image + Has shape (num_total_anchors, num_classes). + bbox_pred (Tensor): Box energies / deltas for each image + level with shape (num_total_anchors, 4). + anchors (Tensor): Box reference for each scale level with shape + (num_total_anchors, 4). + labels (Tensor): Labels of each anchor with shape + (num_total_anchors,). + label_weights (Tensor): Label weights of each anchor with shape + (num_total_anchors,). + bbox_targets (Tensor): BBox regression targets of each anchor with + shape (num_total_anchors, 4). + bbox_weights (Tensor): BBox regression loss weights of each anchor + with shape (num_total_anchors, 4). + num_total_samples (int): If sampling, the number of total samples + equals the number of total anchors; otherwise, it is the number + of positive anchors. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + + loss_cls_all = F.cross_entropy( + cls_score, labels, reduction='none') * label_weights + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + pos_inds = ((labels >= 0) & (labels < self.num_classes)).nonzero( + as_tuple=False).reshape(-1) + neg_inds = (labels == self.num_classes).nonzero( + as_tuple=False).view(-1) + + num_pos_samples = pos_inds.size(0) + num_neg_samples = self.train_cfg.neg_pos_ratio * num_pos_samples + if num_neg_samples > neg_inds.size(0): + num_neg_samples = neg_inds.size(0) + topk_loss_cls_neg, _ = loss_cls_all[neg_inds].topk(num_neg_samples) + loss_cls_pos = loss_cls_all[pos_inds].sum() + loss_cls_neg = topk_loss_cls_neg.sum() + loss_cls = (loss_cls_pos + loss_cls_neg) / num_total_samples + + if self.reg_decoded_bbox: + # When the regression loss (e.g. `IouLoss`, `GIouLoss`) + # is applied directly on the decoded bounding boxes, it + # decodes the already encoded coordinates to absolute format.
+            bbox_pred = self.bbox_coder.decode(anchor, bbox_pred)
+
+        loss_bbox = smooth_l1_loss(
+            bbox_pred,
+            bbox_targets,
+            bbox_weights,
+            beta=self.train_cfg.smoothl1_beta,
+            avg_factor=num_total_samples)
+        return loss_cls[None], loss_bbox
+
+    @force_fp32(apply_to=('cls_scores', 'bbox_preds'))
+    def loss(self,
+             cls_scores,
+             bbox_preds,
+             gt_bboxes,
+             gt_labels,
+             img_metas,
+             gt_bboxes_ignore=None):
+        """Compute losses of the head.
+
+        Args:
+            cls_scores (list[Tensor]): Box scores for each scale level.
+                Has shape (N, num_anchors * num_classes, H, W).
+            bbox_preds (list[Tensor]): Box energies / deltas for each scale
+                level with shape (N, num_anchors * 4, H, W).
+            gt_bboxes (list[Tensor]): Each item is the ground truth boxes for
+                each image in [tl_x, tl_y, br_x, br_y] format.
+            gt_labels (list[Tensor]): Class indices corresponding to each box.
+            img_metas (list[dict]): Meta information of each image, e.g.,
+                image size, scaling factor, etc.
+            gt_bboxes_ignore (None | list[Tensor]): Specify which bounding
+                boxes can be ignored when computing the loss.
+
+        Returns:
+            dict[str, Tensor]: A dictionary of loss components.
+        """
+        featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores]
+        assert len(featmap_sizes) == self.anchor_generator.num_levels
+
+        device = cls_scores[0].device
+
+        anchor_list, valid_flag_list = self.get_anchors(
+            featmap_sizes, img_metas, device=device)
+        cls_reg_targets = self.get_targets(
+            anchor_list,
+            valid_flag_list,
+            gt_bboxes,
+            img_metas,
+            gt_bboxes_ignore_list=gt_bboxes_ignore,
+            gt_labels_list=gt_labels,
+            label_channels=1,
+            unmap_outputs=False)
+        if cls_reg_targets is None:
+            return None
+        (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list,
+         num_total_pos, num_total_neg) = cls_reg_targets
+
+        num_images = len(img_metas)
+        all_cls_scores = torch.cat([
+            s.permute(0, 2, 3, 1).reshape(
+                num_images, -1, self.cls_out_channels) for s in cls_scores
+        ], 1)
+        all_labels = torch.cat(labels_list, -1).view(num_images, -1)
+        all_label_weights = torch.cat(label_weights_list,
+                                      -1).view(num_images, -1)
+        all_bbox_preds = torch.cat([
+            b.permute(0, 2, 3, 1).reshape(num_images, -1, 4)
+            for b in bbox_preds
+        ], -2)
+        all_bbox_targets = torch.cat(bbox_targets_list,
+                                     -2).view(num_images, -1, 4)
+        all_bbox_weights = torch.cat(bbox_weights_list,
+                                     -2).view(num_images, -1, 4)
+
+        # concat all level anchors to a single tensor
+        all_anchors = []
+        for i in range(num_images):
+            all_anchors.append(torch.cat(anchor_list[i]))
+
+        # check NaN and Inf
+        assert torch.isfinite(all_cls_scores).all().item(), \
+            'classification scores become infinite or NaN!'
+        assert torch.isfinite(all_bbox_preds).all().item(), \
+            'bbox predictions become infinite or NaN!'
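+
+        # `multi_apply` runs `loss_single` once per image here and transposes
+        # the per-image results into one list of classification losses and
+        # one list of bbox losses.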
+        losses_cls, losses_bbox = multi_apply(
+            self.loss_single,
+            all_cls_scores,
+            all_bbox_preds,
+            all_anchors,
+            all_labels,
+            all_label_weights,
+            all_bbox_targets,
+            all_bbox_weights,
+            num_total_samples=num_total_pos)
+        return dict(loss_cls=losses_cls, loss_bbox=losses_bbox)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/vfnet_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/vfnet_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..6d887d56dc97e0d73977d49b55bbdaf1403cafe2
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/vfnet_head.py
@@ -0,0 +1,791 @@
+import numpy as np
+import torch
+import torch.nn as nn
+from mmcv.cnn import ConvModule, Scale
+from mmcv.ops import DeformConv2d
+from mmcv.runner import force_fp32
+
+from mmdet.core import (bbox2distance, bbox_overlaps, build_anchor_generator,
+                        build_assigner, build_sampler, distance2bbox,
+                        multi_apply, multiclass_nms, reduce_mean)
+from ..builder import HEADS, build_loss
+from .atss_head import ATSSHead
+from .fcos_head import FCOSHead
+
+INF = 1e8
+
+
+@HEADS.register_module()
+class VFNetHead(ATSSHead, FCOSHead):
+    """Head of `VarifocalNet (VFNet): An IoU-aware Dense Object
+    Detector.`_.
+
+    The VFNet predicts IoU-aware classification scores which mix the
+    object presence confidence and object localization accuracy as the
+    detection score. It is built on the FCOS architecture and uses ATSS
+    for defining positive/negative training examples. The VFNet is trained
+    with Varifocal Loss and employs star-shaped deformable convolution to
+    extract features for a bbox.
+
+    Args:
+        num_classes (int): Number of categories excluding the background
+            category.
+        in_channels (int): Number of channels in the input feature map.
+        regress_ranges (tuple[tuple[int, int]]): Regress range of multiple
+            level points.
+        center_sampling (bool): If true, use center sampling. Default: False.
+        center_sample_radius (float): Radius of center sampling. Default: 1.5.
+        sync_num_pos (bool): If true, synchronize the number of positive
+            examples across GPUs. Default: True.
+        gradient_mul (float): The multiplier to gradients from bbox refinement
+            and recognition. Default: 0.1.
+        bbox_norm_type (str): The bbox normalization type, 'reg_denom' or
+            'stride'. Default: 'reg_denom'.
+        loss_cls_fl (dict): Config of focal loss.
+        use_vfl (bool): If true, use varifocal loss for training.
+            Default: True.
+        loss_cls (dict): Config of varifocal loss.
+        loss_bbox (dict): Config of localization loss, GIoU Loss.
+        loss_bbox_refine (dict): Config of localization refinement loss,
+            GIoU Loss.
+        norm_cfg (dict): Dictionary to construct and config norm layer.
+            Default: norm_cfg=dict(type='GN', num_groups=32,
+            requires_grad=True).
+        use_atss (bool): If true, use ATSS to define positive/negative
+            examples. Default: True.
+        anchor_generator (dict): Config of anchor generator for ATSS.
+        init_cfg (dict or list[dict], optional): Initialization config dict.
+ + Example: + >>> self = VFNetHead(11, 7) + >>> feats = [torch.rand(1, 7, s, s) for s in [4, 8, 16, 32, 64]] + >>> cls_score, bbox_pred, bbox_pred_refine= self.forward(feats) + >>> assert len(cls_score) == len(self.scales) + """ # noqa: E501 + + def __init__(self, + num_classes, + in_channels, + regress_ranges=((-1, 64), (64, 128), (128, 256), (256, 512), + (512, INF)), + center_sampling=False, + center_sample_radius=1.5, + sync_num_pos=True, + gradient_mul=0.1, + bbox_norm_type='reg_denom', + loss_cls_fl=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + use_vfl=True, + loss_cls=dict( + type='VarifocalLoss', + use_sigmoid=True, + alpha=0.75, + gamma=2.0, + iou_weighted=True, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.5), + loss_bbox_refine=dict(type='GIoULoss', loss_weight=2.0), + norm_cfg=dict(type='GN', num_groups=32, requires_grad=True), + use_atss=True, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + center_offset=0.0, + strides=[8, 16, 32, 64, 128]), + init_cfg=dict( + type='Normal', + layer='Conv2d', + std=0.01, + override=dict( + type='Normal', + name='vfnet_cls', + std=0.01, + bias_prob=0.01)), + **kwargs): + # dcn base offsets, adapted from reppoints_head.py + self.num_dconv_points = 9 + self.dcn_kernel = int(np.sqrt(self.num_dconv_points)) + self.dcn_pad = int((self.dcn_kernel - 1) / 2) + dcn_base = np.arange(-self.dcn_pad, + self.dcn_pad + 1).astype(np.float64) + dcn_base_y = np.repeat(dcn_base, self.dcn_kernel) + dcn_base_x = np.tile(dcn_base, self.dcn_kernel) + dcn_base_offset = np.stack([dcn_base_y, dcn_base_x], axis=1).reshape( + (-1)) + self.dcn_base_offset = torch.tensor(dcn_base_offset).view(1, -1, 1, 1) + + super(FCOSHead, self).__init__( + num_classes, + in_channels, + norm_cfg=norm_cfg, + init_cfg=init_cfg, + **kwargs) + self.regress_ranges = regress_ranges + self.reg_denoms = [ + regress_range[-1] for regress_range in regress_ranges + ] + self.reg_denoms[-1] = self.reg_denoms[-2] * 2 + self.center_sampling = center_sampling + self.center_sample_radius = center_sample_radius + self.sync_num_pos = sync_num_pos + self.bbox_norm_type = bbox_norm_type + self.gradient_mul = gradient_mul + self.use_vfl = use_vfl + if self.use_vfl: + self.loss_cls = build_loss(loss_cls) + else: + self.loss_cls = build_loss(loss_cls_fl) + self.loss_bbox = build_loss(loss_bbox) + self.loss_bbox_refine = build_loss(loss_bbox_refine) + + # for getting ATSS targets + self.use_atss = use_atss + self.use_sigmoid_cls = loss_cls.get('use_sigmoid', False) + self.anchor_generator = build_anchor_generator(anchor_generator) + self.anchor_center_offset = anchor_generator['center_offset'] + self.num_anchors = self.anchor_generator.num_base_anchors[0] + self.sampling = False + if self.train_cfg: + self.assigner = build_assigner(self.train_cfg.assigner) + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + + def _init_layers(self): + """Initialize layers of the head.""" + super(FCOSHead, self)._init_cls_convs() + super(FCOSHead, self)._init_reg_convs() + self.relu = nn.ReLU(inplace=True) + self.vfnet_reg_conv = ConvModule( + self.feat_channels, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + bias=self.conv_bias) + self.vfnet_reg = nn.Conv2d(self.feat_channels, 4, 3, padding=1) + self.scales = nn.ModuleList([Scale(1.0) for _ in self.strides]) + + 
self.vfnet_reg_refine_dconv = DeformConv2d( + self.feat_channels, + self.feat_channels, + self.dcn_kernel, + 1, + padding=self.dcn_pad) + self.vfnet_reg_refine = nn.Conv2d(self.feat_channels, 4, 3, padding=1) + self.scales_refine = nn.ModuleList([Scale(1.0) for _ in self.strides]) + + self.vfnet_cls_dconv = DeformConv2d( + self.feat_channels, + self.feat_channels, + self.dcn_kernel, + 1, + padding=self.dcn_pad) + self.vfnet_cls = nn.Conv2d( + self.feat_channels, self.cls_out_channels, 3, padding=1) + + def forward(self, feats): + """Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple: + cls_scores (list[Tensor]): Box iou-aware scores for each scale + level, each is a 4D-tensor, the channel number is + num_points * num_classes. + bbox_preds (list[Tensor]): Box offsets for each + scale level, each is a 4D-tensor, the channel number is + num_points * 4. + bbox_preds_refine (list[Tensor]): Refined Box offsets for + each scale level, each is a 4D-tensor, the channel + number is num_points * 4. + """ + return multi_apply(self.forward_single, feats, self.scales, + self.scales_refine, self.strides, self.reg_denoms) + + def forward_single(self, x, scale, scale_refine, stride, reg_denom): + """Forward features of a single scale level. + + Args: + x (Tensor): FPN feature maps of the specified stride. + scale (:obj: `mmcv.cnn.Scale`): Learnable scale module to resize + the bbox prediction. + scale_refine (:obj: `mmcv.cnn.Scale`): Learnable scale module to + resize the refined bbox prediction. + stride (int): The corresponding stride for feature maps, + used to normalize the bbox prediction when + bbox_norm_type = 'stride'. + reg_denom (int): The corresponding regression range for feature + maps, only used to normalize the bbox prediction when + bbox_norm_type = 'reg_denom'. + + Returns: + tuple: iou-aware cls scores for each box, bbox predictions and + refined bbox predictions of input feature maps. + """ + cls_feat = x + reg_feat = x + + for cls_layer in self.cls_convs: + cls_feat = cls_layer(cls_feat) + + for reg_layer in self.reg_convs: + reg_feat = reg_layer(reg_feat) + + # predict the bbox_pred of different level + reg_feat_init = self.vfnet_reg_conv(reg_feat) + if self.bbox_norm_type == 'reg_denom': + bbox_pred = scale( + self.vfnet_reg(reg_feat_init)).float().exp() * reg_denom + elif self.bbox_norm_type == 'stride': + bbox_pred = scale( + self.vfnet_reg(reg_feat_init)).float().exp() * stride + else: + raise NotImplementedError + + # compute star deformable convolution offsets + # converting dcn_offset to reg_feat.dtype thus VFNet can be + # trained with FP16 + dcn_offset = self.star_dcn_offset(bbox_pred, self.gradient_mul, + stride).to(reg_feat.dtype) + + # refine the bbox_pred + reg_feat = self.relu(self.vfnet_reg_refine_dconv(reg_feat, dcn_offset)) + bbox_pred_refine = scale_refine( + self.vfnet_reg_refine(reg_feat)).float().exp() + bbox_pred_refine = bbox_pred_refine * bbox_pred.detach() + + # predict the iou-aware cls score + cls_feat = self.relu(self.vfnet_cls_dconv(cls_feat, dcn_offset)) + cls_score = self.vfnet_cls(cls_feat) + + return cls_score, bbox_pred, bbox_pred_refine + + def star_dcn_offset(self, bbox_pred, gradient_mul, stride): + """Compute the star deformable conv offsets. + + Args: + bbox_pred (Tensor): Predicted bbox distance offsets (l, r, t, b). + gradient_mul (float): Gradient multiplier. 
+ stride (int): The corresponding stride for feature maps, + used to project the bbox onto the feature map. + + Returns: + dcn_offsets (Tensor): The offsets for deformable convolution. + """ + dcn_base_offset = self.dcn_base_offset.type_as(bbox_pred) + bbox_pred_grad_mul = (1 - gradient_mul) * bbox_pred.detach() + \ + gradient_mul * bbox_pred + # map to the feature map scale + bbox_pred_grad_mul = bbox_pred_grad_mul / stride + N, C, H, W = bbox_pred.size() + + x1 = bbox_pred_grad_mul[:, 0, :, :] + y1 = bbox_pred_grad_mul[:, 1, :, :] + x2 = bbox_pred_grad_mul[:, 2, :, :] + y2 = bbox_pred_grad_mul[:, 3, :, :] + bbox_pred_grad_mul_offset = bbox_pred.new_zeros( + N, 2 * self.num_dconv_points, H, W) + bbox_pred_grad_mul_offset[:, 0, :, :] = -1.0 * y1 # -y1 + bbox_pred_grad_mul_offset[:, 1, :, :] = -1.0 * x1 # -x1 + bbox_pred_grad_mul_offset[:, 2, :, :] = -1.0 * y1 # -y1 + bbox_pred_grad_mul_offset[:, 4, :, :] = -1.0 * y1 # -y1 + bbox_pred_grad_mul_offset[:, 5, :, :] = x2 # x2 + bbox_pred_grad_mul_offset[:, 7, :, :] = -1.0 * x1 # -x1 + bbox_pred_grad_mul_offset[:, 11, :, :] = x2 # x2 + bbox_pred_grad_mul_offset[:, 12, :, :] = y2 # y2 + bbox_pred_grad_mul_offset[:, 13, :, :] = -1.0 * x1 # -x1 + bbox_pred_grad_mul_offset[:, 14, :, :] = y2 # y2 + bbox_pred_grad_mul_offset[:, 16, :, :] = y2 # y2 + bbox_pred_grad_mul_offset[:, 17, :, :] = x2 # x2 + dcn_offset = bbox_pred_grad_mul_offset - dcn_base_offset + + return dcn_offset + + @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'bbox_preds_refine')) + def loss(self, + cls_scores, + bbox_preds, + bbox_preds_refine, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute loss of the head. + + Args: + cls_scores (list[Tensor]): Box iou-aware scores for each scale + level, each is a 4D-tensor, the channel number is + num_points * num_classes. + bbox_preds (list[Tensor]): Box offsets for each + scale level, each is a 4D-tensor, the channel number is + num_points * 4. + bbox_preds_refine (list[Tensor]): Refined Box offsets for + each scale level, each is a 4D-tensor, the channel + number is num_points * 4. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + Default: None. + + Returns: + dict[str, Tensor]: A dictionary of loss components. 
+ """ + assert len(cls_scores) == len(bbox_preds) == len(bbox_preds_refine) + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + all_level_points = self.get_points(featmap_sizes, bbox_preds[0].dtype, + bbox_preds[0].device) + labels, label_weights, bbox_targets, bbox_weights = self.get_targets( + cls_scores, all_level_points, gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + + num_imgs = cls_scores[0].size(0) + # flatten cls_scores, bbox_preds and bbox_preds_refine + flatten_cls_scores = [ + cls_score.permute(0, 2, 3, + 1).reshape(-1, + self.cls_out_channels).contiguous() + for cls_score in cls_scores + ] + flatten_bbox_preds = [ + bbox_pred.permute(0, 2, 3, 1).reshape(-1, 4).contiguous() + for bbox_pred in bbox_preds + ] + flatten_bbox_preds_refine = [ + bbox_pred_refine.permute(0, 2, 3, 1).reshape(-1, 4).contiguous() + for bbox_pred_refine in bbox_preds_refine + ] + flatten_cls_scores = torch.cat(flatten_cls_scores) + flatten_bbox_preds = torch.cat(flatten_bbox_preds) + flatten_bbox_preds_refine = torch.cat(flatten_bbox_preds_refine) + flatten_labels = torch.cat(labels) + flatten_bbox_targets = torch.cat(bbox_targets) + # repeat points to align with bbox_preds + flatten_points = torch.cat( + [points.repeat(num_imgs, 1) for points in all_level_points]) + + # FG cat_id: [0, num_classes - 1], BG cat_id: num_classes + bg_class_ind = self.num_classes + pos_inds = torch.where( + ((flatten_labels >= 0) & (flatten_labels < bg_class_ind)) > 0)[0] + num_pos = len(pos_inds) + + pos_bbox_preds = flatten_bbox_preds[pos_inds] + pos_bbox_preds_refine = flatten_bbox_preds_refine[pos_inds] + pos_labels = flatten_labels[pos_inds] + + # sync num_pos across all gpus + if self.sync_num_pos: + num_pos_avg_per_gpu = reduce_mean( + pos_inds.new_tensor(num_pos).float()).item() + num_pos_avg_per_gpu = max(num_pos_avg_per_gpu, 1.0) + else: + num_pos_avg_per_gpu = num_pos + + pos_bbox_targets = flatten_bbox_targets[pos_inds] + pos_points = flatten_points[pos_inds] + + pos_decoded_bbox_preds = distance2bbox(pos_points, pos_bbox_preds) + pos_decoded_target_preds = distance2bbox(pos_points, pos_bbox_targets) + iou_targets_ini = bbox_overlaps( + pos_decoded_bbox_preds, + pos_decoded_target_preds.detach(), + is_aligned=True).clamp(min=1e-6) + bbox_weights_ini = iou_targets_ini.clone().detach() + bbox_avg_factor_ini = reduce_mean( + bbox_weights_ini.sum()).clamp_(min=1).item() + + pos_decoded_bbox_preds_refine = \ + distance2bbox(pos_points, pos_bbox_preds_refine) + iou_targets_rf = bbox_overlaps( + pos_decoded_bbox_preds_refine, + pos_decoded_target_preds.detach(), + is_aligned=True).clamp(min=1e-6) + bbox_weights_rf = iou_targets_rf.clone().detach() + bbox_avg_factor_rf = reduce_mean( + bbox_weights_rf.sum()).clamp_(min=1).item() + + if num_pos > 0: + loss_bbox = self.loss_bbox( + pos_decoded_bbox_preds, + pos_decoded_target_preds.detach(), + weight=bbox_weights_ini, + avg_factor=bbox_avg_factor_ini) + + loss_bbox_refine = self.loss_bbox_refine( + pos_decoded_bbox_preds_refine, + pos_decoded_target_preds.detach(), + weight=bbox_weights_rf, + avg_factor=bbox_avg_factor_rf) + + # build IoU-aware cls_score targets + if self.use_vfl: + pos_ious = iou_targets_rf.clone().detach() + cls_iou_targets = torch.zeros_like(flatten_cls_scores) + cls_iou_targets[pos_inds, pos_labels] = pos_ious + else: + loss_bbox = pos_bbox_preds.sum() * 0 + loss_bbox_refine = pos_bbox_preds_refine.sum() * 0 + if self.use_vfl: + cls_iou_targets = torch.zeros_like(flatten_cls_scores) + + if self.use_vfl: + loss_cls = 
self.loss_cls( + flatten_cls_scores, + cls_iou_targets, + avg_factor=num_pos_avg_per_gpu) + else: + loss_cls = self.loss_cls( + flatten_cls_scores, + flatten_labels, + weight=label_weights, + avg_factor=num_pos_avg_per_gpu) + + return dict( + loss_cls=loss_cls, + loss_bbox=loss_bbox, + loss_bbox_rf=loss_bbox_refine) + + @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'bbox_preds_refine')) + def get_bboxes(self, + cls_scores, + bbox_preds, + bbox_preds_refine, + img_metas, + cfg=None, + rescale=None, + with_nms=True): + """Transform network outputs for a batch into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box iou-aware scores for each scale + level with shape (N, num_points * num_classes, H, W). + bbox_preds (list[Tensor]): Box offsets for each scale + level with shape (N, num_points * 4, H, W). + bbox_preds_refine (list[Tensor]): Refined Box offsets for + each scale level with shape (N, num_points * 4, H, W). + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + cfg (mmcv.Config): Test / postprocessing configuration, + if None, test_cfg would be used. Default: None. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before returning boxes. + Default: True. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is an (n, 5) tensor, where the first 4 columns + are bounding box positions (tl_x, tl_y, br_x, br_y) and the + 5-th column is a score between 0 and 1. The second item is a + (n,) tensor where each item is the predicted class label of + the corresponding box. + """ + assert len(cls_scores) == len(bbox_preds) == len(bbox_preds_refine) + num_levels = len(cls_scores) + + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + mlvl_points = self.get_points(featmap_sizes, bbox_preds[0].dtype, + bbox_preds[0].device) + result_list = [] + for img_id in range(len(img_metas)): + cls_score_list = [ + cls_scores[i][img_id].detach() for i in range(num_levels) + ] + bbox_pred_list = [ + bbox_preds_refine[i][img_id].detach() + for i in range(num_levels) + ] + img_shape = img_metas[img_id]['img_shape'] + scale_factor = img_metas[img_id]['scale_factor'] + det_bboxes = self._get_bboxes_single(cls_score_list, + bbox_pred_list, mlvl_points, + img_shape, scale_factor, cfg, + rescale, with_nms) + result_list.append(det_bboxes) + return result_list + + def _get_bboxes_single(self, + cls_scores, + bbox_preds, + mlvl_points, + img_shape, + scale_factor, + cfg, + rescale=False, + with_nms=True): + """Transform outputs for a single batch item into bbox predictions. + + Args: + cls_scores (list[Tensor]): Box iou-aware scores for a single scale + level with shape (num_points * num_classes, H, W). + bbox_preds (list[Tensor]): Box offsets for a single scale + level with shape (num_points * 4, H, W). + mlvl_points (list[Tensor]): Box reference for a single scale level + with shape (num_total_points, 4). + img_shape (tuple[int]): Shape of the input image, + (height, width, 3). + scale_factor (ndarray): Scale factor of the image arrange as + (w_scale, h_scale, w_scale, h_scale). + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before returning boxes. + Default: True. 
+ + Returns: + tuple(Tensor): + det_bboxes (Tensor): BBox predictions in shape (n, 5), where + the first 4 columns are bounding box positions + (tl_x, tl_y, br_x, br_y) and the 5-th column is a score + between 0 and 1. + det_labels (Tensor): A (n,) tensor where each item is the + predicted class label of the corresponding box. + """ + cfg = self.test_cfg if cfg is None else cfg + assert len(cls_scores) == len(bbox_preds) == len(mlvl_points) + mlvl_bboxes = [] + mlvl_scores = [] + for cls_score, bbox_pred, points in zip(cls_scores, bbox_preds, + mlvl_points): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + scores = cls_score.permute(1, 2, 0).reshape( + -1, self.cls_out_channels).contiguous().sigmoid() + bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4).contiguous() + + nms_pre = cfg.get('nms_pre', -1) + if 0 < nms_pre < scores.shape[0]: + max_scores, _ = scores.max(dim=1) + _, topk_inds = max_scores.topk(nms_pre) + points = points[topk_inds, :] + bbox_pred = bbox_pred[topk_inds, :] + scores = scores[topk_inds, :] + bboxes = distance2bbox(points, bbox_pred, max_shape=img_shape) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + mlvl_bboxes = torch.cat(mlvl_bboxes) + if rescale: + mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor) + mlvl_scores = torch.cat(mlvl_scores) + padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1) + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + mlvl_scores = torch.cat([mlvl_scores, padding], dim=1) + if with_nms: + det_bboxes, det_labels = multiclass_nms(mlvl_bboxes, mlvl_scores, + cfg.score_thr, cfg.nms, + cfg.max_per_img) + return det_bboxes, det_labels + else: + return mlvl_bboxes, mlvl_scores + + def _get_points_single(self, + featmap_size, + stride, + dtype, + device, + flatten=False): + """Get points according to feature map sizes.""" + h, w = featmap_size + x_range = torch.arange( + 0, w * stride, stride, dtype=dtype, device=device) + y_range = torch.arange( + 0, h * stride, stride, dtype=dtype, device=device) + y, x = torch.meshgrid(y_range, x_range) + # to be compatible with anchor points in ATSS + if self.use_atss: + points = torch.stack( + (x.reshape(-1), y.reshape(-1)), dim=-1) + \ + stride * self.anchor_center_offset + else: + points = torch.stack( + (x.reshape(-1), y.reshape(-1)), dim=-1) + stride // 2 + return points + + def get_targets(self, cls_scores, mlvl_points, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore): + """A wrapper for computing ATSS and FCOS targets for points in multiple + images. + + Args: + cls_scores (list[Tensor]): Box iou-aware scores for each scale + level with shape (N, num_points * num_classes, H, W). + mlvl_points (list[Tensor]): Points of each fpn level, each has + shape (num_points, 2). + gt_bboxes (list[Tensor]): Ground truth bboxes of each image, + each has shape (num_gt, 4). + gt_labels (list[Tensor]): Ground truth labels of each box, + each has shape (num_gt,). + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). + + Returns: + tuple: + labels_list (list[Tensor]): Labels of each level. + label_weights (Tensor/None): Label weights of all levels. + bbox_targets_list (list[Tensor]): Regression targets of each + level, (l, t, r, b). + bbox_weights (Tensor/None): Bbox weights of all levels. 
+ """ + if self.use_atss: + return self.get_atss_targets(cls_scores, mlvl_points, gt_bboxes, + gt_labels, img_metas, + gt_bboxes_ignore) + else: + self.norm_on_bbox = False + return self.get_fcos_targets(mlvl_points, gt_bboxes, gt_labels) + + def _get_target_single(self, *args, **kwargs): + """Avoid ambiguity in multiple inheritance.""" + if self.use_atss: + return ATSSHead._get_target_single(self, *args, **kwargs) + else: + return FCOSHead._get_target_single(self, *args, **kwargs) + + def get_fcos_targets(self, points, gt_bboxes_list, gt_labels_list): + """Compute FCOS regression and classification targets for points in + multiple images. + + Args: + points (list[Tensor]): Points of each fpn level, each has shape + (num_points, 2). + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image, + each has shape (num_gt, 4). + gt_labels_list (list[Tensor]): Ground truth labels of each box, + each has shape (num_gt,). + + Returns: + tuple: + labels (list[Tensor]): Labels of each level. + label_weights: None, to be compatible with ATSS targets. + bbox_targets (list[Tensor]): BBox targets of each level. + bbox_weights: None, to be compatible with ATSS targets. + """ + labels, bbox_targets = FCOSHead.get_targets(self, points, + gt_bboxes_list, + gt_labels_list) + label_weights = None + bbox_weights = None + return labels, label_weights, bbox_targets, bbox_weights + + def get_atss_targets(self, + cls_scores, + mlvl_points, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """A wrapper for computing ATSS targets for points in multiple images. + + Args: + cls_scores (list[Tensor]): Box iou-aware scores for each scale + level with shape (N, num_points * num_classes, H, W). + mlvl_points (list[Tensor]): Points of each fpn level, each has + shape (num_points, 2). + gt_bboxes (list[Tensor]): Ground truth bboxes of each image, + each has shape (num_gt, 4). + gt_labels (list[Tensor]): Ground truth labels of each box, + each has shape (num_gt,). + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). Default: None. + + Returns: + tuple: + labels_list (list[Tensor]): Labels of each level. + label_weights (Tensor): Label weights of all levels. + bbox_targets_list (list[Tensor]): Regression targets of each + level, (l, t, r, b). + bbox_weights (Tensor): Bbox weights of all levels. 
+ """ + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + + cls_reg_targets = ATSSHead.get_targets( + self, + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels, + unmap_outputs=True) + if cls_reg_targets is None: + return None + + (anchor_list, labels_list, label_weights_list, bbox_targets_list, + bbox_weights_list, num_total_pos, num_total_neg) = cls_reg_targets + + bbox_targets_list = [ + bbox_targets.reshape(-1, 4) for bbox_targets in bbox_targets_list + ] + + num_imgs = len(img_metas) + # transform bbox_targets (x1, y1, x2, y2) into (l, t, r, b) format + bbox_targets_list = self.transform_bbox_targets( + bbox_targets_list, mlvl_points, num_imgs) + + labels_list = [labels.reshape(-1) for labels in labels_list] + label_weights_list = [ + label_weights.reshape(-1) for label_weights in label_weights_list + ] + bbox_weights_list = [ + bbox_weights.reshape(-1) for bbox_weights in bbox_weights_list + ] + label_weights = torch.cat(label_weights_list) + bbox_weights = torch.cat(bbox_weights_list) + return labels_list, label_weights, bbox_targets_list, bbox_weights + + def transform_bbox_targets(self, decoded_bboxes, mlvl_points, num_imgs): + """Transform bbox_targets (x1, y1, x2, y2) into (l, t, r, b) format. + + Args: + decoded_bboxes (list[Tensor]): Regression targets of each level, + in the form of (x1, y1, x2, y2). + mlvl_points (list[Tensor]): Points of each fpn level, each has + shape (num_points, 2). + num_imgs (int): the number of images in a batch. + + Returns: + bbox_targets (list[Tensor]): Regression targets of each level in + the form of (l, t, r, b). + """ + # TODO: Re-implemented in Class PointCoder + assert len(decoded_bboxes) == len(mlvl_points) + num_levels = len(decoded_bboxes) + mlvl_points = [points.repeat(num_imgs, 1) for points in mlvl_points] + bbox_targets = [] + for i in range(num_levels): + bbox_target = bbox2distance(mlvl_points[i], decoded_bboxes[i]) + bbox_targets.append(bbox_target) + + return bbox_targets + + def _load_from_state_dict(self, state_dict, prefix, local_metadata, strict, + missing_keys, unexpected_keys, error_msgs): + """Override the method in the parent class to avoid changing para's + name.""" + pass diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/yolact_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/yolact_head.py new file mode 100644 index 0000000000000000000000000000000000000000..5958263f773f519a0e1ba97f260d520c4299f7ed --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/yolact_head.py @@ -0,0 +1,1010 @@ +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule +from mmcv.runner import BaseModule, ModuleList, force_fp32 + +from mmdet.core import build_sampler, fast_nms, images_to_levels, multi_apply +from ..builder import HEADS, build_loss +from .anchor_head import AnchorHead + + +@HEADS.register_module() +class YOLACTHead(AnchorHead): + """YOLACT box head used in https://arxiv.org/abs/1904.02689. + + Note that YOLACT head is a light version of RetinaNet head. 
+ Four differences are described as follows: + + 1. YOLACT box head has three-times fewer anchors. + 2. YOLACT box head shares the convs for box and cls branches. + 3. YOLACT box head uses OHEM instead of Focal loss. + 4. YOLACT box head predicts a set of mask coefficients for each box. + + Args: + num_classes (int): Number of categories excluding the background + category. + in_channels (int): Number of channels in the input feature map. + anchor_generator (dict): Config dict for anchor generator + loss_cls (dict): Config of classification loss. + loss_bbox (dict): Config of localization loss. + num_head_convs (int): Number of the conv layers shared by + box and cls branches. + num_protos (int): Number of the mask coefficients. + use_ohem (bool): If true, ``loss_single_OHEM`` will be used for + cls loss calculation. If false, ``loss_single`` will be used. + conv_cfg (dict): Dictionary to construct and config conv layer. + norm_cfg (dict): Dictionary to construct and config norm layer. + init_cfg (dict or list[dict], optional): Initialization config dict. + """ + + def __init__(self, + num_classes, + in_channels, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=3, + scales_per_octave=1, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + reduction='none', + loss_weight=1.0), + loss_bbox=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=1.5), + num_head_convs=1, + num_protos=32, + use_ohem=True, + conv_cfg=None, + norm_cfg=None, + init_cfg=dict( + type='Xavier', + distribution='uniform', + bias=0, + layer='Conv2d'), + **kwargs): + self.num_head_convs = num_head_convs + self.num_protos = num_protos + self.use_ohem = use_ohem + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + super(YOLACTHead, self).__init__( + num_classes, + in_channels, + loss_cls=loss_cls, + loss_bbox=loss_bbox, + anchor_generator=anchor_generator, + init_cfg=init_cfg, + **kwargs) + if self.use_ohem: + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + self.sampling = False + + def _init_layers(self): + """Initialize layers of the head.""" + self.relu = nn.ReLU(inplace=True) + self.head_convs = ModuleList() + for i in range(self.num_head_convs): + chn = self.in_channels if i == 0 else self.feat_channels + self.head_convs.append( + ConvModule( + chn, + self.feat_channels, + 3, + stride=1, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.conv_cls = nn.Conv2d( + self.feat_channels, + self.num_anchors * self.cls_out_channels, + 3, + padding=1) + self.conv_reg = nn.Conv2d( + self.feat_channels, self.num_anchors * 4, 3, padding=1) + self.conv_coeff = nn.Conv2d( + self.feat_channels, + self.num_anchors * self.num_protos, + 3, + padding=1) + + def forward_single(self, x): + """Forward feature of a single scale level. + + Args: + x (Tensor): Features of a single scale level. + + Returns: + tuple: + cls_score (Tensor): Cls scores for a single scale level \ + the channels number is num_anchors * num_classes. + bbox_pred (Tensor): Box energies / deltas for a single scale \ + level, the channels number is num_anchors * 4. + coeff_pred (Tensor): Mask coefficients for a single scale \ + level, the channels number is num_anchors * num_protos. 
+ """ + for head_conv in self.head_convs: + x = head_conv(x) + cls_score = self.conv_cls(x) + bbox_pred = self.conv_reg(x) + coeff_pred = self.conv_coeff(x).tanh() + return cls_score, bbox_pred, coeff_pred + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """A combination of the func:``AnchorHead.loss`` and + func:``SSDHead.loss``. + + When ``self.use_ohem == True``, it functions like ``SSDHead.loss``, + otherwise, it follows ``AnchorHead.loss``. Besides, it additionally + returns ``sampling_results``. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (N, num_anchors * num_classes, H, W) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (N, num_anchors * 4, H, W) + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): Class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): Specify which bounding + boxes can be ignored when computing the loss. Default: None + + Returns: + tuple: + dict[str, Tensor]: A dictionary of loss components. + List[:obj:``SamplingResult``]: Sampler results for each image. + """ + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + assert len(featmap_sizes) == self.anchor_generator.num_levels + + device = cls_scores[0].device + + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + cls_reg_targets = self.get_targets( + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels, + unmap_outputs=not self.use_ohem, + return_sampling_results=True) + if cls_reg_targets is None: + return None + (labels_list, label_weights_list, bbox_targets_list, bbox_weights_list, + num_total_pos, num_total_neg, sampling_results) = cls_reg_targets + + if self.use_ohem: + num_images = len(img_metas) + all_cls_scores = torch.cat([ + s.permute(0, 2, 3, 1).reshape( + num_images, -1, self.cls_out_channels) for s in cls_scores + ], 1) + all_labels = torch.cat(labels_list, -1).view(num_images, -1) + all_label_weights = torch.cat(label_weights_list, + -1).view(num_images, -1) + all_bbox_preds = torch.cat([ + b.permute(0, 2, 3, 1).reshape(num_images, -1, 4) + for b in bbox_preds + ], -2) + all_bbox_targets = torch.cat(bbox_targets_list, + -2).view(num_images, -1, 4) + all_bbox_weights = torch.cat(bbox_weights_list, + -2).view(num_images, -1, 4) + + # concat all level anchors to a single tensor + all_anchors = [] + for i in range(num_images): + all_anchors.append(torch.cat(anchor_list[i])) + + # check NaN and Inf + assert torch.isfinite(all_cls_scores).all().item(), \ + 'classification scores become infinite or NaN!' + assert torch.isfinite(all_bbox_preds).all().item(), \ + 'bbox predications become infinite or NaN!' 
+            losses_cls, losses_bbox = multi_apply(
+                self.loss_single_OHEM,
+                all_cls_scores,
+                all_bbox_preds,
+                all_anchors,
+                all_labels,
+                all_label_weights,
+                all_bbox_targets,
+                all_bbox_weights,
+                num_total_samples=num_total_pos)
+        else:
+            num_total_samples = (
+                num_total_pos +
+                num_total_neg if self.sampling else num_total_pos)
+
+            # anchor number of multi levels
+            num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]]
+            # concat all level anchors and flags to a single tensor
+            concat_anchor_list = []
+            for i in range(len(anchor_list)):
+                concat_anchor_list.append(torch.cat(anchor_list[i]))
+            all_anchor_list = images_to_levels(concat_anchor_list,
+                                               num_level_anchors)
+            losses_cls, losses_bbox = multi_apply(
+                self.loss_single,
+                cls_scores,
+                bbox_preds,
+                all_anchor_list,
+                labels_list,
+                label_weights_list,
+                bbox_targets_list,
+                bbox_weights_list,
+                num_total_samples=num_total_samples)
+
+        return dict(
+            loss_cls=losses_cls, loss_bbox=losses_bbox), sampling_results
+
+    def loss_single_OHEM(self, cls_score, bbox_pred, anchors, labels,
+                         label_weights, bbox_targets, bbox_weights,
+                         num_total_samples):
+        """See func:``SSDHead.loss``."""
+        loss_cls_all = self.loss_cls(cls_score, labels, label_weights)
+
+        # FG cat_id: [0, num_classes - 1], BG cat_id: num_classes
+        pos_inds = ((labels >= 0) & (labels < self.num_classes)).nonzero(
+            as_tuple=False).reshape(-1)
+        neg_inds = (labels == self.num_classes).nonzero(
+            as_tuple=False).view(-1)
+
+        num_pos_samples = pos_inds.size(0)
+        if num_pos_samples == 0:
+            num_neg_samples = neg_inds.size(0)
+        else:
+            num_neg_samples = self.train_cfg.neg_pos_ratio * num_pos_samples
+            if num_neg_samples > neg_inds.size(0):
+                num_neg_samples = neg_inds.size(0)
+        topk_loss_cls_neg, _ = loss_cls_all[neg_inds].topk(num_neg_samples)
+        loss_cls_pos = loss_cls_all[pos_inds].sum()
+        loss_cls_neg = topk_loss_cls_neg.sum()
+        loss_cls = (loss_cls_pos + loss_cls_neg) / num_total_samples
+        if self.reg_decoded_bbox:
+            # When the regression loss (e.g. `IouLoss`, `GIouLoss`)
+            # is applied directly on the decoded bounding boxes, it
+            # decodes the already encoded coordinates to absolute format.
+            bbox_pred = self.bbox_coder.decode(anchors, bbox_pred)
+        loss_bbox = self.loss_bbox(
+            bbox_pred,
+            bbox_targets,
+            bbox_weights,
+            avg_factor=num_total_samples)
+        return loss_cls[None], loss_bbox
+
+    @force_fp32(apply_to=('cls_scores', 'bbox_preds', 'coeff_preds'))
+    def get_bboxes(self,
+                   cls_scores,
+                   bbox_preds,
+                   coeff_preds,
+                   img_metas,
+                   cfg=None,
+                   rescale=False):
+        """Similar to func:``AnchorHead.get_bboxes``, but additionally
+        processes coeff_preds.
+
+        Args:
+            cls_scores (list[Tensor]): Box scores for each scale level
+                with shape (N, num_anchors * num_classes, H, W).
+            bbox_preds (list[Tensor]): Box energies / deltas for each scale
+                level with shape (N, num_anchors * 4, H, W).
+            coeff_preds (list[Tensor]): Mask coefficients for each scale
+                level with shape (N, num_anchors * num_protos, H, W).
+            img_metas (list[dict]): Meta information of each image, e.g.,
+                image size, scaling factor, etc.
+            cfg (mmcv.Config | None): Test / postprocessing configuration,
+                if None, test_cfg would be used.
+            rescale (bool): If True, return boxes in original image space.
+                Default: False.
+
+        Returns:
+            list[tuple[Tensor, Tensor, Tensor]]: Each item in result_list is
+                a 3-tuple. The first item is an (n, 5) tensor, where the
+                first 4 columns are bounding box positions
+                (tl_x, tl_y, br_x, br_y) and the 5-th column is a score
+                between 0 and 1. The second item is an (n,) tensor where each
+                item is the predicted class label of the corresponding box.
+                The third item is an (n, num_protos) tensor where each item
+                is the predicted mask coefficients of instance inside the
+                corresponding box.
+        """
+        assert len(cls_scores) == len(bbox_preds)
+        num_levels = len(cls_scores)
+
+        device = cls_scores[0].device
+        featmap_sizes = [cls_scores[i].shape[-2:] for i in range(num_levels)]
+        mlvl_anchors = self.anchor_generator.grid_anchors(
+            featmap_sizes, device=device)
+
+        det_bboxes = []
+        det_labels = []
+        det_coeffs = []
+        for img_id in range(len(img_metas)):
+            cls_score_list = [
+                cls_scores[i][img_id].detach() for i in range(num_levels)
+            ]
+            bbox_pred_list = [
+                bbox_preds[i][img_id].detach() for i in range(num_levels)
+            ]
+            coeff_pred_list = [
+                coeff_preds[i][img_id].detach() for i in range(num_levels)
+            ]
+            img_shape = img_metas[img_id]['img_shape']
+            scale_factor = img_metas[img_id]['scale_factor']
+            bbox_res = self._get_bboxes_single(cls_score_list, bbox_pred_list,
+                                               coeff_pred_list, mlvl_anchors,
+                                               img_shape, scale_factor, cfg,
+                                               rescale)
+            det_bboxes.append(bbox_res[0])
+            det_labels.append(bbox_res[1])
+            det_coeffs.append(bbox_res[2])
+        return det_bboxes, det_labels, det_coeffs
+
+    def _get_bboxes_single(self,
+                           cls_score_list,
+                           bbox_pred_list,
+                           coeff_preds_list,
+                           mlvl_anchors,
+                           img_shape,
+                           scale_factor,
+                           cfg,
+                           rescale=False):
+        """Similar to func:``AnchorHead._get_bboxes_single``, but
+        additionally processes coeff_preds_list and uses fast NMS instead of
+        traditional NMS.
+
+        Args:
+            cls_score_list (list[Tensor]): Box scores for a single scale level
+                Has shape (num_anchors * num_classes, H, W).
+            bbox_pred_list (list[Tensor]): Box energies / deltas for a single
+                scale level with shape (num_anchors * 4, H, W).
+            coeff_preds_list (list[Tensor]): Mask coefficients for a single
+                scale level with shape (num_anchors * num_protos, H, W).
+            mlvl_anchors (list[Tensor]): Box reference for a single scale level
+                with shape (num_total_anchors, 4).
+            img_shape (tuple[int]): Shape of the input image,
+                (height, width, 3).
+            scale_factor (ndarray): Scale factor of the image arranged as
+                (w_scale, h_scale, w_scale, h_scale).
+            cfg (mmcv.Config): Test / postprocessing configuration,
+                if None, test_cfg would be used.
+            rescale (bool): If True, return boxes in original image space.
+
+        Returns:
+            tuple[Tensor, Tensor, Tensor]: The first item is an (n, 5) tensor,
+                where the first 4 columns are bounding box positions
+                (tl_x, tl_y, br_x, br_y) and the 5-th column is a score between
+                0 and 1. The second item is an (n,) tensor where each item is
+                the predicted class label of the corresponding box. The third
+                item is an (n, num_protos) tensor where each item is the
+                predicted mask coefficients of instance inside the
+                corresponding box.
+ """ + cfg = self.test_cfg if cfg is None else cfg + assert len(cls_score_list) == len(bbox_pred_list) == len(mlvl_anchors) + mlvl_bboxes = [] + mlvl_scores = [] + mlvl_coeffs = [] + for cls_score, bbox_pred, coeff_pred, anchors in \ + zip(cls_score_list, bbox_pred_list, + coeff_preds_list, mlvl_anchors): + assert cls_score.size()[-2:] == bbox_pred.size()[-2:] + cls_score = cls_score.permute(1, 2, + 0).reshape(-1, self.cls_out_channels) + if self.use_sigmoid_cls: + scores = cls_score.sigmoid() + else: + scores = cls_score.softmax(-1) + bbox_pred = bbox_pred.permute(1, 2, 0).reshape(-1, 4) + coeff_pred = coeff_pred.permute(1, 2, + 0).reshape(-1, self.num_protos) + nms_pre = cfg.get('nms_pre', -1) + if nms_pre > 0 and scores.shape[0] > nms_pre: + # Get maximum scores for foreground classes. + if self.use_sigmoid_cls: + max_scores, _ = scores.max(dim=1) + else: + # remind that we set FG labels to [0, num_class-1] + # since mmdet v2.0 + # BG cat_id: num_class + max_scores, _ = scores[:, :-1].max(dim=1) + _, topk_inds = max_scores.topk(nms_pre) + anchors = anchors[topk_inds, :] + bbox_pred = bbox_pred[topk_inds, :] + scores = scores[topk_inds, :] + coeff_pred = coeff_pred[topk_inds, :] + bboxes = self.bbox_coder.decode( + anchors, bbox_pred, max_shape=img_shape) + mlvl_bboxes.append(bboxes) + mlvl_scores.append(scores) + mlvl_coeffs.append(coeff_pred) + mlvl_bboxes = torch.cat(mlvl_bboxes) + if rescale: + mlvl_bboxes /= mlvl_bboxes.new_tensor(scale_factor) + mlvl_scores = torch.cat(mlvl_scores) + mlvl_coeffs = torch.cat(mlvl_coeffs) + if self.use_sigmoid_cls: + # Add a dummy background class to the backend when using sigmoid + # remind that we set FG labels to [0, num_class-1] since mmdet v2.0 + # BG cat_id: num_class + padding = mlvl_scores.new_zeros(mlvl_scores.shape[0], 1) + mlvl_scores = torch.cat([mlvl_scores, padding], dim=1) + det_bboxes, det_labels, det_coeffs = fast_nms(mlvl_bboxes, mlvl_scores, + mlvl_coeffs, + cfg.score_thr, + cfg.iou_thr, cfg.top_k, + cfg.max_per_img) + return det_bboxes, det_labels, det_coeffs + + +@HEADS.register_module() +class YOLACTSegmHead(BaseModule): + """YOLACT segmentation head used in https://arxiv.org/abs/1904.02689. + + Apply a semantic segmentation loss on feature space using layers that are + only evaluated during training to increase performance with no speed + penalty. + + Args: + in_channels (int): Number of channels in the input feature map. + num_classes (int): Number of categories excluding the background + category. + loss_segm (dict): Config of semantic segmentation loss. + init_cfg (dict or list[dict], optional): Initialization config dict. + """ + + def __init__(self, + num_classes, + in_channels=256, + loss_segm=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0), + init_cfg=dict( + type='Xavier', + distribution='uniform', + override=dict(name='segm_conv'))): + super(YOLACTSegmHead, self).__init__(init_cfg) + self.in_channels = in_channels + self.num_classes = num_classes + self.loss_segm = build_loss(loss_segm) + self._init_layers() + self.fp16_enabled = False + + def _init_layers(self): + """Initialize layers of the head.""" + self.segm_conv = nn.Conv2d( + self.in_channels, self.num_classes, kernel_size=1) + + def forward(self, x): + """Forward feature from the upstream network. + + Args: + x (Tensor): Feature from the upstream network, which is + a 4D-tensor. + + Returns: + Tensor: Predicted semantic segmentation map with shape + (N, num_classes, H, W). 
+ """ + return self.segm_conv(x) + + @force_fp32(apply_to=('segm_pred', )) + def loss(self, segm_pred, gt_masks, gt_labels): + """Compute loss of the head. + + Args: + segm_pred (list[Tensor]): Predicted semantic segmentation map + with shape (N, num_classes, H, W). + gt_masks (list[Tensor]): Ground truth masks for each image with + the same shape of the input image. + gt_labels (list[Tensor]): Class indices corresponding to each box. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + loss_segm = [] + num_imgs, num_classes, mask_h, mask_w = segm_pred.size() + for idx in range(num_imgs): + cur_segm_pred = segm_pred[idx] + cur_gt_masks = gt_masks[idx].float() + cur_gt_labels = gt_labels[idx] + segm_targets = self.get_targets(cur_segm_pred, cur_gt_masks, + cur_gt_labels) + if segm_targets is None: + loss = self.loss_segm(cur_segm_pred, + torch.zeros_like(cur_segm_pred), + torch.zeros_like(cur_segm_pred)) + else: + loss = self.loss_segm( + cur_segm_pred, + segm_targets, + avg_factor=num_imgs * mask_h * mask_w) + loss_segm.append(loss) + return dict(loss_segm=loss_segm) + + def get_targets(self, segm_pred, gt_masks, gt_labels): + """Compute semantic segmentation targets for each image. + + Args: + segm_pred (Tensor): Predicted semantic segmentation map + with shape (num_classes, H, W). + gt_masks (Tensor): Ground truth masks for each image with + the same shape of the input image. + gt_labels (Tensor): Class indices corresponding to each box. + + Returns: + Tensor: Semantic segmentation targets with shape + (num_classes, H, W). + """ + if gt_masks.size(0) == 0: + return None + num_classes, mask_h, mask_w = segm_pred.size() + with torch.no_grad(): + downsampled_masks = F.interpolate( + gt_masks.unsqueeze(0), (mask_h, mask_w), + mode='bilinear', + align_corners=False).squeeze(0) + downsampled_masks = downsampled_masks.gt(0.5).float() + segm_targets = torch.zeros_like(segm_pred, requires_grad=False) + for obj_idx in range(downsampled_masks.size(0)): + segm_targets[gt_labels[obj_idx] - 1] = torch.max( + segm_targets[gt_labels[obj_idx] - 1], + downsampled_masks[obj_idx]) + return segm_targets + + def simple_test(self, feats, img_metas, rescale=False): + """Test function without test-time augmentation.""" + raise NotImplementedError( + 'simple_test of YOLACTSegmHead is not implemented ' + 'because this head is only evaluated during training') + + +@HEADS.register_module() +class YOLACTProtonet(BaseModule): + """YOLACT mask head used in https://arxiv.org/abs/1904.02689. + + This head outputs the mask prototypes for YOLACT. + + Args: + in_channels (int): Number of channels in the input feature map. + proto_channels (tuple[int]): Output channels of protonet convs. + proto_kernel_sizes (tuple[int]): Kernel sizes of protonet convs. + include_last_relu (Bool): If keep the last relu of protonet. + num_protos (int): Number of prototypes. + num_classes (int): Number of categories excluding the background + category. + loss_mask_weight (float): Reweight the mask loss by this factor. + max_masks_to_train (int): Maximum number of masks to train for + each image. + init_cfg (dict or list[dict], optional): Initialization config dict. 
+ """ + + def __init__(self, + num_classes, + in_channels=256, + proto_channels=(256, 256, 256, None, 256, 32), + proto_kernel_sizes=(3, 3, 3, -2, 3, 1), + include_last_relu=True, + num_protos=32, + loss_mask_weight=1.0, + max_masks_to_train=100, + init_cfg=dict( + type='Xavier', + distribution='uniform', + override=dict(name='protonet'))): + super(YOLACTProtonet, self).__init__(init_cfg) + self.in_channels = in_channels + self.proto_channels = proto_channels + self.proto_kernel_sizes = proto_kernel_sizes + self.include_last_relu = include_last_relu + self.protonet = self._init_layers() + + self.loss_mask_weight = loss_mask_weight + self.num_protos = num_protos + self.num_classes = num_classes + self.max_masks_to_train = max_masks_to_train + self.fp16_enabled = False + + def _init_layers(self): + """A helper function to take a config setting and turn it into a + network.""" + # Possible patterns: + # ( 256, 3) -> conv + # ( 256,-2) -> deconv + # (None,-2) -> bilinear interpolate + in_channels = self.in_channels + protonets = ModuleList() + for num_channels, kernel_size in zip(self.proto_channels, + self.proto_kernel_sizes): + if kernel_size > 0: + layer = nn.Conv2d( + in_channels, + num_channels, + kernel_size, + padding=kernel_size // 2) + else: + if num_channels is None: + layer = InterpolateModule( + scale_factor=-kernel_size, + mode='bilinear', + align_corners=False) + else: + layer = nn.ConvTranspose2d( + in_channels, + num_channels, + -kernel_size, + padding=kernel_size // 2) + protonets.append(layer) + protonets.append(nn.ReLU(inplace=True)) + in_channels = num_channels if num_channels is not None \ + else in_channels + if not self.include_last_relu: + protonets = protonets[:-1] + return nn.Sequential(*protonets) + + def forward(self, x, coeff_pred, bboxes, img_meta, sampling_results=None): + """Forward feature from the upstream network to get prototypes and + linearly combine the prototypes, using masks coefficients, into + instance masks. Finally, crop the instance masks with given bboxes. + + Args: + x (Tensor): Feature from the upstream network, which is + a 4D-tensor. + coeff_pred (list[Tensor]): Mask coefficients for each scale + level with shape (N, num_anchors * num_protos, H, W). + bboxes (list[Tensor]): Box used for cropping with shape + (N, num_anchors * 4, H, W). During training, they are + ground truth boxes. During testing, they are predicted + boxes. + img_meta (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + sampling_results (List[:obj:``SamplingResult``]): Sampler results + for each image. + + Returns: + list[Tensor]: Predicted instance segmentation masks. 
+ """ + prototypes = self.protonet(x) + prototypes = prototypes.permute(0, 2, 3, 1).contiguous() + + num_imgs = x.size(0) + # Training state + if self.training: + coeff_pred_list = [] + for coeff_pred_per_level in coeff_pred: + coeff_pred_per_level = \ + coeff_pred_per_level.permute( + 0, 2, 3, 1).reshape(num_imgs, -1, self.num_protos) + coeff_pred_list.append(coeff_pred_per_level) + coeff_pred = torch.cat(coeff_pred_list, dim=1) + + mask_pred_list = [] + for idx in range(num_imgs): + cur_prototypes = prototypes[idx] + cur_coeff_pred = coeff_pred[idx] + cur_bboxes = bboxes[idx] + cur_img_meta = img_meta[idx] + + # Testing state + if not self.training: + bboxes_for_cropping = cur_bboxes + else: + cur_sampling_results = sampling_results[idx] + pos_assigned_gt_inds = \ + cur_sampling_results.pos_assigned_gt_inds + bboxes_for_cropping = cur_bboxes[pos_assigned_gt_inds].clone() + pos_inds = cur_sampling_results.pos_inds + cur_coeff_pred = cur_coeff_pred[pos_inds] + + # Linearly combine the prototypes with the mask coefficients + mask_pred = cur_prototypes @ cur_coeff_pred.t() + mask_pred = torch.sigmoid(mask_pred) + + h, w = cur_img_meta['img_shape'][:2] + bboxes_for_cropping[:, 0] /= w + bboxes_for_cropping[:, 1] /= h + bboxes_for_cropping[:, 2] /= w + bboxes_for_cropping[:, 3] /= h + + mask_pred = self.crop(mask_pred, bboxes_for_cropping) + mask_pred = mask_pred.permute(2, 0, 1).contiguous() + mask_pred_list.append(mask_pred) + return mask_pred_list + + @force_fp32(apply_to=('mask_pred', )) + def loss(self, mask_pred, gt_masks, gt_bboxes, img_meta, sampling_results): + """Compute loss of the head. + + Args: + mask_pred (list[Tensor]): Predicted prototypes with shape + (num_classes, H, W). + gt_masks (list[Tensor]): Ground truth masks for each image with + the same shape of the input image. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + img_meta (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + sampling_results (List[:obj:``SamplingResult``]): Sampler results + for each image. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + loss_mask = [] + num_imgs = len(mask_pred) + total_pos = 0 + for idx in range(num_imgs): + cur_mask_pred = mask_pred[idx] + cur_gt_masks = gt_masks[idx].float() + cur_gt_bboxes = gt_bboxes[idx] + cur_img_meta = img_meta[idx] + cur_sampling_results = sampling_results[idx] + + pos_assigned_gt_inds = cur_sampling_results.pos_assigned_gt_inds + num_pos = pos_assigned_gt_inds.size(0) + # Since we're producing (near) full image masks, + # it'd take too much vram to backprop on every single mask. + # Thus we select only a subset. + if num_pos > self.max_masks_to_train: + perm = torch.randperm(num_pos) + select = perm[:self.max_masks_to_train] + cur_mask_pred = cur_mask_pred[select] + pos_assigned_gt_inds = pos_assigned_gt_inds[select] + num_pos = self.max_masks_to_train + total_pos += num_pos + + gt_bboxes_for_reweight = cur_gt_bboxes[pos_assigned_gt_inds] + + mask_targets = self.get_targets(cur_mask_pred, cur_gt_masks, + pos_assigned_gt_inds) + if num_pos == 0: + loss = cur_mask_pred.sum() * 0. 
+ elif mask_targets is None: + loss = F.binary_cross_entropy(cur_mask_pred, + torch.zeros_like(cur_mask_pred), + torch.zeros_like(cur_mask_pred)) + else: + cur_mask_pred = torch.clamp(cur_mask_pred, 0, 1) + loss = F.binary_cross_entropy( + cur_mask_pred, mask_targets, + reduction='none') * self.loss_mask_weight + + h, w = cur_img_meta['img_shape'][:2] + gt_bboxes_width = (gt_bboxes_for_reweight[:, 2] - + gt_bboxes_for_reweight[:, 0]) / w + gt_bboxes_height = (gt_bboxes_for_reweight[:, 3] - + gt_bboxes_for_reweight[:, 1]) / h + loss = loss.mean(dim=(1, + 2)) / gt_bboxes_width / gt_bboxes_height + loss = torch.sum(loss) + loss_mask.append(loss) + + if total_pos == 0: + total_pos += 1 # avoid nan + loss_mask = [x / total_pos for x in loss_mask] + + return dict(loss_mask=loss_mask) + + def get_targets(self, mask_pred, gt_masks, pos_assigned_gt_inds): + """Compute instance segmentation targets for each image. + + Args: + mask_pred (Tensor): Predicted prototypes with shape + (num_classes, H, W). + gt_masks (Tensor): Ground truth masks for each image with + the same shape of the input image. + pos_assigned_gt_inds (Tensor): GT indices of the corresponding + positive samples. + Returns: + Tensor: Instance segmentation targets with shape + (num_instances, H, W). + """ + if gt_masks.size(0) == 0: + return None + mask_h, mask_w = mask_pred.shape[-2:] + gt_masks = F.interpolate( + gt_masks.unsqueeze(0), (mask_h, mask_w), + mode='bilinear', + align_corners=False).squeeze(0) + gt_masks = gt_masks.gt(0.5).float() + mask_targets = gt_masks[pos_assigned_gt_inds] + return mask_targets + + def get_seg_masks(self, mask_pred, label_pred, img_meta, rescale): + """Resize, binarize, and format the instance mask predictions. + + Args: + mask_pred (Tensor): shape (N, H, W). + label_pred (Tensor): shape (N, ). + img_meta (dict): Meta information of each image, e.g., + image size, scaling factor, etc. + rescale (bool): If rescale is False, then returned masks will + fit the scale of imgs[0]. + Returns: + list[ndarray]: Mask predictions grouped by their predicted classes. + """ + ori_shape = img_meta['ori_shape'] + scale_factor = img_meta['scale_factor'] + if rescale: + img_h, img_w = ori_shape[:2] + else: + img_h = np.round(ori_shape[0] * scale_factor[1]).astype(np.int32) + img_w = np.round(ori_shape[1] * scale_factor[0]).astype(np.int32) + + cls_segms = [[] for _ in range(self.num_classes)] + if mask_pred.size(0) == 0: + return cls_segms + + mask_pred = F.interpolate( + mask_pred.unsqueeze(0), (img_h, img_w), + mode='bilinear', + align_corners=False).squeeze(0) > 0.5 + mask_pred = mask_pred.cpu().numpy().astype(np.uint8) + + for m, l in zip(mask_pred, label_pred): + cls_segms[l].append(m) + return cls_segms + + def crop(self, masks, boxes, padding=1): + """Crop predicted masks by zeroing out everything not in the predicted + bbox. + + Args: + masks (Tensor): shape [H, W, N]. + boxes (Tensor): bbox coords in relative point form with + shape [N, 4]. + + Return: + Tensor: The cropped masks. 
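+
+        Example (toy values, for illustration only):
+            >>> masks = torch.ones(4, 4, 1)
+            >>> boxes = torch.tensor([[0.25, 0.25, 0.75, 0.75]])
+            >>> # self.crop(masks, boxes, padding=0) keeps only the 2x2
+            >>> # interior of the mask; cells outside the box become 0.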
+ """ + h, w, n = masks.size() + x1, x2 = self.sanitize_coordinates( + boxes[:, 0], boxes[:, 2], w, padding, cast=False) + y1, y2 = self.sanitize_coordinates( + boxes[:, 1], boxes[:, 3], h, padding, cast=False) + + rows = torch.arange( + w, device=masks.device, dtype=x1.dtype).view(1, -1, + 1).expand(h, w, n) + cols = torch.arange( + h, device=masks.device, dtype=x1.dtype).view(-1, 1, + 1).expand(h, w, n) + + masks_left = rows >= x1.view(1, 1, -1) + masks_right = rows < x2.view(1, 1, -1) + masks_up = cols >= y1.view(1, 1, -1) + masks_down = cols < y2.view(1, 1, -1) + + crop_mask = masks_left * masks_right * masks_up * masks_down + + return masks * crop_mask.float() + + def sanitize_coordinates(self, x1, x2, img_size, padding=0, cast=True): + """Sanitizes the input coordinates so that x1 < x2, x1 != x2, x1 >= 0, + and x2 <= image_size. Also converts from relative to absolute + coordinates and casts the results to long tensors. + + Warning: this does things in-place behind the scenes so + copy if necessary. + + Args: + _x1 (Tensor): shape (N, ). + _x2 (Tensor): shape (N, ). + img_size (int): Size of the input image. + padding (int): x1 >= padding, x2 <= image_size-padding. + cast (bool): If cast is false, the result won't be cast to longs. + + Returns: + tuple: + x1 (Tensor): Sanitized _x1. + x2 (Tensor): Sanitized _x2. + """ + x1 = x1 * img_size + x2 = x2 * img_size + if cast: + x1 = x1.long() + x2 = x2.long() + x1 = torch.min(x1, x2) + x2 = torch.max(x1, x2) + x1 = torch.clamp(x1 - padding, min=0) + x2 = torch.clamp(x2 + padding, max=img_size) + return x1, x2 + + def simple_test(self, + feats, + det_bboxes, + det_labels, + det_coeffs, + img_metas, + rescale=False): + """Test function without test-time augmentation. + + Args: + feats (tuple[torch.Tensor]): Multi-level features from the + upstream network, each is a 4D-tensor. + det_bboxes (list[Tensor]): BBox results of each image. each + element is (n, 5) tensor, where 5 represent + (tl_x, tl_y, br_x, br_y, score) and the score between 0 and 1. + det_labels (list[Tensor]): BBox results of each image. each + element is (n, ) tensor, each element represents the class + label of the corresponding box. + det_coeffs (list[Tensor]): BBox coefficient of each image. each + element is (n, m) tensor, m is vector length. + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. + + Returns: + list[list]: encoded masks. The c-th item in the outer list + corresponds to the c-th class. Given the c-th outer list, the + i-th item in that inner list is the mask for the i-th box with + class label c. + """ + num_imgs = len(img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + segm_results = [[[] for _ in range(self.num_classes)] + for _ in range(num_imgs)] + else: + # if det_bboxes is rescaled to the original image size, we need to + # rescale it back to the testing scale to obtain RoIs. 
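+            # (scale_factor is laid out as (w_scale, h_scale, w_scale,
+            # h_scale), matching the (x1, y1, x2, y2) box columns, so a
+            # plain multiply below restores network-input coordinates.)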
+ if rescale and not isinstance(scale_factors[0], float): + scale_factors = [ + torch.from_numpy(scale_factor).to(det_bboxes[0].device) + for scale_factor in scale_factors + ] + _bboxes = [ + det_bboxes[i][:, :4] * + scale_factors[i] if rescale else det_bboxes[i][:, :4] + for i in range(len(det_bboxes)) + ] + mask_preds = self.forward(feats[0], det_coeffs, _bboxes, img_metas) + # apply mask post-processing to each image individually + segm_results = [] + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + segm_results.append([[] for _ in range(self.num_classes)]) + else: + segm_result = self.get_seg_masks(mask_preds[i], + det_labels[i], + img_metas[i], rescale) + segm_results.append(segm_result) + return segm_results + + +class InterpolateModule(BaseModule): + """This is a module version of F.interpolate. + + Any arguments you give it just get passed along for the ride. + """ + + def __init__(self, *args, init_cfg=None, **kwargs): + super().__init__(init_cfg) + + self.args = args + self.kwargs = kwargs + + def forward(self, x): + """Forward features from the upstream network.""" + return F.interpolate(x, *self.args, **self.kwargs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/yolo_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/yolo_head.py new file mode 100644 index 0000000000000000000000000000000000000000..ea5c802e6e6df47e941bd3d9cb3d835daa384f2a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/yolo_head.py @@ -0,0 +1,622 @@ +# Copyright (c) 2019 Western Digital Corporation or its affiliates. + +import warnings + +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import (ConvModule, bias_init_with_prob, constant_init, is_norm, + normal_init) +from mmcv.runner import force_fp32 + +from mmdet.core import (build_anchor_generator, build_assigner, + build_bbox_coder, build_sampler, images_to_levels, + multi_apply, multiclass_nms) +from ..builder import HEADS, build_loss +from .base_dense_head import BaseDenseHead +from .dense_test_mixins import BBoxTestMixin + + +@HEADS.register_module() +class YOLOV3Head(BaseDenseHead, BBoxTestMixin): + """YOLOV3Head Paper link: https://arxiv.org/abs/1804.02767. + + Args: + num_classes (int): The number of object classes (w/o background) + in_channels (List[int]): Number of input channels per scale. + out_channels (List[int]): The number of output channels per scale + before the final 1x1 layer. Default: (1024, 512, 256). + anchor_generator (dict): Config dict for anchor generator + bbox_coder (dict): Config of bounding box coder. + featmap_strides (List[int]): The stride of each scale. + Should be in descending order. Default: (32, 16, 8). + one_hot_smoother (float): Set a non-zero value to enable label-smooth + Default: 0. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True) + act_cfg (dict): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). + loss_cls (dict): Config of classification loss. + loss_conf (dict): Config of confidence loss. + loss_xy (dict): Config of xy coordinate loss. + loss_wh (dict): Config of wh coordinate loss. + train_cfg (dict): Training config of YOLOV3 head. Default: None. + test_cfg (dict): Testing config of YOLOV3 head. Default: None. + init_cfg (dict or list[dict], optional): Initialization config dict. 
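+
+    Example (a minimal sketch; channel and spatial sizes are illustrative):
+        >>> import torch
+        >>> head = YOLOV3Head(num_classes=80, in_channels=[512, 256, 128])
+        >>> feats = [torch.rand(1, c, s, s)
+        ...          for c, s in zip([512, 256, 128], [10, 20, 40])]
+        >>> pred_maps, = head.forward(feats)
+        >>> # one map per level, each of shape (1, 3 * (5 + 80), H, W)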
+ """ + + def __init__(self, + num_classes, + in_channels, + out_channels=(1024, 512, 256), + anchor_generator=dict( + type='YOLOAnchorGenerator', + base_sizes=[[(116, 90), (156, 198), (373, 326)], + [(30, 61), (62, 45), (59, 119)], + [(10, 13), (16, 30), (33, 23)]], + strides=[32, 16, 8]), + bbox_coder=dict(type='YOLOBBoxCoder'), + featmap_strides=[32, 16, 8], + one_hot_smoother=0., + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0), + loss_conf=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0), + loss_xy=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0), + loss_wh=dict(type='MSELoss', loss_weight=1.0), + train_cfg=None, + test_cfg=None, + init_cfg=dict( + type='Normal', std=0.01, + override=dict(name='convs_pred'))): + super(YOLOV3Head, self).__init__(init_cfg) + # Check params + assert (len(in_channels) == len(out_channels) == len(featmap_strides)) + + self.num_classes = num_classes + self.in_channels = in_channels + self.out_channels = out_channels + self.featmap_strides = featmap_strides + self.train_cfg = train_cfg + self.test_cfg = test_cfg + if self.train_cfg: + self.assigner = build_assigner(self.train_cfg.assigner) + if hasattr(self.train_cfg, 'sampler'): + sampler_cfg = self.train_cfg.sampler + else: + sampler_cfg = dict(type='PseudoSampler') + self.sampler = build_sampler(sampler_cfg, context=self) + self.fp16_enabled = False + + self.one_hot_smoother = one_hot_smoother + + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + + self.bbox_coder = build_bbox_coder(bbox_coder) + self.anchor_generator = build_anchor_generator(anchor_generator) + + self.loss_cls = build_loss(loss_cls) + self.loss_conf = build_loss(loss_conf) + self.loss_xy = build_loss(loss_xy) + self.loss_wh = build_loss(loss_wh) + # usually the numbers of anchors for each level are the same + # except SSD detectors + self.num_anchors = self.anchor_generator.num_base_anchors[0] + assert len( + self.anchor_generator.num_base_anchors) == len(featmap_strides) + self._init_layers() + + @property + def num_levels(self): + return len(self.featmap_strides) + + @property + def num_attrib(self): + """int: number of attributes in pred_map, bboxes (4) + + objectness (1) + num_classes""" + + return 5 + self.num_classes + + def _init_layers(self): + self.convs_bridge = nn.ModuleList() + self.convs_pred = nn.ModuleList() + for i in range(self.num_levels): + conv_bridge = ConvModule( + self.in_channels[i], + self.out_channels[i], + 3, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg) + conv_pred = nn.Conv2d(self.out_channels[i], + self.num_anchors * self.num_attrib, 1) + + self.convs_bridge.append(conv_bridge) + self.convs_pred.append(conv_pred) + + def init_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + normal_init(m, mean=0, std=0.01) + if is_norm(m): + constant_init(m, 1) + + # Use prior in model initialization to improve stability + for conv_pred, stride in zip(self.convs_pred, self.featmap_strides): + bias = conv_pred.bias.reshape(self.num_anchors, -1) + # init objectness with prior of 8 objects per feature map + # refer to https://github.com/ultralytics/yolov3 + nn.init.constant_(bias.data[:, 4], + bias_init_with_prob(8 / (608 / stride)**2)) + nn.init.constant_(bias.data[:, 5:], bias_init_with_prob(0.01)) + + def forward(self, feats): + 
"""Forward features from the upstream network. + + Args: + feats (tuple[Tensor]): Features from the upstream network, each is + a 4D-tensor. + + Returns: + tuple[Tensor]: A tuple of multi-level predication map, each is a + 4D-tensor of shape (batch_size, 5+num_classes, height, width). + """ + + assert len(feats) == self.num_levels + pred_maps = [] + for i in range(self.num_levels): + x = feats[i] + x = self.convs_bridge[i](x) + pred_map = self.convs_pred[i](x) + pred_maps.append(pred_map) + + return tuple(pred_maps), + + @force_fp32(apply_to=('pred_maps', )) + def get_bboxes(self, + pred_maps, + img_metas, + cfg=None, + rescale=False, + with_nms=True): + """Transform network output for a batch into bbox predictions. + + Args: + pred_maps (list[Tensor]): Raw predictions for a batch of images. + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used. Default: None. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is an (n, 5) tensor, where 5 represent + (tl_x, tl_y, br_x, br_y, score) and the score between 0 and 1. + The shape of the second tensor in the tuple is (n,), and + each element represents the class label of the corresponding + box. + """ + num_levels = len(pred_maps) + pred_maps_list = [pred_maps[i].detach() for i in range(num_levels)] + scale_factors = [ + img_metas[i]['scale_factor'] + for i in range(pred_maps_list[0].shape[0]) + ] + result_list = self._get_bboxes(pred_maps_list, scale_factors, cfg, + rescale, with_nms) + return result_list + + def _get_bboxes(self, + pred_maps_list, + scale_factors, + cfg, + rescale=False, + with_nms=True): + """Transform outputs for a single batch item into bbox predictions. + + Args: + pred_maps_list (list[Tensor]): Prediction maps for different scales + of each single image in the batch. + scale_factors (list(ndarray)): Scale factor of the image arrange as + (w_scale, h_scale, w_scale, h_scale). + cfg (mmcv.Config | None): Test / postprocessing configuration, + if None, test_cfg would be used. + rescale (bool): If True, return boxes in original image space. + Default: False. + with_nms (bool): If True, do nms before return boxes. + Default: True. + + Returns: + list[tuple[Tensor, Tensor]]: Each item in result_list is 2-tuple. + The first item is an (n, 5) tensor, where 5 represent + (tl_x, tl_y, br_x, br_y, score) and the score between 0 and 1. + The shape of the second tensor in the tuple is (n,), and + each element represents the class label of the corresponding + box. 
+ """ + cfg = self.test_cfg if cfg is None else cfg + assert len(pred_maps_list) == self.num_levels + + device = pred_maps_list[0].device + batch_size = pred_maps_list[0].shape[0] + + featmap_sizes = [ + pred_maps_list[i].shape[-2:] for i in range(self.num_levels) + ] + multi_lvl_anchors = self.anchor_generator.grid_anchors( + featmap_sizes, device) + # convert to tensor to keep tracing + nms_pre_tensor = torch.tensor( + cfg.get('nms_pre', -1), device=device, dtype=torch.long) + + multi_lvl_bboxes = [] + multi_lvl_cls_scores = [] + multi_lvl_conf_scores = [] + for i in range(self.num_levels): + # get some key info for current scale + pred_map = pred_maps_list[i] + stride = self.featmap_strides[i] + # (b,h, w, num_anchors*num_attrib) -> + # (b,h*w*num_anchors, num_attrib) + pred_map = pred_map.permute(0, 2, 3, + 1).reshape(batch_size, -1, + self.num_attrib) + # Inplace operation like + # ```pred_map[..., :2] = \torch.sigmoid(pred_map[..., :2])``` + # would create constant tensor when exporting to onnx + pred_map_conf = torch.sigmoid(pred_map[..., :2]) + pred_map_rest = pred_map[..., 2:] + pred_map = torch.cat([pred_map_conf, pred_map_rest], dim=-1) + pred_map_boxes = pred_map[..., :4] + multi_lvl_anchor = multi_lvl_anchors[i] + multi_lvl_anchor = multi_lvl_anchor.expand_as(pred_map_boxes) + bbox_pred = self.bbox_coder.decode(multi_lvl_anchor, + pred_map_boxes, stride) + # conf and cls + conf_pred = torch.sigmoid(pred_map[..., 4]) + cls_pred = torch.sigmoid(pred_map[..., 5:]).view( + batch_size, -1, self.num_classes) # Cls pred one-hot. + + # Get top-k prediction + from mmdet.core.export import get_k_for_topk + nms_pre = get_k_for_topk(nms_pre_tensor, bbox_pred.shape[1]) + if nms_pre > 0: + _, topk_inds = conf_pred.topk(nms_pre) + batch_inds = torch.arange(batch_size).view( + -1, 1).expand_as(topk_inds).long() + # Avoid onnx2tensorrt issue in https://github.com/NVIDIA/TensorRT/issues/1134 # noqa: E501 + if torch.onnx.is_in_onnx_export(): + transformed_inds = ( + bbox_pred.shape[1] * batch_inds + topk_inds) + bbox_pred = bbox_pred.reshape( + -1, 4)[transformed_inds, :].reshape(batch_size, -1, 4) + cls_pred = cls_pred.reshape( + -1, self.num_classes)[transformed_inds, :].reshape( + batch_size, -1, self.num_classes) + conf_pred = conf_pred.reshape(-1, + 1)[transformed_inds].reshape( + batch_size, -1) + else: + bbox_pred = bbox_pred[batch_inds, topk_inds, :] + cls_pred = cls_pred[batch_inds, topk_inds, :] + conf_pred = conf_pred[batch_inds, topk_inds] + # Save the result of current scale + multi_lvl_bboxes.append(bbox_pred) + multi_lvl_cls_scores.append(cls_pred) + multi_lvl_conf_scores.append(conf_pred) + + # Merge the results of different scales together + batch_mlvl_bboxes = torch.cat(multi_lvl_bboxes, dim=1) + batch_mlvl_scores = torch.cat(multi_lvl_cls_scores, dim=1) + batch_mlvl_conf_scores = torch.cat(multi_lvl_conf_scores, dim=1) + + # Replace multiclass_nms with ONNX::NonMaxSuppression in deployment + if torch.onnx.is_in_onnx_export() and with_nms: + from mmdet.core.export import add_dummy_nms_for_onnx + conf_thr = cfg.get('conf_thr', -1) + score_thr = cfg.get('score_thr', -1) + # follow original pipeline of YOLOv3 + if conf_thr > 0: + mask = (batch_mlvl_conf_scores >= conf_thr).float() + batch_mlvl_conf_scores *= mask + if score_thr > 0: + mask = (batch_mlvl_scores > score_thr).float() + batch_mlvl_scores *= mask + batch_mlvl_conf_scores = batch_mlvl_conf_scores.unsqueeze( + 2).expand_as(batch_mlvl_scores) + batch_mlvl_scores = batch_mlvl_scores * batch_mlvl_conf_scores + 
max_output_boxes_per_class = cfg.nms.get( + 'max_output_boxes_per_class', 200) + iou_threshold = cfg.nms.get('iou_threshold', 0.5) + # keep aligned with original pipeline, improve + # mAP by 1% for YOLOv3 in ONNX + score_threshold = 0 + nms_pre = cfg.get('deploy_nms_pre', -1) + return add_dummy_nms_for_onnx( + batch_mlvl_bboxes, + batch_mlvl_scores, + max_output_boxes_per_class, + iou_threshold, + score_threshold, + nms_pre, + cfg.max_per_img, + ) + + if with_nms and (batch_mlvl_conf_scores.size(0) == 0): + return torch.zeros((0, 5)), torch.zeros((0, )) + + if rescale: + batch_mlvl_bboxes /= batch_mlvl_bboxes.new_tensor( + scale_factors).unsqueeze(1) + + # In mmdet 2.x, the class_id for background is num_classes. + # i.e., the last column. + padding = batch_mlvl_scores.new_zeros(batch_size, + batch_mlvl_scores.shape[1], 1) + batch_mlvl_scores = torch.cat([batch_mlvl_scores, padding], dim=-1) + + # Support exporting to onnx without nms + if with_nms and cfg.get('nms', None) is not None: + det_results = [] + for (mlvl_bboxes, mlvl_scores, + mlvl_conf_scores) in zip(batch_mlvl_bboxes, batch_mlvl_scores, + batch_mlvl_conf_scores): + # Filtering out all predictions with conf < conf_thr + conf_thr = cfg.get('conf_thr', -1) + if conf_thr > 0 and (not torch.onnx.is_in_onnx_export()): + # TensorRT not support NonZero + # add as_tuple=False for compatibility in Pytorch 1.6 + # flatten would create a Reshape op with constant values, + # and raise RuntimeError when doing inference in ONNX + # Runtime with a different input image (#4221). + conf_inds = mlvl_conf_scores.ge(conf_thr).nonzero( + as_tuple=False).squeeze(1) + mlvl_bboxes = mlvl_bboxes[conf_inds, :] + mlvl_scores = mlvl_scores[conf_inds, :] + mlvl_conf_scores = mlvl_conf_scores[conf_inds] + + det_bboxes, det_labels = multiclass_nms( + mlvl_bboxes, + mlvl_scores, + cfg.score_thr, + cfg.nms, + cfg.max_per_img, + score_factors=mlvl_conf_scores) + det_results.append(tuple([det_bboxes, det_labels])) + + else: + det_results = [ + tuple(mlvl_bs) + for mlvl_bs in zip(batch_mlvl_bboxes, batch_mlvl_scores, + batch_mlvl_conf_scores) + ] + return det_results + + @force_fp32(apply_to=('pred_maps', )) + def loss(self, + pred_maps, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute loss of the head. + + Args: + pred_maps (list[Tensor]): Prediction map for each scale level, + shape (N, num_anchors * num_attrib, H, W) + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. 
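+
+        Example (hypothetical call; assumes a head built with a train_cfg
+        so that the assigner and sampler exist):
+            >>> losses = head.loss(pred_maps, gt_bboxes, gt_labels, img_metas)
+            >>> sorted(losses.keys())
+            ['loss_cls', 'loss_conf', 'loss_wh', 'loss_xy']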
+ """ + num_imgs = len(img_metas) + device = pred_maps[0][0].device + + featmap_sizes = [ + pred_maps[i].shape[-2:] for i in range(self.num_levels) + ] + multi_level_anchors = self.anchor_generator.grid_anchors( + featmap_sizes, device) + anchor_list = [multi_level_anchors for _ in range(num_imgs)] + + responsible_flag_list = [] + for img_id in range(len(img_metas)): + responsible_flag_list.append( + self.anchor_generator.responsible_flags( + featmap_sizes, gt_bboxes[img_id], device)) + + target_maps_list, neg_maps_list = self.get_targets( + anchor_list, responsible_flag_list, gt_bboxes, gt_labels) + + losses_cls, losses_conf, losses_xy, losses_wh = multi_apply( + self.loss_single, pred_maps, target_maps_list, neg_maps_list) + + return dict( + loss_cls=losses_cls, + loss_conf=losses_conf, + loss_xy=losses_xy, + loss_wh=losses_wh) + + def loss_single(self, pred_map, target_map, neg_map): + """Compute loss of a single image from a batch. + + Args: + pred_map (Tensor): Raw predictions for a single level. + target_map (Tensor): The Ground-Truth target for a single level. + neg_map (Tensor): The negative masks for a single level. + + Returns: + tuple: + loss_cls (Tensor): Classification loss. + loss_conf (Tensor): Confidence loss. + loss_xy (Tensor): Regression loss of x, y coordinate. + loss_wh (Tensor): Regression loss of w, h coordinate. + """ + + num_imgs = len(pred_map) + pred_map = pred_map.permute(0, 2, 3, + 1).reshape(num_imgs, -1, self.num_attrib) + neg_mask = neg_map.float() + pos_mask = target_map[..., 4] + pos_and_neg_mask = neg_mask + pos_mask + pos_mask = pos_mask.unsqueeze(dim=-1) + if torch.max(pos_and_neg_mask) > 1.: + warnings.warn('There is overlap between pos and neg sample.') + pos_and_neg_mask = pos_and_neg_mask.clamp(min=0., max=1.) + + pred_xy = pred_map[..., :2] + pred_wh = pred_map[..., 2:4] + pred_conf = pred_map[..., 4] + pred_label = pred_map[..., 5:] + + target_xy = target_map[..., :2] + target_wh = target_map[..., 2:4] + target_conf = target_map[..., 4] + target_label = target_map[..., 5:] + + loss_cls = self.loss_cls(pred_label, target_label, weight=pos_mask) + loss_conf = self.loss_conf( + pred_conf, target_conf, weight=pos_and_neg_mask) + loss_xy = self.loss_xy(pred_xy, target_xy, weight=pos_mask) + loss_wh = self.loss_wh(pred_wh, target_wh, weight=pos_mask) + + return loss_cls, loss_conf, loss_xy, loss_wh + + def get_targets(self, anchor_list, responsible_flag_list, gt_bboxes_list, + gt_labels_list): + """Compute target maps for anchors in multiple images. + + Args: + anchor_list (list[list[Tensor]]): Multi level anchors of each + image. The outer list indicates images, and the inner list + corresponds to feature levels of the image. Each element of + the inner list is a tensor of shape (num_total_anchors, 4). + responsible_flag_list (list[list[Tensor]]): Multi level responsible + flags of each image. Each element is a tensor of shape + (num_total_anchors, ) + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image. + gt_labels_list (list[Tensor]): Ground truth labels of each box. + + Returns: + tuple: Usually returns a tuple containing learning targets. + - target_map_list (list[Tensor]): Target map of each level. + - neg_map_list (list[Tensor]): Negative map of each level. 
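+
+        Example (shape sketch, illustrative):
+            >>> # With a anchors per cell and C classes, each entry of
+            >>> # target_map_list has shape (num_imgs, a * H_i * W_i, 5 + C)
+            >>> # and each entry of neg_map_list (num_imgs, a * H_i * W_i).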
+        """
+        num_imgs = len(anchor_list)
+
+        # anchor number of multi levels
+        num_level_anchors = [anchors.size(0) for anchors in anchor_list[0]]
+
+        results = multi_apply(self._get_targets_single, anchor_list,
+                              responsible_flag_list, gt_bboxes_list,
+                              gt_labels_list)
+
+        all_target_maps, all_neg_maps = results
+        assert num_imgs == len(all_target_maps) == len(all_neg_maps)
+        target_maps_list = images_to_levels(all_target_maps, num_level_anchors)
+        neg_maps_list = images_to_levels(all_neg_maps, num_level_anchors)
+
+        return target_maps_list, neg_maps_list
+
+    def _get_targets_single(self, anchors, responsible_flags, gt_bboxes,
+                            gt_labels):
+        """Generate matching bounding box prior and converted GT.
+
+        Args:
+            anchors (list[Tensor]): Multi-level anchors of the image.
+            responsible_flags (list[Tensor]): Multi-level responsible flags of
+                the anchors.
+            gt_bboxes (Tensor): Ground truth bboxes of single image.
+            gt_labels (Tensor): Ground truth labels of single image.
+
+        Returns:
+            tuple:
+                target_map (Tensor): Prediction target map of each
+                    scale level, shape (num_total_anchors,
+                    5+num_classes)
+                neg_map (Tensor): Negative map of each scale level,
+                    shape (num_total_anchors,)
+        """
+
+        anchor_strides = []
+        for i in range(len(anchors)):
+            anchor_strides.append(
+                torch.tensor(self.featmap_strides[i],
+                             device=gt_bboxes.device).repeat(len(anchors[i])))
+        concat_anchors = torch.cat(anchors)
+        concat_responsible_flags = torch.cat(responsible_flags)
+
+        anchor_strides = torch.cat(anchor_strides)
+        assert len(anchor_strides) == len(concat_anchors) == \
+               len(concat_responsible_flags)
+        assign_result = self.assigner.assign(concat_anchors,
+                                             concat_responsible_flags,
+                                             gt_bboxes)
+        sampling_result = self.sampler.sample(assign_result, concat_anchors,
+                                              gt_bboxes)
+
+        target_map = concat_anchors.new_zeros(
+            concat_anchors.size(0), self.num_attrib)
+
+        target_map[sampling_result.pos_inds, :4] = self.bbox_coder.encode(
+            sampling_result.pos_bboxes, sampling_result.pos_gt_bboxes,
+            anchor_strides[sampling_result.pos_inds])
+
+        target_map[sampling_result.pos_inds, 4] = 1
+
+        gt_labels_one_hot = F.one_hot(
+            gt_labels, num_classes=self.num_classes).float()
+        if self.one_hot_smoother != 0:  # label smooth
+            gt_labels_one_hot = gt_labels_one_hot * (
+                1 - self.one_hot_smoother
+            ) + self.one_hot_smoother / self.num_classes
+        target_map[sampling_result.pos_inds, 5:] = gt_labels_one_hot[
+            sampling_result.pos_assigned_gt_inds]
+
+        neg_map = concat_anchors.new_zeros(
+            concat_anchors.size(0), dtype=torch.uint8)
+        neg_map[sampling_result.neg_inds] = 1
+
+        return target_map, neg_map
+
+    def aug_test(self, feats, img_metas, rescale=False):
+        """Test function with test time augmentation.
+
+        Args:
+            feats (list[Tensor]): the outer list indicates test-time
+                augmentations and inner Tensor should have a shape NxCxHxW,
+                which contains features for all images in the batch.
+            img_metas (list[list[dict]]): the outer list indicates test-time
+                augs (multiscale, flip, etc.) and the inner list indicates
+                images in a batch. each dict has image information.
+            rescale (bool, optional): Whether to rescale the results.
+                Defaults to False.
+
+        Returns:
+            list[ndarray]: bbox results of each class
+        """
+        return self.aug_test_bboxes(feats, img_metas, rescale=rescale)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/yolof_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/yolof_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..e15d4d4a6f40150af803c196b8f5db3d68a61f6d
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/dense_heads/yolof_head.py
@@ -0,0 +1,415 @@
+import torch
+import torch.nn as nn
+from mmcv.cnn import (ConvModule, bias_init_with_prob, constant_init, is_norm,
+                      normal_init)
+from mmcv.runner import force_fp32
+
+from mmdet.core import anchor_inside_flags, multi_apply, reduce_mean, unmap
+from ..builder import HEADS
+from .anchor_head import AnchorHead
+
+INF = 1e8
+
+
+def levels_to_images(mlvl_tensor):
+    """Concat multi-level feature maps by image.
+
+    [feature_level0, feature_level1...] -> [feature_image0, feature_image1...]
+    Convert the shape of each element in mlvl_tensor from (N, C, H, W) to
+    (N, H*W, C), then split the element to N elements with shape (H*W, C), and
+    concat the elements of the same image across all levels along the first
+    dimension.
+
+    Args:
+        mlvl_tensor (list[torch.Tensor]): list of Tensors collected from the
+            corresponding levels. Each element is of shape (N, C, H, W)
+
+    Returns:
+        list[torch.Tensor]: A list that contains N tensors and each tensor is
+            of shape (num_elements, C)
+    """
+    batch_size = mlvl_tensor[0].size(0)
+    batch_list = [[] for _ in range(batch_size)]
+    channels = mlvl_tensor[0].size(1)
+    for t in mlvl_tensor:
+        t = t.permute(0, 2, 3, 1)
+        t = t.view(batch_size, -1, channels).contiguous()
+        for img in range(batch_size):
+            batch_list[img].append(t[img])
+    return [torch.cat(item, 0) for item in batch_list]
+
+
+@HEADS.register_module()
+class YOLOFHead(AnchorHead):
+    """YOLOFHead Paper link: https://arxiv.org/abs/2103.09460.
+
+    Args:
+        num_classes (int): The number of object classes (w/o background)
+        in_channels (List[int]): The number of input channels per scale.
+        num_cls_convs (int): The number of convolutions of cls branch.
+            Default 2.
+        num_reg_convs (int): The number of convolutions of reg branch.
+            Default 4.
+        norm_cfg (dict): Dictionary to construct and config norm layer.
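+
+    Example (illustrative; the remaining keyword arguments follow
+    :class:`AnchorHead`):
+        >>> # YOLOF decodes from a single C5-level feature map, so
+        >>> # ``forward_single`` emits one (cls_score, bbox_reg) pair:
+        >>> # cls_score: (N, num_anchors * num_classes, H, W)
+        >>> # bbox_reg:  (N, num_anchors * 4, H, W)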
+ """ + + def __init__(self, + num_classes, + in_channels, + num_cls_convs=2, + num_reg_convs=4, + norm_cfg=dict(type='BN', requires_grad=True), + **kwargs): + self.num_cls_convs = num_cls_convs + self.num_reg_convs = num_reg_convs + self.norm_cfg = norm_cfg + super(YOLOFHead, self).__init__(num_classes, in_channels, **kwargs) + + def _init_layers(self): + cls_subnet = [] + bbox_subnet = [] + for i in range(self.num_cls_convs): + cls_subnet.append( + ConvModule( + self.in_channels, + self.in_channels, + kernel_size=3, + padding=1, + norm_cfg=self.norm_cfg)) + for i in range(self.num_reg_convs): + bbox_subnet.append( + ConvModule( + self.in_channels, + self.in_channels, + kernel_size=3, + padding=1, + norm_cfg=self.norm_cfg)) + self.cls_subnet = nn.Sequential(*cls_subnet) + self.bbox_subnet = nn.Sequential(*bbox_subnet) + self.cls_score = nn.Conv2d( + self.in_channels, + self.num_anchors * self.num_classes, + kernel_size=3, + stride=1, + padding=1) + self.bbox_pred = nn.Conv2d( + self.in_channels, + self.num_anchors * 4, + kernel_size=3, + stride=1, + padding=1) + self.object_pred = nn.Conv2d( + self.in_channels, + self.num_anchors, + kernel_size=3, + stride=1, + padding=1) + + def init_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + normal_init(m, mean=0, std=0.01) + if is_norm(m): + constant_init(m, 1) + + # Use prior in model initialization to improve stability + bias_cls = bias_init_with_prob(0.01) + torch.nn.init.constant_(self.cls_score.bias, bias_cls) + + def forward_single(self, feature): + cls_score = self.cls_score(self.cls_subnet(feature)) + N, _, H, W = cls_score.shape + cls_score = cls_score.view(N, -1, self.num_classes, H, W) + + reg_feat = self.bbox_subnet(feature) + bbox_reg = self.bbox_pred(reg_feat) + objectness = self.object_pred(reg_feat) + + # implicit objectness + objectness = objectness.view(N, -1, 1, H, W) + normalized_cls_score = cls_score + objectness - torch.log( + 1. + torch.clamp(cls_score.exp(), max=INF) + + torch.clamp(objectness.exp(), max=INF)) + normalized_cls_score = normalized_cls_score.view(N, -1, H, W) + return normalized_cls_score, bbox_reg + + @force_fp32(apply_to=('cls_scores', 'bbox_preds')) + def loss(self, + cls_scores, + bbox_preds, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=None): + """Compute losses of the head. + + Args: + cls_scores (list[Tensor]): Box scores for each scale level + Has shape (batch, num_anchors * num_classes, h, w) + bbox_preds (list[Tensor]): Box energies / deltas for each scale + level with shape (batch, num_anchors * 4, h, w) + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + img_metas (list[dict]): Meta information of each image, e.g., + image size, scaling factor, etc. + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. Default: None + + Returns: + dict[str, Tensor]: A dictionary of loss components. 
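+
+        Example (hypothetical call on a fully built head):
+            >>> losses = head.loss(cls_scores, bbox_preds,
+            ...                    gt_bboxes, gt_labels, img_metas)
+            >>> sorted(losses.keys())
+            ['loss_bbox', 'loss_cls']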
+ """ + assert len(cls_scores) == 1 + assert self.anchor_generator.num_levels == 1 + + device = cls_scores[0].device + featmap_sizes = [featmap.size()[-2:] for featmap in cls_scores] + anchor_list, valid_flag_list = self.get_anchors( + featmap_sizes, img_metas, device=device) + + # The output level is always 1 + anchor_list = [anchors[0] for anchors in anchor_list] + valid_flag_list = [valid_flags[0] for valid_flags in valid_flag_list] + + cls_scores_list = levels_to_images(cls_scores) + bbox_preds_list = levels_to_images(bbox_preds) + + label_channels = self.cls_out_channels if self.use_sigmoid_cls else 1 + cls_reg_targets = self.get_targets( + cls_scores_list, + bbox_preds_list, + anchor_list, + valid_flag_list, + gt_bboxes, + img_metas, + gt_bboxes_ignore_list=gt_bboxes_ignore, + gt_labels_list=gt_labels, + label_channels=label_channels) + if cls_reg_targets is None: + return None + (batch_labels, batch_label_weights, num_total_pos, num_total_neg, + batch_bbox_weights, batch_pos_predicted_boxes, + batch_target_boxes) = cls_reg_targets + + flatten_labels = batch_labels.reshape(-1) + batch_label_weights = batch_label_weights.reshape(-1) + cls_score = cls_scores[0].permute(0, 2, 3, + 1).reshape(-1, self.cls_out_channels) + + num_total_samples = (num_total_pos + + num_total_neg) if self.sampling else num_total_pos + num_total_samples = reduce_mean( + cls_score.new_tensor(num_total_samples)).clamp_(1.0).item() + + # classification loss + loss_cls = self.loss_cls( + cls_score, + flatten_labels, + batch_label_weights, + avg_factor=num_total_samples) + + # regression loss + if batch_pos_predicted_boxes.shape[0] == 0: + # no pos sample + loss_bbox = batch_pos_predicted_boxes.sum() * 0 + else: + loss_bbox = self.loss_bbox( + batch_pos_predicted_boxes, + batch_target_boxes, + batch_bbox_weights.float(), + avg_factor=num_total_samples) + + return dict(loss_cls=loss_cls, loss_bbox=loss_bbox) + + def get_targets(self, + cls_scores_list, + bbox_preds_list, + anchor_list, + valid_flag_list, + gt_bboxes_list, + img_metas, + gt_bboxes_ignore_list=None, + gt_labels_list=None, + label_channels=1, + unmap_outputs=True): + """Compute regression and classification targets for anchors in + multiple images. + + Args: + cls_scores_list (list[Tensor]): Classification scores of + each image. each is a 4D-tensor, the shape is + (h * w, num_anchors * num_classes). + bbox_preds_list (list[Tensor]): Bbox preds of each image. + each is a 4D-tensor, the shape is (h * w, num_anchors * 4). + anchor_list (list[Tensor]): Anchors of each image. Each element of + is a tensor of shape (h * w * num_anchors, 4). + valid_flag_list (list[Tensor]): Valid flags of each image. Each + element of is a tensor of shape (h * w * num_anchors, ) + gt_bboxes_list (list[Tensor]): Ground truth bboxes of each image. + img_metas (list[dict]): Meta info of each image. + gt_bboxes_ignore_list (list[Tensor]): Ground truth bboxes to be + ignored. + gt_labels_list (list[Tensor]): Ground truth labels of each box. + label_channels (int): Channel of label. + unmap_outputs (bool): Whether to map outputs back to the original + set of anchors. + + Returns: + tuple: Usually returns a tuple containing learning targets. + + - batch_labels (Tensor): Label of all images. Each element \ + of is a tensor of shape (batch, h * w * num_anchors) + - batch_label_weights (Tensor): Label weights of all images \ + of is a tensor of shape (batch, h * w * num_anchors) + - num_total_pos (int): Number of positive samples in all \ + images. 
+ - num_total_neg (int): Number of negative samples in all \ + images. + additional_returns: This function enables user-defined returns from + `self._get_targets_single`. These returns are currently refined + to properties at each feature map (i.e. having HxW dimension). + The results will be concatenated after the end + """ + num_imgs = len(img_metas) + assert len(anchor_list) == len(valid_flag_list) == num_imgs + + # compute targets for each image + if gt_bboxes_ignore_list is None: + gt_bboxes_ignore_list = [None for _ in range(num_imgs)] + if gt_labels_list is None: + gt_labels_list = [None for _ in range(num_imgs)] + results = multi_apply( + self._get_targets_single, + bbox_preds_list, + anchor_list, + valid_flag_list, + gt_bboxes_list, + gt_bboxes_ignore_list, + gt_labels_list, + img_metas, + label_channels=label_channels, + unmap_outputs=unmap_outputs) + (all_labels, all_label_weights, pos_inds_list, neg_inds_list, + sampling_results_list) = results[:5] + rest_results = list(results[5:]) # user-added return values + # no valid anchors + if any([labels is None for labels in all_labels]): + return None + # sampled anchors of all images + num_total_pos = sum([max(inds.numel(), 1) for inds in pos_inds_list]) + num_total_neg = sum([max(inds.numel(), 1) for inds in neg_inds_list]) + + batch_labels = torch.stack(all_labels, 0) + batch_label_weights = torch.stack(all_label_weights, 0) + + res = (batch_labels, batch_label_weights, num_total_pos, num_total_neg) + for i, rests in enumerate(rest_results): # user-added return values + rest_results[i] = torch.cat(rests, 0) + + return res + tuple(rest_results) + + def _get_targets_single(self, + bbox_preds, + flat_anchors, + valid_flags, + gt_bboxes, + gt_bboxes_ignore, + gt_labels, + img_meta, + label_channels=1, + unmap_outputs=True): + """Compute regression and classification targets for anchors in a + single image. + + Args: + bbox_preds (Tensor): Bbox prediction of the image, which + shape is (h * w ,4) + flat_anchors (Tensor): Anchors of the image, which shape is + (h * w * num_anchors ,4) + valid_flags (Tensor): Valid flags of the image, which shape is + (h * w * num_anchors,). + gt_bboxes (Tensor): Ground truth bboxes of the image, + shape (num_gts, 4). + gt_bboxes_ignore (Tensor): Ground truth bboxes to be + ignored, shape (num_ignored_gts, 4). + img_meta (dict): Meta info of the image. + gt_labels (Tensor): Ground truth labels of each box, + shape (num_gts,). + label_channels (int): Channel of label. + unmap_outputs (bool): Whether to map outputs back to the original + set of anchors. + + Returns: + tuple: + labels (Tensor): Labels of image, which shape is + (h * w * num_anchors, ). + label_weights (Tensor): Label weights of image, which shape is + (h * w * num_anchors, ). + pos_inds (Tensor): Pos index of image. + neg_inds (Tensor): Neg index of image. + sampling_result (obj:`SamplingResult`): Sampling result. + pos_bbox_weights (Tensor): The Weight of using to calculate + the bbox branch loss, which shape is (num, ). + pos_predicted_boxes (Tensor): boxes predicted value of + using to calculate the bbox branch loss, which shape is + (num, 4). + pos_target_boxes (Tensor): boxes target value of + using to calculate the bbox branch loss, which shape is + (num, 4). 
+ """ + inside_flags = anchor_inside_flags(flat_anchors, valid_flags, + img_meta['img_shape'][:2], + self.train_cfg.allowed_border) + if not inside_flags.any(): + return (None, ) * 8 + # assign gt and sample anchors + anchors = flat_anchors[inside_flags, :] + bbox_preds = bbox_preds.reshape(-1, 4) + bbox_preds = bbox_preds[inside_flags, :] + + # decoded bbox + decoder_bbox_preds = self.bbox_coder.decode(anchors, bbox_preds) + assign_result = self.assigner.assign( + decoder_bbox_preds, anchors, gt_bboxes, gt_bboxes_ignore, + None if self.sampling else gt_labels) + + pos_bbox_weights = assign_result.get_extra_property('pos_idx') + pos_predicted_boxes = assign_result.get_extra_property( + 'pos_predicted_boxes') + pos_target_boxes = assign_result.get_extra_property('target_boxes') + + sampling_result = self.sampler.sample(assign_result, anchors, + gt_bboxes) + num_valid_anchors = anchors.shape[0] + labels = anchors.new_full((num_valid_anchors, ), + self.num_classes, + dtype=torch.long) + label_weights = anchors.new_zeros(num_valid_anchors, dtype=torch.float) + + pos_inds = sampling_result.pos_inds + neg_inds = sampling_result.neg_inds + if len(pos_inds) > 0: + if gt_labels is None: + # Only rpn gives gt_labels as None + # Foreground is the first class since v2.5.0 + labels[pos_inds] = 0 + else: + labels[pos_inds] = gt_labels[ + sampling_result.pos_assigned_gt_inds] + if self.train_cfg.pos_weight <= 0: + label_weights[pos_inds] = 1.0 + else: + label_weights[pos_inds] = self.train_cfg.pos_weight + if len(neg_inds) > 0: + label_weights[neg_inds] = 1.0 + + # map up to original set of anchors + if unmap_outputs: + num_total_anchors = flat_anchors.size(0) + labels = unmap( + labels, num_total_anchors, inside_flags, + fill=self.num_classes) # fill bg label + label_weights = unmap(label_weights, num_total_anchors, + inside_flags) + + return (labels, label_weights, pos_inds, neg_inds, sampling_result, + pos_bbox_weights, pos_predicted_boxes, pos_target_boxes) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a74700747031ed5bff28954e76fce1e258309cec --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/__init__.py @@ -0,0 +1,44 @@ +from .atss import ATSS +from .autoassign import AutoAssign +from .base import BaseDetector +from .cascade_rcnn import CascadeRCNN +from .centernet import CenterNet +from .cornernet import CornerNet +from .deformable_detr import DeformableDETR +from .detr import DETR +from .fast_rcnn import FastRCNN +from .faster_rcnn import FasterRCNN +from .fcos import FCOS +from .fovea import FOVEA +from .fsaf import FSAF +from .gfl import GFL +from .grid_rcnn import GridRCNN +from .htc import HybridTaskCascade +from .kd_one_stage import KnowledgeDistillationSingleStageDetector +from .mask_rcnn import MaskRCNN +from .mask_scoring_rcnn import MaskScoringRCNN +from .nasfcos import NASFCOS +from .paa import PAA +from .point_rend import PointRend +from .reppoints_detector import RepPointsDetector +from .retinanet import RetinaNet +from .rpn import RPN +from .scnet import SCNet +from .single_stage import SingleStageDetector +from .sparse_rcnn import SparseRCNN +from .trident_faster_rcnn import TridentFasterRCNN +from .two_stage import TwoStageDetector +from .vfnet import VFNet +from .yolact import YOLACT +from .yolo import YOLOV3 +from .yolof import YOLOF + +__all__ = [ + 
'ATSS', 'BaseDetector', 'SingleStageDetector', 'TwoStageDetector', 'RPN', + 'KnowledgeDistillationSingleStageDetector', 'FastRCNN', 'FasterRCNN', + 'MaskRCNN', 'CascadeRCNN', 'HybridTaskCascade', 'RetinaNet', 'FCOS', + 'GridRCNN', 'MaskScoringRCNN', 'RepPointsDetector', 'FOVEA', 'FSAF', + 'NASFCOS', 'PointRend', 'GFL', 'CornerNet', 'PAA', 'YOLOV3', 'YOLACT', + 'VFNet', 'DETR', 'TridentFasterRCNN', 'SparseRCNN', 'SCNet', + 'DeformableDETR', 'AutoAssign', 'YOLOF', 'CenterNet' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/atss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/atss.py new file mode 100644 index 0000000000000000000000000000000000000000..e28f457c828b93b8aa66196e09e3103ee6221c82 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/atss.py @@ -0,0 +1,18 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class ATSS(SingleStageDetector): + """Implementation of `ATSS `_.""" + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(ATSS, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained, init_cfg) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/autoassign.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/autoassign.py new file mode 100644 index 0000000000000000000000000000000000000000..1bc03091cb561ce4ab5e5277cc865797cf266bb4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/autoassign.py @@ -0,0 +1,18 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class AutoAssign(SingleStageDetector): + """Implementation of `AutoAssign: Differentiable Label Assignment for Dense + Object Detection `_.""" + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None): + super(AutoAssign, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/base.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/base.py new file mode 100644 index 0000000000000000000000000000000000000000..a6d157a03abc3b5ffb087d27fbed07f9aa70d5f3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/base.py @@ -0,0 +1,349 @@ +from abc import ABCMeta, abstractmethod +from collections import OrderedDict + +import mmcv +import numpy as np +import torch +import torch.distributed as dist +from mmcv.runner import BaseModule, auto_fp16 + +from mmdet.core.visualization import imshow_det_bboxes + + +class BaseDetector(BaseModule, metaclass=ABCMeta): + """Base class for detectors.""" + + def __init__(self, init_cfg=None): + super(BaseDetector, self).__init__(init_cfg) + self.fp16_enabled = False + + @property + def with_neck(self): + """bool: whether the detector has a neck""" + return hasattr(self, 'neck') and self.neck is not None + + # TODO: these properties need to be carefully handled + # for both single stage & two stage detectors + @property + def with_shared_head(self): + """bool: whether the detector has a shared head in the RoI Head""" + return hasattr(self, 'roi_head') and self.roi_head.with_shared_head + + @property + def with_bbox(self): + """bool: whether the detector has a bbox head""" + return 
((hasattr(self, 'roi_head') and self.roi_head.with_bbox) + or (hasattr(self, 'bbox_head') and self.bbox_head is not None)) + + @property + def with_mask(self): + """bool: whether the detector has a mask head""" + return ((hasattr(self, 'roi_head') and self.roi_head.with_mask) + or (hasattr(self, 'mask_head') and self.mask_head is not None)) + + @abstractmethod + def extract_feat(self, imgs): + """Extract features from images.""" + pass + + def extract_feats(self, imgs): + """Extract features from multiple images. + + Args: + imgs (list[torch.Tensor]): A list of images. The images are + augmented from the same image but in different ways. + + Returns: + list[torch.Tensor]: Features of different images + """ + assert isinstance(imgs, list) + return [self.extract_feat(img) for img in imgs] + + def forward_train(self, imgs, img_metas, **kwargs): + """ + Args: + img (list[Tensor]): List of tensors of shape (1, C, H, W). + Typically these should be mean centered and std scaled. + img_metas (list[dict]): List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys, see + :class:`mmdet.datasets.pipelines.Collect`. + kwargs (keyword arguments): Specific to concrete implementation. + """ + # NOTE the batched image size information may be useful, e.g. + # in DETR, this is needed for the construction of masks, which is + # then used for the transformer_head. + batch_input_shape = tuple(imgs[0].size()[-2:]) + for img_meta in img_metas: + img_meta['batch_input_shape'] = batch_input_shape + + async def async_simple_test(self, img, img_metas, **kwargs): + raise NotImplementedError + + @abstractmethod + def simple_test(self, img, img_metas, **kwargs): + pass + + @abstractmethod + def aug_test(self, imgs, img_metas, **kwargs): + """Test function with test time augmentation.""" + pass + + async def aforward_test(self, *, img, img_metas, **kwargs): + for var, name in [(img, 'img'), (img_metas, 'img_metas')]: + if not isinstance(var, list): + raise TypeError(f'{name} must be a list, but got {type(var)}') + + num_augs = len(img) + if num_augs != len(img_metas): + raise ValueError(f'num of augmentations ({len(img)}) ' + f'!= num of image metas ({len(img_metas)})') + # TODO: remove the restriction of samples_per_gpu == 1 when prepared + samples_per_gpu = img[0].size(0) + assert samples_per_gpu == 1 + + if num_augs == 1: + return await self.async_simple_test(img[0], img_metas[0], **kwargs) + else: + raise NotImplementedError + + def forward_test(self, imgs, img_metas, **kwargs): + """ + Args: + imgs (List[Tensor]): the outer list indicates test-time + augmentations and inner Tensor should have a shape NxCxHxW, + which contains all images in the batch. + img_metas (List[List[dict]]): the outer list indicates test-time + augs (multiscale, flip, etc.) and the inner list indicates + images in a batch. + """ + for var, name in [(imgs, 'imgs'), (img_metas, 'img_metas')]: + if not isinstance(var, list): + raise TypeError(f'{name} must be a list, but got {type(var)}') + + num_augs = len(imgs) + if num_augs != len(img_metas): + raise ValueError(f'num of augmentations ({len(imgs)}) ' + f'!= num of image meta ({len(img_metas)})') + + # NOTE the batched image size information may be useful, e.g. + # in DETR, this is needed for the construction of masks, which is + # then used for the transformer_head. 
+        for img, img_meta in zip(imgs, img_metas):
+            batch_size = len(img_meta)
+            for img_id in range(batch_size):
+                img_meta[img_id]['batch_input_shape'] = tuple(img.size()[-2:])
+
+        if num_augs == 1:
+            # proposals (List[List[Tensor]]): the outer list indicates
+            # test-time augs (multiscale, flip, etc.) and the inner list
+            # indicates images in a batch.
+            # The Tensor should have a shape Px4, where P is the number of
+            # proposals.
+            if 'proposals' in kwargs:
+                kwargs['proposals'] = kwargs['proposals'][0]
+            return self.simple_test(imgs[0], img_metas[0], **kwargs)
+        else:
+            assert imgs[0].size(0) == 1, 'aug test does not support ' \
+                                         'inference with batch size ' \
+                                         f'{imgs[0].size(0)}'
+            # TODO: support test augmentation for predefined proposals
+            assert 'proposals' not in kwargs
+            return self.aug_test(imgs, img_metas, **kwargs)
+
+    @auto_fp16(apply_to=('img', ))
+    def forward(self, img, img_metas, return_loss=True, **kwargs):
+        """Calls either :func:`forward_train` or :func:`forward_test` depending
+        on whether ``return_loss`` is ``True``.
+
+        Note this setting will change the expected inputs. When
+        ``return_loss=True``, img and img_meta are single-nested (i.e. Tensor
+        and List[dict]), and when ``return_loss=False``, img and img_meta
+        should be double nested (i.e. List[Tensor], List[List[dict]]), with
+        the outer list indicating test time augmentations.
+        """
+        if torch.onnx.is_in_onnx_export():
+            assert len(img_metas) == 1
+            return self.onnx_export(img[0], img_metas[0])
+
+        if return_loss:
+            return self.forward_train(img, img_metas, **kwargs)
+        else:
+            return self.forward_test(img, img_metas, **kwargs)
+
+    def _parse_losses(self, losses):
+        """Parse the raw outputs (losses) of the network.
+
+        Args:
+            losses (dict): Raw output of the network, which usually contains
+                losses and other necessary information.
+
+        Returns:
+            tuple[Tensor, dict]: (loss, log_vars), loss is the loss tensor \
+                which may be a weighted sum of all losses, log_vars contains \
+                all the variables to be sent to the logger.
+        """
+        log_vars = OrderedDict()
+        for loss_name, loss_value in losses.items():
+            if isinstance(loss_value, torch.Tensor):
+                log_vars[loss_name] = loss_value.mean()
+            elif isinstance(loss_value, list):
+                log_vars[loss_name] = sum(_loss.mean() for _loss in loss_value)
+            else:
+                raise TypeError(
+                    f'{loss_name} is not a tensor or list of tensors')
+
+        loss = sum(_value for _key, _value in log_vars.items()
+                   if 'loss' in _key)
+
+        log_vars['loss'] = loss
+        for loss_name, loss_value in log_vars.items():
+            # reduce loss when distributed training
+            if dist.is_available() and dist.is_initialized():
+                loss_value = loss_value.data.clone()
+                dist.all_reduce(loss_value.div_(dist.get_world_size()))
+            log_vars[loss_name] = loss_value.item()
+
+        return loss, log_vars
+
+    def train_step(self, data, optimizer):
+        """The iteration step during training.
+
+        This method defines an iteration step during training, except for the
+        back propagation and optimizer updating, which are done in an optimizer
+        hook. Note that in some complicated cases or models, the whole process
+        including back propagation and optimizer updating is also defined in
+        this method, such as GAN.
+
+        Args:
+            data (dict): The output of dataloader.
+            optimizer (:obj:`torch.optim.Optimizer` | dict): The optimizer of
+                runner is passed to ``train_step()``. This argument is unused
+                and reserved.
+
+        Returns:
+            dict: It should contain at least 3 keys: ``loss``, ``log_vars``, \
+                ``num_samples``.
+ + - ``loss`` is a tensor for back propagation, which can be a \ + weighted sum of multiple losses. + - ``log_vars`` contains all the variables to be sent to the + logger. + - ``num_samples`` indicates the batch size (when the model is \ + DDP, it means the batch size on each GPU), which is used for \ + averaging the logs. + """ + losses = self(**data) + loss, log_vars = self._parse_losses(losses) + + outputs = dict( + loss=loss, log_vars=log_vars, num_samples=len(data['img_metas'])) + + return outputs + + def val_step(self, data, optimizer=None): + """The iteration step during validation. + + This method shares the same signature as :func:`train_step`, but used + during val epochs. Note that the evaluation after training epochs is + not implemented with this method, but an evaluation hook. + """ + losses = self(**data) + loss, log_vars = self._parse_losses(losses) + + outputs = dict( + loss=loss, log_vars=log_vars, num_samples=len(data['img_metas'])) + + return outputs + + def show_result(self, + img, + result, + score_thr=0.3, + bbox_color=(72, 101, 241), + text_color=(72, 101, 241), + mask_color=None, + thickness=2, + font_size=13, + win_name='', + show=False, + wait_time=0, + out_file=None): + """Draw `result` over `img`. + + Args: + img (str or Tensor): The image to be displayed. + result (Tensor or tuple): The results to draw over `img` + bbox_result or (bbox_result, segm_result). + score_thr (float, optional): Minimum score of bboxes to be shown. + Default: 0.3. + bbox_color (str or tuple(int) or :obj:`Color`):Color of bbox lines. + The tuple of color should be in BGR order. Default: 'green' + text_color (str or tuple(int) or :obj:`Color`):Color of texts. + The tuple of color should be in BGR order. Default: 'green' + mask_color (None or str or tuple(int) or :obj:`Color`): + Color of masks. The tuple of color should be in BGR order. + Default: None + thickness (int): Thickness of lines. Default: 2 + font_size (int): Font size of texts. Default: 13 + win_name (str): The window name. Default: '' + wait_time (float): Value of waitKey param. + Default: 0. + show (bool): Whether to show the image. + Default: False. + out_file (str or None): The filename to write the image. + Default: None. 
+    def show_result(self,
+                    img,
+                    result,
+                    score_thr=0.3,
+                    bbox_color=(72, 101, 241),
+                    text_color=(72, 101, 241),
+                    mask_color=None,
+                    thickness=2,
+                    font_size=13,
+                    win_name='',
+                    show=False,
+                    wait_time=0,
+                    out_file=None):
+        """Draw `result` over `img`.
+
+        Args:
+            img (str or Tensor): The image to be displayed.
+            result (Tensor or tuple): The results to draw over `img`
+                bbox_result or (bbox_result, segm_result).
+            score_thr (float, optional): Minimum score of bboxes to be shown.
+                Default: 0.3.
+            bbox_color (str or tuple(int) or :obj:`Color`): Color of bbox
+                lines. The tuple of color should be in BGR order.
+                Default: (72, 101, 241).
+            text_color (str or tuple(int) or :obj:`Color`): Color of texts.
+                The tuple of color should be in BGR order.
+                Default: (72, 101, 241).
+            mask_color (None or str or tuple(int) or :obj:`Color`):
+                Color of masks. The tuple of color should be in BGR order.
+                Default: None.
+            thickness (int): Thickness of lines. Default: 2.
+            font_size (int): Font size of texts. Default: 13.
+            win_name (str): The window name. Default: ''.
+            wait_time (float): Value of waitKey param. Default: 0.
+            show (bool): Whether to show the image. Default: False.
+            out_file (str or None): The filename to write the image.
+                Default: None.
+
+        Returns:
+            np.ndarray: The image with results drawn on it, returned only
+                when neither ``show`` nor ``out_file`` is set.
+        """
+        img = mmcv.imread(img)
+        img = img.copy()
+        if isinstance(result, tuple):
+            bbox_result, segm_result = result
+            if isinstance(segm_result, tuple):
+                segm_result = segm_result[0]  # ms rcnn
+        else:
+            bbox_result, segm_result = result, None
+        bboxes = np.vstack(bbox_result)
+        labels = [
+            np.full(bbox.shape[0], i, dtype=np.int32)
+            for i, bbox in enumerate(bbox_result)
+        ]
+        labels = np.concatenate(labels)
+        # draw segmentation masks
+        segms = None
+        if segm_result is not None and len(labels) > 0:  # non empty
+            segms = mmcv.concat_list(segm_result)
+            if isinstance(segms[0], torch.Tensor):
+                segms = torch.stack(segms, dim=0).detach().cpu().numpy()
+            else:
+                segms = np.stack(segms, axis=0)
+        # if out_file specified, do not show image in window
+        if out_file is not None:
+            show = False
+        # draw bounding boxes
+        img = imshow_det_bboxes(
+            img,
+            bboxes,
+            labels,
+            segms,
+            class_names=self.CLASSES,
+            score_thr=score_thr,
+            bbox_color=bbox_color,
+            text_color=text_color,
+            mask_color=mask_color,
+            thickness=thickness,
+            font_size=font_size,
+            win_name=win_name,
+            show=show,
+            wait_time=wait_time,
+            out_file=out_file)
+
+        if not (show or out_file):
+            return img
+
+    def onnx_export(self, img, img_metas):
+        raise NotImplementedError(f'{self.__class__.__name__} does '
+                                  f'not support ONNX export')
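For orientation, the per-class result format that `show_result` consumes is flattened exactly as in the body above; a fully runnable sketch with a toy two-class result (class count and values illustrative):

```python
import numpy as np

# One (n, 5) array per class, columns [x1, y1, x2, y2, score].
bbox_result = [
    np.array([[10., 20., 110., 220., 0.95]]),  # class 0: one detection
    np.empty((0, 5)),                          # class 1: no detections
]

# show_result stacks boxes and builds a parallel label array before drawing.
bboxes = np.vstack(bbox_result)                # shape (1, 5)
labels = np.concatenate([
    np.full(b.shape[0], i, dtype=np.int32)
    for i, b in enumerate(bbox_result)
])                                             # array([0], dtype=int32)
print(bboxes.shape, labels)
```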
+ """ + if self.with_mask: + ms_bbox_result, ms_segm_result = result + if isinstance(ms_bbox_result, dict): + result = (ms_bbox_result['ensemble'], + ms_segm_result['ensemble']) + else: + if isinstance(result, dict): + result = result['ensemble'] + return super(CascadeRCNN, self).show_result(data, result, **kwargs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/centernet.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/centernet.py new file mode 100644 index 0000000000000000000000000000000000000000..f7c3ecd70a69e685b946f9fa65333d10fb88974f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/centernet.py @@ -0,0 +1,110 @@ +import torch + +from mmdet.core import bbox2result +from mmdet.models.builder import DETECTORS +from ...core.utils import flip_tensor +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class CenterNet(SingleStageDetector): + """Implementation of CenterNet(Objects as Points) + + . + """ + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(CenterNet, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained, init_cfg) + + def merge_aug_results(self, aug_results, with_nms): + """Merge augmented detection bboxes and score. + + Args: + aug_results (list[list[Tensor]]): Det_bboxes and det_labels of each + image. + with_nms (bool): If True, do nms before return boxes. + + Returns: + tuple: (out_bboxes, out_labels) + """ + recovered_bboxes, aug_labels = [], [] + for single_result in aug_results: + recovered_bboxes.append(single_result[0][0]) + aug_labels.append(single_result[0][1]) + + bboxes = torch.cat(recovered_bboxes, dim=0).contiguous() + labels = torch.cat(aug_labels).contiguous() + if with_nms: + out_bboxes, out_labels = self.bbox_head._bboxes_nms( + bboxes, labels, self.bbox_head.test_cfg) + else: + out_bboxes, out_labels = bboxes, labels + + return out_bboxes, out_labels + + def aug_test(self, imgs, img_metas, rescale=True): + """Augment testing of CenterNet. Aug test must have flipped image pair, + and unlike CornerNet, it will perform an averaging operation on the + feature map instead of detecting bbox. + + Args: + imgs (list[Tensor]): Augmented images. + img_metas (list[list[dict]]): Meta information of each image, e.g., + image size, scaling factor, etc. + rescale (bool): If True, return boxes in original image space. + Default: True. + + Note: + ``imgs`` must including flipped image pairs. + + Returns: + list[list[np.ndarray]]: BBox results of each image and classes. + The outer list corresponds to each image. The inner list + corresponds to each class. 
+ """ + img_inds = list(range(len(imgs))) + assert img_metas[0][0]['flip'] + img_metas[1][0]['flip'], ( + 'aug test must have flipped image pair') + aug_results = [] + for ind, flip_ind in zip(img_inds[0::2], img_inds[1::2]): + flip_direction = img_metas[flip_ind][0]['flip_direction'] + img_pair = torch.cat([imgs[ind], imgs[flip_ind]]) + x = self.extract_feat(img_pair) + center_heatmap_preds, wh_preds, offset_preds = self.bbox_head(x) + assert len(center_heatmap_preds) == len(wh_preds) == len( + offset_preds) == 1 + + # Feature map averaging + center_heatmap_preds[0] = ( + center_heatmap_preds[0][0:1] + + flip_tensor(center_heatmap_preds[0][1:2], flip_direction)) / 2 + wh_preds[0] = (wh_preds[0][0:1] + + flip_tensor(wh_preds[0][1:2], flip_direction)) / 2 + + bbox_list = self.bbox_head.get_bboxes( + center_heatmap_preds, + wh_preds, [offset_preds[0][0:1]], + img_metas[ind], + rescale=rescale, + with_nms=False) + aug_results.append(bbox_list) + + nms_cfg = self.bbox_head.test_cfg.get('nms_cfg', None) + if nms_cfg is None: + with_nms = False + else: + with_nms = True + bbox_list = [self.merge_aug_results(aug_results, with_nms)] + bbox_results = [ + bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes) + for det_bboxes, det_labels in bbox_list + ] + return bbox_results diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/cornernet.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/cornernet.py new file mode 100644 index 0000000000000000000000000000000000000000..b6dc60334819f2ae3cd3a61d902f74f550a5247f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/cornernet.py @@ -0,0 +1,96 @@ +import torch + +from mmdet.core import bbox2result, bbox_mapping_back +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class CornerNet(SingleStageDetector): + """CornerNet. + + This detector is the implementation of the paper `CornerNet: Detecting + Objects as Paired Keypoints `_ . + """ + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(CornerNet, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained, init_cfg) + + def merge_aug_results(self, aug_results, img_metas): + """Merge augmented detection bboxes and score. + + Args: + aug_results (list[list[Tensor]]): Det_bboxes and det_labels of each + image. + img_metas (list[list[dict]]): Meta information of each image, e.g., + image size, scaling factor, etc. + + Returns: + tuple: (bboxes, labels) + """ + recovered_bboxes, aug_labels = [], [] + for bboxes_labels, img_info in zip(aug_results, img_metas): + img_shape = img_info[0]['img_shape'] # using shape before padding + scale_factor = img_info[0]['scale_factor'] + flip = img_info[0]['flip'] + bboxes, labels = bboxes_labels + bboxes, scores = bboxes[:, :4], bboxes[:, -1:] + bboxes = bbox_mapping_back(bboxes, img_shape, scale_factor, flip) + recovered_bboxes.append(torch.cat([bboxes, scores], dim=-1)) + aug_labels.append(labels) + + bboxes = torch.cat(recovered_bboxes, dim=0) + labels = torch.cat(aug_labels) + + if bboxes.shape[0] > 0: + out_bboxes, out_labels = self.bbox_head._bboxes_nms( + bboxes, labels, self.bbox_head.test_cfg) + else: + out_bboxes, out_labels = bboxes, labels + + return out_bboxes, out_labels + + def aug_test(self, imgs, img_metas, rescale=False): + """Augment testing of CornerNet. + + Args: + imgs (list[Tensor]): Augmented images. 
+ img_metas (list[list[dict]]): Meta information of each image, e.g., + image size, scaling factor, etc. + rescale (bool): If True, return boxes in original image space. + Default: False. + + Note: + ``imgs`` must including flipped image pairs. + + Returns: + list[list[np.ndarray]]: BBox results of each image and classes. + The outer list corresponds to each image. The inner list + corresponds to each class. + """ + img_inds = list(range(len(imgs))) + + assert img_metas[0][0]['flip'] + img_metas[1][0]['flip'], ( + 'aug test must have flipped image pair') + aug_results = [] + for ind, flip_ind in zip(img_inds[0::2], img_inds[1::2]): + img_pair = torch.cat([imgs[ind], imgs[flip_ind]]) + x = self.extract_feat(img_pair) + outs = self.bbox_head(x) + bbox_list = self.bbox_head.get_bboxes( + *outs, [img_metas[ind], img_metas[flip_ind]], False, False) + aug_results.append(bbox_list[0]) + aug_results.append(bbox_list[1]) + + bboxes, labels = self.merge_aug_results(aug_results, img_metas) + bbox_results = bbox2result(bboxes, labels, self.bbox_head.num_classes) + + return [bbox_results] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/deformable_detr.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/deformable_detr.py new file mode 100644 index 0000000000000000000000000000000000000000..947550fb0ba3976308d9acc95c8d77b07e9dd423 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/deformable_detr.py @@ -0,0 +1,9 @@ +from ..builder import DETECTORS +from .detr import DETR + + +@DETECTORS.register_module() +class DeformableDETR(DETR): + + def __init__(self, *args, **kwargs): + super(DETR, self).__init__(*args, **kwargs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/detr.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/detr.py new file mode 100644 index 0000000000000000000000000000000000000000..1defa94be2f636a8adea783179a96bfb83d685d7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/detr.py @@ -0,0 +1,46 @@ +import torch + +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class DETR(SingleStageDetector): + r"""Implementation of `DETR: End-to-End Object Detection with + Transformers `_""" + + def __init__(self, + backbone, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(DETR, self).__init__(backbone, None, bbox_head, train_cfg, + test_cfg, pretrained, init_cfg) + + # over-write `onnx_export` because: + # (1) the forward of bbox_head requires img_metas + # (2) the different behavior (e.g. construction of `masks`) between + # torch and ONNX model, during the forward of bbox_head + def onnx_export(self, img, img_metas): + """Test function for exporting to ONNX, without test time augmentation. + + Args: + img (torch.Tensor): input images. + img_metas (list[dict]): List of image information. + + Returns: + tuple[Tensor, Tensor]: dets of shape [N, num_det, 5] + and class labels of shape [N, num_det]. 
+ """ + x = self.extract_feat(img) + # forward of this head requires img_metas + outs = self.bbox_head.forward_onnx(x, img_metas) + # get shape as tensor + img_shape = torch._shape_as_tensor(img)[2:] + img_metas[0]['img_shape_for_onnx'] = img_shape + + det_bboxes, det_labels = self.bbox_head.onnx_export(*outs, img_metas) + + return det_bboxes, det_labels diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/fast_rcnn.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/fast_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..4dd56199c1095bc33ff153ca42a05917f2019ec1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/fast_rcnn.py @@ -0,0 +1,54 @@ +from ..builder import DETECTORS +from .two_stage import TwoStageDetector + + +@DETECTORS.register_module() +class FastRCNN(TwoStageDetector): + """Implementation of `Fast R-CNN `_""" + + def __init__(self, + backbone, + roi_head, + train_cfg, + test_cfg, + neck=None, + pretrained=None, + init_cfg=None): + super(FastRCNN, self).__init__( + backbone=backbone, + neck=neck, + roi_head=roi_head, + train_cfg=train_cfg, + test_cfg=test_cfg, + pretrained=pretrained, + init_cfg=init_cfg) + + def forward_test(self, imgs, img_metas, proposals, **kwargs): + """ + Args: + imgs (List[Tensor]): the outer list indicates test-time + augmentations and inner Tensor should have a shape NxCxHxW, + which contains all images in the batch. + img_metas (List[List[dict]]): the outer list indicates test-time + augs (multiscale, flip, etc.) and the inner list indicates + images in a batch. + proposals (List[List[Tensor]]): the outer list indicates test-time + augs (multiscale, flip, etc.) and the inner list indicates + images in a batch. The Tensor should have a shape Px4, where + P is the number of proposals. 
+ """ + for var, name in [(imgs, 'imgs'), (img_metas, 'img_metas')]: + if not isinstance(var, list): + raise TypeError(f'{name} must be a list, but got {type(var)}') + + num_augs = len(imgs) + if num_augs != len(img_metas): + raise ValueError(f'num of augmentations ({len(imgs)}) ' + f'!= num of image meta ({len(img_metas)})') + + if num_augs == 1: + return self.simple_test(imgs[0], img_metas[0], proposals[0], + **kwargs) + else: + # TODO: support test-time augmentation + assert NotImplementedError diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/faster_rcnn.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/faster_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..f6a7244d658ba43c61786c83e1c5d4248e673886 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/faster_rcnn.py @@ -0,0 +1,26 @@ +from ..builder import DETECTORS +from .two_stage import TwoStageDetector + + +@DETECTORS.register_module() +class FasterRCNN(TwoStageDetector): + """Implementation of `Faster R-CNN `_""" + + def __init__(self, + backbone, + rpn_head, + roi_head, + train_cfg, + test_cfg, + neck=None, + pretrained=None, + init_cfg=None): + super(FasterRCNN, self).__init__( + backbone=backbone, + neck=neck, + rpn_head=rpn_head, + roi_head=roi_head, + train_cfg=train_cfg, + test_cfg=test_cfg, + pretrained=pretrained, + init_cfg=init_cfg) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/fcos.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/fcos.py new file mode 100644 index 0000000000000000000000000000000000000000..df1d0bc54dec7e0bac26f286eec40cf1233f3a82 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/fcos.py @@ -0,0 +1,18 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class FCOS(SingleStageDetector): + """Implementation of `FCOS `_""" + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(FCOS, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained, init_cfg) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/fovea.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/fovea.py new file mode 100644 index 0000000000000000000000000000000000000000..f7c756217d30e20702efc3b18cb8b1e5931e16d6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/fovea.py @@ -0,0 +1,18 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class FOVEA(SingleStageDetector): + """Implementation of `FoveaBox `_""" + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(FOVEA, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained, init_cfg) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/fsaf.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/fsaf.py new file mode 100644 index 0000000000000000000000000000000000000000..b859c72950fc25a78253d99b7f50d7c0a4e56267 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/fsaf.py @@ -0,0 +1,18 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() 
+class FSAF(SingleStageDetector): + """Implementation of `FSAF `_""" + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(FSAF, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained, init_cfg) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/gfl.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/gfl.py new file mode 100644 index 0000000000000000000000000000000000000000..29bdb6b51c8d0ee015bc04ca80fc454bb9a8e026 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/gfl.py @@ -0,0 +1,17 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class GFL(SingleStageDetector): + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(GFL, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained, init_cfg) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/grid_rcnn.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/grid_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..1bd359476b585b557d0828e4985c43f4d726b0a0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/grid_rcnn.py @@ -0,0 +1,31 @@ +from ..builder import DETECTORS +from .two_stage import TwoStageDetector + + +@DETECTORS.register_module() +class GridRCNN(TwoStageDetector): + """Grid R-CNN. + + This detector is the implementation of: + - Grid R-CNN (https://arxiv.org/abs/1811.12030) + - Grid R-CNN Plus: Faster and Better (https://arxiv.org/abs/1906.05688) + """ + + def __init__(self, + backbone, + rpn_head, + roi_head, + train_cfg, + test_cfg, + neck=None, + pretrained=None, + init_cfg=None): + super(GridRCNN, self).__init__( + backbone=backbone, + neck=neck, + rpn_head=rpn_head, + roi_head=roi_head, + train_cfg=train_cfg, + test_cfg=test_cfg, + pretrained=pretrained, + init_cfg=init_cfg) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/htc.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/htc.py new file mode 100644 index 0000000000000000000000000000000000000000..d9efdf420fa7373f7f1d116f8d97836d73b457bf --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/htc.py @@ -0,0 +1,15 @@ +from ..builder import DETECTORS +from .cascade_rcnn import CascadeRCNN + + +@DETECTORS.register_module() +class HybridTaskCascade(CascadeRCNN): + """Implementation of `HTC `_""" + + def __init__(self, **kwargs): + super(HybridTaskCascade, self).__init__(**kwargs) + + @property + def with_semantic(self): + """bool: whether the detector has a semantic head""" + return self.roi_head.with_semantic diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/kd_one_stage.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/kd_one_stage.py new file mode 100644 index 0000000000000000000000000000000000000000..671ec19015c87fefd065b84ae887147f90cc892b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/kd_one_stage.py @@ -0,0 +1,100 @@ +import mmcv +import torch +from mmcv.runner import load_checkpoint + +from .. 
import build_detector +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class KnowledgeDistillationSingleStageDetector(SingleStageDetector): + r"""Implementation of `Distilling the Knowledge in a Neural Network. + `_. + + Args: + teacher_config (str | dict): Config file path + or the config object of teacher model. + teacher_ckpt (str, optional): Checkpoint path of teacher model. + If left as None, the model will not load any weights. + """ + + def __init__(self, + backbone, + neck, + bbox_head, + teacher_config, + teacher_ckpt=None, + eval_teacher=True, + train_cfg=None, + test_cfg=None, + pretrained=None): + super().__init__(backbone, neck, bbox_head, train_cfg, test_cfg, + pretrained) + self.eval_teacher = eval_teacher + # Build teacher model + if isinstance(teacher_config, str): + teacher_config = mmcv.Config.fromfile(teacher_config) + self.teacher_model = build_detector(teacher_config['model']) + if teacher_ckpt is not None: + load_checkpoint( + self.teacher_model, teacher_ckpt, map_location='cpu') + + def forward_train(self, + img, + img_metas, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None): + """ + Args: + img (Tensor): Input images of shape (N, C, H, W). + Typically these should be mean centered and std scaled. + img_metas (list[dict]): A List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + :class:`mmdet.datasets.pipelines.Collect`. + gt_bboxes (list[Tensor]): Each item are the truth boxes for each + image in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): Class indices corresponding to each box + gt_bboxes_ignore (None | list[Tensor]): Specify which bounding + boxes can be ignored when computing the loss. + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + x = self.extract_feat(img) + with torch.no_grad(): + teacher_x = self.teacher_model.extract_feat(img) + out_teacher = self.teacher_model.bbox_head(teacher_x) + losses = self.bbox_head.forward_train(x, out_teacher, img_metas, + gt_bboxes, gt_labels, + gt_bboxes_ignore) + return losses + + def cuda(self, device=None): + """Since teacher_model is registered as a plain object, it is necessary + to put the teacher model to cuda when calling cuda function.""" + self.teacher_model.cuda(device=device) + return super().cuda(device=device) + + def train(self, mode=True): + """Set the same train mode for teacher and student model.""" + if self.eval_teacher: + self.teacher_model.train(False) + else: + self.teacher_model.train(mode) + super().train(mode) + + def __setattr__(self, name, value): + """Set attribute, i.e. self.name = value + + This reloading prevent the teacher model from being registered as a + nn.Module. The teacher module is registered as a plain object, so that + the teacher parameters will not show up when calling + ``self.parameters``, ``self.modules``, ``self.children`` methods. 
+ """ + if name == 'teacher_model': + object.__setattr__(self, name, value) + else: + super().__setattr__(name, value) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/mask_rcnn.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/mask_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..29ea62d32e31ed4b4a7c5050cdbcd3b4e553b9b4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/mask_rcnn.py @@ -0,0 +1,26 @@ +from ..builder import DETECTORS +from .two_stage import TwoStageDetector + + +@DETECTORS.register_module() +class MaskRCNN(TwoStageDetector): + """Implementation of `Mask R-CNN `_""" + + def __init__(self, + backbone, + rpn_head, + roi_head, + train_cfg, + test_cfg, + neck=None, + pretrained=None, + init_cfg=None): + super(MaskRCNN, self).__init__( + backbone=backbone, + neck=neck, + rpn_head=rpn_head, + roi_head=roi_head, + train_cfg=train_cfg, + test_cfg=test_cfg, + pretrained=pretrained, + init_cfg=init_cfg) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/mask_scoring_rcnn.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/mask_scoring_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..86c6053c4a43741d80e35835ff2f74d0c7aa5b0d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/mask_scoring_rcnn.py @@ -0,0 +1,29 @@ +from ..builder import DETECTORS +from .two_stage import TwoStageDetector + + +@DETECTORS.register_module() +class MaskScoringRCNN(TwoStageDetector): + """Mask Scoring RCNN. + + https://arxiv.org/abs/1903.00241 + """ + + def __init__(self, + backbone, + rpn_head, + roi_head, + train_cfg, + test_cfg, + neck=None, + pretrained=None, + init_cfg=None): + super(MaskScoringRCNN, self).__init__( + backbone=backbone, + neck=neck, + rpn_head=rpn_head, + roi_head=roi_head, + train_cfg=train_cfg, + test_cfg=test_cfg, + pretrained=pretrained, + init_cfg=init_cfg) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/nasfcos.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/nasfcos.py new file mode 100644 index 0000000000000000000000000000000000000000..6f3446f3d4e281d5b8785ff6e2eda3a6797b5f59 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/nasfcos.py @@ -0,0 +1,21 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class NASFCOS(SingleStageDetector): + """NAS-FCOS: Fast Neural Architecture Search for Object Detection. 
+
+    https://arxiv.org/abs/1906.04423
+    """
+
+    def __init__(self,
+                 backbone,
+                 neck,
+                 bbox_head,
+                 train_cfg=None,
+                 test_cfg=None,
+                 pretrained=None,
+                 init_cfg=None):
+        super(NASFCOS, self).__init__(backbone, neck, bbox_head, train_cfg,
+                                      test_cfg, pretrained, init_cfg)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/paa.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/paa.py
new file mode 100644
index 0000000000000000000000000000000000000000..afc80590796af314b7493e7f102780bbcf65448b
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/paa.py
@@ -0,0 +1,18 @@
+from ..builder import DETECTORS
+from .single_stage import SingleStageDetector
+
+
+@DETECTORS.register_module()
+class PAA(SingleStageDetector):
+    """Implementation of `PAA `_."""
+
+    def __init__(self,
+                 backbone,
+                 neck,
+                 bbox_head,
+                 train_cfg=None,
+                 test_cfg=None,
+                 pretrained=None,
+                 init_cfg=None):
+        super(PAA, self).__init__(backbone, neck, bbox_head, train_cfg,
+                                  test_cfg, pretrained, init_cfg)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/point_rend.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/point_rend.py
new file mode 100644
index 0000000000000000000000000000000000000000..72c4bacf50e844fbbd2f96bf828ea4bc257b94f6
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/point_rend.py
@@ -0,0 +1,31 @@
+from ..builder import DETECTORS
+from .two_stage import TwoStageDetector
+
+
+@DETECTORS.register_module()
+class PointRend(TwoStageDetector):
+    """PointRend: Image Segmentation as Rendering
+
+    This detector is the implementation of
+    `PointRend `_.
+    """
+
+    def __init__(self,
+                 backbone,
+                 rpn_head,
+                 roi_head,
+                 train_cfg,
+                 test_cfg,
+                 neck=None,
+                 pretrained=None,
+                 init_cfg=None):
+        super(PointRend, self).__init__(
+            backbone=backbone,
+            neck=neck,
+            rpn_head=rpn_head,
+            roi_head=roi_head,
+            train_cfg=train_cfg,
+            test_cfg=test_cfg,
+            pretrained=pretrained,
+            init_cfg=init_cfg)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/reppoints_detector.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/reppoints_detector.py
new file mode 100644
index 0000000000000000000000000000000000000000..3636a602d4e6ce593c715e92101e95e9ca9e101d
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/reppoints_detector.py
@@ -0,0 +1,23 @@
+from ..builder import DETECTORS
+from .single_stage import SingleStageDetector
+
+
+@DETECTORS.register_module()
+class RepPointsDetector(SingleStageDetector):
+    """RepPoints: Point Set Representation for Object Detection.
+ + This detector is the implementation of: + - RepPoints detector (https://arxiv.org/pdf/1904.11490) + """ + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(RepPointsDetector, + self).__init__(backbone, neck, bbox_head, train_cfg, test_cfg, + pretrained, init_cfg) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/retinanet.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/retinanet.py new file mode 100644 index 0000000000000000000000000000000000000000..6aa29f2531e378b6b8bf57651ff829ae55b2c776 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/retinanet.py @@ -0,0 +1,18 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class RetinaNet(SingleStageDetector): + """Implementation of `RetinaNet `_""" + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(RetinaNet, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained, init_cfg) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/rpn.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/rpn.py new file mode 100644 index 0000000000000000000000000000000000000000..f92d909286a59573d7af38ad8f468851f1f83265 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/rpn.py @@ -0,0 +1,154 @@ +import warnings + +import mmcv +import torch +from mmcv.image import tensor2imgs + +from mmdet.core import bbox_mapping +from ..builder import DETECTORS, build_backbone, build_head, build_neck +from .base import BaseDetector + + +@DETECTORS.register_module() +class RPN(BaseDetector): + """Implementation of Region Proposal Network.""" + + def __init__(self, + backbone, + neck, + rpn_head, + train_cfg, + test_cfg, + pretrained=None, + init_cfg=None): + super(RPN, self).__init__(init_cfg) + if pretrained: + warnings.warn('DeprecationWarning: pretrained is deprecated, ' + 'please use "init_cfg" instead') + backbone.pretrained = pretrained + self.backbone = build_backbone(backbone) + self.neck = build_neck(neck) if neck is not None else None + rpn_train_cfg = train_cfg.rpn if train_cfg is not None else None + rpn_head.update(train_cfg=rpn_train_cfg) + rpn_head.update(test_cfg=test_cfg.rpn) + self.rpn_head = build_head(rpn_head) + self.train_cfg = train_cfg + self.test_cfg = test_cfg + + def extract_feat(self, img): + """Extract features. + + Args: + img (torch.Tensor): Image tensor with shape (n, c, h ,w). + + Returns: + list[torch.Tensor]: Multi-level features that may have + different resolutions. + """ + x = self.backbone(img) + if self.with_neck: + x = self.neck(x) + return x + + def forward_dummy(self, img): + """Dummy forward function.""" + x = self.extract_feat(img) + rpn_outs = self.rpn_head(x) + return rpn_outs + + def forward_train(self, + img, + img_metas, + gt_bboxes=None, + gt_bboxes_ignore=None): + """ + Args: + img (Tensor): Input images of shape (N, C, H, W). + Typically these should be mean centered and std scaled. + img_metas (list[dict]): A List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + :class:`mmdet.datasets.pipelines.Collect`. 
+ gt_bboxes (list[Tensor]): Each item are the truth boxes for each + image in [tl_x, tl_y, br_x, br_y] format. + gt_bboxes_ignore (None | list[Tensor]): Specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. + """ + if (isinstance(self.train_cfg.rpn, dict) + and self.train_cfg.rpn.get('debug', False)): + self.rpn_head.debug_imgs = tensor2imgs(img) + + x = self.extract_feat(img) + losses = self.rpn_head.forward_train(x, img_metas, gt_bboxes, None, + gt_bboxes_ignore) + return losses + + def simple_test(self, img, img_metas, rescale=False): + """Test function without test time augmentation. + + Args: + imgs (list[torch.Tensor]): List of multiple images + img_metas (list[dict]): List of image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. + + Returns: + list[np.ndarray]: proposals + """ + x = self.extract_feat(img) + # get origin input shape to onnx dynamic input shape + if torch.onnx.is_in_onnx_export(): + img_shape = torch._shape_as_tensor(img)[2:] + img_metas[0]['img_shape_for_onnx'] = img_shape + proposal_list = self.rpn_head.simple_test_rpn(x, img_metas) + if rescale: + for proposals, meta in zip(proposal_list, img_metas): + proposals[:, :4] /= proposals.new_tensor(meta['scale_factor']) + if torch.onnx.is_in_onnx_export(): + return proposal_list + + return [proposal.cpu().numpy() for proposal in proposal_list] + + def aug_test(self, imgs, img_metas, rescale=False): + """Test function with test time augmentation. + + Args: + imgs (list[torch.Tensor]): List of multiple images + img_metas (list[dict]): List of image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. + + Returns: + list[np.ndarray]: proposals + """ + proposal_list = self.rpn_head.aug_test_rpn( + self.extract_feats(imgs), img_metas) + if not rescale: + for proposals, img_meta in zip(proposal_list, img_metas[0]): + img_shape = img_meta['img_shape'] + scale_factor = img_meta['scale_factor'] + flip = img_meta['flip'] + flip_direction = img_meta['flip_direction'] + proposals[:, :4] = bbox_mapping(proposals[:, :4], img_shape, + scale_factor, flip, + flip_direction) + return [proposal.cpu().numpy() for proposal in proposal_list] + + def show_result(self, data, result, top_k=20, **kwargs): + """Show RPN proposals on the image. + + Args: + data (str or np.ndarray): Image filename or loaded image. + result (Tensor or tuple): The results to draw over `img` + bbox_result or (bbox_result, segm_result). + top_k (int): Plot the first k bboxes only + if set positive. Default: 20 + + Returns: + np.ndarray: The image with bboxes drawn on it. 
+ """ + mmcv.imshow_bboxes(data, result, top_k=top_k) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/scnet.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/scnet.py new file mode 100644 index 0000000000000000000000000000000000000000..04a2347c4ec1efcbfda59a134cddd8bde620d983 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/scnet.py @@ -0,0 +1,10 @@ +from ..builder import DETECTORS +from .cascade_rcnn import CascadeRCNN + + +@DETECTORS.register_module() +class SCNet(CascadeRCNN): + """Implementation of `SCNet `_""" + + def __init__(self, **kwargs): + super(SCNet, self).__init__(**kwargs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/single_stage.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/single_stage.py new file mode 100644 index 0000000000000000000000000000000000000000..c053600903d6a413652574d3daea5e61037d6fea --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/single_stage.py @@ -0,0 +1,165 @@ +import warnings + +import torch + +from mmdet.core import bbox2result +from ..builder import DETECTORS, build_backbone, build_head, build_neck +from .base import BaseDetector + + +@DETECTORS.register_module() +class SingleStageDetector(BaseDetector): + """Base class for single-stage detectors. + + Single-stage detectors directly and densely predict bounding boxes on the + output features of the backbone+neck. + """ + + def __init__(self, + backbone, + neck=None, + bbox_head=None, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(SingleStageDetector, self).__init__(init_cfg) + if pretrained: + warnings.warn('DeprecationWarning: pretrained is deprecated, ' + 'please use "init_cfg" instead') + backbone.pretrained = pretrained + self.backbone = build_backbone(backbone) + if neck is not None: + self.neck = build_neck(neck) + bbox_head.update(train_cfg=train_cfg) + bbox_head.update(test_cfg=test_cfg) + self.bbox_head = build_head(bbox_head) + self.train_cfg = train_cfg + self.test_cfg = test_cfg + + def extract_feat(self, img): + """Directly extract features from the backbone+neck.""" + x = self.backbone(img) + if self.with_neck: + x = self.neck(x) + return x + + def forward_dummy(self, img): + """Used for computing network flops. + + See `mmdetection/tools/analysis_tools/get_flops.py` + """ + x = self.extract_feat(img) + outs = self.bbox_head(x) + return outs + + def forward_train(self, + img, + img_metas, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None): + """ + Args: + img (Tensor): Input images of shape (N, C, H, W). + Typically these should be mean centered and std scaled. + img_metas (list[dict]): A List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + :class:`mmdet.datasets.pipelines.Collect`. + gt_bboxes (list[Tensor]): Each item are the truth boxes for each + image in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): Class indices corresponding to each box + gt_bboxes_ignore (None | list[Tensor]): Specify which bounding + boxes can be ignored when computing the loss. + + Returns: + dict[str, Tensor]: A dictionary of loss components. 
+ """ + super(SingleStageDetector, self).forward_train(img, img_metas) + x = self.extract_feat(img) + losses = self.bbox_head.forward_train(x, img_metas, gt_bboxes, + gt_labels, gt_bboxes_ignore) + return losses + + def simple_test(self, img, img_metas, rescale=False): + """Test function without test-time augmentation. + + Args: + img (torch.Tensor): Images with shape (N, C, H, W). + img_metas (list[dict]): List of image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. + + Returns: + list[list[np.ndarray]]: BBox results of each image and classes. + The outer list corresponds to each image. The inner list + corresponds to each class. + """ + feat = self.extract_feat(img) + results_list = self.bbox_head.simple_test( + feat, img_metas, rescale=rescale) + bbox_results = [ + bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes) + for det_bboxes, det_labels in results_list + ] + return bbox_results + + def aug_test(self, imgs, img_metas, rescale=False): + """Test function with test time augmentation. + + Args: + imgs (list[Tensor]): the outer list indicates test-time + augmentations and inner Tensor should have a shape NxCxHxW, + which contains all images in the batch. + img_metas (list[list[dict]]): the outer list indicates test-time + augs (multiscale, flip, etc.) and the inner list indicates + images in a batch. each dict has image information. + rescale (bool, optional): Whether to rescale the results. + Defaults to False. + + Returns: + list[list[np.ndarray]]: BBox results of each image and classes. + The outer list corresponds to each image. The inner list + corresponds to each class. + """ + assert hasattr(self.bbox_head, 'aug_test'), \ + f'{self.bbox_head.__class__.__name__}' \ + ' does not support test-time augmentation' + + feats = self.extract_feats(imgs) + results_list = self.bbox_head.aug_test( + feats, img_metas, rescale=rescale) + bbox_results = [ + bbox2result(det_bboxes, det_labels, self.bbox_head.num_classes) + for det_bboxes, det_labels in results_list + ] + return bbox_results + + def onnx_export(self, img, img_metas): + """Test function without test time augmentation. + + Args: + img (torch.Tensor): input images. + img_metas (list[dict]): List of image information. + + Returns: + tuple[Tensor, Tensor]: dets of shape [N, num_det, 5] + and class labels of shape [N, num_det]. 
+ """ + x = self.extract_feat(img) + outs = self.bbox_head(x) + # get origin input shape to support onnx dynamic shape + + # get shape as tensor + img_shape = torch._shape_as_tensor(img)[2:] + img_metas[0]['img_shape_for_onnx'] = img_shape + # get pad input shape to support onnx dynamic shape for exporting + # `CornerNet` and `CentripetalNet`, which 'pad_shape' is used + # for inference + img_metas[0]['pad_shape_for_onnx'] = img_shape + # TODO:move all onnx related code in bbox_head to onnx_export function + det_bboxes, det_labels = self.bbox_head.get_bboxes(*outs, img_metas) + + return det_bboxes, det_labels diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/sparse_rcnn.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/sparse_rcnn.py new file mode 100644 index 0000000000000000000000000000000000000000..0dbd0250f189e610a0bbc72b0dab2559e26857ae --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/sparse_rcnn.py @@ -0,0 +1,110 @@ +from ..builder import DETECTORS +from .two_stage import TwoStageDetector + + +@DETECTORS.register_module() +class SparseRCNN(TwoStageDetector): + r"""Implementation of `Sparse R-CNN: End-to-End Object Detection with + Learnable Proposals `_""" + + def __init__(self, *args, **kwargs): + super(SparseRCNN, self).__init__(*args, **kwargs) + assert self.with_rpn, 'Sparse R-CNN do not support external proposals' + + def forward_train(self, + img, + img_metas, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None, + proposals=None, + **kwargs): + """Forward function of SparseR-CNN in train stage. + + Args: + img (Tensor): of shape (N, C, H, W) encoding input images. + Typically these should be mean centered and std scaled. + img_metas (list[dict]): list of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + :class:`mmdet.datasets.pipelines.Collect`. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + gt_bboxes_ignore (None | list[Tensor): specify which bounding + boxes can be ignored when computing the loss. + gt_masks (List[Tensor], optional) : Segmentation masks for + each box. But we don't support it in this architecture. + proposals (List[Tensor], optional): override rpn proposals with + custom proposals. Use when `with_rpn` is False. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + + assert proposals is None, 'Sparse R-CNN does not support' \ + ' external proposals' + assert gt_masks is None, 'Sparse R-CNN does not instance segmentation' + + x = self.extract_feat(img) + proposal_boxes, proposal_features, imgs_whwh = \ + self.rpn_head.forward_train(x, img_metas) + roi_losses = self.roi_head.forward_train( + x, + proposal_boxes, + proposal_features, + img_metas, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_masks=gt_masks, + imgs_whwh=imgs_whwh) + return roi_losses + + def simple_test(self, img, img_metas, rescale=False): + """Test function without test time augmentation. + + Args: + imgs (list[torch.Tensor]): List of multiple images + img_metas (list[dict]): List of image information. + rescale (bool): Whether to rescale the results. + Defaults to False. + + Returns: + list[list[np.ndarray]]: BBox results of each image and classes. 
+    def simple_test(self, img, img_metas, rescale=False):
+        """Test function without test time augmentation.
+
+        Args:
+            img (torch.Tensor): Images with shape (N, C, H, W).
+            img_metas (list[dict]): List of image information.
+            rescale (bool): Whether to rescale the results.
+                Defaults to False.
+
+        Returns:
+            list[list[np.ndarray]]: BBox results of each image and classes.
+                The outer list corresponds to each image. The inner list
+                corresponds to each class.
+        """
+        x = self.extract_feat(img)
+        proposal_boxes, proposal_features, imgs_whwh = \
+            self.rpn_head.simple_test_rpn(x, img_metas)
+        bbox_results = self.roi_head.simple_test(
+            x,
+            proposal_boxes,
+            proposal_features,
+            img_metas,
+            imgs_whwh=imgs_whwh,
+            rescale=rescale)
+        return bbox_results
+
+    def forward_dummy(self, img):
+        """Used for computing network flops.
+
+        See `mmdetection/tools/analysis_tools/get_flops.py`
+        """
+        # backbone
+        x = self.extract_feat(img)
+        # rpn
+        num_imgs = len(img)
+        dummy_img_metas = [
+            dict(img_shape=(800, 1333, 3)) for _ in range(num_imgs)
+        ]
+        proposal_boxes, proposal_features, imgs_whwh = \
+            self.rpn_head.simple_test_rpn(x, dummy_img_metas)
+        # roi_head
+        roi_outs = self.roi_head.forward_dummy(x, proposal_boxes,
+                                               proposal_features,
+                                               dummy_img_metas)
+        return roi_outs
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/trident_faster_rcnn.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/trident_faster_rcnn.py
new file mode 100644
index 0000000000000000000000000000000000000000..c72065e5eec27bdd11b0931dbac890a1e0a11b56
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/trident_faster_rcnn.py
@@ -0,0 +1,68 @@
+from ..builder import DETECTORS
+from .faster_rcnn import FasterRCNN
+
+
+@DETECTORS.register_module()
+class TridentFasterRCNN(FasterRCNN):
+    """Implementation of `TridentNet `_"""
+
+    def __init__(self,
+                 backbone,
+                 rpn_head,
+                 roi_head,
+                 train_cfg,
+                 test_cfg,
+                 neck=None,
+                 pretrained=None,
+                 init_cfg=None):
+
+        super(TridentFasterRCNN, self).__init__(
+            backbone=backbone,
+            neck=neck,
+            rpn_head=rpn_head,
+            roi_head=roi_head,
+            train_cfg=train_cfg,
+            test_cfg=test_cfg,
+            pretrained=pretrained,
+            init_cfg=init_cfg)
+        assert self.backbone.num_branch == self.roi_head.num_branch
+        assert self.backbone.test_branch_idx == self.roi_head.test_branch_idx
+        self.num_branch = self.backbone.num_branch
+        self.test_branch_idx = self.backbone.test_branch_idx
+
+    def simple_test(self, img, img_metas, proposals=None, rescale=False):
+        """Test without augmentation."""
+        assert self.with_bbox, 'Bbox head must be implemented.'
+        x = self.extract_feat(img)
+        num_branch = (self.num_branch if self.test_branch_idx == -1 else 1)
+        # define the duplicated metas up front so that they are also available
+        # when predefined proposals are passed in
+        trident_img_metas = img_metas * num_branch
+        if proposals is None:
+            proposal_list = self.rpn_head.simple_test_rpn(x, trident_img_metas)
+        else:
+            proposal_list = proposals
+
+        return self.roi_head.simple_test(
+            x, proposal_list, trident_img_metas, rescale=rescale)
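The branch handling above leans on Python list repetition; a two-line sketch of what `img_metas * num_branch` actually produces (toy meta dict, three branches):

```python
img_metas = [{'img_shape': (800, 1333, 3)}]  # one image in the batch
num_branch = 3
trident_img_metas = img_metas * num_branch
# -> three references to the same meta dict, one per trident branch
assert len(trident_img_metas) == num_branch
assert all(m is img_metas[0] for m in trident_img_metas)
```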
+ """ + x = self.extract_feats(imgs) + num_branch = (self.num_branch if self.test_branch_idx == -1 else 1) + trident_img_metas = [img_metas * num_branch for img_metas in img_metas] + proposal_list = self.rpn_head.aug_test_rpn(x, trident_img_metas) + return self.roi_head.aug_test( + x, proposal_list, img_metas, rescale=rescale) + + def forward_train(self, img, img_metas, gt_bboxes, gt_labels, **kwargs): + """make copies of img and gts to fit multi-branch.""" + trident_gt_bboxes = tuple(gt_bboxes * self.num_branch) + trident_gt_labels = tuple(gt_labels * self.num_branch) + trident_img_metas = tuple(img_metas * self.num_branch) + + return super(TridentFasterRCNN, + self).forward_train(img, trident_img_metas, + trident_gt_bboxes, trident_gt_labels) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/two_stage.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/two_stage.py new file mode 100644 index 0000000000000000000000000000000000000000..089dd5440e3b2c82301e84a19575fe1c8321b03e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/two_stage.py @@ -0,0 +1,271 @@ +import warnings + +import torch + +from ..builder import DETECTORS, build_backbone, build_head, build_neck +from .base import BaseDetector + + +@DETECTORS.register_module() +class _TwoStageDetector(BaseDetector): + """Base class for two-stage detectors. + + Two-stage detectors typically consisting of a region proposal network and a + task-specific regression head. + """ + + def __init__(self, + backbone, + neck=None, + rpn_head=None, + roi_head=None, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(_TwoStageDetector, self).__init__(init_cfg) + if pretrained: + warnings.warn('DeprecationWarning: pretrained is deprecated, ' + 'please use "init_cfg" instead') + backbone.pretrained = pretrained + self.backbone = build_backbone(backbone) + + if neck is not None: + self.neck = build_neck(neck) + + if rpn_head is not None: + rpn_train_cfg = train_cfg.rpn if train_cfg is not None else None + rpn_head_ = rpn_head.copy() + rpn_head_.update(train_cfg=rpn_train_cfg, test_cfg=test_cfg.rpn) + self.rpn_head = build_head(rpn_head_) + + if roi_head is not None: + # update train and test cfg here for now + # TODO: refactor assigner & sampler + rcnn_train_cfg = train_cfg.rcnn if train_cfg is not None else None + roi_head.update(train_cfg=rcnn_train_cfg) + roi_head.update(test_cfg=test_cfg.rcnn) + roi_head.pretrained = pretrained + self.roi_head = build_head(roi_head) + + self.train_cfg = train_cfg + self.test_cfg = test_cfg + + @property + def with_rpn(self): + """bool: whether the detector has RPN""" + return hasattr(self, 'rpn_head') and self.rpn_head is not None + + @property + def with_roi_head(self): + """bool: whether the detector has a RoI head""" + return hasattr(self, 'roi_head') and self.roi_head is not None + + def extract_feat(self, img): + """Directly extract features from the backbone+neck.""" + x = self.backbone(img) + if self.with_neck: + x = self.neck(x) + return x + + def forward_dummy(self, img): + """Used for computing network flops. 
+ + See `mmdetection/tools/analysis_tools/get_flops.py` + """ + outs = () + # backbone + x = self.extract_feat(img) + # rpn + if self.with_rpn: + rpn_outs = self.rpn_head(x) + outs = outs + (rpn_outs, ) + proposals = torch.randn(1000, 4).to(img.device) + # roi_head + roi_outs = self.roi_head.forward_dummy(x, proposals) + outs = outs + (roi_outs, ) + return outs + + def forward_train(self, + img, + img_metas, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None, + proposals=None, + **kwargs): + """ + Args: + img (Tensor): of shape (N, C, H, W) encoding input images. + Typically these should be mean centered and std scaled. + + img_metas (list[dict]): list of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + + gt_labels (list[Tensor]): class indices corresponding to each box + + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + + gt_masks (None | Tensor) : true segmentation masks for each box + used if the architecture supports a segmentation task. + + proposals : override rpn proposals with custom proposals. Use when + `with_rpn` is False. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + x = self.extract_feat(img) + + losses = dict() + + # RPN forward and loss + if self.with_rpn: + proposal_cfg = self.train_cfg.get('rpn_proposal', + self.test_cfg.rpn) + rpn_losses, proposal_list = self.rpn_head.forward_train( + x, + img_metas, + gt_bboxes, + gt_labels=None, + gt_bboxes_ignore=gt_bboxes_ignore, + proposal_cfg=proposal_cfg) + losses.update(rpn_losses) + else: + proposal_list = proposals + + roi_losses = self.roi_head.forward_train(x, img_metas, proposal_list, + gt_bboxes, gt_labels, + gt_bboxes_ignore, gt_masks, + **kwargs) + losses.update(roi_losses) + + return losses + + async def async_simple_test(self, + img, + img_meta, + proposals=None, + rescale=False): + """Async test without augmentation.""" + assert self.with_bbox, 'Bbox head must be implemented.' + x = self.extract_feat(img) + + if proposals is None: + proposal_list = await self.rpn_head.async_simple_test_rpn( + x, img_meta) + else: + proposal_list = proposals + + return await self.roi_head.async_simple_test( + x, proposal_list, img_meta, rescale=rescale) + + def simple_test(self, img, img_metas, proposals=None, rescale=False): + """Test without augmentation.""" + + assert self.with_bbox, 'Bbox head must be implemented.' + x = self.extract_feat(img) + if proposals is None: + proposal_list = self.rpn_head.simple_test_rpn(x, img_metas) + else: + proposal_list = proposals + + return self.roi_head.simple_test( + x, proposal_list, img_metas, rescale=rescale) + + def aug_test(self, imgs, img_metas, rescale=False): + """Test with augmentations. + + If rescale is False, then returned bboxes and masks will fit the scale + of imgs[0]. 
+ """ + x = self.extract_feats(imgs) + proposal_list = self.rpn_head.aug_test_rpn(x, img_metas) + return self.roi_head.aug_test( + x, proposal_list, img_metas, rescale=rescale) + + def onnx_export(self, img, img_metas): + + img_shape = torch._shape_as_tensor(img)[2:] + img_metas[0]['img_shape_for_onnx'] = img_shape + x = self.extract_feat(img) + proposals = self.rpn_head.onnx_export(x, img_metas) + return self.roi_head.onnx_export(x, proposals, img_metas) + + +@DETECTORS.register_module() +class TwoStageDetector(_TwoStageDetector): + """Base class for two-stage detectors. + + Two-stage detectors typically consisting of a region proposal network and a + task-specific regression head. + """ + + def forward_train(self, + img, + img_metas, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None, + proposals=None, + loss_weights=None, + **kwargs): + xs = self.extract_feat(img) + + if not isinstance(xs[0], (list, tuple)): + xs = [xs] + loss_weights = None + elif loss_weights is None: + loss_weights = [0.5] + [1]*(len(xs)-1) # Reference CBNet paper + + + def upd_loss(losses, idx, weight): + new_losses = dict() + for k,v in losses.items(): + new_k = '{}{}'.format(k,idx) + if weight != 1 and 'loss' in k: + new_k = '{}_w{}'.format(new_k, weight) + if isinstance(v,list) or isinstance(v,tuple): + new_losses[new_k] = [i*weight for i in v] + else:new_losses[new_k] = v*weight + return new_losses + + losses = dict() + + # RPN forward and loss + if self.with_rpn: + proposal_cfg = self.train_cfg.get('rpn_proposal', + self.test_cfg.rpn) + for i,x in enumerate(xs): + rpn_losses, proposal_list = self.rpn_head.forward_train( + x, + img_metas, + gt_bboxes, + gt_labels=None, + gt_bboxes_ignore=gt_bboxes_ignore, + proposal_cfg=proposal_cfg) + if len(xs) > 1: + rpn_losses = upd_loss(rpn_losses, idx=i, weight=loss_weights[i]) + losses.update(rpn_losses) + else: + proposal_list = proposals + + for i,x in enumerate(xs): + roi_losses = self.roi_head.forward_train(x, img_metas, proposal_list, + gt_bboxes, gt_labels, + gt_bboxes_ignore, gt_masks, + **kwargs) + if len(xs) > 1: + roi_losses = upd_loss(roi_losses, idx=i, weight=loss_weights[i]) + losses.update(roi_losses) + + return losses diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/vfnet.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/vfnet.py new file mode 100644 index 0000000000000000000000000000000000000000..cd34d714ecb8807d6cc2e07cf1c7b2e62e1e973e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/vfnet.py @@ -0,0 +1,19 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class VFNet(SingleStageDetector): + """Implementation of `VarifocalNet + (VFNet).`_""" + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(VFNet, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained, init_cfg) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/yolact.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/yolact.py new file mode 100644 index 0000000000000000000000000000000000000000..bca776fcbb8ff010a3ec5ebdd1e3abebf91b8a0e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/yolact.py @@ -0,0 +1,116 @@ +import torch + +from mmdet.core import bbox2result +from ..builder import DETECTORS, build_head +from .single_stage import 
SingleStageDetector + + +@DETECTORS.register_module() +class YOLACT(SingleStageDetector): + """Implementation of `YOLACT `_""" + + def __init__(self, + backbone, + neck, + bbox_head, + segm_head, + mask_head, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(YOLACT, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained, init_cfg) + self.segm_head = build_head(segm_head) + self.mask_head = build_head(mask_head) + + def forward_dummy(self, img): + """Used for computing network flops. + + See `mmdetection/tools/analysis_tools/get_flops.py` + """ + raise NotImplementedError + + def forward_train(self, + img, + img_metas, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None): + """ + Args: + img (Tensor): of shape (N, C, H, W) encoding input images. + Typically these should be mean centered and std scaled. + img_metas (list[dict]): list of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + gt_masks (None | Tensor) : true segmentation masks for each box + used if the architecture supports a segmentation task. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # convert Bitmap mask or Polygon Mask to Tensor here + gt_masks = [ + gt_mask.to_tensor(dtype=torch.uint8, device=img.device) + for gt_mask in gt_masks + ] + + x = self.extract_feat(img) + + cls_score, bbox_pred, coeff_pred = self.bbox_head(x) + bbox_head_loss_inputs = (cls_score, bbox_pred) + (gt_bboxes, gt_labels, + img_metas) + losses, sampling_results = self.bbox_head.loss( + *bbox_head_loss_inputs, gt_bboxes_ignore=gt_bboxes_ignore) + + segm_head_outs = self.segm_head(x[0]) + loss_segm = self.segm_head.loss(segm_head_outs, gt_masks, gt_labels) + losses.update(loss_segm) + + mask_pred = self.mask_head(x[0], coeff_pred, gt_bboxes, img_metas, + sampling_results) + loss_mask = self.mask_head.loss(mask_pred, gt_masks, gt_bboxes, + img_metas, sampling_results) + losses.update(loss_mask) + + # check NaN and Inf + for loss_name in losses.keys(): + assert torch.isfinite(torch.stack(losses[loss_name]))\ + .all().item(), '{} becomes infinite or NaN!'\ + .format(loss_name) + + return losses + + def simple_test(self, img, img_metas, rescale=False): + """Test function without test-time augmentation.""" + feat = self.extract_feat(img) + det_bboxes, det_labels, det_coeffs = self.bbox_head.simple_test( + feat, img_metas, rescale=rescale) + bbox_results = [ + bbox2result(det_bbox, det_label, self.bbox_head.num_classes) + for det_bbox, det_label in zip(det_bboxes, det_labels) + ] + + segm_results = self.mask_head.simple_test( + feat, + det_bboxes, + det_labels, + det_coeffs, + img_metas, + rescale=rescale) + + return list(zip(bbox_results, segm_results)) + + def aug_test(self, imgs, img_metas, rescale=False): + """Test with augmentations.""" + raise NotImplementedError( + 'YOLACT does not support test-time augmentation') diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/yolo.py 
b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/yolo.py new file mode 100644 index 0000000000000000000000000000000000000000..bd1f89e1c192378b94fb7813ee6bf9d0ddfbfbcd --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/yolo.py @@ -0,0 +1,19 @@ +# Copyright (c) 2019 Western Digital Corporation or its affiliates. + +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class YOLOV3(SingleStageDetector): + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(YOLOV3, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained, init_cfg) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/yolof.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/yolof.py new file mode 100644 index 0000000000000000000000000000000000000000..dc7b3adfeff078b135c9e7e5d6c2a73e4ae2b723 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/detectors/yolof.py @@ -0,0 +1,18 @@ +from ..builder import DETECTORS +from .single_stage import SingleStageDetector + + +@DETECTORS.register_module() +class YOLOF(SingleStageDetector): + r"""Implementation of `You Only Look One-level Feature + `_""" + + def __init__(self, + backbone, + neck, + bbox_head, + train_cfg=None, + test_cfg=None, + pretrained=None): + super(YOLOF, self).__init__(backbone, neck, bbox_head, train_cfg, + test_cfg, pretrained) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3c64b1e2ba5086ef7dc3da137e83793c4962f484 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/__init__.py @@ -0,0 +1,30 @@ +from .accuracy import Accuracy, accuracy +from .ae_loss import AssociativeEmbeddingLoss +from .balanced_l1_loss import BalancedL1Loss, balanced_l1_loss +from .cross_entropy_loss import (CrossEntropyLoss, binary_cross_entropy, + cross_entropy, mask_cross_entropy) +from .focal_loss import FocalLoss, sigmoid_focal_loss +from .gaussian_focal_loss import GaussianFocalLoss +from .gfocal_loss import DistributionFocalLoss, QualityFocalLoss +from .ghm_loss import GHMC, GHMR +from .iou_loss import (BoundedIoULoss, CIoULoss, DIoULoss, GIoULoss, IoULoss, + bounded_iou_loss, iou_loss) +from .kd_loss import KnowledgeDistillationKLDivLoss +from .mse_loss import MSELoss, mse_loss +from .pisa_loss import carl_loss, isr_p +from .seesaw_loss import SeesawLoss +from .smooth_l1_loss import L1Loss, SmoothL1Loss, l1_loss, smooth_l1_loss +from .utils import reduce_loss, weight_reduce_loss, weighted_loss +from .varifocal_loss import VarifocalLoss + +__all__ = [ + 'accuracy', 'Accuracy', 'cross_entropy', 'binary_cross_entropy', + 'mask_cross_entropy', 'CrossEntropyLoss', 'sigmoid_focal_loss', + 'FocalLoss', 'smooth_l1_loss', 'SmoothL1Loss', 'balanced_l1_loss', + 'BalancedL1Loss', 'mse_loss', 'MSELoss', 'iou_loss', 'bounded_iou_loss', + 'IoULoss', 'BoundedIoULoss', 'GIoULoss', 'DIoULoss', 'CIoULoss', 'GHMC', + 'GHMR', 'reduce_loss', 'weight_reduce_loss', 'weighted_loss', 'L1Loss', + 'l1_loss', 'isr_p', 'carl_loss', 'AssociativeEmbeddingLoss', + 'GaussianFocalLoss', 'QualityFocalLoss', 'DistributionFocalLoss', + 'VarifocalLoss', 'KnowledgeDistillationKLDivLoss', 'SeesawLoss' +] diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/accuracy.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/accuracy.py new file mode 100644 index 0000000000000000000000000000000000000000..789a2240a491289c5801b6690116e8ca657d004f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/accuracy.py @@ -0,0 +1,78 @@ +import mmcv +import torch.nn as nn + + +@mmcv.jit(coderize=True) +def accuracy(pred, target, topk=1, thresh=None): + """Calculate accuracy according to the prediction and target. + + Args: + pred (torch.Tensor): The model prediction, shape (N, num_class) + target (torch.Tensor): The target of each prediction, shape (N, ) + topk (int | tuple[int], optional): If the predictions in ``topk`` + matches the target, the predictions will be regarded as + correct ones. Defaults to 1. + thresh (float, optional): If not None, predictions with scores under + this threshold are considered incorrect. Default to None. + + Returns: + float | tuple[float]: If the input ``topk`` is a single integer, + the function will return a single float as accuracy. If + ``topk`` is a tuple containing multiple integers, the + function will return a tuple containing accuracies of + each ``topk`` number. + """ + assert isinstance(topk, (int, tuple)) + if isinstance(topk, int): + topk = (topk, ) + return_single = True + else: + return_single = False + + maxk = max(topk) + if pred.size(0) == 0: + accu = [pred.new_tensor(0.) for i in range(len(topk))] + return accu[0] if return_single else accu + assert pred.ndim == 2 and target.ndim == 1 + assert pred.size(0) == target.size(0) + assert maxk <= pred.size(1), \ + f'maxk {maxk} exceeds pred dimension {pred.size(1)}' + pred_value, pred_label = pred.topk(maxk, dim=1) + pred_label = pred_label.t() # transpose to shape (maxk, N) + correct = pred_label.eq(target.view(1, -1).expand_as(pred_label)) + if thresh is not None: + # Only prediction values larger than thresh are counted as correct + correct = correct & (pred_value > thresh).t() + res = [] + for k in topk: + correct_k = correct[:k].reshape(-1).float().sum(0, keepdim=True) + res.append(correct_k.mul_(100.0 / pred.size(0))) + return res[0] if return_single else res + + +class Accuracy(nn.Module): + + def __init__(self, topk=(1, ), thresh=None): + """Module to calculate the accuracy. + + Args: + topk (tuple, optional): The criterion used to calculate the + accuracy. Defaults to (1,). + thresh (float, optional): If not None, predictions with scores + under this threshold are considered incorrect. Default to None. + """ + super().__init__() + self.topk = topk + self.thresh = thresh + + def forward(self, pred, target): + """Forward function to calculate accuracy. + + Args: + pred (torch.Tensor): Prediction of models. + target (torch.Tensor): Target for each prediction. + + Returns: + tuple[float]: The accuracies under different topk criterions. 
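A quick hand-check of the `accuracy` helper above, with assumed toy scores (run in the context of this module):

```python
import torch

pred = torch.tensor([[0.1, 0.7, 0.2],    # top-1 is class 1
                     [0.8, 0.15, 0.05],  # top-1 is class 0
                     [0.2, 0.3, 0.5]])   # top-1 is class 2
target = torch.tensor([1, 1, 2])

# Row 1 misses at top-1 (argmax is 0, target is 1) but hits at top-2.
top1, top2 = accuracy(pred, target, topk=(1, 2))
print(top1, top2)  # tensor([66.6667]) tensor([100.])
```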
+ """ + return accuracy(pred, target, self.topk, self.thresh) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/ae_loss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/ae_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..cff472aa03080fb49dbb3adba6fec68647a575e6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/ae_loss.py @@ -0,0 +1,102 @@ +import mmcv +import torch +import torch.nn as nn +import torch.nn.functional as F + +from ..builder import LOSSES + + +@mmcv.jit(derivate=True, coderize=True) +def ae_loss_per_image(tl_preds, br_preds, match): + """Associative Embedding Loss in one image. + + Associative Embedding Loss including two parts: pull loss and push loss. + Pull loss makes embedding vectors from same object closer to each other. + Push loss distinguish embedding vector from different objects, and makes + the gap between them is large enough. + + During computing, usually there are 3 cases: + - no object in image: both pull loss and push loss will be 0. + - one object in image: push loss will be 0 and pull loss is computed + by the two corner of the only object. + - more than one objects in image: pull loss is computed by corner pairs + from each object, push loss is computed by each object with all + other objects. We use confusion matrix with 0 in diagonal to + compute the push loss. + + Args: + tl_preds (tensor): Embedding feature map of left-top corner. + br_preds (tensor): Embedding feature map of bottim-right corner. + match (list): Downsampled coordinates pair of each ground truth box. + """ + + tl_list, br_list, me_list = [], [], [] + if len(match) == 0: # no object in image + pull_loss = tl_preds.sum() * 0. + push_loss = tl_preds.sum() * 0. + else: + for m in match: + [tl_y, tl_x], [br_y, br_x] = m + tl_e = tl_preds[:, tl_y, tl_x].view(-1, 1) + br_e = br_preds[:, br_y, br_x].view(-1, 1) + tl_list.append(tl_e) + br_list.append(br_e) + me_list.append((tl_e + br_e) / 2.0) + + tl_list = torch.cat(tl_list) + br_list = torch.cat(br_list) + me_list = torch.cat(me_list) + + assert tl_list.size() == br_list.size() + + # N is object number in image, M is dimension of embedding vector + N, M = tl_list.size() + + pull_loss = (tl_list - me_list).pow(2) + (br_list - me_list).pow(2) + pull_loss = pull_loss.sum() / N + + margin = 1 # exp setting of CornerNet, details in section 3.3 of paper + + # confusion matrix of push loss + conf_mat = me_list.expand((N, N, M)).permute(1, 0, 2) - me_list + conf_weight = 1 - torch.eye(N).type_as(me_list) + conf_mat = conf_weight * (margin - conf_mat.sum(-1).abs()) + + if N > 1: # more than one object in current image + push_loss = F.relu(conf_mat).sum() / (N * (N - 1)) + else: + push_loss = tl_preds.sum() * 0. + + return pull_loss, push_loss + + +@LOSSES.register_module() +class AssociativeEmbeddingLoss(nn.Module): + """Associative Embedding Loss. + + More details can be found in + `Associative Embedding `_ and + `CornerNet `_ . + Code is modified from `kp_utils.py `_ # noqa: E501 + + Args: + pull_weight (float): Loss weight for corners from same object. + push_weight (float): Loss weight for corners from different object. 
+ """ + + def __init__(self, pull_weight=0.25, push_weight=0.25): + super(AssociativeEmbeddingLoss, self).__init__() + self.pull_weight = pull_weight + self.push_weight = push_weight + + def forward(self, pred, target, match): + """Forward function.""" + batch = pred.size(0) + pull_all, push_all = 0.0, 0.0 + for i in range(batch): + pull, push = ae_loss_per_image(pred[i], target[i], match[i]) + + pull_all += self.pull_weight * pull + push_all += self.push_weight * push + + return pull_all, push_all diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/balanced_l1_loss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/balanced_l1_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..7bcd13ff26dbdc9f6eff8d7c7b5bde742a8d7d1d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/balanced_l1_loss.py @@ -0,0 +1,120 @@ +import mmcv +import numpy as np +import torch +import torch.nn as nn + +from ..builder import LOSSES +from .utils import weighted_loss + + +@mmcv.jit(derivate=True, coderize=True) +@weighted_loss +def balanced_l1_loss(pred, + target, + beta=1.0, + alpha=0.5, + gamma=1.5, + reduction='mean'): + """Calculate balanced L1 loss. + + Please see the `Libra R-CNN `_ + + Args: + pred (torch.Tensor): The prediction with shape (N, 4). + target (torch.Tensor): The learning target of the prediction with + shape (N, 4). + beta (float): The loss is a piecewise function of prediction and target + and ``beta`` serves as a threshold for the difference between the + prediction and target. Defaults to 1.0. + alpha (float): The denominator ``alpha`` in the balanced L1 loss. + Defaults to 0.5. + gamma (float): The ``gamma`` in the balanced L1 loss. + Defaults to 1.5. + reduction (str, optional): The method that reduces the loss to a + scalar. Options are "none", "mean" and "sum". + + Returns: + torch.Tensor: The calculated loss + """ + assert beta > 0 + assert pred.size() == target.size() and target.numel() > 0 + + diff = torch.abs(pred - target) + b = np.e**(gamma / alpha) - 1 + loss = torch.where( + diff < beta, alpha / b * + (b * diff + 1) * torch.log(b * diff / beta + 1) - alpha * diff, + gamma * diff + gamma / b - alpha * beta) + + return loss + + +@LOSSES.register_module() +class BalancedL1Loss(nn.Module): + """Balanced L1 Loss. + + arXiv: https://arxiv.org/pdf/1904.02701.pdf (CVPR 2019) + + Args: + alpha (float): The denominator ``alpha`` in the balanced L1 loss. + Defaults to 0.5. + gamma (float): The ``gamma`` in the balanced L1 loss. Defaults to 1.5. + beta (float, optional): The loss is a piecewise function of prediction + and target. ``beta`` serves as a threshold for the difference + between the prediction and target. Defaults to 1.0. + reduction (str, optional): The method that reduces the loss to a + scalar. Options are "none", "mean" and "sum". + loss_weight (float, optional): The weight of the loss. Defaults to 1.0 + """ + + def __init__(self, + alpha=0.5, + gamma=1.5, + beta=1.0, + reduction='mean', + loss_weight=1.0): + super(BalancedL1Loss, self).__init__() + self.alpha = alpha + self.gamma = gamma + self.beta = beta + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + """Forward function of loss. + + Args: + pred (torch.Tensor): The prediction with shape (N, 4). + target (torch.Tensor): The learning target of the prediction with + shape (N, 4). 
+ weight (torch.Tensor, optional): Sample-wise loss weight with + shape (N, ). + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Options are "none", "mean" and "sum". + + Returns: + torch.Tensor: The calculated loss + """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + loss_bbox = self.loss_weight * balanced_l1_loss( + pred, + target, + weight, + alpha=self.alpha, + gamma=self.gamma, + beta=self.beta, + reduction=reduction, + avg_factor=avg_factor, + **kwargs) + return loss_bbox diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/cross_entropy_loss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/cross_entropy_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..57994157960eeae5530bd983b8b86263de31d0ff --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/cross_entropy_loss.py @@ -0,0 +1,214 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from ..builder import LOSSES +from .utils import weight_reduce_loss + + +def cross_entropy(pred, + label, + weight=None, + reduction='mean', + avg_factor=None, + class_weight=None): + """Calculate the CrossEntropy loss. + + Args: + pred (torch.Tensor): The prediction with shape (N, C), C is the number + of classes. + label (torch.Tensor): The learning label of the prediction. + weight (torch.Tensor, optional): Sample-wise loss weight. + reduction (str, optional): The method used to reduce the loss. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + class_weight (list[float], optional): The weight for each class. + + Returns: + torch.Tensor: The calculated loss + """ + # element-wise losses + loss = F.cross_entropy(pred, label, weight=class_weight, reduction='none') + + # apply weights and do the reduction + if weight is not None: + weight = weight.float() + loss = weight_reduce_loss( + loss, weight=weight, reduction=reduction, avg_factor=avg_factor) + + return loss + + +def _expand_onehot_labels(labels, label_weights, label_channels): + bin_labels = labels.new_full((labels.size(0), label_channels), 0) + inds = torch.nonzero( + (labels >= 0) & (labels < label_channels), as_tuple=False).squeeze() + if inds.numel() > 0: + bin_labels[inds, labels[inds]] = 1 + + if label_weights is None: + bin_label_weights = None + else: + bin_label_weights = label_weights.view(-1, 1).expand( + label_weights.size(0), label_channels) + + return bin_labels, bin_label_weights + + +def binary_cross_entropy(pred, + label, + weight=None, + reduction='mean', + avg_factor=None, + class_weight=None): + """Calculate the binary CrossEntropy loss. + + Args: + pred (torch.Tensor): The prediction with shape (N, 1). + label (torch.Tensor): The learning label of the prediction. + weight (torch.Tensor, optional): Sample-wise loss weight. + reduction (str, optional): The method used to reduce the loss. + Options are "none", "mean" and "sum". + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + class_weight (list[float], optional): The weight for each class. 
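`_expand_onehot_labels` above is what lets `binary_cross_entropy` accept plain category ids: valid labels become one-hot rows, while the background/ignore id `label_channels` maps to an all-zero row. For example:

```python
import torch

labels = torch.tensor([0, 2, 3])  # 3 == label_channels, i.e. background
weights = torch.ones(3)
bin_labels, bin_weights = _expand_onehot_labels(labels, weights, 3)
print(bin_labels)
# tensor([[1, 0, 0],
#         [0, 0, 1],
#         [0, 0, 0]])
print(bin_weights.shape)  # torch.Size([3, 3])
```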
+ + Returns: + torch.Tensor: The calculated loss + """ + if pred.dim() != label.dim(): + label, weight = _expand_onehot_labels(label, weight, pred.size(-1)) + + # weighted element-wise losses + if weight is not None: + weight = weight.float() + loss = F.binary_cross_entropy_with_logits( + pred, label.float(), pos_weight=class_weight, reduction='none') + # do the reduction for the weighted loss + loss = weight_reduce_loss( + loss, weight, reduction=reduction, avg_factor=avg_factor) + + return loss + + +def mask_cross_entropy(pred, + target, + label, + reduction='mean', + avg_factor=None, + class_weight=None): + """Calculate the CrossEntropy loss for masks. + + Args: + pred (torch.Tensor): The prediction with shape (N, C, *), C is the + number of classes. The trailing * indicates arbitrary shape. + target (torch.Tensor): The learning label of the prediction. + label (torch.Tensor): ``label`` indicates the class label of the mask + corresponding object. This will be used to select the mask in the + of the class which the object belongs to when the mask prediction + if not class-agnostic. + reduction (str, optional): The method used to reduce the loss. + Options are "none", "mean" and "sum". + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + class_weight (list[float], optional): The weight for each class. + + Returns: + torch.Tensor: The calculated loss + + Example: + >>> N, C = 3, 11 + >>> H, W = 2, 2 + >>> pred = torch.randn(N, C, H, W) * 1000 + >>> target = torch.rand(N, H, W) + >>> label = torch.randint(0, C, size=(N,)) + >>> reduction = 'mean' + >>> avg_factor = None + >>> class_weights = None + >>> loss = mask_cross_entropy(pred, target, label, reduction, + >>> avg_factor, class_weights) + >>> assert loss.shape == (1,) + """ + # TODO: handle these two reserved arguments + assert reduction == 'mean' and avg_factor is None + num_rois = pred.size()[0] + inds = torch.arange(0, num_rois, dtype=torch.long, device=pred.device) + pred_slice = pred[inds, label].squeeze(1) + return F.binary_cross_entropy_with_logits( + pred_slice, target, weight=class_weight, reduction='mean')[None] + + +@LOSSES.register_module() +class CrossEntropyLoss(nn.Module): + + def __init__(self, + use_sigmoid=False, + use_mask=False, + reduction='mean', + class_weight=None, + loss_weight=1.0): + """CrossEntropyLoss. + + Args: + use_sigmoid (bool, optional): Whether the prediction uses sigmoid + of softmax. Defaults to False. + use_mask (bool, optional): Whether to use mask cross entropy loss. + Defaults to False. + reduction (str, optional): . Defaults to 'mean'. + Options are "none", "mean" and "sum". + class_weight (list[float], optional): Weight of each class. + Defaults to None. + loss_weight (float, optional): Weight of the loss. Defaults to 1.0. + """ + super(CrossEntropyLoss, self).__init__() + assert (use_sigmoid is False) or (use_mask is False) + self.use_sigmoid = use_sigmoid + self.use_mask = use_mask + self.reduction = reduction + self.loss_weight = loss_weight + self.class_weight = class_weight + + if self.use_sigmoid: + self.cls_criterion = binary_cross_entropy + elif self.use_mask: + self.cls_criterion = mask_cross_entropy + else: + self.cls_criterion = cross_entropy + + def forward(self, + cls_score, + label, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + """Forward function. + + Args: + cls_score (torch.Tensor): The prediction. + label (torch.Tensor): The learning label of the prediction. 
+ weight (torch.Tensor, optional): Sample-wise loss weight. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction (str, optional): The method used to reduce the loss. + Options are "none", "mean" and "sum". + Returns: + torch.Tensor: The calculated loss + """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + if self.class_weight is not None: + class_weight = cls_score.new_tensor( + self.class_weight, device=cls_score.device) + else: + class_weight = None + loss_cls = self.loss_weight * self.cls_criterion( + cls_score, + label, + weight, + class_weight=class_weight, + reduction=reduction, + avg_factor=avg_factor, + **kwargs) + return loss_cls diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/focal_loss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/focal_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..493907c6984d532175e0351daf2eafe4b9ff0256 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/focal_loss.py @@ -0,0 +1,181 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.ops import sigmoid_focal_loss as _sigmoid_focal_loss + +from ..builder import LOSSES +from .utils import weight_reduce_loss + + +# This method is only for debugging +def py_sigmoid_focal_loss(pred, + target, + weight=None, + gamma=2.0, + alpha=0.25, + reduction='mean', + avg_factor=None): + """PyTorch version of `Focal Loss `_. + + Args: + pred (torch.Tensor): The prediction with shape (N, C), C is the + number of classes + target (torch.Tensor): The learning label of the prediction. + weight (torch.Tensor, optional): Sample-wise loss weight. + gamma (float, optional): The gamma for calculating the modulating + factor. Defaults to 2.0. + alpha (float, optional): A balanced form for Focal Loss. + Defaults to 0.25. + reduction (str, optional): The method used to reduce the loss into + a scalar. Defaults to 'mean'. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + """ + pred_sigmoid = pred.sigmoid() + target = target.type_as(pred) + pt = (1 - pred_sigmoid) * target + pred_sigmoid * (1 - target) + focal_weight = (alpha * target + (1 - alpha) * + (1 - target)) * pt.pow(gamma) + loss = F.binary_cross_entropy_with_logits( + pred, target, reduction='none') * focal_weight + if weight is not None: + if weight.shape != loss.shape: + if weight.size(0) == loss.size(0): + # For most cases, weight is of shape (num_priors, ), + # which means it does not have the second axis num_class + weight = weight.view(-1, 1) + else: + # Sometimes, weight per anchor per class is also needed. e.g. + # in FSAF. But it may be flattened of shape + # (num_priors x num_class, ), while loss is still of shape + # (num_priors, num_class). + assert weight.numel() == loss.numel() + weight = weight.view(loss.size(0), -1) + assert weight.ndim == loss.ndim + loss = weight_reduce_loss(loss, weight, reduction, avg_factor) + return loss + + +def sigmoid_focal_loss(pred, + target, + weight=None, + gamma=2.0, + alpha=0.25, + reduction='mean', + avg_factor=None): + r"""A warpper of cuda version `Focal Loss + `_. + + Args: + pred (torch.Tensor): The prediction with shape (N, C), C is the number + of classes. + target (torch.Tensor): The learning label of the prediction. + weight (torch.Tensor, optional): Sample-wise loss weight. 
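A sanity check on `py_sigmoid_focal_loss` above: with `gamma=0` the modulating factor `pt**gamma` is 1, and with `alpha=0.5` the class-balance factor is a constant 0.5, so the focal loss collapses to half the plain BCE-with-logits (assumed random inputs):

```python
import torch
import torch.nn.functional as F

pred = torch.randn(4, 3)
target = torch.randint(0, 2, (4, 3)).float()

focal = py_sigmoid_focal_loss(pred, target, gamma=0.0, alpha=0.5,
                              reduction='none')
bce = F.binary_cross_entropy_with_logits(pred, target, reduction='none')
print(torch.allclose(focal, 0.5 * bce))  # True
```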
+ gamma (float, optional): The gamma for calculating the modulating + factor. Defaults to 2.0. + alpha (float, optional): A balanced form for Focal Loss. + Defaults to 0.25. + reduction (str, optional): The method used to reduce the loss into + a scalar. Defaults to 'mean'. Options are "none", "mean" and "sum". + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + """ + # Function.apply does not accept keyword arguments, so the decorator + # "weighted_loss" is not applicable + loss = _sigmoid_focal_loss(pred.contiguous(), target, gamma, alpha, None, + 'none') + if weight is not None: + if weight.shape != loss.shape: + if weight.size(0) == loss.size(0): + # For most cases, weight is of shape (num_priors, ), + # which means it does not have the second axis num_class + weight = weight.view(-1, 1) + else: + # Sometimes, weight per anchor per class is also needed. e.g. + # in FSAF. But it may be flattened of shape + # (num_priors x num_class, ), while loss is still of shape + # (num_priors, num_class). + assert weight.numel() == loss.numel() + weight = weight.view(loss.size(0), -1) + assert weight.ndim == loss.ndim + loss = weight_reduce_loss(loss, weight, reduction, avg_factor) + return loss + + +@LOSSES.register_module() +class FocalLoss(nn.Module): + + def __init__(self, + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + reduction='mean', + loss_weight=1.0): + """`Focal Loss `_ + + Args: + use_sigmoid (bool, optional): Whether to the prediction is + used for sigmoid or softmax. Defaults to True. + gamma (float, optional): The gamma for calculating the modulating + factor. Defaults to 2.0. + alpha (float, optional): A balanced form for Focal Loss. + Defaults to 0.25. + reduction (str, optional): The method used to reduce the loss into + a scalar. Defaults to 'mean'. Options are "none", "mean" and + "sum". + loss_weight (float, optional): Weight of loss. Defaults to 1.0. + """ + super(FocalLoss, self).__init__() + assert use_sigmoid is True, 'Only sigmoid focal loss supported now.' + self.use_sigmoid = use_sigmoid + self.gamma = gamma + self.alpha = alpha + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None): + """Forward function. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning label of the prediction. + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Options are "none", "mean" and "sum". 
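In the `forward` body that follows, the CPU fallback one-hot encodes the integer labels with `num_classes + 1` columns and slices the background column off, so background samples (label == num_classes) become all-zero targets. An illustration with assumed values:

```python
import torch
import torch.nn.functional as F

num_classes = 3
target = torch.tensor([0, 2, 3])  # 3 == background
onehot = F.one_hot(target, num_classes=num_classes + 1)[:, :num_classes]
print(onehot)
# tensor([[1, 0, 0],
#         [0, 0, 1],
#         [0, 0, 0]])
```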
+ + Returns: + torch.Tensor: The calculated loss + """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + if self.use_sigmoid: + if torch.cuda.is_available() and pred.is_cuda: + calculate_loss_func = sigmoid_focal_loss + else: + num_classes = pred.size(1) + target = F.one_hot(target, num_classes=num_classes + 1) + target = target[:, :num_classes] + calculate_loss_func = py_sigmoid_focal_loss + + loss_cls = self.loss_weight * calculate_loss_func( + pred, + target, + weight, + gamma=self.gamma, + alpha=self.alpha, + reduction=reduction, + avg_factor=avg_factor) + + else: + raise NotImplementedError + return loss_cls diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/gaussian_focal_loss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/gaussian_focal_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..e45506a38e8e3c187be8288d0b714cc1ee29cf27 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/gaussian_focal_loss.py @@ -0,0 +1,91 @@ +import mmcv +import torch.nn as nn + +from ..builder import LOSSES +from .utils import weighted_loss + + +@mmcv.jit(derivate=True, coderize=True) +@weighted_loss +def gaussian_focal_loss(pred, gaussian_target, alpha=2.0, gamma=4.0): + """`Focal Loss `_ for targets in gaussian + distribution. + + Args: + pred (torch.Tensor): The prediction. + gaussian_target (torch.Tensor): The learning target of the prediction + in gaussian distribution. + alpha (float, optional): A balanced form for Focal Loss. + Defaults to 2.0. + gamma (float, optional): The gamma for calculating the modulating + factor. Defaults to 4.0. + """ + eps = 1e-12 + pos_weights = gaussian_target.eq(1) + neg_weights = (1 - gaussian_target).pow(gamma) + pos_loss = -(pred + eps).log() * (1 - pred).pow(alpha) * pos_weights + neg_loss = -(1 - pred + eps).log() * pred.pow(alpha) * neg_weights + return pos_loss + neg_loss + + +@LOSSES.register_module() +class GaussianFocalLoss(nn.Module): + """GaussianFocalLoss is a variant of focal loss. + + More details can be found in the `paper + `_ + Code is modified from `kp_utils.py + `_ # noqa: E501 + Please notice that the target in GaussianFocalLoss is a gaussian heatmap, + not 0/1 binary target. + + Args: + alpha (float): Power of prediction. + gamma (float): Power of target for negative samples. + reduction (str): Options are "none", "mean" and "sum". + loss_weight (float): Loss weight of current loss. + """ + + def __init__(self, + alpha=2.0, + gamma=4.0, + reduction='mean', + loss_weight=1.0): + super(GaussianFocalLoss, self).__init__() + self.alpha = alpha + self.gamma = gamma + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None): + """Forward function. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning target of the prediction + in gaussian distribution. + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Defaults to None. 
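In `gaussian_focal_loss` above, only pixels where the heatmap target is exactly 1 act as positives; every other pixel is a negative whose loss is down-weighted by `(1 - target)**gamma`. A hand-check on a three-pixel map (assumed values):

```python
import torch

pred = torch.tensor([0.9, 0.5, 0.1])
target = torch.tensor([1.0, 0.8, 0.0])  # only pixel 0 is a positive

loss = gaussian_focal_loss(pred, target, reduction='none')
# pixel 0 (pos): -log(0.9) * (1 - 0.9)**2          ~= 1.05e-3
# pixel 1 (neg): -log(0.5) * 0.5**2 * (1 - 0.8)**4 ~= 2.77e-4
# pixel 2 (neg): -log(0.9) * 0.1**2 * 1**4         ~= 1.05e-3
print(loss)
```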
+ """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + loss_reg = self.loss_weight * gaussian_focal_loss( + pred, + target, + weight, + alpha=self.alpha, + gamma=self.gamma, + reduction=reduction, + avg_factor=avg_factor) + return loss_reg diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/gfocal_loss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/gfocal_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..9d3b8833dc50c76f6741db5341dbf8da3402d07b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/gfocal_loss.py @@ -0,0 +1,188 @@ +import mmcv +import torch.nn as nn +import torch.nn.functional as F + +from ..builder import LOSSES +from .utils import weighted_loss + + +@mmcv.jit(derivate=True, coderize=True) +@weighted_loss +def quality_focal_loss(pred, target, beta=2.0): + r"""Quality Focal Loss (QFL) is from `Generalized Focal Loss: Learning + Qualified and Distributed Bounding Boxes for Dense Object Detection + `_. + + Args: + pred (torch.Tensor): Predicted joint representation of classification + and quality (IoU) estimation with shape (N, C), C is the number of + classes. + target (tuple([torch.Tensor])): Target category label with shape (N,) + and target quality label with shape (N,). + beta (float): The beta parameter for calculating the modulating factor. + Defaults to 2.0. + + Returns: + torch.Tensor: Loss tensor with shape (N,). + """ + assert len(target) == 2, """target for QFL must be a tuple of two elements, + including category label and quality label, respectively""" + # label denotes the category id, score denotes the quality score + label, score = target + + # negatives are supervised by 0 quality score + pred_sigmoid = pred.sigmoid() + scale_factor = pred_sigmoid + zerolabel = scale_factor.new_zeros(pred.shape) + loss = F.binary_cross_entropy_with_logits( + pred, zerolabel, reduction='none') * scale_factor.pow(beta) + + # FG cat_id: [0, num_classes -1], BG cat_id: num_classes + bg_class_ind = pred.size(1) + pos = ((label >= 0) & (label < bg_class_ind)).nonzero().squeeze(1) + pos_label = label[pos].long() + # positives are supervised by bbox quality (IoU) score + scale_factor = score[pos] - pred_sigmoid[pos, pos_label] + loss[pos, pos_label] = F.binary_cross_entropy_with_logits( + pred[pos, pos_label], score[pos], + reduction='none') * scale_factor.abs().pow(beta) + + loss = loss.sum(dim=1, keepdim=False) + return loss + + +@mmcv.jit(derivate=True, coderize=True) +@weighted_loss +def distribution_focal_loss(pred, label): + r"""Distribution Focal Loss (DFL) is from `Generalized Focal Loss: Learning + Qualified and Distributed Bounding Boxes for Dense Object Detection + `_. + + Args: + pred (torch.Tensor): Predicted general distribution of bounding boxes + (before softmax) with shape (N, n+1), n is the max value of the + integral set `{0, ..., n}` in paper. + label (torch.Tensor): Target distance label for bounding boxes with + shape (N,). + + Returns: + torch.Tensor: Loss tensor with shape (N,). 
+ """ + dis_left = label.long() + dis_right = dis_left + 1 + weight_left = dis_right.float() - label + weight_right = label - dis_left.float() + loss = F.cross_entropy(pred, dis_left, reduction='none') * weight_left \ + + F.cross_entropy(pred, dis_right, reduction='none') * weight_right + return loss + + +@LOSSES.register_module() +class QualityFocalLoss(nn.Module): + r"""Quality Focal Loss (QFL) is a variant of `Generalized Focal Loss: + Learning Qualified and Distributed Bounding Boxes for Dense Object + Detection `_. + + Args: + use_sigmoid (bool): Whether sigmoid operation is conducted in QFL. + Defaults to True. + beta (float): The beta parameter for calculating the modulating factor. + Defaults to 2.0. + reduction (str): Options are "none", "mean" and "sum". + loss_weight (float): Loss weight of current loss. + """ + + def __init__(self, + use_sigmoid=True, + beta=2.0, + reduction='mean', + loss_weight=1.0): + super(QualityFocalLoss, self).__init__() + assert use_sigmoid is True, 'Only sigmoid in QFL supported now.' + self.use_sigmoid = use_sigmoid + self.beta = beta + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None): + """Forward function. + + Args: + pred (torch.Tensor): Predicted joint representation of + classification and quality (IoU) estimation with shape (N, C), + C is the number of classes. + target (tuple([torch.Tensor])): Target category label with shape + (N,) and target quality label with shape (N,). + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Defaults to None. + """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + if self.use_sigmoid: + loss_cls = self.loss_weight * quality_focal_loss( + pred, + target, + weight, + beta=self.beta, + reduction=reduction, + avg_factor=avg_factor) + else: + raise NotImplementedError + return loss_cls + + +@LOSSES.register_module() +class DistributionFocalLoss(nn.Module): + r"""Distribution Focal Loss (DFL) is a variant of `Generalized Focal Loss: + Learning Qualified and Distributed Bounding Boxes for Dense Object + Detection `_. + + Args: + reduction (str): Options are `'none'`, `'mean'` and `'sum'`. + loss_weight (float): Loss weight of current loss. + """ + + def __init__(self, reduction='mean', loss_weight=1.0): + super(DistributionFocalLoss, self).__init__() + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None): + """Forward function. + + Args: + pred (torch.Tensor): Predicted general distribution of bounding + boxes (before softmax) with shape (N, n+1), n is the max value + of the integral set `{0, ..., n}` in paper. + target (torch.Tensor): Target distance label for bounding boxes + with shape (N,). + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Defaults to None. 
+ """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + loss_cls = self.loss_weight * distribution_focal_loss( + pred, target, weight, reduction=reduction, avg_factor=avg_factor) + return loss_cls diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/ghm_loss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/ghm_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..8969a23fd98bb746415f96ac5e4ad9e37ba3af52 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/ghm_loss.py @@ -0,0 +1,172 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from ..builder import LOSSES + + +def _expand_onehot_labels(labels, label_weights, label_channels): + bin_labels = labels.new_full((labels.size(0), label_channels), 0) + inds = torch.nonzero( + (labels >= 0) & (labels < label_channels), as_tuple=False).squeeze() + if inds.numel() > 0: + bin_labels[inds, labels[inds]] = 1 + bin_label_weights = label_weights.view(-1, 1).expand( + label_weights.size(0), label_channels) + return bin_labels, bin_label_weights + + +# TODO: code refactoring to make it consistent with other losses +@LOSSES.register_module() +class GHMC(nn.Module): + """GHM Classification Loss. + + Details of the theorem can be viewed in the paper + `Gradient Harmonized Single-stage Detector + `_. + + Args: + bins (int): Number of the unit regions for distribution calculation. + momentum (float): The parameter for moving average. + use_sigmoid (bool): Can only be true for BCE based loss now. + loss_weight (float): The weight of the total GHM-C loss. + """ + + def __init__(self, bins=10, momentum=0, use_sigmoid=True, loss_weight=1.0): + super(GHMC, self).__init__() + self.bins = bins + self.momentum = momentum + edges = torch.arange(bins + 1).float() / bins + self.register_buffer('edges', edges) + self.edges[-1] += 1e-6 + if momentum > 0: + acc_sum = torch.zeros(bins) + self.register_buffer('acc_sum', acc_sum) + self.use_sigmoid = use_sigmoid + if not self.use_sigmoid: + raise NotImplementedError + self.loss_weight = loss_weight + + def forward(self, pred, target, label_weight, *args, **kwargs): + """Calculate the GHM-C loss. + + Args: + pred (float tensor of size [batch_num, class_num]): + The direct prediction of classification fc layer. + target (float tensor of size [batch_num, class_num]): + Binary class target for each sample. + label_weight (float tensor of size [batch_num, class_num]): + the value is 1 if the sample is valid and 0 if ignored. + Returns: + The gradient harmonized loss. 
+ """ + # the target should be binary class label + if pred.dim() != target.dim(): + target, label_weight = _expand_onehot_labels( + target, label_weight, pred.size(-1)) + target, label_weight = target.float(), label_weight.float() + edges = self.edges + mmt = self.momentum + weights = torch.zeros_like(pred) + + # gradient length + g = torch.abs(pred.sigmoid().detach() - target) + + valid = label_weight > 0 + tot = max(valid.float().sum().item(), 1.0) + n = 0 # n valid bins + for i in range(self.bins): + inds = (g >= edges[i]) & (g < edges[i + 1]) & valid + num_in_bin = inds.sum().item() + if num_in_bin > 0: + if mmt > 0: + self.acc_sum[i] = mmt * self.acc_sum[i] \ + + (1 - mmt) * num_in_bin + weights[inds] = tot / self.acc_sum[i] + else: + weights[inds] = tot / num_in_bin + n += 1 + if n > 0: + weights = weights / n + + loss = F.binary_cross_entropy_with_logits( + pred, target, weights, reduction='sum') / tot + return loss * self.loss_weight + + +# TODO: code refactoring to make it consistent with other losses +@LOSSES.register_module() +class GHMR(nn.Module): + """GHM Regression Loss. + + Details of the theorem can be viewed in the paper + `Gradient Harmonized Single-stage Detector + `_. + + Args: + mu (float): The parameter for the Authentic Smooth L1 loss. + bins (int): Number of the unit regions for distribution calculation. + momentum (float): The parameter for moving average. + loss_weight (float): The weight of the total GHM-R loss. + """ + + def __init__(self, mu=0.02, bins=10, momentum=0, loss_weight=1.0): + super(GHMR, self).__init__() + self.mu = mu + self.bins = bins + edges = torch.arange(bins + 1).float() / bins + self.register_buffer('edges', edges) + self.edges[-1] = 1e3 + self.momentum = momentum + if momentum > 0: + acc_sum = torch.zeros(bins) + self.register_buffer('acc_sum', acc_sum) + self.loss_weight = loss_weight + + # TODO: support reduction parameter + def forward(self, pred, target, label_weight, avg_factor=None): + """Calculate the GHM-R loss. + + Args: + pred (float tensor of size [batch_num, 4 (* class_num)]): + The prediction of box regression layer. Channel number can be 4 + or 4 * class_num depending on whether it is class-agnostic. + target (float tensor of size [batch_num, 4 (* class_num)]): + The target regression values with the same size of pred. + label_weight (float tensor of size [batch_num, 4 (* class_num)]): + The weight of each sample, 0 if ignored. + Returns: + The gradient harmonized loss. 
+ """ + mu = self.mu + edges = self.edges + mmt = self.momentum + + # ASL1 loss + diff = pred - target + loss = torch.sqrt(diff * diff + mu * mu) - mu + + # gradient length + g = torch.abs(diff / torch.sqrt(mu * mu + diff * diff)).detach() + weights = torch.zeros_like(g) + + valid = label_weight > 0 + tot = max(label_weight.float().sum().item(), 1.0) + n = 0 # n: valid bins + for i in range(self.bins): + inds = (g >= edges[i]) & (g < edges[i + 1]) & valid + num_in_bin = inds.sum().item() + if num_in_bin > 0: + n += 1 + if mmt > 0: + self.acc_sum[i] = mmt * self.acc_sum[i] \ + + (1 - mmt) * num_in_bin + weights[inds] = tot / self.acc_sum[i] + else: + weights[inds] = tot / num_in_bin + if n > 0: + weights /= n + + loss = loss * weights + loss = loss.sum() / tot + return loss * self.loss_weight diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/iou_loss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/iou_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..4c11a9a6e237ed6c96e194796aa592fee79eddad --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/iou_loss.py @@ -0,0 +1,446 @@ +import math + +import mmcv +import torch +import torch.nn as nn + +from mmdet.core import bbox_overlaps +from ..builder import LOSSES +from .utils import weighted_loss + + +@mmcv.jit(derivate=True, coderize=True) +@weighted_loss +def iou_loss(pred, target, linear=False, eps=1e-6): + """IoU loss. + + Computing the IoU loss between a set of predicted bboxes and target bboxes. + The loss is calculated as negative log of IoU. + + Args: + pred (torch.Tensor): Predicted bboxes of format (x1, y1, x2, y2), + shape (n, 4). + target (torch.Tensor): Corresponding gt bboxes, shape (n, 4). + linear (bool, optional): If True, use linear scale of loss instead of + log scale. Default: False. + eps (float): Eps to avoid log(0). + + Return: + torch.Tensor: Loss tensor. + """ + ious = bbox_overlaps(pred, target, is_aligned=True).clamp(min=eps) + if linear: + loss = 1 - ious + else: + loss = -ious.log() + return loss + + +@mmcv.jit(derivate=True, coderize=True) +@weighted_loss +def bounded_iou_loss(pred, target, beta=0.2, eps=1e-3): + """BIoULoss. + + This is an implementation of paper + `Improving Object Localization with Fitness NMS and Bounded IoU Loss. + `_. + + Args: + pred (torch.Tensor): Predicted bboxes. + target (torch.Tensor): Target bboxes. + beta (float): beta parameter in smoothl1. + eps (float): eps to avoid NaN. 
+ """ + pred_ctrx = (pred[:, 0] + pred[:, 2]) * 0.5 + pred_ctry = (pred[:, 1] + pred[:, 3]) * 0.5 + pred_w = pred[:, 2] - pred[:, 0] + pred_h = pred[:, 3] - pred[:, 1] + with torch.no_grad(): + target_ctrx = (target[:, 0] + target[:, 2]) * 0.5 + target_ctry = (target[:, 1] + target[:, 3]) * 0.5 + target_w = target[:, 2] - target[:, 0] + target_h = target[:, 3] - target[:, 1] + + dx = target_ctrx - pred_ctrx + dy = target_ctry - pred_ctry + + loss_dx = 1 - torch.max( + (target_w - 2 * dx.abs()) / + (target_w + 2 * dx.abs() + eps), torch.zeros_like(dx)) + loss_dy = 1 - torch.max( + (target_h - 2 * dy.abs()) / + (target_h + 2 * dy.abs() + eps), torch.zeros_like(dy)) + loss_dw = 1 - torch.min(target_w / (pred_w + eps), pred_w / + (target_w + eps)) + loss_dh = 1 - torch.min(target_h / (pred_h + eps), pred_h / + (target_h + eps)) + loss_comb = torch.stack([loss_dx, loss_dy, loss_dw, loss_dh], + dim=-1).view(loss_dx.size(0), -1) + + loss = torch.where(loss_comb < beta, 0.5 * loss_comb * loss_comb / beta, + loss_comb - 0.5 * beta) + return loss + + +@mmcv.jit(derivate=True, coderize=True) +@weighted_loss +def giou_loss(pred, target, eps=1e-7): + r"""`Generalized Intersection over Union: A Metric and A Loss for Bounding + Box Regression `_. + + Args: + pred (torch.Tensor): Predicted bboxes of format (x1, y1, x2, y2), + shape (n, 4). + target (torch.Tensor): Corresponding gt bboxes, shape (n, 4). + eps (float): Eps to avoid log(0). + + Return: + Tensor: Loss tensor. + """ + gious = bbox_overlaps(pred, target, mode='giou', is_aligned=True, eps=eps) + loss = 1 - gious + return loss + + +@mmcv.jit(derivate=True, coderize=True) +@weighted_loss +def diou_loss(pred, target, eps=1e-7): + r"""`Implementation of Distance-IoU Loss: Faster and Better + Learning for Bounding Box Regression, https://arxiv.org/abs/1911.08287`_. + + Code is modified from https://github.com/Zzh-tju/DIoU. + + Args: + pred (Tensor): Predicted bboxes of format (x1, y1, x2, y2), + shape (n, 4). + target (Tensor): Corresponding gt bboxes, shape (n, 4). + eps (float): Eps to avoid log(0). + Return: + Tensor: Loss tensor. + """ + # overlap + lt = torch.max(pred[:, :2], target[:, :2]) + rb = torch.min(pred[:, 2:], target[:, 2:]) + wh = (rb - lt).clamp(min=0) + overlap = wh[:, 0] * wh[:, 1] + + # union + ap = (pred[:, 2] - pred[:, 0]) * (pred[:, 3] - pred[:, 1]) + ag = (target[:, 2] - target[:, 0]) * (target[:, 3] - target[:, 1]) + union = ap + ag - overlap + eps + + # IoU + ious = overlap / union + + # enclose area + enclose_x1y1 = torch.min(pred[:, :2], target[:, :2]) + enclose_x2y2 = torch.max(pred[:, 2:], target[:, 2:]) + enclose_wh = (enclose_x2y2 - enclose_x1y1).clamp(min=0) + + cw = enclose_wh[:, 0] + ch = enclose_wh[:, 1] + + c2 = cw**2 + ch**2 + eps + + b1_x1, b1_y1 = pred[:, 0], pred[:, 1] + b1_x2, b1_y2 = pred[:, 2], pred[:, 3] + b2_x1, b2_y1 = target[:, 0], target[:, 1] + b2_x2, b2_y2 = target[:, 2], target[:, 3] + + left = ((b2_x1 + b2_x2) - (b1_x1 + b1_x2))**2 / 4 + right = ((b2_y1 + b2_y2) - (b1_y1 + b1_y2))**2 / 4 + rho2 = left + right + + # DIoU + dious = ious - rho2 / c2 + loss = 1 - dious + return loss + + +@mmcv.jit(derivate=True, coderize=True) +@weighted_loss +def ciou_loss(pred, target, eps=1e-7): + r"""`Implementation of paper `Enhancing Geometric Factors into + Model Learning and Inference for Object Detection and Instance + Segmentation `_. + + Code is modified from https://github.com/Zzh-tju/CIoU. + + Args: + pred (Tensor): Predicted bboxes of format (x1, y1, x2, y2), + shape (n, 4). 
+ target (Tensor): Corresponding gt bboxes, shape (n, 4). + eps (float): Eps to avoid log(0). + Return: + Tensor: Loss tensor. + """ + # overlap + lt = torch.max(pred[:, :2], target[:, :2]) + rb = torch.min(pred[:, 2:], target[:, 2:]) + wh = (rb - lt).clamp(min=0) + overlap = wh[:, 0] * wh[:, 1] + + # union + ap = (pred[:, 2] - pred[:, 0]) * (pred[:, 3] - pred[:, 1]) + ag = (target[:, 2] - target[:, 0]) * (target[:, 3] - target[:, 1]) + union = ap + ag - overlap + eps + + # IoU + ious = overlap / union + + # enclose area + enclose_x1y1 = torch.min(pred[:, :2], target[:, :2]) + enclose_x2y2 = torch.max(pred[:, 2:], target[:, 2:]) + enclose_wh = (enclose_x2y2 - enclose_x1y1).clamp(min=0) + + cw = enclose_wh[:, 0] + ch = enclose_wh[:, 1] + + c2 = cw**2 + ch**2 + eps + + b1_x1, b1_y1 = pred[:, 0], pred[:, 1] + b1_x2, b1_y2 = pred[:, 2], pred[:, 3] + b2_x1, b2_y1 = target[:, 0], target[:, 1] + b2_x2, b2_y2 = target[:, 2], target[:, 3] + + w1, h1 = b1_x2 - b1_x1, b1_y2 - b1_y1 + eps + w2, h2 = b2_x2 - b2_x1, b2_y2 - b2_y1 + eps + + left = ((b2_x1 + b2_x2) - (b1_x1 + b1_x2))**2 / 4 + right = ((b2_y1 + b2_y2) - (b1_y1 + b1_y2))**2 / 4 + rho2 = left + right + + factor = 4 / math.pi**2 + v = factor * torch.pow(torch.atan(w2 / h2) - torch.atan(w1 / h1), 2) + + # CIoU + cious = ious - (rho2 / c2 + v**2 / (1 - ious + v)) + loss = 1 - cious + return loss + + +@LOSSES.register_module() +class IoULoss(nn.Module): + """IoULoss. + + Computing the IoU loss between a set of predicted bboxes and target bboxes. + + Args: + linear (bool): If True, use linear scale of loss instead of log scale. + Default: False. + eps (float): Eps to avoid log(0). + reduction (str): Options are "none", "mean" and "sum". + loss_weight (float): Weight of loss. + """ + + def __init__(self, + linear=False, + eps=1e-6, + reduction='mean', + loss_weight=1.0): + super(IoULoss, self).__init__() + self.linear = linear + self.eps = eps + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + """Forward function. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning target of the prediction. + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Defaults to None. Options are "none", "mean" and "sum". 
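The CIoU computation above extends DIoU by the aspect-ratio penalty `v`; when the two boxes share the same width/height ratio the penalty vanishes and CIoU coincides with DIoU. A quick check of that term in isolation (`v_term` is an editorial helper, not part of the diff):

```python
import math

def v_term(w1, h1, w2, h2):
    factor = 4 / math.pi**2
    return factor * (math.atan(w2 / h2) - math.atan(w1 / h1))**2

print(v_term(2.0, 1.0, 4.0, 2.0))  # 0.0: same aspect ratio, CIoU == DIoU
print(v_term(2.0, 1.0, 1.0, 2.0))  # ~0.168: mismatched ratios penalised
```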
+ """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + if (weight is not None) and (not torch.any(weight > 0)) and ( + reduction != 'none'): + if pred.dim() == weight.dim() + 1: + weight = weight.unsqueeze(1) + return (pred * weight).sum() # 0 + if weight is not None and weight.dim() > 1: + # TODO: remove this in the future + # reduce the weight of shape (n, 4) to (n,) to match the + # iou_loss of shape (n,) + assert weight.shape == pred.shape + weight = weight.mean(-1) + loss = self.loss_weight * iou_loss( + pred, + target, + weight, + linear=self.linear, + eps=self.eps, + reduction=reduction, + avg_factor=avg_factor, + **kwargs) + return loss + + +@LOSSES.register_module() +class BoundedIoULoss(nn.Module): + + def __init__(self, beta=0.2, eps=1e-3, reduction='mean', loss_weight=1.0): + super(BoundedIoULoss, self).__init__() + self.beta = beta + self.eps = eps + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + if weight is not None and not torch.any(weight > 0): + if pred.dim() == weight.dim() + 1: + weight = weight.unsqueeze(1) + return (pred * weight).sum() # 0 + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + loss = self.loss_weight * bounded_iou_loss( + pred, + target, + weight, + beta=self.beta, + eps=self.eps, + reduction=reduction, + avg_factor=avg_factor, + **kwargs) + return loss + + +@LOSSES.register_module() +class GIoULoss(nn.Module): + + def __init__(self, eps=1e-6, reduction='mean', loss_weight=1.0): + super(GIoULoss, self).__init__() + self.eps = eps + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + if weight is not None and not torch.any(weight > 0): + if pred.dim() == weight.dim() + 1: + weight = weight.unsqueeze(1) + return (pred * weight).sum() # 0 + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + if weight is not None and weight.dim() > 1: + # TODO: remove this in the future + # reduce the weight of shape (n, 4) to (n,) to match the + # giou_loss of shape (n,) + assert weight.shape == pred.shape + weight = weight.mean(-1) + loss = self.loss_weight * giou_loss( + pred, + target, + weight, + eps=self.eps, + reduction=reduction, + avg_factor=avg_factor, + **kwargs) + return loss + + +@LOSSES.register_module() +class DIoULoss(nn.Module): + + def __init__(self, eps=1e-6, reduction='mean', loss_weight=1.0): + super(DIoULoss, self).__init__() + self.eps = eps + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + if weight is not None and not torch.any(weight > 0): + if pred.dim() == weight.dim() + 1: + weight = weight.unsqueeze(1) + return (pred * weight).sum() # 0 + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + if weight is not None and weight.dim() > 1: + # TODO: remove this in the future + # reduce the weight of shape (n, 4) to (n,) to match the + # giou_loss of shape (n,) + assert weight.shape == pred.shape + weight = 
weight.mean(-1)
+        loss = self.loss_weight * diou_loss(
+            pred,
+            target,
+            weight,
+            eps=self.eps,
+            reduction=reduction,
+            avg_factor=avg_factor,
+            **kwargs)
+        return loss
+
+
+@LOSSES.register_module()
+class CIoULoss(nn.Module):
+
+    def __init__(self, eps=1e-6, reduction='mean', loss_weight=1.0):
+        super(CIoULoss, self).__init__()
+        self.eps = eps
+        self.reduction = reduction
+        self.loss_weight = loss_weight
+
+    def forward(self,
+                pred,
+                target,
+                weight=None,
+                avg_factor=None,
+                reduction_override=None,
+                **kwargs):
+        if weight is not None and not torch.any(weight > 0):
+            if pred.dim() == weight.dim() + 1:
+                weight = weight.unsqueeze(1)
+            return (pred * weight).sum()  # 0
+        assert reduction_override in (None, 'none', 'mean', 'sum')
+        reduction = (
+            reduction_override if reduction_override else self.reduction)
+        if weight is not None and weight.dim() > 1:
+            # TODO: remove this in the future
+            # reduce the weight of shape (n, 4) to (n,) to match the
+            # ciou_loss of shape (n,)
+            assert weight.shape == pred.shape
+            weight = weight.mean(-1)
+        loss = self.loss_weight * ciou_loss(
+            pred,
+            target,
+            weight,
+            eps=self.eps,
+            reduction=reduction,
+            avg_factor=avg_factor,
+            **kwargs)
+        return loss
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/kd_loss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/kd_loss.py
new file mode 100644
index 0000000000000000000000000000000000000000..f3abb68d4f7b3eec98b873f69c1105a22eb33913
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/kd_loss.py
@@ -0,0 +1,87 @@
+import mmcv
+import torch.nn as nn
+import torch.nn.functional as F
+
+from ..builder import LOSSES
+from .utils import weighted_loss
+
+
+@mmcv.jit(derivate=True, coderize=True)
+@weighted_loss
+def knowledge_distillation_kl_div_loss(pred,
+                                       soft_label,
+                                       T,
+                                       detach_target=True):
+    r"""Loss function for knowledge distilling using KL divergence.
+
+    Args:
+        pred (Tensor): Predicted logits with shape (N, n + 1).
+        soft_label (Tensor): Target logits with shape (N, n + 1).
+        T (int): Temperature for distillation.
+        detach_target (bool): Remove soft_label from automatic
+            differentiation. Defaults to True.
+
+    Returns:
+        torch.Tensor: Loss tensor with shape (N,).
+    """
+    assert pred.size() == soft_label.size()
+    target = F.softmax(soft_label / T, dim=1)
+    if detach_target:
+        target = target.detach()
+
+    kd_loss = F.kl_div(
+        F.log_softmax(pred / T, dim=1), target, reduction='none').mean(1) * (
+            T * T)
+
+    return kd_loss
+
+
+@LOSSES.register_module()
+class KnowledgeDistillationKLDivLoss(nn.Module):
+    """Loss function for knowledge distilling using KL divergence.
+
+    Args:
+        reduction (str): Options are `'none'`, `'mean'` and `'sum'`.
+        loss_weight (float): Loss weight of current loss.
+        T (int): Temperature for distillation.
+    """
+
+    def __init__(self, reduction='mean', loss_weight=1.0, T=10):
+        super(KnowledgeDistillationKLDivLoss, self).__init__()
+        assert T >= 1
+        self.reduction = reduction
+        self.loss_weight = loss_weight
+        self.T = T
+
+    def forward(self,
+                pred,
+                soft_label,
+                weight=None,
+                avg_factor=None,
+                reduction_override=None):
+        """Forward function.
+
+        Args:
+            pred (Tensor): Predicted logits with shape (N, n + 1).
+            soft_label (Tensor): Target logits with shape (N, n + 1).
+            weight (torch.Tensor, optional): The weight of loss for each
+                prediction. Defaults to None.
+            avg_factor (int, optional): Average factor that is used to average
+                the loss. Defaults to None.
+            reduction_override (str, optional): The reduction method used to
+                override the original reduction method of the loss.
+                Defaults to None.
+        """
+        assert reduction_override in (None, 'none', 'mean', 'sum')
+
+        reduction = (
+            reduction_override if reduction_override else self.reduction)
+
+        loss_kd = self.loss_weight * knowledge_distillation_kl_div_loss(
+            pred,
+            soft_label,
+            weight,
+            reduction=reduction,
+            avg_factor=avg_factor,
+            T=self.T)
+
+        return loss_kd
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/mse_loss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/mse_loss.py
new file mode 100644
index 0000000000000000000000000000000000000000..54b5db740479db1b52aed1d4569275093822c3e9
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/mse_loss.py
@@ -0,0 +1,56 @@
+import torch.nn as nn
+import torch.nn.functional as F
+
+from ..builder import LOSSES
+from .utils import weighted_loss
+
+
+@weighted_loss
+def mse_loss(pred, target):
+    """Wrapper of MSE loss."""
+    return F.mse_loss(pred, target, reduction='none')
+
+
+@LOSSES.register_module()
+class MSELoss(nn.Module):
+    """MSELoss.
+
+    Args:
+        reduction (str, optional): The method that reduces the loss to a
+            scalar. Options are "none", "mean" and "sum".
+        loss_weight (float, optional): The weight of the loss. Defaults to 1.0
+    """
+
+    def __init__(self, reduction='mean', loss_weight=1.0):
+        super().__init__()
+        self.reduction = reduction
+        self.loss_weight = loss_weight
+
+    def forward(self,
+                pred,
+                target,
+                weight=None,
+                avg_factor=None,
+                reduction_override=None):
+        """Forward function of loss.
+
+        Args:
+            pred (torch.Tensor): The prediction.
+            target (torch.Tensor): The learning target of the prediction.
+            weight (torch.Tensor, optional): Weight of the loss for each
+                prediction. Defaults to None.
+            avg_factor (int, optional): Average factor that is used to average
+                the loss. Defaults to None.
+            reduction_override (str, optional): The reduction method used to
+                override the original reduction method of the loss.
+                Defaults to None.
+
+        Returns:
+            torch.Tensor: The calculated loss
+        """
+        assert reduction_override in (None, 'none', 'mean', 'sum')
+        reduction = (
+            reduction_override if reduction_override else self.reduction)
+        loss = self.loss_weight * mse_loss(
+            pred, target, weight, reduction=reduction, avg_factor=avg_factor)
+        return loss
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/pisa_loss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/pisa_loss.py
new file mode 100644
index 0000000000000000000000000000000000000000..4a48adfcd400bb07b719a6fbd5a8af0508820629
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/pisa_loss.py
@@ -0,0 +1,183 @@
+import mmcv
+import torch
+
+from mmdet.core import bbox_overlaps
+
+
+@mmcv.jit(derivate=True, coderize=True)
+def isr_p(cls_score,
+          bbox_pred,
+          bbox_targets,
+          rois,
+          sampling_results,
+          loss_cls,
+          bbox_coder,
+          k=2,
+          bias=0,
+          num_class=80):
+    """Importance-based Sample Reweighting (ISR_P), positive part.
+
+    Args:
+        cls_score (Tensor): Predicted classification scores.
+        bbox_pred (Tensor): Predicted bbox deltas.
+        bbox_targets (tuple[Tensor]): A tuple of bbox targets; they are
+            labels, label_weights, bbox_targets, bbox_weights, respectively.
+        rois (Tensor): Anchors (single_stage) in shape (n, 4) or RoIs
+            (two_stage) in shape (n, 5).
+        sampling_results (obj): Sampling results.
+ loss_cls (func): Classification loss func of the head. + bbox_coder (obj): BBox coder of the head. + k (float): Power of the non-linear mapping. + bias (float): Shift of the non-linear mapping. + num_class (int): Number of classes, default: 80. + + Return: + tuple([Tensor]): labels, imp_based_label_weights, bbox_targets, + bbox_target_weights + """ + + labels, label_weights, bbox_targets, bbox_weights = bbox_targets + pos_label_inds = ((labels >= 0) & + (labels < num_class)).nonzero().reshape(-1) + pos_labels = labels[pos_label_inds] + + # if no positive samples, return the original targets + num_pos = float(pos_label_inds.size(0)) + if num_pos == 0: + return labels, label_weights, bbox_targets, bbox_weights + + # merge pos_assigned_gt_inds of per image to a single tensor + gts = list() + last_max_gt = 0 + for i in range(len(sampling_results)): + gt_i = sampling_results[i].pos_assigned_gt_inds + gts.append(gt_i + last_max_gt) + if len(gt_i) != 0: + last_max_gt = gt_i.max() + 1 + gts = torch.cat(gts) + assert len(gts) == num_pos + + cls_score = cls_score.detach() + bbox_pred = bbox_pred.detach() + + # For single stage detectors, rois here indicate anchors, in shape (N, 4) + # For two stage detectors, rois are in shape (N, 5) + if rois.size(-1) == 5: + pos_rois = rois[pos_label_inds][:, 1:] + else: + pos_rois = rois[pos_label_inds] + + if bbox_pred.size(-1) > 4: + bbox_pred = bbox_pred.view(bbox_pred.size(0), -1, 4) + pos_delta_pred = bbox_pred[pos_label_inds, pos_labels].view(-1, 4) + else: + pos_delta_pred = bbox_pred[pos_label_inds].view(-1, 4) + + # compute iou of the predicted bbox and the corresponding GT + pos_delta_target = bbox_targets[pos_label_inds].view(-1, 4) + pos_bbox_pred = bbox_coder.decode(pos_rois, pos_delta_pred) + target_bbox_pred = bbox_coder.decode(pos_rois, pos_delta_target) + ious = bbox_overlaps(pos_bbox_pred, target_bbox_pred, is_aligned=True) + + pos_imp_weights = label_weights[pos_label_inds] + # Two steps to compute IoU-HLR. 
Samples are first sorted by IoU locally, + # then sorted again within the same-rank group + max_l_num = pos_labels.bincount().max() + for label in pos_labels.unique(): + l_inds = (pos_labels == label).nonzero().view(-1) + l_gts = gts[l_inds] + for t in l_gts.unique(): + t_inds = l_inds[l_gts == t] + t_ious = ious[t_inds] + _, t_iou_rank_idx = t_ious.sort(descending=True) + _, t_iou_rank = t_iou_rank_idx.sort() + ious[t_inds] += max_l_num - t_iou_rank.float() + l_ious = ious[l_inds] + _, l_iou_rank_idx = l_ious.sort(descending=True) + _, l_iou_rank = l_iou_rank_idx.sort() # IoU-HLR + # linearly map HLR to label weights + pos_imp_weights[l_inds] *= (max_l_num - l_iou_rank.float()) / max_l_num + + pos_imp_weights = (bias + pos_imp_weights * (1 - bias)).pow(k) + + # normalize to make the new weighted loss value equal to the original loss + pos_loss_cls = loss_cls( + cls_score[pos_label_inds], pos_labels, reduction_override='none') + if pos_loss_cls.dim() > 1: + ori_pos_loss_cls = pos_loss_cls * label_weights[pos_label_inds][:, + None] + new_pos_loss_cls = pos_loss_cls * pos_imp_weights[:, None] + else: + ori_pos_loss_cls = pos_loss_cls * label_weights[pos_label_inds] + new_pos_loss_cls = pos_loss_cls * pos_imp_weights + pos_loss_cls_ratio = ori_pos_loss_cls.sum() / new_pos_loss_cls.sum() + pos_imp_weights = pos_imp_weights * pos_loss_cls_ratio + label_weights[pos_label_inds] = pos_imp_weights + + bbox_targets = labels, label_weights, bbox_targets, bbox_weights + return bbox_targets + + +@mmcv.jit(derivate=True, coderize=True) +def carl_loss(cls_score, + labels, + bbox_pred, + bbox_targets, + loss_bbox, + k=1, + bias=0.2, + avg_factor=None, + sigmoid=False, + num_class=80): + """Classification-Aware Regression Loss (CARL). + + Args: + cls_score (Tensor): Predicted classification scores. + labels (Tensor): Targets of classification. + bbox_pred (Tensor): Predicted bbox deltas. + bbox_targets (Tensor): Target of bbox regression. + loss_bbox (func): Regression loss func of the head. + bbox_coder (obj): BBox coder of the head. + k (float): Power of the non-linear mapping. + bias (float): Shift of the non-linear mapping. + avg_factor (int): Average factor used in regression loss. + sigmoid (bool): Activation of the classification score. + num_class (int): Number of classes, default: 80. + + Return: + dict: CARL loss dict. + """ + pos_label_inds = ((labels >= 0) & + (labels < num_class)).nonzero().reshape(-1) + if pos_label_inds.numel() == 0: + return dict(loss_carl=cls_score.sum()[None] * 0.) 
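+    # Each positive sample's regression loss is scaled below by
+    # w_i = (bias + (1 - bias) * s_i) ** k, where s_i is the predicted
+    # classification score of the sample's ground-truth class, so boxes
+    # the classifier already rates highly drive the regression loss.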
+ pos_labels = labels[pos_label_inds] + + # multiply pos_cls_score with the corresponding bbox weight + # and remain gradient + if sigmoid: + pos_cls_score = cls_score.sigmoid()[pos_label_inds, pos_labels] + else: + pos_cls_score = cls_score.softmax(-1)[pos_label_inds, pos_labels] + carl_loss_weights = (bias + (1 - bias) * pos_cls_score).pow(k) + + # normalize carl_loss_weight to make its sum equal to num positive + num_pos = float(pos_cls_score.size(0)) + weight_ratio = num_pos / carl_loss_weights.sum() + carl_loss_weights *= weight_ratio + + if avg_factor is None: + avg_factor = bbox_targets.size(0) + # if is class agnostic, bbox pred is in shape (N, 4) + # otherwise, bbox pred is in shape (N, #classes, 4) + if bbox_pred.size(-1) > 4: + bbox_pred = bbox_pred.view(bbox_pred.size(0), -1, 4) + pos_bbox_preds = bbox_pred[pos_label_inds, pos_labels] + else: + pos_bbox_preds = bbox_pred[pos_label_inds] + ori_loss_reg = loss_bbox( + pos_bbox_preds, + bbox_targets[pos_label_inds], + reduction_override='none') / avg_factor + loss_carl = (ori_loss_reg * carl_loss_weights[:, None]).sum() + return dict(loss_carl=loss_carl[None]) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/seesaw_loss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/seesaw_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..558cc9fd82f13076853e85aee5d8bf911b9369c3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/seesaw_loss.py @@ -0,0 +1,261 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F + +from ..builder import LOSSES +from .accuracy import accuracy +from .cross_entropy_loss import cross_entropy +from .utils import weight_reduce_loss + + +def seesaw_ce_loss(cls_score, + labels, + label_weights, + cum_samples, + num_classes, + p, + q, + eps, + reduction='mean', + avg_factor=None): + """Calculate the Seesaw CrossEntropy loss. + + Args: + cls_score (torch.Tensor): The prediction with shape (N, C), + C is the number of classes. + labels (torch.Tensor): The learning label of the prediction. + label_weights (torch.Tensor): Sample-wise loss weight. + cum_samples (torch.Tensor): Cumulative samples for each category. + num_classes (int): The number of classes. + p (float): The ``p`` in the mitigation factor. + q (float): The ``q`` in the compenstation factor. + eps (float): The minimal value of divisor to smooth + the computation of compensation factor + reduction (str, optional): The method used to reduce the loss. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. 
+
+    Returns:
+        torch.Tensor: The calculated loss
+    """
+    assert cls_score.size(-1) == num_classes
+    assert len(cum_samples) == num_classes
+
+    onehot_labels = F.one_hot(labels, num_classes)
+    seesaw_weights = cls_score.new_ones(onehot_labels.size())
+
+    # mitigation factor
+    if p > 0:
+        sample_ratio_matrix = cum_samples[None, :].clamp(
+            min=1) / cum_samples[:, None].clamp(min=1)
+        index = (sample_ratio_matrix < 1.0).float()
+        sample_weights = sample_ratio_matrix.pow(p) * index + (1 - index)
+        mitigation_factor = sample_weights[labels.long(), :]
+        seesaw_weights = seesaw_weights * mitigation_factor
+
+    # compensation factor
+    if q > 0:
+        scores = F.softmax(cls_score.detach(), dim=1)
+        self_scores = scores[
+            torch.arange(0, len(scores)).to(scores.device).long(),
+            labels.long()]
+        score_matrix = scores / self_scores[:, None].clamp(min=eps)
+        index = (score_matrix > 1.0).float()
+        compensation_factor = score_matrix.pow(q) * index + (1 - index)
+        seesaw_weights = seesaw_weights * compensation_factor
+
+    cls_score = cls_score + (seesaw_weights.log() * (1 - onehot_labels))
+
+    loss = F.cross_entropy(cls_score, labels, weight=None, reduction='none')
+
+    if label_weights is not None:
+        label_weights = label_weights.float()
+    loss = weight_reduce_loss(
+        loss, weight=label_weights, reduction=reduction, avg_factor=avg_factor)
+    return loss
+
+
+@LOSSES.register_module()
+class SeesawLoss(nn.Module):
+    """
+    Seesaw Loss for Long-Tailed Instance Segmentation (CVPR 2021)
+    arXiv: https://arxiv.org/abs/2008.10032
+
+    Args:
+        use_sigmoid (bool, optional): Whether the prediction uses sigmoid
+            or softmax. Only False is supported.
+        p (float, optional): The ``p`` in the mitigation factor.
+            Defaults to 0.8.
+        q (float, optional): The ``q`` in the compensation factor.
+            Defaults to 2.0.
+        num_classes (int, optional): The number of classes.
+            Defaults to 1203 for the LVIS v1 dataset.
+        eps (float, optional): The minimal value of divisor to smooth
+            the computation of the compensation factor.
+        reduction (str, optional): The method that reduces the loss to a
+            scalar. Options are "none", "mean" and "sum".
+        loss_weight (float, optional): The weight of the loss. Defaults to 1.0
+        return_dict (bool, optional): Whether to return the losses as a dict.
+            Defaults to True.
+    """
+
+    def __init__(self,
+                 use_sigmoid=False,
+                 p=0.8,
+                 q=2.0,
+                 num_classes=1203,
+                 eps=1e-2,
+                 reduction='mean',
+                 loss_weight=1.0,
+                 return_dict=True):
+        super(SeesawLoss, self).__init__()
+        assert not use_sigmoid
+        self.use_sigmoid = False
+        self.p = p
+        self.q = q
+        self.num_classes = num_classes
+        self.eps = eps
+        self.reduction = reduction
+        self.loss_weight = loss_weight
+        self.return_dict = return_dict
+
+        # 0 for pos, 1 for neg
+        self.cls_criterion = seesaw_ce_loss
+
+        # cumulative samples for each category
+        self.register_buffer(
+            'cum_samples',
+            torch.zeros(self.num_classes + 1, dtype=torch.float))
+
+        # custom output channels of the classifier
+        self.custom_cls_channels = True
+        # custom activation of cls_score
+        self.custom_activation = True
+        # custom accuracy of the classifier
+        self.custom_accuracy = True
+
+    def _split_cls_score(self, cls_score):
+        # split cls_score to cls_score_classes and cls_score_objectness
+        assert cls_score.size(-1) == self.num_classes + 2
+        cls_score_classes = cls_score[..., :-2]
+        cls_score_objectness = cls_score[..., -2:]
+        return cls_score_classes, cls_score_objectness
+
+    def get_cls_channels(self, num_classes):
+        """Get custom classification channels.
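+
+        A SeesawLoss head predicts ``num_classes + 2`` channels:
+        ``num_classes`` class scores plus a two-channel (positive vs.
+        negative) objectness score, split out by ``_split_cls_score``.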
+ + Args: + num_classes (int): The number of classes. + + Returns: + int: The custom classification channels. + """ + assert num_classes == self.num_classes + return num_classes + 2 + + def get_activation(self, cls_score): + """Get custom activation of cls_score. + + Args: + cls_score (torch.Tensor): The prediction with shape (N, C + 2). + + Returns: + torch.Tensor: The custom activation of cls_score with shape + (N, C + 1). + """ + cls_score_classes, cls_score_objectness = self._split_cls_score( + cls_score) + score_classes = F.softmax(cls_score_classes, dim=-1) + score_objectness = F.softmax(cls_score_objectness, dim=-1) + score_pos = score_objectness[..., [0]] + score_neg = score_objectness[..., [1]] + score_classes = score_classes * score_pos + scores = torch.cat([score_classes, score_neg], dim=-1) + return scores + + def get_accuracy(self, cls_score, labels): + """Get custom accuracy w.r.t. cls_score and labels. + + Args: + cls_score (torch.Tensor): The prediction with shape (N, C + 2). + labels (torch.Tensor): The learning label of the prediction. + + Returns: + Dict [str, torch.Tensor]: The accuracy for objectness and classes, + respectively. + """ + pos_inds = labels < self.num_classes + obj_labels = (labels == self.num_classes).long() + cls_score_classes, cls_score_objectness = self._split_cls_score( + cls_score) + acc_objectness = accuracy(cls_score_objectness, obj_labels) + acc_classes = accuracy(cls_score_classes[pos_inds], labels[pos_inds]) + acc = dict() + acc['acc_objectness'] = acc_objectness + acc['acc_classes'] = acc_classes + return acc + + def forward(self, + cls_score, + labels, + label_weights=None, + avg_factor=None, + reduction_override=None): + """Forward function. + + Args: + cls_score (torch.Tensor): The prediction with shape (N, C + 2). + labels (torch.Tensor): The learning label of the prediction. + label_weights (torch.Tensor, optional): Sample-wise loss weight. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction (str, optional): The method used to reduce the loss. + Options are "none", "mean" and "sum". + Returns: + torch.Tensor | Dict [str, torch.Tensor]: + if return_dict == False: The calculated loss | + if return_dict == True: The dict of calculated losses + for objectness and classes, respectively. 
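+
+        Example (an illustrative sketch with a tiny 3-class setup; the
+            channel count follows the ``num_classes + 2`` layout above):
+            >>> import torch
+            >>> self = SeesawLoss(num_classes=3)
+            >>> cls_score = torch.rand(4, 5)  # 3 classes + 2 objectness
+            >>> labels = torch.tensor([0, 1, 2, 3])  # 3 == background
+            >>> losses = self(cls_score, labels)
+            >>> sorted(losses.keys())
+            ['loss_cls_classes', 'loss_cls_objectness']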
+ """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + assert cls_score.size(-1) == self.num_classes + 2 + pos_inds = labels < self.num_classes + # 0 for pos, 1 for neg + obj_labels = (labels == self.num_classes).long() + + # accumulate the samples for each category + unique_labels = labels.unique() + for u_l in unique_labels: + inds_ = labels == u_l.item() + self.cum_samples[u_l] += inds_.sum() + + if label_weights is not None: + label_weights = label_weights.float() + else: + label_weights = labels.new_ones(labels.size(), dtype=torch.float) + + cls_score_classes, cls_score_objectness = self._split_cls_score( + cls_score) + # calculate loss_cls_classes (only need pos samples) + if pos_inds.sum() > 0: + loss_cls_classes = self.loss_weight * self.cls_criterion( + cls_score_classes[pos_inds], labels[pos_inds], + label_weights[pos_inds], self.cum_samples[:self.num_classes], + self.num_classes, self.p, self.q, self.eps, reduction, + avg_factor) + else: + loss_cls_classes = cls_score_classes[pos_inds].sum() + # calculate loss_cls_objectness + loss_cls_objectness = self.loss_weight * cross_entropy( + cls_score_objectness, obj_labels, label_weights, reduction, + avg_factor) + + if self.return_dict: + loss_cls = dict() + loss_cls['loss_cls_objectness'] = loss_cls_objectness + loss_cls['loss_cls_classes'] = loss_cls_classes + else: + loss_cls = loss_cls_classes + loss_cls_objectness + return loss_cls diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/smooth_l1_loss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/smooth_l1_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..ec9c98a52d1932d6ccff18938c17c36755bf1baf --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/smooth_l1_loss.py @@ -0,0 +1,139 @@ +import mmcv +import torch +import torch.nn as nn + +from ..builder import LOSSES +from .utils import weighted_loss + + +@mmcv.jit(derivate=True, coderize=True) +@weighted_loss +def smooth_l1_loss(pred, target, beta=1.0): + """Smooth L1 loss. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning target of the prediction. + beta (float, optional): The threshold in the piecewise function. + Defaults to 1.0. + + Returns: + torch.Tensor: Calculated loss + """ + assert beta > 0 + assert pred.size() == target.size() and target.numel() > 0 + diff = torch.abs(pred - target) + loss = torch.where(diff < beta, 0.5 * diff * diff / beta, + diff - 0.5 * beta) + return loss + + +@mmcv.jit(derivate=True, coderize=True) +@weighted_loss +def l1_loss(pred, target): + """L1 loss. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning target of the prediction. + + Returns: + torch.Tensor: Calculated loss + """ + assert pred.size() == target.size() and target.numel() > 0 + loss = torch.abs(pred - target) + return loss + + +@LOSSES.register_module() +class SmoothL1Loss(nn.Module): + """Smooth L1 loss. + + Args: + beta (float, optional): The threshold in the piecewise function. + Defaults to 1.0. + reduction (str, optional): The method to reduce the loss. + Options are "none", "mean" and "sum". Defaults to "mean". + loss_weight (float, optional): The weight of loss. 
+ """ + + def __init__(self, beta=1.0, reduction='mean', loss_weight=1.0): + super(SmoothL1Loss, self).__init__() + self.beta = beta + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None, + **kwargs): + """Forward function. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning target of the prediction. + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Defaults to None. + """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + loss_bbox = self.loss_weight * smooth_l1_loss( + pred, + target, + weight, + beta=self.beta, + reduction=reduction, + avg_factor=avg_factor, + **kwargs) + return loss_bbox + + +@LOSSES.register_module() +class L1Loss(nn.Module): + """L1 loss. + + Args: + reduction (str, optional): The method to reduce the loss. + Options are "none", "mean" and "sum". + loss_weight (float, optional): The weight of loss. + """ + + def __init__(self, reduction='mean', loss_weight=1.0): + super(L1Loss, self).__init__() + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None): + """Forward function. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning target of the prediction. + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Defaults to None. + """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + loss_bbox = self.loss_weight * l1_loss( + pred, target, weight, reduction=reduction, avg_factor=avg_factor) + return loss_bbox diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/utils.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..4756d7fcefd7cda1294c2662b4ca3e90c0a8e124 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/utils.py @@ -0,0 +1,100 @@ +import functools + +import mmcv +import torch.nn.functional as F + + +def reduce_loss(loss, reduction): + """Reduce loss as specified. + + Args: + loss (Tensor): Elementwise loss tensor. + reduction (str): Options are "none", "mean" and "sum". + + Return: + Tensor: Reduced loss tensor. + """ + reduction_enum = F._Reduction.get_enum(reduction) + # none: 0, elementwise_mean:1, sum: 2 + if reduction_enum == 0: + return loss + elif reduction_enum == 1: + return loss.mean() + elif reduction_enum == 2: + return loss.sum() + + +@mmcv.jit(derivate=True, coderize=True) +def weight_reduce_loss(loss, weight=None, reduction='mean', avg_factor=None): + """Apply element-wise weight and reduce loss. + + Args: + loss (Tensor): Element-wise loss. + weight (Tensor): Element-wise weights. 
+ reduction (str): Same as built-in losses of PyTorch. + avg_factor (float): Avarage factor when computing the mean of losses. + + Returns: + Tensor: Processed loss values. + """ + # if weight is specified, apply element-wise weight + if weight is not None: + loss = loss * weight + + # if avg_factor is not specified, just reduce the loss + if avg_factor is None: + loss = reduce_loss(loss, reduction) + else: + # if reduction is mean, then average the loss by avg_factor + if reduction == 'mean': + loss = loss.sum() / avg_factor + # if reduction is 'none', then do nothing, otherwise raise an error + elif reduction != 'none': + raise ValueError('avg_factor can not be used with reduction="sum"') + return loss + + +def weighted_loss(loss_func): + """Create a weighted version of a given loss function. + + To use this decorator, the loss function must have the signature like + `loss_func(pred, target, **kwargs)`. The function only needs to compute + element-wise loss without any reduction. This decorator will add weight + and reduction arguments to the function. The decorated function will have + the signature like `loss_func(pred, target, weight=None, reduction='mean', + avg_factor=None, **kwargs)`. + + :Example: + + >>> import torch + >>> @weighted_loss + >>> def l1_loss(pred, target): + >>> return (pred - target).abs() + + >>> pred = torch.Tensor([0, 2, 3]) + >>> target = torch.Tensor([1, 1, 1]) + >>> weight = torch.Tensor([1, 0, 1]) + + >>> l1_loss(pred, target) + tensor(1.3333) + >>> l1_loss(pred, target, weight) + tensor(1.) + >>> l1_loss(pred, target, reduction='none') + tensor([1., 1., 2.]) + >>> l1_loss(pred, target, weight, avg_factor=2) + tensor(1.5000) + """ + + @functools.wraps(loss_func) + def wrapper(pred, + target, + weight=None, + reduction='mean', + avg_factor=None, + **kwargs): + # get element-wise loss + loss = loss_func(pred, target, **kwargs) + loss = weight_reduce_loss(loss, weight, reduction, avg_factor) + return loss + + return wrapper diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/varifocal_loss.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/varifocal_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..7f00bd6916c04fef45a9aeecb50888266420daf9 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/losses/varifocal_loss.py @@ -0,0 +1,133 @@ +import mmcv +import torch.nn as nn +import torch.nn.functional as F + +from ..builder import LOSSES +from .utils import weight_reduce_loss + + +@mmcv.jit(derivate=True, coderize=True) +def varifocal_loss(pred, + target, + weight=None, + alpha=0.75, + gamma=2.0, + iou_weighted=True, + reduction='mean', + avg_factor=None): + """`Varifocal Loss `_ + + Args: + pred (torch.Tensor): The prediction with shape (N, C), C is the + number of classes + target (torch.Tensor): The learning target of the iou-aware + classification score with shape (N, C), C is the number of classes. + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + alpha (float, optional): A balance factor for the negative part of + Varifocal Loss, which is different from the alpha of Focal Loss. + Defaults to 0.75. + gamma (float, optional): The gamma for calculating the modulating + factor. Defaults to 2.0. + iou_weighted (bool, optional): Whether to weight the loss of the + positive example with the iou target. Defaults to True. + reduction (str, optional): The method used to reduce the loss into + a scalar. Defaults to 'mean'. 
Options are "none", "mean" and + "sum". + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + """ + # pred and target should be of the same size + assert pred.size() == target.size() + pred_sigmoid = pred.sigmoid() + target = target.type_as(pred) + if iou_weighted: + focal_weight = target * (target > 0.0).float() + \ + alpha * (pred_sigmoid - target).abs().pow(gamma) * \ + (target <= 0.0).float() + else: + focal_weight = (target > 0.0).float() + \ + alpha * (pred_sigmoid - target).abs().pow(gamma) * \ + (target <= 0.0).float() + loss = F.binary_cross_entropy_with_logits( + pred, target, reduction='none') * focal_weight + loss = weight_reduce_loss(loss, weight, reduction, avg_factor) + return loss + + +@LOSSES.register_module() +class VarifocalLoss(nn.Module): + + def __init__(self, + use_sigmoid=True, + alpha=0.75, + gamma=2.0, + iou_weighted=True, + reduction='mean', + loss_weight=1.0): + """`Varifocal Loss `_ + + Args: + use_sigmoid (bool, optional): Whether the prediction is + used for sigmoid or softmax. Defaults to True. + alpha (float, optional): A balance factor for the negative part of + Varifocal Loss, which is different from the alpha of Focal + Loss. Defaults to 0.75. + gamma (float, optional): The gamma for calculating the modulating + factor. Defaults to 2.0. + iou_weighted (bool, optional): Whether to weight the loss of the + positive examples with the iou target. Defaults to True. + reduction (str, optional): The method used to reduce the loss into + a scalar. Defaults to 'mean'. Options are "none", "mean" and + "sum". + loss_weight (float, optional): Weight of loss. Defaults to 1.0. + """ + super(VarifocalLoss, self).__init__() + assert use_sigmoid is True, \ + 'Only sigmoid varifocal loss supported now.' + assert alpha >= 0.0 + self.use_sigmoid = use_sigmoid + self.alpha = alpha + self.gamma = gamma + self.iou_weighted = iou_weighted + self.reduction = reduction + self.loss_weight = loss_weight + + def forward(self, + pred, + target, + weight=None, + avg_factor=None, + reduction_override=None): + """Forward function. + + Args: + pred (torch.Tensor): The prediction. + target (torch.Tensor): The learning target of the prediction. + weight (torch.Tensor, optional): The weight of loss for each + prediction. Defaults to None. + avg_factor (int, optional): Average factor that is used to average + the loss. Defaults to None. + reduction_override (str, optional): The reduction method used to + override the original reduction method of the loss. + Options are "none", "mean" and "sum". 
+ + Returns: + torch.Tensor: The calculated loss + """ + assert reduction_override in (None, 'none', 'mean', 'sum') + reduction = ( + reduction_override if reduction_override else self.reduction) + if self.use_sigmoid: + loss_cls = self.loss_weight * varifocal_loss( + pred, + target, + weight, + alpha=self.alpha, + gamma=self.gamma, + iou_weighted=self.iou_weighted, + reduction=reduction, + avg_factor=avg_factor) + else: + raise NotImplementedError + return loss_cls diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..da7e10ea58aec510354848695a4d07feada3152b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/__init__.py @@ -0,0 +1,21 @@ +from .bfp import BFP +from .channel_mapper import ChannelMapper +from .ct_resnet_neck import CTResNetNeck +from .dilated_encoder import DilatedEncoder +from .fpg import FPG +from .fpn import FPN +from .fpn_carafe import FPN_CARAFE +from .hrfpn import HRFPN +from .nas_fpn import NASFPN +from .nasfcos_fpn import NASFCOS_FPN +from .pafpn import PAFPN +from .rfp import RFP +from .ssd_neck import SSDNeck +from .yolo_neck import YOLOV3Neck +from .cbnet_fpn import CBFPN + +__all__ = [ + 'FPN', 'BFP', 'ChannelMapper', 'HRFPN', 'NASFPN', 'FPN_CARAFE', 'PAFPN', + 'NASFCOS_FPN', 'RFP', 'YOLOV3Neck', 'FPG', 'DilatedEncoder', + 'CTResNetNeck', 'SSDNeck', 'CBFPN' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/bfp.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/bfp.py new file mode 100644 index 0000000000000000000000000000000000000000..9f8ee0720ef2b8637aef4c512d1bf14b2ec6b51b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/bfp.py @@ -0,0 +1,101 @@ +import torch.nn.functional as F +from mmcv.cnn import ConvModule +from mmcv.cnn.bricks import NonLocal2d +from mmcv.runner import BaseModule + +from ..builder import NECKS + + +@NECKS.register_module() +class BFP(BaseModule): + """BFP (Balanced Feature Pyramids) + + BFP takes multi-level features as inputs and gather them into a single one, + then refine the gathered feature and scatter the refined results to + multi-level features. This module is used in Libra R-CNN (CVPR 2019), see + the paper `Libra R-CNN: Towards Balanced Learning for Object Detection + `_ for details. + + Args: + in_channels (int): Number of input channels (feature maps of all levels + should have the same channels). + num_levels (int): Number of input feature levels. + conv_cfg (dict): The config dict for convolution layers. + norm_cfg (dict): The config dict for normalization layers. + refine_level (int): Index of integration and refine level of BSF in + multi-level features from bottom to top. + refine_type (str): Type of the refine op, currently support + [None, 'conv', 'non_local']. + init_cfg (dict or list[dict], optional): Initialization config dict. 
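+
+    Example (an illustrative sketch; the channel count and level sizes
+        are made up):
+        >>> import torch
+        >>> inputs = [torch.rand(1, 4, s, s) for s in (32, 16, 8)]
+        >>> self = BFP(in_channels=4, num_levels=3, refine_level=1).eval()
+        >>> outputs = self.forward(inputs)
+        >>> [out.shape[-1] for out in outputs]  # level sizes are kept
+        [32, 16, 8]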
+ """ + + def __init__(self, + in_channels, + num_levels, + refine_level=2, + refine_type=None, + conv_cfg=None, + norm_cfg=None, + init_cfg=dict( + type='Xavier', layer='Conv2d', distribution='uniform')): + super(BFP, self).__init__(init_cfg) + assert refine_type in [None, 'conv', 'non_local'] + + self.in_channels = in_channels + self.num_levels = num_levels + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + + self.refine_level = refine_level + self.refine_type = refine_type + assert 0 <= self.refine_level < self.num_levels + + if self.refine_type == 'conv': + self.refine = ConvModule( + self.in_channels, + self.in_channels, + 3, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg) + elif self.refine_type == 'non_local': + self.refine = NonLocal2d( + self.in_channels, + reduction=1, + use_scale=False, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg) + + def forward(self, inputs): + """Forward function.""" + assert len(inputs) == self.num_levels + + # step 1: gather multi-level features by resize and average + feats = [] + gather_size = inputs[self.refine_level].size()[2:] + for i in range(self.num_levels): + if i < self.refine_level: + gathered = F.adaptive_max_pool2d( + inputs[i], output_size=gather_size) + else: + gathered = F.interpolate( + inputs[i], size=gather_size, mode='nearest') + feats.append(gathered) + + bsf = sum(feats) / len(feats) + + # step 2: refine gathered features + if self.refine_type is not None: + bsf = self.refine(bsf) + + # step 3: scatter refined features to multi-levels by a residual path + outs = [] + for i in range(self.num_levels): + out_size = inputs[i].size()[2:] + if i < self.refine_level: + residual = F.interpolate(bsf, size=out_size, mode='nearest') + else: + residual = F.adaptive_max_pool2d(bsf, output_size=out_size) + outs.append(residual + inputs[i]) + + return tuple(outs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/cbnet_fpn.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/cbnet_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..d4d7e3cb70931e837a0ef1b0fdd86fc5643953f4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/cbnet_fpn.py @@ -0,0 +1,27 @@ +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import xavier_init + +from ..builder import NECKS +from .fpn import FPN +from .. import builder + +@NECKS.register_module() +class CBFPN(FPN): + ''' + FPN with weight sharing + which support mutliple outputs from cbnet + ''' + def forward(self, inputs): + if not isinstance(inputs[0], (list, tuple)): + inputs = [inputs] + + if self.training: + outs = [] + for x in inputs: + out = super().forward(x) + outs.append(out) + return outs + else: + out = super().forward(inputs[-1]) + return out diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/channel_mapper.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/channel_mapper.py new file mode 100644 index 0000000000000000000000000000000000000000..9c4c541fff4b44e6970c90521be27bad0ce24106 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/channel_mapper.py @@ -0,0 +1,99 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule +from mmcv.runner import BaseModule + +from ..builder import NECKS + + +@NECKS.register_module() +class ChannelMapper(BaseModule): + r"""Channel Mapper to reduce/increase channels of backbone features. + + This is used to reduce/increase channels of backbone features. 
+ + Args: + in_channels (List[int]): Number of input channels per scale. + out_channels (int): Number of output channels (used at each scale). + kernel_size (int, optional): kernel_size for reducing channels (used + at each scale). Default: 3. + conv_cfg (dict, optional): Config dict for convolution layer. + Default: None. + norm_cfg (dict, optional): Config dict for normalization layer. + Default: None. + act_cfg (dict, optional): Config dict for activation layer in + ConvModule. Default: dict(type='ReLU'). + num_outs (int, optional): Number of output feature maps. There + would be extra_convs when num_outs larger than the length + of in_channels. + init_cfg (dict or list[dict], optional): Initialization config dict. + Example: + >>> import torch + >>> in_channels = [2, 3, 5, 7] + >>> scales = [340, 170, 84, 43] + >>> inputs = [torch.rand(1, c, s, s) + ... for c, s in zip(in_channels, scales)] + >>> self = ChannelMapper(in_channels, 11, 3).eval() + >>> outputs = self.forward(inputs) + >>> for i in range(len(outputs)): + ... print(f'outputs[{i}].shape = {outputs[i].shape}') + outputs[0].shape = torch.Size([1, 11, 340, 340]) + outputs[1].shape = torch.Size([1, 11, 170, 170]) + outputs[2].shape = torch.Size([1, 11, 84, 84]) + outputs[3].shape = torch.Size([1, 11, 43, 43]) + """ + + def __init__(self, + in_channels, + out_channels, + kernel_size=3, + conv_cfg=None, + norm_cfg=None, + act_cfg=dict(type='ReLU'), + num_outs=None, + init_cfg=dict( + type='Xavier', layer='Conv2d', distribution='uniform')): + super(ChannelMapper, self).__init__(init_cfg) + assert isinstance(in_channels, list) + self.extra_convs = None + if num_outs is None: + num_outs = len(in_channels) + self.convs = nn.ModuleList() + for in_channel in in_channels: + self.convs.append( + ConvModule( + in_channel, + out_channels, + kernel_size, + padding=(kernel_size - 1) // 2, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg)) + if num_outs > len(in_channels): + self.extra_convs = nn.ModuleList() + for i in range(len(in_channels), num_outs): + if i == len(in_channels): + in_channel = in_channels[-1] + else: + in_channel = out_channels + self.extra_convs.append( + ConvModule( + in_channel, + out_channels, + 3, + stride=2, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg)) + + def forward(self, inputs): + """Forward function.""" + assert len(inputs) == len(self.convs) + outs = [self.convs[i](inputs[i]) for i in range(len(inputs))] + if self.extra_convs: + for i in range(len(self.extra_convs)): + if i == 0: + outs.append(self.extra_convs[0](inputs[-1])) + else: + outs.append(self.extra_convs[i](outs[-1])) + return tuple(outs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/ct_resnet_neck.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/ct_resnet_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..899a6547ef70ecc9339c0fdc8d024db817ca2297 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/ct_resnet_neck.py @@ -0,0 +1,93 @@ +import math + +import torch.nn as nn +from mmcv.cnn import ConvModule +from mmcv.runner import BaseModule, auto_fp16 + +from mmdet.models.builder import NECKS + + +@NECKS.register_module() +class CTResNetNeck(BaseModule): + """The neck used in `CenterNet `_ for + object classification and box regression. + + Args: + in_channel (int): Number of input channels. + num_deconv_filters (tuple[int]): Number of filters per stage. 
+ num_deconv_kernels (tuple[int]): Number of kernels per stage. + use_dcn (bool): If True, use DCNv2. Default: True. + init_cfg (dict or list[dict], optional): Initialization config dict. + """ + + def __init__(self, + in_channel, + num_deconv_filters, + num_deconv_kernels, + use_dcn=True, + init_cfg=None): + super(CTResNetNeck, self).__init__(init_cfg) + assert len(num_deconv_filters) == len(num_deconv_kernels) + self.fp16_enabled = False + self.use_dcn = use_dcn + self.in_channel = in_channel + self.deconv_layers = self._make_deconv_layer(num_deconv_filters, + num_deconv_kernels) + + def _make_deconv_layer(self, num_deconv_filters, num_deconv_kernels): + """use deconv layers to upsample backbone's output.""" + layers = [] + for i in range(len(num_deconv_filters)): + feat_channel = num_deconv_filters[i] + conv_module = ConvModule( + self.in_channel, + feat_channel, + 3, + padding=1, + conv_cfg=dict(type='DCNv2') if self.use_dcn else None, + norm_cfg=dict(type='BN')) + layers.append(conv_module) + upsample_module = ConvModule( + feat_channel, + feat_channel, + num_deconv_kernels[i], + stride=2, + padding=1, + conv_cfg=dict(type='deconv'), + norm_cfg=dict(type='BN')) + layers.append(upsample_module) + self.in_channel = feat_channel + + return nn.Sequential(*layers) + + def init_weights(self): + for m in self.modules(): + if isinstance(m, nn.ConvTranspose2d): + # In order to be consistent with the source code, + # reset the ConvTranspose2d initialization parameters + m.reset_parameters() + # Simulated bilinear upsampling kernel + w = m.weight.data + f = math.ceil(w.size(2) / 2) + c = (2 * f - 1 - f % 2) / (2. * f) + for i in range(w.size(2)): + for j in range(w.size(3)): + w[0, 0, i, j] = \ + (1 - math.fabs(i / f - c)) * ( + 1 - math.fabs(j / f - c)) + for c in range(1, w.size(0)): + w[c, 0, :, :] = w[0, 0, :, :] + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + # self.use_dcn is False + elif not self.use_dcn and isinstance(m, nn.Conv2d): + # In order to be consistent with the source code, + # reset the Conv2d initialization parameters + m.reset_parameters() + + @auto_fp16() + def forward(self, inputs): + assert isinstance(inputs, (list, tuple)) + outs = self.deconv_layers(inputs[-1]) + return outs, diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/dilated_encoder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/dilated_encoder.py new file mode 100644 index 0000000000000000000000000000000000000000..e97d5ccc7f5e5e796aaac728fe3e75a0e3cb2a15 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/dilated_encoder.py @@ -0,0 +1,107 @@ +import torch.nn as nn +from mmcv.cnn import (ConvModule, caffe2_xavier_init, constant_init, is_norm, + normal_init) +from torch.nn import BatchNorm2d + +from ..builder import NECKS + + +class Bottleneck(nn.Module): + """Bottleneck block for DilatedEncoder used in `YOLOF. + + `. + + The Bottleneck contains three ConvLayers and one residual connection. + + Args: + in_channels (int): The number of input channels. + mid_channels (int): The number of middle output channels. + dilation (int): Dilation rate. + norm_cfg (dict): Dictionary to construct and config norm layer. 
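+
+    Example (an illustrative sketch; the channel sizes are made up):
+        >>> import torch
+        >>> block = Bottleneck(in_channels=16, mid_channels=4, dilation=2)
+        >>> x = torch.rand(1, 16, 8, 8)
+        >>> block(x).shape  # the residual design preserves the input shape
+        torch.Size([1, 16, 8, 8])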
+ """ + + def __init__(self, + in_channels, + mid_channels, + dilation, + norm_cfg=dict(type='BN', requires_grad=True)): + super(Bottleneck, self).__init__() + self.conv1 = ConvModule( + in_channels, mid_channels, 1, norm_cfg=norm_cfg) + self.conv2 = ConvModule( + mid_channels, + mid_channels, + 3, + padding=dilation, + dilation=dilation, + norm_cfg=norm_cfg) + self.conv3 = ConvModule( + mid_channels, in_channels, 1, norm_cfg=norm_cfg) + + def forward(self, x): + identity = x + out = self.conv1(x) + out = self.conv2(out) + out = self.conv3(out) + out = out + identity + return out + + +@NECKS.register_module() +class DilatedEncoder(nn.Module): + """Dilated Encoder for YOLOF `. + + This module contains two types of components: + - the original FPN lateral convolution layer and fpn convolution layer, + which are 1x1 conv + 3x3 conv + - the dilated residual block + + Args: + in_channels (int): The number of input channels. + out_channels (int): The number of output channels. + block_mid_channels (int): The number of middle block output channels + num_residual_blocks (int): The number of residual blocks. + """ + + def __init__(self, in_channels, out_channels, block_mid_channels, + num_residual_blocks): + super(DilatedEncoder, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.block_mid_channels = block_mid_channels + self.num_residual_blocks = num_residual_blocks + self.block_dilations = [2, 4, 6, 8] + self._init_layers() + + def _init_layers(self): + self.lateral_conv = nn.Conv2d( + self.in_channels, self.out_channels, kernel_size=1) + self.lateral_norm = BatchNorm2d(self.out_channels) + self.fpn_conv = nn.Conv2d( + self.out_channels, self.out_channels, kernel_size=3, padding=1) + self.fpn_norm = BatchNorm2d(self.out_channels) + encoder_blocks = [] + for i in range(self.num_residual_blocks): + dilation = self.block_dilations[i] + encoder_blocks.append( + Bottleneck( + self.out_channels, + self.block_mid_channels, + dilation=dilation)) + self.dilated_encoder_blocks = nn.Sequential(*encoder_blocks) + + def init_weights(self): + caffe2_xavier_init(self.lateral_conv) + caffe2_xavier_init(self.fpn_conv) + for m in [self.lateral_norm, self.fpn_norm]: + constant_init(m, 1) + for m in self.dilated_encoder_blocks.modules(): + if isinstance(m, nn.Conv2d): + normal_init(m, mean=0, std=0.01) + if is_norm(m): + constant_init(m, 1) + + def forward(self, feature): + out = self.lateral_norm(self.lateral_conv(feature[-1])) + out = self.fpn_norm(self.fpn_conv(out)) + return self.dilated_encoder_blocks(out), diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/fpg.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/fpg.py new file mode 100644 index 0000000000000000000000000000000000000000..2b65dba3d0b4516e2d318623e926259b78268a05 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/fpg.py @@ -0,0 +1,405 @@ +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule +from mmcv.runner import BaseModule + +from ..builder import NECKS + + +class Transition(BaseModule): + """Base class for transition. + + Args: + in_channels (int): Number of input channels. + out_channels (int): Number of output channels. 
+ """ + + def __init__(self, in_channels, out_channels, init_cfg=None): + super().__init__(init_cfg) + self.in_channels = in_channels + self.out_channels = out_channels + + def forward(x): + pass + + +class UpInterpolationConv(Transition): + """A transition used for up-sampling. + + Up-sample the input by interpolation then refines the feature by + a convolution layer. + + Args: + in_channels (int): Number of input channels. + out_channels (int): Number of output channels. + scale_factor (int): Up-sampling factor. Default: 2. + mode (int): Interpolation mode. Default: nearest. + align_corners (bool): Whether align corners when interpolation. + Default: None. + kernel_size (int): Kernel size for the conv. Default: 3. + """ + + def __init__(self, + in_channels, + out_channels, + scale_factor=2, + mode='nearest', + align_corners=None, + kernel_size=3, + init_cfg=None, + **kwargs): + super().__init__(in_channels, out_channels, init_cfg) + self.mode = mode + self.scale_factor = scale_factor + self.align_corners = align_corners + self.conv = ConvModule( + in_channels, + out_channels, + kernel_size, + padding=(kernel_size - 1) // 2, + **kwargs) + + def forward(self, x): + x = F.interpolate( + x, + scale_factor=self.scale_factor, + mode=self.mode, + align_corners=self.align_corners) + x = self.conv(x) + return x + + +class LastConv(Transition): + """A transition used for refining the output of the last stage. + + Args: + in_channels (int): Number of input channels. + out_channels (int): Number of output channels. + num_inputs (int): Number of inputs of the FPN features. + kernel_size (int): Kernel size for the conv. Default: 3. + """ + + def __init__(self, + in_channels, + out_channels, + num_inputs, + kernel_size=3, + init_cfg=None, + **kwargs): + super().__init__(in_channels, out_channels, init_cfg) + self.num_inputs = num_inputs + self.conv_out = ConvModule( + in_channels, + out_channels, + kernel_size, + padding=(kernel_size - 1) // 2, + **kwargs) + + def forward(self, inputs): + assert len(inputs) == self.num_inputs + return self.conv_out(inputs[-1]) + + +@NECKS.register_module() +class FPG(BaseModule): + """FPG. + + Implementation of `Feature Pyramid Grids (FPG) + `_. + This implementation only gives the basic structure stated in the paper. + But users can implement different type of transitions to fully explore the + the potential power of the structure of FPG. + + Args: + in_channels (int): Number of input channels (feature maps of all levels + should have the same channels). + out_channels (int): Number of output channels (used at each scale) + num_outs (int): Number of output scales. + stack_times (int): The number of times the pyramid architecture will + be stacked. + paths (list[str]): Specify the path order of each stack level. + Each element in the list should be either 'bu' (bottom-up) or + 'td' (top-down). + inter_channels (int): Number of inter channels. + same_up_trans (dict): Transition that goes down at the same stage. + same_down_trans (dict): Transition that goes up at the same stage. + across_lateral_trans (dict): Across-pathway same-stage + across_down_trans (dict): Across-pathway bottom-up connection. + across_up_trans (dict): Across-pathway top-down connection. + across_skip_trans (dict): Across-pathway skip connection. + output_trans (dict): Transition that trans the output of the + last stage. + start_level (int): Index of the start input backbone level used to + build the feature pyramid. Default: 0. 
+        end_level (int): Index of the end input backbone level (exclusive) to
+            build the feature pyramid. Default: -1, which means the last level.
+        add_extra_convs (bool): It decides whether to add conv
+            layers on top of the original feature maps. Defaults to False.
+            If True, its actual mode is specified by `extra_convs_on_inputs`.
+        norm_cfg (dict): Config dict for normalization layer. Default: None.
+        init_cfg (dict or list[dict], optional): Initialization config dict.
+    """
+
+    transition_types = {
+        'conv': ConvModule,
+        'interpolation_conv': UpInterpolationConv,
+        'last_conv': LastConv,
+    }
+
+    def __init__(self,
+                 in_channels,
+                 out_channels,
+                 num_outs,
+                 stack_times,
+                 paths,
+                 inter_channels=None,
+                 same_down_trans=None,
+                 same_up_trans=dict(
+                     type='conv', kernel_size=3, stride=2, padding=1),
+                 across_lateral_trans=dict(type='conv', kernel_size=1),
+                 across_down_trans=dict(type='conv', kernel_size=3),
+                 across_up_trans=None,
+                 across_skip_trans=dict(type='identity'),
+                 output_trans=dict(type='last_conv', kernel_size=3),
+                 start_level=0,
+                 end_level=-1,
+                 add_extra_convs=False,
+                 norm_cfg=None,
+                 skip_inds=None,
+                 init_cfg=[
+                     dict(type='Caffe2Xavier', layer='Conv2d'),
+                     dict(
+                         type='Constant',
+                         layer=[
+                             '_BatchNorm', '_InstanceNorm', 'GroupNorm',
+                             'LayerNorm'
+                         ],
+                         val=1.0)
+                 ]):
+        super(FPG, self).__init__(init_cfg)
+        assert isinstance(in_channels, list)
+        self.in_channels = in_channels
+        self.out_channels = out_channels
+        self.num_ins = len(in_channels)
+        self.num_outs = num_outs
+        if inter_channels is None:
+            self.inter_channels = [out_channels for _ in range(num_outs)]
+        elif isinstance(inter_channels, int):
+            self.inter_channels = [inter_channels for _ in range(num_outs)]
+        else:
+            assert isinstance(inter_channels, list)
+            assert len(inter_channels) == num_outs
+            self.inter_channels = inter_channels
+        self.stack_times = stack_times
+        self.paths = paths
+        assert isinstance(paths, list) and len(paths) == stack_times
+        for d in paths:
+            assert d in ('bu', 'td')
+
+        self.same_down_trans = same_down_trans
+        self.same_up_trans = same_up_trans
+        self.across_lateral_trans = across_lateral_trans
+        self.across_down_trans = across_down_trans
+        self.across_up_trans = across_up_trans
+        self.output_trans = output_trans
+        self.across_skip_trans = across_skip_trans
+
+        self.with_bias = norm_cfg is None
+        # skip inds must be specified if across skip trans is not None
+        if self.across_skip_trans is not None:
+            assert skip_inds is not None
+        self.skip_inds = skip_inds
+        assert len(self.skip_inds[0]) <= self.stack_times
+
+        if end_level == -1:
+            self.backbone_end_level = self.num_ins
+            assert num_outs >= self.num_ins - start_level
+        else:
+            # if end_level < inputs, no extra level is allowed
+            self.backbone_end_level = end_level
+            assert end_level <= len(in_channels)
+            assert num_outs == end_level - start_level
+        self.start_level = start_level
+        self.end_level = end_level
+        self.add_extra_convs = add_extra_convs
+
+        # build lateral 1x1 convs to reduce channels
+        self.lateral_convs = nn.ModuleList()
+        for i in range(self.start_level, self.backbone_end_level):
+            l_conv = nn.Conv2d(self.in_channels[i],
+                               self.inter_channels[i - self.start_level], 1)
+            self.lateral_convs.append(l_conv)
+
+        extra_levels = num_outs - self.backbone_end_level + self.start_level
+        self.extra_downsamples = nn.ModuleList()
+        for i in range(extra_levels):
+            if self.add_extra_convs:
+                fpn_idx = self.backbone_end_level - self.start_level + i
+                extra_conv = nn.Conv2d(
+                    self.inter_channels[fpn_idx - 1],
self.inter_channels[fpn_idx], + 3, + stride=2, + padding=1) + self.extra_downsamples.append(extra_conv) + else: + self.extra_downsamples.append(nn.MaxPool2d(1, stride=2)) + + self.fpn_transitions = nn.ModuleList() # stack times + for s in range(self.stack_times): + stage_trans = nn.ModuleList() # num of feature levels + for i in range(self.num_outs): + # same, across_lateral, across_down, across_up + trans = nn.ModuleDict() + if s in self.skip_inds[i]: + stage_trans.append(trans) + continue + # build same-stage down trans (used in bottom-up paths) + if i == 0 or self.same_up_trans is None: + same_up_trans = None + else: + same_up_trans = self.build_trans( + self.same_up_trans, self.inter_channels[i - 1], + self.inter_channels[i]) + trans['same_up'] = same_up_trans + # build same-stage up trans (used in top-down paths) + if i == self.num_outs - 1 or self.same_down_trans is None: + same_down_trans = None + else: + same_down_trans = self.build_trans( + self.same_down_trans, self.inter_channels[i + 1], + self.inter_channels[i]) + trans['same_down'] = same_down_trans + # build across lateral trans + across_lateral_trans = self.build_trans( + self.across_lateral_trans, self.inter_channels[i], + self.inter_channels[i]) + trans['across_lateral'] = across_lateral_trans + # build across down trans + if i == self.num_outs - 1 or self.across_down_trans is None: + across_down_trans = None + else: + across_down_trans = self.build_trans( + self.across_down_trans, self.inter_channels[i + 1], + self.inter_channels[i]) + trans['across_down'] = across_down_trans + # build across up trans + if i == 0 or self.across_up_trans is None: + across_up_trans = None + else: + across_up_trans = self.build_trans( + self.across_up_trans, self.inter_channels[i - 1], + self.inter_channels[i]) + trans['across_up'] = across_up_trans + if self.across_skip_trans is None: + across_skip_trans = None + else: + across_skip_trans = self.build_trans( + self.across_skip_trans, self.inter_channels[i - 1], + self.inter_channels[i]) + trans['across_skip'] = across_skip_trans + # build across_skip trans + stage_trans.append(trans) + self.fpn_transitions.append(stage_trans) + + self.output_transition = nn.ModuleList() # output levels + for i in range(self.num_outs): + trans = self.build_trans( + self.output_trans, + self.inter_channels[i], + self.out_channels, + num_inputs=self.stack_times + 1) + self.output_transition.append(trans) + + self.relu = nn.ReLU(inplace=True) + + def build_trans(self, cfg, in_channels, out_channels, **extra_args): + cfg_ = cfg.copy() + trans_type = cfg_.pop('type') + trans_cls = self.transition_types[trans_type] + return trans_cls(in_channels, out_channels, **cfg_, **extra_args) + + def fuse(self, fuse_dict): + out = None + for item in fuse_dict.values(): + if item is not None: + if out is None: + out = item + else: + out = out + item + return out + + def forward(self, inputs): + assert len(inputs) == len(self.in_channels) + + # build all levels from original feature maps + feats = [ + lateral_conv(inputs[i + self.start_level]) + for i, lateral_conv in enumerate(self.lateral_convs) + ] + for downsample in self.extra_downsamples: + feats.append(downsample(feats[-1])) + + outs = [feats] + + for i in range(self.stack_times): + current_outs = outs[-1] + next_outs = [] + direction = self.paths[i] + for j in range(self.num_outs): + if i in self.skip_inds[j]: + next_outs.append(outs[-1][j]) + continue + # feature level + if direction == 'td': + lvl = self.num_outs - j - 1 + else: + lvl = j + # get transitions + if 
+                if direction == 'td':
+                    same_trans = self.fpn_transitions[i][lvl]['same_down']
+                else:
+                    same_trans = self.fpn_transitions[i][lvl]['same_up']
+                across_lateral_trans = self.fpn_transitions[i][lvl][
+                    'across_lateral']
+                across_down_trans = self.fpn_transitions[i][lvl]['across_down']
+                across_up_trans = self.fpn_transitions[i][lvl]['across_up']
+                across_skip_trans = self.fpn_transitions[i][lvl]['across_skip']
+                # init output
+                to_fuse = dict(
+                    same=None, lateral=None, across_up=None, across_down=None)
+                # same-stage trans from the previous level on this path
+                if same_trans is not None:
+                    to_fuse['same'] = same_trans(next_outs[-1])
+                # across lateral
+                if across_lateral_trans is not None:
+                    to_fuse['lateral'] = across_lateral_trans(
+                        current_outs[lvl])
+                # across up trans (from the finer level lvl - 1)
+                if lvl > 0 and across_up_trans is not None:
+                    to_fuse['across_up'] = across_up_trans(
+                        current_outs[lvl - 1])
+                # across down trans (from the coarser level lvl + 1)
+                if (lvl < self.num_outs - 1 and across_down_trans is not None):
+                    to_fuse['across_down'] = across_down_trans(
+                        current_outs[lvl + 1])
+                # across skip trans (from the stage-0 features)
+                if across_skip_trans is not None:
+                    to_fuse['across_skip'] = across_skip_trans(outs[0][lvl])
+                x = self.fuse(to_fuse)
+                next_outs.append(x)
+
+            if direction == 'td':
+                outs.append(next_outs[::-1])
+            else:
+                outs.append(next_outs)
+
+        # output trans
+        final_outs = []
+        for i in range(self.num_outs):
+            lvl_out_list = []
+            for s in range(len(outs)):
+                lvl_out_list.append(outs[s][i])
+            lvl_out = self.output_transition[i](lvl_out_list)
+            final_outs.append(lvl_out)
+
+        return final_outs
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/fpn.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/fpn.py
new file mode 100644
index 0000000000000000000000000000000000000000..5644c615e6bfef026e5fe8382ff247bcbd2b87e9
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/fpn.py
@@ -0,0 +1,202 @@
+import torch.nn as nn
+import torch.nn.functional as F
+from mmcv.cnn import ConvModule
+from mmcv.runner import BaseModule, auto_fp16
+
+from ..builder import NECKS
+
+
+@NECKS.register_module()
+class FPN(BaseModule):
+    r"""Feature Pyramid Network.
+
+    This is an implementation of paper `Feature Pyramid Networks for Object
+    Detection <https://arxiv.org/abs/1612.03144>`_.
+
+    Args:
+        in_channels (List[int]): Number of input channels per scale.
+        out_channels (int): Number of output channels (used at each scale)
+        num_outs (int): Number of output scales.
+        start_level (int): Index of the start input backbone level used to
+            build the feature pyramid. Default: 0.
+        end_level (int): Index of the end input backbone level (exclusive) to
+            build the feature pyramid. Default: -1, which means the last level.
+        add_extra_convs (bool | str): If bool, it decides whether to add conv
+            layers on top of the original feature maps. Default to False.
+            If True, it is equivalent to `add_extra_convs='on_input'`.
+            If str, it specifies the source feature map of the extra convs.
+            Only the following options are allowed
+
+            - 'on_input': Last feat map of neck inputs (i.e. backbone feature).
+            - 'on_lateral': Last feature map after lateral convs.
+            - 'on_output': The last output feature map after fpn convs.
+        relu_before_extra_convs (bool): Whether to apply relu before the extra
+            conv. Default: False.
+        no_norm_on_lateral (bool): Whether to apply norm on lateral.
+            Default: False.
+        conv_cfg (dict): Config dict for convolution layer. Default: None.
+        norm_cfg (dict): Config dict for normalization layer. Default: None.
+        act_cfg (dict): Config dict for activation layer in ConvModule.
+ Default: None. + upsample_cfg (dict): Config dict for interpolate layer. + Default: `dict(mode='nearest')` + init_cfg (dict or list[dict], optional): Initialization config dict. + + Example: + >>> import torch + >>> in_channels = [2, 3, 5, 7] + >>> scales = [340, 170, 84, 43] + >>> inputs = [torch.rand(1, c, s, s) + ... for c, s in zip(in_channels, scales)] + >>> self = FPN(in_channels, 11, len(in_channels)).eval() + >>> outputs = self.forward(inputs) + >>> for i in range(len(outputs)): + ... print(f'outputs[{i}].shape = {outputs[i].shape}') + outputs[0].shape = torch.Size([1, 11, 340, 340]) + outputs[1].shape = torch.Size([1, 11, 170, 170]) + outputs[2].shape = torch.Size([1, 11, 84, 84]) + outputs[3].shape = torch.Size([1, 11, 43, 43]) + """ + + def __init__(self, + in_channels, + out_channels, + num_outs, + start_level=0, + end_level=-1, + add_extra_convs=False, + relu_before_extra_convs=False, + no_norm_on_lateral=False, + conv_cfg=None, + norm_cfg=None, + act_cfg=None, + upsample_cfg=dict(mode='nearest'), + init_cfg=dict( + type='Xavier', layer='Conv2d', distribution='uniform')): + super(FPN, self).__init__(init_cfg) + assert isinstance(in_channels, list) + self.in_channels = in_channels + self.out_channels = out_channels + self.num_ins = len(in_channels) + self.num_outs = num_outs + self.relu_before_extra_convs = relu_before_extra_convs + self.no_norm_on_lateral = no_norm_on_lateral + self.fp16_enabled = False + self.upsample_cfg = upsample_cfg.copy() + + if end_level == -1: + self.backbone_end_level = self.num_ins + assert num_outs >= self.num_ins - start_level + else: + # if end_level < inputs, no extra level is allowed + self.backbone_end_level = end_level + assert end_level <= len(in_channels) + assert num_outs == end_level - start_level + self.start_level = start_level + self.end_level = end_level + self.add_extra_convs = add_extra_convs + assert isinstance(add_extra_convs, (str, bool)) + if isinstance(add_extra_convs, str): + # Extra_convs_source choices: 'on_input', 'on_lateral', 'on_output' + assert add_extra_convs in ('on_input', 'on_lateral', 'on_output') + elif add_extra_convs: # True + self.add_extra_convs = 'on_input' + + self.lateral_convs = nn.ModuleList() + self.fpn_convs = nn.ModuleList() + + for i in range(self.start_level, self.backbone_end_level): + l_conv = ConvModule( + in_channels[i], + out_channels, + 1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg if not self.no_norm_on_lateral else None, + act_cfg=act_cfg, + inplace=False) + fpn_conv = ConvModule( + out_channels, + out_channels, + 3, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + inplace=False) + + self.lateral_convs.append(l_conv) + self.fpn_convs.append(fpn_conv) + + # add extra conv layers (e.g., RetinaNet) + extra_levels = num_outs - self.backbone_end_level + self.start_level + if self.add_extra_convs and extra_levels >= 1: + for i in range(extra_levels): + if i == 0 and self.add_extra_convs == 'on_input': + in_channels = self.in_channels[self.backbone_end_level - 1] + else: + in_channels = out_channels + extra_fpn_conv = ConvModule( + in_channels, + out_channels, + 3, + stride=2, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + inplace=False) + self.fpn_convs.append(extra_fpn_conv) + + @auto_fp16() + def forward(self, inputs): + """Forward function.""" + assert len(inputs) == len(self.in_channels) + + # build laterals + laterals = [ + lateral_conv(inputs[i + self.start_level]) + for i, lateral_conv in enumerate(self.lateral_convs) + ] + + # build 
top-down path + used_backbone_levels = len(laterals) + for i in range(used_backbone_levels - 1, 0, -1): + # In some cases, fixing `scale factor` (e.g. 2) is preferred, but + # it cannot co-exist with `size` in `F.interpolate`. + if 'scale_factor' in self.upsample_cfg: + laterals[i - 1] += F.interpolate(laterals[i], + **self.upsample_cfg) + else: + prev_shape = laterals[i - 1].shape[2:] + laterals[i - 1] += F.interpolate( + laterals[i], size=prev_shape, **self.upsample_cfg) + + # build outputs + # part 1: from original levels + outs = [ + self.fpn_convs[i](laterals[i]) for i in range(used_backbone_levels) + ] + # part 2: add extra levels + if self.num_outs > len(outs): + # use max pool to get more levels on top of outputs + # (e.g., Faster R-CNN, Mask R-CNN) + if not self.add_extra_convs: + for i in range(self.num_outs - used_backbone_levels): + outs.append(F.max_pool2d(outs[-1], 1, stride=2)) + # add conv layers on top of original feature maps (RetinaNet) + else: + if self.add_extra_convs == 'on_input': + extra_source = inputs[self.backbone_end_level - 1] + elif self.add_extra_convs == 'on_lateral': + extra_source = laterals[-1] + elif self.add_extra_convs == 'on_output': + extra_source = outs[-1] + else: + raise NotImplementedError + outs.append(self.fpn_convs[used_backbone_levels](extra_source)) + for i in range(used_backbone_levels + 1, self.num_outs): + if self.relu_before_extra_convs: + outs.append(self.fpn_convs[i](F.relu(outs[-1]))) + else: + outs.append(self.fpn_convs[i](outs[-1])) + return tuple(outs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/fpn_carafe.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/fpn_carafe.py new file mode 100644 index 0000000000000000000000000000000000000000..ccc78ec325f176ec015c9757dcebb40ebca075be --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/fpn_carafe.py @@ -0,0 +1,274 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule, build_upsample_layer, xavier_init +from mmcv.ops.carafe import CARAFEPack +from mmcv.runner import BaseModule, ModuleList + +from ..builder import NECKS + + +@NECKS.register_module() +class FPN_CARAFE(BaseModule): + """FPN_CARAFE is a more flexible implementation of FPN. It allows more + choice for upsample methods during the top-down pathway. + + It can reproduce the performance of ICCV 2019 paper + CARAFE: Content-Aware ReAssembly of FEatures + Please refer to https://arxiv.org/abs/1905.02188 for more details. + + Args: + in_channels (list[int]): Number of channels for each input feature map. + out_channels (int): Output channels of feature pyramids. + num_outs (int): Number of output stages. + start_level (int): Start level of feature pyramids. + (Default: 0) + end_level (int): End level of feature pyramids. + (Default: -1 indicates the last level). + norm_cfg (dict): Dictionary to construct and config norm layer. + activate (str): Type of activation function in ConvModule + (Default: None indicates w/o activation). + order (dict): Order of components in ConvModule. + upsample (str): Type of upsample layer. + upsample_cfg (dict): Dictionary to construct and config upsample layer. + init_cfg (dict or list[dict], optional): Initialization config dict. 
+ Default: None + """ + + def __init__(self, + in_channels, + out_channels, + num_outs, + start_level=0, + end_level=-1, + norm_cfg=None, + act_cfg=None, + order=('conv', 'norm', 'act'), + upsample_cfg=dict( + type='carafe', + up_kernel=5, + up_group=1, + encoder_kernel=3, + encoder_dilation=1), + init_cfg=None): + assert init_cfg is None, 'To prevent abnormal initialization ' \ + 'behavior, init_cfg is not allowed to be set' + super(FPN_CARAFE, self).__init__(init_cfg) + assert isinstance(in_channels, list) + self.in_channels = in_channels + self.out_channels = out_channels + self.num_ins = len(in_channels) + self.num_outs = num_outs + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + self.with_bias = norm_cfg is None + self.upsample_cfg = upsample_cfg.copy() + self.upsample = self.upsample_cfg.get('type') + self.relu = nn.ReLU(inplace=False) + + self.order = order + assert order in [('conv', 'norm', 'act'), ('act', 'conv', 'norm')] + + assert self.upsample in [ + 'nearest', 'bilinear', 'deconv', 'pixel_shuffle', 'carafe', None + ] + if self.upsample in ['deconv', 'pixel_shuffle']: + assert hasattr( + self.upsample_cfg, + 'upsample_kernel') and self.upsample_cfg.upsample_kernel > 0 + self.upsample_kernel = self.upsample_cfg.pop('upsample_kernel') + + if end_level == -1: + self.backbone_end_level = self.num_ins + assert num_outs >= self.num_ins - start_level + else: + # if end_level < inputs, no extra level is allowed + self.backbone_end_level = end_level + assert end_level <= len(in_channels) + assert num_outs == end_level - start_level + self.start_level = start_level + self.end_level = end_level + + self.lateral_convs = ModuleList() + self.fpn_convs = ModuleList() + self.upsample_modules = ModuleList() + + for i in range(self.start_level, self.backbone_end_level): + l_conv = ConvModule( + in_channels[i], + out_channels, + 1, + norm_cfg=norm_cfg, + bias=self.with_bias, + act_cfg=act_cfg, + inplace=False, + order=self.order) + fpn_conv = ConvModule( + out_channels, + out_channels, + 3, + padding=1, + norm_cfg=self.norm_cfg, + bias=self.with_bias, + act_cfg=act_cfg, + inplace=False, + order=self.order) + if i != self.backbone_end_level - 1: + upsample_cfg_ = self.upsample_cfg.copy() + if self.upsample == 'deconv': + upsample_cfg_.update( + in_channels=out_channels, + out_channels=out_channels, + kernel_size=self.upsample_kernel, + stride=2, + padding=(self.upsample_kernel - 1) // 2, + output_padding=(self.upsample_kernel - 1) // 2) + elif self.upsample == 'pixel_shuffle': + upsample_cfg_.update( + in_channels=out_channels, + out_channels=out_channels, + scale_factor=2, + upsample_kernel=self.upsample_kernel) + elif self.upsample == 'carafe': + upsample_cfg_.update(channels=out_channels, scale_factor=2) + else: + # suppress warnings + align_corners = (None + if self.upsample == 'nearest' else False) + upsample_cfg_.update( + scale_factor=2, + mode=self.upsample, + align_corners=align_corners) + upsample_module = build_upsample_layer(upsample_cfg_) + self.upsample_modules.append(upsample_module) + self.lateral_convs.append(l_conv) + self.fpn_convs.append(fpn_conv) + + # add extra conv layers (e.g., RetinaNet) + extra_out_levels = ( + num_outs - self.backbone_end_level + self.start_level) + if extra_out_levels >= 1: + for i in range(extra_out_levels): + in_channels = ( + self.in_channels[self.backbone_end_level - + 1] if i == 0 else out_channels) + extra_l_conv = ConvModule( + in_channels, + out_channels, + 3, + stride=2, + padding=1, + norm_cfg=norm_cfg, + bias=self.with_bias, + 
act_cfg=act_cfg, + inplace=False, + order=self.order) + if self.upsample == 'deconv': + upsampler_cfg_ = dict( + in_channels=out_channels, + out_channels=out_channels, + kernel_size=self.upsample_kernel, + stride=2, + padding=(self.upsample_kernel - 1) // 2, + output_padding=(self.upsample_kernel - 1) // 2) + elif self.upsample == 'pixel_shuffle': + upsampler_cfg_ = dict( + in_channels=out_channels, + out_channels=out_channels, + scale_factor=2, + upsample_kernel=self.upsample_kernel) + elif self.upsample == 'carafe': + upsampler_cfg_ = dict( + channels=out_channels, + scale_factor=2, + **self.upsample_cfg) + else: + # suppress warnings + align_corners = (None + if self.upsample == 'nearest' else False) + upsampler_cfg_ = dict( + scale_factor=2, + mode=self.upsample, + align_corners=align_corners) + upsampler_cfg_['type'] = self.upsample + upsample_module = build_upsample_layer(upsampler_cfg_) + extra_fpn_conv = ConvModule( + out_channels, + out_channels, + 3, + padding=1, + norm_cfg=self.norm_cfg, + bias=self.with_bias, + act_cfg=act_cfg, + inplace=False, + order=self.order) + self.upsample_modules.append(upsample_module) + self.fpn_convs.append(extra_fpn_conv) + self.lateral_convs.append(extra_l_conv) + + # default init_weights for conv(msra) and norm in ConvModule + def init_weights(self): + """Initialize the weights of module.""" + super(FPN_CARAFE, self).init_weights() + for m in self.modules(): + if isinstance(m, (nn.Conv2d, nn.ConvTranspose2d)): + xavier_init(m, distribution='uniform') + for m in self.modules(): + if isinstance(m, CARAFEPack): + m.init_weights() + + def slice_as(self, src, dst): + """Slice ``src`` as ``dst`` + + Note: + ``src`` should have the same or larger size than ``dst``. + + Args: + src (torch.Tensor): Tensors to be sliced. + dst (torch.Tensor): ``src`` will be sliced to have the same + size as ``dst``. + + Returns: + torch.Tensor: Sliced tensor. 
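+
+        Example (an illustrative sketch, not part of the upstream docstring;
+            ``self`` is assumed to be any constructed ``FPN_CARAFE``):
+            >>> import torch
+            >>> src = torch.rand(1, 16, 21, 21)
+            >>> dst = torch.rand(1, 16, 20, 20)
+            >>> self.slice_as(src, dst).shape
+            torch.Size([1, 16, 20, 20])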
+ """ + assert (src.size(2) >= dst.size(2)) and (src.size(3) >= dst.size(3)) + if src.size(2) == dst.size(2) and src.size(3) == dst.size(3): + return src + else: + return src[:, :, :dst.size(2), :dst.size(3)] + + def tensor_add(self, a, b): + """Add tensors ``a`` and ``b`` that might have different sizes.""" + if a.size() == b.size(): + c = a + b + else: + c = a + self.slice_as(b, a) + return c + + def forward(self, inputs): + """Forward function.""" + assert len(inputs) == len(self.in_channels) + + # build laterals + laterals = [] + for i, lateral_conv in enumerate(self.lateral_convs): + if i <= self.backbone_end_level - self.start_level: + input = inputs[min(i + self.start_level, len(inputs) - 1)] + else: + input = laterals[-1] + lateral = lateral_conv(input) + laterals.append(lateral) + + # build top-down path + for i in range(len(laterals) - 1, 0, -1): + if self.upsample is not None: + upsample_feat = self.upsample_modules[i - 1](laterals[i]) + else: + upsample_feat = laterals[i] + laterals[i - 1] = self.tensor_add(laterals[i - 1], upsample_feat) + + # build outputs + num_conv_outs = len(self.fpn_convs) + outs = [] + for i in range(num_conv_outs): + out = self.fpn_convs[i](laterals[i]) + outs.append(out) + return tuple(outs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/hrfpn.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/hrfpn.py new file mode 100644 index 0000000000000000000000000000000000000000..135128fe97c2b2f49d9dcaf427cb5bced0c26be7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/hrfpn.py @@ -0,0 +1,99 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule +from mmcv.runner import BaseModule +from torch.utils.checkpoint import checkpoint + +from ..builder import NECKS + + +@NECKS.register_module() +class HRFPN(BaseModule): + """HRFPN (High Resolution Feature Pyramids) + + paper: `High-Resolution Representations for Labeling Pixels and Regions + `_. + + Args: + in_channels (list): number of channels for each branch. + out_channels (int): output channels of feature pyramids. + num_outs (int): number of output stages. + pooling_type (str): pooling for generating feature pyramids + from {MAX, AVG}. + conv_cfg (dict): dictionary to construct and config conv layer. + norm_cfg (dict): dictionary to construct and config norm layer. + with_cp (bool): Use checkpoint or not. Using checkpoint will save some + memory while slowing down the training speed. + stride (int): stride of 3x3 convolutional layers + init_cfg (dict or list[dict], optional): Initialization config dict. 
+ """ + + def __init__(self, + in_channels, + out_channels, + num_outs=5, + pooling_type='AVG', + conv_cfg=None, + norm_cfg=None, + with_cp=False, + stride=1, + init_cfg=dict(type='Caffe2Xavier', layer='Conv2d')): + super(HRFPN, self).__init__(init_cfg) + assert isinstance(in_channels, list) + self.in_channels = in_channels + self.out_channels = out_channels + self.num_ins = len(in_channels) + self.num_outs = num_outs + self.with_cp = with_cp + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + + self.reduction_conv = ConvModule( + sum(in_channels), + out_channels, + kernel_size=1, + conv_cfg=self.conv_cfg, + act_cfg=None) + + self.fpn_convs = nn.ModuleList() + for i in range(self.num_outs): + self.fpn_convs.append( + ConvModule( + out_channels, + out_channels, + kernel_size=3, + padding=1, + stride=stride, + conv_cfg=self.conv_cfg, + act_cfg=None)) + + if pooling_type == 'MAX': + self.pooling = F.max_pool2d + else: + self.pooling = F.avg_pool2d + + def forward(self, inputs): + """Forward function.""" + assert len(inputs) == self.num_ins + outs = [inputs[0]] + for i in range(1, self.num_ins): + outs.append( + F.interpolate(inputs[i], scale_factor=2**i, mode='bilinear')) + out = torch.cat(outs, dim=1) + if out.requires_grad and self.with_cp: + out = checkpoint(self.reduction_conv, out) + else: + out = self.reduction_conv(out) + outs = [out] + for i in range(1, self.num_outs): + outs.append(self.pooling(out, kernel_size=2**i, stride=2**i)) + outputs = [] + + for i in range(self.num_outs): + if outs[i].requires_grad and self.with_cp: + tmp_out = checkpoint(self.fpn_convs[i], outs[i]) + else: + tmp_out = self.fpn_convs[i](outs[i]) + outputs.append(tmp_out) + return tuple(outputs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/nas_fpn.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/nas_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..fca3496e6d341c709b3379d6c83265bad6dab65e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/nas_fpn.py @@ -0,0 +1,157 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule +from mmcv.ops.merge_cells import GlobalPoolingCell, SumCell +from mmcv.runner import BaseModule, ModuleList + +from ..builder import NECKS + + +@NECKS.register_module() +class NASFPN(BaseModule): + """NAS-FPN. + + Implementation of `NAS-FPN: Learning Scalable Feature Pyramid Architecture + for Object Detection `_ + + Args: + in_channels (List[int]): Number of input channels per scale. + out_channels (int): Number of output channels (used at each scale) + num_outs (int): Number of output scales. + stack_times (int): The number of times the pyramid architecture will + be stacked. + start_level (int): Index of the start input backbone level used to + build the feature pyramid. Default: 0. + end_level (int): Index of the end input backbone level (exclusive) to + build the feature pyramid. Default: -1, which means the last level. + add_extra_convs (bool): It decides whether to add conv + layers on top of the original feature maps. Default to False. + If True, its actual mode is specified by `extra_convs_on_inputs`. + init_cfg (dict or list[dict], optional): Initialization config dict. 
+ """ + + def __init__(self, + in_channels, + out_channels, + num_outs, + stack_times, + start_level=0, + end_level=-1, + add_extra_convs=False, + norm_cfg=None, + init_cfg=dict(type='Caffe2Xavier', layer='Conv2d')): + super(NASFPN, self).__init__(init_cfg) + assert isinstance(in_channels, list) + self.in_channels = in_channels + self.out_channels = out_channels + self.num_ins = len(in_channels) # num of input feature levels + self.num_outs = num_outs # num of output feature levels + self.stack_times = stack_times + self.norm_cfg = norm_cfg + + if end_level == -1: + self.backbone_end_level = self.num_ins + assert num_outs >= self.num_ins - start_level + else: + # if end_level < inputs, no extra level is allowed + self.backbone_end_level = end_level + assert end_level <= len(in_channels) + assert num_outs == end_level - start_level + self.start_level = start_level + self.end_level = end_level + self.add_extra_convs = add_extra_convs + + # add lateral connections + self.lateral_convs = nn.ModuleList() + for i in range(self.start_level, self.backbone_end_level): + l_conv = ConvModule( + in_channels[i], + out_channels, + 1, + norm_cfg=norm_cfg, + act_cfg=None) + self.lateral_convs.append(l_conv) + + # add extra downsample layers (stride-2 pooling or conv) + extra_levels = num_outs - self.backbone_end_level + self.start_level + self.extra_downsamples = nn.ModuleList() + for i in range(extra_levels): + extra_conv = ConvModule( + out_channels, out_channels, 1, norm_cfg=norm_cfg, act_cfg=None) + self.extra_downsamples.append( + nn.Sequential(extra_conv, nn.MaxPool2d(2, 2))) + + # add NAS FPN connections + self.fpn_stages = ModuleList() + for _ in range(self.stack_times): + stage = nn.ModuleDict() + # gp(p6, p4) -> p4_1 + stage['gp_64_4'] = GlobalPoolingCell( + in_channels=out_channels, + out_channels=out_channels, + out_norm_cfg=norm_cfg) + # sum(p4_1, p4) -> p4_2 + stage['sum_44_4'] = SumCell( + in_channels=out_channels, + out_channels=out_channels, + out_norm_cfg=norm_cfg) + # sum(p4_2, p3) -> p3_out + stage['sum_43_3'] = SumCell( + in_channels=out_channels, + out_channels=out_channels, + out_norm_cfg=norm_cfg) + # sum(p3_out, p4_2) -> p4_out + stage['sum_34_4'] = SumCell( + in_channels=out_channels, + out_channels=out_channels, + out_norm_cfg=norm_cfg) + # sum(p5, gp(p4_out, p3_out)) -> p5_out + stage['gp_43_5'] = GlobalPoolingCell(with_out_conv=False) + stage['sum_55_5'] = SumCell( + in_channels=out_channels, + out_channels=out_channels, + out_norm_cfg=norm_cfg) + # sum(p7, gp(p5_out, p4_2)) -> p7_out + stage['gp_54_7'] = GlobalPoolingCell(with_out_conv=False) + stage['sum_77_7'] = SumCell( + in_channels=out_channels, + out_channels=out_channels, + out_norm_cfg=norm_cfg) + # gp(p7_out, p5_out) -> p6_out + stage['gp_75_6'] = GlobalPoolingCell( + in_channels=out_channels, + out_channels=out_channels, + out_norm_cfg=norm_cfg) + self.fpn_stages.append(stage) + + def forward(self, inputs): + """Forward function.""" + # build P3-P5 + feats = [ + lateral_conv(inputs[i + self.start_level]) + for i, lateral_conv in enumerate(self.lateral_convs) + ] + # build P6-P7 on top of P5 + for downsample in self.extra_downsamples: + feats.append(downsample(feats[-1])) + + p3, p4, p5, p6, p7 = feats + + for stage in self.fpn_stages: + # gp(p6, p4) -> p4_1 + p4_1 = stage['gp_64_4'](p6, p4, out_size=p4.shape[-2:]) + # sum(p4_1, p4) -> p4_2 + p4_2 = stage['sum_44_4'](p4_1, p4, out_size=p4.shape[-2:]) + # sum(p4_2, p3) -> p3_out + p3 = stage['sum_43_3'](p4_2, p3, out_size=p3.shape[-2:]) + # sum(p3_out, p4_2) -> p4_out 
+ p4 = stage['sum_34_4'](p3, p4_2, out_size=p4.shape[-2:]) + # sum(p5, gp(p4_out, p3_out)) -> p5_out + p5_tmp = stage['gp_43_5'](p4, p3, out_size=p5.shape[-2:]) + p5 = stage['sum_55_5'](p5, p5_tmp, out_size=p5.shape[-2:]) + # sum(p7, gp(p5_out, p4_2)) -> p7_out + p7_tmp = stage['gp_54_7'](p5, p4_2, out_size=p7.shape[-2:]) + p7 = stage['sum_77_7'](p7, p7_tmp, out_size=p7.shape[-2:]) + # gp(p7_out, p5_out) -> p6_out + p6 = stage['gp_75_6'](p7, p5, out_size=p6.shape[-2:]) + + return p3, p4, p5, p6, p7 diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/nasfcos_fpn.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/nasfcos_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..77a3ffd876a03ac8d07f7b3039d53ea3065fbd84 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/nasfcos_fpn.py @@ -0,0 +1,168 @@ +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule, caffe2_xavier_init +from mmcv.ops.merge_cells import ConcatCell +from mmcv.runner import BaseModule + +from ..builder import NECKS + + +@NECKS.register_module() +class NASFCOS_FPN(BaseModule): + """FPN structure in NASFPN. + + Implementation of paper `NAS-FCOS: Fast Neural Architecture Search for + Object Detection `_ + + Args: + in_channels (List[int]): Number of input channels per scale. + out_channels (int): Number of output channels (used at each scale) + num_outs (int): Number of output scales. + start_level (int): Index of the start input backbone level used to + build the feature pyramid. Default: 0. + end_level (int): Index of the end input backbone level (exclusive) to + build the feature pyramid. Default: -1, which means the last level. + add_extra_convs (bool): It decides whether to add conv + layers on top of the original feature maps. Default to False. + If True, its actual mode is specified by `extra_convs_on_inputs`. + conv_cfg (dict): dictionary to construct and config conv layer. + norm_cfg (dict): dictionary to construct and config norm layer. + init_cfg (dict or list[dict], optional): Initialization config dict. 
+ Default: None + """ + + def __init__(self, + in_channels, + out_channels, + num_outs, + start_level=1, + end_level=-1, + add_extra_convs=False, + conv_cfg=None, + norm_cfg=None, + init_cfg=None): + assert init_cfg is None, 'To prevent abnormal initialization ' \ + 'behavior, init_cfg is not allowed to be set' + super(NASFCOS_FPN, self).__init__(init_cfg) + assert isinstance(in_channels, list) + self.in_channels = in_channels + self.out_channels = out_channels + self.num_ins = len(in_channels) + self.num_outs = num_outs + self.norm_cfg = norm_cfg + self.conv_cfg = conv_cfg + + if end_level == -1: + self.backbone_end_level = self.num_ins + assert num_outs >= self.num_ins - start_level + else: + self.backbone_end_level = end_level + assert end_level <= len(in_channels) + assert num_outs == end_level - start_level + self.start_level = start_level + self.end_level = end_level + self.add_extra_convs = add_extra_convs + + self.adapt_convs = nn.ModuleList() + for i in range(self.start_level, self.backbone_end_level): + adapt_conv = ConvModule( + in_channels[i], + out_channels, + 1, + stride=1, + padding=0, + bias=False, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU', inplace=False)) + self.adapt_convs.append(adapt_conv) + + # C2 is omitted according to the paper + extra_levels = num_outs - self.backbone_end_level + self.start_level + + def build_concat_cell(with_input1_conv, with_input2_conv): + cell_conv_cfg = dict( + kernel_size=1, padding=0, bias=False, groups=out_channels) + return ConcatCell( + in_channels=out_channels, + out_channels=out_channels, + with_out_conv=True, + out_conv_cfg=cell_conv_cfg, + out_norm_cfg=dict(type='BN'), + out_conv_order=('norm', 'act', 'conv'), + with_input1_conv=with_input1_conv, + with_input2_conv=with_input2_conv, + input_conv_cfg=conv_cfg, + input_norm_cfg=norm_cfg, + upsample_mode='nearest') + + # Denote c3=f0, c4=f1, c5=f2 for convince + self.fpn = nn.ModuleDict() + self.fpn['c22_1'] = build_concat_cell(True, True) + self.fpn['c22_2'] = build_concat_cell(True, True) + self.fpn['c32'] = build_concat_cell(True, False) + self.fpn['c02'] = build_concat_cell(True, False) + self.fpn['c42'] = build_concat_cell(True, True) + self.fpn['c36'] = build_concat_cell(True, True) + self.fpn['c61'] = build_concat_cell(True, True) # f9 + self.extra_downsamples = nn.ModuleList() + for i in range(extra_levels): + extra_act_cfg = None if i == 0 \ + else dict(type='ReLU', inplace=False) + self.extra_downsamples.append( + ConvModule( + out_channels, + out_channels, + 3, + stride=2, + padding=1, + act_cfg=extra_act_cfg, + order=('act', 'norm', 'conv'))) + + def forward(self, inputs): + """Forward function.""" + feats = [ + adapt_conv(inputs[i + self.start_level]) + for i, adapt_conv in enumerate(self.adapt_convs) + ] + + for (i, module_name) in enumerate(self.fpn): + idx_1, idx_2 = int(module_name[1]), int(module_name[2]) + res = self.fpn[module_name](feats[idx_1], feats[idx_2]) + feats.append(res) + + ret = [] + for (idx, input_idx) in zip([9, 8, 7], [1, 2, 3]): # add P3, P4, P5 + feats1, feats2 = feats[idx], feats[5] + feats2_resize = F.interpolate( + feats2, + size=feats1.size()[2:], + mode='bilinear', + align_corners=False) + + feats_sum = feats1 + feats2_resize + ret.append( + F.interpolate( + feats_sum, + size=inputs[input_idx].size()[2:], + mode='bilinear', + align_corners=False)) + + for submodule in self.extra_downsamples: + ret.append(submodule(ret[-1])) + + return tuple(ret) + + def init_weights(self): + """Initialize the weights of module.""" + 
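+        # On top of the BaseModule initialization, re-init the merge cells'
+        # output convs and every plain Conv2d in the adapt/downsample branches
+        # with Caffe2-style Xavier initialization.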
+        super(NASFCOS_FPN, self).init_weights()
+        for module in self.fpn.values():
+            if hasattr(module, 'out_conv'):
+                caffe2_xavier_init(module.out_conv.conv)
+
+        for modules in [
+                self.adapt_convs.modules(),
+                self.extra_downsamples.modules()
+        ]:
+            for module in modules:
+                if isinstance(module, nn.Conv2d):
+                    caffe2_xavier_init(module)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/pafpn.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/pafpn.py
new file mode 100644
index 0000000000000000000000000000000000000000..ba56ccae7fd03e07a6d286834edab396b8b6f844
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/pafpn.py
@@ -0,0 +1,157 @@
+import torch.nn as nn
+import torch.nn.functional as F
+from mmcv.cnn import ConvModule
+from mmcv.runner import auto_fp16
+
+from ..builder import NECKS
+from .fpn import FPN
+
+
+@NECKS.register_module()
+class PAFPN(FPN):
+    """Path Aggregation Network for Instance Segmentation.
+
+    This is an implementation of the `PAFPN in Path Aggregation Network
+    <https://arxiv.org/abs/1803.01534>`_.
+
+    Args:
+        in_channels (List[int]): Number of input channels per scale.
+        out_channels (int): Number of output channels (used at each scale)
+        num_outs (int): Number of output scales.
+        start_level (int): Index of the start input backbone level used to
+            build the feature pyramid. Default: 0.
+        end_level (int): Index of the end input backbone level (exclusive) to
+            build the feature pyramid. Default: -1, which means the last level.
+        add_extra_convs (bool | str): If bool, it decides whether to add conv
+            layers on top of the original feature maps. Default to False.
+            If True, it is equivalent to `add_extra_convs='on_input'`.
+            If str, it specifies the source feature map of the extra convs.
+            Only the following options are allowed
+
+            - 'on_input': Last feat map of neck inputs (i.e. backbone feature).
+            - 'on_lateral': Last feature map after lateral convs.
+            - 'on_output': The last output feature map after fpn convs.
+        relu_before_extra_convs (bool): Whether to apply relu before the extra
+            conv. Default: False.
+        no_norm_on_lateral (bool): Whether to apply norm on lateral.
+            Default: False.
+        conv_cfg (dict): Config dict for convolution layer. Default: None.
+        norm_cfg (dict): Config dict for normalization layer. Default: None.
+        act_cfg (dict): Config dict for activation layer in ConvModule.
+        init_cfg (dict or list[dict], optional): Initialization config dict.
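+
+    Example (an illustrative sketch; unlike plain FPN, the spatial sizes must
+        halve exactly between levels, since the bottom-up path adds a
+        stride-2 conv output to the next level's feature map):
+        >>> import torch
+        >>> in_channels = [2, 3, 5, 7]
+        >>> scales = [64, 32, 16, 8]
+        >>> inputs = [torch.rand(1, c, s, s)
+        ...           for c, s in zip(in_channels, scales)]
+        >>> self = PAFPN(in_channels, 11, len(in_channels)).eval()
+        >>> outputs = self.forward(inputs)
+        >>> for i in range(len(outputs)):
+        ...     print(f'outputs[{i}].shape = {outputs[i].shape}')
+        outputs[0].shape = torch.Size([1, 11, 64, 64])
+        outputs[1].shape = torch.Size([1, 11, 32, 32])
+        outputs[2].shape = torch.Size([1, 11, 16, 16])
+        outputs[3].shape = torch.Size([1, 11, 8, 8])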
+ """ + + def __init__(self, + in_channels, + out_channels, + num_outs, + start_level=0, + end_level=-1, + add_extra_convs=False, + relu_before_extra_convs=False, + no_norm_on_lateral=False, + conv_cfg=None, + norm_cfg=None, + act_cfg=None, + init_cfg=dict( + type='Xavier', layer='Conv2d', distribution='uniform')): + super(PAFPN, self).__init__( + in_channels, + out_channels, + num_outs, + start_level, + end_level, + add_extra_convs, + relu_before_extra_convs, + no_norm_on_lateral, + conv_cfg, + norm_cfg, + act_cfg, + init_cfg=init_cfg) + # add extra bottom up pathway + self.downsample_convs = nn.ModuleList() + self.pafpn_convs = nn.ModuleList() + for i in range(self.start_level + 1, self.backbone_end_level): + d_conv = ConvModule( + out_channels, + out_channels, + 3, + stride=2, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + inplace=False) + pafpn_conv = ConvModule( + out_channels, + out_channels, + 3, + padding=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg, + inplace=False) + self.downsample_convs.append(d_conv) + self.pafpn_convs.append(pafpn_conv) + + @auto_fp16() + def forward(self, inputs): + """Forward function.""" + assert len(inputs) == len(self.in_channels) + + # build laterals + laterals = [ + lateral_conv(inputs[i + self.start_level]) + for i, lateral_conv in enumerate(self.lateral_convs) + ] + + # build top-down path + used_backbone_levels = len(laterals) + for i in range(used_backbone_levels - 1, 0, -1): + prev_shape = laterals[i - 1].shape[2:] + laterals[i - 1] += F.interpolate( + laterals[i], size=prev_shape, mode='nearest') + + # build outputs + # part 1: from original levels + inter_outs = [ + self.fpn_convs[i](laterals[i]) for i in range(used_backbone_levels) + ] + + # part 2: add bottom-up path + for i in range(0, used_backbone_levels - 1): + inter_outs[i + 1] += self.downsample_convs[i](inter_outs[i]) + + outs = [] + outs.append(inter_outs[0]) + outs.extend([ + self.pafpn_convs[i - 1](inter_outs[i]) + for i in range(1, used_backbone_levels) + ]) + + # part 3: add extra levels + if self.num_outs > len(outs): + # use max pool to get more levels on top of outputs + # (e.g., Faster R-CNN, Mask R-CNN) + if not self.add_extra_convs: + for i in range(self.num_outs - used_backbone_levels): + outs.append(F.max_pool2d(outs[-1], 1, stride=2)) + # add conv layers on top of original feature maps (RetinaNet) + else: + if self.add_extra_convs == 'on_input': + orig = inputs[self.backbone_end_level - 1] + outs.append(self.fpn_convs[used_backbone_levels](orig)) + elif self.add_extra_convs == 'on_lateral': + outs.append(self.fpn_convs[used_backbone_levels]( + laterals[-1])) + elif self.add_extra_convs == 'on_output': + outs.append(self.fpn_convs[used_backbone_levels](outs[-1])) + else: + raise NotImplementedError + for i in range(used_backbone_levels + 1, self.num_outs): + if self.relu_before_extra_convs: + outs.append(self.fpn_convs[i](F.relu(outs[-1]))) + else: + outs.append(self.fpn_convs[i](outs[-1])) + return tuple(outs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/rfp.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/rfp.py new file mode 100644 index 0000000000000000000000000000000000000000..200e243479e1971f5da230d1c68fd43b5ce740cb --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/rfp.py @@ -0,0 +1,134 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import constant_init, xavier_init +from mmcv.runner import 
BaseModule, ModuleList + +from ..builder import NECKS, build_backbone +from .fpn import FPN + + +class ASPP(BaseModule): + """ASPP (Atrous Spatial Pyramid Pooling) + + This is an implementation of the ASPP module used in DetectoRS + (https://arxiv.org/pdf/2006.02334.pdf) + + Args: + in_channels (int): Number of input channels. + out_channels (int): Number of channels produced by this module + dilations (tuple[int]): Dilations of the four branches. + Default: (1, 3, 6, 1) + init_cfg (dict or list[dict], optional): Initialization config dict. + """ + + def __init__(self, + in_channels, + out_channels, + dilations=(1, 3, 6, 1), + init_cfg=dict(type='Kaiming', layer='Conv2d')): + super().__init__(init_cfg) + assert dilations[-1] == 1 + self.aspp = nn.ModuleList() + for dilation in dilations: + kernel_size = 3 if dilation > 1 else 1 + padding = dilation if dilation > 1 else 0 + conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size=kernel_size, + stride=1, + dilation=dilation, + padding=padding, + bias=True) + self.aspp.append(conv) + self.gap = nn.AdaptiveAvgPool2d(1) + + def forward(self, x): + avg_x = self.gap(x) + out = [] + for aspp_idx in range(len(self.aspp)): + inp = avg_x if (aspp_idx == len(self.aspp) - 1) else x + out.append(F.relu_(self.aspp[aspp_idx](inp))) + out[-1] = out[-1].expand_as(out[-2]) + out = torch.cat(out, dim=1) + return out + + +@NECKS.register_module() +class RFP(FPN): + """RFP (Recursive Feature Pyramid) + + This is an implementation of RFP in `DetectoRS + `_. Different from standard FPN, the + input of RFP should be multi level features along with origin input image + of backbone. + + Args: + rfp_steps (int): Number of unrolled steps of RFP. + rfp_backbone (dict): Configuration of the backbone for RFP. + aspp_out_channels (int): Number of output channels of ASPP module. + aspp_dilations (tuple[int]): Dilation rates of four branches. + Default: (1, 3, 6, 1) + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + rfp_steps, + rfp_backbone, + aspp_out_channels, + aspp_dilations=(1, 3, 6, 1), + init_cfg=None, + **kwargs): + assert init_cfg is None, 'To prevent abnormal initialization ' \ + 'behavior, init_cfg is not allowed to be set' + super().__init__(init_cfg=init_cfg, **kwargs) + self.rfp_steps = rfp_steps + # Be careful! Pretrained weights cannot be loaded when use + # nn.ModuleList + self.rfp_modules = ModuleList() + for rfp_idx in range(1, rfp_steps): + rfp_module = build_backbone(rfp_backbone) + self.rfp_modules.append(rfp_module) + self.rfp_aspp = ASPP(self.out_channels, aspp_out_channels, + aspp_dilations) + self.rfp_weight = nn.Conv2d( + self.out_channels, + 1, + kernel_size=1, + stride=1, + padding=0, + bias=True) + + def init_weights(self): + # Avoid using super().init_weights(), which may alter the default + # initialization of the modules in self.rfp_modules that have missing + # keys in the pretrained checkpoint. 
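+        # Instead, re-initialize only the FPN-side lateral/fpn convs here, let
+        # each recursive backbone copy run its own init_weights(), and
+        # zero-init rfp_weight so the sigmoid fusion gate starts at 0.5, i.e.
+        # an even blend of the old and refined features.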
+ for convs in [self.lateral_convs, self.fpn_convs]: + for m in convs.modules(): + if isinstance(m, nn.Conv2d): + xavier_init(m, distribution='uniform') + for rfp_idx in range(self.rfp_steps - 1): + self.rfp_modules[rfp_idx].init_weights() + constant_init(self.rfp_weight, 0) + + def forward(self, inputs): + inputs = list(inputs) + assert len(inputs) == len(self.in_channels) + 1 # +1 for input image + img = inputs.pop(0) + # FPN forward + x = super().forward(tuple(inputs)) + for rfp_idx in range(self.rfp_steps - 1): + rfp_feats = [x[0]] + list( + self.rfp_aspp(x[i]) for i in range(1, len(x))) + x_idx = self.rfp_modules[rfp_idx].rfp_forward(img, rfp_feats) + # FPN forward + x_idx = super().forward(x_idx) + x_new = [] + for ft_idx in range(len(x_idx)): + add_weight = torch.sigmoid(self.rfp_weight(x_idx[ft_idx])) + x_new.append(add_weight * x_idx[ft_idx] + + (1 - add_weight) * x[ft_idx]) + x = x_new + return x diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/ssd_neck.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/ssd_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..6ca11c2f05999bdd7196cc4c559380838106b46b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/ssd_neck.py @@ -0,0 +1,128 @@ +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule, DepthwiseSeparableConvModule +from mmcv.runner import BaseModule + +from ..builder import NECKS + + +@NECKS.register_module() +class SSDNeck(BaseModule): + """Extra layers of SSD backbone to generate multi-scale feature maps. + + Args: + in_channels (Sequence[int]): Number of input channels per scale. + out_channels (Sequence[int]): Number of output channels per scale. + level_strides (Sequence[int]): Stride of 3x3 conv per level. + level_paddings (Sequence[int]): Padding size of 3x3 conv per level. + l2_norm_scale (float|None): L2 normalization layer init scale. + If None, not use L2 normalization on the first input feature. + last_kernel_size (int): Kernel size of the last conv layer. + Default: 3. + use_depthwise (bool): Whether to use DepthwiseSeparableConv. + Default: False. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: None. + act_cfg (dict): Config dict for activation layer. + Default: dict(type='ReLU'). + init_cfg (dict or list[dict], optional): Initialization config dict. 
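+
+    Example (an illustrative sketch; the values mirror the classic SSD300
+        VGG16 extra-layer configuration, which is an assumption made here
+        for illustration, not a requirement of SSDNeck):
+        >>> import torch
+        >>> in_channels = (512, 1024)
+        >>> out_channels = (512, 1024, 512, 256, 256, 256)
+        >>> level_strides = (2, 2, 1, 1)
+        >>> level_paddings = (1, 1, 0, 0)
+        >>> self = SSDNeck(in_channels, out_channels, level_strides,
+        ...                level_paddings)
+        >>> feats = [torch.rand(1, 512, 38, 38), torch.rand(1, 1024, 19, 19)]
+        >>> outputs = self.forward(feats)
+        >>> for out in outputs:
+        ...     print(tuple(out.shape))
+        (1, 512, 38, 38)
+        (1, 1024, 19, 19)
+        (1, 512, 10, 10)
+        (1, 256, 5, 5)
+        (1, 256, 3, 3)
+        (1, 256, 1, 1)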
+ """ + + def __init__(self, + in_channels, + out_channels, + level_strides, + level_paddings, + l2_norm_scale=20., + last_kernel_size=3, + use_depthwise=False, + conv_cfg=None, + norm_cfg=None, + act_cfg=dict(type='ReLU'), + init_cfg=[ + dict( + type='Xavier', distribution='uniform', + layer='Conv2d'), + dict(type='Constant', val=1, layer='BatchNorm2d'), + ]): + super(SSDNeck, self).__init__(init_cfg) + assert len(out_channels) > len(in_channels) + assert len(out_channels) - len(in_channels) == len(level_strides) + assert len(level_strides) == len(level_paddings) + assert in_channels == out_channels[:len(in_channels)] + + if l2_norm_scale: + self.l2_norm = L2Norm(in_channels[0], l2_norm_scale) + self.init_cfg += [ + dict( + type='Constant', + val=self.l2_norm.scale, + override=dict(name='l2_norm')) + ] + + self.extra_layers = nn.ModuleList() + extra_layer_channels = out_channels[len(in_channels):] + second_conv = DepthwiseSeparableConvModule if \ + use_depthwise else ConvModule + + for i, (out_channel, stride, padding) in enumerate( + zip(extra_layer_channels, level_strides, level_paddings)): + kernel_size = last_kernel_size \ + if i == len(extra_layer_channels) - 1 else 3 + per_lvl_convs = nn.Sequential( + ConvModule( + out_channels[len(in_channels) - 1 + i], + out_channel // 2, + 1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg), + second_conv( + out_channel // 2, + out_channel, + kernel_size, + stride=stride, + padding=padding, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg)) + self.extra_layers.append(per_lvl_convs) + + def forward(self, inputs): + """Forward function.""" + outs = [feat for feat in inputs] + if hasattr(self, 'l2_norm'): + outs[0] = self.l2_norm(outs[0]) + + feat = outs[-1] + for layer in self.extra_layers: + feat = layer(feat) + outs.append(feat) + return tuple(outs) + + +class L2Norm(nn.Module): + + def __init__(self, n_dims, scale=20., eps=1e-10): + """L2 normalization layer. + + Args: + n_dims (int): Number of dimensions to be normalized + scale (float, optional): Defaults to 20.. + eps (float, optional): Used to avoid division by zero. + Defaults to 1e-10. + """ + super(L2Norm, self).__init__() + self.n_dims = n_dims + self.weight = nn.Parameter(torch.Tensor(self.n_dims)) + self.eps = eps + self.scale = scale + + def forward(self, x): + """Forward function.""" + # normalization layer convert to FP32 in FP16 training + x_float = x.float() + norm = x_float.pow(2).sum(1, keepdim=True).sqrt() + self.eps + return (self.weight[None, :, None, None].float().expand_as(x_float) * + x_float / norm).type_as(x) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/yolo_neck.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/yolo_neck.py new file mode 100644 index 0000000000000000000000000000000000000000..999fb4837799d4be90f738ab71f7644507f462e4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/necks/yolo_neck.py @@ -0,0 +1,139 @@ +# Copyright (c) 2019 Western Digital Corporation or its affiliates. + +import torch +import torch.nn.functional as F +from mmcv.cnn import ConvModule +from mmcv.runner import BaseModule + +from ..builder import NECKS + + +class DetectionBlock(BaseModule): + """Detection block in YOLO neck. + + Let out_channels = n, the DetectionBlock contains: + Six ConvLayers, 1 Conv2D Layer and 1 YoloLayer. + The first 6 ConvLayers are formed the following way: + 1x1xn, 3x3x2n, 1x1xn, 3x3x2n, 1x1xn, 3x3x2n. + The Conv2D layer is 1x1x255. 
+ Some block will have branch after the fifth ConvLayer. + The input channel is arbitrary (in_channels) + + Args: + in_channels (int): The number of input channels. + out_channels (int): The number of output channels. + conv_cfg (dict): Config dict for convolution layer. Default: None. + norm_cfg (dict): Dictionary to construct and config norm layer. + Default: dict(type='BN', requires_grad=True) + act_cfg (dict): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + in_channels, + out_channels, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1), + init_cfg=None): + super(DetectionBlock, self).__init__(init_cfg) + double_out_channels = out_channels * 2 + + # shortcut + cfg = dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg, act_cfg=act_cfg) + self.conv1 = ConvModule(in_channels, out_channels, 1, **cfg) + self.conv2 = ConvModule( + out_channels, double_out_channels, 3, padding=1, **cfg) + self.conv3 = ConvModule(double_out_channels, out_channels, 1, **cfg) + self.conv4 = ConvModule( + out_channels, double_out_channels, 3, padding=1, **cfg) + self.conv5 = ConvModule(double_out_channels, out_channels, 1, **cfg) + + def forward(self, x): + tmp = self.conv1(x) + tmp = self.conv2(tmp) + tmp = self.conv3(tmp) + tmp = self.conv4(tmp) + out = self.conv5(tmp) + return out + + +@NECKS.register_module() +class YOLOV3Neck(BaseModule): + """The neck of YOLOV3. + + It can be treated as a simplified version of FPN. It + will take the result from Darknet backbone and do some upsampling and + concatenation. It will finally output the detection result. + + Note: + The input feats should be from top to bottom. + i.e., from high-lvl to low-lvl + But YOLOV3Neck will process them in reversed order. + i.e., from bottom (high-lvl) to top (low-lvl) + + Args: + num_scales (int): The number of scales / stages. + in_channels (List[int]): The number of input channels per scale. + out_channels (List[int]): The number of output channels per scale. + conv_cfg (dict, optional): Config dict for convolution layer. + Default: None. + norm_cfg (dict, optional): Dictionary to construct and config norm + layer. Default: dict(type='BN', requires_grad=True) + act_cfg (dict, optional): Config dict for activation layer. + Default: dict(type='LeakyReLU', negative_slope=0.1). + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + num_scales, + in_channels, + out_channels, + conv_cfg=None, + norm_cfg=dict(type='BN', requires_grad=True), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1), + init_cfg=None): + super(YOLOV3Neck, self).__init__(init_cfg) + assert (num_scales == len(in_channels) == len(out_channels)) + self.num_scales = num_scales + self.in_channels = in_channels + self.out_channels = out_channels + + # shortcut + cfg = dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg, act_cfg=act_cfg) + + # To support arbitrary scales, the code looks awful, but it works. + # Better solution is welcomed. 
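+        # E.g. with num_scales=3 (the Darknet-53 defaults), the lines below
+        # build detect1, conv1, detect2, conv2 and detect3, which forward()
+        # wires up from the bottom (high-level) feature first:
+        #   out1 = detect1(feats[-1])
+        #   out2 = detect2(cat(upsample(conv1(out1)), feats[-2]))
+        #   out3 = detect3(cat(upsample(conv2(out2)), feats[-3]))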
+ self.detect1 = DetectionBlock(in_channels[0], out_channels[0], **cfg) + for i in range(1, self.num_scales): + in_c, out_c = self.in_channels[i], self.out_channels[i] + inter_c = out_channels[i - 1] + self.add_module(f'conv{i}', ConvModule(inter_c, out_c, 1, **cfg)) + # in_c + out_c : High-lvl feats will be cat with low-lvl feats + self.add_module(f'detect{i+1}', + DetectionBlock(in_c + out_c, out_c, **cfg)) + + def forward(self, feats): + assert len(feats) == self.num_scales + + # processed from bottom (high-lvl) to top (low-lvl) + outs = [] + out = self.detect1(feats[-1]) + outs.append(out) + + for i, x in enumerate(reversed(feats[:-1])): + conv = getattr(self, f'conv{i+1}') + tmp = conv(out) + + # Cat with low-lvl feats + tmp = F.interpolate(tmp, scale_factor=2) + tmp = torch.cat((tmp, x), 1) + + detect = getattr(self, f'detect{i+2}') + out = detect(tmp) + outs.append(out) + + return tuple(outs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c7c6ca2d5e1bad089202d4cc406ace44788dec98 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/__init__.py @@ -0,0 +1,36 @@ +from .base_roi_head import BaseRoIHead +from .bbox_heads import (BBoxHead, ConvFCBBoxHead, DIIHead, + DoubleConvFCBBoxHead, SABLHead, SCNetBBoxHead, + Shared2FCBBoxHead, Shared4Conv1FCBBoxHead) +from .cascade_roi_head import CascadeRoIHead +from .double_roi_head import DoubleHeadRoIHead +from .dynamic_roi_head import DynamicRoIHead +from .grid_roi_head import GridRoIHead +from .htc_roi_head import HybridTaskCascadeRoIHead +from .mask_heads import (CoarseMaskHead, FCNMaskHead, FeatureRelayHead, + FusedSemanticHead, GlobalContextHead, GridHead, + HTCMaskHead, MaskIoUHead, MaskPointHead, + SCNetMaskHead, SCNetSemanticHead) +from .mask_scoring_roi_head import MaskScoringRoIHead +from .pisa_roi_head import PISARoIHead +from .point_rend_roi_head import PointRendRoIHead +from .roi_extractors import (BaseRoIExtractor, GenericRoIExtractor, + SingleRoIExtractor) +from .scnet_roi_head import SCNetRoIHead +from .shared_heads import ResLayer +from .sparse_roi_head import SparseRoIHead +from .standard_roi_head import StandardRoIHead +from .trident_roi_head import TridentRoIHead + +__all__ = [ + 'BaseRoIHead', 'CascadeRoIHead', 'DoubleHeadRoIHead', 'MaskScoringRoIHead', + 'HybridTaskCascadeRoIHead', 'GridRoIHead', 'ResLayer', 'BBoxHead', + 'ConvFCBBoxHead', 'DIIHead', 'SABLHead', 'Shared2FCBBoxHead', + 'StandardRoIHead', 'Shared4Conv1FCBBoxHead', 'DoubleConvFCBBoxHead', + 'FCNMaskHead', 'HTCMaskHead', 'FusedSemanticHead', 'GridHead', + 'MaskIoUHead', 'BaseRoIExtractor', 'GenericRoIExtractor', + 'SingleRoIExtractor', 'PISARoIHead', 'PointRendRoIHead', 'MaskPointHead', + 'CoarseMaskHead', 'DynamicRoIHead', 'SparseRoIHead', 'TridentRoIHead', + 'SCNetRoIHead', 'SCNetMaskHead', 'SCNetSemanticHead', 'SCNetBBoxHead', + 'FeatureRelayHead', 'GlobalContextHead' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/base_roi_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/base_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..423af25c24657f8d4833f35d0fb4142df38adf35 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/base_roi_head.py @@ -0,0 +1,102 @@ +from abc import ABCMeta, abstractmethod + +from 
mmcv.runner import BaseModule + +from ..builder import build_shared_head + + +class BaseRoIHead(BaseModule, metaclass=ABCMeta): + """Base class for RoIHeads.""" + + def __init__(self, + bbox_roi_extractor=None, + bbox_head=None, + mask_roi_extractor=None, + mask_head=None, + shared_head=None, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + super(BaseRoIHead, self).__init__(init_cfg) + self.train_cfg = train_cfg + self.test_cfg = test_cfg + if shared_head is not None: + shared_head.pretrained = pretrained + self.shared_head = build_shared_head(shared_head) + + if bbox_head is not None: + self.init_bbox_head(bbox_roi_extractor, bbox_head) + + if mask_head is not None: + self.init_mask_head(mask_roi_extractor, mask_head) + + self.init_assigner_sampler() + + @property + def with_bbox(self): + """bool: whether the RoI head contains a `bbox_head`""" + return hasattr(self, 'bbox_head') and self.bbox_head is not None + + @property + def with_mask(self): + """bool: whether the RoI head contains a `mask_head`""" + return hasattr(self, 'mask_head') and self.mask_head is not None + + @property + def with_shared_head(self): + """bool: whether the RoI head contains a `shared_head`""" + return hasattr(self, 'shared_head') and self.shared_head is not None + + @abstractmethod + def init_bbox_head(self): + """Initialize ``bbox_head``""" + pass + + @abstractmethod + def init_mask_head(self): + """Initialize ``mask_head``""" + pass + + @abstractmethod + def init_assigner_sampler(self): + """Initialize assigner and sampler.""" + pass + + @abstractmethod + def forward_train(self, + x, + img_meta, + proposal_list, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None, + **kwargs): + """Forward function during training.""" + + async def async_simple_test(self, + x, + proposal_list, + img_metas, + proposals=None, + rescale=False, + **kwargs): + """Asynchronized test function.""" + raise NotImplementedError + + def simple_test(self, + x, + proposal_list, + img_meta, + proposals=None, + rescale=False, + **kwargs): + """Test without augmentation.""" + + def aug_test(self, x, proposal_list, img_metas, rescale=False, **kwargs): + """Test with augmentations. + + If rescale is False, then returned bboxes and masks will fit the scale + of imgs[0]. 
+ """ diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bc5d29ece5bbf2f168f538f151f06d1b263a5153 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/__init__.py @@ -0,0 +1,13 @@ +from .bbox_head import BBoxHead +from .convfc_bbox_head import (ConvFCBBoxHead, Shared2FCBBoxHead, + Shared4Conv1FCBBoxHead) +from .dii_head import DIIHead +from .double_bbox_head import DoubleConvFCBBoxHead +from .sabl_head import SABLHead +from .scnet_bbox_head import SCNetBBoxHead + +__all__ = [ + 'BBoxHead', 'ConvFCBBoxHead', 'Shared2FCBBoxHead', + 'Shared4Conv1FCBBoxHead', 'DoubleConvFCBBoxHead', 'SABLHead', 'DIIHead', + 'SCNetBBoxHead' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/bbox_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/bbox_head.py new file mode 100644 index 0000000000000000000000000000000000000000..f950f8bbfbbbdee8d59a9fd3de968b228ada42ec --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/bbox_head.py @@ -0,0 +1,571 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.runner import BaseModule, auto_fp16, force_fp32 +from torch.nn.modules.utils import _pair + +from mmdet.core import build_bbox_coder, multi_apply, multiclass_nms +from mmdet.models.builder import HEADS, build_loss +from mmdet.models.losses import accuracy +from mmdet.models.utils import build_linear_layer + + +@HEADS.register_module() +class BBoxHead(BaseModule): + """Simplest RoI head, with only two fc layers for classification and + regression respectively.""" + + def __init__(self, + with_avg_pool=False, + with_cls=True, + with_reg=True, + roi_feat_size=7, + in_channels=256, + num_classes=80, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + clip_border=True, + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + reg_decoded_bbox=False, + reg_predictor_cfg=dict(type='Linear'), + cls_predictor_cfg=dict(type='Linear'), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox=dict( + type='SmoothL1Loss', beta=1.0, loss_weight=1.0), + init_cfg=None): + super(BBoxHead, self).__init__(init_cfg) + assert with_cls or with_reg + self.with_avg_pool = with_avg_pool + self.with_cls = with_cls + self.with_reg = with_reg + self.roi_feat_size = _pair(roi_feat_size) + self.roi_feat_area = self.roi_feat_size[0] * self.roi_feat_size[1] + self.in_channels = in_channels + self.num_classes = num_classes + self.reg_class_agnostic = reg_class_agnostic + self.reg_decoded_bbox = reg_decoded_bbox + self.reg_predictor_cfg = reg_predictor_cfg + self.cls_predictor_cfg = cls_predictor_cfg + self.fp16_enabled = False + + self.bbox_coder = build_bbox_coder(bbox_coder) + self.loss_cls = build_loss(loss_cls) + self.loss_bbox = build_loss(loss_bbox) + + in_channels = self.in_channels + if self.with_avg_pool: + self.avg_pool = nn.AvgPool2d(self.roi_feat_size) + else: + in_channels *= self.roi_feat_area + if self.with_cls: + # need to add background class + if self.custom_cls_channels: + cls_channels = self.loss_cls.get_cls_channels(self.num_classes) + else: + cls_channels = num_classes + 1 + self.fc_cls = build_linear_layer( + self.cls_predictor_cfg, + 
in_features=in_channels, + out_features=cls_channels) + if self.with_reg: + out_dim_reg = 4 if reg_class_agnostic else 4 * num_classes + self.fc_reg = build_linear_layer( + self.reg_predictor_cfg, + in_features=in_channels, + out_features=out_dim_reg) + self.debug_imgs = None + if init_cfg is None: + self.init_cfg = [] + if self.with_cls: + self.init_cfg += [ + dict( + type='Normal', std=0.01, override=dict(name='fc_cls')) + ] + if self.with_reg: + self.init_cfg += [ + dict( + type='Normal', std=0.001, override=dict(name='fc_reg')) + ] + + @property + def custom_cls_channels(self): + return getattr(self.loss_cls, 'custom_cls_channels', False) + + @property + def custom_activation(self): + return getattr(self.loss_cls, 'custom_activation', False) + + @property + def custom_accuracy(self): + return getattr(self.loss_cls, 'custom_accuracy', False) + + @auto_fp16() + def forward(self, x): + if self.with_avg_pool: + x = self.avg_pool(x) + x = x.view(x.size(0), -1) + cls_score = self.fc_cls(x) if self.with_cls else None + bbox_pred = self.fc_reg(x) if self.with_reg else None + return cls_score, bbox_pred + + def _get_target_single(self, pos_bboxes, neg_bboxes, pos_gt_bboxes, + pos_gt_labels, cfg): + """Calculate the ground truth for proposals in the single image + according to the sampling results. + + Args: + pos_bboxes (Tensor): Contains all the positive boxes, + has shape (num_pos, 4), the last dimension 4 + represents [tl_x, tl_y, br_x, br_y]. + neg_bboxes (Tensor): Contains all the negative boxes, + has shape (num_neg, 4), the last dimension 4 + represents [tl_x, tl_y, br_x, br_y]. + pos_gt_bboxes (Tensor): Contains all the gt_boxes, + has shape (num_gt, 4), the last dimension 4 + represents [tl_x, tl_y, br_x, br_y]. + pos_gt_labels (Tensor): Contains all the gt_labels, + has shape (num_gt). + cfg (obj:`ConfigDict`): `train_cfg` of R-CNN. + + Returns: + Tuple[Tensor]: Ground truth for proposals + in a single image. Containing the following Tensors: + + - labels(Tensor): Gt_labels for all proposals, has + shape (num_proposals,). + - label_weights(Tensor): Labels_weights for all + proposals, has shape (num_proposals,). + - bbox_targets(Tensor):Regression target for all + proposals, has shape (num_proposals, 4), the + last dimension 4 represents [tl_x, tl_y, br_x, br_y]. + - bbox_weights(Tensor):Regression weights for all + proposals, has shape (num_proposals, 4). + """ + num_pos = pos_bboxes.size(0) + num_neg = neg_bboxes.size(0) + num_samples = num_pos + num_neg + + # original implementation uses new_zeros since BG are set to be 0 + # now use empty & fill because BG cat_id = num_classes, + # FG cat_id = [0, num_classes-1] + labels = pos_bboxes.new_full((num_samples, ), + self.num_classes, + dtype=torch.long) + label_weights = pos_bboxes.new_zeros(num_samples) + bbox_targets = pos_bboxes.new_zeros(num_samples, 4) + bbox_weights = pos_bboxes.new_zeros(num_samples, 4) + if num_pos > 0: + labels[:num_pos] = pos_gt_labels + pos_weight = 1.0 if cfg.pos_weight <= 0 else cfg.pos_weight + label_weights[:num_pos] = pos_weight + if not self.reg_decoded_bbox: + pos_bbox_targets = self.bbox_coder.encode( + pos_bboxes, pos_gt_bboxes) + else: + # When the regression loss (e.g. `IouLoss`, `GIouLoss`) + # is applied directly on the decoded bounding boxes, both + # the predicted boxes and regression targets should be with + # absolute coordinate format. 
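Note: when `reg_decoded_bbox` is False, targets are delta-encoded by the configured bbox coder. For intuition, here is a plain-PyTorch sketch of the standard R-CNN delta parameterization that `DeltaXYWHBBoxCoder` follows, ignoring the `target_means`/`target_stds` normalization (`encode_deltas` is a hypothetical helper, not mmdet API):

import torch

def encode_deltas(proposals, gts):
    # standard R-CNN box parameterization: (dx, dy) are center offsets
    # normalized by the proposal size, (dw, dh) are log scale ratios
    px = (proposals[:, 0] + proposals[:, 2]) * 0.5
    py = (proposals[:, 1] + proposals[:, 3]) * 0.5
    pw = proposals[:, 2] - proposals[:, 0]
    ph = proposals[:, 3] - proposals[:, 1]
    gx = (gts[:, 0] + gts[:, 2]) * 0.5
    gy = (gts[:, 1] + gts[:, 3]) * 0.5
    gw = gts[:, 2] - gts[:, 0]
    gh = gts[:, 3] - gts[:, 1]
    dx = (gx - px) / pw
    dy = (gy - py) / ph
    dw = torch.log(gw / pw)
    dh = torch.log(gh / ph)
    return torch.stack([dx, dy, dw, dh], dim=-1)

prop = torch.tensor([[0., 0., 10., 10.]])
gt = torch.tensor([[1., 1., 11., 11.]])
print(encode_deltas(prop, gt))  # tensor([[0.1000, 0.1000, 0.0000, 0.0000]])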
+ pos_bbox_targets = pos_gt_bboxes + bbox_targets[:num_pos, :] = pos_bbox_targets + bbox_weights[:num_pos, :] = 1 + if num_neg > 0: + label_weights[-num_neg:] = 1.0 + + return labels, label_weights, bbox_targets, bbox_weights + + def get_targets(self, + sampling_results, + gt_bboxes, + gt_labels, + rcnn_train_cfg, + concat=True): + """Calculate the ground truth for all samples in a batch according to + the sampling_results. + + Almost the same as the implementation in bbox_head, we passed + additional parameters pos_inds_list and neg_inds_list to + `_get_target_single` function. + + Args: + sampling_results (List[obj:SamplingResults]): Assign results of + all images in a batch after sampling. + gt_bboxes (list[Tensor]): Gt_bboxes of all images in a batch, + each tensor has shape (num_gt, 4), the last dimension 4 + represents [tl_x, tl_y, br_x, br_y]. + gt_labels (list[Tensor]): Gt_labels of all images in a batch, + each tensor has shape (num_gt,). + rcnn_train_cfg (obj:ConfigDict): `train_cfg` of RCNN. + concat (bool): Whether to concatenate the results of all + the images in a single batch. + + Returns: + Tuple[Tensor]: Ground truth for proposals in a single image. + Containing the following list of Tensors: + + - labels (list[Tensor],Tensor): Gt_labels for all + proposals in a batch, each tensor in list has + shape (num_proposals,) when `concat=False`, otherwise + just a single tensor has shape (num_all_proposals,). + - label_weights (list[Tensor]): Labels_weights for + all proposals in a batch, each tensor in list has + shape (num_proposals,) when `concat=False`, otherwise + just a single tensor has shape (num_all_proposals,). + - bbox_targets (list[Tensor],Tensor): Regression target + for all proposals in a batch, each tensor in list + has shape (num_proposals, 4) when `concat=False`, + otherwise just a single tensor has shape + (num_all_proposals, 4), the last dimension 4 represents + [tl_x, tl_y, br_x, br_y]. + - bbox_weights (list[tensor],Tensor): Regression weights for + all proposals in a batch, each tensor in list has shape + (num_proposals, 4) when `concat=False`, otherwise just a + single tensor has shape (num_all_proposals, 4). + """ + pos_bboxes_list = [res.pos_bboxes for res in sampling_results] + neg_bboxes_list = [res.neg_bboxes for res in sampling_results] + pos_gt_bboxes_list = [res.pos_gt_bboxes for res in sampling_results] + pos_gt_labels_list = [res.pos_gt_labels for res in sampling_results] + labels, label_weights, bbox_targets, bbox_weights = multi_apply( + self._get_target_single, + pos_bboxes_list, + neg_bboxes_list, + pos_gt_bboxes_list, + pos_gt_labels_list, + cfg=rcnn_train_cfg) + + if concat: + labels = torch.cat(labels, 0) + label_weights = torch.cat(label_weights, 0) + bbox_targets = torch.cat(bbox_targets, 0) + bbox_weights = torch.cat(bbox_weights, 0) + return labels, label_weights, bbox_targets, bbox_weights + + @force_fp32(apply_to=('cls_score', 'bbox_pred')) + def loss(self, + cls_score, + bbox_pred, + rois, + labels, + label_weights, + bbox_targets, + bbox_weights, + reduction_override=None): + losses = dict() + if cls_score is not None: + avg_factor = max(torch.sum(label_weights > 0).float().item(), 1.) 
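Note: a toy illustration of the label/weight layout produced by the target functions above — positives first, background filled with `num_classes` (all values invented):

import torch

num_classes, num_pos, num_neg = 80, 2, 3
labels = torch.full((num_pos + num_neg, ), num_classes, dtype=torch.long)
labels[:num_pos] = torch.tensor([5, 17])   # FG ids lie in [0, num_classes - 1]
print(labels)                              # tensor([ 5, 17, 80, 80, 80])
label_weights = torch.zeros(num_pos + num_neg)
label_weights[:num_pos] = 1.0              # pos_weight (1.0 when cfg.pos_weight <= 0)
label_weights[-num_neg:] = 1.0             # negatives also contribute to the cls loss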
+ if cls_score.numel() > 0: + loss_cls_ = self.loss_cls( + cls_score, + labels, + label_weights, + avg_factor=avg_factor, + reduction_override=reduction_override) + if isinstance(loss_cls_, dict): + losses.update(loss_cls_) + else: + losses['loss_cls'] = loss_cls_ + if self.custom_activation: + acc_ = self.loss_cls.get_accuracy(cls_score, labels) + losses.update(acc_) + else: + losses['acc'] = accuracy(cls_score, labels) + if bbox_pred is not None: + bg_class_ind = self.num_classes + # 0~self.num_classes-1 are FG, self.num_classes is BG + pos_inds = (labels >= 0) & (labels < bg_class_ind) + # do not perform bounding box regression for BG anymore. + if pos_inds.any(): + if self.reg_decoded_bbox: + # When the regression loss (e.g. `IouLoss`, + # `GIouLoss`, `DIouLoss`) is applied directly on + # the decoded bounding boxes, it decodes the + # already encoded coordinates to absolute format. + bbox_pred = self.bbox_coder.decode(rois[:, 1:], bbox_pred) + if self.reg_class_agnostic: + pos_bbox_pred = bbox_pred.view( + bbox_pred.size(0), 4)[pos_inds.type(torch.bool)] + else: + pos_bbox_pred = bbox_pred.view( + bbox_pred.size(0), -1, + 4)[pos_inds.type(torch.bool), + labels[pos_inds.type(torch.bool)]] + losses['loss_bbox'] = self.loss_bbox( + pos_bbox_pred, + bbox_targets[pos_inds.type(torch.bool)], + bbox_weights[pos_inds.type(torch.bool)], + avg_factor=bbox_targets.size(0), + reduction_override=reduction_override) + else: + losses['loss_bbox'] = bbox_pred[pos_inds].sum() + return losses + + @force_fp32(apply_to=('cls_score', 'bbox_pred')) + def get_bboxes(self, + rois, + cls_score, + bbox_pred, + img_shape, + scale_factor, + rescale=False, + cfg=None): + """Transform network output for a batch into bbox predictions. + + Args: + rois (Tensor): Boxes to be transformed. Has shape (num_boxes, 5); + the last dimension 5 is arranged as (batch_index, x1, y1, x2, y2). + cls_score (Tensor): Box scores, has shape + (num_boxes, num_classes + 1). + bbox_pred (Tensor, optional): Box energies / deltas, + has shape (num_boxes, num_classes * 4). + img_shape (Sequence[int], optional): Maximum bounds for boxes, + specifies (H, W, C) or (H, W). + scale_factor (ndarray): Scale factor of the + image, arranged as (w_scale, h_scale, w_scale, h_scale). + rescale (bool): If True, return boxes in original image space. + Default: False. + cfg (obj:`ConfigDict`): `test_cfg` of Bbox Head. Default: None + + Returns: + tuple[Tensor, Tensor]: + First tensor is `det_bboxes`, has the shape + (num_boxes, 5); the last + dimension 5 represents (tl_x, tl_y, br_x, br_y, score). + Second tensor is the labels with shape (num_boxes, ). + """ + + # some losses (e.g. Seesaw loss) may have a custom activation + if self.custom_cls_channels: + scores = self.loss_cls.get_activation(cls_score) + else: + scores = F.softmax( + cls_score, dim=-1) if cls_score is not None else None + # bbox_pred would be None in some detectors when with_reg is False, + # e.g. Grid R-CNN.
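Note: the class-specific gather in `loss()` above picks, for each positive RoI, the four deltas belonging to its assigned label. A standalone illustration (shapes and values invented):

import torch

bbox_pred = torch.arange(24, dtype=torch.float32).view(2, 12)  # 2 RoIs x 3 classes x 4 deltas
pos_inds = torch.tensor([True, False])
labels = torch.tensor([2, 0])
pos_bbox_pred = bbox_pred.view(2, -1, 4)[pos_inds, labels[pos_inds]]
print(pos_bbox_pred)  # class-2 deltas of RoI 0: tensor([[ 8.,  9., 10., 11.]])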
+ if bbox_pred is not None: + bboxes = self.bbox_coder.decode( + rois[..., 1:], bbox_pred, max_shape=img_shape) + else: + bboxes = rois[:, 1:].clone() + if img_shape is not None: + bboxes[:, [0, 2]].clamp_(min=0, max=img_shape[1]) + bboxes[:, [1, 3]].clamp_(min=0, max=img_shape[0]) + + if rescale and bboxes.size(0) > 0: + + scale_factor = bboxes.new_tensor(scale_factor) + bboxes = (bboxes.view(bboxes.size(0), -1, 4) / scale_factor).view( + bboxes.size()[0], -1) + + if cfg is None: + return bboxes, scores + else: + det_bboxes, det_labels = multiclass_nms(bboxes, scores, + cfg.score_thr, cfg.nms, + cfg.max_per_img) + + return det_bboxes, det_labels + + @force_fp32(apply_to=('bbox_preds', )) + def refine_bboxes(self, rois, labels, bbox_preds, pos_is_gts, img_metas): + """Refine bboxes during training. + + Args: + rois (Tensor): Shape (n*bs, 5), where n is image number per GPU, + and bs is the sampled RoIs per image. The first column is + the image id and the next 4 columns are x1, y1, x2, y2. + labels (Tensor): Shape (n*bs, ). + bbox_preds (Tensor): Shape (n*bs, 4) or (n*bs, 4*#class). + pos_is_gts (list[Tensor]): Flags indicating if each positive bbox + is a gt bbox. + img_metas (list[dict]): Meta info of each image. + + Returns: + list[Tensor]: Refined bboxes of each image in a mini-batch. + + Example: + >>> # xdoctest: +REQUIRES(module:kwarray) + >>> import kwarray + >>> import numpy as np + >>> from mmdet.core.bbox.demodata import random_boxes + >>> self = BBoxHead(reg_class_agnostic=True) + >>> n_roi = 2 + >>> n_img = 4 + >>> scale = 512 + >>> rng = np.random.RandomState(0) + >>> img_metas = [{'img_shape': (scale, scale)} + ... for _ in range(n_img)] + >>> # Create rois in the expected format + >>> roi_boxes = random_boxes(n_roi, scale=scale, rng=rng) + >>> img_ids = torch.randint(0, n_img, (n_roi,)) + >>> img_ids = img_ids.float() + >>> rois = torch.cat([img_ids[:, None], roi_boxes], dim=1) + >>> # Create other args + >>> labels = torch.randint(0, 2, (n_roi,)).long() + >>> bbox_preds = random_boxes(n_roi, scale=scale, rng=rng) + >>> # For each image, pretend random positive boxes are gts + >>> is_label_pos = (labels.numpy() > 0).astype(np.int) + >>> lbl_per_img = kwarray.group_items(is_label_pos, + ... img_ids.numpy()) + >>> pos_per_img = [sum(lbl_per_img.get(gid, [])) + ... for gid in range(n_img)] + >>> pos_is_gts = [ + >>> torch.randint(0, 2, (npos,)).byte().sort( + >>> descending=True)[0] + >>> for npos in pos_per_img + >>> ] + >>> bboxes_list = self.refine_bboxes(rois, labels, bbox_preds, + >>> pos_is_gts, img_metas) + >>> print(bboxes_list) + """ + img_ids = rois[:, 0].long().unique(sorted=True) + assert img_ids.numel() <= len(img_metas) + + bboxes_list = [] + for i in range(len(img_metas)): + inds = torch.nonzero( + rois[:, 0] == i, as_tuple=False).squeeze(dim=1) + num_rois = inds.numel() + + bboxes_ = rois[inds, 1:] + label_ = labels[inds] + bbox_pred_ = bbox_preds[inds] + img_meta_ = img_metas[i] + pos_is_gts_ = pos_is_gts[i] + + bboxes = self.regress_by_class(bboxes_, label_, bbox_pred_, + img_meta_) + + # filter gt bboxes + pos_keep = 1 - pos_is_gts_ + keep_inds = pos_is_gts_.new_ones(num_rois) + keep_inds[:len(pos_is_gts_)] = pos_keep + + bboxes_list.append(bboxes[keep_inds.type(torch.bool)]) + + return bboxes_list + + @force_fp32(apply_to=('bbox_pred', )) + def regress_by_class(self, rois, label, bbox_pred, img_meta): + """Regress the bbox for the predicted class. Used in Cascade R-CNN. 
+ + Args: + rois (Tensor): shape (n, 4) or (n, 5) + label (Tensor): shape (n, ) + bbox_pred (Tensor): shape (n, 4*(#class)) or (n, 4) + img_meta (dict): Image meta info. + + Returns: + Tensor: Regressed bboxes, the same shape as input rois. + """ + assert rois.size(1) == 4 or rois.size(1) == 5, repr(rois.shape) + + if not self.reg_class_agnostic: + label = label * 4 + inds = torch.stack((label, label + 1, label + 2, label + 3), 1) + bbox_pred = torch.gather(bbox_pred, 1, inds) + assert bbox_pred.size(1) == 4 + + if rois.size(1) == 4: + new_rois = self.bbox_coder.decode( + rois, bbox_pred, max_shape=img_meta['img_shape']) + else: + bboxes = self.bbox_coder.decode( + rois[:, 1:], bbox_pred, max_shape=img_meta['img_shape']) + new_rois = torch.cat((rois[:, [0]], bboxes), dim=1) + + return new_rois + + def onnx_export(self, + rois, + cls_score, + bbox_pred, + img_shape, + cfg=None, + **kwargs): + """Transform network output for a batch into bbox predictions. + + Args: + rois (Tensor): Boxes to be transformed. + Has shape (B, num_boxes, 5) + cls_score (Tensor): Box scores. has shape + (B, num_boxes, num_classes + 1), 1 represent the background. + bbox_pred (Tensor, optional): Box energies / deltas for, + has shape (B, num_boxes, num_classes * 4) when. + img_shape (torch.Tensor): Shape of image. + cfg (obj:`ConfigDict`): `test_cfg` of Bbox Head. Default: None + + Returns: + tuple[Tensor, Tensor]: dets of shape [N, num_det, 5] + and class labels of shape [N, num_det]. + """ + + assert rois.ndim == 3, 'Only support export two stage ' \ + 'model to ONNX ' \ + 'with batch dimension. ' + + if self.custom_cls_channels: + scores = self.loss_cls.get_activation(cls_score) + else: + scores = F.softmax( + cls_score, dim=-1) if cls_score is not None else None + + if bbox_pred is not None: + bboxes = self.bbox_coder.decode( + rois[..., 1:], bbox_pred, max_shape=img_shape) + else: + bboxes = rois[..., 1:].clone() + if img_shape is not None: + max_shape = bboxes.new_tensor(img_shape)[..., :2] + min_xy = bboxes.new_tensor(0) + max_xy = torch.cat( + [max_shape] * 2, dim=-1).flip(-1).unsqueeze(-2) + bboxes = torch.where(bboxes < min_xy, min_xy, bboxes) + bboxes = torch.where(bboxes > max_xy, max_xy, bboxes) + + # Replace multiclass_nms with ONNX::NonMaxSuppression in deployment + from mmdet.core.export import add_dummy_nms_for_onnx + batch_size = scores.shape[0] + # ignore background class + scores = scores[..., :self.num_classes] + labels = torch.arange( + self.num_classes, dtype=torch.long).to(scores.device) + labels = labels.view(1, 1, -1).expand_as(scores) + labels = labels.reshape(batch_size, -1) + scores = scores.reshape(batch_size, -1) + bboxes = bboxes.reshape(batch_size, -1, 4) + if self.reg_class_agnostic: + bboxes = bboxes.repeat(1, self.num_classes, 1) + + max_size = torch.max(img_shape) + # Offset bboxes of each class so that bboxes of different labels + # do not overlap. + offsets = (labels * max_size + 1).unsqueeze(2) + bboxes_for_nms = bboxes + offsets + max_output_boxes_per_class = cfg.nms.get('max_output_boxes_per_class', + cfg.max_per_img) + iou_threshold = cfg.nms.get('iou_threshold', 0.5) + score_threshold = cfg.score_thr + nms_pre = cfg.get('deploy_nms_pre', -1) + batch_dets, labels = add_dummy_nms_for_onnx( + bboxes_for_nms, + scores.unsqueeze(2), + max_output_boxes_per_class, + iou_threshold, + score_threshold, + pre_top_k=nms_pre, + after_top_k=cfg.max_per_img, + labels=labels) + # Offset the bboxes back after dummy nms. 
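Note: the label offset used around the dummy NMS is the usual way to emulate per-class NMS with a single class-agnostic pass: shifting every class into a disjoint coordinate range makes cross-class suppression impossible. A tiny standalone illustration:

import torch

boxes = torch.tensor([[10., 10., 20., 20.],
                      [12., 12., 22., 22.]])   # heavily overlapping boxes
labels = torch.tensor([0, 1])                  # ...but of different classes
max_size = torch.tensor(640.)                  # any bound larger than the image
offsets = (labels * max_size + 1).unsqueeze(1)
shifted = boxes + offsets                      # class 0 -> +1, class 1 -> +641
# shifted boxes live in disjoint ranges, so class-agnostic NMS cannot let one
# class suppress another; subtracting the offsets afterwards restores coords.
restored = shifted - offsets
print(torch.equal(restored, boxes))            # True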
+ offsets = (labels * max_size + 1).unsqueeze(2) + # Indexing + inplace operation fails with dynamic shape in ONNX + # original style: batch_dets[..., :4] -= offsets + bboxes, scores = batch_dets[..., 0:4], batch_dets[..., 4:5] + bboxes -= offsets + batch_dets = torch.cat([bboxes, scores], dim=2) + return batch_dets, labels diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/convfc_bbox_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/convfc_bbox_head.py new file mode 100644 index 0000000000000000000000000000000000000000..6f9f5ec6605bb62c1f5f65061737bb5ee7138b94 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/convfc_bbox_head.py @@ -0,0 +1,221 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule + +from mmdet.models.builder import HEADS +from mmdet.models.utils import build_linear_layer +from .bbox_head import BBoxHead + + +@HEADS.register_module() +class ConvFCBBoxHead(BBoxHead): + r"""More general bbox head, with shared conv and fc layers and two optional + separated branches. + + .. code-block:: none + + /-> cls convs -> cls fcs -> cls + shared convs -> shared fcs + \-> reg convs -> reg fcs -> reg + """ # noqa: W605 + + def __init__(self, + num_shared_convs=0, + num_shared_fcs=0, + num_cls_convs=0, + num_cls_fcs=0, + num_reg_convs=0, + num_reg_fcs=0, + conv_out_channels=256, + fc_out_channels=1024, + conv_cfg=None, + norm_cfg=None, + init_cfg=None, + *args, + **kwargs): + super(ConvFCBBoxHead, self).__init__( + *args, init_cfg=init_cfg, **kwargs) + assert (num_shared_convs + num_shared_fcs + num_cls_convs + + num_cls_fcs + num_reg_convs + num_reg_fcs > 0) + if num_cls_convs > 0 or num_reg_convs > 0: + assert num_shared_fcs == 0 + if not self.with_cls: + assert num_cls_convs == 0 and num_cls_fcs == 0 + if not self.with_reg: + assert num_reg_convs == 0 and num_reg_fcs == 0 + self.num_shared_convs = num_shared_convs + self.num_shared_fcs = num_shared_fcs + self.num_cls_convs = num_cls_convs + self.num_cls_fcs = num_cls_fcs + self.num_reg_convs = num_reg_convs + self.num_reg_fcs = num_reg_fcs + self.conv_out_channels = conv_out_channels + self.fc_out_channels = fc_out_channels + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + + # add shared convs and fcs + self.shared_convs, self.shared_fcs, last_layer_dim = \ + self._add_conv_fc_branch( + self.num_shared_convs, self.num_shared_fcs, self.in_channels, + True) + self.shared_out_channels = last_layer_dim + + # add cls specific branch + self.cls_convs, self.cls_fcs, self.cls_last_dim = \ + self._add_conv_fc_branch( + self.num_cls_convs, self.num_cls_fcs, self.shared_out_channels) + + # add reg specific branch + self.reg_convs, self.reg_fcs, self.reg_last_dim = \ + self._add_conv_fc_branch( + self.num_reg_convs, self.num_reg_fcs, self.shared_out_channels) + + if self.num_shared_fcs == 0 and not self.with_avg_pool: + if self.num_cls_fcs == 0: + self.cls_last_dim *= self.roi_feat_area + if self.num_reg_fcs == 0: + self.reg_last_dim *= self.roi_feat_area + + self.relu = nn.ReLU(inplace=True) + # reconstruct fc_cls and fc_reg since input channels are changed + if self.with_cls: + if self.custom_cls_channels: + cls_channels = self.loss_cls.get_cls_channels(self.num_classes) + else: + cls_channels = self.num_classes + 1 + self.fc_cls = build_linear_layer( + self.cls_predictor_cfg, + in_features=self.cls_last_dim, + out_features=cls_channels) + if self.with_reg: + out_dim_reg = (4 if self.reg_class_agnostic else 
4 * + self.num_classes) + self.fc_reg = build_linear_layer( + self.reg_predictor_cfg, + in_features=self.reg_last_dim, + out_features=out_dim_reg) + + if init_cfg is None: + self.init_cfg += [ + dict( + type='Xavier', + layer='Linear', + override=[ + dict(name='shared_fcs'), + dict(name='cls_fcs'), + dict(name='reg_fcs') + ]) + ] + + def _add_conv_fc_branch(self, + num_branch_convs, + num_branch_fcs, + in_channels, + is_shared=False): + """Add shared or separable branch. + + convs -> avg pool (optional) -> fcs + """ + last_layer_dim = in_channels + # add branch specific conv layers + branch_convs = nn.ModuleList() + if num_branch_convs > 0: + for i in range(num_branch_convs): + conv_in_channels = ( + last_layer_dim if i == 0 else self.conv_out_channels) + branch_convs.append( + ConvModule( + conv_in_channels, + self.conv_out_channels, + 3, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + last_layer_dim = self.conv_out_channels + # add branch specific fc layers + branch_fcs = nn.ModuleList() + if num_branch_fcs > 0: + # for shared branch, only consider self.with_avg_pool + # for separated branches, also consider self.num_shared_fcs + if (is_shared + or self.num_shared_fcs == 0) and not self.with_avg_pool: + last_layer_dim *= self.roi_feat_area + for i in range(num_branch_fcs): + fc_in_channels = ( + last_layer_dim if i == 0 else self.fc_out_channels) + branch_fcs.append( + nn.Linear(fc_in_channels, self.fc_out_channels)) + last_layer_dim = self.fc_out_channels + return branch_convs, branch_fcs, last_layer_dim + + def forward(self, x): + # shared part + if self.num_shared_convs > 0: + for conv in self.shared_convs: + x = conv(x) + + if self.num_shared_fcs > 0: + if self.with_avg_pool: + x = self.avg_pool(x) + + x = x.flatten(1) + + for fc in self.shared_fcs: + x = self.relu(fc(x)) + # separate branches + x_cls = x + x_reg = x + + for conv in self.cls_convs: + x_cls = conv(x_cls) + if x_cls.dim() > 2: + if self.with_avg_pool: + x_cls = self.avg_pool(x_cls) + x_cls = x_cls.flatten(1) + for fc in self.cls_fcs: + x_cls = self.relu(fc(x_cls)) + + for conv in self.reg_convs: + x_reg = conv(x_reg) + if x_reg.dim() > 2: + if self.with_avg_pool: + x_reg = self.avg_pool(x_reg) + x_reg = x_reg.flatten(1) + for fc in self.reg_fcs: + x_reg = self.relu(fc(x_reg)) + + cls_score = self.fc_cls(x_cls) if self.with_cls else None + bbox_pred = self.fc_reg(x_reg) if self.with_reg else None + return cls_score, bbox_pred + + +@HEADS.register_module() +class Shared2FCBBoxHead(ConvFCBBoxHead): + + def __init__(self, fc_out_channels=1024, *args, **kwargs): + super(Shared2FCBBoxHead, self).__init__( + num_shared_convs=0, + num_shared_fcs=2, + num_cls_convs=0, + num_cls_fcs=0, + num_reg_convs=0, + num_reg_fcs=0, + fc_out_channels=fc_out_channels, + *args, + **kwargs) + + +@HEADS.register_module() +class Shared4Conv1FCBBoxHead(ConvFCBBoxHead): + + def __init__(self, fc_out_channels=1024, *args, **kwargs): + super(Shared4Conv1FCBBoxHead, self).__init__( + num_shared_convs=4, + num_shared_fcs=1, + num_cls_convs=0, + num_cls_fcs=0, + num_reg_convs=0, + num_reg_fcs=0, + fc_out_channels=fc_out_channels, + *args, + **kwargs) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/dii_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/dii_head.py new file mode 100644 index 0000000000000000000000000000000000000000..cf708eb090eda99c2a88764318cff60ebf8feb2e --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/dii_head.py @@ -0,0 +1,421 @@ +import torch +import torch.nn as nn +from mmcv.cnn import (bias_init_with_prob, build_activation_layer, + build_norm_layer) +from mmcv.cnn.bricks.transformer import FFN, MultiheadAttention +from mmcv.runner import auto_fp16, force_fp32 + +from mmdet.core import multi_apply +from mmdet.models.builder import HEADS, build_loss +from mmdet.models.dense_heads.atss_head import reduce_mean +from mmdet.models.losses import accuracy +from mmdet.models.utils import build_transformer +from .bbox_head import BBoxHead + + +@HEADS.register_module() +class DIIHead(BBoxHead): + r"""Dynamic Instance Interactive Head for `Sparse R-CNN: End-to-End Object + Detection with Learnable Proposals <https://arxiv.org/abs/2011.12450>`_ + + Args: + num_classes (int): Number of classes in the dataset. + Defaults to 80. + num_ffn_fcs (int): The number of fully-connected + layers in FFNs. Defaults to 2. + num_heads (int): The number of attention heads in + MultiheadAttention. Defaults to 8. + num_cls_fcs (int): The number of fully-connected + layers in the classification subnet. Defaults to 1. + num_reg_fcs (int): The number of fully-connected + layers in the regression subnet. Defaults to 3. + feedforward_channels (int): The hidden dimension + of FFNs. Defaults to 2048. + in_channels (int): Hidden channels of MultiheadAttention. + Defaults to 256. + dropout (float): Dropout probability. + Defaults to 0.0. + ffn_act_cfg (dict): The activation config for FFNs. + dynamic_conv_cfg (dict): The convolution config + for DynamicConv. + loss_iou (dict): The config for IoU or GIoU loss. + + """ + + def __init__(self, + num_classes=80, + num_ffn_fcs=2, + num_heads=8, + num_cls_fcs=1, + num_reg_fcs=3, + feedforward_channels=2048, + in_channels=256, + dropout=0.0, + ffn_act_cfg=dict(type='ReLU', inplace=True), + dynamic_conv_cfg=dict( + type='DynamicConv', + in_channels=256, + feat_channels=64, + out_channels=256, + input_feat_shape=7, + act_cfg=dict(type='ReLU', inplace=True), + norm_cfg=dict(type='LN')), + loss_iou=dict(type='GIoULoss', loss_weight=2.0), + init_cfg=None, + **kwargs): + assert init_cfg is None, 'To prevent abnormal initialization ' \ + 'behavior, init_cfg is not allowed to be set' + super(DIIHead, self).__init__( + num_classes=num_classes, + reg_decoded_bbox=True, + reg_class_agnostic=True, + init_cfg=init_cfg, + **kwargs) + self.loss_iou = build_loss(loss_iou) + self.in_channels = in_channels + self.fp16_enabled = False + self.attention = MultiheadAttention(in_channels, num_heads, dropout) + self.attention_norm = build_norm_layer(dict(type='LN'), in_channels)[1] + + self.instance_interactive_conv = build_transformer(dynamic_conv_cfg) + self.instance_interactive_conv_dropout = nn.Dropout(dropout) + self.instance_interactive_conv_norm = build_norm_layer( + dict(type='LN'), in_channels)[1] + + self.ffn = FFN( + in_channels, + feedforward_channels, + num_ffn_fcs, + act_cfg=ffn_act_cfg, + dropout=dropout) + self.ffn_norm = build_norm_layer(dict(type='LN'), in_channels)[1] + + self.cls_fcs = nn.ModuleList() + for _ in range(num_cls_fcs): + self.cls_fcs.append( + nn.Linear(in_channels, in_channels, bias=False)) + self.cls_fcs.append( + build_norm_layer(dict(type='LN'), in_channels)[1]) + self.cls_fcs.append( + build_activation_layer(dict(type='ReLU', inplace=True))) + + # overload the self.fc_cls in BBoxHead + if self.loss_cls.use_sigmoid: + self.fc_cls = nn.Linear(in_channels, self.num_classes) + else: + self.fc_cls = nn.Linear(in_channels, self.num_classes + 1) + +
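Note: mmcv's MultiheadAttention is built on torch.nn.MultiheadAttention, which is sequence-first by default — hence the permutes in forward() below. A plain-PyTorch sketch of that layout (shapes illustrative):

import torch
import torch.nn as nn

attn = nn.MultiheadAttention(embed_dim=256, num_heads=8)
proposal_feat = torch.randn(2, 100, 256)   # (batch, num_proposals, channels)
seq = proposal_feat.permute(1, 0, 2)       # -> (num_proposals, batch, channels)
out, _ = attn(seq, seq, seq)               # self-attention across proposals
out = out.permute(1, 0, 2)                 # back to (batch, num_proposals, channels)
print(out.shape)                           # torch.Size([2, 100, 256])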
self.reg_fcs = nn.ModuleList() + for _ in range(num_reg_fcs): + self.reg_fcs.append( + nn.Linear(in_channels, in_channels, bias=False)) + self.reg_fcs.append( + build_norm_layer(dict(type='LN'), in_channels)[1]) + self.reg_fcs.append( + build_activation_layer(dict(type='ReLU', inplace=True))) + # overload the self.fc_reg in BBoxHead + self.fc_reg = nn.Linear(in_channels, 4) + + assert self.reg_class_agnostic, 'DIIHead only ' \ + 'support `reg_class_agnostic=True` ' + assert self.reg_decoded_bbox, 'DIIHead only ' \ + 'support `reg_decoded_bbox=True`' + + def init_weights(self): + """Use Xavier initialization for all weight parameters and set the + classification head bias to a specific value when using focal loss.""" + super(DIIHead, self).init_weights() + for p in self.parameters(): + if p.dim() > 1: + nn.init.xavier_uniform_(p) + else: + # adopt the default initialization for + # the weight and bias of the layer norm + pass + if self.loss_cls.use_sigmoid: + bias_init = bias_init_with_prob(0.01) + nn.init.constant_(self.fc_cls.bias, bias_init) + + @auto_fp16() + def forward(self, roi_feat, proposal_feat): + """Forward function of Dynamic Instance Interactive Head. + + Args: + roi_feat (Tensor): Roi-pooling features with shape + (batch_size*num_proposals, feature_dimensions, + pooling_h, pooling_w). + proposal_feat (Tensor): Intermediate feature obtained from + the DIIHead of the last stage, has shape + (batch_size, num_proposals, feature_dimensions). + + Returns: + tuple[Tensor]: Usually a tuple of classification scores, + bbox predictions and an intermediate feature. + + - cls_scores (Tensor): Classification scores for + all proposals, has shape + (batch_size, num_proposals, num_classes). + - bbox_preds (Tensor): Box energies / deltas for + all proposals, has shape + (batch_size, num_proposals, 4). + - obj_feat (Tensor): Object feature before classification + and regression subnet, has shape + (batch_size, num_proposals, feature_dimensions). + """ + N, num_proposals = proposal_feat.shape[:2] + + # Self attention + proposal_feat = proposal_feat.permute(1, 0, 2) + proposal_feat = self.attention_norm(self.attention(proposal_feat)) + + # instance interactive + proposal_feat = proposal_feat.permute(1, 0, + 2).reshape(-1, self.in_channels) + proposal_feat_iic = self.instance_interactive_conv( + proposal_feat, roi_feat) + proposal_feat = proposal_feat + self.instance_interactive_conv_dropout( + proposal_feat_iic) + obj_feat = self.instance_interactive_conv_norm(proposal_feat) + + # FFN + obj_feat = self.ffn_norm(self.ffn(obj_feat)) + + cls_feat = obj_feat + reg_feat = obj_feat + + for cls_layer in self.cls_fcs: + cls_feat = cls_layer(cls_feat) + for reg_layer in self.reg_fcs: + reg_feat = reg_layer(reg_feat) + + cls_score = self.fc_cls(cls_feat).view(N, num_proposals, -1) + bbox_delta = self.fc_reg(reg_feat).view(N, num_proposals, -1) + + return cls_score, bbox_delta, obj_feat.view(N, num_proposals, -1) + + @force_fp32(apply_to=('cls_score', 'bbox_pred')) + def loss(self, + cls_score, + bbox_pred, + labels, + label_weights, + bbox_targets, + bbox_weights, + imgs_whwh=None, + reduction_override=None, + **kwargs): + """Loss function of DIIHead; computes the loss of all images. + + Args: + cls_score (Tensor): Classification prediction + results for all classes, has shape + (batch_size * num_proposals_single_image, num_classes) + bbox_pred (Tensor): Regression prediction results, + has shape + (batch_size * num_proposals_single_image, 4), the last + dimension 4 represents [tl_x, tl_y, br_x, br_y].
+ labels (Tensor): Label of each proposal, has shape + (batch_size * num_proposals_single_image,). + label_weights (Tensor): Classification loss + weight of each proposal, has shape + (batch_size * num_proposals_single_image,). + bbox_targets (Tensor): Regression targets of each + proposal, has shape + (batch_size * num_proposals_single_image, 4), + the last dimension 4 represents + [tl_x, tl_y, br_x, br_y]. + bbox_weights (Tensor): Regression loss weight of each + proposal's coordinates, has shape + (batch_size * num_proposals_single_image, 4). + imgs_whwh (Tensor): Tensor with shape + (batch_size, num_proposals, 4), the last + dimension means + [img_width, img_height, img_width, img_height]. + reduction_override (str, optional): The reduction + method used to override the original reduction + method of the loss. Options are "none", + "mean" and "sum". Defaults to None. + + Returns: + dict[str, Tensor]: Dictionary of loss components. + """ + losses = dict() + bg_class_ind = self.num_classes + # note that in Sparse R-CNN num_gt == num_pos + pos_inds = (labels >= 0) & (labels < bg_class_ind) + num_pos = pos_inds.sum().float() + avg_factor = reduce_mean(num_pos) + if cls_score is not None: + if cls_score.numel() > 0: + losses['loss_cls'] = self.loss_cls( + cls_score, + labels, + label_weights, + avg_factor=avg_factor, + reduction_override=reduction_override) + losses['pos_acc'] = accuracy(cls_score[pos_inds], + labels[pos_inds]) + if bbox_pred is not None: + # 0~self.num_classes-1 are FG, self.num_classes is BG + # do not perform bounding box regression for BG anymore. + if pos_inds.any(): + pos_bbox_pred = bbox_pred.reshape(bbox_pred.size(0), + 4)[pos_inds.type(torch.bool)] + imgs_whwh = imgs_whwh.reshape(bbox_pred.size(0), + 4)[pos_inds.type(torch.bool)] + losses['loss_bbox'] = self.loss_bbox( + pos_bbox_pred / imgs_whwh, + bbox_targets[pos_inds.type(torch.bool)] / imgs_whwh, + bbox_weights[pos_inds.type(torch.bool)], + avg_factor=avg_factor) + losses['loss_iou'] = self.loss_iou( + pos_bbox_pred, + bbox_targets[pos_inds.type(torch.bool)], + bbox_weights[pos_inds.type(torch.bool)], + avg_factor=avg_factor) + else: + losses['loss_bbox'] = bbox_pred.sum() * 0 + losses['loss_iou'] = bbox_pred.sum() * 0 + return losses + + def _get_target_single(self, pos_inds, neg_inds, pos_bboxes, neg_bboxes, + pos_gt_bboxes, pos_gt_labels, cfg): + """Calculate the ground truth for proposals in a single image + according to the sampling results. + + Almost the same as the implementation in `bbox_head`, except that + pos_inds and neg_inds are used to select positive and + negative samples instead of taking the first num_pos + proposals as positives. + + Args: + pos_inds (Tensor): Has length equal to the number of + positive samples; contains the indices of all + positive samples in the original proposal set. + neg_inds (Tensor): Has length equal to the number of + negative samples; contains the indices of all + negative samples in the original proposal set. + pos_bboxes (Tensor): Contains all the positive boxes, + has shape (num_pos, 4), the last dimension 4 + represents [tl_x, tl_y, br_x, br_y]. + neg_bboxes (Tensor): Contains all the negative boxes, + has shape (num_neg, 4), the last dimension 4 + represents [tl_x, tl_y, br_x, br_y]. + pos_gt_bboxes (Tensor): Contains all the gt_boxes, + has shape (num_gt, 4), the last dimension 4 + represents [tl_x, tl_y, br_x, br_y]. + pos_gt_labels (Tensor): Contains all the gt_labels, + has shape (num_gt,). + cfg (obj:`ConfigDict`): `train_cfg` of R-CNN.
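Note: a small numeric illustration of the `imgs_whwh` normalization in the loss above — the L1 term is computed on image-normalized boxes while the GIoU term keeps absolute coordinates (values invented):

import torch

pos_bbox_pred = torch.tensor([[100., 150., 300., 400.]])  # absolute [x1, y1, x2, y2]
imgs_whwh = torch.tensor([[640., 480., 640., 480.]])      # [img_w, img_h, img_w, img_h]
print(pos_bbox_pred / imgs_whwh)  # tensor([[0.1562, 0.3125, 0.4688, 0.8333]])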
+ + Returns: + Tuple[Tensor]: Ground truth for proposals in a single image. + Containing the following Tensors: + + - labels(Tensor): Gt_labels for all proposals, has + shape (num_proposals,). + - label_weights(Tensor): Labels_weights for all proposals, has + shape (num_proposals,). + - bbox_targets(Tensor):Regression target for all proposals, has + shape (num_proposals, 4), the last dimension 4 + represents [tl_x, tl_y, br_x, br_y]. + - bbox_weights(Tensor):Regression weights for all proposals, + has shape (num_proposals, 4). + """ + num_pos = pos_bboxes.size(0) + num_neg = neg_bboxes.size(0) + num_samples = num_pos + num_neg + + # original implementation uses new_zeros since BG are set to be 0 + # now use empty & fill because BG cat_id = num_classes, + # FG cat_id = [0, num_classes-1] + labels = pos_bboxes.new_full((num_samples, ), + self.num_classes, + dtype=torch.long) + label_weights = pos_bboxes.new_zeros(num_samples) + bbox_targets = pos_bboxes.new_zeros(num_samples, 4) + bbox_weights = pos_bboxes.new_zeros(num_samples, 4) + if num_pos > 0: + labels[pos_inds] = pos_gt_labels + pos_weight = 1.0 if cfg.pos_weight <= 0 else cfg.pos_weight + label_weights[pos_inds] = pos_weight + if not self.reg_decoded_bbox: + pos_bbox_targets = self.bbox_coder.encode( + pos_bboxes, pos_gt_bboxes) + else: + pos_bbox_targets = pos_gt_bboxes + bbox_targets[pos_inds, :] = pos_bbox_targets + bbox_weights[pos_inds, :] = 1 + if num_neg > 0: + label_weights[neg_inds] = 1.0 + + return labels, label_weights, bbox_targets, bbox_weights + + def get_targets(self, + sampling_results, + gt_bboxes, + gt_labels, + rcnn_train_cfg, + concat=True): + """Calculate the ground truth for all samples in a batch according to + the sampling_results. + + Almost the same as the implementation in bbox_head, we passed + additional parameters pos_inds_list and neg_inds_list to + `_get_target_single` function. + + Args: + sampling_results (List[obj:SamplingResults]): Assign results of + all images in a batch after sampling. + gt_bboxes (list[Tensor]): Gt_bboxes of all images in a batch, + each tensor has shape (num_gt, 4), the last dimension 4 + represents [tl_x, tl_y, br_x, br_y]. + gt_labels (list[Tensor]): Gt_labels of all images in a batch, + each tensor has shape (num_gt,). + rcnn_train_cfg (obj:`ConfigDict`): `train_cfg` of RCNN. + concat (bool): Whether to concatenate the results of all + the images in a single batch. + + Returns: + Tuple[Tensor]: Ground truth for proposals in a single image. + Containing the following list of Tensors: + + - labels (list[Tensor],Tensor): Gt_labels for all + proposals in a batch, each tensor in list has + shape (num_proposals,) when `concat=False`, otherwise just + a single tensor has shape (num_all_proposals,). + - label_weights (list[Tensor]): Labels_weights for + all proposals in a batch, each tensor in list has shape + (num_proposals,) when `concat=False`, otherwise just a + single tensor has shape (num_all_proposals,). + - bbox_targets (list[Tensor],Tensor): Regression target + for all proposals in a batch, each tensor in list has + shape (num_proposals, 4) when `concat=False`, otherwise + just a single tensor has shape (num_all_proposals, 4), + the last dimension 4 represents [tl_x, tl_y, br_x, br_y]. + - bbox_weights (list[tensor],Tensor): Regression weights for + all proposals in a batch, each tensor in list has shape + (num_proposals, 4) when `concat=False`, otherwise just a + single tensor has shape (num_all_proposals, 4). 
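Note: `get_targets` relies on `mmdet.core.multi_apply` to run the single-image function over per-image lists and transpose the per-image tuples into per-field lists. A simplified sketch of those semantics (`multi_apply_sketch` and `per_image` are illustrative stand-ins, not mmdet API):

from functools import partial


def multi_apply_sketch(func, *args, **kwargs):
    # map a per-image function over parallel lists, then transpose the
    # per-image result tuples into tuples of per-field lists
    pfunc = partial(func, **kwargs) if kwargs else func
    return tuple(map(list, zip(*map(pfunc, *args))))


def per_image(a, b, scale=1):
    return a * scale, b * scale


xs, ys = multi_apply_sketch(per_image, [1, 2], [10, 20], scale=2)
print(xs, ys)  # [2, 4] [20, 40]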
+ """ + pos_inds_list = [res.pos_inds for res in sampling_results] + neg_inds_list = [res.neg_inds for res in sampling_results] + pos_bboxes_list = [res.pos_bboxes for res in sampling_results] + neg_bboxes_list = [res.neg_bboxes for res in sampling_results] + pos_gt_bboxes_list = [res.pos_gt_bboxes for res in sampling_results] + pos_gt_labels_list = [res.pos_gt_labels for res in sampling_results] + labels, label_weights, bbox_targets, bbox_weights = multi_apply( + self._get_target_single, + pos_inds_list, + neg_inds_list, + pos_bboxes_list, + neg_bboxes_list, + pos_gt_bboxes_list, + pos_gt_labels_list, + cfg=rcnn_train_cfg) + if concat: + labels = torch.cat(labels, 0) + label_weights = torch.cat(label_weights, 0) + bbox_targets = torch.cat(bbox_targets, 0) + bbox_weights = torch.cat(bbox_weights, 0) + return labels, label_weights, bbox_targets, bbox_weights diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/double_bbox_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/double_bbox_head.py new file mode 100644 index 0000000000000000000000000000000000000000..26687e048b78dbc00cfc9811e1370db83942b85e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/double_bbox_head.py @@ -0,0 +1,177 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule +from mmcv.runner import BaseModule, ModuleList + +from mmdet.models.backbones.resnet import Bottleneck +from mmdet.models.builder import HEADS +from .bbox_head import BBoxHead + + +class BasicResBlock(BaseModule): + """Basic residual block. + + This block is a little different from the block in the ResNet backbone. + The kernel size of conv1 is 1 in this block while 3 in ResNet BasicBlock. + + Args: + in_channels (int): Channels of the input feature map. + out_channels (int): Channels of the output feature map. + conv_cfg (dict): The config dict for convolution layers. + norm_cfg (dict): The config dict for normalization layers. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + in_channels, + out_channels, + conv_cfg=None, + norm_cfg=dict(type='BN'), + init_cfg=None): + super(BasicResBlock, self).__init__(init_cfg) + + # main path + self.conv1 = ConvModule( + in_channels, + in_channels, + kernel_size=3, + padding=1, + bias=False, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg) + self.conv2 = ConvModule( + in_channels, + out_channels, + kernel_size=1, + bias=False, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=None) + + # identity path + self.conv_identity = ConvModule( + in_channels, + out_channels, + kernel_size=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=None) + + self.relu = nn.ReLU(inplace=True) + + def forward(self, x): + identity = x + + x = self.conv1(x) + x = self.conv2(x) + + identity = self.conv_identity(identity) + out = x + identity + + out = self.relu(out) + return out + + +@HEADS.register_module() +class DoubleConvFCBBoxHead(BBoxHead): + r"""Bbox head used in Double-Head R-CNN + + .. 
code-block:: none + + /-> cls + /-> shared convs -> + \-> reg + roi features + /-> cls + \-> shared fc -> + \-> reg + """ # noqa: W605 + + def __init__(self, + num_convs=0, + num_fcs=0, + conv_out_channels=1024, + fc_out_channels=1024, + conv_cfg=None, + norm_cfg=dict(type='BN'), + init_cfg=dict( + type='Normal', + override=[ + dict(type='Normal', name='fc_cls', std=0.01), + dict(type='Normal', name='fc_reg', std=0.001), + dict( + type='Xavier', + name='fc_branch', + distribution='uniform') + ]), + **kwargs): + kwargs.setdefault('with_avg_pool', True) + super(DoubleConvFCBBoxHead, self).__init__(init_cfg=init_cfg, **kwargs) + assert self.with_avg_pool + assert num_convs > 0 + assert num_fcs > 0 + self.num_convs = num_convs + self.num_fcs = num_fcs + self.conv_out_channels = conv_out_channels + self.fc_out_channels = fc_out_channels + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + + # increase the channel of input features + self.res_block = BasicResBlock(self.in_channels, + self.conv_out_channels) + + # add conv heads + self.conv_branch = self._add_conv_branch() + # add fc heads + self.fc_branch = self._add_fc_branch() + + out_dim_reg = 4 if self.reg_class_agnostic else 4 * self.num_classes + self.fc_reg = nn.Linear(self.conv_out_channels, out_dim_reg) + + self.fc_cls = nn.Linear(self.fc_out_channels, self.num_classes + 1) + self.relu = nn.ReLU(inplace=True) + + def _add_conv_branch(self): + """Add the fc branch which consists of a sequential of conv layers.""" + branch_convs = ModuleList() + for i in range(self.num_convs): + branch_convs.append( + Bottleneck( + inplanes=self.conv_out_channels, + planes=self.conv_out_channels // 4, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + return branch_convs + + def _add_fc_branch(self): + """Add the fc branch which consists of a sequential of fc layers.""" + branch_fcs = ModuleList() + for i in range(self.num_fcs): + fc_in_channels = ( + self.in_channels * + self.roi_feat_area if i == 0 else self.fc_out_channels) + branch_fcs.append(nn.Linear(fc_in_channels, self.fc_out_channels)) + return branch_fcs + + def forward(self, x_cls, x_reg): + # conv head + x_conv = self.res_block(x_reg) + + for conv in self.conv_branch: + x_conv = conv(x_conv) + + if self.with_avg_pool: + x_conv = self.avg_pool(x_conv) + + x_conv = x_conv.view(x_conv.size(0), -1) + bbox_pred = self.fc_reg(x_conv) + + # fc head + x_fc = x_cls.view(x_cls.size(0), -1) + for fc in self.fc_branch: + x_fc = self.relu(fc(x_fc)) + + cls_score = self.fc_cls(x_fc) + + return cls_score, bbox_pred diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/sabl_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/sabl_head.py new file mode 100644 index 0000000000000000000000000000000000000000..07c542ef144ca664516f2a5b87913c6d92a75405 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/sabl_head.py @@ -0,0 +1,583 @@ +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule +from mmcv.runner import BaseModule, force_fp32 + +from mmdet.core import build_bbox_coder, multi_apply, multiclass_nms +from mmdet.models.builder import HEADS, build_loss +from mmdet.models.losses import accuracy + + +@HEADS.register_module() +class SABLHead(BaseModule): + """Side-Aware Boundary Localization (SABL) for RoI-Head. + + Side-Aware features are extracted by conv layers + with an attention mechanism. 
+ Boundary Localization with Bucketing and Bucketing Guided Rescoring + are implemented in BucketingBBoxCoder. + + Please refer to https://arxiv.org/abs/1912.04260 for more details. + + Args: + cls_in_channels (int): Input channels of cls RoI feature. \ + Defaults to 256. + reg_in_channels (int): Input channels of reg RoI feature. \ + Defaults to 256. + roi_feat_size (int): Size of RoI features. Defaults to 7. + reg_feat_up_ratio (int): Upsample ratio of reg features. \ + Defaults to 2. + reg_pre_kernel (int): Kernel of 2D conv layers before \ + attention pooling. Defaults to 3. + reg_post_kernel (int): Kernel of 1D conv layers after \ + attention pooling. Defaults to 3. + reg_pre_num (int): Number of pre convs. Defaults to 2. + reg_post_num (int): Number of post convs. Defaults to 1. + num_classes (int): Number of classes in dataset. Defaults to 80. + cls_out_channels (int): Hidden channels in cls fcs. Defaults to 1024. + reg_offset_out_channels (int): Hidden and output channel \ + of reg offset branch. Defaults to 256. + reg_cls_out_channels (int): Hidden and output channel \ + of reg cls branch. Defaults to 256. + num_cls_fcs (int): Number of fcs for cls branch. Defaults to 1. + num_reg_fcs (int): Number of fcs for reg branch. Defaults to 0. + reg_class_agnostic (bool): Class agnostic regression or not. \ + Defaults to True. + norm_cfg (dict): Config of norm layers. Defaults to None. + bbox_coder (dict): Config of bbox coder. Defaults to 'BucketingBBoxCoder'. + loss_cls (dict): Config of classification loss. + loss_bbox_cls (dict): Config of classification loss for bbox branch. + loss_bbox_reg (dict): Config of regression loss for bbox branch. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + num_classes, + cls_in_channels=256, + reg_in_channels=256, + roi_feat_size=7, + reg_feat_up_ratio=2, + reg_pre_kernel=3, + reg_post_kernel=3, + reg_pre_num=2, + reg_post_num=1, + cls_out_channels=1024, + reg_offset_out_channels=256, + reg_cls_out_channels=256, + num_cls_fcs=1, + num_reg_fcs=0, + reg_class_agnostic=True, + norm_cfg=None, + bbox_coder=dict( + type='BucketingBBoxCoder', + num_buckets=14, + scale_factor=1.7), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + loss_weight=1.0), + loss_bbox_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=True, + loss_weight=1.0), + loss_bbox_reg=dict( + type='SmoothL1Loss', beta=0.1, loss_weight=1.0), + init_cfg=None): + super(SABLHead, self).__init__(init_cfg) + self.cls_in_channels = cls_in_channels + self.reg_in_channels = reg_in_channels + self.roi_feat_size = roi_feat_size + self.reg_feat_up_ratio = int(reg_feat_up_ratio) + self.num_buckets = bbox_coder['num_buckets'] + assert self.reg_feat_up_ratio // 2 >= 1 + self.up_reg_feat_size = roi_feat_size * self.reg_feat_up_ratio + assert self.up_reg_feat_size == bbox_coder['num_buckets'] + self.reg_pre_kernel = reg_pre_kernel + self.reg_post_kernel = reg_post_kernel + self.reg_pre_num = reg_pre_num + self.reg_post_num = reg_post_num + self.num_classes = num_classes + self.cls_out_channels = cls_out_channels + self.reg_offset_out_channels = reg_offset_out_channels + self.reg_cls_out_channels = reg_cls_out_channels + self.num_cls_fcs = num_cls_fcs + self.num_reg_fcs = num_reg_fcs + self.reg_class_agnostic = reg_class_agnostic + assert self.reg_class_agnostic + self.norm_cfg = norm_cfg + + self.bbox_coder = build_bbox_coder(bbox_coder) + self.loss_cls = build_loss(loss_cls) + self.loss_bbox_cls = build_loss(loss_bbox_cls) +
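Note: the bucket arithmetic enforced by the asserts in __init__, spelled out with the default config (plain numbers, no mmdet needed):

import numpy as np

roi_feat_size, reg_feat_up_ratio = 7, 2
num_buckets = 14                                       # from the BucketingBBoxCoder cfg
up_reg_feat_size = roi_feat_size * reg_feat_up_ratio   # 14, must equal num_buckets
assert up_reg_feat_size == num_buckets
side_num = int(np.ceil(num_buckets / 2))               # 7 buckets per image side
print(up_reg_feat_size, side_num)                      # 14 7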
self.loss_bbox_reg = build_loss(loss_bbox_reg) + + self.cls_fcs = self._add_fc_branch(self.num_cls_fcs, + self.cls_in_channels, + self.roi_feat_size, + self.cls_out_channels) + + self.side_num = int(np.ceil(self.num_buckets / 2)) + + if self.reg_feat_up_ratio > 1: + self.upsample_x = nn.ConvTranspose1d( + reg_in_channels, + reg_in_channels, + self.reg_feat_up_ratio, + stride=self.reg_feat_up_ratio) + self.upsample_y = nn.ConvTranspose1d( + reg_in_channels, + reg_in_channels, + self.reg_feat_up_ratio, + stride=self.reg_feat_up_ratio) + + self.reg_pre_convs = nn.ModuleList() + for i in range(self.reg_pre_num): + reg_pre_conv = ConvModule( + reg_in_channels, + reg_in_channels, + kernel_size=reg_pre_kernel, + padding=reg_pre_kernel // 2, + norm_cfg=norm_cfg, + act_cfg=dict(type='ReLU')) + self.reg_pre_convs.append(reg_pre_conv) + + self.reg_post_conv_xs = nn.ModuleList() + for i in range(self.reg_post_num): + reg_post_conv_x = ConvModule( + reg_in_channels, + reg_in_channels, + kernel_size=(1, reg_post_kernel), + padding=(0, reg_post_kernel // 2), + norm_cfg=norm_cfg, + act_cfg=dict(type='ReLU')) + self.reg_post_conv_xs.append(reg_post_conv_x) + self.reg_post_conv_ys = nn.ModuleList() + for i in range(self.reg_post_num): + reg_post_conv_y = ConvModule( + reg_in_channels, + reg_in_channels, + kernel_size=(reg_post_kernel, 1), + padding=(reg_post_kernel // 2, 0), + norm_cfg=norm_cfg, + act_cfg=dict(type='ReLU')) + self.reg_post_conv_ys.append(reg_post_conv_y) + + self.reg_conv_att_x = nn.Conv2d(reg_in_channels, 1, 1) + self.reg_conv_att_y = nn.Conv2d(reg_in_channels, 1, 1) + + self.fc_cls = nn.Linear(self.cls_out_channels, self.num_classes + 1) + self.relu = nn.ReLU(inplace=True) + + self.reg_cls_fcs = self._add_fc_branch(self.num_reg_fcs, + self.reg_in_channels, 1, + self.reg_cls_out_channels) + self.reg_offset_fcs = self._add_fc_branch(self.num_reg_fcs, + self.reg_in_channels, 1, + self.reg_offset_out_channels) + self.fc_reg_cls = nn.Linear(self.reg_cls_out_channels, 1) + self.fc_reg_offset = nn.Linear(self.reg_offset_out_channels, 1) + + if init_cfg is None: + self.init_cfg = [ + dict( + type='Xavier', + layer='Linear', + distribution='uniform', + override=[ + dict(type='Normal', name='reg_conv_att_x', std=0.01), + dict(type='Normal', name='reg_conv_att_y', std=0.01), + dict(type='Normal', name='fc_reg_cls', std=0.01), + dict(type='Normal', name='fc_cls', std=0.01), + dict(type='Normal', name='fc_reg_offset', std=0.001) + ]) + ] + if self.reg_feat_up_ratio > 1: + self.init_cfg += [ + dict( + type='Kaiming', + distribution='normal', + override=[ + dict(name='upsample_x'), + dict(name='upsample_y') + ]) + ] + + def _add_fc_branch(self, num_branch_fcs, in_channels, roi_feat_size, + fc_out_channels): + in_channels = in_channels * roi_feat_size * roi_feat_size + branch_fcs = nn.ModuleList() + for i in range(num_branch_fcs): + fc_in_channels = (in_channels if i == 0 else fc_out_channels) + branch_fcs.append(nn.Linear(fc_in_channels, fc_out_channels)) + return branch_fcs + + def cls_forward(self, cls_x): + cls_x = cls_x.view(cls_x.size(0), -1) + for fc in self.cls_fcs: + cls_x = self.relu(fc(cls_x)) + cls_score = self.fc_cls(cls_x) + return cls_score + + def attention_pool(self, reg_x): + """Extract direction-specific features fx and fy with attention + methanism.""" + reg_fx = reg_x + reg_fy = reg_x + reg_fx_att = self.reg_conv_att_x(reg_fx).sigmoid() + reg_fy_att = self.reg_conv_att_y(reg_fy).sigmoid() + reg_fx_att = reg_fx_att / reg_fx_att.sum(dim=2).unsqueeze(2) + reg_fy_att = reg_fy_att / 
reg_fy_att.sum(dim=3).unsqueeze(3) + reg_fx = (reg_fx * reg_fx_att).sum(dim=2) + reg_fy = (reg_fy * reg_fy_att).sum(dim=3) + return reg_fx, reg_fy + + def side_aware_feature_extractor(self, reg_x): + """Refine and extract side-aware features without split them.""" + for reg_pre_conv in self.reg_pre_convs: + reg_x = reg_pre_conv(reg_x) + reg_fx, reg_fy = self.attention_pool(reg_x) + + if self.reg_post_num > 0: + reg_fx = reg_fx.unsqueeze(2) + reg_fy = reg_fy.unsqueeze(3) + for i in range(self.reg_post_num): + reg_fx = self.reg_post_conv_xs[i](reg_fx) + reg_fy = self.reg_post_conv_ys[i](reg_fy) + reg_fx = reg_fx.squeeze(2) + reg_fy = reg_fy.squeeze(3) + if self.reg_feat_up_ratio > 1: + reg_fx = self.relu(self.upsample_x(reg_fx)) + reg_fy = self.relu(self.upsample_y(reg_fy)) + reg_fx = torch.transpose(reg_fx, 1, 2) + reg_fy = torch.transpose(reg_fy, 1, 2) + return reg_fx.contiguous(), reg_fy.contiguous() + + def reg_pred(self, x, offset_fcs, cls_fcs): + """Predict bucketing estimation (cls_pred) and fine regression (offset + pred) with side-aware features.""" + x_offset = x.view(-1, self.reg_in_channels) + x_cls = x.view(-1, self.reg_in_channels) + + for fc in offset_fcs: + x_offset = self.relu(fc(x_offset)) + for fc in cls_fcs: + x_cls = self.relu(fc(x_cls)) + offset_pred = self.fc_reg_offset(x_offset) + cls_pred = self.fc_reg_cls(x_cls) + + offset_pred = offset_pred.view(x.size(0), -1) + cls_pred = cls_pred.view(x.size(0), -1) + + return offset_pred, cls_pred + + def side_aware_split(self, feat): + """Split side-aware features aligned with orders of bucketing + targets.""" + l_end = int(np.ceil(self.up_reg_feat_size / 2)) + r_start = int(np.floor(self.up_reg_feat_size / 2)) + feat_fl = feat[:, :l_end] + feat_fr = feat[:, r_start:].flip(dims=(1, )) + feat_fl = feat_fl.contiguous() + feat_fr = feat_fr.contiguous() + feat = torch.cat([feat_fl, feat_fr], dim=-1) + return feat + + def bbox_pred_split(self, bbox_pred, num_proposals_per_img): + """Split batch bbox prediction back to each image.""" + bucket_cls_preds, bucket_offset_preds = bbox_pred + bucket_cls_preds = bucket_cls_preds.split(num_proposals_per_img, 0) + bucket_offset_preds = bucket_offset_preds.split( + num_proposals_per_img, 0) + bbox_pred = tuple(zip(bucket_cls_preds, bucket_offset_preds)) + return bbox_pred + + def reg_forward(self, reg_x): + outs = self.side_aware_feature_extractor(reg_x) + edge_offset_preds = [] + edge_cls_preds = [] + reg_fx = outs[0] + reg_fy = outs[1] + offset_pred_x, cls_pred_x = self.reg_pred(reg_fx, self.reg_offset_fcs, + self.reg_cls_fcs) + offset_pred_y, cls_pred_y = self.reg_pred(reg_fy, self.reg_offset_fcs, + self.reg_cls_fcs) + offset_pred_x = self.side_aware_split(offset_pred_x) + offset_pred_y = self.side_aware_split(offset_pred_y) + cls_pred_x = self.side_aware_split(cls_pred_x) + cls_pred_y = self.side_aware_split(cls_pred_y) + edge_offset_preds = torch.cat([offset_pred_x, offset_pred_y], dim=-1) + edge_cls_preds = torch.cat([cls_pred_x, cls_pred_y], dim=-1) + + return (edge_cls_preds, edge_offset_preds) + + def forward(self, x): + + bbox_pred = self.reg_forward(x) + cls_score = self.cls_forward(x) + + return cls_score, bbox_pred + + def get_targets(self, sampling_results, gt_bboxes, gt_labels, + rcnn_train_cfg): + pos_proposals = [res.pos_bboxes for res in sampling_results] + neg_proposals = [res.neg_bboxes for res in sampling_results] + pos_gt_bboxes = [res.pos_gt_bboxes for res in sampling_results] + pos_gt_labels = [res.pos_gt_labels for res in sampling_results] + cls_reg_targets = 
self.bucket_target(pos_proposals, neg_proposals, + pos_gt_bboxes, pos_gt_labels, + rcnn_train_cfg) + (labels, label_weights, bucket_cls_targets, bucket_cls_weights, + bucket_offset_targets, bucket_offset_weights) = cls_reg_targets + return (labels, label_weights, (bucket_cls_targets, + bucket_offset_targets), + (bucket_cls_weights, bucket_offset_weights)) + + def bucket_target(self, + pos_proposals_list, + neg_proposals_list, + pos_gt_bboxes_list, + pos_gt_labels_list, + rcnn_train_cfg, + concat=True): + (labels, label_weights, bucket_cls_targets, bucket_cls_weights, + bucket_offset_targets, bucket_offset_weights) = multi_apply( + self._bucket_target_single, + pos_proposals_list, + neg_proposals_list, + pos_gt_bboxes_list, + pos_gt_labels_list, + cfg=rcnn_train_cfg) + + if concat: + labels = torch.cat(labels, 0) + label_weights = torch.cat(label_weights, 0) + bucket_cls_targets = torch.cat(bucket_cls_targets, 0) + bucket_cls_weights = torch.cat(bucket_cls_weights, 0) + bucket_offset_targets = torch.cat(bucket_offset_targets, 0) + bucket_offset_weights = torch.cat(bucket_offset_weights, 0) + return (labels, label_weights, bucket_cls_targets, bucket_cls_weights, + bucket_offset_targets, bucket_offset_weights) + + def _bucket_target_single(self, pos_proposals, neg_proposals, + pos_gt_bboxes, pos_gt_labels, cfg): + """Compute bucketing estimation targets and fine regression targets for + a single image. + + Args: + pos_proposals (Tensor): positive proposals of a single image, + Shape (n_pos, 4). + neg_proposals (Tensor): negative proposals of a single image, + Shape (n_neg, 4). + pos_gt_bboxes (Tensor): gt bboxes assigned to positive proposals + of a single image, Shape (n_pos, 4). + pos_gt_labels (Tensor): gt labels assigned to positive proposals + of a single image, Shape (n_pos, ). + cfg (dict): Config for calculating targets. + + Returns: + tuple: + + - labels (Tensor): Labels in a single image. \ + Shape (n,). + - label_weights (Tensor): Label weights in a single image.\ + Shape (n,). + - bucket_cls_targets (Tensor): Bucket cls targets in \ + a single image. Shape (n, num_buckets*2). + - bucket_cls_weights (Tensor): Bucket cls weights in \ + a single image. Shape (n, num_buckets*2). + - bucket_offset_targets (Tensor): Bucket offset targets \ + in a single image. Shape (n, num_buckets*2). + - bucket_offset_weights (Tensor): Bucket offset weights \ + in a single image. Shape (n, num_buckets*2).
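Note: for intuition about how features line up with these bucketing targets, here is a standalone re-run of the `side_aware_split` indexing defined earlier — the right half is flipped so both halves count buckets outward from their own image side:

import numpy as np
import torch

K = 14                                       # up_reg_feat_size == num_buckets
feat = torch.arange(K, dtype=torch.float32).unsqueeze(0)
l_end = int(np.ceil(K / 2))                  # 7
r_start = int(np.floor(K / 2))               # 7
left = feat[:, :l_end]                       # buckets 0..6 from the left edge
right = feat[:, r_start:].flip(dims=(1, ))   # buckets 13..7, flipped
print(torch.cat([left, right], dim=-1))
# tensor([[ 0.,  1.,  2.,  3.,  4.,  5.,  6., 13., 12., 11., 10.,  9.,  8.,  7.]])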
+ """ + num_pos = pos_proposals.size(0) + num_neg = neg_proposals.size(0) + num_samples = num_pos + num_neg + labels = pos_gt_bboxes.new_full((num_samples, ), + self.num_classes, + dtype=torch.long) + label_weights = pos_proposals.new_zeros(num_samples) + bucket_cls_targets = pos_proposals.new_zeros(num_samples, + 4 * self.side_num) + bucket_cls_weights = pos_proposals.new_zeros(num_samples, + 4 * self.side_num) + bucket_offset_targets = pos_proposals.new_zeros( + num_samples, 4 * self.side_num) + bucket_offset_weights = pos_proposals.new_zeros( + num_samples, 4 * self.side_num) + if num_pos > 0: + labels[:num_pos] = pos_gt_labels + label_weights[:num_pos] = 1.0 + (pos_bucket_offset_targets, pos_bucket_offset_weights, + pos_bucket_cls_targets, + pos_bucket_cls_weights) = self.bbox_coder.encode( + pos_proposals, pos_gt_bboxes) + bucket_cls_targets[:num_pos, :] = pos_bucket_cls_targets + bucket_cls_weights[:num_pos, :] = pos_bucket_cls_weights + bucket_offset_targets[:num_pos, :] = pos_bucket_offset_targets + bucket_offset_weights[:num_pos, :] = pos_bucket_offset_weights + if num_neg > 0: + label_weights[-num_neg:] = 1.0 + return (labels, label_weights, bucket_cls_targets, bucket_cls_weights, + bucket_offset_targets, bucket_offset_weights) + + def loss(self, + cls_score, + bbox_pred, + rois, + labels, + label_weights, + bbox_targets, + bbox_weights, + reduction_override=None): + losses = dict() + if cls_score is not None: + avg_factor = max(torch.sum(label_weights > 0).float().item(), 1.) + losses['loss_cls'] = self.loss_cls( + cls_score, + labels, + label_weights, + avg_factor=avg_factor, + reduction_override=reduction_override) + losses['acc'] = accuracy(cls_score, labels) + + if bbox_pred is not None: + bucket_cls_preds, bucket_offset_preds = bbox_pred + bucket_cls_targets, bucket_offset_targets = bbox_targets + bucket_cls_weights, bucket_offset_weights = bbox_weights + # edge cls + bucket_cls_preds = bucket_cls_preds.view(-1, self.side_num) + bucket_cls_targets = bucket_cls_targets.view(-1, self.side_num) + bucket_cls_weights = bucket_cls_weights.view(-1, self.side_num) + losses['loss_bbox_cls'] = self.loss_bbox_cls( + bucket_cls_preds, + bucket_cls_targets, + bucket_cls_weights, + avg_factor=bucket_cls_targets.size(0), + reduction_override=reduction_override) + + losses['loss_bbox_reg'] = self.loss_bbox_reg( + bucket_offset_preds, + bucket_offset_targets, + bucket_offset_weights, + avg_factor=bucket_offset_targets.size(0), + reduction_override=reduction_override) + + return losses + + @force_fp32(apply_to=('cls_score', 'bbox_pred')) + def get_bboxes(self, + rois, + cls_score, + bbox_pred, + img_shape, + scale_factor, + rescale=False, + cfg=None): + if isinstance(cls_score, list): + cls_score = sum(cls_score) / float(len(cls_score)) + scores = F.softmax(cls_score, dim=1) if cls_score is not None else None + + if bbox_pred is not None: + bboxes, confids = self.bbox_coder.decode(rois[:, 1:], bbox_pred, + img_shape) + else: + bboxes = rois[:, 1:].clone() + confids = None + if img_shape is not None: + bboxes[:, [0, 2]].clamp_(min=0, max=img_shape[1] - 1) + bboxes[:, [1, 3]].clamp_(min=0, max=img_shape[0] - 1) + + if rescale and bboxes.size(0) > 0: + if isinstance(scale_factor, float): + bboxes /= scale_factor + else: + bboxes /= torch.from_numpy(scale_factor).to(bboxes.device) + + if cfg is None: + return bboxes, scores + else: + det_bboxes, det_labels = multiclass_nms( + bboxes, + scores, + cfg.score_thr, + cfg.nms, + cfg.max_per_img, + score_factors=confids) + + return det_bboxes, 
det_labels + + @force_fp32(apply_to=('bbox_preds', )) + def refine_bboxes(self, rois, labels, bbox_preds, pos_is_gts, img_metas): + """Refine bboxes during training. + + Args: + rois (Tensor): Shape (n*bs, 5), where n is image number per GPU, + and bs is the sampled RoIs per image. + labels (Tensor): Shape (n*bs, ). + bbox_preds (list[Tensor]): Shape [(n*bs, num_buckets*2), \ + (n*bs, num_buckets*2)]. + pos_is_gts (list[Tensor]): Flags indicating if each positive bbox + is a gt bbox. + img_metas (list[dict]): Meta info of each image. + + Returns: + list[Tensor]: Refined bboxes of each image in a mini-batch. + """ + img_ids = rois[:, 0].long().unique(sorted=True) + assert img_ids.numel() == len(img_metas) + + bboxes_list = [] + for i in range(len(img_metas)): + inds = torch.nonzero( + rois[:, 0] == i, as_tuple=False).squeeze(dim=1) + num_rois = inds.numel() + + bboxes_ = rois[inds, 1:] + label_ = labels[inds] + edge_cls_preds, edge_offset_preds = bbox_preds + edge_cls_preds_ = edge_cls_preds[inds] + edge_offset_preds_ = edge_offset_preds[inds] + bbox_pred_ = [edge_cls_preds_, edge_offset_preds_] + img_meta_ = img_metas[i] + pos_is_gts_ = pos_is_gts[i] + + bboxes = self.regress_by_class(bboxes_, label_, bbox_pred_, + img_meta_) + # filter gt bboxes + pos_keep = 1 - pos_is_gts_ + keep_inds = pos_is_gts_.new_ones(num_rois) + keep_inds[:len(pos_is_gts_)] = pos_keep + + bboxes_list.append(bboxes[keep_inds.type(torch.bool)]) + + return bboxes_list + + @force_fp32(apply_to=('bbox_pred', )) + def regress_by_class(self, rois, label, bbox_pred, img_meta): + """Regress the bbox for the predicted class. Used in Cascade R-CNN. + + Args: + rois (Tensor): shape (n, 4) or (n, 5) + label (Tensor): shape (n, ) + bbox_pred (list[Tensor]): shape [(n, num_buckets *2), \ + (n, num_buckets *2)] + img_meta (dict): Image meta info. + + Returns: + Tensor: Regressed bboxes, the same shape as input rois. + """ + assert rois.size(1) == 4 or rois.size(1) == 5 + + if rois.size(1) == 4: + new_rois, _ = self.bbox_coder.decode(rois, bbox_pred, + img_meta['img_shape']) + else: + bboxes, _ = self.bbox_coder.decode(rois[:, 1:], bbox_pred, + img_meta['img_shape']) + new_rois = torch.cat((rois[:, [0]], bboxes), dim=1) + + return new_rois diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/scnet_bbox_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/scnet_bbox_head.py new file mode 100644 index 0000000000000000000000000000000000000000..35758f4f4e3b2bddd460edb8a7f482b3a9da2919 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/bbox_heads/scnet_bbox_head.py @@ -0,0 +1,76 @@ +from mmdet.models.builder import HEADS +from .convfc_bbox_head import ConvFCBBoxHead + + +@HEADS.register_module() +class SCNetBBoxHead(ConvFCBBoxHead): + """BBox head for `SCNet <https://arxiv.org/abs/2012.10150>`_. + + This inherits ``ConvFCBBoxHead`` with a modified forward() function, + allowing us to get the intermediate shared feature.
+ """ + + def _forward_shared(self, x): + """Forward function for shared part.""" + if self.num_shared_convs > 0: + for conv in self.shared_convs: + x = conv(x) + + if self.num_shared_fcs > 0: + if self.with_avg_pool: + x = self.avg_pool(x) + + x = x.flatten(1) + + for fc in self.shared_fcs: + x = self.relu(fc(x)) + + return x + + def _forward_cls_reg(self, x): + """Forward function for classification and regression parts.""" + x_cls = x + x_reg = x + + for conv in self.cls_convs: + x_cls = conv(x_cls) + if x_cls.dim() > 2: + if self.with_avg_pool: + x_cls = self.avg_pool(x_cls) + x_cls = x_cls.flatten(1) + for fc in self.cls_fcs: + x_cls = self.relu(fc(x_cls)) + + for conv in self.reg_convs: + x_reg = conv(x_reg) + if x_reg.dim() > 2: + if self.with_avg_pool: + x_reg = self.avg_pool(x_reg) + x_reg = x_reg.flatten(1) + for fc in self.reg_fcs: + x_reg = self.relu(fc(x_reg)) + + cls_score = self.fc_cls(x_cls) if self.with_cls else None + bbox_pred = self.fc_reg(x_reg) if self.with_reg else None + + return cls_score, bbox_pred + + def forward(self, x, return_shared_feat=False): + """Forward function. + + Args: + x (Tensor): input features + return_shared_feat (bool): If True, return cls-reg-shared feature. + + Return: + out (tuple[Tensor]): contain ``cls_score`` and ``bbox_pred``, + if ``return_shared_feat`` is True, append ``x_shared`` to the + returned tuple. + """ + x_shared = self._forward_shared(x) + out = self._forward_cls_reg(x_shared) + + if return_shared_feat: + out += (x_shared, ) + + return out diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/cascade_roi_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/cascade_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..d0dc040c257e20b7a9c614096754082c35ebb79e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/cascade_roi_head.py @@ -0,0 +1,507 @@ +import numpy as np +import torch +import torch.nn as nn +from mmcv.runner import ModuleList + +from mmdet.core import (bbox2result, bbox2roi, bbox_mapping, build_assigner, + build_sampler, merge_aug_bboxes, merge_aug_masks, + multiclass_nms) +from ..builder import HEADS, build_head, build_roi_extractor +from .base_roi_head import BaseRoIHead +from .test_mixins import BBoxTestMixin, MaskTestMixin + + +@HEADS.register_module() +class CascadeRoIHead(BaseRoIHead, BBoxTestMixin, MaskTestMixin): + """Cascade roi head including one bbox head and one mask head. + + https://arxiv.org/abs/1712.00726 + """ + + def __init__(self, + num_stages, + stage_loss_weights, + bbox_roi_extractor=None, + bbox_head=None, + mask_roi_extractor=None, + mask_head=None, + shared_head=None, + train_cfg=None, + test_cfg=None, + pretrained=None, + init_cfg=None): + assert bbox_roi_extractor is not None + assert bbox_head is not None + assert shared_head is None, \ + 'Shared head is not supported in Cascade RCNN anymore' + + self.num_stages = num_stages + self.stage_loss_weights = stage_loss_weights + super(CascadeRoIHead, self).__init__( + bbox_roi_extractor=bbox_roi_extractor, + bbox_head=bbox_head, + mask_roi_extractor=mask_roi_extractor, + mask_head=mask_head, + shared_head=shared_head, + train_cfg=train_cfg, + test_cfg=test_cfg, + pretrained=pretrained, + init_cfg=init_cfg) + + def init_bbox_head(self, bbox_roi_extractor, bbox_head): + """Initialize box head and box roi extractor. + + Args: + bbox_roi_extractor (dict): Config of box roi extractor. + bbox_head (dict): Config of box in box head. 
+ """ + self.bbox_roi_extractor = ModuleList() + self.bbox_head = ModuleList() + if not isinstance(bbox_roi_extractor, list): + bbox_roi_extractor = [ + bbox_roi_extractor for _ in range(self.num_stages) + ] + if not isinstance(bbox_head, list): + bbox_head = [bbox_head for _ in range(self.num_stages)] + assert len(bbox_roi_extractor) == len(bbox_head) == self.num_stages + for roi_extractor, head in zip(bbox_roi_extractor, bbox_head): + self.bbox_roi_extractor.append(build_roi_extractor(roi_extractor)) + self.bbox_head.append(build_head(head)) + + def init_mask_head(self, mask_roi_extractor, mask_head): + """Initialize mask head and mask roi extractor. + + Args: + mask_roi_extractor (dict): Config of mask roi extractor. + mask_head (dict): Config of mask in mask head. + """ + self.mask_head = nn.ModuleList() + if not isinstance(mask_head, list): + mask_head = [mask_head for _ in range(self.num_stages)] + assert len(mask_head) == self.num_stages + for head in mask_head: + self.mask_head.append(build_head(head)) + if mask_roi_extractor is not None: + self.share_roi_extractor = False + self.mask_roi_extractor = ModuleList() + if not isinstance(mask_roi_extractor, list): + mask_roi_extractor = [ + mask_roi_extractor for _ in range(self.num_stages) + ] + assert len(mask_roi_extractor) == self.num_stages + for roi_extractor in mask_roi_extractor: + self.mask_roi_extractor.append( + build_roi_extractor(roi_extractor)) + else: + self.share_roi_extractor = True + self.mask_roi_extractor = self.bbox_roi_extractor + + def init_assigner_sampler(self): + """Initialize assigner and sampler for each stage.""" + self.bbox_assigner = [] + self.bbox_sampler = [] + if self.train_cfg is not None: + for idx, rcnn_train_cfg in enumerate(self.train_cfg): + self.bbox_assigner.append( + build_assigner(rcnn_train_cfg.assigner)) + self.current_stage = idx + self.bbox_sampler.append( + build_sampler(rcnn_train_cfg.sampler, context=self)) + + def forward_dummy(self, x, proposals): + """Dummy forward function.""" + # bbox head + outs = () + rois = bbox2roi([proposals]) + if self.with_bbox: + for i in range(self.num_stages): + bbox_results = self._bbox_forward(i, x, rois) + outs = outs + (bbox_results['cls_score'], + bbox_results['bbox_pred']) + # mask heads + if self.with_mask: + mask_rois = rois[:100] + for i in range(self.num_stages): + mask_results = self._mask_forward(i, x, mask_rois) + outs = outs + (mask_results['mask_pred'], ) + return outs + + def _bbox_forward(self, stage, x, rois): + """Box head forward function used in both training and testing.""" + bbox_roi_extractor = self.bbox_roi_extractor[stage] + bbox_head = self.bbox_head[stage] + bbox_feats = bbox_roi_extractor(x[:bbox_roi_extractor.num_inputs], + rois) + # do not support caffe_c4 model anymore + cls_score, bbox_pred = bbox_head(bbox_feats) + + bbox_results = dict( + cls_score=cls_score, bbox_pred=bbox_pred, bbox_feats=bbox_feats) + return bbox_results + + def _bbox_forward_train(self, stage, x, sampling_results, gt_bboxes, + gt_labels, rcnn_train_cfg): + """Run forward function and calculate loss for box head in training.""" + rois = bbox2roi([res.bboxes for res in sampling_results]) + bbox_results = self._bbox_forward(stage, x, rois) + bbox_targets = self.bbox_head[stage].get_targets( + sampling_results, gt_bboxes, gt_labels, rcnn_train_cfg) + loss_bbox = self.bbox_head[stage].loss(bbox_results['cls_score'], + bbox_results['bbox_pred'], rois, + *bbox_targets) + + bbox_results.update( + loss_bbox=loss_bbox, rois=rois, bbox_targets=bbox_targets) + 
return bbox_results + + def _mask_forward(self, stage, x, rois): + """Mask head forward function used in both training and testing.""" + mask_roi_extractor = self.mask_roi_extractor[stage] + mask_head = self.mask_head[stage] + mask_feats = mask_roi_extractor(x[:mask_roi_extractor.num_inputs], + rois) + # do not support caffe_c4 model anymore + mask_pred = mask_head(mask_feats) + + mask_results = dict(mask_pred=mask_pred) + return mask_results + + def _mask_forward_train(self, + stage, + x, + sampling_results, + gt_masks, + rcnn_train_cfg, + bbox_feats=None): + """Run forward function and calculate loss for mask head in + training.""" + pos_rois = bbox2roi([res.pos_bboxes for res in sampling_results]) + mask_results = self._mask_forward(stage, x, pos_rois) + + mask_targets = self.mask_head[stage].get_targets( + sampling_results, gt_masks, rcnn_train_cfg) + pos_labels = torch.cat([res.pos_gt_labels for res in sampling_results]) + loss_mask = self.mask_head[stage].loss(mask_results['mask_pred'], + mask_targets, pos_labels) + + mask_results.update(loss_mask=loss_mask) + return mask_results + + def forward_train(self, + x, + img_metas, + proposal_list, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None): + """ + Args: + x (list[Tensor]): list of multi-level img features. + img_metas (list[dict]): list of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + proposals (list[Tensors]): list of region proposals. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + gt_masks (None | Tensor) : true segmentation masks for each box + used if the architecture supports a segmentation task. 
+ + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + losses = dict() + for i in range(self.num_stages): + self.current_stage = i + rcnn_train_cfg = self.train_cfg[i] + lw = self.stage_loss_weights[i] + + # assign gts and sample proposals + sampling_results = [] + if self.with_bbox or self.with_mask: + bbox_assigner = self.bbox_assigner[i] + bbox_sampler = self.bbox_sampler[i] + num_imgs = len(img_metas) + if gt_bboxes_ignore is None: + gt_bboxes_ignore = [None for _ in range(num_imgs)] + + for j in range(num_imgs): + assign_result = bbox_assigner.assign( + proposal_list[j], gt_bboxes[j], gt_bboxes_ignore[j], + gt_labels[j]) + sampling_result = bbox_sampler.sample( + assign_result, + proposal_list[j], + gt_bboxes[j], + gt_labels[j], + feats=[lvl_feat[j][None] for lvl_feat in x]) + sampling_results.append(sampling_result) + + # bbox head forward and loss + bbox_results = self._bbox_forward_train(i, x, sampling_results, + gt_bboxes, gt_labels, + rcnn_train_cfg) + + for name, value in bbox_results['loss_bbox'].items(): + losses[f's{i}.{name}'] = ( + value * lw if 'loss' in name else value) + + # mask head forward and loss + if self.with_mask: + mask_results = self._mask_forward_train( + i, x, sampling_results, gt_masks, rcnn_train_cfg, + bbox_results['bbox_feats']) + for name, value in mask_results['loss_mask'].items(): + losses[f's{i}.{name}'] = ( + value * lw if 'loss' in name else value) + + # refine bboxes + if i < self.num_stages - 1: + pos_is_gts = [res.pos_is_gt for res in sampling_results] + # bbox_targets is a tuple + roi_labels = bbox_results['bbox_targets'][0] + with torch.no_grad(): + cls_score = bbox_results['cls_score'] + if self.bbox_head[i].custom_activation: + cls_score = self.bbox_head[i].loss_cls.get_activation( + cls_score) + roi_labels = torch.where( + roi_labels == self.bbox_head[i].num_classes, + cls_score[:, :-1].argmax(1), roi_labels) + proposal_list = self.bbox_head[i].refine_bboxes( + bbox_results['rois'], roi_labels, + bbox_results['bbox_pred'], pos_is_gts, img_metas) + + return losses + + def simple_test(self, x, proposal_list, img_metas, rescale=False): + """Test without augmentation.""" + assert self.with_bbox, 'Bbox head must be implemented.' 
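+ # Cascade inference sketch (illustrative shapes, e.g. 1000 proposals + # and 80 classes): every stage scores the shared RoIs (cls_score per + # stage: (1000, 81)), each stage but the last re-regresses the RoIs + # for the next one, and the per-stage scores are averaged before the + # final decode/NMS in get_bboxes() below.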
+ num_imgs = len(proposal_list) + img_shapes = tuple(meta['img_shape'] for meta in img_metas) + ori_shapes = tuple(meta['ori_shape'] for meta in img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + + # "ms" in variable names means multi-stage + ms_bbox_result = {} + ms_segm_result = {} + ms_scores = [] + rcnn_test_cfg = self.test_cfg + + rois = bbox2roi(proposal_list) + for i in range(self.num_stages): + bbox_results = self._bbox_forward(i, x, rois) + + # split batch bbox prediction back to each image + cls_score = bbox_results['cls_score'] + bbox_pred = bbox_results['bbox_pred'] + num_proposals_per_img = tuple( + len(proposals) for proposals in proposal_list) + rois = rois.split(num_proposals_per_img, 0) + cls_score = cls_score.split(num_proposals_per_img, 0) + if isinstance(bbox_pred, torch.Tensor): + bbox_pred = bbox_pred.split(num_proposals_per_img, 0) + else: + bbox_pred = self.bbox_head[i].bbox_pred_split( + bbox_pred, num_proposals_per_img) + ms_scores.append(cls_score) + + if i < self.num_stages - 1: + if self.bbox_head[i].custom_activation: + cls_score = [ + self.bbox_head[i].loss_cls.get_activation(s) + for s in cls_score + ] + bbox_label = [s[:, :-1].argmax(dim=1) for s in cls_score] + rois = torch.cat([ + self.bbox_head[i].regress_by_class(rois[j], bbox_label[j], + bbox_pred[j], + img_metas[j]) + for j in range(num_imgs) + ]) + + # average scores of each image by stages + cls_score = [ + sum([score[i] for score in ms_scores]) / float(len(ms_scores)) + for i in range(num_imgs) + ] + + # apply bbox post-processing to each image individually + det_bboxes = [] + det_labels = [] + for i in range(num_imgs): + det_bbox, det_label = self.bbox_head[-1].get_bboxes( + rois[i], + cls_score[i], + bbox_pred[i], + img_shapes[i], + scale_factors[i], + rescale=rescale, + cfg=rcnn_test_cfg) + det_bboxes.append(det_bbox) + det_labels.append(det_label) + + if torch.onnx.is_in_onnx_export(): + return det_bboxes, det_labels + bbox_results = [ + bbox2result(det_bboxes[i], det_labels[i], + self.bbox_head[-1].num_classes) + for i in range(num_imgs) + ] + ms_bbox_result['ensemble'] = bbox_results + + if self.with_mask: + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + mask_classes = self.mask_head[-1].num_classes + segm_results = [[[] for _ in range(mask_classes)] + for _ in range(num_imgs)] + else: + if rescale and not isinstance(scale_factors[0], float): + scale_factors = [ + torch.from_numpy(scale_factor).to(det_bboxes[0].device) + for scale_factor in scale_factors + ] + _bboxes = [ + det_bboxes[i][:, :4] * + scale_factors[i] if rescale else det_bboxes[i][:, :4] + for i in range(len(det_bboxes)) + ] + mask_rois = bbox2roi(_bboxes) + num_mask_rois_per_img = tuple( + _bbox.size(0) for _bbox in _bboxes) + aug_masks = [] + for i in range(self.num_stages): + mask_results = self._mask_forward(i, x, mask_rois) + mask_pred = mask_results['mask_pred'] + # split batch mask prediction back to each image + mask_pred = mask_pred.split(num_mask_rois_per_img, 0) + aug_masks.append( + [m.sigmoid().cpu().numpy() for m in mask_pred]) + + # apply mask post-processing to each image individually + segm_results = [] + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + segm_results.append( + [[] + for _ in range(self.mask_head[-1].num_classes)]) + else: + aug_mask = [mask[i] for mask in aug_masks] + merged_masks = merge_aug_masks( + aug_mask, [[img_metas[i]]] * self.num_stages, + rcnn_test_cfg) + segm_result = self.mask_head[-1].get_seg_masks( + merged_masks, _bboxes[i], 
det_labels[i], + rcnn_test_cfg, ori_shapes[i], scale_factors[i], + rescale) + segm_results.append(segm_result) + ms_segm_result['ensemble'] = segm_results + + if self.with_mask: + results = list( + zip(ms_bbox_result['ensemble'], ms_segm_result['ensemble'])) + else: + results = ms_bbox_result['ensemble'] + + return results + + def aug_test(self, features, proposal_list, img_metas, rescale=False): + """Test with augmentations. + + If rescale is False, then returned bboxes and masks will fit the scale + of imgs[0]. + """ + rcnn_test_cfg = self.test_cfg + aug_bboxes = [] + aug_scores = [] + for x, img_meta in zip(features, img_metas): + # only one image in the batch + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + flip_direction = img_meta[0]['flip_direction'] + + proposals = bbox_mapping(proposal_list[0][:, :4], img_shape, + scale_factor, flip, flip_direction) + # "ms" in variable names means multi-stage + ms_scores = [] + + rois = bbox2roi([proposals]) + for i in range(self.num_stages): + bbox_results = self._bbox_forward(i, x, rois) + ms_scores.append(bbox_results['cls_score']) + + if i < self.num_stages - 1: + cls_score = bbox_results['cls_score'] + if self.bbox_head[i].custom_activation: + cls_score = self.bbox_head[i].loss_cls.get_activation( + cls_score) + bbox_label = cls_score[:, :-1].argmax(dim=1) + rois = self.bbox_head[i].regress_by_class( + rois, bbox_label, bbox_results['bbox_pred'], + img_meta[0]) + + cls_score = sum(ms_scores) / float(len(ms_scores)) + bboxes, scores = self.bbox_head[-1].get_bboxes( + rois, + cls_score, + bbox_results['bbox_pred'], + img_shape, + scale_factor, + rescale=False, + cfg=None) + aug_bboxes.append(bboxes) + aug_scores.append(scores) + + # after merging, bboxes will be rescaled to the original image size + merged_bboxes, merged_scores = merge_aug_bboxes( + aug_bboxes, aug_scores, img_metas, rcnn_test_cfg) + det_bboxes, det_labels = multiclass_nms(merged_bboxes, merged_scores, + rcnn_test_cfg.score_thr, + rcnn_test_cfg.nms, + rcnn_test_cfg.max_per_img) + + bbox_result = bbox2result(det_bboxes, det_labels, + self.bbox_head[-1].num_classes) + + if self.with_mask: + if det_bboxes.shape[0] == 0: + segm_result = [[] + for _ in range(self.mask_head[-1].num_classes)] + else: + aug_masks = [] + aug_img_metas = [] + for x, img_meta in zip(features, img_metas): + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + flip_direction = img_meta[0]['flip_direction'] + _bboxes = bbox_mapping(det_bboxes[:, :4], img_shape, + scale_factor, flip, flip_direction) + mask_rois = bbox2roi([_bboxes]) + for i in range(self.num_stages): + mask_results = self._mask_forward(i, x, mask_rois) + aug_masks.append( + mask_results['mask_pred'].sigmoid().cpu().numpy()) + aug_img_metas.append(img_meta) + merged_masks = merge_aug_masks(aug_masks, aug_img_metas, + self.test_cfg) + + ori_shape = img_metas[0][0]['ori_shape'] + dummy_scale_factor = np.ones(4) + segm_result = self.mask_head[-1].get_seg_masks( + merged_masks, + det_bboxes, + det_labels, + rcnn_test_cfg, + ori_shape, + scale_factor=dummy_scale_factor, + rescale=False) + return [(bbox_result, segm_result)] + else: + return [bbox_result] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/double_roi_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/double_roi_head.py new file mode 100644 index 
0000000000000000000000000000000000000000..a1aa6c8244a889fbbed312a89574c3e11be294f0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/double_roi_head.py @@ -0,0 +1,33 @@ +from ..builder import HEADS +from .standard_roi_head import StandardRoIHead + + +@HEADS.register_module() +class DoubleHeadRoIHead(StandardRoIHead): + """RoI head for Double Head RCNN. + + https://arxiv.org/abs/1904.06493 + """ + + def __init__(self, reg_roi_scale_factor, **kwargs): + super(DoubleHeadRoIHead, self).__init__(**kwargs) + self.reg_roi_scale_factor = reg_roi_scale_factor + + def _bbox_forward(self, x, rois): + """Box head forward function used in both training and testing.""" + bbox_cls_feats = self.bbox_roi_extractor( + x[:self.bbox_roi_extractor.num_inputs], rois) + bbox_reg_feats = self.bbox_roi_extractor( + x[:self.bbox_roi_extractor.num_inputs], + rois, + roi_scale_factor=self.reg_roi_scale_factor) + if self.with_shared_head: + bbox_cls_feats = self.shared_head(bbox_cls_feats) + bbox_reg_feats = self.shared_head(bbox_reg_feats) + cls_score, bbox_pred = self.bbox_head(bbox_cls_feats, bbox_reg_feats) + + bbox_results = dict( + cls_score=cls_score, + bbox_pred=bbox_pred, + bbox_feats=bbox_cls_feats) + return bbox_results diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/dynamic_roi_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/dynamic_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..89427a931f45f5a920c0e66fd88058bf9fa05f5c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/dynamic_roi_head.py @@ -0,0 +1,154 @@ +import numpy as np +import torch + +from mmdet.core import bbox2roi +from mmdet.models.losses import SmoothL1Loss +from ..builder import HEADS +from .standard_roi_head import StandardRoIHead + +EPS = 1e-15 + + +@HEADS.register_module() +class DynamicRoIHead(StandardRoIHead): + """RoI head for `Dynamic R-CNN <https://arxiv.org/abs/2004.06002>`_.""" + + def __init__(self, **kwargs): + super(DynamicRoIHead, self).__init__(**kwargs) + assert isinstance(self.bbox_head.loss_bbox, SmoothL1Loss) + # the IoU history of the past `update_iter_interval` iterations + self.iou_history = [] + # the beta history of the past `update_iter_interval` iterations + self.beta_history = [] + + def forward_train(self, + x, + img_metas, + proposal_list, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None): + """Forward function for training. + + Args: + x (list[Tensor]): list of multi-level img features. + + img_metas (list[dict]): list of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + + proposals (list[Tensors]): list of region proposals. + + gt_bboxes (list[Tensor]): each item is the ground-truth boxes for + each image in [tl_x, tl_y, br_x, br_y] format. + + gt_labels (list[Tensor]): class indices corresponding to each box + + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + + gt_masks (None | Tensor) : true segmentation masks for each box + used if the architecture supports a segmentation task.
+ + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # assign gts and sample proposals + if self.with_bbox or self.with_mask: + num_imgs = len(img_metas) + if gt_bboxes_ignore is None: + gt_bboxes_ignore = [None for _ in range(num_imgs)] + sampling_results = [] + cur_iou = [] + for i in range(num_imgs): + assign_result = self.bbox_assigner.assign( + proposal_list[i], gt_bboxes[i], gt_bboxes_ignore[i], + gt_labels[i]) + sampling_result = self.bbox_sampler.sample( + assign_result, + proposal_list[i], + gt_bboxes[i], + gt_labels[i], + feats=[lvl_feat[i][None] for lvl_feat in x]) + # record the `iou_topk`-th largest IoU in an image + iou_topk = min(self.train_cfg.dynamic_rcnn.iou_topk, + len(assign_result.max_overlaps)) + ious, _ = torch.topk(assign_result.max_overlaps, iou_topk) + cur_iou.append(ious[-1].item()) + sampling_results.append(sampling_result) + # average the current IoUs over images + cur_iou = np.mean(cur_iou) + self.iou_history.append(cur_iou) + + losses = dict() + # bbox head forward and loss + if self.with_bbox: + bbox_results = self._bbox_forward_train(x, sampling_results, + gt_bboxes, gt_labels, + img_metas) + losses.update(bbox_results['loss_bbox']) + + # mask head forward and loss + if self.with_mask: + mask_results = self._mask_forward_train(x, sampling_results, + bbox_results['bbox_feats'], + gt_masks, img_metas) + losses.update(mask_results['loss_mask']) + + # update IoU threshold and SmoothL1 beta + update_iter_interval = self.train_cfg.dynamic_rcnn.update_iter_interval + if len(self.iou_history) % update_iter_interval == 0: + new_iou_thr, new_beta = self.update_hyperparameters() + + return losses + + def _bbox_forward_train(self, x, sampling_results, gt_bboxes, gt_labels, + img_metas): + num_imgs = len(img_metas) + rois = bbox2roi([res.bboxes for res in sampling_results]) + bbox_results = self._bbox_forward(x, rois) + + bbox_targets = self.bbox_head.get_targets(sampling_results, gt_bboxes, + gt_labels, self.train_cfg) + # record the `beta_topk`-th smallest target + # `bbox_targets[2]` and `bbox_targets[3]` stand for bbox_targets + # and bbox_weights, respectively + pos_inds = bbox_targets[3][:, 0].nonzero().squeeze(1) + num_pos = len(pos_inds) + cur_target = bbox_targets[2][pos_inds, :2].abs().mean(dim=1) + beta_topk = min(self.train_cfg.dynamic_rcnn.beta_topk * num_imgs, + num_pos) + cur_target = torch.kthvalue(cur_target, beta_topk)[0].item() + self.beta_history.append(cur_target) + loss_bbox = self.bbox_head.loss(bbox_results['cls_score'], + bbox_results['bbox_pred'], rois, + *bbox_targets) + + bbox_results.update(loss_bbox=loss_bbox) + return bbox_results + + def update_hyperparameters(self): + """Update hyperparameters like IoU thresholds for assigner and beta for + SmoothL1 loss based on the training statistics. + + Returns: + tuple[float]: the updated ``iou_thr`` and ``beta``. 
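+ + For example (illustrative numbers): with ``iou_history = + [0.55, 0.62]`` and ``dynamic_rcnn.initial_iou = 0.4``, the new + threshold is ``max(0.4, mean([0.55, 0.62])) = 0.585``; ``beta`` + likewise becomes ``min(initial_beta, median(beta_history))`` + unless the median is close to zero.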
+ """ + new_iou_thr = max(self.train_cfg.dynamic_rcnn.initial_iou, + np.mean(self.iou_history)) + self.iou_history = [] + self.bbox_assigner.pos_iou_thr = new_iou_thr + self.bbox_assigner.neg_iou_thr = new_iou_thr + self.bbox_assigner.min_pos_iou = new_iou_thr + if (np.median(self.beta_history) < EPS): + # avoid 0 or too small value for new_beta + new_beta = self.bbox_head.loss_bbox.beta + else: + new_beta = min(self.train_cfg.dynamic_rcnn.initial_beta, + np.median(self.beta_history)) + self.beta_history = [] + self.bbox_head.loss_bbox.beta = new_beta + return new_iou_thr, new_beta diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/grid_roi_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/grid_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..0332b418f0bd7f3f5887485e2d1d23ac97679929 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/grid_roi_head.py @@ -0,0 +1,169 @@ +import numpy as np +import torch + +from mmdet.core import bbox2result, bbox2roi +from ..builder import HEADS, build_head, build_roi_extractor +from .standard_roi_head import StandardRoIHead + + +@HEADS.register_module() +class GridRoIHead(StandardRoIHead): + """Grid roi head for Grid R-CNN. + + https://arxiv.org/abs/1811.12030 + """ + + def __init__(self, grid_roi_extractor, grid_head, **kwargs): + assert grid_head is not None + super(GridRoIHead, self).__init__(**kwargs) + if grid_roi_extractor is not None: + self.grid_roi_extractor = build_roi_extractor(grid_roi_extractor) + self.share_roi_extractor = False + else: + self.share_roi_extractor = True + self.grid_roi_extractor = self.bbox_roi_extractor + self.grid_head = build_head(grid_head) + + def _random_jitter(self, sampling_results, img_metas, amplitude=0.15): + """Ramdom jitter positive proposals for training.""" + for sampling_result, img_meta in zip(sampling_results, img_metas): + bboxes = sampling_result.pos_bboxes + random_offsets = bboxes.new_empty(bboxes.shape[0], 4).uniform_( + -amplitude, amplitude) + # before jittering + cxcy = (bboxes[:, 2:4] + bboxes[:, :2]) / 2 + wh = (bboxes[:, 2:4] - bboxes[:, :2]).abs() + # after jittering + new_cxcy = cxcy + wh * random_offsets[:, :2] + new_wh = wh * (1 + random_offsets[:, 2:]) + # xywh to xyxy + new_x1y1 = (new_cxcy - new_wh / 2) + new_x2y2 = (new_cxcy + new_wh / 2) + new_bboxes = torch.cat([new_x1y1, new_x2y2], dim=1) + # clip bboxes + max_shape = img_meta['img_shape'] + if max_shape is not None: + new_bboxes[:, 0::2].clamp_(min=0, max=max_shape[1] - 1) + new_bboxes[:, 1::2].clamp_(min=0, max=max_shape[0] - 1) + + sampling_result.pos_bboxes = new_bboxes + return sampling_results + + def forward_dummy(self, x, proposals): + """Dummy forward function.""" + # bbox head + outs = () + rois = bbox2roi([proposals]) + if self.with_bbox: + bbox_results = self._bbox_forward(x, rois) + outs = outs + (bbox_results['cls_score'], + bbox_results['bbox_pred']) + + # grid head + grid_rois = rois[:100] + grid_feats = self.grid_roi_extractor( + x[:self.grid_roi_extractor.num_inputs], grid_rois) + if self.with_shared_head: + grid_feats = self.shared_head(grid_feats) + grid_pred = self.grid_head(grid_feats) + outs = outs + (grid_pred, ) + + # mask head + if self.with_mask: + mask_rois = rois[:100] + mask_results = self._mask_forward(x, mask_rois) + outs = outs + (mask_results['mask_pred'], ) + return outs + + def _bbox_forward_train(self, x, sampling_results, gt_bboxes, gt_labels, + img_metas): + """Run forward 
function and calculate loss for box head in training.""" + bbox_results = super(GridRoIHead, + self)._bbox_forward_train(x, sampling_results, + gt_bboxes, gt_labels, + img_metas) + + # Grid head forward and loss + sampling_results = self._random_jitter(sampling_results, img_metas) + pos_rois = bbox2roi([res.pos_bboxes for res in sampling_results]) + + # GN in head does not support zero shape input + if pos_rois.shape[0] == 0: + return bbox_results + + grid_feats = self.grid_roi_extractor( + x[:self.grid_roi_extractor.num_inputs], pos_rois) + if self.with_shared_head: + grid_feats = self.shared_head(grid_feats) + # Accelerate training + max_sample_num_grid = self.train_cfg.get('max_num_grid', 192) + sample_idx = torch.randperm( + grid_feats.shape[0])[:min(grid_feats.shape[0], max_sample_num_grid + )] + grid_feats = grid_feats[sample_idx] + + grid_pred = self.grid_head(grid_feats) + + grid_targets = self.grid_head.get_targets(sampling_results, + self.train_cfg) + grid_targets = grid_targets[sample_idx] + + loss_grid = self.grid_head.loss(grid_pred, grid_targets) + + bbox_results['loss_bbox'].update(loss_grid) + return bbox_results + + def simple_test(self, + x, + proposal_list, + img_metas, + proposals=None, + rescale=False): + """Test without augmentation.""" + assert self.with_bbox, 'Bbox head must be implemented.' + + det_bboxes, det_labels = self.simple_test_bboxes( + x, img_metas, proposal_list, self.test_cfg, rescale=False) + # pack rois into bboxes + grid_rois = bbox2roi([det_bbox[:, :4] for det_bbox in det_bboxes]) + if grid_rois.shape[0] != 0: + grid_feats = self.grid_roi_extractor( + x[:len(self.grid_roi_extractor.featmap_strides)], grid_rois) + self.grid_head.test_mode = True + grid_pred = self.grid_head(grid_feats) + # split batch grid head prediction back to each image + num_roi_per_img = tuple(len(det_bbox) for det_bbox in det_bboxes) + grid_pred = { + k: v.split(num_roi_per_img, 0) + for k, v in grid_pred.items() + } + + # apply bbox post-processing to each image individually + bbox_results = [] + num_imgs = len(det_bboxes) + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + bbox_results.append([ + np.zeros((0, 5), dtype=np.float32) + for _ in range(self.bbox_head.num_classes) + ]) + else: + det_bbox = self.grid_head.get_bboxes( + det_bboxes[i], grid_pred['fused'][i], [img_metas[i]]) + if rescale: + det_bbox[:, :4] /= img_metas[i]['scale_factor'] + bbox_results.append( + bbox2result(det_bbox, det_labels[i], + self.bbox_head.num_classes)) + else: + bbox_results = [[ + np.zeros((0, 5), dtype=np.float32) + for _ in range(self.bbox_head.num_classes) + ] for _ in range(len(det_bboxes))] + + if not self.with_mask: + return bbox_results + else: + segm_results = self.simple_test_mask( + x, img_metas, det_bboxes, det_labels, rescale=rescale) + return list(zip(bbox_results, segm_results)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/htc_roi_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/htc_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..f655ed41a7e5ac0a282fd59dec653371b796113c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/htc_roi_head.py @@ -0,0 +1,577 @@ +import torch +import torch.nn.functional as F + +from mmdet.core import (bbox2result, bbox2roi, bbox_mapping, merge_aug_bboxes, + merge_aug_masks, multiclass_nms) +from ..builder import HEADS, build_head, build_roi_extractor +from .cascade_roi_head import CascadeRoIHead + + 
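+# HTC extends the plain cascade in three configurable ways: (1) an optional +# semantic branch whose pooled features are summed into the box/mask RoI +# features (see `semantic_fusion`), (2) interleaved execution, where stage-i +# boxes are refined before the stage-i mask head is trained, and (3) mask +# information flow, where each mask head receives the previous stage's mask +# features as `last_feat`.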
+@HEADS.register_module() +class HybridTaskCascadeRoIHead(CascadeRoIHead): + """Hybrid task cascade roi head including one bbox head and one mask head. + + https://arxiv.org/abs/1901.07518 + """ + + def __init__(self, + num_stages, + stage_loss_weights, + semantic_roi_extractor=None, + semantic_head=None, + semantic_fusion=('bbox', 'mask'), + interleaved=True, + mask_info_flow=True, + **kwargs): + super(HybridTaskCascadeRoIHead, + self).__init__(num_stages, stage_loss_weights, **kwargs) + assert self.with_bbox + assert not self.with_shared_head # shared head is not supported + + if semantic_head is not None: + self.semantic_roi_extractor = build_roi_extractor( + semantic_roi_extractor) + self.semantic_head = build_head(semantic_head) + + self.semantic_fusion = semantic_fusion + self.interleaved = interleaved + self.mask_info_flow = mask_info_flow + + @property + def with_semantic(self): + """bool: whether the head has semantic head""" + if hasattr(self, 'semantic_head') and self.semantic_head is not None: + return True + else: + return False + + def forward_dummy(self, x, proposals): + """Dummy forward function.""" + outs = () + # semantic head + if self.with_semantic: + _, semantic_feat = self.semantic_head(x) + else: + semantic_feat = None + # bbox heads + rois = bbox2roi([proposals]) + for i in range(self.num_stages): + bbox_results = self._bbox_forward( + i, x, rois, semantic_feat=semantic_feat) + outs = outs + (bbox_results['cls_score'], + bbox_results['bbox_pred']) + # mask heads + if self.with_mask: + mask_rois = rois[:100] + mask_roi_extractor = self.mask_roi_extractor[-1] + mask_feats = mask_roi_extractor( + x[:len(mask_roi_extractor.featmap_strides)], mask_rois) + if self.with_semantic and 'mask' in self.semantic_fusion: + mask_semantic_feat = self.semantic_roi_extractor( + [semantic_feat], mask_rois) + mask_feats += mask_semantic_feat + last_feat = None + for i in range(self.num_stages): + mask_head = self.mask_head[i] + if self.mask_info_flow: + mask_pred, last_feat = mask_head(mask_feats, last_feat) + else: + mask_pred = mask_head(mask_feats) + outs = outs + (mask_pred, ) + return outs + + def _bbox_forward_train(self, + stage, + x, + sampling_results, + gt_bboxes, + gt_labels, + rcnn_train_cfg, + semantic_feat=None): + """Run forward function and calculate loss for box head in training.""" + bbox_head = self.bbox_head[stage] + rois = bbox2roi([res.bboxes for res in sampling_results]) + bbox_results = self._bbox_forward( + stage, x, rois, semantic_feat=semantic_feat) + + bbox_targets = bbox_head.get_targets(sampling_results, gt_bboxes, + gt_labels, rcnn_train_cfg) + loss_bbox = bbox_head.loss(bbox_results['cls_score'], + bbox_results['bbox_pred'], rois, + *bbox_targets) + + bbox_results.update( + loss_bbox=loss_bbox, + rois=rois, + bbox_targets=bbox_targets, + ) + return bbox_results + + def _mask_forward_train(self, + stage, + x, + sampling_results, + gt_masks, + rcnn_train_cfg, + semantic_feat=None): + """Run forward function and calculate loss for mask head in + training.""" + mask_roi_extractor = self.mask_roi_extractor[stage] + mask_head = self.mask_head[stage] + pos_rois = bbox2roi([res.pos_bboxes for res in sampling_results]) + mask_feats = mask_roi_extractor(x[:mask_roi_extractor.num_inputs], + pos_rois) + + # semantic feature fusion + # element-wise sum for original features and pooled semantic features + if self.with_semantic and 'mask' in self.semantic_fusion: + mask_semantic_feat = self.semantic_roi_extractor([semantic_feat], + pos_rois) + if 
mask_semantic_feat.shape[-2:] != mask_feats.shape[-2:]: + mask_semantic_feat = F.adaptive_avg_pool2d( + mask_semantic_feat, mask_feats.shape[-2:]) + mask_feats += mask_semantic_feat + + # mask information flow + # forward all previous mask heads to obtain last_feat, and fuse it + # with the normal mask feature + if self.mask_info_flow: + last_feat = None + for i in range(stage): + last_feat = self.mask_head[i]( + mask_feats, last_feat, return_logits=False) + mask_pred = mask_head(mask_feats, last_feat, return_feat=False) + else: + mask_pred = mask_head(mask_feats, return_feat=False) + + mask_targets = mask_head.get_targets(sampling_results, gt_masks, + rcnn_train_cfg) + pos_labels = torch.cat([res.pos_gt_labels for res in sampling_results]) + loss_mask = mask_head.loss(mask_pred, mask_targets, pos_labels) + + mask_results = dict(loss_mask=loss_mask) + return mask_results + + def _bbox_forward(self, stage, x, rois, semantic_feat=None): + """Box head forward function used in both training and testing.""" + bbox_roi_extractor = self.bbox_roi_extractor[stage] + bbox_head = self.bbox_head[stage] + bbox_feats = bbox_roi_extractor( + x[:len(bbox_roi_extractor.featmap_strides)], rois) + if self.with_semantic and 'bbox' in self.semantic_fusion: + bbox_semantic_feat = self.semantic_roi_extractor([semantic_feat], + rois) + if bbox_semantic_feat.shape[-2:] != bbox_feats.shape[-2:]: + bbox_semantic_feat = F.adaptive_avg_pool2d( + bbox_semantic_feat, bbox_feats.shape[-2:]) + bbox_feats += bbox_semantic_feat + cls_score, bbox_pred = bbox_head(bbox_feats) + + bbox_results = dict(cls_score=cls_score, bbox_pred=bbox_pred) + return bbox_results + + def _mask_forward_test(self, stage, x, bboxes, semantic_feat=None): + """Mask head forward function for testing.""" + mask_roi_extractor = self.mask_roi_extractor[stage] + mask_head = self.mask_head[stage] + mask_rois = bbox2roi([bboxes]) + mask_feats = mask_roi_extractor( + x[:len(mask_roi_extractor.featmap_strides)], mask_rois) + if self.with_semantic and 'mask' in self.semantic_fusion: + mask_semantic_feat = self.semantic_roi_extractor([semantic_feat], + mask_rois) + if mask_semantic_feat.shape[-2:] != mask_feats.shape[-2:]: + mask_semantic_feat = F.adaptive_avg_pool2d( + mask_semantic_feat, mask_feats.shape[-2:]) + mask_feats += mask_semantic_feat + if self.mask_info_flow: + last_feat = None + last_pred = None + for i in range(stage): + mask_pred, last_feat = self.mask_head[i](mask_feats, last_feat) + if last_pred is not None: + mask_pred = mask_pred + last_pred + last_pred = mask_pred + mask_pred = mask_head(mask_feats, last_feat, return_feat=False) + if last_pred is not None: + mask_pred = mask_pred + last_pred + else: + mask_pred = mask_head(mask_feats) + return mask_pred + + def forward_train(self, + x, + img_metas, + proposal_list, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None, + gt_semantic_seg=None): + """ + Args: + x (list[Tensor]): list of multi-level img features. + + img_metas (list[dict]): list of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + + proposal_list (list[Tensors]): list of region proposals. + + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. 
+ + gt_labels (list[Tensor]): class indices corresponding to each box + + gt_bboxes_ignore (None, list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + + gt_masks (None, Tensor) : true segmentation masks for each box + used if the architecture supports a segmentation task. + + gt_semantic_seg (None, list[Tensor]): semantic segmentation masks + used if the architecture supports semantic segmentation task. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # semantic segmentation part + # 2 outputs: segmentation prediction and embedded features + losses = dict() + if self.with_semantic: + semantic_pred, semantic_feat = self.semantic_head(x) + loss_seg = self.semantic_head.loss(semantic_pred, gt_semantic_seg) + losses['loss_semantic_seg'] = loss_seg + else: + semantic_feat = None + + for i in range(self.num_stages): + self.current_stage = i + rcnn_train_cfg = self.train_cfg[i] + lw = self.stage_loss_weights[i] + + # assign gts and sample proposals + sampling_results = [] + bbox_assigner = self.bbox_assigner[i] + bbox_sampler = self.bbox_sampler[i] + num_imgs = len(img_metas) + if gt_bboxes_ignore is None: + gt_bboxes_ignore = [None for _ in range(num_imgs)] + + for j in range(num_imgs): + assign_result = bbox_assigner.assign(proposal_list[j], + gt_bboxes[j], + gt_bboxes_ignore[j], + gt_labels[j]) + sampling_result = bbox_sampler.sample( + assign_result, + proposal_list[j], + gt_bboxes[j], + gt_labels[j], + feats=[lvl_feat[j][None] for lvl_feat in x]) + sampling_results.append(sampling_result) + + # bbox head forward and loss + bbox_results = \ + self._bbox_forward_train( + i, x, sampling_results, gt_bboxes, gt_labels, + rcnn_train_cfg, semantic_feat) + roi_labels = bbox_results['bbox_targets'][0] + + for name, value in bbox_results['loss_bbox'].items(): + losses[f's{i}.{name}'] = ( + value * lw if 'loss' in name else value) + + # mask head forward and loss + if self.with_mask: + # interleaved execution: use regressed bboxes by the box branch + # to train the mask branch + if self.interleaved: + pos_is_gts = [res.pos_is_gt for res in sampling_results] + with torch.no_grad(): + proposal_list = self.bbox_head[i].refine_bboxes( + bbox_results['rois'], roi_labels, + bbox_results['bbox_pred'], pos_is_gts, img_metas) + # re-assign and sample 512 RoIs from 512 RoIs + sampling_results = [] + for j in range(num_imgs): + assign_result = bbox_assigner.assign( + proposal_list[j], gt_bboxes[j], + gt_bboxes_ignore[j], gt_labels[j]) + sampling_result = bbox_sampler.sample( + assign_result, + proposal_list[j], + gt_bboxes[j], + gt_labels[j], + feats=[lvl_feat[j][None] for lvl_feat in x]) + sampling_results.append(sampling_result) + mask_results = self._mask_forward_train( + i, x, sampling_results, gt_masks, rcnn_train_cfg, + semantic_feat) + for name, value in mask_results['loss_mask'].items(): + losses[f's{i}.{name}'] = ( + value * lw if 'loss' in name else value) + + # refine bboxes (same as Cascade R-CNN) + if i < self.num_stages - 1 and not self.interleaved: + pos_is_gts = [res.pos_is_gt for res in sampling_results] + with torch.no_grad(): + proposal_list = self.bbox_head[i].refine_bboxes( + bbox_results['rois'], roi_labels, + bbox_results['bbox_pred'], pos_is_gts, img_metas) + + return losses + + def simple_test(self, x, proposal_list, img_metas, rescale=False): + """Test without augmentation.""" + if self.with_semantic: + _, semantic_feat = self.semantic_head(x) + else: + semantic_feat = None + + num_imgs = len(proposal_list) + img_shapes = 
tuple(meta['img_shape'] for meta in img_metas) + ori_shapes = tuple(meta['ori_shape'] for meta in img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + + # "ms" in variable names means multi-stage + ms_bbox_result = {} + ms_segm_result = {} + ms_scores = [] + rcnn_test_cfg = self.test_cfg + + rois = bbox2roi(proposal_list) + for i in range(self.num_stages): + bbox_head = self.bbox_head[i] + bbox_results = self._bbox_forward( + i, x, rois, semantic_feat=semantic_feat) + # split batch bbox prediction back to each image + cls_score = bbox_results['cls_score'] + bbox_pred = bbox_results['bbox_pred'] + num_proposals_per_img = tuple(len(p) for p in proposal_list) + rois = rois.split(num_proposals_per_img, 0) + cls_score = cls_score.split(num_proposals_per_img, 0) + bbox_pred = bbox_pred.split(num_proposals_per_img, 0) + ms_scores.append(cls_score) + + if i < self.num_stages - 1: + bbox_label = [s[:, :-1].argmax(dim=1) for s in cls_score] + rois = torch.cat([ + bbox_head.regress_by_class(rois[i], bbox_label[i], + bbox_pred[i], img_metas[i]) + for i in range(num_imgs) + ]) + + # average scores of each image by stages + cls_score = [ + sum([score[i] for score in ms_scores]) / float(len(ms_scores)) + for i in range(num_imgs) + ] + + # apply bbox post-processing to each image individually + det_bboxes = [] + det_labels = [] + for i in range(num_imgs): + det_bbox, det_label = self.bbox_head[-1].get_bboxes( + rois[i], + cls_score[i], + bbox_pred[i], + img_shapes[i], + scale_factors[i], + rescale=rescale, + cfg=rcnn_test_cfg) + det_bboxes.append(det_bbox) + det_labels.append(det_label) + bbox_result = [ + bbox2result(det_bboxes[i], det_labels[i], + self.bbox_head[-1].num_classes) + for i in range(num_imgs) + ] + ms_bbox_result['ensemble'] = bbox_result + + if self.with_mask: + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + mask_classes = self.mask_head[-1].num_classes + segm_results = [[[] for _ in range(mask_classes)] + for _ in range(num_imgs)] + else: + if rescale and not isinstance(scale_factors[0], float): + scale_factors = [ + torch.from_numpy(scale_factor).to(det_bboxes[0].device) + for scale_factor in scale_factors + ] + _bboxes = [ + det_bboxes[i][:, :4] * + scale_factors[i] if rescale else det_bboxes[i] + for i in range(num_imgs) + ] + mask_rois = bbox2roi(_bboxes) + aug_masks = [] + mask_roi_extractor = self.mask_roi_extractor[-1] + mask_feats = mask_roi_extractor( + x[:len(mask_roi_extractor.featmap_strides)], mask_rois) + if self.with_semantic and 'mask' in self.semantic_fusion: + mask_semantic_feat = self.semantic_roi_extractor( + [semantic_feat], mask_rois) + mask_feats += mask_semantic_feat + last_feat = None + + num_bbox_per_img = tuple(len(_bbox) for _bbox in _bboxes) + for i in range(self.num_stages): + mask_head = self.mask_head[i] + if self.mask_info_flow: + mask_pred, last_feat = mask_head(mask_feats, last_feat) + else: + mask_pred = mask_head(mask_feats) + + # split batch mask prediction back to each image + mask_pred = mask_pred.split(num_bbox_per_img, 0) + aug_masks.append( + [mask.sigmoid().cpu().numpy() for mask in mask_pred]) + + # apply mask post-processing to each image individually + segm_results = [] + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + segm_results.append( + [[] + for _ in range(self.mask_head[-1].num_classes)]) + else: + aug_mask = [mask[i] for mask in aug_masks] + merged_mask = merge_aug_masks( + aug_mask, [[img_metas[i]]] * self.num_stages, + rcnn_test_cfg) + segm_result = 
self.mask_head[-1].get_seg_masks( + merged_mask, _bboxes[i], det_labels[i], + rcnn_test_cfg, ori_shapes[i], scale_factors[i], + rescale) + segm_results.append(segm_result) + ms_segm_result['ensemble'] = segm_results + + if self.with_mask: + results = list( + zip(ms_bbox_result['ensemble'], ms_segm_result['ensemble'])) + else: + results = ms_bbox_result['ensemble'] + + return results + + def aug_test(self, img_feats, proposal_list, img_metas, rescale=False): + """Test with augmentations. + + If rescale is False, then returned bboxes and masks will fit the scale + of imgs[0]. + """ + if self.with_semantic: + semantic_feats = [ + self.semantic_head(feat)[1] for feat in img_feats + ] + else: + semantic_feats = [None] * len(img_metas) + + rcnn_test_cfg = self.test_cfg + aug_bboxes = [] + aug_scores = [] + for x, img_meta, semantic in zip(img_feats, img_metas, semantic_feats): + # only one image in the batch + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + flip_direction = img_meta[0]['flip_direction'] + + proposals = bbox_mapping(proposal_list[0][:, :4], img_shape, + scale_factor, flip, flip_direction) + # "ms" in variable names means multi-stage + ms_scores = [] + + rois = bbox2roi([proposals]) + for i in range(self.num_stages): + bbox_head = self.bbox_head[i] + bbox_results = self._bbox_forward( + i, x, rois, semantic_feat=semantic) + ms_scores.append(bbox_results['cls_score']) + + if i < self.num_stages - 1: + bbox_label = bbox_results['cls_score'].argmax(dim=1) + rois = bbox_head.regress_by_class( + rois, bbox_label, bbox_results['bbox_pred'], + img_meta[0]) + + cls_score = sum(ms_scores) / float(len(ms_scores)) + bboxes, scores = self.bbox_head[-1].get_bboxes( + rois, + cls_score, + bbox_results['bbox_pred'], + img_shape, + scale_factor, + rescale=False, + cfg=None) + aug_bboxes.append(bboxes) + aug_scores.append(scores) + + # after merging, bboxes will be rescaled to the original image size + merged_bboxes, merged_scores = merge_aug_bboxes( + aug_bboxes, aug_scores, img_metas, rcnn_test_cfg) + det_bboxes, det_labels = multiclass_nms(merged_bboxes, merged_scores, + rcnn_test_cfg.score_thr, + rcnn_test_cfg.nms, + rcnn_test_cfg.max_per_img) + + bbox_result = bbox2result(det_bboxes, det_labels, + self.bbox_head[-1].num_classes) + + if self.with_mask: + if det_bboxes.shape[0] == 0: + segm_result = [[] + for _ in range(self.mask_head[-1].num_classes)] + else: + aug_masks = [] + aug_img_metas = [] + for x, img_meta, semantic in zip(img_feats, img_metas, + semantic_feats): + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + flip_direction = img_meta[0]['flip_direction'] + _bboxes = bbox_mapping(det_bboxes[:, :4], img_shape, + scale_factor, flip, flip_direction) + mask_rois = bbox2roi([_bboxes]) + mask_feats = self.mask_roi_extractor[-1]( + x[:len(self.mask_roi_extractor[-1].featmap_strides)], + mask_rois) + if self.with_semantic: + semantic_feat = semantic + mask_semantic_feat = self.semantic_roi_extractor( + [semantic_feat], mask_rois) + if mask_semantic_feat.shape[-2:] != mask_feats.shape[ + -2:]: + mask_semantic_feat = F.adaptive_avg_pool2d( + mask_semantic_feat, mask_feats.shape[-2:]) + mask_feats += mask_semantic_feat + last_feat = None + for i in range(self.num_stages): + mask_head = self.mask_head[i] + if self.mask_info_flow: + mask_pred, last_feat = mask_head( + mask_feats, last_feat) + else: + mask_pred = mask_head(mask_feats) + 
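+ # logits -> probabilities via sigmoid, so that masks from all + # stages (and all augmentations) can be averaged in + # merge_aug_masks below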
aug_masks.append(mask_pred.sigmoid().cpu().numpy()) + aug_img_metas.append(img_meta) + merged_masks = merge_aug_masks(aug_masks, aug_img_metas, + self.test_cfg) + + ori_shape = img_metas[0][0]['ori_shape'] + segm_result = self.mask_head[-1].get_seg_masks( + merged_masks, + det_bboxes, + det_labels, + rcnn_test_cfg, + ori_shape, + scale_factor=1.0, + rescale=False) + return [(bbox_result, segm_result)] + else: + return [bbox_result] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..abfbe2624eecb73b029e9bcb7e2283bbf2a744ea --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/__init__.py @@ -0,0 +1,17 @@ +from .coarse_mask_head import CoarseMaskHead +from .fcn_mask_head import FCNMaskHead +from .feature_relay_head import FeatureRelayHead +from .fused_semantic_head import FusedSemanticHead +from .global_context_head import GlobalContextHead +from .grid_head import GridHead +from .htc_mask_head import HTCMaskHead +from .mask_point_head import MaskPointHead +from .maskiou_head import MaskIoUHead +from .scnet_mask_head import SCNetMaskHead +from .scnet_semantic_head import SCNetSemanticHead + +__all__ = [ + 'FCNMaskHead', 'HTCMaskHead', 'FusedSemanticHead', 'GridHead', + 'MaskIoUHead', 'CoarseMaskHead', 'MaskPointHead', 'SCNetMaskHead', + 'SCNetSemanticHead', 'GlobalContextHead', 'FeatureRelayHead' +] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/coarse_mask_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/coarse_mask_head.py new file mode 100644 index 0000000000000000000000000000000000000000..e58cae1b4c8cd587e73c08582e333ea21054534a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/coarse_mask_head.py @@ -0,0 +1,99 @@ +from mmcv.cnn import ConvModule, Linear +from mmcv.runner import ModuleList, auto_fp16 + +from mmdet.models.builder import HEADS +from .fcn_mask_head import FCNMaskHead + + +@HEADS.register_module() +class CoarseMaskHead(FCNMaskHead): + """Coarse mask head used in PointRend. + + Compared with standard ``FCNMaskHead``, ``CoarseMaskHead`` will downsample + the input feature map instead of upsample it. + + Args: + num_convs (int): Number of conv layers in the head. Default: 0. + num_fcs (int): Number of fc layers in the head. Default: 2. + fc_out_channels (int): Number of output channels of fc layer. + Default: 1024. + downsample_factor (int): The factor that feature map is downsampled by. + Default: 2. + init_cfg (dict or list[dict], optional): Initialization config dict. 
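+ + Note (illustrative): with the default ``downsample_factor=2`` and a + 14x14 RoI feature map, ``output_size`` becomes 7x7, so ``fc_logits`` + predicts ``num_classes * 49`` values per RoI.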
+ """ + + def __init__(self, + num_convs=0, + num_fcs=2, + fc_out_channels=1024, + downsample_factor=2, + init_cfg=dict( + type='Xavier', + override=[ + dict(name='fcs'), + dict(type='Constant', val=0.001, name='fc_logits') + ]), + *arg, + **kwarg): + super(CoarseMaskHead, self).__init__( + *arg, + num_convs=num_convs, + upsample_cfg=dict(type=None), + init_cfg=None, + **kwarg) + self.init_cfg = init_cfg + self.num_fcs = num_fcs + assert self.num_fcs > 0 + self.fc_out_channels = fc_out_channels + self.downsample_factor = downsample_factor + assert self.downsample_factor >= 1 + # remove conv_logit + delattr(self, 'conv_logits') + + if downsample_factor > 1: + downsample_in_channels = ( + self.conv_out_channels + if self.num_convs > 0 else self.in_channels) + self.downsample_conv = ConvModule( + downsample_in_channels, + self.conv_out_channels, + kernel_size=downsample_factor, + stride=downsample_factor, + padding=0, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg) + else: + self.downsample_conv = None + + self.output_size = (self.roi_feat_size[0] // downsample_factor, + self.roi_feat_size[1] // downsample_factor) + self.output_area = self.output_size[0] * self.output_size[1] + + last_layer_dim = self.conv_out_channels * self.output_area + + self.fcs = ModuleList() + for i in range(num_fcs): + fc_in_channels = ( + last_layer_dim if i == 0 else self.fc_out_channels) + self.fcs.append(Linear(fc_in_channels, self.fc_out_channels)) + last_layer_dim = self.fc_out_channels + output_channels = self.num_classes * self.output_area + self.fc_logits = Linear(last_layer_dim, output_channels) + + def init_weights(self): + super(FCNMaskHead, self).init_weights() + + @auto_fp16() + def forward(self, x): + for conv in self.convs: + x = conv(x) + + if self.downsample_conv is not None: + x = self.downsample_conv(x) + + x = x.flatten(1) + for fc in self.fcs: + x = self.relu(fc(x)) + mask_pred = self.fc_logits(x).view( + x.size(0), self.num_classes, *self.output_size) + return mask_pred diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/fcn_mask_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/fcn_mask_head.py new file mode 100644 index 0000000000000000000000000000000000000000..27e72f676e65d88804f49407716bf3aeb7122cda --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/fcn_mask_head.py @@ -0,0 +1,410 @@ +from warnings import warn + +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule, build_conv_layer, build_upsample_layer +from mmcv.ops.carafe import CARAFEPack +from mmcv.runner import BaseModule, ModuleList, auto_fp16, force_fp32 +from torch.nn.modules.utils import _pair + +from mmdet.core import mask_target +from mmdet.models.builder import HEADS, build_loss + +BYTES_PER_FLOAT = 4 +# TODO: This memory limit may be too much or too little. It would be better to +# determine it based on available resources. 
+GPU_MEM_LIMIT = 1024**3 # 1 GB memory limit + + +@HEADS.register_module() +class FCNMaskHead(BaseModule): + + def __init__(self, + num_convs=4, + roi_feat_size=14, + in_channels=256, + conv_kernel_size=3, + conv_out_channels=256, + num_classes=80, + class_agnostic=False, + upsample_cfg=dict(type='deconv', scale_factor=2), + conv_cfg=None, + norm_cfg=None, + predictor_cfg=dict(type='Conv'), + loss_mask=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0), + init_cfg=None): + assert init_cfg is None, 'To prevent abnormal initialization ' \ + 'behavior, init_cfg is not allowed to be set' + super(FCNMaskHead, self).__init__(init_cfg) + self.upsample_cfg = upsample_cfg.copy() + if self.upsample_cfg['type'] not in [ + None, 'deconv', 'nearest', 'bilinear', 'carafe' + ]: + raise ValueError( + f'Invalid upsample method {self.upsample_cfg["type"]}, ' + 'accepted methods are "deconv", "nearest", "bilinear", ' + '"carafe"') + self.num_convs = num_convs + # WARN: roi_feat_size is reserved and not used + self.roi_feat_size = _pair(roi_feat_size) + self.in_channels = in_channels + self.conv_kernel_size = conv_kernel_size + self.conv_out_channels = conv_out_channels + self.upsample_method = self.upsample_cfg.get('type') + self.scale_factor = self.upsample_cfg.pop('scale_factor', None) + self.num_classes = num_classes + self.class_agnostic = class_agnostic + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.predictor_cfg = predictor_cfg + self.fp16_enabled = False + self.loss_mask = build_loss(loss_mask) + + self.convs = ModuleList() + for i in range(self.num_convs): + in_channels = ( + self.in_channels if i == 0 else self.conv_out_channels) + padding = (self.conv_kernel_size - 1) // 2 + self.convs.append( + ConvModule( + in_channels, + self.conv_out_channels, + self.conv_kernel_size, + padding=padding, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg)) + upsample_in_channels = ( + self.conv_out_channels if self.num_convs > 0 else in_channels) + upsample_cfg_ = self.upsample_cfg.copy() + if self.upsample_method is None: + self.upsample = None + elif self.upsample_method == 'deconv': + upsample_cfg_.update( + in_channels=upsample_in_channels, + out_channels=self.conv_out_channels, + kernel_size=self.scale_factor, + stride=self.scale_factor) + self.upsample = build_upsample_layer(upsample_cfg_) + elif self.upsample_method == 'carafe': + upsample_cfg_.update( + channels=upsample_in_channels, scale_factor=self.scale_factor) + self.upsample = build_upsample_layer(upsample_cfg_) + else: + # suppress warnings + align_corners = (None + if self.upsample_method == 'nearest' else False) + upsample_cfg_.update( + scale_factor=self.scale_factor, + mode=self.upsample_method, + align_corners=align_corners) + self.upsample = build_upsample_layer(upsample_cfg_) + + out_channels = 1 if self.class_agnostic else self.num_classes + logits_in_channel = ( + self.conv_out_channels + if self.upsample_method == 'deconv' else upsample_in_channels) + self.conv_logits = build_conv_layer(self.predictor_cfg, + logits_in_channel, out_channels, 1) + self.relu = nn.ReLU(inplace=True) + self.debug_imgs = None + + def init_weights(self): + super(FCNMaskHead, self).init_weights() + for m in [self.upsample, self.conv_logits]: + if m is None: + continue + elif isinstance(m, CARAFEPack): + m.init_weights() + else: + nn.init.kaiming_normal_( + m.weight, mode='fan_out', nonlinearity='relu') + nn.init.constant_(m.bias, 0) + + @auto_fp16() + def forward(self, x): + for conv in self.convs: + x = conv(x) + if self.upsample is not None: 
+ x = self.upsample(x) + if self.upsample_method == 'deconv': + x = self.relu(x) + mask_pred = self.conv_logits(x) + return mask_pred + + def get_targets(self, sampling_results, gt_masks, rcnn_train_cfg): + pos_proposals = [res.pos_bboxes for res in sampling_results] + pos_assigned_gt_inds = [ + res.pos_assigned_gt_inds for res in sampling_results + ] + mask_targets = mask_target(pos_proposals, pos_assigned_gt_inds, + gt_masks, rcnn_train_cfg) + return mask_targets + + @force_fp32(apply_to=('mask_pred', )) + def loss(self, mask_pred, mask_targets, labels): + """ + Example: + >>> from mmdet.models.roi_heads.mask_heads.fcn_mask_head import * # NOQA + >>> N = 7 # N = number of extracted ROIs + >>> C, H, W = 11, 32, 32 + >>> # Create example instance of FCN Mask Head. + >>> # There are lots of variations depending on the configuration + >>> self = FCNMaskHead(num_classes=C, num_convs=1) + >>> inputs = torch.rand(N, self.in_channels, H, W) + >>> mask_pred = self.forward(inputs) + >>> sf = self.scale_factor + >>> labels = torch.randint(0, C, size=(N,)) + >>> # With the default properties the mask targets should indicate + >>> # a (potentially soft) single-class label + >>> mask_targets = torch.rand(N, H * sf, W * sf) + >>> loss = self.loss(mask_pred, mask_targets, labels) + >>> print('loss = {!r}'.format(loss)) + """ + loss = dict() + if mask_pred.size(0) == 0: + loss_mask = mask_pred.sum() + else: + if self.class_agnostic: + loss_mask = self.loss_mask(mask_pred, mask_targets, + torch.zeros_like(labels)) + else: + loss_mask = self.loss_mask(mask_pred, mask_targets, labels) + loss['loss_mask'] = loss_mask + return loss + + def get_seg_masks(self, mask_pred, det_bboxes, det_labels, rcnn_test_cfg, + ori_shape, scale_factor, rescale): + """Get segmentation masks from mask_pred and bboxes. + + Args: + mask_pred (Tensor or ndarray): shape (n, #class, h, w). + For single-scale testing, mask_pred is the direct output of + model, whose type is Tensor, while for multi-scale testing, + it will be converted to numpy array outside of this method. + det_bboxes (Tensor): shape (n, 4/5) + det_labels (Tensor): shape (n, ) + rcnn_test_cfg (dict): rcnn testing config + ori_shape (Tuple): original image height and width, shape (2,) + scale_factor(ndarray | Tensor): If ``rescale is True``, box + coordinates are divided by this scale factor to fit + ``ori_shape``. + rescale (bool): If True, the resulting masks will be rescaled to + ``ori_shape``. + + Returns: + list[list]: encoded masks. The c-th item in the outer list + corresponds to the c-th class. Given the c-th outer list, the + i-th item in that inner list is the mask for the i-th box with + class label c. + + Example: + >>> import mmcv + >>> from mmdet.models.roi_heads.mask_heads.fcn_mask_head import * # NOQA + >>> N = 7 # N = number of extracted ROIs + >>> C, H, W = 11, 32, 32 + >>> # Create example instance of FCN Mask Head. + >>> self = FCNMaskHead(num_classes=C, num_convs=0) + >>> inputs = torch.rand(N, self.in_channels, H, W) + >>> mask_pred = self.forward(inputs) + >>> # Each input is associated with some bounding box + >>> det_bboxes = torch.Tensor([[1, 1, 42, 42 ]] * N) + >>> det_labels = torch.randint(0, C, size=(N,)) + >>> rcnn_test_cfg = mmcv.Config({'mask_thr_binary': 0, }) + >>> ori_shape = (H * 4, W * 4) + >>> scale_factor = torch.FloatTensor((1, 1)) + >>> rescale = False + >>> # Encoded masks are a list for each category. 
+ >>> encoded_masks = self.get_seg_masks( + >>> mask_pred, det_bboxes, det_labels, rcnn_test_cfg, ori_shape, + >>> scale_factor, rescale + >>> ) + >>> assert len(encoded_masks) == C + >>> assert sum(list(map(len, encoded_masks))) == N + """ + if isinstance(mask_pred, torch.Tensor): + mask_pred = mask_pred.sigmoid() + else: + # In AugTest, has been activated before + mask_pred = det_bboxes.new_tensor(mask_pred) + + device = mask_pred.device + cls_segms = [[] for _ in range(self.num_classes) + ] # BG is not included in num_classes + bboxes = det_bboxes[:, :4] + labels = det_labels + + # In most cases, scale_factor should have been + # converted to Tensor when rescale the bbox + if not isinstance(scale_factor, torch.Tensor): + if isinstance(scale_factor, float): + scale_factor = np.array([scale_factor] * 4) + warn('Scale_factor should be a Tensor or ndarray ' + 'with shape (4,), float would be deprecated. ') + assert isinstance(scale_factor, np.ndarray) + scale_factor = torch.Tensor(scale_factor) + + if rescale: + img_h, img_w = ori_shape[:2] + bboxes = bboxes / scale_factor + else: + w_scale, h_scale = scale_factor[0], scale_factor[1] + img_h = np.round(ori_shape[0] * h_scale.item()).astype(np.int32) + img_w = np.round(ori_shape[1] * w_scale.item()).astype(np.int32) + + N = len(mask_pred) + # The actual implementation split the input into chunks, + # and paste them chunk by chunk. + if device.type == 'cpu': + # CPU is most efficient when they are pasted one by one with + # skip_empty=True, so that it performs minimal number of + # operations. + num_chunks = N + else: + # GPU benefits from parallelism for larger chunks, + # but may have memory issue + # the types of img_w and img_h are np.int32, + # when the image resolution is large, + # the calculation of num_chunks will overflow. + # so we neet to change the types of img_w and img_h to int. + # See https://github.com/open-mmlab/mmdetection/pull/5191 + num_chunks = int( + np.ceil(N * int(img_h) * int(img_w) * BYTES_PER_FLOAT / + GPU_MEM_LIMIT)) + assert (num_chunks <= + N), 'Default GPU_MEM_LIMIT is too small; try increasing it' + chunks = torch.chunk(torch.arange(N, device=device), num_chunks) + + threshold = rcnn_test_cfg.mask_thr_binary + im_mask = torch.zeros( + N, + img_h, + img_w, + device=device, + dtype=torch.bool if threshold >= 0 else torch.uint8) + + if not self.class_agnostic: + mask_pred = mask_pred[range(N), labels][:, None] + + for inds in chunks: + masks_chunk, spatial_inds = _do_paste_mask( + mask_pred[inds], + bboxes[inds], + img_h, + img_w, + skip_empty=device.type == 'cpu') + + if threshold >= 0: + masks_chunk = (masks_chunk >= threshold).to(dtype=torch.bool) + else: + # for visualization and debugging + masks_chunk = (masks_chunk * 255).to(dtype=torch.uint8) + + im_mask[(inds, ) + spatial_inds] = masks_chunk + + for i in range(N): + cls_segms[labels[i]].append(im_mask[i].detach().cpu().numpy()) + return cls_segms + + def onnx_export(self, mask_pred, det_bboxes, det_labels, rcnn_test_cfg, + ori_shape, **kwargs): + """Get segmentation masks from mask_pred and bboxes. + + Args: + mask_pred (Tensor): shape (n, #class, h, w). + det_bboxes (Tensor): shape (n, 4/5) + det_labels (Tensor): shape (n, ) + rcnn_test_cfg (dict): rcnn testing config + ori_shape (Tuple): original image height and width, shape (2,) + + Returns: + Tensor: a mask of shape (N, img_h, img_w). 
+ """ + + mask_pred = mask_pred.sigmoid() + bboxes = det_bboxes[:, :4] + labels = det_labels + # No need to consider rescale and scale_factor while exporting to ONNX + img_h, img_w = ori_shape[:2] + threshold = rcnn_test_cfg.mask_thr_binary + if not self.class_agnostic: + box_inds = torch.arange(mask_pred.shape[0]) + mask_pred = mask_pred[box_inds, labels][:, None] + masks, _ = _do_paste_mask( + mask_pred, bboxes, img_h, img_w, skip_empty=False) + if threshold >= 0: + masks = (masks >= threshold).to(dtype=torch.bool) + return masks + + +def _do_paste_mask(masks, boxes, img_h, img_w, skip_empty=True): + """Paste instance masks according to boxes. + + This implementation is modified from + https://github.com/facebookresearch/detectron2/ + + Args: + masks (Tensor): N, 1, H, W + boxes (Tensor): N, 4 + img_h (int): Height of the image to be pasted. + img_w (int): Width of the image to be pasted. + skip_empty (bool): Only paste masks within the region that + tightly bound all boxes, and returns the results this region only. + An important optimization for CPU. + + Returns: + tuple: (Tensor, tuple). The first item is mask tensor, the second one + is the slice object. + If skip_empty == False, the whole image will be pasted. It will + return a mask of shape (N, img_h, img_w) and an empty tuple. + If skip_empty == True, only area around the mask will be pasted. + A mask of shape (N, h', w') and its start and end coordinates + in the original image will be returned. + """ + # On GPU, paste all masks together (up to chunk size) + # by using the entire image to sample the masks + # Compared to pasting them one by one, + # this has more operations but is faster on COCO-scale dataset. + device = masks.device + if skip_empty: + x0_int, y0_int = torch.clamp( + boxes.min(dim=0).values.floor()[:2] - 1, + min=0).to(dtype=torch.int32) + x1_int = torch.clamp( + boxes[:, 2].max().ceil() + 1, max=img_w).to(dtype=torch.int32) + y1_int = torch.clamp( + boxes[:, 3].max().ceil() + 1, max=img_h).to(dtype=torch.int32) + else: + x0_int, y0_int = 0, 0 + x1_int, y1_int = img_w, img_h + x0, y0, x1, y1 = torch.split(boxes, 1, dim=1) # each is Nx1 + + N = masks.shape[0] + + img_y = torch.arange(y0_int, y1_int, device=device).to(torch.float32) + 0.5 + img_x = torch.arange(x0_int, x1_int, device=device).to(torch.float32) + 0.5 + img_y = (img_y - y0) / (y1 - y0) * 2 - 1 + img_x = (img_x - x0) / (x1 - x0) * 2 - 1 + # img_x, img_y have shapes (N, w), (N, h) + # IsInf op is not supported with ONNX<=1.7.0 + if not torch.onnx.is_in_onnx_export(): + if torch.isinf(img_x).any(): + inds = torch.where(torch.isinf(img_x)) + img_x[inds] = 0 + if torch.isinf(img_y).any(): + inds = torch.where(torch.isinf(img_y)) + img_y[inds] = 0 + + gx = img_x[:, None, :].expand(N, img_y.size(1), img_x.size(1)) + gy = img_y[:, :, None].expand(N, img_y.size(1), img_x.size(1)) + grid = torch.stack([gx, gy], dim=3) + + img_masks = F.grid_sample( + masks.to(dtype=torch.float32), grid, align_corners=False) + + if skip_empty: + return img_masks[:, 0], (slice(y0_int, y1_int), slice(x0_int, x1_int)) + else: + return img_masks[:, 0], () diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/feature_relay_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/feature_relay_head.py new file mode 100644 index 0000000000000000000000000000000000000000..b4cd38206ed7024a009b106eeb34ad298fdb8427 --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/feature_relay_head.py @@ -0,0 +1,52 @@ +import torch.nn as nn +from mmcv.runner import BaseModule, auto_fp16 + +from mmdet.models.builder import HEADS + + +@HEADS.register_module() +class FeatureRelayHead(BaseModule): + """Feature Relay Head used in `SCNet `_. + + Args: + in_channels (int, optional): number of input channels. Default: 256. + conv_out_channels (int, optional): number of output channels before + classification layer. Default: 256. + roi_feat_size (int, optional): roi feat size at box head. Default: 7. + scale_factor (int, optional): scale factor to match roi feat size + at mask head. Default: 2. + init_cfg (dict or list[dict], optional): Initialization config dict. + """ + + def __init__(self, + in_channels=1024, + out_conv_channels=256, + roi_feat_size=7, + scale_factor=2, + init_cfg=dict(type='Kaiming', layer='Linear')): + super(FeatureRelayHead, self).__init__(init_cfg) + assert isinstance(roi_feat_size, int) + + self.in_channels = in_channels + self.out_conv_channels = out_conv_channels + self.roi_feat_size = roi_feat_size + self.out_channels = (roi_feat_size**2) * out_conv_channels + self.scale_factor = scale_factor + self.fp16_enabled = False + + self.fc = nn.Linear(self.in_channels, self.out_channels) + self.upsample = nn.Upsample( + scale_factor=scale_factor, mode='bilinear', align_corners=True) + + @auto_fp16() + def forward(self, x): + """Forward function.""" + N, in_C = x.shape + if N > 0: + out_C = self.out_conv_channels + out_HW = self.roi_feat_size + x = self.fc(x) + x = x.reshape(N, out_C, out_HW, out_HW) + x = self.upsample(x) + return x + return None diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/fused_semantic_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/fused_semantic_head.py new file mode 100644 index 0000000000000000000000000000000000000000..85a64fb6a0007fae5f51cf140de4ec9ecb29476b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/fused_semantic_head.py @@ -0,0 +1,106 @@ +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule +from mmcv.runner import BaseModule, auto_fp16, force_fp32 + +from mmdet.models.builder import HEADS + + +@HEADS.register_module() +class FusedSemanticHead(BaseModule): + r"""Multi-level fused semantic segmentation head. + + .. 
code-block:: none + + in_1 -> 1x1 conv --- + | + in_2 -> 1x1 conv -- | + || + in_3 -> 1x1 conv - || + ||| /-> 1x1 conv (mask prediction) + in_4 -> 1x1 conv -----> 3x3 convs (*4) + | \-> 1x1 conv (feature) + in_5 -> 1x1 conv --- + """ # noqa: W605 + + def __init__(self, + num_ins, + fusion_level, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=183, + ignore_label=255, + loss_weight=0.2, + conv_cfg=None, + norm_cfg=None, + init_cfg=dict( + type='Kaiming', override=dict(name='conv_logits'))): + super(FusedSemanticHead, self).__init__(init_cfg) + self.num_ins = num_ins + self.fusion_level = fusion_level + self.num_convs = num_convs + self.in_channels = in_channels + self.conv_out_channels = conv_out_channels + self.num_classes = num_classes + self.ignore_label = ignore_label + self.loss_weight = loss_weight + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.fp16_enabled = False + + self.lateral_convs = nn.ModuleList() + for i in range(self.num_ins): + self.lateral_convs.append( + ConvModule( + self.in_channels, + self.in_channels, + 1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + inplace=False)) + + self.convs = nn.ModuleList() + for i in range(self.num_convs): + in_channels = self.in_channels if i == 0 else conv_out_channels + self.convs.append( + ConvModule( + in_channels, + conv_out_channels, + 3, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + self.conv_embedding = ConvModule( + conv_out_channels, + conv_out_channels, + 1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg) + self.conv_logits = nn.Conv2d(conv_out_channels, self.num_classes, 1) + + self.criterion = nn.CrossEntropyLoss(ignore_index=ignore_label) + + @auto_fp16() + def forward(self, feats): + x = self.lateral_convs[self.fusion_level](feats[self.fusion_level]) + fused_size = tuple(x.shape[-2:]) + for i, feat in enumerate(feats): + if i != self.fusion_level: + feat = F.interpolate( + feat, size=fused_size, mode='bilinear', align_corners=True) + x += self.lateral_convs[i](feat) + + for i in range(self.num_convs): + x = self.convs[i](x) + + mask_pred = self.conv_logits(x) + x = self.conv_embedding(x) + return mask_pred, x + + @force_fp32(apply_to=('mask_pred', )) + def loss(self, mask_pred, labels): + labels = labels.squeeze(1).long() + loss_semantic_seg = self.criterion(mask_pred, labels) + loss_semantic_seg *= self.loss_weight + return loss_semantic_seg diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/global_context_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/global_context_head.py new file mode 100644 index 0000000000000000000000000000000000000000..4f619940892322911b8d2338c750077cd48f8422 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/global_context_head.py @@ -0,0 +1,100 @@ +import torch.nn as nn +from mmcv.cnn import ConvModule +from mmcv.runner import BaseModule, auto_fp16, force_fp32 + +from mmdet.models.builder import HEADS +from mmdet.models.utils import ResLayer, SimplifiedBasicBlock + + +@HEADS.register_module() +class GlobalContextHead(BaseModule): + """Global context head used in `SCNet `_. + + Args: + num_convs (int, optional): number of convolutional layer in GlbCtxHead. + Default: 4. + in_channels (int, optional): number of input channels. Default: 256. + conv_out_channels (int, optional): number of output channels before + classification layer. Default: 256. + num_classes (int, optional): number of classes. 
Default: 80. + loss_weight (float, optional): global context loss weight. Default: 1. + conv_cfg (dict, optional): config to init conv layer. Default: None. + norm_cfg (dict, optional): config to init norm layer. Default: None. + conv_to_res (bool, optional): if True, 2 convs will be grouped into + 1 `SimplifiedBasicBlock` using a skip connection. Default: False. + init_cfg (dict or list[dict], optional): Initialization config dict. + """ + + def __init__(self, + num_convs=4, + in_channels=256, + conv_out_channels=256, + num_classes=80, + loss_weight=1.0, + conv_cfg=None, + norm_cfg=None, + conv_to_res=False, + init_cfg=dict( + type='Normal', std=0.01, override=dict(name='fc'))): + super(GlobalContextHead, self).__init__(init_cfg) + self.num_convs = num_convs + self.in_channels = in_channels + self.conv_out_channels = conv_out_channels + self.num_classes = num_classes + self.loss_weight = loss_weight + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.conv_to_res = conv_to_res + self.fp16_enabled = False + + if self.conv_to_res: + num_res_blocks = num_convs // 2 + self.convs = ResLayer( + SimplifiedBasicBlock, + in_channels, + self.conv_out_channels, + num_res_blocks, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg) + self.num_convs = num_res_blocks + else: + self.convs = nn.ModuleList() + for i in range(self.num_convs): + in_channels = self.in_channels if i == 0 else conv_out_channels + self.convs.append( + ConvModule( + in_channels, + conv_out_channels, + 3, + padding=1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg)) + + self.pool = nn.AdaptiveAvgPool2d(1) + self.fc = nn.Linear(conv_out_channels, num_classes) + + self.criterion = nn.BCEWithLogitsLoss() + + @auto_fp16() + def forward(self, feats): + """Forward function.""" + x = feats[-1] + for i in range(self.num_convs): + x = self.convs[i](x) + x = self.pool(x) + + # multi-class prediction + mc_pred = x.reshape(x.size(0), -1) + mc_pred = self.fc(mc_pred) + + return mc_pred, x + + @force_fp32(apply_to=('pred', )) + def loss(self, pred, labels): + """Loss function.""" + labels = [lbl.unique() for lbl in labels] + targets = pred.new_zeros(pred.size()) + for i, label in enumerate(labels): + targets[i, label] = 1.0 + loss = self.loss_weight * self.criterion(pred, targets) + return loss diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/grid_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/grid_head.py new file mode 100644 index 0000000000000000000000000000000000000000..2d6ef67d8ddd3f1a66ff5c2dcd1c46bd8e379b79 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/grid_head.py @@ -0,0 +1,362 @@ +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import ConvModule +from mmcv.runner import BaseModule + +from mmdet.models.builder import HEADS, build_loss + + +@HEADS.register_module() +class GridHead(BaseModule): + + def __init__(self, + grid_points=9, + num_convs=8, + roi_feat_size=14, + in_channels=256, + conv_kernel_size=3, + point_feat_channels=64, + deconv_kernel_size=4, + class_agnostic=False, + loss_grid=dict( + type='CrossEntropyLoss', use_sigmoid=True, + loss_weight=15), + conv_cfg=None, + norm_cfg=dict(type='GN', num_groups=36), + init_cfg=[ + dict(type='Kaiming', layer=['Conv2d', 'Linear']), + dict( + type='Normal', + layer='ConvTranspose2d', + std=0.001, + override=dict( + type='Normal', + name='deconv2', + std=0.001, + bias=-np.log(0.99 / 
0.01))) + ]): + super(GridHead, self).__init__(init_cfg) + self.grid_points = grid_points + self.num_convs = num_convs + self.roi_feat_size = roi_feat_size + self.in_channels = in_channels + self.conv_kernel_size = conv_kernel_size + self.point_feat_channels = point_feat_channels + self.conv_out_channels = self.point_feat_channels * self.grid_points + self.class_agnostic = class_agnostic + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + if isinstance(norm_cfg, dict) and norm_cfg['type'] == 'GN': + assert self.conv_out_channels % norm_cfg['num_groups'] == 0 + + assert self.grid_points >= 4 + self.grid_size = int(np.sqrt(self.grid_points)) + if self.grid_size * self.grid_size != self.grid_points: + raise ValueError('grid_points must be a square number') + + # the predicted heatmap is half of whole_map_size + if not isinstance(self.roi_feat_size, int): + raise ValueError('Only square RoIs are supported in Grid R-CNN') + self.whole_map_size = self.roi_feat_size * 4 + + # compute point-wise sub-regions + self.sub_regions = self.calc_sub_regions() + + self.convs = [] + for i in range(self.num_convs): + in_channels = ( + self.in_channels if i == 0 else self.conv_out_channels) + stride = 2 if i == 0 else 1 + padding = (self.conv_kernel_size - 1) // 2 + self.convs.append( + ConvModule( + in_channels, + self.conv_out_channels, + self.conv_kernel_size, + stride=stride, + padding=padding, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg, + bias=True)) + self.convs = nn.Sequential(*self.convs) + + self.deconv1 = nn.ConvTranspose2d( + self.conv_out_channels, + self.conv_out_channels, + kernel_size=deconv_kernel_size, + stride=2, + padding=(deconv_kernel_size - 2) // 2, + groups=grid_points) + self.norm1 = nn.GroupNorm(grid_points, self.conv_out_channels) + self.deconv2 = nn.ConvTranspose2d( + self.conv_out_channels, + grid_points, + kernel_size=deconv_kernel_size, + stride=2, + padding=(deconv_kernel_size - 2) // 2, + groups=grid_points) + + # find the 4-neighbor of each grid point + self.neighbor_points = [] + grid_size = self.grid_size + for i in range(grid_size): # i-th column + for j in range(grid_size): # j-th row + neighbors = [] + if i > 0: # left: (i - 1, j) + neighbors.append((i - 1) * grid_size + j) + if j > 0: # up: (i, j - 1) + neighbors.append(i * grid_size + j - 1) + if j < grid_size - 1: # down: (i, j + 1) + neighbors.append(i * grid_size + j + 1) + if i < grid_size - 1: # right: (i + 1, j) + neighbors.append((i + 1) * grid_size + j) + self.neighbor_points.append(tuple(neighbors)) + # total edges in the grid + self.num_edges = sum([len(p) for p in self.neighbor_points]) + + self.forder_trans = nn.ModuleList() # first-order feature transition + self.sorder_trans = nn.ModuleList() # second-order feature transition + for neighbors in self.neighbor_points: + fo_trans = nn.ModuleList() + so_trans = nn.ModuleList() + for _ in range(len(neighbors)): + # each transition module consists of a 5x5 depth-wise conv and + # 1x1 conv.
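
For clarity, the transition modules appended in the loop just below are depth-wise separable convolutions, exactly as the comment above describes. A self-contained sketch of the building block, assuming the default `point_feat_channels=64`; the parameter-count comparison is illustrative:

```python
import torch
import torch.nn as nn

channels = 64  # point_feat_channels default in GridHead

# 5x5 depth-wise conv (groups=channels, one filter per channel)
# followed by a 1x1 point-wise conv, as in the forder/sorder transitions.
transition = nn.Sequential(
    nn.Conv2d(channels, channels, 5, stride=1, padding=2, groups=channels),
    nn.Conv2d(channels, channels, 1),
)

x = torch.randn(2, channels, 28, 28)
assert transition(x).shape == x.shape  # padding=2 preserves spatial size

# The separable form is much cheaper than a dense 5x5 convolution:
dense = nn.Conv2d(channels, channels, 5, padding=2)
print(sum(p.numel() for p in transition.parameters()))  # 5824
print(sum(p.numel() for p in dense.parameters()))       # 102464
```
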
+ fo_trans.append( + nn.Sequential( + nn.Conv2d( + self.point_feat_channels, + self.point_feat_channels, + 5, + stride=1, + padding=2, + groups=self.point_feat_channels), + nn.Conv2d(self.point_feat_channels, + self.point_feat_channels, 1))) + so_trans.append( + nn.Sequential( + nn.Conv2d( + self.point_feat_channels, + self.point_feat_channels, + 5, + 1, + 2, + groups=self.point_feat_channels), + nn.Conv2d(self.point_feat_channels, + self.point_feat_channels, 1))) + self.forder_trans.append(fo_trans) + self.sorder_trans.append(so_trans) + + self.loss_grid = build_loss(loss_grid) + + def forward(self, x): + assert x.shape[-1] == x.shape[-2] == self.roi_feat_size + # RoI feature transformation, downsample 2x + x = self.convs(x) + + c = self.point_feat_channels + # first-order fusion + x_fo = [None for _ in range(self.grid_points)] + for i, points in enumerate(self.neighbor_points): + x_fo[i] = x[:, i * c:(i + 1) * c] + for j, point_idx in enumerate(points): + x_fo[i] = x_fo[i] + self.forder_trans[i][j]( + x[:, point_idx * c:(point_idx + 1) * c]) + + # second-order fusion + x_so = [None for _ in range(self.grid_points)] + for i, points in enumerate(self.neighbor_points): + x_so[i] = x[:, i * c:(i + 1) * c] + for j, point_idx in enumerate(points): + x_so[i] = x_so[i] + self.sorder_trans[i][j](x_fo[point_idx]) + + # predicted heatmap with fused features + x2 = torch.cat(x_so, dim=1) + x2 = self.deconv1(x2) + x2 = F.relu(self.norm1(x2), inplace=True) + heatmap = self.deconv2(x2) + + # predicted heatmap with original features (applicable during training) + if self.training: + x1 = x + x1 = self.deconv1(x1) + x1 = F.relu(self.norm1(x1), inplace=True) + heatmap_unfused = self.deconv2(x1) + else: + heatmap_unfused = heatmap + + return dict(fused=heatmap, unfused=heatmap_unfused) + + def calc_sub_regions(self): + """Compute point specific representation regions. + + See Grid R-CNN Plus (https://arxiv.org/abs/1906.05688) for details. + """ + # to make it consistent with the original implementation, half_size + # is computed as 2 * quarter_size, which is smaller + half_size = self.whole_map_size // 4 * 2 + sub_regions = [] + for i in range(self.grid_points): + x_idx = i // self.grid_size + y_idx = i % self.grid_size + if x_idx == 0: + sub_x1 = 0 + elif x_idx == self.grid_size - 1: + sub_x1 = half_size + else: + ratio = x_idx / (self.grid_size - 1) - 0.25 + sub_x1 = max(int(ratio * self.whole_map_size), 0) + + if y_idx == 0: + sub_y1 = 0 + elif y_idx == self.grid_size - 1: + sub_y1 = half_size + else: + ratio = y_idx / (self.grid_size - 1) - 0.25 + sub_y1 = max(int(ratio * self.whole_map_size), 0) + sub_regions.append( + (sub_x1, sub_y1, sub_x1 + half_size, sub_y1 + half_size)) + return sub_regions + + def get_targets(self, sampling_results, rcnn_train_cfg): + # mix all samples (across images) together. 
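
Before the target computation proceeds below, a worked check of `calc_sub_regions` above under the default configuration (`grid_points=9`, `roi_feat_size=14`, hence `whole_map_size=56` and `half_size=28`); the values are re-derived here purely for illustration:

```python
# Re-derives GridHead.calc_sub_regions for the 9-point default:
# whole_map_size = 14 * 4 = 56, half_size = 56 // 4 * 2 = 28.
grid_points, whole_map_size = 9, 56
grid_size = int(grid_points ** 0.5)   # 3
half_size = whole_map_size // 4 * 2   # 28

def origin(idx):
    # Window origin for one grid row/column index, as in the method above.
    if idx == 0:
        return 0
    if idx == grid_size - 1:
        return half_size
    return max(int((idx / (grid_size - 1) - 0.25) * whole_map_size), 0)

regions = [(origin(i // grid_size), origin(i % grid_size))
           for i in range(grid_points)]
print(regions)
# [(0, 0), (0, 14), (0, 28), (14, 0), (14, 14), (14, 28),
#  (28, 0), (28, 14), (28, 28)]
# Every grid point thus owns a 28x28 window (origin plus half_size),
# i.e. the half-resolution crop taken out of the full 56x56 target.
```
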
+ pos_bboxes = torch.cat([res.pos_bboxes for res in sampling_results], + dim=0).cpu() + pos_gt_bboxes = torch.cat( + [res.pos_gt_bboxes for res in sampling_results], dim=0).cpu() + assert pos_bboxes.shape == pos_gt_bboxes.shape + + # expand pos_bboxes to 2x of original size + x1 = pos_bboxes[:, 0] - (pos_bboxes[:, 2] - pos_bboxes[:, 0]) / 2 + y1 = pos_bboxes[:, 1] - (pos_bboxes[:, 3] - pos_bboxes[:, 1]) / 2 + x2 = pos_bboxes[:, 2] + (pos_bboxes[:, 2] - pos_bboxes[:, 0]) / 2 + y2 = pos_bboxes[:, 3] + (pos_bboxes[:, 3] - pos_bboxes[:, 1]) / 2 + pos_bboxes = torch.stack([x1, y1, x2, y2], dim=-1) + pos_bbox_ws = (pos_bboxes[:, 2] - pos_bboxes[:, 0]).unsqueeze(-1) + pos_bbox_hs = (pos_bboxes[:, 3] - pos_bboxes[:, 1]).unsqueeze(-1) + + num_rois = pos_bboxes.shape[0] + map_size = self.whole_map_size + # this is not the final target shape + targets = torch.zeros((num_rois, self.grid_points, map_size, map_size), + dtype=torch.float) + + # pre-compute interpolation factors for all grid points. + # the first item is the factor of x-dim, and the second is y-dim. + # for a 9-point grid, factors are like (1, 0), (0.5, 0.5), (0, 1) + factors = [] + for j in range(self.grid_points): + x_idx = j // self.grid_size + y_idx = j % self.grid_size + factors.append((1 - x_idx / (self.grid_size - 1), + 1 - y_idx / (self.grid_size - 1))) + + radius = rcnn_train_cfg.pos_radius + radius2 = radius**2 + for i in range(num_rois): + # ignore small bboxes + if (pos_bbox_ws[i] <= self.grid_size + or pos_bbox_hs[i] <= self.grid_size): + continue + # for each grid point, mark a small circle as positive + for j in range(self.grid_points): + factor_x, factor_y = factors[j] + gridpoint_x = factor_x * pos_gt_bboxes[i, 0] + ( + 1 - factor_x) * pos_gt_bboxes[i, 2] + gridpoint_y = factor_y * pos_gt_bboxes[i, 1] + ( + 1 - factor_y) * pos_gt_bboxes[i, 3] + + cx = int((gridpoint_x - pos_bboxes[i, 0]) / pos_bbox_ws[i] * + map_size) + cy = int((gridpoint_y - pos_bboxes[i, 1]) / pos_bbox_hs[i] * + map_size) + + for x in range(cx - radius, cx + radius + 1): + for y in range(cy - radius, cy + radius + 1): + if x >= 0 and x < map_size and y >= 0 and y < map_size: + if (x - cx)**2 + (y - cy)**2 <= radius2: + targets[i, j, y, x] = 1 + # reduce the target heatmap size by a half + # proposed in Grid R-CNN Plus (https://arxiv.org/abs/1906.05688). 
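
The nested x/y loop just above stamps a small disc of radius `pos_radius` around each mapped grid point. An equivalent vectorized sketch (the helper is hypothetical, not part of this diff), before the half-size cropping announced above is carried out below:

```python
import torch

def mark_disc(target, cx, cy, radius):
    # Vectorized form of the pixel loop above: set every cell whose
    # squared distance to (cx, cy) is <= radius**2 on one (H, W) map.
    h, w = target.shape
    ys = torch.arange(h).view(-1, 1)
    xs = torch.arange(w).view(1, -1)
    target[(xs - cx) ** 2 + (ys - cy) ** 2 <= radius ** 2] = 1
    return target

heatmap = mark_disc(torch.zeros(56, 56), cx=20, cy=30, radius=2)
print(int(heatmap.sum()))  # 13 -- the discrete disc of radius 2
```
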
+ sub_targets = [] + for i in range(self.grid_points): + sub_x1, sub_y1, sub_x2, sub_y2 = self.sub_regions[i] + sub_targets.append(targets[:, [i], sub_y1:sub_y2, sub_x1:sub_x2]) + sub_targets = torch.cat(sub_targets, dim=1) + sub_targets = sub_targets.to(sampling_results[0].pos_bboxes.device) + return sub_targets + + def loss(self, grid_pred, grid_targets): + loss_fused = self.loss_grid(grid_pred['fused'], grid_targets) + loss_unfused = self.loss_grid(grid_pred['unfused'], grid_targets) + loss_grid = loss_fused + loss_unfused + return dict(loss_grid=loss_grid) + + def get_bboxes(self, det_bboxes, grid_pred, img_metas): + # TODO: refactoring + assert det_bboxes.shape[0] == grid_pred.shape[0] + det_bboxes = det_bboxes.cpu() + cls_scores = det_bboxes[:, [4]] + det_bboxes = det_bboxes[:, :4] + grid_pred = grid_pred.sigmoid().cpu() + + R, c, h, w = grid_pred.shape + half_size = self.whole_map_size // 4 * 2 + assert h == w == half_size + assert c == self.grid_points + + # find the point with max scores in the half-sized heatmap + grid_pred = grid_pred.view(R * c, h * w) + pred_scores, pred_position = grid_pred.max(dim=1) + xs = pred_position % w + ys = pred_position // w + + # get the position in the whole heatmap instead of half-sized heatmap + for i in range(self.grid_points): + xs[i::self.grid_points] += self.sub_regions[i][0] + ys[i::self.grid_points] += self.sub_regions[i][1] + + # reshape to (num_rois, grid_points) + pred_scores, xs, ys = tuple( + map(lambda x: x.view(R, c), [pred_scores, xs, ys])) + + # get expanded pos_bboxes + widths = (det_bboxes[:, 2] - det_bboxes[:, 0]).unsqueeze(-1) + heights = (det_bboxes[:, 3] - det_bboxes[:, 1]).unsqueeze(-1) + x1 = (det_bboxes[:, 0, None] - widths / 2) + y1 = (det_bboxes[:, 1, None] - heights / 2) + # map the grid point to the absolute coordinates + abs_xs = (xs.float() + 0.5) / w * widths + x1 + abs_ys = (ys.float() + 0.5) / h * heights + y1 + + # get the grid points indices that fall on the bbox boundaries + x1_inds = [i for i in range(self.grid_size)] + y1_inds = [i * self.grid_size for i in range(self.grid_size)] + x2_inds = [ + self.grid_points - self.grid_size + i + for i in range(self.grid_size) + ] + y2_inds = [(i + 1) * self.grid_size - 1 for i in range(self.grid_size)] + + # voting of all grid points on some boundary + bboxes_x1 = (abs_xs[:, x1_inds] * pred_scores[:, x1_inds]).sum( + dim=1, keepdim=True) / ( + pred_scores[:, x1_inds].sum(dim=1, keepdim=True)) + bboxes_y1 = (abs_ys[:, y1_inds] * pred_scores[:, y1_inds]).sum( + dim=1, keepdim=True) / ( + pred_scores[:, y1_inds].sum(dim=1, keepdim=True)) + bboxes_x2 = (abs_xs[:, x2_inds] * pred_scores[:, x2_inds]).sum( + dim=1, keepdim=True) / ( + pred_scores[:, x2_inds].sum(dim=1, keepdim=True)) + bboxes_y2 = (abs_ys[:, y2_inds] * pred_scores[:, y2_inds]).sum( + dim=1, keepdim=True) / ( + pred_scores[:, y2_inds].sum(dim=1, keepdim=True)) + + bbox_res = torch.cat( + [bboxes_x1, bboxes_y1, bboxes_x2, bboxes_y2, cls_scores], dim=1) + bbox_res[:, [0, 2]].clamp_(min=0, max=img_metas[0]['img_shape'][1]) + bbox_res[:, [1, 3]].clamp_(min=0, max=img_metas[0]['img_shape'][0]) + + return bbox_res diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/htc_mask_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/htc_mask_head.py new file mode 100644 index 0000000000000000000000000000000000000000..0f435ecf66d7c998eba635765a62db8adecc22d3 --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/htc_mask_head.py @@ -0,0 +1,38 @@ +from mmcv.cnn import ConvModule + +from mmdet.models.builder import HEADS +from .fcn_mask_head import FCNMaskHead + + +@HEADS.register_module() +class HTCMaskHead(FCNMaskHead): + + def __init__(self, with_conv_res=True, *args, **kwargs): + super(HTCMaskHead, self).__init__(*args, **kwargs) + self.with_conv_res = with_conv_res + if self.with_conv_res: + self.conv_res = ConvModule( + self.conv_out_channels, + self.conv_out_channels, + 1, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg) + + def forward(self, x, res_feat=None, return_logits=True, return_feat=True): + if res_feat is not None: + assert self.with_conv_res + res_feat = self.conv_res(res_feat) + x = x + res_feat + for conv in self.convs: + x = conv(x) + res_feat = x + outs = [] + if return_logits: + x = self.upsample(x) + if self.upsample_method == 'deconv': + x = self.relu(x) + mask_pred = self.conv_logits(x) + outs.append(mask_pred) + if return_feat: + outs.append(res_feat) + return outs if len(outs) > 1 else outs[0] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/mask_point_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/mask_point_head.py new file mode 100644 index 0000000000000000000000000000000000000000..16ed8b60030b3028dc48686c59e0d8b4654f5d50 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/mask_point_head.py @@ -0,0 +1,305 @@ +# Modified from https://github.com/facebookresearch/detectron2/tree/master/projects/PointRend/point_head/point_head.py # noqa + +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule +from mmcv.ops import point_sample, rel_roi_point_to_rel_img_point +from mmcv.runner import BaseModule + +from mmdet.models.builder import HEADS, build_loss + + +@HEADS.register_module() +class MaskPointHead(BaseModule): + """A mask point head used in PointRend. + + ``MaskPointHead`` uses a shared multi-layer perceptron (equivalent to + nn.Conv1d) to predict the logit of input points. The fine-grained feature + and coarse feature will be concatenated together for prediction. + + Args: + num_fcs (int): Number of fc layers in the head. Default: 3. + in_channels (int): Number of input channels. Default: 256. + fc_channels (int): Number of fc channels. Default: 256. + num_classes (int): Number of classes for logits. Default: 80. + class_agnostic (bool): Whether to use class-agnostic classification. + If so, the output channels of logits will be 1. Default: False. + coarse_pred_each_layer (bool): Whether to concatenate the coarse + feature with the output of each fc layer. Default: True. + conv_cfg (dict | None): Dictionary to construct and config conv layer. + Default: dict(type='Conv1d')) + norm_cfg (dict | None): Dictionary to construct and config norm layer. + Default: None. + loss_point (dict): Dictionary to construct and config loss layer of + point head. Default: dict(type='CrossEntropyLoss', use_mask=True, + loss_weight=1.0). + init_cfg (dict or list[dict], optional): Initialization config dict.
+ """ + + def __init__(self, + num_classes, + num_fcs=3, + in_channels=256, + fc_channels=256, + class_agnostic=False, + coarse_pred_each_layer=True, + conv_cfg=dict(type='Conv1d'), + norm_cfg=None, + act_cfg=dict(type='ReLU'), + loss_point=dict( + type='CrossEntropyLoss', use_mask=True, loss_weight=1.0), + init_cfg=dict( + type='Normal', std=0.001, + override=dict(name='fc_logits'))): + super().__init__(init_cfg) + self.num_fcs = num_fcs + self.in_channels = in_channels + self.fc_channels = fc_channels + self.num_classes = num_classes + self.class_agnostic = class_agnostic + self.coarse_pred_each_layer = coarse_pred_each_layer + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.loss_point = build_loss(loss_point) + + fc_in_channels = in_channels + num_classes + self.fcs = nn.ModuleList() + for _ in range(num_fcs): + fc = ConvModule( + fc_in_channels, + fc_channels, + kernel_size=1, + stride=1, + padding=0, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + act_cfg=act_cfg) + self.fcs.append(fc) + fc_in_channels = fc_channels + fc_in_channels += num_classes if self.coarse_pred_each_layer else 0 + + out_channels = 1 if self.class_agnostic else self.num_classes + self.fc_logits = nn.Conv1d( + fc_in_channels, out_channels, kernel_size=1, stride=1, padding=0) + + def forward(self, fine_grained_feats, coarse_feats): + """Classify each point base on fine grained and coarse feats. + + Args: + fine_grained_feats (Tensor): Fine grained feature sampled from FPN, + shape (num_rois, in_channels, num_points). + coarse_feats (Tensor): Coarse feature sampled from CoarseMaskHead, + shape (num_rois, num_classes, num_points). + + Returns: + Tensor: Point classification results, + shape (num_rois, num_class, num_points). + """ + + x = torch.cat([fine_grained_feats, coarse_feats], dim=1) + for fc in self.fcs: + x = fc(x) + if self.coarse_pred_each_layer: + x = torch.cat((x, coarse_feats), dim=1) + return self.fc_logits(x) + + def get_targets(self, rois, rel_roi_points, sampling_results, gt_masks, + cfg): + """Get training targets of MaskPointHead for all images. + + Args: + rois (Tensor): Region of Interest, shape (num_rois, 5). + rel_roi_points: Points coordinates relative to RoI, shape + (num_rois, num_points, 2). + sampling_results (:obj:`SamplingResult`): Sampling result after + sampling and assignment. + gt_masks (Tensor) : Ground truth segmentation masks of + corresponding boxes, shape (num_rois, height, width). + cfg (dict): Training cfg. + + Returns: + Tensor: Point target, shape (num_rois, num_points). 
+ """ + + num_imgs = len(sampling_results) + rois_list = [] + rel_roi_points_list = [] + for batch_ind in range(num_imgs): + inds = (rois[:, 0] == batch_ind) + rois_list.append(rois[inds]) + rel_roi_points_list.append(rel_roi_points[inds]) + pos_assigned_gt_inds_list = [ + res.pos_assigned_gt_inds for res in sampling_results + ] + cfg_list = [cfg for _ in range(num_imgs)] + + point_targets = map(self._get_target_single, rois_list, + rel_roi_points_list, pos_assigned_gt_inds_list, + gt_masks, cfg_list) + point_targets = list(point_targets) + + if len(point_targets) > 0: + point_targets = torch.cat(point_targets) + + return point_targets + + def _get_target_single(self, rois, rel_roi_points, pos_assigned_gt_inds, + gt_masks, cfg): + """Get training target of MaskPointHead for each image.""" + num_pos = rois.size(0) + num_points = cfg.num_points + if num_pos > 0: + gt_masks_th = ( + gt_masks.to_tensor(rois.dtype, rois.device).index_select( + 0, pos_assigned_gt_inds)) + gt_masks_th = gt_masks_th.unsqueeze(1) + rel_img_points = rel_roi_point_to_rel_img_point( + rois, rel_roi_points, gt_masks_th) + point_targets = point_sample(gt_masks_th, + rel_img_points).squeeze(1) + else: + point_targets = rois.new_zeros((0, num_points)) + return point_targets + + def loss(self, point_pred, point_targets, labels): + """Calculate loss for MaskPointHead. + + Args: + point_pred (Tensor): Point predication result, shape + (num_rois, num_classes, num_points). + point_targets (Tensor): Point targets, shape (num_roi, num_points). + labels (Tensor): Class label of corresponding boxes, + shape (num_rois, ) + + Returns: + dict[str, Tensor]: a dictionary of point loss components + """ + + loss = dict() + if self.class_agnostic: + loss_point = self.loss_point(point_pred, point_targets, + torch.zeros_like(labels)) + else: + loss_point = self.loss_point(point_pred, point_targets, labels) + loss['loss_point'] = loss_point + return loss + + def _get_uncertainty(self, mask_pred, labels): + """Estimate uncertainty based on pred logits. + + We estimate uncertainty as L1 distance between 0.0 and the logits + prediction in 'mask_pred' for the foreground class in `classes`. + + Args: + mask_pred (Tensor): mask predication logits, shape (num_rois, + num_classes, mask_height, mask_width). + + labels (list[Tensor]): Either predicted or ground truth label for + each predicted mask, of length num_rois. + + Returns: + scores (Tensor): Uncertainty scores with the most uncertain + locations having the highest uncertainty score, + shape (num_rois, 1, mask_height, mask_width) + """ + if mask_pred.shape[1] == 1: + gt_class_logits = mask_pred.clone() + else: + inds = torch.arange(mask_pred.shape[0], device=mask_pred.device) + gt_class_logits = mask_pred[inds, labels].unsqueeze(1) + return -torch.abs(gt_class_logits) + + def get_roi_rel_points_train(self, mask_pred, labels, cfg): + """Get ``num_points`` most uncertain points with random points during + train. + + Sample points in [0, 1] x [0, 1] coordinate space based on their + uncertainty. The uncertainties are calculated for each point using + '_get_uncertainty()' function that takes point's logit prediction as + input. + + Args: + mask_pred (Tensor): A tensor of shape (num_rois, num_classes, + mask_height, mask_width) for class-specific or class-agnostic + prediction. + labels (list): The ground truth class for each instance. + cfg (dict): Training config of point head. 
+ + Returns: + point_coords (Tensor): A tensor of shape (num_rois, num_points, 2) + that contains the coordinates sampled points. + """ + num_points = cfg.num_points + oversample_ratio = cfg.oversample_ratio + importance_sample_ratio = cfg.importance_sample_ratio + assert oversample_ratio >= 1 + assert 0 <= importance_sample_ratio <= 1 + batch_size = mask_pred.shape[0] + num_sampled = int(num_points * oversample_ratio) + point_coords = torch.rand( + batch_size, num_sampled, 2, device=mask_pred.device) + point_logits = point_sample(mask_pred, point_coords) + # It is crucial to calculate uncertainty based on the sampled + # prediction value for the points. Calculating uncertainties of the + # coarse predictions first and sampling them for points leads to + # incorrect results. To illustrate this: assume uncertainty func( + # logits)=-abs(logits), a sampled point between two coarse + # predictions with -1 and 1 logits has 0 logits, and therefore 0 + # uncertainty value. However, if we calculate uncertainties for the + # coarse predictions first, both will have -1 uncertainty, + # and sampled point will get -1 uncertainty. + point_uncertainties = self._get_uncertainty(point_logits, labels) + num_uncertain_points = int(importance_sample_ratio * num_points) + num_random_points = num_points - num_uncertain_points + idx = torch.topk( + point_uncertainties[:, 0, :], k=num_uncertain_points, dim=1)[1] + shift = num_sampled * torch.arange( + batch_size, dtype=torch.long, device=mask_pred.device) + idx += shift[:, None] + point_coords = point_coords.view(-1, 2)[idx.view(-1), :].view( + batch_size, num_uncertain_points, 2) + if num_random_points > 0: + rand_roi_coords = torch.rand( + batch_size, num_random_points, 2, device=mask_pred.device) + point_coords = torch.cat((point_coords, rand_roi_coords), dim=1) + return point_coords + + def get_roi_rel_points_test(self, mask_pred, pred_label, cfg): + """Get ``num_points`` most uncertain points during test. + + Args: + mask_pred (Tensor): A tensor of shape (num_rois, num_classes, + mask_height, mask_width) for class-specific or class-agnostic + prediction. + pred_label (list): The predication class for each instance. + cfg (dict): Testing config of point head. + + Returns: + point_indices (Tensor): A tensor of shape (num_rois, num_points) + that contains indices from [0, mask_height x mask_width) of the + most uncertain points. + point_coords (Tensor): A tensor of shape (num_rois, num_points, 2) + that contains [0, 1] x [0, 1] normalized coordinates of the + most uncertain points from the [mask_height, mask_width] grid . + """ + num_points = cfg.subdivision_num_points + uncertainty_map = self._get_uncertainty(mask_pred, pred_label) + num_rois, _, mask_height, mask_width = uncertainty_map.shape + + # During ONNX exporting, the type of each elements of 'shape' is + # `Tensor(float)`, while it is `float` during PyTorch inference. 
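
Stepping back to `get_roi_rel_points_train` above (the ONNX-export note just made continues in the code below): the sampling boils down to oversampling random points, scoring them by `-|logit|` (uncertainty peaks at the decision boundary), keeping the most uncertain fraction, and padding with fresh random points. A condensed sketch with the per-RoI bookkeeping stripped out; names and shapes here are illustrative, not from the diff:

```python
import torch

def sample_uncertain_points(point_logits, point_coords, num_points,
                            importance_ratio=0.75):
    # point_logits: (B, S) class-selected logits at S oversampled points;
    # point_coords: (B, S, 2) normalized coordinates in [0, 1] x [0, 1].
    uncertainty = -point_logits.abs()  # largest where the logit is near 0
    num_uncertain = int(importance_ratio * num_points)
    idx = uncertainty.topk(num_uncertain, dim=1).indices
    picked = torch.gather(point_coords, 1,
                          idx.unsqueeze(-1).expand(-1, -1, 2))
    num_random = num_points - num_uncertain
    if num_random > 0:  # pad with uniformly random points
        rand = torch.rand(point_coords.size(0), num_random, 2)
        picked = torch.cat((picked, rand), dim=1)
    return picked  # (B, num_points, 2)

coords = torch.rand(4, 3 * 196, 2)  # oversample_ratio = 3
logits = torch.randn(4, 3 * 196)
assert sample_uncertain_points(logits, coords, 196).shape == (4, 196, 2)
```
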
+ if isinstance(mask_height, torch.Tensor): + h_step = 1.0 / mask_height.float() + w_step = 1.0 / mask_width.float() + else: + h_step = 1.0 / mask_height + w_step = 1.0 / mask_width + # cast to int to avoid dynamic K for TopK op in ONNX + mask_size = int(mask_height * mask_width) + uncertainty_map = uncertainty_map.view(num_rois, mask_size) + num_points = min(mask_size, num_points) + point_indices = uncertainty_map.topk(num_points, dim=1)[1] + xs = w_step / 2.0 + (point_indices % mask_width).float() * w_step + ys = h_step / 2.0 + (point_indices // mask_width).float() * h_step + point_coords = torch.stack([xs, ys], dim=2) + return point_indices, point_coords diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/maskiou_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/maskiou_head.py new file mode 100644 index 0000000000000000000000000000000000000000..fc117ff7e86cefab14b52de8f006d4193eb4c964 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/maskiou_head.py @@ -0,0 +1,182 @@ +import numpy as np +import torch +import torch.nn as nn +from mmcv.cnn import Conv2d, Linear, MaxPool2d +from mmcv.runner import BaseModule, force_fp32 +from torch.nn.modules.utils import _pair + +from mmdet.models.builder import HEADS, build_loss + + +@HEADS.register_module() +class MaskIoUHead(BaseModule): + """Mask IoU Head. + + This head predicts the IoU of predicted masks and corresponding gt masks. + """ + + def __init__(self, + num_convs=4, + num_fcs=2, + roi_feat_size=14, + in_channels=256, + conv_out_channels=256, + fc_out_channels=1024, + num_classes=80, + loss_iou=dict(type='MSELoss', loss_weight=0.5), + init_cfg=[ + dict(type='Kaiming', override=dict(name='convs')), + dict(type='Caffe2Xavier', override=dict(name='fcs')), + dict( + type='Normal', + std=0.01, + override=dict(name='fc_mask_iou')) + ]): + super(MaskIoUHead, self).__init__(init_cfg) + self.in_channels = in_channels + self.conv_out_channels = conv_out_channels + self.fc_out_channels = fc_out_channels + self.num_classes = num_classes + self.fp16_enabled = False + + self.convs = nn.ModuleList() + for i in range(num_convs): + if i == 0: + # concatenation of mask feature and mask prediction + in_channels = self.in_channels + 1 + else: + in_channels = self.conv_out_channels + stride = 2 if i == num_convs - 1 else 1 + self.convs.append( + Conv2d( + in_channels, + self.conv_out_channels, + 3, + stride=stride, + padding=1)) + + roi_feat_size = _pair(roi_feat_size) + pooled_area = (roi_feat_size[0] // 2) * (roi_feat_size[1] // 2) + self.fcs = nn.ModuleList() + for i in range(num_fcs): + in_channels = ( + self.conv_out_channels * + pooled_area if i == 0 else self.fc_out_channels) + self.fcs.append(Linear(in_channels, self.fc_out_channels)) + + self.fc_mask_iou = Linear(self.fc_out_channels, self.num_classes) + self.relu = nn.ReLU() + self.max_pool = MaxPool2d(2, 2) + self.loss_iou = build_loss(loss_iou) + + def forward(self, mask_feat, mask_pred): + mask_pred = mask_pred.sigmoid() + mask_pred_pooled = self.max_pool(mask_pred.unsqueeze(1)) + + x = torch.cat((mask_feat, mask_pred_pooled), 1) + + for conv in self.convs: + x = self.relu(conv(x)) + x = x.flatten(1) + for fc in self.fcs: + x = self.relu(fc(x)) + mask_iou = self.fc_mask_iou(x) + return mask_iou + + @force_fp32(apply_to=('mask_iou_pred', )) + def loss(self, mask_iou_pred, mask_iou_targets): + pos_inds = mask_iou_targets > 0 + if pos_inds.sum() > 0: + loss_mask_iou = 
self.loss_iou(mask_iou_pred[pos_inds], + mask_iou_targets[pos_inds]) + else: + loss_mask_iou = mask_iou_pred.sum() * 0 + return dict(loss_mask_iou=loss_mask_iou) + + @force_fp32(apply_to=('mask_pred', )) + def get_targets(self, sampling_results, gt_masks, mask_pred, mask_targets, + rcnn_train_cfg): + """Compute target of mask IoU. + + Mask IoU target is the IoU of the predicted mask (inside a bbox) and + the gt mask of corresponding gt mask (the whole instance). + The intersection area is computed inside the bbox, and the gt mask area + is computed with two steps, firstly we compute the gt area inside the + bbox, then divide it by the area ratio of gt area inside the bbox and + the gt area of the whole instance. + + Args: + sampling_results (list[:obj:`SamplingResult`]): sampling results. + gt_masks (BitmapMask | PolygonMask): Gt masks (the whole instance) + of each image, with the same shape of the input image. + mask_pred (Tensor): Predicted masks of each positive proposal, + shape (num_pos, h, w). + mask_targets (Tensor): Gt mask of each positive proposal, + binary map of the shape (num_pos, h, w). + rcnn_train_cfg (dict): Training config for R-CNN part. + + Returns: + Tensor: mask iou target (length == num positive). + """ + pos_proposals = [res.pos_bboxes for res in sampling_results] + pos_assigned_gt_inds = [ + res.pos_assigned_gt_inds for res in sampling_results + ] + + # compute the area ratio of gt areas inside the proposals and + # the whole instance + area_ratios = map(self._get_area_ratio, pos_proposals, + pos_assigned_gt_inds, gt_masks) + area_ratios = torch.cat(list(area_ratios)) + assert mask_targets.size(0) == area_ratios.size(0) + + mask_pred = (mask_pred > rcnn_train_cfg.mask_thr_binary).float() + mask_pred_areas = mask_pred.sum((-1, -2)) + + # mask_pred and mask_targets are binary maps + overlap_areas = (mask_pred * mask_targets).sum((-1, -2)) + + # compute the mask area of the whole instance + gt_full_areas = mask_targets.sum((-1, -2)) / (area_ratios + 1e-7) + + mask_iou_targets = overlap_areas / ( + mask_pred_areas + gt_full_areas - overlap_areas) + return mask_iou_targets + + def _get_area_ratio(self, pos_proposals, pos_assigned_gt_inds, gt_masks): + """Compute area ratio of the gt mask inside the proposal and the gt + mask of the corresponding instance.""" + num_pos = pos_proposals.size(0) + if num_pos > 0: + area_ratios = [] + proposals_np = pos_proposals.cpu().numpy() + pos_assigned_gt_inds = pos_assigned_gt_inds.cpu().numpy() + # compute mask areas of gt instances (batch processing for speedup) + gt_instance_mask_area = gt_masks.areas + for i in range(num_pos): + gt_mask = gt_masks[pos_assigned_gt_inds[i]] + + # crop the gt mask inside the proposal + bbox = proposals_np[i, :].astype(np.int32) + gt_mask_in_proposal = gt_mask.crop(bbox) + + ratio = gt_mask_in_proposal.areas[0] / ( + gt_instance_mask_area[pos_assigned_gt_inds[i]] + 1e-7) + area_ratios.append(ratio) + area_ratios = torch.from_numpy(np.stack(area_ratios)).float().to( + pos_proposals.device) + else: + area_ratios = pos_proposals.new_zeros((0, )) + return area_ratios + + @force_fp32(apply_to=('mask_iou_pred', )) + def get_mask_scores(self, mask_iou_pred, det_bboxes, det_labels): + """Get the mask scores. 
+ + mask_score = bbox_score * mask_iou + """ + inds = range(det_labels.size(0)) + mask_scores = mask_iou_pred[inds, det_labels] * det_bboxes[inds, -1] + mask_scores = mask_scores.cpu().numpy() + det_labels = det_labels.cpu().numpy() + return [mask_scores[det_labels == i] for i in range(self.num_classes)] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/scnet_mask_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/scnet_mask_head.py new file mode 100644 index 0000000000000000000000000000000000000000..983a2d9db71a3b2b4980996725fdafb0b412b413 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/scnet_mask_head.py @@ -0,0 +1,27 @@ +from mmdet.models.builder import HEADS +from mmdet.models.utils import ResLayer, SimplifiedBasicBlock +from .fcn_mask_head import FCNMaskHead + + +@HEADS.register_module() +class SCNetMaskHead(FCNMaskHead): + """Mask head for `SCNet `_. + + Args: + conv_to_res (bool, optional): if True, change the conv layers to + ``SimplifiedBasicBlock``. + """ + + def __init__(self, conv_to_res=True, **kwargs): + super(SCNetMaskHead, self).__init__(**kwargs) + self.conv_to_res = conv_to_res + if conv_to_res: + assert self.conv_kernel_size == 3 + self.num_res_blocks = self.num_convs // 2 + self.convs = ResLayer( + SimplifiedBasicBlock, + self.in_channels, + self.conv_out_channels, + self.num_res_blocks, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/scnet_semantic_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/scnet_semantic_head.py new file mode 100644 index 0000000000000000000000000000000000000000..df85a0112d27d97301fff56189f99bee0bf8efa5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_heads/scnet_semantic_head.py @@ -0,0 +1,27 @@ +from mmdet.models.builder import HEADS +from mmdet.models.utils import ResLayer, SimplifiedBasicBlock +from .fused_semantic_head import FusedSemanticHead + + +@HEADS.register_module() +class SCNetSemanticHead(FusedSemanticHead): + """Mask head for `SCNet `_. + + Args: + conv_to_res (bool, optional): if True, change the conv layers to + ``SimplifiedBasicBlock``. + """ + + def __init__(self, conv_to_res=True, **kwargs): + super(SCNetSemanticHead, self).__init__(**kwargs) + self.conv_to_res = conv_to_res + if self.conv_to_res: + num_res_blocks = self.num_convs // 2 + self.convs = ResLayer( + SimplifiedBasicBlock, + self.in_channels, + self.conv_out_channels, + num_res_blocks, + conv_cfg=self.conv_cfg, + norm_cfg=self.norm_cfg) + self.num_convs = num_res_blocks diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_scoring_roi_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_scoring_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..e12700cdb8e70569c9523b77939fbc3f8db6b6d4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/mask_scoring_roi_head.py @@ -0,0 +1,112 @@ +import torch + +from mmdet.core import bbox2roi +from ..builder import HEADS, build_head +from .standard_roi_head import StandardRoIHead + + +@HEADS.register_module() +class MaskScoringRoIHead(StandardRoIHead): + """Mask Scoring RoIHead for Mask Scoring RCNN. 
+ + https://arxiv.org/abs/1903.00241 + """ + + def __init__(self, mask_iou_head, **kwargs): + assert mask_iou_head is not None + super(MaskScoringRoIHead, self).__init__(**kwargs) + self.mask_iou_head = build_head(mask_iou_head) + + def _mask_forward_train(self, x, sampling_results, bbox_feats, gt_masks, + img_metas): + """Run forward function and calculate loss for Mask head in + training.""" + pos_labels = torch.cat([res.pos_gt_labels for res in sampling_results]) + mask_results = super(MaskScoringRoIHead, + self)._mask_forward_train(x, sampling_results, + bbox_feats, gt_masks, + img_metas) + if mask_results['loss_mask'] is None: + return mask_results + + # mask iou head forward and loss + pos_mask_pred = mask_results['mask_pred'][ + range(mask_results['mask_pred'].size(0)), pos_labels] + mask_iou_pred = self.mask_iou_head(mask_results['mask_feats'], + pos_mask_pred) + pos_mask_iou_pred = mask_iou_pred[range(mask_iou_pred.size(0)), + pos_labels] + + mask_iou_targets = self.mask_iou_head.get_targets( + sampling_results, gt_masks, pos_mask_pred, + mask_results['mask_targets'], self.train_cfg) + loss_mask_iou = self.mask_iou_head.loss(pos_mask_iou_pred, + mask_iou_targets) + mask_results['loss_mask'].update(loss_mask_iou) + return mask_results + + def simple_test_mask(self, + x, + img_metas, + det_bboxes, + det_labels, + rescale=False): + """Obtain mask prediction without augmentation.""" + # image shapes of images in the batch + ori_shapes = tuple(meta['ori_shape'] for meta in img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + + num_imgs = len(det_bboxes) + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + num_classes = self.mask_head.num_classes + segm_results = [[[] for _ in range(num_classes)] + for _ in range(num_imgs)] + mask_scores = [[[] for _ in range(num_classes)] + for _ in range(num_imgs)] + else: + # if det_bboxes is rescaled to the original image size, we need to + # rescale it back to the testing scale to obtain RoIs. 
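# A minimal standalone sketch of the rescale convention used below, assuming
# scale_factor is the (w, h, w, h) ratio between the test-time image and the
# original one; the name sketch_to_test_scale is illustrative, not mmdet API.
import torch

def sketch_to_test_scale(det_bboxes, scale_factor):
    # with rescale=True the detections arrive in original-image coordinates,
    # while RoI features are pooled on the resized test image, so the boxes
    # are multiplied back by (w_ratio, h_ratio, w_ratio, h_ratio)
    return det_bboxes[:, :4] * det_bboxes.new_tensor(scale_factor)

boxes = torch.tensor([[10., 20., 110., 220., 0.9]])  # x1, y1, x2, y2, score
print(sketch_to_test_scale(boxes, (0.5, 0.5, 0.5, 0.5)))
# tensor([[  5.,  10.,  55., 110.]])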
+ if rescale and not isinstance(scale_factors[0], float): + scale_factors = [ + torch.from_numpy(scale_factor).to(det_bboxes[0].device) + for scale_factor in scale_factors + ] + _bboxes = [ + det_bboxes[i][:, :4] * + scale_factors[i] if rescale else det_bboxes[i] + for i in range(num_imgs) + ] + mask_rois = bbox2roi(_bboxes) + mask_results = self._mask_forward(x, mask_rois) + concat_det_labels = torch.cat(det_labels) + # get mask scores with mask iou head + mask_feats = mask_results['mask_feats'] + mask_pred = mask_results['mask_pred'] + mask_iou_pred = self.mask_iou_head( + mask_feats, mask_pred[range(concat_det_labels.size(0)), + concat_det_labels]) + # split batch mask prediction back to each image + num_bboxes_per_img = tuple(len(_bbox) for _bbox in _bboxes) + mask_preds = mask_pred.split(num_bboxes_per_img, 0) + mask_iou_preds = mask_iou_pred.split(num_bboxes_per_img, 0) + + # apply mask post-processing to each image individually + segm_results = [] + mask_scores = [] + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + segm_results.append( + [[] for _ in range(self.mask_head.num_classes)]) + mask_scores.append( + [[] for _ in range(self.mask_head.num_classes)]) + else: + segm_result = self.mask_head.get_seg_masks( + mask_preds[i], _bboxes[i], det_labels[i], + self.test_cfg, ori_shapes[i], scale_factors[i], + rescale) + # get mask scores with mask iou head + mask_score = self.mask_iou_head.get_mask_scores( + mask_iou_preds[i], det_bboxes[i], det_labels[i]) + segm_results.append(segm_result) + mask_scores.append(mask_score) + return list(zip(segm_results, mask_scores)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/pisa_roi_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/pisa_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..e01113629837eb9c065ba40cd4025899b7bd0172 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/pisa_roi_head.py @@ -0,0 +1,159 @@ +from mmdet.core import bbox2roi +from ..builder import HEADS +from ..losses.pisa_loss import carl_loss, isr_p +from .standard_roi_head import StandardRoIHead + + +@HEADS.register_module() +class PISARoIHead(StandardRoIHead): + r"""The RoI head for `Prime Sample Attention in Object Detection + `_.""" + + def forward_train(self, + x, + img_metas, + proposal_list, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None): + """Forward function for training. + + Args: + x (list[Tensor]): List of multi-level img features. + img_metas (list[dict]): List of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + proposals (list[Tensors]): List of region proposals. + gt_bboxes (list[Tensor]): Each item are the truth boxes for each + image in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): Class indices corresponding to each box + gt_bboxes_ignore (list[Tensor], optional): Specify which bounding + boxes can be ignored when computing the loss. + gt_masks (None | Tensor) : True segmentation masks for each box + used if the architecture supports a segmentation task. 
+ + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # assign gts and sample proposals + if self.with_bbox or self.with_mask: + num_imgs = len(img_metas) + if gt_bboxes_ignore is None: + gt_bboxes_ignore = [None for _ in range(num_imgs)] + sampling_results = [] + neg_label_weights = [] + for i in range(num_imgs): + assign_result = self.bbox_assigner.assign( + proposal_list[i], gt_bboxes[i], gt_bboxes_ignore[i], + gt_labels[i]) + sampling_result = self.bbox_sampler.sample( + assign_result, + proposal_list[i], + gt_bboxes[i], + gt_labels[i], + feats=[lvl_feat[i][None] for lvl_feat in x]) + # neg label weight is obtained by sampling when using ISR-N + neg_label_weight = None + if isinstance(sampling_result, tuple): + sampling_result, neg_label_weight = sampling_result + sampling_results.append(sampling_result) + neg_label_weights.append(neg_label_weight) + + losses = dict() + # bbox head forward and loss + if self.with_bbox: + bbox_results = self._bbox_forward_train( + x, + sampling_results, + gt_bboxes, + gt_labels, + img_metas, + neg_label_weights=neg_label_weights) + losses.update(bbox_results['loss_bbox']) + + # mask head forward and loss + if self.with_mask: + mask_results = self._mask_forward_train(x, sampling_results, + bbox_results['bbox_feats'], + gt_masks, img_metas) + losses.update(mask_results['loss_mask']) + + return losses + + def _bbox_forward(self, x, rois): + """Box forward function used in both training and testing.""" + # TODO: a more flexible way to decide which feature maps to use + bbox_feats = self.bbox_roi_extractor( + x[:self.bbox_roi_extractor.num_inputs], rois) + if self.with_shared_head: + bbox_feats = self.shared_head(bbox_feats) + cls_score, bbox_pred = self.bbox_head(bbox_feats) + + bbox_results = dict( + cls_score=cls_score, bbox_pred=bbox_pred, bbox_feats=bbox_feats) + return bbox_results + + def _bbox_forward_train(self, + x, + sampling_results, + gt_bboxes, + gt_labels, + img_metas, + neg_label_weights=None): + """Run forward function and calculate loss for box head in training.""" + rois = bbox2roi([res.bboxes for res in sampling_results]) + + bbox_results = self._bbox_forward(x, rois) + + bbox_targets = self.bbox_head.get_targets(sampling_results, gt_bboxes, + gt_labels, self.train_cfg) + + # neg_label_weights obtained by sampler is image-wise, mapping back to + # the corresponding location in label weights + if neg_label_weights[0] is not None: + label_weights = bbox_targets[1] + cur_num_rois = 0 + for i in range(len(sampling_results)): + num_pos = sampling_results[i].pos_inds.size(0) + num_neg = sampling_results[i].neg_inds.size(0) + label_weights[cur_num_rois + num_pos:cur_num_rois + num_pos + + num_neg] = neg_label_weights[i] + cur_num_rois += num_pos + num_neg + + cls_score = bbox_results['cls_score'] + bbox_pred = bbox_results['bbox_pred'] + + # Apply ISR-P + isr_cfg = self.train_cfg.get('isr', None) + if isr_cfg is not None: + bbox_targets = isr_p( + cls_score, + bbox_pred, + bbox_targets, + rois, + sampling_results, + self.bbox_head.loss_cls, + self.bbox_head.bbox_coder, + **isr_cfg, + num_class=self.bbox_head.num_classes) + loss_bbox = self.bbox_head.loss(cls_score, bbox_pred, rois, + *bbox_targets) + + # Add CARL Loss + carl_cfg = self.train_cfg.get('carl', None) + if carl_cfg is not None: + loss_carl = carl_loss( + cls_score, + bbox_targets[0], + bbox_pred, + bbox_targets[2], + self.bbox_head.loss_bbox, + **carl_cfg, + num_class=self.bbox_head.num_classes) + loss_bbox.update(loss_carl) + + 
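# The slicing above relies on the sampler concatenating RoIs image by image
# as [pos_0 | neg_0 | pos_1 | neg_1 | ...]; a small self-contained sketch of
# that mapping (pos_neg_counts stands in for reading pos_inds/neg_inds off
# the sampling results, and the function name is illustrative).
import torch

def sketch_fill_neg_weights(label_weights, pos_neg_counts, neg_label_weights):
    start = 0
    for (num_pos, num_neg), neg_w in zip(pos_neg_counts, neg_label_weights):
        # each image's negative weights land right after its positives
        label_weights[start + num_pos:start + num_pos + num_neg] = neg_w
        start += num_pos + num_neg
    return label_weights

weights = sketch_fill_neg_weights(
    torch.ones(10), [(2, 3), (1, 4)],
    [torch.full((3,), 0.5), torch.full((4,), 0.25)])
# entries 2..4 become 0.5 (image 0) and entries 6..9 become 0.25 (image 1)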
bbox_results.update(loss_bbox=loss_bbox) + return bbox_results diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/point_rend_roi_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/point_rend_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..c8e73d579c260d610608bd015d6edebb04a3a812 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/point_rend_roi_head.py @@ -0,0 +1,394 @@ +# Modified from https://github.com/facebookresearch/detectron2/tree/master/projects/PointRend # noqa +import logging +import os + +import numpy as np +import torch +import torch.nn.functional as F +from mmcv.ops import point_sample, rel_roi_point_to_rel_img_point + +from mmdet.core import bbox2roi, bbox_mapping, merge_aug_masks +from .. import builder +from ..builder import HEADS +from .standard_roi_head import StandardRoIHead + +logger = logging.getLogger(__name__) + + +@HEADS.register_module() +class PointRendRoIHead(StandardRoIHead): + """`PointRend `_.""" + + def __init__(self, point_head, *args, **kwargs): + super().__init__(*args, **kwargs) + assert self.with_bbox and self.with_mask + self.init_point_head(point_head) + + def init_point_head(self, point_head): + """Initialize ``point_head``""" + self.point_head = builder.build_head(point_head) + + def _mask_forward_train(self, x, sampling_results, bbox_feats, gt_masks, + img_metas): + """Run forward function and calculate loss for mask head and point head + in training.""" + mask_results = super()._mask_forward_train(x, sampling_results, + bbox_feats, gt_masks, + img_metas) + if mask_results['loss_mask'] is not None: + loss_point = self._mask_point_forward_train( + x, sampling_results, mask_results['mask_pred'], gt_masks, + img_metas) + mask_results['loss_mask'].update(loss_point) + + return mask_results + + def _mask_point_forward_train(self, x, sampling_results, mask_pred, + gt_masks, img_metas): + """Run forward function and calculate loss for point head in + training.""" + pos_labels = torch.cat([res.pos_gt_labels for res in sampling_results]) + rel_roi_points = self.point_head.get_roi_rel_points_train( + mask_pred, pos_labels, cfg=self.train_cfg) + rois = bbox2roi([res.pos_bboxes for res in sampling_results]) + + fine_grained_point_feats = self._get_fine_grained_point_feats( + x, rois, rel_roi_points, img_metas) + coarse_point_feats = point_sample(mask_pred, rel_roi_points) + mask_point_pred = self.point_head(fine_grained_point_feats, + coarse_point_feats) + mask_point_target = self.point_head.get_targets( + rois, rel_roi_points, sampling_results, gt_masks, self.train_cfg) + loss_mask_point = self.point_head.loss(mask_point_pred, + mask_point_target, pos_labels) + + return loss_mask_point + + def _get_fine_grained_point_feats(self, x, rois, rel_roi_points, + img_metas): + """Sample fine grained feats from each level feature map and + concatenate them together. + + Args: + x (tuple[Tensor]): Feature maps of all scale level. + rois (Tensor): shape (num_rois, 5). + rel_roi_points (Tensor): A tensor of shape (num_rois, num_points, + 2) that contains [0, 1] x [0, 1] normalized coordinates of the + most uncertain points from the [mask_height, mask_width] grid. + img_metas (list[dict]): Image meta info. + + Returns: + Tensor: The fine grained features for each points, + has shape (num_rois, feats_channels, num_points). 
+ """ + num_imgs = len(img_metas) + fine_grained_feats = [] + for idx in range(self.mask_roi_extractor.num_inputs): + feats = x[idx] + spatial_scale = 1. / float( + self.mask_roi_extractor.featmap_strides[idx]) + point_feats = [] + for batch_ind in range(num_imgs): + # unravel batch dim + feat = feats[batch_ind].unsqueeze(0) + inds = (rois[:, 0].long() == batch_ind) + if inds.any(): + rel_img_points = rel_roi_point_to_rel_img_point( + rois[inds], rel_roi_points[inds], feat.shape[2:], + spatial_scale).unsqueeze(0) + point_feat = point_sample(feat, rel_img_points) + point_feat = point_feat.squeeze(0).transpose(0, 1) + point_feats.append(point_feat) + fine_grained_feats.append(torch.cat(point_feats, dim=0)) + return torch.cat(fine_grained_feats, dim=1) + + def _mask_point_forward_test(self, x, rois, label_pred, mask_pred, + img_metas): + """Mask refining process with point head in testing. + + Args: + x (tuple[Tensor]): Feature maps of all scale level. + rois (Tensor): shape (num_rois, 5). + label_pred (Tensor): The predication class for each rois. + mask_pred (Tensor): The predication coarse masks of + shape (num_rois, num_classes, small_size, small_size). + img_metas (list[dict]): Image meta info. + + Returns: + Tensor: The refined masks of shape (num_rois, num_classes, + large_size, large_size). + """ + refined_mask_pred = mask_pred.clone() + for subdivision_step in range(self.test_cfg.subdivision_steps): + refined_mask_pred = F.interpolate( + refined_mask_pred, + scale_factor=self.test_cfg.scale_factor, + mode='bilinear', + align_corners=False) + # If `subdivision_num_points` is larger or equal to the + # resolution of the next step, then we can skip this step + num_rois, channels, mask_height, mask_width = \ + refined_mask_pred.shape + if (self.test_cfg.subdivision_num_points >= + self.test_cfg.scale_factor**2 * mask_height * mask_width + and + subdivision_step < self.test_cfg.subdivision_steps - 1): + continue + point_indices, rel_roi_points = \ + self.point_head.get_roi_rel_points_test( + refined_mask_pred, label_pred, cfg=self.test_cfg) + fine_grained_point_feats = self._get_fine_grained_point_feats( + x, rois, rel_roi_points, img_metas) + coarse_point_feats = point_sample(mask_pred, rel_roi_points) + mask_point_pred = self.point_head(fine_grained_point_feats, + coarse_point_feats) + + point_indices = point_indices.unsqueeze(1).expand(-1, channels, -1) + refined_mask_pred = refined_mask_pred.reshape( + num_rois, channels, mask_height * mask_width) + refined_mask_pred = refined_mask_pred.scatter_( + 2, point_indices, mask_point_pred) + refined_mask_pred = refined_mask_pred.view(num_rois, channels, + mask_height, mask_width) + + return refined_mask_pred + + def simple_test_mask(self, + x, + img_metas, + det_bboxes, + det_labels, + rescale=False): + """Obtain mask prediction without augmentation.""" + ori_shapes = tuple(meta['ori_shape'] for meta in img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + + if isinstance(scale_factors[0], float): + logger.warning( + 'Scale factor in img_metas should be a ' + 'ndarray with shape (4,) ' + 'arrange as (factor_w, factor_h, factor_w, factor_h), ' + 'The scale_factor with float type has been deprecated. 
') + scale_factors = np.array([scale_factors] * 4, dtype=np.float32) + + num_imgs = len(det_bboxes) + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + segm_results = [[[] for _ in range(self.mask_head.num_classes)] + for _ in range(num_imgs)] + else: + # if det_bboxes is rescaled to the original image size, we need to + # rescale it back to the testing scale to obtain RoIs. + _bboxes = [det_bboxes[i][:, :4] for i in range(len(det_bboxes))] + if rescale: + scale_factors = [ + torch.from_numpy(scale_factor).to(det_bboxes[0].device) + for scale_factor in scale_factors + ] + _bboxes = [ + _bboxes[i] * scale_factors[i] for i in range(len(_bboxes)) + ] + + mask_rois = bbox2roi(_bboxes) + mask_results = self._mask_forward(x, mask_rois) + # split batch mask prediction back to each image + mask_pred = mask_results['mask_pred'] + num_mask_roi_per_img = [len(det_bbox) for det_bbox in det_bboxes] + mask_preds = mask_pred.split(num_mask_roi_per_img, 0) + mask_rois = mask_rois.split(num_mask_roi_per_img, 0) + + # apply mask post-processing to each image individually + segm_results = [] + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + segm_results.append( + [[] for _ in range(self.mask_head.num_classes)]) + else: + x_i = [xx[[i]] for xx in x] + mask_rois_i = mask_rois[i] + mask_rois_i[:, 0] = 0 # TODO: remove this hack + mask_pred_i = self._mask_point_forward_test( + x_i, mask_rois_i, det_labels[i], mask_preds[i], + [img_metas]) + segm_result = self.mask_head.get_seg_masks( + mask_pred_i, _bboxes[i], det_labels[i], self.test_cfg, + ori_shapes[i], scale_factors[i], rescale) + segm_results.append(segm_result) + return segm_results + + def aug_test_mask(self, feats, img_metas, det_bboxes, det_labels): + """Test for mask head with test time augmentation.""" + if det_bboxes.shape[0] == 0: + segm_result = [[] for _ in range(self.mask_head.num_classes)] + else: + aug_masks = [] + for x, img_meta in zip(feats, img_metas): + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + _bboxes = bbox_mapping(det_bboxes[:, :4], img_shape, + scale_factor, flip) + mask_rois = bbox2roi([_bboxes]) + mask_results = self._mask_forward(x, mask_rois) + mask_results['mask_pred'] = self._mask_point_forward_test( + x, mask_rois, det_labels, mask_results['mask_pred'], + img_meta) + # convert to numpy array to save memory + aug_masks.append( + mask_results['mask_pred'].sigmoid().cpu().numpy()) + merged_masks = merge_aug_masks(aug_masks, img_metas, self.test_cfg) + + ori_shape = img_metas[0][0]['ori_shape'] + segm_result = self.mask_head.get_seg_masks( + merged_masks, + det_bboxes, + det_labels, + self.test_cfg, + ori_shape, + scale_factor=1.0, + rescale=False) + return segm_result + + def _onnx_get_fine_grained_point_feats(self, x, rois, rel_roi_points): + """Export the process of sampling fine grained feats to onnx. + + Args: + x (tuple[Tensor]): Feature maps of all scale level. + rois (Tensor): shape (num_rois, 5). + rel_roi_points (Tensor): A tensor of shape (num_rois, num_points, + 2) that contains [0, 1] x [0, 1] normalized coordinates of the + most uncertain points from the [mask_height, mask_width] grid. + + Returns: + Tensor: The fine grained features for each points, + has shape (num_rois, feats_channels, num_points). + """ + batch_size = x[0].shape[0] + num_rois = rois.shape[0] + fine_grained_feats = [] + for idx in range(self.mask_roi_extractor.num_inputs): + feats = x[idx] + spatial_scale = 1. 
/ float( + self.mask_roi_extractor.featmap_strides[idx]) + + rel_img_points = rel_roi_point_to_rel_img_point( + rois, rel_roi_points, feats, spatial_scale) + channels = feats.shape[1] + num_points = rel_img_points.shape[1] + rel_img_points = rel_img_points.reshape(batch_size, -1, num_points, + 2) + point_feats = point_sample(feats, rel_img_points) + point_feats = point_feats.transpose(1, 2).reshape( + num_rois, channels, num_points) + fine_grained_feats.append(point_feats) + return torch.cat(fine_grained_feats, dim=1) + + def _mask_point_onnx_export(self, x, rois, label_pred, mask_pred): + """Export mask refining process with point head to onnx. + + Args: + x (tuple[Tensor]): Feature maps of all scale level. + rois (Tensor): shape (num_rois, 5). + label_pred (Tensor): The predication class for each rois. + mask_pred (Tensor): The predication coarse masks of + shape (num_rois, num_classes, small_size, small_size). + + Returns: + Tensor: The refined masks of shape (num_rois, num_classes, + large_size, large_size). + """ + refined_mask_pred = mask_pred.clone() + for subdivision_step in range(self.test_cfg.subdivision_steps): + refined_mask_pred = F.interpolate( + refined_mask_pred, + scale_factor=self.test_cfg.scale_factor, + mode='bilinear', + align_corners=False) + # If `subdivision_num_points` is larger or equal to the + # resolution of the next step, then we can skip this step + num_rois, channels, mask_height, mask_width = \ + refined_mask_pred.shape + if (self.test_cfg.subdivision_num_points >= + self.test_cfg.scale_factor**2 * mask_height * mask_width + and + subdivision_step < self.test_cfg.subdivision_steps - 1): + continue + point_indices, rel_roi_points = \ + self.point_head.get_roi_rel_points_test( + refined_mask_pred, label_pred, cfg=self.test_cfg) + fine_grained_point_feats = self._onnx_get_fine_grained_point_feats( + x, rois, rel_roi_points) + coarse_point_feats = point_sample(mask_pred, rel_roi_points) + mask_point_pred = self.point_head(fine_grained_point_feats, + coarse_point_feats) + + point_indices = point_indices.unsqueeze(1).expand(-1, channels, -1) + refined_mask_pred = refined_mask_pred.reshape( + num_rois, channels, mask_height * mask_width) + + is_trt_backend = os.environ.get('ONNX_BACKEND') == 'MMCVTensorRT' + # avoid ScatterElements op in ONNX for TensorRT + if is_trt_backend: + mask_shape = refined_mask_pred.shape + point_shape = point_indices.shape + inds_dim0 = torch.arange(point_shape[0]).reshape( + point_shape[0], 1, 1).expand_as(point_indices) + inds_dim1 = torch.arange(point_shape[1]).reshape( + 1, point_shape[1], 1).expand_as(point_indices) + inds_1d = inds_dim0.reshape( + -1) * mask_shape[1] * mask_shape[2] + inds_dim1.reshape( + -1) * mask_shape[2] + point_indices.reshape(-1) + refined_mask_pred = refined_mask_pred.reshape(-1) + refined_mask_pred[inds_1d] = mask_point_pred.reshape(-1) + refined_mask_pred = refined_mask_pred.reshape(*mask_shape) + else: + refined_mask_pred = refined_mask_pred.scatter_( + 2, point_indices, mask_point_pred) + + refined_mask_pred = refined_mask_pred.view(num_rois, channels, + mask_height, mask_width) + + return refined_mask_pred + + def mask_onnx_export(self, x, img_metas, det_bboxes, det_labels, **kwargs): + """Export mask branch to onnx which supports batch inference. + + Args: + x (tuple[Tensor]): Feature maps of all scale level. + img_metas (list[dict]): Image meta info. + det_bboxes (Tensor): Bboxes and corresponding scores. + has shape [N, num_bboxes, 5]. + det_labels (Tensor): class labels of + shape [N, num_bboxes]. 
+ + Returns: + Tensor: The segmentation results of shape [N, num_bboxes, + image_height, image_width]. + """ + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + raise RuntimeError('[ONNX Error] Can not record MaskHead ' + 'as it has not been executed this time') + batch_size = det_bboxes.size(0) + # if det_bboxes is rescaled to the original image size, we need to + # rescale it back to the testing scale to obtain RoIs. + det_bboxes = det_bboxes[..., :4] + batch_index = torch.arange( + det_bboxes.size(0), device=det_bboxes.device).float().view( + -1, 1, 1).expand(det_bboxes.size(0), det_bboxes.size(1), 1) + mask_rois = torch.cat([batch_index, det_bboxes], dim=-1) + mask_rois = mask_rois.view(-1, 5) + mask_results = self._mask_forward(x, mask_rois) + mask_pred = mask_results['mask_pred'] + max_shape = img_metas[0]['img_shape_for_onnx'] + num_det = det_bboxes.shape[1] + det_bboxes = det_bboxes.reshape(-1, 4) + det_labels = det_labels.reshape(-1) + + mask_pred = self._mask_point_onnx_export(x, mask_rois, det_labels, + mask_pred) + + segm_results = self.mask_head.onnx_export(mask_pred, det_bboxes, + det_labels, self.test_cfg, + max_shape) + segm_results = segm_results.reshape(batch_size, num_det, max_shape[0], + max_shape[1]) + return segm_results diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/roi_extractors/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/roi_extractors/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..59e2d6d2797a94ca8888b45403636a52019070ed --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/roi_extractors/__init__.py @@ -0,0 +1,5 @@ +from .base_roi_extractor import BaseRoIExtractor +from .generic_roi_extractor import GenericRoIExtractor +from .single_level_roi_extractor import SingleRoIExtractor + +__all__ = ['BaseRoIExtractor', 'SingleRoIExtractor', 'GenericRoIExtractor'] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/roi_extractors/base_roi_extractor.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/roi_extractors/base_roi_extractor.py new file mode 100644 index 0000000000000000000000000000000000000000..704ccf20560bc7f9e97882523cf2a9817af7cd27 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/roi_extractors/base_roi_extractor.py @@ -0,0 +1,87 @@ +from abc import ABCMeta, abstractmethod + +import torch +import torch.nn as nn +from mmcv import ops +from mmcv.runner import BaseModule + + +class BaseRoIExtractor(BaseModule, metaclass=ABCMeta): + """Base class for RoI extractor. + + Args: + roi_layer (dict): Specify RoI layer type and arguments. + out_channels (int): Output channels of RoI layers. + featmap_strides (int): Strides of input feature maps. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + roi_layer, + out_channels, + featmap_strides, + init_cfg=None): + super(BaseRoIExtractor, self).__init__(init_cfg) + self.roi_layers = self.build_roi_layers(roi_layer, featmap_strides) + self.out_channels = out_channels + self.featmap_strides = featmap_strides + self.fp16_enabled = False + + @property + def num_inputs(self): + """int: Number of input feature maps.""" + return len(self.featmap_strides) + + def build_roi_layers(self, layer_cfg, featmap_strides): + """Build RoI operator to extract feature from each level feature map. 
+ + Args: + layer_cfg (dict): Dictionary to construct and config RoI layer + operation. Options are modules under ``mmcv/ops`` such as + ``RoIAlign``. + featmap_strides (List[int]): The stride of input feature map w.r.t + to the original image size, which would be used to scale RoI + coordinate (original image coordinate system) to feature + coordinate system. + + Returns: + nn.ModuleList: The RoI extractor modules for each level feature + map. + """ + + cfg = layer_cfg.copy() + layer_type = cfg.pop('type') + assert hasattr(ops, layer_type) + layer_cls = getattr(ops, layer_type) + roi_layers = nn.ModuleList( + [layer_cls(spatial_scale=1 / s, **cfg) for s in featmap_strides]) + return roi_layers + + def roi_rescale(self, rois, scale_factor): + """Scale RoI coordinates by scale factor. + + Args: + rois (torch.Tensor): RoI (Region of Interest), shape (n, 5) + scale_factor (float): Scale factor that RoI will be multiplied by. + + Returns: + torch.Tensor: Scaled RoI. + """ + + cx = (rois[:, 1] + rois[:, 3]) * 0.5 + cy = (rois[:, 2] + rois[:, 4]) * 0.5 + w = rois[:, 3] - rois[:, 1] + h = rois[:, 4] - rois[:, 2] + new_w = w * scale_factor + new_h = h * scale_factor + x1 = cx - new_w * 0.5 + x2 = cx + new_w * 0.5 + y1 = cy - new_h * 0.5 + y2 = cy + new_h * 0.5 + new_rois = torch.stack((rois[:, 0], x1, y1, x2, y2), dim=-1) + return new_rois + + @abstractmethod + def forward(self, feats, rois, roi_scale_factor=None): + pass diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/roi_extractors/generic_roi_extractor.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/roi_extractors/generic_roi_extractor.py new file mode 100644 index 0000000000000000000000000000000000000000..80c25bb8fde7844c994bfc1f4ae1a2d960cbf3d6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/roi_extractors/generic_roi_extractor.py @@ -0,0 +1,83 @@ +from mmcv.cnn.bricks import build_plugin_layer +from mmcv.runner import force_fp32 + +from mmdet.models.builder import ROI_EXTRACTORS +from .base_roi_extractor import BaseRoIExtractor + + +@ROI_EXTRACTORS.register_module() +class GenericRoIExtractor(BaseRoIExtractor): + """Extract RoI features from all level feature maps levels. + + This is the implementation of `A novel Region of Interest Extraction Layer + for Instance Segmentation `_. + + Args: + aggregation (str): The method to aggregate multiple feature maps. + Options are 'sum', 'concat'. Default: 'sum'. + pre_cfg (dict | None): Specify pre-processing modules. Default: None. + post_cfg (dict | None): Specify post-processing modules. Default: None. + kwargs (keyword arguments): Arguments that are the same + as :class:`BaseRoIExtractor`. 
+ """ + + def __init__(self, + aggregation='sum', + pre_cfg=None, + post_cfg=None, + **kwargs): + super(GenericRoIExtractor, self).__init__(**kwargs) + + assert aggregation in ['sum', 'concat'] + + self.aggregation = aggregation + self.with_post = post_cfg is not None + self.with_pre = pre_cfg is not None + # build pre/post processing modules + if self.with_post: + self.post_module = build_plugin_layer(post_cfg, '_post_module')[1] + if self.with_pre: + self.pre_module = build_plugin_layer(pre_cfg, '_pre_module')[1] + + @force_fp32(apply_to=('feats', ), out_fp16=True) + def forward(self, feats, rois, roi_scale_factor=None): + """Forward function.""" + if len(feats) == 1: + return self.roi_layers[0](feats[0], rois) + + out_size = self.roi_layers[0].output_size + num_levels = len(feats) + roi_feats = feats[0].new_zeros( + rois.size(0), self.out_channels, *out_size) + + # some times rois is an empty tensor + if roi_feats.shape[0] == 0: + return roi_feats + + if roi_scale_factor is not None: + rois = self.roi_rescale(rois, roi_scale_factor) + + # mark the starting channels for concat mode + start_channels = 0 + for i in range(num_levels): + roi_feats_t = self.roi_layers[i](feats[i], rois) + end_channels = start_channels + roi_feats_t.size(1) + if self.with_pre: + # apply pre-processing to a RoI extracted from each layer + roi_feats_t = self.pre_module(roi_feats_t) + if self.aggregation == 'sum': + # and sum them all + roi_feats += roi_feats_t + else: + # and concat them along channel dimension + roi_feats[:, start_channels:end_channels] = roi_feats_t + # update channels starting position + start_channels = end_channels + # check if concat channels match at the end + if self.aggregation == 'concat': + assert start_channels == self.out_channels + + if self.with_post: + # apply post-processing before return the result + roi_feats = self.post_module(roi_feats) + return roi_feats diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/roi_extractors/single_level_roi_extractor.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/roi_extractors/single_level_roi_extractor.py new file mode 100644 index 0000000000000000000000000000000000000000..6c7c1d54fb1cd028471b75480bf870ea8cb8348a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/roi_extractors/single_level_roi_extractor.py @@ -0,0 +1,114 @@ +import torch +from mmcv.runner import force_fp32 + +from mmdet.models.builder import ROI_EXTRACTORS +from .base_roi_extractor import BaseRoIExtractor + + +@ROI_EXTRACTORS.register_module() +class SingleRoIExtractor(BaseRoIExtractor): + """Extract RoI features from a single level feature map. + + If there are multiple input feature levels, each RoI is mapped to a level + according to its scale. The mapping rule is proposed in + `FPN `_. + + Args: + roi_layer (dict): Specify RoI layer type and arguments. + out_channels (int): Output channels of RoI layers. + featmap_strides (List[int]): Strides of input feature maps. + finest_scale (int): Scale threshold of mapping to level 0. Default: 56. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + roi_layer, + out_channels, + featmap_strides, + finest_scale=56, + init_cfg=None): + super(SingleRoIExtractor, self).__init__(roi_layer, out_channels, + featmap_strides, init_cfg) + self.finest_scale = finest_scale + + def map_roi_levels(self, rois, num_levels): + """Map rois to corresponding feature levels by scales. 
+ + - scale < finest_scale * 2: level 0 + - finest_scale * 2 <= scale < finest_scale * 4: level 1 + - finest_scale * 4 <= scale < finest_scale * 8: level 2 + - scale >= finest_scale * 8: level 3 + + Args: + rois (Tensor): Input RoIs, shape (k, 5). + num_levels (int): Total level number. + + Returns: + Tensor: Level index (0-based) of each RoI, shape (k, ) + """ + scale = torch.sqrt( + (rois[:, 3] - rois[:, 1]) * (rois[:, 4] - rois[:, 2])) + target_lvls = torch.floor(torch.log2(scale / self.finest_scale + 1e-6)) + target_lvls = target_lvls.clamp(min=0, max=num_levels - 1).long() + return target_lvls + + @force_fp32(apply_to=('feats', ), out_fp16=True) + def forward(self, feats, rois, roi_scale_factor=None): + """Forward function.""" + out_size = self.roi_layers[0].output_size + num_levels = len(feats) + expand_dims = (-1, self.out_channels * out_size[0] * out_size[1]) + if torch.onnx.is_in_onnx_export(): + # Work around to export mask-rcnn to onnx + roi_feats = rois[:, :1].clone().detach() + roi_feats = roi_feats.expand(*expand_dims) + roi_feats = roi_feats.reshape(-1, self.out_channels, *out_size) + roi_feats = roi_feats * 0 + else: + roi_feats = feats[0].new_zeros( + rois.size(0), self.out_channels, *out_size) + # TODO: remove this when parrots supports + if torch.__version__ == 'parrots': + roi_feats.requires_grad = True + + if num_levels == 1: + if len(rois) == 0: + return roi_feats + return self.roi_layers[0](feats[0], rois) + + target_lvls = self.map_roi_levels(rois, num_levels) + + if roi_scale_factor is not None: + rois = self.roi_rescale(rois, roi_scale_factor) + + for i in range(num_levels): + mask = target_lvls == i + if torch.onnx.is_in_onnx_export(): + # To keep all roi_align nodes exported to onnx + # and skip nonzero op + mask = mask.float().unsqueeze(-1) + # select target level rois and reset the rest rois to zero. + rois_i = rois.clone().detach() + rois_i *= mask + mask_exp = mask.expand(*expand_dims).reshape(roi_feats.shape) + roi_feats_t = self.roi_layers[i](feats[i], rois_i) + roi_feats_t *= mask_exp + roi_feats += roi_feats_t + continue + inds = mask.nonzero(as_tuple=False).squeeze(1) + if inds.numel() > 0: + rois_ = rois[inds] + roi_feats_t = self.roi_layers[i](feats[i], rois_) + roi_feats[inds] = roi_feats_t + else: + # Sometimes some pyramid levels will not be used for RoI + # feature extraction and this will cause an incomplete + # computation graph in one GPU, which is different from those + # in other GPUs and will cause a hanging error. + # Therefore, we add it to ensure each feature pyramid is + # included in the computation graph to avoid runtime bugs. + roi_feats += sum( + x.view(-1)[0] + for x in self.parameters()) * 0. + feats[i].sum() * 0. + return roi_feats diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/scnet_roi_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/scnet_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..84789fb2c496090fc904f7d30e70110c60a54b68 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/scnet_roi_head.py @@ -0,0 +1,561 @@ +import torch +import torch.nn.functional as F + +from mmdet.core import (bbox2result, bbox2roi, bbox_mapping, merge_aug_bboxes, + merge_aug_masks, multiclass_nms) +from ..builder import HEADS, build_head, build_roi_extractor +from .cascade_roi_head import CascadeRoIHead + + +@HEADS.register_module() +class SCNetRoIHead(CascadeRoIHead): + """RoIHead for `SCNet `_. 
+ + Args: + num_stages (int): number of cascade stages. + stage_loss_weights (list): loss weight of cascade stages. + semantic_roi_extractor (dict): config to init semantic roi extractor. + semantic_head (dict): config to init semantic head. + feat_relay_head (dict): config to init feature_relay_head. + glbctx_head (dict): config to init global context head. + """ + + def __init__(self, + num_stages, + stage_loss_weights, + semantic_roi_extractor=None, + semantic_head=None, + feat_relay_head=None, + glbctx_head=None, + **kwargs): + super(SCNetRoIHead, self).__init__(num_stages, stage_loss_weights, + **kwargs) + assert self.with_bbox and self.with_mask + assert not self.with_shared_head # shared head is not supported + + if semantic_head is not None: + self.semantic_roi_extractor = build_roi_extractor( + semantic_roi_extractor) + self.semantic_head = build_head(semantic_head) + + if feat_relay_head is not None: + self.feat_relay_head = build_head(feat_relay_head) + + if glbctx_head is not None: + self.glbctx_head = build_head(glbctx_head) + + def init_mask_head(self, mask_roi_extractor, mask_head): + """Initialize ``mask_head``""" + if mask_roi_extractor is not None: + self.mask_roi_extractor = build_roi_extractor(mask_roi_extractor) + self.mask_head = build_head(mask_head) + + @property + def with_semantic(self): + """bool: whether the head has semantic head""" + return hasattr(self, + 'semantic_head') and self.semantic_head is not None + + @property + def with_feat_relay(self): + """bool: whether the head has feature relay head""" + return (hasattr(self, 'feat_relay_head') + and self.feat_relay_head is not None) + + @property + def with_glbctx(self): + """bool: whether the head has global context head""" + return hasattr(self, 'glbctx_head') and self.glbctx_head is not None + + def _fuse_glbctx(self, roi_feats, glbctx_feat, rois): + """Fuse global context feats with roi feats.""" + assert roi_feats.size(0) == rois.size(0) + img_inds = torch.unique(rois[:, 0].cpu(), sorted=True).long() + fused_feats = torch.zeros_like(roi_feats) + for img_id in img_inds: + inds = (rois[:, 0] == img_id.item()) + fused_feats[inds] = roi_feats[inds] + glbctx_feat[img_id] + return fused_feats + + def _slice_pos_feats(self, feats, sampling_results): + """Get features from pos rois.""" + num_rois = [res.bboxes.size(0) for res in sampling_results] + num_pos_rois = [res.pos_bboxes.size(0) for res in sampling_results] + inds = torch.zeros(sum(num_rois), dtype=torch.bool) + start = 0 + for i in range(len(num_rois)): + start = 0 if i == 0 else start + num_rois[i - 1] + stop = start + num_pos_rois[i] + inds[start:stop] = 1 + sliced_feats = feats[inds] + return sliced_feats + + def _bbox_forward(self, + stage, + x, + rois, + semantic_feat=None, + glbctx_feat=None): + """Box head forward function used in both training and testing.""" + bbox_roi_extractor = self.bbox_roi_extractor[stage] + bbox_head = self.bbox_head[stage] + bbox_feats = bbox_roi_extractor( + x[:len(bbox_roi_extractor.featmap_strides)], rois) + if self.with_semantic and semantic_feat is not None: + bbox_semantic_feat = self.semantic_roi_extractor([semantic_feat], + rois) + if bbox_semantic_feat.shape[-2:] != bbox_feats.shape[-2:]: + bbox_semantic_feat = F.adaptive_avg_pool2d( + bbox_semantic_feat, bbox_feats.shape[-2:]) + bbox_feats += bbox_semantic_feat + if self.with_glbctx and glbctx_feat is not None: + bbox_feats = self._fuse_glbctx(bbox_feats, glbctx_feat, rois) + cls_score, bbox_pred, relayed_feat = bbox_head( + bbox_feats, return_shared_feat=True) 
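# A minimal standalone sketch of the global-context fusion performed by
# _fuse_glbctx above: every RoI receives the context vector of the image it
# came from, keyed by the batch index in rois[:, 0] (shapes are illustrative).
import torch

def sketch_fuse_glbctx(roi_feats, glbctx_feat, rois):
    # roi_feats:   (num_rois, C, H, W) pooled per-RoI features
    # glbctx_feat: per-image context broadcastable to (C, H, W), e.g. (B, C, 1, 1)
    # rois:        (num_rois, 5); column 0 holds the image index in the batch
    fused = torch.zeros_like(roi_feats)
    for img_id in rois[:, 0].unique().long():
        inds = rois[:, 0] == img_id
        fused[inds] = roi_feats[inds] + glbctx_feat[img_id]
    return fused

feats = torch.randn(3, 256, 7, 7)
rois = torch.tensor([[0., 0., 0., 10., 10.],
                     [1., 0., 0., 10., 10.],
                     [0., 5., 5., 20., 20.]])
out = sketch_fuse_glbctx(feats, torch.randn(2, 256, 1, 1), rois)
# rows 0 and 2 both receive the context of batch image 0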
+ + bbox_results = dict( + cls_score=cls_score, + bbox_pred=bbox_pred, + relayed_feat=relayed_feat) + return bbox_results + + def _mask_forward(self, + x, + rois, + semantic_feat=None, + glbctx_feat=None, + relayed_feat=None): + """Mask head forward function used in both training and testing.""" + mask_feats = self.mask_roi_extractor( + x[:self.mask_roi_extractor.num_inputs], rois) + if self.with_semantic and semantic_feat is not None: + mask_semantic_feat = self.semantic_roi_extractor([semantic_feat], + rois) + if mask_semantic_feat.shape[-2:] != mask_feats.shape[-2:]: + mask_semantic_feat = F.adaptive_avg_pool2d( + mask_semantic_feat, mask_feats.shape[-2:]) + mask_feats += mask_semantic_feat + if self.with_glbctx and glbctx_feat is not None: + mask_feats = self._fuse_glbctx(mask_feats, glbctx_feat, rois) + if self.with_feat_relay and relayed_feat is not None: + mask_feats = mask_feats + relayed_feat + mask_pred = self.mask_head(mask_feats) + mask_results = dict(mask_pred=mask_pred) + + return mask_results + + def _bbox_forward_train(self, + stage, + x, + sampling_results, + gt_bboxes, + gt_labels, + rcnn_train_cfg, + semantic_feat=None, + glbctx_feat=None): + """Run forward function and calculate loss for box head in training.""" + bbox_head = self.bbox_head[stage] + rois = bbox2roi([res.bboxes for res in sampling_results]) + bbox_results = self._bbox_forward( + stage, + x, + rois, + semantic_feat=semantic_feat, + glbctx_feat=glbctx_feat) + + bbox_targets = bbox_head.get_targets(sampling_results, gt_bboxes, + gt_labels, rcnn_train_cfg) + loss_bbox = bbox_head.loss(bbox_results['cls_score'], + bbox_results['bbox_pred'], rois, + *bbox_targets) + + bbox_results.update( + loss_bbox=loss_bbox, rois=rois, bbox_targets=bbox_targets) + return bbox_results + + def _mask_forward_train(self, + x, + sampling_results, + gt_masks, + rcnn_train_cfg, + semantic_feat=None, + glbctx_feat=None, + relayed_feat=None): + """Run forward function and calculate loss for mask head in + training.""" + pos_rois = bbox2roi([res.pos_bboxes for res in sampling_results]) + mask_results = self._mask_forward( + x, + pos_rois, + semantic_feat=semantic_feat, + glbctx_feat=glbctx_feat, + relayed_feat=relayed_feat) + + mask_targets = self.mask_head.get_targets(sampling_results, gt_masks, + rcnn_train_cfg) + pos_labels = torch.cat([res.pos_gt_labels for res in sampling_results]) + loss_mask = self.mask_head.loss(mask_results['mask_pred'], + mask_targets, pos_labels) + + mask_results = loss_mask + return mask_results + + def forward_train(self, + x, + img_metas, + proposal_list, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None, + gt_semantic_seg=None): + """ + Args: + x (list[Tensor]): list of multi-level img features. + + img_metas (list[dict]): list of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + + proposal_list (list[Tensors]): list of region proposals. + + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + + gt_labels (list[Tensor]): class indices corresponding to each box + + gt_bboxes_ignore (None, list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + + gt_masks (None, Tensor) : true segmentation masks for each box + used if the architecture supports a segmentation task. 
+ + gt_semantic_seg (None, list[Tensor]): semantic segmentation masks + used if the architecture supports semantic segmentation task. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + losses = dict() + + # semantic segmentation branch + if self.with_semantic: + semantic_pred, semantic_feat = self.semantic_head(x) + loss_seg = self.semantic_head.loss(semantic_pred, gt_semantic_seg) + losses['loss_semantic_seg'] = loss_seg + else: + semantic_feat = None + + # global context branch + if self.with_glbctx: + mc_pred, glbctx_feat = self.glbctx_head(x) + loss_glbctx = self.glbctx_head.loss(mc_pred, gt_labels) + losses['loss_glbctx'] = loss_glbctx + else: + glbctx_feat = None + + for i in range(self.num_stages): + self.current_stage = i + rcnn_train_cfg = self.train_cfg[i] + lw = self.stage_loss_weights[i] + + # assign gts and sample proposals + sampling_results = [] + bbox_assigner = self.bbox_assigner[i] + bbox_sampler = self.bbox_sampler[i] + num_imgs = len(img_metas) + if gt_bboxes_ignore is None: + gt_bboxes_ignore = [None for _ in range(num_imgs)] + + for j in range(num_imgs): + assign_result = bbox_assigner.assign(proposal_list[j], + gt_bboxes[j], + gt_bboxes_ignore[j], + gt_labels[j]) + sampling_result = bbox_sampler.sample( + assign_result, + proposal_list[j], + gt_bboxes[j], + gt_labels[j], + feats=[lvl_feat[j][None] for lvl_feat in x]) + sampling_results.append(sampling_result) + + bbox_results = \ + self._bbox_forward_train( + i, x, sampling_results, gt_bboxes, gt_labels, + rcnn_train_cfg, semantic_feat, glbctx_feat) + roi_labels = bbox_results['bbox_targets'][0] + + for name, value in bbox_results['loss_bbox'].items(): + losses[f's{i}.{name}'] = ( + value * lw if 'loss' in name else value) + + # refine boxes + if i < self.num_stages - 1: + pos_is_gts = [res.pos_is_gt for res in sampling_results] + with torch.no_grad(): + proposal_list = self.bbox_head[i].refine_bboxes( + bbox_results['rois'], roi_labels, + bbox_results['bbox_pred'], pos_is_gts, img_metas) + + if self.with_feat_relay: + relayed_feat = self._slice_pos_feats(bbox_results['relayed_feat'], + sampling_results) + relayed_feat = self.feat_relay_head(relayed_feat) + else: + relayed_feat = None + + mask_results = self._mask_forward_train(x, sampling_results, gt_masks, + rcnn_train_cfg, semantic_feat, + glbctx_feat, relayed_feat) + mask_lw = sum(self.stage_loss_weights) + losses['loss_mask'] = mask_lw * mask_results['loss_mask'] + + return losses + + def simple_test(self, x, proposal_list, img_metas, rescale=False): + """Test without augmentation.""" + if self.with_semantic: + _, semantic_feat = self.semantic_head(x) + else: + semantic_feat = None + + if self.with_glbctx: + mc_pred, glbctx_feat = self.glbctx_head(x) + else: + glbctx_feat = None + + num_imgs = len(proposal_list) + img_shapes = tuple(meta['img_shape'] for meta in img_metas) + ori_shapes = tuple(meta['ori_shape'] for meta in img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + + # "ms" in variable names means multi-stage + ms_scores = [] + rcnn_test_cfg = self.test_cfg + + rois = bbox2roi(proposal_list) + for i in range(self.num_stages): + bbox_head = self.bbox_head[i] + bbox_results = self._bbox_forward( + i, + x, + rois, + semantic_feat=semantic_feat, + glbctx_feat=glbctx_feat) + # split batch bbox prediction back to each image + cls_score = bbox_results['cls_score'] + bbox_pred = bbox_results['bbox_pred'] + num_proposals_per_img = tuple(len(p) for p in proposal_list) + rois = rois.split(num_proposals_per_img, 
0) + cls_score = cls_score.split(num_proposals_per_img, 0) + bbox_pred = bbox_pred.split(num_proposals_per_img, 0) + ms_scores.append(cls_score) + + if i < self.num_stages - 1: + bbox_label = [s[:, :-1].argmax(dim=1) for s in cls_score] + rois = torch.cat([ + bbox_head.regress_by_class(rois[i], bbox_label[i], + bbox_pred[i], img_metas[i]) + for i in range(num_imgs) + ]) + + # average scores of each image by stages + cls_score = [ + sum([score[i] for score in ms_scores]) / float(len(ms_scores)) + for i in range(num_imgs) + ] + + # apply bbox post-processing to each image individually + det_bboxes = [] + det_labels = [] + for i in range(num_imgs): + det_bbox, det_label = self.bbox_head[-1].get_bboxes( + rois[i], + cls_score[i], + bbox_pred[i], + img_shapes[i], + scale_factors[i], + rescale=rescale, + cfg=rcnn_test_cfg) + det_bboxes.append(det_bbox) + det_labels.append(det_label) + det_bbox_results = [ + bbox2result(det_bboxes[i], det_labels[i], + self.bbox_head[-1].num_classes) + for i in range(num_imgs) + ] + + if self.with_mask: + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + mask_classes = self.mask_head.num_classes + det_segm_results = [[[] for _ in range(mask_classes)] + for _ in range(num_imgs)] + else: + if rescale and not isinstance(scale_factors[0], float): + scale_factors = [ + torch.from_numpy(scale_factor).to(det_bboxes[0].device) + for scale_factor in scale_factors + ] + _bboxes = [ + det_bboxes[i][:, :4] * + scale_factors[i] if rescale else det_bboxes[i] + for i in range(num_imgs) + ] + mask_rois = bbox2roi(_bboxes) + + # get relay feature on mask_rois + bbox_results = self._bbox_forward( + -1, + x, + mask_rois, + semantic_feat=semantic_feat, + glbctx_feat=glbctx_feat) + relayed_feat = bbox_results['relayed_feat'] + relayed_feat = self.feat_relay_head(relayed_feat) + + mask_results = self._mask_forward( + x, + mask_rois, + semantic_feat=semantic_feat, + glbctx_feat=glbctx_feat, + relayed_feat=relayed_feat) + mask_pred = mask_results['mask_pred'] + + # split batch mask prediction back to each image + num_bbox_per_img = tuple(len(_bbox) for _bbox in _bboxes) + mask_preds = mask_pred.split(num_bbox_per_img, 0) + + # apply mask post-processing to each image individually + det_segm_results = [] + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + det_segm_results.append( + [[] for _ in range(self.mask_head.num_classes)]) + else: + segm_result = self.mask_head.get_seg_masks( + mask_preds[i], _bboxes[i], det_labels[i], + self.test_cfg, ori_shapes[i], scale_factors[i], + rescale) + det_segm_results.append(segm_result) + + # return results + if self.with_mask: + return list(zip(det_bbox_results, det_segm_results)) + else: + return det_bbox_results + + def aug_test(self, img_feats, proposal_list, img_metas, rescale=False): + if self.with_semantic: + semantic_feats = [ + self.semantic_head(feat)[1] for feat in img_feats + ] + else: + semantic_feats = [None] * len(img_metas) + + if self.with_glbctx: + glbctx_feats = [self.glbctx_head(feat)[1] for feat in img_feats] + else: + glbctx_feats = [None] * len(img_metas) + + rcnn_test_cfg = self.test_cfg + aug_bboxes = [] + aug_scores = [] + for x, img_meta, semantic_feat, glbctx_feat in zip( + img_feats, img_metas, semantic_feats, glbctx_feats): + # only one image in the batch + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + + proposals = bbox_mapping(proposal_list[0][:, :4], img_shape, + scale_factor, flip) + # "ms" in variable names means multi-stage + 
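# A minimal sketch of the flip-and-scale mapping that bbox_mapping performs
# above when projecting boxes into an augmented view: scale first, then
# mirror horizontally (the function name is illustrative; direction handling
# and vertical flips are omitted).
import torch

def sketch_bbox_mapping(bboxes, img_shape, scale_factor, flip):
    new = bboxes * bboxes.new_tensor(scale_factor)
    if flip:
        w = img_shape[1]               # img_shape is (h, w, c)
        flipped = new.clone()
        flipped[:, 0] = w - new[:, 2]  # x1' = W - x2
        flipped[:, 2] = w - new[:, 0]  # x2' = W - x1
        new = flipped
    return new

b = torch.tensor([[10., 10., 50., 40.]])
print(sketch_bbox_mapping(b, (300, 400, 3), (1., 1., 1., 1.), True))
# tensor([[350.,  10., 390.,  40.]])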
ms_scores = [] + + rois = bbox2roi([proposals]) + for i in range(self.num_stages): + bbox_head = self.bbox_head[i] + bbox_results = self._bbox_forward( + i, + x, + rois, + semantic_feat=semantic_feat, + glbctx_feat=glbctx_feat) + ms_scores.append(bbox_results['cls_score']) + if i < self.num_stages - 1: + bbox_label = bbox_results['cls_score'].argmax(dim=1) + rois = bbox_head.regress_by_class( + rois, bbox_label, bbox_results['bbox_pred'], + img_meta[0]) + + cls_score = sum(ms_scores) / float(len(ms_scores)) + bboxes, scores = self.bbox_head[-1].get_bboxes( + rois, + cls_score, + bbox_results['bbox_pred'], + img_shape, + scale_factor, + rescale=False, + cfg=None) + aug_bboxes.append(bboxes) + aug_scores.append(scores) + + # after merging, bboxes will be rescaled to the original image size + merged_bboxes, merged_scores = merge_aug_bboxes( + aug_bboxes, aug_scores, img_metas, rcnn_test_cfg) + det_bboxes, det_labels = multiclass_nms(merged_bboxes, merged_scores, + rcnn_test_cfg.score_thr, + rcnn_test_cfg.nms, + rcnn_test_cfg.max_per_img) + + det_bbox_results = bbox2result(det_bboxes, det_labels, + self.bbox_head[-1].num_classes) + + if self.with_mask: + if det_bboxes.shape[0] == 0: + det_segm_results = [[] + for _ in range(self.mask_head.num_classes)] + else: + aug_masks = [] + for x, img_meta, semantic_feat, glbctx_feat in zip( + img_feats, img_metas, semantic_feats, glbctx_feats): + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + _bboxes = bbox_mapping(det_bboxes[:, :4], img_shape, + scale_factor, flip) + mask_rois = bbox2roi([_bboxes]) + # get relay feature on mask_rois + bbox_results = self._bbox_forward( + -1, + x, + mask_rois, + semantic_feat=semantic_feat, + glbctx_feat=glbctx_feat) + relayed_feat = bbox_results['relayed_feat'] + relayed_feat = self.feat_relay_head(relayed_feat) + mask_results = self._mask_forward( + x, + mask_rois, + semantic_feat=semantic_feat, + glbctx_feat=glbctx_feat, + relayed_feat=relayed_feat) + mask_pred = mask_results['mask_pred'] + aug_masks.append(mask_pred.sigmoid().cpu().numpy()) + merged_masks = merge_aug_masks(aug_masks, img_metas, + self.test_cfg) + ori_shape = img_metas[0][0]['ori_shape'] + det_segm_results = self.mask_head.get_seg_masks( + merged_masks, + det_bboxes, + det_labels, + rcnn_test_cfg, + ori_shape, + scale_factor=1.0, + rescale=False) + return [(det_bbox_results, det_segm_results)] + else: + return [det_bbox_results] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/shared_heads/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/shared_heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..bbe70145b8bf7c304370f725f5afa8db98666679 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/shared_heads/__init__.py @@ -0,0 +1,3 @@ +from .res_layer import ResLayer + +__all__ = ['ResLayer'] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/shared_heads/res_layer.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/shared_heads/res_layer.py new file mode 100644 index 0000000000000000000000000000000000000000..01d6cb7f536e31af6c3d7f11b9dfad013b3fe2aa --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/shared_heads/res_layer.py @@ -0,0 +1,79 @@ +import warnings + +import torch.nn as nn +from mmcv.runner import BaseModule, auto_fp16 + +from mmdet.models.backbones import ResNet 
+from mmdet.models.builder import SHARED_HEADS
+from mmdet.models.utils import ResLayer as _ResLayer
+
+
+@SHARED_HEADS.register_module()
+class ResLayer(BaseModule):
+
+    def __init__(self,
+                 depth,
+                 stage=3,
+                 stride=2,
+                 dilation=1,
+                 style='pytorch',
+                 norm_cfg=dict(type='BN', requires_grad=True),
+                 norm_eval=True,
+                 with_cp=False,
+                 dcn=None,
+                 pretrained=None,
+                 init_cfg=None):
+        super(ResLayer, self).__init__(init_cfg)
+
+        self.norm_eval = norm_eval
+        self.norm_cfg = norm_cfg
+        self.stage = stage
+        self.fp16_enabled = False
+        block, stage_blocks = ResNet.arch_settings[depth]
+        stage_block = stage_blocks[stage]
+        planes = 64 * 2**stage
+        inplanes = 64 * 2**(stage - 1) * block.expansion
+
+        res_layer = _ResLayer(
+            block,
+            inplanes,
+            planes,
+            stage_block,
+            stride=stride,
+            dilation=dilation,
+            style=style,
+            with_cp=with_cp,
+            norm_cfg=self.norm_cfg,
+            dcn=dcn)
+        self.add_module(f'layer{stage + 1}', res_layer)
+
+        assert not (init_cfg and pretrained), \
+            'init_cfg and pretrained cannot be set at the same time'
+        if isinstance(pretrained, str):
+            warnings.warn('DeprecationWarning: pretrained is deprecated, '
+                          'please use "init_cfg" instead')
+            self.init_cfg = dict(type='Pretrained', checkpoint=pretrained)
+        elif pretrained is None:
+            if init_cfg is None:
+                self.init_cfg = [
+                    dict(type='Kaiming', layer='Conv2d'),
+                    dict(
+                        type='Constant',
+                        val=1,
+                        layer=['_BatchNorm', 'GroupNorm'])
+                ]
+        else:
+            raise TypeError('pretrained must be a str or None')
+
+    @auto_fp16()
+    def forward(self, x):
+        res_layer = getattr(self, f'layer{self.stage + 1}')
+        out = res_layer(x)
+        return out
+
+    def train(self, mode=True):
+        super(ResLayer, self).train(mode)
+        if self.norm_eval:
+            for m in self.modules():
+                if isinstance(m, nn.BatchNorm2d):
+                    m.eval()
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/sparse_roi_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/sparse_roi_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..c249613ef62f50134a2479896bf643a27da27289
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/sparse_roi_head.py
@@ -0,0 +1,318 @@
+import torch
+
+from mmdet.core import bbox2result, bbox2roi, bbox_xyxy_to_cxcywh
+from mmdet.core.bbox.samplers import PseudoSampler
+from ..builder import HEADS
+from .cascade_roi_head import CascadeRoIHead
+
+
+@HEADS.register_module()
+class SparseRoIHead(CascadeRoIHead):
+    r"""The RoIHead for `Sparse R-CNN: End-to-End Object Detection with
+    Learnable Proposals `_
+
+    Args:
+        num_stages (int): Number of stages in the whole iterative process.
+            Defaults to 6.
+        stage_loss_weights (Tuple[float]): The loss
+            weight of each stage. By default all stages have
+            the same weight 1.
+        bbox_roi_extractor (dict): Config of box roi extractor.
+        bbox_head (dict): Config of box head.
+        train_cfg (dict, optional): Configuration information in train stage.
+            Defaults to None.
+        test_cfg (dict, optional): Configuration information in test stage.
+            Defaults to None.
+        pretrained (str, optional): model pretrained path. Default: None
+        init_cfg (dict or list[dict], optional): Initialization config dict.
+            Default: None
+
+    """
+
+    def __init__(self,
+                 num_stages=6,
+                 stage_loss_weights=(1, 1, 1, 1, 1, 1),
+                 proposal_feature_channel=256,
+                 bbox_roi_extractor=dict(
+                     type='SingleRoIExtractor',
+                     roi_layer=dict(
+                         type='RoIAlign', output_size=7, sampling_ratio=2),
+                     out_channels=256,
+                     featmap_strides=[4, 8, 16, 32]),
+                 bbox_head=dict(
+                     type='DIIHead',
+                     num_classes=80,
+                     num_fcs=2,
+                     num_heads=8,
+                     num_cls_fcs=1,
+                     num_reg_fcs=3,
+                     feedforward_channels=2048,
+                     hidden_channels=256,
+                     dropout=0.0,
+                     roi_feat_size=7,
+                     ffn_act_cfg=dict(type='ReLU', inplace=True)),
+                 train_cfg=None,
+                 test_cfg=None,
+                 pretrained=None,
+                 init_cfg=None):
+        assert bbox_roi_extractor is not None
+        assert bbox_head is not None
+        assert len(stage_loss_weights) == num_stages
+        self.num_stages = num_stages
+        self.stage_loss_weights = stage_loss_weights
+        self.proposal_feature_channel = proposal_feature_channel
+        super(SparseRoIHead, self).__init__(
+            num_stages,
+            stage_loss_weights,
+            bbox_roi_extractor=bbox_roi_extractor,
+            bbox_head=bbox_head,
+            train_cfg=train_cfg,
+            test_cfg=test_cfg,
+            pretrained=pretrained,
+            init_cfg=init_cfg)
+        # train_cfg would be None when running test.py
+        if train_cfg is not None:
+            for stage in range(num_stages):
+                assert isinstance(self.bbox_sampler[stage], PseudoSampler), \
+                    'Sparse R-CNN only supports `PseudoSampler`'
+
+    def _bbox_forward(self, stage, x, rois, object_feats, img_metas):
+        """Box head forward function used in both training and testing.
+        Returns all regression and classification results and an
+        intermediate feature.
+
+        Args:
+            stage (int): The index of the current stage in the
+                iterative process.
+            x (List[Tensor]): List of FPN features
+            rois (Tensor): RoIs of the total batch, with shape
+                (num_proposal, 5); the last dimension 5 represents
+                (img_index, x1, y1, x2, y2).
+            object_feats (Tensor): The object feature extracted from
+                the previous stage.
+            img_metas (dict): meta information of images.
+
+        Returns:
+            dict[str, Tensor]: a dictionary of bbox head outputs,
+                containing the following results:
+
+                - cls_score (Tensor): The score of each class, has
+                  shape (batch_size, num_proposals, num_classes)
+                  when using focal loss or
+                  (batch_size, num_proposals, num_classes+1)
+                  otherwise.
+                - decode_bbox_pred (Tensor): The regression results
+                  with shape (batch_size, num_proposal, 4).
+                  The last dimension 4 represents
+                  [tl_x, tl_y, br_x, br_y].
+                - object_feats (Tensor): The object feature extracted
+                  from the current stage
+                - detach_cls_score_list (list[Tensor]): The detached
+                  classification results, length is batch_size, and
+                  each tensor has shape (num_proposal, num_classes).
+                - detach_proposal_list (list[tensor]): The detached
+                  regression results, length is batch_size, and each
+                  tensor has shape (num_proposal, 4). The last
+                  dimension 4 represents [tl_x, tl_y, br_x, br_y].
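+
+            Example (illustrative shapes only, assuming a batch of 2 images,
+            100 proposals per image and 80 classes with focal loss):
+            ``cls_score`` would be ``(2, 100, 80)`` and ``decode_bbox_pred``
+            ``(2, 100, 4)``.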
+ """ + num_imgs = len(img_metas) + bbox_roi_extractor = self.bbox_roi_extractor[stage] + bbox_head = self.bbox_head[stage] + bbox_feats = bbox_roi_extractor(x[:bbox_roi_extractor.num_inputs], + rois) + cls_score, bbox_pred, object_feats = bbox_head(bbox_feats, + object_feats) + proposal_list = self.bbox_head[stage].refine_bboxes( + rois, + rois.new_zeros(len(rois)), # dummy arg + bbox_pred.view(-1, bbox_pred.size(-1)), + [rois.new_zeros(object_feats.size(1)) for _ in range(num_imgs)], + img_metas) + bbox_results = dict( + cls_score=cls_score, + decode_bbox_pred=torch.cat(proposal_list), + object_feats=object_feats, + # detach then use it in label assign + detach_cls_score_list=[ + cls_score[i].detach() for i in range(num_imgs) + ], + detach_proposal_list=[item.detach() for item in proposal_list]) + + return bbox_results + + def forward_train(self, + x, + proposal_boxes, + proposal_features, + img_metas, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + imgs_whwh=None, + gt_masks=None): + """Forward function in training stage. + + Args: + x (list[Tensor]): list of multi-level img features. + proposals (Tensor): Decoded proposal bboxes, has shape + (batch_size, num_proposals, 4) + proposal_features (Tensor): Expanded proposal + features, has shape + (batch_size, num_proposals, proposal_feature_channel) + img_metas (list[dict]): list of image info dict where + each dict has: 'img_shape', 'scale_factor', 'flip', + and may also contain 'filename', 'ori_shape', + 'pad_shape', and 'img_norm_cfg'. For details on the + values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + imgs_whwh (Tensor): Tensor with shape (batch_size, 4), + the dimension means + [img_width,img_height, img_width, img_height]. + gt_masks (None | Tensor) : true segmentation masks for each box + used if the architecture supports a segmentation task. + + Returns: + dict[str, Tensor]: a dictionary of loss components of all stage. 
+ """ + + num_imgs = len(img_metas) + num_proposals = proposal_boxes.size(1) + imgs_whwh = imgs_whwh.repeat(1, num_proposals, 1) + all_stage_bbox_results = [] + proposal_list = [proposal_boxes[i] for i in range(len(proposal_boxes))] + object_feats = proposal_features + all_stage_loss = {} + for stage in range(self.num_stages): + rois = bbox2roi(proposal_list) + bbox_results = self._bbox_forward(stage, x, rois, object_feats, + img_metas) + all_stage_bbox_results.append(bbox_results) + if gt_bboxes_ignore is None: + # TODO support ignore + gt_bboxes_ignore = [None for _ in range(num_imgs)] + sampling_results = [] + cls_pred_list = bbox_results['detach_cls_score_list'] + proposal_list = bbox_results['detach_proposal_list'] + for i in range(num_imgs): + normalize_bbox_ccwh = bbox_xyxy_to_cxcywh(proposal_list[i] / + imgs_whwh[i]) + assign_result = self.bbox_assigner[stage].assign( + normalize_bbox_ccwh, cls_pred_list[i], gt_bboxes[i], + gt_labels[i], img_metas[i]) + sampling_result = self.bbox_sampler[stage].sample( + assign_result, proposal_list[i], gt_bboxes[i]) + sampling_results.append(sampling_result) + bbox_targets = self.bbox_head[stage].get_targets( + sampling_results, gt_bboxes, gt_labels, self.train_cfg[stage], + True) + cls_score = bbox_results['cls_score'] + decode_bbox_pred = bbox_results['decode_bbox_pred'] + + single_stage_loss = self.bbox_head[stage].loss( + cls_score.view(-1, cls_score.size(-1)), + decode_bbox_pred.view(-1, 4), + *bbox_targets, + imgs_whwh=imgs_whwh) + for key, value in single_stage_loss.items(): + all_stage_loss[f'stage{stage}_{key}'] = value * \ + self.stage_loss_weights[stage] + object_feats = bbox_results['object_feats'] + + return all_stage_loss + + def simple_test(self, + x, + proposal_boxes, + proposal_features, + img_metas, + imgs_whwh, + rescale=False): + """Test without augmentation. + + Args: + x (list[Tensor]): list of multi-level img features. + proposal_boxes (Tensor): Decoded proposal bboxes, has shape + (batch_size, num_proposals, 4) + proposal_features (Tensor): Expanded proposal + features, has shape + (batch_size, num_proposals, proposal_feature_channel) + img_metas (dict): meta information of images. + imgs_whwh (Tensor): Tensor with shape (batch_size, 4), + the dimension means + [img_width,img_height, img_width, img_height]. + rescale (bool): If True, return boxes in original image + space. Defaults to False. + + Returns: + bbox_results (list[tuple[np.ndarray]]): \ + [[cls1_det, cls2_det, ...], ...]. \ + The outer list indicates images, and the inner \ + list indicates per-class detected bboxes. The \ + np.ndarray has shape (num_det, 5) and the last \ + dimension 5 represents (x1, y1, x2, y2, score). + """ + assert self.with_bbox, 'Bbox head must be implemented.' 
+ # Decode initial proposals + num_imgs = len(img_metas) + proposal_list = [proposal_boxes[i] for i in range(num_imgs)] + object_feats = proposal_features + for stage in range(self.num_stages): + rois = bbox2roi(proposal_list) + bbox_results = self._bbox_forward(stage, x, rois, object_feats, + img_metas) + object_feats = bbox_results['object_feats'] + cls_score = bbox_results['cls_score'] + proposal_list = bbox_results['detach_proposal_list'] + + num_classes = self.bbox_head[-1].num_classes + det_bboxes = [] + det_labels = [] + + if self.bbox_head[-1].loss_cls.use_sigmoid: + cls_score = cls_score.sigmoid() + else: + cls_score = cls_score.softmax(-1)[..., :-1] + + for img_id in range(num_imgs): + cls_score_per_img = cls_score[img_id] + scores_per_img, topk_indices = cls_score_per_img.flatten( + 0, 1).topk( + self.test_cfg.max_per_img, sorted=False) + labels_per_img = topk_indices % num_classes + bbox_pred_per_img = proposal_list[img_id][topk_indices // + num_classes] + if rescale: + scale_factor = img_metas[img_id]['scale_factor'] + bbox_pred_per_img /= bbox_pred_per_img.new_tensor(scale_factor) + det_bboxes.append( + torch.cat([bbox_pred_per_img, scores_per_img[:, None]], dim=1)) + det_labels.append(labels_per_img) + + bbox_results = [ + bbox2result(det_bboxes[i], det_labels[i], num_classes) + for i in range(num_imgs) + ] + + return bbox_results + + def aug_test(self, features, proposal_list, img_metas, rescale=False): + raise NotImplementedError('Sparse R-CNN does not support `aug_test`') + + def forward_dummy(self, x, proposal_boxes, proposal_features, img_metas): + """Dummy forward function when do the flops computing.""" + all_stage_bbox_results = [] + proposal_list = [proposal_boxes[i] for i in range(len(proposal_boxes))] + object_feats = proposal_features + if self.with_bbox: + for stage in range(self.num_stages): + rois = bbox2roi(proposal_list) + bbox_results = self._bbox_forward(stage, x, rois, object_feats, + img_metas) + + all_stage_bbox_results.append(bbox_results) + proposal_list = bbox_results['detach_proposal_list'] + object_feats = bbox_results['object_feats'] + return all_stage_bbox_results diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/standard_roi_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/standard_roi_head.py new file mode 100644 index 0000000000000000000000000000000000000000..64e941f6026c67810c2dd6bc04dc68d5290b25fa --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/standard_roi_head.py @@ -0,0 +1,372 @@ +import torch + +from mmdet.core import bbox2result, bbox2roi, build_assigner, build_sampler +from ..builder import HEADS, build_head, build_roi_extractor +from .base_roi_head import BaseRoIHead +from .test_mixins import BBoxTestMixin, MaskTestMixin + + +@HEADS.register_module() +class StandardRoIHead(BaseRoIHead, BBoxTestMixin, MaskTestMixin): + """Simplest base roi head including one bbox head and one mask head.""" + + def init_assigner_sampler(self): + """Initialize assigner and sampler.""" + self.bbox_assigner = None + self.bbox_sampler = None + if self.train_cfg: + self.bbox_assigner = build_assigner(self.train_cfg.assigner) + self.bbox_sampler = build_sampler( + self.train_cfg.sampler, context=self) + + def init_bbox_head(self, bbox_roi_extractor, bbox_head): + """Initialize ``bbox_head``""" + self.bbox_roi_extractor = build_roi_extractor(bbox_roi_extractor) + self.bbox_head = build_head(bbox_head) + + def init_mask_head(self, mask_roi_extractor, mask_head): 
+ """Initialize ``mask_head``""" + if mask_roi_extractor is not None: + self.mask_roi_extractor = build_roi_extractor(mask_roi_extractor) + self.share_roi_extractor = False + else: + self.share_roi_extractor = True + self.mask_roi_extractor = self.bbox_roi_extractor + self.mask_head = build_head(mask_head) + + def forward_dummy(self, x, proposals): + """Dummy forward function.""" + # bbox head + outs = () + rois = bbox2roi([proposals]) + if self.with_bbox: + bbox_results = self._bbox_forward(x, rois) + outs = outs + (bbox_results['cls_score'], + bbox_results['bbox_pred']) + # mask head + if self.with_mask: + mask_rois = rois[:100] + mask_results = self._mask_forward(x, mask_rois) + outs = outs + (mask_results['mask_pred'], ) + return outs + + def forward_train(self, + x, + img_metas, + proposal_list, + gt_bboxes, + gt_labels, + gt_bboxes_ignore=None, + gt_masks=None): + """ + Args: + x (list[Tensor]): list of multi-level img features. + img_metas (list[dict]): list of image info dict where each dict + has: 'img_shape', 'scale_factor', 'flip', and may also contain + 'filename', 'ori_shape', 'pad_shape', and 'img_norm_cfg'. + For details on the values of these keys see + `mmdet/datasets/pipelines/formatting.py:Collect`. + proposals (list[Tensors]): list of region proposals. + gt_bboxes (list[Tensor]): Ground truth bboxes for each image with + shape (num_gts, 4) in [tl_x, tl_y, br_x, br_y] format. + gt_labels (list[Tensor]): class indices corresponding to each box + gt_bboxes_ignore (None | list[Tensor]): specify which bounding + boxes can be ignored when computing the loss. + gt_masks (None | Tensor) : true segmentation masks for each box + used if the architecture supports a segmentation task. + + Returns: + dict[str, Tensor]: a dictionary of loss components + """ + # assign gts and sample proposals + if self.with_bbox or self.with_mask: + num_imgs = len(img_metas) + if gt_bboxes_ignore is None: + gt_bboxes_ignore = [None for _ in range(num_imgs)] + sampling_results = [] + for i in range(num_imgs): + assign_result = self.bbox_assigner.assign( + proposal_list[i], gt_bboxes[i], gt_bboxes_ignore[i], + gt_labels[i]) + sampling_result = self.bbox_sampler.sample( + assign_result, + proposal_list[i], + gt_bboxes[i], + gt_labels[i], + feats=[lvl_feat[i][None] for lvl_feat in x]) + sampling_results.append(sampling_result) + + losses = dict() + # bbox head forward and loss + if self.with_bbox: + bbox_results = self._bbox_forward_train(x, sampling_results, + gt_bboxes, gt_labels, + img_metas) + losses.update(bbox_results['loss_bbox']) + + # mask head forward and loss + if self.with_mask: + mask_results = self._mask_forward_train(x, sampling_results, + bbox_results['bbox_feats'], + gt_masks, img_metas) + losses.update(mask_results['loss_mask']) + + return losses + + def _bbox_forward(self, x, rois): + """Box head forward function used in both training and testing.""" + # TODO: a more flexible way to decide which feature maps to use + bbox_feats = self.bbox_roi_extractor( + x[:self.bbox_roi_extractor.num_inputs], rois) + if self.with_shared_head: + bbox_feats = self.shared_head(bbox_feats) + cls_score, bbox_pred = self.bbox_head(bbox_feats) + + bbox_results = dict( + cls_score=cls_score, bbox_pred=bbox_pred, bbox_feats=bbox_feats) + return bbox_results + + def _bbox_forward_train(self, x, sampling_results, gt_bboxes, gt_labels, + img_metas): + """Run forward function and calculate loss for box head in training.""" + rois = bbox2roi([res.bboxes for res in sampling_results]) + bbox_results = 
self._bbox_forward(x, rois) + + bbox_targets = self.bbox_head.get_targets(sampling_results, gt_bboxes, + gt_labels, self.train_cfg) + loss_bbox = self.bbox_head.loss(bbox_results['cls_score'], + bbox_results['bbox_pred'], rois, + *bbox_targets) + + bbox_results.update(loss_bbox=loss_bbox) + return bbox_results + + def _mask_forward_train(self, x, sampling_results, bbox_feats, gt_masks, + img_metas): + """Run forward function and calculate loss for mask head in + training.""" + if not self.share_roi_extractor: + pos_rois = bbox2roi([res.pos_bboxes for res in sampling_results]) + mask_results = self._mask_forward(x, pos_rois) + else: + pos_inds = [] + device = bbox_feats.device + for res in sampling_results: + pos_inds.append( + torch.ones( + res.pos_bboxes.shape[0], + device=device, + dtype=torch.uint8)) + pos_inds.append( + torch.zeros( + res.neg_bboxes.shape[0], + device=device, + dtype=torch.uint8)) + pos_inds = torch.cat(pos_inds) + + mask_results = self._mask_forward( + x, pos_inds=pos_inds, bbox_feats=bbox_feats) + + mask_targets = self.mask_head.get_targets(sampling_results, gt_masks, + self.train_cfg) + pos_labels = torch.cat([res.pos_gt_labels for res in sampling_results]) + loss_mask = self.mask_head.loss(mask_results['mask_pred'], + mask_targets, pos_labels) + + mask_results.update(loss_mask=loss_mask, mask_targets=mask_targets) + return mask_results + + def _mask_forward(self, x, rois=None, pos_inds=None, bbox_feats=None): + """Mask head forward function used in both training and testing.""" + assert ((rois is not None) ^ + (pos_inds is not None and bbox_feats is not None)) + if rois is not None: + mask_feats = self.mask_roi_extractor( + x[:self.mask_roi_extractor.num_inputs], rois) + if self.with_shared_head: + mask_feats = self.shared_head(mask_feats) + else: + assert bbox_feats is not None + mask_feats = bbox_feats[pos_inds] + + mask_pred = self.mask_head(mask_feats) + mask_results = dict(mask_pred=mask_pred, mask_feats=mask_feats) + return mask_results + + async def async_simple_test(self, + x, + proposal_list, + img_metas, + proposals=None, + rescale=False): + """Async test without augmentation.""" + assert self.with_bbox, 'Bbox head must be implemented.' + + det_bboxes, det_labels = await self.async_test_bboxes( + x, img_metas, proposal_list, self.test_cfg, rescale=rescale) + bbox_results = bbox2result(det_bboxes, det_labels, + self.bbox_head.num_classes) + if not self.with_mask: + return bbox_results + else: + segm_results = await self.async_test_mask( + x, + img_metas, + det_bboxes, + det_labels, + rescale=rescale, + mask_test_cfg=self.test_cfg.get('mask')) + return bbox_results, segm_results + + def simple_test(self, + x, + proposal_list, + img_metas, + proposals=None, + rescale=False): + """Test without augmentation.""" + assert self.with_bbox, 'Bbox head must be implemented.' + + det_bboxes, det_labels = self.simple_test_bboxes( + x, img_metas, proposal_list, self.test_cfg, rescale=rescale) + + bbox_results = [ + bbox2result(det_bboxes[i], det_labels[i], + self.bbox_head.num_classes) + for i in range(len(det_bboxes)) + ] + + if not self.with_mask: + return bbox_results + else: + segm_results = self.simple_test_mask( + x, img_metas, det_bboxes, det_labels, rescale=rescale) + return list(zip(bbox_results, segm_results)) + + def aug_test(self, x, proposal_list, img_metas, rescale=False): + """Test with augmentations. + + If rescale is False, then returned bboxes and masks will fit the scale + of imgs[0]. 
+ """ + det_bboxes, det_labels = self.aug_test_bboxes(x, img_metas, + proposal_list, + self.test_cfg) + if rescale: + _det_bboxes = det_bboxes + else: + _det_bboxes = det_bboxes.clone() + _det_bboxes[:, :4] *= det_bboxes.new_tensor( + img_metas[0][0]['scale_factor']) + bbox_results = bbox2result(_det_bboxes, det_labels, + self.bbox_head.num_classes) + + # det_bboxes always keep the original scale + if self.with_mask: + segm_results = self.aug_test_mask(x, img_metas, det_bboxes, + det_labels) + return [(bbox_results, segm_results)] + else: + return [bbox_results] + + def onnx_export(self, x, proposals, img_metas, rescale=False): + """Test without augmentation.""" + assert self.with_bbox, 'Bbox head must be implemented.' + det_bboxes, det_labels = self.bbox_onnx_export( + x, img_metas, proposals, self.test_cfg, rescale=rescale) + + if not self.with_mask: + return det_bboxes, det_labels + else: + segm_results = self.mask_onnx_export( + x, img_metas, det_bboxes, det_labels, rescale=rescale) + return det_bboxes, det_labels, segm_results + + def mask_onnx_export(self, x, img_metas, det_bboxes, det_labels, **kwargs): + """Export mask branch to onnx which supports batch inference. + + Args: + x (tuple[Tensor]): Feature maps of all scale level. + img_metas (list[dict]): Image meta info. + det_bboxes (Tensor): Bboxes and corresponding scores. + has shape [N, num_bboxes, 5]. + det_labels (Tensor): class labels of + shape [N, num_bboxes]. + + Returns: + Tensor: The segmentation results of shape [N, num_bboxes, + image_height, image_width]. + """ + # image shapes of images in the batch + + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + raise RuntimeError('[ONNX Error] Can not record MaskHead ' + 'as it has not been executed this time') + batch_size = det_bboxes.size(0) + # if det_bboxes is rescaled to the original image size, we need to + # rescale it back to the testing scale to obtain RoIs. + det_bboxes = det_bboxes[..., :4] + batch_index = torch.arange( + det_bboxes.size(0), device=det_bboxes.device).float().view( + -1, 1, 1).expand(det_bboxes.size(0), det_bboxes.size(1), 1) + mask_rois = torch.cat([batch_index, det_bboxes], dim=-1) + mask_rois = mask_rois.view(-1, 5) + mask_results = self._mask_forward(x, mask_rois) + mask_pred = mask_results['mask_pred'] + max_shape = img_metas[0]['img_shape_for_onnx'] + num_det = det_bboxes.shape[1] + det_bboxes = det_bboxes.reshape(-1, 4) + det_labels = det_labels.reshape(-1) + segm_results = self.mask_head.onnx_export(mask_pred, det_bboxes, + det_labels, self.test_cfg, + max_shape) + segm_results = segm_results.reshape(batch_size, num_det, max_shape[0], + max_shape[1]) + return segm_results + + def bbox_onnx_export(self, x, img_metas, proposals, rcnn_test_cfg, + **kwargs): + """Export bbox branch to onnx which supports batch inference. + + Args: + x (tuple[Tensor]): Feature maps of all scale level. + img_metas (list[dict]): Image meta info. + proposals (Tensor): Region proposals with + batch dimension, has shape [N, num_bboxes, 5]. + rcnn_test_cfg (obj:`ConfigDict`): `test_cfg` of R-CNN. + + Returns: + tuple[Tensor, Tensor]: bboxes of shape [N, num_bboxes, 5] + and class labels of shape [N, num_bboxes]. 
+ """ + # get origin input shape to support onnx dynamic input shape + assert len( + img_metas + ) == 1, 'Only support one input image while in exporting to ONNX' + img_shapes = img_metas[0]['img_shape_for_onnx'] + + rois = proposals + batch_index = torch.arange( + rois.size(0), device=rois.device).float().view(-1, 1, 1).expand( + rois.size(0), rois.size(1), 1) + rois = torch.cat([batch_index, rois[..., :4]], dim=-1) + batch_size = rois.shape[0] + num_proposals_per_img = rois.shape[1] + + # Eliminate the batch dimension + rois = rois.view(-1, 5) + bbox_results = self._bbox_forward(x, rois) + cls_score = bbox_results['cls_score'] + bbox_pred = bbox_results['bbox_pred'] + + # Recover the batch dimension + rois = rois.reshape(batch_size, num_proposals_per_img, rois.size(-1)) + cls_score = cls_score.reshape(batch_size, num_proposals_per_img, + cls_score.size(-1)) + + bbox_pred = bbox_pred.reshape(batch_size, num_proposals_per_img, + bbox_pred.size(-1)) + det_bboxes, det_labels = self.bbox_head.onnx_export( + rois, cls_score, bbox_pred, img_shapes, cfg=rcnn_test_cfg) + + return det_bboxes, det_labels diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/test_mixins.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/test_mixins.py new file mode 100644 index 0000000000000000000000000000000000000000..e4e448134d45eb1355fee32e64b42f18bdd2dca0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/test_mixins.py @@ -0,0 +1,283 @@ +import logging +import sys + +import numpy as np +import torch + +from mmdet.core import (bbox2roi, bbox_mapping, merge_aug_bboxes, + merge_aug_masks, multiclass_nms) + +logger = logging.getLogger(__name__) +if sys.version_info >= (3, 7): + from mmdet.utils.contextmanagers import completed + + +class BBoxTestMixin: + + if sys.version_info >= (3, 7): + + async def async_test_bboxes(self, + x, + img_metas, + proposals, + rcnn_test_cfg, + rescale=False, + **kwargs): + """Asynchronized test for box head without augmentation.""" + rois = bbox2roi(proposals) + roi_feats = self.bbox_roi_extractor( + x[:len(self.bbox_roi_extractor.featmap_strides)], rois) + if self.with_shared_head: + roi_feats = self.shared_head(roi_feats) + sleep_interval = rcnn_test_cfg.get('async_sleep_interval', 0.017) + + async with completed( + __name__, 'bbox_head_forward', + sleep_interval=sleep_interval): + cls_score, bbox_pred = self.bbox_head(roi_feats) + + img_shape = img_metas[0]['img_shape'] + scale_factor = img_metas[0]['scale_factor'] + det_bboxes, det_labels = self.bbox_head.get_bboxes( + rois, + cls_score, + bbox_pred, + img_shape, + scale_factor, + rescale=rescale, + cfg=rcnn_test_cfg) + return det_bboxes, det_labels + + def simple_test_bboxes(self, + x, + img_metas, + proposals, + rcnn_test_cfg, + rescale=False): + """Test only det bboxes without augmentation. + + Args: + x (tuple[Tensor]): Feature maps of all scale level. + img_metas (list[dict]): Image meta info. + proposals (List[Tensor]): Region proposals. + rcnn_test_cfg (obj:`ConfigDict`): `test_cfg` of R-CNN. + rescale (bool): If True, return boxes in original image space. + Default: False. + + Returns: + tuple[list[Tensor], list[Tensor]]: The first list contains + the boxes of the corresponding image in a batch, each + tensor has the shape (num_boxes, 5) and last dimension + 5 represent (tl_x, tl_y, br_x, br_y, score). Each Tensor + in the second list is the labels with shape (num_boxes, ). + The length of both lists should be equal to batch_size. 
+ """ + + rois = bbox2roi(proposals) + bbox_results = self._bbox_forward(x, rois) + img_shapes = tuple(meta['img_shape'] for meta in img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + + # split batch bbox prediction back to each image + cls_score = bbox_results['cls_score'] + bbox_pred = bbox_results['bbox_pred'] + num_proposals_per_img = tuple(len(p) for p in proposals) + rois = rois.split(num_proposals_per_img, 0) + cls_score = cls_score.split(num_proposals_per_img, 0) + + # some detector with_reg is False, bbox_pred will be None + if bbox_pred is not None: + # TODO move this to a sabl_roi_head + # the bbox prediction of some detectors like SABL is not Tensor + if isinstance(bbox_pred, torch.Tensor): + bbox_pred = bbox_pred.split(num_proposals_per_img, 0) + else: + bbox_pred = self.bbox_head.bbox_pred_split( + bbox_pred, num_proposals_per_img) + else: + bbox_pred = (None, ) * len(proposals) + + # apply bbox post-processing to each image individually + det_bboxes = [] + det_labels = [] + for i in range(len(proposals)): + det_bbox, det_label = self.bbox_head.get_bboxes( + rois[i], + cls_score[i], + bbox_pred[i], + img_shapes[i], + scale_factors[i], + rescale=rescale, + cfg=rcnn_test_cfg) + det_bboxes.append(det_bbox) + det_labels.append(det_label) + return det_bboxes, det_labels + + def aug_test_bboxes(self, feats, img_metas, proposal_list, rcnn_test_cfg): + """Test det bboxes with test time augmentation.""" + aug_bboxes = [] + aug_scores = [] + for x, img_meta in zip(feats, img_metas): + # only one image in the batch + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + flip_direction = img_meta[0]['flip_direction'] + # TODO more flexible + proposals = bbox_mapping(proposal_list[0][:, :4], img_shape, + scale_factor, flip, flip_direction) + rois = bbox2roi([proposals]) + bbox_results = self._bbox_forward(x, rois) + bboxes, scores = self.bbox_head.get_bboxes( + rois, + bbox_results['cls_score'], + bbox_results['bbox_pred'], + img_shape, + scale_factor, + rescale=False, + cfg=None) + aug_bboxes.append(bboxes) + aug_scores.append(scores) + # after merging, bboxes will be rescaled to the original image size + merged_bboxes, merged_scores = merge_aug_bboxes( + aug_bboxes, aug_scores, img_metas, rcnn_test_cfg) + det_bboxes, det_labels = multiclass_nms(merged_bboxes, merged_scores, + rcnn_test_cfg.score_thr, + rcnn_test_cfg.nms, + rcnn_test_cfg.max_per_img) + return det_bboxes, det_labels + + +class MaskTestMixin: + + if sys.version_info >= (3, 7): + + async def async_test_mask(self, + x, + img_metas, + det_bboxes, + det_labels, + rescale=False, + mask_test_cfg=None): + """Asynchronized test for mask head without augmentation.""" + # image shape of the first image in the batch (only one) + ori_shape = img_metas[0]['ori_shape'] + scale_factor = img_metas[0]['scale_factor'] + if det_bboxes.shape[0] == 0: + segm_result = [[] for _ in range(self.mask_head.num_classes)] + else: + if rescale and not isinstance(scale_factor, + (float, torch.Tensor)): + scale_factor = det_bboxes.new_tensor(scale_factor) + _bboxes = ( + det_bboxes[:, :4] * + scale_factor if rescale else det_bboxes) + mask_rois = bbox2roi([_bboxes]) + mask_feats = self.mask_roi_extractor( + x[:len(self.mask_roi_extractor.featmap_strides)], + mask_rois) + + if self.with_shared_head: + mask_feats = self.shared_head(mask_feats) + if mask_test_cfg and mask_test_cfg.get('async_sleep_interval'): + sleep_interval = mask_test_cfg['async_sleep_interval'] + 
else: + sleep_interval = 0.035 + async with completed( + __name__, + 'mask_head_forward', + sleep_interval=sleep_interval): + mask_pred = self.mask_head(mask_feats) + segm_result = self.mask_head.get_seg_masks( + mask_pred, _bboxes, det_labels, self.test_cfg, ori_shape, + scale_factor, rescale) + return segm_result + + def simple_test_mask(self, + x, + img_metas, + det_bboxes, + det_labels, + rescale=False): + """Simple test for mask head without augmentation.""" + # image shapes of images in the batch + ori_shapes = tuple(meta['ori_shape'] for meta in img_metas) + scale_factors = tuple(meta['scale_factor'] for meta in img_metas) + + if isinstance(scale_factors[0], float): + logger.warning( + 'Scale factor in img_metas should be a ' + 'ndarray with shape (4,) ' + 'arrange as (factor_w, factor_h, factor_w, factor_h), ' + 'The scale_factor with float type has been deprecated. ') + scale_factors = np.array([scale_factors] * 4, dtype=np.float32) + + num_imgs = len(det_bboxes) + if all(det_bbox.shape[0] == 0 for det_bbox in det_bboxes): + segm_results = [[[] for _ in range(self.mask_head.num_classes)] + for _ in range(num_imgs)] + else: + # if det_bboxes is rescaled to the original image size, we need to + # rescale it back to the testing scale to obtain RoIs. + if rescale: + scale_factors = [ + torch.from_numpy(scale_factor).to(det_bboxes[0].device) + for scale_factor in scale_factors + ] + _bboxes = [ + det_bboxes[i][:, :4] * + scale_factors[i] if rescale else det_bboxes[i][:, :4] + for i in range(len(det_bboxes)) + ] + mask_rois = bbox2roi(_bboxes) + mask_results = self._mask_forward(x, mask_rois) + mask_pred = mask_results['mask_pred'] + # split batch mask prediction back to each image + num_mask_roi_per_img = [len(det_bbox) for det_bbox in det_bboxes] + mask_preds = mask_pred.split(num_mask_roi_per_img, 0) + + # apply mask post-processing to each image individually + segm_results = [] + for i in range(num_imgs): + if det_bboxes[i].shape[0] == 0: + segm_results.append( + [[] for _ in range(self.mask_head.num_classes)]) + else: + segm_result = self.mask_head.get_seg_masks( + mask_preds[i], _bboxes[i], det_labels[i], + self.test_cfg, ori_shapes[i], scale_factors[i], + rescale) + segm_results.append(segm_result) + return segm_results + + def aug_test_mask(self, feats, img_metas, det_bboxes, det_labels): + """Test for mask head with test time augmentation.""" + if det_bboxes.shape[0] == 0: + segm_result = [[] for _ in range(self.mask_head.num_classes)] + else: + aug_masks = [] + for x, img_meta in zip(feats, img_metas): + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + flip_direction = img_meta[0]['flip_direction'] + _bboxes = bbox_mapping(det_bboxes[:, :4], img_shape, + scale_factor, flip, flip_direction) + mask_rois = bbox2roi([_bboxes]) + mask_results = self._mask_forward(x, mask_rois) + # convert to numpy array to save memory + aug_masks.append( + mask_results['mask_pred'].sigmoid().cpu().numpy()) + merged_masks = merge_aug_masks(aug_masks, img_metas, self.test_cfg) + + ori_shape = img_metas[0][0]['ori_shape'] + scale_factor = det_bboxes.new_ones(4) + segm_result = self.mask_head.get_seg_masks( + merged_masks, + det_bboxes, + det_labels, + self.test_cfg, + ori_shape, + scale_factor=scale_factor, + rescale=False) + return segm_result diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/trident_roi_head.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/trident_roi_head.py new 
file mode 100644 index 0000000000000000000000000000000000000000..245569e50b45cc8e21ba8e7210edf4bd0c7f27c5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/roi_heads/trident_roi_head.py @@ -0,0 +1,119 @@ +import torch +from mmcv.ops import batched_nms + +from mmdet.core import (bbox2result, bbox2roi, bbox_mapping, merge_aug_bboxes, + multiclass_nms) +from mmdet.models.roi_heads.standard_roi_head import StandardRoIHead +from ..builder import HEADS + + +@HEADS.register_module() +class TridentRoIHead(StandardRoIHead): + """Trident roi head. + + Args: + num_branch (int): Number of branches in TridentNet. + test_branch_idx (int): In inference, all 3 branches will be used + if `test_branch_idx==-1`, otherwise only branch with index + `test_branch_idx` will be used. + """ + + def __init__(self, num_branch, test_branch_idx, **kwargs): + self.num_branch = num_branch + self.test_branch_idx = test_branch_idx + super(TridentRoIHead, self).__init__(**kwargs) + + def merge_trident_bboxes(self, trident_det_bboxes, trident_det_labels): + """Merge bbox predictions of each branch.""" + if trident_det_bboxes.numel() == 0: + det_bboxes = trident_det_bboxes.new_zeros((0, 5)) + det_labels = trident_det_bboxes.new_zeros((0, ), dtype=torch.long) + else: + nms_bboxes = trident_det_bboxes[:, :4] + nms_scores = trident_det_bboxes[:, 4].contiguous() + nms_inds = trident_det_labels + nms_cfg = self.test_cfg['nms'] + det_bboxes, keep = batched_nms(nms_bboxes, nms_scores, nms_inds, + nms_cfg) + det_labels = trident_det_labels[keep] + if self.test_cfg['max_per_img'] > 0: + det_labels = det_labels[:self.test_cfg['max_per_img']] + det_bboxes = det_bboxes[:self.test_cfg['max_per_img']] + + return det_bboxes, det_labels + + def simple_test(self, + x, + proposal_list, + img_metas, + proposals=None, + rescale=False): + """Test without augmentation as follows: + + 1. Compute prediction bbox and label per branch. + 2. Merge predictions of each branch according to scores of + bboxes, i.e., bboxes with higher score are kept to give + top-k prediction. + """ + assert self.with_bbox, 'Bbox head must be implemented.' 
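+        # The batch holds `num_branch` entries per input image, so the
+        # per-branch detections below are grouped in chunks of num_branch
+        # and fused by merge_trident_bboxes (NMS across branches).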
+ det_bboxes_list, det_labels_list = self.simple_test_bboxes( + x, img_metas, proposal_list, self.test_cfg, rescale=rescale) + num_branch = self.num_branch if self.test_branch_idx == -1 else 1 + for _ in range(len(det_bboxes_list)): + if det_bboxes_list[_].shape[0] == 0: + det_bboxes_list[_] = det_bboxes_list[_].new_empty((0, 5)) + det_bboxes, det_labels = [], [] + for i in range(len(img_metas) // num_branch): + det_result = self.merge_trident_bboxes( + torch.cat(det_bboxes_list[i * num_branch:(i + 1) * + num_branch]), + torch.cat(det_labels_list[i * num_branch:(i + 1) * + num_branch])) + det_bboxes.append(det_result[0]) + det_labels.append(det_result[1]) + + bbox_results = [ + bbox2result(det_bboxes[i], det_labels[i], + self.bbox_head.num_classes) + for i in range(len(det_bboxes)) + ] + return bbox_results + + def aug_test_bboxes(self, feats, img_metas, proposal_list, rcnn_test_cfg): + """Test det bboxes with test time augmentation.""" + aug_bboxes = [] + aug_scores = [] + for x, img_meta in zip(feats, img_metas): + # only one image in the batch + img_shape = img_meta[0]['img_shape'] + scale_factor = img_meta[0]['scale_factor'] + flip = img_meta[0]['flip'] + flip_direction = img_meta[0]['flip_direction'] + + trident_bboxes, trident_scores = [], [] + for branch_idx in range(len(proposal_list)): + proposals = bbox_mapping(proposal_list[0][:, :4], img_shape, + scale_factor, flip, flip_direction) + rois = bbox2roi([proposals]) + bbox_results = self._bbox_forward(x, rois) + bboxes, scores = self.bbox_head.get_bboxes( + rois, + bbox_results['cls_score'], + bbox_results['bbox_pred'], + img_shape, + scale_factor, + rescale=False, + cfg=None) + trident_bboxes.append(bboxes) + trident_scores.append(scores) + + aug_bboxes.append(torch.cat(trident_bboxes, 0)) + aug_scores.append(torch.cat(trident_scores, 0)) + # after merging, bboxes will be rescaled to the original image size + merged_bboxes, merged_scores = merge_aug_bboxes( + aug_bboxes, aug_scores, img_metas, rcnn_test_cfg) + det_bboxes, det_labels = multiclass_nms(merged_bboxes, merged_scores, + rcnn_test_cfg.score_thr, + rcnn_test_cfg.nms, + rcnn_test_cfg.max_per_img) + return det_bboxes, det_labels diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..7c9a36d74c07198ca5827e22bafaff904b2c691d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/__init__.py @@ -0,0 +1,20 @@ +from .builder import build_linear_layer, build_transformer +from .gaussian_target import gaussian_radius, gen_gaussian_target +from .inverted_residual import InvertedResidual +from .make_divisible import make_divisible +from .normed_predictor import NormedConv2d, NormedLinear +from .positional_encoding import (LearnedPositionalEncoding, + SinePositionalEncoding) +from .res_layer import ResLayer, SimplifiedBasicBlock +from .se_layer import SELayer +from .transformer import (DetrTransformerDecoder, DetrTransformerDecoderLayer, + DynamicConv, Transformer) + +__all__ = [ + 'ResLayer', 'gaussian_radius', 'gen_gaussian_target', + 'DetrTransformerDecoderLayer', 'DetrTransformerDecoder', 'Transformer', + 'build_transformer', 'build_linear_layer', 'SinePositionalEncoding', + 'LearnedPositionalEncoding', 'DynamicConv', 'SimplifiedBasicBlock', + 'NormedLinear', 'NormedConv2d', 'make_divisible', 'InvertedResidual', + 'SELayer' +] diff --git 
a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/builder.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/builder.py new file mode 100644 index 0000000000000000000000000000000000000000..fdcff090f9253dfbcb0ad5c290a0028abad5f35c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/builder.py @@ -0,0 +1,46 @@ +import torch.nn as nn +from mmcv.utils import Registry, build_from_cfg + +TRANSFORMER = Registry('Transformer') +LINEAR_LAYERS = Registry('linear layers') + + +def build_transformer(cfg, default_args=None): + """Builder for Transformer.""" + return build_from_cfg(cfg, TRANSFORMER, default_args) + + +LINEAR_LAYERS.register_module('Linear', module=nn.Linear) + + +def build_linear_layer(cfg, *args, **kwargs): + """Build linear layer. + Args: + cfg (None or dict): The linear layer config, which should contain: + - type (str): Layer type. + - layer args: Args needed to instantiate an linear layer. + args (argument list): Arguments passed to the `__init__` + method of the corresponding linear layer. + kwargs (keyword arguments): Keyword arguments passed to the `__init__` + method of the corresponding linear layer. + Returns: + nn.Module: Created linear layer. + """ + if cfg is None: + cfg_ = dict(type='Linear') + else: + if not isinstance(cfg, dict): + raise TypeError('cfg must be a dict') + if 'type' not in cfg: + raise KeyError('the cfg dict must contain the key "type"') + cfg_ = cfg.copy() + + layer_type = cfg_.pop('type') + if layer_type not in LINEAR_LAYERS: + raise KeyError(f'Unrecognized linear type {layer_type}') + else: + linear_layer = LINEAR_LAYERS.get(layer_type) + + layer = linear_layer(*args, **kwargs, **cfg_) + + return layer diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/gaussian_target.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/gaussian_target.py new file mode 100644 index 0000000000000000000000000000000000000000..2e6d8b81bc134e9f5a23477a5b091bbbec2741b4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/gaussian_target.py @@ -0,0 +1,267 @@ +from math import sqrt + +import torch +import torch.nn.functional as F + + +def gaussian2D(radius, sigma=1, dtype=torch.float32, device='cpu'): + """Generate 2D gaussian kernel. + + Args: + radius (int): Radius of gaussian kernel. + sigma (int): Sigma of gaussian function. Default: 1. + dtype (torch.dtype): Dtype of gaussian tensor. Default: torch.float32. + device (str): Device of gaussian tensor. Default: 'cpu'. + + Returns: + h (Tensor): Gaussian kernel with a + ``(2 * radius + 1) * (2 * radius + 1)`` shape. + """ + x = torch.arange( + -radius, radius + 1, dtype=dtype, device=device).view(1, -1) + y = torch.arange( + -radius, radius + 1, dtype=dtype, device=device).view(-1, 1) + + h = (-(x * x + y * y) / (2 * sigma * sigma)).exp() + + h[h < torch.finfo(h.dtype).eps * h.max()] = 0 + return h + + +def gen_gaussian_target(heatmap, center, radius, k=1): + """Generate 2D gaussian heatmap. + + Args: + heatmap (Tensor): Input heatmap, the gaussian kernel will cover on + it and maintain the max value. + center (list[int]): Coord of gaussian kernel's center. + radius (int): Radius of gaussian kernel. + k (int): Coefficient of gaussian kernel. Default: 1. + + Returns: + out_heatmap (Tensor): Updated heatmap covered by gaussian kernel. 
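+
+    Example (a minimal usage sketch; the heatmap size, centre and radius
+    are assumed for illustration):
+        >>> heatmap = torch.zeros((128, 128))
+        >>> heatmap = gen_gaussian_target(heatmap, [64, 64], radius=2)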
+ """ + diameter = 2 * radius + 1 + gaussian_kernel = gaussian2D( + radius, sigma=diameter / 6, dtype=heatmap.dtype, device=heatmap.device) + + x, y = center + + height, width = heatmap.shape[:2] + + left, right = min(x, radius), min(width - x, radius + 1) + top, bottom = min(y, radius), min(height - y, radius + 1) + + masked_heatmap = heatmap[y - top:y + bottom, x - left:x + right] + masked_gaussian = gaussian_kernel[radius - top:radius + bottom, + radius - left:radius + right] + out_heatmap = heatmap + torch.max( + masked_heatmap, + masked_gaussian * k, + out=out_heatmap[y - top:y + bottom, x - left:x + right]) + + return out_heatmap + + +def gaussian_radius(det_size, min_overlap): + r"""Generate 2D gaussian radius. + + This function is modified from the `official github repo + `_. + + Given ``min_overlap``, radius could computed by a quadratic equation + according to Vieta's formulas. + + There are 3 cases for computing gaussian radius, details are following: + + - Explanation of figure: ``lt`` and ``br`` indicates the left-top and + bottom-right corner of ground truth box. ``x`` indicates the + generated corner at the limited position when ``radius=r``. + + - Case1: one corner is inside the gt box and the other is outside. + + .. code:: text + + |< width >| + + lt-+----------+ - + | | | ^ + +--x----------+--+ + | | | | + | | | | height + | | overlap | | + | | | | + | | | | v + +--+---------br--+ - + | | | + +----------+--x + + To ensure IoU of generated box and gt box is larger than ``min_overlap``: + + .. math:: + \cfrac{(w-r)*(h-r)}{w*h+(w+h)r-r^2} \ge {iou} \quad\Rightarrow\quad + {r^2-(w+h)r+\cfrac{1-iou}{1+iou}*w*h} \ge 0 \\ + {a} = 1,\quad{b} = {-(w+h)},\quad{c} = {\cfrac{1-iou}{1+iou}*w*h} + {r} \le \cfrac{-b-\sqrt{b^2-4*a*c}}{2*a} + + - Case2: both two corners are inside the gt box. + + .. code:: text + + |< width >| + + lt-+----------+ - + | | | ^ + +--x-------+ | + | | | | + | |overlap| | height + | | | | + | +-------x--+ + | | | v + +----------+-br - + + To ensure IoU of generated box and gt box is larger than ``min_overlap``: + + .. math:: + \cfrac{(w-2*r)*(h-2*r)}{w*h} \ge {iou} \quad\Rightarrow\quad + {4r^2-2(w+h)r+(1-iou)*w*h} \ge 0 \\ + {a} = 4,\quad {b} = {-2(w+h)},\quad {c} = {(1-iou)*w*h} + {r} \le \cfrac{-b-\sqrt{b^2-4*a*c}}{2*a} + + - Case3: both two corners are outside the gt box. + + .. code:: text + + |< width >| + + x--+----------------+ + | | | + +-lt-------------+ | - + | | | | ^ + | | | | + | | overlap | | height + | | | | + | | | | v + | +------------br--+ - + | | | + +----------------+--x + + To ensure IoU of generated box and gt box is larger than ``min_overlap``: + + .. math:: + \cfrac{w*h}{(w+2*r)*(h+2*r)} \ge {iou} \quad\Rightarrow\quad + {4*iou*r^2+2*iou*(w+h)r+(iou-1)*w*h} \le 0 \\ + {a} = {4*iou},\quad {b} = {2*iou*(w+h)},\quad {c} = {(iou-1)*w*h} \\ + {r} \le \cfrac{-b+\sqrt{b^2-4*a*c}}{2*a} + + Args: + det_size (list[int]): Shape of object. + min_overlap (float): Min IoU with ground truth for boxes generated by + keypoints inside the gaussian kernel. + + Returns: + radius (int): Radius of gaussian kernel. 
+ """ + height, width = det_size + + a1 = 1 + b1 = (height + width) + c1 = width * height * (1 - min_overlap) / (1 + min_overlap) + sq1 = sqrt(b1**2 - 4 * a1 * c1) + r1 = (b1 - sq1) / (2 * a1) + + a2 = 4 + b2 = 2 * (height + width) + c2 = (1 - min_overlap) * width * height + sq2 = sqrt(b2**2 - 4 * a2 * c2) + r2 = (b2 - sq2) / (2 * a2) + + a3 = 4 * min_overlap + b3 = -2 * min_overlap * (height + width) + c3 = (min_overlap - 1) * width * height + sq3 = sqrt(b3**2 - 4 * a3 * c3) + r3 = (b3 + sq3) / (2 * a3) + return min(r1, r2, r3) + + +def get_local_maximum(heat, kernel=3): + """Extract local maximum pixel with given kernal. + + Args: + heat (Tensor): Target heatmap. + kernel (int): Kernel size of max pooling. Default: 3. + + Returns: + heat (Tensor): A heatmap where local maximum pixels maintain its + own value and other positions are 0. + """ + pad = (kernel - 1) // 2 + hmax = F.max_pool2d(heat, kernel, stride=1, padding=pad) + keep = (hmax == heat).float() + return heat * keep + + +def get_topk_from_heatmap(scores, k=20): + """Get top k positions from heatmap. + + Args: + scores (Tensor): Target heatmap with shape + [batch, num_classes, height, width]. + k (int): Target number. Default: 20. + + Returns: + tuple[torch.Tensor]: Scores, indexes, categories and coords of + topk keypoint. Containing following Tensors: + + - topk_scores (Tensor): Max scores of each topk keypoint. + - topk_inds (Tensor): Indexes of each topk keypoint. + - topk_clses (Tensor): Categories of each topk keypoint. + - topk_ys (Tensor): Y-coord of each topk keypoint. + - topk_xs (Tensor): X-coord of each topk keypoint. + """ + batch, _, height, width = scores.size() + topk_scores, topk_inds = torch.topk(scores.view(batch, -1), k) + topk_clses = topk_inds // (height * width) + topk_inds = topk_inds % (height * width) + topk_ys = topk_inds // width + topk_xs = (topk_inds % width).int().float() + return topk_scores, topk_inds, topk_clses, topk_ys, topk_xs + + +def gather_feat(feat, ind, mask=None): + """Gather feature according to index. + + Args: + feat (Tensor): Target feature map. + ind (Tensor): Target coord index. + mask (Tensor | None): Mask of feature map. Default: None. + + Returns: + feat (Tensor): Gathered feature. + """ + dim = feat.size(2) + ind = ind.unsqueeze(2).repeat(1, 1, dim) + feat = feat.gather(1, ind) + if mask is not None: + mask = mask.unsqueeze(2).expand_as(feat) + feat = feat[mask] + feat = feat.view(-1, dim) + return feat + + +def transpose_and_gather_feat(feat, ind): + """Transpose and gather feature according to index. + + Args: + feat (Tensor): Target feature map. + ind (Tensor): Target coord index. + + Returns: + feat (Tensor): Transposed and gathered feature. + """ + feat = feat.permute(0, 2, 3, 1).contiguous() + feat = feat.view(feat.size(0), -1, feat.size(3)) + feat = gather_feat(feat, ind) + return feat diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/inverted_residual.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/inverted_residual.py new file mode 100644 index 0000000000000000000000000000000000000000..deb139d8b4cdecb2b36e887246d94e8413ccf51e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/inverted_residual.py @@ -0,0 +1,123 @@ +import torch.utils.checkpoint as cp +from mmcv.cnn import ConvModule +from mmcv.runner import BaseModule + +from .se_layer import SELayer + + +class InvertedResidual(BaseModule): + """Inverted Residual Block. + + Args: + in_channels (int): The input channels of this Module. 
+        out_channels (int): The output channels of this Module.
+        mid_channels (int): The input channels of the depthwise convolution.
+        kernel_size (int): The kernel size of the depthwise convolution.
+            Default: 3.
+        stride (int): The stride of the depthwise convolution. Default: 1.
+        se_cfg (dict): Config dict for se layer. Default: None, which means no
+            se layer.
+        with_expand_conv (bool): Use expand conv or not. If set False,
+            mid_channels must be the same as in_channels.
+            Default: True.
+        conv_cfg (dict): Config dict for convolution layer. Default: None,
+            which means using conv2d.
+        norm_cfg (dict): Config dict for normalization layer.
+            Default: dict(type='BN').
+        act_cfg (dict): Config dict for activation layer.
+            Default: dict(type='ReLU').
+        with_cp (bool): Use checkpoint or not. Using checkpoint will save some
+            memory while slowing down the training speed. Default: False.
+        init_cfg (dict or list[dict], optional): Initialization config dict.
+            Default: None
+
+    Returns:
+        Tensor: The output tensor.
+    """
+
+    def __init__(self,
+                 in_channels,
+                 out_channels,
+                 mid_channels,
+                 kernel_size=3,
+                 stride=1,
+                 se_cfg=None,
+                 with_expand_conv=True,
+                 conv_cfg=None,
+                 norm_cfg=dict(type='BN'),
+                 act_cfg=dict(type='ReLU'),
+                 with_cp=False,
+                 init_cfg=None):
+        super(InvertedResidual, self).__init__(init_cfg)
+        self.with_res_shortcut = (stride == 1 and in_channels == out_channels)
+        assert stride in [1, 2], f'stride must be in [1, 2]. ' \
+            f'But received {stride}.'
+        self.with_cp = with_cp
+        self.with_se = se_cfg is not None
+        self.with_expand_conv = with_expand_conv
+
+        if self.with_se:
+            assert isinstance(se_cfg, dict)
+        if not self.with_expand_conv:
+            assert mid_channels == in_channels
+
+        if self.with_expand_conv:
+            self.expand_conv = ConvModule(
+                in_channels=in_channels,
+                out_channels=mid_channels,
+                kernel_size=1,
+                stride=1,
+                padding=0,
+                conv_cfg=conv_cfg,
+                norm_cfg=norm_cfg,
+                act_cfg=act_cfg)
+        self.depthwise_conv = ConvModule(
+            in_channels=mid_channels,
+            out_channels=mid_channels,
+            kernel_size=kernel_size,
+            stride=stride,
+            padding=kernel_size // 2,
+            groups=mid_channels,
+            conv_cfg=conv_cfg,
+            norm_cfg=norm_cfg,
+            act_cfg=act_cfg)
+
+        if self.with_se:
+            self.se = SELayer(**se_cfg)
+
+        self.linear_conv = ConvModule(
+            in_channels=mid_channels,
+            out_channels=out_channels,
+            kernel_size=1,
+            stride=1,
+            padding=0,
+            conv_cfg=conv_cfg,
+            norm_cfg=norm_cfg,
+            act_cfg=None)
+
+    def forward(self, x):
+
+        def _inner_forward(x):
+            out = x
+
+            if self.with_expand_conv:
+                out = self.expand_conv(out)
+
+            out = self.depthwise_conv(out)
+
+            if self.with_se:
+                out = self.se(out)
+
+            out = self.linear_conv(out)
+
+            if self.with_res_shortcut:
+                return x + out
+            else:
+                return out
+
+        if self.with_cp and x.requires_grad:
+            out = cp.checkpoint(_inner_forward, x)
+        else:
+            out = _inner_forward(x)
+
+        return out
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/make_divisible.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/make_divisible.py
new file mode 100644
index 0000000000000000000000000000000000000000..75ad756052529f52fe83bb95dd1f0ecfc9a13078
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/make_divisible.py
@@ -0,0 +1,27 @@
+def make_divisible(value, divisor, min_value=None, min_ratio=0.9):
+    """Make divisible function.
+
+    This function rounds the channel number to the nearest value that can be
+    divisible by the divisor. It is taken from the original tf repo.
+    It ensures that all layers have a channel number that is divisible by
+    divisor. It can be seen here: https://github.com/tensorflow/models/blob/master/research/slim/nets/mobilenet/mobilenet.py  # noqa
+
+    Args:
+        value (int): The original channel number.
+        divisor (int): The divisor to fully divide the channel number.
+        min_value (int): The minimum value of the output channel.
+            Default: None, which means the minimum value equals the divisor.
+        min_ratio (float): The minimum ratio of the rounded channel number to
+            the original channel number. Default: 0.9.
+
+    Returns:
+        int: The modified output channel number.
+    """
+
+    if min_value is None:
+        min_value = divisor
+    new_value = max(min_value, int(value + divisor / 2) // divisor * divisor)
+    # Make sure that round down does not go down by more than (1-min_ratio).
+    if new_value < min_ratio * value:
+        new_value += divisor
+    return new_value
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/normed_predictor.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/normed_predictor.py
new file mode 100644
index 0000000000000000000000000000000000000000..3b1b231260915473ffe140df8f47e7dfcdf3f6e0
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/normed_predictor.py
@@ -0,0 +1,87 @@
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+from mmcv.cnn import CONV_LAYERS
+
+from .builder import LINEAR_LAYERS
+
+
+@LINEAR_LAYERS.register_module(name='NormedLinear')
+class NormedLinear(nn.Linear):
+    """Normalized Linear Layer.
+
+    Args:
+        tempearture (float, optional): Temperature term. Default to 20.
+        power (float, optional): Power term. Default to 1.0.
+        eps (float, optional): The minimal value of divisor to
+            keep numerical stability. Default to 1e-6.
+    """
+
+    def __init__(self, *args, tempearture=20, power=1.0, eps=1e-6, **kwargs):
+        super(NormedLinear, self).__init__(*args, **kwargs)
+        self.tempearture = tempearture
+        self.power = power
+        self.eps = eps
+        self.init_weights()
+
+    def init_weights(self):
+        nn.init.normal_(self.weight, mean=0, std=0.01)
+        if self.bias is not None:
+            nn.init.constant_(self.bias, 0)
+
+    def forward(self, x):
+        weight_ = self.weight / (
+            self.weight.norm(dim=1, keepdim=True).pow(self.power) + self.eps)
+        x_ = x / (x.norm(dim=1, keepdim=True).pow(self.power) + self.eps)
+        x_ = x_ * self.tempearture
+
+        return F.linear(x_, weight_, self.bias)
+
+
+@CONV_LAYERS.register_module(name='NormedConv2d')
+class NormedConv2d(nn.Conv2d):
+    """Normalized Conv2d Layer.
+
+    Args:
+        tempearture (float, optional): Temperature term. Default to 20.
+        power (float, optional): Power term. Default to 1.0.
+        eps (float, optional): The minimal value of divisor to
+            keep numerical stability. Default to 1e-6.
+        norm_over_kernel (bool, optional): Normalize over kernel.
+            Default to False.
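+
+    Note (illustrative): the weight and the input are each divided by their
+    channel-wise L2 norm raised to ``power`` before the convolution, so with
+    ``power=1.0`` the output approximates a cosine similarity scaled by
+    ``tempearture``.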
+ """ + + def __init__(self, + *args, + tempearture=20, + power=1.0, + eps=1e-6, + norm_over_kernel=False, + **kwargs): + super(NormedConv2d, self).__init__(*args, **kwargs) + self.tempearture = tempearture + self.power = power + self.norm_over_kernel = norm_over_kernel + self.eps = eps + + def forward(self, x): + if not self.norm_over_kernel: + weight_ = self.weight / ( + self.weight.norm(dim=1, keepdim=True).pow(self.power) + + self.eps) + else: + weight_ = self.weight / ( + self.weight.view(self.weight.size(0), -1).norm( + dim=1, keepdim=True).pow(self.power)[..., None, None] + + self.eps) + x_ = x / (x.norm(dim=1, keepdim=True).pow(self.power) + self.eps) + x_ = x_ * self.tempearture + + if hasattr(self, 'conv2d_forward'): + x_ = self.conv2d_forward(x_, weight_) + else: + if torch.__version__ >= '1.8': + x_ = self._conv_forward(x_, weight_, self.bias) + else: + x_ = self._conv_forward(x_, weight_) + return x_ diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/positional_encoding.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/positional_encoding.py new file mode 100644 index 0000000000000000000000000000000000000000..19cb83b66ca0d902282c5882df9bcd371c56c744 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/positional_encoding.py @@ -0,0 +1,162 @@ +import math + +import torch +import torch.nn as nn +from mmcv.cnn.bricks.transformer import POSITIONAL_ENCODING +from mmcv.runner import BaseModule + + +@POSITIONAL_ENCODING.register_module() +class SinePositionalEncoding(BaseModule): + """Position encoding with sine and cosine functions. + + See `End-to-End Object Detection with Transformers + `_ for details. + + Args: + num_feats (int): The feature dimension for each position + along x-axis or y-axis. Note the final returned dimension + for each position is 2 times of this value. + temperature (int, optional): The temperature used for scaling + the position embedding. Defaults to 10000. + normalize (bool, optional): Whether to normalize the position + embedding. Defaults to False. + scale (float, optional): A scale factor that scales the position + embedding. The scale will be used only when `normalize` is True. + Defaults to 2*pi. + eps (float, optional): A value added to the denominator for + numerical stability. Defaults to 1e-6. + offset (float): offset add to embed when do the normalization. + Defaults to 0. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None + """ + + def __init__(self, + num_feats, + temperature=10000, + normalize=False, + scale=2 * math.pi, + eps=1e-6, + offset=0., + init_cfg=None): + super(SinePositionalEncoding, self).__init__(init_cfg) + if normalize: + assert isinstance(scale, (float, int)), 'when normalize is set,' \ + 'scale should be provided and in float or int type, ' \ + f'found {type(scale)}' + self.num_feats = num_feats + self.temperature = temperature + self.normalize = normalize + self.scale = scale + self.eps = eps + self.offset = offset + + def forward(self, mask): + """Forward function for `SinePositionalEncoding`. + + Args: + mask (Tensor): ByteTensor mask. Non-zero values representing + ignored positions, while zero values means valid positions + for this image. Shape [bs, h, w]. + + Returns: + pos (Tensor): Returned position embedding with shape + [bs, num_feats*2, h, w]. + """ + # For convenience of exporting to ONNX, it's required to convert + # `masks` from bool to int. 
+ mask = mask.to(torch.int)
+ not_mask = 1 - mask # logical_not
+ y_embed = not_mask.cumsum(1, dtype=torch.float32)
+ x_embed = not_mask.cumsum(2, dtype=torch.float32)
+ if self.normalize:
+ y_embed = (y_embed + self.offset) / \
+ (y_embed[:, -1:, :] + self.eps) * self.scale
+ x_embed = (x_embed + self.offset) / \
+ (x_embed[:, :, -1:] + self.eps) * self.scale
+ dim_t = torch.arange(
+ self.num_feats, dtype=torch.float32, device=mask.device)
+ dim_t = self.temperature**(2 * (dim_t // 2) / self.num_feats)
+ pos_x = x_embed[:, :, :, None] / dim_t
+ pos_y = y_embed[:, :, :, None] / dim_t
+ # use `view` instead of `flatten` for dynamically exporting to ONNX
+ B, H, W = mask.size()
+ pos_x = torch.stack(
+ (pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()),
+ dim=4).view(B, H, W, -1)
+ pos_y = torch.stack(
+ (pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()),
+ dim=4).view(B, H, W, -1)
+ pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2)
+ return pos
+
+ def __repr__(self):
+ """str: a string that describes the module"""
+ repr_str = self.__class__.__name__
+ repr_str += f'(num_feats={self.num_feats}, '
+ repr_str += f'temperature={self.temperature}, '
+ repr_str += f'normalize={self.normalize}, '
+ repr_str += f'scale={self.scale}, '
+ repr_str += f'eps={self.eps})'
+ return repr_str
+
+
+@POSITIONAL_ENCODING.register_module()
+class LearnedPositionalEncoding(BaseModule):
+ """Position embedding with learnable embedding weights.
+
+ Args:
+ num_feats (int): The feature dimension for each position
+ along x-axis or y-axis. The final returned dimension for
+ each position is 2 times this value.
+ row_num_embed (int, optional): The dictionary size of row embeddings.
+ Default: 50.
+ col_num_embed (int, optional): The dictionary size of col embeddings.
+ Default: 50.
+ init_cfg (dict or list[dict], optional): Initialization config dict.
+ """
+
+ def __init__(self,
+ num_feats,
+ row_num_embed=50,
+ col_num_embed=50,
+ init_cfg=dict(type='Uniform', layer='Embedding')):
+ super(LearnedPositionalEncoding, self).__init__(init_cfg)
+ self.row_embed = nn.Embedding(row_num_embed, num_feats)
+ self.col_embed = nn.Embedding(col_num_embed, num_feats)
+ self.num_feats = num_feats
+ self.row_num_embed = row_num_embed
+ self.col_num_embed = col_num_embed
+
+ def forward(self, mask):
+ """Forward function for `LearnedPositionalEncoding`.
+
+ Args:
+ mask (Tensor): ByteTensor mask. Non-zero values represent
+ ignored positions, while zero values mean valid positions
+ for this image. Shape [bs, h, w].
+
+ Returns:
+ pos (Tensor): Returned position embedding with shape
+ [bs, num_feats*2, h, w].
+ """ + h, w = mask.shape[-2:] + x = torch.arange(w, device=mask.device) + y = torch.arange(h, device=mask.device) + x_embed = self.col_embed(x) + y_embed = self.row_embed(y) + pos = torch.cat( + (x_embed.unsqueeze(0).repeat(h, 1, 1), y_embed.unsqueeze(1).repeat( + 1, w, 1)), + dim=-1).permute(2, 0, + 1).unsqueeze(0).repeat(mask.shape[0], 1, 1, 1) + return pos + + def __repr__(self): + """str: a string that describes the module""" + repr_str = self.__class__.__name__ + repr_str += f'(num_feats={self.num_feats}, ' + repr_str += f'row_num_embed={self.row_num_embed}, ' + repr_str += f'col_num_embed={self.col_num_embed})' + return repr_str diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/res_layer.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/res_layer.py new file mode 100644 index 0000000000000000000000000000000000000000..825880d74c4720fcc77fcbf723259c5f86e119fa --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/res_layer.py @@ -0,0 +1,189 @@ +from mmcv.cnn import build_conv_layer, build_norm_layer +from mmcv.runner import BaseModule, Sequential +from torch import nn as nn + + +class ResLayer(Sequential): + """ResLayer to build ResNet style backbone. + + Args: + block (nn.Module): block used to build ResLayer. + inplanes (int): inplanes of block. + planes (int): planes of block. + num_blocks (int): number of blocks. + stride (int): stride of the first block. Default: 1 + avg_down (bool): Use AvgPool instead of stride conv when + downsampling in the bottleneck. Default: False + conv_cfg (dict): dictionary to construct and config conv layer. + Default: None + norm_cfg (dict): dictionary to construct and config norm layer. + Default: dict(type='BN') + downsample_first (bool): Downsample at the first block or last block. + False for Hourglass, True for ResNet. Default: True + """ + + def __init__(self, + block, + inplanes, + planes, + num_blocks, + stride=1, + avg_down=False, + conv_cfg=None, + norm_cfg=dict(type='BN'), + downsample_first=True, + **kwargs): + self.block = block + + downsample = None + if stride != 1 or inplanes != planes * block.expansion: + downsample = [] + conv_stride = stride + if avg_down: + conv_stride = 1 + downsample.append( + nn.AvgPool2d( + kernel_size=stride, + stride=stride, + ceil_mode=True, + count_include_pad=False)) + downsample.extend([ + build_conv_layer( + conv_cfg, + inplanes, + planes * block.expansion, + kernel_size=1, + stride=conv_stride, + bias=False), + build_norm_layer(norm_cfg, planes * block.expansion)[1] + ]) + downsample = nn.Sequential(*downsample) + + layers = [] + if downsample_first: + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=stride, + downsample=downsample, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + **kwargs)) + inplanes = planes * block.expansion + for _ in range(1, num_blocks): + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + **kwargs)) + + else: # downsample_first=False is for HourglassModule + for _ in range(num_blocks - 1): + layers.append( + block( + inplanes=inplanes, + planes=inplanes, + stride=1, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + **kwargs)) + layers.append( + block( + inplanes=inplanes, + planes=planes, + stride=stride, + downsample=downsample, + conv_cfg=conv_cfg, + norm_cfg=norm_cfg, + **kwargs)) + super(ResLayer, self).__init__(*layers) + + +class SimplifiedBasicBlock(BaseModule): + """Simplified version of original basic residual block. 
This is used in
+ `SCNet <https://arxiv.org/abs/2012.10150>`_.
+
+ - Norm layer is now optional
+ - Last ReLU in forward function is removed
+ """
+ expansion = 1
+
+ def __init__(self,
+ inplanes,
+ planes,
+ stride=1,
+ dilation=1,
+ downsample=None,
+ style='pytorch',
+ with_cp=False,
+ conv_cfg=None,
+ norm_cfg=dict(type='BN'),
+ dcn=None,
+ plugins=None,
+ init_cfg=None):
+ super(SimplifiedBasicBlock, self).__init__(init_cfg)
+ assert dcn is None, 'Not implemented yet.'
+ assert plugins is None, 'Not implemented yet.'
+ assert not with_cp, 'Not implemented yet.'
+ self.with_norm = norm_cfg is not None
+ with_bias = norm_cfg is None
+ self.conv1 = build_conv_layer(
+ conv_cfg,
+ inplanes,
+ planes,
+ 3,
+ stride=stride,
+ padding=dilation,
+ dilation=dilation,
+ bias=with_bias)
+ if self.with_norm:
+ self.norm1_name, norm1 = build_norm_layer(
+ norm_cfg, planes, postfix=1)
+ self.add_module(self.norm1_name, norm1)
+ self.conv2 = build_conv_layer(
+ conv_cfg, planes, planes, 3, padding=1, bias=with_bias)
+ if self.with_norm:
+ self.norm2_name, norm2 = build_norm_layer(
+ norm_cfg, planes, postfix=2)
+ self.add_module(self.norm2_name, norm2)
+
+ self.relu = nn.ReLU(inplace=True)
+ self.downsample = downsample
+ self.stride = stride
+ self.dilation = dilation
+ self.with_cp = with_cp
+
+ @property
+ def norm1(self):
+ """nn.Module: normalization layer after the first convolution layer"""
+ return getattr(self, self.norm1_name) if self.with_norm else None
+
+ @property
+ def norm2(self):
+ """nn.Module: normalization layer after the second convolution layer"""
+ return getattr(self, self.norm2_name) if self.with_norm else None
+
+ def forward(self, x):
+ """Forward function."""
+
+ identity = x
+
+ out = self.conv1(x)
+ if self.with_norm:
+ out = self.norm1(out)
+ out = self.relu(out)
+
+ out = self.conv2(out)
+ if self.with_norm:
+ out = self.norm2(out)
+
+ if self.downsample is not None:
+ identity = self.downsample(x)
+
+ out += identity
+
+ return out
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/se_layer.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/se_layer.py
new file mode 100644
index 0000000000000000000000000000000000000000..877346cb868c1cc151ded82a5e1696fecc1f8f4f
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/se_layer.py
@@ -0,0 +1,57 @@
+import mmcv
+import torch.nn as nn
+from mmcv.cnn import ConvModule
+from mmcv.runner import BaseModule
+
+
+class SELayer(BaseModule):
+ """Squeeze-and-Excitation Module.
+
+ Args:
+ channels (int): The input (and output) channels of the SE layer.
+ ratio (int): Squeeze ratio in SELayer, the intermediate channel will be
+ ``int(channels/ratio)``. Default: 16.
+ conv_cfg (None or dict): Config dict for convolution layer.
+ Default: None, which means using conv2d.
+ act_cfg (dict or Sequence[dict]): Config dict for activation layer.
+ If act_cfg is a dict, two activation layers will be configured
+ by this dict. If act_cfg is a sequence of dicts, the first
+ activation layer will be configured by the first dict and the
+ second activation layer will be configured by the second dict.
+ Default: (dict(type='ReLU'), dict(type='Sigmoid'))
+ init_cfg (dict or list[dict], optional): Initialization config dict.
+ Default: None
+ """
+
+ def __init__(self,
+ channels,
+ ratio=16,
+ conv_cfg=None,
+ act_cfg=(dict(type='ReLU'), dict(type='Sigmoid')),
+ init_cfg=None):
+ super(SELayer, self).__init__(init_cfg)
+ if isinstance(act_cfg, dict):
+ act_cfg = (act_cfg, act_cfg)
+ assert len(act_cfg) == 2
+ assert mmcv.is_tuple_of(act_cfg, dict)
+ self.global_avgpool = nn.AdaptiveAvgPool2d(1)
+ self.conv1 = ConvModule(
+ in_channels=channels,
+ out_channels=int(channels / ratio),
+ kernel_size=1,
+ stride=1,
+ conv_cfg=conv_cfg,
+ act_cfg=act_cfg[0])
+ self.conv2 = ConvModule(
+ in_channels=int(channels / ratio),
+ out_channels=channels,
+ kernel_size=1,
+ stride=1,
+ conv_cfg=conv_cfg,
+ act_cfg=act_cfg[1])
+
+ def forward(self, x):
+ out = self.global_avgpool(x)
+ out = self.conv1(out)
+ out = self.conv2(out)
+ return x * out
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/transformer.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/transformer.py
new file mode 100644
index 0000000000000000000000000000000000000000..f81a582710512dc01ec1017a1e31beffa5a5738f
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/models/utils/transformer.py
@@ -0,0 +1,792 @@
+import math
+import warnings
+
+import torch
+import torch.nn as nn
+from mmcv.cnn import build_activation_layer, build_norm_layer, xavier_init
+from mmcv.cnn.bricks.registry import (TRANSFORMER_LAYER,
+ TRANSFORMER_LAYER_SEQUENCE)
+from mmcv.cnn.bricks.transformer import (BaseTransformerLayer,
+ TransformerLayerSequence,
+ build_transformer_layer_sequence)
+from mmcv.runner.base_module import BaseModule
+from torch.nn.init import normal_
+
+from mmdet.models.utils.builder import TRANSFORMER
+
+try:
+ from mmcv.ops.multi_scale_deform_attn import MultiScaleDeformableAttention
+
+except ImportError:
+ warnings.warn(
+ '`MultiScaleDeformableAttention` in MMCV has been moved to '
+ '`mmcv.ops.multi_scale_deform_attn`, please update your MMCV')
+ from mmcv.cnn.bricks.transformer import MultiScaleDeformableAttention
+
+
+def inverse_sigmoid(x, eps=1e-5):
+ """Inverse function of sigmoid.
+
+ Args:
+ x (Tensor): The tensor to apply the
+ inverse sigmoid to.
+ eps (float): EPS to avoid numerical
+ overflow. Defaults to 1e-5.
+ Returns:
+ Tensor: The input after the inverse
+ sigmoid is applied, with the same
+ shape as the input.
+ """
+ x = x.clamp(min=0, max=1)
+ x1 = x.clamp(min=eps)
+ x2 = (1 - x).clamp(min=eps)
+ return torch.log(x1 / x2)
+
+
+@TRANSFORMER_LAYER.register_module()
+class DetrTransformerDecoderLayer(BaseTransformerLayer):
+ """Implements decoder layer in DETR transformer.
+
+ Args:
+ attn_cfgs (list[`mmcv.ConfigDict`] | list[dict] | dict):
+ Configs for self_attention or cross_attention, the order
+ should be consistent with it in `operation_order`. If it is
+ a dict, it will be expanded to the number of attentions in
+ `operation_order`.
+ feedforward_channels (int): The hidden dimension for FFNs.
+ ffn_dropout (float): Probability of an element to be zeroed
+ in ffn. Default 0.0.
+ operation_order (tuple[str]): The execution order of operation
+ in transformer. Such as ('self_attn', 'norm', 'ffn', 'norm').
+ Default: None
+ act_cfg (dict): The activation config for FFNs.
+ Default: dict(type='ReLU', inplace=True).
+ norm_cfg (dict): Config dict for normalization layer.
+ Default: `LN`.
+ ffn_num_fcs (int): The number of fully-connected layers in FFNs.
+ Default: 2.
+ """ + + def __init__(self, + attn_cfgs, + feedforward_channels, + ffn_dropout=0.0, + operation_order=None, + act_cfg=dict(type='ReLU', inplace=True), + norm_cfg=dict(type='LN'), + ffn_num_fcs=2, + **kwargs): + super(DetrTransformerDecoderLayer, self).__init__( + attn_cfgs=attn_cfgs, + feedforward_channels=feedforward_channels, + ffn_dropout=ffn_dropout, + operation_order=operation_order, + act_cfg=act_cfg, + norm_cfg=norm_cfg, + ffn_num_fcs=ffn_num_fcs, + **kwargs) + assert len(operation_order) == 6 + assert set(operation_order) == set( + ['self_attn', 'norm', 'cross_attn', 'ffn']) + + +@TRANSFORMER_LAYER_SEQUENCE.register_module() +class DetrTransformerEncoder(TransformerLayerSequence): + """TransformerEncoder of DETR. + + Args: + post_norm_cfg (dict): Config of last normalization layer. Default: + `LN`. Only used when `self.pre_norm` is `True` + """ + + def __init__(self, *args, post_norm_cfg=dict(type='LN'), **kwargs): + super(DetrTransformerEncoder, self).__init__(*args, **kwargs) + if post_norm_cfg is not None: + self.post_norm = build_norm_layer( + post_norm_cfg, self.embed_dims)[1] if self.pre_norm else None + else: + assert not self.pre_norm, f'Use prenorm in ' \ + f'{self.__class__.__name__},' \ + f'Please specify post_norm_cfg' + self.post_norm = None + + def forward(self, *args, **kwargs): + """Forward function for `TransformerCoder`. + + Returns: + Tensor: forwarded results with shape [num_query, bs, embed_dims]. + """ + x = super(DetrTransformerEncoder, self).forward(*args, **kwargs) + if self.post_norm is not None: + x = self.post_norm(x) + return x + + +@TRANSFORMER_LAYER_SEQUENCE.register_module() +class DetrTransformerDecoder(TransformerLayerSequence): + """Implements the decoder in DETR transformer. + + Args: + return_intermediate (bool): Whether to return intermediate outputs. + post_norm_cfg (dict): Config of last normalization layer. Default: + `LN`. + """ + + def __init__(self, + *args, + post_norm_cfg=dict(type='LN'), + return_intermediate=False, + **kwargs): + + super(DetrTransformerDecoder, self).__init__(*args, **kwargs) + self.return_intermediate = return_intermediate + if post_norm_cfg is not None: + self.post_norm = build_norm_layer(post_norm_cfg, + self.embed_dims)[1] + else: + self.post_norm = None + + def forward(self, query, *args, **kwargs): + """Forward function for `TransformerDecoder`. + + Args: + query (Tensor): Input query with shape + `(num_query, bs, embed_dims)`. + + Returns: + Tensor: Results with shape [1, num_query, bs, embed_dims] when + return_intermediate is `False`, otherwise it has shape + [num_layers, num_query, bs, embed_dims]. + """ + if not self.return_intermediate: + x = super().forward(query, *args, **kwargs) + if self.post_norm: + x = self.post_norm(x)[None] + return x + + intermediate = [] + for layer in self.layers: + query = layer(query, *args, **kwargs) + if self.return_intermediate: + if self.post_norm is not None: + intermediate.append(self.post_norm(query)) + else: + intermediate.append(query) + return torch.stack(intermediate) + + +@TRANSFORMER.register_module() +class Transformer(BaseModule): + """Implements the DETR transformer. + + Following the official DETR implementation, this module copy-paste + from torch.nn.Transformer with modifications: + + * positional encodings are passed in MultiheadAttention + * extra LN at the end of encoder is removed + * decoder returns a stack of activations from all decoding layers + + See `paper: End-to-End Object Detection with Transformers + `_ for details. 
+
+ Args:
+ encoder (`mmcv.ConfigDict` | Dict): Config of
+ TransformerEncoder. Defaults to None.
+ decoder (`mmcv.ConfigDict` | Dict): Config of
+ TransformerDecoder. Defaults to None.
+ init_cfg (obj:`mmcv.ConfigDict`): The Config for initialization.
+ Defaults to None.
+ """
+
+ def __init__(self, encoder=None, decoder=None, init_cfg=None):
+ super(Transformer, self).__init__(init_cfg=init_cfg)
+ self.encoder = build_transformer_layer_sequence(encoder)
+ self.decoder = build_transformer_layer_sequence(decoder)
+ self.embed_dims = self.encoder.embed_dims
+
+ def init_weights(self):
+ # follow the official DETR to init parameters
+ for m in self.modules():
+ if hasattr(m, 'weight') and m.weight.dim() > 1:
+ xavier_init(m, distribution='uniform')
+ self._is_init = True
+
+ def forward(self, x, mask, query_embed, pos_embed):
+ """Forward function for `Transformer`.
+
+ Args:
+ x (Tensor): Input query with shape [bs, c, h, w] where
+ c = embed_dims.
+ mask (Tensor): The key_padding_mask used for encoder and decoder,
+ with shape [bs, h, w].
+ query_embed (Tensor): The query embedding for decoder, with shape
+ [num_query, c].
+ pos_embed (Tensor): The positional encoding for encoder and
+ decoder, with the same shape as `x`.
+
+ Returns:
+ tuple[Tensor]: results of decoder containing the following tensor.
+
+ - out_dec: Output from decoder. If return_intermediate_dec \
+ is True output has shape [num_dec_layers, bs,
+ num_query, embed_dims], else has shape [1, bs, \
+ num_query, embed_dims].
+ - memory: Output results from encoder, with shape \
+ [bs, embed_dims, h, w].
+ """
+ bs, c, h, w = x.shape
+ # use `view` instead of `flatten` for dynamically exporting to ONNX
+ x = x.view(bs, c, -1).permute(2, 0, 1) # [bs, c, h, w] -> [h*w, bs, c]
+ pos_embed = pos_embed.view(bs, c, -1).permute(2, 0, 1)
+ query_embed = query_embed.unsqueeze(1).repeat(
+ 1, bs, 1) # [num_query, dim] -> [num_query, bs, dim]
+ mask = mask.view(bs, -1) # [bs, h, w] -> [bs, h*w]
+ memory = self.encoder(
+ query=x,
+ key=None,
+ value=None,
+ query_pos=pos_embed,
+ query_key_padding_mask=mask)
+ target = torch.zeros_like(query_embed)
+ # out_dec: [num_layers, num_query, bs, dim]
+ out_dec = self.decoder(
+ query=target,
+ key=memory,
+ value=memory,
+ key_pos=pos_embed,
+ query_pos=query_embed,
+ key_padding_mask=mask)
+ out_dec = out_dec.transpose(1, 2)
+ memory = memory.permute(1, 2, 0).reshape(bs, c, h, w)
+ return out_dec, memory
+
+
+@TRANSFORMER_LAYER_SEQUENCE.register_module()
+class DeformableDetrTransformerDecoder(TransformerLayerSequence):
+ """Implements the decoder in deformable DETR transformer.
+
+ Args:
+ return_intermediate (bool): Whether to return intermediate outputs.
+ """
+
+ def __init__(self, *args, return_intermediate=False, **kwargs):
+
+ super(DeformableDetrTransformerDecoder, self).__init__(*args, **kwargs)
+ self.return_intermediate = return_intermediate
+
+ def forward(self,
+ query,
+ *args,
+ reference_points=None,
+ valid_ratios=None,
+ reg_branches=None,
+ **kwargs):
+ """Forward function for `TransformerDecoder`.
+
+ Args:
+ query (Tensor): Input query with shape
+ `(num_query, bs, embed_dims)`.
+ reference_points (Tensor): The reference
+ points of offset, which has shape
+ (bs, num_query, 4) when as_two_stage,
+ otherwise (bs, num_query, 2).
+ valid_ratios (Tensor): The ratios of valid
+ points on the feature map, has shape
+ (bs, num_levels, 2)
+ reg_branches (obj:`nn.ModuleList`): Used for
+ refining the regression results. Only
+ passed when with_box_refine is True,
+ otherwise `None`.
+
+ Returns:
+ Tensor: Results with shape [1, num_query, bs, embed_dims] when
+ return_intermediate is `False`, otherwise it has shape
+ [num_layers, num_query, bs, embed_dims].
+ """
+ output = query
+ intermediate = []
+ intermediate_reference_points = []
+ for lid, layer in enumerate(self.layers):
+ if reference_points.shape[-1] == 4:
+ reference_points_input = reference_points[:, :, None] * \
+ torch.cat([valid_ratios, valid_ratios], -1)[:, None]
+ else:
+ assert reference_points.shape[-1] == 2
+ reference_points_input = reference_points[:, :, None] * \
+ valid_ratios[:, None]
+ output = layer(
+ output,
+ *args,
+ reference_points=reference_points_input,
+ **kwargs)
+ output = output.permute(1, 0, 2)
+
+ if reg_branches is not None:
+ tmp = reg_branches[lid](output)
+ if reference_points.shape[-1] == 4:
+ new_reference_points = tmp + inverse_sigmoid(
+ reference_points)
+ new_reference_points = new_reference_points.sigmoid()
+ else:
+ assert reference_points.shape[-1] == 2
+ new_reference_points = tmp
+ new_reference_points[..., :2] = tmp[
+ ..., :2] + inverse_sigmoid(reference_points)
+ new_reference_points = new_reference_points.sigmoid()
+ reference_points = new_reference_points.detach()
+
+ output = output.permute(1, 0, 2)
+ if self.return_intermediate:
+ intermediate.append(output)
+ intermediate_reference_points.append(reference_points)
+
+ if self.return_intermediate:
+ return torch.stack(intermediate), torch.stack(
+ intermediate_reference_points)
+
+ return output, reference_points
+
+
+@TRANSFORMER.register_module()
+class DeformableDetrTransformer(Transformer):
+ """Implements the DeformableDETR transformer.
+
+ Args:
+ as_two_stage (bool): Generate query from encoder features.
+ Default: False.
+ num_feature_levels (int): Number of feature maps from FPN.
+ Default: 4.
+ two_stage_num_proposals (int): Number of proposals when set
+ `as_two_stage` as True. Default: 300.
+ """
+
+ def __init__(self,
+ as_two_stage=False,
+ num_feature_levels=4,
+ two_stage_num_proposals=300,
+ **kwargs):
+ super(DeformableDetrTransformer, self).__init__(**kwargs)
+ self.as_two_stage = as_two_stage
+ self.num_feature_levels = num_feature_levels
+ self.two_stage_num_proposals = two_stage_num_proposals
+ self.embed_dims = self.encoder.embed_dims
+ self.init_layers()
+
+ def init_layers(self):
+ """Initialize layers of the DeformableDetrTransformer."""
+ self.level_embeds = nn.Parameter(
+ torch.Tensor(self.num_feature_levels, self.embed_dims))
+
+ if self.as_two_stage:
+ self.enc_output = nn.Linear(self.embed_dims, self.embed_dims)
+ self.enc_output_norm = nn.LayerNorm(self.embed_dims)
+ self.pos_trans = nn.Linear(self.embed_dims * 2,
+ self.embed_dims * 2)
+ self.pos_trans_norm = nn.LayerNorm(self.embed_dims * 2)
+ else:
+ self.reference_points = nn.Linear(self.embed_dims, 2)
+
+ def init_weights(self):
+ """Initialize the transformer weights."""
+ for p in self.parameters():
+ if p.dim() > 1:
+ nn.init.xavier_uniform_(p)
+ for m in self.modules():
+ if isinstance(m, MultiScaleDeformableAttention):
+ m.init_weights()
+ if not self.as_two_stage:
+ xavier_init(self.reference_points, distribution='uniform', bias=0.)
+ normal_(self.level_embeds)
+
+ def gen_encoder_output_proposals(self, memory, memory_padding_mask,
+ spatial_shapes):
+ """Generate proposals from encoded memory.
+
+ Args:
+ memory (Tensor): The output of encoder,
+ has shape (bs, num_key, embed_dim). num_key equals
+ the number of points on the feature maps from
+ all levels.
+ memory_padding_mask (Tensor): Padding mask for memory,
+ has shape (bs, num_key).
+ spatial_shapes (Tensor): The shape of all feature maps,
+ has shape (num_level, 2).
+
+ Returns:
+ tuple: A tuple of feature map and bbox prediction.
+
+ - output_memory (Tensor): The input of decoder, \
+ has shape (bs, num_key, embed_dim). num_key \
+ equals the number of points on the feature maps \
+ from all levels.
+ - output_proposals (Tensor): The normalized proposal \
+ after an inverse sigmoid, has shape \
+ (bs, num_keys, 4).
+ """
+
+ N, S, C = memory.shape
+ proposals = []
+ _cur = 0
+ for lvl, (H, W) in enumerate(spatial_shapes):
+ mask_flatten_ = memory_padding_mask[:, _cur:(_cur + H * W)].view(
+ N, H, W, 1)
+ valid_H = torch.sum(~mask_flatten_[:, :, 0, 0], 1)
+ valid_W = torch.sum(~mask_flatten_[:, 0, :, 0], 1)
+
+ grid_y, grid_x = torch.meshgrid(
+ torch.linspace(
+ 0, H - 1, H, dtype=torch.float32, device=memory.device),
+ torch.linspace(
+ 0, W - 1, W, dtype=torch.float32, device=memory.device))
+ grid = torch.cat([grid_x.unsqueeze(-1), grid_y.unsqueeze(-1)], -1)
+
+ scale = torch.cat([valid_W.unsqueeze(-1),
+ valid_H.unsqueeze(-1)], 1).view(N, 1, 1, 2)
+ grid = (grid.unsqueeze(0).expand(N, -1, -1, -1) + 0.5) / scale
+ wh = torch.ones_like(grid) * 0.05 * (2.0**lvl)
+ proposal = torch.cat((grid, wh), -1).view(N, -1, 4)
+ proposals.append(proposal)
+ _cur += (H * W)
+ output_proposals = torch.cat(proposals, 1)
+ output_proposals_valid = ((output_proposals > 0.01) &
+ (output_proposals < 0.99)).all(
+ -1, keepdim=True)
+ output_proposals = torch.log(output_proposals / (1 - output_proposals))
+ output_proposals = output_proposals.masked_fill(
+ memory_padding_mask.unsqueeze(-1), float('inf'))
+ output_proposals = output_proposals.masked_fill(
+ ~output_proposals_valid, float('inf'))
+
+ output_memory = memory
+ output_memory = output_memory.masked_fill(
+ memory_padding_mask.unsqueeze(-1), float(0))
+ output_memory = output_memory.masked_fill(~output_proposals_valid,
+ float(0))
+ output_memory = self.enc_output_norm(self.enc_output(output_memory))
+ return output_memory, output_proposals
+
+ @staticmethod
+ def get_reference_points(spatial_shapes, valid_ratios, device):
+ """Get the reference points used in decoder.
+
+ Args:
+ spatial_shapes (Tensor): The shape of all
+ feature maps, has shape (num_level, 2).
+ valid_ratios (Tensor): The ratios of valid
+ points on the feature map, has shape
+ (bs, num_levels, 2)
+ device (obj:`device`): The device where
+ reference_points should be.
+
+ Returns:
+ Tensor: reference points used in decoder, has \
+ shape (bs, num_keys, num_levels, 2).
+ """ + reference_points_list = [] + for lvl, (H, W) in enumerate(spatial_shapes): + # TODO check this 0.5 + ref_y, ref_x = torch.meshgrid( + torch.linspace( + 0.5, H - 0.5, H, dtype=torch.float32, device=device), + torch.linspace( + 0.5, W - 0.5, W, dtype=torch.float32, device=device)) + ref_y = ref_y.reshape(-1)[None] / ( + valid_ratios[:, None, lvl, 1] * H) + ref_x = ref_x.reshape(-1)[None] / ( + valid_ratios[:, None, lvl, 0] * W) + ref = torch.stack((ref_x, ref_y), -1) + reference_points_list.append(ref) + reference_points = torch.cat(reference_points_list, 1) + reference_points = reference_points[:, :, None] * valid_ratios[:, None] + return reference_points + + def get_valid_ratio(self, mask): + """Get the valid radios of feature maps of all level.""" + _, H, W = mask.shape + valid_H = torch.sum(~mask[:, :, 0], 1) + valid_W = torch.sum(~mask[:, 0, :], 1) + valid_ratio_h = valid_H.float() / H + valid_ratio_w = valid_W.float() / W + valid_ratio = torch.stack([valid_ratio_w, valid_ratio_h], -1) + return valid_ratio + + def get_proposal_pos_embed(self, + proposals, + num_pos_feats=128, + temperature=10000): + """Get the position embedding of proposal.""" + scale = 2 * math.pi + dim_t = torch.arange( + num_pos_feats, dtype=torch.float32, device=proposals.device) + dim_t = temperature**(2 * (dim_t // 2) / num_pos_feats) + # N, L, 4 + proposals = proposals.sigmoid() * scale + # N, L, 4, 128 + pos = proposals[:, :, :, None] / dim_t + # N, L, 4, 64, 2 + pos = torch.stack((pos[:, :, :, 0::2].sin(), pos[:, :, :, 1::2].cos()), + dim=4).flatten(2) + return pos + + def forward(self, + mlvl_feats, + mlvl_masks, + query_embed, + mlvl_pos_embeds, + reg_branches=None, + cls_branches=None, + **kwargs): + """Forward function for `Transformer`. + + Args: + mlvl_feats (list(Tensor)): Input queries from + different level. Each element has shape + [bs, embed_dims, h, w]. + mlvl_masks (list(Tensor)): The key_padding_mask from + different level used for encoder and decoder, + each element has shape [bs, h, w]. + query_embed (Tensor): The query embedding for decoder, + with shape [num_query, c]. + mlvl_pos_embeds (list(Tensor)): The positional encoding + of feats from different level, has the shape + [bs, embed_dims, h, w]. + reg_branches (obj:`nn.ModuleList`): Regression heads for + feature maps from each decoder layer. Only would + be passed when + `with_box_refine` is True. Default to None. + cls_branches (obj:`nn.ModuleList`): Classification heads + for feature maps from each decoder layer. Only would + be passed when `as_two_stage` + is True. Default to None. + + + Returns: + tuple[Tensor]: results of decoder containing the following tensor. + + - inter_states: Outputs from decoder. If + return_intermediate_dec is True output has shape \ + (num_dec_layers, bs, num_query, embed_dims), else has \ + shape (1, bs, num_query, embed_dims). + - init_reference_out: The initial value of reference \ + points, has shape (bs, num_queries, 4). + - inter_references_out: The internal value of reference \ + points in decoder, has shape \ + (num_dec_layers, bs,num_query, embed_dims) + - enc_outputs_class: The classification score of \ + proposals generated from \ + encoder's feature maps, has shape \ + (batch, h*w, num_classes). \ + Only would be returned when `as_two_stage` is True, \ + otherwise None. + - enc_outputs_coord_unact: The regression results \ + generated from encoder's feature maps., has shape \ + (batch, h*w, 4). Only would \ + be returned when `as_two_stage` is True, \ + otherwise None. 
+ """ + assert self.as_two_stage or query_embed is not None + + feat_flatten = [] + mask_flatten = [] + lvl_pos_embed_flatten = [] + spatial_shapes = [] + for lvl, (feat, mask, pos_embed) in enumerate( + zip(mlvl_feats, mlvl_masks, mlvl_pos_embeds)): + bs, c, h, w = feat.shape + spatial_shape = (h, w) + spatial_shapes.append(spatial_shape) + feat = feat.flatten(2).transpose(1, 2) + mask = mask.flatten(1) + pos_embed = pos_embed.flatten(2).transpose(1, 2) + lvl_pos_embed = pos_embed + self.level_embeds[lvl].view(1, 1, -1) + lvl_pos_embed_flatten.append(lvl_pos_embed) + feat_flatten.append(feat) + mask_flatten.append(mask) + feat_flatten = torch.cat(feat_flatten, 1) + mask_flatten = torch.cat(mask_flatten, 1) + lvl_pos_embed_flatten = torch.cat(lvl_pos_embed_flatten, 1) + spatial_shapes = torch.as_tensor( + spatial_shapes, dtype=torch.long, device=feat_flatten.device) + level_start_index = torch.cat((spatial_shapes.new_zeros( + (1, )), spatial_shapes.prod(1).cumsum(0)[:-1])) + valid_ratios = torch.stack( + [self.get_valid_ratio(m) for m in mlvl_masks], 1) + + reference_points = \ + self.get_reference_points(spatial_shapes, + valid_ratios, + device=feat.device) + + feat_flatten = feat_flatten.permute(1, 0, 2) # (H*W, bs, embed_dims) + lvl_pos_embed_flatten = lvl_pos_embed_flatten.permute( + 1, 0, 2) # (H*W, bs, embed_dims) + memory = self.encoder( + query=feat_flatten, + key=None, + value=None, + query_pos=lvl_pos_embed_flatten, + query_key_padding_mask=mask_flatten, + spatial_shapes=spatial_shapes, + reference_points=reference_points, + level_start_index=level_start_index, + valid_ratios=valid_ratios, + **kwargs) + + memory = memory.permute(1, 0, 2) + bs, _, c = memory.shape + if self.as_two_stage: + output_memory, output_proposals = \ + self.gen_encoder_output_proposals( + memory, mask_flatten, spatial_shapes) + enc_outputs_class = cls_branches[self.decoder.num_layers]( + output_memory) + enc_outputs_coord_unact = \ + reg_branches[ + self.decoder.num_layers](output_memory) + output_proposals + + topk = self.two_stage_num_proposals + topk_proposals = torch.topk( + enc_outputs_class[..., 0], topk, dim=1)[1] + topk_coords_unact = torch.gather( + enc_outputs_coord_unact, 1, + topk_proposals.unsqueeze(-1).repeat(1, 1, 4)) + topk_coords_unact = topk_coords_unact.detach() + reference_points = topk_coords_unact.sigmoid() + init_reference_out = reference_points + pos_trans_out = self.pos_trans_norm( + self.pos_trans(self.get_proposal_pos_embed(topk_coords_unact))) + query_pos, query = torch.split(pos_trans_out, c, dim=2) + else: + query_pos, query = torch.split(query_embed, c, dim=1) + query_pos = query_pos.unsqueeze(0).expand(bs, -1, -1) + query = query.unsqueeze(0).expand(bs, -1, -1) + reference_points = self.reference_points(query_pos).sigmoid() + init_reference_out = reference_points + + # decoder + query = query.permute(1, 0, 2) + memory = memory.permute(1, 0, 2) + query_pos = query_pos.permute(1, 0, 2) + inter_states, inter_references = self.decoder( + query=query, + key=None, + value=memory, + query_pos=query_pos, + key_padding_mask=mask_flatten, + reference_points=reference_points, + spatial_shapes=spatial_shapes, + level_start_index=level_start_index, + valid_ratios=valid_ratios, + reg_branches=reg_branches, + **kwargs) + + inter_references_out = inter_references + if self.as_two_stage: + return inter_states, init_reference_out,\ + inter_references_out, enc_outputs_class,\ + enc_outputs_coord_unact + return inter_states, init_reference_out, \ + inter_references_out, None, None + + 
+@TRANSFORMER.register_module()
+class DynamicConv(BaseModule):
+ """Implements Dynamic Convolution.
+
+ This module generates parameters for each sample and
+ uses bmm to implement 1x1 convolution. Code is modified
+ from the `official github repo <https://github.com/PeizeSun/SparseR-CNN>`_.
+
+ Args:
+ in_channels (int): The input feature channel.
+ Defaults to 256.
+ feat_channels (int): The inner feature channel.
+ Defaults to 64.
+ out_channels (int, optional): The output feature channel.
+ When not specified, it will be set to `in_channels`
+ by default.
+ input_feat_shape (int): The shape of input feature.
+ Defaults to 7.
+ act_cfg (dict): The activation config for DynamicConv.
+ norm_cfg (dict): Config dict for normalization layer. Defaults to
+ layer normalization.
+ init_cfg (obj:`mmcv.ConfigDict`): The Config for initialization.
+ Default: None.
+ """
+
+ def __init__(self,
+ in_channels=256,
+ feat_channels=64,
+ out_channels=None,
+ input_feat_shape=7,
+ act_cfg=dict(type='ReLU', inplace=True),
+ norm_cfg=dict(type='LN'),
+ init_cfg=None):
+ super(DynamicConv, self).__init__(init_cfg)
+ self.in_channels = in_channels
+ self.feat_channels = feat_channels
+ self.out_channels_raw = out_channels
+ self.input_feat_shape = input_feat_shape
+ self.act_cfg = act_cfg
+ self.norm_cfg = norm_cfg
+ self.out_channels = out_channels if out_channels else in_channels
+
+ self.num_params_in = self.in_channels * self.feat_channels
+ self.num_params_out = self.out_channels * self.feat_channels
+ self.dynamic_layer = nn.Linear(
+ self.in_channels, self.num_params_in + self.num_params_out)
+
+ self.norm_in = build_norm_layer(norm_cfg, self.feat_channels)[1]
+ self.norm_out = build_norm_layer(norm_cfg, self.out_channels)[1]
+
+ self.activation = build_activation_layer(act_cfg)
+
+ num_output = self.out_channels * input_feat_shape**2
+ self.fc_layer = nn.Linear(num_output, self.out_channels)
+ self.fc_norm = build_norm_layer(norm_cfg, self.out_channels)[1]
+
+ def forward(self, param_feature, input_feature):
+ """Forward function for `DynamicConv`.
+
+ Args:
+ param_feature (Tensor): The feature used
+ to generate the parameters, has shape
+ (num_all_proposals, in_channels).
+ input_feature (Tensor): Feature that
+ interacts with parameters, has shape
+ (num_all_proposals, in_channels, H, W).
+
+ Returns:
+ Tensor: The output feature has shape
+ (num_all_proposals, out_channels).
+ """ + num_proposals = param_feature.size(0) + input_feature = input_feature.view(num_proposals, self.in_channels, + -1).permute(2, 0, 1) + + input_feature = input_feature.permute(1, 0, 2) + parameters = self.dynamic_layer(param_feature) + + param_in = parameters[:, :self.num_params_in].view( + -1, self.in_channels, self.feat_channels) + param_out = parameters[:, -self.num_params_out:].view( + -1, self.feat_channels, self.out_channels) + + # input_feature has shape (num_all_proposals, H*W, in_channels) + # param_in has shape (num_all_proposals, in_channels, feat_channels) + # feature has shape (num_all_proposals, H*W, feat_channels) + features = torch.bmm(input_feature, param_in) + features = self.norm_in(features) + features = self.activation(features) + + # param_out has shape (batch_size, feat_channels, out_channels) + features = torch.bmm(features, param_out) + features = self.norm_out(features) + features = self.activation(features) + + features = features.flatten(1) + features = self.fc_layer(features) + features = self.fc_norm(features) + features = self.activation(features) + + return features diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e79ad8c02a2d465f0690a4aa80683a5c6d784d52 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/__init__.py @@ -0,0 +1,5 @@ +from .collect_env import collect_env +from .logger import get_root_logger +from .optimizer import DistOptimizerHook + +__all__ = ['get_root_logger', 'collect_env', 'DistOptimizerHook'] diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/collect_env.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/collect_env.py new file mode 100644 index 0000000000000000000000000000000000000000..89c064accdb10abec4a03de04f601d27aab2da70 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/collect_env.py @@ -0,0 +1,16 @@ +from mmcv.utils import collect_env as collect_base_env +from mmcv.utils import get_git_hash + +import mmdet + + +def collect_env(): + """Collect the information of the running environments.""" + env_info = collect_base_env() + env_info['MMDetection'] = mmdet.__version__ + '+' + get_git_hash()[:7] + return env_info + + +if __name__ == '__main__': + for name, val in collect_env().items(): + print(f'{name}: {val}') diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/contextmanagers.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/contextmanagers.py new file mode 100644 index 0000000000000000000000000000000000000000..38a639262d949b5754dedf12f33fa814b030ea38 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/contextmanagers.py @@ -0,0 +1,121 @@ +import asyncio +import contextlib +import logging +import os +import time +from typing import List + +import torch + +logger = logging.getLogger(__name__) + +DEBUG_COMPLETED_TIME = bool(os.environ.get('DEBUG_COMPLETED_TIME', False)) + + +@contextlib.asynccontextmanager +async def completed(trace_name='', + name='', + sleep_interval=0.05, + streams: List[torch.cuda.Stream] = None): + """Async context manager that waits for work to complete on given CUDA + streams.""" + if not torch.cuda.is_available(): + yield + return + + stream_before_context_switch = torch.cuda.current_stream() + if not streams: + streams = [stream_before_context_switch] + else: + streams = [s if s else 
stream_before_context_switch for s in streams] + + end_events = [ + torch.cuda.Event(enable_timing=DEBUG_COMPLETED_TIME) for _ in streams + ] + + if DEBUG_COMPLETED_TIME: + start = torch.cuda.Event(enable_timing=True) + stream_before_context_switch.record_event(start) + + cpu_start = time.monotonic() + logger.debug('%s %s starting, streams: %s', trace_name, name, streams) + grad_enabled_before = torch.is_grad_enabled() + try: + yield + finally: + current_stream = torch.cuda.current_stream() + assert current_stream == stream_before_context_switch + + if DEBUG_COMPLETED_TIME: + cpu_end = time.monotonic() + for i, stream in enumerate(streams): + event = end_events[i] + stream.record_event(event) + + grad_enabled_after = torch.is_grad_enabled() + + # observed change of torch.is_grad_enabled() during concurrent run of + # async_test_bboxes code + assert (grad_enabled_before == grad_enabled_after + ), 'Unexpected is_grad_enabled() value change' + + are_done = [e.query() for e in end_events] + logger.debug('%s %s completed: %s streams: %s', trace_name, name, + are_done, streams) + with torch.cuda.stream(stream_before_context_switch): + while not all(are_done): + await asyncio.sleep(sleep_interval) + are_done = [e.query() for e in end_events] + logger.debug( + '%s %s completed: %s streams: %s', + trace_name, + name, + are_done, + streams, + ) + + current_stream = torch.cuda.current_stream() + assert current_stream == stream_before_context_switch + + if DEBUG_COMPLETED_TIME: + cpu_time = (cpu_end - cpu_start) * 1000 + stream_times_ms = '' + for i, stream in enumerate(streams): + elapsed_time = start.elapsed_time(end_events[i]) + stream_times_ms += f' {stream} {elapsed_time:.2f} ms' + logger.info('%s %s %.2f ms %s', trace_name, name, cpu_time, + stream_times_ms) + + +@contextlib.asynccontextmanager +async def concurrent(streamqueue: asyncio.Queue, + trace_name='concurrent', + name='stream'): + """Run code concurrently in different streams. + + :param streamqueue: asyncio.Queue instance. + + Queue tasks define the pool of streams used for concurrent execution. + """ + if not torch.cuda.is_available(): + yield + return + + initial_stream = torch.cuda.current_stream() + + with torch.cuda.stream(initial_stream): + stream = await streamqueue.get() + assert isinstance(stream, torch.cuda.Stream) + + try: + with torch.cuda.stream(stream): + logger.debug('%s %s is starting, stream: %s', trace_name, name, + stream) + yield + current = torch.cuda.current_stream() + assert current == stream + logger.debug('%s %s has finished, stream: %s', trace_name, + name, stream) + finally: + streamqueue.task_done() + streamqueue.put_nowait(stream) diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/logger.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..6fc6e6b438a73e857ba6f173594985807cb88b30 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/logger.py @@ -0,0 +1,19 @@ +import logging + +from mmcv.utils import get_logger + + +def get_root_logger(log_file=None, log_level=logging.INFO): + """Get root logger. + + Args: + log_file (str, optional): File path of log. Defaults to None. + log_level (int, optional): The level of logger. + Defaults to logging.INFO. 
+
+ Returns:
+ :obj:`logging.Logger`: The obtained logger.
+ """
+ logger = get_logger(name='mmdet', log_file=log_file, log_level=log_level)
+
+ return logger
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/optimizer.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/optimizer.py
new file mode 100644
index 0000000000000000000000000000000000000000..9c9d11941c0b43d42bd6daad1e4b927eaca3e675
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/optimizer.py
@@ -0,0 +1,33 @@
+from mmcv.runner import OptimizerHook, HOOKS
+try:
+ import apex
+except ImportError:
+ print('apex is not installed')
+
+
+@HOOKS.register_module()
+class DistOptimizerHook(OptimizerHook):
+ """Optimizer hook for distributed training."""
+
+ def __init__(self, update_interval=1, grad_clip=None, coalesce=True, bucket_size_mb=-1, use_fp16=False):
+ self.grad_clip = grad_clip
+ self.coalesce = coalesce
+ self.bucket_size_mb = bucket_size_mb
+ self.update_interval = update_interval
+ self.use_fp16 = use_fp16
+
+ def before_run(self, runner):
+ runner.optimizer.zero_grad()
+
+ def after_train_iter(self, runner):
+ runner.outputs['loss'] /= self.update_interval
+ if self.use_fp16:
+ with apex.amp.scale_loss(runner.outputs['loss'], runner.optimizer) as scaled_loss:
+ scaled_loss.backward()
+ else:
+ runner.outputs['loss'].backward()
+ if self.every_n_iters(runner, self.update_interval):
+ if self.grad_clip is not None:
+ self.clip_grads(runner.model.parameters())
+ runner.optimizer.step()
+ runner.optimizer.zero_grad()
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/profiling.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/profiling.py
new file mode 100644
index 0000000000000000000000000000000000000000..4be9222c37e922329d537f883f5587995e27efc6
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/profiling.py
@@ -0,0 +1,39 @@
+import contextlib
+import sys
+import time
+
+import torch
+
+if sys.version_info >= (3, 7):
+
+ @contextlib.contextmanager
+ def profile_time(trace_name,
+ name,
+ enabled=True,
+ stream=None,
+ end_stream=None):
+ """Print time spent by CPU and GPU.
+
+ Useful as a temporary context manager to find sweet spots of code
+ suitable for async implementation.
+ """
+ if (not enabled) or not torch.cuda.is_available():
+ yield
+ return
+ stream = stream if stream else torch.cuda.current_stream()
+ end_stream = end_stream if end_stream else stream
+ start = torch.cuda.Event(enable_timing=True)
+ end = torch.cuda.Event(enable_timing=True)
+ stream.record_event(start)
+ try:
+ cpu_start = time.monotonic()
+ yield
+ finally:
+ cpu_end = time.monotonic()
+ end_stream.record_event(end)
+ end.synchronize()
+ cpu_time = (cpu_end - cpu_start) * 1000
+ gpu_time = start.elapsed_time(end)
+ msg = f'{trace_name} {name} cpu_time {cpu_time:.2f} ms '
+ msg += f'gpu_time {gpu_time:.2f} ms stream {stream}'
+ print(msg, end_stream)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/util_mixins.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/util_mixins.py
new file mode 100644
index 0000000000000000000000000000000000000000..9aed015393c7b53d0e08b696d52948afd37025f5
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/util_mixins.py
@@ -0,0 +1,104 @@
+"""This module defines the :class:`NiceRepr` mixin class, which defines a
+``__repr__`` and ``__str__`` method that only depend on a custom ``__nice__``
+method, which you must define.
This means you only have to overload one
+function instead of two. Furthermore, if the object defines a ``__len__``
+method, then the ``__nice__`` method defaults to something sensible, otherwise
+it is treated as abstract and raises ``NotImplementedError``.
+
+To use simply have your object inherit from :class:`NiceRepr`
+(multi-inheritance should be ok).
+
+This code was copied from the ubelt library: https://github.com/Erotemic/ubelt
+
+Example:
+ >>> # Objects that define __nice__ have a default __str__ and __repr__
+ >>> class Student(NiceRepr):
+ ... def __init__(self, name):
+ ... self.name = name
+ ... def __nice__(self):
+ ... return self.name
+ >>> s1 = Student('Alice')
+ >>> s2 = Student('Bob')
+ >>> print(f's1 = {s1}')
+ >>> print(f's2 = {s2}')
+ s1 = <Student(Alice)>
+ s2 = <Student(Bob)>
+
+Example:
+ >>> # Objects that define __len__ have a default __nice__
+ >>> class Group(NiceRepr):
+ ... def __init__(self, data):
+ ... self.data = data
+ ... def __len__(self):
+ ... return len(self.data)
+ >>> g = Group([1, 2, 3])
+ >>> print(f'g = {g}')
+ g = <Group(3)>
+"""
+import warnings
+
+
+class NiceRepr:
+ """Inherit from this class and define ``__nice__`` to "nicely" print your
+ objects.
+
+ Defines ``__str__`` and ``__repr__`` in terms of ``__nice__`` function
+ Classes that inherit from :class:`NiceRepr` should redefine ``__nice__``.
+ If the inheriting class has a ``__len__`` method, then the default
+ ``__nice__`` method will return its length.
+
+ Example:
+ >>> class Foo(NiceRepr):
+ ... def __nice__(self):
+ ... return 'info'
+ >>> foo = Foo()
+ >>> assert str(foo) == '<Foo(info)>'
+ >>> assert repr(foo).startswith('<Foo(info) at ')
+
+ Example:
+ >>> class Bar(NiceRepr):
+ ... pass
+ >>> bar = Bar()
+ >>> import pytest
+ >>> with pytest.warns(None) as record:
+ >>> assert 'object at' in str(bar)
+ >>> assert 'object at' in repr(bar)
+
+ Example:
+ >>> class Baz(NiceRepr):
+ ... def __len__(self):
+ ... return 5
+ >>> baz = Baz()
+ >>> assert str(baz) == '<Baz(5)>'
+ """
+
+ def __nice__(self):
+ """str: a "nice" summary string describing this module"""
+ if hasattr(self, '__len__'):
+ # It is a common pattern for objects to use __len__ in __nice__
+ # As a convenience we define a default __nice__ for these objects
+ return str(len(self))
+ else:
+ # In all other cases force the subclass to overload __nice__
+ raise NotImplementedError(
+ f'Define the __nice__ method for {self.__class__!r}')
+
+ def __repr__(self):
+ """str: the string of the module"""
+ try:
+ nice = self.__nice__()
+ classname = self.__class__.__name__
+ return f'<{classname}({nice}) at {hex(id(self))}>'
+ except NotImplementedError as ex:
+ warnings.warn(str(ex), category=RuntimeWarning)
+ return object.__repr__(self)
+
+ def __str__(self):
+ """str: the string of the module"""
+ try:
+ classname = self.__class__.__name__
+ nice = self.__nice__()
+ return f'<{classname}({nice})>'
+ except NotImplementedError as ex:
+ warnings.warn(str(ex), category=RuntimeWarning)
+ return object.__repr__(self)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/util_random.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/util_random.py
new file mode 100644
index 0000000000000000000000000000000000000000..e313e9947bb3232a9458878fd219e1594ab93d57
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/utils/util_random.py
@@ -0,0 +1,33 @@
+"""Helpers for random number generators."""
+import numpy as np
+
+
+def ensure_rng(rng=None):
+ """Coerces input into a random number generator.
+
+ If the input is None, then a global random state is returned.
+ + If the input is a numeric value, then that is used as a seed to construct a + random state. Otherwise the input is returned as-is. + + Adapted from [1]_. + + Args: + rng (int | numpy.random.RandomState | None): + if None, then defaults to the global rng. Otherwise this can be an + integer or a RandomState class + Returns: + (numpy.random.RandomState) : rng - + a numpy random number generator + + References: + .. [1] https://gitlab.kitware.com/computer-vision/kwarray/blob/master/kwarray/util_random.py#L270 # noqa: E501 + """ + + if rng is None: + rng = np.random.mtrand._rand + elif isinstance(rng, int): + rng = np.random.RandomState(rng) + else: + rng = rng + return rng diff --git a/detection_cbnet/docker-build-context/cbnetv2/mmdet/version.py b/detection_cbnet/docker-build-context/cbnetv2/mmdet/version.py new file mode 100644 index 0000000000000000000000000000000000000000..9c2b85e02c8a9a57cbabaab2153c7564abb5e7c7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/mmdet/version.py @@ -0,0 +1,19 @@ +# Copyright (c) Open-MMLab. All rights reserved. + +__version__ = '2.14.0' +short_version = __version__ + + +def parse_version_info(version_str): + version_info = [] + for x in version_str.split('.'): + if x.isdigit(): + version_info.append(int(x)) + elif x.find('rc') != -1: + patch_version = x.split('rc') + version_info.append(int(patch_version[0])) + version_info.append(f'rc{patch_version[1]}') + return tuple(version_info) + + +version_info = parse_version_info(__version__) diff --git a/detection_cbnet/docker-build-context/cbnetv2/model-index.yml b/detection_cbnet/docker-build-context/cbnetv2/model-index.yml new file mode 100644 index 0000000000000000000000000000000000000000..c3fb71513a3e32607f0b8b91028a1a341fca20c7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/model-index.yml @@ -0,0 +1,57 @@ +Import: + - configs/atss/metafile.yml + - configs/autoassign/metafile.yml + - configs/cascade_rcnn/metafile.yml + - configs/centernet/metafile.yml + - configs/centripetalnet/metafile.yml + - configs/cornernet/metafile.yml + - configs/dcn/metafile.yml + - configs/deformable_detr/metafile.yml + - configs/detectors/metafile.yml + - configs/detr/metafile.yml + - configs/double_heads/metafile.yml + - configs/dynamic_rcnn/metafile.yml + - configs/empirical_attention/metafile.yml + - configs/faster_rcnn/metafile.yml + - configs/fcos/metafile.yml + - configs/foveabox/metafile.yml + - configs/fp16/metafile.yml + - configs/fpg/metafile.yml + - configs/free_anchor/metafile.yml + - configs/fsaf/metafile.yml + - configs/gcnet/metafile.yml + - configs/gfl/metafile.yml + - configs/ghm/metafile.yml + - configs/gn/metafile.yml + - configs/gn+ws/metafile.yml + - configs/grid_rcnn/metafile.yml + - configs/groie/metafile.yml + - configs/guided_anchoring/metafile.yml + - configs/hrnet/metafile.yml + - configs/htc/metafile.yml + - configs/instaboost/metafile.yml + - configs/ld/metafile.yml + - configs/libra_rcnn/metafile.yml + - configs/mask_rcnn/metafile.yml + - configs/ms_rcnn/metafile.yml + - configs/nas_fcos/metafile.yml + - configs/nas_fpn/metafile.yml + - configs/paa/metafile.yml + - configs/pafpn/metafile.yml + - configs/pisa/metafile.yml + - configs/point_rend/metafile.yml + - configs/regnet/metafile.yml + - configs/reppoints/metafile.yml + - configs/res2net/metafile.yml + - configs/resnest/metafile.yml + - configs/retinanet/metafile.yml + - configs/sabl/metafile.yml + - configs/scnet/metafile.yml + - configs/scratch/metafile.yml + - configs/sparse_rcnn/metafile.yml + - 
configs/ssd/metafile.yml + - configs/tridentnet/metafile.yml + - configs/vfnet/metafile.yml + - configs/yolact/metafile.yml + - configs/yolo/metafile.yml + - configs/yolof/metafile.yml diff --git a/detection_cbnet/docker-build-context/cbnetv2/model_zoo.md b/detection_cbnet/docker-build-context/cbnetv2/model_zoo.md new file mode 100644 index 0000000000000000000000000000000000000000..416639932f37ff2ccdb1930373d9354279b4039c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/model_zoo.md @@ -0,0 +1,64 @@ +# Benchmark and Model Zoo + +## CNN-based (w/ ImageNet-1k pretrained) +### Faster R-CNN +| Backbone | Lr Schd | box mAP (minival) | #params | FLOPs | config | log | model | +| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | +| DB-ResNet50 | 1x | 40.8 | 69M | 284G | [config](configs/cbnet/faster_rcnn_cbv2d1_r50_fpn_1x_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/faster_rcnn_cbv2d1_r50_fpn_1x_coco.log.json)| [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/faster_rcnn_cbv2d1_r50_fpn_1x_coco.pth.zip)| + + +### Cascade R-CNN (1600x1400) +| Backbone | Lr Schd | box mAP (minival/test-dev)| #params | FLOPs | config | model | +| :---: | :---: | :---: | :---: | :---: | :---: | :---: | +| DB-Res2Net101-DCN | 20e | 53.7/- | 141M | 429G | [config](configs/cbnet/cascade_rcnn_cbv2d1_r2_101_mdconv_fpn_20e_fp16_ms400-1400_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/cascade_rcnn_cbv2d1_r2_101_mdconv_fpn_20e_fp16_ms400-1400_coco.pth.zip)| +| DB-Res2Net101-DCN | 20e + 1x (swa) | 54.8/55.3 | 141M | 429G | [config (test only)](configs/cbnet/cascade_rcnn_cbv2d1_r2_101_mdconv_fpn_20e_fp16_ms400-1400_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/cascade_rcnn_cbv2d1_r2_101_mdconv_fpn_20e_fp16_ms400-1400_coco_swa.pth.zip) | + +### Cascade R-CNN w/ 4conv1fc (1600x1400) +| Backbone | Lr Schd | box mAP (minival/test-dev)| #params | FLOPs | config | model | +| :---: | :---: | :---: | :---: | :---: | :---: | :---: | +| DB-Res2Net101-DCN | 20e | 54.1/- | 146M | 774G | [config](configs/cbnet/cascade_rcnn_cbv2d1_r2_101_mdconv_fpn_20e_fp16_ms400-1400_giou_4conv1f_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/cascade_rcnn_cbv2d1_r2_101_mdconv_fpn_20e_fp16_ms400-1400_giou_4conv1f_coco.pth.zip)| +| DB-Res2Net101-DCN | 20e + 1x (swa) | 55.3/55.6 | 146M | 774G | [config (test only)](configs/cbnet/cascade_rcnn_cbv2d1_r2_101_mdconv_fpn_20e_fp16_ms400-1400_giou_4conv1f_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/cascade_rcnn_cbv2d1_r2_101_mdconv_fpn_20e_fp16_ms400-1400_giou_4conv1f_coco_swa.pth.zip) | + + +**Notes**: +- For SWA training, please refer to [SWA Object Detection](https://github.com/hyz-xmaster/swa_object_detection) + +## Transformer-based (w/ ImageNet-1k pretrained) + +### Mask R-CNN + +| Backbone | Lr Schd | box mAP (minival) | mask mAP (minival) | #params | FLOPs | config | log | model | +| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | +| DB-Swin-T | 3x | 50.2 | 44.5 | 76M | 357G | [config](configs/cbnet/mask_rcnn_cbv2_swin_small_patch4_window7_mstrain_480-800_adamw_3x_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.log.json) | 
[github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.pth.zip) | + + +### Cascade Mask R-CNN w/ 4conv1fc +| Backbone | Lr Schd | box mAP (minival)| mask mAP (minival)| #params | FLOPs | config | log | model | +| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | +| DB-Swin-T | 3x | 53.6 | 46.2 | 114M | 836G | [config](configs/cbnet/cascade_mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/cascade_mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.log.json) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/cascade_mask_rcnn_cbv2_swin_tiny_patch4_window7_mstrain_480-800_adamw_3x_coco.pth.zip) | + +### Cascade Mask R-CNN w/ 4conv1fc (1600x1400) +| Backbone | Lr Schd | box mAP (minival/test-dev)| mask mAP (minival/test-dev)| #params | FLOPs | config | model | +| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | +| DB-Swin-S | 3x | 56.3/56.9 | 48.6/49.1 | 156M | 1016G | [config](configs/cbnet/cascade_mask_rcnn_cbv2_swin_small_patch4_window7_mstrain_400-1400_adamw_3x_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/cascade_mask_rcnn_cbv2_swin_small_patch4_window7_mstrain_400-1400_adamw_3x_coco.pth.zip)| + +## Transformer-based (w/ ImageNet-22k pretrained) +### HTC (1600x1400) +| Backbone | Lr Schd | box mAP (minival/test-dev) | mask mAP (minival/test-dev) | #params | FLOPs | config | model | +| :---: | :---: | :---: | :---: | :---: | :---: | :---: | :---: | +| DB-Swin-B | 20e | 57.9/- | 50.2/- | 231M | 1004G | [config](configs/cbnet/htc_cbv2_swin_base_patch4_window7_mstrain_400-1400_adamw_20e_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/htc_cbv2_swin_base22k_patch4_window7_mstrain_400-1400_adamw_20e_coco.pth.zip) | +| DB-Swin-B | 20e + 1x (swa) | 58.2/58.6 | 50.4/51.1 | 231M | 1004G | [config (test only)](configs/cbnet/htc_cbv2_swin_base_patch4_window7_mstrain_400-1400_adamw_20e_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/htc_cbv2_swin_base22k_patch4_window7_mstrain_400-1400_adamw_20e_coco_swa.pth.zip)| + +### HTC (bbox head w/ 4conv1fc) (1600x1400) +*Compared to regular HTC, our HTC uses 4conv1fc in bbox head.* +| Backbone | Lr Schd | box mAP (minival/test-dev) | mask mAP (minival/test-dev) | #params | FLOPs | config | model | +| :---: |:---: | :---: | :---: | :---: | :---: | :---: | :---: | +| DB-Swin-B | 20e | 58.4/58.7 | 50.7/51.1 | 235M | 1348G | [config](configs/cbnet/htc_cbv2_swin_base_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_20e_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/htc_cbv2_swin_base22k_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_20e_coco.pth.zip) | +| DB-Swin-L | 1x | 59.1/59.4 | 51.0/51.6 | 453M | 2162G | [config](configs/cbnet/htc_cbv2_swin_large_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.py) | [github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/htc_cbv2_swin_large22k_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.pth.zip) | +| DB-Swin-L (TTA) | 1x | 59.6/60.1 | 51.8/52.3 | 453M | - | [config](configs/cbnet/htc_cbv2_swin_large_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.py) | 
[github](https://github.com/CBNetwork/storage/releases/download/v1.0.0/htc_cbv2_swin_large22k_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.pth.zip) | + +TTA denotes test time augmentation. + +**Notes**: + +- **Pre-trained models of Swin Transformer can be downloaded from [Swin Transformer for ImageNet Classification](https://github.com/microsoft/Swin-Transformer)**. diff --git a/detection_cbnet/docker-build-context/cbnetv2/pytest.ini b/detection_cbnet/docker-build-context/cbnetv2/pytest.ini new file mode 100644 index 0000000000000000000000000000000000000000..9796e871e70c7c67345b1d6bcf708c0c82377a98 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/pytest.ini @@ -0,0 +1,7 @@ +[pytest] +addopts = --xdoctest --xdoctest-style=auto +norecursedirs = .git ignore build __pycache__ data docker docs .eggs + +filterwarnings= default + ignore:.*No cfgstr given in Cacher constructor or call.*:Warning + ignore:.*Define the __nice__ method for.*:Warning diff --git a/detection_cbnet/docker-build-context/cbnetv2/requirements.txt b/detection_cbnet/docker-build-context/cbnetv2/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..6981bd723391a980c0f22baeab39d0adbcb68679 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/requirements.txt @@ -0,0 +1,4 @@ +-r requirements/build.txt +-r requirements/optional.txt +-r requirements/runtime.txt +-r requirements/tests.txt diff --git a/detection_cbnet/docker-build-context/cbnetv2/requirements/build.txt b/detection_cbnet/docker-build-context/cbnetv2/requirements/build.txt new file mode 100644 index 0000000000000000000000000000000000000000..81558298594a9619f3187d220f1accede1865de7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/requirements/build.txt @@ -0,0 +1,3 @@ +# These must be installed before building mmdetection +cython +numpy diff --git a/detection_cbnet/docker-build-context/cbnetv2/requirements/docs.txt b/detection_cbnet/docker-build-context/cbnetv2/requirements/docs.txt new file mode 100644 index 0000000000000000000000000000000000000000..89fbf86c01cb29f10f7e99c910248c4d5229da58 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/requirements/docs.txt @@ -0,0 +1,4 @@ +recommonmark +sphinx +sphinx_markdown_tables +sphinx_rtd_theme diff --git a/detection_cbnet/docker-build-context/cbnetv2/requirements/mminstall.txt b/detection_cbnet/docker-build-context/cbnetv2/requirements/mminstall.txt new file mode 100644 index 0000000000000000000000000000000000000000..e242e9f7b8379455c539f986b4f2283a77c0e5a4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/requirements/mminstall.txt @@ -0,0 +1 @@ +mmcv-full>=1.3.3 diff --git a/detection_cbnet/docker-build-context/cbnetv2/requirements/optional.txt b/detection_cbnet/docker-build-context/cbnetv2/requirements/optional.txt new file mode 100644 index 0000000000000000000000000000000000000000..ac9688b010da6b227a5ff4138ce9aec26930d1b3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/requirements/optional.txt @@ -0,0 +1,5 @@ +albumentations>=0.3.2 +cityscapesscripts +imagecorruptions +scipy +sklearn diff --git a/detection_cbnet/docker-build-context/cbnetv2/requirements/readthedocs.txt b/detection_cbnet/docker-build-context/cbnetv2/requirements/readthedocs.txt new file mode 100644 index 0000000000000000000000000000000000000000..0542bfce6dff3b002a1d33e53c0be975e7feed4a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/requirements/readthedocs.txt @@ -0,0 +1,3 @@ +mmcv +torch +torchvision 
diff --git a/detection_cbnet/docker-build-context/cbnetv2/requirements/runtime.txt b/detection_cbnet/docker-build-context/cbnetv2/requirements/runtime.txt new file mode 100644 index 0000000000000000000000000000000000000000..b14d7317a74223b686ecf7d149c26049676978c7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/requirements/runtime.txt @@ -0,0 +1,7 @@ +matplotlib +numpy +pycocotools; platform_system == "Linux" +pycocotools-windows; platform_system == "Windows" +six +terminaltables +timm diff --git a/detection_cbnet/docker-build-context/cbnetv2/requirements/tests.txt b/detection_cbnet/docker-build-context/cbnetv2/requirements/tests.txt new file mode 100644 index 0000000000000000000000000000000000000000..5f3de01d179bcc0806b6ca7e26de69b58f0305fe --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/requirements/tests.txt @@ -0,0 +1,13 @@ +asynctest +codecov +flake8 +interrogate +isort==4.3.21 +# Note: used for kwarray.group_items, this may be ported to mmcv in the future. +kwarray +onnx==1.7.0 +onnxruntime==1.5.1 +pytest +ubelt +xdoctest>=0.10.0 +yapf diff --git a/detection_cbnet/docker-build-context/cbnetv2/resources/coco_test_12510.jpg b/detection_cbnet/docker-build-context/cbnetv2/resources/coco_test_12510.jpg new file mode 100644 index 0000000000000000000000000000000000000000..1271ae1d8a3e9b052d06e72f7b764887d171d96e Binary files /dev/null and b/detection_cbnet/docker-build-context/cbnetv2/resources/coco_test_12510.jpg differ diff --git a/detection_cbnet/docker-build-context/cbnetv2/resources/corruptions_sev_3.png b/detection_cbnet/docker-build-context/cbnetv2/resources/corruptions_sev_3.png new file mode 100644 index 0000000000000000000000000000000000000000..bbbd19a8d4c87677cb0cf64833a5eb1ce4b95e40 Binary files /dev/null and b/detection_cbnet/docker-build-context/cbnetv2/resources/corruptions_sev_3.png differ diff --git a/detection_cbnet/docker-build-context/cbnetv2/resources/data_pipeline.png b/detection_cbnet/docker-build-context/cbnetv2/resources/data_pipeline.png new file mode 100644 index 0000000000000000000000000000000000000000..6ac3fee2bc62753681b2e42a9705dd7eefbee443 Binary files /dev/null and b/detection_cbnet/docker-build-context/cbnetv2/resources/data_pipeline.png differ diff --git a/detection_cbnet/docker-build-context/cbnetv2/resources/loss_curve.png b/detection_cbnet/docker-build-context/cbnetv2/resources/loss_curve.png new file mode 100644 index 0000000000000000000000000000000000000000..02425551174d57ae6fecd51be7960acad84c934c Binary files /dev/null and b/detection_cbnet/docker-build-context/cbnetv2/resources/loss_curve.png differ diff --git a/detection_cbnet/docker-build-context/cbnetv2/resources/mmdet-logo.png b/detection_cbnet/docker-build-context/cbnetv2/resources/mmdet-logo.png new file mode 100644 index 0000000000000000000000000000000000000000..a0b6fbdc4484b3100c75ca357e02389c4477e191 Binary files /dev/null and b/detection_cbnet/docker-build-context/cbnetv2/resources/mmdet-logo.png differ diff --git a/detection_cbnet/docker-build-context/cbnetv2/resources/qq_group_qrcode.jpg b/detection_cbnet/docker-build-context/cbnetv2/resources/qq_group_qrcode.jpg new file mode 100644 index 0000000000000000000000000000000000000000..417347449fe64cbb2c9076601f7a8206d8b54706 Binary files /dev/null and b/detection_cbnet/docker-build-context/cbnetv2/resources/qq_group_qrcode.jpg differ diff --git a/detection_cbnet/docker-build-context/cbnetv2/resources/zhihu_qrcode.jpg b/detection_cbnet/docker-build-context/cbnetv2/resources/zhihu_qrcode.jpg new file mode 
100644 index 0000000000000000000000000000000000000000..c745fb027f06564d41794e9a40069b06c34e2bb5 Binary files /dev/null and b/detection_cbnet/docker-build-context/cbnetv2/resources/zhihu_qrcode.jpg differ diff --git a/detection_cbnet/docker-build-context/cbnetv2/setup.cfg b/detection_cbnet/docker-build-context/cbnetv2/setup.cfg new file mode 100644 index 0000000000000000000000000000000000000000..84dfbdddf3754efeecbffa86a56c329ac9194681 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/setup.cfg @@ -0,0 +1,13 @@ +[isort] +line_length = 79 +multi_line_output = 0 +known_standard_library = setuptools +known_first_party = mmdet +known_third_party = PIL,asynctest,cityscapesscripts,cv2,gather_models,matplotlib,mmcv,numpy,onnx,onnxruntime,pycocotools,pytest,seaborn,six,terminaltables,torch,ts,yaml +no_lines_before = STDLIB,LOCALFOLDER +default_section = THIRDPARTY + +[yapf] +BASED_ON_STYLE = pep8 +BLANK_LINE_BEFORE_NESTED_CLASS_OR_DEF = true +SPLIT_BEFORE_EXPRESSION_AFTER_OPENING_PAREN = true diff --git a/detection_cbnet/docker-build-context/cbnetv2/setup.py b/detection_cbnet/docker-build-context/cbnetv2/setup.py new file mode 100755 index 0000000000000000000000000000000000000000..9b146d3eaf082ea568eb443931d90141f49b0763 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/setup.py @@ -0,0 +1,162 @@ +#!/usr/bin/env python +import os +from setuptools import find_packages, setup + +import torch +from torch.utils.cpp_extension import (BuildExtension, CppExtension, + CUDAExtension) + + +def readme(): + with open('README.md', encoding='utf-8') as f: + content = f.read() + return content + + +version_file = 'mmdet/version.py' + + +def get_version(): + with open(version_file, 'r') as f: + exec(compile(f.read(), version_file, 'exec')) + return locals()['__version__'] + + +def make_cuda_ext(name, module, sources, sources_cuda=[]): + + define_macros = [] + extra_compile_args = {'cxx': []} + + if torch.cuda.is_available() or os.getenv('FORCE_CUDA', '0') == '1': + define_macros += [('WITH_CUDA', None)] + extension = CUDAExtension + extra_compile_args['nvcc'] = [ + '-D__CUDA_NO_HALF_OPERATORS__', + '-D__CUDA_NO_HALF_CONVERSIONS__', + '-D__CUDA_NO_HALF2_OPERATORS__', + ] + sources += sources_cuda + else: + print(f'Compiling {name} without CUDA') + extension = CppExtension + + return extension( + name=f'{module}.{name}', + sources=[os.path.join(*module.split('.'), p) for p in sources], + define_macros=define_macros, + extra_compile_args=extra_compile_args) + + +def parse_requirements(fname='requirements.txt', with_version=True): + """Parse the package dependencies listed in a requirements file but strip + specific versioning information.
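+ + For example, given the entry ``mmcv-full>=1.3.3`` from + requirements/mminstall.txt, an item is yielded as 'mmcv-full>=1.3.3' when + with_version is True and as just 'mmcv-full' when it is False; '-r' + includes are followed recursively.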
+ + Args: + fname (str): path to requirements file + with_version (bool, default=True): if True, include version specs + + Returns: + List[str]: list of requirements items + + CommandLine: + python -c "import setup; print(setup.parse_requirements())" + """ + import sys + from os.path import exists + import re + require_fpath = fname + + def parse_line(line): + """Parse information from a line in a requirements text file.""" + if line.startswith('-r '): + # Allow specifying requirements in other files + target = line.split(' ')[1] + for info in parse_require_file(target): + yield info + else: + info = {'line': line} + if line.startswith('-e '): + info['package'] = line.split('#egg=')[1] + elif '@git+' in line: + info['package'] = line + else: + # Remove versioning from the package + pat = '(' + '|'.join(['>=', '==', '>']) + ')' + parts = re.split(pat, line, maxsplit=1) + parts = [p.strip() for p in parts] + + info['package'] = parts[0] + if len(parts) > 1: + op, rest = parts[1:] + if ';' in rest: + # Handle platform specific dependencies + # http://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-platform-specific-dependencies + version, platform_deps = map(str.strip, + rest.split(';')) + info['platform_deps'] = platform_deps + else: + version = rest # NOQA + info['version'] = (op, version) + yield info + + def parse_require_file(fpath): + with open(fpath, 'r') as f: + for line in f.readlines(): + line = line.strip() + if line and not line.startswith('#'): + for info in parse_line(line): + yield info + + def gen_packages_items(): + if exists(require_fpath): + for info in parse_require_file(require_fpath): + parts = [info['package']] + if with_version and 'version' in info: + parts.extend(info['version']) + if not sys.version.startswith('3.4'): + # apparently package_deps are broken in 3.4 + platform_deps = info.get('platform_deps') + if platform_deps is not None: + parts.append(';' + platform_deps) + item = ''.join(parts) + yield item + + packages = list(gen_packages_items()) + return packages + + +if __name__ == '__main__': + setup( + name='mmdet', + version=get_version(), + description='OpenMMLab Detection Toolbox and Benchmark', + long_description=readme(), + long_description_content_type='text/markdown', + author='OpenMMLab', + author_email='openmmlab@gmail.com', + keywords='computer vision, object detection', + url='https://github.com/open-mmlab/mmdetection', + packages=find_packages(exclude=('configs', 'tools', 'demo')), + include_package_data=True, + classifiers=[ + 'Development Status :: 5 - Production/Stable', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + ], + license='Apache License 2.0', + setup_requires=parse_requirements('requirements/build.txt'), + tests_require=parse_requirements('requirements/tests.txt'), + install_requires=parse_requirements('requirements/runtime.txt'), + extras_require={ + 'all': parse_requirements('requirements.txt'), + 'tests': parse_requirements('requirements/tests.txt'), + 'build': parse_requirements('requirements/build.txt'), + 'optional': parse_requirements('requirements/optional.txt'), + }, + ext_modules=[], + cmdclass={'build_ext': BuildExtension}, + zip_safe=False) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_coco_dataset.py
b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_coco_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..13b6c7f37cd6df26142f66becb91ebe23d43e74b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_coco_dataset.py @@ -0,0 +1,57 @@ +import os.path as osp +import tempfile + +import mmcv +import pytest + +from mmdet.datasets import CocoDataset + + +def _create_ids_error_coco_json(json_name): + image = { + 'id': 0, + 'width': 640, + 'height': 640, + 'file_name': 'fake_name.jpg', + } + + annotation_1 = { + 'id': 1, + 'image_id': 0, + 'category_id': 0, + 'area': 400, + 'bbox': [50, 60, 20, 20], + 'iscrowd': 0, + } + + annotation_2 = { + 'id': 1, + 'image_id': 0, + 'category_id': 0, + 'area': 900, + 'bbox': [100, 120, 30, 30], + 'iscrowd': 0, + } + + categories = [{ + 'id': 0, + 'name': 'car', + 'supercategory': 'car', + }] + + fake_json = { + 'images': [image], + 'annotations': [annotation_1, annotation_2], + 'categories': categories + } + mmcv.dump(fake_json, json_name) + + +def test_coco_annotation_ids_unique(): + tmp_dir = tempfile.TemporaryDirectory() + fake_json_file = osp.join(tmp_dir.name, 'fake_data.json') + _create_ids_error_coco_json(fake_json_file) + + # test annotation ids not unique error + with pytest.raises(AssertionError): + CocoDataset(ann_file=fake_json_file, classes=('car', ), pipeline=[]) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_common.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_common.py new file mode 100644 index 0000000000000000000000000000000000000000..6642be9fe7708b6188e5db68cc0a21f8bedd3040 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_common.py @@ -0,0 +1,360 @@ +import copy +import logging +import os +import os.path as osp +import tempfile +from unittest.mock import MagicMock, patch + +import mmcv +import numpy as np +import pytest +import torch +import torch.nn as nn +from mmcv.runner import EpochBasedRunner +from torch.utils.data import DataLoader + +from mmdet.core.evaluation import DistEvalHook, EvalHook +from mmdet.datasets import DATASETS, CocoDataset, CustomDataset, build_dataset + + +def _create_dummy_coco_json(json_name): + image = { + 'id': 0, + 'width': 640, + 'height': 640, + 'file_name': 'fake_name.jpg', + } + + annotation_1 = { + 'id': 1, + 'image_id': 0, + 'category_id': 0, + 'area': 400, + 'bbox': [50, 60, 20, 20], + 'iscrowd': 0, + } + + annotation_2 = { + 'id': 2, + 'image_id': 0, + 'category_id': 0, + 'area': 900, + 'bbox': [100, 120, 30, 30], + 'iscrowd': 0, + } + + annotation_3 = { + 'id': 3, + 'image_id': 0, + 'category_id': 0, + 'area': 1600, + 'bbox': [150, 160, 40, 40], + 'iscrowd': 0, + } + + annotation_4 = { + 'id': 4, + 'image_id': 0, + 'category_id': 0, + 'area': 10000, + 'bbox': [250, 260, 100, 100], + 'iscrowd': 0, + } + + categories = [{ + 'id': 0, + 'name': 'car', + 'supercategory': 'car', + }] + + fake_json = { + 'images': [image], + 'annotations': + [annotation_1, annotation_2, annotation_3, annotation_4], + 'categories': categories + } + + mmcv.dump(fake_json, json_name) + + +def _create_dummy_custom_pkl(pkl_name): + fake_pkl = [{ + 'filename': 'fake_name.jpg', + 'width': 640, + 'height': 640, + 'ann': { + 'bboxes': + np.array([[50, 60, 70, 80], [100, 120, 130, 150], + [150, 160, 190, 200], [250, 260, 350, 360]]), + 'labels': + np.array([0, 0, 0, 0]) + } + }] + mmcv.dump(fake_pkl, 
pkl_name) + + +def _create_dummy_results(): + boxes = [ + np.array([[50, 60, 70, 80, 1.0], [100, 120, 130, 150, 0.98], + [150, 160, 190, 200, 0.96], [250, 260, 350, 360, 0.95]]) + ] + return [boxes] + + +@pytest.mark.parametrize('config_path', + ['./configs/_base_/datasets/voc0712.py']) +def test_dataset_init(config_path): + if not os.path.exists('./data'): + os.symlink('./tests/data', './data') + data_config = mmcv.Config.fromfile(config_path) + if 'data' not in data_config: + return + stage_names = ['train', 'val', 'test'] + for stage_name in stage_names: + dataset_config = copy.deepcopy(data_config.data.get(stage_name)) + dataset = build_dataset(dataset_config) + dataset[0] + os.unlink('./data') + + +def test_dataset_evaluation(): + tmp_dir = tempfile.TemporaryDirectory() + # create dummy data + fake_json_file = osp.join(tmp_dir.name, 'fake_data.json') + _create_dummy_coco_json(fake_json_file) + + # test single coco dataset evaluation + coco_dataset = CocoDataset( + ann_file=fake_json_file, classes=('car', ), pipeline=[]) + fake_results = _create_dummy_results() + eval_results = coco_dataset.evaluate(fake_results, classwise=True) + assert eval_results['bbox_mAP'] == 1 + assert eval_results['bbox_mAP_50'] == 1 + assert eval_results['bbox_mAP_75'] == 1 + + # test concat dataset evaluation + fake_concat_results = _create_dummy_results() + _create_dummy_results() + + # build concat dataset through two config dict + coco_cfg = dict( + type='CocoDataset', + ann_file=fake_json_file, + classes=('car', ), + pipeline=[]) + concat_cfgs = [coco_cfg, coco_cfg] + concat_dataset = build_dataset(concat_cfgs) + eval_results = concat_dataset.evaluate(fake_concat_results) + assert eval_results['0_bbox_mAP'] == 1 + assert eval_results['0_bbox_mAP_50'] == 1 + assert eval_results['0_bbox_mAP_75'] == 1 + assert eval_results['1_bbox_mAP'] == 1 + assert eval_results['1_bbox_mAP_50'] == 1 + assert eval_results['1_bbox_mAP_75'] == 1 + + # build concat dataset through concatenated ann_file + coco_cfg = dict( + type='CocoDataset', + ann_file=[fake_json_file, fake_json_file], + classes=('car', ), + pipeline=[]) + concat_dataset = build_dataset(coco_cfg) + eval_results = concat_dataset.evaluate(fake_concat_results) + assert eval_results['0_bbox_mAP'] == 1 + assert eval_results['0_bbox_mAP_50'] == 1 + assert eval_results['0_bbox_mAP_75'] == 1 + assert eval_results['1_bbox_mAP'] == 1 + assert eval_results['1_bbox_mAP_50'] == 1 + assert eval_results['1_bbox_mAP_75'] == 1 + + # create dummy data + fake_pkl_file = osp.join(tmp_dir.name, 'fake_data.pkl') + _create_dummy_custom_pkl(fake_pkl_file) + + # test single custom dataset evaluation + custom_dataset = CustomDataset( + ann_file=fake_pkl_file, classes=('car', ), pipeline=[]) + fake_results = _create_dummy_results() + eval_results = custom_dataset.evaluate(fake_results) + assert eval_results['mAP'] == 1 + + # test concat dataset evaluation + fake_concat_results = _create_dummy_results() + _create_dummy_results() + + # build concat dataset through two config dict + custom_cfg = dict( + type='CustomDataset', + ann_file=fake_pkl_file, + classes=('car', ), + pipeline=[]) + concat_cfgs = [custom_cfg, custom_cfg] + concat_dataset = build_dataset(concat_cfgs) + eval_results = concat_dataset.evaluate(fake_concat_results) + assert eval_results['0_mAP'] == 1 + assert eval_results['1_mAP'] == 1 + + # build concat dataset through concatenated ann_file + concat_cfg = dict( + type='CustomDataset', + ann_file=[fake_pkl_file, fake_pkl_file], + classes=('car', ), + pipeline=[]) + 
concat_dataset = build_dataset(concat_cfg) + eval_results = concat_dataset.evaluate(fake_concat_results) + assert eval_results['0_mAP'] == 1 + assert eval_results['1_mAP'] == 1 + + # build concat dataset through explicit type + concat_cfg = dict( + type='ConcatDataset', + datasets=[custom_cfg, custom_cfg], + separate_eval=False) + concat_dataset = build_dataset(concat_cfg) + eval_results = concat_dataset.evaluate(fake_concat_results, metric='mAP') + assert eval_results['mAP'] == 1 + assert len(concat_dataset.datasets[0].data_infos) == \ + len(concat_dataset.datasets[1].data_infos) + assert len(concat_dataset.datasets[0].data_infos) == 1 + tmp_dir.cleanup() + + +@patch('mmdet.apis.single_gpu_test', MagicMock) +@patch('mmdet.apis.multi_gpu_test', MagicMock) +@pytest.mark.parametrize('EvalHookParam', (EvalHook, DistEvalHook)) +def test_evaluation_hook(EvalHookParam): + # create dummy data + dataloader = DataLoader(torch.ones((5, 2))) + + # 0.1. dataloader is not a DataLoader object + with pytest.raises(TypeError): + EvalHookParam(dataloader=MagicMock(), interval=-1) + + # 0.2. negative interval + with pytest.raises(ValueError): + EvalHookParam(dataloader, interval=-1) + + # 1. start=None, interval=1: perform evaluation after each epoch. + runner = _build_demo_runner() + evalhook = EvalHookParam(dataloader, interval=1) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner.run([dataloader], [('train', 1)], 2) + assert evalhook.evaluate.call_count == 2 # after epoch 1 & 2 + + # 2. start=1, interval=1: perform evaluation after each epoch. + runner = _build_demo_runner() + + evalhook = EvalHookParam(dataloader, start=1, interval=1) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner.run([dataloader], [('train', 1)], 2) + assert evalhook.evaluate.call_count == 2 # after epoch 1 & 2 + + # 3. start=None, interval=2: perform evaluation after epoch 2, 4, 6, etc + runner = _build_demo_runner() + evalhook = EvalHookParam(dataloader, interval=2) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner.run([dataloader], [('train', 1)], 2) + assert evalhook.evaluate.call_count == 1 # after epoch 2 + + # 4. start=1, interval=2: perform evaluation after epoch 1, 3, 5, etc + runner = _build_demo_runner() + evalhook = EvalHookParam(dataloader, start=1, interval=2) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner.run([dataloader], [('train', 1)], 3) + assert evalhook.evaluate.call_count == 2 # after epoch 1 & 3 + + # 5. start=0/negative, interval=1: perform evaluation after each epoch and + # before epoch 1. + runner = _build_demo_runner() + evalhook = EvalHookParam(dataloader, start=0) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner.run([dataloader], [('train', 1)], 2) + assert evalhook.evaluate.call_count == 3 # before epoch1 and after e1 & e2 + + # the evaluation start epoch cannot be less than 0 + runner = _build_demo_runner() + with pytest.raises(ValueError): + EvalHookParam(dataloader, start=-2) + + evalhook = EvalHookParam(dataloader, start=0) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner.run([dataloader], [('train', 1)], 2) + assert evalhook.evaluate.call_count == 3 # before epoch1 and after e1 & e2 + + # 6. resuming from epoch i, start = x (x<=i), interval=1: perform + # evaluation after each epoch and before the first epoch.
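+ # runner._epoch is set by hand below to simulate resuming from a checkpoint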
+ runner = _build_demo_runner() + evalhook = EvalHookParam(dataloader, start=1) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner._epoch = 2 + runner.run([dataloader], [('train', 1)], 3) + assert evalhook.evaluate.call_count == 2 # before & after epoch 3 + + # 7. resuming from epoch i, start = i+1/None, interval =1: perform + # evaluation after each epoch. + runner = _build_demo_runner() + evalhook = EvalHookParam(dataloader, start=2) + evalhook.evaluate = MagicMock() + runner.register_hook(evalhook) + runner._epoch = 1 + runner.run([dataloader], [('train', 1)], 3) + assert evalhook.evaluate.call_count == 2 # after epoch 2 & 3 + + +def _build_demo_runner(): + + class Model(nn.Module): + + def __init__(self): + super().__init__() + self.linear = nn.Linear(2, 1) + + def forward(self, x): + return self.linear(x) + + def train_step(self, x, optimizer, **kwargs): + return dict(loss=self(x)) + + def val_step(self, x, optimizer, **kwargs): + return dict(loss=self(x)) + + model = Model() + tmp_dir = tempfile.mkdtemp() + + runner = EpochBasedRunner( + model=model, work_dir=tmp_dir, logger=logging.getLogger()) + return runner + + +@pytest.mark.parametrize('classes, expected_length', [(['bus'], 2), + (['car'], 1), + (['bus', 'car'], 2)]) +def test_allow_empty_images(classes, expected_length): + dataset_class = DATASETS.get('CocoDataset') + # Filter empty images + filtered_dataset = dataset_class( + ann_file='tests/data/coco_sample.json', + img_prefix='tests/data', + pipeline=[], + classes=classes, + filter_empty_gt=True) + + # Get all + full_dataset = dataset_class( + ann_file='tests/data/coco_sample.json', + img_prefix='tests/data', + pipeline=[], + classes=classes, + filter_empty_gt=False) + + assert len(filtered_dataset) == expected_length + assert len(filtered_dataset.img_ids) == expected_length + assert len(full_dataset) == 3 + assert len(full_dataset.img_ids) == 3 + assert filtered_dataset.CLASSES == classes + assert full_dataset.CLASSES == classes diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_custom_dataset.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_custom_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..bda449966c2c7e37b0769010341686fa6394a29e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_custom_dataset.py @@ -0,0 +1,88 @@ +from unittest.mock import MagicMock, patch + +import pytest + +from mmdet.datasets import DATASETS + + +@patch('mmdet.datasets.CocoDataset.load_annotations', MagicMock()) +@patch('mmdet.datasets.CustomDataset.load_annotations', MagicMock()) +@patch('mmdet.datasets.XMLDataset.load_annotations', MagicMock()) +@patch('mmdet.datasets.CityscapesDataset.load_annotations', MagicMock()) +@patch('mmdet.datasets.CocoDataset._filter_imgs', MagicMock) +@patch('mmdet.datasets.CustomDataset._filter_imgs', MagicMock) +@patch('mmdet.datasets.XMLDataset._filter_imgs', MagicMock) +@patch('mmdet.datasets.CityscapesDataset._filter_imgs', MagicMock) +@pytest.mark.parametrize('dataset', + ['CocoDataset', 'VOCDataset', 'CityscapesDataset']) +def test_custom_classes_override_default(dataset): + dataset_class = DATASETS.get(dataset) + if dataset in ['CocoDataset', 'CityscapesDataset']: + dataset_class.coco = MagicMock() + dataset_class.cat_ids = MagicMock() + + original_classes = dataset_class.CLASSES + + # Test setting classes as a tuple + custom_dataset = dataset_class( + ann_file=MagicMock(), + 
pipeline=[], + classes=('bus', 'car'), + test_mode=True, + img_prefix='VOC2007' if dataset == 'VOCDataset' else '') + + assert custom_dataset.CLASSES != original_classes + assert custom_dataset.CLASSES == ('bus', 'car') + print(custom_dataset) + + # Test setting classes as a list + custom_dataset = dataset_class( + ann_file=MagicMock(), + pipeline=[], + classes=['bus', 'car'], + test_mode=True, + img_prefix='VOC2007' if dataset == 'VOCDataset' else '') + + assert custom_dataset.CLASSES != original_classes + assert custom_dataset.CLASSES == ['bus', 'car'] + print(custom_dataset) + + # Test overriding not a subset + custom_dataset = dataset_class( + ann_file=MagicMock(), + pipeline=[], + classes=['foo'], + test_mode=True, + img_prefix='VOC2007' if dataset == 'VOCDataset' else '') + + assert custom_dataset.CLASSES != original_classes + assert custom_dataset.CLASSES == ['foo'] + print(custom_dataset) + + # Test default behavior + custom_dataset = dataset_class( + ann_file=MagicMock(), + pipeline=[], + classes=None, + test_mode=True, + img_prefix='VOC2007' if dataset == 'VOCDataset' else '') + + assert custom_dataset.CLASSES == original_classes + print(custom_dataset) + + # Test sending file path + import tempfile + tmp_file = tempfile.NamedTemporaryFile() + with open(tmp_file.name, 'w') as f: + f.write('bus\ncar\n') + custom_dataset = dataset_class( + ann_file=MagicMock(), + pipeline=[], + classes=tmp_file.name, + test_mode=True, + img_prefix='VOC2007' if dataset == 'VOCDataset' else '') + tmp_file.close() + + assert custom_dataset.CLASSES != original_classes + assert custom_dataset.CLASSES == ['bus', 'car'] + print(custom_dataset) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_dataset_wrapper.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_dataset_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..c08c990b7bf7c1bc673f0dc3a5fd8edbf1519120 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_dataset_wrapper.py @@ -0,0 +1,80 @@ +import bisect +import math +from collections import defaultdict +from unittest.mock import MagicMock + +import numpy as np + +from mmdet.datasets import (ClassBalancedDataset, ConcatDataset, CustomDataset, + RepeatDataset) + + +def test_dataset_wrapper(): + CustomDataset.load_annotations = MagicMock() + CustomDataset.__getitem__ = MagicMock(side_effect=lambda idx: idx) + dataset_a = CustomDataset( + ann_file=MagicMock(), pipeline=[], test_mode=True, img_prefix='') + len_a = 10 + cat_ids_list_a = [ + np.random.randint(0, 80, num).tolist() + for num in np.random.randint(1, 20, len_a) + ] + dataset_a.data_infos = MagicMock() + dataset_a.data_infos.__len__.return_value = len_a + dataset_a.get_cat_ids = MagicMock( + side_effect=lambda idx: cat_ids_list_a[idx]) + dataset_b = CustomDataset( + ann_file=MagicMock(), pipeline=[], test_mode=True, img_prefix='') + len_b = 20 + cat_ids_list_b = [ + np.random.randint(0, 80, num).tolist() + for num in np.random.randint(1, 20, len_b) + ] + dataset_b.data_infos = MagicMock() + dataset_b.data_infos.__len__.return_value = len_b + dataset_b.get_cat_ids = MagicMock( + side_effect=lambda idx: cat_ids_list_b[idx]) + + concat_dataset = ConcatDataset([dataset_a, dataset_b]) + assert concat_dataset[5] == 5 + assert concat_dataset[25] == 15 + assert concat_dataset.get_cat_ids(5) == cat_ids_list_a[5] + assert concat_dataset.get_cat_ids(25) == cat_ids_list_b[15] + assert len(concat_dataset) == 
len(dataset_a) + len(dataset_b) + + repeat_dataset = RepeatDataset(dataset_a, 10) + assert repeat_dataset[5] == 5 + assert repeat_dataset[15] == 5 + assert repeat_dataset[27] == 7 + assert repeat_dataset.get_cat_ids(5) == cat_ids_list_a[5] + assert repeat_dataset.get_cat_ids(15) == cat_ids_list_a[5] + assert repeat_dataset.get_cat_ids(27) == cat_ids_list_a[7] + assert len(repeat_dataset) == 10 * len(dataset_a) + + category_freq = defaultdict(int) + for cat_ids in cat_ids_list_a: + cat_ids = set(cat_ids) + for cat_id in cat_ids: + category_freq[cat_id] += 1 + for k, v in category_freq.items(): + category_freq[k] = v / len(cat_ids_list_a) + + mean_freq = np.mean(list(category_freq.values())) + repeat_thr = mean_freq + + category_repeat = { + cat_id: max(1.0, math.sqrt(repeat_thr / cat_freq)) + for cat_id, cat_freq in category_freq.items() + } + + repeat_factors = [] + for cat_ids in cat_ids_list_a: + cat_ids = set(cat_ids) + repeat_factor = max({category_repeat[cat_id] for cat_id in cat_ids}) + repeat_factors.append(math.ceil(repeat_factor)) + repeat_factors_cumsum = np.cumsum(repeat_factors) + repeat_factor_dataset = ClassBalancedDataset(dataset_a, repeat_thr) + assert len(repeat_factor_dataset) == repeat_factors_cumsum[-1] + for idx in np.random.randint(0, len(repeat_factor_dataset), 3): + assert repeat_factor_dataset[idx] == bisect.bisect_right( + repeat_factors_cumsum, idx) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_xml_dataset.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_xml_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..ebdd9e6f1101148e29bb1c59d336f6ee0d3c401c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_datasets/test_xml_dataset.py @@ -0,0 +1,22 @@ +import pytest + +from mmdet.datasets import DATASETS + + +def test_xml_dataset(): + dataconfig = { + 'ann_file': 'data/VOCdevkit/VOC2007/ImageSets/Main/test.txt', + 'img_prefix': 'data/VOCdevkit/VOC2007/', + 'pipeline': [{ + 'type': 'LoadImageFromFile' + }] + } + XMLDataset = DATASETS.get('XMLDataset') + + class XMLDatasetSubClass(XMLDataset): + CLASSES = None + + # get_ann_info and _filter_imgs of XMLDataset rely on self.CLASSES; + # since the subclass sets CLASSES to None, instantiation must fail + with pytest.raises(AssertionError): + XMLDatasetSubClass(**dataconfig) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_formatting.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_formatting.py new file mode 100644 index 0000000000000000000000000000000000000000..8a2a37571f448fe824833da839f0dda9b40de699 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_formatting.py @@ -0,0 +1,23 @@ +import os.path as osp + +from mmcv.utils import build_from_cfg + +from mmdet.datasets.builder import PIPELINES + + +def test_default_format_bundle(): + results = dict( + img_prefix=osp.join(osp.dirname(__file__), '../../data'), + img_info=dict(filename='color.jpg')) + load = dict(type='LoadImageFromFile') + load = build_from_cfg(load, PIPELINES) + bundle = dict(type='DefaultFormatBundle') + bundle = build_from_cfg(bundle, PIPELINES) + results = load(results) + assert 'pad_shape' not in results + assert 'scale_factor' not in results + assert 'img_norm_cfg' not in results + results = bundle(results) + assert 'pad_shape' in results + assert 'scale_factor' in results + assert 'img_norm_cfg' in results diff --git
a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_loading.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_loading.py new file mode 100644 index 0000000000000000000000000000000000000000..e28c3b04b08215d9703f3c90a343aacb44773a1a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_loading.py @@ -0,0 +1,90 @@ +import copy +import os.path as osp + +import mmcv +import numpy as np + +from mmdet.datasets.pipelines import (LoadImageFromFile, LoadImageFromWebcam, + LoadMultiChannelImageFromFiles) + + +class TestLoading: + + @classmethod + def setup_class(cls): + cls.data_prefix = osp.join(osp.dirname(__file__), '../../data') + + def test_load_img(self): + results = dict( + img_prefix=self.data_prefix, img_info=dict(filename='color.jpg')) + transform = LoadImageFromFile() + results = transform(copy.deepcopy(results)) + assert results['filename'] == osp.join(self.data_prefix, 'color.jpg') + assert results['ori_filename'] == 'color.jpg' + assert results['img'].shape == (288, 512, 3) + assert results['img'].dtype == np.uint8 + assert results['img_shape'] == (288, 512, 3) + assert results['ori_shape'] == (288, 512, 3) + assert repr(transform) == transform.__class__.__name__ + \ + "(to_float32=False, color_type='color', " + \ + "file_client_args={'backend': 'disk'})" + + # no img_prefix + results = dict( + img_prefix=None, img_info=dict(filename='tests/data/color.jpg')) + transform = LoadImageFromFile() + results = transform(copy.deepcopy(results)) + assert results['filename'] == 'tests/data/color.jpg' + assert results['ori_filename'] == 'tests/data/color.jpg' + assert results['img'].shape == (288, 512, 3) + + # to_float32 + transform = LoadImageFromFile(to_float32=True) + results = transform(copy.deepcopy(results)) + assert results['img'].dtype == np.float32 + + # gray image + results = dict( + img_prefix=self.data_prefix, img_info=dict(filename='gray.jpg')) + transform = LoadImageFromFile() + results = transform(copy.deepcopy(results)) + assert results['img'].shape == (288, 512, 3) + assert results['img'].dtype == np.uint8 + + transform = LoadImageFromFile(color_type='unchanged') + results = transform(copy.deepcopy(results)) + assert results['img'].shape == (288, 512) + assert results['img'].dtype == np.uint8 + + def test_load_multi_channel_img(self): + results = dict( + img_prefix=self.data_prefix, + img_info=dict(filename=['color.jpg', 'color.jpg'])) + transform = LoadMultiChannelImageFromFiles() + results = transform(copy.deepcopy(results)) + assert results['filename'] == [ + osp.join(self.data_prefix, 'color.jpg'), + osp.join(self.data_prefix, 'color.jpg') + ] + assert results['ori_filename'] == ['color.jpg', 'color.jpg'] + assert results['img'].shape == (288, 512, 3, 2) + assert results['img'].dtype == np.uint8 + assert results['img_shape'] == (288, 512, 3, 2) + assert results['ori_shape'] == (288, 512, 3, 2) + assert results['pad_shape'] == (288, 512, 3, 2) + assert results['scale_factor'] == 1.0 + assert repr(transform) == transform.__class__.__name__ + \ + "(to_float32=False, color_type='unchanged', " + \ + "file_client_args={'backend': 'disk'})" + + def test_load_webcam_img(self): + img = mmcv.imread(osp.join(self.data_prefix, 'color.jpg')) + results = dict(img=img) + transform = LoadImageFromWebcam() + results = transform(copy.deepcopy(results)) + assert results['filename'] is None + assert results['ori_filename'] is None + assert results['img'].shape == (288, 512, 3) + 
assert results['img'].dtype == np.uint8 + assert results['img_shape'] == (288, 512, 3) + assert results['ori_shape'] == (288, 512, 3) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_sampler.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_sampler.py new file mode 100644 index 0000000000000000000000000000000000000000..1ba5c562a9c09586a39be39a474af7aeaaacc4b8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_sampler.py @@ -0,0 +1,328 @@ +import torch + +from mmdet.core.bbox.assigners import MaxIoUAssigner +from mmdet.core.bbox.samplers import (OHEMSampler, RandomSampler, + ScoreHLRSampler) + + +def test_random_sampler(): + assigner = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_labels = torch.LongTensor([1, 2]) + gt_bboxes_ignore = torch.Tensor([ + [30, 30, 40, 40], + ]) + assign_result = assigner.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + + sampler = RandomSampler( + num=10, pos_fraction=0.5, neg_pos_ub=-1, add_gt_as_proposals=True) + + sample_result = sampler.sample(assign_result, bboxes, gt_bboxes, gt_labels) + + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + +def test_random_sampler_empty_gt(): + assigner = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.empty(0, 4) + gt_labels = torch.empty(0, ).long() + assign_result = assigner.assign(bboxes, gt_bboxes, gt_labels=gt_labels) + + sampler = RandomSampler( + num=10, pos_fraction=0.5, neg_pos_ub=-1, add_gt_as_proposals=True) + + sample_result = sampler.sample(assign_result, bboxes, gt_bboxes, gt_labels) + + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + +def test_random_sampler_empty_pred(): + assigner = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + bboxes = torch.empty(0, 4) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_labels = torch.LongTensor([1, 2]) + assign_result = assigner.assign(bboxes, gt_bboxes, gt_labels=gt_labels) + + sampler = RandomSampler( + num=10, pos_fraction=0.5, neg_pos_ub=-1, add_gt_as_proposals=True) + + sample_result = sampler.sample(assign_result, bboxes, gt_bboxes, gt_labels) + + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + +def _context_for_ohem(): + import sys + from os.path import dirname + sys.path.insert(0, dirname(dirname(dirname(__file__)))) + from test_forward import _get_detector_cfg + + model = _get_detector_cfg( + 'faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py') + model['pretrained'] = None + + from mmdet.models import build_detector + context = build_detector(model).roi_head + return context + + +def test_ohem_sampler(): + + assigner = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + 
ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_labels = torch.LongTensor([1, 2]) + gt_bboxes_ignore = torch.Tensor([ + [30, 30, 40, 40], + ]) + assign_result = assigner.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + + context = _context_for_ohem() + + sampler = OHEMSampler( + num=10, + pos_fraction=0.5, + context=context, + neg_pos_ub=-1, + add_gt_as_proposals=True) + + feats = [torch.rand(1, 256, int(2**i), int(2**i)) for i in [6, 5, 4, 3, 2]] + sample_result = sampler.sample( + assign_result, bboxes, gt_bboxes, gt_labels, feats=feats) + + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + +def test_ohem_sampler_empty_gt(): + + assigner = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.empty(0, 4) + gt_labels = torch.LongTensor([]) + gt_bboxes_ignore = torch.Tensor([]) + assign_result = assigner.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + + context = _context_for_ohem() + + sampler = OHEMSampler( + num=10, + pos_fraction=0.5, + context=context, + neg_pos_ub=-1, + add_gt_as_proposals=True) + + feats = [torch.rand(1, 256, int(2**i), int(2**i)) for i in [6, 5, 4, 3, 2]] + + sample_result = sampler.sample( + assign_result, bboxes, gt_bboxes, gt_labels, feats=feats) + + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + +def test_ohem_sampler_empty_pred(): + assigner = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + bboxes = torch.empty(0, 4) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_labels = torch.LongTensor([1, 2, 2, 3]) + gt_bboxes_ignore = torch.Tensor([]) + assign_result = assigner.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + + context = _context_for_ohem() + + sampler = OHEMSampler( + num=10, + pos_fraction=0.5, + context=context, + neg_pos_ub=-1, + add_gt_as_proposals=True) + + feats = [torch.rand(1, 256, int(2**i), int(2**i)) for i in [6, 5, 4, 3, 2]] + + sample_result = sampler.sample( + assign_result, bboxes, gt_bboxes, gt_labels, feats=feats) + + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + +def test_random_sample_result(): + from mmdet.core.bbox.samplers.sampling_result import SamplingResult + SamplingResult.random(num_gts=0, num_preds=0) + SamplingResult.random(num_gts=0, num_preds=3) + SamplingResult.random(num_gts=3, num_preds=3) + SamplingResult.random(num_gts=0, num_preds=3) + SamplingResult.random(num_gts=7, num_preds=7) + SamplingResult.random(num_gts=7, num_preds=64) + SamplingResult.random(num_gts=24, num_preds=3) + + for i in range(3): + SamplingResult.random(rng=i) + + +def test_score_hlr_sampler_empty_pred(): + assigner = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + 
) + context = _context_for_ohem() + sampler = ScoreHLRSampler( + num=10, + pos_fraction=0.5, + context=context, + neg_pos_ub=-1, + add_gt_as_proposals=True) + gt_bboxes_ignore = torch.Tensor([]) + feats = [torch.rand(1, 256, int(2**i), int(2**i)) for i in [6, 5, 4, 3, 2]] + + # empty bbox + bboxes = torch.empty(0, 4) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_labels = torch.LongTensor([1, 2, 2, 3]) + assign_result = assigner.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + sample_result, _ = sampler.sample( + assign_result, bboxes, gt_bboxes, gt_labels, feats=feats) + assert len(sample_result.neg_inds) == 0 + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + # empty gt + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.empty(0, 4) + gt_labels = torch.LongTensor([]) + assign_result = assigner.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + sample_result, _ = sampler.sample( + assign_result, bboxes, gt_bboxes, gt_labels, feats=feats) + assert len(sample_result.pos_inds) == 0 + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) + + # non-empty input + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_labels = torch.LongTensor([1, 2, 2, 3]) + assign_result = assigner.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + sample_result, _ = sampler.sample( + assign_result, bboxes, gt_bboxes, gt_labels, feats=feats) + assert len(sample_result.pos_bboxes) == len(sample_result.pos_inds) + assert len(sample_result.neg_bboxes) == len(sample_result.neg_inds) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_img_augment.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_img_augment.py new file mode 100644 index 0000000000000000000000000000000000000000..8f7dd9eb027fc04f300f920c4b80de750373500d --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_img_augment.py @@ -0,0 +1,203 @@ +import copy + +import mmcv +import numpy as np +from mmcv.utils import build_from_cfg +from numpy.testing import assert_array_equal + +from mmdet.core.mask import BitmapMasks, PolygonMasks +from mmdet.datasets.builder import PIPELINES + + +def construct_toy_data(poly2mask=True): + img = np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=np.uint8) + img = np.stack([img, img, img], axis=-1) + results = dict() + # image + results['img'] = img + results['img_shape'] = img.shape + results['img_fields'] = ['img'] + # bboxes + results['bbox_fields'] = ['gt_bboxes', 'gt_bboxes_ignore'] + results['gt_bboxes'] = np.array([[0., 0., 2., 1.]], dtype=np.float32) + results['gt_bboxes_ignore'] = np.array([[2., 0., 3., 1.]], + dtype=np.float32) + # labels + results['gt_labels'] = np.array([1], dtype=np.int64) + # masks + results['mask_fields'] = ['gt_masks'] + if poly2mask: + gt_masks = np.array([[0, 1, 1, 0], [0, 1, 0, 0]], + dtype=np.uint8)[None, 
:, :] + results['gt_masks'] = BitmapMasks(gt_masks, 2, 4) + else: + raw_masks = [[np.array([1, 0, 2, 0, 2, 1, 1, 1], dtype=np.float)]] + results['gt_masks'] = PolygonMasks(raw_masks, 2, 4) + # segmentations + results['seg_fields'] = ['gt_semantic_seg'] + results['gt_semantic_seg'] = img[..., 0] + return results + + +def test_adjust_color(): + results = construct_toy_data() + # test without aug + transform = dict(type='ColorTransform', prob=0, level=10) + transform_module = build_from_cfg(transform, PIPELINES) + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], results['img']) + + # test with factor 1 + img = results['img'] + transform = dict(type='ColorTransform', prob=1, level=10) + transform_module = build_from_cfg(transform, PIPELINES) + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], img) + + # test with factor 0 + transform_module.factor = 0 + img_gray = mmcv.bgr2gray(img.copy()) + img_r = np.stack([img_gray, img_gray, img_gray], axis=-1) + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], img_r) + + # test with factor 0.5 + transform_module.factor = 0.5 + results_transformed = transform_module(copy.deepcopy(results)) + img = results['img'] + assert_array_equal( + results_transformed['img'], + np.round(np.clip((img * 0.5 + img_r * 0.5), 0, 255)).astype(img.dtype)) + + +def test_imequalize(nb_rand_test=100): + + def _imequalize(img): + # equalize the image using PIL.ImageOps.equalize + from PIL import ImageOps, Image + img = Image.fromarray(img) + equalized_img = np.asarray(ImageOps.equalize(img)) + return equalized_img + + results = construct_toy_data() + # test without aug + transform = dict(type='EqualizeTransform', prob=0) + transform_module = build_from_cfg(transform, PIPELINES) + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], results['img']) + + # test equalize in the corner case step=0 (the image is returned unchanged) + transform = dict(type='EqualizeTransform', prob=1.) + transform_module = build_from_cfg(transform, PIPELINES) + img = np.array([[0, 0, 0], [120, 120, 120], [255, 255, 255]], + dtype=np.uint8) + img = np.stack([img, img, img], axis=-1) + results['img'] = img + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], img) + + # test equalize with randomly sampled image. + for _ in range(nb_rand_test): + img = np.clip(np.random.uniform(0, 1, (1000, 1200, 3)) * 260, 0, + 255).astype(np.uint8) + results['img'] = img + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], _imequalize(img)) + + +def test_adjust_brightness(nb_rand_test=100): + + def _adjust_brightness(img, factor): + # adjust the brightness of image using + # PIL.ImageEnhance.Brightness + from PIL.ImageEnhance import Brightness + from PIL import Image + img = Image.fromarray(img) + brightened_img = Brightness(img).enhance(factor) + return np.asarray(brightened_img) + + results = construct_toy_data() + # test without aug + transform = dict(type='BrightnessTransform', level=10, prob=0) + transform_module = build_from_cfg(transform, PIPELINES) + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], results['img']) + + # test case with factor 1.0 + transform = dict(type='BrightnessTransform', level=10, prob=1.)
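+ # build the transform, then pin its randomly sampled factor so each case below is deterministic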
+ transform_module = build_from_cfg(transform, PIPELINES) + transform_module.factor = 1.0 + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], results['img']) + + # test case with factor 0.0 + transform_module.factor = 0.0 + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], + np.zeros_like(results['img'])) + + # test with randomly sampled images and factors. + for _ in range(nb_rand_test): + img = np.clip(np.random.uniform(0, 1, (1000, 1200, 3)) * 260, 0, + 255).astype(np.uint8) + factor = np.random.uniform() + transform_module.factor = factor + results['img'] = img + np.testing.assert_allclose( + transform_module(copy.deepcopy(results))['img'].astype(np.int32), + _adjust_brightness(img, factor).astype(np.int32), + rtol=0, + atol=1) + + +def test_adjust_contrast(nb_rand_test=100): + + def _adjust_contrast(img, factor): + from PIL.ImageEnhance import Contrast + from PIL import Image + # Image.fromarray expects RGB by default, not BGR. + # convert from BGR to RGB + img = Image.fromarray(img[..., ::-1], mode='RGB') + contrasted_img = Contrast(img).enhance(factor) + # convert from RGB to BGR + return np.asarray(contrasted_img)[..., ::-1] + + results = construct_toy_data() + # test without aug + transform = dict(type='ContrastTransform', level=10, prob=0) + transform_module = build_from_cfg(transform, PIPELINES) + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], results['img']) + + # test case with factor 1.0 + transform = dict(type='ContrastTransform', level=10, prob=1.) + transform_module = build_from_cfg(transform, PIPELINES) + transform_module.factor = 1.0 + results_transformed = transform_module(copy.deepcopy(results)) + assert_array_equal(results_transformed['img'], results['img']) + + # test case with factor 0.0 + transform_module.factor = 0.0 + results_transformed = transform_module(copy.deepcopy(results)) + np.testing.assert_allclose( + results_transformed['img'], + _adjust_contrast(results['img'], 0.), + rtol=0, + atol=1) + + # test adjust_contrast with randomly sampled images and factors. + for _ in range(nb_rand_test): + img = np.clip(np.random.uniform(0, 1, (1200, 1000, 3)) * 260, 0, + 255).astype(np.uint8) + factor = np.random.uniform() + transform_module.factor = factor + results['img'] = img + results_transformed = transform_module(copy.deepcopy(results)) + # Note: the gap (at most 1) between PIL.ImageEnhance.Contrast + # and mmcv.adjust_contrast comes from the different color-to-gray + # conversions used by PIL and mmcv.
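+ # both sides are cast to int32 below so the uint8 difference cannot wrap around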
+ np.testing.assert_allclose( + results_transformed['img'].astype(np.int32), + _adjust_contrast(results['img'], factor).astype(np.int32), + rtol=0, + atol=1) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_models_aug_test.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_models_aug_test.py new file mode 100644 index 0000000000000000000000000000000000000000..d1f34b20340ad60bb56cb8845ca42ec32fbba2cf --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_models_aug_test.py @@ -0,0 +1,130 @@ +import os.path as osp + +import mmcv +import torch +from mmcv.parallel import collate +from mmcv.utils import build_from_cfg + +from mmdet.datasets.builder import PIPELINES +from mmdet.models import build_detector + + +def model_aug_test_template(cfg_file): + # get config + cfg = mmcv.Config.fromfile(cfg_file) + # init model + cfg.model.pretrained = None + cfg.model.train_cfg = None + model = build_detector(cfg.model) + + # init test pipeline and set aug test + load_cfg, multi_scale_cfg = cfg.test_pipeline + multi_scale_cfg['flip'] = True + multi_scale_cfg['flip_direction'] = ['horizontal', 'vertical', 'diagonal'] + multi_scale_cfg['img_scale'] = [(1333, 800), (800, 600), (640, 480)] + + load = build_from_cfg(load_cfg, PIPELINES) + transform = build_from_cfg(multi_scale_cfg, PIPELINES) + + results = dict( + img_prefix=osp.join(osp.dirname(__file__), '../../../data'), + img_info=dict(filename='color.jpg')) + results = transform(load(results)) + assert len(results['img']) == 12 + assert len(results['img_metas']) == 12 + + results['img'] = [collate([x]) for x in results['img']] + results['img_metas'] = [collate([x]).data[0] for x in results['img_metas']] + # aug test the model + model.eval() + with torch.no_grad(): + aug_result = model(return_loss=False, rescale=True, **results) + return aug_result + + +def test_aug_test_size(): + results = dict( + img_prefix=osp.join(osp.dirname(__file__), '../../../data'), + img_info=dict(filename='color.jpg')) + + # Define simple pipeline + load = dict(type='LoadImageFromFile') + load = build_from_cfg(load, PIPELINES) + + # get config + transform = dict( + type='MultiScaleFlipAug', + transforms=[], + img_scale=[(1333, 800), (800, 600), (640, 480)], + flip=True, + flip_direction=['horizontal', 'vertical', 'diagonal']) + multi_aug_test_module = build_from_cfg(transform, PIPELINES) + + results = load(results) + results = multi_aug_test_module(results) + # len(["original", "horizontal", "vertical", "diagonal"]) * + # len([(1333, 800), (800, 600), (640, 480)]) + assert len(results['img']) == 12 + + +def test_cascade_rcnn_aug_test(): + aug_result = model_aug_test_template( + 'configs/cascade_rcnn/cascade_rcnn_r50_fpn_1x_coco.py') + assert len(aug_result[0]) == 80 + + +def test_mask_rcnn_aug_test(): + aug_result = model_aug_test_template( + 'configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py') + assert len(aug_result[0]) == 2 + assert len(aug_result[0][0]) == 80 + assert len(aug_result[0][1]) == 80 + + +def test_htc_aug_test(): + aug_result = model_aug_test_template('configs/htc/htc_r50_fpn_1x_coco.py') + assert len(aug_result[0]) == 2 + assert len(aug_result[0][0]) == 80 + assert len(aug_result[0][1]) == 80 + + +def test_scnet_aug_test(): + aug_result = model_aug_test_template( + 'configs/scnet/scnet_r50_fpn_1x_coco.py') + assert len(aug_result[0]) == 2 + assert len(aug_result[0][0])
== 80 + assert len(aug_result[0][1]) == 80 + + +def test_cornernet_aug_test(): + # get config + cfg = mmcv.Config.fromfile( + 'configs/cornernet/cornernet_hourglass104_mstest_10x5_210e_coco.py') + # init model + cfg.model.pretrained = None + cfg.model.train_cfg = None + model = build_detector(cfg.model) + + # init test pipeline and set aug test + load_cfg, multi_scale_cfg = cfg.test_pipeline + multi_scale_cfg['flip'] = True + multi_scale_cfg['flip_direction'] = ['horizontal', 'vertical', 'diagonal'] + multi_scale_cfg['scale_factor'] = [0.5, 1.0, 2.0] + + load = build_from_cfg(load_cfg, PIPELINES) + transform = build_from_cfg(multi_scale_cfg, PIPELINES) + + results = dict( + img_prefix=osp.join(osp.dirname(__file__), '../../../data'), + img_info=dict(filename='color.jpg')) + results = transform(load(results)) + assert len(results['img']) == 12 + assert len(results['img_metas']) == 12 + + results['img'] = [collate([x]) for x in results['img']] + results['img_metas'] = [collate([x]).data[0] for x in results['img_metas']] + # aug test the model + model.eval() + with torch.no_grad(): + aug_result = model(return_loss=False, rescale=True, **results) + assert len(aug_result[0]) == 80 diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_rotate.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_rotate.py new file mode 100644 index 0000000000000000000000000000000000000000..c440451ade7c1cc5dc33df825f350d3c091b42cd --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_rotate.py @@ -0,0 +1,224 @@ +import copy + +import numpy as np +import pytest +from mmcv.utils import build_from_cfg + +from mmdet.core.mask import BitmapMasks, PolygonMasks +from mmdet.datasets.builder import PIPELINES + + +def construct_toy_data(poly2mask=True): + img = np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=np.uint8) + img = np.stack([img, img, img], axis=-1) + results = dict() + # image + results['img'] = img + results['img_shape'] = img.shape + results['img_fields'] = ['img'] + # bboxes + results['bbox_fields'] = ['gt_bboxes', 'gt_bboxes_ignore'] + results['gt_bboxes'] = np.array([[0., 0., 2., 1.]], dtype=np.float32) + results['gt_bboxes_ignore'] = np.array([[2., 0., 3., 1.]], + dtype=np.float32) + # labels + results['gt_labels'] = np.array([1], dtype=np.int64) + # masks + results['mask_fields'] = ['gt_masks'] + if poly2mask: + gt_masks = np.array([[0, 1, 1, 0], [0, 1, 0, 0]], + dtype=np.uint8)[None, :, :] + results['gt_masks'] = BitmapMasks(gt_masks, 2, 4) + else: + raw_masks = [[np.array([0, 0, 2, 0, 2, 1, 0, 1], dtype=np.float)]] + results['gt_masks'] = PolygonMasks(raw_masks, 2, 4) + # segmentations + results['seg_fields'] = ['gt_semantic_seg'] + results['gt_semantic_seg'] = img[..., 0] + return results + + +def _check_fields(results, results_rotated, keys): + for key in keys: + if isinstance(results[key], (BitmapMasks, PolygonMasks)): + assert np.equal(results[key].to_ndarray(), + results_rotated[key].to_ndarray()).all() + else: + assert np.equal(results[key], results_rotated[key]).all() + + +def check_rotate(results, results_rotated): + # check image + _check_fields(results, results_rotated, results.get('img_fields', ['img'])) + # check bboxes + _check_fields(results, results_rotated, results.get('bbox_fields', [])) + # check masks + _check_fields(results, results_rotated, results.get('mask_fields', [])) + # check segmentations + _check_fields(results, 
results_rotated, results.get('seg_fields', [])) + # check gt_labels + if 'gt_labels' in results: + assert np.equal(results['gt_labels'], + results_rotated['gt_labels']).all() + + +def test_rotate(): + # test assertion for invalid type of max_rotate_angle + with pytest.raises(AssertionError): + transform = dict(type='Rotate', level=1, max_rotate_angle=(30, )) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid type of scale + with pytest.raises(AssertionError): + transform = dict(type='Rotate', level=2, scale=(1.2, )) + build_from_cfg(transform, PIPELINES) + + # test ValueError for invalid type of img_fill_val + with pytest.raises(ValueError): + transform = dict( + type='Rotate', level=2, img_fill_val=[ + 128, + ]) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid number of elements in center + with pytest.raises(AssertionError): + transform = dict(type='Rotate', level=2, center=(0.5, )) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid type of center + with pytest.raises(AssertionError): + transform = dict(type='Rotate', level=2, center=[0, 0]) + build_from_cfg(transform, PIPELINES) + + # test case when no rotate aug (level=0) + results = construct_toy_data() + img_fill_val = (104, 116, 124) + seg_ignore_label = 255 + transform = dict( + type='Rotate', + level=0, + prob=1., + img_fill_val=img_fill_val, + seg_ignore_label=seg_ignore_label, + ) + rotate_module = build_from_cfg(transform, PIPELINES) + results_wo_rotate = rotate_module(copy.deepcopy(results)) + check_rotate(results, results_wo_rotate) + + # test case when no rotate aug (prob<=0) + transform = dict( + type='Rotate', level=10, prob=0., img_fill_val=img_fill_val, scale=0.6) + rotate_module = build_from_cfg(transform, PIPELINES) + results_wo_rotate = rotate_module(copy.deepcopy(results)) + check_rotate(results, results_wo_rotate) + + # test clockwise rotation with angle 90 + results = construct_toy_data() + img_fill_val = 128 + transform = dict( + type='Rotate', + level=10, + max_rotate_angle=90, + img_fill_val=img_fill_val, + # set random_negative_prob to 0 for clockwise rotation + random_negative_prob=0., + prob=1.)
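+ # With level=10 (assumed to equal the maximum level) and + # max_rotate_angle=90, the sampled angle should be exactly 90 degrees, so + # the expected image, boxes, masks and segmentation map below can be + # written out by hand.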
+ rotate_module = build_from_cfg(transform, PIPELINES) + results_rotated = rotate_module(copy.deepcopy(results)) + img_r = np.array([[img_fill_val, 6, 2, img_fill_val], + [img_fill_val, 7, 3, img_fill_val]]).astype(np.uint8) + img_r = np.stack([img_r, img_r, img_r], axis=-1) + results_gt = copy.deepcopy(results) + results_gt['img'] = img_r + results_gt['gt_bboxes'] = np.array([[1., 0., 2., 1.]], dtype=np.float32) + results_gt['gt_bboxes_ignore'] = np.empty((0, 4), dtype=np.float32) + gt_masks = np.array([[0, 1, 1, 0], [0, 0, 1, 0]], + dtype=np.uint8)[None, :, :] + results_gt['gt_masks'] = BitmapMasks(gt_masks, 2, 4) + results_gt['gt_semantic_seg'] = np.array( + [[255, 6, 2, 255], [255, 7, 3, + 255]]).astype(results['gt_semantic_seg'].dtype) + check_rotate(results_gt, results_rotated) + + # test clockwise rotation with angle 90, PolygonMasks + results = construct_toy_data(poly2mask=False) + results_rotated = rotate_module(copy.deepcopy(results)) + gt_masks = [[np.array([2, 0, 2, 1, 1, 1, 1, 0], dtype=np.float)]] + results_gt['gt_masks'] = PolygonMasks(gt_masks, 2, 4) + check_rotate(results_gt, results_rotated) + + # test counter-clockwise rotation with angle 90, + # and specify the rotation center + img_fill_val = (104, 116, 124) + transform = dict( + type='Rotate', + level=10, + max_rotate_angle=90, + center=(0, 0), + img_fill_val=img_fill_val, + # set random_negative_prob to 1 for counter-clockwise rotation + random_negative_prob=1., + prob=1.) + results = construct_toy_data() + rotate_module = build_from_cfg(transform, PIPELINES) + results_rotated = rotate_module(copy.deepcopy(results)) + results_gt = copy.deepcopy(results) + h, w = results['img'].shape[:2] + img_r = np.stack([ + np.ones((h, w)) * img_fill_val[0], + np.ones((h, w)) * img_fill_val[1], + np.ones((h, w)) * img_fill_val[2] + ], + axis=-1).astype(np.uint8) + img_r[0, 0, :] = 1 + img_r[0, 1, :] = 5 + results_gt['img'] = img_r + results_gt['gt_bboxes'] = np.empty((0, 4), dtype=np.float32) + results_gt['gt_bboxes_ignore'] = np.empty((0, 4), dtype=np.float32) + results_gt['gt_labels'] = np.empty((0, ), dtype=np.int64) + gt_masks = np.empty((0, h, w), dtype=np.uint8) + results_gt['gt_masks'] = BitmapMasks(gt_masks, h, w) + gt_seg = (np.ones((h, w)) * 255).astype(results['gt_semantic_seg'].dtype) + gt_seg[0, 0], gt_seg[0, 1] = 1, 5 + results_gt['gt_semantic_seg'] = gt_seg + check_rotate(results_gt, results_rotated) + + transform = dict( + type='Rotate', + level=10, + max_rotate_angle=90, + center=(0), + img_fill_val=img_fill_val, + random_negative_prob=1., + prob=1.)
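+ # Note that ``center=(0)`` is a plain scalar, not a tuple; this presumably + # exercises the branch that broadcasts a scalar center to both axes, so the + # expected results are identical to the tuple case above.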
+ rotate_module = build_from_cfg(transform, PIPELINES) + results_rotated = rotate_module(copy.deepcopy(results)) + check_rotate(results_gt, results_rotated) + + # test counter-clockwise rotation with angle 90, + # and specify the rotation center, PolygonMasks + results = construct_toy_data(poly2mask=False) + results_rotated = rotate_module(copy.deepcopy(results)) + gt_masks = [[np.array([0, 0, 0, 0, 1, 0, 1, 0], dtype=np.float)]] + results_gt['gt_masks'] = PolygonMasks(gt_masks, 2, 4) + check_rotate(results_gt, results_rotated) + + # test AutoAugment equipped with Rotate + policies = [[dict(type='Rotate', level=10, prob=1.)]] + autoaug = dict(type='AutoAugment', policies=policies) + autoaug_module = build_from_cfg(autoaug, PIPELINES) + autoaug_module(copy.deepcopy(results)) + + policies = [[ + dict(type='Rotate', level=10, prob=1.), + dict( + type='Rotate', + level=8, + max_rotate_angle=90, + center=(0), + img_fill_val=img_fill_val) + ]] + autoaug = dict(type='AutoAugment', policies=policies) + autoaug_module = build_from_cfg(autoaug, PIPELINES) + autoaug_module(copy.deepcopy(results)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_shear.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_shear.py new file mode 100644 index 0000000000000000000000000000000000000000..3d63812521492182833fc16478ad6f44e6d1308f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_shear.py @@ -0,0 +1,217 @@ +import copy + +import numpy as np +import pytest +from mmcv.utils import build_from_cfg + +from mmdet.core.mask import BitmapMasks, PolygonMasks +from mmdet.datasets.builder import PIPELINES + + +def construct_toy_data(poly2mask=True): + img = np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=np.uint8) + img = np.stack([img, img, img], axis=-1) + results = dict() + # image + results['img'] = img + results['img_shape'] = img.shape + results['img_fields'] = ['img'] + # bboxes + results['bbox_fields'] = ['gt_bboxes', 'gt_bboxes_ignore'] + results['gt_bboxes'] = np.array([[0., 0., 2., 1.]], dtype=np.float32) + results['gt_bboxes_ignore'] = np.array([[2., 0., 3., 1.]], + dtype=np.float32) + # labels + results['gt_labels'] = np.array([1], dtype=np.int64) + # masks + results['mask_fields'] = ['gt_masks'] + if poly2mask: + gt_masks = np.array([[0, 1, 1, 0], [0, 1, 0, 0]], + dtype=np.uint8)[None, :, :] + results['gt_masks'] = BitmapMasks(gt_masks, 2, 4) + else: + raw_masks = [[np.array([1, 0, 2, 0, 2, 1, 1, 1], dtype=np.float)]] + results['gt_masks'] = PolygonMasks(raw_masks, 2, 4) + + # segmentations + results['seg_fields'] = ['gt_semantic_seg'] + results['gt_semantic_seg'] = img[..., 0] + return results + + +def _check_fields(results, results_sheared, keys): + for key in keys: + if isinstance(results[key], (BitmapMasks, PolygonMasks)): + assert np.equal(results[key].to_ndarray(), + results_sheared[key].to_ndarray()).all() + else: + assert np.equal(results[key], results_sheared[key]).all() + + +def check_shear(results, results_sheared): + # _check_keys(results, results_sheared) + # check image + _check_fields(results, results_sheared, results.get('img_fields', ['img'])) + # check bboxes + _check_fields(results, results_sheared, results.get('bbox_fields', [])) + # check masks + _check_fields(results, results_sheared, results.get('mask_fields', [])) + # check segmentations + _check_fields(results, results_sheared, results.get('seg_fields', [])) + #
check gt_labels + if 'gt_labels' in results: + assert np.equal(results['gt_labels'], + results_sheared['gt_labels']).all() + + +def test_shear(): + # test assertion for invalid type of max_shear_magnitude + with pytest.raises(AssertionError): + transform = dict(type='Shear', level=1, max_shear_magnitude=(0.5, )) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid value of max_shear_magnitude + with pytest.raises(AssertionError): + transform = dict(type='Shear', level=2, max_shear_magnitude=1.2) + build_from_cfg(transform, PIPELINES) + + # test ValueError for invalid type of img_fill_val + with pytest.raises(ValueError): + transform = dict(type='Shear', level=2, img_fill_val=[128]) + build_from_cfg(transform, PIPELINES) + + results = construct_toy_data() + # test case when no shear aug (level=0, direction='horizontal') + img_fill_val = (104, 116, 124) + seg_ignore_label = 255 + transform = dict( + type='Shear', + level=0, + prob=1., + img_fill_val=img_fill_val, + seg_ignore_label=seg_ignore_label, + direction='horizontal') + shear_module = build_from_cfg(transform, PIPELINES) + results_wo_shear = shear_module(copy.deepcopy(results)) + check_shear(results, results_wo_shear) + + # test case when no shear aug (level=0, direction='vertical') + transform = dict( + type='Shear', + level=0, + prob=1., + img_fill_val=img_fill_val, + seg_ignore_label=seg_ignore_label, + direction='vertical') + shear_module = build_from_cfg(transform, PIPELINES) + results_wo_shear = shear_module(copy.deepcopy(results)) + check_shear(results, results_wo_shear) + + # test case when no shear aug (prob<=0) + transform = dict( + type='Shear', + level=10, + prob=0., + img_fill_val=img_fill_val, + direction='vertical') + shear_module = build_from_cfg(transform, PIPELINES) + results_wo_shear = shear_module(copy.deepcopy(results)) + check_shear(results, results_wo_shear) + + # test shear horizontally, magnitude=1 + transform = dict( + type='Shear', + level=10, + prob=1., + img_fill_val=img_fill_val, + direction='horizontal', + max_shear_magnitude=1., + random_negative_prob=0.) + shear_module = build_from_cfg(transform, PIPELINES) + results_sheared = shear_module(copy.deepcopy(results)) + results_gt = copy.deepcopy(results) + img_s = np.array([[1, 2, 3, 4], [0, 5, 6, 7]], dtype=np.uint8) + img_s = np.stack([img_s, img_s, img_s], axis=-1) + img_s[1, 0, :] = np.array(img_fill_val) + results_gt['img'] = img_s + results_gt['gt_bboxes'] = np.array([[0., 0., 3., 1.]], dtype=np.float32) + results_gt['gt_bboxes_ignore'] = np.array([[2., 0., 4., 1.]], + dtype=np.float32) + gt_masks = np.array([[0, 1, 1, 0], [0, 0, 1, 0]], + dtype=np.uint8)[None, :, :] + results_gt['gt_masks'] = BitmapMasks(gt_masks, 2, 4) + results_gt['gt_semantic_seg'] = np.array( + [[1, 2, 3, 4], [255, 5, 6, 7]], dtype=results['gt_semantic_seg'].dtype) + check_shear(results_gt, results_sheared) + + # test PolygonMasks with shear horizontally, magnitude=1 + results = construct_toy_data(poly2mask=False) + results_sheared = shear_module(copy.deepcopy(results)) + gt_masks = [[np.array([1, 0, 2, 0, 3, 1, 2, 1], dtype=np.float)]] + results_gt['gt_masks'] = PolygonMasks(gt_masks, 2, 4) + check_shear(results_gt, results_sheared) + + # test shear vertically, magnitude=-1 + img_fill_val = 128 + results = construct_toy_data() + transform = dict( + type='Shear', + level=10, + prob=1., + img_fill_val=img_fill_val, + direction='vertical', + max_shear_magnitude=1., + random_negative_prob=1.) 
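+ # With level=10 and max_shear_magnitude=1. the sampled magnitude should be + # 1.0, and random_negative_prob=1. flips its sign, giving the magnitude=-1 + # case named above.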
+ shear_module = build_from_cfg(transform, PIPELINES) + results_sheared = shear_module(copy.deepcopy(results)) + results_gt = copy.deepcopy(results) + img_s = np.array([[1, 6, img_fill_val, img_fill_val], + [5, img_fill_val, img_fill_val, img_fill_val]], + dtype=np.uint8) + img_s = np.stack([img_s, img_s, img_s], axis=-1) + results_gt['img'] = img_s + results_gt['gt_bboxes'] = np.empty((0, 4), dtype=np.float32) + results_gt['gt_labels'] = np.empty((0, ), dtype=np.int64) + results_gt['gt_bboxes_ignore'] = np.empty((0, 4), dtype=np.float32) + gt_masks = np.array([[0, 1, 0, 0], [0, 0, 0, 0]], + dtype=np.uint8)[None, :, :] + results_gt['gt_masks'] = BitmapMasks(gt_masks, 2, 4) + results_gt['gt_semantic_seg'] = np.array( + [[1, 6, 255, 255], [5, 255, 255, 255]], + dtype=results['gt_semantic_seg'].dtype) + check_shear(results_gt, results_sheared) + + # test PolygonMasks with shear vertically, magnitude=-1 + results = construct_toy_data(poly2mask=False) + results_sheared = shear_module(copy.deepcopy(results)) + gt_masks = [[np.array([1, 0, 2, 0, 2, 0, 1, 0], dtype=np.float)]] + results_gt['gt_masks'] = PolygonMasks(gt_masks, 2, 4) + check_shear(results_gt, results_sheared) + + results = construct_toy_data() + # same mask for BitmapMasks and PolygonMasks + results['gt_masks'] = BitmapMasks( + np.array([[0, 1, 1, 0], [0, 1, 1, 0]], dtype=np.uint8)[None, :, :], 2, + 4) + results['gt_bboxes'] = np.array([[1., 0., 2., 1.]], dtype=np.float32) + results_sheared_bitmap = shear_module(copy.deepcopy(results)) + check_shear(results_sheared_bitmap, results_sheared) + + # test AutoAugment equipped with Shear + policies = [[dict(type='Shear', level=10, prob=1.)]] + autoaug = dict(type='AutoAugment', policies=policies) + autoaug_module = build_from_cfg(autoaug, PIPELINES) + autoaug_module(copy.deepcopy(results)) + + policies = [[ + dict(type='Shear', level=10, prob=1.), + dict( + type='Shear', + level=8, + img_fill_val=img_fill_val, + direction='vertical', + max_shear_magnitude=1.) 
+ ]] + autoaug = dict(type='AutoAugment', policies=policies) + autoaug_module = build_from_cfg(autoaug, PIPELINES) + autoaug_module(copy.deepcopy(results)) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_transform.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_transform.py new file mode 100644 index 0000000000000000000000000000000000000000..b69d5ef9ffc37d8a73920186bddbb18b53d5eb0b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_transform.py @@ -0,0 +1,792 @@ +import copy +import os.path as osp + +import mmcv +import numpy as np +import pytest +import torch +from mmcv.utils import build_from_cfg + +from mmdet.core.evaluation.bbox_overlaps import bbox_overlaps +from mmdet.datasets.builder import PIPELINES + + +def test_resize(): + # test assertion if img_scale is a list + with pytest.raises(AssertionError): + transform = dict(type='Resize', img_scale=[1333, 800], keep_ratio=True) + build_from_cfg(transform, PIPELINES) + + # test assertion if len(img_scale) > 1 while ratio_range is not None + with pytest.raises(AssertionError): + transform = dict( + type='Resize', + img_scale=[(1333, 800), (1333, 600)], + ratio_range=(0.9, 1.1), + keep_ratio=True) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid multiscale_mode + with pytest.raises(AssertionError): + transform = dict( + type='Resize', + img_scale=[(1333, 800), (1333, 600)], + keep_ratio=True, + multiscale_mode='2333') + build_from_cfg(transform, PIPELINES) + + # test assertion if both scale and scale_factor are set + with pytest.raises(AssertionError): + results = dict( + img_prefix=osp.join(osp.dirname(__file__), '../../../data'), + img_info=dict(filename='color.jpg')) + load = dict(type='LoadImageFromFile') + load = build_from_cfg(load, PIPELINES) + transform = dict(type='Resize', img_scale=(1333, 800), keep_ratio=True) + transform = build_from_cfg(transform, PIPELINES) + results = load(results) + results['scale'] = (1333, 800) + results['scale_factor'] = 1.0 + results = transform(results) + + transform = dict(type='Resize', img_scale=(1333, 800), keep_ratio=True) + resize_module = build_from_cfg(transform, PIPELINES) + + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../../../data/color.jpg'), 'color') + results['img'] = img + results['img2'] = copy.deepcopy(img) + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['img_fields'] = ['img', 'img2'] + + results = resize_module(results) + assert np.equal(results['img'], results['img2']).all() + + results.pop('scale') + results.pop('scale_factor') + transform = dict( + type='Resize', + img_scale=(1280, 800), + multiscale_mode='value', + keep_ratio=False) + resize_module = build_from_cfg(transform, PIPELINES) + results = resize_module(results) + assert np.equal(results['img'], results['img2']).all() + assert results['img_shape'] == (800, 1280, 3) + + +def test_flip(): + # test assertion for invalid flip_ratio + with pytest.raises(AssertionError): + transform = dict(type='RandomFlip', flip_ratio=1.5) + build_from_cfg(transform, PIPELINES) + # test assertion for 0 <= sum(flip_ratio) <= 1 + with pytest.raises(AssertionError): + transform = dict( + type='RandomFlip', + flip_ratio=[0.7, 0.8], + direction=['horizontal', 'vertical']) + build_from_cfg(transform,
PIPELINES) + + # test assertion for mismatch between number of flip_ratio and direction + with pytest.raises(AssertionError): + transform = dict(type='RandomFlip', flip_ratio=[0.4, 0.5]) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid direction + with pytest.raises(AssertionError): + transform = dict( + type='RandomFlip', flip_ratio=1., direction='horizonta') + build_from_cfg(transform, PIPELINES) + + transform = dict(type='RandomFlip', flip_ratio=1.) + flip_module = build_from_cfg(transform, PIPELINES) + + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../../../data/color.jpg'), 'color') + original_img = copy.deepcopy(img) + results['img'] = img + results['img2'] = copy.deepcopy(img) + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['scale_factor'] = 1.0 + results['img_fields'] = ['img', 'img2'] + + results = flip_module(results) + assert np.equal(results['img'], results['img2']).all() + + flip_module = build_from_cfg(transform, PIPELINES) + results = flip_module(results) + assert np.equal(results['img'], results['img2']).all() + assert np.equal(original_img, results['img']).all() + + # test flip_ratio is float, direction is list + transform = dict( + type='RandomFlip', + flip_ratio=0.9, + direction=['horizontal', 'vertical', 'diagonal']) + flip_module = build_from_cfg(transform, PIPELINES) + + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../../../data/color.jpg'), 'color') + original_img = copy.deepcopy(img) + results['img'] = img + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['scale_factor'] = 1.0 + results['img_fields'] = ['img'] + results = flip_module(results) + if results['flip']: + assert np.array_equal( + mmcv.imflip(original_img, results['flip_direction']), + results['img']) + else: + assert np.array_equal(original_img, results['img']) + + # test flip_ratio is list, direction is list + transform = dict( + type='RandomFlip', + flip_ratio=[0.3, 0.3, 0.2], + direction=['horizontal', 'vertical', 'diagonal']) + flip_module = build_from_cfg(transform, PIPELINES) + + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../../../data/color.jpg'), 'color') + original_img = copy.deepcopy(img) + results['img'] = img + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['scale_factor'] = 1.0 + results['img_fields'] = ['img'] + results = flip_module(results) + if results['flip']: + assert np.array_equal( + mmcv.imflip(original_img, results['flip_direction']), + results['img']) + else: + assert np.array_equal(original_img, results['img']) + + +def test_random_crop(): + # test assertion for invalid random crop + with pytest.raises(AssertionError): + transform = dict(type='RandomCrop', crop_size=(-1, 0)) + build_from_cfg(transform, PIPELINES) + + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../../../data/color.jpg'), 'color') + results['img'] = img + + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # TODO: add img_fields test + results['bbox_fields'] = ['gt_bboxes', 'gt_bboxes_ignore'] + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['scale_factor'] = 1.0 + + def 
create_random_bboxes(num_bboxes, img_w, img_h): + bboxes_left_top = np.random.uniform(0, 0.5, size=(num_bboxes, 2)) + bboxes_right_bottom = np.random.uniform(0.5, 1, size=(num_bboxes, 2)) + bboxes = np.concatenate((bboxes_left_top, bboxes_right_bottom), 1) + bboxes = (bboxes * np.array([img_w, img_h, img_w, img_h])).astype( + np.int) + return bboxes + + h, w, _ = img.shape + gt_bboxes = create_random_bboxes(8, w, h) + gt_bboxes_ignore = create_random_bboxes(2, w, h) + results['gt_bboxes'] = gt_bboxes + results['gt_bboxes_ignore'] = gt_bboxes_ignore + transform = dict(type='RandomCrop', crop_size=(h - 20, w - 20)) + crop_module = build_from_cfg(transform, PIPELINES) + results = crop_module(results) + assert results['img'].shape[:2] == (h - 20, w - 20) + # All bboxes should be preserved after crop + assert results['img_shape'][:2] == (h - 20, w - 20) + assert results['gt_bboxes'].shape[0] == 8 + assert results['gt_bboxes_ignore'].shape[0] == 2 + + def area(bboxes): + return np.prod(bboxes[:, 2:4] - bboxes[:, 0:2], axis=1) + + assert (area(results['gt_bboxes']) <= area(gt_bboxes)).all() + assert (area(results['gt_bboxes_ignore']) <= area(gt_bboxes_ignore)).all() + + # test assertion for invalid crop_type + with pytest.raises(ValueError): + transform = dict( + type='RandomCrop', crop_size=(1, 1), crop_type='unknown') + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid crop_size + with pytest.raises(AssertionError): + transform = dict( + type='RandomCrop', crop_type='relative', crop_size=(0, 0)) + build_from_cfg(transform, PIPELINES) + + def _construct_toy_data(): + img = np.array([[1, 2, 3, 4], [5, 6, 7, 8]], dtype=np.uint8) + img = np.stack([img, img, img], axis=-1) + results = dict() + # image + results['img'] = img + results['img_shape'] = img.shape + results['img_fields'] = ['img'] + # bboxes + results['bbox_fields'] = ['gt_bboxes', 'gt_bboxes_ignore'] + results['gt_bboxes'] = np.array([[0., 0., 2., 1.]], dtype=np.float32) + results['gt_bboxes_ignore'] = np.array([[2., 0., 3., 1.]], + dtype=np.float32) + # labels + results['gt_labels'] = np.array([1], dtype=np.int64) + return results + + # test crop_type "relative_range" + results = _construct_toy_data() + transform = dict( + type='RandomCrop', + crop_type='relative_range', + crop_size=(0.3, 0.7), + allow_negative_crop=True) + transform_module = build_from_cfg(transform, PIPELINES) + results_transformed = transform_module(copy.deepcopy(results)) + h, w = results_transformed['img_shape'][:2] + assert int(2 * 0.3 + 0.5) <= h <= int(2 * 1 + 0.5) + assert int(4 * 0.7 + 0.5) <= w <= int(4 * 1 + 0.5) + + # test crop_type "relative" + transform = dict( + type='RandomCrop', + crop_type='relative', + crop_size=(0.3, 0.7), + allow_negative_crop=True) + transform_module = build_from_cfg(transform, PIPELINES) + results_transformed = transform_module(copy.deepcopy(results)) + h, w = results_transformed['img_shape'][:2] + assert h == int(2 * 0.3 + 0.5) and w == int(4 * 0.7 + 0.5) + + # test crop_type "absolute" + transform = dict( + type='RandomCrop', + crop_type='absolute', + crop_size=(1, 2), + allow_negative_crop=True) + transform_module = build_from_cfg(transform, PIPELINES) + results_transformed = transform_module(copy.deepcopy(results)) + h, w = results_transformed['img_shape'][:2] + assert h == 1 and w == 2 + + # test crop_type "absolute_range" + transform = dict( + type='RandomCrop', + crop_type='absolute_range', + crop_size=(1, 20), + allow_negative_crop=True) + transform_module = build_from_cfg(transform, PIPELINES) +
results_transformed = transform_module(copy.deepcopy(results)) + h, w = results_transformed['img_shape'][:2] + assert 1 <= h <= 2 and 1 <= w <= 4 + + +def test_min_iou_random_crop(): + + def create_random_bboxes(num_bboxes, img_w, img_h): + bboxes_left_top = np.random.uniform(0, 0.5, size=(num_bboxes, 2)) + bboxes_right_bottom = np.random.uniform(0.5, 1, size=(num_bboxes, 2)) + bboxes = np.concatenate((bboxes_left_top, bboxes_right_bottom), 1) + bboxes = (bboxes * np.array([img_w, img_h, img_w, img_h])).astype( + np.int) + return bboxes + + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../../../data/color.jpg'), 'color') + results['img'] = img + + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + results['bbox_fields'] = ['gt_bboxes', 'gt_bboxes_ignore'] + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['scale_factor'] = 1.0 + h, w, _ = img.shape + gt_bboxes = create_random_bboxes(1, w, h) + gt_bboxes_ignore = create_random_bboxes(1, w, h) + results['gt_bboxes'] = gt_bboxes + results['gt_bboxes_ignore'] = gt_bboxes_ignore + transform = dict(type='MinIoURandomCrop') + crop_module = build_from_cfg(transform, PIPELINES) + + # Test for img_fields + results_test = copy.deepcopy(results) + results_test['img1'] = results_test['img'] + results_test['img_fields'] = ['img', 'img1'] + with pytest.raises(AssertionError): + crop_module(results_test) + results = crop_module(results) + patch = np.array([0, 0, results['img_shape'][1], results['img_shape'][0]]) + ious = bbox_overlaps(patch.reshape(-1, 4), + results['gt_bboxes']).reshape(-1) + ious_ignore = bbox_overlaps( + patch.reshape(-1, 4), results['gt_bboxes_ignore']).reshape(-1) + mode = crop_module.mode + if mode == 1: + assert np.equal(results['gt_bboxes'], gt_bboxes).all() + assert np.equal(results['gt_bboxes_ignore'], gt_bboxes_ignore).all() + else: + assert (ious >= mode).all() + assert (ious_ignore >= mode).all() + + +def test_pad(): + # test assertion if both size_divisor and size are None + with pytest.raises(AssertionError): + transform = dict(type='Pad') + build_from_cfg(transform, PIPELINES) + + transform = dict(type='Pad', size_divisor=32) + transform = build_from_cfg(transform, PIPELINES) + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../../../data/color.jpg'), 'color') + original_img = copy.deepcopy(img) + results['img'] = img + results['img2'] = copy.deepcopy(img) + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['scale_factor'] = 1.0 + results['img_fields'] = ['img', 'img2'] + + results = transform(results) + assert np.equal(results['img'], results['img2']).all() + # original img already divisible by 32 + assert np.equal(results['img'], original_img).all() + img_shape = results['img'].shape + assert img_shape[0] % 32 == 0 + assert img_shape[1] % 32 == 0 + + resize_transform = dict( + type='Resize', img_scale=(1333, 800), keep_ratio=True) + resize_module = build_from_cfg(resize_transform, PIPELINES) + results = resize_module(results) + results = transform(results) + img_shape = results['img'].shape + assert np.equal(results['img'], results['img2']).all() + assert img_shape[0] % 32 == 0 + assert img_shape[1] % 32 == 0 + + +def test_normalize(): + img_norm_cfg = dict( + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True) + transform = dict(type='Normalize', **img_norm_cfg) + transform =
build_from_cfg(transform, PIPELINES) + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../../../data/color.jpg'), 'color') + original_img = copy.deepcopy(img) + results['img'] = img + results['img2'] = copy.deepcopy(img) + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['scale_factor'] = 1.0 + results['img_fields'] = ['img', 'img2'] + + results = transform(results) + assert np.equal(results['img'], results['img2']).all() + + mean = np.array(img_norm_cfg['mean']) + std = np.array(img_norm_cfg['std']) + converted_img = (original_img[..., ::-1] - mean) / std + assert np.allclose(results['img'], converted_img) + + +def test_albu_transform(): + results = dict( + img_prefix=osp.join(osp.dirname(__file__), '../../../data'), + img_info=dict(filename='color.jpg')) + + # Define simple pipeline + load = dict(type='LoadImageFromFile') + load = build_from_cfg(load, PIPELINES) + + albu_transform = dict( + type='Albu', transforms=[dict(type='ChannelShuffle', p=1)]) + albu_transform = build_from_cfg(albu_transform, PIPELINES) + + normalize = dict(type='Normalize', mean=[0] * 3, std=[0] * 3, to_rgb=True) + normalize = build_from_cfg(normalize, PIPELINES) + + # Execute transforms + results = load(results) + results = albu_transform(results) + results = normalize(results) + + assert results['img'].dtype == np.float32 + + +def test_random_center_crop_pad(): + # test assertion for invalid crop_size while test_mode=False + with pytest.raises(AssertionError): + transform = dict( + type='RandomCenterCropPad', + crop_size=(-1, 0), + test_mode=False, + test_pad_mode=None) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid ratios while test_mode=False + with pytest.raises(AssertionError): + transform = dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=(1.0), + test_mode=False, + test_pad_mode=None) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid mean, std and to_rgb + with pytest.raises(AssertionError): + transform = dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + mean=None, + std=None, + to_rgb=None, + test_mode=False, + test_pad_mode=None) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid crop_size while test_mode=True + with pytest.raises(AssertionError): + transform = dict( + type='RandomCenterCropPad', + crop_size=(511, 511), + ratios=None, + border=None, + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True, + test_mode=True, + test_pad_mode=('logical_or', 127)) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid ratios while test_mode=True + with pytest.raises(AssertionError): + transform = dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=(0.9, 1.0, 1.1), + border=None, + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True, + test_mode=True, + test_pad_mode=('logical_or', 127)) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid border while test_mode=True + with pytest.raises(AssertionError): + transform = dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=128, + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True, + test_mode=True, + test_pad_mode=('logical_or', 127)) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid test_pad_mode while test_mode=True + with pytest.raises(AssertionError): + transform = 
dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True, + test_mode=True, + test_pad_mode=('do_nothing', 100)) + build_from_cfg(transform, PIPELINES) + + results = dict( + img_prefix=osp.join(osp.dirname(__file__), '../../../data'), + img_info=dict(filename='color.jpg')) + + load = dict(type='LoadImageFromFile', to_float32=True) + load = build_from_cfg(load, PIPELINES) + results = load(results) + test_results = copy.deepcopy(results) + + def create_random_bboxes(num_bboxes, img_w, img_h): + bboxes_left_top = np.random.uniform(0, 0.5, size=(num_bboxes, 2)) + bboxes_right_bottom = np.random.uniform(0.5, 1, size=(num_bboxes, 2)) + bboxes = np.concatenate((bboxes_left_top, bboxes_right_bottom), 1) + bboxes = (bboxes * np.array([img_w, img_h, img_w, img_h])).astype( + np.int) + return bboxes + + h, w, _ = results['img_shape'] + gt_bboxes = create_random_bboxes(8, w, h) + gt_bboxes_ignore = create_random_bboxes(2, w, h) + results['gt_bboxes'] = gt_bboxes + results['gt_bboxes_ignore'] = gt_bboxes_ignore + train_transform = dict( + type='RandomCenterCropPad', + crop_size=(h - 20, w - 20), + ratios=(1.0, ), + border=128, + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True, + test_mode=False, + test_pad_mode=None) + crop_module = build_from_cfg(train_transform, PIPELINES) + train_results = crop_module(results) + assert train_results['img'].shape[:2] == (h - 20, w - 20) + # All bboxes should be preserved after crop + assert train_results['pad_shape'][:2] == (h - 20, w - 20) + assert train_results['gt_bboxes'].shape[0] == 8 + assert train_results['gt_bboxes_ignore'].shape[0] == 2 + + test_transform = dict( + type='RandomCenterCropPad', + crop_size=None, + ratios=None, + border=None, + mean=[123.675, 116.28, 103.53], + std=[58.395, 57.12, 57.375], + to_rgb=True, + test_mode=True, + test_pad_mode=('logical_or', 127)) + crop_module = build_from_cfg(test_transform, PIPELINES) + + test_results = crop_module(test_results) + assert test_results['img'].shape[:2] == (h | 127, w | 127) + assert test_results['pad_shape'][:2] == (h | 127, w | 127) + assert 'border' in test_results + + +def test_multi_scale_flip_aug(): + # test assertion if both scale_factor and img_scale are given + with pytest.raises(AssertionError): + transform = dict( + type='MultiScaleFlipAug', + scale_factor=1.0, + img_scale=[(1333, 800)], + transforms=[dict(type='Resize')]) + build_from_cfg(transform, PIPELINES) + + # test assertion if both scale_factor and img_scale are None + with pytest.raises(AssertionError): + transform = dict( + type='MultiScaleFlipAug', + scale_factor=None, + img_scale=None, + transforms=[dict(type='Resize')]) + build_from_cfg(transform, PIPELINES) + + # test assertion if img_scale is not tuple or list of tuple + with pytest.raises(AssertionError): + transform = dict( + type='MultiScaleFlipAug', + img_scale=[1333, 800], + transforms=[dict(type='Resize')]) + build_from_cfg(transform, PIPELINES) + + # test assertion if flip_direction is not str or list of str + with pytest.raises(AssertionError): + transform = dict( + type='MultiScaleFlipAug', + img_scale=[(1333, 800)], + flip_direction=1, + transforms=[dict(type='Resize')]) + build_from_cfg(transform, PIPELINES) + + scale_transform = dict( + type='MultiScaleFlipAug', + img_scale=[(1333, 800), (1333, 640)], + transforms=[dict(type='Resize', keep_ratio=True)]) + transform = build_from_cfg(scale_transform, PIPELINES) + + results = dict() + img
= mmcv.imread( + osp.join(osp.dirname(__file__), '../../../data/color.jpg'), 'color') + results['img'] = img + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + # Set initial values for default meta_keys + results['pad_shape'] = img.shape + results['img_fields'] = ['img'] + + scale_results = transform(copy.deepcopy(results)) + assert len(scale_results['img']) == 2 + assert scale_results['img'][0].shape == (750, 1333, 3) + assert scale_results['img_shape'][0] == (750, 1333, 3) + assert scale_results['img'][1].shape == (640, 1138, 3) + assert scale_results['img_shape'][1] == (640, 1138, 3) + + scale_factor_transform = dict( + type='MultiScaleFlipAug', + scale_factor=[0.8, 1.0, 1.2], + transforms=[dict(type='Resize', keep_ratio=False)]) + transform = build_from_cfg(scale_factor_transform, PIPELINES) + scale_factor_results = transform(copy.deepcopy(results)) + assert len(scale_factor_results['img']) == 3 + assert scale_factor_results['img'][0].shape == (230, 409, 3) + assert scale_factor_results['img_shape'][0] == (230, 409, 3) + assert scale_factor_results['img'][1].shape == (288, 512, 3) + assert scale_factor_results['img_shape'][1] == (288, 512, 3) + assert scale_factor_results['img'][2].shape == (345, 614, 3) + assert scale_factor_results['img_shape'][2] == (345, 614, 3) + + # test pipeline of coco_detection + results = dict( + img_prefix=osp.join(osp.dirname(__file__), '../../../data'), + img_info=dict(filename='color.jpg')) + load_cfg, multi_scale_cfg = mmcv.Config.fromfile( + 'configs/_base_/datasets/coco_detection.py').test_pipeline + load = build_from_cfg(load_cfg, PIPELINES) + transform = build_from_cfg(multi_scale_cfg, PIPELINES) + results = transform(load(results)) + assert len(results['img']) == 1 + assert len(results['img_metas']) == 1 + assert isinstance(results['img'][0], torch.Tensor) + assert isinstance(results['img_metas'][0], mmcv.parallel.DataContainer) + assert results['img_metas'][0].data['ori_shape'] == (288, 512, 3) + assert results['img_metas'][0].data['img_shape'] == (750, 1333, 3) + assert results['img_metas'][0].data['pad_shape'] == (768, 1344, 3) + assert results['img_metas'][0].data['scale_factor'].tolist() == [ + 2.603515625, 2.6041667461395264, 2.603515625, 2.6041667461395264 + ] + + +def test_cutout(): + # test n_holes + with pytest.raises(AssertionError): + transform = dict(type='CutOut', n_holes=(5, 3), cutout_shape=(8, 8)) + build_from_cfg(transform, PIPELINES) + with pytest.raises(AssertionError): + transform = dict(type='CutOut', n_holes=(3, 4, 5), cutout_shape=(8, 8)) + build_from_cfg(transform, PIPELINES) + # test cutout_shape and cutout_ratio + with pytest.raises(AssertionError): + transform = dict(type='CutOut', n_holes=1, cutout_shape=8) + build_from_cfg(transform, PIPELINES) + with pytest.raises(AssertionError): + transform = dict(type='CutOut', n_holes=1, cutout_ratio=0.2) + build_from_cfg(transform, PIPELINES) + # exactly one of cutout_shape and cutout_ratio should be given + with pytest.raises(AssertionError): + transform = dict(type='CutOut', n_holes=1) + build_from_cfg(transform, PIPELINES) + with pytest.raises(AssertionError): + transform = dict( + type='CutOut', + n_holes=1, + cutout_shape=(2, 2), + cutout_ratio=(0.4, 0.4)) + build_from_cfg(transform, PIPELINES) + + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../../../data/color.jpg'), 'color') + + results['img'] = img + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + results['pad_shape'] = img.shape + results['img_fields'] =
['img'] + + transform = dict(type='CutOut', n_holes=1, cutout_shape=(10, 10)) + cutout_module = build_from_cfg(transform, PIPELINES) + cutout_result = cutout_module(copy.deepcopy(results)) + assert cutout_result['img'].sum() < img.sum() + + transform = dict(type='CutOut', n_holes=1, cutout_ratio=(0.8, 0.8)) + cutout_module = build_from_cfg(transform, PIPELINES) + cutout_result = cutout_module(copy.deepcopy(results)) + assert cutout_result['img'].sum() < img.sum() + + transform = dict( + type='CutOut', + n_holes=(2, 4), + cutout_shape=[(10, 10), (15, 15)], + fill_in=(255, 255, 255)) + cutout_module = build_from_cfg(transform, PIPELINES) + cutout_result = cutout_module(copy.deepcopy(results)) + assert cutout_result['img'].sum() > img.sum() + + transform = dict( + type='CutOut', + n_holes=1, + cutout_ratio=(0.8, 0.8), + fill_in=(255, 255, 255)) + cutout_module = build_from_cfg(transform, PIPELINES) + cutout_result = cutout_module(copy.deepcopy(results)) + assert cutout_result['img'].sum() > img.sum() + + +def test_random_shift(): + # test assertion for invalid shift_ratio + with pytest.raises(AssertionError): + transform = dict(type='RandomShift', shift_ratio=1.5) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid max_shift_px + with pytest.raises(AssertionError): + transform = dict(type='RandomShift', max_shift_px=-1) + build_from_cfg(transform, PIPELINES) + + results = dict() + img = mmcv.imread( + osp.join(osp.dirname(__file__), '../../../data/color.jpg'), 'color') + results['img'] = img + # TODO: add img_fields test + results['bbox_fields'] = ['gt_bboxes', 'gt_bboxes_ignore'] + + def create_random_bboxes(num_bboxes, img_w, img_h): + bboxes_left_top = np.random.uniform(0, 0.5, size=(num_bboxes, 2)) + bboxes_right_bottom = np.random.uniform(0.5, 1, size=(num_bboxes, 2)) + bboxes = np.concatenate((bboxes_left_top, bboxes_right_bottom), 1) + bboxes = (bboxes * np.array([img_w, img_h, img_w, img_h])).astype( + np.int) + return bboxes + + h, w, _ = img.shape + gt_bboxes = create_random_bboxes(8, w, h) + gt_bboxes_ignore = create_random_bboxes(2, w, h) + results['gt_labels'] = torch.ones(gt_bboxes.shape[0]) + results['gt_bboxes'] = gt_bboxes + results['gt_bboxes_ignore'] = gt_bboxes_ignore + transform = dict(type='RandomShift', shift_ratio=1.0) + random_shift_module = build_from_cfg(transform, PIPELINES) + results = random_shift_module(results) + + assert results['img'].shape[:2] == (h, w) + assert results['gt_labels'].shape[0] == results['gt_bboxes'].shape[0] diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_translate.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_translate.py new file mode 100644 index 0000000000000000000000000000000000000000..87f37d0d8fc6aeda4200e8b94f7b23d1a6069444 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_pipelines/test_transform/test_translate.py @@ -0,0 +1,515 @@ +import copy + +import numpy as np +import pycocotools.mask as maskUtils +import pytest +from mmcv.utils import build_from_cfg + +from mmdet.core.mask import BitmapMasks, PolygonMasks +from mmdet.datasets.builder import PIPELINES + + +def _check_keys(results, results_translated): + assert len(set(results.keys()).difference(set( + results_translated.keys()))) == 0 + assert len(set(results_translated.keys()).difference(set( + results.keys()))) == 0 + + +def _pad(h, w, c, pad_val, axis=-1, dtype=np.float32): + assert isinstance(pad_val, 
(int, float, tuple)) + if isinstance(pad_val, (int, float)): + pad_val = tuple([pad_val] * c) + assert len(pad_val) == c + pad_data = np.stack([np.ones((h, w)) * pad_val[i] for i in range(c)], + axis=axis).astype(dtype) + return pad_data + + +def _construct_img(results): + h, w = results['img_info']['height'], results['img_info']['width'] + img = np.random.uniform(0, 1, (h, w, 3)) * 255 + img = img.astype(np.uint8) + results['img'] = img + results['img_shape'] = img.shape + results['ori_shape'] = img.shape + results['img_fields'] = ['img'] + + +def _construct_ann_info(h=427, w=640, c=3): + bboxes = np.array( + [[222.62, 217.82, 241.81, 238.93], [50.5, 329.7, 130.23, 384.96], + [175.47, 331.97, 254.8, 389.26]], + dtype=np.float32) + labels = np.array([9, 2, 2], dtype=np.int64) + bboxes_ignore = np.array([[59., 253., 311., 337.]], dtype=np.float32) + masks = [ + [[222.62, 217.82, 222.62, 238.93, 241.81, 238.93, 240.85, 218.78]], + [[ + 69.19, 332.17, 82.39, 330.25, 97.24, 329.7, 114.01, 331.35, 116.76, + 337.39, 119.78, 343.17, 128.03, 344.54, 128.86, 347.84, 124.18, + 350.59, 129.96, 358.01, 130.23, 366.54, 129.13, 377.81, 125.28, + 382.48, 119.78, 381.93, 117.31, 377.54, 116.21, 379.46, 114.83, + 382.21, 107.14, 383.31, 105.49, 378.36, 77.99, 377.54, 75.79, + 381.11, 69.74, 381.93, 66.72, 378.91, 65.07, 377.81, 63.15, 379.19, + 62.32, 383.31, 52.7, 384.96, 50.5, 379.46, 51.32, 375.61, 51.6, + 370.11, 51.6, 364.06, 53.52, 354.99, 56.27, 344.54, 59.57, 336.29, + 66.45, 332.72 + ]], + [[ + 175.47, 386.86, 175.87, 376.44, 177.08, 351.2, 189.1, 332.77, + 194.31, 331.97, 236.37, 332.77, 244.79, 342.39, 246.79, 346.79, + 248.39, 345.99, 251.6, 345.59, 254.8, 348.0, 254.8, 351.6, 250.0, + 352.0, 250.0, 354.81, 251.6, 358.41, 251.6, 364.42, 251.6, 370.03, + 252.8, 378.04, 252.8, 384.05, 250.8, 387.26, 246.39, 387.66, + 245.19, 386.46, 242.38, 388.86, 233.97, 389.26, 232.77, 388.06, + 232.77, 383.65, 195.91, 381.25, 195.91, 384.86, 191.1, 384.86, + 187.49, 385.26, 186.69, 382.85, 184.29, 382.45, 183.09, 387.26, + 178.68, 388.46, 176.28, 387.66 + ]] + ] + return dict( + bboxes=bboxes, labels=labels, bboxes_ignore=bboxes_ignore, masks=masks) + + +def _load_bboxes(results): + ann_info = results['ann_info'] + results['gt_bboxes'] = ann_info['bboxes'].copy() + results['bbox_fields'] = ['gt_bboxes'] + gt_bboxes_ignore = ann_info.get('bboxes_ignore', None) + if gt_bboxes_ignore is not None: + results['gt_bboxes_ignore'] = gt_bboxes_ignore.copy() + results['bbox_fields'].append('gt_bboxes_ignore') + + +def _load_labels(results): + results['gt_labels'] = results['ann_info']['labels'].copy() + + +def _poly2mask(mask_ann, img_h, img_w): + if isinstance(mask_ann, list): + # polygon -- a single object might consist of multiple parts + # we merge all parts into one mask rle code + rles = maskUtils.frPyObjects(mask_ann, img_h, img_w) + rle = maskUtils.merge(rles) + elif isinstance(mask_ann['counts'], list): + # uncompressed RLE + rle = maskUtils.frPyObjects(mask_ann, img_h, img_w) + else: + # rle + rle = mask_ann + mask = maskUtils.decode(rle) + return mask + + +def _process_polygons(polygons): + polygons = [np.array(p) for p in polygons] + valid_polygons = [] + for polygon in polygons: + if len(polygon) % 2 == 0 and len(polygon) >= 6: + valid_polygons.append(polygon) + return valid_polygons + + +def _load_masks(results, poly2mask=True): + h, w = results['img_info']['height'], results['img_info']['width'] + gt_masks = results['ann_info']['masks'] + if poly2mask: + gt_masks = BitmapMasks([_poly2mask(mask, h, w) for 
mask in gt_masks], + h, w) + else: + gt_masks = PolygonMasks( + [_process_polygons(polygons) for polygons in gt_masks], h, w) + results['gt_masks'] = gt_masks + results['mask_fields'] = ['gt_masks'] + + +def _construct_semantic_seg(results): + h, w = results['img_info']['height'], results['img_info']['width'] + seg_toy = (np.random.uniform(0, 1, (h, w)) * 255).astype(np.uint8) + results['gt_semantic_seg'] = seg_toy + results['seg_fields'] = ['gt_semantic_seg'] + + +def construct_toy_data(poly2mask=True): + img_info = dict(height=427, width=640) + ann_info = _construct_ann_info(h=img_info['height'], w=img_info['width']) + results = dict(img_info=img_info, ann_info=ann_info) + # construct image, similar to 'LoadImageFromFile' + _construct_img(results) + # 'LoadAnnotations' (bboxes, labels, masks, semantic_seg) + _load_bboxes(results) + _load_labels(results) + _load_masks(results, poly2mask) + _construct_semantic_seg(results) + return results + + +def test_translate(): + # test assertion for invalid value of level + with pytest.raises(AssertionError): + transform = dict(type='Translate', level=-1) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid type of level + with pytest.raises(AssertionError): + transform = dict(type='Translate', level=[1]) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid prob + with pytest.raises(AssertionError): + transform = dict(type='Translate', level=1, prob=-0.5) + build_from_cfg(transform, PIPELINES) + + # test assertion for the num of elements in tuple img_fill_val + with pytest.raises(AssertionError): + transform = dict( + type='Translate', level=1, img_fill_val=(128, 128, 128, 128)) + build_from_cfg(transform, PIPELINES) + + # test ValueError for invalid type of img_fill_val + with pytest.raises(ValueError): + transform = dict( + type='Translate', level=1, img_fill_val=[128, 128, 128]) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid value of img_fill_val + with pytest.raises(AssertionError): + transform = dict( + type='Translate', level=1, img_fill_val=(128, -1, 256)) + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid value of direction + with pytest.raises(AssertionError): + transform = dict( + type='Translate', level=1, img_fill_val=128, direction='diagonal') + build_from_cfg(transform, PIPELINES) + + # test assertion for invalid type of max_translate_offset + with pytest.raises(AssertionError): + transform = dict( + type='Translate', + level=1, + img_fill_val=128, + max_translate_offset=(250., )) + build_from_cfg(transform, PIPELINES) + + # construct toy data example for unit test + results = construct_toy_data() + + def _check_bbox_mask(results, + results_translated, + offset, + direction, + min_size=0.): + # The key correspondence from bboxes to labels and masks. 
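+ # ('gt_bboxes_ignore' has no paired labels or masks in the toy data, so + # the lookups below are guarded by membership checks.)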
+ bbox2label = { + 'gt_bboxes': 'gt_labels', + 'gt_bboxes_ignore': 'gt_labels_ignore' + } + bbox2mask = { + 'gt_bboxes': 'gt_masks', + 'gt_bboxes_ignore': 'gt_masks_ignore' + } + + def _translate_bbox(bboxes, offset, direction, max_h, max_w): + if direction == 'horizontal': + bboxes[:, 0::2] = bboxes[:, 0::2] + offset + elif direction == 'vertical': + bboxes[:, 1::2] = bboxes[:, 1::2] + offset + else: + raise ValueError + bboxes[:, 0::2] = np.clip(bboxes[:, 0::2], 0, max_w) + bboxes[:, 1::2] = np.clip(bboxes[:, 1::2], 0, max_h) + return bboxes + + h, w, c = results_translated['img'].shape + for key in results_translated.get('bbox_fields', []): + label_key, mask_key = bbox2label[key], bbox2mask[key] + # check that the number of bboxes matches the number of labels/masks + if label_key in results: + assert len(results_translated[key]) == len( + results_translated[label_key]) + if mask_key in results: + assert len(results_translated[key]) == len( + results_translated[mask_key]) + # construct gt_bboxes + gt_bboxes = _translate_bbox( + copy.deepcopy(results[key]), offset, direction, h, w) + valid_inds = (gt_bboxes[:, 2] - gt_bboxes[:, 0] > min_size) & ( + gt_bboxes[:, 3] - gt_bboxes[:, 1] > min_size) + gt_bboxes = gt_bboxes[valid_inds] + # check bbox + assert np.equal(gt_bboxes, results_translated[key]).all() + + # construct gt_masks + if mask_key not in results: + # e.g. 'gt_masks_ignore' + continue + masks, masks_translated = results[mask_key].to_ndarray( + ), results_translated[mask_key].to_ndarray() + assert masks.dtype == masks_translated.dtype + if direction == 'horizontal': + masks_pad = _pad( + h, + abs(offset), + masks.shape[0], + 0, + axis=0, + dtype=masks.dtype) + if offset <= 0: + # left shift + gt_masks = np.concatenate( + (masks[:, :, -offset:], masks_pad), axis=-1) + else: + # right shift + gt_masks = np.concatenate( + (masks_pad, masks[:, :, :-offset]), axis=-1) + else: + masks_pad = _pad( + abs(offset), + w, + masks.shape[0], + 0, + axis=0, + dtype=masks.dtype) + if offset <= 0: + # top shift + gt_masks = np.concatenate( + (masks[:, -offset:, :], masks_pad), axis=1) + else: + # bottom shift + gt_masks = np.concatenate( + (masks_pad, masks[:, :-offset, :]), axis=1) + gt_masks = gt_masks[valid_inds] + # check masks + assert np.equal(gt_masks, masks_translated).all() + + def _check_img_seg(results, results_translated, keys, offset, fill_val, + direction): + for key in keys: + assert isinstance(results_translated[key], type(results[key])) + # assert type(results[key]) == type(results_translated[key]) + data, data_translated = results[key], results_translated[key] + if 'mask' in key: + data, data_translated = data.to_ndarray( + ), data_translated.to_ndarray() + assert data.dtype == data_translated.dtype + if 'img' in key: + data, data_translated = data.transpose( + (2, 0, 1)), data_translated.transpose((2, 0, 1)) + elif 'seg' in key: + data, data_translated = data[None, :, :], data_translated[ + None, :, :] + c, h, w = data.shape + if direction == 'horizontal': + data_pad = _pad( + h, abs(offset), c, fill_val, axis=0, dtype=data.dtype) + if offset <= 0: + # left shift + data_gt = np.concatenate((data[:, :, -offset:], data_pad), + axis=-1) + else: + # right shift + data_gt = np.concatenate((data_pad, data[:, :, :-offset]), + axis=-1) + else: + data_pad = _pad( + abs(offset), w, c, fill_val, axis=0, dtype=data.dtype) + if offset <= 0: + # top shift + data_gt = np.concatenate((data[:, -offset:, :], data_pad), + axis=1) + else: + # bottom shift + data_gt = np.concatenate((data_pad, data[:, :-offset, :]), + axis=1) + if 'mask' in key:
+                # TODO assertion here. ``data_translated`` must be a subset
+                # (or equal) of ``data_gt``
+                pass
+            else:
+                assert np.equal(data_gt, data_translated).all()
+
+    def check_translate(results,
+                        results_translated,
+                        offset,
+                        img_fill_val,
+                        seg_ignore_label,
+                        direction,
+                        min_size=0):
+        # check keys
+        _check_keys(results, results_translated)
+        # check image
+        _check_img_seg(results, results_translated,
+                       results.get('img_fields', ['img']), offset,
+                       img_fill_val, direction)
+        # check segmentation map
+        _check_img_seg(results, results_translated,
+                       results.get('seg_fields', []), offset,
+                       seg_ignore_label, direction)
+        # check masks and bboxes
+        _check_bbox_mask(results, results_translated, offset, direction,
+                         min_size)
+
+    # test case when level=0 (without translate aug)
+    img_fill_val = (104, 116, 124)
+    seg_ignore_label = 255
+    transform = dict(
+        type='Translate',
+        level=0,
+        prob=1.0,
+        img_fill_val=img_fill_val,
+        seg_ignore_label=seg_ignore_label)
+    translate_module = build_from_cfg(transform, PIPELINES)
+    results_wo_translate = translate_module(copy.deepcopy(results))
+    check_translate(
+        copy.deepcopy(results),
+        results_wo_translate,
+        0,
+        img_fill_val,
+        seg_ignore_label,
+        'horizontal',
+    )
+
+    # test case when level>0 and translate horizontally (left shift).
+    transform = dict(
+        type='Translate',
+        level=8,
+        prob=1.0,
+        img_fill_val=img_fill_val,
+        random_negative_prob=1.0,
+        seg_ignore_label=seg_ignore_label)
+    translate_module = build_from_cfg(transform, PIPELINES)
+    offset = translate_module.offset
+    results_translated = translate_module(copy.deepcopy(results))
+    check_translate(
+        copy.deepcopy(results),
+        results_translated,
+        -offset,
+        img_fill_val,
+        seg_ignore_label,
+        'horizontal',
+    )
+
+    # test case when level>0 and translate horizontally (right shift).
+    translate_module.random_negative_prob = 0.0
+    results_translated = translate_module(copy.deepcopy(results))
+    check_translate(
+        copy.deepcopy(results),
+        results_translated,
+        offset,
+        img_fill_val,
+        seg_ignore_label,
+        'horizontal',
+    )
+
+    # test case when level>0 and translate vertically (top shift).
+    transform = dict(
+        type='Translate',
+        level=10,
+        prob=1.0,
+        img_fill_val=img_fill_val,
+        seg_ignore_label=seg_ignore_label,
+        random_negative_prob=1.0,
+        direction='vertical')
+    translate_module = build_from_cfg(transform, PIPELINES)
+    offset = translate_module.offset
+    results_translated = translate_module(copy.deepcopy(results))
+    check_translate(
+        copy.deepcopy(results), results_translated, -offset, img_fill_val,
+        seg_ignore_label, 'vertical')
+
+    # test case when level>0 and translate vertically (bottom shift).
+    translate_module.random_negative_prob = 0.0
+    results_translated = translate_module(copy.deepcopy(results))
+    check_translate(
+        copy.deepcopy(results), results_translated, offset, img_fill_val,
+        seg_ignore_label, 'vertical')
+
+    # test case when no translation is called (prob<=0)
+    transform = dict(
+        type='Translate',
+        level=8,
+        prob=0.0,
+        img_fill_val=img_fill_val,
+        random_negative_prob=0.0,
+        seg_ignore_label=seg_ignore_label)
+    translate_module = build_from_cfg(transform, PIPELINES)
+    results_translated = translate_module(copy.deepcopy(results))
+
+    # test translate vertically with PolygonMasks (top shift)
+    results = construct_toy_data(False)
+    transform = dict(
+        type='Translate',
+        level=10,
+        prob=1.0,
+        img_fill_val=img_fill_val,
+        seg_ignore_label=seg_ignore_label,
+        direction='vertical')
+    translate_module = build_from_cfg(transform, PIPELINES)
+    offset = translate_module.offset
+    translate_module.random_negative_prob = 1.0
+    results_translated = translate_module(copy.deepcopy(results))
+
+    def _translated_gt(masks, direction, offset, out_shape):
+        translated_masks = []
+        for poly_per_obj in masks:
+            translated_poly_per_obj = []
+            for p in poly_per_obj:
+                p = p.copy()
+                if direction == 'horizontal':
+                    p[0::2] = np.clip(p[0::2] + offset, 0, out_shape[1])
+                elif direction == 'vertical':
+                    p[1::2] = np.clip(p[1::2] + offset, 0, out_shape[0])
+                if PolygonMasks([[p]], *out_shape).areas[0] > 0:
+                    # filter invalid (area=0)
+                    translated_poly_per_obj.append(p)
+            if len(translated_poly_per_obj):
+                translated_masks.append(translated_poly_per_obj)
+        translated_masks = PolygonMasks(translated_masks, *out_shape)
+        return translated_masks
+
+    h, w = results['img_shape'][:2]
+    for key in results.get('mask_fields', []):
+        masks = results[key]
+        translated_gt = _translated_gt(masks, 'vertical', -offset, (h, w))
+        assert np.equal(results_translated[key].to_ndarray(),
+                        translated_gt.to_ndarray()).all()
+
+    # test translate horizontally with PolygonMasks (right shift)
+    results = construct_toy_data(False)
+    transform = dict(
+        type='Translate',
+        level=8,
+        prob=1.0,
+        img_fill_val=img_fill_val,
+        random_negative_prob=0.0,
+        seg_ignore_label=seg_ignore_label)
+    translate_module = build_from_cfg(transform, PIPELINES)
+    offset = translate_module.offset
+    results_translated = translate_module(copy.deepcopy(results))
+    h, w = results['img_shape'][:2]
+    for key in results.get('mask_fields', []):
+        masks = results[key]
+        translated_gt = _translated_gt(masks, 'horizontal', offset, (h, w))
+        assert np.equal(results_translated[key].to_ndarray(),
+                        translated_gt.to_ndarray()).all()
+
+    # test AutoAugment equipped with Translate
+    policies = [[dict(type='Translate', level=10, prob=1.)]]
+    autoaug = dict(type='AutoAugment', policies=policies)
+    autoaug_module = build_from_cfg(autoaug, PIPELINES)
+    autoaug_module(copy.deepcopy(results))
+
+    policies = [[
+        dict(type='Translate', level=10, prob=1.),
+        dict(
+            type='Translate',
+            level=8,
+            img_fill_val=img_fill_val,
+            direction='vertical')
+    ]]
+    autoaug = dict(type='AutoAugment', policies=policies)
+    autoaug_module = build_from_cfg(autoaug, PIPELINES)
+    autoaug_module(copy.deepcopy(results))
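+    # Hedged summary of the knobs exercised above: `level` in [0, 10] scales
+    # the pixel offset of ``Translate``, `prob` is the chance the transform
+    # fires at all, and `random_negative_prob` flips the sign of the offset.
+    # The exact level-to-offset mapping is an implementation detail of the
+    # Translate op that is assumed here, not asserted by these tests.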
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_utils.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..cd612f2f63e788b9ad8f22e96da63f53d46a2d4a
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_data/test_utils.py
@@ -0,0 +1,79 @@
+import pytest
+
+from mmdet.datasets import get_loading_pipeline, replace_ImageToTensor
+
+
+def test_replace_ImageToTensor():
+    # with MultiScaleFlipAug
+    pipelines = [
+        dict(type='LoadImageFromFile'),
+        dict(
+            type='MultiScaleFlipAug',
+            img_scale=(1333, 800),
+            flip=False,
+            transforms=[
+                dict(type='Resize', keep_ratio=True),
+                dict(type='RandomFlip'),
+                dict(type='Normalize'),
+                dict(type='Pad', size_divisor=32),
+                dict(type='ImageToTensor', keys=['img']),
+                dict(type='Collect', keys=['img']),
+            ])
+    ]
+    expected_pipelines = [
+        dict(type='LoadImageFromFile'),
+        dict(
+            type='MultiScaleFlipAug',
+            img_scale=(1333, 800),
+            flip=False,
+            transforms=[
+                dict(type='Resize', keep_ratio=True),
+                dict(type='RandomFlip'),
+                dict(type='Normalize'),
+                dict(type='Pad', size_divisor=32),
+                dict(type='DefaultFormatBundle'),
+                dict(type='Collect', keys=['img']),
+            ])
+    ]
+    with pytest.warns(UserWarning):
+        assert expected_pipelines == replace_ImageToTensor(pipelines)
+
+    # without MultiScaleFlipAug
+    pipelines = [
+        dict(type='LoadImageFromFile'),
+        dict(type='Resize', keep_ratio=True),
+        dict(type='RandomFlip'),
+        dict(type='Normalize'),
+        dict(type='Pad', size_divisor=32),
+        dict(type='ImageToTensor', keys=['img']),
+        dict(type='Collect', keys=['img']),
+    ]
+    expected_pipelines = [
+        dict(type='LoadImageFromFile'),
+        dict(type='Resize', keep_ratio=True),
+        dict(type='RandomFlip'),
+        dict(type='Normalize'),
+        dict(type='Pad', size_divisor=32),
+        dict(type='DefaultFormatBundle'),
+        dict(type='Collect', keys=['img']),
+    ]
+    with pytest.warns(UserWarning):
+        assert expected_pipelines == replace_ImageToTensor(pipelines)
+
+
+def test_get_loading_pipeline():
+    pipelines = [
+        dict(type='LoadImageFromFile'),
+        dict(type='LoadAnnotations', with_bbox=True),
+        dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
+        dict(type='RandomFlip', flip_ratio=0.5),
+        dict(type='Pad', size_divisor=32),
+        dict(type='DefaultFormatBundle'),
+        dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels'])
+    ]
+    expected_pipelines = [
+        dict(type='LoadImageFromFile'),
+        dict(type='LoadAnnotations', with_bbox=True)
+    ]
+    assert expected_pipelines == \
+        get_loading_pipeline(pipelines)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_metrics/test_box_overlap.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_metrics/test_box_overlap.py
new file mode 100644
index 0000000000000000000000000000000000000000..94c6400f3360d9cd8df172f74a37f729324ec163
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_metrics/test_box_overlap.py
@@ -0,0 +1,105 @@
+import numpy as np
+import pytest
+import torch
+
+from mmdet.core import BboxOverlaps2D, bbox_overlaps
+
+
+def test_bbox_overlaps_2d(eps=1e-7):
+
+    def _construct_bbox(num_bbox=None):
+        img_h = int(np.random.randint(3, 1000))
+        img_w = int(np.random.randint(3, 1000))
+        if num_bbox is None:
+            num_bbox = np.random.randint(1, 10)
+        x1y1 = torch.rand((num_bbox, 2))
+        x2y2 = torch.max(torch.rand((num_bbox, 2)), x1y1)
+        bboxes = torch.cat((x1y1, x2y2), -1)
+        bboxes[:, 0::2] *= img_w
+        bboxes[:, 1::2] *= img_h
+        return bboxes, num_bbox
+
+    # is_aligned is True, bboxes.size(-1) == 5 (include score)
+    self = BboxOverlaps2D()
+    bboxes1, num_bbox = _construct_bbox()
+    bboxes2, _ = _construct_bbox(num_bbox)
+    bboxes1 = torch.cat((bboxes1, torch.rand((num_bbox, 1))), 1)
+    bboxes2 = torch.cat((bboxes2, torch.rand((num_bbox, 1))), 1)
+    gious = self(bboxes1, bboxes2, 'giou', True)
+    assert gious.size() == (num_bbox, ), gious.size()
+    assert torch.all(gious >= -1) and torch.all(gious <= 1)
+
+    # is_aligned is True, bboxes1.size(-2) == 0
+    bboxes1 = torch.empty((0, 4))
+    bboxes2 = torch.empty((0, 4))
+    gious = self(bboxes1, bboxes2, 'giou', True)
+    assert gious.size() == (0, ), gious.size()
+    assert torch.all(gious == torch.empty((0, )))
+    assert torch.all(gious >= -1) and torch.all(gious <= 1)
+
+    # is_aligned is True, and bboxes.ndims > 2
+    bboxes1, num_bbox = _construct_bbox()
+    bboxes2, _ = _construct_bbox(num_bbox)
+    bboxes1 = bboxes1.unsqueeze(0).repeat(2, 1, 1)
+    # test assertion when batch dim is not the same
+    with pytest.raises(AssertionError):
+        self(bboxes1, bboxes2.unsqueeze(0).repeat(3, 1, 1), 'giou', True)
+    bboxes2 = bboxes2.unsqueeze(0).repeat(2, 1, 1)
+    gious = self(bboxes1, bboxes2, 'giou', True)
+    assert torch.all(gious >= -1) and torch.all(gious <= 1)
+    assert gious.size() == (2, num_bbox)
+    bboxes1 = bboxes1.unsqueeze(0).repeat(2, 1, 1, 1)
+    bboxes2 = bboxes2.unsqueeze(0).repeat(2, 1, 1, 1)
+    gious = self(bboxes1, bboxes2, 'giou', True)
+    assert torch.all(gious >= -1) and torch.all(gious <= 1)
+    assert gious.size() == (2, 2, num_bbox)
+
+    # is_aligned is False
+    bboxes1, num_bbox1 = _construct_bbox()
+    bboxes2, num_bbox2 = _construct_bbox()
+    gious = self(bboxes1, bboxes2, 'giou')
+    assert torch.all(gious >= -1) and torch.all(gious <= 1)
+    assert gious.size() == (num_bbox1, num_bbox2)
+
+    # is_aligned is False, and bboxes.ndims > 2
+    bboxes1 = bboxes1.unsqueeze(0).repeat(2, 1, 1)
+    bboxes2 = bboxes2.unsqueeze(0).repeat(2, 1, 1)
+    gious = self(bboxes1, bboxes2, 'giou')
+    assert torch.all(gious >= -1) and torch.all(gious <= 1)
+    assert gious.size() == (2, num_bbox1, num_bbox2)
+    bboxes1 = bboxes1.unsqueeze(0)
+    bboxes2 = bboxes2.unsqueeze(0)
+    gious = self(bboxes1, bboxes2, 'giou')
+    assert torch.all(gious >= -1) and torch.all(gious <= 1)
+    assert gious.size() == (1, 2, num_bbox1, num_bbox2)
+
+    # is_aligned is False, bboxes1.size(-2) == 0
+    gious = self(torch.empty(1, 2, 0, 4), bboxes2, 'giou')
+    assert torch.all(gious == torch.empty(1, 2, 0, bboxes2.size(-2)))
+    assert torch.all(gious >= -1) and torch.all(gious <= 1)
+
+    # test allclose between bbox_overlaps and the original official
+    # implementation.
+    bboxes1 = torch.FloatTensor([
+        [0, 0, 10, 10],
+        [10, 10, 20, 20],
+        [32, 32, 38, 42],
+    ])
+    bboxes2 = torch.FloatTensor([
+        [0, 0, 10, 20],
+        [0, 10, 10, 19],
+        [10, 10, 20, 20],
+    ])
+    gious = bbox_overlaps(bboxes1, bboxes2, 'giou', is_aligned=True, eps=eps)
+    gious = gious.numpy().round(4)
+    # the reference values below are taken with four-decimal precision.
+    expected_gious = np.array([0.5000, -0.0500, -0.8214])
+    assert np.allclose(gious, expected_gious, rtol=0, atol=eps)
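+    # Sanity check of the first expected value by hand (GIoU = IoU -
+    # (enclose - union) / enclose): boxes [0, 0, 10, 10] and [0, 0, 10, 20]
+    # intersect in 100 px, their union is 200 px, and the enclosing box is
+    # also 200 px, so GIoU = 100 / 200 - 0 = 0.5, matching expected_gious[0].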
+
+    # test mode 'iof'
+    ious = bbox_overlaps(bboxes1, bboxes2, 'iof', is_aligned=True, eps=eps)
+    assert torch.all(ious >= -1) and torch.all(ious <= 1)
+    assert ious.size() == (bboxes1.size(0), )
+    ious = bbox_overlaps(bboxes1, bboxes2, 'iof', eps=eps)
+    assert torch.all(ious >= -1) and torch.all(ious <= 1)
+    assert ious.size() == (bboxes1.size(0), bboxes2.size(0))
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_metrics/test_losses.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_metrics/test_losses.py
new file mode 100644
index 0000000000000000000000000000000000000000..dac85e1dd3d2f0788e04b051cf2fe18f1944e615
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_metrics/test_losses.py
@@ -0,0 +1,240 @@
+import pytest
+import torch
+
+from mmdet.models import Accuracy, build_loss
+
+
+def test_ce_loss():
+    # use_mask and use_sigmoid cannot be true at the same time
+    with pytest.raises(AssertionError):
+        loss_cfg = dict(
+            type='CrossEntropyLoss',
+            use_mask=True,
+            use_sigmoid=True,
+            loss_weight=1.0)
+        build_loss(loss_cfg)
+
+    # test loss with class weights
+    loss_cls_cfg = dict(
+        type='CrossEntropyLoss',
+        use_sigmoid=False,
+        class_weight=[0.8, 0.2],
+        loss_weight=1.0)
+    loss_cls = build_loss(loss_cls_cfg)
+    fake_pred = torch.Tensor([[100, -100]])
+    fake_label = torch.Tensor([1]).long()
+    assert torch.allclose(loss_cls(fake_pred, fake_label), torch.tensor(40.))
+
+    loss_cls_cfg = dict(
+        type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0)
+    loss_cls = build_loss(loss_cls_cfg)
+    assert torch.allclose(loss_cls(fake_pred, fake_label), torch.tensor(200.))
+
+
+def test_varifocal_loss():
+    # only sigmoid version of VarifocalLoss is implemented
+    with pytest.raises(AssertionError):
+        loss_cfg = dict(
+            type='VarifocalLoss', use_sigmoid=False, loss_weight=1.0)
+        build_loss(loss_cfg)
+
+    # test that alpha should be greater than 0
+    with pytest.raises(AssertionError):
+        loss_cfg = dict(
+            type='VarifocalLoss',
+            alpha=-0.75,
+            gamma=2.0,
+            use_sigmoid=True,
+            loss_weight=1.0)
+        build_loss(loss_cfg)
+
+    # test that pred and target should be of the same size
+    loss_cls_cfg = dict(
+        type='VarifocalLoss',
+        use_sigmoid=True,
+        alpha=0.75,
+        gamma=2.0,
+        iou_weighted=True,
+        reduction='mean',
+        loss_weight=1.0)
+    loss_cls = build_loss(loss_cls_cfg)
+    with pytest.raises(AssertionError):
+        fake_pred = torch.Tensor([[100.0, -100.0]])
+        fake_target = torch.Tensor([[1.0]])
+        loss_cls(fake_pred, fake_target)
+
+    # test the calculation
+    loss_cls = build_loss(loss_cls_cfg)
+    fake_pred = torch.Tensor([[100.0, -100.0]])
+    fake_target = torch.Tensor([[1.0, 0.0]])
+    assert torch.allclose(loss_cls(fake_pred, fake_target), torch.tensor(0.0))
+
+    # test the loss with weights
+    loss_cls = build_loss(loss_cls_cfg)
+    fake_pred = torch.Tensor([[0.0, 100.0]])
+    fake_target = torch.Tensor([[1.0, 1.0]])
+    fake_weight = torch.Tensor([0.0, 1.0])
+    assert torch.allclose(
+        loss_cls(fake_pred, fake_target, fake_weight), torch.tensor(0.0))
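+    # Why the zero losses above hold (an informal reading based on the
+    # saturated logits, not a derivation from the implementation): with
+    # logits of +/-100 the sigmoid outputs are numerically 0 or 1, so a
+    # prediction that matches its target contributes no loss, and in the
+    # weighted case the one position that would contribute (logit 0 against
+    # target 1) is masked out by its zero weight.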
+
+
+def test_kd_loss():
+    # test that temperature should be no less than 1
+    with pytest.raises(AssertionError):
+        loss_cfg = dict(
+            type='KnowledgeDistillationKLDivLoss', loss_weight=1.0, T=0.5)
+        build_loss(loss_cfg)
+
+    # test that pred and target should be of the same size
+    loss_cls_cfg = dict(
+        type='KnowledgeDistillationKLDivLoss', loss_weight=1.0, T=1)
+    loss_cls = build_loss(loss_cls_cfg)
+    with pytest.raises(AssertionError):
+        fake_pred = torch.Tensor([[100, -100]])
+        fake_label = torch.Tensor([1]).long()
+        loss_cls(fake_pred, fake_label)
+
+    # test the calculation
+    loss_cls = build_loss(loss_cls_cfg)
+    fake_pred = torch.Tensor([[100.0, 100.0]])
+    fake_target = torch.Tensor([[1.0, 1.0]])
+    assert torch.allclose(loss_cls(fake_pred, fake_target), torch.tensor(0.0))
+
+    # test the loss with weights
+    loss_cls = build_loss(loss_cls_cfg)
+    fake_pred = torch.Tensor([[100.0, -100.0], [100.0, 100.0]])
+    fake_target = torch.Tensor([[1.0, 0.0], [1.0, 1.0]])
+    fake_weight = torch.Tensor([0.0, 1.0])
+    assert torch.allclose(
+        loss_cls(fake_pred, fake_target, fake_weight), torch.tensor(0.0))
+
+
+def test_seesaw_loss():
+    # only softmax version of Seesaw Loss is implemented
+    with pytest.raises(AssertionError):
+        loss_cfg = dict(type='SeesawLoss', use_sigmoid=True, loss_weight=1.0)
+        build_loss(loss_cfg)
+
+    # test that cls_score.size(-1) == num_classes + 2
+    loss_cls_cfg = dict(
+        type='SeesawLoss', p=0.0, q=0.0, loss_weight=1.0, num_classes=2)
+    loss_cls = build_loss(loss_cls_cfg)
+    # the length of fake_pred should be num_classes + 2 = 4
+    with pytest.raises(AssertionError):
+        fake_pred = torch.Tensor([[-100, 100]])
+        fake_label = torch.Tensor([1]).long()
+        loss_cls(fake_pred, fake_label)
+    # the length of fake_pred should be num_classes + 2 = 4
+    with pytest.raises(AssertionError):
+        fake_pred = torch.Tensor([[-100, 100, -100]])
+        fake_label = torch.Tensor([1]).long()
+        loss_cls(fake_pred, fake_label)
+
+    # test the calculation without p and q
+    loss_cls_cfg = dict(
+        type='SeesawLoss', p=0.0, q=0.0, loss_weight=1.0, num_classes=2)
+    loss_cls = build_loss(loss_cls_cfg)
+    fake_pred = torch.Tensor([[-100, 100, -100, 100]])
+    fake_label = torch.Tensor([1]).long()
+    loss = loss_cls(fake_pred, fake_label)
+    assert torch.allclose(loss['loss_cls_objectness'], torch.tensor(200.))
+    assert torch.allclose(loss['loss_cls_classes'], torch.tensor(0.))
+
+    # test the calculation with p and without q
+    loss_cls_cfg = dict(
+        type='SeesawLoss', p=1.0, q=0.0, loss_weight=1.0, num_classes=2)
+    loss_cls = build_loss(loss_cls_cfg)
+    fake_pred = torch.Tensor([[-100, 100, -100, 100]])
+    fake_label = torch.Tensor([0]).long()
+    loss_cls.cum_samples[0] = torch.exp(torch.Tensor([20]))
+    loss = loss_cls(fake_pred, fake_label)
+    assert torch.allclose(loss['loss_cls_objectness'], torch.tensor(200.))
+    assert torch.allclose(loss['loss_cls_classes'], torch.tensor(180.))
+
+    # test the calculation with q and without p
+    loss_cls_cfg = dict(
+        type='SeesawLoss', p=0.0, q=1.0, loss_weight=1.0, num_classes=2)
+    loss_cls = build_loss(loss_cls_cfg)
+    fake_pred = torch.Tensor([[-100, 100, -100, 100]])
+    fake_label = torch.Tensor([0]).long()
+    loss = loss_cls(fake_pred, fake_label)
+    assert torch.allclose(loss['loss_cls_objectness'], torch.tensor(200.))
+    assert torch.allclose(loss['loss_cls_classes'],
+                          torch.tensor(200.) + torch.tensor(100.).log())
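+    # Interpretation (per the Seesaw Loss paper, hedged rather than read off
+    # this implementation): `p` scales the mitigation factor built from the
+    # cumulative per-class sample counts (hence the cum_samples manipulation
+    # above), while `q` scales the compensation factor built from the
+    # predicted probabilities of misclassified samples.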
+
+    # test the others
+    loss_cls_cfg = dict(
+        type='SeesawLoss',
+        p=0.0,
+        q=1.0,
+        loss_weight=1.0,
+        num_classes=2,
+        return_dict=False)
+    loss_cls = build_loss(loss_cls_cfg)
+    fake_pred = torch.Tensor([[100, -100, 100, -100]])
+    fake_label = torch.Tensor([0]).long()
+    loss = loss_cls(fake_pred, fake_label)
+    acc = loss_cls.get_accuracy(fake_pred, fake_label)
+    act = loss_cls.get_activation(fake_pred)
+    assert torch.allclose(loss, torch.tensor(0.))
+    assert torch.allclose(acc['acc_objectness'], torch.tensor(100.))
+    assert torch.allclose(acc['acc_classes'], torch.tensor(100.))
+    assert torch.allclose(act, torch.tensor([1., 0., 0.]))
+
+
+def test_accuracy():
+    # test for empty pred
+    pred = torch.empty(0, 4)
+    label = torch.empty(0)
+    accuracy = Accuracy(topk=1)
+    acc = accuracy(pred, label)
+    assert acc.item() == 0
+
+    pred = torch.Tensor([[0.2, 0.3, 0.6, 0.5], [0.1, 0.1, 0.2, 0.6],
+                         [0.9, 0.0, 0.0, 0.1], [0.4, 0.7, 0.1, 0.1],
+                         [0.0, 0.0, 0.99, 0]])
+    # test for top1
+    true_label = torch.Tensor([2, 3, 0, 1, 2]).long()
+    accuracy = Accuracy(topk=1)
+    acc = accuracy(pred, true_label)
+    assert acc.item() == 100
+
+    # test for top1 with score thresh=0.8: only rows 3 and 5 have a top
+    # score >= 0.8, so just 2 of the 5 predictions count as correct
+    true_label = torch.Tensor([2, 3, 0, 1, 2]).long()
+    accuracy = Accuracy(topk=1, thresh=0.8)
+    acc = accuracy(pred, true_label)
+    assert acc.item() == 40
+
+    # test for top2
+    accuracy = Accuracy(topk=2)
+    label = torch.Tensor([3, 2, 0, 0, 2]).long()
+    acc = accuracy(pred, label)
+    assert acc.item() == 100
+
+    # test for both top1 and top2
+    accuracy = Accuracy(topk=(1, 2))
+    true_label = torch.Tensor([2, 3, 0, 1, 2]).long()
+    acc = accuracy(pred, true_label)
+    for a in acc:
+        assert a.item() == 100
+
+    # topk is larger than pred class number
+    with pytest.raises(AssertionError):
+        accuracy = Accuracy(topk=5)
+        accuracy(pred, true_label)
+
+    # wrong topk type
+    with pytest.raises(AssertionError):
+        accuracy = Accuracy(topk='wrong type')
+        accuracy(pred, true_label)
+
+    # label size is larger than required
+    with pytest.raises(AssertionError):
+        label = torch.Tensor([2, 3, 0, 1, 2, 0]).long()  # size mismatch
+        accuracy = Accuracy()
+        accuracy(pred, label)
+
+    # wrong pred dimension
+    with pytest.raises(AssertionError):
+        accuracy = Accuracy()
+        accuracy(pred[:, :, None], true_label)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ce4596a6a53dbd308963152612dafda2f84e185c
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/__init__.py
@@ -0,0 +1,3 @@
+from .utils import check_norm_state, is_block, is_norm
+
+__all__ = ['is_block', 'is_norm', 'check_norm_state']
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_detectors_resnet.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_detectors_resnet.py
new file mode 100644
index 0000000000000000000000000000000000000000..fbd817d6acf1a821c097c7cb3a2faf8af687f512
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_detectors_resnet.py
@@ -0,0 +1,46 @@
+import pytest
+
+from mmdet.models.backbones import DetectoRS_ResNet
+
+
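+# Note on the config below: `conv_cfg=dict(type='ConvAWS')` and
+# `sac=dict(type='SAC', use_deform=True)` are the DetectoRS-specific pieces
+# (weight-standardized convs and switchable atrous convolution), and
+# `stage_with_sac` toggles SAC per ResNet stage; this is a reading of the
+# fields exercised here, not an exhaustive description of the backbone.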
+def test_detectorrs_resnet_backbone():
+    detectorrs_cfg = dict(
+        depth=50,
+        num_stages=4,
+        out_indices=(0, 1, 2, 3),
+        frozen_stages=1,
+        norm_cfg=dict(type='BN', requires_grad=True),
+        norm_eval=True,
+        style='pytorch',
+        conv_cfg=dict(type='ConvAWS'),
+        sac=dict(type='SAC', use_deform=True),
+        stage_with_sac=(False, True, True, True),
+        output_img=True)
+    """Test init_weights config"""
+    with pytest.raises(AssertionError):
+        # pretrained and init_cfg cannot be set at the same time
+        DetectoRS_ResNet(
+            **detectorrs_cfg, pretrained='Pretrained', init_cfg='Pretrained')
+
+    with pytest.raises(AssertionError):
+        # init_cfg must be a dict
+        DetectoRS_ResNet(
+            **detectorrs_cfg, pretrained=None, init_cfg=['Pretrained'])
+
+    with pytest.raises(KeyError):
+        # init_cfg must contain the key `type`
+        DetectoRS_ResNet(
+            **detectorrs_cfg,
+            pretrained=None,
+            init_cfg=dict(checkpoint='Pretrained'))
+
+    with pytest.raises(AssertionError):
+        # init_cfg only supports initialization from a pretrained model
+        DetectoRS_ResNet(
+            **detectorrs_cfg, pretrained=None, init_cfg=dict(type='Trained'))
+
+    with pytest.raises(TypeError):
+        # pretrained must be a str or None
+        model = DetectoRS_ResNet(
+            **detectorrs_cfg, pretrained=['Pretrained'], init_cfg=None)
+        model.init_weights()
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_hourglass.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_hourglass.py
new file mode 100644
index 0000000000000000000000000000000000000000..363c94d50e115880ec9ca81f7b421c2c3d99f741
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_hourglass.py
@@ -0,0 +1,44 @@
+import pytest
+import torch
+
+from mmdet.models.backbones.hourglass import HourglassNet
+
+
+def test_hourglass_backbone():
+    with pytest.raises(AssertionError):
+        # HourglassNet's num_stacks should be larger than 0
+        HourglassNet(num_stacks=0)
+
+    with pytest.raises(AssertionError):
+        # len(stage_channels) should equal len(stage_blocks)
+        HourglassNet(
+            stage_channels=[256, 256, 384, 384, 384],
+            stage_blocks=[2, 2, 2, 2, 2, 4])
+
+    with pytest.raises(AssertionError):
+        # len(stage_channels) should be larger than downsample_times
+        HourglassNet(
+            downsample_times=5,
+            stage_channels=[256, 256, 384, 384, 384],
+            stage_blocks=[2, 2, 2, 2, 2])
+
+    # Test HourglassNet-52
+    model = HourglassNet(num_stacks=1)
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 256, 256)
+    feat = model(imgs)
+    assert len(feat) == 1
+    assert feat[0].shape == torch.Size([1, 256, 64, 64])
+
+    # Test HourglassNet-104
+    model = HourglassNet(num_stacks=2)
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 256, 256)
+    feat = model(imgs)
+    assert len(feat) == 2
+    assert feat[0].shape == torch.Size([1, 256, 64, 64])
+    assert feat[1].shape == torch.Size([1, 256, 64, 64])
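+    # The 64x64 feature maps above follow from the stride-4 stem on a
+    # 256x256 input, and each stack contributes one output, so num_stacks=1
+    # yields one map and num_stacks=2 yields two (an assumed reading of the
+    # architecture, kept consistent with the shape asserts in this test).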
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_mobilenet_v2.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_mobilenet_v2.py
new file mode 100644
index 0000000000000000000000000000000000000000..1f879be8822f6dc4026ca618d9d88dedf07e0406
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_mobilenet_v2.py
@@ -0,0 +1,182 @@
+import pytest
+import torch
+from torch.nn.modules import GroupNorm
+from torch.nn.modules.batchnorm import _BatchNorm
+
+from mmdet.models.backbones.mobilenet_v2 import MobileNetV2
+from .utils import check_norm_state, is_block, is_norm
+
+
+def test_mobilenetv2_backbone():
+    with pytest.raises(ValueError):
+        # frozen_stages must be in range(-1, 8)
+        MobileNetV2(frozen_stages=8)
+
+    with pytest.raises(ValueError):
+        # out_indices must be in range(-1, 8)
+        MobileNetV2(out_indices=[8])
+
+    # Test MobileNetV2 with first stage frozen
+    frozen_stages = 1
+    model = MobileNetV2(frozen_stages=frozen_stages)
+    model.init_weights()
+    model.train()
+
+    for mod in model.conv1.modules():
+        for param in mod.parameters():
+            assert param.requires_grad is False
+    for i in range(1, frozen_stages + 1):
+        layer = getattr(model, f'layer{i}')
+        for mod in layer.modules():
+            if isinstance(mod, _BatchNorm):
+                assert mod.training is False
+        for param in layer.parameters():
+            assert param.requires_grad is False
+
+    # Test MobileNetV2 with norm_eval=True
+    model = MobileNetV2(norm_eval=True)
+    model.init_weights()
+    model.train()
+
+    assert check_norm_state(model.modules(), False)
+
+    # Test MobileNetV2 forward with widen_factor=1.0
+    model = MobileNetV2(widen_factor=1.0, out_indices=range(0, 8))
+    model.init_weights()
+    model.train()
+
+    assert check_norm_state(model.modules(), True)
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 8
+    assert feat[0].shape == torch.Size((1, 16, 112, 112))
+    assert feat[1].shape == torch.Size((1, 24, 56, 56))
+    assert feat[2].shape == torch.Size((1, 32, 28, 28))
+    assert feat[3].shape == torch.Size((1, 64, 14, 14))
+    assert feat[4].shape == torch.Size((1, 96, 14, 14))
+    assert feat[5].shape == torch.Size((1, 160, 7, 7))
+    assert feat[6].shape == torch.Size((1, 320, 7, 7))
+    assert feat[7].shape == torch.Size((1, 1280, 7, 7))
+
+    # Test MobileNetV2 forward with widen_factor=0.5
+    model = MobileNetV2(widen_factor=0.5, out_indices=range(0, 7))
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 7
+    assert feat[0].shape == torch.Size((1, 8, 112, 112))
+    assert feat[1].shape == torch.Size((1, 16, 56, 56))
+    assert feat[2].shape == torch.Size((1, 16, 28, 28))
+    assert feat[3].shape == torch.Size((1, 32, 14, 14))
+    assert feat[4].shape == torch.Size((1, 48, 14, 14))
+    assert feat[5].shape == torch.Size((1, 80, 7, 7))
+    assert feat[6].shape == torch.Size((1, 160, 7, 7))
+
+    # Test MobileNetV2 forward with widen_factor=2.0
+    model = MobileNetV2(widen_factor=2.0, out_indices=range(0, 8))
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert feat[0].shape == torch.Size((1, 32, 112, 112))
+    assert feat[1].shape == torch.Size((1, 48, 56, 56))
+    assert feat[2].shape == torch.Size((1, 64, 28, 28))
+    assert feat[3].shape == torch.Size((1, 128, 14, 14))
+    assert feat[4].shape == torch.Size((1, 192, 14, 14))
+    assert feat[5].shape == torch.Size((1, 320, 7, 7))
+    assert feat[6].shape == torch.Size((1, 640, 7, 7))
+    assert feat[7].shape == torch.Size((1, 2560, 7, 7))
+
+    # Test MobileNetV2 forward with dict(type='ReLU')
+    model = MobileNetV2(
+        widen_factor=1.0, act_cfg=dict(type='ReLU'), out_indices=range(0, 7))
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 7
+    assert feat[0].shape == torch.Size((1, 16, 112, 112))
+    assert feat[1].shape == torch.Size((1, 24, 56, 56))
+    assert feat[2].shape == torch.Size((1, 32, 28, 28))
+    assert feat[3].shape == torch.Size((1, 64, 14, 14))
+    assert feat[4].shape == torch.Size((1, 96, 14, 14))
+    assert feat[5].shape == torch.Size((1, 160, 7, 7))
+    assert feat[6].shape == torch.Size((1, 320, 7, 7))
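+    # The widen_factor cases above encode the channel scaling rule: every
+    # stage's channel count from the 1.0x run (16, 24, 32, ...) is halved at
+    # widen_factor=0.5 and doubled at widen_factor=2.0, with spatial sizes
+    # unchanged (a summary of the asserts, not extra coverage).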
+
+    # Test MobileNetV2 with BatchNorm forward
+    model = MobileNetV2(widen_factor=1.0, out_indices=range(0, 7))
+    for m in model.modules():
+        if is_norm(m):
+            assert isinstance(m, _BatchNorm)
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 7
+    assert feat[0].shape == torch.Size((1, 16, 112, 112))
+    assert feat[1].shape == torch.Size((1, 24, 56, 56))
+    assert feat[2].shape == torch.Size((1, 32, 28, 28))
+    assert feat[3].shape == torch.Size((1, 64, 14, 14))
+    assert feat[4].shape == torch.Size((1, 96, 14, 14))
+    assert feat[5].shape == torch.Size((1, 160, 7, 7))
+    assert feat[6].shape == torch.Size((1, 320, 7, 7))
+
+    # Test MobileNetV2 with GroupNorm forward
+    model = MobileNetV2(
+        widen_factor=1.0,
+        norm_cfg=dict(type='GN', num_groups=2, requires_grad=True),
+        out_indices=range(0, 7))
+    for m in model.modules():
+        if is_norm(m):
+            assert isinstance(m, GroupNorm)
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 7
+    assert feat[0].shape == torch.Size((1, 16, 112, 112))
+    assert feat[1].shape == torch.Size((1, 24, 56, 56))
+    assert feat[2].shape == torch.Size((1, 32, 28, 28))
+    assert feat[3].shape == torch.Size((1, 64, 14, 14))
+    assert feat[4].shape == torch.Size((1, 96, 14, 14))
+    assert feat[5].shape == torch.Size((1, 160, 7, 7))
+    assert feat[6].shape == torch.Size((1, 320, 7, 7))
+
+    # Test MobileNetV2 with layers 1, 3, 5 out forward
+    model = MobileNetV2(widen_factor=1.0, out_indices=(0, 2, 4))
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 3
+    assert feat[0].shape == torch.Size((1, 16, 112, 112))
+    assert feat[1].shape == torch.Size((1, 32, 28, 28))
+    assert feat[2].shape == torch.Size((1, 96, 14, 14))
+
+    # Test MobileNetV2 with checkpoint forward
+    model = MobileNetV2(
+        widen_factor=1.0, with_cp=True, out_indices=range(0, 7))
+    for m in model.modules():
+        if is_block(m):
+            assert m.with_cp
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 7
+    assert feat[0].shape == torch.Size((1, 16, 112, 112))
+    assert feat[1].shape == torch.Size((1, 24, 56, 56))
+    assert feat[2].shape == torch.Size((1, 32, 28, 28))
+    assert feat[3].shape == torch.Size((1, 64, 14, 14))
+    assert feat[4].shape == torch.Size((1, 96, 14, 14))
+    assert feat[5].shape == torch.Size((1, 160, 7, 7))
+    assert feat[6].shape == torch.Size((1, 320, 7, 7))
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_regnet.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_regnet.py
new file mode 100644
index 0000000000000000000000000000000000000000..81d4abcea63724842d82204ab8108370a0ff6396
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_regnet.py
@@ -0,0 +1,58 @@
+import pytest
+import torch
+
+from mmdet.models.backbones import RegNet
+
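+# Each tuple below is (arch_name, RegNet generation parameters, expected
+# per-stage output channels): w0/wa/wm parameterize the quantized-linear
+# width function, group_w is the group width and depth the block count (a
+# summary of the RegNet design-space convention, assumed rather than
+# re-derived here).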
+regnet_test_data = [
+    ('regnetx_400mf',
+     dict(w0=24, wa=24.48, wm=2.54, group_w=16, depth=22,
+          bot_mul=1.0), [32, 64, 160, 384]),
+    ('regnetx_800mf',
+     dict(w0=56, wa=35.73, wm=2.28, group_w=16, depth=16,
+          bot_mul=1.0), [64, 128, 288, 672]),
+    ('regnetx_1.6gf',
+     dict(w0=80, wa=34.01, wm=2.25, group_w=24, depth=18,
+          bot_mul=1.0), [72, 168, 408, 912]),
+    ('regnetx_3.2gf',
+     dict(w0=88, wa=26.31, wm=2.25, group_w=48, depth=25,
+          bot_mul=1.0), [96, 192, 432, 1008]),
+    ('regnetx_4.0gf',
+     dict(w0=96, wa=38.65, wm=2.43, group_w=40, depth=23,
+          bot_mul=1.0), [80, 240, 560, 1360]),
+    ('regnetx_6.4gf',
+     dict(w0=184, wa=60.83, wm=2.07, group_w=56, depth=17,
+          bot_mul=1.0), [168, 392, 784, 1624]),
+    ('regnetx_8.0gf',
+     dict(w0=80, wa=49.56, wm=2.88, group_w=120, depth=23,
+          bot_mul=1.0), [80, 240, 720, 1920]),
+    ('regnetx_12gf',
+     dict(w0=168, wa=73.36, wm=2.37, group_w=112, depth=19,
+          bot_mul=1.0), [224, 448, 896, 2240]),
+]
+
+
+@pytest.mark.parametrize('arch_name,arch,out_channels', regnet_test_data)
+def test_regnet_backbone(arch_name, arch, out_channels):
+    with pytest.raises(AssertionError):
+        # arch_name must be a predefined RegNet architecture
+        RegNet(arch_name + '233')
+
+    # Test RegNet with arch_name
+    model = RegNet(arch_name)
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 4
+    assert feat[0].shape == torch.Size([1, out_channels[0], 56, 56])
+    assert feat[1].shape == torch.Size([1, out_channels[1], 28, 28])
+    assert feat[2].shape == torch.Size([1, out_channels[2], 14, 14])
+    assert feat[3].shape == torch.Size([1, out_channels[3], 7, 7])
+
+    # Test RegNet with arch (run a forward pass so the asserts check this
+    # model rather than the stale features from the arch_name run)
+    model = RegNet(arch)
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert feat[0].shape == torch.Size([1, out_channels[0], 56, 56])
+    assert feat[1].shape == torch.Size([1, out_channels[1], 28, 28])
+    assert feat[2].shape == torch.Size([1, out_channels[2], 14, 14])
+    assert feat[3].shape == torch.Size([1, out_channels[3], 7, 7])
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_renext.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_renext.py
new file mode 100644
index 0000000000000000000000000000000000000000..d01443e4042fb6884ae111d0377f347d0220a2bd
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_renext.py
@@ -0,0 +1,105 @@
+import pytest
+import torch
+
+from mmdet.models.backbones import ResNeXt
+from mmdet.models.backbones.resnext import Bottleneck as BottleneckX
+from .utils import is_block
+
+
+def test_renext_bottleneck():
+    with pytest.raises(AssertionError):
+        # Style must be in ['pytorch', 'caffe']
+        BottleneckX(64, 64, groups=32, base_width=4, style='tensorflow')
+
+    # Test ResNeXt Bottleneck structure
+    block = BottleneckX(
+        64, 64, groups=32, base_width=4, stride=2, style='pytorch')
+    assert block.conv2.stride == (2, 2)
+    assert block.conv2.groups == 32
+    assert block.conv2.out_channels == 128
+
+    # Test ResNeXt Bottleneck with DCN
+    dcn = dict(type='DCN', deform_groups=1, fallback_on_stride=False)
+    with pytest.raises(AssertionError):
+        # conv_cfg must be None if dcn is not None
+        BottleneckX(
+            64,
+            64,
+            groups=32,
+            base_width=4,
+            dcn=dcn,
+            conv_cfg=dict(type='Conv'))
+    BottleneckX(64, 64, dcn=dcn)
+
+    # Test ResNeXt Bottleneck forward
+    block = BottleneckX(64, 16, groups=32, base_width=4)
+    x = torch.randn(1, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+    # Test ResNeXt Bottleneck forward with plugins
+    plugins = [
+        dict(
+            cfg=dict(
+                type='GeneralizedAttention',
+                spatial_range=-1,
+                num_heads=8,
+                attention_type='0010',
+                kv_stride=2),
+            stages=(False, False, True, True),
+            position='after_conv2')
+    ]
+    block = BottleneckX(64, 16, groups=32, base_width=4, plugins=plugins)
+    x = torch.randn(1, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+
+def test_resnext_backbone():
+    with pytest.raises(KeyError):
+        # ResNeXt depth should be in [50, 101, 152]
+        ResNeXt(depth=18)
+
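+    # The mid-channel assert in test_renext_bottleneck above follows the
+    # usual ResNeXt width rule, width = planes * base_width / 64 * groups =
+    # 64 * 4 / 64 * 32 = 128 (stated as the conventional formula, not read
+    # off this particular implementation).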
+    # Test ResNeXt with group 32, base_width 4
+    model = ResNeXt(depth=50, groups=32, base_width=4)
+    for m in model.modules():
+        if is_block(m):
+            assert m.conv2.groups == 32
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 4
+    assert feat[0].shape == torch.Size([1, 256, 56, 56])
+    assert feat[1].shape == torch.Size([1, 512, 28, 28])
+    assert feat[2].shape == torch.Size([1, 1024, 14, 14])
+    assert feat[3].shape == torch.Size([1, 2048, 7, 7])
+
+
+regnet_test_data = [
+    ('regnetx_400mf',
+     dict(w0=24, wa=24.48, wm=2.54, group_w=16, depth=22,
+          bot_mul=1.0), [32, 64, 160, 384]),
+    ('regnetx_800mf',
+     dict(w0=56, wa=35.73, wm=2.28, group_w=16, depth=16,
+          bot_mul=1.0), [64, 128, 288, 672]),
+    ('regnetx_1.6gf',
+     dict(w0=80, wa=34.01, wm=2.25, group_w=24, depth=18,
+          bot_mul=1.0), [72, 168, 408, 912]),
+    ('regnetx_3.2gf',
+     dict(w0=88, wa=26.31, wm=2.25, group_w=48, depth=25,
+          bot_mul=1.0), [96, 192, 432, 1008]),
+    ('regnetx_4.0gf',
+     dict(w0=96, wa=38.65, wm=2.43, group_w=40, depth=23,
+          bot_mul=1.0), [80, 240, 560, 1360]),
+    ('regnetx_6.4gf',
+     dict(w0=184, wa=60.83, wm=2.07, group_w=56, depth=17,
+          bot_mul=1.0), [168, 392, 784, 1624]),
+    ('regnetx_8.0gf',
+     dict(w0=80, wa=49.56, wm=2.88, group_w=120, depth=23,
+          bot_mul=1.0), [80, 240, 720, 1920]),
+    ('regnetx_12gf',
+     dict(w0=168, wa=73.36, wm=2.37, group_w=112, depth=19,
+          bot_mul=1.0), [224, 448, 896, 2240]),
+]
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_res2net.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_res2net.py
new file mode 100644
index 0000000000000000000000000000000000000000..95d0118ca525114dfee9b1f91d8d659e14d055c6
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_res2net.py
@@ -0,0 +1,62 @@
+import pytest
+import torch
+
+from mmdet.models.backbones import Res2Net
+from mmdet.models.backbones.res2net import Bottle2neck
+from .utils import is_block
+
+
+def test_res2net_bottle2neck():
+    with pytest.raises(AssertionError):
+        # Style must be in ['pytorch', 'caffe']
+        Bottle2neck(64, 64, base_width=26, scales=4, style='tensorflow')
+
+    with pytest.raises(AssertionError):
+        # Scale must be larger than 1
+        Bottle2neck(64, 64, base_width=26, scales=1, style='pytorch')
+
+    # Test Res2Net Bottle2neck structure
+    block = Bottle2neck(
+        64, 64, base_width=26, stride=2, scales=4, style='pytorch')
+    assert block.scales == 4
+
+    # Test Res2Net Bottle2neck with DCN
+    dcn = dict(type='DCN', deform_groups=1, fallback_on_stride=False)
+    with pytest.raises(AssertionError):
+        # conv_cfg must be None if dcn is not None
+        Bottle2neck(
+            64,
+            64,
+            base_width=26,
+            scales=4,
+            dcn=dcn,
+            conv_cfg=dict(type='Conv'))
+    Bottle2neck(64, 64, dcn=dcn)
+
+    # Test Res2Net Bottle2neck forward
+    block = Bottle2neck(64, 16, base_width=26, scales=4)
+    x = torch.randn(1, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+
+def test_res2net_backbone():
+    with pytest.raises(KeyError):
+        # Res2Net depth should be in [50, 101, 152]
+        Res2Net(depth=18)
+
+    # Test Res2Net with scales 4, base_width 26
+    model = Res2Net(depth=50, scales=4, base_width=26)
+    for m in model.modules():
+        if is_block(m):
+            assert m.scales == 4
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 4
+    assert feat[0].shape == torch.Size([1, 256, 56, 56])
+    assert feat[1].shape == torch.Size([1, 512, 28, 28])
+    assert feat[2].shape == torch.Size([1, 1024, 14, 14])
+    assert feat[3].shape == torch.Size([1, 2048, 7, 7])
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_resnest.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_resnest.py
new file mode 100644
index 0000000000000000000000000000000000000000..2243591620eadc82881872ab7b5e5e5df3d8ac0b
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_resnest.py
@@ -0,0 +1,43 @@
+import pytest
+import torch
+
+from mmdet.models.backbones import ResNeSt
+from mmdet.models.backbones.resnest import Bottleneck as BottleneckS
+
+
+def test_resnest_bottleneck():
+    with pytest.raises(AssertionError):
+        # Style must be in ['pytorch', 'caffe']
+        BottleneckS(64, 64, radix=2, reduction_factor=4, style='tensorflow')
+
+    # Test ResNeSt Bottleneck structure
+    block = BottleneckS(
+        64, 256, radix=2, reduction_factor=4, stride=2, style='pytorch')
+    assert block.avd_layer.stride == 2
+    assert block.conv2.channels == 256
+
+    # Test ResNeSt Bottleneck forward
+    block = BottleneckS(64, 16, radix=2, reduction_factor=4)
+    x = torch.randn(2, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([2, 64, 56, 56])
+
+
+def test_resnest_backbone():
+    with pytest.raises(KeyError):
+        # ResNeSt depth should be in [50, 101, 152, 200]
+        ResNeSt(depth=18)
+
+    # Test ResNeSt with radix 2, reduction_factor 4
+    model = ResNeSt(
+        depth=50, radix=2, reduction_factor=4, out_indices=(0, 1, 2, 3))
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(2, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 4
+    assert feat[0].shape == torch.Size([2, 256, 56, 56])
+    assert feat[1].shape == torch.Size([2, 512, 28, 28])
+    assert feat[2].shape == torch.Size([2, 1024, 14, 14])
+    assert feat[3].shape == torch.Size([2, 2048, 7, 7])
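+    # radix=2 splits each cardinal group in two and reweights the splits
+    # with split attention, while reduction_factor=4 shrinks the attention
+    # module's hidden width; this is the standard ResNeSt reading of the two
+    # arguments, assumed rather than asserted by the shape checks above.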
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_resnet.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_resnet.py
new file mode 100644
index 0000000000000000000000000000000000000000..afbdf1c912a1a5b8552177480803b5003f95496f
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_resnet.py
@@ -0,0 +1,666 @@
+import pytest
+import torch
+from mmcv import assert_params_all_zeros
+from mmcv.ops import DeformConv2dPack
+from torch.nn.modules import AvgPool2d, GroupNorm
+from torch.nn.modules.batchnorm import _BatchNorm
+
+from mmdet.models.backbones import ResNet, ResNetV1d
+from mmdet.models.backbones.resnet import BasicBlock, Bottleneck
+from mmdet.models.utils import ResLayer, SimplifiedBasicBlock
+from .utils import check_norm_state, is_block, is_norm
+
+
+def test_resnet_basic_block():
+    with pytest.raises(AssertionError):
+        # Not implemented yet.
+        dcn = dict(type='DCN', deform_groups=1, fallback_on_stride=False)
+        BasicBlock(64, 64, dcn=dcn)
+
+    with pytest.raises(AssertionError):
+        # Not implemented yet.
+        plugins = [
+            dict(
+                cfg=dict(type='ContextBlock', ratio=1. / 16),
+                position='after_conv3')
+        ]
+        BasicBlock(64, 64, plugins=plugins)
+
+    with pytest.raises(AssertionError):
+        # Not implemented yet
+        plugins = [
+            dict(
+                cfg=dict(
+                    type='GeneralizedAttention',
+                    spatial_range=-1,
+                    num_heads=8,
+                    attention_type='0010',
+                    kv_stride=2),
+                position='after_conv2')
+        ]
+        BasicBlock(64, 64, plugins=plugins)
+
+    # test BasicBlock structure and forward
+    block = BasicBlock(64, 64)
+    assert block.conv1.in_channels == 64
+    assert block.conv1.out_channels == 64
+    assert block.conv1.kernel_size == (3, 3)
+    assert block.conv2.in_channels == 64
+    assert block.conv2.out_channels == 64
+    assert block.conv2.kernel_size == (3, 3)
+    x = torch.randn(1, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+    # Test BasicBlock with checkpoint forward
+    block = BasicBlock(64, 64, with_cp=True)
+    assert block.with_cp
+    x = torch.randn(1, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+
+def test_resnet_bottleneck():
+    with pytest.raises(AssertionError):
+        # Style must be in ['pytorch', 'caffe']
+        Bottleneck(64, 64, style='tensorflow')
+
+    with pytest.raises(AssertionError):
+        # Allowed positions are 'after_conv1', 'after_conv2', 'after_conv3'
+        plugins = [
+            dict(
+                cfg=dict(type='ContextBlock', ratio=1. / 16),
+                position='after_conv4')
+        ]
+        Bottleneck(64, 16, plugins=plugins)
+
+    with pytest.raises(AssertionError):
+        # Need to specify different postfix to avoid duplicate plugin name
+        plugins = [
+            dict(
+                cfg=dict(type='ContextBlock', ratio=1. / 16),
+                position='after_conv3'),
+            dict(
+                cfg=dict(type='ContextBlock', ratio=1. / 16),
+                position='after_conv3')
+        ]
+        Bottleneck(64, 16, plugins=plugins)
+
+    with pytest.raises(KeyError):
+        # Plugin type is not supported
+        plugins = [dict(cfg=dict(type='WrongPlugin'), position='after_conv3')]
+        Bottleneck(64, 16, plugins=plugins)
+
+    # Test Bottleneck with checkpoint forward
+    block = Bottleneck(64, 16, with_cp=True)
+    assert block.with_cp
+    x = torch.randn(1, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+    # Test Bottleneck style
+    block = Bottleneck(64, 64, stride=2, style='pytorch')
+    assert block.conv1.stride == (1, 1)
+    assert block.conv2.stride == (2, 2)
+    block = Bottleneck(64, 64, stride=2, style='caffe')
+    assert block.conv1.stride == (2, 2)
+    assert block.conv2.stride == (1, 1)
+
+    # Test Bottleneck DCN
+    dcn = dict(type='DCN', deform_groups=1, fallback_on_stride=False)
+    with pytest.raises(AssertionError):
+        Bottleneck(64, 64, dcn=dcn, conv_cfg=dict(type='Conv'))
+    block = Bottleneck(64, 64, dcn=dcn)
+    assert isinstance(block.conv2, DeformConv2dPack)
+
+    # Test Bottleneck forward
+    block = Bottleneck(64, 16)
+    x = torch.randn(1, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+    # Test Bottleneck with 1 ContextBlock after conv3
+    plugins = [
+        dict(
+            cfg=dict(type='ContextBlock', ratio=1. / 16),
+            position='after_conv3')
+    ]
+    block = Bottleneck(64, 16, plugins=plugins)
+    assert block.context_block.in_channels == 64
+    x = torch.randn(1, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+    # Test Bottleneck with 1 GeneralizedAttention after conv2
+    plugins = [
+        dict(
+            cfg=dict(
+                type='GeneralizedAttention',
+                spatial_range=-1,
+                num_heads=8,
+                attention_type='0010',
+                kv_stride=2),
+            position='after_conv2')
+    ]
+    block = Bottleneck(64, 16, plugins=plugins)
+    assert block.gen_attention_block.in_channels == 16
+    x = torch.randn(1, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+    # Test Bottleneck with 1 GeneralizedAttention after conv2, 1 NonLocal2D
+    # after conv2, 1 ContextBlock after conv3
+    plugins = [
+        dict(
+            cfg=dict(
+                type='GeneralizedAttention',
+                spatial_range=-1,
+                num_heads=8,
+                attention_type='0010',
+                kv_stride=2),
+            position='after_conv2'),
+        dict(cfg=dict(type='NonLocal2d'), position='after_conv2'),
+        dict(
+            cfg=dict(type='ContextBlock', ratio=1. / 16),
+            position='after_conv3')
+    ]
+    block = Bottleneck(64, 16, plugins=plugins)
+    assert block.gen_attention_block.in_channels == 16
+    assert block.nonlocal_block.in_channels == 16
+    assert block.context_block.in_channels == 64
+    x = torch.randn(1, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+    # Test Bottleneck with 1 ContextBlock after conv2, 2 ContextBlock after
+    # conv3
+    plugins = [
+        dict(
+            cfg=dict(type='ContextBlock', ratio=1. / 16, postfix=1),
+            position='after_conv2'),
+        dict(
+            cfg=dict(type='ContextBlock', ratio=1. / 16, postfix=2),
+            position='after_conv3'),
+        dict(
+            cfg=dict(type='ContextBlock', ratio=1. / 16, postfix=3),
+            position='after_conv3')
+    ]
+    block = Bottleneck(64, 16, plugins=plugins)
+    assert block.context_block1.in_channels == 16
+    assert block.context_block2.in_channels == 64
+    assert block.context_block3.in_channels == 64
+    x = torch.randn(1, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+
+def test_simplified_basic_block():
+    with pytest.raises(AssertionError):
+        # Not implemented yet.
+        dcn = dict(type='DCN', deform_groups=1, fallback_on_stride=False)
+        SimplifiedBasicBlock(64, 64, dcn=dcn)
+
+    with pytest.raises(AssertionError):
+        # Not implemented yet.
+        plugins = [
+            dict(
+                cfg=dict(type='ContextBlock', ratio=1. / 16),
+                position='after_conv3')
+        ]
+        SimplifiedBasicBlock(64, 64, plugins=plugins)
+
+    with pytest.raises(AssertionError):
+        # Not implemented yet
+        plugins = [
+            dict(
+                cfg=dict(
+                    type='GeneralizedAttention',
+                    spatial_range=-1,
+                    num_heads=8,
+                    attention_type='0010',
+                    kv_stride=2),
+                position='after_conv2')
+        ]
+        SimplifiedBasicBlock(64, 64, plugins=plugins)
+
+    with pytest.raises(AssertionError):
+        # Not implemented yet
+        SimplifiedBasicBlock(64, 64, with_cp=True)
+
+    # test SimplifiedBasicBlock structure and forward
+    block = SimplifiedBasicBlock(64, 64)
+    assert block.conv1.in_channels == 64
+    assert block.conv1.out_channels == 64
+    assert block.conv1.kernel_size == (3, 3)
+    assert block.conv2.in_channels == 64
+    assert block.conv2.out_channels == 64
+    assert block.conv2.kernel_size == (3, 3)
+    x = torch.randn(1, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+    # test SimplifiedBasicBlock without norm
+    block = SimplifiedBasicBlock(64, 64, norm_cfg=None)
+    assert block.norm1 is None
+    assert block.norm2 is None
+    x_out = block(x)
+    assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+
+def test_resnet_res_layer():
+    # Test ResLayer of 3 Bottleneck w/o downsample
+    layer = ResLayer(Bottleneck, 64, 16, 3)
+    assert len(layer) == 3
+    assert layer[0].conv1.in_channels == 64
+    assert layer[0].conv1.out_channels == 16
+    for i in range(1, len(layer)):
+        assert layer[i].conv1.in_channels == 64
+        assert layer[i].conv1.out_channels == 16
+    for i in range(len(layer)):
+        assert layer[i].downsample is None
+    x = torch.randn(1, 64, 56, 56)
+    x_out = layer(x)
+    assert x_out.shape == torch.Size([1, 64, 56, 56])
+
+    # Test ResLayer of 3 Bottleneck with downsample
+    layer = ResLayer(Bottleneck, 64, 64, 3)
+    assert layer[0].downsample[0].out_channels == 256
+    for i in range(1, len(layer)):
+        assert layer[i].downsample is None
+    x = torch.randn(1, 64, 56, 56)
+    x_out = layer(x)
+    assert x_out.shape == torch.Size([1, 256, 56, 56])
+
+    # Test ResLayer of 3 Bottleneck with stride=2
+    layer = ResLayer(Bottleneck, 64, 64, 3, stride=2)
+    assert layer[0].downsample[0].out_channels == 256
+    assert layer[0].downsample[0].stride == (2, 2)
+    for i in range(1, len(layer)):
+        assert layer[i].downsample is None
+    x = torch.randn(1, 64, 56, 56)
+    x_out = layer(x)
+    assert x_out.shape == torch.Size([1, 256, 28, 28])
+
+    # Test ResLayer of 3 Bottleneck with stride=2 and average downsample
+    layer = ResLayer(Bottleneck, 64, 64, 3, stride=2, avg_down=True)
+    assert isinstance(layer[0].downsample[0], AvgPool2d)
+    assert layer[0].downsample[1].out_channels == 256
+    assert layer[0].downsample[1].stride == (1, 1)
+    for i in range(1, len(layer)):
+        assert layer[i].downsample is None
+    x = torch.randn(1, 64, 56, 56)
+    x_out = layer(x)
+    assert x_out.shape == torch.Size([1, 256, 28, 28])
+
+    # Test ResLayer of 3 BasicBlock with stride=2 and downsample_first=False
+    layer = ResLayer(BasicBlock, 64, 64, 3, stride=2, downsample_first=False)
+    assert layer[2].downsample[0].out_channels == 64
+    assert layer[2].downsample[0].stride == (2, 2)
+    for i in range(len(layer) - 1):
+        assert layer[i].downsample is None
+    x = torch.randn(1, 64, 56, 56)
+    x_out = layer(x)
+    assert x_out.shape == torch.Size([1, 64, 28, 28])
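+    # Taken together, the cases above pin down the downsample policy: a
+    # projection shortcut is only built where the shape actually changes,
+    # on the first block by default or on the last one when
+    # downsample_first=False, and avg_down swaps the strided 1x1 conv for
+    # AvgPool2d followed by a stride-1 conv.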
+
+
+def test_resnet_stem():
+    # Test default stem_channels
+    model = ResNet(50)
+    assert model.stem_channels == 64
+    assert model.conv1.out_channels == 64
+    assert model.norm1.num_features == 64
+
+    # Test default stem_channels, with base_channels=32
+    model = ResNet(50, base_channels=32)
+    assert model.stem_channels == 32
+    assert model.conv1.out_channels == 32
+    assert model.norm1.num_features == 32
+    assert model.layer1[0].conv1.in_channels == 32
+
+    # Test stem_channels=64
+    model = ResNet(50, stem_channels=64)
+    assert model.stem_channels == 64
+    assert model.conv1.out_channels == 64
+    assert model.norm1.num_features == 64
+    assert model.layer1[0].conv1.in_channels == 64
+
+    # Test stem_channels=64, with base_channels=32
+    model = ResNet(50, stem_channels=64, base_channels=32)
+    assert model.stem_channels == 64
+    assert model.conv1.out_channels == 64
+    assert model.norm1.num_features == 64
+    assert model.layer1[0].conv1.in_channels == 64
+
+    # Test stem_channels=128
+    model = ResNet(depth=50, stem_channels=128)
+    model.init_weights()
+    model.train()
+    assert model.conv1.out_channels == 128
+    assert model.layer1[0].conv1.in_channels == 128
+
+    # Test V1d stem_channels
+    model = ResNetV1d(depth=50, stem_channels=128)
+    model.init_weights()
+    model.train()
+    assert model.stem[0].out_channels == 64
+    assert model.stem[1].num_features == 64
+    assert model.stem[3].out_channels == 64
+    assert model.stem[4].num_features == 64
+    assert model.stem[6].out_channels == 128
+    assert model.stem[7].num_features == 128
+    assert model.layer1[0].conv1.in_channels == 128
+
+
+def test_resnet_backbone():
+    """Test resnet backbone."""
+    with pytest.raises(KeyError):
+        # ResNet depth should be in [18, 34, 50, 101, 152]
+        ResNet(20)
+
+    with pytest.raises(AssertionError):
+        # In ResNet: 1 <= num_stages <= 4
+        ResNet(50, num_stages=0)
+
+    with pytest.raises(AssertionError):
+        # len(stage_with_dcn) == num_stages
+        dcn = dict(type='DCN', deform_groups=1, fallback_on_stride=False)
+        ResNet(50, dcn=dcn, stage_with_dcn=(True, ))
+
+    with pytest.raises(AssertionError):
+        # len(stage_with_plugin) == num_stages
+        plugins = [
+            dict(
+                cfg=dict(type='ContextBlock', ratio=1. / 16),
+                stages=(False, True, True),
+                position='after_conv3')
+        ]
+        ResNet(50, plugins=plugins)
+
+    with pytest.raises(AssertionError):
+        # In ResNet: 1 <= num_stages <= 4
+        ResNet(50, num_stages=5)
+
+    with pytest.raises(AssertionError):
+        # len(strides) == len(dilations) == num_stages
+        ResNet(50, strides=(1, ), dilations=(1, 1), num_stages=3)
+
+    with pytest.raises(TypeError):
+        # pretrained must be a string path
+        model = ResNet(50, pretrained=0)
+        model.init_weights()
+
+    with pytest.raises(AssertionError):
+        # Style must be in ['pytorch', 'caffe']
+        ResNet(50, style='tensorflow')
+
+    # Test ResNet50 norm_eval=True
+    model = ResNet(50, norm_eval=True)
+    model.init_weights()
+    model.train()
+    assert check_norm_state(model.modules(), False)
+
+    # Test ResNet50 with torchvision pretrained weight
+    model = ResNet(
+        depth=50, norm_eval=True, pretrained='torchvision://resnet50')
+    model.init_weights()
+    model.train()
+    assert check_norm_state(model.modules(), False)
+
+    # Test ResNet50 with first stage frozen
+    frozen_stages = 1
+    model = ResNet(50, frozen_stages=frozen_stages)
+    model.init_weights()
+    model.train()
+    assert model.norm1.training is False
+    for layer in [model.conv1, model.norm1]:
+        for param in layer.parameters():
+            assert param.requires_grad is False
+    for i in range(1, frozen_stages + 1):
+        layer = getattr(model, f'layer{i}')
+        for mod in layer.modules():
+            if isinstance(mod, _BatchNorm):
+                assert mod.training is False
+        for param in layer.parameters():
+            assert param.requires_grad is False
+
+    # Test ResNet50V1d with first stage frozen
+    model = ResNetV1d(depth=50, frozen_stages=frozen_stages)
+    assert len(model.stem) == 9
+    model.init_weights()
+    model.train()
+    assert check_norm_state(model.stem, False)
+    for param in model.stem.parameters():
+        assert param.requires_grad is False
+    for i in range(1, frozen_stages + 1):
+        layer = getattr(model, f'layer{i}')
+        for mod in layer.modules():
+            if isinstance(mod, _BatchNorm):
+                assert mod.training is False
+        for param in layer.parameters():
+            assert param.requires_grad is False
+
+    # Test ResNet18 forward
+    model = ResNet(18)
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 4
+    assert feat[0].shape == torch.Size([1, 64, 56, 56])
+    assert feat[1].shape == torch.Size([1, 128, 28, 28])
+    assert feat[2].shape == torch.Size([1, 256, 14, 14])
+    assert feat[3].shape == torch.Size([1, 512, 7, 7])
+
+    # Test ResNet18 with checkpoint forward
+    model = ResNet(18, with_cp=True)
+    for m in model.modules():
+        if is_block(m):
+            assert m.with_cp
+
+    # Test ResNet50 with BatchNorm forward
+    model = ResNet(50)
+    for m in model.modules():
+        if is_norm(m):
+            assert isinstance(m, _BatchNorm)
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 4
+    assert feat[0].shape == torch.Size([1, 256, 56, 56])
+    assert feat[1].shape == torch.Size([1, 512, 28, 28])
+    assert feat[2].shape == torch.Size([1, 1024, 14, 14])
+    assert feat[3].shape == torch.Size([1, 2048, 7, 7])
+
+    # Test ResNet50 with layers 1, 2, 3 out forward
+    model = ResNet(50, out_indices=(0, 1, 2))
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 3
+    assert feat[0].shape == torch.Size([1, 256, 56, 56])
+    assert feat[1].shape == torch.Size([1, 512, 28, 28])
+    assert feat[2].shape == torch.Size([1, 1024, 14, 14])
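+    # out_indices selects among the four stage outputs, whose strides are
+    # 4, 8, 16 and 32 relative to the input; on a 224x224 image that gives
+    # exactly the 56/28/14 maps asserted for indices (0, 1, 2) above.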
with_cp=True) + for m in model.modules(): + if is_block(m): + assert m.with_cp + model.init_weights() + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + assert feat[3].shape == torch.Size([1, 2048, 7, 7]) + + # Test ResNet50 with GroupNorm forward + model = ResNet( + 50, norm_cfg=dict(type='GN', num_groups=32, requires_grad=True)) + for m in model.modules(): + if is_norm(m): + assert isinstance(m, GroupNorm) + model.init_weights() + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + assert feat[3].shape == torch.Size([1, 2048, 7, 7]) + + # Test ResNet50 with 1 GeneralizedAttention after conv2, 1 NonLocal2D + # after conv2, 1 ContextBlock after conv3 in layers 2, 3, 4 + plugins = [ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='0010', + kv_stride=2), + stages=(False, True, True, True), + position='after_conv2'), + dict(cfg=dict(type='NonLocal2d'), position='after_conv2'), + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + stages=(False, True, True, False), + position='after_conv3') + ] + model = ResNet(50, plugins=plugins) + for m in model.layer1.modules(): + if is_block(m): + assert not hasattr(m, 'context_block') + assert not hasattr(m, 'gen_attention_block') + assert m.nonlocal_block.in_channels == 64 + for m in model.layer2.modules(): + if is_block(m): + assert m.nonlocal_block.in_channels == 128 + assert m.gen_attention_block.in_channels == 128 + assert m.context_block.in_channels == 512 + + for m in model.layer3.modules(): + if is_block(m): + assert m.nonlocal_block.in_channels == 256 + assert m.gen_attention_block.in_channels == 256 + assert m.context_block.in_channels == 1024 + + for m in model.layer4.modules(): + if is_block(m): + assert m.nonlocal_block.in_channels == 512 + assert m.gen_attention_block.in_channels == 512 + assert not hasattr(m, 'context_block') + model.init_weights() + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + assert feat[3].shape == torch.Size([1, 2048, 7, 7]) + + # Test ResNet50 with 1 ContextBlock after conv2, 1 ContextBlock after + # conv3 in layers 2, 3, 4 + plugins = [ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16, postfix=1), + stages=(False, True, True, False), + position='after_conv3'), + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 16, postfix=2), + stages=(False, True, True, False), + position='after_conv3') + ] + + model = ResNet(50, plugins=plugins) + for m in model.layer1.modules(): + if is_block(m): + assert not hasattr(m, 'context_block') + assert not hasattr(m, 'context_block1') + assert not hasattr(m, 'context_block2') + for m in model.layer2.modules(): + if is_block(m): + assert not hasattr(m, 'context_block') + assert m.context_block1.in_channels == 512 + assert m.context_block2.in_channels == 512 + + for m in model.layer3.modules(): + if is_block(m): + assert not hasattr(m, 'context_block') + assert m.context_block1.in_channels == 1024 + assert m.context_block2.in_channels == 1024 + + for m in model.layer4.modules(): + if is_block(m): + assert not hasattr(m, 'context_block') + assert not hasattr(m, 'context_block1') + assert not hasattr(m, 'context_block2') + model.init_weights() + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + assert feat[3].shape == torch.Size([1, 2048, 7, 7]) + + # Test ResNet50 zero initialization of residual + model = ResNet(50, zero_init_residual=True) + model.init_weights() + for m in model.modules(): + if isinstance(m, Bottleneck): + assert assert_params_all_zeros(m.norm3) + elif isinstance(m, BasicBlock): + assert assert_params_all_zeros(m.norm2) + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + assert feat[3].shape == torch.Size([1, 2048, 7, 7]) + + # Test ResNetV1d forward + model = ResNetV1d(depth=50) + model.init_weights() + model.train() + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + assert feat[3].shape == torch.Size([1, 2048, 7, 7]) + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + assert feat[3].shape == torch.Size([1, 2048, 7, 7]) + + imgs = torch.randn(1, 3, 224, 224) + feat = model(imgs) + assert len(feat) == 4 + assert feat[0].shape == torch.Size([1, 256, 56, 56]) + assert feat[1].shape == torch.Size([1, 512, 28, 28]) + assert feat[2].shape == torch.Size([1, 1024, 14, 14]) + assert feat[3].shape == torch.Size([1, 2048, 7, 7]) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_trident_resnet.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_trident_resnet.py new file mode 100644 index 0000000000000000000000000000000000000000..ebb4415bf3c4af34f77112d75d02aba6936ef497 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/test_trident_resnet.py @@ -0,0 +1,180 @@ +import pytest +import torch + +from mmdet.models.backbones import TridentResNet +from mmdet.models.backbones.trident_resnet import TridentBottleneck + + +def test_trident_resnet_bottleneck(): + trident_dilations = (1, 2, 3) + test_branch_idx = 1 + 
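+    # test_branch_idx selects the branch used at test time (-1 would keep all
+    # branches); concat_output stacks the branch outputs along the batch dim.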
concat_output = True + trident_build_config = (trident_dilations, test_branch_idx, concat_output) + + with pytest.raises(AssertionError): + # Style must be in ['pytorch', 'caffe'] + TridentBottleneck( + *trident_build_config, inplanes=64, planes=64, style='tensorflow') + + with pytest.raises(AssertionError): + # Allowed positions are 'after_conv1', 'after_conv2', 'after_conv3' + plugins = [ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + position='after_conv4') + ] + TridentBottleneck( + *trident_build_config, inplanes=64, planes=16, plugins=plugins) + + with pytest.raises(AssertionError): + # Need to specify different postfix to avoid duplicate plugin name + plugins = [ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + position='after_conv3'), + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + position='after_conv3') + ] + TridentBottleneck( + *trident_build_config, inplanes=64, planes=16, plugins=plugins) + + with pytest.raises(KeyError): + # Plugin type is not supported + plugins = [dict(cfg=dict(type='WrongPlugin'), position='after_conv3')] + TridentBottleneck( + *trident_build_config, inplanes=64, planes=16, plugins=plugins) + + # Test Bottleneck with checkpoint forward + block = TridentBottleneck( + *trident_build_config, inplanes=64, planes=16, with_cp=True) + assert block.with_cp + x = torch.randn(1, 64, 56, 56) + x_out = block(x) + assert x_out.shape == torch.Size([block.num_branch, 64, 56, 56]) + + # Test Bottleneck style + block = TridentBottleneck( + *trident_build_config, + inplanes=64, + planes=64, + stride=2, + style='pytorch') + assert block.conv1.stride == (1, 1) + assert block.conv2.stride == (2, 2) + block = TridentBottleneck( + *trident_build_config, inplanes=64, planes=64, stride=2, style='caffe') + assert block.conv1.stride == (2, 2) + assert block.conv2.stride == (1, 1) + + # Test Bottleneck forward + block = TridentBottleneck(*trident_build_config, inplanes=64, planes=16) + x = torch.randn(1, 64, 56, 56) + x_out = block(x) + assert x_out.shape == torch.Size([block.num_branch, 64, 56, 56]) + + # Test Bottleneck with 1 ContextBlock after conv3 + plugins = [ + dict( + cfg=dict(type='ContextBlock', ratio=1. / 16), + position='after_conv3') + ] + block = TridentBottleneck( + *trident_build_config, inplanes=64, planes=16, plugins=plugins) + assert block.context_block.in_channels == 64 + x = torch.randn(1, 64, 56, 56) + x_out = block(x) + assert x_out.shape == torch.Size([block.num_branch, 64, 56, 56]) + + # Test Bottleneck with 1 GeneralizedAttention after conv2 + plugins = [ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='0010', + kv_stride=2), + position='after_conv2') + ] + block = TridentBottleneck( + *trident_build_config, inplanes=64, planes=16, plugins=plugins) + assert block.gen_attention_block.in_channels == 16 + x = torch.randn(1, 64, 56, 56) + x_out = block(x) + assert x_out.shape == torch.Size([block.num_branch, 64, 56, 56]) + + # Test Bottleneck with 1 GeneralizedAttention after conv2, 1 NonLocal2D + # after conv2, 1 ContextBlock after conv3 + plugins = [ + dict( + cfg=dict( + type='GeneralizedAttention', + spatial_range=-1, + num_heads=8, + attention_type='0010', + kv_stride=2), + position='after_conv2'), + dict(cfg=dict(type='NonLocal2d'), position='after_conv2'), + dict( + cfg=dict(type='ContextBlock', ratio=1. 
/ 16),
+            position='after_conv3')
+    ]
+    block = TridentBottleneck(
+        *trident_build_config, inplanes=64, planes=16, plugins=plugins)
+    assert block.gen_attention_block.in_channels == 16
+    assert block.nonlocal_block.in_channels == 16
+    assert block.context_block.in_channels == 64
+    x = torch.randn(1, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([block.num_branch, 64, 56, 56])
+
+    # Test Bottleneck with 1 ContextBlock after conv2, 2 ContextBlocks after
+    # conv3
+    plugins = [
+        dict(
+            cfg=dict(type='ContextBlock', ratio=1. / 16, postfix=1),
+            position='after_conv2'),
+        dict(
+            cfg=dict(type='ContextBlock', ratio=1. / 16, postfix=2),
+            position='after_conv3'),
+        dict(
+            cfg=dict(type='ContextBlock', ratio=1. / 16, postfix=3),
+            position='after_conv3')
+    ]
+    block = TridentBottleneck(
+        *trident_build_config, inplanes=64, planes=16, plugins=plugins)
+    assert block.context_block1.in_channels == 16
+    assert block.context_block2.in_channels == 64
+    assert block.context_block3.in_channels == 64
+    x = torch.randn(1, 64, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size([block.num_branch, 64, 56, 56])
+
+
+def test_trident_resnet_backbone():
+    """Test tridentresnet backbone."""
+    tridentresnet_config = dict(
+        num_branch=3,
+        test_branch_idx=1,
+        strides=(1, 2, 2),
+        dilations=(1, 1, 1),
+        trident_dilations=(1, 2, 3),
+        out_indices=(2, ),
+    )
+    with pytest.raises(AssertionError):
+        # TridentResNet depth should be in [50, 101, 152]
+        TridentResNet(18, **tridentresnet_config)
+
+    with pytest.raises(AssertionError):
+        # In TridentResNet: num_stages == 3
+        TridentResNet(50, num_stages=4, **tridentresnet_config)
+
+    model = TridentResNet(50, num_stages=3, **tridentresnet_config)
+    model.init_weights()
+    model.train()
+
+    imgs = torch.randn(1, 3, 224, 224)
+    feat = model(imgs)
+    assert len(feat) == 1
+    assert feat[0].shape == torch.Size([3, 1024, 14, 14])
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/utils.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..5314e4d8860b2b3f86808ee0e9123943b5990864
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_backbones/utils.py
@@ -0,0 +1,31 @@
+from torch.nn.modules import GroupNorm
+from torch.nn.modules.batchnorm import _BatchNorm
+
+from mmdet.models.backbones.res2net import Bottle2neck
+from mmdet.models.backbones.resnet import BasicBlock, Bottleneck
+from mmdet.models.backbones.resnext import Bottleneck as BottleneckX
+from mmdet.models.utils import SimplifiedBasicBlock
+
+
+def is_block(modules):
+    """Check whether the module is a ResNet building block."""
+    if isinstance(modules, (BasicBlock, Bottleneck, BottleneckX, Bottle2neck,
+                            SimplifiedBasicBlock)):
+        return True
+    return False
+
+
+def is_norm(modules):
+    """Check whether the module is one of the supported norm layers."""
+    if isinstance(modules, (GroupNorm, _BatchNorm)):
+        return True
+    return False
+
+
+def check_norm_state(modules, train_state):
+    """Check whether all norm layers are in the expected train state."""
+    for mod in modules:
+        if isinstance(mod, _BatchNorm):
+            if mod.training != train_state:
+                return False
+    return True
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_anchor_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_anchor_head.py
new file mode 100644
index
0000000000000000000000000000000000000000..23cb3640cb3a0ae52a4315e3a6d67e8c79308c60 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_anchor_head.py @@ -0,0 +1,69 @@ +import mmcv +import torch + +from mmdet.models.dense_heads import AnchorHead + + +def test_anchor_head_loss(): + """Tests anchor head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + + cfg = mmcv.Config( + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=0, + pos_weight=-1, + debug=False)) + self = AnchorHead(num_classes=4, in_channels=1, train_cfg=cfg) + + # Anchor head expects a multiple levels of features per image + feat = [ + torch.rand(1, 1, s // (2**(i + 2)), s // (2**(i + 2))) + for i in range(len(self.anchor_generator.strides)) + ] + cls_scores, bbox_preds = self.forward(feat) + + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there should + # be no box loss. + empty_cls_loss = sum(empty_gt_losses['loss_cls']) + empty_box_loss = sum(empty_gt_losses['loss_bbox']) + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + onegt_cls_loss = sum(one_gt_losses['loss_cls']) + onegt_box_loss = sum(one_gt_losses['loss_bbox']) + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_atss_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_atss_head.py new file mode 100644 index 0000000000000000000000000000000000000000..3757a345674230b9223e963c363fa0f2da3169e4 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_atss_head.py @@ -0,0 +1,76 @@ +import mmcv +import torch + +from mmdet.models.dense_heads import ATSSHead + + +def test_atss_head_loss(): + """Tests atss head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + train_cfg = mmcv.Config( + dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False)) + self = ATSSHead( + num_classes=4, + in_channels=1, + train_cfg=train_cfg, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + 
loss_bbox=dict(type='GIoULoss', loss_weight=2.0)) + feat = [ + torch.rand(1, 1, s // feat_size, s // feat_size) + for feat_size in [4, 8, 16, 32, 64] + ] + cls_scores, bbox_preds, centernesses = self.forward(feat) + + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, centernesses, + gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there should + # be no box loss. + empty_cls_loss = sum(empty_gt_losses['loss_cls']) + empty_box_loss = sum(empty_gt_losses['loss_bbox']) + empty_centerness_loss = sum(empty_gt_losses['loss_centerness']) + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + assert empty_centerness_loss.item() == 0, ( + 'there should be no centerness loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(cls_scores, bbox_preds, centernesses, gt_bboxes, + gt_labels, img_metas, gt_bboxes_ignore) + onegt_cls_loss = sum(one_gt_losses['loss_cls']) + onegt_box_loss = sum(one_gt_losses['loss_bbox']) + onegt_centerness_loss = sum(one_gt_losses['loss_centerness']) + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' + assert onegt_centerness_loss.item() > 0, ( + 'centerness loss should be non-zero') diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_autoassign_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_autoassign_head.py new file mode 100644 index 0000000000000000000000000000000000000000..ebcf6fed1d6520b29d4482611436129002c026c0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_autoassign_head.py @@ -0,0 +1,91 @@ +import mmcv +import torch + +from mmdet.models.dense_heads.autoassign_head import AutoAssignHead +from mmdet.models.dense_heads.paa_head import levels_to_images + + +def test_autoassign_head_loss(): + """Tests autoassign head loss when truth is empty and non-empty.""" + + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + train_cfg = mmcv.Config( + dict(assigner=None, allowed_border=-1, pos_weight=-1, debug=False)) + self = AutoAssignHead( + num_classes=4, + in_channels=1, + train_cfg=train_cfg, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.3)) + feat = [ + torch.rand(1, 1, s // feat_size, s // feat_size) + for feat_size in [4, 8, 16, 32, 64] + ] + self.init_weights() + cls_scores, bbox_preds, objectnesses = self(feat) + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, objectnesses, + gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there should + # be no box loss. 
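+    # AutoAssign reports a positive, a negative and a center-prior term instead
+    # of plain cls/box losses; with no gt boxes only the negative term can fire.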
+    empty_pos_loss = empty_gt_losses['loss_pos']
+    empty_neg_loss = empty_gt_losses['loss_neg']
+    empty_center_loss = empty_gt_losses['loss_center']
+    assert empty_neg_loss.item() > 0, 'neg loss should be non-zero'
+    assert empty_pos_loss.item() == 0, (
+        'there should be no pos loss when there are no true boxes')
+    assert empty_center_loss.item() == 0, (
+        'there should be no center loss when there are no true boxes')
+
+    # When truth is non-empty then both cls and box loss should be nonzero for
+    # random inputs
+    gt_bboxes = [
+        torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]),
+    ]
+    gt_labels = [torch.LongTensor([2])]
+    one_gt_losses = self.loss(cls_scores, bbox_preds, objectnesses, gt_bboxes,
+                              gt_labels, img_metas, gt_bboxes_ignore)
+    onegt_pos_loss = one_gt_losses['loss_pos']
+    onegt_neg_loss = one_gt_losses['loss_neg']
+    onegt_center_loss = one_gt_losses['loss_center']
+    assert onegt_pos_loss.item() > 0, 'pos loss should be non-zero'
+    assert onegt_neg_loss.item() > 0, 'neg loss should be non-zero'
+    assert onegt_center_loss.item() > 0, 'center loss should be non-zero'
+    n, c, h, w = 10, 4, 20, 20
+    mlvl_tensor = [torch.ones(n, c, h, w) for i in range(5)]
+    results = levels_to_images(mlvl_tensor)
+    assert len(results) == n
+    assert results[0].size() == (h * w * 5, c)
+    cls_scores = [torch.ones(2, 4, 5, 5)]
+    bbox_preds = [torch.ones(2, 4, 5, 5)]
+    iou_preds = [torch.ones(2, 1, 5, 5)]
+    mlvl_anchors = [torch.ones(5 * 5, 2)]
+    img_shape = None
+    scale_factor = [0.5, 0.5]
+    cfg = mmcv.Config(
+        dict(
+            nms_pre=1000,
+            min_bbox_size=0,
+            score_thr=0.05,
+            nms=dict(type='nms', iou_threshold=0.6),
+            max_per_img=100))
+    rescale = False
+    self._get_bboxes(
+        cls_scores,
+        bbox_preds,
+        iou_preds,
+        mlvl_anchors,
+        img_shape,
+        scale_factor,
+        cfg,
+        rescale=rescale)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_centernet_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_centernet_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..2ecb6184ceba62e3272da0f2e7c9c6568fc5313e
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_centernet_head.py
@@ -0,0 +1,106 @@
+import numpy as np
+import torch
+from mmcv import ConfigDict
+
+from mmdet.models.dense_heads import CenterNetHead
+
+
+def test_center_head_loss():
+    """Tests center head loss when truth is empty and non-empty."""
+    s = 256
+    img_metas = [{
+        'img_shape': (s, s, 3),
+        'scale_factor': 1,
+        'pad_shape': (s, s, 3)
+    }]
+    test_cfg = dict(topK=100, max_per_img=100)
+    self = CenterNetHead(
+        num_classes=4, in_channel=1, feat_channel=4, test_cfg=test_cfg)
+
+    feat = [torch.rand(1, 1, s, s)]
+    center_out, wh_out, offset_out = self.forward(feat)
+    # Test that empty ground truth encourages the network to predict background
+    gt_bboxes = [torch.empty((0, 4))]
+    gt_labels = [torch.LongTensor([])]
+
+    gt_bboxes_ignore = None
+    empty_gt_losses = self.loss(center_out, wh_out, offset_out, gt_bboxes,
+                                gt_labels, img_metas, gt_bboxes_ignore)
+    loss_center = empty_gt_losses['loss_center_heatmap']
+    loss_wh = empty_gt_losses['loss_wh']
+    loss_offset = empty_gt_losses['loss_offset']
+    assert loss_center.item() > 0, 'loss_center should be non-zero'
+    assert loss_wh.item() == 0, (
+        'there should be no loss_wh when there are no true boxes')
+    assert loss_offset.item() == 0, (
+        'there should be no loss_offset when there are no true boxes')
+
+    # When truth is non-empty then both cls
and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(center_out, wh_out, offset_out, gt_bboxes, + gt_labels, img_metas, gt_bboxes_ignore) + loss_center = one_gt_losses['loss_center_heatmap'] + loss_wh = one_gt_losses['loss_wh'] + loss_offset = one_gt_losses['loss_offset'] + assert loss_center.item() > 0, 'loss_center should be non-zero' + assert loss_wh.item() > 0, 'loss_wh should be non-zero' + assert loss_offset.item() > 0, 'loss_offset should be non-zero' + + +def test_centernet_head_get_bboxes(): + """Tests center head generating and decoding the heatmap.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': np.array([1., 1., 1., 1.]), + 'pad_shape': (s, s, 3), + 'batch_input_shape': (s, s), + 'border': (0, 0, 0, 0), + 'flip': False + }] + test_cfg = ConfigDict( + dict(topk=100, local_maximum_kernel=3, max_per_img=100)) + gt_bboxes = [ + torch.Tensor([[10, 20, 200, 240], [40, 50, 100, 200], + [10, 20, 100, 240]]) + ] + gt_labels = [torch.LongTensor([1, 1, 2])] + + self = CenterNetHead( + num_classes=4, in_channel=1, feat_channel=4, test_cfg=test_cfg) + self.feat_shape = (1, 1, s // 4, s // 4) + targets, _ = self.get_targets(gt_bboxes, gt_labels, self.feat_shape, + img_metas[0]['pad_shape']) + center_target = targets['center_heatmap_target'] + wh_target = targets['wh_target'] + offset_target = targets['offset_target'] + # make sure assign target right + for i in range(len(gt_bboxes[0])): + bbox, label = gt_bboxes[0][i] / 4, gt_labels[0][i] + ctx, cty = sum(bbox[0::2]) / 2, sum(bbox[1::2]) / 2 + int_ctx, int_cty = int(sum(bbox[0::2]) / 2), int(sum(bbox[1::2]) / 2) + w, h = bbox[2] - bbox[0], bbox[3] - bbox[1] + x_off = ctx - int(ctx) + y_off = cty - int(cty) + assert center_target[0, label, int_cty, int_ctx] == 1 + assert wh_target[0, 0, int_cty, int_ctx] == w + assert wh_target[0, 1, int_cty, int_ctx] == h + assert offset_target[0, 0, int_cty, int_ctx] == x_off + assert offset_target[0, 1, int_cty, int_ctx] == y_off + # make sure get_bboxes is right + detections = self.get_bboxes([center_target], [wh_target], [offset_target], + img_metas, + rescale=True, + with_nms=False) + out_bboxes = detections[0][0][:3] + out_clses = detections[0][1][:3] + for bbox, cls in zip(out_bboxes, out_clses): + flag = False + for gt_bbox, gt_cls in zip(gt_bboxes[0], gt_labels[0]): + if (bbox[:4] == gt_bbox[:4]).all(): + flag = True + assert flag, 'get_bboxes is wrong' diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_corner_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_corner_head.py new file mode 100644 index 0000000000000000000000000000000000000000..91d1c218309f3467801c033b230db9dad49bba46 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_corner_head.py @@ -0,0 +1,166 @@ +import torch + +from mmdet.core.evaluation.bbox_overlaps import bbox_overlaps +from mmdet.models.dense_heads import CornerHead + + +def test_corner_head_loss(): + """Tests corner head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + + self = CornerHead(num_classes=4, in_channels=1) + + # Corner head expects a multiple levels of features per image + feat = [ + torch.rand(1, 1, s // 4, s // 4) for _ in range(self.num_feat_levels) + 
] + tl_heats, br_heats, tl_embs, br_embs, tl_offs, br_offs = self.forward(feat) + + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + + gt_bboxes_ignore = None + empty_gt_losses = self.loss(tl_heats, br_heats, tl_embs, br_embs, tl_offs, + br_offs, gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + empty_det_loss = sum(empty_gt_losses['det_loss']) + empty_push_loss = sum(empty_gt_losses['push_loss']) + empty_pull_loss = sum(empty_gt_losses['pull_loss']) + empty_off_loss = sum(empty_gt_losses['off_loss']) + assert empty_det_loss.item() > 0, 'det loss should be non-zero' + assert empty_push_loss.item() == 0, ( + 'there should be no push loss when there are no true boxes') + assert empty_pull_loss.item() == 0, ( + 'there should be no pull loss when there are no true boxes') + assert empty_off_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(tl_heats, br_heats, tl_embs, br_embs, tl_offs, + br_offs, gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + onegt_det_loss = sum(one_gt_losses['det_loss']) + onegt_push_loss = sum(one_gt_losses['push_loss']) + onegt_pull_loss = sum(one_gt_losses['pull_loss']) + onegt_off_loss = sum(one_gt_losses['off_loss']) + assert onegt_det_loss.item() > 0, 'det loss should be non-zero' + assert onegt_push_loss.item() == 0, ( + 'there should be no push loss when there are only one true box') + assert onegt_pull_loss.item() > 0, 'pull loss should be non-zero' + assert onegt_off_loss.item() > 0, 'off loss should be non-zero' + + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874], + [123.6667, 123.8757, 138.6326, 251.8874]]), + ] + gt_labels = [torch.LongTensor([2, 3])] + + # equalize the corners' embedding value of different objects to make the + # push_loss larger than 0 + gt_bboxes_ind = (gt_bboxes[0] // 4).int().tolist() + for tl_emb_feat, br_emb_feat in zip(tl_embs, br_embs): + tl_emb_feat[:, :, gt_bboxes_ind[0][1], + gt_bboxes_ind[0][0]] = tl_emb_feat[:, :, + gt_bboxes_ind[1][1], + gt_bboxes_ind[1][0]] + br_emb_feat[:, :, gt_bboxes_ind[0][3], + gt_bboxes_ind[0][2]] = br_emb_feat[:, :, + gt_bboxes_ind[1][3], + gt_bboxes_ind[1][2]] + + two_gt_losses = self.loss(tl_heats, br_heats, tl_embs, br_embs, tl_offs, + br_offs, gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + twogt_det_loss = sum(two_gt_losses['det_loss']) + twogt_push_loss = sum(two_gt_losses['push_loss']) + twogt_pull_loss = sum(two_gt_losses['pull_loss']) + twogt_off_loss = sum(two_gt_losses['off_loss']) + assert twogt_det_loss.item() > 0, 'det loss should be non-zero' + assert twogt_push_loss.item() > 0, 'push loss should be non-zero' + assert twogt_pull_loss.item() > 0, 'pull loss should be non-zero' + assert twogt_off_loss.item() > 0, 'off loss should be non-zero' + + +def test_corner_head_encode_and_decode_heatmap(): + """Tests corner head generating and decoding the heatmap.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3), + 'border': (0, 0, 0, 0) + }] + + gt_bboxes = [ + torch.Tensor([[10, 20, 200, 240], [40, 50, 100, 200], + [10, 20, 200, 240]]) + ] + gt_labels = [torch.LongTensor([1, 1, 2])] + + self = CornerHead(num_classes=4, in_channels=1, 
corner_emb_channels=1) + + feat = [ + torch.rand(1, 1, s // 4, s // 4) for _ in range(self.num_feat_levels) + ] + + targets = self.get_targets( + gt_bboxes, + gt_labels, + feat[0].shape, + img_metas[0]['pad_shape'], + with_corner_emb=self.with_corner_emb) + + gt_tl_heatmap = targets['topleft_heatmap'] + gt_br_heatmap = targets['bottomright_heatmap'] + gt_tl_offset = targets['topleft_offset'] + gt_br_offset = targets['bottomright_offset'] + embedding = targets['corner_embedding'] + [top, left], [bottom, right] = embedding[0][0] + gt_tl_embedding_heatmap = torch.zeros([1, 1, s // 4, s // 4]) + gt_br_embedding_heatmap = torch.zeros([1, 1, s // 4, s // 4]) + gt_tl_embedding_heatmap[0, 0, top, left] = 1 + gt_br_embedding_heatmap[0, 0, bottom, right] = 1 + + batch_bboxes, batch_scores, batch_clses = self.decode_heatmap( + tl_heat=gt_tl_heatmap, + br_heat=gt_br_heatmap, + tl_off=gt_tl_offset, + br_off=gt_br_offset, + tl_emb=gt_tl_embedding_heatmap, + br_emb=gt_br_embedding_heatmap, + img_meta=img_metas[0], + k=100, + kernel=3, + distance_threshold=0.5) + + bboxes = batch_bboxes.view(-1, 4) + scores = batch_scores.view(-1, 1) + clses = batch_clses.view(-1, 1) + + idx = scores.argsort(dim=0, descending=True) + bboxes = bboxes[idx].view(-1, 4) + scores = scores[idx].view(-1) + clses = clses[idx].view(-1) + + valid_bboxes = bboxes[torch.where(scores > 0.05)] + valid_labels = clses[torch.where(scores > 0.05)] + max_coordinate = valid_bboxes.max() + offsets = valid_labels.to(valid_bboxes) * (max_coordinate + 1) + gt_offsets = gt_labels[0].to(gt_bboxes[0]) * (max_coordinate + 1) + + offset_bboxes = valid_bboxes + offsets[:, None] + offset_gtbboxes = gt_bboxes[0] + gt_offsets[:, None] + + iou_matrix = bbox_overlaps(offset_bboxes.numpy(), offset_gtbboxes.numpy()) + assert (iou_matrix == 1).sum() == 3 diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_dense_heads_attr.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_dense_heads_attr.py new file mode 100644 index 0000000000000000000000000000000000000000..f6be7f15272583a6be5a6827bf4e4d4c1ed14e56 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_dense_heads_attr.py @@ -0,0 +1,43 @@ +import warnings + +from terminaltables import AsciiTable + +from mmdet.models import dense_heads +from mmdet.models.dense_heads import * # noqa: F401,F403 + + +def test_dense_heads_test_attr(): + """Tests inference methods such as simple_test and aug_test.""" + # make list of dense heads + exceptions = ['FeatureAdaption'] # module used in head + all_dense_heads = [m for m in dense_heads.__all__ if m not in exceptions] + + # search attributes + check_attributes = [ + 'simple_test', 'aug_test', 'simple_test_bboxes', 'simple_test_rpn', + 'aug_test_rpn' + ] + table_header = ['head name'] + check_attributes + table_data = [table_header] + not_found = {k: [] for k in check_attributes} + for target_head_name in all_dense_heads: + target_head = globals()[target_head_name] + target_head_attributes = dir(target_head) + check_results = [target_head_name] + for check_attribute in check_attributes: + found = check_attribute in target_head_attributes + check_results.append(found) + if not found: + not_found[check_attribute].append(target_head_name) + table_data.append(check_results) + table = AsciiTable(table_data) + print() + print(table.table) + + # NOTE: this test just checks attributes. + # simple_test of RPN heads will not work now. 
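+    # simple_test is mandatory for every dense head, while a missing aug_test
+    # only triggers the warning below.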
+ assert len(not_found['simple_test']) == 0, \ + f'simple_test not found in {not_found["simple_test"]}' + if len(not_found['aug_test']) != 0: + warnings.warn(f'aug_test not found in {not_found["aug_test"]}. ' + 'Please implement it or raise NotImplementedError.') diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_detr_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_detr_head.py new file mode 100644 index 0000000000000000000000000000000000000000..51f97d48363db50c11cd690183ecbef0a5bcfed8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_detr_head.py @@ -0,0 +1,103 @@ +import torch +from mmcv import ConfigDict + +from mmdet.models.dense_heads import DETRHead + + +def test_detr_head_loss(): + """Tests transformer head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3), + 'batch_input_shape': (s, s) + }] + config = ConfigDict( + dict( + type='DETRHead', + num_classes=80, + in_channels=200, + transformer=dict( + type='Transformer', + encoder=dict( + type='DetrTransformerEncoder', + num_layers=6, + transformerlayers=dict( + type='BaseTransformerLayer', + attn_cfgs=[ + dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + dropout=0.1) + ], + feedforward_channels=2048, + ffn_dropout=0.1, + operation_order=('self_attn', 'norm', 'ffn', 'norm'))), + decoder=dict( + type='DetrTransformerDecoder', + return_intermediate=True, + num_layers=6, + transformerlayers=dict( + type='DetrTransformerDecoderLayer', + attn_cfgs=dict( + type='MultiheadAttention', + embed_dims=256, + num_heads=8, + dropout=0.1), + feedforward_channels=2048, + ffn_dropout=0.1, + operation_order=('self_attn', 'norm', 'cross_attn', + 'norm', 'ffn', 'norm')), + )), + positional_encoding=dict( + type='SinePositionalEncoding', num_feats=128, normalize=True), + loss_cls=dict( + type='CrossEntropyLoss', + bg_cls_weight=0.1, + use_sigmoid=False, + loss_weight=1.0, + class_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=5.0), + loss_iou=dict(type='GIoULoss', loss_weight=2.0))) + + self = DETRHead(**config) + self.init_weights() + feat = [torch.rand(1, 200, 10, 10)] + cls_scores, bbox_preds = self.forward(feat, img_metas) + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there should + # be no box loss. 
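+    # DETRHead also returns auxiliary losses from the intermediate decoder
+    # layers (e.g. 'd0.loss_cls'), hence the substring matching on key names.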
+ for key, loss in empty_gt_losses.items(): + if 'cls' in key: + assert loss.item() > 0, 'cls loss should be non-zero' + elif 'bbox' in key: + assert loss.item( + ) == 0, 'there should be no box loss when there are no true boxes' + elif 'iou' in key: + assert loss.item( + ) == 0, 'there should be no iou loss when there are no true boxes' + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + for loss in one_gt_losses.values(): + assert loss.item( + ) > 0, 'cls loss, or box loss, or iou loss should be non-zero' + + # test forward_train + self.forward_train(feat, img_metas, gt_bboxes, gt_labels) + + # test inference mode + self.get_bboxes(cls_scores, bbox_preds, img_metas, rescale=True) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_fcos_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_fcos_head.py new file mode 100644 index 0000000000000000000000000000000000000000..663e81514a6239021da0cdc74e40192e7cea984b --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_fcos_head.py @@ -0,0 +1,63 @@ +import mmcv +import torch + +from mmdet.models.dense_heads import FCOSHead + + +def test_fcos_head_loss(): + """Tests fcos head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + train_cfg = mmcv.Config( + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) + # since Focal Loss is not supported on CPU + self = FCOSHead( + num_classes=4, + in_channels=1, + train_cfg=train_cfg, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)) + feat = [ + torch.rand(1, 1, s // feat_size, s // feat_size) + for feat_size in [4, 8, 16, 32, 64] + ] + cls_scores, bbox_preds, centerness = self.forward(feat) + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, centerness, gt_bboxes, + gt_labels, img_metas, gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there should + # be no box loss. 
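+    # FCOS only regresses boxes at positive locations, so box and centerness
+    # losses vanish when no location is assigned to a gt.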
+ empty_cls_loss = empty_gt_losses['loss_cls'] + empty_box_loss = empty_gt_losses['loss_bbox'] + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(cls_scores, bbox_preds, centerness, gt_bboxes, + gt_labels, img_metas, gt_bboxes_ignore) + onegt_cls_loss = one_gt_losses['loss_cls'] + onegt_box_loss = one_gt_losses['loss_bbox'] + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_fsaf_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_fsaf_head.py new file mode 100644 index 0000000000000000000000000000000000000000..1d85937f88675830a937dc623a6ee08b2f07dd7c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_fsaf_head.py @@ -0,0 +1,81 @@ +import mmcv +import torch + +from mmdet.models.dense_heads import FSAFHead + + +def test_fsaf_head_loss(): + """Tests anchor head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + + cfg = dict( + reg_decoded_bbox=True, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=1, + scales_per_octave=1, + ratios=[1.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict(type='TBLRBBoxCoder', normalizer=4.0), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0, + reduction='none'), + loss_bbox=dict( + type='IoULoss', eps=1e-6, loss_weight=1.0, reduction='none')) + + train_cfg = mmcv.Config( + dict( + assigner=dict( + type='CenterRegionAssigner', + pos_scale=0.2, + neg_scale=0.2, + min_pos_iof=0.01), + allowed_border=-1, + pos_weight=-1, + debug=False)) + head = FSAFHead(num_classes=4, in_channels=1, train_cfg=train_cfg, **cfg) + if torch.cuda.is_available(): + head.cuda() + # FSAF head expects a multiple levels of features per image + feat = [ + torch.rand(1, 1, s // (2**(i + 2)), s // (2**(i + 2))).cuda() + for i in range(len(head.anchor_generator.strides)) + ] + cls_scores, bbox_preds = head.forward(feat) + gt_bboxes_ignore = None + + # When truth is non-empty then both cls and box loss should be nonzero + # for random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]).cuda(), + ] + gt_labels = [torch.LongTensor([2]).cuda()] + one_gt_losses = head.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + onegt_cls_loss = sum(one_gt_losses['loss_cls']) + onegt_box_loss = sum(one_gt_losses['loss_bbox']) + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' + + # Test that empty ground truth encourages the network to predict bkg + gt_bboxes = [torch.empty((0, 4)).cuda()] + gt_labels = [torch.LongTensor([]).cuda()] + + empty_gt_losses = head.loss(cls_scores, bbox_preds, gt_bboxes, + gt_labels, img_metas, gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there + # should be no box loss. 
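+    # Losses are returned per feature level; sum them before the scalar checks.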
+ empty_cls_loss = sum(empty_gt_losses['loss_cls']) + empty_box_loss = sum(empty_gt_losses['loss_bbox']) + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_ga_anchor_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_ga_anchor_head.py new file mode 100644 index 0000000000000000000000000000000000000000..4da346d35ea68e3100e0fd6a01183cf372a5dc21 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_ga_anchor_head.py @@ -0,0 +1,90 @@ +import mmcv +import torch + +from mmdet.models.dense_heads import GuidedAnchorHead + + +def test_ga_anchor_head_loss(): + """Tests anchor head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + + cfg = mmcv.Config( + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + ga_assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + ignore_iof_thr=-1), + ga_sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + allowed_border=-1, + center_ratio=0.2, + ignore_ratio=0.5, + pos_weight=-1, + debug=False)) + head = GuidedAnchorHead(num_classes=4, in_channels=4, train_cfg=cfg) + + # Anchor head expects a multiple levels of features per image + if torch.cuda.is_available(): + head.cuda() + feat = [ + torch.rand(1, 4, s // (2**(i + 2)), s // (2**(i + 2))).cuda() + for i in range(len(head.approx_anchor_generator.base_anchors)) + ] + cls_scores, bbox_preds, shape_preds, loc_preds = head.forward(feat) + + # Test that empty ground truth encourages the network to predict + # background + gt_bboxes = [torch.empty((0, 4)).cuda()] + gt_labels = [torch.LongTensor([]).cuda()] + + gt_bboxes_ignore = None + + empty_gt_losses = head.loss(cls_scores, bbox_preds, shape_preds, + loc_preds, gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + + # When there is no truth, the cls loss should be nonzero but there + # should be no box loss. 
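+    # GuidedAnchorHead additionally returns loss_shape and loss_loc for the
+    # anchor shape/location branches; only cls/bbox are checked here.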
+ empty_cls_loss = sum(empty_gt_losses['loss_cls']) + empty_box_loss = sum(empty_gt_losses['loss_bbox']) + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero + # for random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]).cuda(), + ] + gt_labels = [torch.LongTensor([2]).cuda()] + one_gt_losses = head.loss(cls_scores, bbox_preds, shape_preds, + loc_preds, gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + onegt_cls_loss = sum(one_gt_losses['loss_cls']) + onegt_box_loss = sum(one_gt_losses['loss_bbox']) + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_gfl_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_gfl_head.py new file mode 100644 index 0000000000000000000000000000000000000000..b035a579495820345bb61f20e77e4eb696f9eb67 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_gfl_head.py @@ -0,0 +1,73 @@ +import mmcv +import torch + +from mmdet.models.dense_heads import GFLHead + + +def test_gfl_head_loss(): + """Tests gfl head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + train_cfg = mmcv.Config( + dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False)) + self = GFLHead( + num_classes=4, + in_channels=1, + train_cfg=train_cfg, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128]), + loss_cls=dict( + type='QualityFocalLoss', + use_sigmoid=True, + beta=2.0, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=2.0)) + feat = [ + torch.rand(1, 1, s // feat_size, s // feat_size) + for feat_size in [4, 8, 16, 32, 64] + ] + cls_scores, bbox_preds = self.forward(feat) + + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there should + # be no box loss. 
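+    # GFL adds a distribution focal loss (loss_dfl) over the discretized box
+    # distribution; like the box loss it only applies to positive anchors.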
+ empty_cls_loss = sum(empty_gt_losses['loss_cls']) + empty_box_loss = sum(empty_gt_losses['loss_bbox']) + empty_dfl_loss = sum(empty_gt_losses['loss_dfl']) + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + assert empty_dfl_loss.item() == 0, ( + 'there should be no dfl loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + onegt_cls_loss = sum(one_gt_losses['loss_cls']) + onegt_box_loss = sum(one_gt_losses['loss_bbox']) + onegt_dfl_loss = sum(one_gt_losses['loss_dfl']) + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' + assert onegt_dfl_loss.item() > 0, 'dfl loss should be non-zero' diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_ld_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_ld_head.py new file mode 100644 index 0000000000000000000000000000000000000000..6a7541adc762a9e0bbb367ceb56bc74fb2ae8789 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_ld_head.py @@ -0,0 +1,120 @@ +import mmcv +import torch + +from mmdet.models.dense_heads import GFLHead, LDHead + + +def test_ld_head_loss(): + """Tests vfnet head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + train_cfg = mmcv.Config( + dict( + assigner=dict(type='ATSSAssigner', topk=9, ignore_iof_thr=0.1), + allowed_border=-1, + pos_weight=-1, + debug=False)) + + self = LDHead( + num_classes=4, + in_channels=1, + train_cfg=train_cfg, + loss_ld=dict(type='KnowledgeDistillationKLDivLoss', loss_weight=1.0), + loss_cls=dict( + type='QualityFocalLoss', + use_sigmoid=True, + beta=2.0, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=2.0), + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128])) + + teacher_model = GFLHead( + num_classes=4, + in_channels=1, + train_cfg=train_cfg, + loss_cls=dict( + type='QualityFocalLoss', + use_sigmoid=True, + beta=2.0, + loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=2.0), + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + octave_base_scale=8, + scales_per_octave=1, + strides=[8, 16, 32, 64, 128])) + + feat = [ + torch.rand(1, 1, s // feat_size, s // feat_size) + for feat_size in [4, 8, 16, 32, 64] + ] + cls_scores, bbox_preds = self.forward(feat) + rand_soft_target = teacher_model.forward(feat)[1] + + # Test that empty ground truth encourages the network to predict + # background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + gt_bboxes_ignore = None + + empty_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + rand_soft_target, img_metas, gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero, ld loss should + # be non-negative but there should be no box loss. 
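+    # The ld term distills the teacher's soft bbox targets, so it can be zero
+    # or positive even when there are no gt boxes.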
+ empty_cls_loss = sum(empty_gt_losses['loss_cls']) + empty_box_loss = sum(empty_gt_losses['loss_bbox']) + empty_ld_loss = sum(empty_gt_losses['loss_ld']) + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + assert empty_ld_loss.item() >= 0, 'ld loss should be non-negative' + + # When truth is non-empty then both cls and box loss should be nonzero + # for random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + rand_soft_target, img_metas, gt_bboxes_ignore) + onegt_cls_loss = sum(one_gt_losses['loss_cls']) + onegt_box_loss = sum(one_gt_losses['loss_bbox']) + + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' + + gt_bboxes_ignore = gt_bboxes + + # When truth is non-empty but ignored then the cls loss should be nonzero, + # but there should be no box loss. + ignore_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + rand_soft_target, img_metas, gt_bboxes_ignore) + ignore_cls_loss = sum(ignore_gt_losses['loss_cls']) + ignore_box_loss = sum(ignore_gt_losses['loss_bbox']) + + assert ignore_cls_loss.item() > 0, 'cls loss should be non-zero' + assert ignore_box_loss.item() == 0, 'gt bbox ignored loss should be zero' + + # When truth is non-empty and not ignored then both cls and box loss should + # be nonzero for random inputs + gt_bboxes_ignore = [torch.randn(1, 4)] + + not_ignore_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, + gt_labels, rand_soft_target, img_metas, + gt_bboxes_ignore) + not_ignore_cls_loss = sum(not_ignore_gt_losses['loss_cls']) + not_ignore_box_loss = sum(not_ignore_gt_losses['loss_bbox']) + + assert not_ignore_cls_loss.item() > 0, 'cls loss should be non-zero' + assert not_ignore_box_loss.item( + ) > 0, 'gt bbox not ignored loss should be non-zero' diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_paa_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_paa_head.py new file mode 100644 index 0000000000000000000000000000000000000000..224e9491e007610530f2555cf55fc134a7e5e5d7 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_paa_head.py @@ -0,0 +1,122 @@ +import mmcv +import numpy as np +import torch + +from mmdet.models.dense_heads import PAAHead, paa_head +from mmdet.models.dense_heads.paa_head import levels_to_images + + +def test_paa_head_loss(): + """Tests paa head loss when truth is empty and non-empty.""" + + class mock_skm: + + def GaussianMixture(self, *args, **kwargs): + return self + + def fit(self, loss): + pass + + def predict(self, loss): + components = np.zeros_like(loss, dtype=np.long) + return components.reshape(-1) + + def score_samples(self, loss): + scores = np.random.random(len(loss)) + return scores + + paa_head.skm = mock_skm() + + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + train_cfg = mmcv.Config( + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.1, + neg_iou_thr=0.1, + min_pos_iou=0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) + # since Focal Loss is not supported on CPU + self = PAAHead( + num_classes=4, + in_channels=1, + 
train_cfg=train_cfg, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + loss_bbox=dict(type='GIoULoss', loss_weight=1.3), + loss_centerness=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=0.5)) + feat = [ + torch.rand(1, 1, s // feat_size, s // feat_size) + for feat_size in [4, 8, 16, 32, 64] + ] + self.init_weights() + cls_scores, bbox_preds, iou_preds = self(feat) + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, iou_preds, gt_bboxes, + gt_labels, img_metas, gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there should + # be no box loss. + empty_cls_loss = empty_gt_losses['loss_cls'] + empty_box_loss = empty_gt_losses['loss_bbox'] + empty_iou_loss = empty_gt_losses['loss_iou'] + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + assert empty_iou_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(cls_scores, bbox_preds, iou_preds, gt_bboxes, + gt_labels, img_metas, gt_bboxes_ignore) + onegt_cls_loss = one_gt_losses['loss_cls'] + onegt_box_loss = one_gt_losses['loss_bbox'] + onegt_iou_loss = one_gt_losses['loss_iou'] + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' + assert onegt_iou_loss.item() > 0, 'box loss should be non-zero' + n, c, h, w = 10, 4, 20, 20 + mlvl_tensor = [torch.ones(n, c, h, w) for i in range(5)] + results = levels_to_images(mlvl_tensor) + assert len(results) == n + assert results[0].size() == (h * w * 5, c) + assert self.with_score_voting + cls_scores = [torch.ones(2, 4, 5, 5)] + bbox_preds = [torch.ones(2, 4, 5, 5)] + iou_preds = [torch.ones(2, 1, 5, 5)] + mlvl_anchors = [torch.ones(2, 5 * 5, 4)] + img_shape = None + scale_factor = [0.5, 0.5] + cfg = mmcv.Config( + dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.6), + max_per_img=100)) + rescale = False + self._get_bboxes( + cls_scores, + bbox_preds, + iou_preds, + mlvl_anchors, + img_shape, + scale_factor, + cfg, + rescale=rescale) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_pisa_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_pisa_head.py new file mode 100644 index 0000000000000000000000000000000000000000..6b1d42db49c498aca59b154b18d59794749643bf --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_pisa_head.py @@ -0,0 +1,244 @@ +import mmcv +import torch + +from mmdet.models.dense_heads import PISARetinaHead, PISASSDHead +from mmdet.models.roi_heads import PISARoIHead + + +def test_pisa_retinanet_head_loss(): + """Tests pisa retinanet head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + + cfg = mmcv.Config( + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + 
min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='RandomSampler', + num=256, + pos_fraction=0.5, + neg_pos_ub=-1, + add_gt_as_proposals=False), + isr=dict(k=2., bias=0.), + carl=dict(k=1., bias=0.2), + allowed_border=0, + pos_weight=-1, + debug=False)) + self = PISARetinaHead(num_classes=4, in_channels=1, train_cfg=cfg) + + # Anchor head expects multiple levels of features per image + feat = [ + torch.rand(1, 1, s // (2**(i + 2)), s // (2**(i + 2))) + for i in range(len(self.anchor_generator.strides)) + ] + cls_scores, bbox_preds = self.forward(feat) + + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there should + # be no box loss. + empty_cls_loss = empty_gt_losses['loss_cls'].sum() + empty_box_loss = empty_gt_losses['loss_bbox'].sum() + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + onegt_cls_loss = one_gt_losses['loss_cls'].sum() + onegt_box_loss = one_gt_losses['loss_bbox'].sum() + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' + + +def test_pisa_ssd_head_loss(): + """Tests pisa ssd head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + + cfg = mmcv.Config( + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.5, + min_pos_iou=0., + ignore_iof_thr=-1, + gt_max_assign_all=False), + isr=dict(k=2., bias=0.), + carl=dict(k=1., bias=0.2), + smoothl1_beta=1., + allowed_border=-1, + pos_weight=-1, + neg_pos_ratio=3, + debug=False)) + ssd_anchor_generator = dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=300, + strides=[1], + ratios=([2], ), + basesize_ratio_range=(0.15, 0.9)) + self = PISASSDHead( + num_classes=4, + in_channels=(1, ), + train_cfg=cfg, + anchor_generator=ssd_anchor_generator) + + # Anchor head expects multiple levels of features per image + feat = [ + torch.rand(1, 1, s // (2**(i + 2)), s // (2**(i + 2))) + for i in range(len(self.anchor_generator.strides)) + ] + cls_scores, bbox_preds = self.forward(feat) + + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + # When there is no truth, SSD-style negative mining makes even the cls loss + # vanish, and there should be no box loss either.
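+ # Illustrative aside (a sketch, not part of the vendored test): the mined + # negative count is capped at neg_pos_ratio * num_pos, so with zero positive + # anchors zero negatives survive and the summed cls loss collapses to zero. + # Toy bookkeeping with made-up names: + _num_pos = 0 # no gt boxes -> no positive anchors + _num_neg_kept = 3 * _num_pos # neg_pos_ratio = 3 in the cfg above + assert _num_neg_kept == 0, 'no negatives survive without positives'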
+ empty_cls_loss = sum(empty_gt_losses['loss_cls']) + empty_box_loss = sum(empty_gt_losses['loss_bbox']) + # SSD is special: OHEM keeps #pos:#neg = 1:3, so an empty gt also leads to + # cls loss = 0 + assert empty_cls_loss.item() == 0, 'cls loss should be zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + onegt_cls_loss = sum(one_gt_losses['loss_cls']) + onegt_box_loss = sum(one_gt_losses['loss_bbox']) + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' + + +def test_pisa_roi_head_loss(): + """Tests pisa roi head loss when truth is empty and non-empty.""" + train_cfg = mmcv.Config( + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.7, + neg_iou_thr=0.3, + min_pos_iou=0.3, + match_low_quality=True, + ignore_iof_thr=-1), + sampler=dict( + type='ScoreHLRSampler', + num=4, + pos_fraction=0.25, + neg_pos_ub=-1, + add_gt_as_proposals=True, + k=0.5, + bias=0.), + isr=dict(k=2., bias=0.), + carl=dict(k=1., bias=0.2), + allowed_border=0, + pos_weight=-1, + debug=False)) + + bbox_roi_extractor = dict( + type='SingleRoIExtractor', + roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=0), + out_channels=1, + featmap_strides=[1]) + + bbox_head = dict( + type='Shared2FCBBoxHead', + in_channels=1, + fc_out_channels=2, + roi_feat_size=7, + num_classes=4, + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[0., 0., 0., 0.], + target_stds=[0.1, 0.1, 0.2, 0.2]), + reg_class_agnostic=False, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0), + loss_bbox=dict(type='L1Loss', loss_weight=1.0)) + + self = PISARoIHead(bbox_roi_extractor, bbox_head, train_cfg=train_cfg) + + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + + # The RoI head expects multiple levels of features per image + feat = [ + torch.rand(1, 1, s // (2**(i + 2)), s // (2**(i + 2))) + for i in range(1) + ] + + proposal_list = [ + torch.Tensor([[22.6667, 22.8757, 238.6326, 151.8874], [0, 3, 5, 7]]) + ] + + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + gt_bboxes_ignore = None + + empty_gt_losses = self.forward_train(feat, img_metas, proposal_list, + gt_bboxes, gt_labels, + gt_bboxes_ignore) + + # When there is no truth, the cls loss should be nonzero but there should + # be no box loss.
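+ # Illustrative aside (sketch with made-up sizes, not upstream code): unlike + # the sigmoid-based dense heads, this RoI head uses a softmax classifier with + # an explicit background class (use_sigmoid=False above), so purely negative + # proposals still produce a positive cross-entropy term: + _roi_logits = torch.randn(4, 5) # 4 sampled rois, 4 classes + background + _bg_labels = torch.full((4, ), 4, dtype=torch.long) # all assigned to bg + assert torch.nn.functional.cross_entropy(_roi_logits, _bg_labels).item() > 0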
+ empty_cls_loss = empty_gt_losses['loss_cls'].sum() + empty_box_loss = empty_gt_losses['loss_bbox'].sum() + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + + one_gt_losses = self.forward_train(feat, img_metas, proposal_list, + gt_bboxes, gt_labels, gt_bboxes_ignore) + onegt_cls_loss = one_gt_losses['loss_cls'].sum() + onegt_box_loss = one_gt_losses['loss_bbox'].sum() + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_sabl_retina_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_sabl_retina_head.py new file mode 100644 index 0000000000000000000000000000000000000000..c958be6f67bc280ae52f11beef9ba0d2077cf624 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_sabl_retina_head.py @@ -0,0 +1,75 @@ +import mmcv +import torch + +from mmdet.models.dense_heads import SABLRetinaHead + + +def test_sabl_retina_head_loss(): + """Tests sabl retina head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + + cfg = mmcv.Config( + dict( + assigner=dict( + type='ApproxMaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0.0, + ignore_iof_thr=-1), + allowed_border=-1, + pos_weight=-1, + debug=False)) + head = SABLRetinaHead( + num_classes=4, + in_channels=3, + feat_channels=10, + loss_cls=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0), + train_cfg=cfg) + if torch.cuda.is_available(): + head.cuda() + # Anchor head expects multiple levels of features per image + feat = [ + torch.rand(1, 3, s // (2**(i + 2)), s // (2**(i + 2))).cuda() + for i in range(len(head.approx_anchor_generator.base_anchors)) + ] + cls_scores, bbox_preds = head.forward(feat) + + # Test that empty ground truth encourages the network + # to predict background + gt_bboxes = [torch.empty((0, 4)).cuda()] + gt_labels = [torch.LongTensor([]).cuda()] + + gt_bboxes_ignore = None + empty_gt_losses = head.loss(cls_scores, bbox_preds, gt_bboxes, + gt_labels, img_metas, gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there + # should be no box loss.
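+ # Illustrative aside (sketch only): SABL splits localization into a bucket + # classification term ('loss_bbox_cls') and a fine regression term + # ('loss_bbox_reg'); both are weighted sums over positive anchors, which is + # why an empty gt must zero them both, as asserted below: + _per_anchor_loss = torch.rand(6) # made-up per-anchor loss values + _pos_weights = torch.zeros(6) # empty gt -> no positive anchors + assert (_per_anchor_loss * _pos_weights).sum().item() == 0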
+ empty_cls_loss = sum(empty_gt_losses['loss_cls']) + empty_box_cls_loss = sum(empty_gt_losses['loss_bbox_cls']) + empty_box_reg_loss = sum(empty_gt_losses['loss_bbox_reg']) + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_cls_loss.item() == 0, ( + 'there should be no box cls loss when there are no true boxes') + assert empty_box_reg_loss.item() == 0, ( + 'there should be no box reg loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should + # be nonzero for random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]).cuda(), + ] + gt_labels = [torch.LongTensor([2]).cuda()] + one_gt_losses = head.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + onegt_cls_loss = sum(one_gt_losses['loss_cls']) + onegt_box_cls_loss = sum(one_gt_losses['loss_bbox_cls']) + onegt_box_reg_loss = sum(one_gt_losses['loss_bbox_reg']) + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_cls_loss.item() > 0, 'box loss cls should be non-zero' + assert onegt_box_reg_loss.item() > 0, 'box loss reg should be non-zero' diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_vfnet_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_vfnet_head.py new file mode 100644 index 0000000000000000000000000000000000000000..4fd43dd94fcf447c1b95bce96cd70c9976510a9f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_vfnet_head.py @@ -0,0 +1,62 @@ +import mmcv +import torch + +from mmdet.models.dense_heads import VFNetHead + + +def test_vfnet_head_loss(): + """Tests vfnet head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + train_cfg = mmcv.Config( + dict( + assigner=dict(type='ATSSAssigner', topk=9), + allowed_border=-1, + pos_weight=-1, + debug=False)) + # since Focal Loss is not supported on CPU + self = VFNetHead( + num_classes=4, + in_channels=1, + train_cfg=train_cfg, + loss_cls=dict(type='VarifocalLoss', use_sigmoid=True, loss_weight=1.0)) + if torch.cuda.is_available(): + self.cuda() + feat = [ + torch.rand(1, 1, s // feat_size, s // feat_size).cuda() + for feat_size in [4, 8, 16, 32, 64] + ] + cls_scores, bbox_preds, bbox_preds_refine = self.forward(feat) + # Test that empty ground truth encourages the network to predict + # background + gt_bboxes = [torch.empty((0, 4)).cuda()] + gt_labels = [torch.LongTensor([]).cuda()] + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, bbox_preds_refine, + gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there + # should be no box loss. 
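+ # Illustrative aside (rough sketch, not the real VarifocalLoss): with no gt + # boxes the IoU-aware classification targets are all zero, so the loss acts + # like BCE on pure negatives down-weighted by sigmoid(pred)**gamma, small but + # strictly positive, mirroring the non-zero cls loss asserted below: + _vf_pred = torch.randn(8, 4) + _vf_target = torch.zeros(8, 4) + _bce = torch.nn.functional.binary_cross_entropy_with_logits( + _vf_pred, _vf_target, reduction='none') + assert (_vf_pred.sigmoid().pow(2.0) * _bce).sum().item() > 0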
+ empty_cls_loss = empty_gt_losses['loss_cls'] + empty_box_loss = empty_gt_losses['loss_bbox'] + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero + # for random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]).cuda(), + ] + gt_labels = [torch.LongTensor([2]).cuda()] + one_gt_losses = self.loss(cls_scores, bbox_preds, bbox_preds_refine, + gt_bboxes, gt_labels, img_metas, + gt_bboxes_ignore) + onegt_cls_loss = one_gt_losses['loss_cls'] + onegt_box_loss = one_gt_losses['loss_bbox'] + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_yolact_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_yolact_head.py new file mode 100644 index 0000000000000000000000000000000000000000..aff57c4a97405110b4a94ce515f34a52b867eeb8 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_yolact_head.py @@ -0,0 +1,136 @@ +import mmcv +import torch + +from mmdet.models.dense_heads import YOLACTHead, YOLACTProtonet, YOLACTSegmHead + + +def test_yolact_head_loss(): + """Tests yolact head losses when truth is empty and non-empty.""" + s = 550 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + train_cfg = mmcv.Config( + dict( + assigner=dict( + type='MaxIoUAssigner', + pos_iou_thr=0.5, + neg_iou_thr=0.4, + min_pos_iou=0., + ignore_iof_thr=-1, + gt_max_assign_all=False), + smoothl1_beta=1., + allowed_border=-1, + pos_weight=-1, + neg_pos_ratio=3, + debug=False, + min_gt_box_wh=[4.0, 4.0])) + bbox_head = YOLACTHead( + num_classes=80, + in_channels=256, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=3, + scales_per_octave=1, + base_sizes=[8, 16, 32, 64, 128], + ratios=[0.5, 1.0, 2.0], + strides=[550.0 / x for x in [69, 35, 18, 9, 5]], + centers=[(550 * 0.5 / x, 550 * 0.5 / x) + for x in [69, 35, 18, 9, 5]]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[0.1, 0.1, 0.2, 0.2]), + loss_cls=dict( + type='CrossEntropyLoss', + use_sigmoid=False, + reduction='none', + loss_weight=1.0), + loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.5), + num_head_convs=1, + num_protos=32, + use_ohem=True, + train_cfg=train_cfg) + segm_head = YOLACTSegmHead( + in_channels=256, + num_classes=80, + loss_segm=dict( + type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)) + mask_head = YOLACTProtonet( + num_classes=80, + in_channels=256, + num_protos=32, + max_masks_to_train=100, + loss_mask_weight=6.125) + feat = [ + torch.rand(1, 256, feat_size, feat_size) + for feat_size in [69, 35, 18, 9, 5] + ] + cls_score, bbox_pred, coeff_pred = bbox_head.forward(feat) + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + gt_masks = [torch.empty((0, 550, 550))] + gt_bboxes_ignore = None + empty_gt_losses, sampling_results = bbox_head.loss( + cls_score, + bbox_pred, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there should + # be 
no box loss. + empty_cls_loss = sum(empty_gt_losses['loss_cls']) + empty_box_loss = sum(empty_gt_losses['loss_bbox']) + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # Test segm head and mask head + segm_head_outs = segm_head(feat[0]) + empty_segm_loss = segm_head.loss(segm_head_outs, gt_masks, gt_labels) + mask_pred = mask_head(feat[0], coeff_pred, gt_bboxes, img_metas, + sampling_results) + empty_mask_loss = mask_head.loss(mask_pred, gt_masks, gt_bboxes, img_metas, + sampling_results) + # When there is no truth, the segm and mask loss should be zero. + empty_segm_loss = sum(empty_segm_loss['loss_segm']) + empty_mask_loss = sum(empty_mask_loss['loss_mask']) + assert empty_segm_loss.item() == 0, ( + 'there should be no segm loss when there are no true boxes') + assert empty_mask_loss == 0, ( + 'there should be no mask loss when there are no true boxes') + + # When truth is non-empty then cls, box, mask, segm loss should be + # nonzero for random inputs. + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + gt_masks = [(torch.rand((1, 550, 550)) > 0.5).float()] + + one_gt_losses, sampling_results = bbox_head.loss( + cls_score, + bbox_pred, + gt_bboxes, + gt_labels, + img_metas, + gt_bboxes_ignore=gt_bboxes_ignore) + one_gt_cls_loss = sum(one_gt_losses['loss_cls']) + one_gt_box_loss = sum(one_gt_losses['loss_bbox']) + assert one_gt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert one_gt_box_loss.item() > 0, 'box loss should be non-zero' + + one_gt_segm_loss = segm_head.loss(segm_head_outs, gt_masks, gt_labels) + mask_pred = mask_head(feat[0], coeff_pred, gt_bboxes, img_metas, + sampling_results) + one_gt_mask_loss = mask_head.loss(mask_pred, gt_masks, gt_bboxes, + img_metas, sampling_results) + one_gt_segm_loss = sum(one_gt_segm_loss['loss_segm']) + one_gt_mask_loss = sum(one_gt_mask_loss['loss_mask']) + assert one_gt_segm_loss.item() > 0, 'segm loss should be non-zero' + assert one_gt_mask_loss.item() > 0, 'mask loss should be non-zero' diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_yolof_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_yolof_head.py new file mode 100644 index 0000000000000000000000000000000000000000..ef21b66cf3f81934f27e6d8f27997ebf4b950c48 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_dense_heads/test_yolof_head.py @@ -0,0 +1,75 @@ +import mmcv +import torch + +from mmdet.models.dense_heads import YOLOFHead + + +def test_yolof_head_loss(): + """Tests yolof head loss when truth is empty and non-empty.""" + s = 256 + img_metas = [{ + 'img_shape': (s, s, 3), + 'scale_factor': 1, + 'pad_shape': (s, s, 3) + }] + train_cfg = mmcv.Config( + dict( + assigner=dict( + type='UniformAssigner', + pos_ignore_thr=0.15, + neg_ignore_thr=0.7), + allowed_border=-1, + pos_weight=-1, + debug=False)) + self = YOLOFHead( + num_classes=4, + in_channels=1, + reg_decoded_bbox=True, + train_cfg=train_cfg, + anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[1, 2, 4, 8, 16], + strides=[32]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1., 1., 1., 1.], + add_ctr_clamp=True, + ctr_clamp=32), + loss_cls=dict( + type='FocalLoss', + use_sigmoid=True, + gamma=2.0, + alpha=0.25, + loss_weight=1.0), + 
loss_bbox=dict(type='GIoULoss', loss_weight=1.0)) + feat = [torch.rand(1, 1, s // 32, s // 32)] + cls_scores, bbox_preds = self.forward(feat) + + # Test that empty ground truth encourages the network to predict background + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + gt_bboxes_ignore = None + empty_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + # When there is no truth, the cls loss should be nonzero but there should + # be no box loss. + empty_cls_loss = empty_gt_losses['loss_cls'] + empty_box_loss = empty_gt_losses['loss_bbox'] + assert empty_cls_loss.item() > 0, 'cls loss should be non-zero' + assert empty_box_loss.item() == 0, ( + 'there should be no box loss when there are no true boxes') + + # When truth is non-empty then both cls and box loss should be nonzero for + # random inputs + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + one_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels, + img_metas, gt_bboxes_ignore) + onegt_cls_loss = one_gt_losses['loss_cls'] + onegt_box_loss = one_gt_losses['loss_bbox'] + assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero' + assert onegt_box_loss.item() > 0, 'box loss should be non-zero' diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_forward.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_forward.py new file mode 100644 index 0000000000000000000000000000000000000000..82ba87e2e1149fa58bd16fa2139150148dcadada --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_forward.py @@ -0,0 +1,622 @@ +"""pytest tests/test_forward.py.""" +import copy +from os.path import dirname, exists, join + +import numpy as np +import pytest +import torch + + +def _get_config_directory(): + """Find the predefined detector config directory.""" + try: + # Assume we are running in the source mmdetection repo + repo_dpath = dirname(dirname(dirname(__file__))) + except NameError: + # For IPython development when this __file__ is not defined + import mmdet + repo_dpath = dirname(dirname(mmdet.__file__)) + config_dpath = join(repo_dpath, 'configs') + if not exists(config_dpath): + raise Exception('Cannot find config path') + return config_dpath + + +def _get_config_module(fname): + """Load a configuration as a python module.""" + from mmcv import Config + config_dpath = _get_config_directory() + config_fpath = join(config_dpath, fname) + config_mod = Config.fromfile(config_fpath) + return config_mod + + +def _get_detector_cfg(fname): + """Grab configs necessary to create a detector. + + These are deep copied to allow for safe modification of parameters without + influencing other tests. 
+ """ + config = _get_config_module(fname) + model = copy.deepcopy(config.model) + return model + + +def test_sparse_rcnn_forward(): + config_path = 'sparse_rcnn/sparse_rcnn_r50_fpn_1x_coco.py' + model = _get_detector_cfg(config_path) + model['pretrained'] = None + from mmdet.models import build_detector + detector = build_detector(model) + detector.init_weights() + input_shape = (1, 3, 550, 550) + mm_inputs = _demo_mm_inputs(input_shape, num_items=[5]) + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + # Test forward train with non-empty truth batch + detector = detector + imgs = imgs + detector.train() + gt_bboxes = mm_inputs['gt_bboxes'] + gt_bboxes = [item for item in gt_bboxes] + gt_labels = mm_inputs['gt_labels'] + gt_labels = [item for item in gt_labels] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + assert float(loss.item()) > 0 + detector.forward_dummy(imgs) + + # Test forward train with an empty truth batch + mm_inputs = _demo_mm_inputs(input_shape, num_items=[0]) + imgs = mm_inputs.pop('imgs') + imgs = imgs + img_metas = mm_inputs.pop('img_metas') + gt_bboxes = mm_inputs['gt_bboxes'] + gt_bboxes = [item for item in gt_bboxes] + gt_labels = mm_inputs['gt_labels'] + gt_labels = [item for item in gt_labels] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + assert float(loss.item()) > 0 + + # Test forward test + detector.eval() + with torch.no_grad(): + img_list = [g[None, :] for g in imgs] + batch_results = [] + for one_img, one_meta in zip(img_list, img_metas): + result = detector.forward([one_img], [[one_meta]], + rescale=True, + return_loss=False) + batch_results.append(result) + + +def test_rpn_forward(): + model = _get_detector_cfg('rpn/rpn_r50_fpn_1x_coco.py') + model['pretrained'] = None + + from mmdet.models import build_detector + detector = build_detector(model) + + input_shape = (1, 3, 224, 224) + mm_inputs = _demo_mm_inputs(input_shape) + + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + + # Test forward train + gt_bboxes = mm_inputs['gt_bboxes'] + losses = detector.forward( + imgs, img_metas, gt_bboxes=gt_bboxes, return_loss=True) + assert isinstance(losses, dict) + + # Test forward test + with torch.no_grad(): + img_list = [g[None, :] for g in imgs] + batch_results = [] + for one_img, one_meta in zip(img_list, img_metas): + result = detector.forward([one_img], [[one_meta]], + return_loss=False) + batch_results.append(result) + + +@pytest.mark.parametrize( + 'cfg_file', + [ + 'retinanet/retinanet_r50_fpn_1x_coco.py', + 'guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py', + 'ghm/retinanet_ghm_r50_fpn_1x_coco.py', + 'fcos/fcos_center_r50_caffe_fpn_gn-head_1x_coco.py', + 'foveabox/fovea_align_r50_fpn_gn-head_4x4_2x_coco.py', + # 'free_anchor/retinanet_free_anchor_r50_fpn_1x_coco.py', + # 'atss/atss_r50_fpn_1x_coco.py', # not ready for topk + 'reppoints/reppoints_moment_r50_fpn_1x_coco.py', + 'yolo/yolov3_d53_mstrain-608_273e_coco.py' + ]) +def test_single_stage_forward_gpu(cfg_file): + if not torch.cuda.is_available(): + import pytest + pytest.skip('test requires GPU and torch+cuda') + + model = _get_detector_cfg(cfg_file) + model['pretrained'] = None + + from mmdet.models import build_detector + detector = build_detector(model) + + 
input_shape = (2, 3, 224, 224) + mm_inputs = _demo_mm_inputs(input_shape) + + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + + detector = detector.cuda() + imgs = imgs.cuda() + # Test forward train + gt_bboxes = [b.cuda() for b in mm_inputs['gt_bboxes']] + gt_labels = [g.cuda() for g in mm_inputs['gt_labels']] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + + # Test forward test + with torch.no_grad(): + img_list = [g[None, :] for g in imgs] + batch_results = [] + for one_img, one_meta in zip(img_list, img_metas): + result = detector.forward([one_img], [[one_meta]], + return_loss=False) + batch_results.append(result) + + +def test_faster_rcnn_ohem_forward(): + model = _get_detector_cfg( + 'faster_rcnn/faster_rcnn_r50_fpn_ohem_1x_coco.py') + model['pretrained'] = None + + from mmdet.models import build_detector + detector = build_detector(model) + + input_shape = (1, 3, 256, 256) + + # Test forward train with a non-empty truth batch + mm_inputs = _demo_mm_inputs(input_shape, num_items=[10]) + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + assert float(loss.item()) > 0 + + # Test forward train with an empty truth batch + mm_inputs = _demo_mm_inputs(input_shape, num_items=[0]) + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + assert float(loss.item()) > 0 + + +@pytest.mark.parametrize( + 'cfg_file', + [ + 'cascade_rcnn/cascade_mask_rcnn_r50_fpn_1x_coco.py', + 'mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py', + 'grid_rcnn/grid_rcnn_r50_fpn_gn-head_2x_coco.py', + 'ms_rcnn/ms_rcnn_r50_fpn_1x_coco.py', + 'htc/htc_r50_fpn_1x_coco.py', + 'scnet/scnet_r50_fpn_20e_coco.py', + 'seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py' # noqa: E501 + ]) +def test_two_stage_forward(cfg_file): + models_with_semantic = [ + 'htc/htc_r50_fpn_1x_coco.py', + 'scnet/scnet_r50_fpn_20e_coco.py', + ] + if cfg_file in models_with_semantic: + with_semantic = True + else: + with_semantic = False + + model = _get_detector_cfg(cfg_file) + model['pretrained'] = None + + # Save cost + if cfg_file in [ + 'seesaw_loss/mask_rcnn_r50_fpn_random_seesaw_loss_normed_mask_mstrain_2x_lvis_v1.py' # noqa: E501 + ]: + model.roi_head.bbox_head.num_classes = 80 + model.roi_head.bbox_head.loss_cls.num_classes = 80 + model.roi_head.mask_head.num_classes = 80 + model.test_cfg.rcnn.score_thr = 0.05 + model.test_cfg.rcnn.max_per_img = 100 + + from mmdet.models import build_detector + detector = build_detector(model) + + input_shape = (1, 3, 256, 256) + + # Test forward train with a non-empty truth batch + mm_inputs = _demo_mm_inputs( + input_shape, num_items=[10], with_semantic=with_semantic) + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + losses = detector.forward(imgs, img_metas, return_loss=True, **mm_inputs) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + 
loss.requires_grad_(True) + assert float(loss.item()) > 0 + loss.backward() + + # Test forward train with an empty truth batch + mm_inputs = _demo_mm_inputs( + input_shape, num_items=[0], with_semantic=with_semantic) + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + losses = detector.forward(imgs, img_metas, return_loss=True, **mm_inputs) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + loss.requires_grad_(True) + assert float(loss.item()) > 0 + loss.backward() + + # Test forward test + with torch.no_grad(): + img_list = [g[None, :] for g in imgs] + batch_results = [] + for one_img, one_meta in zip(img_list, img_metas): + result = detector.forward([one_img], [[one_meta]], + return_loss=False) + batch_results.append(result) + + +@pytest.mark.parametrize( + 'cfg_file', ['ghm/retinanet_ghm_r50_fpn_1x_coco.py', 'ssd/ssd300_coco.py']) +def test_single_stage_forward_cpu(cfg_file): + model = _get_detector_cfg(cfg_file) + model['pretrained'] = None + + from mmdet.models import build_detector + detector = build_detector(model) + + input_shape = (1, 3, 300, 300) + mm_inputs = _demo_mm_inputs(input_shape) + + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + + # Test forward train + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + + # Test forward test + with torch.no_grad(): + img_list = [g[None, :] for g in imgs] + batch_results = [] + for one_img, one_meta in zip(img_list, img_metas): + result = detector.forward([one_img], [[one_meta]], + return_loss=False) + batch_results.append(result) + + +def _demo_mm_inputs(input_shape=(1, 3, 300, 300), + num_items=None, num_classes=10, + with_semantic=False): # yapf: disable + """Create a superset of inputs needed to run test or train batches. 
+ + Args: + input_shape (tuple): + input batch dimensions + + num_items (None | List[int]): + specifies the number of boxes in each batch item + + num_classes (int): + number of different labels a box might have + """ + from mmdet.core import BitmapMasks + + (N, C, H, W) = input_shape + + rng = np.random.RandomState(0) + + imgs = rng.rand(*input_shape) + + img_metas = [{ + 'img_shape': (H, W, C), + 'ori_shape': (H, W, C), + 'pad_shape': (H, W, C), + 'filename': '.png', + 'scale_factor': np.array([1.1, 1.2, 1.1, 1.2]), + 'flip': False, + 'flip_direction': None, + } for _ in range(N)] + + gt_bboxes = [] + gt_labels = [] + gt_masks = [] + + for batch_idx in range(N): + if num_items is None: + num_boxes = rng.randint(1, 10) + else: + num_boxes = num_items[batch_idx] + + cx, cy, bw, bh = rng.rand(num_boxes, 4).T + + tl_x = ((cx * W) - (W * bw / 2)).clip(0, W) + tl_y = ((cy * H) - (H * bh / 2)).clip(0, H) + br_x = ((cx * W) + (W * bw / 2)).clip(0, W) + br_y = ((cy * H) + (H * bh / 2)).clip(0, H) + + boxes = np.vstack([tl_x, tl_y, br_x, br_y]).T + class_idxs = rng.randint(1, num_classes, size=num_boxes) + + gt_bboxes.append(torch.FloatTensor(boxes)) + gt_labels.append(torch.LongTensor(class_idxs)) + + mask = np.random.randint(0, 2, (len(boxes), H, W), dtype=np.uint8) + gt_masks.append(BitmapMasks(mask, H, W)) + + mm_inputs = { + 'imgs': torch.FloatTensor(imgs).requires_grad_(True), + 'img_metas': img_metas, + 'gt_bboxes': gt_bboxes, + 'gt_labels': gt_labels, + 'gt_bboxes_ignore': None, + 'gt_masks': gt_masks, + } + + if with_semantic: + # assume gt_semantic_seg using scale 1/8 of the img + gt_semantic_seg = np.random.randint( + 0, num_classes, (1, 1, H // 8, W // 8), dtype=np.uint8) + mm_inputs.update( + {'gt_semantic_seg': torch.ByteTensor(gt_semantic_seg)}) + + return mm_inputs + + +def test_yolact_forward(): + model = _get_detector_cfg('yolact/yolact_r50_1x8_coco.py') + model['pretrained'] = None + + from mmdet.models import build_detector + detector = build_detector(model) + + input_shape = (1, 3, 100, 100) + mm_inputs = _demo_mm_inputs(input_shape) + + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + + # Test forward train + detector.train() + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + gt_masks = mm_inputs['gt_masks'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + gt_masks=gt_masks, + return_loss=True) + assert isinstance(losses, dict) + + # Test forward test + detector.eval() + with torch.no_grad(): + img_list = [g[None, :] for g in imgs] + batch_results = [] + for one_img, one_meta in zip(img_list, img_metas): + result = detector.forward([one_img], [[one_meta]], + rescale=True, + return_loss=False) + batch_results.append(result) + + +def test_detr_forward(): + model = _get_detector_cfg('detr/detr_r50_8x2_150e_coco.py') + model['pretrained'] = None + + from mmdet.models import build_detector + detector = build_detector(model) + + input_shape = (1, 3, 100, 100) + mm_inputs = _demo_mm_inputs(input_shape) + + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + + # Test forward train with non-empty truth batch + detector.train() + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + assert float(loss.item()) > 0 + + # Test forward train with an empty truth batch 
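+ # Illustrative aside (hedged sketch, sizes made up): with num_items=[0] the + # Hungarian matcher has no boxes to assign, so every DETR object query is + # supervised toward the extra 'no object' class and the parsed loss below + # stays positive: + _query_logits = torch.randn(6, 5) # 6 queries, 4 classes + no-object slot + _no_object = torch.full((6, ), 4, dtype=torch.long) + assert torch.nn.functional.cross_entropy(_query_logits, _no_object).item() > 0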
+ mm_inputs = _demo_mm_inputs(input_shape, num_items=[0]) + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + assert float(loss.item()) > 0 + + # Test forward test + detector.eval() + with torch.no_grad(): + img_list = [g[None, :] for g in imgs] + batch_results = [] + for one_img, one_meta in zip(img_list, img_metas): + result = detector.forward([one_img], [[one_meta]], + rescale=True, + return_loss=False) + batch_results.append(result) + + +def test_kd_single_stage_forward(): + model = _get_detector_cfg('ld/ld_r18_gflv1_r101_fpn_coco_1x.py') + model['pretrained'] = None + + from mmdet.models import build_detector + detector = build_detector(model) + + input_shape = (1, 3, 100, 100) + mm_inputs = _demo_mm_inputs(input_shape) + + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + + # Test forward train with non-empty truth batch + detector.train() + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + assert float(loss.item()) > 0 + + # Test forward train with an empty truth batch + mm_inputs = _demo_mm_inputs(input_shape, num_items=[0]) + imgs = mm_inputs.pop('imgs') + img_metas = mm_inputs.pop('img_metas') + gt_bboxes = mm_inputs['gt_bboxes'] + gt_labels = mm_inputs['gt_labels'] + losses = detector.forward( + imgs, + img_metas, + gt_bboxes=gt_bboxes, + gt_labels=gt_labels, + return_loss=True) + assert isinstance(losses, dict) + loss, _ = detector._parse_losses(losses) + assert float(loss.item()) > 0 + + # Test forward test + detector.eval() + with torch.no_grad(): + img_list = [g[None, :] for g in imgs] + batch_results = [] + for one_img, one_meta in zip(img_list, img_metas): + result = detector.forward([one_img], [[one_meta]], + rescale=True, + return_loss=False) + batch_results.append(result) + + +def test_inference_detector(): + from mmdet.apis import inference_detector + from mmdet.models import build_detector + from mmcv import ConfigDict + + # small RetinaNet + num_class = 3 + model_dict = dict( + type='RetinaNet', + pretrained=None, + backbone=dict( + type='ResNet', + depth=18, + num_stages=4, + out_indices=(3, ), + norm_cfg=dict(type='BN', requires_grad=False), + norm_eval=True, + style='pytorch'), + neck=None, + bbox_head=dict( + type='RetinaHead', + num_classes=num_class, + in_channels=512, + stacked_convs=1, + feat_channels=256, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5], + strides=[32]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0]), + ), + test_cfg=dict( + nms_pre=1000, + min_bbox_size=0, + score_thr=0.05, + nms=dict(type='nms', iou_threshold=0.5), + max_per_img=100)) + + rng = np.random.RandomState(0) + img1 = rng.rand(100, 100, 3) + img2 = rng.rand(100, 100, 3) + + model = build_detector(ConfigDict(model_dict)) + config = _get_config_module('retinanet/retinanet_r50_fpn_1x_coco.py') + model.cfg = config + # test single image + result = inference_detector(model, img1) + assert len(result) == num_class + # test multiple image 
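+ # Illustrative aside (sketch): each per-class entry returned by + # inference_detector is an (n, 5) array of [x1, y1, x2, y2, score] rows, so + # the single-image result above can be sanity-checked with: + assert all(r.shape[1] == 5 for r in result)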
+ result = inference_detector(model, [img1, img2]) + assert len(result) == 2 and len(result[0]) == num_class diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_loss.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_loss.py new file mode 100644 index 0000000000000000000000000000000000000000..3f34865914edcd7862bcb0615d1892fd0c11c1e1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_loss.py @@ -0,0 +1,101 @@ +import pytest +import torch + +from mmdet.models.losses import (BalancedL1Loss, BoundedIoULoss, CIoULoss, + CrossEntropyLoss, DIoULoss, + DistributionFocalLoss, FocalLoss, + GaussianFocalLoss, GIoULoss, IoULoss, L1Loss, + MSELoss, QualityFocalLoss, SmoothL1Loss, + VarifocalLoss) + + +@pytest.mark.parametrize( + 'loss_class', [IoULoss, BoundedIoULoss, GIoULoss, DIoULoss, CIoULoss]) +def test_iou_type_loss_zeros_weight(loss_class): + pred = torch.rand((10, 4)) + target = torch.rand((10, 4)) + weight = torch.zeros(10) + + loss = loss_class()(pred, target, weight) + assert loss == 0. + + +@pytest.mark.parametrize('loss_class', [ + IoULoss, BoundedIoULoss, GIoULoss, DIoULoss, CIoULoss, MSELoss, L1Loss, + SmoothL1Loss, BalancedL1Loss, FocalLoss, QualityFocalLoss, + GaussianFocalLoss, DistributionFocalLoss, VarifocalLoss, CrossEntropyLoss +]) +def test_loss_with_reduction_override(loss_class): + pred = torch.rand((10, 4)) + target = torch.rand((10, 4)) + + with pytest.raises(AssertionError): + # any reduction_override outside [None, 'none', 'mean', 'sum'] + # is not allowed + reduction_override = True + loss_class()(pred, target, reduction_override=reduction_override) + + +@pytest.mark.parametrize('loss_class', [ + IoULoss, BoundedIoULoss, GIoULoss, DIoULoss, CIoULoss, MSELoss, L1Loss, + SmoothL1Loss, BalancedL1Loss +]) +def test_regression_losses(loss_class): + pred = torch.rand((10, 4)) + target = torch.rand((10, 4)) + + # Test loss forward + loss = loss_class()(pred, target) + assert isinstance(loss, torch.Tensor) + + # Test loss forward with reduction_override + loss = loss_class()(pred, target, reduction_override='mean') + assert isinstance(loss, torch.Tensor) + + # Test loss forward with avg_factor + loss = loss_class()(pred, target, avg_factor=10) + assert isinstance(loss, torch.Tensor) + + with pytest.raises(ValueError): + # loss can evaluate with avg_factor only if + # reduction is None, 'none' or 'mean'. + reduction_override = 'sum' + loss_class()( + pred, target, avg_factor=10, reduction_override=reduction_override) + + # Test loss forward with avg_factor and reduction + for reduction_override in [None, 'none', 'mean']: + loss_class()( + pred, target, avg_factor=10, reduction_override=reduction_override) + assert isinstance(loss, torch.Tensor) + + +@pytest.mark.parametrize('loss_class', [FocalLoss, CrossEntropyLoss]) +def test_classification_losses(loss_class): + pred = torch.rand((10, 5)) + target = torch.randint(0, 5, (10, )) + + # Test loss forward + loss = loss_class()(pred, target) + assert isinstance(loss, torch.Tensor) + + # Test loss forward with reduction_override + loss = loss_class()(pred, target, reduction_override='mean') + assert isinstance(loss, torch.Tensor) + + # Test loss forward with avg_factor + loss = loss_class()(pred, target, avg_factor=10) + assert isinstance(loss, torch.Tensor) + + with pytest.raises(ValueError): + # loss can evaluate with avg_factor only if + # reduction is None, 'none' or 'mean'.
+ reduction_override = 'sum' + loss_class()( + pred, target, avg_factor=10, reduction_override=reduction_override) + + # Test loss forward with avg_factor and reduction + for reduction_override in [None, 'none', 'mean']: + loss_class()( + pred, target, avg_factor=10, reduction_override=reduction_override) + assert isinstance(loss, torch.Tensor) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_necks.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_necks.py new file mode 100644 index 0000000000000000000000000000000000000000..45e32563c5dcb8c561e1bd2269a1fe8edc2a83bc --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_necks.py @@ -0,0 +1,375 @@ +import pytest +import torch +from torch.nn.modules.batchnorm import _BatchNorm + +from mmdet.models.necks import (FPN, ChannelMapper, CTResNetNeck, + DilatedEncoder, SSDNeck, YOLOV3Neck) + + +def test_fpn(): + """Tests fpn.""" + s = 64 + in_channels = [8, 16, 32, 64] + feat_sizes = [s // 2**i for i in range(4)] # [64, 32, 16, 8] + out_channels = 8 + # `num_outs` is not equal to len(in_channels) - start_level + with pytest.raises(AssertionError): + FPN(in_channels=in_channels, + out_channels=out_channels, + start_level=1, + num_outs=2) + + # `end_level` is larger than len(in_channels) - 1 + with pytest.raises(AssertionError): + FPN(in_channels=in_channels, + out_channels=out_channels, + start_level=1, + end_level=4, + num_outs=2) + + # `num_outs` is not equal to end_level - start_level + with pytest.raises(AssertionError): + FPN(in_channels=in_channels, + out_channels=out_channels, + start_level=1, + end_level=3, + num_outs=1) + + # Invalid `add_extra_convs` option + with pytest.raises(AssertionError): + FPN(in_channels=in_channels, + out_channels=out_channels, + start_level=1, + add_extra_convs='on_xxx', + num_outs=5) + + fpn_model = FPN( + in_channels=in_channels, + out_channels=out_channels, + start_level=1, + add_extra_convs=True, + num_outs=5) + + # FPN expects multiple levels of features per image + feats = [ + torch.rand(1, in_channels[i], feat_sizes[i], feat_sizes[i]) + for i in range(len(in_channels)) + ] + outs = fpn_model(feats) + assert fpn_model.add_extra_convs == 'on_input' + assert len(outs) == fpn_model.num_outs + for i in range(fpn_model.num_outs): + assert outs[i].shape[1] == out_channels + # with start_level=1 the first output level is already at stride 2 + assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1)) + + # Tests for fpn with no extra convs (pooling is used instead) + fpn_model = FPN( + in_channels=in_channels, + out_channels=out_channels, + start_level=1, + add_extra_convs=False, + num_outs=5) + outs = fpn_model(feats) + assert len(outs) == fpn_model.num_outs + assert not fpn_model.add_extra_convs + for i in range(fpn_model.num_outs): + assert outs[i].shape[1] == out_channels + assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1)) + + # Tests for fpn with lateral bns + fpn_model = FPN( + in_channels=in_channels, + out_channels=out_channels, + start_level=1, + add_extra_convs=True, + no_norm_on_lateral=False, + norm_cfg=dict(type='BN', requires_grad=True), + num_outs=5) + outs = fpn_model(feats) + assert len(outs) == fpn_model.num_outs + assert fpn_model.add_extra_convs == 'on_input' + for i in range(fpn_model.num_outs): + assert outs[i].shape[1] == out_channels + assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1)) + bn_exist = False + for m in fpn_model.modules(): + if isinstance(m, _BatchNorm): + bn_exist = True + assert bn_exist + + # Bilinear upsample + fpn_model = FPN( + in_channels=in_channels, + out_channels=out_channels, + start_level=1, + add_extra_convs=True, + upsample_cfg=dict(mode='bilinear', align_corners=True), + num_outs=5) + outs = fpn_model(feats) + assert len(outs) == fpn_model.num_outs + assert fpn_model.add_extra_convs == 'on_input' + for i in range(fpn_model.num_outs): + assert outs[i].shape[1] == out_channels + assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1)) + + # Scale factor instead of fixed upsample size + fpn_model = FPN( + in_channels=in_channels, + out_channels=out_channels, + start_level=1, + add_extra_convs=True, + upsample_cfg=dict(scale_factor=2), + num_outs=5) + outs = fpn_model(feats) + assert len(outs) == fpn_model.num_outs + for i in range(fpn_model.num_outs): + assert outs[i].shape[1] == out_channels + assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1)) + + # Extra convs source is 'inputs' + fpn_model = FPN( + in_channels=in_channels, + out_channels=out_channels, + add_extra_convs='on_input', + start_level=1, + num_outs=5) + assert fpn_model.add_extra_convs == 'on_input' + outs = fpn_model(feats) + assert len(outs) == fpn_model.num_outs + for i in range(fpn_model.num_outs): + assert outs[i].shape[1] == out_channels + assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1)) + + # Extra convs source is 'laterals' + fpn_model = FPN( + in_channels=in_channels, + out_channels=out_channels, + add_extra_convs='on_lateral', + start_level=1, + num_outs=5) + assert fpn_model.add_extra_convs == 'on_lateral' + outs = fpn_model(feats) + assert len(outs) == fpn_model.num_outs + for i in range(fpn_model.num_outs): + assert outs[i].shape[1] == out_channels + assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1)) + + # Extra convs source is 'outputs' + fpn_model = FPN( + in_channels=in_channels, + out_channels=out_channels, + add_extra_convs='on_output', + start_level=1, + num_outs=5) + assert fpn_model.add_extra_convs == 'on_output' + outs = fpn_model(feats) + assert len(outs) == fpn_model.num_outs + for i in range(fpn_model.num_outs): + assert outs[i].shape[1] == out_channels + assert outs[i].shape[2] == outs[i].shape[3] == s // (2**(i + 1)) + + +def test_channel_mapper(): + """Tests ChannelMapper.""" + s = 64 + in_channels = [8, 16, 32, 64] + feat_sizes = [s // 2**i for i in range(4)] # [64, 32, 16, 8] + out_channels = 8 + kernel_size = 3 + feats = [ + torch.rand(1, in_channels[i], feat_sizes[i], feat_sizes[i]) + for i in range(len(in_channels)) + ] + + # in_channels must be a list + with pytest.raises(AssertionError): + channel_mapper = ChannelMapper( + in_channels=10, out_channels=out_channels, kernel_size=kernel_size) + # the length of channel_mapper's inputs must be equal to the length of + # in_channels + with pytest.raises(AssertionError): + channel_mapper = ChannelMapper( + in_channels=in_channels[:-1], + out_channels=out_channels, + kernel_size=kernel_size) + channel_mapper(feats) + + channel_mapper = ChannelMapper( + in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size) + + outs = channel_mapper(feats) + assert len(outs) == len(feats) + for i in range(len(feats)): + assert outs[i].shape[1] == out_channels + assert outs[i].shape[2] == outs[i].shape[3] == s // (2**i) + + +def test_dilated_encoder(): + in_channels = 16 + out_channels = 32 + out_shape = 34 + dilated_encoder = DilatedEncoder(in_channels, out_channels, 16, 2) + feat = [torch.rand(1, in_channels, 34, 34)] + out_feat = dilated_encoder(feat)[0] + assert out_feat.shape == (1, out_channels, out_shape, out_shape) + + +def test_ct_resnet_neck(): + # num_filters/num_kernels must be a list + with
pytest.raises(TypeError): + CTResNetNeck( + in_channel=10, num_deconv_filters=10, num_deconv_kernels=4) + + # num_filters/num_kernels must be same length + with pytest.raises(AssertionError): + CTResNetNeck( + in_channel=10, + num_deconv_filters=(10, 10), + num_deconv_kernels=(4, )) + + in_channels = 16 + num_filters = (8, 8) + num_kernels = (4, 4) + feat = torch.rand(1, 16, 4, 4) + ct_resnet_neck = CTResNetNeck( + in_channel=in_channels, + num_deconv_filters=num_filters, + num_deconv_kernels=num_kernels, + use_dcn=False) + + # feat must be list or tuple + with pytest.raises(AssertionError): + ct_resnet_neck(feat) + + out_feat = ct_resnet_neck([feat])[0] + assert out_feat.shape == (1, num_filters[-1], 16, 16) + + if torch.cuda.is_available(): + # test dcn + ct_resnet_neck = CTResNetNeck( + in_channel=in_channels, + num_deconv_filters=num_filters, + num_deconv_kernels=num_kernels) + ct_resnet_neck = ct_resnet_neck.cuda() + feat = feat.cuda() + out_feat = ct_resnet_neck([feat])[0] + assert out_feat.shape == (1, num_filters[-1], 16, 16) + + +def test_yolov3_neck(): + # num_scales, in_channels, out_channels must be same length + with pytest.raises(AssertionError): + YOLOV3Neck(num_scales=3, in_channels=[16, 8, 4], out_channels=[8, 4]) + + # len(feats) must equal to num_scales + with pytest.raises(AssertionError): + neck = YOLOV3Neck( + num_scales=3, in_channels=[16, 8, 4], out_channels=[8, 4, 2]) + feats = (torch.rand(1, 4, 16, 16), torch.rand(1, 8, 16, 16)) + neck(feats) + + # test normal channels + s = 32 + in_channels = [16, 8, 4] + out_channels = [8, 4, 2] + feat_sizes = [s // 2**i for i in range(len(in_channels) - 1, -1, -1)] + feats = [ + torch.rand(1, in_channels[i], feat_sizes[i], feat_sizes[i]) + for i in range(len(in_channels) - 1, -1, -1) + ] + neck = YOLOV3Neck( + num_scales=3, in_channels=in_channels, out_channels=out_channels) + outs = neck(feats) + + assert len(outs) == len(feats) + for i in range(len(outs)): + assert outs[i].shape == \ + (1, out_channels[i], feat_sizes[i], feat_sizes[i]) + + # test more flexible setting + s = 32 + in_channels = [32, 8, 16] + out_channels = [19, 21, 5] + feat_sizes = [s // 2**i for i in range(len(in_channels) - 1, -1, -1)] + feats = [ + torch.rand(1, in_channels[i], feat_sizes[i], feat_sizes[i]) + for i in range(len(in_channels) - 1, -1, -1) + ] + neck = YOLOV3Neck( + num_scales=3, in_channels=in_channels, out_channels=out_channels) + outs = neck(feats) + + assert len(outs) == len(feats) + for i in range(len(outs)): + assert outs[i].shape == \ + (1, out_channels[i], feat_sizes[i], feat_sizes[i]) + + +def test_ssd_neck(): + # level_strides/level_paddings must be same length + with pytest.raises(AssertionError): + SSDNeck( + in_channels=[8, 16], + out_channels=[8, 16, 32], + level_strides=[2], + level_paddings=[2, 1]) + + # length of out_channels must larger than in_channels + with pytest.raises(AssertionError): + SSDNeck( + in_channels=[8, 16], + out_channels=[8], + level_strides=[2], + level_paddings=[2]) + + # len(out_channels) - len(in_channels) must equal to len(level_strides) + with pytest.raises(AssertionError): + SSDNeck( + in_channels=[8, 16], + out_channels=[4, 16, 64], + level_strides=[2, 2], + level_paddings=[2, 2]) + + # in_channels must be same with out_channels[:len(in_channels)] + with pytest.raises(AssertionError): + SSDNeck( + in_channels=[8, 16], + out_channels=[4, 16, 64], + level_strides=[2], + level_paddings=[2]) + + ssd_neck = SSDNeck( + in_channels=[4], + out_channels=[4, 8, 16], + level_strides=[2, 1], + level_paddings=[1, 
0]) + feats = (torch.rand(1, 4, 16, 16), ) + outs = ssd_neck(feats) + assert outs[0].shape == (1, 4, 16, 16) + assert outs[1].shape == (1, 8, 8, 8) + assert outs[2].shape == (1, 16, 6, 6) + + # test SSD-Lite Neck + ssd_neck = SSDNeck( + in_channels=[4, 8], + out_channels=[4, 8, 16], + level_strides=[1], + level_paddings=[1], + l2_norm_scale=None, + use_depthwise=True, + norm_cfg=dict(type='BN'), + act_cfg=dict(type='ReLU6')) + assert not hasattr(ssd_neck, 'l2_norm') + + from mmcv.cnn.bricks import DepthwiseSeparableConvModule + assert isinstance(ssd_neck.extra_layers[0][-1], + DepthwiseSeparableConvModule) + + feats = (torch.rand(1, 4, 8, 8), torch.rand(1, 8, 8, 8)) + outs = ssd_neck(feats) + assert outs[0].shape == (1, 4, 8, 8) + assert outs[1].shape == (1, 8, 8, 8) + assert outs[2].shape == (1, 16, 8, 8) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9bb64023107ea525d3bf4c02cc1ef81f097f3ad1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/__init__.py @@ -0,0 +1,3 @@ +from .utils import _dummy_bbox_sampling + +__all__ = ['_dummy_bbox_sampling'] diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/test_bbox_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/test_bbox_head.py new file mode 100644 index 0000000000000000000000000000000000000000..24c2a0d54b978cef954ca58eaa0c24b58c596e98 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/test_bbox_head.py @@ -0,0 +1,250 @@ +import mmcv +import numpy as np +import pytest +import torch + +from mmdet.core import bbox2roi +from mmdet.models.roi_heads.bbox_heads import BBoxHead +from .utils import _dummy_bbox_sampling + + +def test_bbox_head_loss(): + """Tests bbox head loss when truth is empty and non-empty.""" + self = BBoxHead(in_channels=8, roi_feat_size=3) + + # Dummy proposals + proposal_list = [ + torch.Tensor([[23.6667, 23.8757, 228.6326, 153.8874]]), + ] + + target_cfg = mmcv.Config(dict(pos_weight=1)) + + # Test bbox loss when truth is empty + gt_bboxes = [torch.empty((0, 4))] + gt_labels = [torch.LongTensor([])] + + sampling_results = _dummy_bbox_sampling(proposal_list, gt_bboxes, + gt_labels) + + bbox_targets = self.get_targets(sampling_results, gt_bboxes, gt_labels, + target_cfg) + labels, label_weights, bbox_targets, bbox_weights = bbox_targets + + # Create dummy features "extracted" for each sampled bbox + num_sampled = sum(len(res.bboxes) for res in sampling_results) + rois = bbox2roi([res.bboxes for res in sampling_results]) + dummy_feats = torch.rand(num_sampled, 8 * 3 * 3) + cls_scores, bbox_preds = self.forward(dummy_feats) + + losses = self.loss(cls_scores, bbox_preds, rois, labels, label_weights, + bbox_targets, bbox_weights) + assert losses.get('loss_cls', 0) > 0, 'cls-loss should be non-zero' + assert losses.get('loss_bbox', 0) == 0, 'empty gt loss should be zero' + + # Test bbox loss when truth is non-empty + gt_bboxes = [ + torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]), + ] + gt_labels = [torch.LongTensor([2])] + + sampling_results = _dummy_bbox_sampling(proposal_list, gt_bboxes, + gt_labels) + rois = bbox2roi([res.bboxes for res in sampling_results]) + + bbox_targets = self.get_targets(sampling_results, gt_bboxes, gt_labels, + 
target_cfg)
+    labels, label_weights, bbox_targets, bbox_weights = bbox_targets
+
+    # Create dummy features "extracted" for each sampled bbox
+    num_sampled = sum(len(res.bboxes) for res in sampling_results)
+    dummy_feats = torch.rand(num_sampled, 8 * 3 * 3)
+    cls_scores, bbox_preds = self.forward(dummy_feats)
+
+    losses = self.loss(cls_scores, bbox_preds, rois, labels, label_weights,
+                       bbox_targets, bbox_weights)
+    assert losses.get('loss_cls', 0) > 0, 'cls-loss should be non-zero'
+    assert losses.get('loss_bbox', 0) > 0, 'box-loss should be non-zero'
+
+
+@pytest.mark.parametrize('num_sample', [0, 1, 2])
+def test_bbox_head_get_bboxes(num_sample):
+    self = BBoxHead(reg_class_agnostic=True)
+
+    num_class = 6
+    rois = torch.rand((num_sample, 5))
+    cls_score = torch.rand((num_sample, num_class))
+    bbox_pred = torch.rand((num_sample, 4))
+
+    scale_factor = np.array([2.0, 2.0, 2.0, 2.0])
+    det_bboxes, det_labels = self.get_bboxes(
+        rois, cls_score, bbox_pred, None, scale_factor, rescale=True)
+    if num_sample == 0:
+        assert len(det_bboxes) == 0 and len(det_labels) == 0
+    else:
+        assert det_bboxes.shape == bbox_pred.shape
+        assert det_labels.shape == cls_score.shape
+
+
+def test_refine_boxes():
+    """Mirrors the doctest in
+    ``mmdet.models.bbox_heads.bbox_head.BBoxHead.refine_boxes`` but checks for
+    multiple values of n_roi / n_img."""
+    self = BBoxHead(reg_class_agnostic=True)
+
+    test_settings = [
+
+        # Corner case: less rois than images
+        {
+            'n_roi': 2,
+            'n_img': 4,
+            'rng': 34285940
+        },
+
+        # Corner case: no images
+        {
+            'n_roi': 0,
+            'n_img': 0,
+            'rng': 52925222
+        },
+
+        # Corner cases: few images / rois
+        {
+            'n_roi': 1,
+            'n_img': 1,
+            'rng': 1200281
+        },
+        {
+            'n_roi': 2,
+            'n_img': 1,
+            'rng': 1200282
+        },
+        {
+            'n_roi': 2,
+            'n_img': 2,
+            'rng': 1200283
+        },
+        {
+            'n_roi': 1,
+            'n_img': 2,
+            'rng': 1200284
+        },
+
+        # Corner case: no rois few images
+        {
+            'n_roi': 0,
+            'n_img': 1,
+            'rng': 23955860
+        },
+        {
+            'n_roi': 0,
+            'n_img': 2,
+            'rng': 25830516
+        },
+
+        # Corner case: no rois many images
+        {
+            'n_roi': 0,
+            'n_img': 10,
+            'rng': 671346
+        },
+        {
+            'n_roi': 0,
+            'n_img': 20,
+            'rng': 699807
+        },
+
+        # Corner case: similar num rois and images
+        {
+            'n_roi': 20,
+            'n_img': 20,
+            'rng': 1200238
+        },
+        {
+            'n_roi': 10,
+            'n_img': 20,
+            'rng': 1200238
+        },
+        {
+            'n_roi': 5,
+            'n_img': 5,
+            'rng': 1200238
+        },
+
+        # ----------------------------------
+        # Common case: more rois than images
+        {
+            'n_roi': 100,
+            'n_img': 1,
+            'rng': 337156
+        },
+        {
+            'n_roi': 150,
+            'n_img': 2,
+            'rng': 275898
+        },
+        {
+            'n_roi': 500,
+            'n_img': 5,
+            'rng': 4903221
+        },
+    ]
+
+    for demokw in test_settings:
+        try:
+            n_roi = demokw['n_roi']
+            n_img = demokw['n_img']
+            rng = demokw['rng']
+
+            print(f'Test refine_boxes case: {demokw!r}')
+            tup = _demodata_refine_boxes(n_roi, n_img, rng=rng)
+            rois, labels, bbox_preds, pos_is_gts, img_metas = tup
+            bboxes_list = self.refine_bboxes(rois, labels, bbox_preds,
+                                             pos_is_gts, img_metas)
+            assert len(bboxes_list) == n_img
+            assert sum(map(len, bboxes_list)) <= n_roi
+            assert all(b.shape[1] == 4 for b in bboxes_list)
+        except Exception:
+            print(f'Test failed with demokw={demokw!r}')
+            raise
+
+
+def _demodata_refine_boxes(n_roi, n_img, rng=0):
+    """Create random test data for the
+    ``mmdet.models.bbox_heads.bbox_head.BBoxHead.refine_boxes`` method."""
+    import numpy as np
+    from mmdet.core.bbox.demodata import random_boxes
+    from mmdet.core.bbox.demodata import ensure_rng
+    try:
+        import kwarray
+    except ImportError:
+        import pytest
+        pytest.skip('kwarray is required for this test')
+    scale = 512
+    rng = ensure_rng(rng)
+    img_metas = [{'img_shape': (scale, scale)} for _ in range(n_img)]
+    # Create rois in the expected format
+    roi_boxes = random_boxes(n_roi, scale=scale, rng=rng)
+    if n_img == 0:
+        assert n_roi == 0, 'cannot have any rois if there are no images'
+        img_ids = torch.empty((0, ), dtype=torch.long)
+        roi_boxes = torch.empty((0, 4), dtype=torch.float32)
+    else:
+        img_ids = rng.randint(0, n_img, (n_roi, ))
+        img_ids = torch.from_numpy(img_ids)
+    rois = torch.cat([img_ids[:, None].float(), roi_boxes], dim=1)
+    # Create other args
+    labels = rng.randint(0, 2, (n_roi, ))
+    labels = torch.from_numpy(labels).long()
+    bbox_preds = random_boxes(n_roi, scale=scale, rng=rng)
+    # For each image, pretend random positive boxes are gts
+    is_label_pos = (labels.numpy() > 0).astype(np.int64)
+    lbl_per_img = kwarray.group_items(is_label_pos, img_ids.numpy())
+    pos_per_img = [sum(lbl_per_img.get(gid, [])) for gid in range(n_img)]
+    # randomly generate with numpy then sort with torch
+    _pos_is_gts = [
+        rng.randint(0, 2, (npos, )).astype(np.uint8) for npos in pos_per_img
+    ]
+    pos_is_gts = [
+        torch.from_numpy(p).sort(descending=True)[0] for p in _pos_is_gts
+    ]
+    return rois, labels, bbox_preds, pos_is_gts, img_metas
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/test_mask_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/test_mask_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..31826cd59a4b2e2a0d1d0306df0321f48ff18e7a
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/test_mask_head.py
@@ -0,0 +1,69 @@
+import mmcv
+import torch
+
+from mmdet.models.roi_heads.mask_heads import FCNMaskHead, MaskIoUHead
+from .utils import _dummy_bbox_sampling
+
+
+def test_mask_head_loss():
+    """Test mask head loss and mask IoU head loss with a dummy mask target."""
+    self = FCNMaskHead(
+        num_convs=1,
+        roi_feat_size=6,
+        in_channels=8,
+        conv_out_channels=8,
+        num_classes=8)
+
+    # Dummy proposals
+    proposal_list = [
+        torch.Tensor([[23.6667, 23.8757, 228.6326, 153.8874]]),
+    ]
+
+    gt_bboxes = [
+        torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]),
+    ]
+    gt_labels = [torch.LongTensor([2])]
+    sampling_results = _dummy_bbox_sampling(proposal_list, gt_bboxes,
+                                            gt_labels)
+
+    # create dummy mask
+    import numpy as np
+    from mmdet.core import BitmapMasks
+    dummy_mask = np.random.randint(0, 2, (1, 160, 240), dtype=np.uint8)
+    gt_masks = [BitmapMasks(dummy_mask, 160, 240)]
+
+    # create dummy train_cfg
+    train_cfg = mmcv.Config(dict(mask_size=12, mask_thr_binary=0.5))
+
+    # Create dummy features "extracted" for each sampled bbox
+    num_sampled = sum(len(res.bboxes) for res in sampling_results)
+    dummy_feats = torch.rand(num_sampled, 8, 6, 6)
+
+    mask_pred = self.forward(dummy_feats)
+    mask_targets = self.get_targets(sampling_results, gt_masks, train_cfg)
+    pos_labels = torch.cat([res.pos_gt_labels for res in sampling_results])
+    loss_mask = self.loss(mask_pred, mask_targets, pos_labels)
+
+    onegt_mask_loss = sum(loss_mask['loss_mask'])
+    assert onegt_mask_loss.item() > 0, 'mask loss should be non-zero'
+
+    # test mask_iou_head
+    mask_iou_head = MaskIoUHead(
+        num_convs=1,
+        num_fcs=1,
+        roi_feat_size=6,
+        in_channels=8,
+        conv_out_channels=8,
+        fc_out_channels=8,
+        num_classes=8)
+
+    pos_mask_pred = mask_pred[range(mask_pred.size(0)), pos_labels]
+    mask_iou_pred = mask_iou_head(dummy_feats, pos_mask_pred)
+    pos_mask_iou_pred = mask_iou_pred[range(mask_iou_pred.size(0)), pos_labels]
+
+    mask_iou_targets = mask_iou_head.get_targets(sampling_results, gt_masks,
+                                                 pos_mask_pred, mask_targets,
+                                                 train_cfg)
+    loss_mask_iou = mask_iou_head.loss(pos_mask_iou_pred, mask_iou_targets)
+    onegt_mask_iou_loss = loss_mask_iou['loss_mask_iou'].sum()
+    assert onegt_mask_iou_loss.item() >= 0
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/test_roi_extractor.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/test_roi_extractor.py
new file mode 100644
index 0000000000000000000000000000000000000000..22743f2d3be3b4be82bc46699b76fecf7af60020
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/test_roi_extractor.py
@@ -0,0 +1,113 @@
+import pytest
+import torch
+
+from mmdet.models.roi_heads.roi_extractors import GenericRoIExtractor
+
+
+def test_groie():
+    # test with pre/post
+    cfg = dict(
+        roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2),
+        out_channels=256,
+        featmap_strides=[4, 8, 16, 32],
+        pre_cfg=dict(
+            type='ConvModule',
+            in_channels=256,
+            out_channels=256,
+            kernel_size=5,
+            padding=2,
+            inplace=False,
+        ),
+        post_cfg=dict(
+            type='ConvModule',
+            in_channels=256,
+            out_channels=256,
+            kernel_size=5,
+            padding=2,
+            inplace=False))
+
+    groie = GenericRoIExtractor(**cfg)
+
+    feats = (
+        torch.rand((1, 256, 200, 336)),
+        torch.rand((1, 256, 100, 168)),
+        torch.rand((1, 256, 50, 84)),
+        torch.rand((1, 256, 25, 42)),
+    )
+
+    rois = torch.tensor([[0.0000, 587.8285, 52.1405, 886.2484, 341.5644]])
+
+    res = groie(feats, rois)
+    assert res.shape == torch.Size([1, 256, 7, 7])
+
+    # test without pre/post
+    cfg = dict(
+        roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2),
+        out_channels=256,
+        featmap_strides=[4, 8, 16, 32])
+
+    groie = GenericRoIExtractor(**cfg)
+
+    feats = (
+        torch.rand((1, 256, 200, 336)),
+        torch.rand((1, 256, 100, 168)),
+        torch.rand((1, 256, 50, 84)),
+        torch.rand((1, 256, 25, 42)),
+    )
+
+    rois = torch.tensor([[0.0000, 587.8285, 52.1405, 886.2484, 341.5644]])
+
+    res = groie(feats, rois)
+    assert res.shape == torch.Size([1, 256, 7, 7])
+
+    # test without pre/post, with concat aggregation
+    cfg = dict(
+        aggregation='concat',
+        roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2),
+        out_channels=256 * 4,
+        featmap_strides=[4, 8, 16, 32])
+
+    groie = GenericRoIExtractor(**cfg)
+
+    feats = (
+        torch.rand((1, 256, 200, 336)),
+        torch.rand((1, 256, 100, 168)),
+        torch.rand((1, 256, 50, 84)),
+        torch.rand((1, 256, 25, 42)),
+    )
+
+    rois = torch.tensor([[0.0000, 587.8285, 52.1405, 886.2484, 341.5644]])
+
+    res = groie(feats, rois)
+    assert res.shape == torch.Size([1, 1024, 7, 7])
+
+    # test unsupported aggregation method
+    with pytest.raises(AssertionError):
+        cfg = dict(
+            aggregation='not support',
+            roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2),
+            out_channels=1024,
+            featmap_strides=[4, 8, 16, 32])
+        _ = GenericRoIExtractor(**cfg)
+
+    # test concat channels number
+    cfg = dict(
+        aggregation='concat',
+        roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2),
+        out_channels=256 * 5,  # 256*5 != 256*4
+        featmap_strides=[4, 8, 16, 32])
+
+    groie = GenericRoIExtractor(**cfg)
+
+    feats = (
+        torch.rand((1, 256, 200, 336)),
+        torch.rand((1, 256, 100, 168)),
+        torch.rand((1, 256, 50, 84)),
+        torch.rand((1, 256, 25, 42)),
+    )
+
+    rois = torch.tensor([[0.0000, 587.8285, 52.1405, 886.2484, 341.5644]])
+
+    # out_channels does not equal the sum of the feature channels
+    with pytest.raises(AssertionError):
+        _ = groie(feats, rois)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/test_sabl_bbox_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/test_sabl_bbox_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..05178088a40ddbac0f456ab7b764967c8d6f71c1
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/test_sabl_bbox_head.py
@@ -0,0 +1,76 @@
+import mmcv
+import torch
+
+from mmdet.core import bbox2roi
+from mmdet.models.roi_heads.bbox_heads import SABLHead
+from .utils import _dummy_bbox_sampling
+
+
+def test_sabl_bbox_head_loss():
+    """Tests bbox head loss when truth is empty and non-empty."""
+    self = SABLHead(
+        num_classes=4,
+        cls_in_channels=3,
+        reg_in_channels=3,
+        cls_out_channels=3,
+        reg_offset_out_channels=3,
+        reg_cls_out_channels=3,
+        roi_feat_size=7)
+
+    # Dummy proposals
+    proposal_list = [
+        torch.Tensor([[23.6667, 23.8757, 228.6326, 153.8874]]),
+    ]
+
+    target_cfg = mmcv.Config(dict(pos_weight=1))
+
+    # Test bbox loss when truth is empty
+    gt_bboxes = [torch.empty((0, 4))]
+    gt_labels = [torch.LongTensor([])]
+
+    sampling_results = _dummy_bbox_sampling(proposal_list, gt_bboxes,
+                                            gt_labels)
+
+    bbox_targets = self.get_targets(sampling_results, gt_bboxes, gt_labels,
+                                    target_cfg)
+    labels, label_weights, bbox_targets, bbox_weights = bbox_targets
+
+    # Create dummy features "extracted" for each sampled bbox
+    num_sampled = sum(len(res.bboxes) for res in sampling_results)
+    rois = bbox2roi([res.bboxes for res in sampling_results])
+    dummy_feats = torch.rand(num_sampled, 3, 7, 7)
+    cls_scores, bbox_preds = self.forward(dummy_feats)
+
+    losses = self.loss(cls_scores, bbox_preds, rois, labels, label_weights,
+                       bbox_targets, bbox_weights)
+    assert losses.get('loss_cls', 0) > 0, 'cls-loss should be non-zero'
+    assert losses.get('loss_bbox_cls',
+                      0) == 0, 'empty gt bbox-cls-loss should be zero'
+    assert losses.get('loss_bbox_reg',
+                      0) == 0, 'empty gt bbox-reg-loss should be zero'
+
+    # Test bbox loss when truth is non-empty
+    gt_bboxes = [
+        torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]),
+    ]
+    gt_labels = [torch.LongTensor([2])]
+
+    sampling_results = _dummy_bbox_sampling(proposal_list, gt_bboxes,
+                                            gt_labels)
+    rois = bbox2roi([res.bboxes for res in sampling_results])
+
+    bbox_targets = self.get_targets(sampling_results, gt_bboxes, gt_labels,
+                                    target_cfg)
+    labels, label_weights, bbox_targets, bbox_weights = bbox_targets
+
+    # Create dummy features "extracted" for each sampled bbox
+    num_sampled = sum(len(res.bboxes) for res in sampling_results)
+    dummy_feats = torch.rand(num_sampled, 3, 7, 7)
+    cls_scores, bbox_preds = self.forward(dummy_feats)
+
+    losses = self.loss(cls_scores, bbox_preds, rois, labels, label_weights,
+                       bbox_targets, bbox_weights)
+    assert losses.get('loss_bbox_cls',
+                      0) > 0, 'box-cls-loss should be non-zero'
+    assert losses.get('loss_bbox_reg',
+                      0) > 0, 'box-reg-loss should be non-zero'
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/utils.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..e5c6d58fcb1ce3e1d6def9070b61680670b39d58
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_roi_heads/utils.py
@@ -0,0 +1,37 @@
+import torch
+
+from mmdet.core import build_assigner, build_sampler
+
+
+def _dummy_bbox_sampling(proposal_list, gt_bboxes, gt_labels):
+    """Create sample results that can be passed to BBoxHead.get_targets."""
+    num_imgs = 1
+    feat = torch.rand(1, 1, 3, 3)
+    assign_config = dict(
+        type='MaxIoUAssigner',
+        pos_iou_thr=0.5,
+        neg_iou_thr=0.5,
+        min_pos_iou=0.5,
+        ignore_iof_thr=-1)
+    sampler_config = dict(
+        type='RandomSampler',
+        num=512,
+        pos_fraction=0.25,
+        neg_pos_ub=-1,
+        add_gt_as_proposals=True)
+    bbox_assigner = build_assigner(assign_config)
+    bbox_sampler = build_sampler(sampler_config)
+    gt_bboxes_ignore = [None for _ in range(num_imgs)]
+    sampling_results = []
+    for i in range(num_imgs):
+        assign_result = bbox_assigner.assign(proposal_list[i], gt_bboxes[i],
+                                             gt_bboxes_ignore[i], gt_labels[i])
+        sampling_result = bbox_sampler.sample(
+            assign_result,
+            proposal_list[i],
+            gt_bboxes[i],
+            gt_labels[i],
+            feats=feat)
+        sampling_results.append(sampling_result)
+
+    return sampling_results
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_utils/test_inverted_residual.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_utils/test_inverted_residual.py
new file mode 100644
index 0000000000000000000000000000000000000000..d54227082706a9011811113111f7d7aa0e0a2d12
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_utils/test_inverted_residual.py
@@ -0,0 +1,77 @@
+import pytest
+import torch
+from mmcv.cnn import is_norm
+from torch.nn.modules import GroupNorm
+
+from mmdet.models.utils import InvertedResidual, SELayer
+
+
+def test_inverted_residual():
+
+    with pytest.raises(AssertionError):
+        # stride must be in [1, 2]
+        InvertedResidual(16, 16, 32, stride=3)
+
+    with pytest.raises(AssertionError):
+        # se_cfg must be None or dict
+        InvertedResidual(16, 16, 32, se_cfg=list())
+
+    with pytest.raises(AssertionError):
+        # in_channels and mid_channels must be the same if
+        # with_expand_conv is False
+        InvertedResidual(16, 16, 32, with_expand_conv=False)
+
+    # Test InvertedResidual forward, stride=1
+    block = InvertedResidual(16, 16, 32, stride=1)
+    x = torch.randn(1, 16, 56, 56)
+    x_out = block(x)
+    assert getattr(block, 'se', None) is None
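+    # with stride=1 and in_channels == out_channels the block keeps a
+    # residual shortcut; no SE layer is built because se_cfg is None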
+    assert block.with_res_shortcut
+    assert x_out.shape == torch.Size((1, 16, 56, 56))
+
+    # Test InvertedResidual forward, stride=2
+    block = InvertedResidual(16, 16, 32, stride=2)
+    x = torch.randn(1, 16, 56, 56)
+    x_out = block(x)
+    assert not block.with_res_shortcut
+    assert x_out.shape == torch.Size((1, 16, 28, 28))
+
+    # Test InvertedResidual forward with se layer
+    se_cfg = dict(channels=32)
+    block = InvertedResidual(16, 16, 32, stride=1, se_cfg=se_cfg)
+    x = torch.randn(1, 16, 56, 56)
+    x_out = block(x)
+    assert isinstance(block.se, SELayer)
+    assert x_out.shape == torch.Size((1, 16, 56, 56))
+
+    # Test InvertedResidual forward, with_expand_conv=False
+    block = InvertedResidual(32, 16, 32, with_expand_conv=False)
+    x = torch.randn(1, 32, 56, 56)
+    x_out = block(x)
+    assert getattr(block, 'expand_conv', None) is None
+    assert x_out.shape == torch.Size((1, 16, 56, 56))
+
+    # Test InvertedResidual forward with GroupNorm
+    block = InvertedResidual(
+        16, 16, 32, norm_cfg=dict(type='GN', num_groups=2))
+    x = torch.randn(1, 16, 56, 56)
+    x_out = block(x)
+    for m in block.modules():
+        if is_norm(m):
+            assert isinstance(m, GroupNorm)
+    assert x_out.shape == torch.Size((1, 16, 56, 56))
+
+    # Test InvertedResidual forward with HSigmoid
+    block = InvertedResidual(16, 16, 32, act_cfg=dict(type='HSigmoid'))
+    x = torch.randn(1, 16, 56, 56)
+    x_out = block(x)
+    assert x_out.shape == torch.Size((1, 16, 56, 56))
+
+    # Test InvertedResidual forward with checkpoint
+    block = InvertedResidual(16, 16, 32, with_cp=True)
+    x = torch.randn(1, 16, 56, 56)
+    x_out = block(x)
+    assert block.with_cp
+    assert x_out.shape == torch.Size((1, 16, 56, 56))
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_utils/test_position_encoding.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_utils/test_position_encoding.py
new file mode 100644
index 0000000000000000000000000000000000000000..e12127f54efd60692c7608e2a31b22598739ddfd
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_utils/test_position_encoding.py
@@ -0,0 +1,38 @@
+import pytest
+import torch
+
+from mmdet.models.utils import (LearnedPositionalEncoding,
+                                SinePositionalEncoding)
+
+
+def test_sine_positional_encoding(num_feats=16, batch_size=2):
+    # test invalid type of scale
+    with pytest.raises(AssertionError):
+        module = SinePositionalEncoding(
+            num_feats, scale=(3., ), normalize=True)
+
+    module = SinePositionalEncoding(num_feats)
+    h, w = 10, 6
+    mask = (torch.rand(batch_size, h, w) > 0.5).to(torch.int)
+    assert not module.normalize
+    out = module(mask)
+    assert out.shape == (batch_size, num_feats * 2, h, w)
+
+    # set normalize
+    module = SinePositionalEncoding(num_feats, normalize=True)
+    assert module.normalize
+    out = module(mask)
+    assert out.shape == (batch_size, num_feats * 2, h, w)
+
+
+def test_learned_positional_encoding(num_feats=16,
+                                     row_num_embed=10,
+                                     col_num_embed=10,
+                                     batch_size=2):
+    module = LearnedPositionalEncoding(num_feats, row_num_embed, col_num_embed)
+    assert module.row_embed.weight.shape == (row_num_embed, num_feats)
+    assert module.col_embed.weight.shape == (col_num_embed, num_feats)
+    h, w = 10, 6
+    mask = torch.rand(batch_size, h, w) > 0.5
+    out = module(mask)
+    assert out.shape == (batch_size, num_feats * 2, h, w)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_utils/test_se_layer.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_utils/test_se_layer.py
new file mode 100644
index 0000000000000000000000000000000000000000..c2cd239cf9f10d25f9e49977e3359d448b46472c
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_utils/test_se_layer.py
@@ -0,0 +1,23 @@
+import pytest
+import torch
+
+from mmdet.models.utils import SELayer
+
+
+def test_se_layer():
+    with pytest.raises(AssertionError):
+        # act_cfg sequence length must equal 2
+        SELayer(channels=32, act_cfg=(dict(type='ReLU'), ))
+
+    with pytest.raises(AssertionError):
+        # act_cfg sequence must be a tuple of dict
+        SELayer(channels=32, act_cfg=[dict(type='ReLU'), dict(type='ReLU')])
+
+    # Test SELayer forward
+    layer = SELayer(channels=32)
+    layer.init_weights()
+    layer.train()
+
+    x = torch.randn((1, 32, 10, 10))
+    x_out = layer(x)
+    assert x_out.shape == torch.Size((1, 32, 10, 10))
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_utils/test_transformer.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_utils/test_transformer.py
new file mode 100644
index 0000000000000000000000000000000000000000..6058b2abeb23c88d134d604d004f27e342aa6564
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_models/test_utils/test_transformer.py
@@ -0,0 +1,110 @@
+import pytest
+from mmcv.utils import ConfigDict
+
+from mmdet.models.utils.transformer import (DetrTransformerDecoder,
+                                            DetrTransformerEncoder,
+                                            Transformer)
+
+
+def test_detr_transformer_decoder_encoder_layer():
+    config = ConfigDict(
+        dict(
+            return_intermediate=True,
+            num_layers=6,
+            transformerlayers=dict(
+                type='DetrTransformerDecoderLayer',
+                attn_cfgs=dict(
+                    type='MultiheadAttention',
+                    embed_dims=256,
+                    num_heads=8,
+                    dropout=0.1),
+                feedforward_channels=2048,
+                ffn_dropout=0.1,
+                operation_order=(
+                    'norm',
+                    'self_attn',
+                    'norm',
+                    'cross_attn',
+                    'norm',
+                    'ffn',
+                ))))
+    assert DetrTransformerDecoder(**config).layers[0].pre_norm
+    assert len(DetrTransformerDecoder(**config).layers) == 6
+
+    DetrTransformerDecoder(**config)
+    with pytest.raises(AssertionError):
+        config = ConfigDict(
+            dict(
+                return_intermediate=True,
+                num_layers=6,
+                transformerlayers=[
+                    dict(
+                        type='DetrTransformerDecoderLayer',
+                        attn_cfgs=dict(
+                            type='MultiheadAttention',
+                            embed_dims=256,
+                            num_heads=8,
+                            dropout=0.1),
+                        feedforward_channels=2048,
+                        ffn_dropout=0.1,
+                        operation_order=('self_attn', 'norm', 'cross_attn',
+                                         'norm', 'ffn', 'norm'))
+                ] * 5))
+        DetrTransformerDecoder(**config)
+
+    config = ConfigDict(
+        dict(
+            num_layers=6,
+            transformerlayers=dict(
+                type='DetrTransformerDecoderLayer',
+                attn_cfgs=dict(
+                    type='MultiheadAttention',
+                    embed_dims=256,
+                    num_heads=8,
+                    dropout=0.1),
+                feedforward_channels=2048,
+                ffn_dropout=0.1,
+                operation_order=('norm', 'self_attn', 'norm', 'cross_attn',
+                                 'norm', 'ffn', 'norm'))))
+
+    with pytest.raises(AssertionError):
+        # len(operation_order) == 6
+        DetrTransformerEncoder(**config)
+
+
+def test_transformer():
+    config = ConfigDict(
+        dict(
+            encoder=dict(
+                type='DetrTransformerEncoder',
+                num_layers=6,
+                transformerlayers=dict(
+                    type='BaseTransformerLayer',
+                    attn_cfgs=[
+                        dict(
+                            type='MultiheadAttention',
+                            embed_dims=256,
+                            num_heads=8,
+                            dropout=0.1)
+                    ],
+                    feedforward_channels=2048,
+                    ffn_dropout=0.1,
+                    operation_order=('self_attn', 'norm', 'ffn', 'norm'))),
+            decoder=dict(
+                type='DetrTransformerDecoder',
+                return_intermediate=True,
+                num_layers=6,
+                transformerlayers=dict(
+                    type='DetrTransformerDecoderLayer',
+                    attn_cfgs=dict(
+                        type='MultiheadAttention',
+                        embed_dims=256,
+                        num_heads=8,
+                        dropout=0.1),
+                    feedforward_channels=2048,
+                    ffn_dropout=0.1,
+                    operation_order=('self_attn', 'norm', 'cross_attn', 'norm',
+                                     'ffn', 'norm')),
+            )))
+    transformer = Transformer(**config)
+    transformer.init_weights()
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_onnx/__init__.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_onnx/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..320516c05c19b2e02831124bed38956e32575914
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_onnx/__init__.py
@@ -0,0 +1,3 @@
+from .utils import ort_validate
+
+__all__ = ['ort_validate']
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_onnx/test_head.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_onnx/test_head.py
new file mode 100644
index 0000000000000000000000000000000000000000..215435fcedd9cbaab33065733c398b2acf5c9286
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_onnx/test_head.py
@@ -0,0 +1,387 @@
+import os.path as osp
+from functools import partial
+
+import mmcv
+import numpy as np
+import pytest
+import torch
+from mmcv.cnn import Scale
+
+from mmdet import digit_version
+from mmdet.models.dense_heads import (FCOSHead, FSAFHead, RetinaHead, SSDHead,
+                                      YOLOV3Head)
+from .utils import ort_validate
+
+data_path = osp.join(osp.dirname(__file__), 'data')
+
+if digit_version(torch.__version__) <= digit_version('1.5.0'):
+    pytest.skip(
+        'ort backend does not support version below 1.5.0',
+        allow_module_level=True)
+
+
+def retinanet_config():
+    """RetinaNet Head Config."""
+    head_cfg = dict(
+        stacked_convs=6,
+        feat_channels=2,
+        anchor_generator=dict(
+            type='AnchorGenerator',
+            octave_base_scale=4,
+            scales_per_octave=3,
+            ratios=[0.5, 1.0, 2.0],
+            strides=[8, 16, 32, 64, 128]),
+        bbox_coder=dict(
+            type='DeltaXYWHBBoxCoder',
+            target_means=[.0, .0, .0, .0],
+            target_stds=[1.0, 1.0, 1.0, 1.0]))
+
+    test_cfg = mmcv.Config(
+        dict(
+            deploy_nms_pre=0,
+            min_bbox_size=0,
+            score_thr=0.05,
+            nms=dict(type='nms', iou_threshold=0.5),
+            max_per_img=100))
+
+    model = RetinaHead(
+        num_classes=4, in_channels=1, test_cfg=test_cfg, **head_cfg)
+    model.requires_grad_(False)
+
+    return model
+
+
+def test_retina_head_forward_single():
+    """Test RetinaNet Head single forward in torch and onnxruntime env."""
+    retina_model = retinanet_config()
+
+    feat = torch.rand(1, retina_model.in_channels, 32, 32)
+    # validate that torch and ort produce the same result
+    ort_validate(retina_model.forward_single, feat)
+
+
+def test_retina_head_forward():
+    """Test RetinaNet Head forward in torch and onnxruntime env."""
+    retina_model = retinanet_config()
+    s = 128
+    # RetinaNet head expects multiple levels of features per image
+    feats = [
+        torch.rand(1, retina_model.in_channels, s // (2**(i + 2)),
+                   s // (2**(i + 2)))  # [32, 16, 8, 4, 2]
+        for i in range(len(retina_model.anchor_generator.strides))
+    ]
+    ort_validate(retina_model.forward, feats)
+
+
+def test_retinanet_head_get_bboxes():
+    """Test RetinaNet Head get_bboxes() in torch and onnxruntime env."""
+    retina_model = retinanet_config()
+    s = 128
+    img_metas = [{
+        'img_shape_for_onnx': torch.Tensor([s, s]),
+        'scale_factor': np.ones(4),
+        'pad_shape': (s, s, 3),
+        'img_shape': (s, s, 2)
+    }]
+
+    # The data of retina_head_get_bboxes.pkl contains two parts:
+    # cls_score(list(Tensor)) and bboxes(list(Tensor)),
+    # where each torch.Tensor is generated by torch.rand().
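+    # (loading a fixed pickle of random tensors, rather than calling
+    # torch.rand here, keeps the torch vs. ONNX Runtime comparison
+    # deterministic across runs)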
+    # the cls_score's size: (1, 36, 32, 32), (1, 36, 16, 16),
+    # (1, 36, 8, 8), (1, 36, 4, 4), (1, 36, 2, 2).
+    # the bboxes's size: (1, 36, 32, 32), (1, 36, 16, 16),
+    # (1, 36, 8, 8), (1, 36, 4, 4), (1, 36, 2, 2)
+    retina_head_data = 'retina_head_get_bboxes.pkl'
+    feats = mmcv.load(osp.join(data_path, retina_head_data))
+    cls_score = feats[:5]
+    bboxes = feats[5:]
+
+    retina_model.get_bboxes = partial(
+        retina_model.get_bboxes, img_metas=img_metas, with_nms=False)
+    ort_validate(retina_model.get_bboxes, (cls_score, bboxes))
+
+
+def yolo_config():
+    """YoloV3 Head Config."""
+    head_cfg = dict(
+        anchor_generator=dict(
+            type='YOLOAnchorGenerator',
+            base_sizes=[[(116, 90), (156, 198), (373, 326)],
+                        [(30, 61), (62, 45), (59, 119)],
+                        [(10, 13), (16, 30), (33, 23)]],
+            strides=[32, 16, 8]),
+        bbox_coder=dict(type='YOLOBBoxCoder'))
+
+    test_cfg = mmcv.Config(
+        dict(
+            deploy_nms_pre=0,
+            min_bbox_size=0,
+            score_thr=0.05,
+            conf_thr=0.005,
+            nms=dict(type='nms', iou_threshold=0.45),
+            max_per_img=100))
+
+    model = YOLOV3Head(
+        num_classes=4,
+        in_channels=[1, 1, 1],
+        out_channels=[16, 8, 4],
+        test_cfg=test_cfg,
+        **head_cfg)
+    model.requires_grad_(False)
+    # yolov3 need eval()
+    model.cpu().eval()
+    return model
+
+
+def test_yolov3_head_forward():
+    """Test Yolov3 head forward() in torch and ort env."""
+    yolo_model = yolo_config()
+
+    # Yolov3 head expects multiple levels of features per image
+    feats = [
+        torch.rand(1, 1, 64 // (2**(i + 2)), 64 // (2**(i + 2)))
+        for i in range(len(yolo_model.in_channels))
+    ]
+    ort_validate(yolo_model.forward, feats)
+
+
+def test_yolov3_head_get_bboxes():
+    """Test yolov3 head get_bboxes() in torch and ort env."""
+    yolo_model = yolo_config()
+    s = 128
+    img_metas = [{
+        'img_shape_for_onnx': torch.Tensor([s, s]),
+        'img_shape': (s, s, 3),
+        'scale_factor': np.ones(4),
+        'pad_shape': (s, s, 3)
+    }]
+
+    # The data of yolov3_head_get_bboxes.pkl contains
+    # a list of torch.Tensor, where each torch.Tensor
+    # is generated by torch.rand and each tensor size is:
+    # (1, 27, 32, 32), (1, 27, 16, 16), (1, 27, 8, 8).
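+    # NOTE: binding img_metas and with_nms via functools.partial below leaves
+    # only tensor arguments, which is what ort_validate needs for ONNX export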
+    yolo_head_data = 'yolov3_head_get_bboxes.pkl'
+    pred_maps = mmcv.load(osp.join(data_path, yolo_head_data))
+
+    yolo_model.get_bboxes = partial(
+        yolo_model.get_bboxes, img_metas=img_metas, with_nms=False)
+    ort_validate(yolo_model.get_bboxes, pred_maps)
+
+
+def fcos_config():
+    """FCOS Head Config."""
+    test_cfg = mmcv.Config(
+        dict(
+            deploy_nms_pre=0,
+            min_bbox_size=0,
+            score_thr=0.05,
+            nms=dict(type='nms', iou_threshold=0.5),
+            max_per_img=100))
+
+    model = FCOSHead(num_classes=4, in_channels=1, test_cfg=test_cfg)
+
+    model.requires_grad_(False)
+    return model
+
+
+def test_fcos_head_forward_single():
+    """Test FCOS forward_single() in torch and ort env."""
+    fcos_model = fcos_config()
+
+    feat = torch.rand(1, fcos_model.in_channels, 32, 32)
+    fcos_model.forward_single = partial(
+        fcos_model.forward_single,
+        scale=Scale(1.0).requires_grad_(False),
+        stride=(4, ))
+    ort_validate(fcos_model.forward_single, feat)
+
+
+def test_fcos_head_forward():
+    """Test FCOS forward with multi-level feature maps."""
+    fcos_model = fcos_config()
+    s = 128
+    feats = [
+        torch.rand(1, 1, s // feat_size, s // feat_size)
+        for feat_size in [4, 8, 16, 32, 64]
+    ]
+    ort_validate(fcos_model.forward, feats)
+
+
+def test_fcos_head_get_bboxes():
+    """Test FCOS head get_bboxes() in ort."""
+    fcos_model = fcos_config()
+    s = 128
+    img_metas = [{
+        'img_shape_for_onnx': torch.Tensor([s, s]),
+        'img_shape': (s, s, 3),
+        'scale_factor': np.ones(4),
+        'pad_shape': (s, s, 3)
+    }]
+
+    cls_scores = [
+        torch.rand(1, fcos_model.num_classes, s // feat_size, s // feat_size)
+        for feat_size in [4, 8, 16, 32, 64]
+    ]
+    bboxes = [
+        torch.rand(1, 4, s // feat_size, s // feat_size)
+        for feat_size in [4, 8, 16, 32, 64]
+    ]
+    centerness = [
+        torch.rand(1, 1, s // feat_size, s // feat_size)
+        for feat_size in [4, 8, 16, 32, 64]
+    ]
+
+    fcos_model.get_bboxes = partial(
+        fcos_model.get_bboxes, img_metas=img_metas, with_nms=False)
+    ort_validate(fcos_model.get_bboxes, (cls_scores, bboxes, centerness))
+
+
+def fsaf_config():
+    """FSAF Head Config."""
+    cfg = dict(
+        anchor_generator=dict(
+            type='AnchorGenerator',
+            octave_base_scale=1,
+            scales_per_octave=1,
+            ratios=[1.0],
+            strides=[8, 16, 32, 64, 128]))
+
+    test_cfg = mmcv.Config(
+        dict(
+            deploy_nms_pre=0,
+            min_bbox_size=0,
+            score_thr=0.05,
+            nms=dict(type='nms', iou_threshold=0.5),
+            max_per_img=100))
+
+    model = FSAFHead(num_classes=4, in_channels=1, test_cfg=test_cfg, **cfg)
+    model.requires_grad_(False)
+    return model
+
+
+def test_fsaf_head_forward_single():
+    """Test FSAF Head forward_single() in torch and onnxruntime env."""
+    fsaf_model = fsaf_config()
+
+    feat = torch.rand(1, fsaf_model.in_channels, 32, 32)
+    ort_validate(fsaf_model.forward_single, feat)
+
+
+def test_fsaf_head_forward():
+    """Test FSAF Head forward in torch and onnxruntime env."""
+    fsaf_model = fsaf_config()
+    s = 128
+    feats = [
+        torch.rand(1, fsaf_model.in_channels, s // (2**(i + 2)),
+                   s // (2**(i + 2)))
+        for i in range(len(fsaf_model.anchor_generator.strides))
+    ]
+    ort_validate(fsaf_model.forward, feats)
+
+
+def test_fsaf_head_get_bboxes():
+    """Test FSAF Head get_bboxes in torch and onnxruntime env."""
+    fsaf_model = fsaf_config()
+    s = 256
+    img_metas = [{
+        'img_shape_for_onnx': torch.Tensor([s, s]),
+        'scale_factor': np.ones(4),
+        'pad_shape': (s, s, 3),
+        'img_shape': (s, s, 2)
+    }]
+
+    # The data of fsaf_head_get_bboxes.pkl contains two parts:
+    # cls_score(list(Tensor)) and bboxes(list(Tensor)),
+    # where each torch.Tensor is generated by torch.rand().
+    # the cls_score's size: (1, 4, 64, 64), (1, 4, 32, 32),
+    # (1, 4, 16, 16), (1, 4, 8, 8), (1, 4, 4, 4).
+    # the bboxes's size: (1, 4, 64, 64), (1, 4, 32, 32),
+    # (1, 4, 16, 16), (1, 4, 8, 8), (1, 4, 4, 4).
+    fsaf_head_data = 'fsaf_head_get_bboxes.pkl'
+    feats = mmcv.load(osp.join(data_path, fsaf_head_data))
+    cls_score = feats[:5]
+    bboxes = feats[5:]
+
+    fsaf_model.get_bboxes = partial(
+        fsaf_model.get_bboxes, img_metas=img_metas, with_nms=False)
+    ort_validate(fsaf_model.get_bboxes, (cls_score, bboxes))
+
+
+def ssd_config():
+    """SSD Head Config."""
+    cfg = dict(
+        anchor_generator=dict(
+            type='SSDAnchorGenerator',
+            scale_major=False,
+            input_size=300,
+            basesize_ratio_range=(0.15, 0.9),
+            strides=[8, 16, 32, 64, 100, 300],
+            ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]]),
+        bbox_coder=dict(
+            type='DeltaXYWHBBoxCoder',
+            target_means=[.0, .0, .0, .0],
+            target_stds=[0.1, 0.1, 0.2, 0.2]))
+
+    test_cfg = mmcv.Config(
+        dict(
+            deploy_nms_pre=0,
+            nms=dict(type='nms', iou_threshold=0.45),
+            min_bbox_size=0,
+            score_thr=0.02,
+            max_per_img=200))
+
+    model = SSDHead(
+        num_classes=4,
+        in_channels=(4, 8, 4, 2, 2, 2),
+        test_cfg=test_cfg,
+        **cfg)
+
+    model.requires_grad_(False)
+    return model
+
+
+def test_ssd_head_forward():
+    """Test SSD Head forward in torch and onnxruntime env."""
+    ssd_model = ssd_config()
+
+    featmap_size = [38, 19, 10, 6, 5, 3, 1]
+
+    feats = [
+        torch.rand(1, ssd_model.in_channels[i], featmap_size[i],
+                   featmap_size[i]) for i in range(len(ssd_model.in_channels))
+    ]
+    ort_validate(ssd_model.forward, feats)
+
+
+def test_ssd_head_get_bboxes():
+    """Test SSD Head get_bboxes in torch and onnxruntime env."""
+    ssd_model = ssd_config()
+    s = 300
+    img_metas = [{
+        'img_shape_for_onnx': torch.Tensor([s, s]),
+        'scale_factor': np.ones(4),
+        'pad_shape': (s, s, 3),
+        'img_shape': (s, s, 2)
+    }]
+
+    # The data of ssd_head_get_bboxes.pkl contains two parts:
+    # cls_score(list(Tensor)) and bboxes(list(Tensor)),
+    # where each torch.Tensor is generated by torch.rand().
+    # the cls_score's size: (1, 20, 38, 38), (1, 30, 19, 19),
+    # (1, 30, 10, 10), (1, 30, 5, 5), (1, 20, 3, 3), (1, 20, 1, 1).
+    # the bboxes's size: (1, 16, 38, 38), (1, 24, 19, 19),
+    # (1, 24, 10, 10), (1, 24, 5, 5), (1, 16, 3, 3), (1, 16, 1, 1).
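+    # NOTE: the pickle stores the six cls_score tensors first and the six
+    # bbox tensors last, hence the feats[:6] / feats[6:] split below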
+    ssd_head_data = 'ssd_head_get_bboxes.pkl'
+    feats = mmcv.load(osp.join(data_path, ssd_head_data))
+    cls_score = feats[:6]
+    bboxes = feats[6:]
+
+    ssd_model.get_bboxes = partial(
+        ssd_model.get_bboxes, img_metas=img_metas, with_nms=False)
+    ort_validate(ssd_model.get_bboxes, (cls_score, bboxes))
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_onnx/test_neck.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_onnx/test_neck.py
new file mode 100644
index 0000000000000000000000000000000000000000..f16a3dcc8597bbd09c5bb4656a93531438b83eb0
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_onnx/test_neck.py
@@ -0,0 +1,162 @@
+import os.path as osp
+
+import mmcv
+import pytest
+import torch
+
+from mmdet import digit_version
+from mmdet.models.necks import FPN, YOLOV3Neck
+from .utils import ort_validate
+
+if digit_version(torch.__version__) <= digit_version('1.5.0'):
+    pytest.skip(
+        'ort backend does not support version below 1.5.0',
+        allow_module_level=True)
+
+# Control the returned model of fpn_neck_config()
+fpn_test_step_names = {
+    'fpn_normal': 0,
+    'fpn_wo_extra_convs': 1,
+    'fpn_lateral_bns': 2,
+    'fpn_bilinear_upsample': 3,
+    'fpn_scale_factor': 4,
+    'fpn_extra_convs_inputs': 5,
+    'fpn_extra_convs_laterals': 6,
+    'fpn_extra_convs_outputs': 7,
+}
+
+# Control the returned model of yolo_neck_config()
+yolo_test_step_names = {'yolo_normal': 0}
+
+data_path = osp.join(osp.dirname(__file__), 'data')
+
+
+def fpn_neck_config(test_step_name):
+    """Return the FPN model and its input features corresponding to the given
+    entry of fpn_test_step_names."""
+    s = 64
+    in_channels = [8, 16, 32, 64]
+    feat_sizes = [s // 2**i for i in range(4)]  # [64, 32, 16, 8]
+    out_channels = 8
+
+    feats = [
+        torch.rand(1, in_channels[i], feat_sizes[i], feat_sizes[i])
+        for i in range(len(in_channels))
+    ]
+
+    if (fpn_test_step_names[test_step_name] == 0):
+        fpn_model = FPN(
+            in_channels=in_channels,
+            out_channels=out_channels,
+            add_extra_convs=True,
+            num_outs=5)
+    elif (fpn_test_step_names[test_step_name] == 1):
+        fpn_model = FPN(
+            in_channels=in_channels,
+            out_channels=out_channels,
+            add_extra_convs=False,
+            num_outs=5)
+    elif (fpn_test_step_names[test_step_name] == 2):
+        fpn_model = FPN(
+            in_channels=in_channels,
+            out_channels=out_channels,
+            add_extra_convs=True,
+            no_norm_on_lateral=False,
+            norm_cfg=dict(type='BN', requires_grad=True),
+            num_outs=5)
+    elif (fpn_test_step_names[test_step_name] == 3):
+        fpn_model = FPN(
+            in_channels=in_channels,
+            out_channels=out_channels,
+            add_extra_convs=True,
+            upsample_cfg=dict(mode='bilinear', align_corners=True),
+            num_outs=5)
+    elif (fpn_test_step_names[test_step_name] == 4):
+        fpn_model = FPN(
+            in_channels=in_channels,
+            out_channels=out_channels,
+            add_extra_convs=True,
+            upsample_cfg=dict(scale_factor=2),
+            num_outs=5)
+    elif (fpn_test_step_names[test_step_name] == 5):
+        fpn_model = FPN(
+            in_channels=in_channels,
+            out_channels=out_channels,
+            add_extra_convs='on_input',
+            num_outs=5)
+    elif (fpn_test_step_names[test_step_name] == 6):
+        fpn_model = FPN(
+            in_channels=in_channels,
+            out_channels=out_channels,
+            add_extra_convs='on_lateral',
+            num_outs=5)
+    elif (fpn_test_step_names[test_step_name] == 7):
+        fpn_model = FPN(
+            in_channels=in_channels,
+            out_channels=out_channels,
+            add_extra_convs='on_output',
+            num_outs=5)
+    return fpn_model, feats
+
+
+def yolo_neck_config(test_step_name):
+    """Return the YOLOV3 neck model and its input features."""
+    in_channels = [16, 8, 4]
+    out_channels = [8, 4, 2]
+
+    # The data of yolov3_neck.pkl contains a list of
+    # torch.Tensor, where each torch.Tensor is generated by
+    # torch.rand and each tensor size is:
+    # (1, 4, 64, 64), (1, 8, 32, 32), (1, 16, 16, 16).
+    yolov3_neck_data = 'yolov3_neck.pkl'
+    feats = mmcv.load(osp.join(data_path, yolov3_neck_data))
+
+    if (yolo_test_step_names[test_step_name] == 0):
+        yolo_model = YOLOV3Neck(
+            in_channels=in_channels, out_channels=out_channels, num_scales=3)
+    return yolo_model, feats
+
+
+def test_fpn_normal():
+    outs = fpn_neck_config('fpn_normal')
+    ort_validate(*outs)
+
+
+def test_fpn_wo_extra_convs():
+    outs = fpn_neck_config('fpn_wo_extra_convs')
+    ort_validate(*outs)
+
+
+def test_fpn_lateral_bns():
+    outs = fpn_neck_config('fpn_lateral_bns')
+    ort_validate(*outs)
+
+
+def test_fpn_bilinear_upsample():
+    outs = fpn_neck_config('fpn_bilinear_upsample')
+    ort_validate(*outs)
+
+
+def test_fpn_scale_factor():
+    outs = fpn_neck_config('fpn_scale_factor')
+    ort_validate(*outs)
+
+
+def test_fpn_extra_convs_inputs():
+    outs = fpn_neck_config('fpn_extra_convs_inputs')
+    ort_validate(*outs)
+
+
+def test_fpn_extra_convs_laterals():
+    outs = fpn_neck_config('fpn_extra_convs_laterals')
+    ort_validate(*outs)
+
+
+def test_fpn_extra_convs_outputs():
+    outs = fpn_neck_config('fpn_extra_convs_outputs')
+    ort_validate(*outs)
+
+
+def test_yolo_normal():
+    outs = yolo_neck_config('yolo_normal')
+    ort_validate(*outs)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_onnx/utils.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_onnx/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..89b9c13aa6cbee616bf9f3ef87f3ed3166a29f15
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_onnx/utils.py
@@ -0,0 +1,138 @@
+import os
+import os.path as osp
+import warnings
+
+import numpy as np
+import onnx
+import onnxruntime as ort
+import torch
+import torch.nn as nn
+
+ort_custom_op_path = ''
+try:
+    from mmcv.ops import get_onnxruntime_op_path
+    ort_custom_op_path = get_onnxruntime_op_path()
+except (ImportError, ModuleNotFoundError):
+    warnings.warn('If input model has custom op from mmcv, \
+        you may have to build mmcv with ONNXRuntime from source.')
+
+
+class WrapFunction(nn.Module):
+    """Wrap the function to be tested as an nn.Module for torch.onnx.export
+    tracing."""
+
+    def __init__(self, wrapped_function):
+        super(WrapFunction, self).__init__()
+        self.wrapped_function = wrapped_function
+
+    def forward(self, *args, **kwargs):
+        return self.wrapped_function(*args, **kwargs)
+
+
+def ort_validate(model, feats, onnx_io='tmp.onnx'):
+    """Validate that the outputs of the onnxruntime backend match the outputs
+    generated by torch.
+
+    Args:
+        model (nn.Module | function): the model, or model function,
+            to be verified.
+        feats (tuple(list(torch.Tensor)) | list(torch.Tensor) | torch.Tensor):
+            the input of model.
+        onnx_io (str): the name of onnx output file.
+    """
+    # if model is not an instance of nn.Module, then it is a normal
+    # function and it should be wrapped.
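+    # (e.g. a bound method such as retina_model.forward_single is not an
+    # nn.Module, so it is wrapped in WrapFunction before export)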
+    if isinstance(model, nn.Module):
+        wrap_model = model
+    else:
+        wrap_model = WrapFunction(model)
+    wrap_model.cpu().eval()
+    with torch.no_grad():
+        torch.onnx.export(
+            wrap_model,
+            feats,
+            onnx_io,
+            export_params=True,
+            keep_initializers_as_inputs=True,
+            do_constant_folding=True,
+            verbose=False,
+            opset_version=11)
+
+    if isinstance(feats, tuple):
+        ort_feats = []
+        for feat in feats:
+            ort_feats += feat
+    else:
+        ort_feats = feats
+    # default model name: tmp.onnx
+    onnx_outputs = get_ort_model_output(ort_feats)
+
+    # remove temp file
+    if osp.exists(onnx_io):
+        os.remove(onnx_io)
+
+    if isinstance(feats, tuple):
+        torch_outputs = convert_result_list(wrap_model.forward(*feats))
+    else:
+        torch_outputs = convert_result_list(wrap_model.forward(feats))
+    torch_outputs = [
+        torch_output.detach().numpy() for torch_output in torch_outputs
+    ]
+
+    # match torch_outputs and onnx_outputs
+    for i in range(len(onnx_outputs)):
+        np.testing.assert_allclose(
+            torch_outputs[i], onnx_outputs[i], rtol=1e-03, atol=1e-05)
+
+
+def get_ort_model_output(feat, onnx_io='tmp.onnx'):
+    """Run the model in onnxruntime env.
+
+    Args:
+        feat (list[Tensor]): A list of tensors from torch.rand,
+            each is a 4D-tensor.
+        onnx_io (str): the name of the onnx model file to load.
+
+    Returns:
+        list[np.array]: onnxruntime infer result, each is a np.array
+    """
+
+    onnx_model = onnx.load(onnx_io)
+    onnx.checker.check_model(onnx_model)
+
+    session_options = ort.SessionOptions()
+    # register custom op for onnxruntime
+    if osp.exists(ort_custom_op_path):
+        session_options.register_custom_ops_library(ort_custom_op_path)
+    sess = ort.InferenceSession(onnx_io, session_options)
+    if isinstance(feat, torch.Tensor):
+        onnx_outputs = sess.run(None,
+                                {sess.get_inputs()[0].name: feat.numpy()})
+    else:
+        onnx_outputs = sess.run(None, {
+            sess.get_inputs()[i].name: feat[i].numpy()
+            for i in range(len(feat))
+        })
+    return onnx_outputs
+
+
+def convert_result_list(outputs):
+    """Convert the torch forward outputs containing tuple or list to a list
+    only containing torch.Tensor.
+
+    Args:
+        outputs (list(Tensor) | tuple(list(Tensor) | ...): the outputs
+            in torch env, maybe containing nested structures such as list
+            or tuple.
+
+    Returns:
+        list(Tensor): a list only containing torch.Tensor
+    """
+    # recursive end condition
+    if isinstance(outputs, torch.Tensor):
+        return [outputs]
+
+    ret = []
+    for sub in outputs:
+        ret += convert_result_list(sub)
+    return ret
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/async_benchmark.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/async_benchmark.py
new file mode 100644
index 0000000000000000000000000000000000000000..8dab48adf8f292c91137310e16fd198a2ac43691
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/async_benchmark.py
@@ -0,0 +1,101 @@
+import asyncio
+import os
+import shutil
+import urllib.request
+
+import mmcv
+import torch
+
+from mmdet.apis import (async_inference_detector, inference_detector,
+                        init_detector)
+from mmdet.utils.contextmanagers import concurrent
+from mmdet.utils.profiling import profile_time
+
+
+async def main():
+    """Benchmark between async and synchronous inference interfaces.
+
+    Sample runs for 20 demo images on K80 GPU, model - mask_rcnn_r50_fpn_1x:
+
+    async       sync
+
+    7981.79 ms  9660.82 ms
+    8074.52 ms  9660.94 ms
+    7976.44 ms  9406.83 ms
+
+    Async variant takes about 0.83-0.85 of the time of the synchronous
+    interface.
+ """ + project_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) + project_dir = os.path.join(project_dir, '..') + + config_file = os.path.join( + project_dir, 'configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py') + checkpoint_file = os.path.join( + project_dir, + 'checkpoints/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth') + + if not os.path.exists(checkpoint_file): + url = ('https://download.openmmlab.com/mmdetection/v2.0' + '/mask_rcnn/mask_rcnn_r50_fpn_1x_coco' + '/mask_rcnn_r50_fpn_1x_coco_20200205-d4b0c5d6.pth') + print(f'Downloading {url} ...') + local_filename, _ = urllib.request.urlretrieve(url) + os.makedirs(os.path.dirname(checkpoint_file), exist_ok=True) + shutil.move(local_filename, checkpoint_file) + print(f'Saved as {checkpoint_file}') + else: + print(f'Using existing checkpoint {checkpoint_file}') + + device = 'cuda:0' + model = init_detector( + config_file, checkpoint=checkpoint_file, device=device) + + # queue is used for concurrent inference of multiple images + streamqueue = asyncio.Queue() + # queue size defines concurrency level + streamqueue_size = 4 + + for _ in range(streamqueue_size): + streamqueue.put_nowait(torch.cuda.Stream(device=device)) + + # test a single image and show the results + img = mmcv.imread(os.path.join(project_dir, 'demo/demo.jpg')) + + # warmup + await async_inference_detector(model, img) + + async def detect(img): + async with concurrent(streamqueue): + return await async_inference_detector(model, img) + + num_of_images = 20 + with profile_time('benchmark', 'async'): + tasks = [ + asyncio.create_task(detect(img)) for _ in range(num_of_images) + ] + async_results = await asyncio.gather(*tasks) + + with torch.cuda.stream(torch.cuda.default_stream()): + with profile_time('benchmark', 'sync'): + sync_results = [ + inference_detector(model, img) for _ in range(num_of_images) + ] + + result_dir = os.path.join(project_dir, 'demo') + model.show_result( + img, + async_results[0], + score_thr=0.5, + show=False, + out_file=os.path.join(result_dir, 'result_async.jpg')) + model.show_result( + img, + sync_results[0], + score_thr=0.5, + show=False, + out_file=os.path.join(result_dir, 'result_sync.jpg')) + + +if __name__ == '__main__': + asyncio.run(main()) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/test_async.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/test_async.py new file mode 100644 index 0000000000000000000000000000000000000000..e9733f61bab0bcc97cccdd5844c7d6da6499e92a --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/test_async.py @@ -0,0 +1,82 @@ +"""Tests for async interface.""" + +import asyncio +import os +import sys + +import asynctest +import mmcv +import torch + +from mmdet.apis import async_inference_detector, init_detector + +if sys.version_info >= (3, 7): + from mmdet.utils.contextmanagers import concurrent + + +class AsyncTestCase(asynctest.TestCase): + use_default_loop = False + forbid_get_event_loop = True + + TEST_TIMEOUT = int(os.getenv('ASYNCIO_TEST_TIMEOUT', '30')) + + def _run_test_method(self, method): + result = method() + if asyncio.iscoroutine(result): + self.loop.run_until_complete( + asyncio.wait_for(result, timeout=self.TEST_TIMEOUT)) + + +class MaskRCNNDetector: + + def __init__(self, + model_config, + checkpoint=None, + streamqueue_size=3, + device='cuda:0'): + + self.streamqueue_size = streamqueue_size + self.device = device + # build the model and load checkpoint + self.model = init_detector( + model_config, 
checkpoint=None, device=self.device) + self.streamqueue = None + + async def init(self): + self.streamqueue = asyncio.Queue() + for _ in range(self.streamqueue_size): + stream = torch.cuda.Stream(device=self.device) + self.streamqueue.put_nowait(stream) + + if sys.version_info >= (3, 7): + + async def apredict(self, img): + if isinstance(img, str): + img = mmcv.imread(img) + async with concurrent(self.streamqueue): + result = await async_inference_detector(self.model, img) + return result + + +class AsyncInferenceTestCase(AsyncTestCase): + + if sys.version_info >= (3, 7): + + async def test_simple_inference(self): + if not torch.cuda.is_available(): + import pytest + + pytest.skip('test requires GPU and torch+cuda') + + ori_grad_enabled = torch.is_grad_enabled() + root_dir = os.path.dirname(os.path.dirname(__name__)) + model_config = os.path.join( + root_dir, 'configs/mask_rcnn/mask_rcnn_r50_fpn_1x_coco.py') + detector = MaskRCNNDetector(model_config) + await detector.init() + img_path = os.path.join(root_dir, 'demo/demo.jpg') + bboxes, _ = await detector.apredict(img_path) + self.assertTrue(bboxes) + # asy inference detector will hack grad_enabled, + # so restore here to avoid it to influence other tests + torch.set_grad_enabled(ori_grad_enabled) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/test_config.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/test_config.py new file mode 100644 index 0000000000000000000000000000000000000000..54c358a1ed7686ad2661ea4fd8bd472df08af515 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/test_config.py @@ -0,0 +1,426 @@ +from os.path import dirname, exists, join, relpath +from unittest.mock import Mock + +import pytest +import torch +from mmcv.runner import build_optimizer + +from mmdet.core import BitmapMasks, PolygonMasks +from mmdet.datasets.builder import DATASETS +from mmdet.datasets.utils import NumClassCheckHook + + +def _get_config_directory(): + """Find the predefined detector config directory.""" + try: + # Assume we are running in the source mmdetection repo + repo_dpath = dirname(dirname(__file__)) + repo_dpath = join(repo_dpath, '..') + except NameError: + # For IPython development when this __file__ is not defined + import mmdet + repo_dpath = dirname(dirname(mmdet.__file__)) + config_dpath = join(repo_dpath, 'configs') + if not exists(config_dpath): + raise Exception('Cannot find config path') + return config_dpath + + +def _check_numclasscheckhook(detector, config_mod): + dummy_runner = Mock() + dummy_runner.model = detector + + def get_dataset_name_classes(dataset): + # deal with `RepeatDataset`,`ConcatDataset`,`ClassBalancedDataset`.. 
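+        # unwrap until the innermost dataset config, which carries the
+        # actual `type` and `classes` keys, is reached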
+        if isinstance(dataset, (list, tuple)):
+            dataset = dataset[0]
+        while ('dataset' in dataset):
+            dataset = dataset['dataset']
+            # ConcatDataset
+            if isinstance(dataset, (list, tuple)):
+                dataset = dataset[0]
+        return dataset['type'], dataset.get('classes', None)
+
+    compatible_check = NumClassCheckHook()
+    dataset_name, CLASSES = get_dataset_name_classes(
+        config_mod['data']['train'])
+    if CLASSES is None:
+        CLASSES = DATASETS.get(dataset_name).CLASSES
+    dummy_runner.data_loader.dataset.CLASSES = CLASSES
+    compatible_check.before_train_epoch(dummy_runner)
+
+    dummy_runner.data_loader.dataset.CLASSES = None
+    compatible_check.before_train_epoch(dummy_runner)
+
+    dataset_name, CLASSES = get_dataset_name_classes(config_mod['data']['val'])
+    if CLASSES is None:
+        CLASSES = DATASETS.get(dataset_name).CLASSES
+    dummy_runner.data_loader.dataset.CLASSES = CLASSES
+    compatible_check.before_val_epoch(dummy_runner)
+    dummy_runner.data_loader.dataset.CLASSES = None
+    compatible_check.before_val_epoch(dummy_runner)
+
+
+def test_config_build_detector():
+    """Test that all detection models defined in the configs can be
+    initialized."""
+    from mmcv import Config
+    from mmdet.models import build_detector
+
+    config_dpath = _get_config_directory()
+    print(f'Found config_dpath = {config_dpath}')
+
+    import glob
+    config_fpaths = list(glob.glob(join(config_dpath, '**', '*.py')))
+    config_fpaths = [
+        p for p in config_fpaths
+        if p.find('_base_') == -1 and p.find('common') == -1
+    ]
+    config_names = [relpath(p, config_dpath) for p in config_fpaths]
+
+    print(f'Using {len(config_names)} config files')
+
+    for config_fname in config_names:
+        config_fpath = join(config_dpath, config_fname)
+        config_mod = Config.fromfile(config_fpath)
+        config_mod.model
+        print(f'Building detector, config_fpath = {config_fpath}')
+
+        # Remove pretrained keys to allow for testing in an offline environment
+        if 'pretrained' in config_mod.model:
+            config_mod.model['pretrained'] = None
+
+        detector = build_detector(config_mod.model)
+        assert detector is not None
+
+        _check_numclasscheckhook(detector, config_mod)
+
+        optimizer = build_optimizer(detector, config_mod.optimizer)
+        assert isinstance(optimizer, torch.optim.Optimizer)
+
+        if 'roi_head' in config_mod.model.keys():
+            # for two stage detector
+            # detectors must have bbox head
+            assert detector.roi_head.with_bbox and detector.with_bbox
+            assert detector.roi_head.with_mask == detector.with_mask
+
+            head_config = config_mod.model['roi_head']
+            _check_roi_head(head_config, detector.roi_head)
+
+        # else:
+        #     # for single stage detector
+        #     # detectors must have bbox head
+        #     # assert detector.with_bbox
+        #     head_config = config_mod.model['bbox_head']
+        #     _check_bbox_head(head_config, detector.bbox_head)
+
+
+def _check_roi_head(config, head):
+    # check consistency between head_config and roi_head
+    assert config['type'] == head.__class__.__name__
+
+    # check roi_align
+    bbox_roi_cfg = config.bbox_roi_extractor
+    bbox_roi_extractor = head.bbox_roi_extractor
+    _check_roi_extractor(bbox_roi_cfg, bbox_roi_extractor)
+
+    # check bbox head infos
+    bbox_cfg = config.bbox_head
+    bbox_head = head.bbox_head
+    _check_bbox_head(bbox_cfg, bbox_head)
+
+    if head.with_mask:
+        # check roi_align
+        if config.mask_roi_extractor:
+            mask_roi_cfg = config.mask_roi_extractor
+            mask_roi_extractor = head.mask_roi_extractor
+            _check_roi_extractor(mask_roi_cfg, mask_roi_extractor,
+                                 bbox_roi_extractor)
+
+        # check mask head infos
+        mask_head = head.mask_head
+        mask_cfg = config.mask_head
+        _check_mask_head(mask_cfg, mask_head)
+
+    # check arch specific settings, e.g., cascade/htc
+    if config['type'] in ['CascadeRoIHead', 'HybridTaskCascadeRoIHead']:
+        assert config.num_stages == len(head.bbox_head)
+        assert config.num_stages == len(head.bbox_roi_extractor)
+
+        if head.with_mask:
+            assert config.num_stages == len(head.mask_head)
+            assert config.num_stages == len(head.mask_roi_extractor)
+
+    elif config['type'] in ['MaskScoringRoIHead']:
+        assert (hasattr(head, 'mask_iou_head')
+                and head.mask_iou_head is not None)
+        mask_iou_cfg = config.mask_iou_head
+        mask_iou_head = head.mask_iou_head
+        assert (mask_iou_cfg.fc_out_channels ==
+                mask_iou_head.fc_mask_iou.in_features)
+
+    elif config['type'] in ['GridRoIHead']:
+        grid_roi_cfg = config.grid_roi_extractor
+        grid_roi_extractor = head.grid_roi_extractor
+        _check_roi_extractor(grid_roi_cfg, grid_roi_extractor,
+                             bbox_roi_extractor)
+
+        assert config.grid_head.grid_points == head.grid_head.grid_points
+
+
+def _check_roi_extractor(config, roi_extractor, prev_roi_extractor=None):
+    import torch.nn as nn
+    # Separate roi_extractor and prev_roi_extractor checks for flexibility
+    if isinstance(roi_extractor, nn.ModuleList):
+        roi_extractor = roi_extractor[0]
+    if prev_roi_extractor and isinstance(prev_roi_extractor, nn.ModuleList):
+        prev_roi_extractor = prev_roi_extractor[0]
+
+    assert (len(config.featmap_strides) == len(roi_extractor.roi_layers))
+    assert (config.out_channels == roi_extractor.out_channels)
+    from torch.nn.modules.utils import _pair
+    assert (_pair(config.roi_layer.output_size) ==
+            roi_extractor.roi_layers[0].output_size)
+
+    if 'use_torchvision' in config.roi_layer:
+        assert (config.roi_layer.use_torchvision ==
+                roi_extractor.roi_layers[0].use_torchvision)
+    elif 'aligned' in config.roi_layer:
+        assert (
+            config.roi_layer.aligned == roi_extractor.roi_layers[0].aligned)
+
+    if prev_roi_extractor:
+        assert (roi_extractor.roi_layers[0].aligned ==
+                prev_roi_extractor.roi_layers[0].aligned)
+        assert (roi_extractor.roi_layers[0].use_torchvision ==
+                prev_roi_extractor.roi_layers[0].use_torchvision)
+
+
+def _check_mask_head(mask_cfg, mask_head):
+    import torch.nn as nn
+    if isinstance(mask_cfg, list):
+        for single_mask_cfg, single_mask_head in zip(mask_cfg, mask_head):
+            _check_mask_head(single_mask_cfg, single_mask_head)
+    elif isinstance(mask_head, nn.ModuleList):
+        for single_mask_head in mask_head:
+            _check_mask_head(mask_cfg, single_mask_head)
+    else:
+        assert mask_cfg['type'] == mask_head.__class__.__name__
+        assert mask_cfg.in_channels == mask_head.in_channels
+        class_agnostic = mask_cfg.get('class_agnostic', False)
+        out_dim = (1 if class_agnostic else mask_cfg.num_classes)
+        if hasattr(mask_head, 'conv_logits'):
+            assert (mask_cfg.conv_out_channels ==
+                    mask_head.conv_logits.in_channels)
+            assert mask_head.conv_logits.out_channels == out_dim
+        else:
+            assert mask_cfg.fc_out_channels == mask_head.fc_logits.in_features
+            assert (mask_head.fc_logits.out_features == out_dim *
+                    mask_head.output_area)
+
+
+def _check_bbox_head(bbox_cfg, bbox_head):
+    import torch.nn as nn
+    if isinstance(bbox_cfg, list):
+        for single_bbox_cfg, single_bbox_head in zip(bbox_cfg, bbox_head):
+            _check_bbox_head(single_bbox_cfg, single_bbox_head)
+    elif isinstance(bbox_head, nn.ModuleList):
+        for single_bbox_head in bbox_head:
+            _check_bbox_head(bbox_cfg, single_bbox_head)
+    else:
+        assert bbox_cfg['type'] == bbox_head.__class__.__name__
+        if bbox_cfg['type'] == 'SABLHead':
+            assert bbox_cfg.cls_in_channels == bbox_head.cls_in_channels
+            assert bbox_cfg.reg_in_channels == bbox_head.reg_in_channels
+
+            cls_out_channels = bbox_cfg.get('cls_out_channels', 1024)
+            assert (cls_out_channels == bbox_head.fc_cls.in_features)
+            assert (bbox_cfg.num_classes + 1 == bbox_head.fc_cls.out_features)
+
+        elif bbox_cfg['type'] == 'DIIHead':
+            assert bbox_cfg['num_ffn_fcs'] == bbox_head.ffn.num_fcs
+            # 3 means FC, LN and ReLU
+            assert bbox_cfg['num_cls_fcs'] == len(bbox_head.cls_fcs) // 3
+            assert bbox_cfg['num_reg_fcs'] == len(bbox_head.reg_fcs) // 3
+            assert bbox_cfg['in_channels'] == bbox_head.in_channels
+            assert bbox_cfg['in_channels'] == bbox_head.fc_cls.in_features
+            assert bbox_cfg['in_channels'] == bbox_head.fc_reg.in_features
+            assert bbox_cfg['in_channels'] == bbox_head.attention.embed_dims
+            assert bbox_cfg[
+                'feedforward_channels'] == bbox_head.ffn.feedforward_channels
+
+        else:
+            assert bbox_cfg.in_channels == bbox_head.in_channels
+            with_cls = bbox_cfg.get('with_cls', True)
+
+            if with_cls:
+                fc_out_channels = bbox_cfg.get('fc_out_channels', 2048)
+                assert (fc_out_channels == bbox_head.fc_cls.in_features)
+                if bbox_head.custom_cls_channels:
+                    assert (bbox_head.loss_cls.get_cls_channels(
+                        bbox_head.num_classes) == bbox_head.fc_cls.out_features
+                            )
+                else:
+                    assert (bbox_cfg.num_classes +
+                            1 == bbox_head.fc_cls.out_features)
+            with_reg = bbox_cfg.get('with_reg', True)
+            if with_reg:
+                out_dim = (4 if bbox_cfg.reg_class_agnostic else 4 *
+                           bbox_cfg.num_classes)
+                assert bbox_head.fc_reg.out_features == out_dim
+
+
+def _check_anchorhead(config, head):
+    # check consistency between head_config and roi_head
+    assert config['type'] == head.__class__.__name__
+    assert config.in_channels == head.in_channels
+
+    num_classes = (
+        config.num_classes -
+        1 if config.loss_cls.get('use_sigmoid', False) else config.num_classes)
+    if config['type'] == 'ATSSHead':
+        assert (config.feat_channels == head.atss_cls.in_channels)
+        assert (config.feat_channels == head.atss_reg.in_channels)
+        assert (config.feat_channels == head.atss_centerness.in_channels)
+    elif config['type'] == 'SABLRetinaHead':
+        assert (config.feat_channels == head.retina_cls.in_channels)
+        assert (config.feat_channels == head.retina_bbox_reg.in_channels)
+        assert (config.feat_channels == head.retina_bbox_cls.in_channels)
+    else:
+        assert (config.in_channels == head.conv_cls.in_channels)
+        assert (config.in_channels == head.conv_reg.in_channels)
+        assert (head.conv_cls.out_channels == num_classes * head.num_anchors)
+        assert head.conv_reg.out_channels == 4 * head.num_anchors
+
+
+# Only tests a representative subset of configurations
+# TODO: test pipelines using Albu; currently Albu throws None given empty GT
+@pytest.mark.parametrize(
+    'config_rpath',
+    [
+        'wider_face/ssd300_wider_face.py',
+        'pascal_voc/ssd300_voc0712.py',
+        'pascal_voc/ssd512_voc0712.py',
+        # 'albu_example/mask_rcnn_r50_fpn_1x.py',
+        'foveabox/fovea_align_r50_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py',
+        'mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain-poly_1x_coco.py',
+        'mask_rcnn/mask_rcnn_r50_caffe_fpn_mstrain_1x_coco.py',
+        'fp16/mask_rcnn_r50_fpn_fp16_1x_coco.py'
+    ])
+def test_config_data_pipeline(config_rpath):
+    """Test whether the data pipeline is valid and can process corner cases.
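+
+    Runs the train and test pipelines on dummy inputs, with both normal and
+    empty ground truth.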
+ + CommandLine: + xdoctest -m tests/test_runtime/ + test_config.py test_config_data_pipeline + """ + from mmcv import Config + from mmdet.datasets.pipelines import Compose + import numpy as np + + config_dpath = _get_config_directory() + print(f'Found config_dpath = {config_dpath}') + + def dummy_masks(h, w, num_obj=3, mode='bitmap'): + assert mode in ('polygon', 'bitmap') + if mode == 'bitmap': + masks = np.random.randint(0, 2, (num_obj, h, w), dtype=np.uint8) + masks = BitmapMasks(masks, h, w) + else: + masks = [] + for i in range(num_obj): + masks.append([]) + masks[-1].append( + np.random.uniform(0, min(h - 1, w - 1), (8 + 4 * i, ))) + masks[-1].append( + np.random.uniform(0, min(h - 1, w - 1), (10 + 4 * i, ))) + masks = PolygonMasks(masks, h, w) + return masks + + config_fpath = join(config_dpath, config_rpath) + cfg = Config.fromfile(config_fpath) + + # remove loading pipeline + loading_pipeline = cfg.train_pipeline.pop(0) + loading_ann_pipeline = cfg.train_pipeline.pop(0) + cfg.test_pipeline.pop(0) + + train_pipeline = Compose(cfg.train_pipeline) + test_pipeline = Compose(cfg.test_pipeline) + + print(f'Building data pipeline, config_fpath = {config_fpath}') + + print(f'Test training data pipeline: \n{train_pipeline!r}') + img = np.random.randint(0, 255, size=(888, 666, 3), dtype=np.uint8) + if loading_pipeline.get('to_float32', False): + img = img.astype(np.float32) + mode = 'bitmap' if loading_ann_pipeline.get('poly2mask', + True) else 'polygon' + results = dict( + filename='test_img.png', + ori_filename='test_img.png', + img=img, + img_shape=img.shape, + ori_shape=img.shape, + gt_bboxes=np.array([[35.2, 11.7, 39.7, 15.7]], dtype=np.float32), + gt_labels=np.array([1], dtype=np.int64), + gt_masks=dummy_masks(img.shape[0], img.shape[1], mode=mode), + ) + results['img_fields'] = ['img'] + results['bbox_fields'] = ['gt_bboxes'] + results['mask_fields'] = ['gt_masks'] + output_results = train_pipeline(results) + assert output_results is not None + + print(f'Test testing data pipeline: \n{test_pipeline!r}') + results = dict( + filename='test_img.png', + ori_filename='test_img.png', + img=img, + img_shape=img.shape, + ori_shape=img.shape, + gt_bboxes=np.array([[35.2, 11.7, 39.7, 15.7]], dtype=np.float32), + gt_labels=np.array([1], dtype=np.int64), + gt_masks=dummy_masks(img.shape[0], img.shape[1], mode=mode), + ) + results['img_fields'] = ['img'] + results['bbox_fields'] = ['gt_bboxes'] + results['mask_fields'] = ['gt_masks'] + output_results = test_pipeline(results) + assert output_results is not None + + # test empty GT + print('Test empty GT with training data pipeline: ' + f'\n{train_pipeline!r}') + results = dict( + filename='test_img.png', + ori_filename='test_img.png', + img=img, + img_shape=img.shape, + ori_shape=img.shape, + gt_bboxes=np.zeros((0, 4), dtype=np.float32), + gt_labels=np.array([], dtype=np.int64), + gt_masks=dummy_masks(img.shape[0], img.shape[1], num_obj=0, mode=mode), + ) + results['img_fields'] = ['img'] + results['bbox_fields'] = ['gt_bboxes'] + results['mask_fields'] = ['gt_masks'] + output_results = train_pipeline(results) + assert output_results is not None + + print(f'Test empty GT with testing data pipeline: \n{test_pipeline!r}') + results = dict( + filename='test_img.png', + ori_filename='test_img.png', + img=img, + img_shape=img.shape, + ori_shape=img.shape, + gt_bboxes=np.zeros((0, 4), dtype=np.float32), + gt_labels=np.array([], dtype=np.int64), + gt_masks=dummy_masks(img.shape[0], img.shape[1], num_obj=0, mode=mode), + ) + results['img_fields'] = 
['img'] + results['bbox_fields'] = ['gt_bboxes'] + results['mask_fields'] = ['gt_masks'] + output_results = test_pipeline(results) + assert output_results is not None diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/test_eval_hook.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/test_eval_hook.py new file mode 100644 index 0000000000000000000000000000000000000000..435f99bd0074899e2d258e98bb3bb298eb058ba2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/test_eval_hook.py @@ -0,0 +1,251 @@ +import os.path as osp +import tempfile +import unittest.mock as mock +from collections import OrderedDict +from unittest.mock import MagicMock, patch + +import pytest +import torch +import torch.nn as nn +from mmcv.runner import EpochBasedRunner, build_optimizer +from mmcv.utils import get_logger +from torch.utils.data import DataLoader, Dataset + +from mmdet.core import DistEvalHook, EvalHook + + +class ExampleDataset(Dataset): + + def __init__(self): + self.index = 0 + self.eval_result = [0.1, 0.4, 0.3, 0.7, 0.2, 0.05, 0.4, 0.6] + + def __getitem__(self, idx): + results = dict(imgs=torch.tensor([1])) + return results + + def __len__(self): + return 1 + + @mock.create_autospec + def evaluate(self, results, logger=None): + pass + + +class EvalDataset(ExampleDataset): + + def evaluate(self, results, logger=None): + mean_ap = self.eval_result[self.index] + output = OrderedDict(mAP=mean_ap, index=self.index, score=mean_ap) + self.index += 1 + return output + + +class ExampleModel(nn.Module): + + def __init__(self): + super().__init__() + self.conv = nn.Linear(1, 1) + self.test_cfg = None + + def forward(self, imgs, rescale=False, return_loss=False): + return imgs + + def train_step(self, data_batch, optimizer, **kwargs): + outputs = { + 'loss': 0.5, + 'log_vars': { + 'accuracy': 0.98 + }, + 'num_samples': 1 + } + return outputs + + +@pytest.mark.skipif( + not torch.cuda.is_available(), reason='requires CUDA support') +@patch('mmdet.apis.single_gpu_test', MagicMock) +@patch('mmdet.apis.multi_gpu_test', MagicMock) +@pytest.mark.parametrize('EvalHookCls', (EvalHook, DistEvalHook)) +def test_eval_hook(EvalHookCls): + with pytest.raises(TypeError): + # dataloader must be a pytorch DataLoader + test_dataset = ExampleDataset() + data_loader = [ + DataLoader( + test_dataset, + batch_size=1, + sampler=None, + num_workers=0, + shuffle=False) + ] + EvalHookCls(data_loader) + + with pytest.raises(KeyError): + # rule must be in keys of rule_map + test_dataset = ExampleDataset() + data_loader = DataLoader( + test_dataset, + batch_size=1, + sampler=None, + num_workers=0, + shuffle=False) + EvalHookCls(data_loader, save_best='auto', rule='unsupport') + + with pytest.raises(ValueError): + # key_indicator must be valid when rule_map is None + test_dataset = ExampleDataset() + data_loader = DataLoader( + test_dataset, + batch_size=1, + sampler=None, + num_workers=0, + shuffle=False) + EvalHookCls(data_loader, save_best='unsupport') + + optimizer_cfg = dict( + type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001) + + test_dataset = ExampleDataset() + loader = DataLoader(test_dataset, batch_size=1) + model = ExampleModel() + optimizer = build_optimizer(model, optimizer_cfg) + + data_loader = DataLoader(test_dataset, batch_size=1) + eval_hook = EvalHookCls(data_loader, save_best=None) + with tempfile.TemporaryDirectory() as tmpdir: + logger = get_logger('test_eval') + runner = EpochBasedRunner( + model=model, + batch_processor=None, + 
optimizer=optimizer, + work_dir=tmpdir, + logger=logger) + runner.register_hook(eval_hook) + runner.run([loader], [('train', 1)], 1) + assert runner.meta is None or 'best_score' not in runner.meta[ + 'hook_msgs'] + assert runner.meta is None or 'best_ckpt' not in runner.meta[ + 'hook_msgs'] + + # when `save_best` is set to 'auto', first metric will be used. + loader = DataLoader(EvalDataset(), batch_size=1) + model = ExampleModel() + data_loader = DataLoader(EvalDataset(), batch_size=1) + eval_hook = EvalHookCls(data_loader, interval=1, save_best='auto') + + with tempfile.TemporaryDirectory() as tmpdir: + logger = get_logger('test_eval') + runner = EpochBasedRunner( + model=model, + batch_processor=None, + optimizer=optimizer, + work_dir=tmpdir, + logger=logger) + runner.register_checkpoint_hook(dict(interval=1)) + runner.register_hook(eval_hook) + runner.run([loader], [('train', 1)], 8) + + real_path = osp.join(tmpdir, 'best_mAP_epoch_4.pth') + + assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(real_path) + assert runner.meta['hook_msgs']['best_score'] == 0.7 + + loader = DataLoader(EvalDataset(), batch_size=1) + model = ExampleModel() + data_loader = DataLoader(EvalDataset(), batch_size=1) + eval_hook = EvalHookCls(data_loader, interval=1, save_best='mAP') + + with tempfile.TemporaryDirectory() as tmpdir: + logger = get_logger('test_eval') + runner = EpochBasedRunner( + model=model, + batch_processor=None, + optimizer=optimizer, + work_dir=tmpdir, + logger=logger) + runner.register_checkpoint_hook(dict(interval=1)) + runner.register_hook(eval_hook) + runner.run([loader], [('train', 1)], 8) + + real_path = osp.join(tmpdir, 'best_mAP_epoch_4.pth') + + assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(real_path) + assert runner.meta['hook_msgs']['best_score'] == 0.7 + + data_loader = DataLoader(EvalDataset(), batch_size=1) + eval_hook = EvalHookCls( + data_loader, interval=1, save_best='score', rule='greater') + with tempfile.TemporaryDirectory() as tmpdir: + logger = get_logger('test_eval') + runner = EpochBasedRunner( + model=model, + batch_processor=None, + optimizer=optimizer, + work_dir=tmpdir, + logger=logger) + runner.register_checkpoint_hook(dict(interval=1)) + runner.register_hook(eval_hook) + runner.run([loader], [('train', 1)], 8) + + real_path = osp.join(tmpdir, 'best_score_epoch_4.pth') + + assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(real_path) + assert runner.meta['hook_msgs']['best_score'] == 0.7 + + data_loader = DataLoader(EvalDataset(), batch_size=1) + eval_hook = EvalHookCls(data_loader, save_best='mAP', rule='less') + with tempfile.TemporaryDirectory() as tmpdir: + logger = get_logger('test_eval') + runner = EpochBasedRunner( + model=model, + batch_processor=None, + optimizer=optimizer, + work_dir=tmpdir, + logger=logger) + runner.register_checkpoint_hook(dict(interval=1)) + runner.register_hook(eval_hook) + runner.run([loader], [('train', 1)], 8) + + real_path = osp.join(tmpdir, 'best_mAP_epoch_6.pth') + + assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(real_path) + assert runner.meta['hook_msgs']['best_score'] == 0.05 + + data_loader = DataLoader(EvalDataset(), batch_size=1) + eval_hook = EvalHookCls(data_loader, save_best='mAP') + with tempfile.TemporaryDirectory() as tmpdir: + logger = get_logger('test_eval') + runner = EpochBasedRunner( + model=model, + batch_processor=None, + optimizer=optimizer, + work_dir=tmpdir, + logger=logger) + runner.register_checkpoint_hook(dict(interval=1)) + 
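# only 2 training epochs: the best mAP seen so far is eval_result[1] == 0.4, checkpointed at epoch 2 +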
runner.register_hook(eval_hook) + runner.run([loader], [('train', 1)], 2) + + real_path = osp.join(tmpdir, 'best_mAP_epoch_2.pth') + + assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(real_path) + assert runner.meta['hook_msgs']['best_score'] == 0.4 + + resume_from = osp.join(tmpdir, 'latest.pth') + loader = DataLoader(ExampleDataset(), batch_size=1) + eval_hook = EvalHookCls(data_loader, save_best='mAP') + runner = EpochBasedRunner( + model=model, + batch_processor=None, + optimizer=optimizer, + work_dir=tmpdir, + logger=logger) + runner.register_checkpoint_hook(dict(interval=1)) + runner.register_hook(eval_hook) + runner.resume(resume_from) + runner.run([loader], [('train', 1)], 8) + + real_path = osp.join(tmpdir, 'best_mAP_epoch_4.pth') + + assert runner.meta['hook_msgs']['best_ckpt'] == osp.realpath(real_path) + assert runner.meta['hook_msgs']['best_score'] == 0.7 diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/test_fp16.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/test_fp16.py new file mode 100644 index 0000000000000000000000000000000000000000..d43fb19c5bfde8aae4a75cc37ef22d0226e76fc3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_runtime/test_fp16.py @@ -0,0 +1,300 @@ +import numpy as np +import pytest +import torch +import torch.nn as nn +from mmcv.runner import auto_fp16, force_fp32 +from mmcv.runner.fp16_utils import cast_tensor_type + + +def test_cast_tensor_type(): + inputs = torch.FloatTensor([5.]) + src_type = torch.float32 + dst_type = torch.int32 + outputs = cast_tensor_type(inputs, src_type, dst_type) + assert isinstance(outputs, torch.Tensor) + assert outputs.dtype == dst_type + + inputs = 'tensor' + src_type = str + dst_type = str + outputs = cast_tensor_type(inputs, src_type, dst_type) + assert isinstance(outputs, str) + + inputs = np.array([5.]) + src_type = np.ndarray + dst_type = np.ndarray + outputs = cast_tensor_type(inputs, src_type, dst_type) + assert isinstance(outputs, np.ndarray) + + inputs = dict( + tensor_a=torch.FloatTensor([1.]), tensor_b=torch.FloatTensor([2.])) + src_type = torch.float32 + dst_type = torch.int32 + outputs = cast_tensor_type(inputs, src_type, dst_type) + assert isinstance(outputs, dict) + assert outputs['tensor_a'].dtype == dst_type + assert outputs['tensor_b'].dtype == dst_type + + inputs = [torch.FloatTensor([1.]), torch.FloatTensor([2.])] + src_type = torch.float32 + dst_type = torch.int32 + outputs = cast_tensor_type(inputs, src_type, dst_type) + assert isinstance(outputs, list) + assert outputs[0].dtype == dst_type + assert outputs[1].dtype == dst_type + + inputs = 5 + outputs = cast_tensor_type(inputs, None, None) + assert isinstance(outputs, int) + + +def test_auto_fp16(): + + with pytest.raises(TypeError): + # ExampleObject is not a subclass of nn.Module + + class ExampleObject: + + @auto_fp16() + def __call__(self, x): + return x + + model = ExampleObject() + input_x = torch.ones(1, dtype=torch.float32) + model(input_x) + + # apply to all input args + class ExampleModule(nn.Module): + + @auto_fp16() + def forward(self, x, y): + return x, y + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.float32) + input_y = torch.ones(1, dtype=torch.float32) + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + + model.fp16_enabled = True + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + 
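# the decorator should behave the same for CUDA tensors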
+ if torch.cuda.is_available(): + model.cuda() + output_x, output_y = model(input_x.cuda(), input_y.cuda()) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + + # apply to specified input args + class ExampleModule(nn.Module): + + @auto_fp16(apply_to=('x', )) + def forward(self, x, y): + return x, y + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.float32) + input_y = torch.ones(1, dtype=torch.float32) + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + + model.fp16_enabled = True + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.float32 + + if torch.cuda.is_available(): + model.cuda() + output_x, output_y = model(input_x.cuda(), input_y.cuda()) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.float32 + + # apply to optional input args + class ExampleModule(nn.Module): + + @auto_fp16(apply_to=('x', 'y')) + def forward(self, x, y=None, z=None): + return x, y, z + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.float32) + input_y = torch.ones(1, dtype=torch.float32) + input_z = torch.ones(1, dtype=torch.float32) + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + assert output_z.dtype == torch.float32 + + model.fp16_enabled = True + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + assert output_z.dtype == torch.float32 + + if torch.cuda.is_available(): + model.cuda() + output_x, output_y, output_z = model( + input_x.cuda(), y=input_y.cuda(), z=input_z.cuda()) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + assert output_z.dtype == torch.float32 + + # out_fp32=True + class ExampleModule(nn.Module): + + @auto_fp16(apply_to=('x', 'y'), out_fp32=True) + def forward(self, x, y=None, z=None): + return x, y, z + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.half) + input_y = torch.ones(1, dtype=torch.float32) + input_z = torch.ones(1, dtype=torch.float32) + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.float32 + assert output_z.dtype == torch.float32 + + model.fp16_enabled = True + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + assert output_z.dtype == torch.float32 + + if torch.cuda.is_available(): + model.cuda() + output_x, output_y, output_z = model( + input_x.cuda(), y=input_y.cuda(), z=input_z.cuda()) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + assert output_z.dtype == torch.float32 + + +def test_force_fp32(): + + with pytest.raises(TypeError): + # ExampleObject is not a subclass of nn.Module + + class ExampleObject: + + @force_fp32() + def __call__(self, x): + return x + + model = ExampleObject() + input_x = torch.ones(1, dtype=torch.float32) + model(input_x) + + # apply to all input args + class ExampleModule(nn.Module): + + @force_fp32() + def forward(self, x, y): + return x, y + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.half) + input_y = torch.ones(1, dtype=torch.half) + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.half + assert 
output_y.dtype == torch.half + + model.fp16_enabled = True + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + + if torch.cuda.is_available(): + model.cuda() + output_x, output_y = model(input_x.cuda(), input_y.cuda()) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + + # apply to specified input args + class ExampleModule(nn.Module): + + @force_fp32(apply_to=('x', )) + def forward(self, x, y): + return x, y + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.half) + input_y = torch.ones(1, dtype=torch.half) + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + + model.fp16_enabled = True + output_x, output_y = model(input_x, input_y) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.half + + if torch.cuda.is_available(): + model.cuda() + output_x, output_y = model(input_x.cuda(), input_y.cuda()) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.half + + # apply to optional input args + class ExampleModule(nn.Module): + + @force_fp32(apply_to=('x', 'y')) + def forward(self, x, y=None, z=None): + return x, y, z + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.half) + input_y = torch.ones(1, dtype=torch.half) + input_z = torch.ones(1, dtype=torch.half) + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + assert output_z.dtype == torch.half + + model.fp16_enabled = True + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + assert output_z.dtype == torch.half + + if torch.cuda.is_available(): + model.cuda() + output_x, output_y, output_z = model( + input_x.cuda(), y=input_y.cuda(), z=input_z.cuda()) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.float32 + assert output_z.dtype == torch.half + + # out_fp16=True + class ExampleModule(nn.Module): + + @force_fp32(apply_to=('x', 'y'), out_fp16=True) + def forward(self, x, y=None, z=None): + return x, y, z + + model = ExampleModule() + input_x = torch.ones(1, dtype=torch.float32) + input_y = torch.ones(1, dtype=torch.half) + input_z = torch.ones(1, dtype=torch.half) + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.float32 + assert output_y.dtype == torch.half + assert output_z.dtype == torch.half + + model.fp16_enabled = True + output_x, output_y, output_z = model(input_x, y=input_y, z=input_z) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + assert output_z.dtype == torch.half + + if torch.cuda.is_available(): + model.cuda() + output_x, output_y, output_z = model( + input_x.cuda(), y=input_y.cuda(), z=input_z.cuda()) + assert output_x.dtype == torch.half + assert output_y.dtype == torch.half + assert output_z.dtype == torch.half diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_anchor.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_anchor.py new file mode 100644 index 0000000000000000000000000000000000000000..f871b1b654a9a4f71b3a8c1ba9d45dd3a81f17b3 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_anchor.py @@ -0,0 +1,674 @@ +""" +CommandLine: + pytest tests/test_utils/test_anchor.py + xdoctest 
tests/test_utils/test_anchor.py zero + + """ +import pytest +import torch + + +def test_standard_points_generator(): + from mmdet.core.anchor import build_prior_generator + # test init + anchor_generator_cfg = dict( + type='MlvlPointGenerator', strides=[4, 8], offset=0) + anchor_generator = build_prior_generator(anchor_generator_cfg) + assert anchor_generator is not None + assert anchor_generator.num_base_priors == [1, 1] + # test_stride + from mmdet.core.anchor import MlvlPointGenerator + + # Square strides + mlvl_points = MlvlPointGenerator(strides=[4, 10], offset=0) + mlvl_points_half_stride_generator = MlvlPointGenerator( + strides=[4, 10], offset=0.5) + assert mlvl_points.num_levels == 2 + + # assert self.num_levels == len(featmap_sizes) + with pytest.raises(AssertionError): + mlvl_points.grid_priors(featmap_sizes=[(2, 2)], device='cpu') + priors = mlvl_points.grid_priors( + featmap_sizes=[(2, 2), (4, 8)], device='cpu') + priors_with_stride = mlvl_points.grid_priors( + featmap_sizes=[(2, 2), (4, 8)], with_stride=True, device='cpu') + assert len(priors) == 2 + + # assert last dimension is (coord_x, coord_y, stride_w, stride_h). + assert priors_with_stride[0].size(1) == 4 + assert priors_with_stride[0][0][2] == 4 + assert priors_with_stride[0][0][3] == 4 + assert priors_with_stride[1][0][2] == 10 + assert priors_with_stride[1][0][3] == 10 + + stride_4_feat_2_2 = priors[0] + assert (stride_4_feat_2_2[1] - stride_4_feat_2_2[0]).sum() == 4 + assert stride_4_feat_2_2.size(0) == 4 + assert stride_4_feat_2_2.size(1) == 2 + + stride_10_feat_4_8 = priors[1] + assert (stride_10_feat_4_8[1] - stride_10_feat_4_8[0]).sum() == 10 + assert stride_10_feat_4_8.size(0) == 4 * 8 + assert stride_10_feat_4_8.size(1) == 2 + + # assert the offset of 0.5 * stride + priors_half_offset = mlvl_points_half_stride_generator.grid_priors( + featmap_sizes=[(2, 2), (4, 8)], device='cpu') + + assert (priors_half_offset[0][0] - priors[0][0]).sum() == 4 * 0.5 * 2 + assert (priors_half_offset[1][0] - priors[1][0]).sum() == 10 * 0.5 * 2 + if torch.cuda.is_available(): + anchor_generator_cfg = dict( + type='MlvlPointGenerator', strides=[4, 8], offset=0) + anchor_generator = build_prior_generator(anchor_generator_cfg) + assert anchor_generator is not None + # Square strides + mlvl_points = MlvlPointGenerator(strides=[4, 10], offset=0) + mlvl_points_half_stride_generator = MlvlPointGenerator( + strides=[4, 10], offset=0.5) + assert mlvl_points.num_levels == 2 + + # assert self.num_levels == len(featmap_sizes) + with pytest.raises(AssertionError): + mlvl_points.grid_priors(featmap_sizes=[(2, 2)], device='cuda') + priors = mlvl_points.grid_priors( + featmap_sizes=[(2, 2), (4, 8)], device='cuda') + priors_with_stride = mlvl_points.grid_priors( + featmap_sizes=[(2, 2), (4, 8)], with_stride=True, device='cuda') + assert len(priors) == 2 + + # assert last dimension is (coord_x, coord_y, stride_w, stride_h). 
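+ # (these mirror the CPU checks above, now on CUDA tensors)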
+ assert priors_with_stride[0].size(1) == 4 + assert priors_with_stride[0][0][2] == 4 + assert priors_with_stride[0][0][3] == 4 + assert priors_with_stride[1][0][2] == 10 + assert priors_with_stride[1][0][3] == 10 + + stride_4_feat_2_2 = priors[0] + assert (stride_4_feat_2_2[1] - stride_4_feat_2_2[0]).sum() == 4 + assert stride_4_feat_2_2.size(0) == 4 + assert stride_4_feat_2_2.size(1) == 2 + + stride_10_feat_4_8 = priors[1] + assert (stride_10_feat_4_8[1] - stride_10_feat_4_8[0]).sum() == 10 + assert stride_10_feat_4_8.size(0) == 4 * 8 + assert stride_10_feat_4_8.size(1) == 2 + + # assert the offset of 0.5 * stride + priors_half_offset = mlvl_points_half_stride_generator.grid_priors( + featmap_sizes=[(2, 2), (4, 8)], device='cuda') + + assert (priors_half_offset[0][0] - priors[0][0]).sum() == 4 * 0.5 * 2 + assert (priors_half_offset[1][0] - priors[1][0]).sum() == 10 * 0.5 * 2 + + +def test_sparse_prior(): + from mmdet.core.anchor import MlvlPointGenerator + mlvl_points = MlvlPointGenerator(strides=[4, 10], offset=0) + prior_indexs = torch.Tensor([0, 2, 4, 5, 6, 9]).long() + + featmap_sizes = [(3, 5), (6, 4)] + grid_anchors = mlvl_points.grid_priors( + featmap_sizes=featmap_sizes, with_stride=False, device='cpu') + sparse_prior = mlvl_points.sparse_priors( + prior_idxs=prior_indexs, + featmap_size=featmap_sizes[0], + level_idx=0, + device='cpu') + + assert not sparse_prior.is_cuda + assert (sparse_prior == grid_anchors[0][prior_indexs]).all() + sparse_prior = mlvl_points.sparse_priors( + prior_idxs=prior_indexs, + featmap_size=featmap_sizes[1], + level_idx=1, + device='cpu') + assert (sparse_prior == grid_anchors[1][prior_indexs]).all() + + from mmdet.core.anchor import AnchorGenerator + mlvl_anchors = AnchorGenerator( + strides=[16, 32], ratios=[1.], scales=[1.], base_sizes=[4, 8]) + prior_indexs = torch.Tensor([0, 2, 4, 5, 6, 9]).long() + + featmap_sizes = [(3, 5), (6, 4)] + grid_anchors = mlvl_anchors.grid_priors( + featmap_sizes=featmap_sizes, device='cpu') + sparse_prior = mlvl_anchors.sparse_priors( + prior_idxs=prior_indexs, + featmap_size=featmap_sizes[0], + level_idx=0, + device='cpu') + assert (sparse_prior == grid_anchors[0][prior_indexs]).all() + sparse_prior = mlvl_anchors.sparse_priors( + prior_idxs=prior_indexs, + featmap_size=featmap_sizes[1], + level_idx=1, + device='cpu') + assert (sparse_prior == grid_anchors[1][prior_indexs]).all() + + # for ssd + from mmdet.core.anchor.anchor_generator import SSDAnchorGenerator + featmap_sizes = [(38, 38), (19, 19), (10, 10)] + anchor_generator = SSDAnchorGenerator( + scale_major=False, + input_size=300, + basesize_ratio_range=(0.15, 0.9), + strides=[8, 16, 32], + ratios=[[2], [2, 3], [2, 3]]) + ssd_anchors = anchor_generator.grid_anchors(featmap_sizes, device='cpu') + for i in range(len(featmap_sizes)): + sparse_ssd_anchors = anchor_generator.sparse_priors( + prior_idxs=prior_indexs, + level_idx=i, + featmap_size=featmap_sizes[i], + device='cpu') + assert (sparse_ssd_anchors == ssd_anchors[i][prior_indexs]).all() + + # for yolo + from mmdet.core.anchor.anchor_generator import YOLOAnchorGenerator + featmap_sizes = [(38, 38), (19, 19), (10, 10)] + anchor_generator = YOLOAnchorGenerator( + strides=[32, 16, 8], + base_sizes=[ + [(116, 90), (156, 198), (373, 326)], + [(30, 61), (62, 45), (59, 119)], + [(10, 13), (16, 30), (33, 23)], + ]) + yolo_anchors = anchor_generator.grid_anchors(featmap_sizes, device='cpu') + for i in range(len(featmap_sizes)): + sparse_yolo_anchors = anchor_generator.sparse_priors( + prior_idxs=prior_indexs, + 
level_idx=i, + featmap_size=featmap_sizes[i], + device='cpu') + assert (sparse_yolo_anchors == yolo_anchors[i][prior_indexs]).all() + + if torch.cuda.is_available(): + mlvl_points = MlvlPointGenerator(strides=[4, 10], offset=0) + prior_indexs = torch.Tensor([0, 3, 4, 5, 6, 7, 1, 2, 4, 5, 6, + 9]).long().cuda() + + featmap_sizes = [(6, 8), (6, 4)] + grid_anchors = mlvl_points.grid_priors( + featmap_sizes=featmap_sizes, with_stride=False, device='cuda') + sparse_prior = mlvl_points.sparse_priors( + prior_idxs=prior_indexs, + featmap_size=featmap_sizes[0], + level_idx=0, + device='cuda') + assert (sparse_prior == grid_anchors[0][prior_indexs]).all() + sparse_prior = mlvl_points.sparse_priors( + prior_idxs=prior_indexs, + featmap_size=featmap_sizes[1], + level_idx=1, + device='cuda') + assert (sparse_prior == grid_anchors[1][prior_indexs]).all() + assert sparse_prior.is_cuda + mlvl_anchors = AnchorGenerator( + strides=[16, 32], + ratios=[1., 2.5], + scales=[1., 5.], + base_sizes=[4, 8]) + prior_indexs = torch.Tensor([4, 5, 6, 7, 0, 2, 50, 4, 5, 6, + 9]).long().cuda() + + featmap_sizes = [(13, 5), (16, 4)] + grid_anchors = mlvl_anchors.grid_priors( + featmap_sizes=featmap_sizes, device='cuda') + sparse_prior = mlvl_anchors.sparse_priors( + prior_idxs=prior_indexs, + featmap_size=featmap_sizes[0], + level_idx=0, + device='cuda') + assert (sparse_prior == grid_anchors[0][prior_indexs]).all() + sparse_prior = mlvl_anchors.sparse_priors( + prior_idxs=prior_indexs, + featmap_size=featmap_sizes[1], + level_idx=1, + device='cuda') + assert (sparse_prior == grid_anchors[1][prior_indexs]).all() + + # for ssd + from mmdet.core.anchor.anchor_generator import SSDAnchorGenerator + featmap_sizes = [(38, 38), (19, 19), (10, 10)] + anchor_generator = SSDAnchorGenerator( + scale_major=False, + input_size=300, + basesize_ratio_range=(0.15, 0.9), + strides=[8, 16, 32], + ratios=[[2], [2, 3], [2, 3]]) + ssd_anchors = anchor_generator.grid_anchors( + featmap_sizes, device='cuda') + for i in range(len(featmap_sizes)): + sparse_ssd_anchors = anchor_generator.sparse_priors( + prior_idxs=prior_indexs, + level_idx=i, + featmap_size=featmap_sizes[i], + device='cuda') + assert (sparse_ssd_anchors == ssd_anchors[i][prior_indexs]).all() + + # for yolo + from mmdet.core.anchor.anchor_generator import YOLOAnchorGenerator + featmap_sizes = [(38, 38), (19, 19), (10, 10)] + anchor_generator = YOLOAnchorGenerator( + strides=[32, 16, 8], + base_sizes=[ + [(116, 90), (156, 198), (373, 326)], + [(30, 61), (62, 45), (59, 119)], + [(10, 13), (16, 30), (33, 23)], + ]) + yolo_anchors = anchor_generator.grid_anchors( + featmap_sizes, device='cuda') + for i in range(len(featmap_sizes)): + sparse_yolo_anchors = anchor_generator.sparse_priors( + prior_idxs=prior_indexs, + level_idx=i, + featmap_size=featmap_sizes[i], + device='cuda') + assert (sparse_yolo_anchors == yolo_anchors[i][prior_indexs]).all() + + +def test_standard_anchor_generator(): + from mmdet.core.anchor import build_anchor_generator + anchor_generator_cfg = dict( + type='AnchorGenerator', + scales=[8], + ratios=[0.5, 1.0, 2.0], + strides=[4, 8]) + + anchor_generator = build_anchor_generator(anchor_generator_cfg) + assert anchor_generator.num_base_priors == \ + anchor_generator.num_base_anchors + assert anchor_generator.num_base_priors == [3, 3] + assert anchor_generator is not None + + +def test_strides(): + from mmdet.core import AnchorGenerator + # Square strides + self = AnchorGenerator([10], [1.], [1.], [10]) + anchors = self.grid_anchors([(2, 2)], device='cpu') + + 
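# base size 10 with stride 10 and zero offset puts anchor centers at (0, 0), (10, 0), (0, 10) and (10, 10) +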
expected_anchors = torch.tensor([[-5., -5., 5., 5.], [5., -5., 15., 5.], + [-5., 5., 5., 15.], [5., 5., 15., 15.]]) + + assert torch.equal(anchors[0], expected_anchors) + + # Different strides in x and y direction + self = AnchorGenerator([(10, 20)], [1.], [1.], [10]) + anchors = self.grid_anchors([(2, 2)], device='cpu') + + expected_anchors = torch.tensor([[-5., -5., 5., 5.], [5., -5., 15., 5.], + [-5., 15., 5., 25.], [5., 15., 15., 25.]]) + + assert torch.equal(anchors[0], expected_anchors) + + +def test_ssd_anchor_generator(): + from mmdet.core.anchor import build_anchor_generator + if torch.cuda.is_available(): + device = 'cuda' + else: + device = 'cpu' + + anchor_generator_cfg = dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=300, + basesize_ratio_range=(0.15, 0.9), + strides=[8, 16, 32, 64, 100, 300], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]]) + + featmap_sizes = [(38, 38), (19, 19), (10, 10), (5, 5), (3, 3), (1, 1)] + anchor_generator = build_anchor_generator(anchor_generator_cfg) + + # check base anchors + expected_base_anchors = [ + torch.Tensor([[-6.5000, -6.5000, 14.5000, 14.5000], + [-11.3704, -11.3704, 19.3704, 19.3704], + [-10.8492, -3.4246, 18.8492, 11.4246], + [-3.4246, -10.8492, 11.4246, 18.8492]]), + torch.Tensor([[-14.5000, -14.5000, 30.5000, 30.5000], + [-25.3729, -25.3729, 41.3729, 41.3729], + [-23.8198, -7.9099, 39.8198, 23.9099], + [-7.9099, -23.8198, 23.9099, 39.8198], + [-30.9711, -4.9904, 46.9711, 20.9904], + [-4.9904, -30.9711, 20.9904, 46.9711]]), + torch.Tensor([[-33.5000, -33.5000, 65.5000, 65.5000], + [-45.5366, -45.5366, 77.5366, 77.5366], + [-54.0036, -19.0018, 86.0036, 51.0018], + [-19.0018, -54.0036, 51.0018, 86.0036], + [-69.7365, -12.5788, 101.7365, 44.5788], + [-12.5788, -69.7365, 44.5788, 101.7365]]), + torch.Tensor([[-44.5000, -44.5000, 108.5000, 108.5000], + [-56.9817, -56.9817, 120.9817, 120.9817], + [-76.1873, -22.0937, 140.1873, 86.0937], + [-22.0937, -76.1873, 86.0937, 140.1873], + [-100.5019, -12.1673, 164.5019, 76.1673], + [-12.1673, -100.5019, 76.1673, 164.5019]]), + torch.Tensor([[-53.5000, -53.5000, 153.5000, 153.5000], + [-66.2185, -66.2185, 166.2185, 166.2185], + [-96.3711, -23.1855, 196.3711, 123.1855], + [-23.1855, -96.3711, 123.1855, 196.3711]]), + torch.Tensor([[19.5000, 19.5000, 280.5000, 280.5000], + [6.6342, 6.6342, 293.3658, 293.3658], + [-34.5549, 57.7226, 334.5549, 242.2774], + [57.7226, -34.5549, 242.2774, 334.5549]]), + ] + base_anchors = anchor_generator.base_anchors + for i, base_anchor in enumerate(base_anchors): + assert base_anchor.allclose(expected_base_anchors[i]) + + # check valid flags + expected_valid_pixels = [5776, 2166, 600, 150, 36, 4] + multi_level_valid_flags = anchor_generator.valid_flags( + featmap_sizes, (300, 300), device) + for i, single_level_valid_flag in enumerate(multi_level_valid_flags): + assert single_level_valid_flag.sum() == expected_valid_pixels[i] + + # check number of base anchors for each level + assert anchor_generator.num_base_anchors == [4, 6, 6, 6, 4, 4] + + # check anchor generation + anchors = anchor_generator.grid_anchors(featmap_sizes, device) + assert len(anchors) == 6 + + +def test_anchor_generator_with_tuples(): + from mmdet.core.anchor import build_anchor_generator + if torch.cuda.is_available(): + device = 'cuda' + else: + device = 'cpu' + + anchor_generator_cfg = dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=300, + basesize_ratio_range=(0.15, 0.9), + strides=[8, 16, 32, 64, 100, 300], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2], 
[2]]) + + featmap_sizes = [(38, 38), (19, 19), (10, 10), (5, 5), (3, 3), (1, 1)] + anchor_generator = build_anchor_generator(anchor_generator_cfg) + anchors = anchor_generator.grid_anchors(featmap_sizes, device) + + anchor_generator_cfg_tuples = dict( + type='SSDAnchorGenerator', + scale_major=False, + input_size=300, + basesize_ratio_range=(0.15, 0.9), + strides=[(8, 8), (16, 16), (32, 32), (64, 64), (100, 100), (300, 300)], + ratios=[[2], [2, 3], [2, 3], [2, 3], [2], [2]]) + + anchor_generator_tuples = build_anchor_generator( + anchor_generator_cfg_tuples) + anchors_tuples = anchor_generator_tuples.grid_anchors( + featmap_sizes, device) + for anchor, anchor_tuples in zip(anchors, anchors_tuples): + assert torch.equal(anchor, anchor_tuples) + + +def test_yolo_anchor_generator(): + from mmdet.core.anchor import build_anchor_generator + if torch.cuda.is_available(): + device = 'cuda' + else: + device = 'cpu' + + anchor_generator_cfg = dict( + type='YOLOAnchorGenerator', + strides=[32, 16, 8], + base_sizes=[ + [(116, 90), (156, 198), (373, 326)], + [(30, 61), (62, 45), (59, 119)], + [(10, 13), (16, 30), (33, 23)], + ]) + + featmap_sizes = [(14, 18), (28, 36), (56, 72)] + anchor_generator = build_anchor_generator(anchor_generator_cfg) + + # check base anchors + expected_base_anchors = [ + torch.Tensor([[-42.0000, -29.0000, 74.0000, 61.0000], + [-62.0000, -83.0000, 94.0000, 115.0000], + [-170.5000, -147.0000, 202.5000, 179.0000]]), + torch.Tensor([[-7.0000, -22.5000, 23.0000, 38.5000], + [-23.0000, -14.5000, 39.0000, 30.5000], + [-21.5000, -51.5000, 37.5000, 67.5000]]), + torch.Tensor([[-1.0000, -2.5000, 9.0000, 10.5000], + [-4.0000, -11.0000, 12.0000, 19.0000], + [-12.5000, -7.5000, 20.5000, 15.5000]]) + ] + base_anchors = anchor_generator.base_anchors + for i, base_anchor in enumerate(base_anchors): + assert base_anchor.allclose(expected_base_anchors[i]) + + # check number of base anchors for each level + assert anchor_generator.num_base_anchors == [3, 3, 3] + + # check anchor generation + anchors = anchor_generator.grid_anchors(featmap_sizes, device) + assert len(anchors) == 3 + + +def test_retina_anchor(): + from mmdet.models import build_head + if torch.cuda.is_available(): + device = 'cuda' + else: + device = 'cpu' + + # head configs modified from + # configs/nas_fpn/retinanet_r50_fpn_crop640_50e.py + bbox_head = dict( + type='RetinaSepBNHead', + num_classes=4, + num_ins=5, + in_channels=4, + stacked_convs=1, + feat_channels=4, + anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + bbox_coder=dict( + type='DeltaXYWHBBoxCoder', + target_means=[.0, .0, .0, .0], + target_stds=[1.0, 1.0, 1.0, 1.0])) + + retina_head = build_head(bbox_head) + assert retina_head.anchor_generator is not None + + # use the featmap sizes in NASFPN setting to test retina head + featmap_sizes = [(80, 80), (40, 40), (20, 20), (10, 10), (5, 5)] + # check base anchors + expected_base_anchors = [ + torch.Tensor([[-22.6274, -11.3137, 22.6274, 11.3137], + [-28.5088, -14.2544, 28.5088, 14.2544], + [-35.9188, -17.9594, 35.9188, 17.9594], + [-16.0000, -16.0000, 16.0000, 16.0000], + [-20.1587, -20.1587, 20.1587, 20.1587], + [-25.3984, -25.3984, 25.3984, 25.3984], + [-11.3137, -22.6274, 11.3137, 22.6274], + [-14.2544, -28.5088, 14.2544, 28.5088], + [-17.9594, -35.9188, 17.9594, 35.9188]]), + torch.Tensor([[-45.2548, -22.6274, 45.2548, 22.6274], + [-57.0175, -28.5088, 57.0175, 28.5088], + [-71.8376, -35.9188, 71.8376, 
35.9188], + [-32.0000, -32.0000, 32.0000, 32.0000], + [-40.3175, -40.3175, 40.3175, 40.3175], + [-50.7968, -50.7968, 50.7968, 50.7968], + [-22.6274, -45.2548, 22.6274, 45.2548], + [-28.5088, -57.0175, 28.5088, 57.0175], + [-35.9188, -71.8376, 35.9188, 71.8376]]), + torch.Tensor([[-90.5097, -45.2548, 90.5097, 45.2548], + [-114.0350, -57.0175, 114.0350, 57.0175], + [-143.6751, -71.8376, 143.6751, 71.8376], + [-64.0000, -64.0000, 64.0000, 64.0000], + [-80.6349, -80.6349, 80.6349, 80.6349], + [-101.5937, -101.5937, 101.5937, 101.5937], + [-45.2548, -90.5097, 45.2548, 90.5097], + [-57.0175, -114.0350, 57.0175, 114.0350], + [-71.8376, -143.6751, 71.8376, 143.6751]]), + torch.Tensor([[-181.0193, -90.5097, 181.0193, 90.5097], + [-228.0701, -114.0350, 228.0701, 114.0350], + [-287.3503, -143.6751, 287.3503, 143.6751], + [-128.0000, -128.0000, 128.0000, 128.0000], + [-161.2699, -161.2699, 161.2699, 161.2699], + [-203.1873, -203.1873, 203.1873, 203.1873], + [-90.5097, -181.0193, 90.5097, 181.0193], + [-114.0350, -228.0701, 114.0350, 228.0701], + [-143.6751, -287.3503, 143.6751, 287.3503]]), + torch.Tensor([[-362.0387, -181.0193, 362.0387, 181.0193], + [-456.1401, -228.0701, 456.1401, 228.0701], + [-574.7006, -287.3503, 574.7006, 287.3503], + [-256.0000, -256.0000, 256.0000, 256.0000], + [-322.5398, -322.5398, 322.5398, 322.5398], + [-406.3747, -406.3747, 406.3747, 406.3747], + [-181.0193, -362.0387, 181.0193, 362.0387], + [-228.0701, -456.1401, 228.0701, 456.1401], + [-287.3503, -574.7006, 287.3503, 574.7006]]) + ] + base_anchors = retina_head.anchor_generator.base_anchors + for i, base_anchor in enumerate(base_anchors): + assert base_anchor.allclose(expected_base_anchors[i]) + + # check valid flags + expected_valid_pixels = [57600, 14400, 3600, 900, 225] + multi_level_valid_flags = retina_head.anchor_generator.valid_flags( + featmap_sizes, (640, 640), device) + for i, single_level_valid_flag in enumerate(multi_level_valid_flags): + assert single_level_valid_flag.sum() == expected_valid_pixels[i] + + # check number of base anchors for each level + assert retina_head.anchor_generator.num_base_anchors == [9, 9, 9, 9, 9] + + # check anchor generation + anchors = retina_head.anchor_generator.grid_anchors(featmap_sizes, device) + assert len(anchors) == 5 + + +def test_guided_anchor(): + from mmdet.models import build_head + if torch.cuda.is_available(): + device = 'cuda' + else: + device = 'cpu' + # head configs modified from + # configs/guided_anchoring/ga_retinanet_r50_fpn_1x_coco.py + bbox_head = dict( + type='GARetinaHead', + num_classes=8, + in_channels=4, + stacked_convs=1, + feat_channels=4, + approx_anchor_generator=dict( + type='AnchorGenerator', + octave_base_scale=4, + scales_per_octave=3, + ratios=[0.5, 1.0, 2.0], + strides=[8, 16, 32, 64, 128]), + square_anchor_generator=dict( + type='AnchorGenerator', + ratios=[1.0], + scales=[4], + strides=[8, 16, 32, 64, 128])) + + ga_retina_head = build_head(bbox_head) + assert ga_retina_head.approx_anchor_generator is not None + + # use the featmap sizes in NASFPN setting to test ga_retina_head + featmap_sizes = [(100, 152), (50, 76), (25, 38), (13, 19), (7, 10)] + # check base anchors + expected_approxs = [ + torch.Tensor([[-22.6274, -11.3137, 22.6274, 11.3137], + [-28.5088, -14.2544, 28.5088, 14.2544], + [-35.9188, -17.9594, 35.9188, 17.9594], + [-16.0000, -16.0000, 16.0000, 16.0000], + [-20.1587, -20.1587, 20.1587, 20.1587], + [-25.3984, -25.3984, 25.3984, 25.3984], + [-11.3137, -22.6274, 11.3137, 22.6274], + [-14.2544, -28.5088, 14.2544, 28.5088], + 
[-17.9594, -35.9188, 17.9594, 35.9188]]), + torch.Tensor([[-45.2548, -22.6274, 45.2548, 22.6274], + [-57.0175, -28.5088, 57.0175, 28.5088], + [-71.8376, -35.9188, 71.8376, 35.9188], + [-32.0000, -32.0000, 32.0000, 32.0000], + [-40.3175, -40.3175, 40.3175, 40.3175], + [-50.7968, -50.7968, 50.7968, 50.7968], + [-22.6274, -45.2548, 22.6274, 45.2548], + [-28.5088, -57.0175, 28.5088, 57.0175], + [-35.9188, -71.8376, 35.9188, 71.8376]]), + torch.Tensor([[-90.5097, -45.2548, 90.5097, 45.2548], + [-114.0350, -57.0175, 114.0350, 57.0175], + [-143.6751, -71.8376, 143.6751, 71.8376], + [-64.0000, -64.0000, 64.0000, 64.0000], + [-80.6349, -80.6349, 80.6349, 80.6349], + [-101.5937, -101.5937, 101.5937, 101.5937], + [-45.2548, -90.5097, 45.2548, 90.5097], + [-57.0175, -114.0350, 57.0175, 114.0350], + [-71.8376, -143.6751, 71.8376, 143.6751]]), + torch.Tensor([[-181.0193, -90.5097, 181.0193, 90.5097], + [-228.0701, -114.0350, 228.0701, 114.0350], + [-287.3503, -143.6751, 287.3503, 143.6751], + [-128.0000, -128.0000, 128.0000, 128.0000], + [-161.2699, -161.2699, 161.2699, 161.2699], + [-203.1873, -203.1873, 203.1873, 203.1873], + [-90.5097, -181.0193, 90.5097, 181.0193], + [-114.0350, -228.0701, 114.0350, 228.0701], + [-143.6751, -287.3503, 143.6751, 287.3503]]), + torch.Tensor([[-362.0387, -181.0193, 362.0387, 181.0193], + [-456.1401, -228.0701, 456.1401, 228.0701], + [-574.7006, -287.3503, 574.7006, 287.3503], + [-256.0000, -256.0000, 256.0000, 256.0000], + [-322.5398, -322.5398, 322.5398, 322.5398], + [-406.3747, -406.3747, 406.3747, 406.3747], + [-181.0193, -362.0387, 181.0193, 362.0387], + [-228.0701, -456.1401, 228.0701, 456.1401], + [-287.3503, -574.7006, 287.3503, 574.7006]]) + ] + approxs = ga_retina_head.approx_anchor_generator.base_anchors + for i, base_anchor in enumerate(approxs): + assert base_anchor.allclose(expected_approxs[i]) + + # check valid flags + expected_valid_pixels = [136800, 34200, 8550, 2223, 630] + multi_level_valid_flags = ga_retina_head.approx_anchor_generator \ + .valid_flags(featmap_sizes, (800, 1216), device) + for i, single_level_valid_flag in enumerate(multi_level_valid_flags): + assert single_level_valid_flag.sum() == expected_valid_pixels[i] + + # check number of base anchors for each level + assert ga_retina_head.approx_anchor_generator.num_base_anchors == [ + 9, 9, 9, 9, 9 + ] + + # check approx generation + squares = ga_retina_head.square_anchor_generator.grid_anchors( + featmap_sizes, device) + assert len(squares) == 5 + + expected_squares = [ + torch.Tensor([[-16., -16., 16., 16.]]), + torch.Tensor([[-32., -32., 32., 32]]), + torch.Tensor([[-64., -64., 64., 64.]]), + torch.Tensor([[-128., -128., 128., 128.]]), + torch.Tensor([[-256., -256., 256., 256.]]) + ] + squares = ga_retina_head.square_anchor_generator.base_anchors + for i, base_anchor in enumerate(squares): + assert base_anchor.allclose(expected_squares[i]) + + # square_anchor_generator does not check valid flags + # check number of base anchors for each level + assert (ga_retina_head.square_anchor_generator.num_base_anchors == [ + 1, 1, 1, 1, 1 + ]) + + # check square generation + anchors = ga_retina_head.square_anchor_generator.grid_anchors( + featmap_sizes, device) + assert len(anchors) == 5 diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_assigner.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_assigner.py new file mode 100644 index 0000000000000000000000000000000000000000..949234b6f57c41710ce96e9d5162e31fa6d477fc --- /dev/null +++ 
b/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_assigner.py @@ -0,0 +1,497 @@ +"""Tests the Assigner objects. + +CommandLine: + pytest tests/test_utils/test_assigner.py + xdoctest tests/test_utils/test_assigner.py zero +""" +import torch + +from mmdet.core.bbox.assigners import (ApproxMaxIoUAssigner, + CenterRegionAssigner, HungarianAssigner, + MaxIoUAssigner, PointAssigner, + UniformAssigner) + + +def test_max_iou_assigner(): + self = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_labels = torch.LongTensor([2, 3]) + assign_result = self.assign(bboxes, gt_bboxes, gt_labels=gt_labels) + assert len(assign_result.gt_inds) == 4 + assert len(assign_result.labels) == 4 + + expected_gt_inds = torch.LongTensor([1, 0, 2, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_max_iou_assigner_with_ignore(): + self = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ignore_wrt_candidates=False, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [30, 32, 40, 42], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_bboxes_ignore = torch.Tensor([ + [30, 30, 40, 40], + ]) + assign_result = self.assign( + bboxes, gt_bboxes, gt_bboxes_ignore=gt_bboxes_ignore) + + expected_gt_inds = torch.LongTensor([1, 0, 2, -1]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_max_iou_assigner_with_empty_gt(): + """Test corner case where an image might have no true detections.""" + self = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.empty(0, 4) + assign_result = self.assign(bboxes, gt_bboxes) + + expected_gt_inds = torch.LongTensor([0, 0, 0, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_max_iou_assigner_with_empty_boxes(): + """Test corner case where a network might predict no boxes.""" + self = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.empty((0, 4)) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_labels = torch.LongTensor([2, 3]) + + # Test with gt_labels + assign_result = self.assign(bboxes, gt_bboxes, gt_labels=gt_labels) + assert len(assign_result.gt_inds) == 0 + assert tuple(assign_result.labels.shape) == (0, ) + + # Test without gt_labels + assign_result = self.assign(bboxes, gt_bboxes, gt_labels=None) + assert len(assign_result.gt_inds) == 0 + assert assign_result.labels is None + + +def test_max_iou_assigner_with_empty_boxes_and_ignore(): + """Test corner case where a network might predict no boxes and + ignore_iof_thr is on.""" + self = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ignore_iof_thr=0.5, + ) + bboxes = torch.empty((0, 4)) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_bboxes_ignore = torch.Tensor([ + [30, 30, 40, 40], + ]) + gt_labels = torch.LongTensor([2, 3]) + + # Test with gt_labels + assign_result = self.assign( + bboxes, + gt_bboxes, + gt_labels=gt_labels, + gt_bboxes_ignore=gt_bboxes_ignore) + assert len(assign_result.gt_inds) == 0 + assert tuple(assign_result.labels.shape) == (0, ) + + # Test without gt_labels + assign_result = 
self.assign( + bboxes, gt_bboxes, gt_labels=None, gt_bboxes_ignore=gt_bboxes_ignore) + assert len(assign_result.gt_inds) == 0 + assert assign_result.labels is None + + +def test_max_iou_assigner_with_empty_boxes_and_gt(): + """Test corner case where a network might predict no boxes and no gt.""" + self = MaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.empty((0, 4)) + gt_bboxes = torch.empty((0, 4)) + assign_result = self.assign(bboxes, gt_bboxes) + assert len(assign_result.gt_inds) == 0 + + +def test_point_assigner(): + self = PointAssigner() + points = torch.FloatTensor([ # [x, y, stride] + [0, 0, 1], + [10, 10, 1], + [5, 5, 1], + [32, 32, 1], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + assign_result = self.assign(points, gt_bboxes) + expected_gt_inds = torch.LongTensor([1, 2, 1, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_point_assigner_with_empty_gt(): + """Test corner case where an image might have no true detections.""" + self = PointAssigner() + points = torch.FloatTensor([ # [x, y, stride] + [0, 0, 1], + [10, 10, 1], + [5, 5, 1], + [32, 32, 1], + ]) + gt_bboxes = torch.FloatTensor([]) + assign_result = self.assign(points, gt_bboxes) + + expected_gt_inds = torch.LongTensor([0, 0, 0, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_point_assigner_with_empty_boxes_and_gt(): + """Test corner case where a network might predict no points and no gt.""" + self = PointAssigner() + points = torch.FloatTensor([]) + gt_bboxes = torch.FloatTensor([]) + assign_result = self.assign(points, gt_bboxes) + assert len(assign_result.gt_inds) == 0 + + +def test_approx_iou_assigner(): + self = ApproxMaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + approxs_per_octave = 1 + approxs = bboxes + squares = bboxes + assign_result = self.assign(approxs, squares, approxs_per_octave, + gt_bboxes) + + expected_gt_inds = torch.LongTensor([1, 0, 2, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_approx_iou_assigner_with_empty_gt(): + """Test corner case where an image might have no true detections.""" + self = ApproxMaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.FloatTensor([]) + approxs_per_octave = 1 + approxs = bboxes + squares = bboxes + assign_result = self.assign(approxs, squares, approxs_per_octave, + gt_bboxes) + + expected_gt_inds = torch.LongTensor([0, 0, 0, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_approx_iou_assigner_with_empty_boxes(): + """Test corner case where a network might predict no boxes.""" + self = ApproxMaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = torch.empty((0, 4)) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + approxs_per_octave = 1 + approxs = bboxes + squares = bboxes + assign_result = self.assign(approxs, squares, approxs_per_octave, + gt_bboxes) + assert len(assign_result.gt_inds) == 0 + + +def test_approx_iou_assigner_with_empty_boxes_and_gt(): + """Test corner case where a network might predict no boxes and no gt.""" + self = ApproxMaxIoUAssigner( + pos_iou_thr=0.5, + neg_iou_thr=0.5, + ) + bboxes = 
torch.empty((0, 4)) + gt_bboxes = torch.empty((0, 4)) + approxs_per_octave = 1 + approxs = bboxes + squares = bboxes + assign_result = self.assign(approxs, squares, approxs_per_octave, + gt_bboxes) + assert len(assign_result.gt_inds) == 0 + + +def test_random_assign_result(): + """Test random instantiation of assign result to catch corner cases.""" + from mmdet.core.bbox.assigners.assign_result import AssignResult + AssignResult.random() + + AssignResult.random(num_gts=0, num_preds=0) + AssignResult.random(num_gts=0, num_preds=3) + AssignResult.random(num_gts=3, num_preds=3) + AssignResult.random(num_gts=0, num_preds=3) + AssignResult.random(num_gts=7, num_preds=7) + AssignResult.random(num_gts=7, num_preds=64) + AssignResult.random(num_gts=24, num_preds=3) + + +def test_center_region_assigner(): + self = CenterRegionAssigner(pos_scale=0.3, neg_scale=1) + bboxes = torch.FloatTensor([[0, 0, 10, 10], [10, 10, 20, 20], [8, 8, 9, + 9]]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 11, 11], # match bboxes[0] + [10, 10, 20, 20], # match bboxes[1] + [4.5, 4.5, 5.5, 5.5], # match bboxes[0] but area is too small + [0, 0, 10, 10], # match bboxes[1] and has a smaller area than gt[0] + ]) + gt_labels = torch.LongTensor([2, 3, 4, 5]) + assign_result = self.assign(bboxes, gt_bboxes, gt_labels=gt_labels) + assert len(assign_result.gt_inds) == 3 + assert len(assign_result.labels) == 3 + expected_gt_inds = torch.LongTensor([4, 2, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + shadowed_labels = assign_result.get_extra_property('shadowed_labels') + # [8, 8, 9, 9] in the shadowed region of [0, 0, 11, 11] (label: 2) + assert torch.any(shadowed_labels == torch.LongTensor([[2, 2]])) + # [8, 8, 9, 9] in the shadowed region of [0, 0, 10, 10] (label: 5) + assert torch.any(shadowed_labels == torch.LongTensor([[2, 5]])) + # [0, 0, 10, 10] is already assigned to [4.5, 4.5, 5.5, 5.5]. 
+ # Therefore, [0, 0, 11, 11] (label: 2) is shadowed + assert torch.any(shadowed_labels == torch.LongTensor([[0, 2]])) + + +def test_center_region_assigner_with_ignore(): + self = CenterRegionAssigner( + pos_scale=0.5, + neg_scale=1, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 10], # match bboxes[0] + [10, 10, 20, 20], # match bboxes[1] + ]) + gt_bboxes_ignore = torch.FloatTensor([ + [0, 0, 10, 10], # match bboxes[0] + ]) + gt_labels = torch.LongTensor([1, 2]) + assign_result = self.assign( + bboxes, + gt_bboxes, + gt_bboxes_ignore=gt_bboxes_ignore, + gt_labels=gt_labels) + assert len(assign_result.gt_inds) == 2 + assert len(assign_result.labels) == 2 + + expected_gt_inds = torch.LongTensor([-1, 2]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_center_region_assigner_with_empty_bboxes(): + self = CenterRegionAssigner( + pos_scale=0.5, + neg_scale=1, + ) + bboxes = torch.empty((0, 4)).float() + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 10], # match bboxes[0] + [10, 10, 20, 20], # match bboxes[1] + ]) + gt_labels = torch.LongTensor([1, 2]) + assign_result = self.assign(bboxes, gt_bboxes, gt_labels=gt_labels) + assert assign_result.gt_inds is None or assign_result.gt_inds.numel() == 0 + assert assign_result.labels is None or assign_result.labels.numel() == 0 + + +def test_center_region_assigner_with_empty_gts(): + self = CenterRegionAssigner( + pos_scale=0.5, + neg_scale=1, + ) + bboxes = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + ]) + gt_bboxes = torch.empty((0, 4)).float() + gt_labels = torch.empty((0, )).long() + assign_result = self.assign(bboxes, gt_bboxes, gt_labels=gt_labels) + assert len(assign_result.gt_inds) == 2 + expected_gt_inds = torch.LongTensor([0, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_hungarian_match_assigner(): + self = HungarianAssigner() + assert self.iou_cost.iou_mode == 'giou' + + # test no gt bboxes + bbox_pred = torch.rand((10, 4)) + cls_pred = torch.rand((10, 81)) + gt_bboxes = torch.empty((0, 4)).float() + gt_labels = torch.empty((0, )).long() + img_meta = dict(img_shape=(10, 8, 3)) + assign_result = self.assign(bbox_pred, cls_pred, gt_bboxes, gt_labels, + img_meta) + assert torch.all(assign_result.gt_inds == 0) + assert torch.all(assign_result.labels == -1) + + # test with gt bboxes + gt_bboxes = torch.FloatTensor([[0, 0, 5, 7], [3, 5, 7, 8]]) + gt_labels = torch.LongTensor([1, 20]) + assign_result = self.assign(bbox_pred, cls_pred, gt_bboxes, gt_labels, + img_meta) + assert torch.all(assign_result.gt_inds > -1) + assert (assign_result.gt_inds > 0).sum() == gt_bboxes.size(0) + assert (assign_result.labels > -1).sum() == gt_bboxes.size(0) + + # test iou mode + self = HungarianAssigner( + iou_cost=dict(type='IoUCost', iou_mode='iou', weight=1.0)) + assert self.iou_cost.iou_mode == 'iou' + assign_result = self.assign(bbox_pred, cls_pred, gt_bboxes, gt_labels, + img_meta) + assert torch.all(assign_result.gt_inds > -1) + assert (assign_result.gt_inds > 0).sum() == gt_bboxes.size(0) + assert (assign_result.labels > -1).sum() == gt_bboxes.size(0) + + # test focal loss mode + self = HungarianAssigner( + iou_cost=dict(type='IoUCost', iou_mode='giou', weight=1.0), + cls_cost=dict(type='FocalLossCost', weight=1.)) + assert self.iou_cost.iou_mode == 'giou' + assign_result = self.assign(bbox_pred, cls_pred, gt_bboxes, gt_labels, + img_meta) + assert torch.all(assign_result.gt_inds > -1) + assert 
(assign_result.gt_inds > 0).sum() == gt_bboxes.size(0) + assert (assign_result.labels > -1).sum() == gt_bboxes.size(0) + + +def test_uniform_assigner(): + self = UniformAssigner(0.15, 0.7, 1) + pred_bbox = torch.FloatTensor([ + [1, 1, 12, 8], + [4, 4, 20, 20], + [1, 5, 15, 15], + [30, 5, 32, 42], + ]) + anchor = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_labels = torch.LongTensor([2, 3]) + assign_result = self.assign( + pred_bbox, anchor, gt_bboxes, gt_labels=gt_labels) + assert len(assign_result.gt_inds) == 4 + assert len(assign_result.labels) == 4 + + expected_gt_inds = torch.LongTensor([-1, 0, 2, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_uniform_assigner_with_empty_gt(): + """Test corner case where an image might have no true detections.""" + self = UniformAssigner(0.15, 0.7, 1) + pred_bbox = torch.FloatTensor([ + [1, 1, 12, 8], + [4, 4, 20, 20], + [1, 5, 15, 15], + [30, 5, 32, 42], + ]) + anchor = torch.FloatTensor([ + [0, 0, 10, 10], + [10, 10, 20, 20], + [5, 5, 15, 15], + [32, 32, 38, 42], + ]) + gt_bboxes = torch.empty(0, 4) + assign_result = self.assign(pred_bbox, anchor, gt_bboxes) + + expected_gt_inds = torch.LongTensor([0, 0, 0, 0]) + assert torch.all(assign_result.gt_inds == expected_gt_inds) + + +def test_uniform_assigner_with_empty_boxes(): + """Test corner case where a network might predict no boxes.""" + self = UniformAssigner(0.15, 0.7, 1) + pred_bbox = torch.empty((0, 4)) + anchor = torch.empty((0, 4)) + gt_bboxes = torch.FloatTensor([ + [0, 0, 10, 9], + [0, 10, 10, 19], + ]) + gt_labels = torch.LongTensor([2, 3]) + + # Test with gt_labels + assign_result = self.assign( + pred_bbox, anchor, gt_bboxes, gt_labels=gt_labels) + assert len(assign_result.gt_inds) == 0 + assert tuple(assign_result.labels.shape) == (0, ) + + # Test without gt_labels + assign_result = self.assign(pred_bbox, anchor, gt_bboxes, gt_labels=None) + assert len(assign_result.gt_inds) == 0 diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_coder.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_coder.py new file mode 100644 index 0000000000000000000000000000000000000000..2dca4131978fe508d862bcc0b76d78b291b2f9e1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_coder.py @@ -0,0 +1,109 @@ +import pytest +import torch + +from mmdet.core.bbox.coder import (DeltaXYWHBBoxCoder, TBLRBBoxCoder, + YOLOBBoxCoder) + + +def test_yolo_bbox_coder(): + coder = YOLOBBoxCoder() + bboxes = torch.Tensor([[-42., -29., 74., 61.], [-10., -29., 106., 61.], + [22., -29., 138., 61.], [54., -29., 170., 61.]]) + pred_bboxes = torch.Tensor([[0.4709, 0.6152, 0.1690, -0.4056], + [0.5399, 0.6653, 0.1162, -0.4162], + [0.4654, 0.6618, 0.1548, -0.4301], + [0.4786, 0.6197, 0.1896, -0.4479]]) + grid_size = 32 + expected_decode_bboxes = torch.Tensor( + [[-53.6102, -10.3096, 83.7478, 49.6824], + [-15.8700, -8.3901, 114.4236, 50.9693], + [11.1822, -8.0924, 146.6034, 50.4476], + [41.2068, -8.9232, 181.4236, 48.5840]]) + assert expected_decode_bboxes.allclose( + coder.decode(bboxes, pred_bboxes, grid_size)) + + +def test_delta_bbox_coder(): + coder = DeltaXYWHBBoxCoder() + + rois = torch.Tensor([[0., 0., 1., 1.], [0., 0., 1., 1.], [0., 0., 1., 1.], + [5., 5., 5., 5.]]) + deltas = torch.Tensor([[0., 0., 0., 0.], [1., 1., 1., 1.], + [0., 0., 2., -1.], [0.7, -1.9, -0.5, 0.3]]) + 
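A quick hand check of where the expected values just below come from. DeltaXYWHBBoxCoder follows the standard R-CNN delta parameterisation (centre offsets scaled by the proposal size, log-space width and height); with the coder's default means (0., 0., 0., 0.) and stds (1., 1., 1., 1.) the second row works out as follows (a minimal sketch, not the library code):

```python
import math

# proposal (0, 0, 1, 1): width = height = 1, centre = (0.5, 0.5)
# deltas (dx, dy, dw, dh) = (1, 1, 1, 1)
cx, cy = 0.5 + 1.0 * 1.0, 0.5 + 1.0 * 1.0        # shifted centre -> (1.5, 1.5)
w, h = 1.0 * math.exp(1.0), 1.0 * math.exp(1.0)  # scaled size -> (2.7183, 2.7183)
decoded = (cx - w / 2, cy - h / 2, cx + w / 2, cy + h / 2)
# -> (0.1409, 0.1409, 2.8591, 2.8591), matching the second expected row
```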
expected_decode_bboxes = torch.Tensor([[0.0000, 0.0000, 1.0000, 1.0000], + [0.1409, 0.1409, 2.8591, 2.8591], + [0.0000, 0.3161, 4.1945, 0.6839], + [5.0000, 5.0000, 5.0000, 5.0000]]) + + out = coder.decode(rois, deltas, max_shape=(32, 32)) + assert expected_decode_bboxes.allclose(out, atol=1e-04) + out = coder.decode(rois, deltas, max_shape=torch.Tensor((32, 32))) + assert expected_decode_bboxes.allclose(out, atol=1e-04) + + batch_rois = rois.unsqueeze(0).repeat(2, 1, 1) + batch_deltas = deltas.unsqueeze(0).repeat(2, 1, 1) + batch_out = coder.decode(batch_rois, batch_deltas, max_shape=(32, 32))[0] + assert out.allclose(batch_out) + batch_out = coder.decode( + batch_rois, batch_deltas, max_shape=[(32, 32), (32, 32)])[0] + assert out.allclose(batch_out) + + # test max_shape is not equal to batch + with pytest.raises(AssertionError): + coder.decode( + batch_rois, batch_deltas, max_shape=[(32, 32), (32, 32), (32, 32)]) + + rois = torch.zeros((0, 4)) + deltas = torch.zeros((0, 4)) + out = coder.decode(rois, deltas, max_shape=(32, 32)) + assert rois.shape == out.shape + + # test add_ctr_clamp + coder = DeltaXYWHBBoxCoder(add_ctr_clamp=True, ctr_clamp=2) + + rois = torch.Tensor([[0., 0., 6., 6.], [0., 0., 1., 1.], [0., 0., 1., 1.], + [5., 5., 5., 5.]]) + deltas = torch.Tensor([[1., 1., 2., 2.], [1., 1., 1., 1.], + [0., 0., 2., -1.], [0.7, -1.9, -0.5, 0.3]]) + expected_decode_bboxes = torch.Tensor([[0.0000, 0.0000, 27.1672, 27.1672], + [0.1409, 0.1409, 2.8591, 2.8591], + [0.0000, 0.3161, 4.1945, 0.6839], + [5.0000, 5.0000, 5.0000, 5.0000]]) + + out = coder.decode(rois, deltas, max_shape=(32, 32)) + assert expected_decode_bboxes.allclose(out, atol=1e-04) + + +def test_tblr_bbox_coder(): + coder = TBLRBBoxCoder(normalizer=15.) + + rois = torch.Tensor([[0., 0., 1., 1.], [0., 0., 1., 1.], [0., 0., 1., 1.], + [5., 5., 5., 5.]]) + deltas = torch.Tensor([[0., 0., 0., 0.], [1., 1., 1., 1.], + [0., 0., 2., -1.], [0.7, -1.9, -0.5, 0.3]]) + expected_decode_bboxes = torch.Tensor([[0.5000, 0.5000, 0.5000, 0.5000], + [0.0000, 0.0000, 12.0000, 13.0000], + [0.0000, 0.5000, 0.0000, 0.5000], + [5.0000, 5.0000, 5.0000, 5.0000]]) + + out = coder.decode(rois, deltas, max_shape=(13, 12)) + assert expected_decode_bboxes.allclose(out) + out = coder.decode(rois, deltas, max_shape=torch.Tensor((13, 12))) + assert expected_decode_bboxes.allclose(out) + + batch_rois = rois.unsqueeze(0).repeat(2, 1, 1) + batch_deltas = deltas.unsqueeze(0).repeat(2, 1, 1) + batch_out = coder.decode(batch_rois, batch_deltas, max_shape=(13, 12))[0] + assert out.allclose(batch_out) + batch_out = coder.decode( + batch_rois, batch_deltas, max_shape=[(13, 12), (13, 12)])[0] + assert out.allclose(batch_out) + + # test max_shape is not equal to batch + with pytest.raises(AssertionError): + coder.decode(batch_rois, batch_deltas, max_shape=[(13, 12)]) + + rois = torch.zeros((0, 4)) + deltas = torch.zeros((0, 4)) + out = coder.decode(rois, deltas, max_shape=(32, 32)) + assert rois.shape == out.shape diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_masks.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_masks.py new file mode 100644 index 0000000000000000000000000000000000000000..808cf08c6cd16da0a07d91f28588faa05086acbf --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_masks.py @@ -0,0 +1,655 @@ +import numpy as np +import pytest +import torch + +from mmdet.core import BitmapMasks, PolygonMasks + + +def dummy_raw_bitmap_masks(size): + """ + Args: + size (tuple): 
expected shape of dummy masks, (H, W) or (N, H, W) + + Return: + ndarray: dummy mask + """ + return np.random.randint(0, 2, size, dtype=np.uint8) + + +def dummy_raw_polygon_masks(size): + """ + Args: + size (tuple): expected shape of dummy masks, (N, H, W) + + Return: + list[list[ndarray]]: dummy mask + """ + num_obj, height, width = size + polygons = [] + for _ in range(num_obj): + num_points = np.random.randint(5) * 2 + 6 + polygons.append([np.random.uniform(0, min(height, width), num_points)]) + return polygons + + +def dummy_bboxes(num, max_height, max_width): + x1y1 = np.random.randint(0, min(max_height // 2, max_width // 2), (num, 2)) + wh = np.random.randint(0, min(max_height // 2, max_width // 2), (num, 2)) + x2y2 = x1y1 + wh + return np.concatenate([x1y1, x2y2], axis=1).squeeze().astype(np.float32) + + +def test_bitmap_mask_init(): + # init with empty ndarray masks + raw_masks = np.empty((0, 28, 28), dtype=np.uint8) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + assert len(bitmap_masks) == 0 + assert bitmap_masks.height == 28 + assert bitmap_masks.width == 28 + + # init with empty list masks + raw_masks = [] + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + assert len(bitmap_masks) == 0 + assert bitmap_masks.height == 28 + assert bitmap_masks.width == 28 + + # init with ndarray masks contain 3 instances + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + assert len(bitmap_masks) == 3 + assert bitmap_masks.height == 28 + assert bitmap_masks.width == 28 + + # init with list masks contain 3 instances + raw_masks = [dummy_raw_bitmap_masks((28, 28)) for _ in range(3)] + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + assert len(bitmap_masks) == 3 + assert bitmap_masks.height == 28 + assert bitmap_masks.width == 28 + + # init with raw masks of unsupported type + with pytest.raises(AssertionError): + raw_masks = [[dummy_raw_bitmap_masks((28, 28))]] + BitmapMasks(raw_masks, 28, 28) + + +def test_bitmap_mask_rescale(): + # rescale with empty bitmap masks + raw_masks = dummy_raw_bitmap_masks((0, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + rescaled_masks = bitmap_masks.rescale((56, 72)) + assert len(rescaled_masks) == 0 + assert rescaled_masks.height == 56 + assert rescaled_masks.width == 56 + + # rescale with bitmap masks contain 1 instances + raw_masks = np.array([[[1, 0, 0, 0], [0, 1, 0, 1]]]) + bitmap_masks = BitmapMasks(raw_masks, 2, 4) + rescaled_masks = bitmap_masks.rescale((8, 8)) + assert len(rescaled_masks) == 1 + assert rescaled_masks.height == 4 + assert rescaled_masks.width == 8 + truth = np.array([[[1, 1, 0, 0, 0, 0, 0, 0], [1, 1, 0, 0, 0, 0, 0, 0], + [0, 0, 1, 1, 0, 0, 1, 1], [0, 0, 1, 1, 0, 0, 1, 1]]]) + assert (rescaled_masks.masks == truth).all() + + +def test_bitmap_mask_resize(): + # resize with empty bitmap masks + raw_masks = dummy_raw_bitmap_masks((0, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + resized_masks = bitmap_masks.resize((56, 72)) + assert len(resized_masks) == 0 + assert resized_masks.height == 56 + assert resized_masks.width == 72 + + # resize with bitmap masks contain 1 instances + raw_masks = np.diag(np.ones(4, dtype=np.uint8))[np.newaxis, ...]
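One detail worth spelling out for the mask tests around this point: rescale and resize are deliberately different. rescale collapses the requested (56, 72) into a single scale factor so the aspect ratio is preserved (hence the 56x56 result asserted above), while resize stretches to the exact target shape. A small sketch, assuming BitmapMasks.rescale delegates the size computation to mmcv.rescale_size as in upstream mmdet:

```python
import mmcv

# rescale: one scale factor for both axes, aspect ratio kept.
# For a 28x28 mask and target (56, 72) the limiting factor is 2.0:
new_w, new_h = mmcv.rescale_size((28, 28), (56, 72))  # -> (56, 56)

# resize((56, 72)), by contrast, forces height 56 and width 72 exactly.
```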
+ bitmap_masks = BitmapMasks(raw_masks, 4, 4) + resized_masks = bitmap_masks.resize((8, 8)) + assert len(resized_masks) == 1 + assert resized_masks.height == 8 + assert resized_masks.width == 8 + truth = np.array([[[1, 1, 0, 0, 0, 0, 0, 0], [1, 1, 0, 0, 0, 0, 0, 0], + [0, 0, 1, 1, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 0], + [0, 0, 0, 0, 1, 1, 0, 0], [0, 0, 0, 0, 1, 1, 0, 0], + [0, 0, 0, 0, 0, 0, 1, 1], [0, 0, 0, 0, 0, 0, 1, 1]]]) + assert (resized_masks.masks == truth).all() + + # resize to non-square + raw_masks = np.diag(np.ones(4, dtype=np.uint8))[np.newaxis, ...] + bitmap_masks = BitmapMasks(raw_masks, 4, 4) + resized_masks = bitmap_masks.resize((4, 8)) + assert len(resized_masks) == 1 + assert resized_masks.height == 4 + assert resized_masks.width == 8 + truth = np.array([[[1, 1, 0, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0, 0, 0], + [0, 0, 0, 0, 1, 1, 0, 0], [0, 0, 0, 0, 0, 0, 1, 1]]]) + assert (resized_masks.masks == truth).all() + + +def test_bitmap_mask_flip(): + # flip with empty bitmap masks + raw_masks = dummy_raw_bitmap_masks((0, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + flipped_masks = bitmap_masks.flip(flip_direction='horizontal') + assert len(flipped_masks) == 0 + assert flipped_masks.height == 28 + assert flipped_masks.width == 28 + + # horizontally flip with bitmap masks contain 3 instances + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + flipped_masks = bitmap_masks.flip(flip_direction='horizontal') + flipped_flipped_masks = flipped_masks.flip(flip_direction='horizontal') + assert flipped_masks.masks.shape == (3, 28, 28) + assert (bitmap_masks.masks == flipped_flipped_masks.masks).all() + assert (flipped_masks.masks == raw_masks[:, :, ::-1]).all() + + # vertically flip with bitmap masks contain 3 instances + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + flipped_masks = bitmap_masks.flip(flip_direction='vertical') + flipped_flipped_masks = flipped_masks.flip(flip_direction='vertical') + assert len(flipped_masks) == 3 + assert flipped_masks.height == 28 + assert flipped_masks.width == 28 + assert (bitmap_masks.masks == flipped_flipped_masks.masks).all() + assert (flipped_masks.masks == raw_masks[:, ::-1, :]).all() + + # diagonal flip with bitmap masks contain 3 instances + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + flipped_masks = bitmap_masks.flip(flip_direction='diagonal') + flipped_flipped_masks = flipped_masks.flip(flip_direction='diagonal') + assert len(flipped_masks) == 3 + assert flipped_masks.height == 28 + assert flipped_masks.width == 28 + assert (bitmap_masks.masks == flipped_flipped_masks.masks).all() + assert (flipped_masks.masks == raw_masks[:, ::-1, ::-1]).all() + + +def test_bitmap_mask_pad(): + # pad with empty bitmap masks + raw_masks = dummy_raw_bitmap_masks((0, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + padded_masks = bitmap_masks.pad((56, 56)) + assert len(padded_masks) == 0 + assert padded_masks.height == 56 + assert padded_masks.width == 56 + + # pad with bitmap masks contain 3 instances + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + padded_masks = bitmap_masks.pad((56, 56)) + assert len(padded_masks) == 3 + assert padded_masks.height == 56 + assert padded_masks.width == 56 + assert (padded_masks.masks[:, 28:, 28:] == 0).all() + + +def test_bitmap_mask_crop(): + # crop with empty bitmap masks + dummy_bbox = np.array([0, 
10, 10, 27], dtype=np.int) + raw_masks = dummy_raw_bitmap_masks((0, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + cropped_masks = bitmap_masks.crop(dummy_bbox) + assert len(cropped_masks) == 0 + assert cropped_masks.height == 17 + assert cropped_masks.width == 10 + + # crop with bitmap masks contain 3 instances + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + cropped_masks = bitmap_masks.crop(dummy_bbox) + assert len(cropped_masks) == 3 + assert cropped_masks.height == 17 + assert cropped_masks.width == 10 + x1, y1, x2, y2 = dummy_bbox + assert (cropped_masks.masks == raw_masks[:, y1:y2, x1:x2]).all() + + # crop with invalid bbox + with pytest.raises(AssertionError): + dummy_bbox = dummy_bboxes(2, 28, 28) + bitmap_masks.crop(dummy_bbox) + + +def test_bitmap_mask_crop_and_resize(): + dummy_bbox = dummy_bboxes(5, 28, 28) + inds = np.random.randint(0, 3, (5, )) + + # crop and resize with empty bitmap masks + raw_masks = dummy_raw_bitmap_masks((0, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + cropped_resized_masks = bitmap_masks.crop_and_resize( + dummy_bbox, (56, 56), inds) + assert len(cropped_resized_masks) == 0 + assert cropped_resized_masks.height == 56 + assert cropped_resized_masks.width == 56 + + # crop and resize with bitmap masks contain 3 instances + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + cropped_resized_masks = bitmap_masks.crop_and_resize( + dummy_bbox, (56, 56), inds) + assert len(cropped_resized_masks) == 5 + assert cropped_resized_masks.height == 56 + assert cropped_resized_masks.width == 56 + + +def test_bitmap_mask_expand(): + # expand with empty bitmap masks + raw_masks = dummy_raw_bitmap_masks((0, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + expanded_masks = bitmap_masks.expand(56, 56, 12, 14) + assert len(expanded_masks) == 0 + assert expanded_masks.height == 56 + assert expanded_masks.width == 56 + + # expand with bitmap masks contain 3 instances + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + expanded_masks = bitmap_masks.expand(56, 56, 12, 14) + assert len(expanded_masks) == 3 + assert expanded_masks.height == 56 + assert expanded_masks.width == 56 + assert (expanded_masks.masks[:, :12, :14] == 0).all() + assert (expanded_masks.masks[:, 12 + 28:, 14 + 28:] == 0).all() + + +def test_bitmap_mask_area(): + # area of empty bitmap mask + raw_masks = dummy_raw_bitmap_masks((0, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + assert bitmap_masks.areas.sum() == 0 + + # area of bitmap masks contain 3 instances + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + areas = bitmap_masks.areas + assert len(areas) == 3 + assert (areas == raw_masks.sum((1, 2))).all() + + +def test_bitmap_mask_to_ndarray(): + # empty bitmap masks to ndarray + raw_masks = dummy_raw_bitmap_masks((0, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + ndarray_masks = bitmap_masks.to_ndarray() + assert isinstance(ndarray_masks, np.ndarray) + assert ndarray_masks.shape == (0, 28, 28) + + # bitmap masks contain 3 instances to ndarray + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + ndarray_masks = bitmap_masks.to_ndarray() + assert isinstance(ndarray_masks, np.ndarray) + assert ndarray_masks.shape == (3, 28, 28) + assert (ndarray_masks == raw_masks).all() + + +def test_bitmap_mask_to_tensor(): + # 
empty bitmap masks to tensor + raw_masks = dummy_raw_bitmap_masks((0, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + tensor_masks = bitmap_masks.to_tensor(dtype=torch.uint8, device='cpu') + assert isinstance(tensor_masks, torch.Tensor) + assert tensor_masks.shape == (0, 28, 28) + + # bitmap masks contain 3 instances to tensor + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + tensor_masks = bitmap_masks.to_tensor(dtype=torch.uint8, device='cpu') + assert isinstance(tensor_masks, torch.Tensor) + assert tensor_masks.shape == (3, 28, 28) + assert (tensor_masks.numpy() == raw_masks).all() + + +def test_bitmap_mask_index(): + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + assert (bitmap_masks[0].masks == raw_masks[0]).all() + assert (bitmap_masks[range(2)].masks == raw_masks[range(2)]).all() + + +def test_bitmap_mask_iter(): + raw_masks = dummy_raw_bitmap_masks((3, 28, 28)) + bitmap_masks = BitmapMasks(raw_masks, 28, 28) + for i, bitmap_mask in enumerate(bitmap_masks): + assert bitmap_mask.shape == (28, 28) + assert (bitmap_mask == raw_masks[i]).all() + + +def test_polygon_mask_init(): + # init with empty masks + raw_masks = [] + polygon_masks = PolygonMasks(raw_masks, 28, 28) + assert len(polygon_masks) == 0 + assert polygon_masks.height == 28 + assert polygon_masks.width == 28 + + # init with masks contain 3 instances + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + assert isinstance(polygon_masks.masks, list) + assert isinstance(polygon_masks.masks[0], list) + assert isinstance(polygon_masks.masks[0][0], np.ndarray) + assert len(polygon_masks) == 3 + assert polygon_masks.height == 28 + assert polygon_masks.width == 28 + assert polygon_masks.to_ndarray().shape == (3, 28, 28) + + # init with raw masks of unsupported type + with pytest.raises(AssertionError): + raw_masks = [[[]]] + PolygonMasks(raw_masks, 28, 28) + + raw_masks = [dummy_raw_polygon_masks((3, 28, 28))] + PolygonMasks(raw_masks, 28, 28) + + +def test_polygon_mask_rescale(): + # rescale with empty polygon masks + raw_masks = dummy_raw_polygon_masks((0, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + rescaled_masks = polygon_masks.rescale((56, 72)) + assert len(rescaled_masks) == 0 + assert rescaled_masks.height == 56 + assert rescaled_masks.width == 56 + assert rescaled_masks.to_ndarray().shape == (0, 56, 56) + + # rescale with polygon masks contain 1 instances + raw_masks = [[np.array([1, 1, 3, 1, 4, 3, 2, 4, 1, 3], dtype=np.float)]] + polygon_masks = PolygonMasks(raw_masks, 5, 5) + rescaled_masks = polygon_masks.rescale((12, 10)) + assert len(rescaled_masks) == 1 + assert rescaled_masks.height == 10 + assert rescaled_masks.width == 10 + assert rescaled_masks.to_ndarray().shape == (1, 10, 10) + truth = np.array( + [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 0, 0, 0, 0], [0, 0, 1, 1, 1, 1, 1, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0, 0], [0, 0, 1, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 0, 1, 1, 1, 1, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], + np.uint8) + assert (rescaled_masks.to_ndarray() == truth).all() + + +def test_polygon_mask_resize(): + # resize with empty polygon masks + raw_masks = dummy_raw_polygon_masks((0, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + resized_masks = polygon_masks.resize((56, 72)) + assert len(resized_masks) == 0 +
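The polygon tests on either side of this point lean on the PolygonMasks storage layout, so a brief illustration may help (the square below is a made-up example, not one of the test fixtures):

```python
import numpy as np

# PolygonMasks holds a list over objects; each object is a list of polygon
# "parts"; each part is a flat array [x0, y0, x1, y1, ...].
square = np.array([0., 0., 14., 0., 14., 14., 0., 14.])  # a 14x14 square
raw_masks = [[square]]  # one object consisting of one part
# Because the representation is coordinate-based, resize/rescale merely
# scale the points; nothing is rasterised until to_bitmap()/to_ndarray().
```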
assert resized_masks.height == 56 + assert resized_masks.width == 72 + assert resized_masks.to_ndarray().shape == (0, 56, 72) + + # resize with polygon masks contain 1 instance 1 part + raw_masks1 = [[np.array([1, 1, 3, 1, 4, 3, 2, 4, 1, 3], dtype=np.float)]] + polygon_masks1 = PolygonMasks(raw_masks1, 5, 5) + resized_masks1 = polygon_masks1.resize((10, 10)) + assert len(resized_masks1) == 1 + assert resized_masks1.height == 10 + assert resized_masks1.width == 10 + assert resized_masks1.to_ndarray().shape == (1, 10, 10) + truth1 = np.array( + [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 0, 0, 0, 0], [0, 0, 1, 1, 1, 1, 1, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0, 0], [0, 0, 1, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 0, 1, 1, 1, 1, 0, 0, 0], [0, 0, 0, 0, 1, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], + np.uint8) + assert (resized_masks1.to_ndarray() == truth1).all() + + # resize with polygon masks contain 1 instance 2 part + raw_masks2 = [[ + np.array([0., 0., 1., 0., 1., 1.]), + np.array([1., 1., 2., 1., 2., 2., 1., 2.]) + ]] + polygon_masks2 = PolygonMasks(raw_masks2, 3, 3) + resized_masks2 = polygon_masks2.resize((6, 6)) + assert len(resized_masks2) == 1 + assert resized_masks2.height == 6 + assert resized_masks2.width == 6 + assert resized_masks2.to_ndarray().shape == (1, 6, 6) + truth2 = np.array( + [[0, 1, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 1, 1, 0, 0], + [0, 0, 1, 1, 0, 0], [0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]], np.uint8) + assert (resized_masks2.to_ndarray() == truth2).all() + + # resize with polygon masks contain 2 instances + raw_masks3 = [raw_masks1[0], raw_masks2[0]] + polygon_masks3 = PolygonMasks(raw_masks3, 5, 5) + resized_masks3 = polygon_masks3.resize((10, 10)) + assert len(resized_masks3) == 2 + assert resized_masks3.height == 10 + assert resized_masks3.width == 10 + assert resized_masks3.to_ndarray().shape == (2, 10, 10) + truth3 = np.stack([truth1, np.pad(truth2, ((0, 4), (0, 4)), 'constant')]) + assert (resized_masks3.to_ndarray() == truth3).all() + + # resize to non-square + raw_masks4 = [[np.array([1, 1, 3, 1, 4, 3, 2, 4, 1, 3], dtype=np.float)]] + polygon_masks4 = PolygonMasks(raw_masks4, 5, 5) + resized_masks4 = polygon_masks4.resize((5, 10)) + assert len(resized_masks4) == 1 + assert resized_masks4.height == 5 + assert resized_masks4.width == 10 + assert resized_masks4.to_ndarray().shape == (1, 5, 10) + truth4 = np.array( + [[0, 0, 0, 0, 0, 0, 0, 0, 0, 0], [0, 0, 1, 1, 1, 1, 1, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 1, 0, 0], [0, 0, 0, 1, 1, 1, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]], np.uint8) + assert (resized_masks4.to_ndarray() == truth4).all() + + +def test_polygon_mask_flip(): + # flip with empty polygon masks + raw_masks = dummy_raw_polygon_masks((0, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + flipped_masks = polygon_masks.flip(flip_direction='horizontal') + assert len(flipped_masks) == 0 + assert flipped_masks.height == 28 + assert flipped_masks.width == 28 + assert flipped_masks.to_ndarray().shape == (0, 28, 28) + + # TODO: fixed flip correctness checking after v2.0_coord is merged + # horizontally flip with polygon masks contain 3 instances + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + flipped_masks = polygon_masks.flip(flip_direction='horizontal') + flipped_flipped_masks = flipped_masks.flip(flip_direction='horizontal') + assert len(flipped_masks) == 3 + assert flipped_masks.height == 28 + assert 
flipped_masks.width == 28 + assert flipped_masks.to_ndarray().shape == (3, 28, 28) + assert (polygon_masks.to_ndarray() == flipped_flipped_masks.to_ndarray() + ).all() + + # vertically flip with polygon masks contain 3 instances + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + flipped_masks = polygon_masks.flip(flip_direction='vertical') + flipped_flipped_masks = flipped_masks.flip(flip_direction='vertical') + assert len(flipped_masks) == 3 + assert flipped_masks.height == 28 + assert flipped_masks.width == 28 + assert flipped_masks.to_ndarray().shape == (3, 28, 28) + assert (polygon_masks.to_ndarray() == flipped_flipped_masks.to_ndarray() + ).all() + + # diagonal flip with polygon masks contain 3 instances + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + flipped_masks = polygon_masks.flip(flip_direction='diagonal') + flipped_flipped_masks = flipped_masks.flip(flip_direction='diagonal') + assert len(flipped_masks) == 3 + assert flipped_masks.height == 28 + assert flipped_masks.width == 28 + assert flipped_masks.to_ndarray().shape == (3, 28, 28) + assert (polygon_masks.to_ndarray() == flipped_flipped_masks.to_ndarray() + ).all() + + +def test_polygon_mask_crop(): + dummy_bbox = np.array([0, 10, 10, 27], dtype=np.int) + # crop with empty polygon masks + raw_masks = dummy_raw_polygon_masks((0, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + cropped_masks = polygon_masks.crop(dummy_bbox) + assert len(cropped_masks) == 0 + assert cropped_masks.height == 17 + assert cropped_masks.width == 10 + assert cropped_masks.to_ndarray().shape == (0, 17, 10) + + # crop with polygon masks contain 1 instances + raw_masks = [[np.array([1., 3., 5., 1., 5., 6., 1, 6])]] + polygon_masks = PolygonMasks(raw_masks, 7, 7) + bbox = np.array([0, 0, 3, 4]) + cropped_masks = polygon_masks.crop(bbox) + assert len(cropped_masks) == 1 + assert cropped_masks.height == 4 + assert cropped_masks.width == 3 + assert cropped_masks.to_ndarray().shape == (1, 4, 3) + truth = np.array([[0, 0, 0], [0, 0, 0], [0, 0, 1], [0, 1, 1]]) + assert (cropped_masks.to_ndarray() == truth).all() + + # crop with invalid bbox + with pytest.raises(AssertionError): + dummy_bbox = dummy_bboxes(2, 28, 28) + polygon_masks.crop(dummy_bbox) + + +def test_polygon_mask_pad(): + # pad with empty polygon masks + raw_masks = dummy_raw_polygon_masks((0, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + padded_masks = polygon_masks.pad((56, 56)) + assert len(padded_masks) == 0 + assert padded_masks.height == 56 + assert padded_masks.width == 56 + assert padded_masks.to_ndarray().shape == (0, 56, 56) + + # pad with polygon masks contain 3 instances + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + padded_masks = polygon_masks.pad((56, 56)) + assert len(padded_masks) == 3 + assert padded_masks.height == 56 + assert padded_masks.width == 56 + assert padded_masks.to_ndarray().shape == (3, 56, 56) + assert (padded_masks.to_ndarray()[:, 28:, 28:] == 0).all() + + +def test_polygon_mask_expand(): + with pytest.raises(NotImplementedError): + raw_masks = dummy_raw_polygon_masks((0, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + polygon_masks.expand(56, 56, 10, 17) + + +def test_polygon_mask_crop_and_resize(): + dummy_bbox = dummy_bboxes(5, 28, 28) + inds = np.random.randint(0, 3, (5, )) + + # crop and resize with empty polygon masks + raw_masks = dummy_raw_polygon_masks((0, 
28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + cropped_resized_masks = polygon_masks.crop_and_resize( + dummy_bbox, (56, 56), inds) + assert len(cropped_resized_masks) == 0 + assert cropped_resized_masks.height == 56 + assert cropped_resized_masks.width == 56 + assert cropped_resized_masks.to_ndarray().shape == (0, 56, 56) + + # crop and resize with polygon masks contain 3 instances + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + cropped_resized_masks = polygon_masks.crop_and_resize( + dummy_bbox, (56, 56), inds) + assert len(cropped_resized_masks) == 5 + assert cropped_resized_masks.height == 56 + assert cropped_resized_masks.width == 56 + assert cropped_resized_masks.to_ndarray().shape == (5, 56, 56) + + +def test_polygon_mask_area(): + # area of empty polygon masks + raw_masks = dummy_raw_polygon_masks((0, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + assert polygon_masks.areas.sum() == 0 + + # area of polygon masks contain 1 instance + # here we hack a case that the gap between the area of bitmap and polygon + # is minor + raw_masks = [[np.array([1, 1, 5, 1, 3, 4])]] + polygon_masks = PolygonMasks(raw_masks, 6, 6) + polygon_area = polygon_masks.areas + bitmap_area = polygon_masks.to_bitmap().areas + assert len(polygon_area) == 1 + assert np.isclose(polygon_area, bitmap_area).all() + + +def test_polygon_mask_to_bitmap(): + # polygon masks contain 3 instances to bitmap + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + bitmap_masks = polygon_masks.to_bitmap() + assert (polygon_masks.to_ndarray() == bitmap_masks.to_ndarray()).all() + + +def test_polygon_mask_to_ndarray(): + # empty polygon masks to ndarray + raw_masks = dummy_raw_polygon_masks((0, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + ndarray_masks = polygon_masks.to_ndarray() + assert isinstance(ndarray_masks, np.ndarray) + assert ndarray_masks.shape == (0, 28, 28) + + # polygon masks contain 3 instances to ndarray + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + ndarray_masks = polygon_masks.to_ndarray() + assert isinstance(ndarray_masks, np.ndarray) + assert ndarray_masks.shape == (3, 28, 28) + + +def test_polygon_to_tensor(): + # empty polygon masks to tensor + raw_masks = dummy_raw_polygon_masks((0, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + tensor_masks = polygon_masks.to_tensor(dtype=torch.uint8, device='cpu') + assert isinstance(tensor_masks, torch.Tensor) + assert tensor_masks.shape == (0, 28, 28) + + # polygon masks contain 3 instances to tensor + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + tensor_masks = polygon_masks.to_tensor(dtype=torch.uint8, device='cpu') + assert isinstance(tensor_masks, torch.Tensor) + assert tensor_masks.shape == (3, 28, 28) + assert (tensor_masks.numpy() == polygon_masks.to_ndarray()).all() + + +def test_polygon_mask_index(): + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + # index by integer + polygon_masks[0] + # index by list + polygon_masks[[0, 1]] + # index by ndarray + polygon_masks[np.asarray([0, 1])] + with pytest.raises(ValueError): + # invalid index + polygon_masks[torch.Tensor([1, 2])] + + +def test_polygon_mask_iter(): + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + for i, polygon_mask in 
enumerate(polygon_masks): + assert np.equal(polygon_mask, raw_masks[i]).all() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_misc.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_misc.py new file mode 100644 index 0000000000000000000000000000000000000000..16be906c8b071aecda6149b1a55bcc76ada9b036 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_misc.py @@ -0,0 +1,92 @@ +import numpy as np +import pytest +import torch + +from mmdet.core.bbox import distance2bbox +from mmdet.core.mask.structures import BitmapMasks, PolygonMasks +from mmdet.core.utils import mask2ndarray + + +def dummy_raw_polygon_masks(size): + """ + Args: + size (tuple): expected shape of dummy masks, (N, H, W) + + Return: + list[list[ndarray]]: dummy mask + """ + num_obj, height, width = size + polygons = [] + for _ in range(num_obj): + num_points = np.random.randint(5) * 2 + 6 + polygons.append([np.random.uniform(0, min(height, width), num_points)]) + return polygons + + +def test_mask2ndarray(): + raw_masks = np.ones((3, 28, 28)) + bitmap_mask = BitmapMasks(raw_masks, 28, 28) + output_mask = mask2ndarray(bitmap_mask) + assert np.allclose(raw_masks, output_mask) + + raw_masks = dummy_raw_polygon_masks((3, 28, 28)) + polygon_masks = PolygonMasks(raw_masks, 28, 28) + output_mask = mask2ndarray(polygon_masks) + assert output_mask.shape == (3, 28, 28) + + raw_masks = np.ones((3, 28, 28)) + output_mask = mask2ndarray(raw_masks) + assert np.allclose(raw_masks, output_mask) + + raw_masks = torch.ones((3, 28, 28)) + output_mask = mask2ndarray(raw_masks) + assert np.allclose(raw_masks, output_mask) + + # test unsupported type + raw_masks = [] + with pytest.raises(TypeError): + output_mask = mask2ndarray(raw_masks) + + +def test_distance2bbox(): + point = torch.Tensor([[74., 61.], [-29., 106.], [138., 61.], [29., 170.]]) + + distance = torch.Tensor([[0., 0, 1., 1.], [1., 2., 10., 6.], + [22., -29., 138., 61.], [54., -29., 170., 61.]]) + expected_decode_bboxes = torch.Tensor([[74., 61., 75., 62.], + [0., 104., 0., 112.], + [100., 90., 100., 120.], + [0., 120., 100., 120.]]) + out_bbox = distance2bbox(point, distance, max_shape=(120, 100)) + assert expected_decode_bboxes.allclose(out_bbox) + out = distance2bbox(point, distance, max_shape=torch.Tensor((120, 100))) + assert expected_decode_bboxes.allclose(out) + + batch_point = point.unsqueeze(0).repeat(2, 1, 1) + batch_distance = distance.unsqueeze(0).repeat(2, 1, 1) + batch_out = distance2bbox( + batch_point, batch_distance, max_shape=(120, 100))[0] + assert out.allclose(batch_out) + batch_out = distance2bbox( + batch_point, batch_distance, max_shape=[(120, 100), (120, 100)])[0] + assert out.allclose(batch_out) + + batch_out = distance2bbox(point, batch_distance, max_shape=(120, 100))[0] + assert out.allclose(batch_out) + + # test max_shape is not equal to batch + with pytest.raises(AssertionError): + distance2bbox( + batch_point, + batch_distance, + max_shape=[(120, 100), (120, 100), (32, 32)]) + + rois = torch.zeros((0, 4)) + deltas = torch.zeros((0, 4)) + out = distance2bbox(rois, deltas, max_shape=(120, 100)) + assert rois.shape == out.shape + + rois = torch.zeros((2, 0, 4)) + deltas = torch.zeros((2, 0, 4)) + out = distance2bbox(rois, deltas, max_shape=(120, 100)) + assert rois.shape == out.shape diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_version.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_version.py new file mode 100644
index 0000000000000000000000000000000000000000..6ddf45c0e2854cb64006281363afe5547aa886c2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_version.py @@ -0,0 +1,15 @@ +from mmdet import digit_version + + +def test_version_check(): + assert digit_version('1.0.5') > digit_version('1.0.5rc0') + assert digit_version('1.0.5') > digit_version('1.0.4rc0') + assert digit_version('1.0.5') > digit_version('1.0rc0') + assert digit_version('1.0.0') > digit_version('0.6.2') + assert digit_version('1.0.0') > digit_version('0.2.16') + assert digit_version('1.0.5rc0') > digit_version('1.0.0rc0') + assert digit_version('1.0.0rc1') > digit_version('1.0.0rc0') + assert digit_version('1.0.0rc2') > digit_version('1.0.0rc0') + assert digit_version('1.0.0rc2') > digit_version('1.0.0rc1') + assert digit_version('1.0.1rc1') > digit_version('1.0.0rc1') + assert digit_version('1.0.0') > digit_version('1.0.0rc1') diff --git a/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_visualization.py b/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_visualization.py new file mode 100644 index 0000000000000000000000000000000000000000..9c7969b44ee5b4ee862c09b63f122db14a534b32 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tests/test_utils/test_visualization.py @@ -0,0 +1,127 @@ +# Copyright (c) Open-MMLab. All rights reserved. +import os +import os.path as osp +import tempfile + +import mmcv +import numpy as np +import pytest +import torch + +from mmdet.core import visualization as vis + + +def test_color(): + assert vis.color_val_matplotlib(mmcv.Color.blue) == (0., 0., 1.) + assert vis.color_val_matplotlib('green') == (0., 1., 0.) + assert vis.color_val_matplotlib((1, 2, 3)) == (3 / 255, 2 / 255, 1 / 255) + assert vis.color_val_matplotlib(100) == (100 / 255, 100 / 255, 100 / 255) + assert vis.color_val_matplotlib(np.zeros(3, dtype=np.int)) == (0., 0., 0.) 
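The assertions above pin down the conversion contract: mmcv colour values are BGR in [0, 255], matplotlib expects RGB in [0, 1], which is why (1, 2, 3) maps to (3/255, 2/255, 1/255). A minimal sketch of that mapping (a hypothetical helper, not the actual implementation):

```python
def bgr255_to_rgb01(color):
    """Convert a BGR tuple in [0, 255] to an RGB tuple in [0, 1]."""
    b, g, r = color
    return (r / 255, g / 255, b / 255)

assert bgr255_to_rgb01((1, 2, 3)) == (3 / 255, 2 / 255, 1 / 255)
```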
+ # forbid white color + with pytest.raises(TypeError): + vis.color_val_matplotlib([255, 255, 255]) + # forbid float + with pytest.raises(TypeError): + vis.color_val_matplotlib(1.0) + # overflowed + with pytest.raises(AssertionError): + vis.color_val_matplotlib((0, 0, 500)) + + +def test_imshow_det_bboxes(): + tmp_filename = osp.join(tempfile.gettempdir(), 'det_bboxes_image', + 'image.jpg') + image = np.ones((10, 10, 3), np.uint8) + bbox = np.array([[2, 1, 3, 3], [3, 4, 6, 6]]) + label = np.array([0, 1]) + out_image = vis.imshow_det_bboxes( + image, bbox, label, out_file=tmp_filename, show=False) + assert osp.isfile(tmp_filename) + assert image.shape == out_image.shape + assert not np.allclose(image, out_image) + os.remove(tmp_filename) + + # test grayscale images + image = np.ones((10, 10), np.uint8) + bbox = np.array([[2, 1, 3, 3], [3, 4, 6, 6]]) + label = np.array([0, 1]) + out_image = vis.imshow_det_bboxes( + image, bbox, label, out_file=tmp_filename, show=False) + assert osp.isfile(tmp_filename) + assert image.shape == out_image.shape[:2] + os.remove(tmp_filename) + + # test shaped (0,) + image = np.ones((10, 10, 3), np.uint8) + bbox = np.ones((0, 4)) + label = np.ones((0, )) + vis.imshow_det_bboxes( + image, bbox, label, out_file=tmp_filename, show=False) + assert osp.isfile(tmp_filename) + os.remove(tmp_filename) + + # test mask + image = np.ones((10, 10, 3), np.uint8) + bbox = np.array([[2, 1, 3, 3], [3, 4, 6, 6]]) + label = np.array([0, 1]) + segms = np.random.random((2, 10, 10)) > 0.5 + segms = np.array(segms, np.int32) + vis.imshow_det_bboxes( + image, bbox, label, segms, out_file=tmp_filename, show=False) + assert osp.isfile(tmp_filename) + os.remove(tmp_filename) + + # test tensor mask type error + with pytest.raises(AttributeError): + segms = torch.tensor(segms) + vis.imshow_det_bboxes(image, bbox, label, segms, show=False) + + +def test_imshow_gt_det_bboxes(): + tmp_filename = osp.join(tempfile.gettempdir(), 'det_bboxes_image', + 'image.jpg') + image = np.ones((10, 10, 3), np.uint8) + bbox = np.array([[2, 1, 3, 3], [3, 4, 6, 6]]) + label = np.array([0, 1]) + annotation = dict(gt_bboxes=bbox, gt_labels=label) + det_result = np.array([[2, 1, 3, 3, 0], [3, 4, 6, 6, 1]]) + result = [det_result] + out_image = vis.imshow_gt_det_bboxes( + image, annotation, result, out_file=tmp_filename, show=False) + assert osp.isfile(tmp_filename) + assert image.shape == out_image.shape + assert not np.allclose(image, out_image) + os.remove(tmp_filename) + + # test grayscale images + image = np.ones((10, 10), np.uint8) + bbox = np.array([[2, 1, 3, 3], [3, 4, 6, 6]]) + label = np.array([0, 1]) + annotation = dict(gt_bboxes=bbox, gt_labels=label) + det_result = np.array([[2, 1, 3, 3, 0], [3, 4, 6, 6, 1]]) + result = [det_result] + vis.imshow_gt_det_bboxes( + image, annotation, result, out_file=tmp_filename, show=False) + assert osp.isfile(tmp_filename) + os.remove(tmp_filename) + + # test numpy mask + gt_mask = np.ones((2, 10, 10)) + annotation['gt_masks'] = gt_mask + vis.imshow_gt_det_bboxes( + image, annotation, result, out_file=tmp_filename, show=False) + assert osp.isfile(tmp_filename) + os.remove(tmp_filename) + + # test tensor mask + gt_mask = torch.ones((2, 10, 10)) + annotation['gt_masks'] = gt_mask + vis.imshow_gt_det_bboxes( + image, annotation, result, out_file=tmp_filename, show=False) + assert osp.isfile(tmp_filename) + os.remove(tmp_filename) + + # test unsupported type + annotation['gt_masks'] = [] + with pytest.raises(TypeError): + vis.imshow_gt_det_bboxes(image, annotation, result, 
show=False) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/analyze_logs.py b/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/analyze_logs.py new file mode 100644 index 0000000000000000000000000000000000000000..37797741a151d2e2d655427c11f49e22cdd254b2 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/analyze_logs.py @@ -0,0 +1,179 @@ +import argparse +import json +from collections import defaultdict + +import matplotlib.pyplot as plt +import numpy as np +import seaborn as sns + + +def cal_train_time(log_dicts, args): + for i, log_dict in enumerate(log_dicts): + print(f'{"-" * 5}Analyze train time of {args.json_logs[i]}{"-" * 5}') + all_times = [] + for epoch in log_dict.keys(): + if args.include_outliers: + all_times.append(log_dict[epoch]['time']) + else: + all_times.append(log_dict[epoch]['time'][1:]) + all_times = np.array(all_times) + epoch_ave_time = all_times.mean(-1) + slowest_epoch = epoch_ave_time.argmax() + fastest_epoch = epoch_ave_time.argmin() + std_over_epoch = epoch_ave_time.std() + print(f'slowest epoch {slowest_epoch + 1}, ' + f'average time is {epoch_ave_time[slowest_epoch]:.4f}') + print(f'fastest epoch {fastest_epoch + 1}, ' + f'average time is {epoch_ave_time[fastest_epoch]:.4f}') + print(f'time std over epochs is {std_over_epoch:.4f}') + print(f'average iter time: {np.mean(all_times):.4f} s/iter') + print() + + +def plot_curve(log_dicts, args): + if args.backend is not None: + plt.switch_backend(args.backend) + sns.set_style(args.style) + # if legend is None, use {filename}_{key} as legend + legend = args.legend + if legend is None: + legend = [] + for json_log in args.json_logs: + for metric in args.keys: + legend.append(f'{json_log}_{metric}') + assert len(legend) == (len(args.json_logs) * len(args.keys)) + metrics = args.keys + + num_metrics = len(metrics) + for i, log_dict in enumerate(log_dicts): + epochs = list(log_dict.keys()) + for j, metric in enumerate(metrics): + print(f'plot curve of {args.json_logs[i]}, metric is {metric}') + if metric not in log_dict[epochs[0]]: + raise KeyError( + f'{args.json_logs[i]} does not contain metric {metric}') + + if 'mAP' in metric: + xs = np.arange(1, max(epochs) + 1) + ys = [] + for epoch in epochs: + ys += log_dict[epoch][metric] + ax = plt.gca() + ax.set_xticks(xs) + plt.xlabel('epoch') + plt.plot(xs, ys, label=legend[i * num_metrics + j], marker='o') + else: + xs = [] + ys = [] + num_iters_per_epoch = log_dict[epochs[0]]['iter'][-2] + for epoch in epochs: + iters = log_dict[epoch]['iter'] + if log_dict[epoch]['mode'][-1] == 'val': + iters = iters[:-1] + xs.append( + np.array(iters) + (epoch - 1) * num_iters_per_epoch) + ys.append(np.array(log_dict[epoch][metric][:len(iters)])) + xs = np.concatenate(xs) + ys = np.concatenate(ys) + plt.xlabel('iter') + plt.plot( + xs, ys, label=legend[i * num_metrics + j], linewidth=0.5) + plt.legend() + if args.title is not None: + plt.title(args.title) + if args.out is None: + plt.show() + else: + print(f'save curve to: {args.out}') + plt.savefig(args.out) + plt.cla() + + +def add_plot_parser(subparsers): + parser_plt = subparsers.add_parser( + 'plot_curve', help='parser for plotting curves') + parser_plt.add_argument( + 'json_logs', + type=str, + nargs='+', + help='path of train log in json format') + parser_plt.add_argument( + '--keys', + type=str, + nargs='+', + default=['bbox_mAP'], + help='the metric that you want to plot') + parser_plt.add_argument('--title', type=str, help='title of figure') + 
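Both subcommands of this tool consume mmdet-style JSON logs in which every line is a standalone JSON object. An illustrative line (field values invented for the example) and how load_json_logs, defined further down in this file, regroups it:

```python
import json

line = ('{"mode": "train", "epoch": 1, "iter": 50, "lr": 0.02, '
        '"loss": 1.23, "time": 0.31}')
log = json.loads(line)
epoch = log.pop('epoch')
# load_json_logs appends every remaining key per logged iteration, so a
# metric ends up as e.g. log_dicts[0][1]['loss'] == [1.23, ...]
```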
parser_plt.add_argument( + '--legend', + type=str, + nargs='+', + default=None, + help='legend of each plot') + parser_plt.add_argument( + '--backend', type=str, default=None, help='backend of plt') + parser_plt.add_argument( + '--style', type=str, default='dark', help='style of plt') + parser_plt.add_argument('--out', type=str, default=None) + + +def add_time_parser(subparsers): + parser_time = subparsers.add_parser( + 'cal_train_time', + help='parser for computing the average time per training iteration') + parser_time.add_argument( + 'json_logs', + type=str, + nargs='+', + help='path of train log in json format') + parser_time.add_argument( + '--include-outliers', + action='store_true', + help='include the first value of every epoch when computing ' + 'the average time') + + +def parse_args(): + parser = argparse.ArgumentParser(description='Analyze Json Log') + # currently only support plot curve and calculate average train time + subparsers = parser.add_subparsers(dest='task', help='task parser') + add_plot_parser(subparsers) + add_time_parser(subparsers) + args = parser.parse_args() + return args + + +def load_json_logs(json_logs): + # load and convert json_logs to log_dict, key is epoch, value is a sub dict + # keys of sub dict is different metrics, e.g. memory, bbox_mAP + # value of sub dict is a list of corresponding values of all iterations + log_dicts = [dict() for _ in json_logs] + for json_log, log_dict in zip(json_logs, log_dicts): + with open(json_log, 'r') as log_file: + for line in log_file: + log = json.loads(line.strip()) + # skip lines without `epoch` field + if 'epoch' not in log: + continue + epoch = log.pop('epoch') + if epoch not in log_dict: + log_dict[epoch] = defaultdict(list) + for k, v in log.items(): + log_dict[epoch][k].append(v) + return log_dicts + + +def main(): + args = parse_args() + + json_logs = args.json_logs + for json_log in json_logs: + assert json_log.endswith('.json') + + log_dicts = load_json_logs(json_logs) + + eval(args.task)(log_dicts, args) + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/analyze_results.py b/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/analyze_results.py new file mode 100644 index 0000000000000000000000000000000000000000..2e8e67d3da398868ec4688d182a9d0358deed459 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/analyze_results.py @@ -0,0 +1,202 @@ +import argparse +import os.path as osp + +import mmcv +import numpy as np +from mmcv import Config, DictAction + +from mmdet.core.evaluation import eval_map +from mmdet.core.visualization import imshow_gt_det_bboxes +from mmdet.datasets import build_dataset, get_loading_pipeline + + +def bbox_map_eval(det_result, annotation): + """Evaluate mAP of single image det result. + + Args: + det_result (list[list]): [[cls1_det, cls2_det, ...], ...]. + The outer list indicates images, and the inner list indicates + per-class detected bboxes. 
+ annotation (dict): Ground truth annotations where keys of + annotations are: + + - bboxes: numpy array of shape (n, 4) + - labels: numpy array of shape (n, ) + - bboxes_ignore (optional): numpy array of shape (k, 4) + - labels_ignore (optional): numpy array of shape (k, ) + + Returns: + float: mAP + """ + + # use only bbox det result + if isinstance(det_result, tuple): + bbox_det_result = [det_result[0]] + else: + bbox_det_result = [det_result] + # mAP + iou_thrs = np.linspace( + .5, 0.95, int(np.round((0.95 - .5) / .05)) + 1, endpoint=True) + mean_aps = [] + for thr in iou_thrs: + mean_ap, _ = eval_map( + bbox_det_result, [annotation], iou_thr=thr, logger='silent') + mean_aps.append(mean_ap) + return sum(mean_aps) / len(mean_aps) + + +class ResultVisualizer: + """Display and save evaluation results. + + Args: + show (bool): Whether to show the image. Default: True + wait_time (float): Value of waitKey param. Default: 0. + score_thr (float): Minimum score of bboxes to be shown. + Default: 0 + """ + + def __init__(self, show=False, wait_time=0, score_thr=0): + self.show = show + self.wait_time = wait_time + self.score_thr = score_thr + + def _save_image_gts_results(self, dataset, results, mAPs, out_dir=None): + mmcv.mkdir_or_exist(out_dir) + + for mAP_info in mAPs: + index, mAP = mAP_info + data_info = dataset.prepare_train_img(index) + + # calc save file path + filename = data_info['filename'] + if data_info['img_prefix'] is not None: + filename = osp.join(data_info['img_prefix'], filename) + else: + filename = data_info['filename'] + fname, name = osp.splitext(osp.basename(filename)) + save_filename = fname + '_' + str(round(mAP, 3)) + name + out_file = osp.join(out_dir, save_filename) + imshow_gt_det_bboxes( + data_info['img'], + data_info, + results[index], + dataset.CLASSES, + show=self.show, + score_thr=self.score_thr, + wait_time=self.wait_time, + out_file=out_file) + + def evaluate_and_show(self, + dataset, + results, + topk=20, + show_dir='work_dir', + eval_fn=None): + """Evaluate and show results. + + Args: + dataset (Dataset): A PyTorch dataset. + results (list): Det results from test results pkl file + topk (int): Number of the highest topk and + lowest topk after evaluation index sorting. Default: 20 + show_dir (str, optional): The filename to write the image. 
+ Default: 'work_dir' + eval_fn (callable, optional): Eval function, Default: None + """ + + assert topk > 0 + if (topk * 2) > len(dataset): + topk = len(dataset) // 2 + + if eval_fn is None: + eval_fn = bbox_map_eval + else: + assert callable(eval_fn) + + prog_bar = mmcv.ProgressBar(len(results)) + _mAPs = {} + for i, (result, ) in enumerate(zip(results)): + # self.dataset[i] should not call directly + # because there is a risk of mismatch + data_info = dataset.prepare_train_img(i) + mAP = eval_fn(result, data_info['ann_info']) + _mAPs[i] = mAP + prog_bar.update() + + # descending select topk image + _mAPs = list(sorted(_mAPs.items(), key=lambda kv: kv[1])) + good_mAPs = _mAPs[-topk:] + bad_mAPs = _mAPs[:topk] + + good_dir = osp.abspath(osp.join(show_dir, 'good')) + bad_dir = osp.abspath(osp.join(show_dir, 'bad')) + self._save_image_gts_results(dataset, results, good_mAPs, good_dir) + self._save_image_gts_results(dataset, results, bad_mAPs, bad_dir) + + +def parse_args(): + parser = argparse.ArgumentParser( + description='MMDet eval image prediction result for each') + parser.add_argument('config', help='test config file path') + parser.add_argument( + 'prediction_path', help='prediction path where test pkl result') + parser.add_argument( + 'show_dir', help='directory where painted images will be saved') + parser.add_argument('--show', action='store_true', help='show results') + parser.add_argument( + '--wait-time', + type=float, + default=0, + help='the interval of show (s), 0 is block') + parser.add_argument( + '--topk', + default=20, + type=int, + help='saved Number of the highest topk ' + 'and lowest topk after index sorting') + parser.add_argument( + '--show-score-thr', + type=float, + default=0, + help='score threshold (default: 0.)') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + + mmcv.check_file_exist(args.prediction_path) + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + cfg.data.test.test_mode = True + # import modules from string list. 
+ if cfg.get('custom_imports', None): + from mmcv.utils import import_modules_from_strings + import_modules_from_strings(**cfg['custom_imports']) + + cfg.data.test.pop('samples_per_gpu', 0) + cfg.data.test.pipeline = get_loading_pipeline(cfg.data.train.pipeline) + dataset = build_dataset(cfg.data.test) + outputs = mmcv.load(args.prediction_path) + + result_visualizer = ResultVisualizer(args.show, args.wait_time, + args.show_score_thr) + result_visualizer.evaluate_and_show( + dataset, outputs, topk=args.topk, show_dir=args.show_dir) + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/benchmark.py b/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..9e8bec6b66ece6df1aa6251bda5efe7bdb39e599 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/benchmark.py @@ -0,0 +1,142 @@ +import argparse +import os +import time + +import torch +from mmcv import Config, DictAction +from mmcv.cnn import fuse_conv_bn +from mmcv.parallel import MMDistributedDataParallel +from mmcv.runner import init_dist, load_checkpoint, wrap_fp16_model + +from mmdet.datasets import (build_dataloader, build_dataset, + replace_ImageToTensor) +from mmdet.models import build_detector + + +def parse_args(): + parser = argparse.ArgumentParser(description='MMDet benchmark a model') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint', help='checkpoint file') + parser.add_argument( + '--max-iter', type=int, default=2000, help='num of max iter') + parser.add_argument( + '--log-interval', type=int, default=50, help='interval of logging') + parser.add_argument( + '--fuse-conv-bn', + action='store_true', + help='Whether to fuse conv and bn, this will slightly increase' + 'the inference speed') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. 
key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', type=int, default=0) + args = parser.parse_args() + if 'LOCAL_RANK' not in os.environ: + os.environ['LOCAL_RANK'] = str(args.local_rank) + return args + + +def measure_inference_speed(cfg, checkpoint, max_iter, log_interval, + is_fuse_conv_bn): + # set cudnn_benchmark + if cfg.get('cudnn_benchmark', False): + torch.backends.cudnn.benchmark = True + cfg.model.pretrained = None + cfg.data.test.test_mode = True + + # build the dataloader + samples_per_gpu = cfg.data.test.pop('samples_per_gpu', 1) + if samples_per_gpu > 1: + # Replace 'ImageToTensor' with 'DefaultFormatBundle' + cfg.data.test.pipeline = replace_ImageToTensor(cfg.data.test.pipeline) + dataset = build_dataset(cfg.data.test) + data_loader = build_dataloader( + dataset, + samples_per_gpu=1, + workers_per_gpu=cfg.data.workers_per_gpu, + dist=True, + shuffle=False) + + # build the model and load checkpoint + cfg.model.train_cfg = None + model = build_detector(cfg.model, test_cfg=cfg.get('test_cfg')) + fp16_cfg = cfg.get('fp16', None) + if fp16_cfg is not None: + wrap_fp16_model(model) + load_checkpoint(model, checkpoint, map_location='cpu') + if is_fuse_conv_bn: + model = fuse_conv_bn(model) + + model = MMDistributedDataParallel( + model.cuda(), + device_ids=[torch.cuda.current_device()], + broadcast_buffers=False) + model.eval() + + # the first several iterations may be very slow so skip them + num_warmup = 5 + pure_inf_time = 0 + fps = 0 + + # benchmark with 2000 images and take the average + for i, data in enumerate(data_loader): + + torch.cuda.synchronize() + start_time = time.perf_counter() + + with torch.no_grad(): + model(return_loss=False, rescale=True, **data) + + torch.cuda.synchronize() + elapsed = time.perf_counter() - start_time + + if i >= num_warmup: + pure_inf_time += elapsed + if (i + 1) % log_interval == 0: + fps = (i + 1 - num_warmup) / pure_inf_time + print( + f'Done image [{i + 1:<3}/ {max_iter}], ' + f'fps: {fps:.1f} img / s, ' + f'time per image: {1000 / fps:.1f} ms / img', + flush=True) + + if (i + 1) == max_iter: + fps = (i + 1 - num_warmup) / pure_inf_time + print( + f'Overall fps: {fps:.1f} img / s, ' + f'time per image: {1000 / fps:.1f} ms / img', + flush=True) + break + return fps + + +def main(): + args = parse_args() + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + + if args.launcher == 'none': + raise NotImplementedError('Only supports distributed mode') + else: + init_dist(args.launcher, **cfg.dist_params) + + measure_inference_speed(cfg, args.checkpoint, args.max_iter, + args.log_interval, args.fuse_conv_bn) + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/coco_error_analysis.py b/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/coco_error_analysis.py new file mode 100644 index 0000000000000000000000000000000000000000..722efe6d31f07ba92ecb14836ccd834d5a318bb0 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/coco_error_analysis.py @@ -0,0 +1,338 @@ +import copy +import os +from argparse import ArgumentParser +from multiprocessing import Pool + +import matplotlib.pyplot as plt +import numpy as np +from pycocotools.coco import COCO +from
pycocotools.cocoeval import COCOeval + + +def makeplot(rs, ps, outDir, class_name, iou_type): + cs = np.vstack([ + np.ones((2, 3)), + np.array([0.31, 0.51, 0.74]), + np.array([0.75, 0.31, 0.30]), + np.array([0.36, 0.90, 0.38]), + np.array([0.50, 0.39, 0.64]), + np.array([1, 0.6, 0]), + ]) + areaNames = ['allarea', 'small', 'medium', 'large'] + types = ['C75', 'C50', 'Loc', 'Sim', 'Oth', 'BG', 'FN'] + for i in range(len(areaNames)): + area_ps = ps[..., i, 0] + figure_title = iou_type + '-' + class_name + '-' + areaNames[i] + aps = [ps_.mean() for ps_ in area_ps] + ps_curve = [ + ps_.mean(axis=1) if ps_.ndim > 1 else ps_ for ps_ in area_ps + ] + ps_curve.insert(0, np.zeros(ps_curve[0].shape)) + fig = plt.figure() + ax = plt.subplot(111) + for k in range(len(types)): + ax.plot(rs, ps_curve[k + 1], color=[0, 0, 0], linewidth=0.5) + ax.fill_between( + rs, + ps_curve[k], + ps_curve[k + 1], + color=cs[k], + label=str(f'[{aps[k]:.3f}]' + types[k]), + ) + plt.xlabel('recall') + plt.ylabel('precision') + plt.xlim(0, 1.0) + plt.ylim(0, 1.0) + plt.title(figure_title) + plt.legend() + # plt.show() + fig.savefig(outDir + f'/{figure_title}.png') + plt.close(fig) + + +def autolabel(ax, rects): + """Attach a text label above each bar in *rects*, displaying its height.""" + for rect in rects: + height = rect.get_height() + if height > 0 and height <= 1: # for percent values + text_label = '{:2.0f}'.format(height * 100) + else: + text_label = '{:2.0f}'.format(height) + ax.annotate( + text_label, + xy=(rect.get_x() + rect.get_width() / 2, height), + xytext=(0, 3), # 3 points vertical offset + textcoords='offset points', + ha='center', + va='bottom', + fontsize='x-small', + ) + + +def makebarplot(rs, ps, outDir, class_name, iou_type): + areaNames = ['allarea', 'small', 'medium', 'large'] + types = ['C75', 'C50', 'Loc', 'Sim', 'Oth', 'BG', 'FN'] + fig, ax = plt.subplots() + x = np.arange(len(areaNames)) # the areaNames locations + width = 0.60 # the width of the bars + rects_list = [] + figure_title = iou_type + '-' + class_name + '-' + 'ap bar plot' + for i in range(len(types) - 1): + type_ps = ps[i, ..., 0] + aps = [ps_.mean() for ps_ in type_ps.T] + rects_list.append( + ax.bar( + x - width / 2 + (i + 1) * width / len(types), + aps, + width / len(types), + label=types[i], + )) + + # Add some text for labels, title and custom x-axis tick labels, etc. 
+ ax.set_ylabel('Mean Average Precision (mAP)') + ax.set_title(figure_title) + ax.set_xticks(x) + ax.set_xticklabels(areaNames) + ax.legend() + + # Add score texts over bars + for rects in rects_list: + autolabel(ax, rects) + + # Save plot + fig.savefig(outDir + f'/{figure_title}.png') + plt.close(fig) + + +def get_gt_area_group_numbers(cocoEval): + areaRng = cocoEval.params.areaRng + areaRngStr = [str(aRng) for aRng in areaRng] + areaRngLbl = cocoEval.params.areaRngLbl + areaRngStr2areaRngLbl = dict(zip(areaRngStr, areaRngLbl)) + areaRngLbl2Number = dict.fromkeys(areaRngLbl, 0) + for evalImg in cocoEval.evalImgs: + if evalImg: + for gtIgnore in evalImg['gtIgnore']: + if not gtIgnore: + aRngLbl = areaRngStr2areaRngLbl[str(evalImg['aRng'])] + areaRngLbl2Number[aRngLbl] += 1 + return areaRngLbl2Number + + +def make_gt_area_group_numbers_plot(cocoEval, outDir, verbose=True): + areaRngLbl2Number = get_gt_area_group_numbers(cocoEval) + areaRngLbl = areaRngLbl2Number.keys() + if verbose: + print('number of annotations per area group:', areaRngLbl2Number) + + # Init figure + fig, ax = plt.subplots() + x = np.arange(len(areaRngLbl)) # the areaNames locations + width = 0.60 # the width of the bars + figure_title = 'number of annotations per area group' + + rects = ax.bar(x, areaRngLbl2Number.values(), width) + + # Add some text for labels, title and custom x-axis tick labels, etc. + ax.set_ylabel('Number of annotations') + ax.set_title(figure_title) + ax.set_xticks(x) + ax.set_xticklabels(areaRngLbl) + + # Add score texts over bars + autolabel(ax, rects) + + # Save plot + fig.tight_layout() + fig.savefig(outDir + f'/{figure_title}.png') + plt.close(fig) + + +def make_gt_area_histogram_plot(cocoEval, outDir): + n_bins = 100 + areas = [ann['area'] for ann in cocoEval.cocoGt.anns.values()] + + # init figure + figure_title = 'gt annotation areas histogram plot' + fig, ax = plt.subplots() + + # Set the number of bins + ax.hist(np.sqrt(areas), bins=n_bins) + + # Add some text for labels, title and custom x-axis tick labels, etc. 
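+    # (The x-axis is sqrt(area), i.e. object size in pixels; this script's
+    # default --areas cutoffs of 1024 and 9216 correspond to 32 px and 96 px
+    # on that scale.)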
+ ax.set_xlabel('Squareroot Area') + ax.set_ylabel('Number of annotations') + ax.set_title(figure_title) + + # Save plot + fig.tight_layout() + fig.savefig(outDir + f'/{figure_title}.png') + plt.close(fig) + + +def analyze_individual_category(k, + cocoDt, + cocoGt, + catId, + iou_type, + areas=None): + nm = cocoGt.loadCats(catId)[0] + print(f'--------------analyzing {k + 1}-{nm["name"]}---------------') + ps_ = {} + dt = copy.deepcopy(cocoDt) + nm = cocoGt.loadCats(catId)[0] + imgIds = cocoGt.getImgIds() + dt_anns = dt.dataset['annotations'] + select_dt_anns = [] + for ann in dt_anns: + if ann['category_id'] == catId: + select_dt_anns.append(ann) + dt.dataset['annotations'] = select_dt_anns + dt.createIndex() + # compute precision but ignore superclass confusion + gt = copy.deepcopy(cocoGt) + child_catIds = gt.getCatIds(supNms=[nm['supercategory']]) + for idx, ann in enumerate(gt.dataset['annotations']): + if ann['category_id'] in child_catIds and ann['category_id'] != catId: + gt.dataset['annotations'][idx]['ignore'] = 1 + gt.dataset['annotations'][idx]['iscrowd'] = 1 + gt.dataset['annotations'][idx]['category_id'] = catId + cocoEval = COCOeval(gt, copy.deepcopy(dt), iou_type) + cocoEval.params.imgIds = imgIds + cocoEval.params.maxDets = [100] + cocoEval.params.iouThrs = [0.1] + cocoEval.params.useCats = 1 + if areas: + cocoEval.params.areaRng = [[0**2, areas[2]], [0**2, areas[0]], + [areas[0], areas[1]], [areas[1], areas[2]]] + cocoEval.evaluate() + cocoEval.accumulate() + ps_supercategory = cocoEval.eval['precision'][0, :, k, :, :] + ps_['ps_supercategory'] = ps_supercategory + # compute precision but ignore any class confusion + gt = copy.deepcopy(cocoGt) + for idx, ann in enumerate(gt.dataset['annotations']): + if ann['category_id'] != catId: + gt.dataset['annotations'][idx]['ignore'] = 1 + gt.dataset['annotations'][idx]['iscrowd'] = 1 + gt.dataset['annotations'][idx]['category_id'] = catId + cocoEval = COCOeval(gt, copy.deepcopy(dt), iou_type) + cocoEval.params.imgIds = imgIds + cocoEval.params.maxDets = [100] + cocoEval.params.iouThrs = [0.1] + cocoEval.params.useCats = 1 + if areas: + cocoEval.params.areaRng = [[0**2, areas[2]], [0**2, areas[0]], + [areas[0], areas[1]], [areas[1], areas[2]]] + cocoEval.evaluate() + cocoEval.accumulate() + ps_allcategory = cocoEval.eval['precision'][0, :, k, :, :] + ps_['ps_allcategory'] = ps_allcategory + return k, ps_ + + +def analyze_results(res_file, + ann_file, + res_types, + out_dir, + extraplots=None, + areas=None): + for res_type in res_types: + assert res_type in ['bbox', 'segm'] + if areas: + assert len(areas) == 3, '3 integers should be specified as areas, \ + representing 3 area regions' + + directory = os.path.dirname(out_dir + '/') + if not os.path.exists(directory): + print(f'-------------create {out_dir}-----------------') + os.makedirs(directory) + + cocoGt = COCO(ann_file) + cocoDt = cocoGt.loadRes(res_file) + imgIds = cocoGt.getImgIds() + for res_type in res_types: + res_out_dir = out_dir + '/' + res_type + '/' + res_directory = os.path.dirname(res_out_dir) + if not os.path.exists(res_directory): + print(f'-------------create {res_out_dir}-----------------') + os.makedirs(res_directory) + iou_type = res_type + cocoEval = COCOeval( + copy.deepcopy(cocoGt), copy.deepcopy(cocoDt), iou_type) + cocoEval.params.imgIds = imgIds + cocoEval.params.iouThrs = [0.75, 0.5, 0.1] + cocoEval.params.maxDets = [100] + if areas: + cocoEval.params.areaRng = [[0**2, areas[2]], [0**2, areas[0]], + [areas[0], areas[1]], + [areas[1], areas[2]]] + 
cocoEval.evaluate()
+        cocoEval.accumulate()
+        ps = cocoEval.eval['precision']
+        ps = np.vstack([ps, np.zeros((4, *ps.shape[1:]))])
+        catIds = cocoGt.getCatIds()
+        recThrs = cocoEval.params.recThrs
+        with Pool(processes=48) as pool:
+            args = [(k, cocoDt, cocoGt, catId, iou_type, areas)
+                    for k, catId in enumerate(catIds)]
+            analyze_results = pool.starmap(analyze_individual_category, args)
+        for k, catId in enumerate(catIds):
+            nm = cocoGt.loadCats(catId)[0]
+            print(f'--------------saving {k + 1}-{nm["name"]}---------------')
+            analyze_result = analyze_results[k]
+            assert k == analyze_result[0]
+            ps_supercategory = analyze_result[1]['ps_supercategory']
+            ps_allcategory = analyze_result[1]['ps_allcategory']
+            # compute precision but ignore superclass confusion
+            ps[3, :, k, :, :] = ps_supercategory
+            # compute precision but ignore any class confusion
+            ps[4, :, k, :, :] = ps_allcategory
+            # fill in background and false negative errors and plot
+            ps[ps == -1] = 0
+            ps[5, :, k, :, :] = ps[4, :, k, :, :] > 0
+            ps[6, :, k, :, :] = 1.0
+            makeplot(recThrs, ps[:, :, k], res_out_dir, nm['name'], iou_type)
+            if extraplots:
+                makebarplot(recThrs, ps[:, :, k], res_out_dir, nm['name'],
+                            iou_type)
+        makeplot(recThrs, ps, res_out_dir, 'allclass', iou_type)
+        if extraplots:
+            makebarplot(recThrs, ps, res_out_dir, 'allclass', iou_type)
+            make_gt_area_group_numbers_plot(
+                cocoEval=cocoEval, outDir=res_out_dir, verbose=True)
+            make_gt_area_histogram_plot(cocoEval=cocoEval, outDir=res_out_dir)
+
+
+def main():
+    parser = ArgumentParser(description='COCO Error Analysis Tool')
+    parser.add_argument('result', help='result file (json format) path')
+    parser.add_argument('out_dir', help='dir to save analyze result images')
+    parser.add_argument(
+        '--ann',
+        default='data/coco/annotations/instances_val2017.json',
+        help='annotation file path')
+    parser.add_argument(
+        '--types', type=str, nargs='+', default=['bbox'], help='result types')
+    parser.add_argument(
+        '--extraplots',
+        action='store_true',
+        help='export extra bar/stat plots')
+    parser.add_argument(
+        '--areas',
+        type=int,
+        nargs='+',
+        default=[1024, 9216, 10000000000],
+        help='area regions')
+    args = parser.parse_args()
+    analyze_results(
+        args.result,
+        args.ann,
+        args.types,
+        out_dir=args.out_dir,
+        extraplots=args.extraplots,
+        areas=args.areas)
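+
+
+# Illustrative invocation (file names are hypothetical), analyzing bbox
+# results exported in COCO json format:
+#   python tools/analysis_tools/coco_error_analysis.py results.bbox.json out/ \
+#       --ann data/coco/annotations/instances_val2017.json --types bbox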
+
+
+if __name__ == '__main__':
+    main()
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/eval_metric.py b/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/eval_metric.py
new file mode 100644
index 0000000000000000000000000000000000000000..5732719d2933d74360fd91a6b58d6320fb666f9d
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/eval_metric.py
@@ -0,0 +1,83 @@
+import argparse
+
+import mmcv
+from mmcv import Config, DictAction
+
+from mmdet.datasets import build_dataset
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(description='Evaluate metric of the '
+                                     'results saved in pkl format')
+    parser.add_argument('config', help='Config of the model')
+    parser.add_argument('pkl_results', help='Results in pickle format')
+    parser.add_argument(
+        '--format-only',
+        action='store_true',
+        help='Format the output results without performing evaluation. It is '
+        'useful when you want to format the result to a specific format and '
+        'submit it to the test server')
+    parser.add_argument(
+        '--eval',
+        type=str,
+        nargs='+',
+        help='Evaluation metrics, which depends on the dataset, e.g., "bbox",'
+        ' "segm", "proposal" for COCO, and "mAP", "recall" for PASCAL VOC')
+    parser.add_argument(
+        '--cfg-options',
+        nargs='+',
+        action=DictAction,
+        help='override some settings in the used config, the key-value pair '
+        'in xxx=yyy format will be merged into config file. If the value to '
+        'be overwritten is a list, it should be like key="[a,b]" or key=a,b '
+        'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" '
+        'Note that the quotation marks are necessary and that no white space '
+        'is allowed.')
+    parser.add_argument(
+        '--eval-options',
+        nargs='+',
+        action=DictAction,
+        help='custom options for evaluation, the key-value pair in xxx=yyy '
+        'format will be kwargs for dataset.evaluate() function')
+    args = parser.parse_args()
+    return args
+
+
+def main():
+    args = parse_args()
+
+    cfg = Config.fromfile(args.config)
+    assert args.eval or args.format_only, (
+        'Please specify at least one operation (eval/format the results) with '
+        'the argument "--eval", "--format-only"')
+    if args.eval and args.format_only:
+        raise ValueError('--eval and --format-only cannot both be specified')
+
+    if args.cfg_options is not None:
+        cfg.merge_from_dict(args.cfg_options)
+    # import modules from string list.
+    if cfg.get('custom_imports', None):
+        from mmcv.utils import import_modules_from_strings
+        import_modules_from_strings(**cfg['custom_imports'])
+    cfg.data.test.test_mode = True
+
+    dataset = build_dataset(cfg.data.test)
+    outputs = mmcv.load(args.pkl_results)
+
+    kwargs = {} if args.eval_options is None else args.eval_options
+    if args.format_only:
+        dataset.format_results(outputs, **kwargs)
+    if args.eval:
+        eval_kwargs = cfg.get('evaluation', {}).copy()
+        # hard-code way to remove EvalHook args
+        for key in [
+                'interval', 'tmpdir', 'start', 'gpu_collect', 'save_best',
+                'rule'
+        ]:
+            eval_kwargs.pop(key, None)
+        eval_kwargs.update(dict(metric=args.eval, **kwargs))
+        print(dataset.evaluate(outputs, **eval_kwargs))
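+
+
+# Illustrative example (file names are hypothetical): evaluate bbox mAP from
+# a pickle produced by e.g. `tools/test.py <config.py> <ckpt.pth> --out results.pkl`:
+#   python tools/analysis_tools/eval_metric.py <config.py> results.pkl --eval bbox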
key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + args = parser.parse_args() + return args + + +def main(): + + args = parse_args() + + if len(args.shape) == 1: + input_shape = (3, args.shape[0], args.shape[0]) + elif len(args.shape) == 2: + input_shape = (3, ) + tuple(args.shape) + else: + raise ValueError('invalid input shape') + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + # import modules from string list. + if cfg.get('custom_imports', None): + from mmcv.utils import import_modules_from_strings + import_modules_from_strings(**cfg['custom_imports']) + + model = build_detector( + cfg.model, + train_cfg=cfg.get('train_cfg'), + test_cfg=cfg.get('test_cfg')) + if torch.cuda.is_available(): + model.cuda() + model.eval() + + if hasattr(model, 'forward_dummy'): + model.forward = model.forward_dummy + else: + raise NotImplementedError( + 'FLOPs counter is currently not currently supported with {}'. + format(model.__class__.__name__)) + + flops, params = get_model_complexity_info(model, input_shape) + split_line = '=' * 30 + print(f'{split_line}\nInput shape: {input_shape}\n' + f'Flops: {flops}\nParams: {params}\n{split_line}') + print('!!!Please be cautious if you use the results in papers. ' + 'You may need to check if all ops are supported and verify that the ' + 'flops computation is correct.') + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/robustness_eval.py b/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/robustness_eval.py new file mode 100644 index 0000000000000000000000000000000000000000..cc2e27b6b74ca87cd58723bda7f94177a81734ca --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/robustness_eval.py @@ -0,0 +1,250 @@ +import os.path as osp +from argparse import ArgumentParser + +import mmcv +import numpy as np + + +def print_coco_results(results): + + def _print(result, ap=1, iouThr=None, areaRng='all', maxDets=100): + titleStr = 'Average Precision' if ap == 1 else 'Average Recall' + typeStr = '(AP)' if ap == 1 else '(AR)' + iouStr = '0.50:0.95' \ + if iouThr is None else f'{iouThr:0.2f}' + iStr = f' {titleStr:<18} {typeStr} @[ IoU={iouStr:<9} | ' + iStr += f'area={areaRng:>6s} | maxDets={maxDets:>3d} ] = {result:0.3f}' + print(iStr) + + stats = np.zeros((12, )) + stats[0] = _print(results[0], 1) + stats[1] = _print(results[1], 1, iouThr=.5) + stats[2] = _print(results[2], 1, iouThr=.75) + stats[3] = _print(results[3], 1, areaRng='small') + stats[4] = _print(results[4], 1, areaRng='medium') + stats[5] = _print(results[5], 1, areaRng='large') + stats[6] = _print(results[6], 0, maxDets=1) + stats[7] = _print(results[7], 0, maxDets=10) + stats[8] = _print(results[8], 0) + stats[9] = _print(results[9], 0, areaRng='small') + stats[10] = _print(results[10], 0, areaRng='medium') + stats[11] = _print(results[11], 0, areaRng='large') + + +def get_coco_style_results(filename, + task='bbox', + metric=None, + prints='mPC', + aggregate='benchmark'): + + assert aggregate in ['benchmark', 'all'] + + if prints == 'all': + prints = ['P', 'mPC', 'rPC'] + elif isinstance(prints, str): + prints = [prints] + for p in prints: + assert p in ['P', 'mPC', 'rPC'] + + if metric is None: + metrics = [ + 'AP', 'AP50', 'AP75', 'APs', 'APm', 'APl', 'AR1', 'AR10', 'AR100', + 'ARs', 'ARm', 'ARl' + ] + elif isinstance(metric, list): + metrics = metric + else: + 
metrics = [metric] + + for metric_name in metrics: + assert metric_name in [ + 'AP', 'AP50', 'AP75', 'APs', 'APm', 'APl', 'AR1', 'AR10', 'AR100', + 'ARs', 'ARm', 'ARl' + ] + + eval_output = mmcv.load(filename) + + num_distortions = len(list(eval_output.keys())) + results = np.zeros((num_distortions, 6, len(metrics)), dtype='float32') + + for corr_i, distortion in enumerate(eval_output): + for severity in eval_output[distortion]: + for metric_j, metric_name in enumerate(metrics): + mAP = eval_output[distortion][severity][task][metric_name] + results[corr_i, severity, metric_j] = mAP + + P = results[0, 0, :] + if aggregate == 'benchmark': + mPC = np.mean(results[:15, 1:, :], axis=(0, 1)) + else: + mPC = np.mean(results[:, 1:, :], axis=(0, 1)) + rPC = mPC / P + + print(f'\nmodel: {osp.basename(filename)}') + if metric is None: + if 'P' in prints: + print(f'Performance on Clean Data [P] ({task})') + print_coco_results(P) + if 'mPC' in prints: + print(f'Mean Performance under Corruption [mPC] ({task})') + print_coco_results(mPC) + if 'rPC' in prints: + print(f'Relative Performance under Corruption [rPC] ({task})') + print_coco_results(rPC) + else: + if 'P' in prints: + print(f'Performance on Clean Data [P] ({task})') + for metric_i, metric_name in enumerate(metrics): + print(f'{metric_name:5} = {P[metric_i]:0.3f}') + if 'mPC' in prints: + print(f'Mean Performance under Corruption [mPC] ({task})') + for metric_i, metric_name in enumerate(metrics): + print(f'{metric_name:5} = {mPC[metric_i]:0.3f}') + if 'rPC' in prints: + print(f'Relative Performance under Corruption [rPC] ({task})') + for metric_i, metric_name in enumerate(metrics): + print(f'{metric_name:5} => {rPC[metric_i] * 100:0.1f} %') + + return results + + +def get_voc_style_results(filename, prints='mPC', aggregate='benchmark'): + + assert aggregate in ['benchmark', 'all'] + + if prints == 'all': + prints = ['P', 'mPC', 'rPC'] + elif isinstance(prints, str): + prints = [prints] + for p in prints: + assert p in ['P', 'mPC', 'rPC'] + + eval_output = mmcv.load(filename) + + num_distortions = len(list(eval_output.keys())) + results = np.zeros((num_distortions, 6, 20), dtype='float32') + + for i, distortion in enumerate(eval_output): + for severity in eval_output[distortion]: + mAP = [ + eval_output[distortion][severity][j]['ap'] + for j in range(len(eval_output[distortion][severity])) + ] + results[i, severity, :] = mAP + + P = results[0, 0, :] + if aggregate == 'benchmark': + mPC = np.mean(results[:15, 1:, :], axis=(0, 1)) + else: + mPC = np.mean(results[:, 1:, :], axis=(0, 1)) + rPC = mPC / P + + print(f'\nmodel: {osp.basename(filename)}') + if 'P' in prints: + print(f'Performance on Clean Data [P] in AP50 = {np.mean(P):0.3f}') + if 'mPC' in prints: + print('Mean Performance under Corruption [mPC] in AP50 = ' + f'{np.mean(mPC):0.3f}') + if 'rPC' in prints: + print('Relative Performance under Corruption [rPC] in % = ' + f'{np.mean(rPC) * 100:0.1f}') + + return np.mean(results, axis=2, keepdims=True) + + +def get_results(filename, + dataset='coco', + task='bbox', + metric=None, + prints='mPC', + aggregate='benchmark'): + assert dataset in ['coco', 'voc', 'cityscapes'] + + if dataset in ['coco', 'cityscapes']: + results = get_coco_style_results( + filename, + task=task, + metric=metric, + prints=prints, + aggregate=aggregate) + elif dataset == 'voc': + if task != 'bbox': + print('Only bbox analysis is supported for Pascal VOC') + print('Will report bbox results\n') + if metric not in [None, ['AP'], ['AP50']]: + print('Only the AP50 metric is 
supported for Pascal VOC') + print('Will report AP50 metric\n') + results = get_voc_style_results( + filename, prints=prints, aggregate=aggregate) + + return results + + +def get_distortions_from_file(filename): + + eval_output = mmcv.load(filename) + + return get_distortions_from_results(eval_output) + + +def get_distortions_from_results(eval_output): + distortions = [] + for i, distortion in enumerate(eval_output): + distortions.append(distortion.replace('_', ' ')) + return distortions + + +def main(): + parser = ArgumentParser(description='Corruption Result Analysis') + parser.add_argument('filename', help='result file path') + parser.add_argument( + '--dataset', + type=str, + choices=['coco', 'voc', 'cityscapes'], + default='coco', + help='dataset type') + parser.add_argument( + '--task', + type=str, + nargs='+', + choices=['bbox', 'segm'], + default=['bbox'], + help='task to report') + parser.add_argument( + '--metric', + nargs='+', + choices=[ + None, 'AP', 'AP50', 'AP75', 'APs', 'APm', 'APl', 'AR1', 'AR10', + 'AR100', 'ARs', 'ARm', 'ARl' + ], + default=None, + help='metric to report') + parser.add_argument( + '--prints', + type=str, + nargs='+', + choices=['P', 'mPC', 'rPC'], + default='mPC', + help='corruption benchmark metric to print') + parser.add_argument( + '--aggregate', + type=str, + choices=['all', 'benchmark'], + default='benchmark', + help='aggregate all results or only those \ + for benchmark corruptions') + + args = parser.parse_args() + + for task in args.task: + get_results( + args.filename, + dataset=args.dataset, + task=task, + metric=args.metric, + prints=args.prints, + aggregate=args.aggregate) + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/test_robustness.py b/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/test_robustness.py new file mode 100644 index 0000000000000000000000000000000000000000..ae30c019796b3e20d96dc4486ad1eae8e8981b98 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/analysis_tools/test_robustness.py @@ -0,0 +1,390 @@ +import argparse +import copy +import os +import os.path as osp + +import mmcv +import torch +from mmcv import DictAction +from mmcv.parallel import MMDataParallel, MMDistributedDataParallel +from mmcv.runner import (get_dist_info, init_dist, load_checkpoint, + wrap_fp16_model) +from pycocotools.coco import COCO +from pycocotools.cocoeval import COCOeval +from tools.analysis_tools.robustness_eval import get_results + +from mmdet import datasets +from mmdet.apis import multi_gpu_test, set_random_seed, single_gpu_test +from mmdet.core import eval_map +from mmdet.datasets import build_dataloader, build_dataset +from mmdet.models import build_detector + + +def coco_eval_with_return(result_files, + result_types, + coco, + max_dets=(100, 300, 1000)): + for res_type in result_types: + assert res_type in ['proposal', 'bbox', 'segm', 'keypoints'] + + if mmcv.is_str(coco): + coco = COCO(coco) + assert isinstance(coco, COCO) + + eval_results = {} + for res_type in result_types: + result_file = result_files[res_type] + assert result_file.endswith('.json') + + coco_dets = coco.loadRes(result_file) + img_ids = coco.getImgIds() + iou_type = 'bbox' if res_type == 'proposal' else res_type + cocoEval = COCOeval(coco, coco_dets, iou_type) + cocoEval.params.imgIds = img_ids + if res_type == 'proposal': + cocoEval.params.useCats = 0 + cocoEval.params.maxDets = list(max_dets) + cocoEval.evaluate() + cocoEval.accumulate() + cocoEval.summarize() + 
if res_type == 'segm' or res_type == 'bbox': + metric_names = [ + 'AP', 'AP50', 'AP75', 'APs', 'APm', 'APl', 'AR1', 'AR10', + 'AR100', 'ARs', 'ARm', 'ARl' + ] + eval_results[res_type] = { + metric_names[i]: cocoEval.stats[i] + for i in range(len(metric_names)) + } + else: + eval_results[res_type] = cocoEval.stats + + return eval_results + + +def voc_eval_with_return(result_file, + dataset, + iou_thr=0.5, + logger='print', + only_ap=True): + det_results = mmcv.load(result_file) + annotations = [dataset.get_ann_info(i) for i in range(len(dataset))] + if hasattr(dataset, 'year') and dataset.year == 2007: + dataset_name = 'voc07' + else: + dataset_name = dataset.CLASSES + mean_ap, eval_results = eval_map( + det_results, + annotations, + scale_ranges=None, + iou_thr=iou_thr, + dataset=dataset_name, + logger=logger) + + if only_ap: + eval_results = [{ + 'ap': eval_results[i]['ap'] + } for i in range(len(eval_results))] + + return mean_ap, eval_results + + +def parse_args(): + parser = argparse.ArgumentParser(description='MMDet test detector') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint', help='checkpoint file') + parser.add_argument('--out', help='output result file') + parser.add_argument( + '--corruptions', + type=str, + nargs='+', + default='benchmark', + choices=[ + 'all', 'benchmark', 'noise', 'blur', 'weather', 'digital', + 'holdout', 'None', 'gaussian_noise', 'shot_noise', 'impulse_noise', + 'defocus_blur', 'glass_blur', 'motion_blur', 'zoom_blur', 'snow', + 'frost', 'fog', 'brightness', 'contrast', 'elastic_transform', + 'pixelate', 'jpeg_compression', 'speckle_noise', 'gaussian_blur', + 'spatter', 'saturate' + ], + help='corruptions') + parser.add_argument( + '--severities', + type=int, + nargs='+', + default=[0, 1, 2, 3, 4, 5], + help='corruption severity levels') + parser.add_argument( + '--eval', + type=str, + nargs='+', + choices=['proposal', 'proposal_fast', 'bbox', 'segm', 'keypoints'], + help='eval types') + parser.add_argument( + '--iou-thr', + type=float, + default=0.5, + help='IoU threshold for pascal voc evaluation') + parser.add_argument( + '--summaries', + type=bool, + default=False, + help='Print summaries for every corruption and severity') + parser.add_argument( + '--workers', type=int, default=32, help='workers per gpu') + parser.add_argument('--show', action='store_true', help='show results') + parser.add_argument( + '--show-dir', help='directory where painted images will be saved') + parser.add_argument( + '--show-score-thr', + type=float, + default=0.3, + help='score threshold (default: 0.3)') + parser.add_argument('--tmpdir', help='tmp dir for writing some results') + parser.add_argument('--seed', type=int, default=None, help='random seed') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', type=int, default=0) + parser.add_argument( + '--final-prints', + type=str, + nargs='+', + choices=['P', 'mPC', 'rPC'], + default='mPC', + help='corruption benchmark metric to print at the end') + parser.add_argument( + '--final-prints-aggregate', + type=str, + choices=['all', 'benchmark'], + default='benchmark', + help='aggregate all results or only those for benchmark corruptions') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. 
If the value to '
+        'be overwritten is a list, it should be like key="[a,b]" or key=a,b '
+        'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" '
+        'Note that the quotation marks are necessary and that no white space '
+        'is allowed.')
+    args = parser.parse_args()
+    if 'LOCAL_RANK' not in os.environ:
+        os.environ['LOCAL_RANK'] = str(args.local_rank)
+    return args
+
+
+def main():
+    args = parse_args()
+
+    assert args.out or args.show or args.show_dir, \
+        ('Please specify at least one operation (save or show the results) '
+         'with the argument "--out", "--show" or "--show-dir"')
+
+    if args.out is not None and not args.out.endswith(('.pkl', '.pickle')):
+        raise ValueError('The output file must be a pkl file.')
+
+    cfg = mmcv.Config.fromfile(args.config)
+    if args.cfg_options is not None:
+        cfg.merge_from_dict(args.cfg_options)
+    # import modules from string list.
+    if cfg.get('custom_imports', None):
+        from mmcv.utils import import_modules_from_strings
+        import_modules_from_strings(**cfg['custom_imports'])
+    # set cudnn_benchmark
+    if cfg.get('cudnn_benchmark', False):
+        torch.backends.cudnn.benchmark = True
+    cfg.model.pretrained = None
+    cfg.data.test.test_mode = True
+    if args.workers == 0:
+        args.workers = cfg.data.workers_per_gpu
+
+    # init distributed env first, since logger depends on the dist info.
+    if args.launcher == 'none':
+        distributed = False
+    else:
+        distributed = True
+        init_dist(args.launcher, **cfg.dist_params)
+
+    # set random seeds
+    if args.seed is not None:
+        set_random_seed(args.seed)
+
+    if 'all' in args.corruptions:
+        corruptions = [
+            'gaussian_noise', 'shot_noise', 'impulse_noise', 'defocus_blur',
+            'glass_blur', 'motion_blur', 'zoom_blur', 'snow', 'frost', 'fog',
+            'brightness', 'contrast', 'elastic_transform', 'pixelate',
+            'jpeg_compression', 'speckle_noise', 'gaussian_blur', 'spatter',
+            'saturate'
+        ]
+    elif 'benchmark' in args.corruptions:
+        corruptions = [
+            'gaussian_noise', 'shot_noise', 'impulse_noise', 'defocus_blur',
+            'glass_blur', 'motion_blur', 'zoom_blur', 'snow', 'frost', 'fog',
+            'brightness', 'contrast', 'elastic_transform', 'pixelate',
+            'jpeg_compression'
+        ]
+    elif 'noise' in args.corruptions:
+        corruptions = ['gaussian_noise', 'shot_noise', 'impulse_noise']
+    elif 'blur' in args.corruptions:
+        corruptions = [
+            'defocus_blur', 'glass_blur', 'motion_blur', 'zoom_blur'
+        ]
+    elif 'weather' in args.corruptions:
+        corruptions = ['snow', 'frost', 'fog', 'brightness']
+    elif 'digital' in args.corruptions:
+        corruptions = [
+            'contrast', 'elastic_transform', 'pixelate', 'jpeg_compression'
+        ]
+    elif 'holdout' in args.corruptions:
+        corruptions = ['speckle_noise', 'gaussian_blur', 'spatter', 'saturate']
+    elif 'None' in args.corruptions:
+        corruptions = ['None']
+        args.severities = [0]
+    else:
+        corruptions = args.corruptions
+
+    rank, _ = get_dist_info()
+    aggregated_results = {}
+    for corr_i, corruption in enumerate(corruptions):
+        aggregated_results[corruption] = {}
+        for sev_i, corruption_severity in enumerate(args.severities):
+            # evaluate severity 0 (= no corruption) only once
+            if corr_i > 0 and corruption_severity == 0:
+                aggregated_results[corruption][0] = \
+                    aggregated_results[corruptions[0]][0]
+                continue
+
+            test_data_cfg = copy.deepcopy(cfg.data.test)
+            # assign corruption and severity
+            if corruption_severity > 0:
+                corruption_trans = dict(
+                    type='Corrupt',
+                    corruption=corruption,
+                    severity=corruption_severity)
+                # TODO: hard coded "1", we assume that the first step is
+                # loading images, which needs to be fixed in the future
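+                # Index 1 places the 'Corrupt' transform immediately after the
+                # first pipeline step (assumed to be the image-loading step, as
+                # the TODO above notes), so the corruption is applied to the
+                # full-resolution image before any test-time resizing.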
+                test_data_cfg['pipeline'].insert(1, corruption_trans)
+
+            # print info
+            print(f'\nTesting {corruption} at severity {corruption_severity}')
+
+            # build the dataloader
+            # TODO: support multiple images per gpu
+            #       (only minor changes are needed)
+            dataset = build_dataset(test_data_cfg)
+            data_loader = build_dataloader(
+                dataset,
+                samples_per_gpu=1,
+                workers_per_gpu=args.workers,
+                dist=distributed,
+                shuffle=False)
+
+            # build the model and load checkpoint
+            cfg.model.train_cfg = None
+            model = build_detector(cfg.model, test_cfg=cfg.get('test_cfg'))
+            fp16_cfg = cfg.get('fp16', None)
+            if fp16_cfg is not None:
+                wrap_fp16_model(model)
+            checkpoint = load_checkpoint(
+                model, args.checkpoint, map_location='cpu')
+            # old versions did not save class info in checkpoints,
+            # this workaround is for backward compatibility
+            if 'CLASSES' in checkpoint.get('meta', {}):
+                model.CLASSES = checkpoint['meta']['CLASSES']
+            else:
+                model.CLASSES = dataset.CLASSES
+
+            if not distributed:
+                model = MMDataParallel(model, device_ids=[0])
+                show_dir = args.show_dir
+                if show_dir is not None:
+                    show_dir = osp.join(show_dir, corruption)
+                    show_dir = osp.join(show_dir, str(corruption_severity))
+                    if not osp.exists(show_dir):
+                        os.makedirs(show_dir)
+                outputs = single_gpu_test(model, data_loader, args.show,
+                                          show_dir, args.show_score_thr)
+            else:
+                model = MMDistributedDataParallel(
+                    model.cuda(),
+                    device_ids=[torch.cuda.current_device()],
+                    broadcast_buffers=False)
+                outputs = multi_gpu_test(model, data_loader, args.tmpdir)
+
+            if args.out and rank == 0:
+                eval_results_filename = (
+                    osp.splitext(args.out)[0] + '_results' +
+                    osp.splitext(args.out)[1])
+                mmcv.dump(outputs, args.out)
+                eval_types = args.eval
+                if cfg.dataset_type == 'VOCDataset':
+                    if eval_types:
+                        for eval_type in eval_types:
+                            if eval_type == 'bbox':
+                                test_dataset = mmcv.runner.obj_from_dict(
+                                    cfg.data.test, datasets)
+                                logger = 'print' if args.summaries else None
+                                mean_ap, eval_results = \
+                                    voc_eval_with_return(
+                                        args.out, test_dataset,
+                                        args.iou_thr, logger)
+                                aggregated_results[corruption][
+                                    corruption_severity] = eval_results
+                            else:
+                                print('\nOnly "bbox" evaluation \
+                                is supported for pascal voc')
+                else:
+                    if eval_types:
+                        print(f'Starting to evaluate {" and ".join(eval_types)}')
+                        if eval_types == ['proposal_fast']:
+                            result_file = args.out
+                        else:
+                            if not isinstance(outputs[0], dict):
+                                result_files = dataset.results2json(
+                                    outputs, args.out)
+                            else:
+                                for name in outputs[0]:
+                                    print(f'\nEvaluating {name}')
+                                    outputs_ = [out[name] for out in outputs]
+                                    result_file = args.out + f'.{name}'
+                                    result_files = dataset.results2json(
+                                        outputs_, result_file)
+                        eval_results = coco_eval_with_return(
+                            result_files, eval_types, dataset.coco)
+                        aggregated_results[corruption][
+                            corruption_severity] = eval_results
+                    else:
+                        print('\nNo task was selected for evaluation;'
+                              '\nUse --eval to select a task')
+
+                # save results after each evaluation
+                mmcv.dump(aggregated_results, eval_results_filename)
+
+    if rank == 0:
+        # print final results
+        print('\nAggregated results:')
+        prints = args.final_prints
+        aggregate = args.final_prints_aggregate
+
+        if cfg.dataset_type == 'VOCDataset':
+            get_results(
+                eval_results_filename,
+                dataset='voc',
+                prints=prints,
+                aggregate=aggregate)
+        else:
+            get_results(
+                eval_results_filename,
+                dataset='coco',
+                prints=prints,
+                aggregate=aggregate)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/dataset_converters/cityscapes.py 
b/detection_cbnet/docker-build-context/cbnetv2/tools/dataset_converters/cityscapes.py new file mode 100644 index 0000000000000000000000000000000000000000..bde3dac4e6c1fe19f91d0a69baeecdfb50f35ea5 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/dataset_converters/cityscapes.py @@ -0,0 +1,151 @@ +import argparse +import glob +import os.path as osp + +import cityscapesscripts.helpers.labels as CSLabels +import mmcv +import numpy as np +import pycocotools.mask as maskUtils + + +def collect_files(img_dir, gt_dir): + suffix = 'leftImg8bit.png' + files = [] + for img_file in glob.glob(osp.join(img_dir, '**/*.png')): + assert img_file.endswith(suffix), img_file + inst_file = gt_dir + img_file[ + len(img_dir):-len(suffix)] + 'gtFine_instanceIds.png' + # Note that labelIds are not converted to trainId for seg map + segm_file = gt_dir + img_file[ + len(img_dir):-len(suffix)] + 'gtFine_labelIds.png' + files.append((img_file, inst_file, segm_file)) + assert len(files), f'No images found in {img_dir}' + print(f'Loaded {len(files)} images from {img_dir}') + + return files + + +def collect_annotations(files, nproc=1): + print('Loading annotation images') + if nproc > 1: + images = mmcv.track_parallel_progress( + load_img_info, files, nproc=nproc) + else: + images = mmcv.track_progress(load_img_info, files) + + return images + + +def load_img_info(files): + img_file, inst_file, segm_file = files + inst_img = mmcv.imread(inst_file, 'unchanged') + # ids < 24 are stuff labels (filtering them first is about 5% faster) + unique_inst_ids = np.unique(inst_img[inst_img >= 24]) + anno_info = [] + for inst_id in unique_inst_ids: + # For non-crowd annotations, inst_id // 1000 is the label_id + # Crowd annotations have <1000 instance ids + label_id = inst_id // 1000 if inst_id >= 1000 else inst_id + label = CSLabels.id2label[label_id] + if not label.hasInstances or label.ignoreInEval: + continue + + category_id = label.id + iscrowd = int(inst_id < 1000) + mask = np.asarray(inst_img == inst_id, dtype=np.uint8, order='F') + mask_rle = maskUtils.encode(mask[:, :, None])[0] + + area = maskUtils.area(mask_rle) + # convert to COCO style XYWH format + bbox = maskUtils.toBbox(mask_rle) + + # for json encoding + mask_rle['counts'] = mask_rle['counts'].decode() + + anno = dict( + iscrowd=iscrowd, + category_id=category_id, + bbox=bbox.tolist(), + area=area.tolist(), + segmentation=mask_rle) + anno_info.append(anno) + video_name = osp.basename(osp.dirname(img_file)) + img_info = dict( + # remove img_prefix for filename + file_name=osp.join(video_name, osp.basename(img_file)), + height=inst_img.shape[0], + width=inst_img.shape[1], + anno_info=anno_info, + segm_file=osp.join(video_name, osp.basename(segm_file))) + + return img_info + + +def cvt_annotations(image_infos, out_json_name): + out_json = dict() + img_id = 0 + ann_id = 0 + out_json['images'] = [] + out_json['categories'] = [] + out_json['annotations'] = [] + for image_info in image_infos: + image_info['id'] = img_id + anno_infos = image_info.pop('anno_info') + out_json['images'].append(image_info) + for anno_info in anno_infos: + anno_info['image_id'] = img_id + anno_info['id'] = ann_id + out_json['annotations'].append(anno_info) + ann_id += 1 + img_id += 1 + for label in CSLabels.labels: + if label.hasInstances and not label.ignoreInEval: + cat = dict(id=label.id, name=label.name) + out_json['categories'].append(cat) + + if len(out_json['annotations']) == 0: + out_json.pop('annotations') + + mmcv.dump(out_json, out_json_name) + return out_json + + 
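+
+# Illustrative invocation (paths are hypothetical), assuming the standard
+# Cityscapes layout with leftImg8bit/ and gtFine/ under data/cityscapes:
+#   python tools/dataset_converters/cityscapes.py data/cityscapes \
+#       -o data/cityscapes/annotations --nproc 8
+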
+def parse_args():
+    parser = argparse.ArgumentParser(
+        description='Convert Cityscapes annotations to COCO format')
+    parser.add_argument('cityscapes_path', help='cityscapes data path')
+    parser.add_argument('--img-dir', default='leftImg8bit', type=str)
+    parser.add_argument('--gt-dir', default='gtFine', type=str)
+    parser.add_argument('-o', '--out-dir', help='output path')
+    parser.add_argument(
+        '--nproc', default=1, type=int, help='number of processes')
+    args = parser.parse_args()
+    return args
+
+
+def main():
+    args = parse_args()
+    cityscapes_path = args.cityscapes_path
+    out_dir = args.out_dir if args.out_dir else cityscapes_path
+    mmcv.mkdir_or_exist(out_dir)
+
+    img_dir = osp.join(cityscapes_path, args.img_dir)
+    gt_dir = osp.join(cityscapes_path, args.gt_dir)
+
+    set_name = dict(
+        train='instancesonly_filtered_gtFine_train.json',
+        val='instancesonly_filtered_gtFine_val.json',
+        test='instancesonly_filtered_gtFine_test.json')
+
+    for split, json_name in set_name.items():
+        print(f'Converting {split} into {json_name}')
+        with mmcv.Timer(
+                print_tmpl='It took {}s to convert Cityscapes annotation'):
+            files = collect_files(
+                osp.join(img_dir, split), osp.join(gt_dir, split))
+            image_infos = collect_annotations(files, nproc=args.nproc)
+            cvt_annotations(image_infos, osp.join(out_dir, json_name))
+
+
+if __name__ == '__main__':
+    main()
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/dataset_converters/pascal_voc.py b/detection_cbnet/docker-build-context/cbnetv2/tools/dataset_converters/pascal_voc.py
new file mode 100644
index 0000000000000000000000000000000000000000..f109307c3d9bf461fa7e8d29fe2333413534f0d4
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tools/dataset_converters/pascal_voc.py
@@ -0,0 +1,236 @@
+import argparse
+import os.path as osp
+import xml.etree.ElementTree as ET
+
+import mmcv
+import numpy as np
+
+from mmdet.core import voc_classes
+
+label_ids = {name: i for i, name in enumerate(voc_classes())}
+
+
+def parse_xml(args):
+    xml_path, img_path = args
+    tree = ET.parse(xml_path)
+    root = tree.getroot()
+    size = root.find('size')
+    w = int(size.find('width').text)
+    h = int(size.find('height').text)
+    bboxes = []
+    labels = []
+    bboxes_ignore = []
+    labels_ignore = []
+    for obj in root.findall('object'):
+        name = obj.find('name').text
+        label = label_ids[name]
+        difficult = int(obj.find('difficult').text)
+        bnd_box = obj.find('bndbox')
+        bbox = [
+            int(bnd_box.find('xmin').text),
+            int(bnd_box.find('ymin').text),
+            int(bnd_box.find('xmax').text),
+            int(bnd_box.find('ymax').text)
+        ]
+        if difficult:
+            bboxes_ignore.append(bbox)
+            labels_ignore.append(label)
+        else:
+            bboxes.append(bbox)
+            labels.append(label)
+    if not bboxes:
+        bboxes = np.zeros((0, 4))
+        labels = np.zeros((0, ))
+    else:
+        bboxes = np.array(bboxes, ndmin=2) - 1
+        labels = np.array(labels)
+    if not bboxes_ignore:
+        bboxes_ignore = np.zeros((0, 4))
+        labels_ignore = np.zeros((0, ))
+    else:
+        bboxes_ignore = np.array(bboxes_ignore, ndmin=2) - 1
+        labels_ignore = np.array(labels_ignore)
+    annotation = {
+        'filename': img_path,
+        'width': w,
+        'height': h,
+        'ann': {
+            'bboxes': bboxes.astype(np.float32),
+            'labels': labels.astype(np.int64),
+            'bboxes_ignore': bboxes_ignore.astype(np.float32),
+            'labels_ignore': labels_ignore.astype(np.int64)
+        }
+    }
+    return annotation
+
+
+def cvt_annotations(devkit_path, years, split, out_file):
+    if not isinstance(years, list):
+        years = [years]
+    annotations = []
+    for year in years:
+        filelist = osp.join(devkit_path,
f'VOC{year}/ImageSets/Main/{split}.txt') + if not osp.isfile(filelist): + print(f'filelist does not exist: {filelist}, ' + f'skip voc{year} {split}') + return + img_names = mmcv.list_from_file(filelist) + xml_paths = [ + osp.join(devkit_path, f'VOC{year}/Annotations/{img_name}.xml') + for img_name in img_names + ] + img_paths = [ + f'VOC{year}/JPEGImages/{img_name}.jpg' for img_name in img_names + ] + part_annotations = mmcv.track_progress(parse_xml, + list(zip(xml_paths, img_paths))) + annotations.extend(part_annotations) + if out_file.endswith('json'): + annotations = cvt_to_coco_json(annotations) + mmcv.dump(annotations, out_file) + return annotations + + +def cvt_to_coco_json(annotations): + image_id = 0 + annotation_id = 0 + coco = dict() + coco['images'] = [] + coco['type'] = 'instance' + coco['categories'] = [] + coco['annotations'] = [] + image_set = set() + + def addAnnItem(annotation_id, image_id, category_id, bbox, difficult_flag): + annotation_item = dict() + annotation_item['segmentation'] = [] + + seg = [] + # bbox[] is x1,y1,x2,y2 + # left_top + seg.append(int(bbox[0])) + seg.append(int(bbox[1])) + # left_bottom + seg.append(int(bbox[0])) + seg.append(int(bbox[3])) + # right_bottom + seg.append(int(bbox[2])) + seg.append(int(bbox[3])) + # right_top + seg.append(int(bbox[2])) + seg.append(int(bbox[1])) + + annotation_item['segmentation'].append(seg) + + xywh = np.array( + [bbox[0], bbox[1], bbox[2] - bbox[0], bbox[3] - bbox[1]]) + annotation_item['area'] = int(xywh[2] * xywh[3]) + if difficult_flag == 1: + annotation_item['ignore'] = 0 + annotation_item['iscrowd'] = 1 + else: + annotation_item['ignore'] = 0 + annotation_item['iscrowd'] = 0 + annotation_item['image_id'] = int(image_id) + annotation_item['bbox'] = xywh.astype(int).tolist() + annotation_item['category_id'] = int(category_id) + annotation_item['id'] = int(annotation_id) + coco['annotations'].append(annotation_item) + return annotation_id + 1 + + for category_id, name in enumerate(voc_classes()): + category_item = dict() + category_item['supercategory'] = str('none') + category_item['id'] = int(category_id) + category_item['name'] = str(name) + coco['categories'].append(category_item) + + for ann_dict in annotations: + file_name = ann_dict['filename'] + ann = ann_dict['ann'] + assert file_name not in image_set + image_item = dict() + image_item['id'] = int(image_id) + image_item['file_name'] = str(file_name) + image_item['height'] = int(ann_dict['height']) + image_item['width'] = int(ann_dict['width']) + coco['images'].append(image_item) + image_set.add(file_name) + + bboxes = ann['bboxes'][:, :4] + labels = ann['labels'] + for bbox_id in range(len(bboxes)): + bbox = bboxes[bbox_id] + label = labels[bbox_id] + annotation_id = addAnnItem( + annotation_id, image_id, label, bbox, difficult_flag=0) + + bboxes_ignore = ann['bboxes_ignore'][:, :4] + labels_ignore = ann['labels_ignore'] + for bbox_id in range(len(bboxes_ignore)): + bbox = bboxes_ignore[bbox_id] + label = labels_ignore[bbox_id] + annotation_id = addAnnItem( + annotation_id, image_id, label, bbox, difficult_flag=1) + + image_id += 1 + + return coco + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Convert PASCAL VOC annotations to mmdetection format') + parser.add_argument('devkit_path', help='pascal voc devkit path') + parser.add_argument('-o', '--out-dir', help='output path') + parser.add_argument( + '--out-format', + default='pkl', + choices=('pkl', 'coco'), + help='output format, "coco" indicates coco annotation format') + args 
= parser.parse_args() + return args + + +def main(): + args = parse_args() + devkit_path = args.devkit_path + out_dir = args.out_dir if args.out_dir else devkit_path + mmcv.mkdir_or_exist(out_dir) + + years = [] + if osp.isdir(osp.join(devkit_path, 'VOC2007')): + years.append('2007') + if osp.isdir(osp.join(devkit_path, 'VOC2012')): + years.append('2012') + if '2007' in years and '2012' in years: + years.append(['2007', '2012']) + if not years: + raise IOError(f'The devkit path {devkit_path} contains neither ' + '"VOC2007" nor "VOC2012" subfolder') + out_fmt = f'.{args.out_format}' + if args.out_format == 'coco': + out_fmt = '.json' + for year in years: + if year == '2007': + prefix = 'voc07' + elif year == '2012': + prefix = 'voc12' + elif year == ['2007', '2012']: + prefix = 'voc0712' + for split in ['train', 'val', 'trainval']: + dataset_name = prefix + '_' + split + print(f'processing {dataset_name} ...') + cvt_annotations(devkit_path, year, split, + osp.join(out_dir, dataset_name + out_fmt)) + if not isinstance(year, list): + dataset_name = prefix + '_test' + print(f'processing {dataset_name} ...') + cvt_annotations(devkit_path, year, 'test', + osp.join(out_dir, dataset_name + out_fmt)) + print('Done!') + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/mmdet2torchserve.py b/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/mmdet2torchserve.py new file mode 100644 index 0000000000000000000000000000000000000000..d1d8501b37cac2359b45636fbadd65e12979c824 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/mmdet2torchserve.py @@ -0,0 +1,109 @@ +from argparse import ArgumentParser, Namespace +from pathlib import Path +from tempfile import TemporaryDirectory + +import mmcv + +try: + from model_archiver.model_packaging import package_model + from model_archiver.model_packaging_utils import ModelExportUtils +except ImportError: + package_model = None + + +def mmdet2torchserve( + config_file: str, + checkpoint_file: str, + output_folder: str, + model_name: str, + model_version: str = '1.0', + force: bool = False, +): + """Converts MMDetection model (config + checkpoint) to TorchServe `.mar`. + + Args: + config_file: + In MMDetection config format. + The contents vary for each task repository. + checkpoint_file: + In MMDetection checkpoint format. + The contents vary for each task repository. + output_folder: + Folder where `{model_name}.mar` will be created. + The file created will be in TorchServe archive format. + model_name: + If not None, used for naming the `{model_name}.mar` file + that will be created under `output_folder`. + If None, `{Path(checkpoint_file).stem}` will be used. + model_version: + Model's version. + force: + If True, if there is an existing `{model_name}.mar` + file under `output_folder` it will be overwritten. 
+ """ + mmcv.mkdir_or_exist(output_folder) + + config = mmcv.Config.fromfile(config_file) + + with TemporaryDirectory() as tmpdir: + config.dump(f'{tmpdir}/config.py') + + args = Namespace( + **{ + 'model_file': f'{tmpdir}/config.py', + 'serialized_file': checkpoint_file, + 'handler': f'{Path(__file__).parent}/mmdet_handler.py', + 'model_name': model_name or Path(checkpoint_file).stem, + 'version': model_version, + 'export_path': output_folder, + 'force': force, + 'requirements_file': None, + 'extra_files': None, + 'runtime': 'python', + 'archive_format': 'default' + }) + manifest = ModelExportUtils.generate_manifest_json(args) + package_model(args, manifest) + + +def parse_args(): + parser = ArgumentParser( + description='Convert MMDetection models to TorchServe `.mar` format.') + parser.add_argument('config', type=str, help='config file path') + parser.add_argument('checkpoint', type=str, help='checkpoint file path') + parser.add_argument( + '--output-folder', + type=str, + required=True, + help='Folder where `{model_name}.mar` will be created.') + parser.add_argument( + '--model-name', + type=str, + default=None, + help='If not None, used for naming the `{model_name}.mar`' + 'file that will be created under `output_folder`.' + 'If None, `{Path(checkpoint_file).stem}` will be used.') + parser.add_argument( + '--model-version', + type=str, + default='1.0', + help='Number used for versioning.') + parser.add_argument( + '-f', + '--force', + action='store_true', + help='overwrite the existing `{model_name}.mar`') + args = parser.parse_args() + + return args + + +if __name__ == '__main__': + args = parse_args() + + if package_model is None: + raise ImportError('`torch-model-archiver` is required.' + 'Try: pip install torch-model-archiver') + + mmdet2torchserve(args.config, args.checkpoint, args.output_folder, + args.model_name, args.model_version, args.force) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/mmdet_handler.py b/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/mmdet_handler.py new file mode 100644 index 0000000000000000000000000000000000000000..568fcd2f2bfd621a48f00eb572cc027a8a26f08e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/mmdet_handler.py @@ -0,0 +1,69 @@ +import base64 +import os + +import mmcv +import torch +from ts.torch_handler.base_handler import BaseHandler + +from mmdet.apis import inference_detector, init_detector + + +class MMdetHandler(BaseHandler): + threshold = 0.5 + + def initialize(self, context): + properties = context.system_properties + self.map_location = 'cuda' if torch.cuda.is_available() else 'cpu' + self.device = torch.device(self.map_location + ':' + + str(properties.get('gpu_id')) if torch.cuda. 
+
+
+if __name__ == '__main__':
+    args = parse_args()
+
+    if package_model is None:
+        raise ImportError('`torch-model-archiver` is required. '
+                          'Try: pip install torch-model-archiver')
+
+    mmdet2torchserve(args.config, args.checkpoint, args.output_folder,
+                     args.model_name, args.model_version, args.force)
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/mmdet_handler.py b/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/mmdet_handler.py
new file mode 100644
index 0000000000000000000000000000000000000000..568fcd2f2bfd621a48f00eb572cc027a8a26f08e
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/mmdet_handler.py
@@ -0,0 +1,69 @@
+import base64
+import os
+
+import mmcv
+import torch
+from ts.torch_handler.base_handler import BaseHandler
+
+from mmdet.apis import inference_detector, init_detector
+
+
+class MMdetHandler(BaseHandler):
+    threshold = 0.5
+
+    def initialize(self, context):
+        properties = context.system_properties
+        self.map_location = 'cuda' if torch.cuda.is_available() else 'cpu'
+        self.device = torch.device(self.map_location + ':' +
+                                   str(properties.get('gpu_id'))
+                                   if torch.cuda.is_available() else
+                                   self.map_location)
+        self.manifest = context.manifest
+
+        model_dir = properties.get('model_dir')
+        serialized_file = self.manifest['model']['serializedFile']
+        checkpoint = os.path.join(model_dir, serialized_file)
+        self.config_file = os.path.join(model_dir, 'config.py')
+
+        self.model = init_detector(self.config_file, checkpoint, self.device)
+        self.initialized = True
+
+    def preprocess(self, data):
+        images = []
+
+        for row in data:
+            image = row.get('data') or row.get('body')
+            if isinstance(image, str):
+                image = base64.b64decode(image)
+            image = mmcv.imfrombytes(image)
+            images.append(image)
+
+        return images
+
+    def inference(self, data, *args, **kwargs):
+        results = inference_detector(self.model, data)
+        return results
+
+    def postprocess(self, data):
+        # Format output following the example ObjectDetectionHandler format
+        output = []
+        for image_index, image_result in enumerate(data):
+            output.append([])
+            if isinstance(image_result, tuple):
+                bbox_result, segm_result = image_result
+                if isinstance(segm_result, tuple):
+                    segm_result = segm_result[0]  # ms rcnn
+            else:
+                bbox_result, segm_result = image_result, None
+
+            for class_index, class_result in enumerate(bbox_result):
+                class_name = self.model.CLASSES[class_index]
+                for bbox in class_result:
+                    bbox_coords = bbox[:-1].tolist()
+                    score = float(bbox[-1])
+                    if score >= self.threshold:
+                        output[image_index].append({
+                            class_name: bbox_coords,
+                            'score': score
+                        })
+
+        return output
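+
+# Illustrative request flow (names are hypothetical): after serving the
+# archive, e.g. `torchserve --start --model-store model_store --models
+# my_detector.mar`, TorchServe's standard inference API accepts:
+#   curl -X POST http://127.0.0.1:8080/predictions/my_detector -T demo.jpg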
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/onnx2tensorrt.py b/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/onnx2tensorrt.py
new file mode 100644
index 0000000000000000000000000000000000000000..05636d0c728f48cee2906201a0e539b05156d0e2
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/onnx2tensorrt.py
@@ -0,0 +1,253 @@
+import argparse
+import os
+import os.path as osp
+import warnings
+
+import numpy as np
+import onnx
+import torch
+from mmcv import Config
+from mmcv.tensorrt import is_tensorrt_plugin_loaded, onnx2trt, save_trt_engine
+
+from mmdet.core.export import preprocess_example_input
+from mmdet.core.export.model_wrappers import (ONNXRuntimeDetector,
+                                              TensorRTDetector)
+from mmdet.datasets import DATASETS
+
+
+def get_GiB(x: int):
+    """return x GiB."""
+    return x * (1 << 30)
+
+
+def onnx2tensorrt(onnx_file,
+                  trt_file,
+                  input_config,
+                  verify=False,
+                  show=False,
+                  workspace_size=1,
+                  verbose=False):
+    import tensorrt as trt
+    onnx_model = onnx.load(onnx_file)
+    max_shape = input_config['max_shape']
+    min_shape = input_config['min_shape']
+    opt_shape = input_config['opt_shape']
+    fp16_mode = False
+    # create trt engine and wrapper
+    opt_shape_dict = {'input': [min_shape, opt_shape, max_shape]}
+    max_workspace_size = get_GiB(workspace_size)
+    trt_engine = onnx2trt(
+        onnx_model,
+        opt_shape_dict,
+        log_level=trt.Logger.VERBOSE if verbose else trt.Logger.ERROR,
+        fp16_mode=fp16_mode,
+        max_workspace_size=max_workspace_size)
+    save_dir, _ = osp.split(trt_file)
+    if save_dir:
+        os.makedirs(save_dir, exist_ok=True)
+    save_trt_engine(trt_engine, trt_file)
+    print(f'Successfully created TensorRT engine: {trt_file}')
+
+    if verify:
+        # prepare input
+        one_img, one_meta = preprocess_example_input(input_config)
+        img_list, img_meta_list = [one_img], [[one_meta]]
+        img_list = [_.cuda().contiguous() for _ in img_list]
+
+        # wrap ONNX and TensorRT model
+        onnx_model = ONNXRuntimeDetector(onnx_file, CLASSES, device_id=0)
+        trt_model = TensorRTDetector(trt_file, CLASSES, device_id=0)
+
+        # inference with wrapped model
+        with torch.no_grad():
+            onnx_results = onnx_model(
+                img_list, img_metas=img_meta_list, return_loss=False)[0]
+            trt_results = trt_model(
+                img_list, img_metas=img_meta_list, return_loss=False)[0]
+
+        if show:
+            out_file_ort, out_file_trt = None, None
+        else:
+            out_file_ort, out_file_trt = 'show-ort.png', 'show-trt.png'
+        show_img = one_meta['show_img']
+        score_thr = 0.3
+        onnx_model.show_result(
+            show_img,
+            onnx_results,
+            score_thr=score_thr,
+            show=True,
+            win_name='ONNXRuntime',
+            out_file=out_file_ort)
+        trt_model.show_result(
+            show_img,
+            trt_results,
+            score_thr=score_thr,
+            show=True,
+            win_name='TensorRT',
+            out_file=out_file_trt)
+        with_mask = trt_model.with_masks
+        # compare a part of result
+        if with_mask:
+            compare_pairs = list(zip(onnx_results, trt_results))
+        else:
+            compare_pairs = [(onnx_results, trt_results)]
+        err_msg = 'The numerical values are different between ONNXRuntime' + \
+                  ' and TensorRT, but it does not necessarily mean the' + \
+                  ' exported TensorRT engine is problematic.'
+        # check the numerical value
+        for onnx_res, trt_res in compare_pairs:
+            for o_res, t_res in zip(onnx_res, trt_res):
+                np.testing.assert_allclose(
+                    o_res, t_res, rtol=1e-03, atol=1e-05, err_msg=err_msg)
+        print('The numerical values are the same between ONNXRuntime and '
+              'TensorRT')
+
+
+def parse_normalize_cfg(test_pipeline):
+    transforms = None
+    for pipeline in test_pipeline:
+        if 'transforms' in pipeline:
+            transforms = pipeline['transforms']
+            break
+    assert transforms is not None, 'Failed to find `transforms`'
+    norm_config_li = [_ for _ in transforms if _['type'] == 'Normalize']
+    assert len(norm_config_li) == 1, '`norm_config` should only have one'
+    norm_config = norm_config_li[0]
+    return norm_config
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(
+        description='Convert MMDetection models from ONNX to TensorRT')
+    parser.add_argument('config', help='test config file path')
+    parser.add_argument('model', help='Filename of input ONNX model')
+    parser.add_argument(
+        '--trt-file',
+        type=str,
+        default='tmp.trt',
+        help='Filename of output TensorRT engine')
+    parser.add_argument(
+        '--input-img', type=str, default='', help='Image for test')
+    parser.add_argument(
+        '--show', action='store_true', help='Whether to show output results')
+    parser.add_argument(
+        '--dataset',
+        type=str,
+        default='coco',
+        help='Dataset name. This argument is deprecated and will be \
+            removed in future releases.')
+    parser.add_argument(
+        '--verify',
+        action='store_true',
+        help='Verify the outputs of ONNXRuntime and TensorRT')
+    parser.add_argument(
+        '--verbose',
+        action='store_true',
+        help='Whether to verbose logging messages while creating \
+            TensorRT engine. Defaults to False.')
+    parser.add_argument(
+        '--to-rgb',
+        action='store_false',
+        help='Feed model with RGB or BGR image. Default is RGB. This \
+            argument is deprecated and will be removed in future releases.')
+    parser.add_argument(
+        '--shape',
+        type=int,
+        nargs='+',
+        default=[400, 600],
+        help='Input size of the model')
+    parser.add_argument(
+        '--mean',
+        type=float,
+        nargs='+',
+        default=[123.675, 116.28, 103.53],
+        help='Mean value used to preprocess input data. This argument \
+            is deprecated and will be removed in future releases.')
\ + This argument is deprecated and will be removed in future releases.') + parser.add_argument( + '--min-shape', + type=int, + nargs='+', + default=None, + help='Minimum input size of the model in TensorRT') + parser.add_argument( + '--max-shape', + type=int, + nargs='+', + default=None, + help='Maximum input size of the model in TensorRT') + parser.add_argument( + '--workspace-size', + type=int, + default=1, + help='Max workspace size in GiB') + + args = parser.parse_args() + return args + + +if __name__ == '__main__': + + assert is_tensorrt_plugin_loaded(), 'TensorRT plugin should be compiled.' + args = parse_args() + warnings.warn( + 'Arguments like `--to-rgb`, `--mean`, `--std`, `--dataset` would be \ + parsed directly from config file and are deprecated and will be \ + removed in future releases.') + if not args.input_img: + args.input_img = osp.join(osp.dirname(__file__), '../demo/demo.jpg') + + cfg = Config.fromfile(args.config) + + def parse_shape(shape): + if len(shape) == 1: + shape = (1, 3, shape[0], shape[0]) + elif len(args.shape) == 2: + shape = (1, 3) + tuple(shape) + else: + raise ValueError('invalid input shape') + return shape + + if args.shape: + input_shape = parse_shape(args.shape) + else: + img_scale = cfg.test_pipeline[1]['img_scale'] + input_shape = (1, 3, img_scale[1], img_scale[0]) + + if not args.max_shape: + max_shape = input_shape + else: + max_shape = parse_shape(args.max_shape) + + if not args.min_shape: + min_shape = input_shape + else: + min_shape = parse_shape(args.min_shape) + + dataset = DATASETS.get(cfg.data.test['type']) + assert (dataset is not None) + CLASSES = dataset.CLASSES + normalize_cfg = parse_normalize_cfg(cfg.test_pipeline) + + input_config = { + 'min_shape': min_shape, + 'opt_shape': input_shape, + 'max_shape': max_shape, + 'input_shape': input_shape, + 'input_path': args.input_img, + 'normalize_cfg': normalize_cfg + } + # Create TensorRT engine + onnx2tensorrt( + args.model, + args.trt_file, + input_config, + verify=args.verify, + show=args.show, + workspace_size=args.workspace_size, + verbose=args.verbose) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/pytorch2onnx.py b/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/pytorch2onnx.py new file mode 100644 index 0000000000000000000000000000000000000000..d9c02195440540ef1992dd8335542b545b4c0874 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/pytorch2onnx.py @@ -0,0 +1,305 @@ +import argparse +import os.path as osp +import warnings +from functools import partial + +import numpy as np +import onnx +import torch +from mmcv import Config, DictAction + +from mmdet.core.export import build_model_from_cfg, preprocess_example_input +from mmdet.core.export.model_wrappers import ONNXRuntimeDetector + + +def pytorch2onnx(model, + input_img, + input_shape, + normalize_cfg, + opset_version=11, + show=False, + output_file='tmp.onnx', + verify=False, + test_img=None, + do_simplify=False, + dynamic_export=None): + + input_config = { + 'input_shape': input_shape, + 'input_path': input_img, + 'normalize_cfg': normalize_cfg + } + # prepare input + one_img, one_meta = preprocess_example_input(input_config) + img_list, img_meta_list = [one_img], [[one_meta]] + # replace original forward function + origin_forward = model.forward + model.forward = partial( + model.forward, + img_metas=img_meta_list, + return_loss=False, + rescale=False) + + output_names = ['dets', 'labels'] + if model.with_mask: + output_names.append('masks') + input_name 
= 'input' + dynamic_axes = None + if dynamic_export: + dynamic_axes = { + input_name: { + 0: 'batch', + 2: 'width', + 3: 'height' + }, + 'dets': { + 0: 'batch', + 1: 'num_dets', + }, + 'labels': { + 0: 'batch', + 1: 'num_dets', + }, + } + if model.with_mask: + dynamic_axes['masks'] = {0: 'batch', 1: 'num_dets'} + + torch.onnx.export( + model, + img_list, + output_file, + input_names=[input_name], + output_names=output_names, + export_params=True, + keep_initializers_as_inputs=True, + do_constant_folding=True, + verbose=show, + opset_version=opset_version, + dynamic_axes=dynamic_axes) + + model.forward = origin_forward + + # get the custom op path + ort_custom_op_path = '' + try: + from mmcv.ops import get_onnxruntime_op_path + ort_custom_op_path = get_onnxruntime_op_path() + except (ImportError, ModuleNotFoundError): + warnings.warn('If input model has custom op from mmcv, \ + you may have to build mmcv with ONNXRuntime from source.') + + if do_simplify: + import onnxsim + + from mmdet import digit_version + + min_required_version = '0.3.0' + assert digit_version(onnxsim.__version__) >= digit_version( + min_required_version + ), f'Requires to install onnx-simplify>={min_required_version}' + + input_dic = {'input': img_list[0].detach().cpu().numpy()} + onnxsim.simplify( + output_file, input_data=input_dic, custom_lib=ort_custom_op_path) + print(f'Successfully exported ONNX model: {output_file}') + + if verify: + # check by onnx + onnx_model = onnx.load(output_file) + onnx.checker.check_model(onnx_model) + + # wrap onnx model + onnx_model = ONNXRuntimeDetector(output_file, model.CLASSES, 0) + if dynamic_export: + # scale up to test dynamic shape + h, w = [int((_ * 1.5) // 32 * 32) for _ in input_shape[2:]] + h, w = min(1344, h), min(1344, w) + input_config['input_shape'] = (1, 3, h, w) + + if test_img is None: + input_config['input_path'] = input_img + + # prepare input once again + one_img, one_meta = preprocess_example_input(input_config) + img_list, img_meta_list = [one_img], [[one_meta]] + + # get pytorch output + pytorch_results = model( + img_list, img_metas=img_meta_list, return_loss=False, + rescale=True)[0] + + img_list = [_.cuda().contiguous() for _ in img_list] + if dynamic_export: + img_list = img_list + [_.flip(-1).contiguous() for _ in img_list] + img_meta_list = img_meta_list * 2 + # get onnx output + onnx_results = onnx_model( + img_list, img_metas=img_meta_list, return_loss=False)[0] + # visualize predictions + score_thr = 0.3 + if show: + out_file_ort, out_file_pt = None, None + else: + out_file_ort, out_file_pt = 'show-ort.png', 'show-pt.png' + + show_img = one_meta['show_img'] + model.show_result( + show_img, + pytorch_results, + score_thr=score_thr, + show=True, + win_name='PyTorch', + out_file=out_file_pt) + onnx_model.show_result( + show_img, + onnx_results, + score_thr=score_thr, + show=True, + win_name='ONNXRuntime', + out_file=out_file_ort) + + # compare a part of result + if model.with_mask: + compare_pairs = list(zip(onnx_results, pytorch_results)) + else: + compare_pairs = [(onnx_results, pytorch_results)] + err_msg = 'The numerical values are different between Pytorch' + \ + ' and ONNX, but it does not necessarily mean the' + \ + ' exported ONNX model is problematic.' 
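+        # (added note, not in the upstream script) the tolerances below are
+        # deliberately loose: PyTorch and ONNXRuntime may order floating-point
+        # reductions differently, so small element-wise drift alone does not
+        # indicate a broken export.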
+ # check the numerical value + for onnx_res, pytorch_res in compare_pairs: + for o_res, p_res in zip(onnx_res, pytorch_res): + np.testing.assert_allclose( + o_res, p_res, rtol=1e-03, atol=1e-05, err_msg=err_msg) + print('The numerical values are the same between Pytorch and ONNX') + + +def parse_normalize_cfg(test_pipeline): + transforms = None + for pipeline in test_pipeline: + if 'transforms' in pipeline: + transforms = pipeline['transforms'] + break + assert transforms is not None, 'Failed to find `transforms`' + norm_config_li = [_ for _ in transforms if _['type'] == 'Normalize'] + assert len(norm_config_li) == 1, '`norm_config` should only have one' + norm_config = norm_config_li[0] + return norm_config + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Convert MMDetection models to ONNX') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint', help='checkpoint file') + parser.add_argument('--input-img', type=str, help='Images for input') + parser.add_argument( + '--show', + action='store_true', + help='Show onnx graph and detection outputs') + parser.add_argument('--output-file', type=str, default='tmp.onnx') + parser.add_argument('--opset-version', type=int, default=11) + parser.add_argument( + '--test-img', type=str, default=None, help='Images for test') + parser.add_argument( + '--dataset', + type=str, + default='coco', + help='Dataset name. This argument is deprecated and will be removed \ + in future releases.') + parser.add_argument( + '--verify', + action='store_true', + help='verify the onnx model output against pytorch output') + parser.add_argument( + '--simplify', + action='store_true', + help='Whether to simplify onnx model.') + parser.add_argument( + '--shape', + type=int, + nargs='+', + default=[800, 1216], + help='input image size') + parser.add_argument( + '--mean', + type=float, + nargs='+', + default=[123.675, 116.28, 103.53], + help='mean value used for preprocess input data.This argument \ + is deprecated and will be removed in future releases.') + parser.add_argument( + '--std', + type=float, + nargs='+', + default=[58.395, 57.12, 57.375], + help='variance value used for preprocess input data. ' + 'This argument is deprecated and will be removed in future releases.') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='Override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. 
key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + parser.add_argument( + '--dynamic-export', + action='store_true', + help='Whether to export onnx with dynamic axis.') + args = parser.parse_args() + return args + + +if __name__ == '__main__': + args = parse_args() + warnings.warn('Arguments like `--mean`, `--std`, `--dataset` would be \ + parsed directly from config file and are deprecated and \ + will be removed in future releases.') + + assert args.opset_version == 11, 'MMDet only support opset 11 now' + + try: + from mmcv.onnx.symbolic import register_extra_symbolics + except ModuleNotFoundError: + raise NotImplementedError('please update mmcv to version>=v1.0.4') + register_extra_symbolics(args.opset_version) + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + + if args.shape is None: + img_scale = cfg.test_pipeline[1]['img_scale'] + input_shape = (1, 3, img_scale[1], img_scale[0]) + elif len(args.shape) == 1: + input_shape = (1, 3, args.shape[0], args.shape[0]) + elif len(args.shape) == 2: + input_shape = (1, 3) + tuple(args.shape) + else: + raise ValueError('invalid input shape') + + # build the model and load checkpoint + model = build_model_from_cfg(args.config, args.checkpoint, + args.cfg_options) + + if not args.input_img: + args.input_img = osp.join(osp.dirname(__file__), '../../demo/demo.jpg') + + normalize_cfg = parse_normalize_cfg(cfg.test_pipeline) + + # convert model to onnx file + pytorch2onnx( + model, + args.input_img, + input_shape, + normalize_cfg, + opset_version=args.opset_version, + show=args.show, + output_file=args.output_file, + verify=args.verify, + test_img=args.test_img, + do_simplify=args.simplify, + dynamic_export=args.dynamic_export) diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/test.py b/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/test.py new file mode 100644 index 0000000000000000000000000000000000000000..a8341aaf7be0270ee81e1194fd9d83b31a02d83c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/deployment/test.py @@ -0,0 +1,142 @@ +import argparse + +import mmcv +from mmcv import Config, DictAction +from mmcv.parallel import MMDataParallel + +from mmdet.apis import single_gpu_test +from mmdet.datasets import (build_dataloader, build_dataset, + replace_ImageToTensor) + + +def parse_args(): + parser = argparse.ArgumentParser( + description='MMDet test (and eval) an ONNX model using ONNXRuntime') + parser.add_argument('config', help='test config file path') + parser.add_argument('model', help='Input model file') + parser.add_argument('--out', help='output result file in pickle format') + parser.add_argument( + '--format-only', + action='store_true', + help='Format the output results without perform evaluation. It is' + 'useful when you want to format the result to a specific format and ' + 'submit it to the test server') + parser.add_argument( + '--backend', + required=True, + choices=['onnxruntime', 'tensorrt'], + help='Backend for input model to run. 
') + parser.add_argument( + '--eval', + type=str, + nargs='+', + help='evaluation metrics, which depends on the dataset, e.g., "bbox",' + ' "segm", "proposal" for COCO, and "mAP", "recall" for PASCAL VOC') + parser.add_argument('--show', action='store_true', help='show results') + parser.add_argument( + '--show-dir', help='directory where painted images will be saved') + parser.add_argument( + '--show-score-thr', + type=float, + default=0.3, + help='score threshold (default: 0.3)') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + parser.add_argument( + '--eval-options', + nargs='+', + action=DictAction, + help='custom options for evaluation, the key-value pair in xxx=yyy ' + 'format will be kwargs for dataset.evaluate() function') + + args = parser.parse_args() + return args + + +def main(): + args = parse_args() + + assert args.out or args.eval or args.format_only or args.show \ + or args.show_dir, \ + ('Please specify at least one operation (save/eval/format/show the ' + 'results / save the results) with the argument "--out", "--eval"' + ', "--format-only", "--show" or "--show-dir"') + + if args.eval and args.format_only: + raise ValueError('--eval and --format_only cannot be both specified') + + if args.out is not None and not args.out.endswith(('.pkl', '.pickle')): + raise ValueError('The output file must be a pkl file.') + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + + # in case the test dataset is concatenated + samples_per_gpu = 1 + if isinstance(cfg.data.test, dict): + cfg.data.test.test_mode = True + samples_per_gpu = cfg.data.test.pop('samples_per_gpu', 1) + if samples_per_gpu > 1: + # Replace 'ImageToTensor' to 'DefaultFormatBundle' + cfg.data.test.pipeline = replace_ImageToTensor( + cfg.data.test.pipeline) + elif isinstance(cfg.data.test, list): + for ds_cfg in cfg.data.test: + ds_cfg.test_mode = True + samples_per_gpu = max( + [ds_cfg.pop('samples_per_gpu', 1) for ds_cfg in cfg.data.test]) + if samples_per_gpu > 1: + for ds_cfg in cfg.data.test: + ds_cfg.pipeline = replace_ImageToTensor(ds_cfg.pipeline) + + # build the dataloader + dataset = build_dataset(cfg.data.test) + data_loader = build_dataloader( + dataset, + samples_per_gpu=samples_per_gpu, + workers_per_gpu=cfg.data.workers_per_gpu, + dist=False, + shuffle=False) + + if args.backend == 'onnxruntime': + from mmdet.core.export.model_wrappers import ONNXRuntimeDetector + model = ONNXRuntimeDetector( + args.model, class_names=dataset.CLASSES, device_id=0) + elif args.backend == 'tensorrt': + from mmdet.core.export.model_wrappers import TensorRTDetector + model = TensorRTDetector( + args.model, class_names=dataset.CLASSES, device_id=0) + + model = MMDataParallel(model, device_ids=[0]) + outputs = single_gpu_test(model, data_loader, args.show, args.show_dir, + args.show_score_thr) + + if args.out: + print(f'\nwriting results to {args.out}') + mmcv.dump(outputs, args.out) + kwargs = {} if args.eval_options is None else args.eval_options + if args.format_only: + dataset.format_results(outputs, **kwargs) + if args.eval: + eval_kwargs = cfg.get('evaluation', 
{}).copy() + # hard-code way to remove EvalHook args + for key in [ + 'interval', 'tmpdir', 'start', 'gpu_collect', 'save_best', + 'rule' + ]: + eval_kwargs.pop(key, None) + eval_kwargs.update(dict(metric=args.eval, **kwargs)) + print(dataset.evaluate(outputs, **eval_kwargs)) + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/dist_test.sh b/detection_cbnet/docker-build-context/cbnetv2/tools/dist_test.sh new file mode 100755 index 0000000000000000000000000000000000000000..3c74ec6ecd1f08049a3234f2562f8be7107ed6ec --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/dist_test.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +CONFIG=$1 +CHECKPOINT=$2 +GPUS=$3 +PORT=${PORT:-29500} + +PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \ +python -m torch.distributed.launch --nproc_per_node=$GPUS --master_port=$PORT \ + $(dirname "$0")/test.py $CONFIG $CHECKPOINT --launcher pytorch ${@:4} diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/dist_train.sh b/detection_cbnet/docker-build-context/cbnetv2/tools/dist_train.sh new file mode 100755 index 0000000000000000000000000000000000000000..5b43fffbf28fc9b8ba7c14efcd5e4f8b19279470 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/dist_train.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +CONFIG=$1 +GPUS=$2 +PORT=${PORT:-29500} + +PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \ +python -m torch.distributed.launch --nproc_per_node=$GPUS --master_port=$PORT \ + $(dirname "$0")/train.py $CONFIG --launcher pytorch ${@:3} diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/misc/browse_dataset.py b/detection_cbnet/docker-build-context/cbnetv2/tools/misc/browse_dataset.py new file mode 100644 index 0000000000000000000000000000000000000000..0c9385fa70e12a912d8963212cc62bf94f83fa7c --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/misc/browse_dataset.py @@ -0,0 +1,96 @@ +import argparse +import os +from pathlib import Path + +import mmcv +from mmcv import Config, DictAction + +from mmdet.core.utils import mask2ndarray +from mmdet.core.visualization import imshow_det_bboxes +from mmdet.datasets.builder import build_dataset + + +def parse_args(): + parser = argparse.ArgumentParser(description='Browse a dataset') + parser.add_argument('config', help='train config file path') + parser.add_argument( + '--skip-type', + type=str, + nargs='+', + default=['DefaultFormatBundle', 'Normalize', 'Collect'], + help='skip some useless pipeline') + parser.add_argument( + '--output-dir', + default=None, + type=str, + help='If there is no display interface, you can save it') + parser.add_argument('--not-show', default=False, action='store_true') + parser.add_argument( + '--show-interval', + type=float, + default=2, + help='the interval of show (s)') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + args = parser.parse_args() + return args + + +def retrieve_data_cfg(config_path, skip_type, cfg_options): + cfg = Config.fromfile(config_path) + if cfg_options is not None: + cfg.merge_from_dict(cfg_options) + # import modules from string list. 
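+    # (added note, not in the upstream script) a config opts in with e.g.
+    #   custom_imports = dict(imports=['my_pkg.my_module'],
+    #                         allow_failed_imports=False)
+    # ('my_pkg.my_module' is a hypothetical name), so datasets and pipelines
+    # registered outside mmdet become importable here as well.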
+ if cfg.get('custom_imports', None): + from mmcv.utils import import_modules_from_strings + import_modules_from_strings(**cfg['custom_imports']) + train_data_cfg = cfg.data.train + train_data_cfg['pipeline'] = [ + x for x in train_data_cfg.pipeline if x['type'] not in skip_type + ] + + return cfg + + +def main(): + args = parse_args() + cfg = retrieve_data_cfg(args.config, args.skip_type, args.cfg_options) + + dataset = build_dataset(cfg.data.train) + + progress_bar = mmcv.ProgressBar(len(dataset)) + + for item in dataset: + filename = os.path.join(args.output_dir, + Path(item['filename']).name + ) if args.output_dir is not None else None + + gt_masks = item.get('gt_masks', None) + if gt_masks is not None: + gt_masks = mask2ndarray(gt_masks) + + imshow_det_bboxes( + item['img'], + item['gt_bboxes'], + item['gt_labels'], + gt_masks, + class_names=dataset.CLASSES, + show=not args.not_show, + wait_time=args.show_interval, + out_file=filename, + bbox_color=(255, 102, 61), + text_color=(255, 102, 61)) + + progress_bar.update() + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/misc/print_config.py b/detection_cbnet/docker-build-context/cbnetv2/tools/misc/print_config.py new file mode 100644 index 0000000000000000000000000000000000000000..3627f81fed059f2e819dc6544fac103e1a1e6c17 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/misc/print_config.py @@ -0,0 +1,54 @@ +import argparse +import warnings + +from mmcv import Config, DictAction + + +def parse_args(): + parser = argparse.ArgumentParser(description='Print the whole config') + parser.add_argument('config', help='config file path') + parser.add_argument( + '--options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file (deprecate), ' + 'change to --cfg-options instead.') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + args = parser.parse_args() + + if args.options and args.cfg_options: + raise ValueError( + '--options and --cfg-options cannot be both ' + 'specified, --options is deprecated in favor of --cfg-options') + if args.options: + warnings.warn('--options is deprecated in favor of --cfg-options') + args.cfg_options = args.options + + return args + + +def main(): + args = parse_args() + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + # import modules from string list. 
+ if cfg.get('custom_imports', None): + from mmcv.utils import import_modules_from_strings + import_modules_from_strings(**cfg['custom_imports']) + print(f'Config:\n{cfg.pretty_text}') + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/detectron2pytorch.py b/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/detectron2pytorch.py new file mode 100644 index 0000000000000000000000000000000000000000..961e6f571b785f01236a660651323cc6372e8189 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/detectron2pytorch.py @@ -0,0 +1,82 @@ +import argparse +from collections import OrderedDict + +import mmcv +import torch + +arch_settings = {50: (3, 4, 6, 3), 101: (3, 4, 23, 3)} + + +def convert_bn(blobs, state_dict, caffe_name, torch_name, converted_names): + # detectron replace bn with affine channel layer + state_dict[torch_name + '.bias'] = torch.from_numpy(blobs[caffe_name + + '_b']) + state_dict[torch_name + '.weight'] = torch.from_numpy(blobs[caffe_name + + '_s']) + bn_size = state_dict[torch_name + '.weight'].size() + state_dict[torch_name + '.running_mean'] = torch.zeros(bn_size) + state_dict[torch_name + '.running_var'] = torch.ones(bn_size) + converted_names.add(caffe_name + '_b') + converted_names.add(caffe_name + '_s') + + +def convert_conv_fc(blobs, state_dict, caffe_name, torch_name, + converted_names): + state_dict[torch_name + '.weight'] = torch.from_numpy(blobs[caffe_name + + '_w']) + converted_names.add(caffe_name + '_w') + if caffe_name + '_b' in blobs: + state_dict[torch_name + '.bias'] = torch.from_numpy(blobs[caffe_name + + '_b']) + converted_names.add(caffe_name + '_b') + + +def convert(src, dst, depth): + """Convert keys in detectron pretrained ResNet models to pytorch style.""" + # load arch_settings + if depth not in arch_settings: + raise ValueError('Only support ResNet-50 and ResNet-101 currently') + block_nums = arch_settings[depth] + # load caffe model + caffe_model = mmcv.load(src, encoding='latin1') + blobs = caffe_model['blobs'] if 'blobs' in caffe_model else caffe_model + # convert to pytorch style + state_dict = OrderedDict() + converted_names = set() + convert_conv_fc(blobs, state_dict, 'conv1', 'conv1', converted_names) + convert_bn(blobs, state_dict, 'res_conv1_bn', 'bn1', converted_names) + for i in range(1, len(block_nums) + 1): + for j in range(block_nums[i - 1]): + if j == 0: + convert_conv_fc(blobs, state_dict, f'res{i + 1}_{j}_branch1', + f'layer{i}.{j}.downsample.0', converted_names) + convert_bn(blobs, state_dict, f'res{i + 1}_{j}_branch1_bn', + f'layer{i}.{j}.downsample.1', converted_names) + for k, letter in enumerate(['a', 'b', 'c']): + convert_conv_fc(blobs, state_dict, + f'res{i + 1}_{j}_branch2{letter}', + f'layer{i}.{j}.conv{k+1}', converted_names) + convert_bn(blobs, state_dict, + f'res{i + 1}_{j}_branch2{letter}_bn', + f'layer{i}.{j}.bn{k + 1}', converted_names) + # check if all layers are converted + for key in blobs: + if key not in converted_names: + print(f'Not Convert: {key}') + # save checkpoint + checkpoint = dict() + checkpoint['state_dict'] = state_dict + torch.save(checkpoint, dst) + + +def main(): + parser = argparse.ArgumentParser(description='Convert model keys') + parser.add_argument('src', help='src detectron model path') + parser.add_argument('dst', help='save path') + parser.add_argument('depth', type=int, help='ResNet model depth') + args = parser.parse_args() + convert(args.src, args.dst, args.depth) + + +if 
__name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/publish_model.py b/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/publish_model.py new file mode 100644 index 0000000000000000000000000000000000000000..e8926246ec701a91d8535cf574ca73e86fc0045e --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/publish_model.py @@ -0,0 +1,42 @@ +import argparse +import subprocess + +import torch + + +def parse_args(): + parser = argparse.ArgumentParser( + description='Process a checkpoint to be published') + parser.add_argument('in_file', help='input checkpoint filename') + parser.add_argument('out_file', help='output checkpoint filename') + args = parser.parse_args() + return args + + +def process_checkpoint(in_file, out_file): + checkpoint = torch.load(in_file, map_location='cpu') + # remove optimizer for smaller file size + if 'optimizer' in checkpoint: + del checkpoint['optimizer'] + # if it is necessary to remove some sensitive data in checkpoint['meta'], + # add the code here. + if torch.__version__ >= '1.6': + torch.save(checkpoint, out_file, _use_new_zipfile_serialization=False) + else: + torch.save(checkpoint, out_file) + sha = subprocess.check_output(['sha256sum', out_file]).decode() + if out_file.endswith('.pth'): + out_file_name = out_file[:-4] + else: + out_file_name = out_file + final_file = out_file_name + f'-{sha[:8]}.pth' + subprocess.Popen(['mv', out_file, final_file]) + + +def main(): + args = parse_args() + process_checkpoint(args.in_file, args.out_file) + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/regnet2mmdet.py b/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/regnet2mmdet.py new file mode 100644 index 0000000000000000000000000000000000000000..9f4e316d37569a6fbeb6329bd36abaa822b20ccf --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/regnet2mmdet.py @@ -0,0 +1,89 @@ +import argparse +from collections import OrderedDict + +import torch + + +def convert_stem(model_key, model_weight, state_dict, converted_names): + new_key = model_key.replace('stem.conv', 'conv1') + new_key = new_key.replace('stem.bn', 'bn1') + state_dict[new_key] = model_weight + converted_names.add(model_key) + print(f'Convert {model_key} to {new_key}') + + +def convert_head(model_key, model_weight, state_dict, converted_names): + new_key = model_key.replace('head.fc', 'fc') + state_dict[new_key] = model_weight + converted_names.add(model_key) + print(f'Convert {model_key} to {new_key}') + + +def convert_reslayer(model_key, model_weight, state_dict, converted_names): + split_keys = model_key.split('.') + layer, block, module = split_keys[:3] + block_id = int(block[1:]) + layer_name = f'layer{int(layer[1:])}' + block_name = f'{block_id - 1}' + + if block_id == 1 and module == 'bn': + new_key = f'{layer_name}.{block_name}.downsample.1.{split_keys[-1]}' + elif block_id == 1 and module == 'proj': + new_key = f'{layer_name}.{block_name}.downsample.0.{split_keys[-1]}' + elif module == 'f': + if split_keys[3] == 'a_bn': + module_name = 'bn1' + elif split_keys[3] == 'b_bn': + module_name = 'bn2' + elif split_keys[3] == 'c_bn': + module_name = 'bn3' + elif split_keys[3] == 'a': + module_name = 'conv1' + elif split_keys[3] == 'b': + module_name = 'conv2' + elif split_keys[3] == 'c': + module_name = 'conv3' + new_key = f'{layer_name}.{block_name}.{module_name}.{split_keys[-1]}' + 
else:
+        raise ValueError(f'Unsupported conversion of key {model_key}')
+    print(f'Convert {model_key} to {new_key}')
+    state_dict[new_key] = model_weight
+    converted_names.add(model_key)
+
+
+def convert(src, dst):
+    """Convert keys in pycls pretrained RegNet models to mmdet style."""
+    # load pycls model
+    regnet_model = torch.load(src)
+    blobs = regnet_model['model_state']
+    # convert to pytorch style
+    state_dict = OrderedDict()
+    converted_names = set()
+    for key, weight in blobs.items():
+        if 'stem' in key:
+            convert_stem(key, weight, state_dict, converted_names)
+        elif 'head' in key:
+            convert_head(key, weight, state_dict, converted_names)
+        elif key.startswith('s'):
+            convert_reslayer(key, weight, state_dict, converted_names)
+
+    # check if all layers are converted
+    for key in blobs:
+        if key not in converted_names:
+            print(f'not converted: {key}')
+    # save checkpoint
+    checkpoint = dict()
+    checkpoint['state_dict'] = state_dict
+    torch.save(checkpoint, dst)
+
+
+def main():
+    parser = argparse.ArgumentParser(description='Convert model keys')
+    parser.add_argument('src', help='src pycls RegNet model path')
+    parser.add_argument('dst', help='save path')
+    args = parser.parse_args()
+    convert(args.src, args.dst)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/selfsup2mmdet.py b/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/selfsup2mmdet.py
new file mode 100644
index 0000000000000000000000000000000000000000..86daef65ac57ddd273f090f546f9f2894a5600fa
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/selfsup2mmdet.py
@@ -0,0 +1,41 @@
+import argparse
+from collections import OrderedDict
+
+import torch
+
+
+def moco_convert(src, dst):
+    """Convert keys in MoCo pretrained models to mmdet style."""
+    # load the MoCo checkpoint and keep only the query-encoder weights
+    moco_model = torch.load(src)
+    blobs = moco_model['state_dict']
+    # convert to pytorch style
+    state_dict = OrderedDict()
+    for k, v in blobs.items():
+        if not k.startswith('module.encoder_q.'):
+            continue
+        old_k = k
+        k = k.replace('module.encoder_q.', '')
+        state_dict[k] = v
+        print(old_k, '->', k)
+    # save checkpoint
+    checkpoint = dict()
+    checkpoint['state_dict'] = state_dict
+    torch.save(checkpoint, dst)
+
+
+def main():
+    parser = argparse.ArgumentParser(description='Convert model keys')
+    parser.add_argument('src', help='src self-supervised model path')
+    parser.add_argument('dst', help='save path')
+    parser.add_argument(
+        '--selfsup', type=str, choices=['moco', 'swav'], help='self-supervised pretraining method')
+    args = parser.parse_args()
+    if args.selfsup == 'moco':
+        moco_convert(args.src, args.dst)
+    elif args.selfsup == 'swav':
+        print('SWAV does not need to convert the keys')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/upgrade_model_version.py b/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/upgrade_model_version.py
new file mode 100644
index 0000000000000000000000000000000000000000..232c8bc4cf010084b817c545ab4e2ef34fdd4549
--- /dev/null
+++ b/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/upgrade_model_version.py
@@ -0,0 +1,209 @@
+import argparse
+import re
+import tempfile
+from collections import OrderedDict
+
+import torch
+from mmcv import Config
+
+
+def is_head(key):
+    valid_head_list = [
+        'bbox_head', 'mask_head', 'semantic_head', 'grid_head', 'mask_iou_head'
+    ]
+
+    return any(key.startswith(h) for h in valid_head_list)
+
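+# (added commentary, not part of the original file) worked example for
+# is_head above: pre-2.0 checkpoints keep head weights at the top level, e.g.
+#   is_head('bbox_head.fc_cls.weight')  -> True   (re-namespaced under
+#                                                  'roi_head.' for two-stage models)
+#   is_head('backbone.conv1.weight')    -> False  (left untouched)
+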
+def parse_config(config_strings): + temp_file = tempfile.NamedTemporaryFile() + config_path = f'{temp_file.name}.py' + with open(config_path, 'w') as f: + f.write(config_strings) + + config = Config.fromfile(config_path) + is_two_stage = True + is_ssd = False + is_retina = False + reg_cls_agnostic = False + if 'rpn_head' not in config.model: + is_two_stage = False + # check whether it is SSD + if config.model.bbox_head.type == 'SSDHead': + is_ssd = True + elif config.model.bbox_head.type == 'RetinaHead': + is_retina = True + elif isinstance(config.model['bbox_head'], list): + reg_cls_agnostic = True + elif 'reg_class_agnostic' in config.model.bbox_head: + reg_cls_agnostic = config.model.bbox_head \ + .reg_class_agnostic + temp_file.close() + return is_two_stage, is_ssd, is_retina, reg_cls_agnostic + + +def reorder_cls_channel(val, num_classes=81): + # bias + if val.dim() == 1: + new_val = torch.cat((val[1:], val[:1]), dim=0) + # weight + else: + out_channels, in_channels = val.shape[:2] + # conv_cls for softmax output + if out_channels != num_classes and out_channels % num_classes == 0: + new_val = val.reshape(-1, num_classes, in_channels, *val.shape[2:]) + new_val = torch.cat((new_val[:, 1:], new_val[:, :1]), dim=1) + new_val = new_val.reshape(val.size()) + # fc_cls + elif out_channels == num_classes: + new_val = torch.cat((val[1:], val[:1]), dim=0) + # agnostic | retina_cls | rpn_cls + else: + new_val = val + + return new_val + + +def truncate_cls_channel(val, num_classes=81): + + # bias + if val.dim() == 1: + if val.size(0) % num_classes == 0: + new_val = val[:num_classes - 1] + else: + new_val = val + # weight + else: + out_channels, in_channels = val.shape[:2] + # conv_logits + if out_channels % num_classes == 0: + new_val = val.reshape(num_classes, in_channels, *val.shape[2:])[1:] + new_val = new_val.reshape(-1, *val.shape[1:]) + # agnostic + else: + new_val = val + + return new_val + + +def truncate_reg_channel(val, num_classes=81): + # bias + if val.dim() == 1: + # fc_reg | rpn_reg + if val.size(0) % num_classes == 0: + new_val = val.reshape(num_classes, -1)[:num_classes - 1] + new_val = new_val.reshape(-1) + # agnostic + else: + new_val = val + # weight + else: + out_channels, in_channels = val.shape[:2] + # fc_reg | rpn_reg + if out_channels % num_classes == 0: + new_val = val.reshape(num_classes, -1, in_channels, + *val.shape[2:])[1:] + new_val = new_val.reshape(-1, *val.shape[1:]) + # agnostic + else: + new_val = val + + return new_val + + +def convert(in_file, out_file, num_classes): + """Convert keys in checkpoints. + + There can be some breaking changes during the development of mmdetection, + and this tool is used for upgrading checkpoints trained with old versions + to the latest one. 
+ """ + checkpoint = torch.load(in_file) + in_state_dict = checkpoint.pop('state_dict') + out_state_dict = OrderedDict() + meta_info = checkpoint['meta'] + is_two_stage, is_ssd, is_retina, reg_cls_agnostic = parse_config( + '#' + meta_info['config']) + if meta_info['mmdet_version'] <= '0.5.3' and is_retina: + upgrade_retina = True + else: + upgrade_retina = False + + # MMDetection v2.5.0 unifies the class order in RPN + # if the model is trained in version=2.5.0 + if meta_info['mmdet_version'] < '2.5.0': + upgrade_rpn = True + else: + upgrade_rpn = False + + for key, val in in_state_dict.items(): + new_key = key + new_val = val + if is_two_stage and is_head(key): + new_key = 'roi_head.{}'.format(key) + + # classification + if upgrade_rpn: + m = re.search( + r'(conv_cls|retina_cls|rpn_cls|fc_cls|fcos_cls|' + r'fovea_cls).(weight|bias)', new_key) + else: + m = re.search( + r'(conv_cls|retina_cls|fc_cls|fcos_cls|' + r'fovea_cls).(weight|bias)', new_key) + if m is not None: + print(f'reorder cls channels of {new_key}') + new_val = reorder_cls_channel(val, num_classes) + + # regression + if upgrade_rpn: + m = re.search(r'(fc_reg).(weight|bias)', new_key) + else: + m = re.search(r'(fc_reg|rpn_reg).(weight|bias)', new_key) + if m is not None and not reg_cls_agnostic: + print(f'truncate regression channels of {new_key}') + new_val = truncate_reg_channel(val, num_classes) + + # mask head + m = re.search(r'(conv_logits).(weight|bias)', new_key) + if m is not None: + print(f'truncate mask prediction channels of {new_key}') + new_val = truncate_cls_channel(val, num_classes) + + m = re.search(r'(cls_convs|reg_convs).\d.(weight|bias)', key) + # Legacy issues in RetinaNet since V1.x + # Use ConvModule instead of nn.Conv2d in RetinaNet + # cls_convs.0.weight -> cls_convs.0.conv.weight + if m is not None and upgrade_retina: + param = m.groups()[1] + new_key = key.replace(param, f'conv.{param}') + out_state_dict[new_key] = val + print(f'rename the name of {key} to {new_key}') + continue + + m = re.search(r'(cls_convs).\d.(weight|bias)', key) + if m is not None and is_ssd: + print(f'reorder cls channels of {new_key}') + new_val = reorder_cls_channel(val, num_classes) + + out_state_dict[new_key] = new_val + checkpoint['state_dict'] = out_state_dict + torch.save(checkpoint, out_file) + + +def main(): + parser = argparse.ArgumentParser(description='Upgrade model version') + parser.add_argument('in_file', help='input checkpoint file') + parser.add_argument('out_file', help='output checkpoint file') + parser.add_argument( + '--num-classes', + type=int, + default=81, + help='number of classes of the original model') + args = parser.parse_args() + convert(args.in_file, args.out_file, args.num_classes) + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/upgrade_ssd_version.py b/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/upgrade_ssd_version.py new file mode 100644 index 0000000000000000000000000000000000000000..1d756faaf2f714af5acdb70dfbbd377501299817 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/model_converters/upgrade_ssd_version.py @@ -0,0 +1,57 @@ +import argparse +import tempfile +from collections import OrderedDict + +import torch +from mmcv import Config + + +def parse_config(config_strings): + temp_file = tempfile.NamedTemporaryFile() + config_path = f'{temp_file.name}.py' + with open(config_path, 'w') as f: + f.write(config_strings) + + config = Config.fromfile(config_path) + # check 
whether it is SSD + if config.model.bbox_head.type != 'SSDHead': + raise AssertionError('This is not a SSD model.') + + +def convert(in_file, out_file): + checkpoint = torch.load(in_file) + in_state_dict = checkpoint.pop('state_dict') + out_state_dict = OrderedDict() + meta_info = checkpoint['meta'] + parse_config('#' + meta_info['config']) + for key, value in in_state_dict.items(): + if 'extra' in key: + layer_idx = int(key.split('.')[2]) + new_key = 'neck.extra_layers.{}.{}.conv.'.format( + layer_idx // 2, layer_idx % 2) + key.split('.')[-1] + elif 'l2_norm' in key: + new_key = 'neck.l2_norm.weight' + elif 'bbox_head' in key: + new_key = key[:21] + '.0' + key[21:] + else: + new_key = key + out_state_dict[new_key] = value + checkpoint['state_dict'] = out_state_dict + + if torch.__version__ >= '1.6': + torch.save(checkpoint, out_file, _use_new_zipfile_serialization=False) + else: + torch.save(checkpoint, out_file) + + +def main(): + parser = argparse.ArgumentParser(description='Upgrade SSD version') + parser.add_argument('in_file', help='input checkpoint file') + parser.add_argument('out_file', help='output checkpoint file') + + args = parser.parse_args() + convert(args.in_file, args.out_file) + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/slurm_test.sh b/detection_cbnet/docker-build-context/cbnetv2/tools/slurm_test.sh new file mode 100755 index 0000000000000000000000000000000000000000..6dd67e57442b741fc30f26102eb5afe16139edb1 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/slurm_test.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +set -x + +PARTITION=$1 +JOB_NAME=$2 +CONFIG=$3 +CHECKPOINT=$4 +GPUS=${GPUS:-8} +GPUS_PER_NODE=${GPUS_PER_NODE:-8} +CPUS_PER_TASK=${CPUS_PER_TASK:-5} +PY_ARGS=${@:5} +SRUN_ARGS=${SRUN_ARGS:-""} + +PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \ +srun -p ${PARTITION} \ + --job-name=${JOB_NAME} \ + --gres=gpu:${GPUS_PER_NODE} \ + --ntasks=${GPUS} \ + --ntasks-per-node=${GPUS_PER_NODE} \ + --cpus-per-task=${CPUS_PER_TASK} \ + --kill-on-bad-exit=1 \ + ${SRUN_ARGS} \ + python -u tools/test.py ${CONFIG} ${CHECKPOINT} --launcher="slurm" ${PY_ARGS} diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/slurm_train.sh b/detection_cbnet/docker-build-context/cbnetv2/tools/slurm_train.sh new file mode 100755 index 0000000000000000000000000000000000000000..b3feb3d9c7a6c33d82739cdf5ee10365673aaded --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/slurm_train.sh @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +set -x + +PARTITION=$1 +JOB_NAME=$2 +CONFIG=$3 +WORK_DIR=$4 +GPUS=${GPUS:-8} +GPUS_PER_NODE=${GPUS_PER_NODE:-8} +CPUS_PER_TASK=${CPUS_PER_TASK:-5} +SRUN_ARGS=${SRUN_ARGS:-""} +PY_ARGS=${@:5} + +PYTHONPATH="$(dirname $0)/..":$PYTHONPATH \ +srun -p ${PARTITION} \ + --job-name=${JOB_NAME} \ + --gres=gpu:${GPUS_PER_NODE} \ + --ntasks=${GPUS} \ + --ntasks-per-node=${GPUS_PER_NODE} \ + --cpus-per-task=${CPUS_PER_TASK} \ + --kill-on-bad-exit=1 \ + ${SRUN_ARGS} \ + python -u tools/train.py ${CONFIG} --work-dir=${WORK_DIR} --launcher="slurm" ${PY_ARGS} diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/test.py b/detection_cbnet/docker-build-context/cbnetv2/tools/test.py new file mode 100644 index 0000000000000000000000000000000000000000..461d3ef478f06271bda069aca7864bd816006dc6 --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/test.py @@ -0,0 +1,237 @@ +import argparse +import os +import os.path as osp +import time +import warnings + +import mmcv +import 
torch +from mmcv import Config, DictAction +from mmcv.cnn import fuse_conv_bn +from mmcv.parallel import MMDataParallel, MMDistributedDataParallel +from mmcv.runner import (get_dist_info, init_dist, load_checkpoint, + wrap_fp16_model) + +from mmdet.apis import multi_gpu_test, single_gpu_test +from mmdet.datasets import (build_dataloader, build_dataset, + replace_ImageToTensor) +from mmdet.models import build_detector + + +def parse_args(): + parser = argparse.ArgumentParser( + description='MMDet test (and eval) a model') + parser.add_argument('config', help='test config file path') + parser.add_argument('checkpoint', help='checkpoint file') + parser.add_argument( + '--work-dir', + help='the directory to save the file containing evaluation metrics') + parser.add_argument('--out', help='output result file in pickle format') + parser.add_argument( + '--fuse-conv-bn', + action='store_true', + help='Whether to fuse conv and bn, this will slightly increase' + 'the inference speed') + parser.add_argument( + '--format-only', + action='store_true', + help='Format the output results without perform evaluation. It is' + 'useful when you want to format the result to a specific format and ' + 'submit it to the test server') + parser.add_argument( + '--eval', + type=str, + nargs='+', + help='evaluation metrics, which depends on the dataset, e.g., "bbox",' + ' "segm", "proposal" for COCO, and "mAP", "recall" for PASCAL VOC') + parser.add_argument('--show', action='store_true', help='show results') + parser.add_argument( + '--show-dir', help='directory where painted images will be saved') + parser.add_argument( + '--show-score-thr', + type=float, + default=0.3, + help='score threshold (default: 0.3)') + parser.add_argument( + '--gpu-collect', + action='store_true', + help='whether to use gpu to collect results.') + parser.add_argument( + '--tmpdir', + help='tmp directory used for collecting results from multiple ' + 'workers, available when gpu-collect is not specified') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. 
key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + parser.add_argument( + '--options', + nargs='+', + action=DictAction, + help='custom options for evaluation, the key-value pair in xxx=yyy ' + 'format will be kwargs for dataset.evaluate() function (deprecate), ' + 'change to --eval-options instead.') + parser.add_argument( + '--eval-options', + nargs='+', + action=DictAction, + help='custom options for evaluation, the key-value pair in xxx=yyy ' + 'format will be kwargs for dataset.evaluate() function') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', type=int, default=0) + args = parser.parse_args() + if 'LOCAL_RANK' not in os.environ: + os.environ['LOCAL_RANK'] = str(args.local_rank) + + if args.options and args.eval_options: + raise ValueError( + '--options and --eval-options cannot be both ' + 'specified, --options is deprecated in favor of --eval-options') + if args.options: + warnings.warn('--options is deprecated in favor of --eval-options') + args.eval_options = args.options + return args + + +def main(): + args = parse_args() + + assert args.out or args.eval or args.format_only or args.show \ + or args.show_dir, \ + ('Please specify at least one operation (save/eval/format/show the ' + 'results / save the results) with the argument "--out", "--eval"' + ', "--format-only", "--show" or "--show-dir"') + + if args.eval and args.format_only: + raise ValueError('--eval and --format_only cannot be both specified') + + if args.out is not None and not args.out.endswith(('.pkl', '.pickle')): + raise ValueError('The output file must be a pkl file.') + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + # import modules from string list. + if cfg.get('custom_imports', None): + from mmcv.utils import import_modules_from_strings + import_modules_from_strings(**cfg['custom_imports']) + # set cudnn_benchmark + if cfg.get('cudnn_benchmark', False): + torch.backends.cudnn.benchmark = True + + cfg.model.pretrained = None + if cfg.model.get('neck'): + if isinstance(cfg.model.neck, list): + for neck_cfg in cfg.model.neck: + if neck_cfg.get('rfp_backbone'): + if neck_cfg.rfp_backbone.get('pretrained'): + neck_cfg.rfp_backbone.pretrained = None + elif cfg.model.neck.get('rfp_backbone'): + if cfg.model.neck.rfp_backbone.get('pretrained'): + cfg.model.neck.rfp_backbone.pretrained = None + + # in case the test dataset is concatenated + samples_per_gpu = 1 + if isinstance(cfg.data.test, dict): + cfg.data.test.test_mode = True + samples_per_gpu = cfg.data.test.pop('samples_per_gpu', 1) + if samples_per_gpu > 1: + # Replace 'ImageToTensor' to 'DefaultFormatBundle' + cfg.data.test.pipeline = replace_ImageToTensor( + cfg.data.test.pipeline) + elif isinstance(cfg.data.test, list): + for ds_cfg in cfg.data.test: + ds_cfg.test_mode = True + samples_per_gpu = max( + [ds_cfg.pop('samples_per_gpu', 1) for ds_cfg in cfg.data.test]) + if samples_per_gpu > 1: + for ds_cfg in cfg.data.test: + ds_cfg.pipeline = replace_ImageToTensor(ds_cfg.pipeline) + + # init distributed env first, since logger depends on the dist info. 
+ if args.launcher == 'none': + distributed = False + else: + distributed = True + init_dist(args.launcher, **cfg.dist_params) + + rank, _ = get_dist_info() + # allows not to create + if args.work_dir is not None and rank == 0: + mmcv.mkdir_or_exist(osp.abspath(args.work_dir)) + timestamp = time.strftime('%Y%m%d_%H%M%S', time.localtime()) + json_file = osp.join(args.work_dir, f'eval_{timestamp}.json') + + # build the dataloader + dataset = build_dataset(cfg.data.test) + data_loader = build_dataloader( + dataset, + samples_per_gpu=samples_per_gpu, + workers_per_gpu=cfg.data.workers_per_gpu, + dist=distributed, + shuffle=False) + + # build the model and load checkpoint + cfg.model.train_cfg = None + model = build_detector(cfg.model, test_cfg=cfg.get('test_cfg')) + fp16_cfg = cfg.get('fp16', None) + if fp16_cfg is not None: + wrap_fp16_model(model) + checkpoint = load_checkpoint(model, args.checkpoint, map_location='cpu') + if args.fuse_conv_bn: + model = fuse_conv_bn(model) + # old versions did not save class info in checkpoints, this walkaround is + # for backward compatibility + if 'CLASSES' in checkpoint.get('meta', {}): + model.CLASSES = checkpoint['meta']['CLASSES'] + else: + model.CLASSES = dataset.CLASSES + + if not distributed: + model = MMDataParallel(model, device_ids=[0]) + outputs = single_gpu_test(model, data_loader, args.show, args.show_dir, + args.show_score_thr) + else: + model = MMDistributedDataParallel( + model.cuda(), + device_ids=[torch.cuda.current_device()], + broadcast_buffers=False) + outputs = multi_gpu_test(model, data_loader, args.tmpdir, + args.gpu_collect) + + rank, _ = get_dist_info() + if rank == 0: + if args.out: + print(f'\nwriting results to {args.out}') + mmcv.dump(outputs, args.out) + kwargs = {} if args.eval_options is None else args.eval_options + if args.format_only: + dataset.format_results(outputs, **kwargs) + if args.eval: + eval_kwargs = cfg.get('evaluation', {}).copy() + # hard-code way to remove EvalHook args + for key in [ + 'interval', 'tmpdir', 'start', 'gpu_collect', 'save_best', + 'rule' + ]: + eval_kwargs.pop(key, None) + eval_kwargs.update(dict(metric=args.eval, **kwargs)) + metric = dataset.evaluate(outputs, **eval_kwargs) + print(metric) + metric_dict = dict(config=args.config, metric=metric) + if args.work_dir is not None and rank == 0: + mmcv.dump(metric_dict, json_file) + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/docker-build-context/cbnetv2/tools/train.py b/detection_cbnet/docker-build-context/cbnetv2/tools/train.py new file mode 100644 index 0000000000000000000000000000000000000000..2fce106f26269099ff8c998925b2a1e3b1952b1f --- /dev/null +++ b/detection_cbnet/docker-build-context/cbnetv2/tools/train.py @@ -0,0 +1,188 @@ +import argparse +import copy +import os +import os.path as osp +import time +import warnings + +import mmcv +import torch +from mmcv import Config, DictAction +from mmcv.runner import get_dist_info, init_dist +from mmcv.utils import get_git_hash + +from mmdet import __version__ +from mmdet.apis import set_random_seed, train_detector +from mmdet.datasets import build_dataset +from mmdet.models import build_detector +from mmdet.utils import collect_env, get_root_logger + + +def parse_args(): + parser = argparse.ArgumentParser(description='Train a detector') + parser.add_argument('config', help='train config file path') + parser.add_argument('--work-dir', help='the dir to save logs and models') + parser.add_argument( + '--resume-from', help='the checkpoint file to resume from') + 
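+    # (added note, not in the upstream script) --resume-from also restores the
+    # optimizer state and the epoch counter, unlike `load_from` in the config,
+    # which only loads model weights.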
parser.add_argument( + '--no-validate', + action='store_true', + help='whether not to evaluate the checkpoint during training') + group_gpus = parser.add_mutually_exclusive_group() + group_gpus.add_argument( + '--gpus', + type=int, + help='number of gpus to use ' + '(only applicable to non-distributed training)') + group_gpus.add_argument( + '--gpu-ids', + type=int, + nargs='+', + help='ids of gpus to use ' + '(only applicable to non-distributed training)') + parser.add_argument('--seed', type=int, default=None, help='random seed') + parser.add_argument( + '--deterministic', + action='store_true', + help='whether to set deterministic options for CUDNN backend.') + parser.add_argument( + '--options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file (deprecate), ' + 'change to --cfg-options instead.') + parser.add_argument( + '--cfg-options', + nargs='+', + action=DictAction, + help='override some settings in the used config, the key-value pair ' + 'in xxx=yyy format will be merged into config file. If the value to ' + 'be overwritten is a list, it should be like key="[a,b]" or key=a,b ' + 'It also allows nested list/tuple values, e.g. key="[(a,b),(c,d)]" ' + 'Note that the quotation marks are necessary and that no white space ' + 'is allowed.') + parser.add_argument( + '--launcher', + choices=['none', 'pytorch', 'slurm', 'mpi'], + default='none', + help='job launcher') + parser.add_argument('--local_rank', type=int, default=0) + args = parser.parse_args() + if 'LOCAL_RANK' not in os.environ: + os.environ['LOCAL_RANK'] = str(args.local_rank) + + if args.options and args.cfg_options: + raise ValueError( + '--options and --cfg-options cannot be both ' + 'specified, --options is deprecated in favor of --cfg-options') + if args.options: + warnings.warn('--options is deprecated in favor of --cfg-options') + args.cfg_options = args.options + + return args + + +def main(): + args = parse_args() + + cfg = Config.fromfile(args.config) + if args.cfg_options is not None: + cfg.merge_from_dict(args.cfg_options) + # import modules from string list. + if cfg.get('custom_imports', None): + from mmcv.utils import import_modules_from_strings + import_modules_from_strings(**cfg['custom_imports']) + # set cudnn_benchmark + if cfg.get('cudnn_benchmark', False): + torch.backends.cudnn.benchmark = True + + # work_dir is determined in this priority: CLI > segment in file > filename + if args.work_dir is not None: + # update configs according to CLI args if args.work_dir is not None + cfg.work_dir = args.work_dir + elif cfg.get('work_dir', None) is None: + # use config filename as default work_dir if cfg.work_dir is None + cfg.work_dir = osp.join('./work_dirs', + osp.splitext(osp.basename(args.config))[0]) + if args.resume_from is not None: + cfg.resume_from = args.resume_from + if args.gpu_ids is not None: + cfg.gpu_ids = args.gpu_ids + else: + cfg.gpu_ids = range(1) if args.gpus is None else range(args.gpus) + + # init distributed env first, since logger depends on the dist info. 
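+    # (added note, not in the upstream script) under a distributed launcher
+    # cfg.gpu_ids is re-derived from the world size a few lines below, so any
+    # --gpus/--gpu-ids value is overridden in that case.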
+ if args.launcher == 'none': + distributed = False + else: + distributed = True + init_dist(args.launcher, **cfg.dist_params) + # re-set gpu_ids with distributed training mode + _, world_size = get_dist_info() + cfg.gpu_ids = range(world_size) + + # create work_dir + mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir)) + # dump config + cfg.dump(osp.join(cfg.work_dir, osp.basename(args.config))) + # init the logger before other steps + timestamp = time.strftime('%Y%m%d_%H%M%S', time.localtime()) + log_file = osp.join(cfg.work_dir, f'{timestamp}.log') + logger = get_root_logger(log_file=log_file, log_level=cfg.log_level) + + # init the meta dict to record some important information such as + # environment info and seed, which will be logged + meta = dict() + # log env info + env_info_dict = collect_env() + env_info = '\n'.join([(f'{k}: {v}') for k, v in env_info_dict.items()]) + dash_line = '-' * 60 + '\n' + logger.info('Environment info:\n' + dash_line + env_info + '\n' + + dash_line) + meta['env_info'] = env_info + meta['config'] = cfg.pretty_text + # log some basic info + logger.info(f'Distributed training: {distributed}') + logger.info(f'Config:\n{cfg.pretty_text}') + + # set random seeds + if args.seed is not None: + logger.info(f'Set random seed to {args.seed}, ' + f'deterministic: {args.deterministic}') + set_random_seed(args.seed, deterministic=args.deterministic) + cfg.seed = args.seed + meta['seed'] = args.seed + meta['exp_name'] = osp.basename(args.config) + + model = build_detector( + cfg.model, + train_cfg=cfg.get('train_cfg'), + test_cfg=cfg.get('test_cfg')) + model.init_weights() + + datasets = [build_dataset(cfg.data.train)] + if len(cfg.workflow) == 2: + val_dataset = copy.deepcopy(cfg.data.val) + val_dataset.pipeline = cfg.data.train.pipeline + datasets.append(build_dataset(val_dataset)) + if cfg.checkpoint_config is not None: + # save mmdet version, config file content and class names in + # checkpoints as meta data + cfg.checkpoint_config.meta = dict( + mmdet_version=__version__ + get_git_hash()[:7], + CLASSES=datasets[0].CLASSES) + # add an attribute for visualization convenience + model.CLASSES = datasets[0].CLASSES + train_detector( + model, + datasets, + cfg, + distributed=distributed, + validate=(not args.no_validate), + timestamp=timestamp, + meta=meta) + + +if __name__ == '__main__': + main() diff --git a/detection_cbnet/run.py b/detection_cbnet/run.py new file mode 100644 index 0000000000000000000000000000000000000000..24609de51291473146513328e6b2f094e592cd47 --- /dev/null +++ b/detection_cbnet/run.py @@ -0,0 +1,214 @@ +import argparse +import logging +import os +from typing import Iterable + +import cv2 +import mmcv +import numpy as np +import torch +from mmcv import Config +from mmcv.parallel import MMDataParallel +from mmcv.runner import wrap_fp16_model, load_checkpoint +from mmdet.datasets import replace_ImageToTensor +from mmdet.models import build_detector + +from common import logging_levels, download_file +from common.communication.kafka_common import KafkaRequestProcessor +from common.communication.messages_pb2 import PerceptionRequest, PerceptionResponse, DetectionEntry + +RGB_MEAN = np.array([123.675, 116.28, 103.53], dtype=np.float32) +RGB_STD = np.array([58.395, 57.12, 57.375], dtype=np.float32) + +# Classes (starting from 1 - 80 in total) - https://gist.github.com/AruniRC/7b3dadd004da04c80198557db5da4bda + + +# noinspection DuplicatedCode +def process_detection_requests(requests: Iterable[PerceptionRequest], results: Iterable[PerceptionResponse], 
detector):
+    batch_req = []
+    for req in requests:
+        if req.finished:
+            continue
+        img = np.frombuffer(req.detection.image, np.uint8)
+        img = cv2.imdecode(img, cv2.IMREAD_ANYCOLOR)
+        logging.info('Processing an image of shape %s', img.shape)
+        batch_req.append(img)
+    if len(batch_req) > 0:
+        process_batch(batch_req, results, detector)
+
+
+def process_batch(batch_req, batch_res, detector):
+    # [[(tl_x, tl_y, br_x, br_y, score, class_id)]]
+    batch_detection = detector(batch_req)
+    for detections, res in zip(batch_detection, batch_res):
+        for det in detections:
+            res.detection.entries.append(DetectionEntry(box_top_left_x=det[0],
+                                                        box_top_left_y=det[1],
+                                                        box_bottom_right_x=det[2],
+                                                        box_bottom_right_y=det[3],
+                                                        score=det[4],
+                                                        class_id=det[5]))
+
+
+# noinspection DuplicatedCode
+def get_detector(config_path, weights_path, max_size):
+    cfg = Config.fromfile(config_path)
+    if cfg.get('custom_imports', None):
+        from mmcv.utils import import_modules_from_strings
+        import_modules_from_strings(**cfg['custom_imports'])
+    # set cudnn_benchmark
+    if cfg.get('cudnn_benchmark', False):
+        torch.backends.cudnn.benchmark = True
+
+    cfg.model.pretrained = None
+    if cfg.model.get('neck'):
+        if isinstance(cfg.model.neck, list):
+            for neck_cfg in cfg.model.neck:
+                if neck_cfg.get('rfp_backbone'):
+                    if neck_cfg.rfp_backbone.get('pretrained'):
+                        neck_cfg.rfp_backbone.pretrained = None
+        elif cfg.model.neck.get('rfp_backbone'):
+            if cfg.model.neck.rfp_backbone.get('pretrained'):
+                cfg.model.neck.rfp_backbone.pretrained = None
+
+    # in case the test dataset is concatenated
+    samples_per_gpu = 1
+    if isinstance(cfg.data.test, dict):
+        cfg.data.test.test_mode = True
+        samples_per_gpu = cfg.data.test.pop('samples_per_gpu', 1)
+        if samples_per_gpu > 1:
+            # Replace 'ImageToTensor' to 'DefaultFormatBundle'
+            cfg.data.test.pipeline = replace_ImageToTensor(
+                cfg.data.test.pipeline)
+    elif isinstance(cfg.data.test, list):
+        for ds_cfg in cfg.data.test:
+            ds_cfg.test_mode = True
+        samples_per_gpu = max(
+            [ds_cfg.pop('samples_per_gpu', 1) for ds_cfg in cfg.data.test])
+        if samples_per_gpu > 1:
+            for ds_cfg in cfg.data.test:
+                ds_cfg.pipeline = replace_ImageToTensor(ds_cfg.pipeline)
+
+    # build the model and load checkpoint
+    cfg.model.train_cfg = None
+    model = build_detector(cfg.model, test_cfg=cfg.get('test_cfg'))
+    fp16_cfg = cfg.get('fp16', None)
+    if fp16_cfg is not None:
+        wrap_fp16_model(model)
+    load_checkpoint(model, weights_path, map_location='cpu')
+
+    model = MMDataParallel(model, device_ids=[0])
+    model.eval()
+
+    return lambda images: predict_image(images, model, max_size)
+
+
+def predict_image(images, model, max_size):
+    resized_images = []
+    img_metas = []
+    for img in images:
+        ori_h, ori_w = img.shape[:2]
+        cur_size = ori_h * ori_w
+        if cur_size > max_size:
+            # scale each side by sqrt(max_size / area) so the rescaled area is ~max_size
+            size_scale = (max_size / cur_size) ** 0.5
+            img = mmcv.imrescale(img, size_scale, return_scale=False, backend='cv2')
+        new_h, new_w = img.shape[:2]
+        w_scale = new_w / ori_w
+        h_scale = new_h / ori_h
+        img = mmcv.imnormalize(img, RGB_MEAN, RGB_STD, to_rgb=True)
+
+        # pad each side up to the next multiple of 32, as required by the backbone strides
+        padding_h = int((new_h + 31) / 32) * 32
+        padding_w = int((new_w + 31) / 32) * 32
+        img = mmcv.impad(img, shape=(padding_h, padding_w))
+
+        img = img.transpose(2, 0, 1)
+        h, w = img.shape[1], img.shape[2]
+        img = np.expand_dims(img, axis=0)
+        resized_images.append(img)
+        img_metas.append({'img_shape': (h, w, 3),
+                          'ori_shape': (ori_h, ori_w, 3),
+                          'scale_factor': np.array([w_scale, h_scale, w_scale, h_scale], dtype=np.float32),
+                          'flip': False,
+                          'flip_direction': None})
+
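+    # Worked example of the loop above (illustrative, assuming the default
+    # max_size of 1280*720 = 921,600 px): a 1920x1080 frame has 2,073,600 px,
+    # so size_scale = sqrt(921600 / 2073600) = 2/3, mmcv.imrescale returns a
+    # 1280x720 image, and the /32 padding grows it to 1280x736 before batching.
+    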
resized_images = np.concatenate(resized_images) + resized_images = torch.from_numpy(resized_images) + with torch.no_grad(): + detection_batch = model(return_loss=False, rescale=True, img=[resized_images], img_metas=[img_metas]) + + results = [] + for i in range(len(detection_batch)): + boxes = [] + detected_classes = detection_batch[i][0] + for cl_id, bbox in enumerate(detected_classes): + for i in range(bbox.shape[0]): + score = bbox[i, 4] + if score > 0.3: + boxes.append((int(bbox[i, 0]), int(bbox[i, 1]), int(bbox[i, 2]), int(bbox[i, 3]), score, cl_id + 1)) + results.append(boxes) + return results + + +def run(parser: argparse.ArgumentParser): + parser.add_argument('--name', default='detection_cbnet', help='instance name used as part of the client id') + parser.add_argument('--log_dir', help='logs dir path', required=True) + parser.add_argument('--logging_level', dest='logging_level', choices=logging_levels.keys(), default='INFO', + help='logging level. One of ' + str(logging_levels.keys())) + parser.add_argument('--weights_path', default='', + help='path to the model weights file. Default: ' + 'PROJECT_DIR_PATH/detection_cbnet/models/htc_cbv2_swin_large22k_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.pth') + parser.add_argument('--weights_url', default='', help='url to the model weights file that should be downloaded and ' + 'placed to the weights_path if it does not already exist') + parser.add_argument('--bootstrap_servers', type=str, required=True, + help='comma separated kafka bootstrap servers. Example: kafka1.local:9092,kafka2.local:9092') + parser.add_argument('--username', type=str, default='', help='kafka SASL username or empty str') + parser.add_argument('--password', type=str, default='', help='kafka SASL password or empty str') + parser.add_argument('--topic', default='detection-requests', help='requests topic name') + parser.add_argument('--group_id', default='detection', help='kafka clients group id') + parser.add_argument('--result_topic_prefix', default='detection-results-', help='prefix of the result topic names') + parser.add_argument('--batch_size', type=int, default=1, help='batch size for frames processing') + parser.add_argument('--max_size', type=int, default=1280 * 720, help='max size of an image in pixels') + parser.add_argument('--commit_period_ms', type=int, default=1000, + help='period for committing processed and delivered request offsets') + args = parser.parse_args() + + logging.basicConfig(format="%(asctime)s: %(levelname)s - %(message)s", + filename=os.path.join(args.log_dir, 'detection_cbnet.log'), level=logging_levels[args.logging_level]) + + current_dir_path = os.path.dirname(os.path.abspath(__file__)) + weights_path = args.weights_path if args.weights_path else \ + os.path.join(current_dir_path, 'models/htc_cbv2_swin_large22k_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.pth') + + if os.path.exists(weights_path) and not os.path.isfile(weights_path): + raise ValueError("weight_path path '{}' must denote a file or mustn't exist".format(weights_path)) + + if not os.path.exists(weights_path): + logging.info("Downloading detection_cbnet model weights from '{}' to '{}'".format(args.weights_url, weights_path)) + download_file(args.weights_url, weights_path) + logging.info('Detection model weights download finished') + + config_path = os.path.join(current_dir_path, 'docker-build-context/cbnetv2/configs/cbnet/htc_cbv2_swin_large_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.py') + logging.info("Initializing 
detection_cbnet model")
+    detector = get_detector(config_path, weights_path, args.max_size)
+    logging.info("Model was initialized")
+
+    bootstrap_servers = args.bootstrap_servers.split(',')
+    logging.info('Using bootstrap servers: %s', bootstrap_servers)
+
+    processor = KafkaRequestProcessor(name=args.name, bootstrap_servers=bootstrap_servers,
+                                      username=args.username, password=args.password, topic_name=args.topic,
+                                      group_id=args.group_id, result_topic_prefix=args.result_topic_prefix,
+                                      req_batch_size=args.batch_size, commit_period_ms=args.commit_period_ms,
+                                      stream_max_idle_time_ms=600_000, frames_commit_latency=0,
+                                      request_handler=lambda req, res: process_detection_requests(req, res, detector))
+
+    logging.info("Starting detection_cbnet service '%s'", args.name)
+    try:
+        processor.run_sync()
+    except KeyboardInterrupt:
+        pass
+    logging.info("Stopping detection_cbnet service")
+
+
+if __name__ == '__main__':
+    run(argparse.ArgumentParser())
diff --git a/docker-compose.yml b/docker-compose.yml
index 1e453e245ad3f255594fd3c9247959f850c90395..e98e4946173d94af94d8deaa3b3c11bb7b988fef 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -39,13 +39,33 @@ services:
     environment:
       - NVIDIA_VISIBLE_DEVICES=${DEVBEH_DETECTION_NVIDIA_VISIBLE_DEVICES:-0}
     command: 'python3.6 run.py detection --log_dir /opt/detection/logs
-      --graphormer_weights_url ${DEVBEH_DETECTION_WEIGHTS_URL:-https://www.dropbox.com/s/qx1tlfmpkacvmep/COCO-MaskRCNN-R101FPN9xGNCasAugScratch.npz?dl=1}
-      --hrnet_weights_url ${DEVBEH_DETECTION_WEIGHTS_URL:-https://datarelease.blob.core.windows.net/metro/models/hrnetv2_w64_imagenet_pretrained.pth}
+      --weights_url ${DEVBEH_DETECTION_WEIGHTS_URL:-https://www.dropbox.com/s/qx1tlfmpkacvmep/COCO-MaskRCNN-R101FPN9xGNCasAugScratch.npz?dl=1}
       --bootstrap_servers ${DEVBEH_KAFKA_BOOTSTRAP_SERVERS:-localhost:9095}
       --username "${DEVBEH_KAFKA_USERNAME:-}"
       --password "${DEVBEH_KAFKA_PASSWORD:-}"
       --batch_size ${DEVBEH_DETECTION_BATCH_SIZE:-1}
       --name ${DEVBEH_DETECTION_INSTANCE_NAME:-detection}'
+  detection_cbnet:
+    # Paths below are resolved relative to the directory containing this docker-compose.yml file
+    build: ./detection_cbnet/docker-build-context
+    image: devbeh/detection_cbnet
+    volumes:
+      - type: bind
+        # Mount the source directory. This volume is not read_only because the program may automatically
+        # download model weight files and store them in it
+        source: .
+        target: /opt/detection_cbnet/src
+    network_mode: host
+    runtime: nvidia
+    environment:
+      - NVIDIA_VISIBLE_DEVICES=${DEVBEH_DETECTION_CBNET_NVIDIA_VISIBLE_DEVICES:-0}
+    command: 'python run.py detection_cbnet --log_dir /opt/detection_cbnet/logs
+      --weights_url ${DEVBEH_DETECTION_CBNET_WEIGHTS_URL:-https://www.dropbox.com/s/0vsk5zld23tgrka/htc_cbv2_swin_large22k_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.pth?dl=1}
+      --bootstrap_servers ${DEVBEH_KAFKA_BOOTSTRAP_SERVERS:-localhost:9095}
+      --username "${DEVBEH_KAFKA_USERNAME:-}"
+      --password "${DEVBEH_KAFKA_PASSWORD:-}"
+      --batch_size ${DEVBEH_DETECTION_CBNET_BATCH_SIZE:-1}
+      --name ${DEVBEH_DETECTION_CBNET_INSTANCE_NAME:-detection_cbnet}'
   pose3d:
     build: ./pose3d/docker-build-context
     image: devbeh/pose3d
@@ -60,7 +80,8 @@ services:
     environment:
       - NVIDIA_VISIBLE_DEVICES=${DEVBEH_POSE3D_NVIDIA_VISIBLE_DEVICES:-0}
     command: 'python run.py pose3d --log_dir /opt/pose3d/logs
-      --weights_url ${DEVBEH_POSE3D_WEIGHTS_URL:-https://datarelease.blob.core.windows.net/metro/models/graphormer_h36m_state_dict.bin}
+      --graphormer_weights_url ${DEVBEH_POSE3D_GRAPHORMER_WEIGHTS_URL:-https://datarelease.blob.core.windows.net/metro/models/graphormer_h36m_state_dict.bin}
+      --hrnet_weights_url ${DEVBEH_POSE3D_HRNET_WEIGHTS_URL:-https://datarelease.blob.core.windows.net/metro/models/hrnetv2_w64_imagenet_pretrained.pth}
       --bootstrap_servers ${DEVBEH_KAFKA_BOOTSTRAP_SERVERS:-localhost:9095}
       --username "${DEVBEH_KAFKA_USERNAME:-}"
       --password "${DEVBEH_KAFKA_PASSWORD:-}"
@@ -86,6 +107,27 @@ services:
       --password "${DEVBEH_KAFKA_PASSWORD:-}"
       --batch_size ${DEVBEH_POSE3D_VHM_BATCH_SIZE:-1}
       --name ${DEVBEH_POSE3D_VHM_INSTANCE_NAME:-pose3d_vhm}'
+  tracking:
+    build: ./tracking/docker-build-context
+    image: devbeh/tracking
+    volumes:
+      - type: bind
+        # Mount the source directory. This volume is not read_only because the program may automatically
+        # download model weight files and store them in it
+        source: .
+        target: /opt/tracking/src
+    network_mode: host
+    runtime: nvidia
+    environment:
+      - NVIDIA_VISIBLE_DEVICES=${DEVBEH_TRACKING_NVIDIA_VISIBLE_DEVICES:-0}
+    command: 'python run.py tracking --log_dir /opt/tracking/logs
+      --weights_url ${DEVBEH_TRACKING_WEIGHTS_URL:-https://drive.google.com/u/0/uc?export=download&confirm=2ySX&id=1HX2_JpMOjOIj1Z9rJjoet9XNy_cCAs5U}
+      --bootstrap_servers ${DEVBEH_KAFKA_BOOTSTRAP_SERVERS:-localhost:9095}
+      --username "${DEVBEH_KAFKA_USERNAME:-}"
+      --password "${DEVBEH_KAFKA_PASSWORD:-}"
+      --batch_size ${DEVBEH_TRACKING_BATCH_SIZE:-1}
+      --frames_commit_latency ${DEVBEH_TRACKING_FRAMES_COMMIT_LATENCY:-100}
+      --name ${DEVBEH_TRACKING_INSTANCE_NAME:-tracking}'
 networks:
   kafka_net:
     driver: bridge
diff --git a/pose3d_vhm/run.py b/pose3d_vhm/run.py
index 0ce72c1e41503b7c09ababa0e351a56378c34e9b..f5c85d162b18e3e1ca800c431ce35572cf028402 100644
--- a/pose3d_vhm/run.py
+++ b/pose3d_vhm/run.py
@@ -5,7 +5,6 @@ from typing import Iterable
 
 import cv2
 import numpy as np
-import torch
 
 from common import logging_levels, download_file
 from common.communication.kafka_common import KafkaRequestProcessor
diff --git a/run.py b/run.py
index 2afdd44a125f462072530efe71b6d8ead54034e5..17f52a23c1838d53c7c1546ffcd76f087bd57664 100644
--- a/run.py
+++ b/run.py
@@ -3,7 +3,7 @@ import importlib
 
 if __name__ == '__main__':
     parser = argparse.ArgumentParser()
-    modules = ('detection', 'tracking', 'pose3d', 'pose3d_vhm', 'distance', 'actions')
+    modules = ('detection', 'detection_cbnet', 'tracking', 'pose3d', 'pose3d_vhm', 'distance', 'actions')
     parser.add_argument('module', choices=modules, help='launching module {}'.format(modules))
 
     args, _ = parser.parse_known_args()
diff --git a/test/detection_cbnet_test/__init__.py b/test/detection_cbnet_test/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/test/detection_cbnet_test/detection-cbnet-test-env-start.sh b/test/detection_cbnet_test/detection-cbnet-test-env-start.sh
new file mode 100755
index 0000000000000000000000000000000000000000..5eb7b5c1753b567e5f6c8893fa0462c4ee531e19
--- /dev/null
+++ b/test/detection_cbnet_test/detection-cbnet-test-env-start.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+current_dir=$(pwd)
+cd "$(dirname "${BASH_SOURCE[0]}")" || exit 1
+docker-compose up -d --force-recreate --build
+cd "$current_dir" || exit 1
\ No newline at end of file
diff --git a/test/detection_cbnet_test/detection-cbnet-test-env-stop.sh b/test/detection_cbnet_test/detection-cbnet-test-env-stop.sh
new file mode 100755
index 0000000000000000000000000000000000000000..8ab874984076ec390d48eca6aff1de54aeef804b
--- /dev/null
+++ b/test/detection_cbnet_test/detection-cbnet-test-env-stop.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+current_dir=$(pwd)
+cd "$(dirname "${BASH_SOURCE[0]}")" || exit 1
+docker-compose down
+docker-compose rm
+cd "$current_dir" || exit 1
\ No newline at end of file
diff --git a/test/detection_cbnet_test/detection_cbnet_general_test.py b/test/detection_cbnet_test/detection_cbnet_general_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..ad86dcc3f1725fe8c313c5d9c6c53af0dbd1cadb
--- /dev/null
+++ b/test/detection_cbnet_test/detection_cbnet_general_test.py
@@ -0,0 +1,87 @@
+import os
+import shutil
+import unittest
+
+# noinspection PyProtectedMember
+import cv2
+from kafka import KafkaProducer, KafkaConsumer, TopicPartition
+
+from common.communication.messages_pb2 import DetectionReq, PerceptionRequest
+from test.common_test.kafka_common_test import 
KafkaRequestProcessorTest
+from test.tracking_test.tracking_general_test import draw_boxes
+
+
+# noinspection DuplicatedCode
+class DetectionCBNetServiceTest(unittest.TestCase):
+
+    def test_general(self):
+        """To run this test, start a test Kafka on port 9097 and a test detection_cbnet service working
+        with it. This can be done by running the detection-cbnet-test-env-start.sh script."""
+
+        topic_name = 'detection-requests'
+        bootstrap_servers = 'localhost:9097'
+        result_topic_prefix = 'detection-results-'
+
+        DetectionCBNetServiceTest.ensure_topic_empty(bootstrap_servers, topic_name, 0)
+
+        self.producer: KafkaProducer = KafkaRequestProcessorTest.get_test_producer(bootstrap_servers)
+        self.consumer: KafkaConsumer = KafkaRequestProcessorTest.get_test_consumer(bootstrap_servers)
+        self.consumer.subscribe([result_topic_prefix + '1'])
+
+        img_dir_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'images/')
+        img_names = os.listdir(img_dir_path)
+        for i, img_name in enumerate(img_names):
+            img_path = os.path.join(img_dir_path, img_name)
+            with open(img_path, 'br') as f:
+                img = f.read()
+            self.send(topic_name, partition=0, manager_id=1, video_id=1, frame_id=i+1, image=img)
+        self.send(topic_name, partition=0, manager_id=1, video_id=1, finished=True)
+        self.producer.flush()
+
+        tp_results = KafkaRequestProcessorTest.wait_for_results(self.consumer, len(img_names) + 1, 60_000)
+        self.assertEqual(1, len(tp_results))
+
+        results = tp_results[TopicPartition(result_topic_prefix + '1', 0)]
+        self.assertEqual(len(img_names) + 1, len(results))
+
+        result_dir_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'results/')
+        if os.path.exists(result_dir_path):
+            shutil.rmtree(result_dir_path)
+        os.mkdir(result_dir_path)
+        for img_name, res in zip(img_names, results[:len(img_names)]):
+            print(img_name)
+            print(res)
+            img_path = os.path.join(img_dir_path, img_name)
+            res_img_path = os.path.join(result_dir_path, img_name)
+            img = cv2.imread(img_path)
+            boxes = []
+            for i, entry in enumerate(res.detection.entries):
+                boxes.append({'box': [entry.box_top_left_x, entry.box_top_left_y, entry.box_bottom_right_x,
+                                      entry.box_bottom_right_y], 'class': entry.class_id, 'score': entry.score,
+                              'id': i})
+            img = draw_boxes(img, boxes)
+            cv2.imwrite(res_img_path, img)
+
+
+    @staticmethod
+    def ensure_topic_empty(bootstrap_servers, topic_name, partition):
+        consumer: KafkaConsumer = KafkaRequestProcessorTest.get_test_consumer(bootstrap_servers,
+                                                                              client_id='check-offset-c')
+        tp = TopicPartition(topic_name, partition)
+        offsets = consumer.end_offsets([tp])
+        if not len(offsets):
+            raise RuntimeError('No topic offset')
+        offset = offsets[tp]
+        if offset > 0:
+            raise RuntimeError('Topic partition {}:{} is not empty'.format(topic_name, partition))
+        consumer.close()
+
+    def send(self, topic_name, partition, manager_id, video_id, frame_id=None, finished=None, image=None):
+        r = PerceptionRequest(manager_id=manager_id, video_id=video_id)
+        if frame_id is not None:
+            r.frame_id = frame_id
+        if finished is not None:
+            r.finished = finished
+        if image is not None:
+            r.detection.CopyFrom(DetectionReq(image=image))
+        self.producer.send(topic=topic_name, partition=partition, value=r)
diff --git a/test/detection_cbnet_test/docker-compose.yml b/test/detection_cbnet_test/docker-compose.yml
new file mode 100644
index 0000000000000000000000000000000000000000..738b7664cd10eb1edbb9f18a988ab9334c7ec047
--- /dev/null
+++ b/test/detection_cbnet_test/docker-compose.yml
@@ -0,0 +1,43 @@
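For reference, the `send` helper above ultimately serializes `PerceptionRequest` protobufs onto the topic. A minimal, self-contained sketch of that round trip (the JPEG bytes are placeholders, and it assumes the test producer uses protobuf's `SerializeToString` as its value serializer):

```python
# Standalone sketch of the request round trip used by the test above.
from common.communication.messages_pb2 import DetectionReq, PerceptionRequest

req = PerceptionRequest(manager_id=1, video_id=1, frame_id=1)
req.detection.CopyFrom(DetectionReq(image=b'...jpeg bytes...'))  # placeholder bytes

payload = req.SerializeToString()            # what ends up as the Kafka record value
decoded = PerceptionRequest.FromString(payload)
assert decoded.WhichOneof('req') == 'detection'  # the oneof selects the detection branch
assert decoded.frame_id == 1
```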
+services: + zookeeper: + image: 'bitnami/zookeeper:3.5.5' + networks: + - detection_cbnet_net + environment: + - ALLOW_ANONYMOUS_LOGIN=yes + kafka: + image: 'bitnami/kafka:2.4.0' + ports: + - '9097:9097' + networks: + - detection_cbnet_net + environment: + - KAFKA_BROKER_ID=1 + - KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181 + - ALLOW_PLAINTEXT_LISTENER=yes + - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CLIENT:PLAINTEXT,EXTERNAL:PLAINTEXT + - KAFKA_CFG_LISTENERS=CLIENT://:29097,EXTERNAL://:9097 + - KAFKA_CFG_ADVERTISED_LISTENERS=CLIENT://kafka:29097,EXTERNAL://localhost:9097 + - KAFKA_INTER_BROKER_LISTENER_NAME=CLIENT + depends_on: + - zookeeper + detection_cbnet: + build: ../../detection_cbnet/docker-build-context + image: devbeh/detection_cbnet + depends_on: + - kafka + volumes: + - type: bind + source: ../../ + target: /opt/detection_cbnet/src + networks: + - detection_cbnet_net + runtime: nvidia + environment: + - NVIDIA_VISIBLE_DEVICES=${DEVBEH_DETECTION_CBNET_NVIDIA_VISIBLE_DEVICES:-0} + command: 'python run.py detection_cbnet --log_dir /opt/detection_cbnet/logs + --weights_url https://www.dropbox.com/s/0vsk5zld23tgrka/htc_cbv2_swin_large22k_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.pth?dl=1 + --bootstrap_servers kafka:29097' +networks: + detection_cbnet_net: + driver: bridge diff --git a/test/detection_cbnet_test/images/image1.jpg b/test/detection_cbnet_test/images/image1.jpg new file mode 100644 index 0000000000000000000000000000000000000000..81084ede854e86efc2c5e5e9ba4bee1edfe29da1 Binary files /dev/null and b/test/detection_cbnet_test/images/image1.jpg differ diff --git a/test/detection_cbnet_test/images/image2.jpg b/test/detection_cbnet_test/images/image2.jpg new file mode 100644 index 0000000000000000000000000000000000000000..024c844ee0877cbfe3587ea07240c3cfe887303f Binary files /dev/null and b/test/detection_cbnet_test/images/image2.jpg differ diff --git a/test/detection_test/detection-test-env-start.sh b/test/detection_test/detection-test-env-start.sh index 51ba88c5992668e830e296d15e14860d656b9345..5eb7b5c1753b567e5f6c8893fa0462c4ee531e19 100755 --- a/test/detection_test/detection-test-env-start.sh +++ b/test/detection_test/detection-test-env-start.sh @@ -2,16 +2,4 @@ current_dir=$(pwd) cd "$(dirname "${BASH_SOURCE[0]}")" || exit 1 docker-compose up -d --force-recreate --build -docker-compose exec kafka /opt/bitnami/kafka/bin/kafka-topics.sh \ - --create \ - --bootstrap-server localhost:9097 \ - --replication-factor 1 \ - --topic detection-requests \ - --partitions 1 -docker-compose exec kafka /opt/bitnami/kafka/bin/kafka-topics.sh \ - --create \ - --bootstrap-server localhost:9097 \ - --replication-factor 1 \ - --topic detection-results-1 \ - --partitions 1 cd "$current_dir" || exit 1 \ No newline at end of file diff --git a/test/detection_test/docker-compose.yml b/test/detection_test/docker-compose.yml index 793b2a1854b914a2291bc3df82c4c8069e7e1219..b012797ded7b350de4de95a34b7c3a737ce01ff5 100644 --- a/test/detection_test/docker-compose.yml +++ b/test/detection_test/docker-compose.yml @@ -24,6 +24,8 @@ services: detection: build: ../../detection/docker-build-context image: devbeh/detection + depends_on: + - kafka volumes: - type: bind source: ../../ @@ -36,8 +38,6 @@ services: command: 'python3.6 run.py detection --log_dir /opt/detection/logs --weights_url https://www.dropbox.com/s/qx1tlfmpkacvmep/COCO-MaskRCNN-R101FPN9xGNCasAugScratch.npz?dl=1 --bootstrap_servers kafka:29097' - depends_on: - - kafka networks: detection_net: driver: bridge diff --git 
a/test/pose3d_test/docker-compose.yml b/test/pose3d_test/docker-compose.yml index 06fc82733c6214b7283d237c02a1d90cd328df61..c430eb01809a9f4f7be47f2cf1b5e2d02189eeea 100644 --- a/test/pose3d_test/docker-compose.yml +++ b/test/pose3d_test/docker-compose.yml @@ -24,6 +24,8 @@ services: pose3d: build: ../../pose3d/docker-build-context image: devbeh/pose3d + depends_on: + - kafka volumes: - type: bind # Подключаем директорию с кодом. Данный volume не является read_only, поскольку программа может автоматически @@ -36,8 +38,8 @@ services: environment: - NVIDIA_VISIBLE_DEVICES=${DEVBEH_POSE3D_NVIDIA_VISIBLE_DEVICES:-0} command: 'python run.py pose3d --log_dir /opt/pose3d/logs - --graphormer_weights_url ${DEVBEH_POSE3D_WEIGHTS_URL:-https://datarelease.blob.core.windows.net/metro/models/graphormer_h36m_state_dict.bin} - --hrnet_weights_url ${DEVBEH_DETECTION_WEIGHTS_URL:-https://datarelease.blob.core.windows.net/metro/models/hrnetv2_w64_imagenet_pretrained.pth} + --graphormer_weights_url https://datarelease.blob.core.windows.net/metro/models/graphormer_h36m_state_dict.bin + --hrnet_weights_url https://datarelease.blob.core.windows.net/metro/models/hrnetv2_w64_imagenet_pretrained.pth --bootstrap_servers kafka:29097' networks: pose3d_net: diff --git a/test/pose3d_test/pose3d-test-env-start.sh b/test/pose3d_test/pose3d-test-env-start.sh index 51ba88c5992668e830e296d15e14860d656b9345..5eb7b5c1753b567e5f6c8893fa0462c4ee531e19 100755 --- a/test/pose3d_test/pose3d-test-env-start.sh +++ b/test/pose3d_test/pose3d-test-env-start.sh @@ -2,16 +2,4 @@ current_dir=$(pwd) cd "$(dirname "${BASH_SOURCE[0]}")" || exit 1 docker-compose up -d --force-recreate --build -docker-compose exec kafka /opt/bitnami/kafka/bin/kafka-topics.sh \ - --create \ - --bootstrap-server localhost:9097 \ - --replication-factor 1 \ - --topic detection-requests \ - --partitions 1 -docker-compose exec kafka /opt/bitnami/kafka/bin/kafka-topics.sh \ - --create \ - --bootstrap-server localhost:9097 \ - --replication-factor 1 \ - --topic detection-results-1 \ - --partitions 1 cd "$current_dir" || exit 1 \ No newline at end of file diff --git a/test/pose3d_test/pose3d_general_test.py b/test/pose3d_test/pose3d_general_test.py index 0242b22dca37516ebdcae98fc54046b8645bcda2..206cc7d3ddfd5af54ef6f42233d6f21ae727cae3 100644 --- a/test/pose3d_test/pose3d_general_test.py +++ b/test/pose3d_test/pose3d_general_test.py @@ -12,6 +12,7 @@ from test.common_test.kafka_common_test import KafkaRequestProcessorTest from test.pose3d_test import pose3d_utils +# noinspection DuplicatedCode class Pose3DServiceTest(unittest.TestCase): def test_general(self): diff --git a/test/pose3d_vhm_test/docker-compose.yml b/test/pose3d_vhm_test/docker-compose.yml index 4bd466868ee5e1dd98de523d284878da9ebfb46c..294f8950fcce4da021d5d985a3e1886badc5b47d 100644 --- a/test/pose3d_vhm_test/docker-compose.yml +++ b/test/pose3d_vhm_test/docker-compose.yml @@ -24,6 +24,8 @@ services: pose3d_vhm: build: ../../pose3d_vhm/docker-build-context image: devbeh/pose3d_vhm + depends_on: + - kafka volumes: - type: bind # Подключаем директорию с кодом. 
Данный volume не является read_only, поскольку программа может автоматически diff --git a/test/pose3d_vhm_test/pose3d-test-env-start.sh b/test/pose3d_vhm_test/pose3d-test-env-start.sh index 51ba88c5992668e830e296d15e14860d656b9345..5eb7b5c1753b567e5f6c8893fa0462c4ee531e19 100755 --- a/test/pose3d_vhm_test/pose3d-test-env-start.sh +++ b/test/pose3d_vhm_test/pose3d-test-env-start.sh @@ -2,16 +2,4 @@ current_dir=$(pwd) cd "$(dirname "${BASH_SOURCE[0]}")" || exit 1 docker-compose up -d --force-recreate --build -docker-compose exec kafka /opt/bitnami/kafka/bin/kafka-topics.sh \ - --create \ - --bootstrap-server localhost:9097 \ - --replication-factor 1 \ - --topic detection-requests \ - --partitions 1 -docker-compose exec kafka /opt/bitnami/kafka/bin/kafka-topics.sh \ - --create \ - --bootstrap-server localhost:9097 \ - --replication-factor 1 \ - --topic detection-results-1 \ - --partitions 1 cd "$current_dir" || exit 1 \ No newline at end of file diff --git a/test/tracking_test/__init__.py b/test/tracking_test/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/test/tracking_test/docker-compose.yml b/test/tracking_test/docker-compose.yml new file mode 100644 index 0000000000000000000000000000000000000000..ffad6941a541a6f30a8c81b2074e6d1c1458b5e8 --- /dev/null +++ b/test/tracking_test/docker-compose.yml @@ -0,0 +1,63 @@ +services: + zookeeper: + image: 'bitnami/zookeeper:3.5.5' + networks: + - tracking_net + environment: + - ALLOW_ANONYMOUS_LOGIN=yes + kafka: + image: 'bitnami/kafka:2.4.0' + ports: + - '9097:9097' + networks: + - tracking_net + environment: + - KAFKA_BROKER_ID=1 + - KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181 + - ALLOW_PLAINTEXT_LISTENER=yes + - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CLIENT:PLAINTEXT,EXTERNAL:PLAINTEXT + - KAFKA_CFG_LISTENERS=CLIENT://:29097,EXTERNAL://:9097 + - KAFKA_CFG_ADVERTISED_LISTENERS=CLIENT://kafka:29097,EXTERNAL://localhost:9097 + - KAFKA_INTER_BROKER_LISTENER_NAME=CLIENT + depends_on: + - zookeeper + detection_cbnet: + build: ../../detection_cbnet/docker-build-context + image: devbeh/detection_cbnet + depends_on: + - kafka + volumes: + - type: bind + source: ../../ + target: /opt/detection_cbnet/src + networks: + - tracking_net + runtime: nvidia + environment: + - NVIDIA_VISIBLE_DEVICES=${DEVBEH_DETECTION_NVIDIA_VISIBLE_DEVICES:-0} + command: 'python run.py detection_cbnet --log_dir /opt/detection_cbnet/logs + --weights_url https://www.dropbox.com/s/0vsk5zld23tgrka/htc_cbv2_swin_large22k_patch4_window7_mstrain_400-1400_giou_4conv1f_adamw_1x_coco.pth?dl=1 + --bootstrap_servers kafka:29097' + tracking: + build: ../../tracking/docker-build-context + image: devbeh/tracking + depends_on: + - kafka + volumes: + - type: bind + # Подключаем директорию с кодом. 
Данный volume не является read_only, поскольку программа может автоматически
+        # скачать и сохранить в нем файлы весов моделей
+        source: ../../
+        target: /opt/tracking/src
+    networks:
+      - tracking_net
+    runtime: nvidia
+    environment:
+      - NVIDIA_VISIBLE_DEVICES=${DEVBEH_TRACKING_NVIDIA_VISIBLE_DEVICES:-0}
+    command: 'python run.py tracking --log_dir /opt/tracking/logs
+      --weights_url ${DEVBEH_TRACKING_WEIGHTS_URL:-https://drive.google.com/u/0/uc?export=download&confirm=2ySX&id=1HX2_JpMOjOIj1Z9rJjoet9XNy_cCAs5U}
+      --frames_commit_latency 100
+      --bootstrap_servers kafka:29097'
+networks:
+  tracking_net:
+    driver: bridge
diff --git a/test/tracking_test/tracking-test-env-start.sh b/test/tracking_test/tracking-test-env-start.sh
new file mode 100755
index 0000000000000000000000000000000000000000..5eb7b5c1753b567e5f6c8893fa0462c4ee531e19
--- /dev/null
+++ b/test/tracking_test/tracking-test-env-start.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+current_dir=$(pwd)
+cd "$(dirname "${BASH_SOURCE[0]}")" || exit 1
+docker-compose up -d --force-recreate --build
+cd "$current_dir" || exit 1
\ No newline at end of file
diff --git a/test/tracking_test/tracking-test-env-stop.sh b/test/tracking_test/tracking-test-env-stop.sh
new file mode 100755
index 0000000000000000000000000000000000000000..8ab874984076ec390d48eca6aff1de54aeef804b
--- /dev/null
+++ b/test/tracking_test/tracking-test-env-stop.sh
@@ -0,0 +1,6 @@
+#!/bin/bash
+current_dir=$(pwd)
+cd "$(dirname "${BASH_SOURCE[0]}")" || exit 1
+docker-compose down
+docker-compose rm
+cd "$current_dir" || exit 1
\ No newline at end of file
diff --git a/test/tracking_test/tracking_general_test.py b/test/tracking_test/tracking_general_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..9963648cd05d208eae7f8a9c95c78d90de3b2d80
--- /dev/null
+++ b/test/tracking_test/tracking_general_test.py
@@ -0,0 +1,255 @@
+import os
+import shutil
+import unittest
+
+# noinspection PyProtectedMember
+import cv2
+import matplotlib.pyplot as plt
+import numpy as np
+from kafka import KafkaProducer, KafkaConsumer, TopicPartition
+
+from common.communication.messages_pb2 import PerceptionRequest, DetectionReq, TrackingReq
+from test.common_test.kafka_common_test import KafkaRequestProcessorTest
+
+colors = ['blue', 'red', 'green', 'yellow', 'black', 'brown', 'azure', 'pink', 'silver', 'purple', 'lime',
+          'coral']
+
+tracking_classes = {1, 2, 3, 4, 6, 8, 16, 17, 25, 27, 29}
+
+
+def get_video_fps(video):
+    (major_ver, minor_ver, subminor_ver) = (cv2.__version__).split('.')
+
+    if int(major_ver) < 3:
+        fps = video.get(cv2.cv.CV_CAP_PROP_FPS)
+    else:
+        fps = video.get(cv2.CAP_PROP_FPS)
+
+    return max(1, round(fps))
+
+
+# noinspection DuplicatedCode
+class TrackingServiceTest(unittest.TestCase):
+
+    def test_general(self):
+        """To run this test, start a test Kafka on port 9097 together with the test detection_cbnet and
+        tracking services working with it. This can be done by running the tracking-test-env-start.sh script."""
+
+        self.bootstrap_servers = 'localhost:9097'
+        self.detection_topic_name = 'detection-requests'
+        self.detection_result_topic_prefix = 'detection-results-'
+        self.tracking_topic_name = 'tracking-requests'
+        self.tracking_result_topic_prefix = 'tracking-results-'
+
+        TrackingServiceTest.ensure_topic_empty(self.bootstrap_servers, self.detection_topic_name, 0)
+        TrackingServiceTest.ensure_topic_empty(self.bootstrap_servers, self.tracking_topic_name, 0)
+
+        self.producer: KafkaProducer = KafkaRequestProcessorTest.get_test_producer(self.bootstrap_servers)
+
+        self.detection_consumer: KafkaConsumer = KafkaRequestProcessorTest.get_test_consumer(self.bootstrap_servers,
+                                                                                             client_id='test-det-c')
+        self.detection_tp = TopicPartition(self.detection_result_topic_prefix + '1', 0)
+        self.detection_consumer.assign([self.detection_tp])
+        self.detection_consumer.seek(self.detection_tp, 0)
+
+        self.tracking_consumer: KafkaConsumer = KafkaRequestProcessorTest.get_test_consumer(self.bootstrap_servers,
+                                                                                            client_id='test-track-c')
+        self.tracking_tp = TopicPartition(self.tracking_result_topic_prefix + '1', 0)
+        self.tracking_consumer.assign([self.tracking_tp])
+        self.tracking_consumer.seek(self.tracking_tp, 0)
+
+        video_dir_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'videos/')
+        video_names = os.listdir(video_dir_path)
+
+        result_fps = 10
+        batch_size = 10
+        result_dir_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'results/')
+        if os.path.exists(result_dir_path):
+            shutil.rmtree(result_dir_path)
+        os.mkdir(result_dir_path)
+
+        for video_id, video_name in enumerate(video_names):
+            video_path = os.path.join(video_dir_path, video_name)
+            video_src = cv2.VideoCapture(video_path)
+            fps = get_video_fps(video_src)
+            skip_frames = round(fps / result_fps - 1)
+            skip_frames = max(0, skip_frames)
+            video_width = int(video_src.get(cv2.CAP_PROP_FRAME_WIDTH))
+            video_height = int(video_src.get(cv2.CAP_PROP_FRAME_HEIGHT))
+
+            video_dst_path = os.path.join(result_dir_path, os.path.splitext(video_name)[0] + '-res.avi')
+            video_dst = cv2.VideoWriter(video_dst_path, cv2.VideoWriter_fourcc(*'XVID'), result_fps,
+                                        (video_width, video_height))
+
+            print('Starting processing ' + video_name)
+            frame_batch = []
+            first_frame_id = 1
+            while True:
+                success, frame = video_src.read()
+                if not success:
+                    break
+                frame = cv2.imencode('.jpg', frame)[1].tobytes()
+                frame_batch.append(frame)
+                for _ in range(skip_frames):
+                    video_src.read()
+
+                if len(frame_batch) == batch_size:
+                    first_frame_id = self.handle_frame_batch(frame_batch, first_frame_id, video_id, video_dst, False)
+                    frame_batch = []
+
+            self.handle_frame_batch(frame_batch, first_frame_id, video_id, video_dst, True)
+
+            video_src.release()
+            video_dst.release()
+
+    def handle_frame_batch(self, frame_batch, first_frame_id, video_id, video_dst, finished):
+        """Sends the frames to the object detector, receives the result and passes it on to the tracker. Then
+        waits for the tracker's result and writes the frames with the marked objects into the resulting video.
+        Returns first_frame_id + the batch size."""
+
+        print("Sending batch to detector " + str(first_frame_id))
+        for i, frame in enumerate(frame_batch):
+            self.send_detection(manager_id=1, video_id=video_id, frame_id=first_frame_id + i, image=frame)
+        if finished:
+            self.send_detection(manager_id=1, video_id=video_id, finished=True)
+        self.producer.flush()
+
+        expected_len = len(frame_batch) + (1 if finished else 0)
+        detection_tp_results = KafkaRequestProcessorTest.wait_for_results(self.detection_consumer, expected_len, 60_000)
+        self.assertEqual(1, len(detection_tp_results))
+
+        detection_results = detection_tp_results[self.detection_tp]
+        self.assertEqual(expected_len, len(detection_results))
+
+        detection_batch = []
+        for r in detection_results:
+            self.assertEqual(r.err.msg, "")
+            if r.frame_id != -1:
+                # self.assertGreater(len(r.detection.entries), 0)
+                detection_batch.append(r.detection.entries)
+
+        print("Sending batch to tracker " + str(first_frame_id))
+
+        for i, (frame, detections) in enumerate(zip(frame_batch, detection_batch)):
+            self.send_tracking(manager_id=1, video_id=video_id, frame_id=first_frame_id + i, image=frame,
+                               detections=detections)
+        if finished:
+            self.send_tracking(manager_id=1, video_id=video_id, finished=True)
+        self.producer.flush()
+
+        tracking_tp_results = KafkaRequestProcessorTest.wait_for_results(self.tracking_consumer, expected_len, 60_000)
+        self.assertEqual(1, len(tracking_tp_results))
+
+        tracking_results = tracking_tp_results[self.tracking_tp]
+        self.assertEqual(expected_len, len(tracking_results))
+
+        for frame, r in zip(frame_batch, tracking_results):
+            self.assertEqual(r.err.msg, "")
+            if r.frame_id != -1:
+                # self.assertGreater(len(r.tracking.entries), 0)
+                boxes = []
+                for tracking_entry in r.tracking.entries:
+                    boxes.append({'id': tracking_entry.id,
+                                  'class': tracking_entry.detection.class_id,
+                                  'score': tracking_entry.detection.score,
+                                  'box': [tracking_entry.detection.box_top_left_x,
+                                          tracking_entry.detection.box_top_left_y,
+                                          tracking_entry.detection.box_bottom_right_x,
+                                          tracking_entry.detection.box_bottom_right_y]})
+
+                frame = np.frombuffer(frame, np.uint8)
+                frame = cv2.imdecode(frame, cv2.IMREAD_ANYCOLOR)
+                frame = draw_boxes(frame, boxes)
+                video_dst.write(frame)
+
+        return first_frame_id + len(frame_batch)
+
+    # noinspection DuplicatedCode
+    @staticmethod
+    def ensure_topic_empty(bootstrap_servers, topic_name, partition):
+        consumer: KafkaConsumer = KafkaRequestProcessorTest.get_test_consumer(bootstrap_servers,
+                                                                              client_id='check-offset-c')
+        tp = TopicPartition(topic_name, partition)
+        offsets = consumer.end_offsets([tp])
+        if not len(offsets):
+            raise RuntimeError('No topic offset')
+        offset = offsets[tp]
+        if offset > 0:
+            raise RuntimeError('Topic partition {}:{} is not empty'.format(topic_name, partition))
+        consumer.close()
+
+    def send_detection(self, manager_id, video_id, frame_id=None, finished=None, image=None):
+        r = PerceptionRequest(manager_id=manager_id, video_id=video_id)
+        if frame_id is not None:
+            r.frame_id = frame_id
+        if finished is not None:
+            r.finished = finished
+        if image is not None:
+            r.detection.CopyFrom(DetectionReq(image=image))
+        self.producer.send(topic=self.detection_topic_name, partition=0, value=r)
+
+    def send_tracking(self, manager_id, video_id, frame_id=None, finished=None, image=None,
+                      detections=None):
+        r = PerceptionRequest(manager_id=manager_id, video_id=video_id)
+        if frame_id is not None:
+            r.frame_id = frame_id
+        if finished is not None:
+            r.finished = finished
+        if image is not None:
+            tracking_req = TrackingReq(image=image)
+            if detections is not None:
+                for det in detections:
+                    # forward only detections whose class is in tracking_classes
+                    if det.class_id in tracking_classes:
+                        tracking_req.entries.append(det)
+            r.tracking.CopyFrom(tracking_req)
+        self.producer.send(topic=self.tracking_topic_name, partition=0, value=r)
+
+
+def draw_boxes(frame, boxes):
+    """
+    Draws bounding boxes on the given frame.
+    :param frame: numpy array - [height, width, 3] - BGR format
+    :param boxes: [{box: [tl_x, tl_y, br_x, br_y], class: ..., score: ..., id: ...}]
+    :return: numpy array - [height, width, 3] - BGR format
+    """
+
+    frame = frame[:, :, (2, 1, 0)]
+
+    sizes = np.shape(frame)
+    height = float(sizes[0])
+    width = float(sizes[1])
+
+    fig = plt.figure()
+    fig.set_size_inches(width / 100, height / 100)
+
+    ax = plt.Axes(fig, [0., 0., 1., 1.])
+    ax.set_axis_off()
+    ax.imshow(frame)
+
+    for box_info in boxes:
+        id = box_info["id"]
+        box = box_info["box"]
+        class_id = box_info["class"]
+        score = round(box_info["score"], 2)
+
+        color_id = id % len(colors)
+        ax.add_patch(
+            plt.Rectangle(
+                (box[0], box[1]),  # x,y
+                box[2] - box[0],  # width
+                box[3] - box[1],  # height
+                fill=False, linewidth=1.0, color=colors[color_id]
+            ))
+
+        font_size = max(6, min(15, int((box[2] - box[0])/8)))
+        ax.annotate("{}-{}-{}".format(id, class_id, score),
+                    (box[0] + (box[2] - box[0]) / 2.0, box[1] + 2),
+                    color=colors[color_id], fontsize=font_size, ha='center', va='center')
+
+    fig.add_axes(ax)
+    fig.canvas.draw()
+    new_frame = np.array(fig.canvas.renderer._renderer)[:, :, 0:3]
+
+    plt.close()
+    return new_frame[:, :, (2, 1, 0)]
\ No newline at end of file
diff --git a/tracking/docker-build-context/Dockerfile b/tracking/docker-build-context/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..4a8a78048933bcb59c3e56ff2a61183a75efb16b
--- /dev/null
+++ b/tracking/docker-build-context/Dockerfile
@@ -0,0 +1,25 @@
+FROM pytorch/pytorch:1.8.1-cuda10.2-cudnn7-devel
+
+WORKDIR /opt/tracking/install
+
+RUN apt update
+RUN apt install -y less nano git
+# For pycurl and wheel
+RUN apt install -y libcurl4-openssl-dev libssl-dev libcairo2-dev libgirepository1.0-dev libcairo2-dev python3-cairo-dev
+# cv2
+RUN apt install -y ffmpeg libsm6 libxext6 libxrender-dev
+
+# Install byte_track
+COPY byte_track ./byte_track
+WORKDIR /opt/tracking/install/byte_track
+RUN pip install -r requirements.txt
+RUN python setup.py develop
+RUN pip install cython; pip install 'git+https://github.com/cocodataset/cocoapi.git#subdirectory=PythonAPI'
+
+WORKDIR /opt/tracking/install
+COPY additional-requirements.txt ./
+RUN pip install -r additional-requirements.txt
+
+WORKDIR /opt/tracking/logs
+# Подключаем файлы программы при помощи bind. 
В самом контейнере хранятся только зависимости и логи при работе системы +WORKDIR /opt/tracking/src \ No newline at end of file diff --git a/tracking/docker-build-context/additional-requirements.txt b/tracking/docker-build-context/additional-requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..e6f419a89404d63c958367cf72e0659fd95e5de3 --- /dev/null +++ b/tracking/docker-build-context/additional-requirements.txt @@ -0,0 +1,8 @@ +# missing from the byte_track +cython-bbox +kafka-python==2.0.2 +matplotlib==2.2.2 +opencv-python==4.1.2.30 +plotly==4.3.0 +protobuf==3.17.3 +pycurl==7.43.0 \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/.gitignore b/tracking/docker-build-context/byte_track/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..34812b5f8a8a85f1e481f9f1025733290fb832cf --- /dev/null +++ b/tracking/docker-build-context/byte_track/.gitignore @@ -0,0 +1,141 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# output
+docs/api
+.code-workspace.code-workspace
+*.pkl
+*.npy
+*.pth
+*.onnx
+*.engine
+events.out.tfevents*
+pretrained
+YOLOX_outputs
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/Dockerfile b/tracking/docker-build-context/byte_track/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..7fdf60f542357e0e18025da460e051893426c8e9
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/Dockerfile
@@ -0,0 +1,55 @@
+FROM nvcr.io/nvidia/tensorrt:21.09-py3
+
+ENV DEBIAN_FRONTEND=noninteractive
+ARG USERNAME=user
+ARG WORKDIR=/workspace/byte_track
+
+RUN apt-get update && apt-get install -y \
+    automake autoconf libpng-dev nano python3-pip \
+    curl zip unzip libtool swig zlib1g-dev pkg-config \
+    python3-mock libpython3-dev libpython3-all-dev \
+    g++ gcc cmake make pciutils cpio gosu wget \
+    libgtk-3-dev libxtst-dev sudo apt-transport-https \
+    build-essential gnupg git xz-utils vim \
+    libva-drm2 libva-x11-2 vainfo libva-wayland2 libva-glx2 \
+    libva-dev libdrm-dev xorg xorg-dev protobuf-compiler \
+    openbox libx11-dev libgl1-mesa-glx libgl1-mesa-dev \
+    libtbb2 libtbb-dev libopenblas-dev libopenmpi-dev \
+    && sed -i 's/# set linenumbers/set linenumbers/g' /etc/nanorc \
+    && apt clean \
+    && rm -rf /var/lib/apt/lists/*
+
+# clone into byte_track explicitly: a bare `git clone` would create ./ByteTrack
+# and the `cd byte_track` below would fail
+RUN git clone https://github.com/ifzhang/ByteTrack byte_track \
+    && cd byte_track \
+    && git checkout 3434c5e8bc6a5ae8ad530528ba8d9a431967f237 \
+    && mkdir -p YOLOX_outputs/yolox_x_mix_det/track_vis \
+    && sed -i 's/torch>=1.7/torch==1.9.1+cu111/g' requirements.txt \
+    && sed -i 's/torchvision==0.10.0/torchvision==0.10.1+cu111/g' requirements.txt \
+    && sed -i "s/'cuda'/0/g" tools/demo_track.py \
+    && pip3 install pip --upgrade \
+    && pip3 install -r requirements.txt -f https://download.pytorch.org/whl/torch_stable.html \
+    && python3 setup.py develop \
+    && pip3 install cython \
+    && pip3 install 'git+https://github.com/cocodataset/cocoapi.git#subdirectory=PythonAPI' \
+    && pip3 install cython_bbox gdown \
+    && ldconfig \
+    && pip cache purge
+
+RUN git clone https://github.com/NVIDIA-AI-IOT/torch2trt \
+    && cd torch2trt \
+    && git checkout 0400b38123d01cc845364870bdf0a0044ea2b3b2 \
+    # https://github.com/NVIDIA-AI-IOT/torch2trt/issues/619
+    && wget https://github.com/NVIDIA-AI-IOT/torch2trt/commit/8b9fb46ddbe99c2ddf3f1ed148c97435cbeb8fd3.patch \
+    && git apply 8b9fb46ddbe99c2ddf3f1ed148c97435cbeb8fd3.patch \
+    && python3 setup.py install
+
+RUN echo "root:root" | chpasswd \
+    && adduser --disabled-password --gecos "" "${USERNAME}" \
+    && echo "${USERNAME}:${USERNAME}" | chpasswd \
+    && echo "%${USERNAME} ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers.d/${USERNAME} \
+    && chmod 0440 /etc/sudoers.d/${USERNAME}
+USER ${USERNAME}
+RUN sudo chown -R ${USERNAME}:${USERNAME} ${WORKDIR}
+WORKDIR ${WORKDIR}
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/LICENSE b/tracking/docker-build-context/byte_track/LICENSE
new file mode 100644
index 0000000000000000000000000000000000000000..dfba850f8da707522d7b1d71636ba3ed6c7d0a28
--- /dev/null
+++ 
b/tracking/docker-build-context/byte_track/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Yifu Zhang + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/tracking/docker-build-context/byte_track/README.md b/tracking/docker-build-context/byte_track/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e6cdf7008932b7761e234d7d7ef36c534f96c94a --- /dev/null +++ b/tracking/docker-build-context/byte_track/README.md @@ -0,0 +1,286 @@ +# ByteTrack + +[![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bytetrack-multi-object-tracking-by-1/multi-object-tracking-on-mot17)](https://paperswithcode.com/sota/multi-object-tracking-on-mot17?p=bytetrack-multi-object-tracking-by-1) + +[![PWC](https://img.shields.io/endpoint.svg?url=https://paperswithcode.com/badge/bytetrack-multi-object-tracking-by-1/multi-object-tracking-on-mot20-1)](https://paperswithcode.com/sota/multi-object-tracking-on-mot20-1?p=bytetrack-multi-object-tracking-by-1) + +#### ByteTrack is a simple, fast and strong multi-object tracker. + +

+ +> [**ByteTrack: Multi-Object Tracking by Associating Every Detection Box**](https://arxiv.org/abs/2110.06864) +> +> Yifu Zhang, Peize Sun, Yi Jiang, Dongdong Yu, Zehuan Yuan, Ping Luo, Wenyu Liu, Xinggang Wang +> +> *[arXiv 2110.06864](https://arxiv.org/abs/2110.06864)* + +## Demo Links +| Google Colab demo | Huggingface Demo | Original Paper: ByteTrack | +|:-:|:-:|:-:| +|[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1bDilg4cmXFa8HCKHbsZ_p16p0vrhLyu0?usp=sharing)|[![Hugging Face Spaces](https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-Spaces-blue)](https://huggingface.co/spaces/akhaliq/bytetrack)|[arXiv 2110.06864](https://arxiv.org/abs/2110.06864)| +* Integrated to [Huggingface Spaces](https://huggingface.co/spaces) with [Gradio](https://github.com/gradio-app/gradio). + + +## Abstract +Multi-object tracking (MOT) aims at estimating bounding boxes and identities of objects in videos. Most methods obtain identities by associating detection boxes whose scores are higher than a threshold. The objects with low detection scores, e.g. occluded objects, are simply thrown away, which brings non-negligible true object missing and fragmented trajectories. To solve this problem, we present a simple, effective and generic association method, tracking by associating every detection box instead of only the high score ones. For the low score detection boxes, we utilize their similarities with tracklets to recover true objects and filter out the background detections. When applied to 9 different state-of-the-art trackers, our method achieves consistent improvement on IDF1 score ranging from 1 to 10 points.To put forwards the state-of-the-art performance of MOT, we design a simple and strong tracker, named ByteTrack. For the first time, we achieve 80.3 MOTA, 77.3 IDF1 and 63.1 HOTA on the test set of MOT17 with 30 FPS running speed on a single V100 GPU. +
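As a reading aid for the abstract above, the two-stage association BYTE performs can be sketched as below. This is simplified pseudocode — `iou_match` and `new_track` are hypothetical helpers, and the real logic (Kalman prediction, track buffers) lives in yolox/tracker/byte_tracker.py:

```python
# Simplified sketch of BYTE's two-stage association (illustrative only).
def byte_associate(tracks, detections, high_thresh=0.6, low_thresh=0.1):
    high = [d for d in detections if d.score >= high_thresh]
    low = [d for d in detections if low_thresh <= d.score < high_thresh]

    # Stage 1: match predicted track locations against high-score boxes.
    matches, leftover_tracks, leftover_high = iou_match(tracks, high)

    # Stage 2: leftover tracks get a second chance against low-score boxes,
    # recovering occluded objects instead of discarding them as background.
    low_matches, lost_tracks, _ = iou_match(leftover_tracks, low)

    # Unmatched high-score boxes start new tracks; twice-unmatched tracks are lost.
    started = [new_track(d) for d in leftover_high]
    return matches + low_matches, started, lost_tracks
```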

+
+## Tracking performance
+### Results on MOT challenge test set
+| Dataset    | MOTA | IDF1 | HOTA | MT | ML | FP | FN | IDs | FPS |
+|------------|-------|------|------|-------|-------|------|------|------|------|
+|MOT17       | 80.3 | 77.3 | 63.1 | 53.2% | 14.5% | 25491 | 83721 | 2196 | 29.6 |
+|MOT20       | 77.8 | 75.2 | 61.3 | 69.2% | 9.5% | 26249 | 87594 | 1223 | 13.7 |
+
+### Visualization results on MOT challenge test set
+
+
+## Installation
+### 1. Installing on the host machine
+Step1. Install ByteTrack.
+```shell
+git clone https://github.com/ifzhang/ByteTrack.git byte_track
+cd byte_track
+pip3 install -r requirements.txt
+python3 setup.py develop
+```
+
+Step2. Install [pycocotools](https://github.com/cocodataset/cocoapi).
+
+```shell
+pip3 install cython; pip3 install 'git+https://github.com/cocodataset/cocoapi.git#subdirectory=PythonAPI'
+```
+
+Step3. Others
+```shell
+pip3 install cython_bbox
+```
+### 2. Docker build
+```shell
+docker build -t bytetrack:latest .
+
+# Startup sample
+mkdir -p pretrained && \
+mkdir -p YOLOX_outputs && \
+xhost +local: && \
+docker run --gpus all -it --rm \
+-v $PWD/pretrained:/workspace/byte_track/pretrained \
+-v $PWD/datasets:/workspace/byte_track/datasets \
+-v $PWD/YOLOX_outputs:/workspace/byte_track/YOLOX_outputs \
+-v /tmp/.X11-unix/:/tmp/.X11-unix:rw \
+--device /dev/video0:/dev/video0:mwr \
+--net=host \
+-e XDG_RUNTIME_DIR=$XDG_RUNTIME_DIR \
+-e DISPLAY=$DISPLAY \
+--privileged \
+bytetrack:latest
+```
+
+## Data preparation
+
+Download [MOT17](https://motchallenge.net/), [MOT20](https://motchallenge.net/), [CrowdHuman](https://www.crowdhuman.org/), [Cityperson](https://github.com/Zhongdao/Towards-Realtime-MOT/blob/master/DATASET_ZOO.md), [ETHZ](https://github.com/Zhongdao/Towards-Realtime-MOT/blob/master/DATASET_ZOO.md) and put them under <ByteTrack_HOME>/datasets in the following structure:
+```
+datasets
+   |——————mot
+   |        └——————train
+   |        └——————test
+   └——————crowdhuman
+   |        └——————Crowdhuman_train
+   |        └——————Crowdhuman_val
+   |        └——————annotation_train.odgt
+   |        └——————annotation_val.odgt
+   └——————MOT20
+   |        └——————train
+   |        └——————test
+   └——————Cityscapes
+   |        └——————images
+   |        └——————labels_with_ids
+   └——————ETHZ
+            └——————eth01
+            └——————...
+            └——————eth07
+```
+
+Then, you need to turn the datasets to COCO format and mix different training data:
+
+```shell
+cd <ByteTrack_HOME>
+python3 tools/convert_mot17_to_coco.py
+python3 tools/convert_mot20_to_coco.py
+python3 tools/convert_crowdhuman_to_coco.py
+python3 tools/convert_cityperson_to_coco.py
+python3 tools/convert_ethz_to_coco.py
+```
+
+Before mixing different datasets, you need to follow the operations in [mix_xxx.py](https://github.com/ifzhang/ByteTrack/blob/c116dfc746f9ebe07d419caa8acba9b3acfa79a6/tools/mix_data_ablation.py#L6) to create the data folders and links. 
Finally, you can mix the training data:
+
+```shell
+cd <ByteTrack_HOME>
+python3 tools/mix_data_ablation.py
+python3 tools/mix_data_test_mot17.py
+python3 tools/mix_data_test_mot20.py
+```
+
+
+## Model zoo
+
+### Ablation model
+
+Train on CrowdHuman and MOT17 half train, evaluate on MOT17 half val
+
+| Model | MOTA | IDF1 | IDs | FPS |
+|------------|-------|------|------|------|
+|ByteTrack_ablation [[google]](https://drive.google.com/file/d/1iqhM-6V_r1FpOlOzrdP_Ejshgk0DxOob/view?usp=sharing), [[baidu(code:eeo8)]](https://pan.baidu.com/s/1W5eRBnxc4x9V8gm7dgdEYg) | 76.6 | 79.3 | 159 | 29.6 |
+
+### MOT17 test model
+
+Train on CrowdHuman, MOT17, Cityperson and ETHZ, evaluate on MOT17 train
+
+| Model | MOTA | IDF1 | IDs | FPS |
+|------------|-------|------|------|------|
+|bytetrack_x_mot17 [[google]](https://drive.google.com/file/d/1P4mY0Yyd3PPTybgZkjMYhFri88nTmJX5/view?usp=sharing), [[baidu(code:ic0i)]](https://pan.baidu.com/s/1OJKrcQa_JP9zofC6ZtGBpw) | 90.0 | 83.3 | 422 | 29.6 |
+|bytetrack_l_mot17 [[google]](https://drive.google.com/file/d/1XwfUuCBF4IgWBWK2H7oOhQgEj9Mrb3rz/view?usp=sharing), [[baidu(code:1cml)]](https://pan.baidu.com/s/1242adimKM6TYdeLU2qnuRA) | 88.7 | 80.7 | 460 | 43.7 |
+|bytetrack_m_mot17 [[google]](https://drive.google.com/file/d/11Zb0NN_Uu7JwUd9e6Nk8o2_EUfxWqsun/view?usp=sharing), [[baidu(code:u3m4)]](https://pan.baidu.com/s/1fKemO1uZfvNSLzJfURO4TQ) | 87.0 | 80.1 | 477 | 54.1 |
+|bytetrack_s_mot17 [[google]](https://drive.google.com/file/d/1uSmhXzyV1Zvb4TJJCzpsZOIcw7CCJLxj/view?usp=sharing), [[baidu(code:qflm)]](https://pan.baidu.com/s/1PiP1kQfgxAIrnGUbFP6Wfg) | 79.2 | 74.3 | 533 | 64.5 |
+
+
+### MOT20 test model
+
+Train on CrowdHuman and MOT20, evaluate on MOT20 train
+
+| Model | MOTA | IDF1 | IDs | FPS |
+|------------|-------|------|------|------|
+|bytetrack_x_mot20 [[google]](https://drive.google.com/file/d/1HX2_JpMOjOIj1Z9rJjoet9XNy_cCAs5U/view?usp=sharing), [[baidu(code:3apd)]](https://pan.baidu.com/s/1bowJJj0bAnbhEQ3_6_Am0A) | 93.4 | 89.3 | 1057 | 17.5 |
+
+
+## Training
+
+The COCO pretrained YOLOX model can be downloaded from their [model zoo](https://github.com/Megvii-BaseDetection/YOLOX/tree/0.1.0). After downloading the pretrained models, you can put them under <ByteTrack_HOME>/pretrained.
+
+* **Train ablation model (MOT17 half train and CrowdHuman)**
+
+```shell
+cd <ByteTrack_HOME>
+python3 tools/train.py -f exps/example/mot/yolox_x_ablation.py -d 8 -b 48 --fp16 -o -c pretrained/yolox_x.pth
+```
+
+* **Train MOT17 test model (MOT17 train, CrowdHuman, Cityperson and ETHZ)**
+
+```shell
+cd <ByteTrack_HOME>
+python3 tools/train.py -f exps/example/mot/yolox_x_mix_det.py -d 8 -b 48 --fp16 -o -c pretrained/yolox_x.pth
+```
+
+* **Train MOT20 test model (MOT20 train, CrowdHuman)**
+
+For MOT20, you need to clip the bounding boxes inside the image.
+
+Add clip operation in [line 134-135 in data_augment.py](https://github.com/ifzhang/ByteTrack/blob/72cd6dd24083c337a9177e484b12bb2b5b3069a6/yolox/data/data_augment.py#L134), [line 122-125 in mosaicdetection.py](https://github.com/ifzhang/ByteTrack/blob/72cd6dd24083c337a9177e484b12bb2b5b3069a6/yolox/data/datasets/mosaicdetection.py#L122), [line 217-225 in mosaicdetection.py](https://github.com/ifzhang/ByteTrack/blob/72cd6dd24083c337a9177e484b12bb2b5b3069a6/yolox/data/datasets/mosaicdetection.py#L217), [line 115-118 in boxes.py](https://github.com/ifzhang/ByteTrack/blob/72cd6dd24083c337a9177e484b12bb2b5b3069a6/yolox/utils/boxes.py#L115); the sketch below shows the idea. 
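A rough illustration of what those clip edits add (the helper name and array shape are assumptions, not the actual patch):

```python
# Roughly the clip the linked edits add (illustrative only).
import numpy as np

def clip_boxes(boxes: np.ndarray, height: int, width: int) -> np.ndarray:
    """Clamp (x1, y1, x2, y2) rows of an (N, 4) array to the image bounds."""
    boxes[:, 0::2] = np.clip(boxes[:, 0::2], 0, width - 1)   # x1, x2
    boxes[:, 1::2] = np.clip(boxes[:, 1::2], 0, height - 1)  # y1, y2
    return boxes
```

With the clip in place, training uses the same entry point: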
+
+```shell
+cd <ByteTrack_HOME>
+python3 tools/train.py -f exps/example/mot/yolox_x_mix_mot20_ch.py -d 8 -b 48 --fp16 -o -c pretrained/yolox_x.pth
+```
+
+* **Train custom dataset**
+
+First, you need to prepare your dataset in COCO format. You can refer to [MOT-to-COCO](https://github.com/ifzhang/ByteTrack/blob/main/tools/convert_mot17_to_coco.py) or [CrowdHuman-to-COCO](https://github.com/ifzhang/ByteTrack/blob/main/tools/convert_crowdhuman_to_coco.py). Then, you need to create an Exp file for your dataset. You can refer to the [CrowdHuman](https://github.com/ifzhang/ByteTrack/blob/main/exps/example/mot/yolox_x_ch.py) training Exp file. Don't forget to modify get_data_loader() and get_eval_loader in your Exp file. Finally, you can train bytetrack on your dataset by running:
+
+```shell
+cd <ByteTrack_HOME>
+python3 tools/train.py -f exps/example/mot/your_exp_file.py -d 8 -b 48 --fp16 -o -c pretrained/yolox_x.pth
+```
+
+
+## Tracking
+
+* **Evaluation on MOT17 half val**
+
+Run ByteTrack:
+
+```shell
+cd <ByteTrack_HOME>
+python3 tools/track.py -f exps/example/mot/yolox_x_ablation.py -c pretrained/bytetrack_ablation.pth.tar -b 1 -d 1 --fp16 --fuse
+```
+You can get 76.6 MOTA using our pretrained model.
+
+Run other trackers:
+```shell
+python3 tools/track_sort.py -f exps/example/mot/yolox_x_ablation.py -c pretrained/bytetrack_ablation.pth.tar -b 1 -d 1 --fp16 --fuse
+python3 tools/track_deepsort.py -f exps/example/mot/yolox_x_ablation.py -c pretrained/bytetrack_ablation.pth.tar -b 1 -d 1 --fp16 --fuse
+python3 tools/track_motdt.py -f exps/example/mot/yolox_x_ablation.py -c pretrained/bytetrack_ablation.pth.tar -b 1 -d 1 --fp16 --fuse
+```
+
+* **Test on MOT17**
+
+Run ByteTrack:
+
+```shell
+cd <ByteTrack_HOME>
+python3 tools/track.py -f exps/example/mot/yolox_x_mix_det.py -c pretrained/bytetrack_x_mot17.pth.tar -b 1 -d 1 --fp16 --fuse
+python3 tools/interpolation.py
+```
+Submit the txt files to [MOTChallenge](https://motchallenge.net/) website and you can get 79+ MOTA (For 80+ MOTA, you need to carefully tune the test image size and high score detection threshold of each sequence).
+
+* **Test on MOT20**
+
+We use the input size 1600 x 896 for MOT20-04, MOT20-07 and 1920 x 736 for MOT20-06, MOT20-08. You can edit it in [yolox_x_mix_mot20_ch.py](https://github.com/ifzhang/ByteTrack/blob/main/exps/example/mot/yolox_x_mix_mot20_ch.py).
+
+Run ByteTrack:
+
+```shell
+cd <ByteTrack_HOME>
+python3 tools/track.py -f exps/example/mot/yolox_x_mix_mot20_ch.py -c pretrained/bytetrack_x_mot20.pth.tar -b 1 -d 1 --fp16 --fuse --match_thresh 0.7 --mot20
+python3 tools/interpolation.py
+```
+Submit the txt files to [MOTChallenge](https://motchallenge.net/) website and you can get 77+ MOTA (For higher MOTA, you need to carefully tune the test image size and high score detection threshold of each sequence).
+
+## Applying BYTE to other trackers
+
+See [tutorials](https://github.com/ifzhang/ByteTrack/tree/main/tutorials). 
+
+## Combining BYTE with other detectors
+
+Suppose you already have detection results 'dets' (x1, y1, x2, y2, score) from another detector. You can simply pass them to BYTETracker (you first need to adapt some post-processing code in [byte_tracker.py](https://github.com/ifzhang/ByteTrack/blob/main/yolox/tracker/byte_tracker.py) to the format of your detection results):
+
+```python
+from yolox.tracker.byte_tracker import BYTETracker
+
+tracker = BYTETracker(args)
+for image in images:
+    dets = detector(image)  # one (x1, y1, x2, y2, score) row per detection
+    online_targets = tracker.update(dets, info_imgs, img_size)
+```
+
+You can get the tracking results for each frame from 'online_targets'. You can refer to [mot_evaluator.py](https://github.com/ifzhang/ByteTrack/blob/main/yolox/evaluators/mot_evaluator.py) for a full example of passing detection results to BYTETracker.
+
+## Demo
+
+![demo](assets/palace_demo.gif)
+
+```shell
+cd <ByteTrack_HOME>
+python3 tools/demo_track.py video -f exps/example/mot/yolox_x_mix_det.py -c pretrained/bytetrack_x_mot17.pth.tar --fp16 --fuse --save_result
+```
+
+## Deploy
+
+1. [ONNX export and ONNXRuntime](./deploy/ONNXRuntime)
+2. [TensorRT in Python](./deploy/TensorRT/python)
+3. [TensorRT in C++](./deploy/TensorRT/cpp)
+4. [ncnn in C++](./deploy/ncnn/cpp)
+
+## Citation
+
+```
+@article{zhang2021bytetrack,
+  title={ByteTrack: Multi-Object Tracking by Associating Every Detection Box},
+  author={Zhang, Yifu and Sun, Peize and Jiang, Yi and Yu, Dongdong and Yuan, Zehuan and Luo, Ping and Liu, Wenyu and Wang, Xinggang},
+  journal={arXiv preprint arXiv:2110.06864},
+  year={2021}
+}
+```
+
+## Acknowledgement
+
+A large part of the code is borrowed from [YOLOX](https://github.com/Megvii-BaseDetection/YOLOX), [FairMOT](https://github.com/ifzhang/FairMOT), [TransTrack](https://github.com/PeizeSun/TransTrack) and [JDE-Cpp](https://github.com/samylee/Towards-Realtime-MOT-Cpp). Many thanks for their wonderful work.
diff --git a/tracking/docker-build-context/byte_track/assets/MOT17-01-SDP.gif b/tracking/docker-build-context/byte_track/assets/MOT17-01-SDP.gif new file mode 100644 index 0000000000000000000000000000000000000000..d1f238e7c89d9adf3d5b5d11d19b8f7da1017adb Binary files /dev/null and b/tracking/docker-build-context/byte_track/assets/MOT17-01-SDP.gif differ diff --git a/tracking/docker-build-context/byte_track/assets/MOT17-07-SDP.gif b/tracking/docker-build-context/byte_track/assets/MOT17-07-SDP.gif new file mode 100644 index 0000000000000000000000000000000000000000..4a4df10cd453d1739c3f6ffe53718a5ea406345d Binary files /dev/null and b/tracking/docker-build-context/byte_track/assets/MOT17-07-SDP.gif differ diff --git a/tracking/docker-build-context/byte_track/assets/MOT20-07.gif b/tracking/docker-build-context/byte_track/assets/MOT20-07.gif new file mode 100644 index 0000000000000000000000000000000000000000..197ab433660bbf87e3061fa5e05d71a1a05b17e4 Binary files /dev/null and b/tracking/docker-build-context/byte_track/assets/MOT20-07.gif differ diff --git a/tracking/docker-build-context/byte_track/assets/MOT20-08.gif b/tracking/docker-build-context/byte_track/assets/MOT20-08.gif new file mode 100644 index 0000000000000000000000000000000000000000..24366289797b4e3e69fe1d7edd22bfd66d032afc Binary files /dev/null and b/tracking/docker-build-context/byte_track/assets/MOT20-08.gif differ diff --git a/tracking/docker-build-context/byte_track/assets/palace_demo.gif b/tracking/docker-build-context/byte_track/assets/palace_demo.gif new file mode 100644 index 0000000000000000000000000000000000000000..5bc9720fe0a7f7145c1c2cf5160ee932b0434238 Binary files /dev/null and b/tracking/docker-build-context/byte_track/assets/palace_demo.gif differ diff --git a/tracking/docker-build-context/byte_track/assets/sota.png b/tracking/docker-build-context/byte_track/assets/sota.png new file mode 100644 index 0000000000000000000000000000000000000000..89bcb73ec3d9747ce9e7e8dcdf839953d2f8c168 Binary files /dev/null and b/tracking/docker-build-context/byte_track/assets/sota.png differ diff --git a/tracking/docker-build-context/byte_track/assets/teasing.png b/tracking/docker-build-context/byte_track/assets/teasing.png new file mode 100644 index 0000000000000000000000000000000000000000..394e32cd9f6706c7d72209b5ef57247e5b998323 Binary files /dev/null and b/tracking/docker-build-context/byte_track/assets/teasing.png differ diff --git a/tracking/docker-build-context/byte_track/datasets/data_path/citypersons.train b/tracking/docker-build-context/byte_track/datasets/data_path/citypersons.train new file mode 100644 index 0000000000000000000000000000000000000000..0dc55728e07028ae42f53cedeb91419c6babd54b --- /dev/null +++ b/tracking/docker-build-context/byte_track/datasets/data_path/citypersons.train @@ -0,0 +1,2500 @@ +Cityscapes/images/train/jena/jena_000078_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000032_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000055_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000067_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000111_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000105_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000021_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000045_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000058_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000096_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000042_000019_leftImg8bit.png 
+Cityscapes/images/train/jena/jena_000109_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000065_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000068_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000052_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000051_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000091_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000037_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000050_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000092_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000101_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000081_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000074_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000040_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000030_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000059_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000100_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000034_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000089_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000104_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000080_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000082_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000044_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000026_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000115_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000023_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000095_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000102_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000094_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000010_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000009_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000070_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000039_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000079_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000071_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000018_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000008_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000076_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000090_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000085_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000116_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000049_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000022_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000118_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000004_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000087_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000047_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000007_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000033_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000097_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000028_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000086_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000073_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000056_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000069_000019_leftImg8bit.png 
+Cityscapes/images/train/jena/jena_000088_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000103_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000057_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000038_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000061_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000048_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000060_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000027_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000031_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000072_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000043_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000054_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000083_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000077_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000108_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000106_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000084_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000075_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000024_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000110_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000029_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000000_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000117_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000036_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000046_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000062_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000035_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000019_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000112_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000025_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000013_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000113_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000053_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000098_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000066_000019_leftImg8bit.png +Cityscapes/images/train/jena/jena_000041_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000110_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000107_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000087_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000188_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000175_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000147_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000043_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000042_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000191_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000189_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000007_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000112_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000032_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000085_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000077_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000144_000019_leftImg8bit.png 
+Cityscapes/images/train/stuttgart/stuttgart_000161_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000149_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000017_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000010_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000067_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000178_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000070_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000026_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000056_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000171_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000157_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000100_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000158_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000125_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000134_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000168_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000156_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000162_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000024_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000037_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000094_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000129_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000069_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000011_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000095_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000002_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000160_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000111_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000039_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000184_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000165_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000046_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000065_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000022_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000135_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000030_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000133_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000172_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000105_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000014_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000142_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000079_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000183_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000173_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000028_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000083_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000071_000019_leftImg8bit.png 
+Cityscapes/images/train/stuttgart/stuttgart_000106_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000169_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000126_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000019_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000148_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000078_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000023_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000108_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000153_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000033_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000088_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000027_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000016_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000181_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000044_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000182_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000140_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000186_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000090_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000099_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000124_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000176_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000009_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000008_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000004_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000048_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000020_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000093_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000152_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000155_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000138_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000136_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000103_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000145_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000163_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000101_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000005_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000029_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000063_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000139_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000150_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000123_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000064_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000190_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000092_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000091_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000130_000019_leftImg8bit.png 
+Cityscapes/images/train/stuttgart/stuttgart_000164_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000170_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000131_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000031_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000159_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000054_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000167_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000012_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000104_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000021_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000053_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000058_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000001_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000179_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000066_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000034_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000050_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000013_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000068_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000137_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000086_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000187_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000080_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000097_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000192_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000132_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000015_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000025_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000096_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000059_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000102_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000174_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000166_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000180_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000185_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000098_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000141_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000151_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000006_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000047_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000146_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000072_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000073_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000089_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000055_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000193_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000018_000019_leftImg8bit.png 
+Cityscapes/images/train/stuttgart/stuttgart_000109_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000076_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000074_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000177_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000127_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000121_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000084_000019_leftImg8bit.png +Cityscapes/images/train/stuttgart/stuttgart_000061_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000019_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000063_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000066_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000037_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000091_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000004_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000050_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000092_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000079_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000020_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000071_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000027_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000068_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000002_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000084_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000003_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000013_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000000_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000069_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000065_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000077_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000017_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000012_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000074_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000055_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000072_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000093_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000018_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000028_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000075_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000044_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000043_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000062_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000059_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000052_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000082_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000053_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000006_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000087_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000024_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000007_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000070_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000080_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000057_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000085_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000011_000019_leftImg8bit.png 
+Cityscapes/images/train/ulm/ulm_000064_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000005_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000067_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000060_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000040_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000010_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000073_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000023_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000031_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000089_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000039_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000016_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000058_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000047_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000030_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000078_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000041_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000086_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000051_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000014_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000056_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000015_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000008_000019_leftImg8bit.png +Cityscapes/images/train/ulm/ulm_000049_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000114_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000099_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000014_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000204_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000205_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000055_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000052_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000162_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000126_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000118_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000159_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000024_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000077_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000092_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000131_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000021_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000155_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000062_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000179_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000151_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000012_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000160_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000038_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000139_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000152_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000167_000019_leftImg8bit.png 
+Cityscapes/images/train/dusseldorf/dusseldorf_000031_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000081_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000111_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000149_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000108_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000202_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000036_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000170_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000174_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000005_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000173_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000008_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000163_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000022_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000056_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000019_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000140_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000175_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000088_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000078_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000135_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000107_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000124_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000083_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000029_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000009_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000156_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000000_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000143_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000066_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000192_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000082_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000057_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000119_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000079_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000067_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000068_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000172_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000073_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000030_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000037_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000063_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000069_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000209_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000070_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000169_000019_leftImg8bit.png 
+Cityscapes/images/train/dusseldorf/dusseldorf_000178_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000150_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000032_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000080_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000120_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000018_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000176_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000115_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000110_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000026_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000041_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000185_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000189_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000075_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000044_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000071_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000002_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000094_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000093_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000203_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000121_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000136_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000117_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000051_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000193_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000089_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000147_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000013_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000165_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000201_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000206_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000157_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000197_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000153_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000113_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000198_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000164_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000122_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000064_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000123_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000208_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000010_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000134_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000090_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000199_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000187_000019_leftImg8bit.png 
+Cityscapes/images/train/dusseldorf/dusseldorf_000015_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000154_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000035_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000180_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000074_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000025_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000086_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000210_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000195_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000040_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000065_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000200_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000076_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000096_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000158_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000023_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000145_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000072_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000104_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000060_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000125_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000027_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000098_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000211_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000138_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000061_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000190_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000084_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000034_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000045_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000137_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000181_000019_leftImg8bit.png +Cityscapes/images/train/dusseldorf/dusseldorf_000059_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000050_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000071_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000073_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000019_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000046_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000083_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000036_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000001_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000062_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000034_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000044_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000068_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000074_000019_leftImg8bit.png 
+Cityscapes/images/train/darmstadt/darmstadt_000075_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000055_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000025_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000029_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000082_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000006_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000053_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000043_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000016_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000031_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000037_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000064_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000048_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000008_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000010_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000078_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000042_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000084_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000045_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000070_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000033_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000059_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000035_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000054_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000018_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000080_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000026_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000030_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000028_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000056_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000024_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000066_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000049_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000003_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000058_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000081_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000002_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000051_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000041_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000023_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000021_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000022_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000007_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000000_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000047_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000067_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000015_000019_leftImg8bit.png 
+Cityscapes/images/train/darmstadt/darmstadt_000027_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000061_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000052_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000032_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000017_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000060_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000069_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000076_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000065_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000063_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000072_000019_leftImg8bit.png +Cityscapes/images/train/darmstadt/darmstadt_000079_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000040_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000074_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000103_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000089_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000037_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000019_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000101_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000072_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000008_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000051_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000015_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000020_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000114_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000062_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000115_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000070_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000069_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000081_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000005_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000119_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000079_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000096_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000110_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000117_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000054_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000002_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000042_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000012_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000085_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000045_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000077_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000084_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000031_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000034_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000011_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000021_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000022_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000073_000019_leftImg8bit.png 
+Cityscapes/images/train/zurich/zurich_000111_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000113_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000090_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000053_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000006_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000120_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000030_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000088_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000010_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000055_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000038_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000035_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000004_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000083_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000016_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000102_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000013_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000066_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000064_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000001_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000099_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000078_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000118_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000067_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000033_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000121_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000028_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000094_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000068_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000014_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000080_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000109_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000023_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000105_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000025_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000024_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000061_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000009_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000060_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000075_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000087_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000027_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000104_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000106_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000000_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000071_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000007_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000065_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000003_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000058_000019_leftImg8bit.png +Cityscapes/images/train/zurich/zurich_000018_000019_leftImg8bit.png 
+Cityscapes/images/train/zurich/zurich_000057_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000086_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000063_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000032_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000092_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000052_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000039_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000056_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000043_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000026_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000059_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000095_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000041_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000017_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000076_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000044_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000116_000019_leftImg8bit.png
+Cityscapes/images/train/zurich/zurich_000029_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000193_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000165_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000223_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000032_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000280_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000175_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000076_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000295_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000002_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000084_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000009_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000085_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000244_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000299_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000099_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000206_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000110_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000090_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000228_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000214_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000078_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000037_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000016_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000083_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000144_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000158_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000057_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000105_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000097_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000207_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000140_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000208_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000047_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000253_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000260_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000087_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000026_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000044_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000168_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000136_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000176_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000292_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000235_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000311_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000014_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000191_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000242_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000029_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000104_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000170_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000285_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000224_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000067_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000288_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000131_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000286_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000298_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000036_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000213_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000103_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000255_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000275_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000065_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000094_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000204_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000064_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000171_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000252_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000148_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000287_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000290_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000120_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000221_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000056_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000315_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000205_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000048_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000222_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000313_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000259_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000226_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000186_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000030_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000243_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000055_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000212_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000059_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000303_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000312_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000043_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000174_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000086_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000238_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000075_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000246_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000134_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000066_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000257_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000049_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000185_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000069_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000282_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000229_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000211_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000194_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000309_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000283_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000053_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000218_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000126_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000138_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000179_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000188_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000015_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000198_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000011_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000058_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000019_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000135_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000040_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000182_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000305_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000300_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000233_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000220_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000046_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000314_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000124_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000190_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000081_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000022_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000023_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000128_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000100_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000035_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000273_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000020_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000063_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000248_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000137_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000041_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000250_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000209_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000163_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000304_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000160_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000038_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000210_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000308_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000306_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000025_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000141_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000291_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000061_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000142_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000073_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000054_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000042_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000264_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000070_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000006_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000310_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000187_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000201_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000013_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000106_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000162_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000297_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000072_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000269_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000216_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000199_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000177_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000095_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000098_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000281_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000184_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000254_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000109_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000068_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000028_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000296_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000050_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000077_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000258_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000200_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000196_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000080_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000062_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000079_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000241_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000119_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000268_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000132_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000003_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000045_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000133_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000172_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000266_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000231_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000108_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000034_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000146_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000276_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000008_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000261_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000249_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000251_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000219_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000101_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000139_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000145_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000052_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000157_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000118_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000031_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000007_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000256_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000102_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000051_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000074_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000197_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000089_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000173_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000217_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000180_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000302_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000001_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000143_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000227_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000027_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000262_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000004_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000010_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000116_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000307_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000121_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000271_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000021_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000149_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000012_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000071_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000092_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000017_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000263_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000060_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000178_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000130_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000082_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000301_000019_leftImg8bit.png
+Cityscapes/images/train/bremen/bremen_000153_000019_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_036606_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_037039_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_025833_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_015645_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_014803_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_006026_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_023648_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_029203_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_028297_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_022210_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_022414_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_015038_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_020673_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_034936_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_024343_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_023040_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_000885_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_024855_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_015321_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_024196_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_002293_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_017216_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_006484_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_038150_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_006746_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_007651_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_014658_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_000600_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_016758_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_020776_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_004748_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_037223_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_015880_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_010562_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_025746_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_021606_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_016260_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_035958_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_009554_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_003674_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_003245_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_019188_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_008162_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_005936_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_001828_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_011711_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_016591_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_017453_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_016125_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_018195_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_007150_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_020899_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_011255_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_007950_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_014332_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_009951_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_002562_leftImg8bit.png
+Cityscapes/images/train/bochum/bochum_000000_023174_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_073314_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_084746_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_047108_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_067338_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_098862_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_008221_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_041667_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_046566_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_038511_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_057678_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_059720_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_007737_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_068693_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_086499_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_093572_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_000042_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_042885_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_025986_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_077927_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_048750_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_025802_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_028608_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_046510_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_082301_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_103856_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_080169_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_047157_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_038915_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_016928_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_049558_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_053886_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_065983_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_069096_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_104857_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_073549_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_015350_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_065604_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_073672_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_042505_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_104428_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_052122_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_063698_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_090398_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_030279_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_099902_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_067799_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_085321_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_102574_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_002095_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_097447_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_064269_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_067587_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_097086_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_021961_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_080674_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_089491_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_068916_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_080438_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_039420_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_074425_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_044747_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_098616_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_036427_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_032266_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_070334_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_032460_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_040021_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_063403_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_043944_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_031971_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_045437_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_073999_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_092850_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_006322_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_035568_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_045908_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_088054_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_074267_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_085073_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_103075_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_085645_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_066706_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_026675_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_028439_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_062371_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_020211_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_048138_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_019373_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_032906_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_019760_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_003488_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_059339_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_079657_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_083696_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_105464_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_096063_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_053776_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_039264_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_037279_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_088783_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_048494_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_061790_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_073389_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_018592_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_054850_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_064825_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_078579_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_086636_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_046619_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_070444_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_054029_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_077756_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_087216_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_047057_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_078407_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_091900_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_014030_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_037036_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_029676_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_088983_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_073758_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_093325_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_033506_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_016691_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_006192_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_053086_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_001613_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_065843_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_100300_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_079376_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_027304_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_067223_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_060907_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_046872_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_077144_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_089696_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_091155_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_002338_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_098061_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_074545_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_101724_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_057816_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_056229_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_098400_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_003904_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_054555_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_060215_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_029144_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_081299_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_044400_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_038446_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_103367_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_036003_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_076392_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_058591_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_046078_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_036527_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_099109_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_102379_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_105123_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_029378_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_095561_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_020563_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_069417_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_077642_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_062964_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_076966_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_038729_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_088197_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_048960_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_066424_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_037741_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_060586_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_094717_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_062039_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_105296_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_019892_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_044996_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_071675_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_080878_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_055039_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_023472_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_008494_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_005639_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_047390_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_069177_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_069289_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_044251_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_084865_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_050160_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_090742_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_103541_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_087822_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_094185_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_018878_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_055414_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_065055_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_055894_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_092476_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_106102_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_051855_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_001106_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_088939_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_071942_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_053486_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_074694_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_085982_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_071150_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_037161_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_071016_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_062710_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_085413_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_088627_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_096624_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_066988_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_034049_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_045704_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_014940_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_099368_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_004985_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_024251_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_061468_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_091038_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_047220_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_027857_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_052904_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_061048_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_032719_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_083586_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_105724_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_053563_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_028056_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_054220_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_016447_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_082187_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_022524_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_039546_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_030953_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_103186_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_057487_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_013577_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_078842_leftImg8bit.png
+Cityscapes/images/train/hamburg/hamburg_000000_056508_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000118_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000021_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000112_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000081_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000059_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000119_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000001_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000077_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000137_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000044_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000071_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000046_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000016_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000006_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000117_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000026_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000047_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000105_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000037_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000075_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000108_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000027_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000029_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000084_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000138_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000030_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000082_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000022_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000056_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000019_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000106_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000034_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000132_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000079_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000131_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000009_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000004_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000126_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000100_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000057_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000113_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000124_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000050_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000032_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000058_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000018_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000123_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000063_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000042_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000068_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000072_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000115_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000062_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000055_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000049_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000060_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000064_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000076_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000085_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000120_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000007_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000033_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000087_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000028_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000111_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000036_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000023_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000089_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000065_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000109_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000002_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000054_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000128_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000038_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000107_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000086_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000020_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000010_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000048_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000122_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000066_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000080_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000008_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000051_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000078_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000043_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000011_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000003_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000074_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000012_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000090_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000140_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000024_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000083_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000143_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000025_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000052_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000114_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000015_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000116_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000139_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000035_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000069_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000017_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000070_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000073_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000125_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000014_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000005_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000031_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000053_000019_leftImg8bit.png
+Cityscapes/images/train/tubingen/tubingen_000134_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000025_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000070_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000048_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000097_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000091_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000006_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000142_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000020_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000129_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000150_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000003_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000110_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000040_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000082_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000023_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000059_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000135_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000138_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000131_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000029_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000137_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000047_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000031_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000076_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000127_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000159_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000073_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000102_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000153_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000086_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000085_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000058_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000123_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000143_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000017_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000141_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000101_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000081_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000125_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000105_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000038_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000032_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000054_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000009_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000088_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000139_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000090_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000111_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000136_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000077_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000072_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000005_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000034_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000166_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000164_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000039_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000067_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000056_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000026_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000162_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000021_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000148_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000098_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000122_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000075_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000161_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000000_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000093_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000099_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000071_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000087_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000066_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000053_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000103_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000116_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000106_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000172_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000080_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000033_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000065_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000035_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000024_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000046_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000114_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000019_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000092_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000119_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000155_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000063_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000094_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000030_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000084_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000016_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000112_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000132_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000154_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000089_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000069_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000062_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000168_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000010_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000133_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000173_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000169_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000042_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000120_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000134_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000145_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000022_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000045_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000109_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000049_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000074_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000041_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000068_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000027_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000050_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000008_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000057_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000147_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000055_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000051_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000018_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000115_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000171_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000140_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000126_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000100_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000052_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000118_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000156_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000013_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000121_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000012_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000002_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000146_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000144_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000036_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000108_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000083_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000015_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000004_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000107_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000095_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000064_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000163_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000014_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000113_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000128_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000011_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000117_000019_leftImg8bit.png
+Cityscapes/images/train/aachen/aachen_000160_000019_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_025434_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_021553_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_029704_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_012353_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_030400_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_015116_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_005252_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_000108_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_023338_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_030701_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_026580_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_024921_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_028378_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_032390_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_026919_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_003096_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_014146_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_029050_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_017342_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_034231_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_020033_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_023510_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_030560_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_011655_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_027954_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_001908_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_002083_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_017042_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_015868_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_013139_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_019125_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_018514_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_022162_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_008305_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_036299_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_019697_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_016342_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_034389_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_034156_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_016863_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_020334_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_010653_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_030221_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_018747_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_021000_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_020873_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_012505_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_020933_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_024604_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_018004_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_021814_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_000316_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_009926_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_027596_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_020624_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_013766_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_025812_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_031257_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_035398_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_008584_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_034686_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_004447_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_024276_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_014673_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_030111_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_005503_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_007325_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_021222_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_015494_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_026269_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_009574_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_014886_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_017489_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_024362_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_003937_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_018866_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_027075_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_033478_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_019791_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_010160_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_023143_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_023698_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_004608_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_009404_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_011483_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_028638_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_010329_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_032614_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_000926_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_015687_leftImg8bit.png
+Cityscapes/images/train/krefeld/krefeld_000000_006274_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_015849_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_008200_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_037039_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_045004_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_029455_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_044085_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_014537_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_003224_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_046954_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_002140_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_044195_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_020655_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_005970_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_058189_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_053027_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_026356_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_041610_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_040793_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_048379_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_034347_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_054965_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_046200_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_048274_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_055124_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_015587_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_024136_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_052512_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_038773_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_030781_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_034720_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_005599_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_029325_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_020089_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_007897_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_057532_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_051842_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_012675_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_019456_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_023881_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_027007_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_023975_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_047629_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_019282_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_052729_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_043550_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_043653_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_026804_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_001173_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_046572_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_027561_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_003853_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_052649_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_010553_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_017041_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_013814_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_029769_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_013094_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_036051_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_055800_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_012347_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_056361_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_021337_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_042581_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_032681_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_040221_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_056142_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_035768_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_010403_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_032351_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_040294_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_043102_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_027481_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_038855_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_005175_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_039021_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_056601_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_014319_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_043236_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_011170_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_022645_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_038927_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_051059_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_043822_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_031856_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_007342_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_014713_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_057710_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_011971_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_034935_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_009128_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_047499_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_006355_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_045188_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_052887_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_039470_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_016038_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_056457_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_049005_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_053437_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_008017_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_023614_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_029404_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_013205_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_044622_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_051271_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_032210_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_034141_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_037516_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_042992_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_016558_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_005288_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_004752_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_040051_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_014919_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_051536_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_026743_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_034015_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_028460_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_027650_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_025437_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_026183_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_027282_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_027766_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_033457_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_018546_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_001620_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_049269_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_024276_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_023239_leftImg8bit.png
+Cityscapes/images/train/hanover/hanover_000000_029043_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_053604_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_024441_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_040133_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_023276_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_041232_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_052013_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_036562_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_031144_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_009420_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_042770_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_026014_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_042255_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_007780_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_006922_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_004230_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_054276_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_050228_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_034560_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_042382_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_011471_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_046398_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_030889_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_004646_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_019116_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_027998_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_051152_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_046646_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_055937_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_027390_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_035491_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_056800_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_040456_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_046732_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_019672_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_024989_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_025335_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_050398_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_030546_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_018800_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_044344_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_030346_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_045841_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_024719_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_002357_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_045657_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_037298_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_002458_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_030276_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_048508_leftImg8bit.png 
+Cityscapes/images/train/hanover/hanover_000000_028202_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_045446_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_048765_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_035606_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_047870_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_049465_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_009004_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_055592_leftImg8bit.png +Cityscapes/images/train/hanover/hanover_000000_005732_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000018_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000077_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000114_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000036_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000041_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000133_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000089_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000093_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000027_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000058_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000083_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000116_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000053_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000082_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000105_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000084_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000091_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000038_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000043_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000029_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000057_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000101_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000121_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000064_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000039_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000000_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000113_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000034_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000020_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000075_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000109_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000046_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000138_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000025_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000085_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000037_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000092_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000078_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000099_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000107_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000032_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000117_000019_leftImg8bit.png 
+Cityscapes/images/train/weimar/weimar_000028_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000074_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000130_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000096_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000090_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000106_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000111_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000052_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000126_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000065_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000056_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000031_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000019_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000035_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000033_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000127_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000006_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000061_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000071_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000049_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000086_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000055_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000134_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000014_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000135_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000072_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000087_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000003_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000042_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000094_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000129_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000030_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000131_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000128_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000081_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000076_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000007_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000070_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000104_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000066_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000088_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000017_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000073_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000015_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000108_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000102_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000016_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000047_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000098_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000040_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000100_000019_leftImg8bit.png 
+Cityscapes/images/train/weimar/weimar_000050_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000063_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000139_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000136_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000140_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000115_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000112_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000001_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000010_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000103_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000045_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000080_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000095_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000024_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000002_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000026_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000097_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000013_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000132_000019_leftImg8bit.png +Cityscapes/images/train/weimar/weimar_000054_000019_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_015602_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_055934_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_014503_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_054275_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_004660_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_007524_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_045135_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_008771_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_014416_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_023515_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_037090_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_029178_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_019247_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_007813_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_031582_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_020956_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_049399_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_049977_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_059433_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_019891_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_028379_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_008310_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_056330_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_029696_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_049143_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_035255_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_058105_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_025268_leftImg8bit.png 
+Cityscapes/images/train/strasbourg/strasbourg_000000_006995_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_029179_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_020432_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_002949_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_013266_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_034375_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_009246_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_033425_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_043886_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_001722_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_000508_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_030122_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_020653_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_012070_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_002081_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_016481_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_045880_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_002216_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_017540_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_026606_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_042869_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_009333_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_031683_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_033062_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_052497_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_021951_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_005995_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_027156_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_029281_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_024152_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_051661_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_021651_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_016247_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_061472_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_035008_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_035689_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_051877_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_014743_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_036232_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_030120_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_060173_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_013944_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_025907_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_003489_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_017159_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_000113_leftImg8bit.png 
+Cityscapes/images/train/strasbourg/strasbourg_000001_042434_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_042309_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_053976_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_040761_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_026575_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_039231_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_003632_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_025089_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_033838_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_014066_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_022489_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_051934_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_037645_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_031427_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_052430_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_009471_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_017283_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_030269_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_039114_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_016681_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_057517_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_009795_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_008677_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_003159_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_022067_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_006386_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_034387_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_060821_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_036937_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_025772_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_030839_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_028852_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_015506_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_006483_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_017761_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_024945_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_005289_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_065214_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_047619_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_029577_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_035942_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_002354_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_009619_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_028912_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_010755_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_010816_leftImg8bit.png 
+Cityscapes/images/train/strasbourg/strasbourg_000001_055860_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_031223_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_014629_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_009618_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_055273_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_052979_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_011990_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_030706_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_029400_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_015131_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_062691_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_014101_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_061685_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_042558_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_039558_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_011880_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_014584_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_001449_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_004745_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_022151_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_057811_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_057930_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_039703_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_025833_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_036697_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_028628_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_047955_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_032346_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_010640_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_024701_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_025426_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_052050_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_029839_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_032315_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_030017_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_026741_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_064393_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_016376_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_046324_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_024379_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_040620_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_019355_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_025491_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_014258_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_033747_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_034923_leftImg8bit.png 
+Cityscapes/images/train/strasbourg/strasbourg_000001_030539_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_017675_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_003676_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_015974_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_037776_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_035276_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_013654_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_011775_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_029051_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_058373_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_034494_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_026106_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_014033_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_016024_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_034040_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_030941_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_027233_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_040981_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_017469_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_002519_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_013322_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_051574_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_007441_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_019698_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_014931_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_052198_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_017844_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_019229_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_047755_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_019617_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_040564_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_030725_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_027097_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_011225_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_056142_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_060061_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_031976_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_052840_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_029339_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_054639_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_061285_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_013863_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_051317_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_003991_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_062362_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_010372_leftImg8bit.png 
+Cityscapes/images/train/strasbourg/strasbourg_000000_017593_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_007727_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_033129_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_053579_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_061384_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_004951_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_008603_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_051134_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_006264_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_033448_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_056857_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_020904_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_038281_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_016253_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_004983_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_002644_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_029481_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_015764_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_023064_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_006621_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_005219_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_006153_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_023271_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_028556_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_030997_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_026882_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_014235_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_047702_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_018153_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_004248_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_013574_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_026355_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_063808_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_005249_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_013223_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_029729_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_049776_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_064224_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_004106_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_048605_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_017044_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_026998_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_008784_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_010162_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_030324_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_031067_leftImg8bit.png 
+Cityscapes/images/train/strasbourg/strasbourg_000001_052544_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_052297_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_057191_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_017081_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_017450_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_039446_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_029915_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_051448_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_004383_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_034633_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_026316_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_000710_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_041215_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_035713_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_010445_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_036480_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_048121_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_030435_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_016436_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_039374_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_063385_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_029020_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_058954_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_012956_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_026611_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_055698_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_001072_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_025351_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_006106_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_005666_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_033027_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_031272_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_011617_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_018155_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_027771_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_005876_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_004112_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_035562_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_004260_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_031602_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_008576_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_023694_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_028822_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_032660_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_018432_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_053222_leftImg8bit.png 
+Cityscapes/images/train/strasbourg/strasbourg_000001_050098_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_016311_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_009097_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_010049_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_059914_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_013767_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_031116_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_028240_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_013914_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_003846_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_000778_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_032962_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_034097_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_047336_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_062542_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_022363_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_029980_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000000_018358_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_001901_leftImg8bit.png +Cityscapes/images/train/strasbourg/strasbourg_000001_042235_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000004_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000029_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000119_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000021_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000081_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000077_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000085_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000040_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000006_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000046_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000140_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000010_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000032_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000078_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000036_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000037_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000045_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000023_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000005_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000087_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000109_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000103_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000069_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000147_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000148_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000128_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000122_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000132_000019_leftImg8bit.png 
+Cityscapes/images/train/cologne/cologne_000080_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000144_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000143_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000101_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000152_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000089_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000093_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000071_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000126_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000053_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000033_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000051_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000055_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000044_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000075_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000083_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000130_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000129_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000066_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000135_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000092_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000073_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000019_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000039_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000104_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000138_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000116_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000043_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000031_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000088_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000011_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000094_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000113_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000015_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000098_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000014_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000145_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000076_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000142_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000007_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000125_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000079_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000054_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000102_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000123_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000091_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000124_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000017_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000118_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000057_000019_leftImg8bit.png 
+Cityscapes/images/train/cologne/cologne_000137_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000022_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000026_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000082_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000012_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000000_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000139_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000072_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000041_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000003_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000008_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000063_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000061_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000064_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000117_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000020_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000131_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000025_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000100_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000030_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000009_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000146_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000084_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000105_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000065_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000047_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000106_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000028_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000108_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000068_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000074_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000067_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000121_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000114_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000134_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000150_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000127_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000059_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000034_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000107_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000095_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000096_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000048_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000052_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000070_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000120_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000001_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000042_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000049_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000141_000019_leftImg8bit.png 
+Cityscapes/images/train/cologne/cologne_000099_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000111_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000058_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000018_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000056_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000090_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000038_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000002_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000136_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000086_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000027_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000133_000019_leftImg8bit.png +Cityscapes/images/train/cologne/cologne_000035_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000063_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000051_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000067_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000100_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000068_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000052_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000000_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000021_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000091_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000017_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000087_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000098_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000059_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000070_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000071_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000054_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000034_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000086_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000083_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000060_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000106_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000102_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000104_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000072_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000016_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000105_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000074_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000108_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000064_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000007_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000012_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000011_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000097_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000065_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000061_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000053_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000103_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000036_000019_leftImg8bit.png 
+Cityscapes/images/train/erfurt/erfurt_000062_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000048_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000090_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000082_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000022_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000015_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000050_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000084_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000069_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000005_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000055_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000006_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000101_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000004_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000037_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000032_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000024_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000075_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000080_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000040_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000002_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000042_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000049_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000046_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000023_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000014_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000047_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000029_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000025_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000008_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000057_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000066_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000073_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000020_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000001_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000041_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000076_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000043_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000018_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000085_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000026_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000003_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000035_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000031_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000039_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000038_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000056_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000033_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000028_000019_leftImg8bit.png +Cityscapes/images/train/erfurt/erfurt_000093_000019_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_011383_leftImg8bit.png 
+Cityscapes/images/train/monchengladbach/monchengladbach_000000_033683_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_001068_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_010505_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_025215_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_006169_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_020856_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_010733_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_002255_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_004580_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_020596_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_035650_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_020303_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_002478_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_035083_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_021663_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_019500_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_010280_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_009615_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_005876_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_030662_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_030010_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_009930_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_026006_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000001_002229_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_007098_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_035718_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000001_002353_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_006518_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_001294_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_026908_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_010860_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_012672_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000001_000537_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_027628_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_018445_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_003442_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_032540_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_028883_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000001_000054_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_031005_leftImg8bit.png 
+Cityscapes/images/train/monchengladbach/monchengladbach_000000_005138_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_023375_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_009690_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_022748_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_023856_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_021104_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_026305_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_024964_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_013352_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000001_001936_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_018294_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_018114_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_017950_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_007851_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000001_000168_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_031360_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_018575_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_007695_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_034930_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_014685_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_018720_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_033454_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_002972_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_034621_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_031623_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_015685_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_009191_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_000383_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_015928_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000001_000876_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_005686_leftImg8bit.png +Cityscapes/images/train/monchengladbach/monchengladbach_000000_013228_leftImg8bit.png
diff --git a/tracking/docker-build-context/byte_track/datasets/data_path/eth.train b/tracking/docker-build-context/byte_track/datasets/data_path/eth.train
new file mode 100644
index 0000000000000000000000000000000000000000..1a322180a189489398b103c9f703954456defd00
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/datasets/data_path/eth.train
@@ -0,0 +1,2056 @@
+ETHZ/eth01/images/image_00000001_0.png +ETHZ/eth01/images/image_00000002_0.png +ETHZ/eth01/images/image_00000003_0.png +ETHZ/eth01/images/image_00000004_0.png +ETHZ/eth01/images/image_00000005_0.png +ETHZ/eth01/images/image_00000006_0.png +ETHZ/eth01/images/image_00000007_0.png +ETHZ/eth01/images/image_00000008_0.png +ETHZ/eth01/images/image_00000009_0.png
+ETHZ/eth01/images/image_00000010_0.png +ETHZ/eth01/images/image_00000011_0.png +ETHZ/eth01/images/image_00000012_0.png +ETHZ/eth01/images/image_00000013_0.png +ETHZ/eth01/images/image_00000014_0.png +ETHZ/eth01/images/image_00000015_0.png +ETHZ/eth01/images/image_00000016_0.png +ETHZ/eth01/images/image_00000017_0.png +ETHZ/eth01/images/image_00000018_0.png +ETHZ/eth01/images/image_00000019_0.png +ETHZ/eth01/images/image_00000020_0.png +ETHZ/eth01/images/image_00000021_0.png +ETHZ/eth01/images/image_00000022_0.png +ETHZ/eth01/images/image_00000023_0.png +ETHZ/eth01/images/image_00000024_0.png +ETHZ/eth01/images/image_00000025_0.png +ETHZ/eth01/images/image_00000026_0.png +ETHZ/eth01/images/image_00000027_0.png +ETHZ/eth01/images/image_00000028_0.png +ETHZ/eth01/images/image_00000029_0.png +ETHZ/eth01/images/image_00000030_0.png +ETHZ/eth01/images/image_00000031_0.png +ETHZ/eth01/images/image_00000032_0.png +ETHZ/eth01/images/image_00000033_0.png +ETHZ/eth01/images/image_00000034_0.png +ETHZ/eth01/images/image_00000035_0.png +ETHZ/eth01/images/image_00000036_0.png +ETHZ/eth01/images/image_00000037_0.png +ETHZ/eth01/images/image_00000038_0.png +ETHZ/eth01/images/image_00000039_0.png +ETHZ/eth01/images/image_00000040_0.png +ETHZ/eth01/images/image_00000041_0.png +ETHZ/eth01/images/image_00000042_0.png +ETHZ/eth01/images/image_00000043_0.png +ETHZ/eth01/images/image_00000044_0.png +ETHZ/eth01/images/image_00000045_0.png +ETHZ/eth01/images/image_00000046_0.png +ETHZ/eth01/images/image_00000047_0.png +ETHZ/eth01/images/image_00000048_0.png +ETHZ/eth01/images/image_00000049_0.png +ETHZ/eth01/images/image_00000050_0.png +ETHZ/eth01/images/image_00000051_0.png +ETHZ/eth01/images/image_00000052_0.png +ETHZ/eth01/images/image_00000053_0.png +ETHZ/eth01/images/image_00000054_0.png +ETHZ/eth01/images/image_00000055_0.png +ETHZ/eth01/images/image_00000056_0.png +ETHZ/eth01/images/image_00000057_0.png +ETHZ/eth01/images/image_00000058_0.png +ETHZ/eth01/images/image_00000059_0.png +ETHZ/eth01/images/image_00000060_0.png +ETHZ/eth01/images/image_00000061_0.png +ETHZ/eth01/images/image_00000062_0.png +ETHZ/eth01/images/image_00000063_0.png +ETHZ/eth01/images/image_00000064_0.png +ETHZ/eth01/images/image_00000065_0.png +ETHZ/eth01/images/image_00000066_0.png +ETHZ/eth01/images/image_00000067_0.png +ETHZ/eth01/images/image_00000068_0.png +ETHZ/eth01/images/image_00000069_0.png +ETHZ/eth01/images/image_00000070_0.png +ETHZ/eth01/images/image_00000071_0.png +ETHZ/eth01/images/image_00000072_0.png +ETHZ/eth01/images/image_00000073_0.png +ETHZ/eth01/images/image_00000074_0.png +ETHZ/eth01/images/image_00000075_0.png +ETHZ/eth01/images/image_00000076_0.png +ETHZ/eth01/images/image_00000077_0.png +ETHZ/eth01/images/image_00000078_0.png +ETHZ/eth01/images/image_00000079_0.png +ETHZ/eth01/images/image_00000080_0.png +ETHZ/eth01/images/image_00000081_0.png +ETHZ/eth01/images/image_00000082_0.png +ETHZ/eth01/images/image_00000083_0.png +ETHZ/eth01/images/image_00000084_0.png +ETHZ/eth01/images/image_00000085_0.png +ETHZ/eth01/images/image_00000086_0.png +ETHZ/eth01/images/image_00000087_0.png +ETHZ/eth01/images/image_00000088_0.png +ETHZ/eth01/images/image_00000089_0.png +ETHZ/eth01/images/image_00000090_0.png +ETHZ/eth01/images/image_00000091_0.png +ETHZ/eth01/images/image_00000092_0.png +ETHZ/eth01/images/image_00000093_0.png +ETHZ/eth01/images/image_00000094_0.png +ETHZ/eth01/images/image_00000095_0.png +ETHZ/eth01/images/image_00000096_0.png +ETHZ/eth01/images/image_00000097_0.png 
+ETHZ/eth01/images/image_00000098_0.png +ETHZ/eth01/images/image_00000099_0.png +ETHZ/eth01/images/image_00000100_0.png +ETHZ/eth01/images/image_00000101_0.png +ETHZ/eth01/images/image_00000102_0.png +ETHZ/eth01/images/image_00000103_0.png +ETHZ/eth01/images/image_00000104_0.png +ETHZ/eth01/images/image_00000105_0.png +ETHZ/eth01/images/image_00000106_0.png +ETHZ/eth01/images/image_00000107_0.png +ETHZ/eth01/images/image_00000108_0.png +ETHZ/eth01/images/image_00000109_0.png +ETHZ/eth01/images/image_00000110_0.png +ETHZ/eth01/images/image_00000111_0.png +ETHZ/eth01/images/image_00000112_0.png +ETHZ/eth01/images/image_00000113_0.png +ETHZ/eth01/images/image_00000114_0.png +ETHZ/eth01/images/image_00000115_0.png +ETHZ/eth01/images/image_00000116_0.png +ETHZ/eth01/images/image_00000117_0.png +ETHZ/eth01/images/image_00000118_0.png +ETHZ/eth01/images/image_00000119_0.png +ETHZ/eth01/images/image_00000120_0.png +ETHZ/eth01/images/image_00000121_0.png +ETHZ/eth01/images/image_00000122_0.png +ETHZ/eth01/images/image_00000123_0.png +ETHZ/eth01/images/image_00000124_0.png +ETHZ/eth01/images/image_00000125_0.png +ETHZ/eth01/images/image_00000126_0.png +ETHZ/eth01/images/image_00000127_0.png +ETHZ/eth01/images/image_00000128_0.png +ETHZ/eth01/images/image_00000129_0.png +ETHZ/eth01/images/image_00000130_0.png +ETHZ/eth01/images/image_00000131_0.png +ETHZ/eth01/images/image_00000132_0.png +ETHZ/eth01/images/image_00000133_0.png +ETHZ/eth01/images/image_00000134_0.png +ETHZ/eth01/images/image_00000135_0.png +ETHZ/eth01/images/image_00000136_0.png +ETHZ/eth01/images/image_00000137_0.png +ETHZ/eth01/images/image_00000138_0.png +ETHZ/eth01/images/image_00000139_0.png +ETHZ/eth01/images/image_00000140_0.png +ETHZ/eth01/images/image_00000141_0.png +ETHZ/eth01/images/image_00000142_0.png +ETHZ/eth01/images/image_00000143_0.png +ETHZ/eth01/images/image_00000144_0.png +ETHZ/eth01/images/image_00000145_0.png +ETHZ/eth01/images/image_00000146_0.png +ETHZ/eth01/images/image_00000147_0.png +ETHZ/eth01/images/image_00000148_0.png +ETHZ/eth01/images/image_00000149_0.png +ETHZ/eth01/images/image_00000150_0.png +ETHZ/eth01/images/image_00000151_0.png +ETHZ/eth01/images/image_00000152_0.png +ETHZ/eth01/images/image_00000153_0.png +ETHZ/eth01/images/image_00000154_0.png +ETHZ/eth01/images/image_00000155_0.png +ETHZ/eth01/images/image_00000156_0.png +ETHZ/eth01/images/image_00000157_0.png +ETHZ/eth01/images/image_00000158_0.png +ETHZ/eth01/images/image_00000159_0.png +ETHZ/eth01/images/image_00000160_0.png +ETHZ/eth01/images/image_00000161_0.png +ETHZ/eth01/images/image_00000162_0.png +ETHZ/eth01/images/image_00000163_0.png +ETHZ/eth01/images/image_00000164_0.png +ETHZ/eth01/images/image_00000165_0.png +ETHZ/eth01/images/image_00000166_0.png +ETHZ/eth01/images/image_00000167_0.png +ETHZ/eth01/images/image_00000168_0.png +ETHZ/eth01/images/image_00000169_0.png +ETHZ/eth01/images/image_00000170_0.png +ETHZ/eth01/images/image_00000171_0.png +ETHZ/eth01/images/image_00000172_0.png +ETHZ/eth01/images/image_00000173_0.png +ETHZ/eth01/images/image_00000174_0.png +ETHZ/eth01/images/image_00000175_0.png +ETHZ/eth01/images/image_00000176_0.png +ETHZ/eth01/images/image_00000177_0.png +ETHZ/eth01/images/image_00000178_0.png +ETHZ/eth01/images/image_00000179_0.png +ETHZ/eth01/images/image_00000180_0.png +ETHZ/eth01/images/image_00000181_0.png +ETHZ/eth01/images/image_00000182_0.png +ETHZ/eth01/images/image_00000183_0.png +ETHZ/eth01/images/image_00000184_0.png +ETHZ/eth01/images/image_00000185_0.png 
+ETHZ/eth01/images/image_00000186_0.png +ETHZ/eth01/images/image_00000187_0.png +ETHZ/eth01/images/image_00000188_0.png +ETHZ/eth01/images/image_00000189_0.png +ETHZ/eth01/images/image_00000190_0.png +ETHZ/eth01/images/image_00000191_0.png +ETHZ/eth01/images/image_00000192_0.png +ETHZ/eth01/images/image_00000193_0.png +ETHZ/eth01/images/image_00000194_0.png +ETHZ/eth01/images/image_00000195_0.png +ETHZ/eth01/images/image_00000196_0.png +ETHZ/eth01/images/image_00000197_0.png +ETHZ/eth01/images/image_00000198_0.png +ETHZ/eth01/images/image_00000199_0.png +ETHZ/eth01/images/image_00000200_0.png +ETHZ/eth01/images/image_00000201_0.png +ETHZ/eth01/images/image_00000202_0.png +ETHZ/eth01/images/image_00000203_0.png +ETHZ/eth01/images/image_00000204_0.png +ETHZ/eth01/images/image_00000205_0.png +ETHZ/eth01/images/image_00000206_0.png +ETHZ/eth01/images/image_00000207_0.png +ETHZ/eth01/images/image_00000208_0.png +ETHZ/eth01/images/image_00000209_0.png +ETHZ/eth01/images/image_00000210_0.png +ETHZ/eth01/images/image_00000211_0.png +ETHZ/eth01/images/image_00000212_0.png +ETHZ/eth01/images/image_00000213_0.png +ETHZ/eth01/images/image_00000214_0.png +ETHZ/eth01/images/image_00000215_0.png +ETHZ/eth01/images/image_00000216_0.png +ETHZ/eth01/images/image_00000217_0.png +ETHZ/eth01/images/image_00000218_0.png +ETHZ/eth01/images/image_00000219_0.png +ETHZ/eth01/images/image_00000220_0.png +ETHZ/eth01/images/image_00000221_0.png +ETHZ/eth01/images/image_00000222_0.png +ETHZ/eth01/images/image_00000223_0.png +ETHZ/eth01/images/image_00000224_0.png +ETHZ/eth01/images/image_00000225_0.png +ETHZ/eth01/images/image_00000226_0.png +ETHZ/eth01/images/image_00000227_0.png +ETHZ/eth01/images/image_00000228_0.png +ETHZ/eth01/images/image_00000229_0.png +ETHZ/eth01/images/image_00000230_0.png +ETHZ/eth01/images/image_00000231_0.png +ETHZ/eth01/images/image_00000232_0.png +ETHZ/eth01/images/image_00000233_0.png +ETHZ/eth01/images/image_00000234_0.png +ETHZ/eth01/images/image_00000235_0.png +ETHZ/eth01/images/image_00000236_0.png +ETHZ/eth01/images/image_00000237_0.png +ETHZ/eth01/images/image_00000238_0.png +ETHZ/eth01/images/image_00000239_0.png +ETHZ/eth01/images/image_00000240_0.png +ETHZ/eth01/images/image_00000241_0.png +ETHZ/eth01/images/image_00000242_0.png +ETHZ/eth01/images/image_00000243_0.png +ETHZ/eth01/images/image_00000244_0.png +ETHZ/eth01/images/image_00000245_0.png +ETHZ/eth01/images/image_00000246_0.png +ETHZ/eth01/images/image_00000247_0.png +ETHZ/eth01/images/image_00000248_0.png +ETHZ/eth01/images/image_00000249_0.png +ETHZ/eth01/images/image_00000250_0.png +ETHZ/eth01/images/image_00000251_0.png +ETHZ/eth01/images/image_00000252_0.png +ETHZ/eth01/images/image_00000253_0.png +ETHZ/eth01/images/image_00000254_0.png +ETHZ/eth01/images/image_00000255_0.png +ETHZ/eth01/images/image_00000256_0.png +ETHZ/eth01/images/image_00000257_0.png +ETHZ/eth01/images/image_00000258_0.png +ETHZ/eth01/images/image_00000259_0.png +ETHZ/eth01/images/image_00000260_0.png +ETHZ/eth01/images/image_00000261_0.png +ETHZ/eth01/images/image_00000262_0.png +ETHZ/eth01/images/image_00000263_0.png +ETHZ/eth01/images/image_00000264_0.png +ETHZ/eth01/images/image_00000265_0.png +ETHZ/eth01/images/image_00000266_0.png +ETHZ/eth01/images/image_00000267_0.png +ETHZ/eth01/images/image_00000268_0.png +ETHZ/eth01/images/image_00000269_0.png +ETHZ/eth01/images/image_00000270_0.png +ETHZ/eth01/images/image_00000271_0.png +ETHZ/eth01/images/image_00000272_0.png +ETHZ/eth01/images/image_00000273_0.png 
+ETHZ/eth01/images/image_00000274_0.png +ETHZ/eth01/images/image_00000275_0.png +ETHZ/eth01/images/image_00000276_0.png +ETHZ/eth01/images/image_00000277_0.png +ETHZ/eth01/images/image_00000278_0.png +ETHZ/eth01/images/image_00000279_0.png +ETHZ/eth01/images/image_00000280_0.png +ETHZ/eth01/images/image_00000281_0.png +ETHZ/eth01/images/image_00000282_0.png +ETHZ/eth01/images/image_00000283_0.png +ETHZ/eth01/images/image_00000284_0.png +ETHZ/eth01/images/image_00000285_0.png +ETHZ/eth01/images/image_00000286_0.png +ETHZ/eth01/images/image_00000287_0.png +ETHZ/eth01/images/image_00000288_0.png +ETHZ/eth01/images/image_00000289_0.png +ETHZ/eth01/images/image_00000290_0.png +ETHZ/eth01/images/image_00000291_0.png +ETHZ/eth01/images/image_00000292_0.png +ETHZ/eth01/images/image_00000293_0.png +ETHZ/eth01/images/image_00000294_0.png +ETHZ/eth01/images/image_00000295_0.png +ETHZ/eth01/images/image_00000296_0.png +ETHZ/eth01/images/image_00000297_0.png +ETHZ/eth01/images/image_00000298_0.png +ETHZ/eth01/images/image_00000299_0.png +ETHZ/eth01/images/image_00000300_0.png +ETHZ/eth01/images/image_00000301_0.png +ETHZ/eth01/images/image_00000302_0.png +ETHZ/eth01/images/image_00000303_0.png +ETHZ/eth01/images/image_00000304_0.png +ETHZ/eth01/images/image_00000305_0.png +ETHZ/eth01/images/image_00000306_0.png +ETHZ/eth01/images/image_00000307_0.png +ETHZ/eth01/images/image_00000308_0.png +ETHZ/eth01/images/image_00000309_0.png +ETHZ/eth01/images/image_00000310_0.png +ETHZ/eth01/images/image_00000311_0.png +ETHZ/eth01/images/image_00000312_0.png +ETHZ/eth01/images/image_00000313_0.png +ETHZ/eth01/images/image_00000314_0.png +ETHZ/eth01/images/image_00000315_0.png +ETHZ/eth01/images/image_00000316_0.png +ETHZ/eth01/images/image_00000317_0.png +ETHZ/eth01/images/image_00000318_0.png +ETHZ/eth01/images/image_00000319_0.png +ETHZ/eth01/images/image_00000320_0.png +ETHZ/eth01/images/image_00000321_0.png +ETHZ/eth01/images/image_00000322_0.png +ETHZ/eth01/images/image_00000323_0.png +ETHZ/eth01/images/image_00000324_0.png +ETHZ/eth01/images/image_00000325_0.png +ETHZ/eth01/images/image_00000326_0.png +ETHZ/eth01/images/image_00000327_0.png +ETHZ/eth01/images/image_00000328_0.png +ETHZ/eth01/images/image_00000329_0.png +ETHZ/eth01/images/image_00000330_0.png +ETHZ/eth01/images/image_00000331_0.png +ETHZ/eth01/images/image_00000332_0.png +ETHZ/eth01/images/image_00000333_0.png +ETHZ/eth01/images/image_00000334_0.png +ETHZ/eth01/images/image_00000335_0.png +ETHZ/eth01/images/image_00000336_0.png +ETHZ/eth01/images/image_00000337_0.png +ETHZ/eth01/images/image_00000338_0.png +ETHZ/eth01/images/image_00000339_0.png +ETHZ/eth01/images/image_00000340_0.png +ETHZ/eth01/images/image_00000341_0.png +ETHZ/eth01/images/image_00000342_0.png +ETHZ/eth01/images/image_00000343_0.png +ETHZ/eth01/images/image_00000344_0.png +ETHZ/eth01/images/image_00000345_0.png +ETHZ/eth01/images/image_00000346_0.png +ETHZ/eth01/images/image_00000347_0.png +ETHZ/eth01/images/image_00000348_0.png +ETHZ/eth01/images/image_00000349_0.png +ETHZ/eth01/images/image_00000350_0.png +ETHZ/eth01/images/image_00000351_0.png +ETHZ/eth01/images/image_00000352_0.png +ETHZ/eth01/images/image_00000353_0.png +ETHZ/eth01/images/image_00000354_0.png +ETHZ/eth01/images/image_00000355_0.png +ETHZ/eth01/images/image_00000356_0.png +ETHZ/eth01/images/image_00000357_0.png +ETHZ/eth01/images/image_00000358_0.png +ETHZ/eth01/images/image_00000359_0.png +ETHZ/eth01/images/image_00000360_0.png +ETHZ/eth01/images/image_00000361_0.png 
+ETHZ/eth01/images/image_00000362_0.png +ETHZ/eth01/images/image_00000363_0.png +ETHZ/eth01/images/image_00000364_0.png +ETHZ/eth01/images/image_00000365_0.png +ETHZ/eth01/images/image_00000366_0.png +ETHZ/eth01/images/image_00000367_0.png +ETHZ/eth01/images/image_00000368_0.png +ETHZ/eth01/images/image_00000369_0.png +ETHZ/eth01/images/image_00000370_0.png +ETHZ/eth01/images/image_00000371_0.png +ETHZ/eth01/images/image_00000372_0.png +ETHZ/eth01/images/image_00000373_0.png +ETHZ/eth01/images/image_00000374_0.png +ETHZ/eth01/images/image_00000375_0.png +ETHZ/eth01/images/image_00000376_0.png +ETHZ/eth01/images/image_00000377_0.png +ETHZ/eth01/images/image_00000378_0.png +ETHZ/eth01/images/image_00000379_0.png +ETHZ/eth01/images/image_00000380_0.png +ETHZ/eth01/images/image_00000381_0.png +ETHZ/eth01/images/image_00000382_0.png +ETHZ/eth01/images/image_00000383_0.png +ETHZ/eth01/images/image_00000384_0.png +ETHZ/eth01/images/image_00000385_0.png +ETHZ/eth01/images/image_00000386_0.png +ETHZ/eth01/images/image_00000387_0.png +ETHZ/eth01/images/image_00000388_0.png +ETHZ/eth01/images/image_00000389_0.png +ETHZ/eth01/images/image_00000390_0.png +ETHZ/eth01/images/image_00000391_0.png +ETHZ/eth01/images/image_00000392_0.png +ETHZ/eth01/images/image_00000393_0.png +ETHZ/eth01/images/image_00000394_0.png +ETHZ/eth01/images/image_00000395_0.png +ETHZ/eth01/images/image_00000396_0.png +ETHZ/eth01/images/image_00000397_0.png +ETHZ/eth01/images/image_00000398_0.png +ETHZ/eth01/images/image_00000399_0.png +ETHZ/eth01/images/image_00000400_0.png +ETHZ/eth01/images/image_00000401_0.png +ETHZ/eth01/images/image_00000402_0.png +ETHZ/eth01/images/image_00000403_0.png +ETHZ/eth01/images/image_00000404_0.png +ETHZ/eth01/images/image_00000405_0.png +ETHZ/eth01/images/image_00000406_0.png +ETHZ/eth01/images/image_00000407_0.png +ETHZ/eth01/images/image_00000408_0.png +ETHZ/eth01/images/image_00000409_0.png +ETHZ/eth01/images/image_00000410_0.png +ETHZ/eth01/images/image_00000411_0.png +ETHZ/eth01/images/image_00000412_0.png +ETHZ/eth01/images/image_00000413_0.png +ETHZ/eth01/images/image_00000414_0.png +ETHZ/eth01/images/image_00000415_0.png +ETHZ/eth01/images/image_00000416_0.png +ETHZ/eth01/images/image_00000417_0.png +ETHZ/eth01/images/image_00000418_0.png +ETHZ/eth01/images/image_00000419_0.png +ETHZ/eth01/images/image_00000420_0.png +ETHZ/eth01/images/image_00000421_0.png +ETHZ/eth01/images/image_00000422_0.png +ETHZ/eth01/images/image_00000423_0.png +ETHZ/eth01/images/image_00000424_0.png +ETHZ/eth01/images/image_00000425_0.png +ETHZ/eth01/images/image_00000426_0.png +ETHZ/eth01/images/image_00000427_0.png +ETHZ/eth01/images/image_00000428_0.png +ETHZ/eth01/images/image_00000429_0.png +ETHZ/eth01/images/image_00000430_0.png +ETHZ/eth01/images/image_00000431_0.png +ETHZ/eth01/images/image_00000432_0.png +ETHZ/eth01/images/image_00000433_0.png +ETHZ/eth01/images/image_00000434_0.png +ETHZ/eth01/images/image_00000435_0.png +ETHZ/eth01/images/image_00000436_0.png +ETHZ/eth01/images/image_00000437_0.png +ETHZ/eth01/images/image_00000438_0.png +ETHZ/eth01/images/image_00000439_0.png +ETHZ/eth01/images/image_00000440_0.png +ETHZ/eth01/images/image_00000441_0.png +ETHZ/eth01/images/image_00000442_0.png +ETHZ/eth01/images/image_00000443_0.png +ETHZ/eth01/images/image_00000444_0.png +ETHZ/eth01/images/image_00000445_0.png +ETHZ/eth01/images/image_00000446_0.png +ETHZ/eth01/images/image_00000447_0.png +ETHZ/eth01/images/image_00000448_0.png +ETHZ/eth01/images/image_00000449_0.png 
+ETHZ/eth01/images/image_00000450_0.png +ETHZ/eth01/images/image_00000451_0.png +ETHZ/eth01/images/image_00000452_0.png +ETHZ/eth01/images/image_00000453_0.png +ETHZ/eth01/images/image_00000454_0.png +ETHZ/eth01/images/image_00000455_0.png +ETHZ/eth01/images/image_00000456_0.png +ETHZ/eth01/images/image_00000457_0.png +ETHZ/eth01/images/image_00000458_0.png +ETHZ/eth01/images/image_00000459_0.png +ETHZ/eth01/images/image_00000460_0.png +ETHZ/eth01/images/image_00000461_0.png +ETHZ/eth01/images/image_00000462_0.png +ETHZ/eth01/images/image_00000463_0.png +ETHZ/eth01/images/image_00000464_0.png +ETHZ/eth01/images/image_00000465_0.png +ETHZ/eth01/images/image_00000466_0.png +ETHZ/eth01/images/image_00000467_0.png +ETHZ/eth01/images/image_00000468_0.png +ETHZ/eth01/images/image_00000469_0.png +ETHZ/eth01/images/image_00000470_0.png +ETHZ/eth01/images/image_00000471_0.png +ETHZ/eth01/images/image_00000472_0.png +ETHZ/eth01/images/image_00000473_0.png +ETHZ/eth01/images/image_00000474_0.png +ETHZ/eth01/images/image_00000475_0.png +ETHZ/eth01/images/image_00000476_0.png +ETHZ/eth01/images/image_00000477_0.png +ETHZ/eth01/images/image_00000478_0.png +ETHZ/eth01/images/image_00000479_0.png +ETHZ/eth01/images/image_00000480_0.png +ETHZ/eth01/images/image_00000481_0.png +ETHZ/eth01/images/image_00000482_0.png +ETHZ/eth01/images/image_00000483_0.png +ETHZ/eth01/images/image_00000484_0.png +ETHZ/eth01/images/image_00000485_0.png +ETHZ/eth01/images/image_00000486_0.png +ETHZ/eth01/images/image_00000487_0.png +ETHZ/eth01/images/image_00000488_0.png +ETHZ/eth01/images/image_00000489_0.png +ETHZ/eth01/images/image_00000490_0.png +ETHZ/eth01/images/image_00000491_0.png +ETHZ/eth01/images/image_00000492_0.png +ETHZ/eth01/images/image_00000493_0.png +ETHZ/eth01/images/image_00000494_0.png +ETHZ/eth01/images/image_00000495_0.png +ETHZ/eth01/images/image_00000496_0.png +ETHZ/eth01/images/image_00000497_0.png +ETHZ/eth01/images/image_00000498_0.png +ETHZ/eth01/images/image_00000499_0.png +ETHZ/eth01/images/image_00000500_0.png +ETHZ/eth01/images/image_00000501_0.png +ETHZ/eth01/images/image_00000502_0.png +ETHZ/eth01/images/image_00000503_0.png +ETHZ/eth01/images/image_00000504_0.png +ETHZ/eth01/images/image_00000505_0.png +ETHZ/eth01/images/image_00000506_0.png +ETHZ/eth01/images/image_00000507_0.png +ETHZ/eth01/images/image_00000508_0.png +ETHZ/eth01/images/image_00000509_0.png +ETHZ/eth01/images/image_00000510_0.png +ETHZ/eth01/images/image_00000511_0.png +ETHZ/eth01/images/image_00000512_0.png +ETHZ/eth01/images/image_00000513_0.png +ETHZ/eth01/images/image_00000514_0.png +ETHZ/eth01/images/image_00000515_0.png +ETHZ/eth01/images/image_00000516_0.png +ETHZ/eth01/images/image_00000517_0.png +ETHZ/eth01/images/image_00000518_0.png +ETHZ/eth01/images/image_00000519_0.png +ETHZ/eth01/images/image_00000520_0.png +ETHZ/eth01/images/image_00000521_0.png +ETHZ/eth01/images/image_00000522_0.png +ETHZ/eth01/images/image_00000523_0.png +ETHZ/eth01/images/image_00000524_0.png +ETHZ/eth01/images/image_00000525_0.png +ETHZ/eth01/images/image_00000526_0.png +ETHZ/eth01/images/image_00000527_0.png +ETHZ/eth01/images/image_00000528_0.png +ETHZ/eth01/images/image_00000529_0.png +ETHZ/eth01/images/image_00000530_0.png +ETHZ/eth01/images/image_00000531_0.png +ETHZ/eth01/images/image_00000532_0.png +ETHZ/eth01/images/image_00000533_0.png +ETHZ/eth01/images/image_00000534_0.png +ETHZ/eth01/images/image_00000535_0.png +ETHZ/eth01/images/image_00000536_0.png +ETHZ/eth01/images/image_00000537_0.png 
+ETHZ/eth01/images/image_00000538_0.png +ETHZ/eth01/images/image_00000539_0.png +ETHZ/eth01/images/image_00000540_0.png +ETHZ/eth01/images/image_00000541_0.png +ETHZ/eth01/images/image_00000542_0.png +ETHZ/eth01/images/image_00000543_0.png +ETHZ/eth01/images/image_00000544_0.png +ETHZ/eth01/images/image_00000545_0.png +ETHZ/eth01/images/image_00000546_0.png +ETHZ/eth01/images/image_00000547_0.png +ETHZ/eth01/images/image_00000548_0.png +ETHZ/eth01/images/image_00000549_0.png +ETHZ/eth01/images/image_00000550_0.png +ETHZ/eth01/images/image_00000551_0.png +ETHZ/eth01/images/image_00000552_0.png +ETHZ/eth01/images/image_00000553_0.png +ETHZ/eth01/images/image_00000554_0.png +ETHZ/eth01/images/image_00000555_0.png +ETHZ/eth01/images/image_00000556_0.png +ETHZ/eth01/images/image_00000557_0.png +ETHZ/eth01/images/image_00000558_0.png +ETHZ/eth01/images/image_00000559_0.png +ETHZ/eth01/images/image_00000560_0.png +ETHZ/eth01/images/image_00000561_0.png +ETHZ/eth01/images/image_00000562_0.png +ETHZ/eth01/images/image_00000563_0.png +ETHZ/eth01/images/image_00000564_0.png +ETHZ/eth01/images/image_00000565_0.png +ETHZ/eth01/images/image_00000566_0.png +ETHZ/eth01/images/image_00000567_0.png +ETHZ/eth01/images/image_00000568_0.png +ETHZ/eth01/images/image_00000569_0.png +ETHZ/eth01/images/image_00000570_0.png +ETHZ/eth01/images/image_00000571_0.png +ETHZ/eth01/images/image_00000572_0.png +ETHZ/eth01/images/image_00000573_0.png +ETHZ/eth01/images/image_00000574_0.png +ETHZ/eth01/images/image_00000575_0.png +ETHZ/eth01/images/image_00000576_0.png +ETHZ/eth01/images/image_00000577_0.png +ETHZ/eth01/images/image_00000578_0.png +ETHZ/eth01/images/image_00000579_0.png +ETHZ/eth01/images/image_00000580_0.png +ETHZ/eth01/images/image_00000581_0.png +ETHZ/eth01/images/image_00000582_0.png +ETHZ/eth01/images/image_00000583_0.png +ETHZ/eth01/images/image_00000584_0.png +ETHZ/eth01/images/image_00000585_0.png +ETHZ/eth01/images/image_00000586_0.png +ETHZ/eth01/images/image_00000587_0.png +ETHZ/eth01/images/image_00000588_0.png +ETHZ/eth01/images/image_00000589_0.png +ETHZ/eth01/images/image_00000590_0.png +ETHZ/eth01/images/image_00000591_0.png +ETHZ/eth01/images/image_00000592_0.png +ETHZ/eth01/images/image_00000593_0.png +ETHZ/eth01/images/image_00000594_0.png +ETHZ/eth01/images/image_00000595_0.png +ETHZ/eth01/images/image_00000596_0.png +ETHZ/eth01/images/image_00000597_0.png +ETHZ/eth01/images/image_00000598_0.png +ETHZ/eth01/images/image_00000599_0.png +ETHZ/eth01/images/image_00000600_0.png +ETHZ/eth01/images/image_00000601_0.png +ETHZ/eth01/images/image_00000602_0.png +ETHZ/eth01/images/image_00000603_0.png +ETHZ/eth01/images/image_00000604_0.png +ETHZ/eth01/images/image_00000605_0.png +ETHZ/eth01/images/image_00000606_0.png +ETHZ/eth01/images/image_00000607_0.png +ETHZ/eth01/images/image_00000608_0.png +ETHZ/eth01/images/image_00000609_0.png +ETHZ/eth01/images/image_00000610_0.png +ETHZ/eth01/images/image_00000611_0.png +ETHZ/eth01/images/image_00000612_0.png +ETHZ/eth01/images/image_00000613_0.png +ETHZ/eth01/images/image_00000614_0.png +ETHZ/eth01/images/image_00000615_0.png +ETHZ/eth01/images/image_00000616_0.png +ETHZ/eth01/images/image_00000617_0.png +ETHZ/eth01/images/image_00000618_0.png +ETHZ/eth01/images/image_00000619_0.png +ETHZ/eth01/images/image_00000620_0.png +ETHZ/eth01/images/image_00000621_0.png +ETHZ/eth01/images/image_00000622_0.png +ETHZ/eth01/images/image_00000623_0.png +ETHZ/eth01/images/image_00000624_0.png +ETHZ/eth01/images/image_00000625_0.png 
+ETHZ/eth01/images/image_00000626_0.png +ETHZ/eth01/images/image_00000627_0.png +ETHZ/eth01/images/image_00000628_0.png +ETHZ/eth01/images/image_00000629_0.png +ETHZ/eth01/images/image_00000630_0.png +ETHZ/eth01/images/image_00000631_0.png +ETHZ/eth01/images/image_00000632_0.png +ETHZ/eth01/images/image_00000633_0.png +ETHZ/eth01/images/image_00000634_0.png +ETHZ/eth01/images/image_00000635_0.png +ETHZ/eth01/images/image_00000636_0.png +ETHZ/eth01/images/image_00000637_0.png +ETHZ/eth01/images/image_00000638_0.png +ETHZ/eth01/images/image_00000639_0.png +ETHZ/eth01/images/image_00000640_0.png +ETHZ/eth01/images/image_00000641_0.png +ETHZ/eth01/images/image_00000642_0.png +ETHZ/eth01/images/image_00000643_0.png +ETHZ/eth01/images/image_00000644_0.png +ETHZ/eth01/images/image_00000645_0.png +ETHZ/eth01/images/image_00000646_0.png +ETHZ/eth01/images/image_00000647_0.png +ETHZ/eth01/images/image_00000648_0.png +ETHZ/eth01/images/image_00000649_0.png +ETHZ/eth01/images/image_00000650_0.png +ETHZ/eth01/images/image_00000651_0.png +ETHZ/eth01/images/image_00000652_0.png +ETHZ/eth01/images/image_00000653_0.png +ETHZ/eth01/images/image_00000654_0.png +ETHZ/eth01/images/image_00000655_0.png +ETHZ/eth01/images/image_00000656_0.png +ETHZ/eth01/images/image_00000657_0.png +ETHZ/eth01/images/image_00000658_0.png +ETHZ/eth01/images/image_00000659_0.png +ETHZ/eth01/images/image_00000660_0.png +ETHZ/eth01/images/image_00000661_0.png +ETHZ/eth01/images/image_00000662_0.png +ETHZ/eth01/images/image_00000663_0.png +ETHZ/eth01/images/image_00000664_0.png +ETHZ/eth01/images/image_00000665_0.png +ETHZ/eth01/images/image_00000666_0.png +ETHZ/eth01/images/image_00000667_0.png +ETHZ/eth01/images/image_00000668_0.png +ETHZ/eth01/images/image_00000669_0.png +ETHZ/eth01/images/image_00000670_0.png +ETHZ/eth01/images/image_00000671_0.png +ETHZ/eth01/images/image_00000672_0.png +ETHZ/eth01/images/image_00000673_0.png +ETHZ/eth01/images/image_00000674_0.png +ETHZ/eth01/images/image_00000675_0.png +ETHZ/eth01/images/image_00000676_0.png +ETHZ/eth01/images/image_00000677_0.png +ETHZ/eth01/images/image_00000678_0.png +ETHZ/eth01/images/image_00000679_0.png +ETHZ/eth01/images/image_00000680_0.png +ETHZ/eth01/images/image_00000681_0.png +ETHZ/eth01/images/image_00000682_0.png +ETHZ/eth01/images/image_00000683_0.png +ETHZ/eth01/images/image_00000684_0.png +ETHZ/eth01/images/image_00000685_0.png +ETHZ/eth01/images/image_00000686_0.png +ETHZ/eth01/images/image_00000687_0.png +ETHZ/eth01/images/image_00000688_0.png +ETHZ/eth01/images/image_00000689_0.png +ETHZ/eth01/images/image_00000690_0.png +ETHZ/eth01/images/image_00000691_0.png +ETHZ/eth01/images/image_00000692_0.png +ETHZ/eth01/images/image_00000693_0.png +ETHZ/eth01/images/image_00000694_0.png +ETHZ/eth01/images/image_00000695_0.png +ETHZ/eth01/images/image_00000696_0.png +ETHZ/eth01/images/image_00000697_0.png +ETHZ/eth01/images/image_00000698_0.png +ETHZ/eth01/images/image_00000699_0.png +ETHZ/eth01/images/image_00000700_0.png +ETHZ/eth01/images/image_00000701_0.png +ETHZ/eth01/images/image_00000702_0.png +ETHZ/eth01/images/image_00000703_0.png +ETHZ/eth01/images/image_00000704_0.png +ETHZ/eth01/images/image_00000705_0.png +ETHZ/eth01/images/image_00000706_0.png +ETHZ/eth01/images/image_00000707_0.png +ETHZ/eth01/images/image_00000708_0.png +ETHZ/eth01/images/image_00000709_0.png +ETHZ/eth01/images/image_00000710_0.png +ETHZ/eth01/images/image_00000711_0.png +ETHZ/eth01/images/image_00000712_0.png +ETHZ/eth01/images/image_00000713_0.png 
+ETHZ/eth01/images/image_00000714_0.png +ETHZ/eth01/images/image_00000715_0.png +ETHZ/eth01/images/image_00000716_0.png +ETHZ/eth01/images/image_00000717_0.png +ETHZ/eth01/images/image_00000718_0.png +ETHZ/eth01/images/image_00000719_0.png +ETHZ/eth01/images/image_00000720_0.png +ETHZ/eth01/images/image_00000721_0.png +ETHZ/eth01/images/image_00000722_0.png +ETHZ/eth01/images/image_00000723_0.png +ETHZ/eth01/images/image_00000724_0.png +ETHZ/eth01/images/image_00000725_0.png +ETHZ/eth01/images/image_00000726_0.png +ETHZ/eth01/images/image_00000727_0.png +ETHZ/eth01/images/image_00000728_0.png +ETHZ/eth01/images/image_00000729_0.png +ETHZ/eth01/images/image_00000730_0.png +ETHZ/eth01/images/image_00000731_0.png +ETHZ/eth01/images/image_00000732_0.png +ETHZ/eth01/images/image_00000733_0.png +ETHZ/eth01/images/image_00000734_0.png +ETHZ/eth01/images/image_00000735_0.png +ETHZ/eth01/images/image_00000736_0.png +ETHZ/eth01/images/image_00000737_0.png +ETHZ/eth01/images/image_00000738_0.png +ETHZ/eth01/images/image_00000739_0.png +ETHZ/eth01/images/image_00000740_0.png +ETHZ/eth01/images/image_00000741_0.png +ETHZ/eth01/images/image_00000742_0.png +ETHZ/eth01/images/image_00000743_0.png +ETHZ/eth01/images/image_00000744_0.png +ETHZ/eth01/images/image_00000745_0.png +ETHZ/eth01/images/image_00000746_0.png +ETHZ/eth01/images/image_00000747_0.png +ETHZ/eth01/images/image_00000748_0.png +ETHZ/eth01/images/image_00000749_0.png +ETHZ/eth01/images/image_00000750_0.png +ETHZ/eth01/images/image_00000751_0.png +ETHZ/eth01/images/image_00000752_0.png +ETHZ/eth01/images/image_00000753_0.png +ETHZ/eth01/images/image_00000754_0.png +ETHZ/eth01/images/image_00000755_0.png +ETHZ/eth01/images/image_00000756_0.png +ETHZ/eth01/images/image_00000757_0.png +ETHZ/eth01/images/image_00000758_0.png +ETHZ/eth01/images/image_00000759_0.png +ETHZ/eth01/images/image_00000760_0.png +ETHZ/eth01/images/image_00000761_0.png +ETHZ/eth01/images/image_00000762_0.png +ETHZ/eth01/images/image_00000763_0.png +ETHZ/eth01/images/image_00000764_0.png +ETHZ/eth01/images/image_00000765_0.png +ETHZ/eth01/images/image_00000766_0.png +ETHZ/eth01/images/image_00000767_0.png +ETHZ/eth01/images/image_00000768_0.png +ETHZ/eth01/images/image_00000769_0.png +ETHZ/eth01/images/image_00000770_0.png +ETHZ/eth01/images/image_00000771_0.png +ETHZ/eth01/images/image_00000772_0.png +ETHZ/eth01/images/image_00000773_0.png +ETHZ/eth01/images/image_00000774_0.png +ETHZ/eth01/images/image_00000775_0.png +ETHZ/eth01/images/image_00000776_0.png +ETHZ/eth01/images/image_00000777_0.png +ETHZ/eth01/images/image_00000778_0.png +ETHZ/eth01/images/image_00000779_0.png +ETHZ/eth01/images/image_00000780_0.png +ETHZ/eth01/images/image_00000781_0.png +ETHZ/eth01/images/image_00000782_0.png +ETHZ/eth01/images/image_00000783_0.png +ETHZ/eth01/images/image_00000784_0.png +ETHZ/eth01/images/image_00000785_0.png +ETHZ/eth01/images/image_00000786_0.png +ETHZ/eth01/images/image_00000787_0.png +ETHZ/eth01/images/image_00000788_0.png +ETHZ/eth01/images/image_00000789_0.png +ETHZ/eth01/images/image_00000790_0.png +ETHZ/eth01/images/image_00000791_0.png +ETHZ/eth01/images/image_00000792_0.png +ETHZ/eth01/images/image_00000793_0.png +ETHZ/eth01/images/image_00000794_0.png +ETHZ/eth01/images/image_00000795_0.png +ETHZ/eth01/images/image_00000796_0.png +ETHZ/eth01/images/image_00000797_0.png +ETHZ/eth01/images/image_00000798_0.png +ETHZ/eth01/images/image_00000799_0.png +ETHZ/eth01/images/image_00000800_0.png +ETHZ/eth01/images/image_00000801_0.png 
+ETHZ/eth01/images/image_00000802_0.png +ETHZ/eth01/images/image_00000803_0.png +ETHZ/eth01/images/image_00000804_0.png +ETHZ/eth01/images/image_00000805_0.png +ETHZ/eth01/images/image_00000806_0.png +ETHZ/eth01/images/image_00000807_0.png +ETHZ/eth01/images/image_00000808_0.png +ETHZ/eth01/images/image_00000809_0.png +ETHZ/eth01/images/image_00000810_0.png +ETHZ/eth01/images/image_00000811_0.png +ETHZ/eth01/images/image_00000812_0.png +ETHZ/eth01/images/image_00000813_0.png +ETHZ/eth01/images/image_00000814_0.png +ETHZ/eth01/images/image_00000815_0.png +ETHZ/eth01/images/image_00000816_0.png +ETHZ/eth01/images/image_00000817_0.png +ETHZ/eth01/images/image_00000818_0.png +ETHZ/eth01/images/image_00000819_0.png +ETHZ/eth01/images/image_00000820_0.png +ETHZ/eth01/images/image_00000821_0.png +ETHZ/eth01/images/image_00000822_0.png +ETHZ/eth01/images/image_00000823_0.png +ETHZ/eth01/images/image_00000824_0.png +ETHZ/eth01/images/image_00000825_0.png +ETHZ/eth01/images/image_00000826_0.png +ETHZ/eth01/images/image_00000827_0.png +ETHZ/eth01/images/image_00000828_0.png +ETHZ/eth01/images/image_00000829_0.png +ETHZ/eth01/images/image_00000830_0.png +ETHZ/eth01/images/image_00000831_0.png +ETHZ/eth01/images/image_00000832_0.png +ETHZ/eth01/images/image_00000833_0.png +ETHZ/eth01/images/image_00000834_0.png +ETHZ/eth01/images/image_00000835_0.png +ETHZ/eth01/images/image_00000836_0.png +ETHZ/eth01/images/image_00000837_0.png +ETHZ/eth01/images/image_00000838_0.png +ETHZ/eth01/images/image_00000839_0.png +ETHZ/eth01/images/image_00000840_0.png +ETHZ/eth01/images/image_00000841_0.png +ETHZ/eth01/images/image_00000842_0.png +ETHZ/eth01/images/image_00000843_0.png +ETHZ/eth01/images/image_00000844_0.png +ETHZ/eth01/images/image_00000845_0.png +ETHZ/eth01/images/image_00000846_0.png +ETHZ/eth01/images/image_00000847_0.png +ETHZ/eth01/images/image_00000848_0.png +ETHZ/eth01/images/image_00000849_0.png +ETHZ/eth01/images/image_00000850_0.png +ETHZ/eth01/images/image_00000851_0.png +ETHZ/eth01/images/image_00000852_0.png +ETHZ/eth01/images/image_00000853_0.png +ETHZ/eth01/images/image_00000854_0.png +ETHZ/eth01/images/image_00000855_0.png +ETHZ/eth01/images/image_00000856_0.png +ETHZ/eth01/images/image_00000857_0.png +ETHZ/eth01/images/image_00000858_0.png +ETHZ/eth01/images/image_00000859_0.png +ETHZ/eth01/images/image_00000860_0.png +ETHZ/eth01/images/image_00000861_0.png +ETHZ/eth01/images/image_00000862_0.png +ETHZ/eth01/images/image_00000863_0.png +ETHZ/eth01/images/image_00000864_0.png +ETHZ/eth01/images/image_00000865_0.png +ETHZ/eth01/images/image_00000866_0.png +ETHZ/eth01/images/image_00000867_0.png +ETHZ/eth01/images/image_00000868_0.png +ETHZ/eth01/images/image_00000869_0.png +ETHZ/eth01/images/image_00000870_0.png +ETHZ/eth01/images/image_00000871_0.png +ETHZ/eth01/images/image_00000872_0.png +ETHZ/eth01/images/image_00000873_0.png +ETHZ/eth01/images/image_00000874_0.png +ETHZ/eth01/images/image_00000875_0.png +ETHZ/eth01/images/image_00000876_0.png +ETHZ/eth01/images/image_00000877_0.png +ETHZ/eth01/images/image_00000878_0.png +ETHZ/eth01/images/image_00000879_0.png +ETHZ/eth01/images/image_00000880_0.png +ETHZ/eth01/images/image_00000881_0.png +ETHZ/eth01/images/image_00000882_0.png +ETHZ/eth01/images/image_00000883_0.png +ETHZ/eth01/images/image_00000884_0.png +ETHZ/eth01/images/image_00000885_0.png +ETHZ/eth01/images/image_00000886_0.png +ETHZ/eth01/images/image_00000887_0.png +ETHZ/eth01/images/image_00000888_0.png +ETHZ/eth01/images/image_00000889_0.png 
+ETHZ/eth01/images/image_00000890_0.png +ETHZ/eth01/images/image_00000891_0.png +ETHZ/eth01/images/image_00000892_0.png +ETHZ/eth01/images/image_00000893_0.png +ETHZ/eth01/images/image_00000894_0.png +ETHZ/eth01/images/image_00000895_0.png +ETHZ/eth01/images/image_00000896_0.png +ETHZ/eth01/images/image_00000897_0.png +ETHZ/eth01/images/image_00000898_0.png +ETHZ/eth01/images/image_00000899_0.png +ETHZ/eth01/images/image_00000900_0.png +ETHZ/eth01/images/image_00000901_0.png +ETHZ/eth01/images/image_00000902_0.png +ETHZ/eth01/images/image_00000903_0.png +ETHZ/eth01/images/image_00000904_0.png +ETHZ/eth01/images/image_00000905_0.png +ETHZ/eth01/images/image_00000906_0.png +ETHZ/eth01/images/image_00000907_0.png +ETHZ/eth01/images/image_00000908_0.png +ETHZ/eth01/images/image_00000909_0.png +ETHZ/eth01/images/image_00000910_0.png +ETHZ/eth01/images/image_00000911_0.png +ETHZ/eth01/images/image_00000912_0.png +ETHZ/eth01/images/image_00000913_0.png +ETHZ/eth01/images/image_00000914_0.png +ETHZ/eth01/images/image_00000915_0.png +ETHZ/eth01/images/image_00000916_0.png +ETHZ/eth01/images/image_00000917_0.png +ETHZ/eth01/images/image_00000918_0.png +ETHZ/eth01/images/image_00000919_0.png +ETHZ/eth01/images/image_00000920_0.png +ETHZ/eth01/images/image_00000921_0.png +ETHZ/eth01/images/image_00000922_0.png +ETHZ/eth01/images/image_00000923_0.png +ETHZ/eth01/images/image_00000924_0.png +ETHZ/eth01/images/image_00000925_0.png +ETHZ/eth01/images/image_00000926_0.png +ETHZ/eth01/images/image_00000927_0.png +ETHZ/eth01/images/image_00000928_0.png +ETHZ/eth01/images/image_00000929_0.png +ETHZ/eth01/images/image_00000930_0.png +ETHZ/eth01/images/image_00000931_0.png +ETHZ/eth01/images/image_00000932_0.png +ETHZ/eth01/images/image_00000933_0.png +ETHZ/eth01/images/image_00000934_0.png +ETHZ/eth01/images/image_00000935_0.png +ETHZ/eth01/images/image_00000936_0.png +ETHZ/eth01/images/image_00000937_0.png +ETHZ/eth01/images/image_00000938_0.png +ETHZ/eth01/images/image_00000939_0.png +ETHZ/eth01/images/image_00000940_0.png +ETHZ/eth01/images/image_00000941_0.png +ETHZ/eth01/images/image_00000942_0.png +ETHZ/eth01/images/image_00000943_0.png +ETHZ/eth01/images/image_00000944_0.png +ETHZ/eth01/images/image_00000945_0.png +ETHZ/eth01/images/image_00000946_0.png +ETHZ/eth01/images/image_00000947_0.png +ETHZ/eth01/images/image_00000948_0.png +ETHZ/eth01/images/image_00000949_0.png +ETHZ/eth01/images/image_00000950_0.png +ETHZ/eth01/images/image_00000951_0.png +ETHZ/eth01/images/image_00000952_0.png +ETHZ/eth01/images/image_00000953_0.png +ETHZ/eth01/images/image_00000954_0.png +ETHZ/eth01/images/image_00000955_0.png +ETHZ/eth01/images/image_00000956_0.png +ETHZ/eth01/images/image_00000957_0.png +ETHZ/eth01/images/image_00000958_0.png +ETHZ/eth01/images/image_00000959_0.png +ETHZ/eth01/images/image_00000960_0.png +ETHZ/eth01/images/image_00000961_0.png +ETHZ/eth01/images/image_00000962_0.png +ETHZ/eth01/images/image_00000963_0.png +ETHZ/eth01/images/image_00000964_0.png +ETHZ/eth01/images/image_00000965_0.png +ETHZ/eth01/images/image_00000966_0.png +ETHZ/eth01/images/image_00000967_0.png +ETHZ/eth01/images/image_00000968_0.png +ETHZ/eth01/images/image_00000969_0.png +ETHZ/eth01/images/image_00000970_0.png +ETHZ/eth01/images/image_00000971_0.png +ETHZ/eth01/images/image_00000972_0.png +ETHZ/eth01/images/image_00000973_0.png +ETHZ/eth01/images/image_00000974_0.png +ETHZ/eth01/images/image_00000975_0.png +ETHZ/eth01/images/image_00000976_0.png +ETHZ/eth01/images/image_00000977_0.png 
+ETHZ/eth01/images/image_00000978_0.png +ETHZ/eth01/images/image_00000979_0.png +ETHZ/eth01/images/image_00000980_0.png +ETHZ/eth01/images/image_00000981_0.png +ETHZ/eth01/images/image_00000982_0.png +ETHZ/eth01/images/image_00000983_0.png +ETHZ/eth01/images/image_00000984_0.png +ETHZ/eth01/images/image_00000985_0.png +ETHZ/eth01/images/image_00000986_0.png +ETHZ/eth01/images/image_00000987_0.png +ETHZ/eth01/images/image_00000988_0.png +ETHZ/eth01/images/image_00000989_0.png +ETHZ/eth01/images/image_00000990_0.png +ETHZ/eth01/images/image_00000991_0.png +ETHZ/eth01/images/image_00000992_0.png +ETHZ/eth01/images/image_00000993_0.png +ETHZ/eth01/images/image_00000994_0.png +ETHZ/eth01/images/image_00000995_0.png +ETHZ/eth01/images/image_00000996_0.png +ETHZ/eth01/images/image_00000997_0.png +ETHZ/eth01/images/image_00000998_0.png +ETHZ/eth01/images/image_00000999_0.png +ETHZ/eth02/images/image_00000000_0.png +ETHZ/eth02/images/image_00000001_0.png +ETHZ/eth02/images/image_00000002_0.png +ETHZ/eth02/images/image_00000003_0.png +ETHZ/eth02/images/image_00000004_0.png +ETHZ/eth02/images/image_00000005_0.png +ETHZ/eth02/images/image_00000006_0.png +ETHZ/eth02/images/image_00000007_0.png +ETHZ/eth02/images/image_00000008_0.png +ETHZ/eth02/images/image_00000009_0.png +ETHZ/eth02/images/image_00000010_0.png +ETHZ/eth02/images/image_00000011_0.png +ETHZ/eth02/images/image_00000012_0.png +ETHZ/eth02/images/image_00000013_0.png +ETHZ/eth02/images/image_00000014_0.png +ETHZ/eth02/images/image_00000015_0.png +ETHZ/eth02/images/image_00000016_0.png +ETHZ/eth02/images/image_00000017_0.png +ETHZ/eth02/images/image_00000018_0.png +ETHZ/eth02/images/image_00000019_0.png +ETHZ/eth02/images/image_00000020_0.png +ETHZ/eth02/images/image_00000021_0.png +ETHZ/eth02/images/image_00000022_0.png +ETHZ/eth02/images/image_00000023_0.png +ETHZ/eth02/images/image_00000024_0.png +ETHZ/eth02/images/image_00000025_0.png +ETHZ/eth02/images/image_00000026_0.png +ETHZ/eth02/images/image_00000027_0.png +ETHZ/eth02/images/image_00000028_0.png +ETHZ/eth02/images/image_00000029_0.png +ETHZ/eth02/images/image_00000030_0.png +ETHZ/eth02/images/image_00000031_0.png +ETHZ/eth02/images/image_00000032_0.png +ETHZ/eth02/images/image_00000033_0.png +ETHZ/eth02/images/image_00000034_0.png +ETHZ/eth02/images/image_00000035_0.png +ETHZ/eth02/images/image_00000036_0.png +ETHZ/eth02/images/image_00000037_0.png +ETHZ/eth02/images/image_00000038_0.png +ETHZ/eth02/images/image_00000039_0.png +ETHZ/eth02/images/image_00000040_0.png +ETHZ/eth02/images/image_00000041_0.png +ETHZ/eth02/images/image_00000042_0.png +ETHZ/eth02/images/image_00000043_0.png +ETHZ/eth02/images/image_00000044_0.png +ETHZ/eth02/images/image_00000045_0.png +ETHZ/eth02/images/image_00000046_0.png +ETHZ/eth02/images/image_00000047_0.png +ETHZ/eth02/images/image_00000048_0.png +ETHZ/eth02/images/image_00000049_0.png +ETHZ/eth02/images/image_00000050_0.png +ETHZ/eth02/images/image_00000051_0.png +ETHZ/eth02/images/image_00000052_0.png +ETHZ/eth02/images/image_00000053_0.png +ETHZ/eth02/images/image_00000054_0.png +ETHZ/eth02/images/image_00000055_0.png +ETHZ/eth02/images/image_00000056_0.png +ETHZ/eth02/images/image_00000057_0.png +ETHZ/eth02/images/image_00000058_0.png +ETHZ/eth02/images/image_00000059_0.png +ETHZ/eth02/images/image_00000060_0.png +ETHZ/eth02/images/image_00000061_0.png +ETHZ/eth02/images/image_00000062_0.png +ETHZ/eth02/images/image_00000063_0.png +ETHZ/eth02/images/image_00000064_0.png +ETHZ/eth02/images/image_00000065_0.png 
+ETHZ/eth02/images/image_00000066_0.png +ETHZ/eth02/images/image_00000067_0.png +ETHZ/eth02/images/image_00000068_0.png +ETHZ/eth02/images/image_00000069_0.png +ETHZ/eth02/images/image_00000070_0.png +ETHZ/eth02/images/image_00000071_0.png +ETHZ/eth02/images/image_00000072_0.png +ETHZ/eth02/images/image_00000073_0.png +ETHZ/eth02/images/image_00000074_0.png +ETHZ/eth02/images/image_00000075_0.png +ETHZ/eth02/images/image_00000076_0.png +ETHZ/eth02/images/image_00000077_0.png +ETHZ/eth02/images/image_00000078_0.png +ETHZ/eth02/images/image_00000079_0.png +ETHZ/eth02/images/image_00000080_0.png +ETHZ/eth02/images/image_00000081_0.png +ETHZ/eth02/images/image_00000082_0.png +ETHZ/eth02/images/image_00000083_0.png +ETHZ/eth02/images/image_00000084_0.png +ETHZ/eth02/images/image_00000085_0.png +ETHZ/eth02/images/image_00000086_0.png +ETHZ/eth02/images/image_00000087_0.png +ETHZ/eth02/images/image_00000088_0.png +ETHZ/eth02/images/image_00000089_0.png +ETHZ/eth02/images/image_00000090_0.png +ETHZ/eth02/images/image_00000091_0.png +ETHZ/eth02/images/image_00000092_0.png +ETHZ/eth02/images/image_00000093_0.png +ETHZ/eth02/images/image_00000094_0.png +ETHZ/eth02/images/image_00000095_0.png +ETHZ/eth02/images/image_00000096_0.png +ETHZ/eth02/images/image_00000097_0.png +ETHZ/eth02/images/image_00000098_0.png +ETHZ/eth02/images/image_00000099_0.png +ETHZ/eth02/images/image_00000100_0.png +ETHZ/eth02/images/image_00000101_0.png +ETHZ/eth02/images/image_00000102_0.png +ETHZ/eth02/images/image_00000103_0.png +ETHZ/eth02/images/image_00000104_0.png +ETHZ/eth02/images/image_00000105_0.png +ETHZ/eth02/images/image_00000106_0.png +ETHZ/eth02/images/image_00000107_0.png +ETHZ/eth02/images/image_00000108_0.png +ETHZ/eth02/images/image_00000109_0.png +ETHZ/eth02/images/image_00000110_0.png +ETHZ/eth02/images/image_00000111_0.png +ETHZ/eth02/images/image_00000112_0.png +ETHZ/eth02/images/image_00000113_0.png +ETHZ/eth02/images/image_00000114_0.png +ETHZ/eth02/images/image_00000115_0.png +ETHZ/eth02/images/image_00000116_0.png +ETHZ/eth02/images/image_00000117_0.png +ETHZ/eth02/images/image_00000118_0.png +ETHZ/eth02/images/image_00000119_0.png +ETHZ/eth02/images/image_00000120_0.png +ETHZ/eth02/images/image_00000121_0.png +ETHZ/eth02/images/image_00000122_0.png +ETHZ/eth02/images/image_00000123_0.png +ETHZ/eth02/images/image_00000124_0.png +ETHZ/eth02/images/image_00000125_0.png +ETHZ/eth02/images/image_00000126_0.png +ETHZ/eth02/images/image_00000127_0.png +ETHZ/eth02/images/image_00000128_0.png +ETHZ/eth02/images/image_00000129_0.png +ETHZ/eth02/images/image_00000130_0.png +ETHZ/eth02/images/image_00000131_0.png +ETHZ/eth02/images/image_00000132_0.png +ETHZ/eth02/images/image_00000133_0.png +ETHZ/eth02/images/image_00000134_0.png +ETHZ/eth02/images/image_00000135_0.png +ETHZ/eth02/images/image_00000136_0.png +ETHZ/eth02/images/image_00000137_0.png +ETHZ/eth02/images/image_00000138_0.png +ETHZ/eth02/images/image_00000139_0.png +ETHZ/eth02/images/image_00000140_0.png +ETHZ/eth02/images/image_00000141_0.png +ETHZ/eth02/images/image_00000142_0.png +ETHZ/eth02/images/image_00000143_0.png +ETHZ/eth02/images/image_00000144_0.png +ETHZ/eth02/images/image_00000145_0.png +ETHZ/eth02/images/image_00000146_0.png +ETHZ/eth02/images/image_00000147_0.png +ETHZ/eth02/images/image_00000148_0.png +ETHZ/eth02/images/image_00000149_0.png +ETHZ/eth02/images/image_00000150_0.png +ETHZ/eth02/images/image_00000151_0.png +ETHZ/eth02/images/image_00000152_0.png +ETHZ/eth02/images/image_00000153_0.png 
+ETHZ/eth02/images/image_00000154_0.png +ETHZ/eth02/images/image_00000155_0.png +ETHZ/eth02/images/image_00000156_0.png +ETHZ/eth02/images/image_00000157_0.png +ETHZ/eth02/images/image_00000158_0.png +ETHZ/eth02/images/image_00000159_0.png +ETHZ/eth02/images/image_00000160_0.png +ETHZ/eth02/images/image_00000161_0.png +ETHZ/eth02/images/image_00000162_0.png +ETHZ/eth02/images/image_00000163_0.png +ETHZ/eth02/images/image_00000164_0.png +ETHZ/eth02/images/image_00000165_0.png +ETHZ/eth02/images/image_00000166_0.png +ETHZ/eth02/images/image_00000167_0.png +ETHZ/eth02/images/image_00000168_0.png +ETHZ/eth02/images/image_00000169_0.png +ETHZ/eth02/images/image_00000170_0.png +ETHZ/eth02/images/image_00000171_0.png +ETHZ/eth02/images/image_00000172_0.png +ETHZ/eth02/images/image_00000173_0.png +ETHZ/eth02/images/image_00000174_0.png +ETHZ/eth02/images/image_00000175_0.png +ETHZ/eth02/images/image_00000176_0.png +ETHZ/eth02/images/image_00000177_0.png +ETHZ/eth02/images/image_00000178_0.png +ETHZ/eth02/images/image_00000179_0.png +ETHZ/eth02/images/image_00000180_0.png +ETHZ/eth02/images/image_00000181_0.png +ETHZ/eth02/images/image_00000182_0.png +ETHZ/eth02/images/image_00000183_0.png +ETHZ/eth02/images/image_00000184_0.png +ETHZ/eth02/images/image_00000185_0.png +ETHZ/eth02/images/image_00000186_0.png +ETHZ/eth02/images/image_00000187_0.png +ETHZ/eth02/images/image_00000188_0.png +ETHZ/eth02/images/image_00000189_0.png +ETHZ/eth02/images/image_00000190_0.png +ETHZ/eth02/images/image_00000191_0.png +ETHZ/eth02/images/image_00000192_0.png +ETHZ/eth02/images/image_00000193_0.png +ETHZ/eth02/images/image_00000194_0.png +ETHZ/eth02/images/image_00000195_0.png +ETHZ/eth02/images/image_00000196_0.png +ETHZ/eth02/images/image_00000197_0.png +ETHZ/eth02/images/image_00000198_0.png +ETHZ/eth02/images/image_00000199_0.png +ETHZ/eth02/images/image_00000200_0.png +ETHZ/eth02/images/image_00000201_0.png +ETHZ/eth02/images/image_00000202_0.png +ETHZ/eth02/images/image_00000203_0.png +ETHZ/eth02/images/image_00000204_0.png +ETHZ/eth02/images/image_00000205_0.png +ETHZ/eth02/images/image_00000206_0.png +ETHZ/eth02/images/image_00000207_0.png +ETHZ/eth02/images/image_00000208_0.png +ETHZ/eth02/images/image_00000209_0.png +ETHZ/eth02/images/image_00000210_0.png +ETHZ/eth02/images/image_00000211_0.png +ETHZ/eth02/images/image_00000212_0.png +ETHZ/eth02/images/image_00000213_0.png +ETHZ/eth02/images/image_00000214_0.png +ETHZ/eth02/images/image_00000215_0.png +ETHZ/eth02/images/image_00000216_0.png +ETHZ/eth02/images/image_00000217_0.png +ETHZ/eth02/images/image_00000218_0.png +ETHZ/eth02/images/image_00000219_0.png +ETHZ/eth02/images/image_00000220_0.png +ETHZ/eth02/images/image_00000221_0.png +ETHZ/eth02/images/image_00000222_0.png +ETHZ/eth02/images/image_00000223_0.png +ETHZ/eth02/images/image_00000224_0.png +ETHZ/eth02/images/image_00000225_0.png +ETHZ/eth02/images/image_00000226_0.png +ETHZ/eth02/images/image_00000227_0.png +ETHZ/eth02/images/image_00000228_0.png +ETHZ/eth02/images/image_00000229_0.png +ETHZ/eth02/images/image_00000230_0.png +ETHZ/eth02/images/image_00000231_0.png +ETHZ/eth02/images/image_00000232_0.png +ETHZ/eth02/images/image_00000233_0.png +ETHZ/eth02/images/image_00000234_0.png +ETHZ/eth02/images/image_00000235_0.png +ETHZ/eth02/images/image_00000236_0.png +ETHZ/eth02/images/image_00000237_0.png +ETHZ/eth02/images/image_00000238_0.png +ETHZ/eth02/images/image_00000239_0.png +ETHZ/eth02/images/image_00000240_0.png +ETHZ/eth02/images/image_00000241_0.png 
+ETHZ/eth02/images/image_00000242_0.png +ETHZ/eth02/images/image_00000243_0.png +ETHZ/eth02/images/image_00000244_0.png +ETHZ/eth02/images/image_00000245_0.png +ETHZ/eth02/images/image_00000246_0.png +ETHZ/eth02/images/image_00000247_0.png +ETHZ/eth02/images/image_00000248_0.png +ETHZ/eth02/images/image_00000249_0.png +ETHZ/eth02/images/image_00000250_0.png +ETHZ/eth02/images/image_00000251_0.png +ETHZ/eth02/images/image_00000252_0.png +ETHZ/eth02/images/image_00000253_0.png +ETHZ/eth02/images/image_00000254_0.png +ETHZ/eth02/images/image_00000255_0.png +ETHZ/eth02/images/image_00000256_0.png +ETHZ/eth02/images/image_00000257_0.png +ETHZ/eth02/images/image_00000258_0.png +ETHZ/eth02/images/image_00000259_0.png +ETHZ/eth02/images/image_00000260_0.png +ETHZ/eth02/images/image_00000261_0.png +ETHZ/eth02/images/image_00000262_0.png +ETHZ/eth02/images/image_00000263_0.png +ETHZ/eth02/images/image_00000264_0.png +ETHZ/eth02/images/image_00000265_0.png +ETHZ/eth02/images/image_00000266_0.png +ETHZ/eth02/images/image_00000267_0.png +ETHZ/eth02/images/image_00000268_0.png +ETHZ/eth02/images/image_00000269_0.png +ETHZ/eth02/images/image_00000270_0.png +ETHZ/eth02/images/image_00000271_0.png +ETHZ/eth02/images/image_00000272_0.png +ETHZ/eth02/images/image_00000273_0.png +ETHZ/eth02/images/image_00000274_0.png +ETHZ/eth02/images/image_00000275_0.png +ETHZ/eth02/images/image_00000276_0.png +ETHZ/eth02/images/image_00000277_0.png +ETHZ/eth02/images/image_00000278_0.png +ETHZ/eth02/images/image_00000279_0.png +ETHZ/eth02/images/image_00000280_0.png +ETHZ/eth02/images/image_00000281_0.png +ETHZ/eth02/images/image_00000282_0.png +ETHZ/eth02/images/image_00000283_0.png +ETHZ/eth02/images/image_00000284_0.png +ETHZ/eth02/images/image_00000285_0.png +ETHZ/eth02/images/image_00000286_0.png +ETHZ/eth02/images/image_00000287_0.png +ETHZ/eth02/images/image_00000288_0.png +ETHZ/eth02/images/image_00000289_0.png +ETHZ/eth02/images/image_00000290_0.png +ETHZ/eth02/images/image_00000291_0.png +ETHZ/eth02/images/image_00000292_0.png +ETHZ/eth02/images/image_00000293_0.png +ETHZ/eth02/images/image_00000294_0.png +ETHZ/eth02/images/image_00000295_0.png +ETHZ/eth02/images/image_00000296_0.png +ETHZ/eth02/images/image_00000297_0.png +ETHZ/eth02/images/image_00000298_0.png +ETHZ/eth02/images/image_00000299_0.png +ETHZ/eth02/images/image_00000300_0.png +ETHZ/eth02/images/image_00000301_0.png +ETHZ/eth02/images/image_00000302_0.png +ETHZ/eth02/images/image_00000303_0.png +ETHZ/eth02/images/image_00000304_0.png +ETHZ/eth02/images/image_00000305_0.png +ETHZ/eth02/images/image_00000306_0.png +ETHZ/eth02/images/image_00000307_0.png +ETHZ/eth02/images/image_00000308_0.png +ETHZ/eth02/images/image_00000309_0.png +ETHZ/eth02/images/image_00000310_0.png +ETHZ/eth02/images/image_00000311_0.png +ETHZ/eth02/images/image_00000312_0.png +ETHZ/eth02/images/image_00000313_0.png +ETHZ/eth02/images/image_00000314_0.png +ETHZ/eth02/images/image_00000315_0.png +ETHZ/eth02/images/image_00000316_0.png +ETHZ/eth02/images/image_00000317_0.png +ETHZ/eth02/images/image_00000318_0.png +ETHZ/eth02/images/image_00000319_0.png +ETHZ/eth02/images/image_00000320_0.png +ETHZ/eth02/images/image_00000321_0.png +ETHZ/eth02/images/image_00000322_0.png +ETHZ/eth02/images/image_00000323_0.png +ETHZ/eth02/images/image_00000324_0.png +ETHZ/eth02/images/image_00000325_0.png +ETHZ/eth02/images/image_00000326_0.png +ETHZ/eth02/images/image_00000327_0.png +ETHZ/eth02/images/image_00000328_0.png +ETHZ/eth02/images/image_00000329_0.png 
+ETHZ/eth02/images/image_00000330_0.png +ETHZ/eth02/images/image_00000331_0.png +ETHZ/eth02/images/image_00000332_0.png +ETHZ/eth02/images/image_00000333_0.png +ETHZ/eth02/images/image_00000334_0.png +ETHZ/eth02/images/image_00000335_0.png +ETHZ/eth02/images/image_00000336_0.png +ETHZ/eth02/images/image_00000337_0.png +ETHZ/eth02/images/image_00000338_0.png +ETHZ/eth02/images/image_00000339_0.png +ETHZ/eth02/images/image_00000340_0.png +ETHZ/eth02/images/image_00000341_0.png +ETHZ/eth02/images/image_00000342_0.png +ETHZ/eth02/images/image_00000343_0.png +ETHZ/eth02/images/image_00000344_0.png +ETHZ/eth02/images/image_00000345_0.png +ETHZ/eth02/images/image_00000346_0.png +ETHZ/eth02/images/image_00000347_0.png +ETHZ/eth02/images/image_00000348_0.png +ETHZ/eth02/images/image_00000349_0.png +ETHZ/eth02/images/image_00000350_0.png +ETHZ/eth02/images/image_00000351_0.png +ETHZ/eth02/images/image_00000352_0.png +ETHZ/eth02/images/image_00000353_0.png +ETHZ/eth02/images/image_00000354_0.png +ETHZ/eth02/images/image_00000355_0.png +ETHZ/eth02/images/image_00000356_0.png +ETHZ/eth02/images/image_00000357_0.png +ETHZ/eth02/images/image_00000358_0.png +ETHZ/eth02/images/image_00000359_0.png +ETHZ/eth02/images/image_00000360_0.png +ETHZ/eth02/images/image_00000361_0.png +ETHZ/eth02/images/image_00000362_0.png +ETHZ/eth02/images/image_00000363_0.png +ETHZ/eth02/images/image_00000364_0.png +ETHZ/eth02/images/image_00000365_0.png +ETHZ/eth02/images/image_00000366_0.png +ETHZ/eth02/images/image_00000367_0.png +ETHZ/eth02/images/image_00000368_0.png +ETHZ/eth02/images/image_00000369_0.png +ETHZ/eth02/images/image_00000370_0.png +ETHZ/eth02/images/image_00000371_0.png +ETHZ/eth02/images/image_00000372_0.png +ETHZ/eth02/images/image_00000373_0.png +ETHZ/eth02/images/image_00000374_0.png +ETHZ/eth02/images/image_00000375_0.png +ETHZ/eth02/images/image_00000376_0.png +ETHZ/eth02/images/image_00000377_0.png +ETHZ/eth02/images/image_00000378_0.png +ETHZ/eth02/images/image_00000379_0.png +ETHZ/eth02/images/image_00000380_0.png +ETHZ/eth02/images/image_00000381_0.png +ETHZ/eth02/images/image_00000382_0.png +ETHZ/eth02/images/image_00000383_0.png +ETHZ/eth02/images/image_00000384_0.png +ETHZ/eth02/images/image_00000385_0.png +ETHZ/eth02/images/image_00000386_0.png +ETHZ/eth02/images/image_00000387_0.png +ETHZ/eth02/images/image_00000388_0.png +ETHZ/eth02/images/image_00000389_0.png +ETHZ/eth02/images/image_00000390_0.png +ETHZ/eth02/images/image_00000391_0.png +ETHZ/eth02/images/image_00000392_0.png +ETHZ/eth02/images/image_00000393_0.png +ETHZ/eth02/images/image_00000394_0.png +ETHZ/eth02/images/image_00000395_0.png +ETHZ/eth02/images/image_00000396_0.png +ETHZ/eth02/images/image_00000397_0.png +ETHZ/eth02/images/image_00000398_0.png +ETHZ/eth02/images/image_00000399_0.png +ETHZ/eth02/images/image_00000400_0.png +ETHZ/eth02/images/image_00000401_0.png +ETHZ/eth02/images/image_00000402_0.png +ETHZ/eth02/images/image_00000403_0.png +ETHZ/eth02/images/image_00000404_0.png +ETHZ/eth02/images/image_00000405_0.png +ETHZ/eth02/images/image_00000406_0.png +ETHZ/eth02/images/image_00000407_0.png +ETHZ/eth02/images/image_00000408_0.png +ETHZ/eth02/images/image_00000409_0.png +ETHZ/eth02/images/image_00000410_0.png +ETHZ/eth02/images/image_00000411_0.png +ETHZ/eth02/images/image_00000412_0.png +ETHZ/eth02/images/image_00000413_0.png +ETHZ/eth02/images/image_00000414_0.png +ETHZ/eth02/images/image_00000415_0.png +ETHZ/eth02/images/image_00000416_0.png +ETHZ/eth02/images/image_00000417_0.png 
+ETHZ/eth02/images/image_00000418_0.png +ETHZ/eth02/images/image_00000419_0.png +ETHZ/eth02/images/image_00000420_0.png +ETHZ/eth02/images/image_00000421_0.png +ETHZ/eth02/images/image_00000422_0.png +ETHZ/eth02/images/image_00000423_0.png +ETHZ/eth02/images/image_00000424_0.png +ETHZ/eth02/images/image_00000425_0.png +ETHZ/eth02/images/image_00000426_0.png +ETHZ/eth02/images/image_00000427_0.png +ETHZ/eth02/images/image_00000428_0.png +ETHZ/eth02/images/image_00000429_0.png +ETHZ/eth02/images/image_00000430_0.png +ETHZ/eth02/images/image_00000431_0.png +ETHZ/eth02/images/image_00000432_0.png +ETHZ/eth02/images/image_00000433_0.png +ETHZ/eth02/images/image_00000434_0.png +ETHZ/eth02/images/image_00000435_0.png +ETHZ/eth02/images/image_00000436_0.png +ETHZ/eth02/images/image_00000437_0.png +ETHZ/eth02/images/image_00000438_0.png +ETHZ/eth02/images/image_00000439_0.png +ETHZ/eth02/images/image_00000440_0.png +ETHZ/eth02/images/image_00000441_0.png +ETHZ/eth02/images/image_00000442_0.png +ETHZ/eth02/images/image_00000443_0.png +ETHZ/eth02/images/image_00000444_0.png +ETHZ/eth02/images/image_00000445_0.png +ETHZ/eth03/images/image_00000100_0.png +ETHZ/eth03/images/image_00000101_0.png +ETHZ/eth03/images/image_00000102_0.png +ETHZ/eth03/images/image_00000103_0.png +ETHZ/eth03/images/image_00000104_0.png +ETHZ/eth03/images/image_00000105_0.png +ETHZ/eth03/images/image_00000106_0.png +ETHZ/eth03/images/image_00000107_0.png +ETHZ/eth03/images/image_00000108_0.png +ETHZ/eth03/images/image_00000109_0.png +ETHZ/eth03/images/image_00000110_0.png +ETHZ/eth03/images/image_00000111_0.png +ETHZ/eth03/images/image_00000112_0.png +ETHZ/eth03/images/image_00000113_0.png +ETHZ/eth03/images/image_00000114_0.png +ETHZ/eth03/images/image_00000115_0.png +ETHZ/eth03/images/image_00000116_0.png +ETHZ/eth03/images/image_00000117_0.png +ETHZ/eth03/images/image_00000118_0.png +ETHZ/eth03/images/image_00000119_0.png +ETHZ/eth03/images/image_00000120_0.png +ETHZ/eth03/images/image_00000121_0.png +ETHZ/eth03/images/image_00000122_0.png +ETHZ/eth03/images/image_00000123_0.png +ETHZ/eth03/images/image_00000124_0.png +ETHZ/eth03/images/image_00000125_0.png +ETHZ/eth03/images/image_00000126_0.png +ETHZ/eth03/images/image_00000127_0.png +ETHZ/eth03/images/image_00000128_0.png +ETHZ/eth03/images/image_00000129_0.png +ETHZ/eth03/images/image_00000130_0.png +ETHZ/eth03/images/image_00000131_0.png +ETHZ/eth03/images/image_00000132_0.png +ETHZ/eth03/images/image_00000133_0.png +ETHZ/eth03/images/image_00000134_0.png +ETHZ/eth03/images/image_00000135_0.png +ETHZ/eth03/images/image_00000136_0.png +ETHZ/eth03/images/image_00000137_0.png +ETHZ/eth03/images/image_00000138_0.png +ETHZ/eth03/images/image_00000139_0.png +ETHZ/eth03/images/image_00000140_0.png +ETHZ/eth03/images/image_00000141_0.png +ETHZ/eth03/images/image_00000142_0.png +ETHZ/eth03/images/image_00000143_0.png +ETHZ/eth03/images/image_00000144_0.png +ETHZ/eth03/images/image_00000145_0.png +ETHZ/eth03/images/image_00000146_0.png +ETHZ/eth03/images/image_00000147_0.png +ETHZ/eth03/images/image_00000148_0.png +ETHZ/eth03/images/image_00000149_0.png +ETHZ/eth03/images/image_00000150_0.png +ETHZ/eth03/images/image_00000151_0.png +ETHZ/eth03/images/image_00000152_0.png +ETHZ/eth03/images/image_00000153_0.png +ETHZ/eth03/images/image_00000154_0.png +ETHZ/eth03/images/image_00000155_0.png +ETHZ/eth03/images/image_00000156_0.png +ETHZ/eth03/images/image_00000157_0.png +ETHZ/eth03/images/image_00000158_0.png +ETHZ/eth03/images/image_00000159_0.png 
+ETHZ/eth03/images/image_00000160_0.png +ETHZ/eth03/images/image_00000161_0.png +ETHZ/eth03/images/image_00000162_0.png +ETHZ/eth03/images/image_00000163_0.png +ETHZ/eth03/images/image_00000164_0.png +ETHZ/eth03/images/image_00000165_0.png +ETHZ/eth03/images/image_00000166_0.png +ETHZ/eth03/images/image_00000167_0.png +ETHZ/eth03/images/image_00000168_0.png +ETHZ/eth03/images/image_00000169_0.png +ETHZ/eth03/images/image_00000170_0.png +ETHZ/eth03/images/image_00000171_0.png +ETHZ/eth03/images/image_00000172_0.png +ETHZ/eth03/images/image_00000173_0.png +ETHZ/eth03/images/image_00000174_0.png +ETHZ/eth03/images/image_00000175_0.png +ETHZ/eth03/images/image_00000176_0.png +ETHZ/eth03/images/image_00000177_0.png +ETHZ/eth03/images/image_00000178_0.png +ETHZ/eth03/images/image_00000179_0.png +ETHZ/eth03/images/image_00000180_0.png +ETHZ/eth03/images/image_00000181_0.png +ETHZ/eth03/images/image_00000182_0.png +ETHZ/eth03/images/image_00000183_0.png +ETHZ/eth03/images/image_00000184_0.png +ETHZ/eth03/images/image_00000185_0.png +ETHZ/eth03/images/image_00000186_0.png +ETHZ/eth03/images/image_00000187_0.png +ETHZ/eth03/images/image_00000188_0.png +ETHZ/eth03/images/image_00000189_0.png +ETHZ/eth03/images/image_00000190_0.png +ETHZ/eth03/images/image_00000191_0.png +ETHZ/eth03/images/image_00000192_0.png +ETHZ/eth03/images/image_00000193_0.png +ETHZ/eth03/images/image_00000194_0.png +ETHZ/eth03/images/image_00000195_0.png +ETHZ/eth03/images/image_00000196_0.png +ETHZ/eth03/images/image_00000197_0.png +ETHZ/eth03/images/image_00000198_0.png +ETHZ/eth03/images/image_00000199_0.png +ETHZ/eth03/images/image_00000200_0.png +ETHZ/eth03/images/image_00000201_0.png +ETHZ/eth03/images/image_00000202_0.png +ETHZ/eth03/images/image_00000203_0.png +ETHZ/eth03/images/image_00000204_0.png +ETHZ/eth03/images/image_00000205_0.png +ETHZ/eth03/images/image_00000206_0.png +ETHZ/eth03/images/image_00000207_0.png +ETHZ/eth03/images/image_00000208_0.png +ETHZ/eth03/images/image_00000209_0.png +ETHZ/eth03/images/image_00000210_0.png +ETHZ/eth03/images/image_00000211_0.png +ETHZ/eth03/images/image_00000212_0.png +ETHZ/eth03/images/image_00000213_0.png +ETHZ/eth03/images/image_00000214_0.png +ETHZ/eth03/images/image_00000215_0.png +ETHZ/eth03/images/image_00000216_0.png +ETHZ/eth03/images/image_00000217_0.png +ETHZ/eth03/images/image_00000218_0.png +ETHZ/eth03/images/image_00000219_0.png +ETHZ/eth03/images/image_00000220_0.png +ETHZ/eth03/images/image_00000221_0.png +ETHZ/eth03/images/image_00000222_0.png +ETHZ/eth03/images/image_00000223_0.png +ETHZ/eth03/images/image_00000224_0.png +ETHZ/eth03/images/image_00000225_0.png +ETHZ/eth03/images/image_00000226_0.png +ETHZ/eth03/images/image_00000227_0.png +ETHZ/eth03/images/image_00000228_0.png +ETHZ/eth03/images/image_00000229_0.png +ETHZ/eth03/images/image_00000230_0.png +ETHZ/eth03/images/image_00000231_0.png +ETHZ/eth03/images/image_00000232_0.png +ETHZ/eth03/images/image_00000233_0.png +ETHZ/eth03/images/image_00000234_0.png +ETHZ/eth03/images/image_00000235_0.png +ETHZ/eth03/images/image_00000236_0.png +ETHZ/eth03/images/image_00000237_0.png +ETHZ/eth03/images/image_00000238_0.png +ETHZ/eth03/images/image_00000239_0.png +ETHZ/eth03/images/image_00000240_0.png +ETHZ/eth03/images/image_00000241_0.png +ETHZ/eth03/images/image_00000242_0.png +ETHZ/eth03/images/image_00000243_0.png +ETHZ/eth03/images/image_00000244_0.png +ETHZ/eth03/images/image_00000245_0.png +ETHZ/eth03/images/image_00000246_0.png +ETHZ/eth03/images/image_00000247_0.png 
+ETHZ/eth03/images/image_00000248_0.png +ETHZ/eth03/images/image_00000249_0.png +ETHZ/eth03/images/image_00000250_0.png +ETHZ/eth03/images/image_00000251_0.png +ETHZ/eth03/images/image_00000252_0.png +ETHZ/eth03/images/image_00000253_0.png +ETHZ/eth03/images/image_00000254_0.png +ETHZ/eth03/images/image_00000255_0.png +ETHZ/eth03/images/image_00000256_0.png +ETHZ/eth03/images/image_00000257_0.png +ETHZ/eth03/images/image_00000258_0.png +ETHZ/eth03/images/image_00000259_0.png +ETHZ/eth03/images/image_00000260_0.png +ETHZ/eth03/images/image_00000261_0.png +ETHZ/eth03/images/image_00000262_0.png +ETHZ/eth03/images/image_00000263_0.png +ETHZ/eth03/images/image_00000264_0.png +ETHZ/eth03/images/image_00000265_0.png +ETHZ/eth03/images/image_00000266_0.png +ETHZ/eth03/images/image_00000267_0.png +ETHZ/eth03/images/image_00000268_0.png +ETHZ/eth03/images/image_00000269_0.png +ETHZ/eth03/images/image_00000270_0.png +ETHZ/eth03/images/image_00000271_0.png +ETHZ/eth03/images/image_00000272_0.png +ETHZ/eth03/images/image_00000273_0.png +ETHZ/eth03/images/image_00000274_0.png +ETHZ/eth03/images/image_00000275_0.png +ETHZ/eth03/images/image_00000276_0.png +ETHZ/eth03/images/image_00000277_0.png +ETHZ/eth03/images/image_00000278_0.png +ETHZ/eth03/images/image_00000279_0.png +ETHZ/eth03/images/image_00000280_0.png +ETHZ/eth03/images/image_00000281_0.png +ETHZ/eth03/images/image_00000282_0.png +ETHZ/eth03/images/image_00000283_0.png +ETHZ/eth03/images/image_00000284_0.png +ETHZ/eth03/images/image_00000285_0.png +ETHZ/eth03/images/image_00000286_0.png +ETHZ/eth03/images/image_00000287_0.png +ETHZ/eth03/images/image_00000288_0.png +ETHZ/eth03/images/image_00000289_0.png +ETHZ/eth03/images/image_00000290_0.png +ETHZ/eth03/images/image_00000291_0.png +ETHZ/eth03/images/image_00000292_0.png +ETHZ/eth03/images/image_00000293_0.png +ETHZ/eth03/images/image_00000294_0.png +ETHZ/eth03/images/image_00000295_0.png +ETHZ/eth03/images/image_00000296_0.png +ETHZ/eth03/images/image_00000297_0.png +ETHZ/eth03/images/image_00000298_0.png +ETHZ/eth03/images/image_00000299_0.png +ETHZ/eth03/images/image_00000300_0.png +ETHZ/eth03/images/image_00000301_0.png +ETHZ/eth03/images/image_00000302_0.png +ETHZ/eth03/images/image_00000303_0.png +ETHZ/eth03/images/image_00000304_0.png +ETHZ/eth03/images/image_00000305_0.png +ETHZ/eth03/images/image_00000306_0.png +ETHZ/eth03/images/image_00000307_0.png +ETHZ/eth03/images/image_00000308_0.png +ETHZ/eth03/images/image_00000309_0.png +ETHZ/eth03/images/image_00000310_0.png +ETHZ/eth03/images/image_00000311_0.png +ETHZ/eth03/images/image_00000312_0.png +ETHZ/eth03/images/image_00000313_0.png +ETHZ/eth03/images/image_00000314_0.png +ETHZ/eth03/images/image_00000315_0.png +ETHZ/eth03/images/image_00000316_0.png +ETHZ/eth03/images/image_00000317_0.png +ETHZ/eth03/images/image_00000318_0.png +ETHZ/eth03/images/image_00000319_0.png +ETHZ/eth03/images/image_00000320_0.png +ETHZ/eth03/images/image_00000321_0.png +ETHZ/eth03/images/image_00000322_0.png +ETHZ/eth03/images/image_00000323_0.png +ETHZ/eth03/images/image_00000324_0.png +ETHZ/eth03/images/image_00000325_0.png +ETHZ/eth03/images/image_00000326_0.png +ETHZ/eth03/images/image_00000327_0.png +ETHZ/eth03/images/image_00000328_0.png +ETHZ/eth03/images/image_00000329_0.png +ETHZ/eth03/images/image_00000330_0.png +ETHZ/eth03/images/image_00000331_0.png +ETHZ/eth03/images/image_00000332_0.png +ETHZ/eth03/images/image_00000333_0.png +ETHZ/eth03/images/image_00000334_0.png +ETHZ/eth03/images/image_00000335_0.png 
+ETHZ/eth03/images/image_00000336_0.png +ETHZ/eth03/images/image_00000337_0.png +ETHZ/eth03/images/image_00000338_0.png +ETHZ/eth03/images/image_00000339_0.png +ETHZ/eth03/images/image_00000340_0.png +ETHZ/eth03/images/image_00000341_0.png +ETHZ/eth03/images/image_00000342_0.png +ETHZ/eth03/images/image_00000343_0.png +ETHZ/eth03/images/image_00000344_0.png +ETHZ/eth03/images/image_00000345_0.png +ETHZ/eth03/images/image_00000346_0.png +ETHZ/eth03/images/image_00000347_0.png +ETHZ/eth03/images/image_00000348_0.png +ETHZ/eth03/images/image_00000349_0.png +ETHZ/eth03/images/image_00000350_0.png +ETHZ/eth03/images/image_00000351_0.png +ETHZ/eth03/images/image_00000352_0.png +ETHZ/eth03/images/image_00000353_0.png +ETHZ/eth03/images/image_00000354_0.png +ETHZ/eth03/images/image_00000355_0.png +ETHZ/eth03/images/image_00000356_0.png +ETHZ/eth03/images/image_00000357_0.png +ETHZ/eth03/images/image_00000358_0.png +ETHZ/eth03/images/image_00000359_0.png +ETHZ/eth03/images/image_00000360_0.png +ETHZ/eth03/images/image_00000361_0.png +ETHZ/eth03/images/image_00000362_0.png +ETHZ/eth03/images/image_00000363_0.png +ETHZ/eth03/images/image_00000364_0.png +ETHZ/eth03/images/image_00000365_0.png +ETHZ/eth03/images/image_00000366_0.png +ETHZ/eth03/images/image_00000367_0.png +ETHZ/eth03/images/image_00000368_0.png +ETHZ/eth03/images/image_00000369_0.png +ETHZ/eth03/images/image_00000370_0.png +ETHZ/eth03/images/image_00000371_0.png +ETHZ/eth03/images/image_00000372_0.png +ETHZ/eth03/images/image_00000373_0.png +ETHZ/eth03/images/image_00000374_0.png +ETHZ/eth03/images/image_00000375_0.png +ETHZ/eth03/images/image_00000376_0.png +ETHZ/eth03/images/image_00000377_0.png +ETHZ/eth03/images/image_00000378_0.png +ETHZ/eth03/images/image_00000379_0.png +ETHZ/eth03/images/image_00000380_0.png +ETHZ/eth03/images/image_00000381_0.png +ETHZ/eth03/images/image_00000382_0.png +ETHZ/eth03/images/image_00000383_0.png +ETHZ/eth03/images/image_00000384_0.png +ETHZ/eth03/images/image_00000385_0.png +ETHZ/eth03/images/image_00000386_0.png +ETHZ/eth03/images/image_00000387_0.png +ETHZ/eth03/images/image_00000388_0.png +ETHZ/eth03/images/image_00000389_0.png +ETHZ/eth03/images/image_00000390_0.png +ETHZ/eth03/images/image_00000391_0.png +ETHZ/eth03/images/image_00000392_0.png +ETHZ/eth03/images/image_00000393_0.png +ETHZ/eth03/images/image_00000394_0.png +ETHZ/eth03/images/image_00000395_0.png +ETHZ/eth03/images/image_00000396_0.png +ETHZ/eth03/images/image_00000397_0.png +ETHZ/eth03/images/image_00000398_0.png +ETHZ/eth03/images/image_00000399_0.png +ETHZ/eth03/images/image_00000400_0.png +ETHZ/eth03/images/image_00000401_0.png +ETHZ/eth03/images/image_00000402_0.png +ETHZ/eth03/images/image_00000403_0.png +ETHZ/eth03/images/image_00000404_0.png +ETHZ/eth03/images/image_00000405_0.png +ETHZ/eth03/images/image_00000406_0.png +ETHZ/eth03/images/image_00000407_0.png +ETHZ/eth03/images/image_00000408_0.png +ETHZ/eth03/images/image_00000409_0.png +ETHZ/eth03/images/image_00000410_0.png +ETHZ/eth03/images/image_00000411_0.png +ETHZ/eth03/images/image_00000412_0.png +ETHZ/eth03/images/image_00000413_0.png +ETHZ/eth03/images/image_00000414_0.png +ETHZ/eth03/images/image_00000415_0.png +ETHZ/eth03/images/image_00000416_0.png +ETHZ/eth03/images/image_00000417_0.png +ETHZ/eth03/images/image_00000418_0.png +ETHZ/eth03/images/image_00000419_0.png +ETHZ/eth03/images/image_00000420_0.png +ETHZ/eth03/images/image_00000421_0.png +ETHZ/eth03/images/image_00000422_0.png +ETHZ/eth03/images/image_00000423_0.png 
+ETHZ/eth03/images/image_00000424_0.png +ETHZ/eth03/images/image_00000425_0.png +ETHZ/eth03/images/image_00000426_0.png +ETHZ/eth03/images/image_00000427_0.png +ETHZ/eth03/images/image_00000428_0.png +ETHZ/eth03/images/image_00000429_0.png +ETHZ/eth03/images/image_00000430_0.png +ETHZ/eth03/images/image_00000431_0.png +ETHZ/eth03/images/image_00000432_0.png +ETHZ/eth03/images/image_00000433_0.png +ETHZ/eth03/images/image_00000434_0.png +ETHZ/eth03/images/image_00000435_0.png +ETHZ/eth03/images/image_00000436_0.png +ETHZ/eth03/images/image_00000437_0.png +ETHZ/eth03/images/image_00000438_0.png +ETHZ/eth03/images/image_00000439_0.png +ETHZ/eth03/images/image_00000440_0.png +ETHZ/eth03/images/image_00000441_0.png +ETHZ/eth03/images/image_00000442_0.png +ETHZ/eth03/images/image_00000443_0.png +ETHZ/eth03/images/image_00000444_0.png +ETHZ/eth03/images/image_00000445_0.png +ETHZ/eth03/images/image_00000446_0.png +ETHZ/eth03/images/image_00000447_0.png +ETHZ/eth03/images/image_00000448_0.png +ETHZ/eth03/images/image_00000449_0.png +ETHZ/eth03/images/image_00000450_0.png +ETHZ/eth03/images/image_00000451_0.png +ETHZ/eth03/images/image_00000452_0.png +ETHZ/eth03/images/image_00000453_0.png +ETHZ/eth05/images/image_00000000_0.png +ETHZ/eth05/images/image_00000004_0.png +ETHZ/eth05/images/image_00000008_0.png +ETHZ/eth05/images/image_00000012_0.png +ETHZ/eth05/images/image_00000016_0.png +ETHZ/eth05/images/image_00000022_0.png +ETHZ/eth05/images/image_00000026_0.png +ETHZ/eth05/images/image_00000030_0.png +ETHZ/eth05/images/image_00000034_0.png +ETHZ/eth05/images/image_00000038_0.png +ETHZ/eth05/images/image_00000042_0.png +ETHZ/eth05/images/image_00000046_0.png +ETHZ/eth05/images/image_00000050_0.png +ETHZ/eth05/images/image_00000054_0.png +ETHZ/eth05/images/image_00000058_0.png +ETHZ/eth05/images/image_00000062_0.png +ETHZ/eth05/images/image_00000066_0.png +ETHZ/eth05/images/image_00000070_0.png +ETHZ/eth05/images/image_00000074_0.png +ETHZ/eth05/images/image_00000078_0.png +ETHZ/eth05/images/image_00000082_0.png +ETHZ/eth05/images/image_00000086_0.png +ETHZ/eth05/images/image_00000090_0.png +ETHZ/eth05/images/image_00000094_0.png +ETHZ/eth05/images/image_00000098_0.png +ETHZ/eth05/images/image_00000101_0.png +ETHZ/eth05/images/image_00000105_0.png +ETHZ/eth05/images/image_00000109_0.png +ETHZ/eth05/images/image_00000113_0.png +ETHZ/eth05/images/image_00000115_0.png +ETHZ/eth05/images/image_00000119_0.png +ETHZ/eth05/images/image_00000123_0.png +ETHZ/eth05/images/image_00000127_0.png +ETHZ/eth05/images/image_00000130_0.png +ETHZ/eth05/images/image_00000134_0.png +ETHZ/eth05/images/image_00000138_0.png +ETHZ/eth05/images/image_00000142_0.png +ETHZ/eth05/images/image_00000145_0.png +ETHZ/eth05/images/image_00000149_0.png +ETHZ/eth05/images/image_00000153_0.png +ETHZ/eth05/images/image_00000157_0.png +ETHZ/eth05/images/image_00000161_0.png +ETHZ/eth05/images/image_00000165_0.png +ETHZ/eth05/images/image_00000169_0.png +ETHZ/eth05/images/image_00000173_0.png +ETHZ/eth05/images/image_00000177_0.png +ETHZ/eth05/images/image_00000181_0.png +ETHZ/eth05/images/image_00000185_0.png +ETHZ/eth05/images/image_00000189_0.png +ETHZ/eth05/images/image_00000193_0.png +ETHZ/eth05/images/image_00000197_0.png +ETHZ/eth05/images/image_00000201_0.png +ETHZ/eth05/images/image_00000205_0.png +ETHZ/eth05/images/image_00000209_0.png +ETHZ/eth05/images/image_00000213_0.png +ETHZ/eth05/images/image_00000217_0.png +ETHZ/eth07/images/image_00004900_0.png +ETHZ/eth07/images/image_00004904_0.png 
+ETHZ/eth07/images/image_00004908_0.png +ETHZ/eth07/images/image_00004912_0.png +ETHZ/eth07/images/image_00004916_0.png +ETHZ/eth07/images/image_00004920_0.png +ETHZ/eth07/images/image_00004924_0.png +ETHZ/eth07/images/image_00004928_0.png +ETHZ/eth07/images/image_00004932_0.png +ETHZ/eth07/images/image_00004936_0.png +ETHZ/eth07/images/image_00004940_0.png +ETHZ/eth07/images/image_00004944_0.png +ETHZ/eth07/images/image_00004948_0.png +ETHZ/eth07/images/image_00004952_0.png +ETHZ/eth07/images/image_00004956_0.png +ETHZ/eth07/images/image_00004960_0.png +ETHZ/eth07/images/image_00004964_0.png +ETHZ/eth07/images/image_00004968_0.png +ETHZ/eth07/images/image_00004972_0.png +ETHZ/eth07/images/image_00004976_0.png +ETHZ/eth07/images/image_00004980_0.png +ETHZ/eth07/images/image_00004984_0.png +ETHZ/eth07/images/image_00004988_0.png +ETHZ/eth07/images/image_00004992_0.png +ETHZ/eth07/images/image_00004996_0.png +ETHZ/eth07/images/image_00005000_0.png +ETHZ/eth07/images/image_00005004_0.png +ETHZ/eth07/images/image_00005008_0.png +ETHZ/eth07/images/image_00005012_0.png +ETHZ/eth07/images/image_00005016_0.png +ETHZ/eth07/images/image_00005020_0.png +ETHZ/eth07/images/image_00005024_0.png +ETHZ/eth07/images/image_00005028_0.png +ETHZ/eth07/images/image_00005032_0.png +ETHZ/eth07/images/image_00005036_0.png +ETHZ/eth07/images/image_00005040_0.png +ETHZ/eth07/images/image_00005044_0.png +ETHZ/eth07/images/image_00005048_0.png +ETHZ/eth07/images/image_00005052_0.png +ETHZ/eth07/images/image_00005056_0.png +ETHZ/eth07/images/image_00005060_0.png +ETHZ/eth07/images/image_00005064_0.png +ETHZ/eth07/images/image_00005068_0.png +ETHZ/eth07/images/image_00005072_0.png +ETHZ/eth07/images/image_00005076_0.png +ETHZ/eth07/images/image_00005080_0.png +ETHZ/eth07/images/image_00005084_0.png +ETHZ/eth07/images/image_00005088_0.png +ETHZ/eth07/images/image_00005092_0.png +ETHZ/eth07/images/image_00005096_0.png +ETHZ/eth07/images/image_00005100_0.png +ETHZ/eth07/images/image_00005104_0.png +ETHZ/eth07/images/image_00005108_0.png +ETHZ/eth07/images/image_00005112_0.png +ETHZ/eth07/images/image_00005116_0.png +ETHZ/eth07/images/image_00005120_0.png +ETHZ/eth07/images/image_00005124_0.png +ETHZ/eth07/images/image_00005128_0.png +ETHZ/eth07/images/image_00005132_0.png +ETHZ/eth07/images/image_00005136_0.png +ETHZ/eth07/images/image_00005140_0.png +ETHZ/eth07/images/image_00005144_0.png +ETHZ/eth07/images/image_00005148_0.png +ETHZ/eth07/images/image_00005152_0.png +ETHZ/eth07/images/image_00005156_0.png +ETHZ/eth07/images/image_00005160_0.png +ETHZ/eth07/images/image_00005164_0.png +ETHZ/eth07/images/image_00005168_0.png +ETHZ/eth07/images/image_00005172_0.png +ETHZ/eth07/images/image_00005176_0.png +ETHZ/eth07/images/image_00005180_0.png +ETHZ/eth07/images/image_00005184_0.png +ETHZ/eth07/images/image_00005188_0.png +ETHZ/eth07/images/image_00005192_0.png +ETHZ/eth07/images/image_00005196_0.png +ETHZ/eth07/images/image_00005200_0.png +ETHZ/eth07/images/image_00005204_0.png +ETHZ/eth07/images/image_00005208_0.png +ETHZ/eth07/images/image_00005212_0.png +ETHZ/eth07/images/image_00005216_0.png +ETHZ/eth07/images/image_00005220_0.png +ETHZ/eth07/images/image_00005224_0.png +ETHZ/eth07/images/image_00005228_0.png +ETHZ/eth07/images/image_00005232_0.png +ETHZ/eth07/images/image_00005236_0.png +ETHZ/eth07/images/image_00005240_0.png +ETHZ/eth07/images/image_00005244_0.png +ETHZ/eth07/images/image_00005248_0.png +ETHZ/eth07/images/image_00005252_0.png +ETHZ/eth07/images/image_00005256_0.png 
+ETHZ/eth07/images/image_00005260_0.png +ETHZ/eth07/images/image_00005264_0.png +ETHZ/eth07/images/image_00005268_0.png +ETHZ/eth07/images/image_00005272_0.png +ETHZ/eth07/images/image_00005276_0.png +ETHZ/eth07/images/image_00005280_0.png +ETHZ/eth07/images/image_00005284_0.png +ETHZ/eth07/images/image_00005288_0.png +ETHZ/eth07/images/image_00005292_0.png +ETHZ/eth07/images/image_00005296_0.png +ETHZ/eth07/images/image_00005300_0.png +ETHZ/eth07/images/image_00005304_0.png +ETHZ/eth07/images/image_00005308_0.png +ETHZ/eth07/images/image_00005312_0.png +ETHZ/eth07/images/image_00005316_0.png +ETHZ/eth07/images/image_00005320_0.png +ETHZ/eth07/images/image_00005324_0.png +ETHZ/eth07/images/image_00005328_0.png +ETHZ/eth07/images/image_00005332_0.png +ETHZ/eth07/images/image_00005336_0.png +ETHZ/eth07/images/image_00005340_0.png +ETHZ/eth07/images/image_00005344_0.png +ETHZ/eth07/images/image_00005348_0.png +ETHZ/eth07/images/image_00005352_0.png +ETHZ/eth07/images/image_00005356_0.png +ETHZ/eth07/images/image_00005360_0.png +ETHZ/eth07/images/image_00005364_0.png +ETHZ/eth07/images/image_00005368_0.png +ETHZ/eth07/images/image_00005372_0.png +ETHZ/eth07/images/image_00005376_0.png +ETHZ/eth07/images/image_00005380_0.png +ETHZ/eth07/images/image_00005384_0.png +ETHZ/eth07/images/image_00005388_0.png +ETHZ/eth07/images/image_00005392_0.png +ETHZ/eth07/images/image_00005396_0.png +ETHZ/eth07/images/image_00005400_0.png +ETHZ/eth07/images/image_00005404_0.png +ETHZ/eth07/images/image_00005408_0.png +ETHZ/eth07/images/image_00005412_0.png +ETHZ/eth07/images/image_00005416_0.png +ETHZ/eth07/images/image_00005420_0.png +ETHZ/eth07/images/image_00005424_0.png +ETHZ/eth07/images/image_00005428_0.png +ETHZ/eth07/images/image_00005432_0.png +ETHZ/eth07/images/image_00005436_0.png +ETHZ/eth07/images/image_00005440_0.png +ETHZ/eth07/images/image_00005444_0.png +ETHZ/eth07/images/image_00005448_0.png +ETHZ/eth07/images/image_00005452_0.png +ETHZ/eth07/images/image_00005456_0.png +ETHZ/eth07/images/image_00005460_0.png +ETHZ/eth07/images/image_00005464_0.png +ETHZ/eth07/images/image_00005468_0.png +ETHZ/eth07/images/image_00005472_0.png +ETHZ/eth07/images/image_00005476_0.png +ETHZ/eth07/images/image_00005480_0.png +ETHZ/eth07/images/image_00005484_0.png +ETHZ/eth07/images/image_00005488_0.png +ETHZ/eth07/images/image_00005492_0.png +ETHZ/eth07/images/image_00005496_0.png +ETHZ/eth07/images/image_00005500_0.png +ETHZ/eth07/images/image_00005504_0.png +ETHZ/eth07/images/image_00005508_0.png +ETHZ/eth07/images/image_00005512_0.png +ETHZ/eth07/images/image_00005516_0.png +ETHZ/eth07/images/image_00005520_0.png +ETHZ/eth07/images/image_00005524_0.png +ETHZ/eth07/images/image_00005528_0.png +ETHZ/eth07/images/image_00005532_0.png +ETHZ/eth07/images/image_00005536_0.png +ETHZ/eth07/images/image_00005540_0.png +ETHZ/eth07/images/image_00005544_0.png +ETHZ/eth07/images/image_00005548_0.png +ETHZ/eth07/images/image_00005552_0.png +ETHZ/eth07/images/image_00005556_0.png +ETHZ/eth07/images/image_00005560_0.png +ETHZ/eth07/images/image_00005564_0.png +ETHZ/eth07/images/image_00005568_0.png +ETHZ/eth07/images/image_00005572_0.png +ETHZ/eth07/images/image_00005576_0.png +ETHZ/eth07/images/image_00005580_0.png +ETHZ/eth07/images/image_00005584_0.png +ETHZ/eth07/images/image_00005588_0.png +ETHZ/eth07/images/image_00005592_0.png +ETHZ/eth07/images/image_00005596_0.png +ETHZ/eth07/images/image_00005600_0.png +ETHZ/eth07/images/image_00005604_0.png +ETHZ/eth07/images/image_00005608_0.png 
+ETHZ/eth07/images/image_00005612_0.png +ETHZ/eth07/images/image_00005616_0.png +ETHZ/eth07/images/image_00005620_0.png +ETHZ/eth07/images/image_00005624_0.png +ETHZ/eth07/images/image_00005628_0.png +ETHZ/eth07/images/image_00005632_0.png +ETHZ/eth07/images/image_00005636_0.png +ETHZ/eth07/images/image_00005640_0.png +ETHZ/eth07/images/image_00005644_0.png +ETHZ/eth07/images/image_00005648_0.png +ETHZ/eth07/images/image_00005652_0.png +ETHZ/eth07/images/image_00005656_0.png +ETHZ/eth07/images/image_00005660_0.png +ETHZ/eth07/images/image_00005664_0.png +ETHZ/eth07/images/image_00005668_0.png +ETHZ/eth07/images/image_00005672_0.png +ETHZ/eth07/images/image_00005676_0.png +ETHZ/eth07/images/image_00005680_0.png +ETHZ/eth07/images/image_00005684_0.png +ETHZ/eth07/images/image_00005688_0.png +ETHZ/eth07/images/image_00005692_0.png +ETHZ/eth07/images/image_00005696_0.png +ETHZ/eth07/images/image_00005700_0.png
diff --git a/tracking/docker-build-context/byte_track/deploy/ONNXRuntime/README.md b/tracking/docker-build-context/byte_track/deploy/ONNXRuntime/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..4d0669081db3549f6db4a14189e73640de0688e2
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/deploy/ONNXRuntime/README.md
@@ -0,0 +1,19 @@
+## ByteTrack-ONNXRuntime in Python
+
+This doc introduces how to convert your PyTorch model into ONNX, and how to run an ONNXRuntime demo to verify the conversion.
+
+### Convert Your Model to ONNX
+
+```shell
+cd <ByteTrack_HOME>
+python3 tools/export_onnx.py --output-name bytetrack_s.onnx -f exps/example/mot/yolox_s_mix_det.py -c pretrained/bytetrack_s_mot17.pth.tar
+```
+
+### ONNXRuntime Demo
+
+You can run the ONNX demo at roughly **16 FPS** (measured on a 96-core Intel(R) Xeon(R) Platinum 8163 CPU @ 2.50GHz):
+
+```shell
+cd <ByteTrack_HOME>/deploy/ONNXRuntime
+python3 onnx_inference.py
+```
diff --git a/tracking/docker-build-context/byte_track/deploy/ONNXRuntime/onnx_inference.py b/tracking/docker-build-context/byte_track/deploy/ONNXRuntime/onnx_inference.py
new file mode 100644
index 0000000000000000000000000000000000000000..5929174735c99a2952a5f152f0cba2a0d84770bd
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/deploy/ONNXRuntime/onnx_inference.py
@@ -0,0 +1,160 @@
+import argparse
+import os
+
+import cv2
+import numpy as np
+from loguru import logger
+
+import onnxruntime
+
+from yolox.data.data_augment import preproc as preprocess
+from yolox.utils import mkdir, multiclass_nms, demo_postprocess, vis
+from yolox.utils.visualize import plot_tracking
+from yolox.tracker.byte_tracker import BYTETracker
+from yolox.tracking_utils.timer import Timer
+
+
+def make_parser():
+    parser = argparse.ArgumentParser("onnxruntime inference sample")
+    parser.add_argument(
+        "-m",
+        "--model",
+        type=str,
+        default="../../bytetrack_s.onnx",
+        help="Path to your ONNX model.",
+    )
+    parser.add_argument(
+        "-i",
+        "--video_path",
+        type=str,
+        default='../../videos/palace.mp4',
+        help="Path to your input video.",
+    )
+    parser.add_argument(
+        "-o",
+        "--output_dir",
+        type=str,
+        default='demo_output',
+        help="Path to your output directory.",
+    )
+    parser.add_argument(
+        "-s",
+        "--score_thr",
+        type=float,
+        default=0.1,
+        help="Score threshold to filter the results.",
+    )
+    parser.add_argument(
+        "-n",
+        "--nms_thr",
+        type=float,
+        default=0.7,
+        help="NMS threshold.",
+    )
+    parser.add_argument(
+        "--input_shape",
+        type=str,
+        default="608,1088",
+        help="Specify an input shape (height,width) for inference.",
+    )
+    parser.add_argument(
+        "--with_p6",
+        action="store_true",
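+        # P6 adds an extra stride-64 output level: with this flag, demo_postprocess()
+        # decodes strides [8, 16, 32, 64] instead of [8, 16, 32], so set it only for
+        # models exported with a P6 head.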
help="Whether your model uses p6 in FPN/PAN.", + ) + # tracking args + parser.add_argument("--track_thresh", type=float, default=0.5, help="tracking confidence threshold") + parser.add_argument("--track_buffer", type=int, default=30, help="the frames for keep lost tracks") + parser.add_argument("--match_thresh", type=int, default=0.8, help="matching threshold for tracking") + parser.add_argument('--min-box-area', type=float, default=10, help='filter out tiny boxes') + parser.add_argument("--mot20", dest="mot20", default=False, action="store_true", help="test mot20.") + return parser + + +class Predictor(object): + def __init__(self, args): + self.rgb_means = (0.485, 0.456, 0.406) + self.std = (0.229, 0.224, 0.225) + self.args = args + self.session = onnxruntime.InferenceSession(args.model) + self.input_shape = tuple(map(int, args.input_shape.split(','))) + + def inference(self, ori_img, timer): + img_info = {"id": 0} + height, width = ori_img.shape[:2] + img_info["height"] = height + img_info["width"] = width + img_info["raw_img"] = ori_img + + img, ratio = preprocess(ori_img, self.input_shape, self.rgb_means, self.std) + img_info["ratio"] = ratio + ort_inputs = {self.session.get_inputs()[0].name: img[None, :, :, :]} + timer.tic() + output = self.session.run(None, ort_inputs) + predictions = demo_postprocess(output[0], self.input_shape, p6=self.args.with_p6)[0] + + boxes = predictions[:, :4] + scores = predictions[:, 4:5] * predictions[:, 5:] + + boxes_xyxy = np.ones_like(boxes) + boxes_xyxy[:, 0] = boxes[:, 0] - boxes[:, 2]/2. + boxes_xyxy[:, 1] = boxes[:, 1] - boxes[:, 3]/2. + boxes_xyxy[:, 2] = boxes[:, 0] + boxes[:, 2]/2. + boxes_xyxy[:, 3] = boxes[:, 1] + boxes[:, 3]/2. + boxes_xyxy /= ratio + dets = multiclass_nms(boxes_xyxy, scores, nms_thr=self.args.nms_thr, score_thr=self.args.score_thr) + return dets[:, :-1], img_info + + +def imageflow_demo(predictor, args): + cap = cv2.VideoCapture(args.video_path) + width = cap.get(cv2.CAP_PROP_FRAME_WIDTH) # float + height = cap.get(cv2.CAP_PROP_FRAME_HEIGHT) # float + fps = cap.get(cv2.CAP_PROP_FPS) + save_folder = args.output_dir + os.makedirs(save_folder, exist_ok=True) + save_path = os.path.join(save_folder, args.video_path.split("/")[-1]) + logger.info(f"video save_path is {save_path}") + vid_writer = cv2.VideoWriter( + save_path, cv2.VideoWriter_fourcc(*"mp4v"), fps, (int(width), int(height)) + ) + tracker = BYTETracker(args, frame_rate=30) + timer = Timer() + frame_id = 0 + results = [] + while True: + if frame_id % 20 == 0: + logger.info('Processing frame {} ({:.2f} fps)'.format(frame_id, 1. / max(1e-5, timer.average_time))) + ret_val, frame = cap.read() + if ret_val: + outputs, img_info = predictor.inference(frame, timer) + online_targets = tracker.update(outputs, [img_info['height'], img_info['width']], [img_info['height'], img_info['width']]) + online_tlwhs = [] + online_ids = [] + online_scores = [] + for t in online_targets: + tlwh = t.tlwh + tid = t.track_id + vertical = tlwh[2] / tlwh[3] > 1.6 + if tlwh[2] * tlwh[3] > args.min_box_area and not vertical: + online_tlwhs.append(tlwh) + online_ids.append(tid) + online_scores.append(t.score) + timer.toc() + results.append((frame_id + 1, online_tlwhs, online_ids, online_scores)) + online_im = plot_tracking(img_info['raw_img'], online_tlwhs, online_ids, frame_id=frame_id + 1, + fps=1. 
/ timer.average_time) + vid_writer.write(online_im) + ch = cv2.waitKey(1) + if ch == 27 or ch == ord("q") or ch == ord("Q"): + break + else: + break + frame_id += 1 + + +if __name__ == '__main__': + args = make_parser().parse_args() + + predictor = Predictor(args) + imageflow_demo(predictor, args) \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/CMakeLists.txt b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..dabf6220c95557d2d728419f18844e938483d79a --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/CMakeLists.txt @@ -0,0 +1,39 @@ +cmake_minimum_required(VERSION 2.6) + +project(bytetrack) + +add_definitions(-std=c++11) + +option(CUDA_USE_STATIC_CUDA_RUNTIME OFF) +set(CMAKE_CXX_STANDARD 11) +set(CMAKE_BUILD_TYPE Debug) + +find_package(CUDA REQUIRED) + +include_directories(${PROJECT_SOURCE_DIR}/include) +include_directories(/usr/local/include/eigen3) +link_directories(${PROJECT_SOURCE_DIR}/include) +# include and link dirs of cuda and tensorrt, you need adapt them if yours are different +# cuda +include_directories(/usr/local/cuda/include) +link_directories(/usr/local/cuda/lib64) +# cudnn +include_directories(/data/cuda/cuda-10.2/cudnn/v8.0.4/include) +link_directories(/data/cuda/cuda-10.2/cudnn/v8.0.4/lib64) +# tensorrt +include_directories(/opt/tiger/demo/TensorRT-7.2.3.4/include) +link_directories(/opt/tiger/demo/TensorRT-7.2.3.4/lib) + +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11 -Wall -Ofast -Wfatal-errors -D_MWAITXINTRIN_H_INCLUDED") + +find_package(OpenCV) +include_directories(${OpenCV_INCLUDE_DIRS}) + +file(GLOB My_Source_Files ${PROJECT_SOURCE_DIR}/src/*.cpp) +add_executable(bytetrack ${My_Source_Files}) +target_link_libraries(bytetrack nvinfer) +target_link_libraries(bytetrack cudart) +target_link_libraries(bytetrack ${OpenCV_LIBS}) + +add_definitions(-O2 -pthread) + diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/README.md b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/README.md new file mode 100644 index 0000000000000000000000000000000000000000..cb2bfae1054140d2863b10738e7ae5b91c52b28a --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/README.md @@ -0,0 +1,58 @@ +# ByteTrack-TensorRT in C++ + +## Installation + +Install opencv with ```sudo apt-get install libopencv-dev``` (we don't need a higher version of opencv like v3.3+). + +Install eigen-3.3.9 [[google]](https://drive.google.com/file/d/1rqO74CYCNrmRAg8Rra0JP3yZtJ-rfket/view?usp=sharing), [[baidu(code:ueq4)]](https://pan.baidu.com/s/15kEfCxpy-T7tz60msxxExg). + +```shell +unzip eigen-3.3.9.zip +cd eigen-3.3.9 +mkdir build +cd build +cmake .. +sudo make install +``` + +## Prepare serialized engine file + +Follow the TensorRT Python demo to convert and save the serialized engine file. + +Check the 'model_trt.engine' file, which will be automatically saved at the YOLOX_output dir. + +## Build the demo + +You should set the TensorRT path and CUDA path in CMakeLists.txt. + +For bytetrack_s model, we set the input frame size 1088 x 608. For bytetrack_m, bytetrack_l, bytetrack_x models, we set the input frame size 1440 x 800. You can modify the INPUT_W and INPUT_H in src/bytetrack.cpp + +```c++ +static const int INPUT_W = 1088; +static const int INPUT_H = 608; +``` + +You can first build the demo: + +```shell +cd /demo/TensorRT/cpp +mkdir build +cd build +cmake .. 
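+# If CMake cannot locate CUDA, cuDNN, or TensorRT, edit the hardcoded
+# include_directories/link_directories paths near the top of CMakeLists.txt
+# to match your installation before re-running cmake.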
+make
+```
+
+Then you can run the demo at around **200 FPS**:
+
+```shell
+./bytetrack ../../../../YOLOX_outputs/yolox_s_mix_det/model_trt.engine -i ../../../../videos/palace.mp4
+```
+
+(If you find that the output video loses some frames, you can convert the input video by running:
+
+```shell
+cd <ByteTrack_HOME>
+python3 tools/convert_video.py
+```
+to generate an appropriate input video for the TensorRT C++ demo.)
+
diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/BYTETracker.h b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/BYTETracker.h
new file mode 100644
index 0000000000000000000000000000000000000000..e3dda973fa27ccdb85a27841ec2a1cf8dcc1e9b0
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/BYTETracker.h
@@ -0,0 +1,49 @@
+#pragma once
+
+#include "STrack.h"
+
+struct Object
+{
+    cv::Rect_<float> rect;
+    int label;
+    float prob;
+};
+
+class BYTETracker
+{
+public:
+    BYTETracker(int frame_rate = 30, int track_buffer = 30);
+    ~BYTETracker();
+
+    vector<STrack> update(const vector<Object>& objects);
+    Scalar get_color(int idx);
+
+private:
+    vector<STrack*> joint_stracks(vector<STrack*> &tlista, vector<STrack> &tlistb);
+    vector<STrack> joint_stracks(vector<STrack> &tlista, vector<STrack> &tlistb);
+
+    vector<STrack> sub_stracks(vector<STrack> &tlista, vector<STrack> &tlistb);
+    void remove_duplicate_stracks(vector<STrack> &resa, vector<STrack> &resb, vector<STrack> &stracksa, vector<STrack> &stracksb);
+
+    void linear_assignment(vector<vector<float> > &cost_matrix, int cost_matrix_size, int cost_matrix_size_size, float thresh,
+        vector<vector<int> > &matches, vector<int> &unmatched_a, vector<int> &unmatched_b);
+    vector<vector<float> > iou_distance(vector<STrack*> &atracks, vector<STrack> &btracks, int &dist_size, int &dist_size_size);
+    vector<vector<float> > iou_distance(vector<STrack> &atracks, vector<STrack> &btracks);
+    vector<vector<float> > ious(vector<vector<float> > &atlbrs, vector<vector<float> > &btlbrs);
+
+    double lapjv(const vector<vector<float> > &cost, vector<int> &rowsol, vector<int> &colsol,
+        bool extend_cost = false, float cost_limit = LONG_MAX, bool return_cost = true);
+
+private:
+
+    float track_thresh;
+    float high_thresh;
+    float match_thresh;
+    int frame_id;
+    int max_time_lost;
+
+    vector<STrack> tracked_stracks;
+    vector<STrack> lost_stracks;
+    vector<STrack> removed_stracks;
+    byte_kalman::KalmanFilter kalman_filter;
+};
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/STrack.h b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/STrack.h
new file mode 100644
index 0000000000000000000000000000000000000000..752cbefa8f7f7f4f0aff08e0e28ff036afe7d61a
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/STrack.h
@@ -0,0 +1,50 @@
+#pragma once
+
+#include <opencv2/opencv.hpp>
+#include "kalmanFilter.h"
+
+using namespace cv;
+using namespace std;
+
+enum TrackState { New = 0, Tracked, Lost, Removed };
+
+class STrack
+{
+public:
+    STrack(vector<float> tlwh_, float score);
+    ~STrack();
+
+    vector<float> static tlbr_to_tlwh(vector<float> &tlbr);
+    void static multi_predict(vector<STrack*> &stracks, byte_kalman::KalmanFilter &kalman_filter);
+    void static_tlwh();
+    void static_tlbr();
+    vector<float> tlwh_to_xyah(vector<float> tlwh_tmp);
+    vector<float> to_xyah();
+    void mark_lost();
+    void mark_removed();
+    int next_id();
+    int end_frame();
+
+    void activate(byte_kalman::KalmanFilter &kalman_filter, int frame_id);
+    void re_activate(STrack &new_track, int frame_id, bool new_id = false);
+    void update(STrack &new_track, int frame_id);
+
+public:
+    bool is_activated;
+    int track_id;
+    int state;
+
+    vector<float> _tlwh;
+    vector<float> tlwh;
+    vector<float> tlbr;
+    int frame_id;
+    int tracklet_len;
+    int start_frame;
+
+    KAL_MEAN mean;
+    KAL_COVA covariance;
+    float score;
+
+private:
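+    // per-track Kalman filter used by activate/re_activate/update (upstream design);
+    // batch prediction instead goes through the shared filter passed to the static multi_predict() above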
+    byte_kalman::KalmanFilter kalman_filter;
+};
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/dataType.h b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/dataType.h
new file mode 100644
index 0000000000000000000000000000000000000000..a7821a395c1c03db137587b879b255846fb0ca16
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/dataType.h
@@ -0,0 +1,36 @@
+#pragma once
+
+#include <cstddef>
+#include <vector>
+
+#include <Eigen/Core>
+#include <Eigen/Dense>
+typedef Eigen::Matrix<float, 1, 4, Eigen::RowMajor> DETECTBOX;
+typedef Eigen::Matrix<float, -1, 4, Eigen::RowMajor> DETECTBOXSS;
+typedef Eigen::Matrix<float, 1, 128, Eigen::RowMajor> FEATURE;
+typedef Eigen::Matrix<float, Eigen::Dynamic, 128, Eigen::RowMajor> FEATURESS;
+//typedef std::vector<FEATURE> FEATURESS;
+
+//Kalmanfilter
+//typedef Eigen::Matrix<float, 8, 8, Eigen::RowMajor> KAL_FILTER;
+typedef Eigen::Matrix<float, 1, 8, Eigen::RowMajor> KAL_MEAN;
+typedef Eigen::Matrix<float, 8, 8, Eigen::RowMajor> KAL_COVA;
+typedef Eigen::Matrix<float, 1, 4, Eigen::RowMajor> KAL_HMEAN;
+typedef Eigen::Matrix<float, 4, 4, Eigen::RowMajor> KAL_HCOVA;
+using KAL_DATA = std::pair<KAL_MEAN, KAL_COVA>;
+using KAL_HDATA = std::pair<KAL_HMEAN, KAL_HCOVA>;
+
+//main
+using RESULT_DATA = std::pair<int, DETECTBOX>;
+
+//tracker:
+using TRACKER_DATA = std::pair<int, FEATURESS>;
+using MATCH_DATA = std::pair<int, int>;
+typedef struct t {
+    std::vector<MATCH_DATA> matches;
+    std::vector<int> unmatched_tracks;
+    std::vector<int> unmatched_detections;
+} TRACHER_MATCHD;
+
+//linear_assignment:
+typedef Eigen::Matrix<float, -1, -1, Eigen::RowMajor> DYNAMICM;
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/kalmanFilter.h b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/kalmanFilter.h
new file mode 100644
index 0000000000000000000000000000000000000000..6596b54e33de75d1b49a8af9bfbb1f26d00ea786
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/kalmanFilter.h
@@ -0,0 +1,31 @@
+#pragma once
+
+#include "dataType.h"
+
+namespace byte_kalman
+{
+    class KalmanFilter
+    {
+    public:
+        static const double chi2inv95[10];
+        KalmanFilter();
+        KAL_DATA initiate(const DETECTBOX& measurement);
+        void predict(KAL_MEAN& mean, KAL_COVA& covariance);
+        KAL_HDATA project(const KAL_MEAN& mean, const KAL_COVA& covariance);
+        KAL_DATA update(const KAL_MEAN& mean,
+            const KAL_COVA& covariance,
+            const DETECTBOX& measurement);
+
+        Eigen::Matrix<float, 1, -1> gating_distance(
+            const KAL_MEAN& mean,
+            const KAL_COVA& covariance,
+            const std::vector<DETECTBOX>& measurements,
+            bool only_position = false);
+
+    private:
+        Eigen::Matrix<float, 8, 8, Eigen::RowMajor> _motion_mat;
+        Eigen::Matrix<float, 4, 8, Eigen::RowMajor> _update_mat;
+        float _std_weight_position;
+        float _std_weight_velocity;
+    };
+}
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/lapjv.h b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/lapjv.h
new file mode 100644
index 0000000000000000000000000000000000000000..0e34385a647bec225827370ff0041a391e628477
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/lapjv.h
@@ -0,0 +1,63 @@
+#ifndef LAPJV_H
+#define LAPJV_H
+
+#define LARGE 1000000
+
+#if !defined TRUE
+#define TRUE 1
+#endif
+#if !defined FALSE
+#define FALSE 0
+#endif
+
+#define NEW(x, t, n) if ((x = (t *)malloc(sizeof(t) * (n))) == 0) { return -1; }
+#define FREE(x) if (x != 0) { free(x); x = 0; }
+#define SWAP_INDICES(a, b) { int_t _temp_index = a; a = b; b = _temp_index; }
+
+#if 0
+#include <assert.h>
+#define ASSERT(cond) assert(cond)
+#define PRINTF(fmt, ...) printf(fmt, ##__VA_ARGS__)
+#define PRINT_COST_ARRAY(a, n) \
+    while (1) { \
+        printf(#a" = ["); \
+        if ((n) > 0) { \
+            printf("%f", (a)[0]); \
+            for (uint_t j = 1; j < n; j++) { \
+                printf(", %f", (a)[j]); \
+            } \
+        } \
+        printf("]\n"); \
+        break; \
+    }
+#define PRINT_INDEX_ARRAY(a, n) \
+    while (1) { \
+        printf(#a" = ["); \
+        if ((n) > 0) { \
+            printf("%d", (a)[0]); \
+            for (uint_t j = 1; j < n; j++) { \
+                printf(", %d", (a)[j]); \
+            } \
+        } \
+        printf("]\n"); \
+        break; \
+    }
+#else
+#define ASSERT(cond)
+#define PRINTF(fmt, ...)
+#define PRINT_COST_ARRAY(a, n)
+#define PRINT_INDEX_ARRAY(a, n)
+#endif
+
+
+typedef signed int int_t;
+typedef unsigned int uint_t;
+typedef double cost_t;
+typedef char boolean;
+typedef enum fp_t { FP_1 = 1, FP_2 = 2, FP_DYNAMIC = 3 } fp_t;
+
+extern int_t lapjv_internal(
+    const uint_t n, cost_t *cost[],
+    int_t *x, int_t *y);
+
+#endif // LAPJV_H
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/logging.h b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/logging.h
new file mode 100644
index 0000000000000000000000000000000000000000..602b69fb5759ac8401765bad5251928c59bac7c7
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/include/logging.h
@@ -0,0 +1,503 @@
+/*
+ * Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef TENSORRT_LOGGING_H
+#define TENSORRT_LOGGING_H
+
+#include "NvInferRuntimeCommon.h"
+#include <cassert>
+#include <ctime>
+#include <iomanip>
+#include <iostream>
+#include <ostream>
+#include <sstream>
+#include <string>
+
+using Severity = nvinfer1::ILogger::Severity;
+
+class LogStreamConsumerBuffer : public std::stringbuf
+{
+public:
+    LogStreamConsumerBuffer(std::ostream& stream, const std::string& prefix, bool shouldLog)
+        : mOutput(stream)
+        , mPrefix(prefix)
+        , mShouldLog(shouldLog)
+    {
+    }
+
+    LogStreamConsumerBuffer(LogStreamConsumerBuffer&& other)
+        : mOutput(other.mOutput)
+    {
+    }
+
+    ~LogStreamConsumerBuffer()
+    {
+        // std::streambuf::pbase() gives a pointer to the beginning of the buffered part of the output sequence
+        // std::streambuf::pptr() gives a pointer to the current position of the output sequence
+        // if the pointer to the beginning is not equal to the pointer to the current position,
+        // call putOutput() to log the output to the stream
+        if (pbase() != pptr())
+        {
+            putOutput();
+        }
+    }
+
+    // synchronizes the stream buffer and returns 0 on success
+    // synchronizing the stream buffer consists of inserting the buffer contents into the stream,
+    // resetting the buffer and flushing the stream
+    virtual int sync()
+    {
+        putOutput();
+        return 0;
+    }
+
+    void putOutput()
+    {
+        if (mShouldLog)
+        {
+            // prepend timestamp
+            std::time_t timestamp = std::time(nullptr);
+            tm* tm_local = std::localtime(&timestamp);
+            std::cout << "[";
+            std::cout << std::setw(2) << std::setfill('0') << 1 + tm_local->tm_mon << "/";
+            std::cout << std::setw(2) << std::setfill('0') << tm_local->tm_mday << "/";
+            std::cout << std::setw(4) << std::setfill('0') << 1900 + tm_local->tm_year << "-";
+            std::cout << std::setw(2) << std::setfill('0') << tm_local->tm_hour << ":";
+            std::cout << std::setw(2) << std::setfill('0') << tm_local->tm_min << ":";
+            std::cout << std::setw(2) << std::setfill('0') << tm_local->tm_sec << "] ";
+            // std::stringbuf::str() gets the string contents of the buffer
+            // insert the buffer contents pre-appended by the appropriate prefix into the stream
+            mOutput << mPrefix << str();
+            // set the buffer to empty
+            str("");
+            // flush the stream
+            mOutput.flush();
+        }
+    }
+
+    void setShouldLog(bool shouldLog)
+    {
+        mShouldLog = shouldLog;
+    }
+
+private:
+    std::ostream& mOutput;
+    std::string mPrefix;
+    bool mShouldLog;
+};
+
+//!
+//! \class LogStreamConsumerBase
+//! \brief Convenience object used to initialize LogStreamConsumerBuffer before std::ostream in LogStreamConsumer
+//!
+class LogStreamConsumerBase
+{
+public:
+    LogStreamConsumerBase(std::ostream& stream, const std::string& prefix, bool shouldLog)
+        : mBuffer(stream, prefix, shouldLog)
+    {
+    }
+
+protected:
+    LogStreamConsumerBuffer mBuffer;
+};
+
+//!
+//! \class LogStreamConsumer
+//! \brief Convenience object used to facilitate use of C++ stream syntax when logging messages.
+//!  Order of base classes is LogStreamConsumerBase and then std::ostream.
+//!  This is because the LogStreamConsumerBase class is used to initialize the LogStreamConsumerBuffer member field
+//!  in LogStreamConsumer and then the address of the buffer is passed to std::ostream.
+//!  This is necessary to prevent the address of an uninitialized buffer from being passed to std::ostream.
+//!  Please do not change the order of the parent classes.
+//!
+class LogStreamConsumer : protected LogStreamConsumerBase, public std::ostream
+{
+public:
+    //! \brief Creates a LogStreamConsumer which logs messages with level severity.
+    //!
Reportable severity determines if the messages are severe enough to be logged. + LogStreamConsumer(Severity reportableSeverity, Severity severity) + : LogStreamConsumerBase(severityOstream(severity), severityPrefix(severity), severity <= reportableSeverity) + , std::ostream(&mBuffer) // links the stream buffer with the stream + , mShouldLog(severity <= reportableSeverity) + , mSeverity(severity) + { + } + + LogStreamConsumer(LogStreamConsumer&& other) + : LogStreamConsumerBase(severityOstream(other.mSeverity), severityPrefix(other.mSeverity), other.mShouldLog) + , std::ostream(&mBuffer) // links the stream buffer with the stream + , mShouldLog(other.mShouldLog) + , mSeverity(other.mSeverity) + { + } + + void setReportableSeverity(Severity reportableSeverity) + { + mShouldLog = mSeverity <= reportableSeverity; + mBuffer.setShouldLog(mShouldLog); + } + +private: + static std::ostream& severityOstream(Severity severity) + { + return severity >= Severity::kINFO ? std::cout : std::cerr; + } + + static std::string severityPrefix(Severity severity) + { + switch (severity) + { + case Severity::kINTERNAL_ERROR: return "[F] "; + case Severity::kERROR: return "[E] "; + case Severity::kWARNING: return "[W] "; + case Severity::kINFO: return "[I] "; + case Severity::kVERBOSE: return "[V] "; + default: assert(0); return ""; + } + } + + bool mShouldLog; + Severity mSeverity; +}; + +//! \class Logger +//! +//! \brief Class which manages logging of TensorRT tools and samples +//! +//! \details This class provides a common interface for TensorRT tools and samples to log information to the console, +//! and supports logging two types of messages: +//! +//! - Debugging messages with an associated severity (info, warning, error, or internal error/fatal) +//! - Test pass/fail messages +//! +//! The advantage of having all samples use this class for logging as opposed to emitting directly to stdout/stderr is +//! that the logic for controlling the verbosity and formatting of sample output is centralized in one location. +//! +//! In the future, this class could be extended to support dumping test results to a file in some standard format +//! (for example, JUnit XML), and providing additional metadata (e.g. timing the duration of a test run). +//! +//! TODO: For backwards compatibility with existing samples, this class inherits directly from the nvinfer1::ILogger +//! interface, which is problematic since there isn't a clean separation between messages coming from the TensorRT +//! library and messages coming from the sample. +//! +//! In the future (once all samples are updated to use Logger::getTRTLogger() to access the ILogger) we can refactor the +//! class to eliminate the inheritance and instead make the nvinfer1::ILogger implementation a member of the Logger +//! object. + +class Logger : public nvinfer1::ILogger +{ +public: + Logger(Severity severity = Severity::kWARNING) + : mReportableSeverity(severity) + { + } + + //! + //! \enum TestResult + //! \brief Represents the state of a given test + //! + enum class TestResult + { + kRUNNING, //!< The test is running + kPASSED, //!< The test passed + kFAILED, //!< The test failed + kWAIVED //!< The test was waived + }; + + //! + //! \brief Forward-compatible method for retrieving the nvinfer::ILogger associated with this Logger + //! \return The nvinfer1::ILogger associated with this Logger + //! + //! TODO Once all samples are updated to use this method to register the logger with TensorRT, + //! we can eliminate the inheritance of Logger from ILogger + //! 
+ nvinfer1::ILogger& getTRTLogger() + { + return *this; + } + + //! + //! \brief Implementation of the nvinfer1::ILogger::log() virtual method + //! + //! Note samples should not be calling this function directly; it will eventually go away once we eliminate the + //! inheritance from nvinfer1::ILogger + //! + void log(Severity severity, const char* msg) override + { + LogStreamConsumer(mReportableSeverity, severity) << "[TRT] " << std::string(msg) << std::endl; + } + + //! + //! \brief Method for controlling the verbosity of logging output + //! + //! \param severity The logger will only emit messages that have severity of this level or higher. + //! + void setReportableSeverity(Severity severity) + { + mReportableSeverity = severity; + } + + //! + //! \brief Opaque handle that holds logging information for a particular test + //! + //! This object is an opaque handle to information used by the Logger to print test results. + //! The sample must call Logger::defineTest() in order to obtain a TestAtom that can be used + //! with Logger::reportTest{Start,End}(). + //! + class TestAtom + { + public: + TestAtom(TestAtom&&) = default; + + private: + friend class Logger; + + TestAtom(bool started, const std::string& name, const std::string& cmdline) + : mStarted(started) + , mName(name) + , mCmdline(cmdline) + { + } + + bool mStarted; + std::string mName; + std::string mCmdline; + }; + + //! + //! \brief Define a test for logging + //! + //! \param[in] name The name of the test. This should be a string starting with + //! "TensorRT" and containing dot-separated strings containing + //! the characters [A-Za-z0-9_]. + //! For example, "TensorRT.sample_googlenet" + //! \param[in] cmdline The command line used to reproduce the test + // + //! \return a TestAtom that can be used in Logger::reportTest{Start,End}(). + //! + static TestAtom defineTest(const std::string& name, const std::string& cmdline) + { + return TestAtom(false, name, cmdline); + } + + //! + //! \brief A convenience overloaded version of defineTest() that accepts an array of command-line arguments + //! as input + //! + //! \param[in] name The name of the test + //! \param[in] argc The number of command-line arguments + //! \param[in] argv The array of command-line arguments (given as C strings) + //! + //! \return a TestAtom that can be used in Logger::reportTest{Start,End}(). + static TestAtom defineTest(const std::string& name, int argc, char const* const* argv) + { + auto cmdline = genCmdlineString(argc, argv); + return defineTest(name, cmdline); + } + + //! + //! \brief Report that a test has started. + //! + //! \pre reportTestStart() has not been called yet for the given testAtom + //! + //! \param[in] testAtom The handle to the test that has started + //! + static void reportTestStart(TestAtom& testAtom) + { + reportTestResult(testAtom, TestResult::kRUNNING); + assert(!testAtom.mStarted); + testAtom.mStarted = true; + } + + //! + //! \brief Report that a test has ended. + //! + //! \pre reportTestStart() has been called for the given testAtom + //! + //! \param[in] testAtom The handle to the test that has ended + //! \param[in] result The result of the test. Should be one of TestResult::kPASSED, + //! TestResult::kFAILED, TestResult::kWAIVED + //! 
+ static void reportTestEnd(const TestAtom& testAtom, TestResult result) + { + assert(result != TestResult::kRUNNING); + assert(testAtom.mStarted); + reportTestResult(testAtom, result); + } + + static int reportPass(const TestAtom& testAtom) + { + reportTestEnd(testAtom, TestResult::kPASSED); + return EXIT_SUCCESS; + } + + static int reportFail(const TestAtom& testAtom) + { + reportTestEnd(testAtom, TestResult::kFAILED); + return EXIT_FAILURE; + } + + static int reportWaive(const TestAtom& testAtom) + { + reportTestEnd(testAtom, TestResult::kWAIVED); + return EXIT_SUCCESS; + } + + static int reportTest(const TestAtom& testAtom, bool pass) + { + return pass ? reportPass(testAtom) : reportFail(testAtom); + } + + Severity getReportableSeverity() const + { + return mReportableSeverity; + } + +private: + //! + //! \brief returns an appropriate string for prefixing a log message with the given severity + //! + static const char* severityPrefix(Severity severity) + { + switch (severity) + { + case Severity::kINTERNAL_ERROR: return "[F] "; + case Severity::kERROR: return "[E] "; + case Severity::kWARNING: return "[W] "; + case Severity::kINFO: return "[I] "; + case Severity::kVERBOSE: return "[V] "; + default: assert(0); return ""; + } + } + + //! + //! \brief returns an appropriate string for prefixing a test result message with the given result + //! + static const char* testResultString(TestResult result) + { + switch (result) + { + case TestResult::kRUNNING: return "RUNNING"; + case TestResult::kPASSED: return "PASSED"; + case TestResult::kFAILED: return "FAILED"; + case TestResult::kWAIVED: return "WAIVED"; + default: assert(0); return ""; + } + } + + //! + //! \brief returns an appropriate output stream (cout or cerr) to use with the given severity + //! + static std::ostream& severityOstream(Severity severity) + { + return severity >= Severity::kINFO ? std::cout : std::cerr; + } + + //! + //! \brief method that implements logging test results + //! + static void reportTestResult(const TestAtom& testAtom, TestResult result) + { + severityOstream(Severity::kINFO) << "&&&& " << testResultString(result) << " " << testAtom.mName << " # " + << testAtom.mCmdline << std::endl; + } + + //! + //! \brief generate a command line string from the given (argc, argv) values + //! + static std::string genCmdlineString(int argc, char const* const* argv) + { + std::stringstream ss; + for (int i = 0; i < argc; i++) + { + if (i > 0) + ss << " "; + ss << argv[i]; + } + return ss.str(); + } + + Severity mReportableSeverity; +}; + +namespace +{ + +//! +//! \brief produces a LogStreamConsumer object that can be used to log messages of severity kVERBOSE +//! +//! Example usage: +//! +//! LOG_VERBOSE(logger) << "hello world" << std::endl; +//! +inline LogStreamConsumer LOG_VERBOSE(const Logger& logger) +{ + return LogStreamConsumer(logger.getReportableSeverity(), Severity::kVERBOSE); +} + +//! +//! \brief produces a LogStreamConsumer object that can be used to log messages of severity kINFO +//! +//! Example usage: +//! +//! LOG_INFO(logger) << "hello world" << std::endl; +//! +inline LogStreamConsumer LOG_INFO(const Logger& logger) +{ + return LogStreamConsumer(logger.getReportableSeverity(), Severity::kINFO); +} + +//! +//! \brief produces a LogStreamConsumer object that can be used to log messages of severity kWARNING +//! +//! Example usage: +//! +//! LOG_WARN(logger) << "hello world" << std::endl; +//! 
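+//! Note: each LOG_* helper below returns a fresh LogStreamConsumer by value, so
+//! the logger's current reportable severity is re-read on every call site.
+//!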
+inline LogStreamConsumer LOG_WARN(const Logger& logger) +{ + return LogStreamConsumer(logger.getReportableSeverity(), Severity::kWARNING); +} + +//! +//! \brief produces a LogStreamConsumer object that can be used to log messages of severity kERROR +//! +//! Example usage: +//! +//! LOG_ERROR(logger) << "hello world" << std::endl; +//! +inline LogStreamConsumer LOG_ERROR(const Logger& logger) +{ + return LogStreamConsumer(logger.getReportableSeverity(), Severity::kERROR); +} + +//! +//! \brief produces a LogStreamConsumer object that can be used to log messages of severity kINTERNAL_ERROR +// ("fatal" severity) +//! +//! Example usage: +//! +//! LOG_FATAL(logger) << "hello world" << std::endl; +//! +inline LogStreamConsumer LOG_FATAL(const Logger& logger) +{ + return LogStreamConsumer(logger.getReportableSeverity(), Severity::kINTERNAL_ERROR); +} + +} // anonymous namespace + +#endif // TENSORRT_LOGGING_H diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/BYTETracker.cpp b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/BYTETracker.cpp new file mode 100644 index 0000000000000000000000000000000000000000..7c936b81f2e95f335ec90b8c355360bc0ebee800 --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/BYTETracker.cpp @@ -0,0 +1,241 @@ +#include "BYTETracker.h" +#include + +BYTETracker::BYTETracker(int frame_rate, int track_buffer) +{ + track_thresh = 0.5; + high_thresh = 0.6; + match_thresh = 0.8; + + frame_id = 0; + max_time_lost = int(frame_rate / 30.0 * track_buffer); + cout << "Init ByteTrack!" << endl; +} + +BYTETracker::~BYTETracker() +{ +} + +vector BYTETracker::update(const vector& objects) +{ + + ////////////////// Step 1: Get detections ////////////////// + this->frame_id++; + vector activated_stracks; + vector refind_stracks; + vector removed_stracks; + vector lost_stracks; + vector detections; + vector detections_low; + + vector detections_cp; + vector tracked_stracks_swap; + vector resa, resb; + vector output_stracks; + + vector unconfirmed; + vector tracked_stracks; + vector strack_pool; + vector r_tracked_stracks; + + if (objects.size() > 0) + { + for (int i = 0; i < objects.size(); i++) + { + vector tlbr_; + tlbr_.resize(4); + tlbr_[0] = objects[i].rect.x; + tlbr_[1] = objects[i].rect.y; + tlbr_[2] = objects[i].rect.x + objects[i].rect.width; + tlbr_[3] = objects[i].rect.y + objects[i].rect.height; + + float score = objects[i].prob; + + STrack strack(STrack::tlbr_to_tlwh(tlbr_), score); + if (score >= track_thresh) + { + detections.push_back(strack); + } + else + { + detections_low.push_back(strack); + } + + } + } + + // Add newly detected tracklets to tracked_stracks + for (int i = 0; i < this->tracked_stracks.size(); i++) + { + if (!this->tracked_stracks[i].is_activated) + unconfirmed.push_back(&this->tracked_stracks[i]); + else + tracked_stracks.push_back(&this->tracked_stracks[i]); + } + + ////////////////// Step 2: First association, with IoU ////////////////// + strack_pool = joint_stracks(tracked_stracks, this->lost_stracks); + STrack::multi_predict(strack_pool, this->kalman_filter); + + vector > dists; + int dist_size = 0, dist_size_size = 0; + dists = iou_distance(strack_pool, detections, dist_size, dist_size_size); + + vector > matches; + vector u_track, u_detection; + linear_assignment(dists, dist_size, dist_size_size, match_thresh, matches, u_track, u_detection); + + for (int i = 0; i < matches.size(); i++) + { + STrack *track = strack_pool[matches[i][0]]; + STrack *det = 
&detections[matches[i][1]]; + if (track->state == TrackState::Tracked) + { + track->update(*det, this->frame_id); + activated_stracks.push_back(*track); + } + else + { + track->re_activate(*det, this->frame_id, false); + refind_stracks.push_back(*track); + } + } + + ////////////////// Step 3: Second association, using low score dets ////////////////// + for (int i = 0; i < u_detection.size(); i++) + { + detections_cp.push_back(detections[u_detection[i]]); + } + detections.clear(); + detections.assign(detections_low.begin(), detections_low.end()); + + for (int i = 0; i < u_track.size(); i++) + { + if (strack_pool[u_track[i]]->state == TrackState::Tracked) + { + r_tracked_stracks.push_back(strack_pool[u_track[i]]); + } + } + + dists.clear(); + dists = iou_distance(r_tracked_stracks, detections, dist_size, dist_size_size); + + matches.clear(); + u_track.clear(); + u_detection.clear(); + linear_assignment(dists, dist_size, dist_size_size, 0.5, matches, u_track, u_detection); + + for (int i = 0; i < matches.size(); i++) + { + STrack *track = r_tracked_stracks[matches[i][0]]; + STrack *det = &detections[matches[i][1]]; + if (track->state == TrackState::Tracked) + { + track->update(*det, this->frame_id); + activated_stracks.push_back(*track); + } + else + { + track->re_activate(*det, this->frame_id, false); + refind_stracks.push_back(*track); + } + } + + for (int i = 0; i < u_track.size(); i++) + { + STrack *track = r_tracked_stracks[u_track[i]]; + if (track->state != TrackState::Lost) + { + track->mark_lost(); + lost_stracks.push_back(*track); + } + } + + // Deal with unconfirmed tracks, usually tracks with only one beginning frame + detections.clear(); + detections.assign(detections_cp.begin(), detections_cp.end()); + + dists.clear(); + dists = iou_distance(unconfirmed, detections, dist_size, dist_size_size); + + matches.clear(); + vector u_unconfirmed; + u_detection.clear(); + linear_assignment(dists, dist_size, dist_size_size, 0.7, matches, u_unconfirmed, u_detection); + + for (int i = 0; i < matches.size(); i++) + { + unconfirmed[matches[i][0]]->update(detections[matches[i][1]], this->frame_id); + activated_stracks.push_back(*unconfirmed[matches[i][0]]); + } + + for (int i = 0; i < u_unconfirmed.size(); i++) + { + STrack *track = unconfirmed[u_unconfirmed[i]]; + track->mark_removed(); + removed_stracks.push_back(*track); + } + + ////////////////// Step 4: Init new stracks ////////////////// + for (int i = 0; i < u_detection.size(); i++) + { + STrack *track = &detections[u_detection[i]]; + if (track->score < this->high_thresh) + continue; + track->activate(this->kalman_filter, this->frame_id); + activated_stracks.push_back(*track); + } + + ////////////////// Step 5: Update state ////////////////// + for (int i = 0; i < this->lost_stracks.size(); i++) + { + if (this->frame_id - this->lost_stracks[i].end_frame() > this->max_time_lost) + { + this->lost_stracks[i].mark_removed(); + removed_stracks.push_back(this->lost_stracks[i]); + } + } + + for (int i = 0; i < this->tracked_stracks.size(); i++) + { + if (this->tracked_stracks[i].state == TrackState::Tracked) + { + tracked_stracks_swap.push_back(this->tracked_stracks[i]); + } + } + this->tracked_stracks.clear(); + this->tracked_stracks.assign(tracked_stracks_swap.begin(), tracked_stracks_swap.end()); + + this->tracked_stracks = joint_stracks(this->tracked_stracks, activated_stracks); + this->tracked_stracks = joint_stracks(this->tracked_stracks, refind_stracks); + + //std::cout << activated_stracks.size() << std::endl; + + this->lost_stracks = 
sub_stracks(this->lost_stracks, this->tracked_stracks); + for (int i = 0; i < lost_stracks.size(); i++) + { + this->lost_stracks.push_back(lost_stracks[i]); + } + + this->lost_stracks = sub_stracks(this->lost_stracks, this->removed_stracks); + for (int i = 0; i < removed_stracks.size(); i++) + { + this->removed_stracks.push_back(removed_stracks[i]); + } + + remove_duplicate_stracks(resa, resb, this->tracked_stracks, this->lost_stracks); + + this->tracked_stracks.clear(); + this->tracked_stracks.assign(resa.begin(), resa.end()); + this->lost_stracks.clear(); + this->lost_stracks.assign(resb.begin(), resb.end()); + + for (int i = 0; i < this->tracked_stracks.size(); i++) + { + if (this->tracked_stracks[i].is_activated) + { + output_stracks.push_back(this->tracked_stracks[i]); + } + } + return output_stracks; +} \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/STrack.cpp b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/STrack.cpp new file mode 100644 index 0000000000000000000000000000000000000000..8306165304355fe6d3d6e244207211757f21a646 --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/STrack.cpp @@ -0,0 +1,192 @@ +#include "STrack.h" + +STrack::STrack(vector tlwh_, float score) +{ + _tlwh.resize(4); + _tlwh.assign(tlwh_.begin(), tlwh_.end()); + + is_activated = false; + track_id = 0; + state = TrackState::New; + + tlwh.resize(4); + tlbr.resize(4); + + static_tlwh(); + static_tlbr(); + frame_id = 0; + tracklet_len = 0; + this->score = score; + start_frame = 0; +} + +STrack::~STrack() +{ +} + +void STrack::activate(byte_kalman::KalmanFilter &kalman_filter, int frame_id) +{ + this->kalman_filter = kalman_filter; + this->track_id = this->next_id(); + + vector _tlwh_tmp(4); + _tlwh_tmp[0] = this->_tlwh[0]; + _tlwh_tmp[1] = this->_tlwh[1]; + _tlwh_tmp[2] = this->_tlwh[2]; + _tlwh_tmp[3] = this->_tlwh[3]; + vector xyah = tlwh_to_xyah(_tlwh_tmp); + DETECTBOX xyah_box; + xyah_box[0] = xyah[0]; + xyah_box[1] = xyah[1]; + xyah_box[2] = xyah[2]; + xyah_box[3] = xyah[3]; + auto mc = this->kalman_filter.initiate(xyah_box); + this->mean = mc.first; + this->covariance = mc.second; + + static_tlwh(); + static_tlbr(); + + this->tracklet_len = 0; + this->state = TrackState::Tracked; + if (frame_id == 1) + { + this->is_activated = true; + } + //this->is_activated = true; + this->frame_id = frame_id; + this->start_frame = frame_id; +} + +void STrack::re_activate(STrack &new_track, int frame_id, bool new_id) +{ + vector xyah = tlwh_to_xyah(new_track.tlwh); + DETECTBOX xyah_box; + xyah_box[0] = xyah[0]; + xyah_box[1] = xyah[1]; + xyah_box[2] = xyah[2]; + xyah_box[3] = xyah[3]; + auto mc = this->kalman_filter.update(this->mean, this->covariance, xyah_box); + this->mean = mc.first; + this->covariance = mc.second; + + static_tlwh(); + static_tlbr(); + + this->tracklet_len = 0; + this->state = TrackState::Tracked; + this->is_activated = true; + this->frame_id = frame_id; + this->score = new_track.score; + if (new_id) + this->track_id = next_id(); +} + +void STrack::update(STrack &new_track, int frame_id) +{ + this->frame_id = frame_id; + this->tracklet_len++; + + vector xyah = tlwh_to_xyah(new_track.tlwh); + DETECTBOX xyah_box; + xyah_box[0] = xyah[0]; + xyah_box[1] = xyah[1]; + xyah_box[2] = xyah[2]; + xyah_box[3] = xyah[3]; + + auto mc = this->kalman_filter.update(this->mean, this->covariance, xyah_box); + this->mean = mc.first; + this->covariance = mc.second; + + static_tlwh(); + static_tlbr(); + + 
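+    // With the filtered state and the refreshed tlwh/tlbr boxes above,
+    // (re)confirm the track and adopt the matched detection's confidence score.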
this->state = TrackState::Tracked; + this->is_activated = true; + + this->score = new_track.score; +} + +void STrack::static_tlwh() +{ + if (this->state == TrackState::New) + { + tlwh[0] = _tlwh[0]; + tlwh[1] = _tlwh[1]; + tlwh[2] = _tlwh[2]; + tlwh[3] = _tlwh[3]; + return; + } + + tlwh[0] = mean[0]; + tlwh[1] = mean[1]; + tlwh[2] = mean[2]; + tlwh[3] = mean[3]; + + tlwh[2] *= tlwh[3]; + tlwh[0] -= tlwh[2] / 2; + tlwh[1] -= tlwh[3] / 2; +} + +void STrack::static_tlbr() +{ + tlbr.clear(); + tlbr.assign(tlwh.begin(), tlwh.end()); + tlbr[2] += tlbr[0]; + tlbr[3] += tlbr[1]; +} + +vector STrack::tlwh_to_xyah(vector tlwh_tmp) +{ + vector tlwh_output = tlwh_tmp; + tlwh_output[0] += tlwh_output[2] / 2; + tlwh_output[1] += tlwh_output[3] / 2; + tlwh_output[2] /= tlwh_output[3]; + return tlwh_output; +} + +vector STrack::to_xyah() +{ + return tlwh_to_xyah(tlwh); +} + +vector STrack::tlbr_to_tlwh(vector &tlbr) +{ + tlbr[2] -= tlbr[0]; + tlbr[3] -= tlbr[1]; + return tlbr; +} + +void STrack::mark_lost() +{ + state = TrackState::Lost; +} + +void STrack::mark_removed() +{ + state = TrackState::Removed; +} + +int STrack::next_id() +{ + static int _count = 0; + _count++; + return _count; +} + +int STrack::end_frame() +{ + return this->frame_id; +} + +void STrack::multi_predict(vector &stracks, byte_kalman::KalmanFilter &kalman_filter) +{ + for (int i = 0; i < stracks.size(); i++) + { + if (stracks[i]->state != TrackState::Tracked) + { + stracks[i]->mean[7] = 0; + } + kalman_filter.predict(stracks[i]->mean, stracks[i]->covariance); + } +} \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/bytetrack.cpp b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/bytetrack.cpp new file mode 100644 index 0000000000000000000000000000000000000000..3f359a6a55620e3362c2421c21d00bb1add3beec --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/bytetrack.cpp @@ -0,0 +1,506 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include "NvInfer.h" +#include "cuda_runtime_api.h" +#include "logging.h" +#include "BYTETracker.h" + +#define CHECK(status) \ + do\ + {\ + auto ret = (status);\ + if (ret != 0)\ + {\ + cerr << "Cuda failure: " << ret << endl;\ + abort();\ + }\ + } while (0) + +#define DEVICE 0 // GPU id +#define NMS_THRESH 0.7 +#define BBOX_CONF_THRESH 0.1 + +using namespace nvinfer1; + +// stuff we know about the network and the input/output blobs +static const int INPUT_W = 1088; +static const int INPUT_H = 608; +const char* INPUT_BLOB_NAME = "input_0"; +const char* OUTPUT_BLOB_NAME = "output_0"; +static Logger gLogger; + +Mat static_resize(Mat& img) { + float r = min(INPUT_W / (img.cols*1.0), INPUT_H / (img.rows*1.0)); + // r = std::min(r, 1.0f); + int unpad_w = r * img.cols; + int unpad_h = r * img.rows; + Mat re(unpad_h, unpad_w, CV_8UC3); + resize(img, re, re.size()); + Mat out(INPUT_H, INPUT_W, CV_8UC3, Scalar(114, 114, 114)); + re.copyTo(out(Rect(0, 0, re.cols, re.rows))); + return out; +} + +struct GridAndStride +{ + int grid0; + int grid1; + int stride; +}; + +static void generate_grids_and_stride(const int target_w, const int target_h, vector& strides, vector& grid_strides) +{ + for (auto stride : strides) + { + int num_grid_w = target_w / stride; + int num_grid_h = target_h / stride; + for (int g1 = 0; g1 < num_grid_h; g1++) + { + for (int g0 = 0; g0 < num_grid_w; g0++) + { + grid_strides.push_back((GridAndStride){g0, g1, stride}); + } + } + } +} + +static inline float 
intersection_area(const Object& a, const Object& b) +{ + Rect_ inter = a.rect & b.rect; + return inter.area(); +} + +static void qsort_descent_inplace(vector& faceobjects, int left, int right) +{ + int i = left; + int j = right; + float p = faceobjects[(left + right) / 2].prob; + + while (i <= j) + { + while (faceobjects[i].prob > p) + i++; + + while (faceobjects[j].prob < p) + j--; + + if (i <= j) + { + // swap + swap(faceobjects[i], faceobjects[j]); + + i++; + j--; + } + } + + #pragma omp parallel sections + { + #pragma omp section + { + if (left < j) qsort_descent_inplace(faceobjects, left, j); + } + #pragma omp section + { + if (i < right) qsort_descent_inplace(faceobjects, i, right); + } + } +} + +static void qsort_descent_inplace(vector& objects) +{ + if (objects.empty()) + return; + + qsort_descent_inplace(objects, 0, objects.size() - 1); +} + +static void nms_sorted_bboxes(const vector& faceobjects, vector& picked, float nms_threshold) +{ + picked.clear(); + + const int n = faceobjects.size(); + + vector areas(n); + for (int i = 0; i < n; i++) + { + areas[i] = faceobjects[i].rect.area(); + } + + for (int i = 0; i < n; i++) + { + const Object& a = faceobjects[i]; + + int keep = 1; + for (int j = 0; j < (int)picked.size(); j++) + { + const Object& b = faceobjects[picked[j]]; + + // intersection over union + float inter_area = intersection_area(a, b); + float union_area = areas[i] + areas[picked[j]] - inter_area; + // float IoU = inter_area / union_area + if (inter_area / union_area > nms_threshold) + keep = 0; + } + + if (keep) + picked.push_back(i); + } +} + + +static void generate_yolox_proposals(vector grid_strides, float* feat_blob, float prob_threshold, vector& objects) +{ + const int num_class = 1; + + const int num_anchors = grid_strides.size(); + + for (int anchor_idx = 0; anchor_idx < num_anchors; anchor_idx++) + { + const int grid0 = grid_strides[anchor_idx].grid0; + const int grid1 = grid_strides[anchor_idx].grid1; + const int stride = grid_strides[anchor_idx].stride; + + const int basic_pos = anchor_idx * (num_class + 5); + + // yolox/models/yolo_head.py decode logic + float x_center = (feat_blob[basic_pos+0] + grid0) * stride; + float y_center = (feat_blob[basic_pos+1] + grid1) * stride; + float w = exp(feat_blob[basic_pos+2]) * stride; + float h = exp(feat_blob[basic_pos+3]) * stride; + float x0 = x_center - w * 0.5f; + float y0 = y_center - h * 0.5f; + + float box_objectness = feat_blob[basic_pos+4]; + for (int class_idx = 0; class_idx < num_class; class_idx++) + { + float box_cls_score = feat_blob[basic_pos + 5 + class_idx]; + float box_prob = box_objectness * box_cls_score; + if (box_prob > prob_threshold) + { + Object obj; + obj.rect.x = x0; + obj.rect.y = y0; + obj.rect.width = w; + obj.rect.height = h; + obj.label = class_idx; + obj.prob = box_prob; + + objects.push_back(obj); + } + + } // class loop + + } // point anchor loop +} + +float* blobFromImage(Mat& img){ + cvtColor(img, img, COLOR_BGR2RGB); + + float* blob = new float[img.total()*3]; + int channels = 3; + int img_h = img.rows; + int img_w = img.cols; + vector mean = {0.485, 0.456, 0.406}; + vector std = {0.229, 0.224, 0.225}; + for (size_t c = 0; c < channels; c++) + { + for (size_t h = 0; h < img_h; h++) + { + for (size_t w = 0; w < img_w; w++) + { + blob[c * img_w * img_h + h * img_w + w] = + (((float)img.at(h, w)[c]) / 255.0f - mean[c]) / std[c]; + } + } + } + return blob; +} + + +static void decode_outputs(float* prob, vector& objects, float scale, const int img_w, const int img_h) { + vector 
proposals; + vector strides = {8, 16, 32}; + vector grid_strides; + generate_grids_and_stride(INPUT_W, INPUT_H, strides, grid_strides); + generate_yolox_proposals(grid_strides, prob, BBOX_CONF_THRESH, proposals); + //std::cout << "num of boxes before nms: " << proposals.size() << std::endl; + + qsort_descent_inplace(proposals); + + vector picked; + nms_sorted_bboxes(proposals, picked, NMS_THRESH); + + + int count = picked.size(); + + //std::cout << "num of boxes: " << count << std::endl; + + objects.resize(count); + for (int i = 0; i < count; i++) + { + objects[i] = proposals[picked[i]]; + + // adjust offset to original unpadded + float x0 = (objects[i].rect.x) / scale; + float y0 = (objects[i].rect.y) / scale; + float x1 = (objects[i].rect.x + objects[i].rect.width) / scale; + float y1 = (objects[i].rect.y + objects[i].rect.height) / scale; + + // clip + // x0 = std::max(std::min(x0, (float)(img_w - 1)), 0.f); + // y0 = std::max(std::min(y0, (float)(img_h - 1)), 0.f); + // x1 = std::max(std::min(x1, (float)(img_w - 1)), 0.f); + // y1 = std::max(std::min(y1, (float)(img_h - 1)), 0.f); + + objects[i].rect.x = x0; + objects[i].rect.y = y0; + objects[i].rect.width = x1 - x0; + objects[i].rect.height = y1 - y0; + } +} + +const float color_list[80][3] = +{ + {0.000, 0.447, 0.741}, + {0.850, 0.325, 0.098}, + {0.929, 0.694, 0.125}, + {0.494, 0.184, 0.556}, + {0.466, 0.674, 0.188}, + {0.301, 0.745, 0.933}, + {0.635, 0.078, 0.184}, + {0.300, 0.300, 0.300}, + {0.600, 0.600, 0.600}, + {1.000, 0.000, 0.000}, + {1.000, 0.500, 0.000}, + {0.749, 0.749, 0.000}, + {0.000, 1.000, 0.000}, + {0.000, 0.000, 1.000}, + {0.667, 0.000, 1.000}, + {0.333, 0.333, 0.000}, + {0.333, 0.667, 0.000}, + {0.333, 1.000, 0.000}, + {0.667, 0.333, 0.000}, + {0.667, 0.667, 0.000}, + {0.667, 1.000, 0.000}, + {1.000, 0.333, 0.000}, + {1.000, 0.667, 0.000}, + {1.000, 1.000, 0.000}, + {0.000, 0.333, 0.500}, + {0.000, 0.667, 0.500}, + {0.000, 1.000, 0.500}, + {0.333, 0.000, 0.500}, + {0.333, 0.333, 0.500}, + {0.333, 0.667, 0.500}, + {0.333, 1.000, 0.500}, + {0.667, 0.000, 0.500}, + {0.667, 0.333, 0.500}, + {0.667, 0.667, 0.500}, + {0.667, 1.000, 0.500}, + {1.000, 0.000, 0.500}, + {1.000, 0.333, 0.500}, + {1.000, 0.667, 0.500}, + {1.000, 1.000, 0.500}, + {0.000, 0.333, 1.000}, + {0.000, 0.667, 1.000}, + {0.000, 1.000, 1.000}, + {0.333, 0.000, 1.000}, + {0.333, 0.333, 1.000}, + {0.333, 0.667, 1.000}, + {0.333, 1.000, 1.000}, + {0.667, 0.000, 1.000}, + {0.667, 0.333, 1.000}, + {0.667, 0.667, 1.000}, + {0.667, 1.000, 1.000}, + {1.000, 0.000, 1.000}, + {1.000, 0.333, 1.000}, + {1.000, 0.667, 1.000}, + {0.333, 0.000, 0.000}, + {0.500, 0.000, 0.000}, + {0.667, 0.000, 0.000}, + {0.833, 0.000, 0.000}, + {1.000, 0.000, 0.000}, + {0.000, 0.167, 0.000}, + {0.000, 0.333, 0.000}, + {0.000, 0.500, 0.000}, + {0.000, 0.667, 0.000}, + {0.000, 0.833, 0.000}, + {0.000, 1.000, 0.000}, + {0.000, 0.000, 0.167}, + {0.000, 0.000, 0.333}, + {0.000, 0.000, 0.500}, + {0.000, 0.000, 0.667}, + {0.000, 0.000, 0.833}, + {0.000, 0.000, 1.000}, + {0.000, 0.000, 0.000}, + {0.143, 0.143, 0.143}, + {0.286, 0.286, 0.286}, + {0.429, 0.429, 0.429}, + {0.571, 0.571, 0.571}, + {0.714, 0.714, 0.714}, + {0.857, 0.857, 0.857}, + {0.000, 0.447, 0.741}, + {0.314, 0.717, 0.741}, + {0.50, 0.5, 0} +}; + +void doInference(IExecutionContext& context, float* input, float* output, const int output_size, Size input_shape) { + const ICudaEngine& engine = context.getEngine(); + + // Pointers to input and output device buffers to pass to engine. 
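+    // buffers[] is indexed by binding index, which is why the input and output
+    // slots are looked up by tensor name below rather than hard-coded.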
+ // Engine requires exactly IEngine::getNbBindings() number of buffers. + assert(engine.getNbBindings() == 2); + void* buffers[2]; + + // In order to bind the buffers, we need to know the names of the input and output tensors. + // Note that indices are guaranteed to be less than IEngine::getNbBindings() + const int inputIndex = engine.getBindingIndex(INPUT_BLOB_NAME); + + assert(engine.getBindingDataType(inputIndex) == nvinfer1::DataType::kFLOAT); + const int outputIndex = engine.getBindingIndex(OUTPUT_BLOB_NAME); + assert(engine.getBindingDataType(outputIndex) == nvinfer1::DataType::kFLOAT); + int mBatchSize = engine.getMaxBatchSize(); + + // Create GPU buffers on device + CHECK(cudaMalloc(&buffers[inputIndex], 3 * input_shape.height * input_shape.width * sizeof(float))); + CHECK(cudaMalloc(&buffers[outputIndex], output_size*sizeof(float))); + + // Create stream + cudaStream_t stream; + CHECK(cudaStreamCreate(&stream)); + + // DMA input batch data to device, infer on the batch asynchronously, and DMA output back to host + CHECK(cudaMemcpyAsync(buffers[inputIndex], input, 3 * input_shape.height * input_shape.width * sizeof(float), cudaMemcpyHostToDevice, stream)); + context.enqueue(1, buffers, stream, nullptr); + CHECK(cudaMemcpyAsync(output, buffers[outputIndex], output_size * sizeof(float), cudaMemcpyDeviceToHost, stream)); + cudaStreamSynchronize(stream); + + // Release stream and buffers + cudaStreamDestroy(stream); + CHECK(cudaFree(buffers[inputIndex])); + CHECK(cudaFree(buffers[outputIndex])); +} + +int main(int argc, char** argv) { + cudaSetDevice(DEVICE); + + // create a model using the API directly and serialize it to a stream + char *trtModelStream{nullptr}; + size_t size{0}; + + if (argc == 4 && string(argv[2]) == "-i") { + const string engine_file_path {argv[1]}; + ifstream file(engine_file_path, ios::binary); + if (file.good()) { + file.seekg(0, file.end); + size = file.tellg(); + file.seekg(0, file.beg); + trtModelStream = new char[size]; + assert(trtModelStream); + file.read(trtModelStream, size); + file.close(); + } + } else { + cerr << "arguments not right!" << endl; + cerr << "run 'python3 tools/trt.py -f exps/example/mot/yolox_s_mix_det.py -c pretrained/bytetrack_s_mot17.pth.tar' to serialize model first!" 
<< std::endl; + cerr << "Then use the following command:" << endl; + cerr << "cd demo/TensorRT/cpp/build" << endl; + cerr << "./bytetrack ../../../../YOLOX_outputs/yolox_s_mix_det/model_trt.engine -i ../../../../videos/palace.mp4 // deserialize file and run inference" << std::endl; + return -1; + } + const string input_video_path {argv[3]}; + + IRuntime* runtime = createInferRuntime(gLogger); + assert(runtime != nullptr); + ICudaEngine* engine = runtime->deserializeCudaEngine(trtModelStream, size); + assert(engine != nullptr); + IExecutionContext* context = engine->createExecutionContext(); + assert(context != nullptr); + delete[] trtModelStream; + auto out_dims = engine->getBindingDimensions(1); + auto output_size = 1; + for(int j=0;j(cap.get(CV_CAP_PROP_FRAME_COUNT)); + cout << "Total frames: " << nFrame << endl; + + VideoWriter writer("demo.mp4", CV_FOURCC('m', 'p', '4', 'v'), fps, Size(img_w, img_h)); + + Mat img; + BYTETracker tracker(fps, 30); + int num_frames = 0; + int total_ms = 0; + while (true) + { + if(!cap.read(img)) + break; + num_frames ++; + if (num_frames % 20 == 0) + { + cout << "Processing frame " << num_frames << " (" << num_frames * 1000000 / total_ms << " fps)" << endl; + } + if (img.empty()) + break; + Mat pr_img = static_resize(img); + + float* blob; + blob = blobFromImage(pr_img); + float scale = min(INPUT_W / (img.cols*1.0), INPUT_H / (img.rows*1.0)); + + // run inference + auto start = chrono::system_clock::now(); + doInference(*context, blob, prob, output_size, pr_img.size()); + vector objects; + decode_outputs(prob, objects, scale, img_w, img_h); + vector output_stracks = tracker.update(objects); + auto end = chrono::system_clock::now(); + total_ms = total_ms + chrono::duration_cast(end - start).count(); + + for (int i = 0; i < output_stracks.size(); i++) + { + vector tlwh = output_stracks[i].tlwh; + bool vertical = tlwh[2] / tlwh[3] > 1.6; + if (tlwh[2] * tlwh[3] > 20 && !vertical) + { + Scalar s = tracker.get_color(output_stracks[i].track_id); + putText(img, format("%d", output_stracks[i].track_id), Point(tlwh[0], tlwh[1] - 5), + 0, 0.6, Scalar(0, 0, 255), 2, LINE_AA); + rectangle(img, Rect(tlwh[0], tlwh[1], tlwh[2], tlwh[3]), s, 2); + } + } + putText(img, format("frame: %d fps: %d num: %d", num_frames, num_frames * 1000000 / total_ms, output_stracks.size()), + Point(0, 30), 0, 0.6, Scalar(0, 0, 255), 2, LINE_AA); + writer.write(img); + + delete blob; + char c = waitKey(1); + if (c > 0) + { + break; + } + } + cap.release(); + cout << "FPS: " << num_frames * 1000000 / total_ms << endl; + // destroy the engine + context->destroy(); + engine->destroy(); + runtime->destroy(); + return 0; +} diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/kalmanFilter.cpp b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/kalmanFilter.cpp new file mode 100644 index 0000000000000000000000000000000000000000..168432a46810d0c1296f4b17500d41f8b4f308b4 --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/kalmanFilter.cpp @@ -0,0 +1,152 @@ +#include "kalmanFilter.h" +#include + +namespace byte_kalman +{ + const double KalmanFilter::chi2inv95[10] = { + 0, + 3.8415, + 5.9915, + 7.8147, + 9.4877, + 11.070, + 12.592, + 14.067, + 15.507, + 16.919 + }; + KalmanFilter::KalmanFilter() + { + int ndim = 4; + double dt = 1.; + + _motion_mat = Eigen::MatrixXf::Identity(8, 8); + for (int i = 0; i < ndim; i++) { + _motion_mat(i, ndim + i) = dt; + } + _update_mat = Eigen::MatrixXf::Identity(4, 8); + + 
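+        // State layout: (cx, cy, aspect, h) plus their velocities. The noise
+        // std-devs below are scaled by the box height in initiate()/predict().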
this->_std_weight_position = 1. / 20; + this->_std_weight_velocity = 1. / 160; + } + + KAL_DATA KalmanFilter::initiate(const DETECTBOX &measurement) + { + DETECTBOX mean_pos = measurement; + DETECTBOX mean_vel; + for (int i = 0; i < 4; i++) mean_vel(i) = 0; + + KAL_MEAN mean; + for (int i = 0; i < 8; i++) { + if (i < 4) mean(i) = mean_pos(i); + else mean(i) = mean_vel(i - 4); + } + + KAL_MEAN std; + std(0) = 2 * _std_weight_position * measurement[3]; + std(1) = 2 * _std_weight_position * measurement[3]; + std(2) = 1e-2; + std(3) = 2 * _std_weight_position * measurement[3]; + std(4) = 10 * _std_weight_velocity * measurement[3]; + std(5) = 10 * _std_weight_velocity * measurement[3]; + std(6) = 1e-5; + std(7) = 10 * _std_weight_velocity * measurement[3]; + + KAL_MEAN tmp = std.array().square(); + KAL_COVA var = tmp.asDiagonal(); + return std::make_pair(mean, var); + } + + void KalmanFilter::predict(KAL_MEAN &mean, KAL_COVA &covariance) + { + //revise the data; + DETECTBOX std_pos; + std_pos << _std_weight_position * mean(3), + _std_weight_position * mean(3), + 1e-2, + _std_weight_position * mean(3); + DETECTBOX std_vel; + std_vel << _std_weight_velocity * mean(3), + _std_weight_velocity * mean(3), + 1e-5, + _std_weight_velocity * mean(3); + KAL_MEAN tmp; + tmp.block<1, 4>(0, 0) = std_pos; + tmp.block<1, 4>(0, 4) = std_vel; + tmp = tmp.array().square(); + KAL_COVA motion_cov = tmp.asDiagonal(); + KAL_MEAN mean1 = this->_motion_mat * mean.transpose(); + KAL_COVA covariance1 = this->_motion_mat * covariance *(_motion_mat.transpose()); + covariance1 += motion_cov; + + mean = mean1; + covariance = covariance1; + } + + KAL_HDATA KalmanFilter::project(const KAL_MEAN &mean, const KAL_COVA &covariance) + { + DETECTBOX std; + std << _std_weight_position * mean(3), _std_weight_position * mean(3), + 1e-1, _std_weight_position * mean(3); + KAL_HMEAN mean1 = _update_mat * mean.transpose(); + KAL_HCOVA covariance1 = _update_mat * covariance * (_update_mat.transpose()); + Eigen::Matrix diag = std.asDiagonal(); + diag = diag.array().square().matrix(); + covariance1 += diag; + // covariance1.diagonal() << diag; + return std::make_pair(mean1, covariance1); + } + + KAL_DATA + KalmanFilter::update( + const KAL_MEAN &mean, + const KAL_COVA &covariance, + const DETECTBOX &measurement) + { + KAL_HDATA pa = project(mean, covariance); + KAL_HMEAN projected_mean = pa.first; + KAL_HCOVA projected_cov = pa.second; + + //chol_factor, lower = + //scipy.linalg.cho_factor(projected_cov, lower=True, check_finite=False) + //kalmain_gain = + //scipy.linalg.cho_solve((cho_factor, lower), + //np.dot(covariance, self._upadte_mat.T).T, + //check_finite=False).T + Eigen::Matrix B = (covariance * (_update_mat.transpose())).transpose(); + Eigen::Matrix kalman_gain = (projected_cov.llt().solve(B)).transpose(); // eg.8x4 + Eigen::Matrix innovation = measurement - projected_mean; //eg.1x4 + auto tmp = innovation * (kalman_gain.transpose()); + KAL_MEAN new_mean = (mean.array() + tmp.array()).matrix(); + KAL_COVA new_covariance = covariance - kalman_gain * projected_cov*(kalman_gain.transpose()); + return std::make_pair(new_mean, new_covariance); + } + + Eigen::Matrix + KalmanFilter::gating_distance( + const KAL_MEAN &mean, + const KAL_COVA &covariance, + const std::vector &measurements, + bool only_position) + { + KAL_HDATA pa = this->project(mean, covariance); + if (only_position) { + printf("not implement!"); + exit(0); + } + KAL_HMEAN mean1 = pa.first; + KAL_HCOVA covariance1 = pa.second; + + // Eigen::Matrix d(size, 4); + DETECTBOXSS 
d(measurements.size(), 4); + int pos = 0; + for (DETECTBOX box : measurements) { + d.row(pos++) = box - mean1; + } + Eigen::Matrix factor = covariance1.llt().matrixL(); + Eigen::Matrix z = factor.triangularView().solve(d).transpose(); + auto zz = ((z.array())*(z.array())).matrix(); + auto square_maha = zz.colwise().sum(); + return square_maha; + } +} \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/lapjv.cpp b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/lapjv.cpp new file mode 100644 index 0000000000000000000000000000000000000000..169efd51f915adf8c666f3f4978f1cb7b2d3e1b3 --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/lapjv.cpp @@ -0,0 +1,343 @@ +#include +#include +#include + +#include "lapjv.h" + +/** Column-reduction and reduction transfer for a dense cost matrix. + */ +int_t _ccrrt_dense(const uint_t n, cost_t *cost[], + int_t *free_rows, int_t *x, int_t *y, cost_t *v) +{ + int_t n_free_rows; + boolean *unique; + + for (uint_t i = 0; i < n; i++) { + x[i] = -1; + v[i] = LARGE; + y[i] = 0; + } + for (uint_t i = 0; i < n; i++) { + for (uint_t j = 0; j < n; j++) { + const cost_t c = cost[i][j]; + if (c < v[j]) { + v[j] = c; + y[j] = i; + } + PRINTF("i=%d, j=%d, c[i,j]=%f, v[j]=%f y[j]=%d\n", i, j, c, v[j], y[j]); + } + } + PRINT_COST_ARRAY(v, n); + PRINT_INDEX_ARRAY(y, n); + NEW(unique, boolean, n); + memset(unique, TRUE, n); + { + int_t j = n; + do { + j--; + const int_t i = y[j]; + if (x[i] < 0) { + x[i] = j; + } + else { + unique[i] = FALSE; + y[j] = -1; + } + } while (j > 0); + } + n_free_rows = 0; + for (uint_t i = 0; i < n; i++) { + if (x[i] < 0) { + free_rows[n_free_rows++] = i; + } + else if (unique[i]) { + const int_t j = x[i]; + cost_t min = LARGE; + for (uint_t j2 = 0; j2 < n; j2++) { + if (j2 == (uint_t)j) { + continue; + } + const cost_t c = cost[i][j2] - v[j2]; + if (c < min) { + min = c; + } + } + PRINTF("v[%d] = %f - %f\n", j, v[j], min); + v[j] -= min; + } + } + FREE(unique); + return n_free_rows; +} + + +/** Augmenting row reduction for a dense cost matrix. 
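 *
 * Each free row takes its minimum reduced-cost column; when that column is
 * already occupied, the column dual v[] is lowered by the gap to the
 * second-best column so the displaced row can be revisited.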
+ */ +int_t _carr_dense( + const uint_t n, cost_t *cost[], + const uint_t n_free_rows, + int_t *free_rows, int_t *x, int_t *y, cost_t *v) +{ + uint_t current = 0; + int_t new_free_rows = 0; + uint_t rr_cnt = 0; + PRINT_INDEX_ARRAY(x, n); + PRINT_INDEX_ARRAY(y, n); + PRINT_COST_ARRAY(v, n); + PRINT_INDEX_ARRAY(free_rows, n_free_rows); + while (current < n_free_rows) { + int_t i0; + int_t j1, j2; + cost_t v1, v2, v1_new; + boolean v1_lowers; + + rr_cnt++; + PRINTF("current = %d rr_cnt = %d\n", current, rr_cnt); + const int_t free_i = free_rows[current++]; + j1 = 0; + v1 = cost[free_i][0] - v[0]; + j2 = -1; + v2 = LARGE; + for (uint_t j = 1; j < n; j++) { + PRINTF("%d = %f %d = %f\n", j1, v1, j2, v2); + const cost_t c = cost[free_i][j] - v[j]; + if (c < v2) { + if (c >= v1) { + v2 = c; + j2 = j; + } + else { + v2 = v1; + v1 = c; + j2 = j1; + j1 = j; + } + } + } + i0 = y[j1]; + v1_new = v[j1] - (v2 - v1); + v1_lowers = v1_new < v[j1]; + PRINTF("%d %d 1=%d,%f 2=%d,%f v1'=%f(%d,%g) \n", free_i, i0, j1, v1, j2, v2, v1_new, v1_lowers, v[j1] - v1_new); + if (rr_cnt < current * n) { + if (v1_lowers) { + v[j1] = v1_new; + } + else if (i0 >= 0 && j2 >= 0) { + j1 = j2; + i0 = y[j2]; + } + if (i0 >= 0) { + if (v1_lowers) { + free_rows[--current] = i0; + } + else { + free_rows[new_free_rows++] = i0; + } + } + } + else { + PRINTF("rr_cnt=%d >= %d (current=%d * n=%d)\n", rr_cnt, current * n, current, n); + if (i0 >= 0) { + free_rows[new_free_rows++] = i0; + } + } + x[free_i] = j1; + y[j1] = free_i; + } + return new_free_rows; +} + + +/** Find columns with minimum d[j] and put them on the SCAN list. + */ +uint_t _find_dense(const uint_t n, uint_t lo, cost_t *d, int_t *cols, int_t *y) +{ + uint_t hi = lo + 1; + cost_t mind = d[cols[lo]]; + for (uint_t k = hi; k < n; k++) { + int_t j = cols[k]; + if (d[j] <= mind) { + if (d[j] < mind) { + hi = lo; + mind = d[j]; + } + cols[k] = cols[hi]; + cols[hi++] = j; + } + } + return hi; +} + + +// Scan all columns in TODO starting from arbitrary column in SCAN +// and try to decrease d of the TODO columns using the SCAN column. +int_t _scan_dense(const uint_t n, cost_t *cost[], + uint_t *plo, uint_t*phi, + cost_t *d, int_t *cols, int_t *pred, + int_t *y, cost_t *v) +{ + uint_t lo = *plo; + uint_t hi = *phi; + cost_t h, cred_ij; + + while (lo != hi) { + int_t j = cols[lo++]; + const int_t i = y[j]; + const cost_t mind = d[j]; + h = cost[i][j] - v[j] - mind; + PRINTF("i=%d j=%d h=%f\n", i, j, h); + // For all columns in TODO + for (uint_t k = hi; k < n; k++) { + j = cols[k]; + cred_ij = cost[i][j] - v[j] - h; + if (cred_ij < d[j]) { + d[j] = cred_ij; + pred[j] = i; + if (cred_ij == mind) { + if (y[j] < 0) { + return j; + } + cols[k] = cols[hi]; + cols[hi++] = j; + } + } + } + } + *plo = lo; + *phi = hi; + return -1; +} + + +/** Single iteration of modified Dijkstra shortest path algorithm as explained in the JV paper. + * + * This is a dense matrix version. + * + * \return The closest free column index. + */ +int_t find_path_dense( + const uint_t n, cost_t *cost[], + const int_t start_i, + int_t *y, cost_t *v, + int_t *pred) +{ + uint_t lo = 0, hi = 0; + int_t final_j = -1; + uint_t n_ready = 0; + int_t *cols; + cost_t *d; + + NEW(cols, int_t, n); + NEW(d, cost_t, n); + + for (uint_t i = 0; i < n; i++) { + cols[i] = i; + pred[i] = start_i; + d[i] = cost[start_i][i] - v[i]; + } + PRINT_COST_ARRAY(d, n); + while (final_j == -1) { + // No columns left on the SCAN list. 
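+        // Invariant: cols[0..lo) are READY, cols[lo..hi) are SCAN and
+        // cols[hi..n) are TODO; d[] holds the tentative shortest-path costs.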
+ if (lo == hi) { + PRINTF("%d..%d -> find\n", lo, hi); + n_ready = lo; + hi = _find_dense(n, lo, d, cols, y); + PRINTF("check %d..%d\n", lo, hi); + PRINT_INDEX_ARRAY(cols, n); + for (uint_t k = lo; k < hi; k++) { + const int_t j = cols[k]; + if (y[j] < 0) { + final_j = j; + } + } + } + if (final_j == -1) { + PRINTF("%d..%d -> scan\n", lo, hi); + final_j = _scan_dense( + n, cost, &lo, &hi, d, cols, pred, y, v); + PRINT_COST_ARRAY(d, n); + PRINT_INDEX_ARRAY(cols, n); + PRINT_INDEX_ARRAY(pred, n); + } + } + + PRINTF("found final_j=%d\n", final_j); + PRINT_INDEX_ARRAY(cols, n); + { + const cost_t mind = d[cols[lo]]; + for (uint_t k = 0; k < n_ready; k++) { + const int_t j = cols[k]; + v[j] += d[j] - mind; + } + } + + FREE(cols); + FREE(d); + + return final_j; +} + + +/** Augment for a dense cost matrix. + */ +int_t _ca_dense( + const uint_t n, cost_t *cost[], + const uint_t n_free_rows, + int_t *free_rows, int_t *x, int_t *y, cost_t *v) +{ + int_t *pred; + + NEW(pred, int_t, n); + + for (int_t *pfree_i = free_rows; pfree_i < free_rows + n_free_rows; pfree_i++) { + int_t i = -1, j; + uint_t k = 0; + + PRINTF("looking at free_i=%d\n", *pfree_i); + j = find_path_dense(n, cost, *pfree_i, y, v, pred); + ASSERT(j >= 0); + ASSERT(j < n); + while (i != *pfree_i) { + PRINTF("augment %d\n", j); + PRINT_INDEX_ARRAY(pred, n); + i = pred[j]; + PRINTF("y[%d]=%d -> %d\n", j, y[j], i); + y[j] = i; + PRINT_INDEX_ARRAY(x, n); + SWAP_INDICES(j, x[i]); + k++; + if (k >= n) { + ASSERT(FALSE); + } + } + } + FREE(pred); + return 0; +} + + +/** Solve dense sparse LAP. + */ +int lapjv_internal( + const uint_t n, cost_t *cost[], + int_t *x, int_t *y) +{ + int ret; + int_t *free_rows; + cost_t *v; + + NEW(free_rows, int_t, n); + NEW(v, cost_t, n); + ret = _ccrrt_dense(n, cost, free_rows, x, y, v); + int i = 0; + while (ret > 0 && i < 2) { + ret = _carr_dense(n, cost, ret, free_rows, x, y, v); + i++; + } + if (ret > 0) { + ret = _ca_dense(n, cost, ret, free_rows, x, y, v); + } + FREE(v); + FREE(free_rows); + return ret; +} \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/utils.cpp b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/utils.cpp new file mode 100644 index 0000000000000000000000000000000000000000..4aa0305cd6cf025496528ef9ff49075209fe9e8c --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/cpp/src/utils.cpp @@ -0,0 +1,429 @@ +#include "BYTETracker.h" +#include "lapjv.h" + +vector BYTETracker::joint_stracks(vector &tlista, vector &tlistb) +{ + map exists; + vector res; + for (int i = 0; i < tlista.size(); i++) + { + exists.insert(pair(tlista[i]->track_id, 1)); + res.push_back(tlista[i]); + } + for (int i = 0; i < tlistb.size(); i++) + { + int tid = tlistb[i].track_id; + if (!exists[tid] || exists.count(tid) == 0) + { + exists[tid] = 1; + res.push_back(&tlistb[i]); + } + } + return res; +} + +vector BYTETracker::joint_stracks(vector &tlista, vector &tlistb) +{ + map exists; + vector res; + for (int i = 0; i < tlista.size(); i++) + { + exists.insert(pair(tlista[i].track_id, 1)); + res.push_back(tlista[i]); + } + for (int i = 0; i < tlistb.size(); i++) + { + int tid = tlistb[i].track_id; + if (!exists[tid] || exists.count(tid) == 0) + { + exists[tid] = 1; + res.push_back(tlistb[i]); + } + } + return res; +} + +vector BYTETracker::sub_stracks(vector &tlista, vector &tlistb) +{ + map stracks; + for (int i = 0; i < tlista.size(); i++) + { + stracks.insert(pair(tlista[i].track_id, tlista[i])); + } + for (int i = 0; i < 
tlistb.size(); i++) + { + int tid = tlistb[i].track_id; + if (stracks.count(tid) != 0) + { + stracks.erase(tid); + } + } + + vector res; + std::map::iterator it; + for (it = stracks.begin(); it != stracks.end(); ++it) + { + res.push_back(it->second); + } + + return res; +} + +void BYTETracker::remove_duplicate_stracks(vector &resa, vector &resb, vector &stracksa, vector &stracksb) +{ + vector > pdist = iou_distance(stracksa, stracksb); + vector > pairs; + for (int i = 0; i < pdist.size(); i++) + { + for (int j = 0; j < pdist[i].size(); j++) + { + if (pdist[i][j] < 0.15) + { + pairs.push_back(pair(i, j)); + } + } + } + + vector dupa, dupb; + for (int i = 0; i < pairs.size(); i++) + { + int timep = stracksa[pairs[i].first].frame_id - stracksa[pairs[i].first].start_frame; + int timeq = stracksb[pairs[i].second].frame_id - stracksb[pairs[i].second].start_frame; + if (timep > timeq) + dupb.push_back(pairs[i].second); + else + dupa.push_back(pairs[i].first); + } + + for (int i = 0; i < stracksa.size(); i++) + { + vector::iterator iter = find(dupa.begin(), dupa.end(), i); + if (iter == dupa.end()) + { + resa.push_back(stracksa[i]); + } + } + + for (int i = 0; i < stracksb.size(); i++) + { + vector::iterator iter = find(dupb.begin(), dupb.end(), i); + if (iter == dupb.end()) + { + resb.push_back(stracksb[i]); + } + } +} + +void BYTETracker::linear_assignment(vector > &cost_matrix, int cost_matrix_size, int cost_matrix_size_size, float thresh, + vector > &matches, vector &unmatched_a, vector &unmatched_b) +{ + if (cost_matrix.size() == 0) + { + for (int i = 0; i < cost_matrix_size; i++) + { + unmatched_a.push_back(i); + } + for (int i = 0; i < cost_matrix_size_size; i++) + { + unmatched_b.push_back(i); + } + return; + } + + vector rowsol; vector colsol; + float c = lapjv(cost_matrix, rowsol, colsol, true, thresh); + for (int i = 0; i < rowsol.size(); i++) + { + if (rowsol[i] >= 0) + { + vector match; + match.push_back(i); + match.push_back(rowsol[i]); + matches.push_back(match); + } + else + { + unmatched_a.push_back(i); + } + } + + for (int i = 0; i < colsol.size(); i++) + { + if (colsol[i] < 0) + { + unmatched_b.push_back(i); + } + } +} + +vector > BYTETracker::ious(vector > &atlbrs, vector > &btlbrs) +{ + vector > ious; + if (atlbrs.size()*btlbrs.size() == 0) + return ious; + + ious.resize(atlbrs.size()); + for (int i = 0; i < ious.size(); i++) + { + ious[i].resize(btlbrs.size()); + } + + //bbox_ious + for (int k = 0; k < btlbrs.size(); k++) + { + vector ious_tmp; + float box_area = (btlbrs[k][2] - btlbrs[k][0] + 1)*(btlbrs[k][3] - btlbrs[k][1] + 1); + for (int n = 0; n < atlbrs.size(); n++) + { + float iw = min(atlbrs[n][2], btlbrs[k][2]) - max(atlbrs[n][0], btlbrs[k][0]) + 1; + if (iw > 0) + { + float ih = min(atlbrs[n][3], btlbrs[k][3]) - max(atlbrs[n][1], btlbrs[k][1]) + 1; + if(ih > 0) + { + float ua = (atlbrs[n][2] - atlbrs[n][0] + 1)*(atlbrs[n][3] - atlbrs[n][1] + 1) + box_area - iw * ih; + ious[n][k] = iw * ih / ua; + } + else + { + ious[n][k] = 0.0; + } + } + else + { + ious[n][k] = 0.0; + } + } + } + + return ious; +} + +vector > BYTETracker::iou_distance(vector &atracks, vector &btracks, int &dist_size, int &dist_size_size) +{ + vector > cost_matrix; + if (atracks.size() * btracks.size() == 0) + { + dist_size = atracks.size(); + dist_size_size = btracks.size(); + return cost_matrix; + } + vector > atlbrs, btlbrs; + for (int i = 0; i < atracks.size(); i++) + { + atlbrs.push_back(atracks[i]->tlbr); + } + for (int i = 0; i < btracks.size(); i++) + { + btlbrs.push_back(btracks[i].tlbr); + 
} + + dist_size = atracks.size(); + dist_size_size = btracks.size(); + + vector > _ious = ious(atlbrs, btlbrs); + + for (int i = 0; i < _ious.size();i++) + { + vector _iou; + for (int j = 0; j < _ious[i].size(); j++) + { + _iou.push_back(1 - _ious[i][j]); + } + cost_matrix.push_back(_iou); + } + + return cost_matrix; +} + +vector > BYTETracker::iou_distance(vector &atracks, vector &btracks) +{ + vector > atlbrs, btlbrs; + for (int i = 0; i < atracks.size(); i++) + { + atlbrs.push_back(atracks[i].tlbr); + } + for (int i = 0; i < btracks.size(); i++) + { + btlbrs.push_back(btracks[i].tlbr); + } + + vector > _ious = ious(atlbrs, btlbrs); + vector > cost_matrix; + for (int i = 0; i < _ious.size(); i++) + { + vector _iou; + for (int j = 0; j < _ious[i].size(); j++) + { + _iou.push_back(1 - _ious[i][j]); + } + cost_matrix.push_back(_iou); + } + + return cost_matrix; +} + +double BYTETracker::lapjv(const vector > &cost, vector &rowsol, vector &colsol, + bool extend_cost, float cost_limit, bool return_cost) +{ + vector > cost_c; + cost_c.assign(cost.begin(), cost.end()); + + vector > cost_c_extended; + + int n_rows = cost.size(); + int n_cols = cost[0].size(); + rowsol.resize(n_rows); + colsol.resize(n_cols); + + int n = 0; + if (n_rows == n_cols) + { + n = n_rows; + } + else + { + if (!extend_cost) + { + cout << "set extend_cost=True" << endl; + system("pause"); + exit(0); + } + } + + if (extend_cost || cost_limit < LONG_MAX) + { + n = n_rows + n_cols; + cost_c_extended.resize(n); + for (int i = 0; i < cost_c_extended.size(); i++) + cost_c_extended[i].resize(n); + + if (cost_limit < LONG_MAX) + { + for (int i = 0; i < cost_c_extended.size(); i++) + { + for (int j = 0; j < cost_c_extended[i].size(); j++) + { + cost_c_extended[i][j] = cost_limit / 2.0; + } + } + } + else + { + float cost_max = -1; + for (int i = 0; i < cost_c.size(); i++) + { + for (int j = 0; j < cost_c[i].size(); j++) + { + if (cost_c[i][j] > cost_max) + cost_max = cost_c[i][j]; + } + } + for (int i = 0; i < cost_c_extended.size(); i++) + { + for (int j = 0; j < cost_c_extended[i].size(); j++) + { + cost_c_extended[i][j] = cost_max + 1; + } + } + } + + for (int i = n_rows; i < cost_c_extended.size(); i++) + { + for (int j = n_cols; j < cost_c_extended[i].size(); j++) + { + cost_c_extended[i][j] = 0; + } + } + for (int i = 0; i < n_rows; i++) + { + for (int j = 0; j < n_cols; j++) + { + cost_c_extended[i][j] = cost_c[i][j]; + } + } + + cost_c.clear(); + cost_c.assign(cost_c_extended.begin(), cost_c_extended.end()); + } + + double **cost_ptr; + cost_ptr = new double *[sizeof(double *) * n]; + for (int i = 0; i < n; i++) + cost_ptr[i] = new double[sizeof(double) * n]; + + for (int i = 0; i < n; i++) + { + for (int j = 0; j < n; j++) + { + cost_ptr[i][j] = cost_c[i][j]; + } + } + + int* x_c = new int[sizeof(int) * n]; + int *y_c = new int[sizeof(int) * n]; + + int ret = lapjv_internal(n, cost_ptr, x_c, y_c); + if (ret != 0) + { + cout << "Calculate Wrong!" 
<< endl; + system("pause"); + exit(0); + } + + double opt = 0.0; + + if (n != n_rows) + { + for (int i = 0; i < n; i++) + { + if (x_c[i] >= n_cols) + x_c[i] = -1; + if (y_c[i] >= n_rows) + y_c[i] = -1; + } + for (int i = 0; i < n_rows; i++) + { + rowsol[i] = x_c[i]; + } + for (int i = 0; i < n_cols; i++) + { + colsol[i] = y_c[i]; + } + + if (return_cost) + { + for (int i = 0; i < rowsol.size(); i++) + { + if (rowsol[i] != -1) + { + //cout << i << "\t" << rowsol[i] << "\t" << cost_ptr[i][rowsol[i]] << endl; + opt += cost_ptr[i][rowsol[i]]; + } + } + } + } + else if (return_cost) + { + for (int i = 0; i < rowsol.size(); i++) + { + opt += cost_ptr[i][rowsol[i]]; + } + } + + for (int i = 0; i < n; i++) + { + delete[]cost_ptr[i]; + } + delete[]cost_ptr; + delete[]x_c; + delete[]y_c; + + return opt; +} + +Scalar BYTETracker::get_color(int idx) +{ + idx += 3; + return Scalar(37 * idx % 255, 17 * idx % 255, 29 * idx % 255); +} \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/deploy/TensorRT/python/README.md b/tracking/docker-build-context/byte_track/deploy/TensorRT/python/README.md new file mode 100644 index 0000000000000000000000000000000000000000..235401dc0f8c16ce00bb18a545af1fa541b3895f --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/TensorRT/python/README.md @@ -0,0 +1,22 @@ +# ByteTrack-TensorRT in Python + +## Install TensorRT Toolkit +Please follow the [TensorRT Installation Guide](https://docs.nvidia.com/deeplearning/tensorrt/install-guide/index.html) and [torch2trt gitrepo](https://github.com/NVIDIA-AI-IOT/torch2trt) to install TensorRT (Version 7 recommended) and torch2trt. + +## Convert model + +You can convert the Pytorch model “bytetrack_s_mot17” to TensorRT model by running: + +```shell +cd +python3 tools/trt.py -f exps/example/mot/yolox_s_mix_det.py -c pretrained/bytetrack_s_mot17.pth.tar +``` + +## Run TensorRT demo + +You can use the converted model_trt.pth to run TensorRT demo with **130 FPS**: + +```shell +cd +python3 tools/demo_track.py video -f exps/example/mot/yolox_s_mix_det.py --trt --save_result +``` diff --git a/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/CMakeLists.txt b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/CMakeLists.txt new file mode 100644 index 0000000000000000000000000000000000000000..e2118d807bb0ed988f76a8e333a65da7cab14c60 --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/CMakeLists.txt @@ -0,0 +1,84 @@ +macro(ncnn_add_example name) + add_executable(${name} ${name}.cpp) + if(OpenCV_FOUND) + target_include_directories(${name} PRIVATE ${OpenCV_INCLUDE_DIRS}) + target_link_libraries(${name} PRIVATE ncnn ${OpenCV_LIBS}) + elseif(NCNN_SIMPLEOCV) + target_compile_definitions(${name} PUBLIC USE_NCNN_SIMPLEOCV) + target_link_libraries(${name} PRIVATE ncnn) + endif() + + # add test to a virtual project group + set_property(TARGET ${name} PROPERTY FOLDER "examples") +endmacro() + +if(NCNN_PIXEL) + find_package(OpenCV QUIET COMPONENTS opencv_world) + # for opencv 2.4 on ubuntu 16.04, there is no opencv_world but OpenCV_FOUND will be TRUE + if("${OpenCV_LIBS}" STREQUAL "") + set(OpenCV_FOUND FALSE) + endif() + if(NOT OpenCV_FOUND) + find_package(OpenCV QUIET COMPONENTS core highgui imgproc imgcodecs videoio) + endif() + if(NOT OpenCV_FOUND) + find_package(OpenCV QUIET COMPONENTS core highgui imgproc) + endif() + + if(OpenCV_FOUND OR NCNN_SIMPLEOCV) + if(OpenCV_FOUND) + message(STATUS "OpenCV library: ${OpenCV_INSTALL_PATH}") + message(STATUS " version: 
${OpenCV_VERSION}") + message(STATUS " libraries: ${OpenCV_LIBS}") + message(STATUS " include path: ${OpenCV_INCLUDE_DIRS}") + + if(${OpenCV_VERSION_MAJOR} GREATER 3) + set(CMAKE_CXX_STANDARD 11) + endif() + endif() + + include_directories(${CMAKE_CURRENT_SOURCE_DIR}/../src) + include_directories(${CMAKE_CURRENT_BINARY_DIR}/../src) + include_directories(include) + include_directories(/usr/local/include/eigen3) + + ncnn_add_example(squeezenet) + ncnn_add_example(squeezenet_c_api) + ncnn_add_example(fasterrcnn) + ncnn_add_example(rfcn) + ncnn_add_example(yolov2) + ncnn_add_example(yolov3) + if(OpenCV_FOUND) + ncnn_add_example(yolov4) + endif() + ncnn_add_example(yolov5) + ncnn_add_example(yolox) + ncnn_add_example(mobilenetv2ssdlite) + ncnn_add_example(mobilenetssd) + ncnn_add_example(squeezenetssd) + ncnn_add_example(shufflenetv2) + ncnn_add_example(peleenetssd_seg) + ncnn_add_example(simplepose) + ncnn_add_example(retinaface) + ncnn_add_example(yolact) + ncnn_add_example(nanodet) + ncnn_add_example(scrfd) + ncnn_add_example(scrfd_crowdhuman) + ncnn_add_example(rvm) + file(GLOB My_Source_Files src/*.cpp) + add_executable(bytetrack ${My_Source_Files}) + if(OpenCV_FOUND) + target_include_directories(bytetrack PRIVATE ${OpenCV_INCLUDE_DIRS}) + target_link_libraries(bytetrack PRIVATE ncnn ${OpenCV_LIBS}) + elseif(NCNN_SIMPLEOCV) + target_compile_definitions(bytetrack PUBLIC USE_NCNN_SIMPLEOCV) + target_link_libraries(bytetrack PRIVATE ncnn) + endif() + # add test to a virtual project group + set_property(TARGET bytetrack PROPERTY FOLDER "examples") + else() + message(WARNING "OpenCV not found and NCNN_SIMPLEOCV disabled, examples won't be built") + endif() +else() + message(WARNING "NCNN_PIXEL not enabled, examples won't be built") +endif() diff --git a/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/README.md b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/README.md new file mode 100644 index 0000000000000000000000000000000000000000..38137039f2056b43a77206092ac9c4cd282a2853 --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/README.md @@ -0,0 +1,103 @@ +# ByteTrack-CPP-ncnn + +## Installation + +Clone [ncnn](https://github.com/Tencent/ncnn) first, then please following [build tutorial of ncnn](https://github.com/Tencent/ncnn/wiki/how-to-build) to build on your own device. + +Install eigen-3.3.9 [[google]](https://drive.google.com/file/d/1rqO74CYCNrmRAg8Rra0JP3yZtJ-rfket/view?usp=sharing), [[baidu(code:ueq4)]](https://pan.baidu.com/s/15kEfCxpy-T7tz60msxxExg). + +```shell +unzip eigen-3.3.9.zip +cd eigen-3.3.9 +mkdir build +cd build +cmake .. +sudo make install +``` + +## Generate onnx file +Use provided tools to generate onnx file. +For example, if you want to generate onnx file of bytetrack_s_mot17.pth, please run the following command: +```shell +cd +python3 tools/export_onnx.py -f exps/example/mot/yolox_s_mix_det.py -c pretrained/bytetrack_s_mot17.pth.tar +``` +Then, a bytetrack_s.onnx file is generated under . + +## Generate ncnn param and bin file +Put bytetrack_s.onnx under ncnn/build/tools/onnx and then run: + +```shell +cd ncnn/build/tools/onnx +./onnx2ncnn bytetrack_s.onnx bytetrack_s.param bytetrack_s.bin +``` + +Since Focus module is not supported in ncnn. Warnings like: +```shell +Unsupported slice step ! +``` +will be printed. However, don't worry! C++ version of Focus layer is already implemented in src/bytetrack.cpp. + +## Modify param file +Open **bytetrack_s.param**, and modify it. 
+Before (just an example):
+```
+235 268
+Input images 0 1 images
+Split splitncnn_input0 1 4 images images_splitncnn_0 images_splitncnn_1 images_splitncnn_2 images_splitncnn_3
+Crop Slice_4 1 1 images_splitncnn_3 467 -23309=1,0 -23310=1,2147483647 -23311=1,1
+Crop Slice_9 1 1 467 472 -23309=1,0 -23310=1,2147483647 -23311=1,2
+Crop Slice_14 1 1 images_splitncnn_2 477 -23309=1,0 -23310=1,2147483647 -23311=1,1
+Crop Slice_19 1 1 477 482 -23309=1,1 -23310=1,2147483647 -23311=1,2
+Crop Slice_24 1 1 images_splitncnn_1 487 -23309=1,1 -23310=1,2147483647 -23311=1,1
+Crop Slice_29 1 1 487 492 -23309=1,0 -23310=1,2147483647 -23311=1,2
+Crop Slice_34 1 1 images_splitncnn_0 497 -23309=1,1 -23310=1,2147483647 -23311=1,1
+Crop Slice_39 1 1 497 502 -23309=1,1 -23310=1,2147483647 -23311=1,2
+Concat Concat_40 4 1 472 492 482 502 503 0=0
+...
+```
+* Change the first number from 235 to 235 - 9 = 226 (we remove 10 layers and add 1, so the total layer count decreases by 9).
+* Then remove the 10 lines from Split through Concat, but note the second-to-last number on the Concat line: 503.
+* Add a YoloV5Focus layer after Input, reusing that blob name 503:
+```
+YoloV5Focus focus 1 1 images 503
+```
+After (just an example):
+```
+226 328
+Input images 0 1 images
+YoloV5Focus focus 1 1 images 503
+...
+```
+
+## Use ncnn_optimize to generate new param and bin
+```shell
+# suppose you are still under the ncnn/build/tools/onnx dir.
+../ncnnoptimize bytetrack_s.param bytetrack_s.bin bytetrack_s_op.param bytetrack_s_op.bin 65536
+```
+
+## Copy files and build ByteTrack
+Copy or move the 'src' and 'include' folders and the 'CMakeLists.txt' file into ncnn/examples. Copy bytetrack_s_op.param, bytetrack_s_op.bin and <ByteTrack_HOME>/videos/palace.mp4 into ncnn/build/examples. Then build ByteTrack:
+
+```shell
+cd ncnn/build/examples
+cmake ..
+## Use ncnnoptimize to generate new param and bin
+```shell
+# suppose you are still under the ncnn/build/tools/onnx dir.
+../ncnnoptimize bytetrack_s.param bytetrack_s.bin bytetrack_s_op.param bytetrack_s_op.bin 65536
+```
+
+## Copy files and build ByteTrack
+Copy or move the 'src' and 'include' folders and the 'CMakeLists.txt' file into ncnn/examples. Copy bytetrack_s_op.param, bytetrack_s_op.bin and <ByteTrack_HOME>/videos/palace.mp4 into ncnn/build/examples. Then build ByteTrack:
+
+```shell
+cd ncnn/build/examples
+cmake ..
+make
+```
+
+## Run the demo
+You can run the ncnn demo at about **5 FPS** (measured on a 96-core Intel(R) Xeon(R) Platinum 8163 CPU @ 2.50GHz):
+```shell
+./bytetrack palace.mp4
+```
+
+You can adjust 'num_threads' in [bytetrack.cpp](https://github.com/ifzhang/ByteTrack/blob/2e9a67895da6b47b948015f6861bba0bacd4e72f/deploy/ncnn/cpp/src/bytetrack.cpp#L309) to match the number of your CPU cores:
+
+```
+yolox.opt.num_threads = 20;
+```
+
+
+## Acknowledgement
+
+* [ncnn](https://github.com/Tencent/ncnn)
diff --git a/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/BYTETracker.h b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/BYTETracker.h
new file mode 100644
index 0000000000000000000000000000000000000000..e3dda973fa27ccdb85a27841ec2a1cf8dcc1e9b0
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/BYTETracker.h
@@ -0,0 +1,49 @@
+#pragma once
+
+#include "STrack.h"
+
+struct Object
+{
+    cv::Rect_<float> rect;
+    int label;
+    float prob;
+};
+
+class BYTETracker
+{
+public:
+    BYTETracker(int frame_rate = 30, int track_buffer = 30);
+    ~BYTETracker();
+
+    vector<STrack> update(const vector<Object>& objects);
+    Scalar get_color(int idx);
+
+private:
+    vector<STrack*> joint_stracks(vector<STrack*> &tlista, vector<STrack> &tlistb);
+    vector<STrack> joint_stracks(vector<STrack> &tlista, vector<STrack> &tlistb);
+
+    vector<STrack> sub_stracks(vector<STrack> &tlista, vector<STrack> &tlistb);
+    void remove_duplicate_stracks(vector<STrack> &resa, vector<STrack> &resb, vector<STrack> &stracksa, vector<STrack> &stracksb);
+
+    void linear_assignment(vector<vector<float> > &cost_matrix, int cost_matrix_size, int cost_matrix_size_size, float thresh,
+        vector<vector<int> > &matches, vector<int> &unmatched_a, vector<int> &unmatched_b);
+    vector<vector<float> > iou_distance(vector<STrack*> &atracks, vector<STrack> &btracks, int &dist_size, int &dist_size_size);
+    vector<vector<float> > iou_distance(vector<STrack> &atracks, vector<STrack> &btracks);
+    vector<vector<float> > ious(vector<vector<float> > &atlbrs, vector<vector<float> > &btlbrs);
+
+    double lapjv(const vector<vector<float> > &cost, vector<int> &rowsol, vector<int> &colsol,
+        bool extend_cost = false, float cost_limit = LONG_MAX, bool return_cost = true);
+
+private:
+
+    float track_thresh;
+    float high_thresh;
+    float match_thresh;
+    int frame_id;
+    int max_time_lost;
+
+    vector<STrack> tracked_stracks;
+    vector<STrack> lost_stracks;
+    vector<STrack> removed_stracks;
+    byte_kalman::KalmanFilter kalman_filter;
+};
\ No newline at end of file
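The private methods declared above are the heart of ByteTrack's two-stage association: costs are `1 - IoU` matrices and matching is solved by the LAPJV solver under a cost limit. For intuition, here is a language-neutral Python sketch of one association round, using `scipy.optimize.linear_sum_assignment` as a stand-in for lapjv and a plain threshold in place of the cost-limit padding (note the C++ `ious()` additionally uses a `+1` pixel convention):

```python
import numpy as np
from scipy.optimize import linear_sum_assignment

def iou_matrix(a, b):
    """a: (N,4), b: (M,4) boxes as [x1, y1, x2, y2]; returns (N,M) IoU."""
    tl = np.maximum(a[:, None, :2], b[None, :, :2])   # pairwise top-left
    br = np.minimum(a[:, None, 2:], b[None, :, 2:])   # pairwise bottom-right
    wh = np.clip(br - tl, 0, None)
    inter = wh[..., 0] * wh[..., 1]
    area_a = (a[:, 2] - a[:, 0]) * (a[:, 3] - a[:, 1])
    area_b = (b[:, 2] - b[:, 0]) * (b[:, 3] - b[:, 1])
    return inter / (area_a[:, None] + area_b[None, :] - inter)

def associate(tracks, dets, match_thresh=0.8):
    """One round of IoU association; returns matches and leftovers."""
    cost = 1.0 - iou_matrix(tracks, dets)
    rows, cols = linear_sum_assignment(cost)
    matches = [(r, c) for r, c in zip(rows, cols) if cost[r, c] <= match_thresh]
    unmatched_tracks = sorted(set(range(len(tracks))) - {r for r, _ in matches})
    unmatched_dets = sorted(set(range(len(dets))) - {c for _, c in matches})
    return matches, unmatched_tracks, unmatched_dets
```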
diff --git a/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/STrack.h b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/STrack.h
new file mode 100644
index 0000000000000000000000000000000000000000..752cbefa8f7f7f4f0aff08e0e28ff036afe7d61a
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/STrack.h
@@ -0,0 +1,50 @@
+#pragma once
+
+#include <opencv2/opencv.hpp>
+#include "kalmanFilter.h"
+
+using namespace cv;
+using namespace std;
+
+enum TrackState { New = 0, Tracked, Lost, Removed };
+
+class STrack
+{
+public:
+    STrack(vector<float> tlwh_, float score);
+    ~STrack();
+
+    vector<float> static tlbr_to_tlwh(vector<float> &tlbr);
+    void static multi_predict(vector<STrack*> &stracks, byte_kalman::KalmanFilter &kalman_filter);
+    void static_tlwh();
+    void static_tlbr();
+    vector<float> tlwh_to_xyah(vector<float> tlwh_tmp);
+    vector<float> to_xyah();
+    void mark_lost();
+    void mark_removed();
+    int next_id();
+    int end_frame();
+
+    void activate(byte_kalman::KalmanFilter &kalman_filter, int frame_id);
+    void re_activate(STrack &new_track, int frame_id, bool new_id = false);
+    void update(STrack &new_track, int frame_id);
+
+public:
+    bool is_activated;
+    int track_id;
+    int state;
+
+    vector<float> _tlwh;
+    vector<float> tlwh;
+    vector<float> tlbr;
+    int frame_id;
+    int tracklet_len;
+    int start_frame;
+
+    KAL_MEAN mean;
+    KAL_COVA covariance;
+    float score;
+
+private:
+    byte_kalman::KalmanFilter kalman_filter;
+};
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/dataType.h b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/dataType.h
new file mode 100644
index 0000000000000000000000000000000000000000..a7821a395c1c03db137587b879b255846fb0ca16
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/dataType.h
@@ -0,0 +1,36 @@
+#pragma once
+
+#include <cstddef>
+#include <vector>
+
+#include <Eigen/Core>
+#include <Eigen/Dense>
+typedef Eigen::Matrix<float, 1, 4, Eigen::RowMajor> DETECTBOX;
+typedef Eigen::Matrix<float, -1, 4, Eigen::RowMajor> DETECTBOXSS;
+typedef Eigen::Matrix<float, 1, 128, Eigen::RowMajor> FEATURE;
+typedef Eigen::Matrix<float, Eigen::Dynamic, 128, Eigen::RowMajor> FEATURESS;
+//typedef std::vector<FEATURE> FEATURESS;
+
+//Kalmanfilter
+//typedef Eigen::Matrix<float, 8, 8, Eigen::RowMajor> KAL_FILTER;
+typedef Eigen::Matrix<float, 1, 8, Eigen::RowMajor> KAL_MEAN;
+typedef Eigen::Matrix<float, 8, 8, Eigen::RowMajor> KAL_COVA;
+typedef Eigen::Matrix<float, 1, 4, Eigen::RowMajor> KAL_HMEAN;
+typedef Eigen::Matrix<float, 4, 4, Eigen::RowMajor> KAL_HCOVA;
+using KAL_DATA = std::pair<KAL_MEAN, KAL_COVA>;
+using KAL_HDATA = std::pair<KAL_HMEAN, KAL_HCOVA>;
+
+//main
+using RESULT_DATA = std::pair<int, DETECTBOX>;
+
+//tracker:
+using TRACKER_DATA = std::pair<int, FEATURESS>;
+using MATCH_DATA = std::pair<int, int>;
+typedef struct t {
+    std::vector<MATCH_DATA> matches;
+    std::vector<int> unmatched_tracks;
+    std::vector<int> unmatched_detections;
+} TRACHER_MATCHD;
+
+//linear_assignment:
+typedef Eigen::Matrix<float, -1, -1, Eigen::RowMajor> DYNAMICM;
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/kalmanFilter.h b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/kalmanFilter.h
new file mode 100644
index 0000000000000000000000000000000000000000..6596b54e33de75d1b49a8af9bfbb1f26d00ea786
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/kalmanFilter.h
@@ -0,0 +1,31 @@
+#pragma once
+
+#include "dataType.h"
+
+namespace byte_kalman
+{
+    class KalmanFilter
+    {
+    public:
+        static const double chi2inv95[10];
+        KalmanFilter();
+        KAL_DATA initiate(const DETECTBOX& measurement);
+        void predict(KAL_MEAN& mean, KAL_COVA& covariance);
+        KAL_HDATA project(const KAL_MEAN& mean, const KAL_COVA& covariance);
+        KAL_DATA update(const KAL_MEAN& mean,
+            const KAL_COVA& covariance,
+            const DETECTBOX& measurement);
+
+        Eigen::Matrix<float, 1, -1> gating_distance(
+            const KAL_MEAN& mean,
+            const KAL_COVA& covariance,
+            const std::vector<DETECTBOX>& measurements,
+            bool only_position = false);
+
+    private:
+        Eigen::Matrix<float, 8, 8, Eigen::RowMajor> _motion_mat;
+        Eigen::Matrix<float, 4, 8, Eigen::RowMajor> _update_mat;
+        float _std_weight_position;
+        float _std_weight_velocity;
+    };
+}
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/lapjv.h b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/lapjv.h
new file mode 100644
index 0000000000000000000000000000000000000000..0e34385a647bec225827370ff0041a391e628477
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/include/lapjv.h
@@ -0,0 +1,63 @@
+#ifndef LAPJV_H
+#define LAPJV_H
+
+#define LARGE 1000000
+
+#if !defined TRUE
+#define TRUE 1
+#endif
+#if !defined FALSE
+#define FALSE 0
+#endif
+
+#define NEW(x, t, n) if ((x = (t *)malloc(sizeof(t) * (n))) == 0) { return -1; }
+#define FREE(x) if (x != 0) { free(x); x = 0; }
+#define SWAP_INDICES(a, b) { int_t _temp_index = a; a = b; b = _temp_index; }
+
+#if 0
+#include <assert.h>
+#define ASSERT(cond) assert(cond)
+#define PRINTF(fmt, ...)
printf(fmt, ##__VA_ARGS__) +#define PRINT_COST_ARRAY(a, n) \ + while (1) { \ + printf(#a" = ["); \ + if ((n) > 0) { \ + printf("%f", (a)[0]); \ + for (uint_t j = 1; j < n; j++) { \ + printf(", %f", (a)[j]); \ + } \ + } \ + printf("]\n"); \ + break; \ + } +#define PRINT_INDEX_ARRAY(a, n) \ + while (1) { \ + printf(#a" = ["); \ + if ((n) > 0) { \ + printf("%d", (a)[0]); \ + for (uint_t j = 1; j < n; j++) { \ + printf(", %d", (a)[j]); \ + } \ + } \ + printf("]\n"); \ + break; \ + } +#else +#define ASSERT(cond) +#define PRINTF(fmt, ...) +#define PRINT_COST_ARRAY(a, n) +#define PRINT_INDEX_ARRAY(a, n) +#endif + + +typedef signed int int_t; +typedef unsigned int uint_t; +typedef double cost_t; +typedef char boolean; +typedef enum fp_t { FP_1 = 1, FP_2 = 2, FP_DYNAMIC = 3 } fp_t; + +extern int_t lapjv_internal( + const uint_t n, cost_t *cost[], + int_t *x, int_t *y); + +#endif // LAPJV_H \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/BYTETracker.cpp b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/BYTETracker.cpp new file mode 100644 index 0000000000000000000000000000000000000000..7c936b81f2e95f335ec90b8c355360bc0ebee800 --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/BYTETracker.cpp @@ -0,0 +1,241 @@ +#include "BYTETracker.h" +#include + +BYTETracker::BYTETracker(int frame_rate, int track_buffer) +{ + track_thresh = 0.5; + high_thresh = 0.6; + match_thresh = 0.8; + + frame_id = 0; + max_time_lost = int(frame_rate / 30.0 * track_buffer); + cout << "Init ByteTrack!" << endl; +} + +BYTETracker::~BYTETracker() +{ +} + +vector BYTETracker::update(const vector& objects) +{ + + ////////////////// Step 1: Get detections ////////////////// + this->frame_id++; + vector activated_stracks; + vector refind_stracks; + vector removed_stracks; + vector lost_stracks; + vector detections; + vector detections_low; + + vector detections_cp; + vector tracked_stracks_swap; + vector resa, resb; + vector output_stracks; + + vector unconfirmed; + vector tracked_stracks; + vector strack_pool; + vector r_tracked_stracks; + + if (objects.size() > 0) + { + for (int i = 0; i < objects.size(); i++) + { + vector tlbr_; + tlbr_.resize(4); + tlbr_[0] = objects[i].rect.x; + tlbr_[1] = objects[i].rect.y; + tlbr_[2] = objects[i].rect.x + objects[i].rect.width; + tlbr_[3] = objects[i].rect.y + objects[i].rect.height; + + float score = objects[i].prob; + + STrack strack(STrack::tlbr_to_tlwh(tlbr_), score); + if (score >= track_thresh) + { + detections.push_back(strack); + } + else + { + detections_low.push_back(strack); + } + + } + } + + // Add newly detected tracklets to tracked_stracks + for (int i = 0; i < this->tracked_stracks.size(); i++) + { + if (!this->tracked_stracks[i].is_activated) + unconfirmed.push_back(&this->tracked_stracks[i]); + else + tracked_stracks.push_back(&this->tracked_stracks[i]); + } + + ////////////////// Step 2: First association, with IoU ////////////////// + strack_pool = joint_stracks(tracked_stracks, this->lost_stracks); + STrack::multi_predict(strack_pool, this->kalman_filter); + + vector > dists; + int dist_size = 0, dist_size_size = 0; + dists = iou_distance(strack_pool, detections, dist_size, dist_size_size); + + vector > matches; + vector u_track, u_detection; + linear_assignment(dists, dist_size, dist_size_size, match_thresh, matches, u_track, u_detection); + + for (int i = 0; i < matches.size(); i++) + { + STrack *track = strack_pool[matches[i][0]]; + STrack *det = &detections[matches[i][1]]; + 
if (track->state == TrackState::Tracked) + { + track->update(*det, this->frame_id); + activated_stracks.push_back(*track); + } + else + { + track->re_activate(*det, this->frame_id, false); + refind_stracks.push_back(*track); + } + } + + ////////////////// Step 3: Second association, using low score dets ////////////////// + for (int i = 0; i < u_detection.size(); i++) + { + detections_cp.push_back(detections[u_detection[i]]); + } + detections.clear(); + detections.assign(detections_low.begin(), detections_low.end()); + + for (int i = 0; i < u_track.size(); i++) + { + if (strack_pool[u_track[i]]->state == TrackState::Tracked) + { + r_tracked_stracks.push_back(strack_pool[u_track[i]]); + } + } + + dists.clear(); + dists = iou_distance(r_tracked_stracks, detections, dist_size, dist_size_size); + + matches.clear(); + u_track.clear(); + u_detection.clear(); + linear_assignment(dists, dist_size, dist_size_size, 0.5, matches, u_track, u_detection); + + for (int i = 0; i < matches.size(); i++) + { + STrack *track = r_tracked_stracks[matches[i][0]]; + STrack *det = &detections[matches[i][1]]; + if (track->state == TrackState::Tracked) + { + track->update(*det, this->frame_id); + activated_stracks.push_back(*track); + } + else + { + track->re_activate(*det, this->frame_id, false); + refind_stracks.push_back(*track); + } + } + + for (int i = 0; i < u_track.size(); i++) + { + STrack *track = r_tracked_stracks[u_track[i]]; + if (track->state != TrackState::Lost) + { + track->mark_lost(); + lost_stracks.push_back(*track); + } + } + + // Deal with unconfirmed tracks, usually tracks with only one beginning frame + detections.clear(); + detections.assign(detections_cp.begin(), detections_cp.end()); + + dists.clear(); + dists = iou_distance(unconfirmed, detections, dist_size, dist_size_size); + + matches.clear(); + vector u_unconfirmed; + u_detection.clear(); + linear_assignment(dists, dist_size, dist_size_size, 0.7, matches, u_unconfirmed, u_detection); + + for (int i = 0; i < matches.size(); i++) + { + unconfirmed[matches[i][0]]->update(detections[matches[i][1]], this->frame_id); + activated_stracks.push_back(*unconfirmed[matches[i][0]]); + } + + for (int i = 0; i < u_unconfirmed.size(); i++) + { + STrack *track = unconfirmed[u_unconfirmed[i]]; + track->mark_removed(); + removed_stracks.push_back(*track); + } + + ////////////////// Step 4: Init new stracks ////////////////// + for (int i = 0; i < u_detection.size(); i++) + { + STrack *track = &detections[u_detection[i]]; + if (track->score < this->high_thresh) + continue; + track->activate(this->kalman_filter, this->frame_id); + activated_stracks.push_back(*track); + } + + ////////////////// Step 5: Update state ////////////////// + for (int i = 0; i < this->lost_stracks.size(); i++) + { + if (this->frame_id - this->lost_stracks[i].end_frame() > this->max_time_lost) + { + this->lost_stracks[i].mark_removed(); + removed_stracks.push_back(this->lost_stracks[i]); + } + } + + for (int i = 0; i < this->tracked_stracks.size(); i++) + { + if (this->tracked_stracks[i].state == TrackState::Tracked) + { + tracked_stracks_swap.push_back(this->tracked_stracks[i]); + } + } + this->tracked_stracks.clear(); + this->tracked_stracks.assign(tracked_stracks_swap.begin(), tracked_stracks_swap.end()); + + this->tracked_stracks = joint_stracks(this->tracked_stracks, activated_stracks); + this->tracked_stracks = joint_stracks(this->tracked_stracks, refind_stracks); + + //std::cout << activated_stracks.size() << std::endl; + + this->lost_stracks = sub_stracks(this->lost_stracks, 
this->tracked_stracks); + for (int i = 0; i < lost_stracks.size(); i++) + { + this->lost_stracks.push_back(lost_stracks[i]); + } + + this->lost_stracks = sub_stracks(this->lost_stracks, this->removed_stracks); + for (int i = 0; i < removed_stracks.size(); i++) + { + this->removed_stracks.push_back(removed_stracks[i]); + } + + remove_duplicate_stracks(resa, resb, this->tracked_stracks, this->lost_stracks); + + this->tracked_stracks.clear(); + this->tracked_stracks.assign(resa.begin(), resa.end()); + this->lost_stracks.clear(); + this->lost_stracks.assign(resb.begin(), resb.end()); + + for (int i = 0; i < this->tracked_stracks.size(); i++) + { + if (this->tracked_stracks[i].is_activated) + { + output_stracks.push_back(this->tracked_stracks[i]); + } + } + return output_stracks; +} \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/STrack.cpp b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/STrack.cpp new file mode 100644 index 0000000000000000000000000000000000000000..8306165304355fe6d3d6e244207211757f21a646 --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/STrack.cpp @@ -0,0 +1,192 @@ +#include "STrack.h" + +STrack::STrack(vector tlwh_, float score) +{ + _tlwh.resize(4); + _tlwh.assign(tlwh_.begin(), tlwh_.end()); + + is_activated = false; + track_id = 0; + state = TrackState::New; + + tlwh.resize(4); + tlbr.resize(4); + + static_tlwh(); + static_tlbr(); + frame_id = 0; + tracklet_len = 0; + this->score = score; + start_frame = 0; +} + +STrack::~STrack() +{ +} + +void STrack::activate(byte_kalman::KalmanFilter &kalman_filter, int frame_id) +{ + this->kalman_filter = kalman_filter; + this->track_id = this->next_id(); + + vector _tlwh_tmp(4); + _tlwh_tmp[0] = this->_tlwh[0]; + _tlwh_tmp[1] = this->_tlwh[1]; + _tlwh_tmp[2] = this->_tlwh[2]; + _tlwh_tmp[3] = this->_tlwh[3]; + vector xyah = tlwh_to_xyah(_tlwh_tmp); + DETECTBOX xyah_box; + xyah_box[0] = xyah[0]; + xyah_box[1] = xyah[1]; + xyah_box[2] = xyah[2]; + xyah_box[3] = xyah[3]; + auto mc = this->kalman_filter.initiate(xyah_box); + this->mean = mc.first; + this->covariance = mc.second; + + static_tlwh(); + static_tlbr(); + + this->tracklet_len = 0; + this->state = TrackState::Tracked; + if (frame_id == 1) + { + this->is_activated = true; + } + //this->is_activated = true; + this->frame_id = frame_id; + this->start_frame = frame_id; +} + +void STrack::re_activate(STrack &new_track, int frame_id, bool new_id) +{ + vector xyah = tlwh_to_xyah(new_track.tlwh); + DETECTBOX xyah_box; + xyah_box[0] = xyah[0]; + xyah_box[1] = xyah[1]; + xyah_box[2] = xyah[2]; + xyah_box[3] = xyah[3]; + auto mc = this->kalman_filter.update(this->mean, this->covariance, xyah_box); + this->mean = mc.first; + this->covariance = mc.second; + + static_tlwh(); + static_tlbr(); + + this->tracklet_len = 0; + this->state = TrackState::Tracked; + this->is_activated = true; + this->frame_id = frame_id; + this->score = new_track.score; + if (new_id) + this->track_id = next_id(); +} + +void STrack::update(STrack &new_track, int frame_id) +{ + this->frame_id = frame_id; + this->tracklet_len++; + + vector xyah = tlwh_to_xyah(new_track.tlwh); + DETECTBOX xyah_box; + xyah_box[0] = xyah[0]; + xyah_box[1] = xyah[1]; + xyah_box[2] = xyah[2]; + xyah_box[3] = xyah[3]; + + auto mc = this->kalman_filter.update(this->mean, this->covariance, xyah_box); + this->mean = mc.first; + this->covariance = mc.second; + + static_tlwh(); + static_tlbr(); + + this->state = TrackState::Tracked; + 
this->is_activated = true; + + this->score = new_track.score; +} + +void STrack::static_tlwh() +{ + if (this->state == TrackState::New) + { + tlwh[0] = _tlwh[0]; + tlwh[1] = _tlwh[1]; + tlwh[2] = _tlwh[2]; + tlwh[3] = _tlwh[3]; + return; + } + + tlwh[0] = mean[0]; + tlwh[1] = mean[1]; + tlwh[2] = mean[2]; + tlwh[3] = mean[3]; + + tlwh[2] *= tlwh[3]; + tlwh[0] -= tlwh[2] / 2; + tlwh[1] -= tlwh[3] / 2; +} + +void STrack::static_tlbr() +{ + tlbr.clear(); + tlbr.assign(tlwh.begin(), tlwh.end()); + tlbr[2] += tlbr[0]; + tlbr[3] += tlbr[1]; +} + +vector STrack::tlwh_to_xyah(vector tlwh_tmp) +{ + vector tlwh_output = tlwh_tmp; + tlwh_output[0] += tlwh_output[2] / 2; + tlwh_output[1] += tlwh_output[3] / 2; + tlwh_output[2] /= tlwh_output[3]; + return tlwh_output; +} + +vector STrack::to_xyah() +{ + return tlwh_to_xyah(tlwh); +} + +vector STrack::tlbr_to_tlwh(vector &tlbr) +{ + tlbr[2] -= tlbr[0]; + tlbr[3] -= tlbr[1]; + return tlbr; +} + +void STrack::mark_lost() +{ + state = TrackState::Lost; +} + +void STrack::mark_removed() +{ + state = TrackState::Removed; +} + +int STrack::next_id() +{ + static int _count = 0; + _count++; + return _count; +} + +int STrack::end_frame() +{ + return this->frame_id; +} + +void STrack::multi_predict(vector &stracks, byte_kalman::KalmanFilter &kalman_filter) +{ + for (int i = 0; i < stracks.size(); i++) + { + if (stracks[i]->state != TrackState::Tracked) + { + stracks[i]->mean[7] = 0; + } + kalman_filter.predict(stracks[i]->mean, stracks[i]->covariance); + } +} \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/bytetrack.cpp b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/bytetrack.cpp new file mode 100644 index 0000000000000000000000000000000000000000..a129f146dd8faa3570bb590555e98a23bd9e4d23 --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/bytetrack.cpp @@ -0,0 +1,396 @@ +#include "layer.h" +#include "net.h" + +#if defined(USE_NCNN_SIMPLEOCV) +#include "simpleocv.h" +#include +#else +#include +#include +#include +#include +#endif +#include +#include +#include +#include +#include "BYTETracker.h" + +#define YOLOX_NMS_THRESH 0.7 // nms threshold +#define YOLOX_CONF_THRESH 0.1 // threshold of bounding box prob +#define INPUT_W 1088 // target image size w after resize +#define INPUT_H 608 // target image size h after resize + +Mat static_resize(Mat& img) { + float r = min(INPUT_W / (img.cols*1.0), INPUT_H / (img.rows*1.0)); + // r = std::min(r, 1.0f); + int unpad_w = r * img.cols; + int unpad_h = r * img.rows; + Mat re(unpad_h, unpad_w, CV_8UC3); + resize(img, re, re.size()); + Mat out(INPUT_H, INPUT_W, CV_8UC3, Scalar(114, 114, 114)); + re.copyTo(out(Rect(0, 0, re.cols, re.rows))); + return out; +} + +// YOLOX use the same focus in yolov5 +class YoloV5Focus : public ncnn::Layer +{ +public: + YoloV5Focus() + { + one_blob_only = true; + } + + virtual int forward(const ncnn::Mat& bottom_blob, ncnn::Mat& top_blob, const ncnn::Option& opt) const + { + int w = bottom_blob.w; + int h = bottom_blob.h; + int channels = bottom_blob.c; + + int outw = w / 2; + int outh = h / 2; + int outc = channels * 4; + + top_blob.create(outw, outh, outc, 4u, 1, opt.blob_allocator); + if (top_blob.empty()) + return -100; + + #pragma omp parallel for num_threads(opt.num_threads) + for (int p = 0; p < outc; p++) + { + const float* ptr = bottom_blob.channel(p % channels).row((p / channels) % 2) + ((p / channels) / 2); + float* outptr = top_blob.channel(p); + + for (int i = 0; i < outh; i++) + { + 
for (int j = 0; j < outw; j++) + { + *outptr = *ptr; + + outptr += 1; + ptr += 2; + } + + ptr += w; + } + } + + return 0; + } +}; + +DEFINE_LAYER_CREATOR(YoloV5Focus) + +struct GridAndStride +{ + int grid0; + int grid1; + int stride; +}; + +static inline float intersection_area(const Object& a, const Object& b) +{ + cv::Rect_ inter = a.rect & b.rect; + return inter.area(); +} + +static void qsort_descent_inplace(std::vector& faceobjects, int left, int right) +{ + int i = left; + int j = right; + float p = faceobjects[(left + right) / 2].prob; + + while (i <= j) + { + while (faceobjects[i].prob > p) + i++; + + while (faceobjects[j].prob < p) + j--; + + if (i <= j) + { + // swap + std::swap(faceobjects[i], faceobjects[j]); + + i++; + j--; + } + } + + #pragma omp parallel sections + { + #pragma omp section + { + if (left < j) qsort_descent_inplace(faceobjects, left, j); + } + #pragma omp section + { + if (i < right) qsort_descent_inplace(faceobjects, i, right); + } + } +} + +static void qsort_descent_inplace(std::vector& objects) +{ + if (objects.empty()) + return; + + qsort_descent_inplace(objects, 0, objects.size() - 1); +} + +static void nms_sorted_bboxes(const std::vector& faceobjects, std::vector& picked, float nms_threshold) +{ + picked.clear(); + + const int n = faceobjects.size(); + + std::vector areas(n); + for (int i = 0; i < n; i++) + { + areas[i] = faceobjects[i].rect.area(); + } + + for (int i = 0; i < n; i++) + { + const Object& a = faceobjects[i]; + + int keep = 1; + for (int j = 0; j < (int)picked.size(); j++) + { + const Object& b = faceobjects[picked[j]]; + + // intersection over union + float inter_area = intersection_area(a, b); + float union_area = areas[i] + areas[picked[j]] - inter_area; + // float IoU = inter_area / union_area + if (inter_area / union_area > nms_threshold) + keep = 0; + } + + if (keep) + picked.push_back(i); + } +} + +static void generate_grids_and_stride(const int target_w, const int target_h, std::vector& strides, std::vector& grid_strides) +{ + for (int i = 0; i < (int)strides.size(); i++) + { + int stride = strides[i]; + int num_grid_w = target_w / stride; + int num_grid_h = target_h / stride; + for (int g1 = 0; g1 < num_grid_h; g1++) + { + for (int g0 = 0; g0 < num_grid_w; g0++) + { + GridAndStride gs; + gs.grid0 = g0; + gs.grid1 = g1; + gs.stride = stride; + grid_strides.push_back(gs); + } + } + } +} + +static void generate_yolox_proposals(std::vector grid_strides, const ncnn::Mat& feat_blob, float prob_threshold, std::vector& objects) +{ + const int num_grid = feat_blob.h; + const int num_class = feat_blob.w - 5; + const int num_anchors = grid_strides.size(); + + const float* feat_ptr = feat_blob.channel(0); + for (int anchor_idx = 0; anchor_idx < num_anchors; anchor_idx++) + { + const int grid0 = grid_strides[anchor_idx].grid0; + const int grid1 = grid_strides[anchor_idx].grid1; + const int stride = grid_strides[anchor_idx].stride; + + // yolox/models/yolo_head.py decode logic + // outputs[..., :2] = (outputs[..., :2] + grids) * strides + // outputs[..., 2:4] = torch.exp(outputs[..., 2:4]) * strides + float x_center = (feat_ptr[0] + grid0) * stride; + float y_center = (feat_ptr[1] + grid1) * stride; + float w = exp(feat_ptr[2]) * stride; + float h = exp(feat_ptr[3]) * stride; + float x0 = x_center - w * 0.5f; + float y0 = y_center - h * 0.5f; + + float box_objectness = feat_ptr[4]; + for (int class_idx = 0; class_idx < num_class; class_idx++) + { + float box_cls_score = feat_ptr[5 + class_idx]; + float box_prob = box_objectness * box_cls_score; 
+ if (box_prob > prob_threshold) + { + Object obj; + obj.rect.x = x0; + obj.rect.y = y0; + obj.rect.width = w; + obj.rect.height = h; + obj.label = class_idx; + obj.prob = box_prob; + + objects.push_back(obj); + } + + } // class loop + feat_ptr += feat_blob.w; + + } // point anchor loop +} + +static int detect_yolox(ncnn::Mat& in_pad, std::vector& objects, ncnn::Extractor ex, float scale) +{ + + ex.input("images", in_pad); + + std::vector proposals; + + { + ncnn::Mat out; + ex.extract("output", out); + + static const int stride_arr[] = {8, 16, 32}; // might have stride=64 in YOLOX + std::vector strides(stride_arr, stride_arr + sizeof(stride_arr) / sizeof(stride_arr[0])); + std::vector grid_strides; + generate_grids_and_stride(INPUT_W, INPUT_H, strides, grid_strides); + generate_yolox_proposals(grid_strides, out, YOLOX_CONF_THRESH, proposals); + } + // sort all proposals by score from highest to lowest + qsort_descent_inplace(proposals); + + // apply nms with nms_threshold + std::vector picked; + nms_sorted_bboxes(proposals, picked, YOLOX_NMS_THRESH); + + int count = picked.size(); + + objects.resize(count); + for (int i = 0; i < count; i++) + { + objects[i] = proposals[picked[i]]; + + // adjust offset to original unpadded + float x0 = (objects[i].rect.x) / scale; + float y0 = (objects[i].rect.y) / scale; + float x1 = (objects[i].rect.x + objects[i].rect.width) / scale; + float y1 = (objects[i].rect.y + objects[i].rect.height) / scale; + + // clip + // x0 = std::max(std::min(x0, (float)(img_w - 1)), 0.f); + // y0 = std::max(std::min(y0, (float)(img_h - 1)), 0.f); + // x1 = std::max(std::min(x1, (float)(img_w - 1)), 0.f); + // y1 = std::max(std::min(y1, (float)(img_h - 1)), 0.f); + + objects[i].rect.x = x0; + objects[i].rect.y = y0; + objects[i].rect.width = x1 - x0; + objects[i].rect.height = y1 - y0; + } + + return 0; +} + +int main(int argc, char** argv) +{ + if (argc != 2) + { + fprintf(stderr, "Usage: %s [videopath]\n", argv[0]); + return -1; + } + + ncnn::Net yolox; + + //yolox.opt.use_vulkan_compute = true; + //yolox.opt.use_bf16_storage = true; + yolox.opt.num_threads = 20; + //ncnn::set_cpu_powersave(0); + + //ncnn::set_omp_dynamic(0); + //ncnn::set_omp_num_threads(20); + + // Focus in yolov5 + yolox.register_custom_layer("YoloV5Focus", YoloV5Focus_layer_creator); + + yolox.load_param("bytetrack_s_op.param"); + yolox.load_model("bytetrack_s_op.bin"); + + ncnn::Extractor ex = yolox.create_extractor(); + + const char* videopath = argv[1]; + + VideoCapture cap(videopath); + if (!cap.isOpened()) + return 0; + + int img_w = cap.get(CV_CAP_PROP_FRAME_WIDTH); + int img_h = cap.get(CV_CAP_PROP_FRAME_HEIGHT); + int fps = cap.get(CV_CAP_PROP_FPS); + long nFrame = static_cast(cap.get(CV_CAP_PROP_FRAME_COUNT)); + cout << "Total frames: " << nFrame << endl; + + VideoWriter writer("demo.mp4", CV_FOURCC('m', 'p', '4', 'v'), fps, Size(img_w, img_h)); + + Mat img; + BYTETracker tracker(fps, 30); + int num_frames = 0; + int total_ms = 1; + for (;;) + { + if(!cap.read(img)) + break; + num_frames ++; + if (num_frames % 20 == 0) + { + cout << "Processing frame " << num_frames << " (" << num_frames * 1000000 / total_ms << " fps)" << endl; + } + if (img.empty()) + break; + + float scale = min(INPUT_W / (img.cols*1.0), INPUT_H / (img.rows*1.0)); + Mat pr_img = static_resize(img); + ncnn::Mat in_pad = ncnn::Mat::from_pixels_resize(pr_img.data, ncnn::Mat::PIXEL_BGR2RGB, INPUT_W, INPUT_H, INPUT_W, INPUT_H); + + // python 0-1 input tensor with rgb_means = (0.485, 0.456, 0.406), std = (0.229, 0.224, 0.225) + // 
so for 0-255 input image, rgb_mean should multiply 255 and norm should div by std. + const float mean_vals[3] = {255.f * 0.485f, 255.f * 0.456, 255.f * 0.406f}; + const float norm_vals[3] = {1 / (255.f * 0.229f), 1 / (255.f * 0.224f), 1 / (255.f * 0.225f)}; + + in_pad.substract_mean_normalize(mean_vals, norm_vals); + + std::vector objects; + auto start = chrono::system_clock::now(); + //detect_yolox(img, objects); + detect_yolox(in_pad, objects, ex, scale); + vector output_stracks = tracker.update(objects); + auto end = chrono::system_clock::now(); + total_ms = total_ms + chrono::duration_cast(end - start).count(); + for (int i = 0; i < output_stracks.size(); i++) + { + vector tlwh = output_stracks[i].tlwh; + bool vertical = tlwh[2] / tlwh[3] > 1.6; + if (tlwh[2] * tlwh[3] > 20 && !vertical) + { + Scalar s = tracker.get_color(output_stracks[i].track_id); + putText(img, format("%d", output_stracks[i].track_id), Point(tlwh[0], tlwh[1] - 5), + 0, 0.6, Scalar(0, 0, 255), 2, LINE_AA); + rectangle(img, Rect(tlwh[0], tlwh[1], tlwh[2], tlwh[3]), s, 2); + } + } + putText(img, format("frame: %d fps: %d num: %d", num_frames, num_frames * 1000000 / total_ms, output_stracks.size()), + Point(0, 30), 0, 0.6, Scalar(0, 0, 255), 2, LINE_AA); + writer.write(img); + char c = waitKey(1); + if (c > 0) + { + break; + } + } + cap.release(); + cout << "FPS: " << num_frames * 1000000 / total_ms << endl; + + return 0; +} diff --git a/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/kalmanFilter.cpp b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/kalmanFilter.cpp new file mode 100644 index 0000000000000000000000000000000000000000..168432a46810d0c1296f4b17500d41f8b4f308b4 --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/kalmanFilter.cpp @@ -0,0 +1,152 @@ +#include "kalmanFilter.h" +#include + +namespace byte_kalman +{ + const double KalmanFilter::chi2inv95[10] = { + 0, + 3.8415, + 5.9915, + 7.8147, + 9.4877, + 11.070, + 12.592, + 14.067, + 15.507, + 16.919 + }; + KalmanFilter::KalmanFilter() + { + int ndim = 4; + double dt = 1.; + + _motion_mat = Eigen::MatrixXf::Identity(8, 8); + for (int i = 0; i < ndim; i++) { + _motion_mat(i, ndim + i) = dt; + } + _update_mat = Eigen::MatrixXf::Identity(4, 8); + + this->_std_weight_position = 1. / 20; + this->_std_weight_velocity = 1. 
/ 160; + } + + KAL_DATA KalmanFilter::initiate(const DETECTBOX &measurement) + { + DETECTBOX mean_pos = measurement; + DETECTBOX mean_vel; + for (int i = 0; i < 4; i++) mean_vel(i) = 0; + + KAL_MEAN mean; + for (int i = 0; i < 8; i++) { + if (i < 4) mean(i) = mean_pos(i); + else mean(i) = mean_vel(i - 4); + } + + KAL_MEAN std; + std(0) = 2 * _std_weight_position * measurement[3]; + std(1) = 2 * _std_weight_position * measurement[3]; + std(2) = 1e-2; + std(3) = 2 * _std_weight_position * measurement[3]; + std(4) = 10 * _std_weight_velocity * measurement[3]; + std(5) = 10 * _std_weight_velocity * measurement[3]; + std(6) = 1e-5; + std(7) = 10 * _std_weight_velocity * measurement[3]; + + KAL_MEAN tmp = std.array().square(); + KAL_COVA var = tmp.asDiagonal(); + return std::make_pair(mean, var); + } + + void KalmanFilter::predict(KAL_MEAN &mean, KAL_COVA &covariance) + { + //revise the data; + DETECTBOX std_pos; + std_pos << _std_weight_position * mean(3), + _std_weight_position * mean(3), + 1e-2, + _std_weight_position * mean(3); + DETECTBOX std_vel; + std_vel << _std_weight_velocity * mean(3), + _std_weight_velocity * mean(3), + 1e-5, + _std_weight_velocity * mean(3); + KAL_MEAN tmp; + tmp.block<1, 4>(0, 0) = std_pos; + tmp.block<1, 4>(0, 4) = std_vel; + tmp = tmp.array().square(); + KAL_COVA motion_cov = tmp.asDiagonal(); + KAL_MEAN mean1 = this->_motion_mat * mean.transpose(); + KAL_COVA covariance1 = this->_motion_mat * covariance *(_motion_mat.transpose()); + covariance1 += motion_cov; + + mean = mean1; + covariance = covariance1; + } + + KAL_HDATA KalmanFilter::project(const KAL_MEAN &mean, const KAL_COVA &covariance) + { + DETECTBOX std; + std << _std_weight_position * mean(3), _std_weight_position * mean(3), + 1e-1, _std_weight_position * mean(3); + KAL_HMEAN mean1 = _update_mat * mean.transpose(); + KAL_HCOVA covariance1 = _update_mat * covariance * (_update_mat.transpose()); + Eigen::Matrix diag = std.asDiagonal(); + diag = diag.array().square().matrix(); + covariance1 += diag; + // covariance1.diagonal() << diag; + return std::make_pair(mean1, covariance1); + } + + KAL_DATA + KalmanFilter::update( + const KAL_MEAN &mean, + const KAL_COVA &covariance, + const DETECTBOX &measurement) + { + KAL_HDATA pa = project(mean, covariance); + KAL_HMEAN projected_mean = pa.first; + KAL_HCOVA projected_cov = pa.second; + + //chol_factor, lower = + //scipy.linalg.cho_factor(projected_cov, lower=True, check_finite=False) + //kalmain_gain = + //scipy.linalg.cho_solve((cho_factor, lower), + //np.dot(covariance, self._upadte_mat.T).T, + //check_finite=False).T + Eigen::Matrix B = (covariance * (_update_mat.transpose())).transpose(); + Eigen::Matrix kalman_gain = (projected_cov.llt().solve(B)).transpose(); // eg.8x4 + Eigen::Matrix innovation = measurement - projected_mean; //eg.1x4 + auto tmp = innovation * (kalman_gain.transpose()); + KAL_MEAN new_mean = (mean.array() + tmp.array()).matrix(); + KAL_COVA new_covariance = covariance - kalman_gain * projected_cov*(kalman_gain.transpose()); + return std::make_pair(new_mean, new_covariance); + } + + Eigen::Matrix + KalmanFilter::gating_distance( + const KAL_MEAN &mean, + const KAL_COVA &covariance, + const std::vector &measurements, + bool only_position) + { + KAL_HDATA pa = this->project(mean, covariance); + if (only_position) { + printf("not implement!"); + exit(0); + } + KAL_HMEAN mean1 = pa.first; + KAL_HCOVA covariance1 = pa.second; + + // Eigen::Matrix d(size, 4); + DETECTBOXSS d(measurements.size(), 4); + int pos = 0; + for (DETECTBOX box : 
measurements) { + d.row(pos++) = box - mean1; + } + Eigen::Matrix factor = covariance1.llt().matrixL(); + Eigen::Matrix z = factor.triangularView().solve(d).transpose(); + auto zz = ((z.array())*(z.array())).matrix(); + auto square_maha = zz.colwise().sum(); + return square_maha; + } +} \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/lapjv.cpp b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/lapjv.cpp new file mode 100644 index 0000000000000000000000000000000000000000..169efd51f915adf8c666f3f4978f1cb7b2d3e1b3 --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/lapjv.cpp @@ -0,0 +1,343 @@ +#include +#include +#include + +#include "lapjv.h" + +/** Column-reduction and reduction transfer for a dense cost matrix. + */ +int_t _ccrrt_dense(const uint_t n, cost_t *cost[], + int_t *free_rows, int_t *x, int_t *y, cost_t *v) +{ + int_t n_free_rows; + boolean *unique; + + for (uint_t i = 0; i < n; i++) { + x[i] = -1; + v[i] = LARGE; + y[i] = 0; + } + for (uint_t i = 0; i < n; i++) { + for (uint_t j = 0; j < n; j++) { + const cost_t c = cost[i][j]; + if (c < v[j]) { + v[j] = c; + y[j] = i; + } + PRINTF("i=%d, j=%d, c[i,j]=%f, v[j]=%f y[j]=%d\n", i, j, c, v[j], y[j]); + } + } + PRINT_COST_ARRAY(v, n); + PRINT_INDEX_ARRAY(y, n); + NEW(unique, boolean, n); + memset(unique, TRUE, n); + { + int_t j = n; + do { + j--; + const int_t i = y[j]; + if (x[i] < 0) { + x[i] = j; + } + else { + unique[i] = FALSE; + y[j] = -1; + } + } while (j > 0); + } + n_free_rows = 0; + for (uint_t i = 0; i < n; i++) { + if (x[i] < 0) { + free_rows[n_free_rows++] = i; + } + else if (unique[i]) { + const int_t j = x[i]; + cost_t min = LARGE; + for (uint_t j2 = 0; j2 < n; j2++) { + if (j2 == (uint_t)j) { + continue; + } + const cost_t c = cost[i][j2] - v[j2]; + if (c < min) { + min = c; + } + } + PRINTF("v[%d] = %f - %f\n", j, v[j], min); + v[j] -= min; + } + } + FREE(unique); + return n_free_rows; +} + + +/** Augmenting row reduction for a dense cost matrix. 
+ */ +int_t _carr_dense( + const uint_t n, cost_t *cost[], + const uint_t n_free_rows, + int_t *free_rows, int_t *x, int_t *y, cost_t *v) +{ + uint_t current = 0; + int_t new_free_rows = 0; + uint_t rr_cnt = 0; + PRINT_INDEX_ARRAY(x, n); + PRINT_INDEX_ARRAY(y, n); + PRINT_COST_ARRAY(v, n); + PRINT_INDEX_ARRAY(free_rows, n_free_rows); + while (current < n_free_rows) { + int_t i0; + int_t j1, j2; + cost_t v1, v2, v1_new; + boolean v1_lowers; + + rr_cnt++; + PRINTF("current = %d rr_cnt = %d\n", current, rr_cnt); + const int_t free_i = free_rows[current++]; + j1 = 0; + v1 = cost[free_i][0] - v[0]; + j2 = -1; + v2 = LARGE; + for (uint_t j = 1; j < n; j++) { + PRINTF("%d = %f %d = %f\n", j1, v1, j2, v2); + const cost_t c = cost[free_i][j] - v[j]; + if (c < v2) { + if (c >= v1) { + v2 = c; + j2 = j; + } + else { + v2 = v1; + v1 = c; + j2 = j1; + j1 = j; + } + } + } + i0 = y[j1]; + v1_new = v[j1] - (v2 - v1); + v1_lowers = v1_new < v[j1]; + PRINTF("%d %d 1=%d,%f 2=%d,%f v1'=%f(%d,%g) \n", free_i, i0, j1, v1, j2, v2, v1_new, v1_lowers, v[j1] - v1_new); + if (rr_cnt < current * n) { + if (v1_lowers) { + v[j1] = v1_new; + } + else if (i0 >= 0 && j2 >= 0) { + j1 = j2; + i0 = y[j2]; + } + if (i0 >= 0) { + if (v1_lowers) { + free_rows[--current] = i0; + } + else { + free_rows[new_free_rows++] = i0; + } + } + } + else { + PRINTF("rr_cnt=%d >= %d (current=%d * n=%d)\n", rr_cnt, current * n, current, n); + if (i0 >= 0) { + free_rows[new_free_rows++] = i0; + } + } + x[free_i] = j1; + y[j1] = free_i; + } + return new_free_rows; +} + + +/** Find columns with minimum d[j] and put them on the SCAN list. + */ +uint_t _find_dense(const uint_t n, uint_t lo, cost_t *d, int_t *cols, int_t *y) +{ + uint_t hi = lo + 1; + cost_t mind = d[cols[lo]]; + for (uint_t k = hi; k < n; k++) { + int_t j = cols[k]; + if (d[j] <= mind) { + if (d[j] < mind) { + hi = lo; + mind = d[j]; + } + cols[k] = cols[hi]; + cols[hi++] = j; + } + } + return hi; +} + + +// Scan all columns in TODO starting from arbitrary column in SCAN +// and try to decrease d of the TODO columns using the SCAN column. +int_t _scan_dense(const uint_t n, cost_t *cost[], + uint_t *plo, uint_t*phi, + cost_t *d, int_t *cols, int_t *pred, + int_t *y, cost_t *v) +{ + uint_t lo = *plo; + uint_t hi = *phi; + cost_t h, cred_ij; + + while (lo != hi) { + int_t j = cols[lo++]; + const int_t i = y[j]; + const cost_t mind = d[j]; + h = cost[i][j] - v[j] - mind; + PRINTF("i=%d j=%d h=%f\n", i, j, h); + // For all columns in TODO + for (uint_t k = hi; k < n; k++) { + j = cols[k]; + cred_ij = cost[i][j] - v[j] - h; + if (cred_ij < d[j]) { + d[j] = cred_ij; + pred[j] = i; + if (cred_ij == mind) { + if (y[j] < 0) { + return j; + } + cols[k] = cols[hi]; + cols[hi++] = j; + } + } + } + } + *plo = lo; + *phi = hi; + return -1; +} + + +/** Single iteration of modified Dijkstra shortest path algorithm as explained in the JV paper. + * + * This is a dense matrix version. + * + * \return The closest free column index. + */ +int_t find_path_dense( + const uint_t n, cost_t *cost[], + const int_t start_i, + int_t *y, cost_t *v, + int_t *pred) +{ + uint_t lo = 0, hi = 0; + int_t final_j = -1; + uint_t n_ready = 0; + int_t *cols; + cost_t *d; + + NEW(cols, int_t, n); + NEW(d, cost_t, n); + + for (uint_t i = 0; i < n; i++) { + cols[i] = i; + pred[i] = start_i; + d[i] = cost[start_i][i] - v[i]; + } + PRINT_COST_ARRAY(d, n); + while (final_j == -1) { + // No columns left on the SCAN list. 
+ if (lo == hi) { + PRINTF("%d..%d -> find\n", lo, hi); + n_ready = lo; + hi = _find_dense(n, lo, d, cols, y); + PRINTF("check %d..%d\n", lo, hi); + PRINT_INDEX_ARRAY(cols, n); + for (uint_t k = lo; k < hi; k++) { + const int_t j = cols[k]; + if (y[j] < 0) { + final_j = j; + } + } + } + if (final_j == -1) { + PRINTF("%d..%d -> scan\n", lo, hi); + final_j = _scan_dense( + n, cost, &lo, &hi, d, cols, pred, y, v); + PRINT_COST_ARRAY(d, n); + PRINT_INDEX_ARRAY(cols, n); + PRINT_INDEX_ARRAY(pred, n); + } + } + + PRINTF("found final_j=%d\n", final_j); + PRINT_INDEX_ARRAY(cols, n); + { + const cost_t mind = d[cols[lo]]; + for (uint_t k = 0; k < n_ready; k++) { + const int_t j = cols[k]; + v[j] += d[j] - mind; + } + } + + FREE(cols); + FREE(d); + + return final_j; +} + + +/** Augment for a dense cost matrix. + */ +int_t _ca_dense( + const uint_t n, cost_t *cost[], + const uint_t n_free_rows, + int_t *free_rows, int_t *x, int_t *y, cost_t *v) +{ + int_t *pred; + + NEW(pred, int_t, n); + + for (int_t *pfree_i = free_rows; pfree_i < free_rows + n_free_rows; pfree_i++) { + int_t i = -1, j; + uint_t k = 0; + + PRINTF("looking at free_i=%d\n", *pfree_i); + j = find_path_dense(n, cost, *pfree_i, y, v, pred); + ASSERT(j >= 0); + ASSERT(j < n); + while (i != *pfree_i) { + PRINTF("augment %d\n", j); + PRINT_INDEX_ARRAY(pred, n); + i = pred[j]; + PRINTF("y[%d]=%d -> %d\n", j, y[j], i); + y[j] = i; + PRINT_INDEX_ARRAY(x, n); + SWAP_INDICES(j, x[i]); + k++; + if (k >= n) { + ASSERT(FALSE); + } + } + } + FREE(pred); + return 0; +} + + +/** Solve dense sparse LAP. + */ +int lapjv_internal( + const uint_t n, cost_t *cost[], + int_t *x, int_t *y) +{ + int ret; + int_t *free_rows; + cost_t *v; + + NEW(free_rows, int_t, n); + NEW(v, cost_t, n); + ret = _ccrrt_dense(n, cost, free_rows, x, y, v); + int i = 0; + while (ret > 0 && i < 2) { + ret = _carr_dense(n, cost, ret, free_rows, x, y, v); + i++; + } + if (ret > 0) { + ret = _ca_dense(n, cost, ret, free_rows, x, y, v); + } + FREE(v); + FREE(free_rows); + return ret; +} \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/utils.cpp b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/utils.cpp new file mode 100644 index 0000000000000000000000000000000000000000..4aa0305cd6cf025496528ef9ff49075209fe9e8c --- /dev/null +++ b/tracking/docker-build-context/byte_track/deploy/ncnn/cpp/src/utils.cpp @@ -0,0 +1,429 @@ +#include "BYTETracker.h" +#include "lapjv.h" + +vector BYTETracker::joint_stracks(vector &tlista, vector &tlistb) +{ + map exists; + vector res; + for (int i = 0; i < tlista.size(); i++) + { + exists.insert(pair(tlista[i]->track_id, 1)); + res.push_back(tlista[i]); + } + for (int i = 0; i < tlistb.size(); i++) + { + int tid = tlistb[i].track_id; + if (!exists[tid] || exists.count(tid) == 0) + { + exists[tid] = 1; + res.push_back(&tlistb[i]); + } + } + return res; +} + +vector BYTETracker::joint_stracks(vector &tlista, vector &tlistb) +{ + map exists; + vector res; + for (int i = 0; i < tlista.size(); i++) + { + exists.insert(pair(tlista[i].track_id, 1)); + res.push_back(tlista[i]); + } + for (int i = 0; i < tlistb.size(); i++) + { + int tid = tlistb[i].track_id; + if (!exists[tid] || exists.count(tid) == 0) + { + exists[tid] = 1; + res.push_back(tlistb[i]); + } + } + return res; +} + +vector BYTETracker::sub_stracks(vector &tlista, vector &tlistb) +{ + map stracks; + for (int i = 0; i < tlista.size(); i++) + { + stracks.insert(pair(tlista[i].track_id, tlista[i])); + } + for (int i = 0; i < tlistb.size(); 
i++) + { + int tid = tlistb[i].track_id; + if (stracks.count(tid) != 0) + { + stracks.erase(tid); + } + } + + vector res; + std::map::iterator it; + for (it = stracks.begin(); it != stracks.end(); ++it) + { + res.push_back(it->second); + } + + return res; +} + +void BYTETracker::remove_duplicate_stracks(vector &resa, vector &resb, vector &stracksa, vector &stracksb) +{ + vector > pdist = iou_distance(stracksa, stracksb); + vector > pairs; + for (int i = 0; i < pdist.size(); i++) + { + for (int j = 0; j < pdist[i].size(); j++) + { + if (pdist[i][j] < 0.15) + { + pairs.push_back(pair(i, j)); + } + } + } + + vector dupa, dupb; + for (int i = 0; i < pairs.size(); i++) + { + int timep = stracksa[pairs[i].first].frame_id - stracksa[pairs[i].first].start_frame; + int timeq = stracksb[pairs[i].second].frame_id - stracksb[pairs[i].second].start_frame; + if (timep > timeq) + dupb.push_back(pairs[i].second); + else + dupa.push_back(pairs[i].first); + } + + for (int i = 0; i < stracksa.size(); i++) + { + vector::iterator iter = find(dupa.begin(), dupa.end(), i); + if (iter == dupa.end()) + { + resa.push_back(stracksa[i]); + } + } + + for (int i = 0; i < stracksb.size(); i++) + { + vector::iterator iter = find(dupb.begin(), dupb.end(), i); + if (iter == dupb.end()) + { + resb.push_back(stracksb[i]); + } + } +} + +void BYTETracker::linear_assignment(vector > &cost_matrix, int cost_matrix_size, int cost_matrix_size_size, float thresh, + vector > &matches, vector &unmatched_a, vector &unmatched_b) +{ + if (cost_matrix.size() == 0) + { + for (int i = 0; i < cost_matrix_size; i++) + { + unmatched_a.push_back(i); + } + for (int i = 0; i < cost_matrix_size_size; i++) + { + unmatched_b.push_back(i); + } + return; + } + + vector rowsol; vector colsol; + float c = lapjv(cost_matrix, rowsol, colsol, true, thresh); + for (int i = 0; i < rowsol.size(); i++) + { + if (rowsol[i] >= 0) + { + vector match; + match.push_back(i); + match.push_back(rowsol[i]); + matches.push_back(match); + } + else + { + unmatched_a.push_back(i); + } + } + + for (int i = 0; i < colsol.size(); i++) + { + if (colsol[i] < 0) + { + unmatched_b.push_back(i); + } + } +} + +vector > BYTETracker::ious(vector > &atlbrs, vector > &btlbrs) +{ + vector > ious; + if (atlbrs.size()*btlbrs.size() == 0) + return ious; + + ious.resize(atlbrs.size()); + for (int i = 0; i < ious.size(); i++) + { + ious[i].resize(btlbrs.size()); + } + + //bbox_ious + for (int k = 0; k < btlbrs.size(); k++) + { + vector ious_tmp; + float box_area = (btlbrs[k][2] - btlbrs[k][0] + 1)*(btlbrs[k][3] - btlbrs[k][1] + 1); + for (int n = 0; n < atlbrs.size(); n++) + { + float iw = min(atlbrs[n][2], btlbrs[k][2]) - max(atlbrs[n][0], btlbrs[k][0]) + 1; + if (iw > 0) + { + float ih = min(atlbrs[n][3], btlbrs[k][3]) - max(atlbrs[n][1], btlbrs[k][1]) + 1; + if(ih > 0) + { + float ua = (atlbrs[n][2] - atlbrs[n][0] + 1)*(atlbrs[n][3] - atlbrs[n][1] + 1) + box_area - iw * ih; + ious[n][k] = iw * ih / ua; + } + else + { + ious[n][k] = 0.0; + } + } + else + { + ious[n][k] = 0.0; + } + } + } + + return ious; +} + +vector > BYTETracker::iou_distance(vector &atracks, vector &btracks, int &dist_size, int &dist_size_size) +{ + vector > cost_matrix; + if (atracks.size() * btracks.size() == 0) + { + dist_size = atracks.size(); + dist_size_size = btracks.size(); + return cost_matrix; + } + vector > atlbrs, btlbrs; + for (int i = 0; i < atracks.size(); i++) + { + atlbrs.push_back(atracks[i]->tlbr); + } + for (int i = 0; i < btracks.size(); i++) + { + btlbrs.push_back(btracks[i].tlbr); + } + + 
dist_size = atracks.size(); + dist_size_size = btracks.size(); + + vector > _ious = ious(atlbrs, btlbrs); + + for (int i = 0; i < _ious.size();i++) + { + vector _iou; + for (int j = 0; j < _ious[i].size(); j++) + { + _iou.push_back(1 - _ious[i][j]); + } + cost_matrix.push_back(_iou); + } + + return cost_matrix; +} + +vector > BYTETracker::iou_distance(vector &atracks, vector &btracks) +{ + vector > atlbrs, btlbrs; + for (int i = 0; i < atracks.size(); i++) + { + atlbrs.push_back(atracks[i].tlbr); + } + for (int i = 0; i < btracks.size(); i++) + { + btlbrs.push_back(btracks[i].tlbr); + } + + vector > _ious = ious(atlbrs, btlbrs); + vector > cost_matrix; + for (int i = 0; i < _ious.size(); i++) + { + vector _iou; + for (int j = 0; j < _ious[i].size(); j++) + { + _iou.push_back(1 - _ious[i][j]); + } + cost_matrix.push_back(_iou); + } + + return cost_matrix; +} + +double BYTETracker::lapjv(const vector > &cost, vector &rowsol, vector &colsol, + bool extend_cost, float cost_limit, bool return_cost) +{ + vector > cost_c; + cost_c.assign(cost.begin(), cost.end()); + + vector > cost_c_extended; + + int n_rows = cost.size(); + int n_cols = cost[0].size(); + rowsol.resize(n_rows); + colsol.resize(n_cols); + + int n = 0; + if (n_rows == n_cols) + { + n = n_rows; + } + else + { + if (!extend_cost) + { + cout << "set extend_cost=True" << endl; + system("pause"); + exit(0); + } + } + + if (extend_cost || cost_limit < LONG_MAX) + { + n = n_rows + n_cols; + cost_c_extended.resize(n); + for (int i = 0; i < cost_c_extended.size(); i++) + cost_c_extended[i].resize(n); + + if (cost_limit < LONG_MAX) + { + for (int i = 0; i < cost_c_extended.size(); i++) + { + for (int j = 0; j < cost_c_extended[i].size(); j++) + { + cost_c_extended[i][j] = cost_limit / 2.0; + } + } + } + else + { + float cost_max = -1; + for (int i = 0; i < cost_c.size(); i++) + { + for (int j = 0; j < cost_c[i].size(); j++) + { + if (cost_c[i][j] > cost_max) + cost_max = cost_c[i][j]; + } + } + for (int i = 0; i < cost_c_extended.size(); i++) + { + for (int j = 0; j < cost_c_extended[i].size(); j++) + { + cost_c_extended[i][j] = cost_max + 1; + } + } + } + + for (int i = n_rows; i < cost_c_extended.size(); i++) + { + for (int j = n_cols; j < cost_c_extended[i].size(); j++) + { + cost_c_extended[i][j] = 0; + } + } + for (int i = 0; i < n_rows; i++) + { + for (int j = 0; j < n_cols; j++) + { + cost_c_extended[i][j] = cost_c[i][j]; + } + } + + cost_c.clear(); + cost_c.assign(cost_c_extended.begin(), cost_c_extended.end()); + } + + double **cost_ptr; + cost_ptr = new double *[sizeof(double *) * n]; + for (int i = 0; i < n; i++) + cost_ptr[i] = new double[sizeof(double) * n]; + + for (int i = 0; i < n; i++) + { + for (int j = 0; j < n; j++) + { + cost_ptr[i][j] = cost_c[i][j]; + } + } + + int* x_c = new int[sizeof(int) * n]; + int *y_c = new int[sizeof(int) * n]; + + int ret = lapjv_internal(n, cost_ptr, x_c, y_c); + if (ret != 0) + { + cout << "Calculate Wrong!" 
<< endl;
+        system("pause");
+        exit(0);
+    }
+
+    double opt = 0.0;
+
+    if (n != n_rows)
+    {
+        for (int i = 0; i < n; i++)
+        {
+            if (x_c[i] >= n_cols)
+                x_c[i] = -1;
+            if (y_c[i] >= n_rows)
+                y_c[i] = -1;
+        }
+        for (int i = 0; i < n_rows; i++)
+        {
+            rowsol[i] = x_c[i];
+        }
+        for (int i = 0; i < n_cols; i++)
+        {
+            colsol[i] = y_c[i];
+        }
+
+        if (return_cost)
+        {
+            for (int i = 0; i < rowsol.size(); i++)
+            {
+                if (rowsol[i] != -1)
+                {
+                    //cout << i << "\t" << rowsol[i] << "\t" << cost_ptr[i][rowsol[i]] << endl;
+                    opt += cost_ptr[i][rowsol[i]];
+                }
+            }
+        }
+    }
+    else if (return_cost)
+    {
+        for (int i = 0; i < rowsol.size(); i++)
+        {
+            opt += cost_ptr[i][rowsol[i]];
+        }
+    }
+
+    for (int i = 0; i < n; i++)
+    {
+        delete[] cost_ptr[i];
+    }
+    delete[] cost_ptr;
+    delete[] x_c;
+    delete[] y_c;
+
+    return opt;
+}
+
+Scalar BYTETracker::get_color(int idx)
+{
+    idx += 3;
+    return Scalar(37 * idx % 255, 17 * idx % 255, 29 * idx % 255);
+}
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/exps/default/nano.py b/tracking/docker-build-context/byte_track/exps/default/nano.py
new file mode 100644
index 0000000000000000000000000000000000000000..a622830e2ebb92e10cf56efdfdaf4243c877d6de
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/exps/default/nano.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) Megvii, Inc. and its affiliates.
+
+import os
+import torch.nn as nn
+
+from yolox.exp import Exp as MyExp
+
+
+class Exp(MyExp):
+    def __init__(self):
+        super(Exp, self).__init__()
+        self.depth = 0.33
+        self.width = 0.25
+        self.scale = (0.5, 1.5)
+        self.random_size = (10, 20)
+        self.test_size = (416, 416)
+        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
+        self.enable_mixup = False
+
+    def get_model(self, sublinear=False):
+
+        def init_yolo(M):
+            for m in M.modules():
+                if isinstance(m, nn.BatchNorm2d):
+                    m.eps = 1e-3
+                    m.momentum = 0.03
+        if "model" not in self.__dict__:
+            from yolox.models import YOLOX, YOLOPAFPN, YOLOXHead
+            in_channels = [256, 512, 1024]
+            # The NANO model uses depthwise = True, which is the main difference.
+            backbone = YOLOPAFPN(self.depth, self.width, in_channels=in_channels, depthwise=True)
+            head = YOLOXHead(self.num_classes, self.width, in_channels=in_channels, depthwise=True)
+            self.model = YOLOX(backbone, head)
+
+        self.model.apply(init_yolo)
+        self.model.head.initialize_biases(1e-2)
+        return self.model
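All of the exp files below follow the same pattern as nano.py above: an `Exp` subclass that only overrides hyperparameters (depth/width multipliers, input size, augmentation switches) and lazily builds the model in `get_model()`. A minimal consumption sketch, assuming the `yolox` package is installed and this exp module is importable (the import path and dummy input are illustrative, not prescribed by the repo):

```python
import torch

from exps.default.nano import Exp  # illustrative import path

exp = Exp()
model = exp.get_model()
model.eval()

# Dummy forward pass at the exp's test resolution: (batch, channels, H, W).
x = torch.randn(1, 3, *exp.test_size)
with torch.no_grad():
    outputs = model(x)
```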
diff --git a/tracking/docker-build-context/byte_track/exps/default/yolov3.py b/tracking/docker-build-context/byte_track/exps/default/yolov3.py
new file mode 100644
index 0000000000000000000000000000000000000000..85d59963d7ccb5868bf010facc9eedc1821f85ea
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/exps/default/yolov3.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) Megvii, Inc. and its affiliates.
+
+import os
+import torch
+import torch.nn as nn
+
+from yolox.exp import Exp as MyExp
+
+
+class Exp(MyExp):
+    def __init__(self):
+        super(Exp, self).__init__()
+        self.depth = 1.0
+        self.width = 1.0
+        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
+
+    def get_model(self, sublinear=False):
+        def init_yolo(M):
+            for m in M.modules():
+                if isinstance(m, nn.BatchNorm2d):
+                    m.eps = 1e-3
+                    m.momentum = 0.03
+        if "model" not in self.__dict__:
+            from yolox.models import YOLOX, YOLOFPN, YOLOXHead
+            backbone = YOLOFPN()
+            head = YOLOXHead(self.num_classes, self.width, in_channels=[128, 256, 512], act="lrelu")
+            self.model = YOLOX(backbone, head)
+        self.model.apply(init_yolo)
+        self.model.head.initialize_biases(1e-2)
+
+        return self.model
+
+    def get_data_loader(self, batch_size, is_distributed, no_aug=False):
+        from data.datasets.cocodataset import COCODataset
+        from data.datasets.mosaicdetection import MosaicDetection
+        from data.datasets.data_augment import TrainTransform
+        from data.datasets.dataloading import YoloBatchSampler, DataLoader, InfiniteSampler
+        import torch.distributed as dist
+
+        dataset = COCODataset(
+            data_dir='data/COCO/',
+            json_file=self.train_ann,
+            img_size=self.input_size,
+            preproc=TrainTransform(
+                rgb_means=(0.485, 0.456, 0.406),
+                std=(0.229, 0.224, 0.225),
+                max_labels=50
+            ),
+        )
+
+        dataset = MosaicDetection(
+            dataset,
+            mosaic=not no_aug,
+            img_size=self.input_size,
+            preproc=TrainTransform(
+                rgb_means=(0.485, 0.456, 0.406),
+                std=(0.229, 0.224, 0.225),
+                max_labels=120
+            ),
+            degrees=self.degrees,
+            translate=self.translate,
+            scale=self.scale,
+            shear=self.shear,
+            perspective=self.perspective,
+        )
+
+        self.dataset = dataset
+
+        if is_distributed:
+            batch_size = batch_size // dist.get_world_size()
+            sampler = InfiniteSampler(len(self.dataset), seed=self.seed if self.seed else 0)
+        else:
+            sampler = torch.utils.data.RandomSampler(self.dataset)
+
+        batch_sampler = YoloBatchSampler(
+            sampler=sampler,
+            batch_size=batch_size,
+            drop_last=False,
+            input_dimension=self.input_size,
+            mosaic=not no_aug
+        )
+
+        dataloader_kwargs = {"num_workers": self.data_num_workers, "pin_memory": True}
+        dataloader_kwargs["batch_sampler"] = batch_sampler
+        train_loader = DataLoader(self.dataset, **dataloader_kwargs)
+
+        return train_loader
diff --git a/tracking/docker-build-context/byte_track/exps/default/yolox_l.py b/tracking/docker-build-context/byte_track/exps/default/yolox_l.py
new file mode 100644
index 0000000000000000000000000000000000000000..50833ca38c51fe9ac5e327d7c1c0561fb62249aa
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/exps/default/yolox_l.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) Megvii, Inc. and its affiliates.
+
+import os
+
+from yolox.exp import Exp as MyExp
+
+
+class Exp(MyExp):
+    def __init__(self):
+        super(Exp, self).__init__()
+        self.depth = 1.0
+        self.width = 1.0
+        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
diff --git a/tracking/docker-build-context/byte_track/exps/default/yolox_m.py b/tracking/docker-build-context/byte_track/exps/default/yolox_m.py
new file mode 100644
index 0000000000000000000000000000000000000000..9666a31177b9cc1c94978f9867aaceac8ddebce2
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/exps/default/yolox_m.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) Megvii, Inc. and its affiliates.
+
+import os
+
+from yolox.exp import Exp as MyExp
+
+
+class Exp(MyExp):
+    def __init__(self):
+        super(Exp, self).__init__()
+        self.depth = 0.67
+        self.width = 0.75
+        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
diff --git a/tracking/docker-build-context/byte_track/exps/default/yolox_s.py b/tracking/docker-build-context/byte_track/exps/default/yolox_s.py
new file mode 100644
index 0000000000000000000000000000000000000000..abb6a8bbbe4fd1c6aff71596621aaeec2a6a15d8
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/exps/default/yolox_s.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) Megvii, Inc. and its affiliates.
+
+import os
+
+from yolox.exp import Exp as MyExp
+
+
+class Exp(MyExp):
+    def __init__(self):
+        super(Exp, self).__init__()
+        self.depth = 0.33
+        self.width = 0.50
+        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
diff --git a/tracking/docker-build-context/byte_track/exps/default/yolox_tiny.py b/tracking/docker-build-context/byte_track/exps/default/yolox_tiny.py
new file mode 100644
index 0000000000000000000000000000000000000000..9ea66048cbf68c3b39712dd84f92b800adea413b
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/exps/default/yolox_tiny.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) Megvii, Inc. and its affiliates.
+
+import os
+
+from yolox.exp import Exp as MyExp
+
+
+class Exp(MyExp):
+    def __init__(self):
+        super(Exp, self).__init__()
+        self.depth = 0.33
+        self.width = 0.375
+        self.scale = (0.5, 1.5)
+        self.random_size = (10, 20)
+        self.test_size = (416, 416)
+        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
+        self.enable_mixup = False
diff --git a/tracking/docker-build-context/byte_track/exps/default/yolox_x.py b/tracking/docker-build-context/byte_track/exps/default/yolox_x.py
new file mode 100644
index 0000000000000000000000000000000000000000..ac498a1fb91f597e9362c2b73a9a002cf31445fc
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/exps/default/yolox_x.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) Megvii, Inc. and its affiliates.
+ +import os + +from yolox.exp import Exp as MyExp + + +class Exp(MyExp): + def __init__(self): + super(Exp, self).__init__() + self.depth = 1.33 + self.width = 1.25 + self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0] diff --git a/tracking/docker-build-context/byte_track/exps/example/mot/yolox_l_mix_det.py b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_l_mix_det.py new file mode 100644 index 0000000000000000000000000000000000000000..e5f5944102b772b7ae4ce4a880ba056fd3118ce9 --- /dev/null +++ b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_l_mix_det.py @@ -0,0 +1,138 @@ +# encoding: utf-8 +import os +import random +import torch +import torch.nn as nn +import torch.distributed as dist + +from yolox.exp import Exp as MyExp +from yolox.data import get_yolox_datadir + +class Exp(MyExp): + def __init__(self): + super(Exp, self).__init__() + self.num_classes = 1 + self.depth = 1.0 + self.width = 1.0 + self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0] + self.train_ann = "train.json" + self.val_ann = "train.json" + self.input_size = (800, 1440) + self.test_size = (800, 1440) + self.random_size = (18, 32) + self.max_epoch = 80 + self.print_interval = 20 + self.eval_interval = 5 + self.test_conf = 0.001 + self.nmsthre = 0.7 + self.no_aug_epochs = 10 + self.basic_lr_per_img = 0.001 / 64.0 + self.warmup_epochs = 1 + + def get_data_loader(self, batch_size, is_distributed, no_aug=False): + from yolox.data import ( + MOTDataset, + TrainTransform, + YoloBatchSampler, + DataLoader, + InfiniteSampler, + MosaicDetection, + ) + + dataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "mix_det"), + json_file=self.train_ann, + name='', + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=500, + ), + ) + + dataset = MosaicDetection( + dataset, + mosaic=not no_aug, + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=1000, + ), + degrees=self.degrees, + translate=self.translate, + scale=self.scale, + shear=self.shear, + perspective=self.perspective, + enable_mixup=self.enable_mixup, + ) + + self.dataset = dataset + + if is_distributed: + batch_size = batch_size // dist.get_world_size() + + sampler = InfiniteSampler( + len(self.dataset), seed=self.seed if self.seed else 0 + ) + + batch_sampler = YoloBatchSampler( + sampler=sampler, + batch_size=batch_size, + drop_last=False, + input_dimension=self.input_size, + mosaic=not no_aug, + ) + + dataloader_kwargs = {"num_workers": self.data_num_workers, "pin_memory": True} + dataloader_kwargs["batch_sampler"] = batch_sampler + train_loader = DataLoader(self.dataset, **dataloader_kwargs) + + return train_loader + + def get_eval_loader(self, batch_size, is_distributed, testdev=False): + from yolox.data import MOTDataset, ValTransform + + valdataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "mot"), + json_file=self.val_ann, + img_size=self.test_size, + name='train', + preproc=ValTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + ), + ) + + if is_distributed: + batch_size = batch_size // dist.get_world_size() + sampler = torch.utils.data.distributed.DistributedSampler( + valdataset, shuffle=False + ) + else: + sampler = torch.utils.data.SequentialSampler(valdataset) + + dataloader_kwargs = { + "num_workers": self.data_num_workers, + "pin_memory": True, + "sampler": sampler, + } + 
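+        # when distributed, batch_size was divided by the world size above, so this is the per-process batch size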
dataloader_kwargs["batch_size"] = batch_size + val_loader = torch.utils.data.DataLoader(valdataset, **dataloader_kwargs) + + return val_loader + + def get_evaluator(self, batch_size, is_distributed, testdev=False): + from yolox.evaluators import COCOEvaluator + + val_loader = self.get_eval_loader(batch_size, is_distributed, testdev=testdev) + evaluator = COCOEvaluator( + dataloader=val_loader, + img_size=self.test_size, + confthre=self.test_conf, + nmsthre=self.nmsthre, + num_classes=self.num_classes, + testdev=testdev, + ) + return evaluator diff --git a/tracking/docker-build-context/byte_track/exps/example/mot/yolox_m_mix_det.py b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_m_mix_det.py new file mode 100644 index 0000000000000000000000000000000000000000..fccb14597eeacdab5d393ae58a2c31bf17d2f2b8 --- /dev/null +++ b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_m_mix_det.py @@ -0,0 +1,138 @@ +# encoding: utf-8 +import os +import random +import torch +import torch.nn as nn +import torch.distributed as dist + +from yolox.exp import Exp as MyExp +from yolox.data import get_yolox_datadir + +class Exp(MyExp): + def __init__(self): + super(Exp, self).__init__() + self.num_classes = 1 + self.depth = 0.67 + self.width = 0.75 + self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0] + self.train_ann = "train.json" + self.val_ann = "train.json" + self.input_size = (800, 1440) + self.test_size = (800, 1440) + self.random_size = (18, 32) + self.max_epoch = 80 + self.print_interval = 20 + self.eval_interval = 5 + self.test_conf = 0.001 + self.nmsthre = 0.7 + self.no_aug_epochs = 10 + self.basic_lr_per_img = 0.001 / 64.0 + self.warmup_epochs = 1 + + def get_data_loader(self, batch_size, is_distributed, no_aug=False): + from yolox.data import ( + MOTDataset, + TrainTransform, + YoloBatchSampler, + DataLoader, + InfiniteSampler, + MosaicDetection, + ) + + dataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "mix_det"), + json_file=self.train_ann, + name='', + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=500, + ), + ) + + dataset = MosaicDetection( + dataset, + mosaic=not no_aug, + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=1000, + ), + degrees=self.degrees, + translate=self.translate, + scale=self.scale, + shear=self.shear, + perspective=self.perspective, + enable_mixup=self.enable_mixup, + ) + + self.dataset = dataset + + if is_distributed: + batch_size = batch_size // dist.get_world_size() + + sampler = InfiniteSampler( + len(self.dataset), seed=self.seed if self.seed else 0 + ) + + batch_sampler = YoloBatchSampler( + sampler=sampler, + batch_size=batch_size, + drop_last=False, + input_dimension=self.input_size, + mosaic=not no_aug, + ) + + dataloader_kwargs = {"num_workers": self.data_num_workers, "pin_memory": True} + dataloader_kwargs["batch_sampler"] = batch_sampler + train_loader = DataLoader(self.dataset, **dataloader_kwargs) + + return train_loader + + def get_eval_loader(self, batch_size, is_distributed, testdev=False): + from yolox.data import MOTDataset, ValTransform + + valdataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "mot"), + json_file=self.val_ann, + img_size=self.test_size, + name='train', + preproc=ValTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + ), + ) + + if is_distributed: + batch_size = 
batch_size // dist.get_world_size() + sampler = torch.utils.data.distributed.DistributedSampler( + valdataset, shuffle=False + ) + else: + sampler = torch.utils.data.SequentialSampler(valdataset) + + dataloader_kwargs = { + "num_workers": self.data_num_workers, + "pin_memory": True, + "sampler": sampler, + } + dataloader_kwargs["batch_size"] = batch_size + val_loader = torch.utils.data.DataLoader(valdataset, **dataloader_kwargs) + + return val_loader + + def get_evaluator(self, batch_size, is_distributed, testdev=False): + from yolox.evaluators import COCOEvaluator + + val_loader = self.get_eval_loader(batch_size, is_distributed, testdev=testdev) + evaluator = COCOEvaluator( + dataloader=val_loader, + img_size=self.test_size, + confthre=self.test_conf, + nmsthre=self.nmsthre, + num_classes=self.num_classes, + testdev=testdev, + ) + return evaluator diff --git a/tracking/docker-build-context/byte_track/exps/example/mot/yolox_s_mix_det.py b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_s_mix_det.py new file mode 100644 index 0000000000000000000000000000000000000000..95f1810872b9cefd4a4d5c21c45df7b9747a24aa --- /dev/null +++ b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_s_mix_det.py @@ -0,0 +1,138 @@ +# encoding: utf-8 +import os +import random +import torch +import torch.nn as nn +import torch.distributed as dist + +from yolox.exp import Exp as MyExp +from yolox.data import get_yolox_datadir + +class Exp(MyExp): + def __init__(self): + super(Exp, self).__init__() + self.num_classes = 1 + self.depth = 0.33 + self.width = 0.50 + self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0] + self.train_ann = "train.json" + self.val_ann = "train.json" + self.input_size = (608, 1088) + self.test_size = (608, 1088) + self.random_size = (12, 26) + self.max_epoch = 80 + self.print_interval = 20 + self.eval_interval = 5 + self.test_conf = 0.001 + self.nmsthre = 0.7 + self.no_aug_epochs = 10 + self.basic_lr_per_img = 0.001 / 64.0 + self.warmup_epochs = 1 + + def get_data_loader(self, batch_size, is_distributed, no_aug=False): + from yolox.data import ( + MOTDataset, + TrainTransform, + YoloBatchSampler, + DataLoader, + InfiniteSampler, + MosaicDetection, + ) + + dataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "mix_det"), + json_file=self.train_ann, + name='', + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=500, + ), + ) + + dataset = MosaicDetection( + dataset, + mosaic=not no_aug, + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=1000, + ), + degrees=self.degrees, + translate=self.translate, + scale=self.scale, + shear=self.shear, + perspective=self.perspective, + enable_mixup=self.enable_mixup, + ) + + self.dataset = dataset + + if is_distributed: + batch_size = batch_size // dist.get_world_size() + + sampler = InfiniteSampler( + len(self.dataset), seed=self.seed if self.seed else 0 + ) + + batch_sampler = YoloBatchSampler( + sampler=sampler, + batch_size=batch_size, + drop_last=False, + input_dimension=self.input_size, + mosaic=not no_aug, + ) + + dataloader_kwargs = {"num_workers": self.data_num_workers, "pin_memory": True} + dataloader_kwargs["batch_sampler"] = batch_sampler + train_loader = DataLoader(self.dataset, **dataloader_kwargs) + + return train_loader + + def get_eval_loader(self, batch_size, is_distributed, testdev=False): + from yolox.data import 
MOTDataset, ValTransform + + valdataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "mot"), + json_file=self.val_ann, + img_size=self.test_size, + name='train', + preproc=ValTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + ), + ) + + if is_distributed: + batch_size = batch_size // dist.get_world_size() + sampler = torch.utils.data.distributed.DistributedSampler( + valdataset, shuffle=False + ) + else: + sampler = torch.utils.data.SequentialSampler(valdataset) + + dataloader_kwargs = { + "num_workers": self.data_num_workers, + "pin_memory": True, + "sampler": sampler, + } + dataloader_kwargs["batch_size"] = batch_size + val_loader = torch.utils.data.DataLoader(valdataset, **dataloader_kwargs) + + return val_loader + + def get_evaluator(self, batch_size, is_distributed, testdev=False): + from yolox.evaluators import COCOEvaluator + + val_loader = self.get_eval_loader(batch_size, is_distributed, testdev=testdev) + evaluator = COCOEvaluator( + dataloader=val_loader, + img_size=self.test_size, + confthre=self.test_conf, + nmsthre=self.nmsthre, + num_classes=self.num_classes, + testdev=testdev, + ) + return evaluator diff --git a/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_ablation.py b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_ablation.py new file mode 100644 index 0000000000000000000000000000000000000000..6afb771555419b1166adfdce8489303ae912c9fc --- /dev/null +++ b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_ablation.py @@ -0,0 +1,138 @@ +# encoding: utf-8 +import os +import random +import torch +import torch.nn as nn +import torch.distributed as dist + +from yolox.exp import Exp as MyExp +from yolox.data import get_yolox_datadir + +class Exp(MyExp): + def __init__(self): + super(Exp, self).__init__() + self.num_classes = 1 + self.depth = 1.33 + self.width = 1.25 + self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0] + self.train_ann = "train.json" + self.val_ann = "val_half.json" + self.input_size = (800, 1440) + self.test_size = (800, 1440) + self.random_size = (18, 32) + self.max_epoch = 80 + self.print_interval = 20 + self.eval_interval = 5 + self.test_conf = 0.1 + self.nmsthre = 0.7 + self.no_aug_epochs = 10 + self.basic_lr_per_img = 0.001 / 64.0 + self.warmup_epochs = 1 + + def get_data_loader(self, batch_size, is_distributed, no_aug=False): + from yolox.data import ( + MOTDataset, + TrainTransform, + YoloBatchSampler, + DataLoader, + InfiniteSampler, + MosaicDetection, + ) + + dataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "mix_mot_ch"), + json_file=self.train_ann, + name='', + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=500, + ), + ) + + dataset = MosaicDetection( + dataset, + mosaic=not no_aug, + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=1000, + ), + degrees=self.degrees, + translate=self.translate, + scale=self.scale, + shear=self.shear, + perspective=self.perspective, + enable_mixup=self.enable_mixup, + ) + + self.dataset = dataset + + if is_distributed: + batch_size = batch_size // dist.get_world_size() + + sampler = InfiniteSampler( + len(self.dataset), seed=self.seed if self.seed else 0 + ) + + batch_sampler = YoloBatchSampler( + sampler=sampler, + batch_size=batch_size, + drop_last=False, + input_dimension=self.input_size, + mosaic=not no_aug, + ) + + 
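+        # drop_last=False keeps the final partial batch; input_dimension and mosaic travel with each batch so the loader can drive multi-scale training and mosaic augmentation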
dataloader_kwargs = {"num_workers": self.data_num_workers, "pin_memory": True} + dataloader_kwargs["batch_sampler"] = batch_sampler + train_loader = DataLoader(self.dataset, **dataloader_kwargs) + + return train_loader + + def get_eval_loader(self, batch_size, is_distributed, testdev=False): + from yolox.data import MOTDataset, ValTransform + + valdataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "mot"), + json_file=self.val_ann, + img_size=self.test_size, + name='train', + preproc=ValTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + ), + ) + + if is_distributed: + batch_size = batch_size // dist.get_world_size() + sampler = torch.utils.data.distributed.DistributedSampler( + valdataset, shuffle=False + ) + else: + sampler = torch.utils.data.SequentialSampler(valdataset) + + dataloader_kwargs = { + "num_workers": self.data_num_workers, + "pin_memory": True, + "sampler": sampler, + } + dataloader_kwargs["batch_size"] = batch_size + val_loader = torch.utils.data.DataLoader(valdataset, **dataloader_kwargs) + + return val_loader + + def get_evaluator(self, batch_size, is_distributed, testdev=False): + from yolox.evaluators import COCOEvaluator + + val_loader = self.get_eval_loader(batch_size, is_distributed, testdev=testdev) + evaluator = COCOEvaluator( + dataloader=val_loader, + img_size=self.test_size, + confthre=self.test_conf, + nmsthre=self.nmsthre, + num_classes=self.num_classes, + testdev=testdev, + ) + return evaluator diff --git a/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_ch.py b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_ch.py new file mode 100644 index 0000000000000000000000000000000000000000..0e4765ef92fdfe61c9a28c4a384f156302523e24 --- /dev/null +++ b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_ch.py @@ -0,0 +1,138 @@ +# encoding: utf-8 +import os +import random +import torch +import torch.nn as nn +import torch.distributed as dist + +from yolox.exp import Exp as MyExp +from yolox.data import get_yolox_datadir + +class Exp(MyExp): + def __init__(self): + super(Exp, self).__init__() + self.num_classes = 1 + self.depth = 1.33 + self.width = 1.25 + self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0] + self.train_ann = "train.json" + self.val_ann = "val_half.json" + self.input_size = (800, 1440) + self.test_size = (800, 1440) + self.random_size = (18, 32) + self.max_epoch = 80 + self.print_interval = 20 + self.eval_interval = 5 + self.test_conf = 0.1 + self.nmsthre = 0.7 + self.no_aug_epochs = 10 + self.basic_lr_per_img = 0.001 / 64.0 + self.warmup_epochs = 1 + + def get_data_loader(self, batch_size, is_distributed, no_aug=False): + from yolox.data import ( + MOTDataset, + TrainTransform, + YoloBatchSampler, + DataLoader, + InfiniteSampler, + MosaicDetection, + ) + + dataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "ch_all"), + json_file=self.train_ann, + name='', + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=500, + ), + ) + + dataset = MosaicDetection( + dataset, + mosaic=not no_aug, + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=1000, + ), + degrees=self.degrees, + translate=self.translate, + scale=self.scale, + shear=self.shear, + perspective=self.perspective, + enable_mixup=self.enable_mixup, + ) + + self.dataset = dataset + + if is_distributed: + batch_size = 
batch_size // dist.get_world_size() + + sampler = InfiniteSampler( + len(self.dataset), seed=self.seed if self.seed else 0 + ) + + batch_sampler = YoloBatchSampler( + sampler=sampler, + batch_size=batch_size, + drop_last=False, + input_dimension=self.input_size, + mosaic=not no_aug, + ) + + dataloader_kwargs = {"num_workers": self.data_num_workers, "pin_memory": True} + dataloader_kwargs["batch_sampler"] = batch_sampler + train_loader = DataLoader(self.dataset, **dataloader_kwargs) + + return train_loader + + def get_eval_loader(self, batch_size, is_distributed, testdev=False): + from yolox.data import MOTDataset, ValTransform + + valdataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "mot"), + json_file=self.val_ann, + img_size=self.test_size, + name='train', + preproc=ValTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + ), + ) + + if is_distributed: + batch_size = batch_size // dist.get_world_size() + sampler = torch.utils.data.distributed.DistributedSampler( + valdataset, shuffle=False + ) + else: + sampler = torch.utils.data.SequentialSampler(valdataset) + + dataloader_kwargs = { + "num_workers": self.data_num_workers, + "pin_memory": True, + "sampler": sampler, + } + dataloader_kwargs["batch_size"] = batch_size + val_loader = torch.utils.data.DataLoader(valdataset, **dataloader_kwargs) + + return val_loader + + def get_evaluator(self, batch_size, is_distributed, testdev=False): + from yolox.evaluators import COCOEvaluator + + val_loader = self.get_eval_loader(batch_size, is_distributed, testdev=testdev) + evaluator = COCOEvaluator( + dataloader=val_loader, + img_size=self.test_size, + confthre=self.test_conf, + nmsthre=self.nmsthre, + num_classes=self.num_classes, + testdev=testdev, + ) + return evaluator diff --git a/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_mix_det.py b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_mix_det.py new file mode 100644 index 0000000000000000000000000000000000000000..8013d94558c9e01cfe454778c4bd25231dbec7d8 --- /dev/null +++ b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_mix_det.py @@ -0,0 +1,138 @@ +# encoding: utf-8 +import os +import random +import torch +import torch.nn as nn +import torch.distributed as dist + +from yolox.exp import Exp as MyExp +from yolox.data import get_yolox_datadir + +class Exp(MyExp): + def __init__(self): + super(Exp, self).__init__() + self.num_classes = 1 + self.depth = 1.33 + self.width = 1.25 + self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0] + self.train_ann = "train.json" + self.val_ann = "test.json" # change to train.json when running on training set + self.input_size = (800, 1440) + self.test_size = (800, 1440) + self.random_size = (18, 32) + self.max_epoch = 80 + self.print_interval = 20 + self.eval_interval = 5 + self.test_conf = 0.001 + self.nmsthre = 0.7 + self.no_aug_epochs = 10 + self.basic_lr_per_img = 0.001 / 64.0 + self.warmup_epochs = 1 + + def get_data_loader(self, batch_size, is_distributed, no_aug=False): + from yolox.data import ( + MOTDataset, + TrainTransform, + YoloBatchSampler, + DataLoader, + InfiniteSampler, + MosaicDetection, + ) + + dataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "mix_det"), + json_file=self.train_ann, + name='', + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=500, + ), + ) + + dataset = MosaicDetection( + dataset, + mosaic=not no_aug, + 
img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=1000, + ), + degrees=self.degrees, + translate=self.translate, + scale=self.scale, + shear=self.shear, + perspective=self.perspective, + enable_mixup=self.enable_mixup, + ) + + self.dataset = dataset + + if is_distributed: + batch_size = batch_size // dist.get_world_size() + + sampler = InfiniteSampler( + len(self.dataset), seed=self.seed if self.seed else 0 + ) + + batch_sampler = YoloBatchSampler( + sampler=sampler, + batch_size=batch_size, + drop_last=False, + input_dimension=self.input_size, + mosaic=not no_aug, + ) + + dataloader_kwargs = {"num_workers": self.data_num_workers, "pin_memory": True} + dataloader_kwargs["batch_sampler"] = batch_sampler + train_loader = DataLoader(self.dataset, **dataloader_kwargs) + + return train_loader + + def get_eval_loader(self, batch_size, is_distributed, testdev=False): + from yolox.data import MOTDataset, ValTransform + + valdataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "mot"), + json_file=self.val_ann, + img_size=self.test_size, + name='test', # change to train when running on training set + preproc=ValTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + ), + ) + + if is_distributed: + batch_size = batch_size // dist.get_world_size() + sampler = torch.utils.data.distributed.DistributedSampler( + valdataset, shuffle=False + ) + else: + sampler = torch.utils.data.SequentialSampler(valdataset) + + dataloader_kwargs = { + "num_workers": self.data_num_workers, + "pin_memory": True, + "sampler": sampler, + } + dataloader_kwargs["batch_size"] = batch_size + val_loader = torch.utils.data.DataLoader(valdataset, **dataloader_kwargs) + + return val_loader + + def get_evaluator(self, batch_size, is_distributed, testdev=False): + from yolox.evaluators import COCOEvaluator + + val_loader = self.get_eval_loader(batch_size, is_distributed, testdev=testdev) + evaluator = COCOEvaluator( + dataloader=val_loader, + img_size=self.test_size, + confthre=self.test_conf, + nmsthre=self.nmsthre, + num_classes=self.num_classes, + testdev=testdev, + ) + return evaluator diff --git a/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_mix_mot20_ch.py b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_mix_mot20_ch.py new file mode 100644 index 0000000000000000000000000000000000000000..cff6b4f868607a5f3e2bb365c49acbc401f37bb0 --- /dev/null +++ b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_mix_mot20_ch.py @@ -0,0 +1,139 @@ +# encoding: utf-8 +import os +import random +import torch +import torch.nn as nn +import torch.distributed as dist + +from yolox.exp import Exp as MyExp +from yolox.data import get_yolox_datadir + +class Exp(MyExp): + def __init__(self): + super(Exp, self).__init__() + self.num_classes = 1 + self.depth = 1.33 + self.width = 1.25 + self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0] + self.train_ann = "train.json" + self.val_ann = "test.json" # change to train.json when running on training set + self.input_size = (896, 1600) + self.test_size = (896, 1600) + #self.test_size = (736, 1920) + self.random_size = (20, 36) + self.max_epoch = 80 + self.print_interval = 20 + self.eval_interval = 5 + self.test_conf = 0.001 + self.nmsthre = 0.7 + self.no_aug_epochs = 10 + self.basic_lr_per_img = 0.001 / 64.0 + self.warmup_epochs = 1 + + def get_data_loader(self, batch_size, is_distributed, no_aug=False): + from yolox.data 
import ( + MOTDataset, + TrainTransform, + YoloBatchSampler, + DataLoader, + InfiniteSampler, + MosaicDetection, + ) + + dataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "mix_mot20_ch"), + json_file=self.train_ann, + name='', + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=600, + ), + ) + + dataset = MosaicDetection( + dataset, + mosaic=not no_aug, + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=1200, + ), + degrees=self.degrees, + translate=self.translate, + scale=self.scale, + shear=self.shear, + perspective=self.perspective, + enable_mixup=self.enable_mixup, + ) + + self.dataset = dataset + + if is_distributed: + batch_size = batch_size // dist.get_world_size() + + sampler = InfiniteSampler( + len(self.dataset), seed=self.seed if self.seed else 0 + ) + + batch_sampler = YoloBatchSampler( + sampler=sampler, + batch_size=batch_size, + drop_last=False, + input_dimension=self.input_size, + mosaic=not no_aug, + ) + + dataloader_kwargs = {"num_workers": self.data_num_workers, "pin_memory": True} + dataloader_kwargs["batch_sampler"] = batch_sampler + train_loader = DataLoader(self.dataset, **dataloader_kwargs) + + return train_loader + + def get_eval_loader(self, batch_size, is_distributed, testdev=False): + from yolox.data import MOTDataset, ValTransform + + valdataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "MOT20"), + json_file=self.val_ann, + img_size=self.test_size, + name='test', # change to train when running on training set + preproc=ValTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + ), + ) + + if is_distributed: + batch_size = batch_size // dist.get_world_size() + sampler = torch.utils.data.distributed.DistributedSampler( + valdataset, shuffle=False + ) + else: + sampler = torch.utils.data.SequentialSampler(valdataset) + + dataloader_kwargs = { + "num_workers": self.data_num_workers, + "pin_memory": True, + "sampler": sampler, + } + dataloader_kwargs["batch_size"] = batch_size + val_loader = torch.utils.data.DataLoader(valdataset, **dataloader_kwargs) + + return val_loader + + def get_evaluator(self, batch_size, is_distributed, testdev=False): + from yolox.evaluators import COCOEvaluator + + val_loader = self.get_eval_loader(batch_size, is_distributed, testdev=testdev) + evaluator = COCOEvaluator( + dataloader=val_loader, + img_size=self.test_size, + confthre=self.test_conf, + nmsthre=self.nmsthre, + num_classes=self.num_classes, + testdev=testdev, + ) + return evaluator diff --git a/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_mot17_half.py b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_mot17_half.py new file mode 100644 index 0000000000000000000000000000000000000000..441119b72b8714e78f8f0311933c1c24360fa3d8 --- /dev/null +++ b/tracking/docker-build-context/byte_track/exps/example/mot/yolox_x_mot17_half.py @@ -0,0 +1,138 @@ +# encoding: utf-8 +import os +import random +import torch +import torch.nn as nn +import torch.distributed as dist + +from yolox.exp import Exp as MyExp +from yolox.data import get_yolox_datadir + +class Exp(MyExp): + def __init__(self): + super(Exp, self).__init__() + self.num_classes = 1 + self.depth = 1.33 + self.width = 1.25 + self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0] + self.train_ann = "train.json" + self.val_ann = "val_half.json" + self.input_size = (800, 
1440) + self.test_size = (800, 1440) + self.random_size = (18, 32) + self.max_epoch = 80 + self.print_interval = 20 + self.eval_interval = 5 + self.test_conf = 0.1 + self.nmsthre = 0.7 + self.no_aug_epochs = 10 + self.basic_lr_per_img = 0.001 / 64.0 + self.warmup_epochs = 1 + + def get_data_loader(self, batch_size, is_distributed, no_aug=False): + from yolox.data import ( + MOTDataset, + TrainTransform, + YoloBatchSampler, + DataLoader, + InfiniteSampler, + MosaicDetection, + ) + + dataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "mot"), + json_file=self.train_ann, + name='train', + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=500, + ), + ) + + dataset = MosaicDetection( + dataset, + mosaic=not no_aug, + img_size=self.input_size, + preproc=TrainTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + max_labels=1000, + ), + degrees=self.degrees, + translate=self.translate, + scale=self.scale, + shear=self.shear, + perspective=self.perspective, + enable_mixup=self.enable_mixup, + ) + + self.dataset = dataset + + if is_distributed: + batch_size = batch_size // dist.get_world_size() + + sampler = InfiniteSampler( + len(self.dataset), seed=self.seed if self.seed else 0 + ) + + batch_sampler = YoloBatchSampler( + sampler=sampler, + batch_size=batch_size, + drop_last=False, + input_dimension=self.input_size, + mosaic=not no_aug, + ) + + dataloader_kwargs = {"num_workers": self.data_num_workers, "pin_memory": True} + dataloader_kwargs["batch_sampler"] = batch_sampler + train_loader = DataLoader(self.dataset, **dataloader_kwargs) + + return train_loader + + def get_eval_loader(self, batch_size, is_distributed, testdev=False): + from yolox.data import MOTDataset, ValTransform + + valdataset = MOTDataset( + data_dir=os.path.join(get_yolox_datadir(), "mot"), + json_file=self.val_ann, + img_size=self.test_size, + name='train', + preproc=ValTransform( + rgb_means=(0.485, 0.456, 0.406), + std=(0.229, 0.224, 0.225), + ), + ) + + if is_distributed: + batch_size = batch_size // dist.get_world_size() + sampler = torch.utils.data.distributed.DistributedSampler( + valdataset, shuffle=False + ) + else: + sampler = torch.utils.data.SequentialSampler(valdataset) + + dataloader_kwargs = { + "num_workers": self.data_num_workers, + "pin_memory": True, + "sampler": sampler, + } + dataloader_kwargs["batch_size"] = batch_size + val_loader = torch.utils.data.DataLoader(valdataset, **dataloader_kwargs) + + return val_loader + + def get_evaluator(self, batch_size, is_distributed, testdev=False): + from yolox.evaluators import COCOEvaluator + + val_loader = self.get_eval_loader(batch_size, is_distributed, testdev=testdev) + evaluator = COCOEvaluator( + dataloader=val_loader, + img_size=self.test_size, + confthre=self.test_conf, + nmsthre=self.nmsthre, + num_classes=self.num_classes, + testdev=testdev, + ) + return evaluator diff --git a/tracking/docker-build-context/byte_track/requirements.txt b/tracking/docker-build-context/byte_track/requirements.txt new file mode 100644 index 0000000000000000000000000000000000000000..14c7ac84510d00590cac99d8ae065d8c22cbf840 --- /dev/null +++ b/tracking/docker-build-context/byte_track/requirements.txt @@ -0,0 +1,22 @@ +# TODO: Update with exact module version +numpy +torch>=1.8 +opencv_python +loguru +scikit-image +tqdm +torchvision>=0.10.0 +Pillow +thop +ninja +tabulate +tensorboard +lap +motmetrics +filterpy +h5py + +# verified versions +onnx==1.8.1 
+onnxruntime==1.8.0 +onnx-simplifier==0.3.5 diff --git a/tracking/docker-build-context/byte_track/setup.cfg b/tracking/docker-build-context/byte_track/setup.cfg new file mode 100644 index 0000000000000000000000000000000000000000..5c0cb3cc460cd11b7ef0adc08f9edf205bcb6914 --- /dev/null +++ b/tracking/docker-build-context/byte_track/setup.cfg @@ -0,0 +1,18 @@ +[isort] +line_length = 100 +multi_line_output = 3 +balanced_wrapping = True +known_standard_library = setuptools +known_third_party = tqdm,loguru +known_data_processing = cv2,numpy,scipy,PIL,matplotlib,scikit_image +known_datasets = pycocotools +known_deeplearning = torch,torchvision,caffe2,onnx,apex,timm,thop,torch2trt,tensorrt,openvino,onnxruntime +known_myself = yolox +sections = FUTURE,STDLIB,THIRDPARTY,data_processing,datasets,deeplearning,myself,FIRSTPARTY,LOCALFOLDER +no_lines_before=STDLIB,THIRDPARTY,datasets +default_section = FIRSTPARTY + +[flake8] +max-line-length = 100 +max-complexity = 18 +exclude = __init__.py diff --git a/tracking/docker-build-context/byte_track/setup.py b/tracking/docker-build-context/byte_track/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..ab3aca97b5fed932e7a40e21f6633f9f6cb84879 --- /dev/null +++ b/tracking/docker-build-context/byte_track/setup.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python +# Copyright (c) Megvii, Inc. and its affiliates. All Rights Reserved + +import re +import setuptools +import glob +from os import path +import torch +from torch.utils.cpp_extension import CppExtension + +torch_ver = [int(x) for x in torch.__version__.split(".")[:2]] +assert torch_ver >= [1, 3], "Requires PyTorch >= 1.3" + + +def get_extensions(): + this_dir = path.dirname(path.abspath(__file__)) + extensions_dir = path.join(this_dir, "yolox", "layers", "csrc") + + main_source = path.join(extensions_dir, "vision.cpp") + sources = glob.glob(path.join(extensions_dir, "**", "*.cpp")) + + sources = [main_source] + sources + extension = CppExtension + + extra_compile_args = {"cxx": ["-O3"]} + define_macros = [] + + include_dirs = [extensions_dir] + + ext_modules = [ + extension( + "yolox._C", + sources, + include_dirs=include_dirs, + define_macros=define_macros, + extra_compile_args=extra_compile_args, + ) + ] + + return ext_modules + + +with open("yolox/__init__.py", "r") as f: + version = re.search( + r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', + f.read(), re.MULTILINE + ).group(1) + + +with open("README.md", "r") as f: + long_description = f.read() + + +setuptools.setup( + name="yolox", + version=version, + author="basedet team", + python_requires=">=3.6", + long_description=long_description, + ext_modules=get_extensions(), + classifiers=["Programming Language :: Python :: 3", "Operating System :: OS Independent"], + cmdclass={"build_ext": torch.utils.cpp_extension.BuildExtension}, + packages=setuptools.find_namespace_packages(), +) diff --git a/tracking/docker-build-context/byte_track/tools/convert_cityperson_to_coco.py b/tracking/docker-build-context/byte_track/tools/convert_cityperson_to_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..e3c2a284fa9b20db329c50e1bb2c47479b680125 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/convert_cityperson_to_coco.py @@ -0,0 +1,59 @@ +import os +import numpy as np +import json +from PIL import Image + +DATA_PATH = 'datasets/Cityscapes/' +DATA_FILE_PATH = 'datasets/data_path/citypersons.train' +OUT_PATH = DATA_PATH + 'annotations/' + +def load_paths(data_path): + with open(data_path, 'r') as file: + 
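+        # one image path per line; the matching label file lives under labels_with_ids/ with a .txt extension (see below)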
img_files = file.readlines() + img_files = [x.replace('\n', '') for x in img_files] + img_files = list(filter(lambda x: len(x) > 0, img_files)) + label_files = [x.replace('images', 'labels_with_ids').replace('.png', '.txt').replace('.jpg', '.txt') for x in img_files] + return img_files, label_files + +if __name__ == '__main__': + if not os.path.exists(OUT_PATH): + os.mkdir(OUT_PATH) + + out_path = OUT_PATH + 'train.json' + out = {'images': [], 'annotations': [], 'categories': [{'id': 1, 'name': 'person'}]} + img_paths, label_paths = load_paths(DATA_FILE_PATH) + image_cnt = 0 + ann_cnt = 0 + video_cnt = 0 + for img_path, label_path in zip(img_paths, label_paths): + image_cnt += 1 + im = Image.open(img_path) + image_info = {'file_name': img_path, + 'id': image_cnt, + 'height': im.size[1], + 'width': im.size[0]} + out['images'].append(image_info) + # Load labels + if os.path.isfile(label_path): + labels0 = np.loadtxt(label_path, dtype=np.float32).reshape(-1, 6) + # Normalized xywh to pixel xyxy format + labels = labels0.copy() + labels[:, 2] = image_info['width'] * (labels0[:, 2] - labels0[:, 4] / 2) + labels[:, 3] = image_info['height'] * (labels0[:, 3] - labels0[:, 5] / 2) + labels[:, 4] = image_info['width'] * labels0[:, 4] + labels[:, 5] = image_info['height'] * labels0[:, 5] + else: + labels = np.array([]) + for i in range(len(labels)): + ann_cnt += 1 + fbox = labels[i, 2:6].tolist() + ann = {'id': ann_cnt, + 'category_id': 1, + 'image_id': image_cnt, + 'track_id': -1, + 'bbox': fbox, + 'area': fbox[2] * fbox[3], + 'iscrowd': 0} + out['annotations'].append(ann) + print('loaded train for {} images and {} samples'.format(len(out['images']), len(out['annotations']))) + json.dump(out, open(out_path, 'w')) \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/tools/convert_crowdhuman_to_coco.py b/tracking/docker-build-context/byte_track/tools/convert_crowdhuman_to_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..62e0b66788f7625e2fbb5ba420794abf1125aa84 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/convert_crowdhuman_to_coco.py @@ -0,0 +1,57 @@ +import os +import numpy as np +import json +from PIL import Image + +DATA_PATH = 'datasets/crowdhuman/' +OUT_PATH = DATA_PATH + 'annotations/' +SPLITS = ['val', 'train'] +DEBUG = False + +def load_func(fpath): + print('fpath', fpath) + assert os.path.exists(fpath) + with open(fpath,'r') as fid: + lines = fid.readlines() + records =[json.loads(line.strip('\n')) for line in lines] + return records + +if __name__ == '__main__': + if not os.path.exists(OUT_PATH): + os.mkdir(OUT_PATH) + for split in SPLITS: + data_path = DATA_PATH + split + out_path = OUT_PATH + '{}.json'.format(split) + out = {'images': [], 'annotations': [], 'categories': [{'id': 1, 'name': 'person'}]} + ann_path = DATA_PATH + 'annotation_{}.odgt'.format(split) + anns_data = load_func(ann_path) + image_cnt = 0 + ann_cnt = 0 + video_cnt = 0 + for ann_data in anns_data: + image_cnt += 1 + file_path = DATA_PATH + 'CrowdHuman_{}/'.format(split) + '{}.jpg'.format(ann_data['ID']) + im = Image.open(file_path) + image_info = {'file_name': '{}.jpg'.format(ann_data['ID']), + 'id': image_cnt, + 'height': im.size[1], + 'width': im.size[0]} + out['images'].append(image_info) + if split != 'test': + anns = ann_data['gtboxes'] + for i in range(len(anns)): + ann_cnt += 1 + fbox = anns[i]['fbox'] + ann = {'id': ann_cnt, + 'category_id': 1, + 'image_id': image_cnt, + 'track_id': -1, + 'bbox_vis': anns[i]['vbox'], + 'bbox': fbox, + 
'area': fbox[2] * fbox[3], + 'iscrowd': 1 if 'extra' in anns[i] and \ + 'ignore' in anns[i]['extra'] and \ + anns[i]['extra']['ignore'] == 1 else 0} + out['annotations'].append(ann) + print('loaded {} for {} images and {} samples'.format(split, len(out['images']), len(out['annotations']))) + json.dump(out, open(out_path, 'w')) \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/tools/convert_ethz_to_coco.py b/tracking/docker-build-context/byte_track/tools/convert_ethz_to_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..ceb32810dd0c6970f93d819bcca886fd42451a61 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/convert_ethz_to_coco.py @@ -0,0 +1,59 @@ +import os +import numpy as np +import json +from PIL import Image + +DATA_PATH = 'datasets/ETHZ/' +DATA_FILE_PATH = 'datasets/data_path/eth.train' +OUT_PATH = DATA_PATH + 'annotations/' + +def load_paths(data_path): + with open(data_path, 'r') as file: + img_files = file.readlines() + img_files = [x.replace('\n', '') for x in img_files] + img_files = list(filter(lambda x: len(x) > 0, img_files)) + label_files = [x.replace('images', 'labels_with_ids').replace('.png', '.txt').replace('.jpg', '.txt') for x in img_files] + return img_files, label_files + +if __name__ == '__main__': + if not os.path.exists(OUT_PATH): + os.mkdir(OUT_PATH) + + out_path = OUT_PATH + 'train.json' + out = {'images': [], 'annotations': [], 'categories': [{'id': 1, 'name': 'person'}]} + img_paths, label_paths = load_paths(DATA_FILE_PATH) + image_cnt = 0 + ann_cnt = 0 + video_cnt = 0 + for img_path, label_path in zip(img_paths, label_paths): + image_cnt += 1 + im = Image.open(img_path) + image_info = {'file_name': img_path, + 'id': image_cnt, + 'height': im.size[1], + 'width': im.size[0]} + out['images'].append(image_info) + # Load labels + if os.path.isfile(label_path): + labels0 = np.loadtxt(label_path, dtype=np.float32).reshape(-1, 6) + # Normalized xywh to pixel xyxy format + labels = labels0.copy() + labels[:, 2] = image_info['width'] * (labels0[:, 2] - labels0[:, 4] / 2) + labels[:, 3] = image_info['height'] * (labels0[:, 3] - labels0[:, 5] / 2) + labels[:, 4] = image_info['width'] * labels0[:, 4] + labels[:, 5] = image_info['height'] * labels0[:, 5] + else: + labels = np.array([]) + for i in range(len(labels)): + ann_cnt += 1 + fbox = labels[i, 2:6].tolist() + ann = {'id': ann_cnt, + 'category_id': 1, + 'image_id': image_cnt, + 'track_id': -1, + 'bbox': fbox, + 'area': fbox[2] * fbox[3], + 'iscrowd': 0} + out['annotations'].append(ann) + print('loaded train for {} images and {} samples'.format(len(out['images']), len(out['annotations']))) + json.dump(out, open(out_path, 'w')) \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/tools/convert_mot17_to_coco.py b/tracking/docker-build-context/byte_track/tools/convert_mot17_to_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..403798c14613e82523557e9730464b64e6403a86 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/convert_mot17_to_coco.py @@ -0,0 +1,133 @@ +import os +import numpy as np +import json +import cv2 + + +# Use the same script for MOT16 +DATA_PATH = 'datasets/mot' +OUT_PATH = os.path.join(DATA_PATH, 'annotations') +SPLITS = ['train_half', 'val_half', 'train', 'test'] # --> split training data to train_half and val_half. 
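+# HALF_VIDEO: for the *_half splits, use only the first half (train_half) or second half (val_half) of each sequence.
+# CREATE_SPLITTED_ANN / CREATE_SPLITTED_DET: also write per-half gt_*.txt / det_*.txt files next to the originals.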
+HALF_VIDEO = True +CREATE_SPLITTED_ANN = True +CREATE_SPLITTED_DET = True + + +if __name__ == '__main__': + + if not os.path.exists(OUT_PATH): + os.makedirs(OUT_PATH) + + for split in SPLITS: + if split == "test": + data_path = os.path.join(DATA_PATH, 'test') + else: + data_path = os.path.join(DATA_PATH, 'train') + out_path = os.path.join(OUT_PATH, '{}.json'.format(split)) + out = {'images': [], 'annotations': [], 'videos': [], + 'categories': [{'id': 1, 'name': 'pedestrian'}]} + seqs = os.listdir(data_path) + image_cnt = 0 + ann_cnt = 0 + video_cnt = 0 + tid_curr = 0 + tid_last = -1 + for seq in sorted(seqs): + if '.DS_Store' in seq: + continue + if 'mot' in DATA_PATH and (split != 'test' and not ('FRCNN' in seq)): + continue + video_cnt += 1 # video sequence number. + out['videos'].append({'id': video_cnt, 'file_name': seq}) + seq_path = os.path.join(data_path, seq) + img_path = os.path.join(seq_path, 'img1') + ann_path = os.path.join(seq_path, 'gt/gt.txt') + images = os.listdir(img_path) + num_images = len([image for image in images if 'jpg' in image]) # half and half + + if HALF_VIDEO and ('half' in split): + image_range = [0, num_images // 2] if 'train' in split else \ + [num_images // 2 + 1, num_images - 1] + else: + image_range = [0, num_images - 1] + + for i in range(num_images): + if i < image_range[0] or i > image_range[1]: + continue + img = cv2.imread(os.path.join(data_path, '{}/img1/{:06d}.jpg'.format(seq, i + 1))) + height, width = img.shape[:2] + image_info = {'file_name': '{}/img1/{:06d}.jpg'.format(seq, i + 1), # image name. + 'id': image_cnt + i + 1, # image number in the entire training set. + 'frame_id': i + 1 - image_range[0], # image number in the video sequence, starting from 1. + 'prev_image_id': image_cnt + i if i > 0 else -1, # image number in the entire training set. 
+ 'next_image_id': image_cnt + i + 2 if i < num_images - 1 else -1, + 'video_id': video_cnt, + 'height': height, 'width': width} + out['images'].append(image_info) + print('{}: {} images'.format(seq, num_images)) + if split != 'test': + det_path = os.path.join(seq_path, 'det/det.txt') + anns = np.loadtxt(ann_path, dtype=np.float32, delimiter=',') + dets = np.loadtxt(det_path, dtype=np.float32, delimiter=',') + if CREATE_SPLITTED_ANN and ('half' in split): + anns_out = np.array([anns[i] for i in range(anns.shape[0]) + if int(anns[i][0]) - 1 >= image_range[0] and + int(anns[i][0]) - 1 <= image_range[1]], np.float32) + anns_out[:, 0] -= image_range[0] + gt_out = os.path.join(seq_path, 'gt/gt_{}.txt'.format(split)) + fout = open(gt_out, 'w') + for o in anns_out: + fout.write('{:d},{:d},{:d},{:d},{:d},{:d},{:d},{:d},{:.6f}\n'.format( + int(o[0]), int(o[1]), int(o[2]), int(o[3]), int(o[4]), int(o[5]), + int(o[6]), int(o[7]), o[8])) + fout.close() + if CREATE_SPLITTED_DET and ('half' in split): + dets_out = np.array([dets[i] for i in range(dets.shape[0]) + if int(dets[i][0]) - 1 >= image_range[0] and + int(dets[i][0]) - 1 <= image_range[1]], np.float32) + dets_out[:, 0] -= image_range[0] + det_out = os.path.join(seq_path, 'det/det_{}.txt'.format(split)) + dout = open(det_out, 'w') + for o in dets_out: + dout.write('{:d},{:d},{:.1f},{:.1f},{:.1f},{:.1f},{:.6f}\n'.format( + int(o[0]), int(o[1]), float(o[2]), float(o[3]), float(o[4]), float(o[5]), + float(o[6]))) + dout.close() + + print('{} ann images'.format(int(anns[:, 0].max()))) + for i in range(anns.shape[0]): + frame_id = int(anns[i][0]) + if frame_id - 1 < image_range[0] or frame_id - 1 > image_range[1]: + continue + track_id = int(anns[i][1]) + cat_id = int(anns[i][7]) + ann_cnt += 1 + if not ('15' in DATA_PATH): + #if not (float(anns[i][8]) >= 0.25): # visibility. + #continue + if not (int(anns[i][6]) == 1): # whether ignore. + continue + if int(anns[i][7]) in [3, 4, 5, 6, 9, 10, 11]: # Non-person + continue + if int(anns[i][7]) in [2, 7, 8, 12]: # Ignored person + category_id = -1 + else: + category_id = 1 # pedestrian(non-static) + if not track_id == tid_last: + tid_curr += 1 + tid_last = track_id + else: + category_id = 1 + ann = {'id': ann_cnt, + 'category_id': category_id, + 'image_id': image_cnt + frame_id, + 'track_id': tid_curr, + 'bbox': anns[i][2:6].tolist(), + 'conf': float(anns[i][6]), + 'iscrowd': 0, + 'area': float(anns[i][4] * anns[i][5])} + out['annotations'].append(ann) + image_cnt += num_images + print(tid_curr, tid_last) + print('loaded {} for {} images and {} samples'.format(split, len(out['images']), len(out['annotations']))) + json.dump(out, open(out_path, 'w')) \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/tools/convert_mot20_to_coco.py b/tracking/docker-build-context/byte_track/tools/convert_mot20_to_coco.py new file mode 100644 index 0000000000000000000000000000000000000000..67bd9b55b94dc8511b8542d0391d73681238c8b7 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/convert_mot20_to_coco.py @@ -0,0 +1,132 @@ +import os +import numpy as np +import json +import cv2 + + +# Use the same script for MOT16 +DATA_PATH = 'datasets/MOT20' +OUT_PATH = os.path.join(DATA_PATH, 'annotations') +SPLITS = ['train_half', 'val_half', 'train', 'test'] # --> split training data to train_half and val_half. 
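+# Same switches as convert_mot17_to_coco.py: split each sequence in half for the *_half splits and write the matching gt/det files.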
+HALF_VIDEO = True +CREATE_SPLITTED_ANN = True +CREATE_SPLITTED_DET = True + + +if __name__ == '__main__': + + if not os.path.exists(OUT_PATH): + os.makedirs(OUT_PATH) + + for split in SPLITS: + if split == "test": + data_path = os.path.join(DATA_PATH, 'test') + else: + data_path = os.path.join(DATA_PATH, 'train') + out_path = os.path.join(OUT_PATH, '{}.json'.format(split)) + out = {'images': [], 'annotations': [], 'videos': [], + 'categories': [{'id': 1, 'name': 'pedestrian'}]} + seqs = os.listdir(data_path) + image_cnt = 0 + ann_cnt = 0 + video_cnt = 0 + tid_curr = 0 + tid_last = -1 + for seq in sorted(seqs): + if '.DS_Store' in seq: + continue + video_cnt += 1 # video sequence number. + out['videos'].append({'id': video_cnt, 'file_name': seq}) + seq_path = os.path.join(data_path, seq) + img_path = os.path.join(seq_path, 'img1') + ann_path = os.path.join(seq_path, 'gt/gt.txt') + images = os.listdir(img_path) + num_images = len([image for image in images if 'jpg' in image]) # half and half + + if HALF_VIDEO and ('half' in split): + image_range = [0, num_images // 2] if 'train' in split else \ + [num_images // 2 + 1, num_images - 1] + else: + image_range = [0, num_images - 1] + + for i in range(num_images): + if i < image_range[0] or i > image_range[1]: + continue + img = cv2.imread(os.path.join(data_path, '{}/img1/{:06d}.jpg'.format(seq, i + 1))) + height, width = img.shape[:2] + image_info = {'file_name': '{}/img1/{:06d}.jpg'.format(seq, i + 1), # image name. + 'id': image_cnt + i + 1, # image number in the entire training set. + 'frame_id': i + 1 - image_range[0], # image number in the video sequence, starting from 1. + 'prev_image_id': image_cnt + i if i > 0 else -1, # image number in the entire training set. + 'next_image_id': image_cnt + i + 2 if i < num_images - 1 else -1, + 'video_id': video_cnt, + 'height': height, 'width': width} + out['images'].append(image_info) + print('{}: {} images'.format(seq, num_images)) + if split != 'test': + det_path = os.path.join(seq_path, 'det/det.txt') + anns = np.loadtxt(ann_path, dtype=np.float32, delimiter=',') + dets = np.loadtxt(det_path, dtype=np.float32, delimiter=',') + if CREATE_SPLITTED_ANN and ('half' in split): + anns_out = np.array([anns[i] for i in range(anns.shape[0]) + if int(anns[i][0]) - 1 >= image_range[0] and + int(anns[i][0]) - 1 <= image_range[1]], np.float32) + anns_out[:, 0] -= image_range[0] + gt_out = os.path.join(seq_path, 'gt/gt_{}.txt'.format(split)) + fout = open(gt_out, 'w') + for o in anns_out: + fout.write('{:d},{:d},{:d},{:d},{:d},{:d},{:d},{:d},{:.6f}\n'.format( + int(o[0]), int(o[1]), int(o[2]), int(o[3]), int(o[4]), int(o[5]), + int(o[6]), int(o[7]), o[8])) + fout.close() + if CREATE_SPLITTED_DET and ('half' in split): + dets_out = np.array([dets[i] for i in range(dets.shape[0]) + if int(dets[i][0]) - 1 >= image_range[0] and + int(dets[i][0]) - 1 <= image_range[1]], np.float32) + dets_out[:, 0] -= image_range[0] + det_out = os.path.join(seq_path, 'det/det_{}.txt'.format(split)) + dout = open(det_out, 'w') + for o in dets_out: + dout.write('{:d},{:d},{:.1f},{:.1f},{:.1f},{:.1f},{:.6f}\n'.format( + int(o[0]), int(o[1]), float(o[2]), float(o[3]), float(o[4]), float(o[5]), + float(o[6]))) + dout.close() + + print('{} ann images'.format(int(anns[:, 0].max()))) + for i in range(anns.shape[0]): + frame_id = int(anns[i][0]) + if frame_id - 1 < image_range[0] or frame_id - 1 > image_range[1]: + continue + track_id = int(anns[i][1]) + cat_id = int(anns[i][7]) + ann_cnt += 1 + if not ('15' in DATA_PATH): + #if not 
(float(anns[i][8]) >= 0.25): # visibility. + #continue + if not (int(anns[i][6]) == 1): # whether ignore. + continue + if int(anns[i][7]) in [3, 4, 5, 6, 9, 10, 11]: # Non-person + continue + if int(anns[i][7]) in [2, 7, 8, 12]: # Ignored person + #category_id = -1 + continue + else: + category_id = 1 # pedestrian(non-static) + if not track_id == tid_last: + tid_curr += 1 + tid_last = track_id + else: + category_id = 1 + ann = {'id': ann_cnt, + 'category_id': category_id, + 'image_id': image_cnt + frame_id, + 'track_id': tid_curr, + 'bbox': anns[i][2:6].tolist(), + 'conf': float(anns[i][6]), + 'iscrowd': 0, + 'area': float(anns[i][4] * anns[i][5])} + out['annotations'].append(ann) + image_cnt += num_images + print(tid_curr, tid_last) + print('loaded {} for {} images and {} samples'.format(split, len(out['images']), len(out['annotations']))) + json.dump(out, open(out_path, 'w')) \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/tools/convert_video.py b/tracking/docker-build-context/byte_track/tools/convert_video.py new file mode 100644 index 0000000000000000000000000000000000000000..16e8aaaf117a95e72d8f85b6376a5e2b8eaaf300 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/convert_video.py @@ -0,0 +1,26 @@ +import cv2 + +def convert_video(video_path): + cap = cv2.VideoCapture(video_path) + width = cap.get(cv2.CAP_PROP_FRAME_WIDTH) # float + height = cap.get(cv2.CAP_PROP_FRAME_HEIGHT) # float + fps = cap.get(cv2.CAP_PROP_FPS) + video_name = video_path.split('/')[-1].split('.')[0] + save_name = video_name + '_converted' + save_path = video_path.replace(video_name, save_name) + vid_writer = cv2.VideoWriter( + save_path, cv2.VideoWriter_fourcc(*"mp4v"), fps, (int(width), int(height)) + ) + while True: + ret_val, frame = cap.read() + if ret_val: + vid_writer.write(frame) + ch = cv2.waitKey(1) + if ch == 27 or ch == ord("q") or ch == ord("Q"): + break + else: + break + +if __name__ == "__main__": + video_path = 'videos/palace.mp4' + convert_video(video_path) \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/tools/demo_track.py b/tracking/docker-build-context/byte_track/tools/demo_track.py new file mode 100644 index 0000000000000000000000000000000000000000..6ce9c6d185fb24fa7ad571d74ed24c2206b046ac --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/demo_track.py @@ -0,0 +1,357 @@ +from loguru import logger + +import cv2 + +import torch + +from yolox.data.data_augment import preproc +from yolox.exp import get_exp +from yolox.utils import fuse_model, get_model_info, postprocess, vis +from yolox.utils.visualize import plot_tracking +from yolox.tracker.byte_tracker import BYTETracker +from yolox.tracking_utils.timer import Timer + +import argparse +import os +import time + +IMAGE_EXT = [".jpg", ".jpeg", ".webp", ".bmp", ".png"] + + +def make_parser(): + parser = argparse.ArgumentParser("byte_track Demo!") + parser.add_argument( + "demo", default="image", help="demo type, eg. 
image, video and webcam"
+    )
+    parser.add_argument("-expn", "--experiment-name", type=str, default=None)
+    parser.add_argument("-n", "--name", type=str, default=None, help="model name")
+
+    parser.add_argument(
+        #"--path", default="./datasets/mot/train/MOT17-05-FRCNN/img1", help="path to images or video"
+        "--path", default="./videos/palace.mp4", help="path to images or video"
+    )
+    parser.add_argument("--camid", type=int, default=0, help="webcam demo camera id")
+    parser.add_argument(
+        "--save_result",
+        action="store_true",
+        help="whether to save the inference result of image/video",
+    )
+
+    # exp file
+    parser.add_argument(
+        "-f",
+        "--exp_file",
+        default=None,
+        type=str,
+        help="please input your experiment description file",
+    )
+    parser.add_argument("-c", "--ckpt", default=None, type=str, help="ckpt for eval")
+    parser.add_argument(
+        "--device",
+        default="gpu",
+        type=str,
+        help="device to run our model, can either be cpu or gpu",
+    )
+    parser.add_argument("--conf", default=None, type=float, help="test conf")
+    parser.add_argument("--nms", default=None, type=float, help="test nms threshold")
+    parser.add_argument("--tsize", default=None, type=int, help="test img size")
+    parser.add_argument(
+        "--fp16",
+        dest="fp16",
+        default=False,
+        action="store_true",
+        help="Adopting mixed precision evaluation.",
+    )
+    parser.add_argument(
+        "--fuse",
+        dest="fuse",
+        default=False,
+        action="store_true",
+        help="Fuse conv and bn for testing.",
+    )
+    parser.add_argument(
+        "--trt",
+        dest="trt",
+        default=False,
+        action="store_true",
+        help="Using TensorRT model for testing.",
+    )
+    # tracking args
+    parser.add_argument("--track_thresh", type=float, default=0.5, help="tracking confidence threshold")
+    parser.add_argument("--track_buffer", type=int, default=30, help="the number of frames to keep lost tracks")
+    parser.add_argument("--match_thresh", type=float, default=0.8, help="matching threshold for tracking")
+    parser.add_argument('--min-box-area', type=float, default=10, help='filter out tiny boxes')
+    parser.add_argument("--mot20", dest="mot20", default=False, action="store_true", help="test mot20.")
+    return parser
+
+
+def get_image_list(path):
+    image_names = []
+    for maindir, subdir, file_name_list in os.walk(path):
+        for filename in file_name_list:
+            apath = os.path.join(maindir, filename)
+            ext = os.path.splitext(apath)[1]
+            if ext in IMAGE_EXT:
+                image_names.append(apath)
+    return image_names
+
+
+def write_results(filename, results):
+    save_format = '{frame},{id},{x1},{y1},{w},{h},{s},-1,-1,-1\n'
+    with open(filename, 'w') as f:
+        for frame_id, tlwhs, track_ids, scores in results:
+            for tlwh, track_id, score in zip(tlwhs, track_ids, scores):
+                if track_id < 0:
+                    continue
+                x1, y1, w, h = tlwh
+                line = save_format.format(frame=frame_id, id=track_id, x1=round(x1, 1), y1=round(y1, 1), w=round(w, 1), h=round(h, 1), s=round(score, 2))
+                f.write(line)
+    logger.info('save results to {}'.format(filename))
+
+
+class Predictor(object):
+    def __init__(
+        self,
+        model,
+        exp,
+        trt_file=None,
+        decoder=None,
+        device="cpu",
+        fp16=False
+    ):
+        self.model = model
+        self.decoder = decoder
+        self.num_classes = exp.num_classes
+        self.confthre = exp.test_conf
+        self.nmsthre = exp.nmsthre
+        self.test_size = exp.test_size
+        self.device = device
+        self.fp16 = fp16
+        if trt_file is not None:
+            from torch2trt import TRTModule
+
+            model_trt = TRTModule()
+            model_trt.load_state_dict(torch.load(trt_file))
+
+            # run one dummy forward pass to initialize, then swap in the TensorRT module
+            x = torch.ones(1, 3, exp.test_size[0], exp.test_size[1]).cuda()
+            self.model(x)
+            self.model = 
model_trt + self.rgb_means = (0.485, 0.456, 0.406) + self.std = (0.229, 0.224, 0.225) + + def inference(self, img, timer): + img_info = {"id": 0} + if isinstance(img, str): + img_info["file_name"] = os.path.basename(img) + img = cv2.imread(img) + else: + img_info["file_name"] = None + + height, width = img.shape[:2] + img_info["height"] = height + img_info["width"] = width + img_info["raw_img"] = img + + img, ratio = preproc(img, self.test_size, self.rgb_means, self.std) + img_info["ratio"] = ratio + img = torch.from_numpy(img).unsqueeze(0) + img = img.float() + if self.device == "gpu": + img = img.cuda() + if self.fp16: + img = img.half() # to FP16 + + with torch.no_grad(): + timer.tic() + outputs = self.model(img) + if self.decoder is not None: + outputs = self.decoder(outputs, dtype=outputs.type()) + outputs = postprocess( + outputs, self.num_classes, self.confthre, self.nmsthre + ) + #logger.info("Infer time: {:.4f}s".format(time.time() - t0)) + return outputs, img_info + + +def image_demo(predictor, vis_folder, path, current_time, save_result): + if os.path.isdir(path): + files = get_image_list(path) + else: + files = [path] + files.sort() + tracker = BYTETracker(args, frame_rate=30) + timer = Timer() + frame_id = 0 + results = [] + for image_name in files: + if frame_id % 20 == 0: + logger.info('Processing frame {} ({:.2f} fps)'.format(frame_id, 1. / max(1e-5, timer.average_time))) + outputs, img_info = predictor.inference(image_name, timer) + if outputs[0] is not None: + online_targets = tracker.update(outputs[0], [img_info['height'], img_info['width']], exp.test_size) + online_tlwhs = [] + online_ids = [] + online_scores = [] + for t in online_targets: + tlwh = t.tlwh + tid = t.track_id + vertical = tlwh[2] / tlwh[3] > 1.6 + if tlwh[2] * tlwh[3] > args.min_box_area and not vertical: + online_tlwhs.append(tlwh) + online_ids.append(tid) + online_scores.append(t.score) + # save results + results.append((frame_id + 1, online_tlwhs, online_ids, online_scores)) + timer.toc() + online_im = plot_tracking(img_info['raw_img'], online_tlwhs, online_ids, frame_id=frame_id + 1, + fps=1. 
/ timer.average_time) + else: + timer.toc() + online_im = img_info['raw_img'] + + #result_image = predictor.visual(outputs[0], img_info, predictor.confthre) + if save_result: + save_folder = os.path.join( + vis_folder, time.strftime("%Y_%m_%d_%H_%M_%S", current_time) + ) + os.makedirs(save_folder, exist_ok=True) + save_file_name = os.path.join(save_folder, os.path.basename(image_name)) + cv2.imwrite(save_file_name, online_im) + ch = cv2.waitKey(0) + frame_id += 1 + if ch == 27 or ch == ord("q") or ch == ord("Q"): + break + #write_results(result_filename, results) + + +def imageflow_demo(predictor, vis_folder, current_time, args): + cap = cv2.VideoCapture(args.path if args.demo == "video" else args.camid) + width = cap.get(cv2.CAP_PROP_FRAME_WIDTH) # float + height = cap.get(cv2.CAP_PROP_FRAME_HEIGHT) # float + fps = cap.get(cv2.CAP_PROP_FPS) + save_folder = os.path.join( + vis_folder, time.strftime("%Y_%m_%d_%H_%M_%S", current_time) + ) + os.makedirs(save_folder, exist_ok=True) + if args.demo == "video": + save_path = os.path.join(save_folder, args.path.split("/")[-1]) + else: + save_path = os.path.join(save_folder, "camera.mp4") + logger.info(f"video save_path is {save_path}") + vid_writer = cv2.VideoWriter( + save_path, cv2.VideoWriter_fourcc(*"mp4v"), fps, (int(width), int(height)) + ) + tracker = BYTETracker(args, frame_rate=30) + timer = Timer() + frame_id = 0 + results = [] + while True: + if frame_id % 20 == 0: + logger.info('Processing frame {} ({:.2f} fps)'.format(frame_id, 1. / max(1e-5, timer.average_time))) + ret_val, frame = cap.read() + if ret_val: + outputs, img_info = predictor.inference(frame, timer) + if outputs[0] is not None: + online_targets = tracker.update(outputs[0], [img_info['height'], img_info['width']], exp.test_size) + online_tlwhs = [] + online_ids = [] + online_scores = [] + for t in online_targets: + tlwh = t.tlwh + tid = t.track_id + vertical = tlwh[2] / tlwh[3] > 1.6 + if tlwh[2] * tlwh[3] > args.min_box_area and not vertical: + online_tlwhs.append(tlwh) + online_ids.append(tid) + online_scores.append(t.score) + results.append((frame_id + 1, online_tlwhs, online_ids, online_scores)) + timer.toc() + online_im = plot_tracking(img_info['raw_img'], online_tlwhs, online_ids, frame_id=frame_id + 1, + fps=1. 
/ timer.average_time)
+            else:
+                timer.toc()
+                online_im = img_info['raw_img']
+            if args.save_result:
+                vid_writer.write(online_im)
+            ch = cv2.waitKey(1)
+            if ch == 27 or ch == ord("q") or ch == ord("Q"):
+                break
+        else:
+            break
+        frame_id += 1
+
+
+def main(exp, args):
+    if not args.experiment_name:
+        args.experiment_name = exp.exp_name
+
+    file_name = os.path.join(exp.output_dir, args.experiment_name)
+    os.makedirs(file_name, exist_ok=True)
+
+    # vis_folder must be defined even when --save_result is not set, because
+    # it is passed to image_demo/imageflow_demo unconditionally below
+    vis_folder = None
+    if args.save_result:
+        vis_folder = os.path.join(file_name, "track_vis")
+        os.makedirs(vis_folder, exist_ok=True)
+
+    if args.trt:
+        args.device = "gpu"
+
+    logger.info("Args: {}".format(args))
+
+    if args.conf is not None:
+        exp.test_conf = args.conf
+    if args.nms is not None:
+        exp.nmsthre = args.nms
+    if args.tsize is not None:
+        exp.test_size = (args.tsize, args.tsize)
+
+    model = exp.get_model()
+    logger.info("Model Summary: {}".format(get_model_info(model, exp.test_size)))
+
+    if args.device == "gpu":
+        model.cuda()
+    model.eval()
+
+    if not args.trt:
+        if args.ckpt is None:
+            ckpt_file = os.path.join(file_name, "best_ckpt.pth.tar")
+        else:
+            ckpt_file = args.ckpt
+        logger.info("loading checkpoint")
+        ckpt = torch.load(ckpt_file, map_location="cpu")
+        # load the model state dict
+        model.load_state_dict(ckpt["model"])
+        logger.info("loaded checkpoint done.")
+
+    if args.fuse:
+        logger.info("\tFusing model...")
+        model = fuse_model(model)
+
+    if args.fp16:
+        model = model.half()  # to FP16
+
+    if args.trt:
+        assert not args.fuse, "TensorRT model does not support model fusing!"
+        trt_file = os.path.join(file_name, "model_trt.pth")
+        assert os.path.exists(
+            trt_file
+        ), "TensorRT model is not found!\n Run python3 tools/trt.py first!"
+        model.head.decode_in_inference = False
+        decoder = model.head.decode_outputs
+        logger.info("Using TensorRT for inference")
+    else:
+        trt_file = None
+        decoder = None
+
+    predictor = Predictor(model, exp, trt_file, decoder, args.device, args.fp16)
+    current_time = time.localtime()
+    if args.demo == "image":
+        image_demo(predictor, vis_folder, args.path, current_time, args.save_result)
+    elif args.demo == "video" or args.demo == "webcam":
+        imageflow_demo(predictor, vis_folder, current_time, args)
+
+
+if __name__ == "__main__":
+    args = make_parser().parse_args()
+    exp = get_exp(args.exp_file, args.name)
+
+    main(exp, args)
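The next file, tools/export_onnx.py, is essentially a wrapper around `torch.onnx.export`. A minimal, self-contained sketch of that call on a toy module, using the same node names and opset the script takes as defaults; the toy module and the 608x1088 input size are illustrative assumptions, not values from the patch.

```python
# Minimal sketch of the ONNX export performed by tools/export_onnx.py,
# applied to a toy module so the argument meanings are visible in isolation.
import torch
from torch import nn

model = nn.Sequential(nn.Conv2d(3, 8, 3, padding=1), nn.ReLU()).eval()
dummy_input = torch.randn(1, 3, 608, 1088)  # (batch, channels, H, W); stands in for exp.test_size

torch.onnx.export(
    model,
    dummy_input,
    "toy.onnx",
    input_names=["images"],   # must match the --input node name used downstream
    output_names=["output"],  # must match the --output node name
    opset_version=11,         # the script's --opset default
)
```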
diff --git a/tracking/docker-build-context/byte_track/tools/export_onnx.py b/tracking/docker-build-context/byte_track/tools/export_onnx.py
new file mode 100644
index 0000000000000000000000000000000000000000..71b16fe3b3bd4e4a2b1315d2c2185ad2195dc37b
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/tools/export_onnx.py
@@ -0,0 +1,102 @@
+from loguru import logger
+
+import torch
+from torch import nn
+
+from yolox.exp import get_exp
+from yolox.models.network_blocks import SiLU
+from yolox.utils import replace_module
+
+import argparse
+import os
+
+
+def make_parser():
+    parser = argparse.ArgumentParser("YOLOX onnx deploy")
+    parser.add_argument(
+        "--output-name", type=str, default="bytetrack_s.onnx", help="output name of models"
+    )
+    parser.add_argument(
+        "--input", default="images", type=str, help="input node name of onnx model"
+    )
+    parser.add_argument(
+        "--output", default="output", type=str, help="output node name of onnx model"
+    )
+    parser.add_argument(
+        "-o", "--opset", default=11, type=int, help="onnx opset version"
+    )
+    parser.add_argument("--no-onnxsim", action="store_true", help="use onnxsim or not")
+    parser.add_argument(
+        "-f",
+        "--exp_file",
+        default=None,
+        type=str,
+        help="experiment description file",
+    )
+    parser.add_argument("-expn", "--experiment-name", type=str, default=None)
+    parser.add_argument("-n", "--name", type=str, default=None, help="model name")
+    parser.add_argument("-c", "--ckpt", default=None, type=str, help="ckpt path")
+    parser.add_argument(
+        "opts",
+        help="Modify config options using the command-line",
+        default=None,
+        nargs=argparse.REMAINDER,
+    )
+
+    return parser
+
+
+@logger.catch
+def main():
+    args = make_parser().parse_args()
+    logger.info("args value: {}".format(args))
+    exp = get_exp(args.exp_file, args.name)
+    exp.merge(args.opts)
+
+    if not args.experiment_name:
+        args.experiment_name = exp.exp_name
+
+    model = exp.get_model()
+    if args.ckpt is None:
+        file_name = os.path.join(exp.output_dir, args.experiment_name)
+        ckpt_file = os.path.join(file_name, "best_ckpt.pth.tar")
+    else:
+        ckpt_file = args.ckpt
+
+    # load the model state dict
+    ckpt = torch.load(ckpt_file, map_location="cpu")
+
+    model.eval()
+    if "model" in ckpt:
+        ckpt = ckpt["model"]
+    model.load_state_dict(ckpt)
+    model = replace_module(model, nn.SiLU, SiLU)
+    model.head.decode_in_inference = False
+
+    logger.info("loading checkpoint done.")
+    dummy_input = torch.randn(1, 3, exp.test_size[0], exp.test_size[1])
+    torch.onnx.export(
+        model,
+        dummy_input,
+        args.output_name,
+        input_names=[args.input],
+        output_names=[args.output],
+        opset_version=args.opset,
+    )
+    logger.info("generated onnx model named {}".format(args.output_name))
+
+    if not args.no_onnxsim:
+        import onnx
+
+        from onnxsim import simplify
+
+        # use onnx-simplifier to remove redundant ops from the exported model
+        onnx_model = onnx.load(args.output_name)
+        model_simp, check = simplify(onnx_model)
+        assert check, "Simplified ONNX model could not be validated"
+        onnx.save(model_simp, args.output_name)
+        logger.info("generated simplified onnx model named {}".format(args.output_name))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tracking/docker-build-context/byte_track/tools/interpolation.py b/tracking/docker-build-context/byte_track/tools/interpolation.py
new file mode 100644
index 0000000000000000000000000000000000000000..065bc48d0d80e87ad92d683ba26b03ccde03a9c6
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/tools/interpolation.py
@@ -0,0 +1,143 @@
+import numpy as np
+import os
+import glob
+import motmetrics as mm
+
+from yolox.evaluators.evaluation import Evaluator
+
+
+def mkdir_if_missing(d):
+    if not os.path.exists(d):
+        os.makedirs(d)
+
+
+def eval_mota(data_root, txt_path):
+    accs = []
+    seqs = sorted([s for s in os.listdir(data_root) if s.endswith('FRCNN')])
+    #seqs = sorted([s for s in os.listdir(data_root)])
+    for seq in seqs:
+        video_out_path = os.path.join(txt_path, seq + '.txt')
+        evaluator = Evaluator(data_root, seq, 'mot')
+        accs.append(evaluator.eval_file(video_out_path))
+    metrics = mm.metrics.motchallenge_metrics
+    mh = mm.metrics.create()
+    summary = Evaluator.get_summary(accs, seqs, metrics)
+    strsummary = mm.io.render_summary(
+        summary,
+        formatters=mh.formatters,
+        namemap=mm.io.motchallenge_metric_names
+    )
+    print(strsummary)
+
+
+def get_mota(data_root, txt_path):
+    accs = []
+    seqs = sorted([s for s in os.listdir(data_root) if s.endswith('FRCNN')])
+    #seqs = sorted([s for s in os.listdir(data_root)])
+    for seq in seqs:
+        video_out_path = os.path.join(txt_path, seq + '.txt')
+        evaluator = Evaluator(data_root, seq, 'mot')
+        accs.append(evaluator.eval_file(video_out_path))
+    metrics = mm.metrics.motchallenge_metrics
+    mh = mm.metrics.create()
+    summary = 
Evaluator.get_summary(accs, seqs, metrics) + strsummary = mm.io.render_summary( + summary, + formatters=mh.formatters, + namemap=mm.io.motchallenge_metric_names + ) + mota = float(strsummary.split(' ')[-6][:-1]) + return mota + + +def write_results_score(filename, results): + save_format = '{frame},{id},{x1},{y1},{w},{h},{s},-1,-1,-1\n' + with open(filename, 'w') as f: + for i in range(results.shape[0]): + frame_data = results[i] + frame_id = int(frame_data[0]) + track_id = int(frame_data[1]) + x1, y1, w, h = frame_data[2:6] + score = frame_data[6] + line = save_format.format(frame=frame_id, id=track_id, x1=x1, y1=y1, w=w, h=h, s=-1) + f.write(line) + + +def dti(txt_path, save_path, n_min=25, n_dti=20): + seq_txts = sorted(glob.glob(os.path.join(txt_path, '*.txt'))) + for seq_txt in seq_txts: + seq_name = seq_txt.split('/')[-1] + seq_data = np.loadtxt(seq_txt, dtype=np.float64, delimiter=',') + min_id = int(np.min(seq_data[:, 1])) + max_id = int(np.max(seq_data[:, 1])) + seq_results = np.zeros((1, 10), dtype=np.float64) + for track_id in range(min_id, max_id + 1): + index = (seq_data[:, 1] == track_id) + tracklet = seq_data[index] + tracklet_dti = tracklet + if tracklet.shape[0] == 0: + continue + n_frame = tracklet.shape[0] + n_conf = np.sum(tracklet[:, 6] > 0.5) + if n_frame > n_min: + frames = tracklet[:, 0] + frames_dti = {} + for i in range(0, n_frame): + right_frame = frames[i] + if i > 0: + left_frame = frames[i - 1] + else: + left_frame = frames[i] + # disconnected track interpolation + if 1 < right_frame - left_frame < n_dti: + num_bi = int(right_frame - left_frame - 1) + right_bbox = tracklet[i, 2:6] + left_bbox = tracklet[i - 1, 2:6] + for j in range(1, num_bi + 1): + curr_frame = j + left_frame + curr_bbox = (curr_frame - left_frame) * (right_bbox - left_bbox) / \ + (right_frame - left_frame) + left_bbox + frames_dti[curr_frame] = curr_bbox + num_dti = len(frames_dti.keys()) + if num_dti > 0: + data_dti = np.zeros((num_dti, 10), dtype=np.float64) + for n in range(num_dti): + data_dti[n, 0] = list(frames_dti.keys())[n] + data_dti[n, 1] = track_id + data_dti[n, 2:6] = frames_dti[list(frames_dti.keys())[n]] + data_dti[n, 6:] = [1, -1, -1, -1] + tracklet_dti = np.vstack((tracklet, data_dti)) + seq_results = np.vstack((seq_results, tracklet_dti)) + save_seq_txt = os.path.join(save_path, seq_name) + seq_results = seq_results[1:] + seq_results = seq_results[seq_results[:, 0].argsort()] + write_results_score(save_seq_txt, seq_results) + + +if __name__ == '__main__': + data_root = '/opt/tiger/demo/byte_track/datasets/mot/test' + txt_path = '/opt/tiger/demo/byte_track/YOLOX_outputs/yolox_x_mix_det/track_results' + save_path = '/opt/tiger/demo/byte_track/YOLOX_outputs/yolox_x_mix_det/track_results_dti' + + mkdir_if_missing(save_path) + dti(txt_path, save_path, n_min=5, n_dti=20) + print('Before DTI: ') + eval_mota(data_root, txt_path) + print('After DTI:') + eval_mota(data_root, save_path) + + ''' + mota_best = 0.0 + best_n_min = 0 + best_n_dti = 0 + for n_min in range(5, 50, 5): + for n_dti in range(5, 30, 5): + dti(txt_path, save_path, n_min, n_dti) + mota = get_mota(data_root, save_path) + if mota > mota_best: + mota_best = mota + best_n_min = n_min + best_n_dti = n_dti + print(mota_best, best_n_min, best_n_dti) + print(mota_best, best_n_min, best_n_dti) + ''' diff --git a/tracking/docker-build-context/byte_track/tools/mix_data_ablation.py b/tracking/docker-build-context/byte_track/tools/mix_data_ablation.py new file mode 100644 index 
0000000000000000000000000000000000000000..b830c691ce52756aac2a8569829297b37ec5147d --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/mix_data_ablation.py @@ -0,0 +1,93 @@ +import json +import os + + +""" +cd datasets +mkdir -p mix_mot_ch/annotations +cp mot/annotations/val_half.json mix_mot_ch/annotations/val_half.json +cp mot/annotations/test.json mix_mot_ch/annotations/test.json +cd mix_mot_ch +ln -s ../mot/train mot_train +ln -s ../crowdhuman/CrowdHuman_train crowdhuman_train +ln -s ../crowdhuman/CrowdHuman_val crowdhuman_val +cd .. +""" + +mot_json = json.load(open('datasets/mot/annotations/train_half.json','r')) + +img_list = list() +for img in mot_json['images']: + img['file_name'] = 'mot_train/' + img['file_name'] + img_list.append(img) + +ann_list = list() +for ann in mot_json['annotations']: + ann_list.append(ann) + +video_list = mot_json['videos'] +category_list = mot_json['categories'] + +print('mot17') + +max_img = 10000 +max_ann = 2000000 +max_video = 10 + +crowdhuman_json = json.load(open('datasets/crowdhuman/annotations/train.json','r')) +img_id_count = 0 +for img in crowdhuman_json['images']: + img_id_count += 1 + img['file_name'] = 'crowdhuman_train/' + img['file_name'] + img['frame_id'] = img_id_count + img['prev_image_id'] = img['id'] + max_img + img['next_image_id'] = img['id'] + max_img + img['id'] = img['id'] + max_img + img['video_id'] = max_video + img_list.append(img) + +for ann in crowdhuman_json['annotations']: + ann['id'] = ann['id'] + max_ann + ann['image_id'] = ann['image_id'] + max_img + ann_list.append(ann) + +video_list.append({ + 'id': max_video, + 'file_name': 'crowdhuman_train' +}) + +print('crowdhuman_train') + +max_img = 30000 +max_ann = 10000000 + +crowdhuman_val_json = json.load(open('datasets/crowdhuman/annotations/val.json','r')) +img_id_count = 0 +for img in crowdhuman_val_json['images']: + img_id_count += 1 + img['file_name'] = 'crowdhuman_val/' + img['file_name'] + img['frame_id'] = img_id_count + img['prev_image_id'] = img['id'] + max_img + img['next_image_id'] = img['id'] + max_img + img['id'] = img['id'] + max_img + img['video_id'] = max_video + img_list.append(img) + +for ann in crowdhuman_val_json['annotations']: + ann['id'] = ann['id'] + max_ann + ann['image_id'] = ann['image_id'] + max_img + ann_list.append(ann) + +video_list.append({ + 'id': max_video, + 'file_name': 'crowdhuman_val' +}) + +print('crowdhuman_val') + +mix_json = dict() +mix_json['images'] = img_list +mix_json['annotations'] = ann_list +mix_json['videos'] = video_list +mix_json['categories'] = category_list +json.dump(mix_json, open('datasets/mix_mot_ch/annotations/train.json','w')) \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/tools/mix_data_test_mot17.py b/tracking/docker-build-context/byte_track/tools/mix_data_test_mot17.py new file mode 100644 index 0000000000000000000000000000000000000000..b0848db812dfe63e631dd8e35a401d7dbaecd767 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/mix_data_test_mot17.py @@ -0,0 +1,151 @@ +import json +import os + + +""" +cd datasets +mkdir -p mix_det/annotations +cp mot/annotations/val_half.json mix_det/annotations/val_half.json +cp mot/annotations/test.json mix_det/annotations/test.json +cd mix_det +ln -s ../mot/train mot_train +ln -s ../crowdhuman/CrowdHuman_train crowdhuman_train +ln -s ../crowdhuman/CrowdHuman_val crowdhuman_val +ln -s ../Cityscapes cp_train +ln -s ../ETHZ ethz_train +cd .. 
+""" + +mot_json = json.load(open('datasets/mot/annotations/train_half.json','r')) + +img_list = list() +for img in mot_json['images']: + img['file_name'] = 'mot_train/' + img['file_name'] + img_list.append(img) + +ann_list = list() +for ann in mot_json['annotations']: + ann_list.append(ann) + +video_list = mot_json['videos'] +category_list = mot_json['categories'] + + +print('mot17') + +max_img = 10000 +max_ann = 2000000 +max_video = 10 + +crowdhuman_json = json.load(open('datasets/crowdhuman/annotations/train.json','r')) +img_id_count = 0 +for img in crowdhuman_json['images']: + img_id_count += 1 + img['file_name'] = 'crowdhuman_train/' + img['file_name'] + img['frame_id'] = img_id_count + img['prev_image_id'] = img['id'] + max_img + img['next_image_id'] = img['id'] + max_img + img['id'] = img['id'] + max_img + img['video_id'] = max_video + img_list.append(img) + +for ann in crowdhuman_json['annotations']: + ann['id'] = ann['id'] + max_ann + ann['image_id'] = ann['image_id'] + max_img + ann_list.append(ann) + +print('crowdhuman_train') + +video_list.append({ + 'id': max_video, + 'file_name': 'crowdhuman_train' +}) + + +max_img = 30000 +max_ann = 10000000 + +crowdhuman_val_json = json.load(open('datasets/crowdhuman/annotations/val.json','r')) +img_id_count = 0 +for img in crowdhuman_val_json['images']: + img_id_count += 1 + img['file_name'] = 'crowdhuman_val/' + img['file_name'] + img['frame_id'] = img_id_count + img['prev_image_id'] = img['id'] + max_img + img['next_image_id'] = img['id'] + max_img + img['id'] = img['id'] + max_img + img['video_id'] = max_video + img_list.append(img) + +for ann in crowdhuman_val_json['annotations']: + ann['id'] = ann['id'] + max_ann + ann['image_id'] = ann['image_id'] + max_img + ann_list.append(ann) + +print('crowdhuman_val') + +video_list.append({ + 'id': max_video, + 'file_name': 'crowdhuman_val' +}) + +max_img = 40000 +max_ann = 20000000 + +ethz_json = json.load(open('datasets/ETHZ/annotations/train.json','r')) +img_id_count = 0 +for img in ethz_json['images']: + img_id_count += 1 + img['file_name'] = 'ethz_train/' + img['file_name'][5:] + img['frame_id'] = img_id_count + img['prev_image_id'] = img['id'] + max_img + img['next_image_id'] = img['id'] + max_img + img['id'] = img['id'] + max_img + img['video_id'] = max_video + img_list.append(img) + +for ann in ethz_json['annotations']: + ann['id'] = ann['id'] + max_ann + ann['image_id'] = ann['image_id'] + max_img + ann_list.append(ann) + +print('ETHZ') + +video_list.append({ + 'id': max_video, + 'file_name': 'ethz' +}) + +max_img = 50000 +max_ann = 25000000 + +cp_json = json.load(open('datasets/Cityscapes/annotations/train.json','r')) +img_id_count = 0 +for img in cp_json['images']: + img_id_count += 1 + img['file_name'] = 'cp_train/' + img['file_name'][11:] + img['frame_id'] = img_id_count + img['prev_image_id'] = img['id'] + max_img + img['next_image_id'] = img['id'] + max_img + img['id'] = img['id'] + max_img + img['video_id'] = max_video + img_list.append(img) + +for ann in cp_json['annotations']: + ann['id'] = ann['id'] + max_ann + ann['image_id'] = ann['image_id'] + max_img + ann_list.append(ann) + +print('Cityscapes') + +video_list.append({ + 'id': max_video, + 'file_name': 'cityperson' +}) + +mix_json = dict() +mix_json['images'] = img_list +mix_json['annotations'] = ann_list +mix_json['videos'] = video_list +mix_json['categories'] = category_list +json.dump(mix_json, open('datasets/mix_det/annotations/train.json','w')) \ No newline at end of file diff --git 
a/tracking/docker-build-context/byte_track/tools/mix_data_test_mot20.py b/tracking/docker-build-context/byte_track/tools/mix_data_test_mot20.py new file mode 100644 index 0000000000000000000000000000000000000000..e7bbafc2156dfc53536f547ed17e744f7cc0513e --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/mix_data_test_mot20.py @@ -0,0 +1,91 @@ +import json +import os + + +""" +cd datasets +mkdir -p mix_mot20_ch/annotations +cp MOT20/annotations/val_half.json mix_mot20_ch/annotations/val_half.json +cp MOT20/annotations/test.json mix_mot20_ch/annotations/test.json +cd mix_mot20_ch +ln -s ../MOT20/train mot20_train +ln -s ../crowdhuman/CrowdHuman_train crowdhuman_train +ln -s ../crowdhuman/CrowdHuman_val crowdhuman_val +cd .. +""" + +mot_json = json.load(open('datasets/MOT20/annotations/train.json','r')) + +img_list = list() +for img in mot_json['images']: + img['file_name'] = 'mot20_train/' + img['file_name'] + img_list.append(img) + +ann_list = list() +for ann in mot_json['annotations']: + ann_list.append(ann) + +video_list = mot_json['videos'] +category_list = mot_json['categories'] + + + + +max_img = 10000 +max_ann = 2000000 +max_video = 10 + +crowdhuman_json = json.load(open('datasets/crowdhuman/annotations/train.json','r')) +img_id_count = 0 +for img in crowdhuman_json['images']: + img_id_count += 1 + img['file_name'] = 'crowdhuman_train/' + img['file_name'] + img['frame_id'] = img_id_count + img['prev_image_id'] = img['id'] + max_img + img['next_image_id'] = img['id'] + max_img + img['id'] = img['id'] + max_img + img['video_id'] = max_video + img_list.append(img) + +for ann in crowdhuman_json['annotations']: + ann['id'] = ann['id'] + max_ann + ann['image_id'] = ann['image_id'] + max_img + ann_list.append(ann) + +video_list.append({ + 'id': max_video, + 'file_name': 'crowdhuman_train' +}) + + +max_img = 30000 +max_ann = 10000000 + +crowdhuman_val_json = json.load(open('datasets/crowdhuman/annotations/val.json','r')) +img_id_count = 0 +for img in crowdhuman_val_json['images']: + img_id_count += 1 + img['file_name'] = 'crowdhuman_val/' + img['file_name'] + img['frame_id'] = img_id_count + img['prev_image_id'] = img['id'] + max_img + img['next_image_id'] = img['id'] + max_img + img['id'] = img['id'] + max_img + img['video_id'] = max_video + img_list.append(img) + +for ann in crowdhuman_val_json['annotations']: + ann['id'] = ann['id'] + max_ann + ann['image_id'] = ann['image_id'] + max_img + ann_list.append(ann) + +video_list.append({ + 'id': max_video, + 'file_name': 'crowdhuman_val' +}) + +mix_json = dict() +mix_json['images'] = img_list +mix_json['annotations'] = ann_list +mix_json['videos'] = video_list +mix_json['categories'] = category_list +json.dump(mix_json, open('datasets/mix_mot20_ch/annotations/train.json','w')) \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/tools/mota.py b/tracking/docker-build-context/byte_track/tools/mota.py new file mode 100644 index 0000000000000000000000000000000000000000..29608a91999680e20d003c8443afc4ba35e9196a --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/mota.py @@ -0,0 +1,84 @@ +from loguru import logger + +import torch +import torch.backends.cudnn as cudnn +from torch.nn.parallel import DistributedDataParallel as DDP + +from yolox.core import launch +from yolox.exp import get_exp +from yolox.utils import configure_nccl, fuse_model, get_local_rank, get_model_info, setup_logger +from yolox.evaluators import MOTEvaluator + +import argparse +import os +import random +import warnings 
+import glob +import motmetrics as mm +from collections import OrderedDict +from pathlib import Path + + +def compare_dataframes(gts, ts): + accs = [] + names = [] + for k, tsacc in ts.items(): + if k in gts: + logger.info('Comparing {}...'.format(k)) + accs.append(mm.utils.compare_to_groundtruth(gts[k], tsacc, 'iou', distth=0.5)) + names.append(k) + else: + logger.warning('No ground truth for {}, skipping.'.format(k)) + + return accs, names + + +# evaluate MOTA +results_folder = 'YOLOX_outputs/yolox_x_ablation/track_results' +mm.lap.default_solver = 'lap' + +gt_type = '_val_half' +#gt_type = '' +print('gt_type', gt_type) +gtfiles = glob.glob( + os.path.join('datasets/mot/train', '*/gt/gt{}.txt'.format(gt_type))) +print('gt_files', gtfiles) +tsfiles = [f for f in glob.glob(os.path.join(results_folder, '*.txt')) if not os.path.basename(f).startswith('eval')] + +logger.info('Found {} groundtruths and {} test files.'.format(len(gtfiles), len(tsfiles))) +logger.info('Available LAP solvers {}'.format(mm.lap.available_solvers)) +logger.info('Default LAP solver \'{}\''.format(mm.lap.default_solver)) +logger.info('Loading files.') + +gt = OrderedDict([(Path(f).parts[-3], mm.io.loadtxt(f, fmt='mot15-2D', min_confidence=1)) for f in gtfiles]) +ts = OrderedDict([(os.path.splitext(Path(f).parts[-1])[0], mm.io.loadtxt(f, fmt='mot15-2D', min_confidence=0.6)) for f in tsfiles]) + +mh = mm.metrics.create() +accs, names = compare_dataframes(gt, ts) + +logger.info('Running metrics') +metrics = ['recall', 'precision', 'num_unique_objects', 'mostly_tracked', + 'partially_tracked', 'mostly_lost', 'num_false_positives', 'num_misses', + 'num_switches', 'num_fragmentations', 'mota', 'motp', 'num_objects'] +summary = mh.compute_many(accs, names=names, metrics=metrics, generate_overall=True) +# summary = mh.compute_many(accs, names=names, metrics=mm.metrics.motchallenge_metrics, generate_overall=True) +# print(mm.io.render_summary( +# summary, formatters=mh.formatters, +# namemap=mm.io.motchallenge_metric_names)) +div_dict = { + 'num_objects': ['num_false_positives', 'num_misses', 'num_switches', 'num_fragmentations'], + 'num_unique_objects': ['mostly_tracked', 'partially_tracked', 'mostly_lost']} +for divisor in div_dict: + for divided in div_dict[divisor]: + summary[divided] = (summary[divided] / summary[divisor]) +fmt = mh.formatters +change_fmt_list = ['num_false_positives', 'num_misses', 'num_switches', 'num_fragmentations', 'mostly_tracked', + 'partially_tracked', 'mostly_lost'] +for k in change_fmt_list: + fmt[k] = fmt['mota'] +print(mm.io.render_summary(summary, formatters=fmt, namemap=mm.io.motchallenge_metric_names)) + +metrics = mm.metrics.motchallenge_metrics + ['num_objects'] +summary = mh.compute_many(accs, names=names, metrics=metrics, generate_overall=True) +print(mm.io.render_summary(summary, formatters=mh.formatters, namemap=mm.io.motchallenge_metric_names)) +logger.info('Completed') \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/tools/track.py b/tracking/docker-build-context/byte_track/tools/track.py new file mode 100644 index 0000000000000000000000000000000000000000..ee7769a543b417f84968301153e8d6d0d2d659a0 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/track.py @@ -0,0 +1,293 @@ +from loguru import logger + +import torch +import torch.backends.cudnn as cudnn +from torch.nn.parallel import DistributedDataParallel as DDP + +from yolox.core import launch +from yolox.exp import get_exp +from yolox.utils import configure_nccl, fuse_model, 
get_local_rank, get_model_info, setup_logger +from yolox.evaluators import MOTEvaluator + +import argparse +import os +import random +import warnings +import glob +import motmetrics as mm +from collections import OrderedDict +from pathlib import Path + + +def make_parser(): + parser = argparse.ArgumentParser("YOLOX Eval") + parser.add_argument("-expn", "--experiment-name", type=str, default=None) + parser.add_argument("-n", "--name", type=str, default=None, help="model name") + + # distributed + parser.add_argument( + "--dist-backend", default="nccl", type=str, help="distributed backend" + ) + parser.add_argument( + "--dist-url", + default=None, + type=str, + help="url used to set up distributed training", + ) + parser.add_argument("-b", "--batch-size", type=int, default=64, help="batch size") + parser.add_argument( + "-d", "--devices", default=None, type=int, help="device for training" + ) + parser.add_argument( + "--local_rank", default=0, type=int, help="local rank for dist training" + ) + parser.add_argument( + "--num_machines", default=1, type=int, help="num of node for training" + ) + parser.add_argument( + "--machine_rank", default=0, type=int, help="node rank for multi-node training" + ) + parser.add_argument( + "-f", + "--exp_file", + default=None, + type=str, + help="pls input your expriment description file", + ) + parser.add_argument( + "--fp16", + dest="fp16", + default=False, + action="store_true", + help="Adopting mix precision evaluating.", + ) + parser.add_argument( + "--fuse", + dest="fuse", + default=False, + action="store_true", + help="Fuse conv and bn for testing.", + ) + parser.add_argument( + "--trt", + dest="trt", + default=False, + action="store_true", + help="Using TensorRT model for testing.", + ) + parser.add_argument( + "--test", + dest="test", + default=False, + action="store_true", + help="Evaluating on test-dev set.", + ) + parser.add_argument( + "--speed", + dest="speed", + default=False, + action="store_true", + help="speed test only.", + ) + parser.add_argument( + "opts", + help="Modify config options using the command-line", + default=None, + nargs=argparse.REMAINDER, + ) + # det args + parser.add_argument("-c", "--ckpt", default=None, type=str, help="ckpt for eval") + parser.add_argument("--conf", default=0.01, type=float, help="test conf") + parser.add_argument("--nms", default=0.7, type=float, help="test nms threshold") + parser.add_argument("--tsize", default=None, type=int, help="test img size") + parser.add_argument("--seed", default=None, type=int, help="eval seed") + # tracking args + parser.add_argument("--track_thresh", type=float, default=0.6, help="tracking confidence threshold") + parser.add_argument("--track_buffer", type=int, default=30, help="the frames for keep lost tracks") + parser.add_argument("--match_thresh", type=float, default=0.9, help="matching threshold for tracking") + parser.add_argument("--min-box-area", type=float, default=100, help='filter out tiny boxes') + parser.add_argument("--mot20", dest="mot20", default=False, action="store_true", help="test mot20.") + return parser + + +def compare_dataframes(gts, ts): + accs = [] + names = [] + for k, tsacc in ts.items(): + if k in gts: + logger.info('Comparing {}...'.format(k)) + accs.append(mm.utils.compare_to_groundtruth(gts[k], tsacc, 'iou', distth=0.5)) + names.append(k) + else: + logger.warning('No ground truth for {}, skipping.'.format(k)) + + return accs, names + + +@logger.catch +def main(exp, args, num_gpu): + if args.seed is not None: + random.seed(args.seed) + 
torch.manual_seed(args.seed)
+        cudnn.deterministic = True
+        warnings.warn(
+            "You have chosen to seed testing. This will turn on the CUDNN deterministic setting."
+        )
+
+    is_distributed = num_gpu > 1
+
+    # enable cudnn autotuning for fixed-size inputs
+    cudnn.benchmark = True
+
+    rank = args.local_rank
+    # rank = get_local_rank()
+
+    file_name = os.path.join(exp.output_dir, args.experiment_name)
+
+    if rank == 0:
+        os.makedirs(file_name, exist_ok=True)
+
+    results_folder = os.path.join(file_name, "track_results")
+    os.makedirs(results_folder, exist_ok=True)
+
+    setup_logger(file_name, distributed_rank=rank, filename="val_log.txt", mode="a")
+    logger.info("Args: {}".format(args))
+
+    if args.conf is not None:
+        exp.test_conf = args.conf
+    if args.nms is not None:
+        exp.nmsthre = args.nms
+    if args.tsize is not None:
+        exp.test_size = (args.tsize, args.tsize)
+
+    model = exp.get_model()
+    logger.info("Model Summary: {}".format(get_model_info(model, exp.test_size)))
+    #logger.info("Model Structure:\n{}".format(str(model)))
+
+    val_loader = exp.get_eval_loader(args.batch_size, is_distributed, args.test)
+    evaluator = MOTEvaluator(
+        args=args,
+        dataloader=val_loader,
+        img_size=exp.test_size,
+        confthre=exp.test_conf,
+        nmsthre=exp.nmsthre,
+        num_classes=exp.num_classes,
+    )
+
+    torch.cuda.set_device(rank)
+    model.cuda(rank)
+    model.eval()
+
+    if not args.speed and not args.trt:
+        if args.ckpt is None:
+            ckpt_file = os.path.join(file_name, "best_ckpt.pth.tar")
+        else:
+            ckpt_file = args.ckpt
+        logger.info("loading checkpoint")
+        loc = "cuda:{}".format(rank)
+        ckpt = torch.load(ckpt_file, map_location=loc)
+        # load the model state dict
+        model.load_state_dict(ckpt["model"])
+        logger.info("loaded checkpoint done.")
+
+    if is_distributed:
+        model = DDP(model, device_ids=[rank])
+
+    if args.fuse:
+        logger.info("\tFusing model...")
+        model = fuse_model(model)
+
+    if args.trt:
+        assert (
+            not args.fuse and not is_distributed and args.batch_size == 1
+        ), "TensorRT model does not support model fusing or distributed inference!"
+        trt_file = os.path.join(file_name, "model_trt.pth")
+        assert os.path.exists(
+            trt_file
+        ), "TensorRT model is not found!\n Run tools/trt.py first!"
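compare_dataframes above only builds motmetrics accumulators; the MOTA/MOTP numbers come from `compute`/`compute_many` further down. A self-contained toy example of the same motmetrics API on a single hand-built frame; the ids and distance values are made up for illustration (with `'iou'` matching at `distth=0.5`, the distances correspond to 1 - IoU).

```python
# Toy motmetrics example mirroring what compare_dataframes() and
# mh.compute_many() do in this script: one frame, two ground-truth objects,
# two tracker hypotheses.
import motmetrics as mm

acc = mm.MOTAccumulator(auto_id=True)
acc.update(
    [1, 2],        # ground-truth ids visible in this frame
    ['a', 'b'],    # tracker hypothesis ids
    [[0.1, 1.0],   # distance matrix: rows = gt, cols = hypotheses (1 - IoU)
     [1.0, 0.2]],
)
mh = mm.metrics.create()
summary = mh.compute(acc, metrics=['num_frames', 'mota', 'motp'], name='toy')
print(summary)  # both objects matched, so MOTA = 1.0 for this single frame
```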
+ model.head.decode_in_inference = False + decoder = model.head.decode_outputs + else: + trt_file = None + decoder = None + + # start evaluate + *_, summary = evaluator.evaluate( + model, is_distributed, args.fp16, trt_file, decoder, exp.test_size, results_folder + ) + logger.info("\n" + summary) + + # evaluate MOTA + mm.lap.default_solver = 'lap' + + if exp.val_ann == 'val_half.json': + gt_type = '_val_half' + else: + gt_type = '' + print('gt_type', gt_type) + if args.mot20: + gtfiles = glob.glob(os.path.join('datasets/MOT20/train', '*/gt/gt{}.txt'.format(gt_type))) + else: + gtfiles = glob.glob(os.path.join('datasets/mot/train', '*/gt/gt{}.txt'.format(gt_type))) + print('gt_files', gtfiles) + tsfiles = [f for f in glob.glob(os.path.join(results_folder, '*.txt')) if not os.path.basename(f).startswith('eval')] + + logger.info('Found {} groundtruths and {} test files.'.format(len(gtfiles), len(tsfiles))) + logger.info('Available LAP solvers {}'.format(mm.lap.available_solvers)) + logger.info('Default LAP solver \'{}\''.format(mm.lap.default_solver)) + logger.info('Loading files.') + + gt = OrderedDict([(Path(f).parts[-3], mm.io.loadtxt(f, fmt='mot15-2D', min_confidence=1)) for f in gtfiles]) + ts = OrderedDict([(os.path.splitext(Path(f).parts[-1])[0], mm.io.loadtxt(f, fmt='mot15-2D', min_confidence=-1)) for f in tsfiles]) + + mh = mm.metrics.create() + accs, names = compare_dataframes(gt, ts) + + logger.info('Running metrics') + metrics = ['recall', 'precision', 'num_unique_objects', 'mostly_tracked', + 'partially_tracked', 'mostly_lost', 'num_false_positives', 'num_misses', + 'num_switches', 'num_fragmentations', 'mota', 'motp', 'num_objects'] + summary = mh.compute_many(accs, names=names, metrics=metrics, generate_overall=True) + # summary = mh.compute_many(accs, names=names, metrics=mm.metrics.motchallenge_metrics, generate_overall=True) + # print(mm.io.render_summary( + # summary, formatters=mh.formatters, + # namemap=mm.io.motchallenge_metric_names)) + div_dict = { + 'num_objects': ['num_false_positives', 'num_misses', 'num_switches', 'num_fragmentations'], + 'num_unique_objects': ['mostly_tracked', 'partially_tracked', 'mostly_lost']} + for divisor in div_dict: + for divided in div_dict[divisor]: + summary[divided] = (summary[divided] / summary[divisor]) + fmt = mh.formatters + change_fmt_list = ['num_false_positives', 'num_misses', 'num_switches', 'num_fragmentations', 'mostly_tracked', + 'partially_tracked', 'mostly_lost'] + for k in change_fmt_list: + fmt[k] = fmt['mota'] + print(mm.io.render_summary(summary, formatters=fmt, namemap=mm.io.motchallenge_metric_names)) + + metrics = mm.metrics.motchallenge_metrics + ['num_objects'] + summary = mh.compute_many(accs, names=names, metrics=metrics, generate_overall=True) + print(mm.io.render_summary(summary, formatters=mh.formatters, namemap=mm.io.motchallenge_metric_names)) + logger.info('Completed') + + +if __name__ == "__main__": + args = make_parser().parse_args() + exp = get_exp(args.exp_file, args.name) + exp.merge(args.opts) + + if not args.experiment_name: + args.experiment_name = exp.exp_name + + num_gpu = torch.cuda.device_count() if args.devices is None else args.devices + assert num_gpu <= torch.cuda.device_count() + + launch( + main, + num_gpu, + args.num_machines, + args.machine_rank, + backend=args.dist_backend, + dist_url=args.dist_url, + args=(exp, args, num_gpu), + ) diff --git a/tracking/docker-build-context/byte_track/tools/track_deepsort.py b/tracking/docker-build-context/byte_track/tools/track_deepsort.py new file 
mode 100644 index 0000000000000000000000000000000000000000..b59c423a1bfd48b1342d5a469e363d2c010b9ff0 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/track_deepsort.py @@ -0,0 +1,293 @@ +from loguru import logger + +import torch +import torch.backends.cudnn as cudnn +from torch.nn.parallel import DistributedDataParallel as DDP + +from yolox.core import launch +from yolox.exp import get_exp +from yolox.utils import configure_nccl, fuse_model, get_local_rank, get_model_info, setup_logger +from yolox.evaluators import MOTEvaluator + +import argparse +import os +import random +import warnings +import glob +import motmetrics as mm +from collections import OrderedDict +from pathlib import Path + + +def make_parser(): + parser = argparse.ArgumentParser("YOLOX Eval") + parser.add_argument("-expn", "--experiment-name", type=str, default=None) + parser.add_argument("-n", "--name", type=str, default=None, help="model name") + + # distributed + parser.add_argument( + "--dist-backend", default="nccl", type=str, help="distributed backend" + ) + parser.add_argument( + "--dist-url", + default=None, + type=str, + help="url used to set up distributed training", + ) + parser.add_argument("-b", "--batch-size", type=int, default=64, help="batch size") + parser.add_argument( + "-d", "--devices", default=None, type=int, help="device for training" + ) + parser.add_argument( + "--local_rank", default=0, type=int, help="local rank for dist training" + ) + parser.add_argument( + "--num_machines", default=1, type=int, help="num of node for training" + ) + parser.add_argument( + "--machine_rank", default=0, type=int, help="node rank for multi-node training" + ) + parser.add_argument( + "-f", + "--exp_file", + default=None, + type=str, + help="pls input your expriment description file", + ) + parser.add_argument( + "--fp16", + dest="fp16", + default=False, + action="store_true", + help="Adopting mix precision evaluating.", + ) + parser.add_argument( + "--fuse", + dest="fuse", + default=False, + action="store_true", + help="Fuse conv and bn for testing.", + ) + parser.add_argument( + "--trt", + dest="trt", + default=False, + action="store_true", + help="Using TensorRT model for testing.", + ) + parser.add_argument( + "--test", + dest="test", + default=False, + action="store_true", + help="Evaluating on test-dev set.", + ) + parser.add_argument( + "--speed", + dest="speed", + default=False, + action="store_true", + help="speed test only.", + ) + parser.add_argument( + "opts", + help="Modify config options using the command-line", + default=None, + nargs=argparse.REMAINDER, + ) + # det args + parser.add_argument("-c", "--ckpt", default=None, type=str, help="ckpt for eval") + parser.add_argument("--conf", default=0.1, type=float, help="test conf") + parser.add_argument("--nms", default=0.7, type=float, help="test nms threshold") + parser.add_argument("--tsize", default=None, type=int, help="test img size") + parser.add_argument("--seed", default=None, type=int, help="eval seed") + # tracking args + parser.add_argument("--track_thresh", type=float, default=0.5, help="tracking confidence threshold") + parser.add_argument("--track_buffer", type=int, default=30, help="the frames for keep lost tracks") + parser.add_argument("--match_thresh", type=int, default=0.9, help="matching threshold for tracking") + parser.add_argument('--min-box-area', type=float, default=100, help='filter out tiny boxes') + # deepsort args + parser.add_argument("--model_folder", type=str, default='pretrained/ckpt.t7', help="reid model 
folder") + return parser + + +def compare_dataframes(gts, ts): + accs = [] + names = [] + for k, tsacc in ts.items(): + if k in gts: + logger.info('Comparing {}...'.format(k)) + accs.append(mm.utils.compare_to_groundtruth(gts[k], tsacc, 'iou', distth=0.5)) + names.append(k) + else: + logger.warning('No ground truth for {}, skipping.'.format(k)) + + return accs, names + + +@logger.catch +def main(exp, args, num_gpu): + if args.seed is not None: + random.seed(args.seed) + torch.manual_seed(args.seed) + cudnn.deterministic = True + warnings.warn( + "You have chosen to seed testing. This will turn on the CUDNN deterministic setting, " + ) + + is_distributed = num_gpu > 1 + + # set environment variables for distributed training + cudnn.benchmark = True + + rank = args.local_rank + # rank = get_local_rank() + + file_name = os.path.join(exp.output_dir, args.experiment_name) + + if rank == 0: + os.makedirs(file_name, exist_ok=True) + + results_folder = os.path.join(file_name, "track_results_deepsort") + os.makedirs(results_folder, exist_ok=True) + model_folder = args.model_folder + + setup_logger(file_name, distributed_rank=rank, filename="val_log.txt", mode="a") + logger.info("Args: {}".format(args)) + + if args.conf is not None: + exp.test_conf = args.conf + if args.nms is not None: + exp.nmsthre = args.nms + if args.tsize is not None: + exp.test_size = (args.tsize, args.tsize) + + model = exp.get_model() + logger.info("Model Summary: {}".format(get_model_info(model, exp.test_size))) + #logger.info("Model Structure:\n{}".format(str(model))) + + #evaluator = exp.get_evaluator(args.batch_size, is_distributed, args.test) + + val_loader = exp.get_eval_loader(args.batch_size, is_distributed, args.test) + evaluator = MOTEvaluator( + args=args, + dataloader=val_loader, + img_size=exp.test_size, + confthre=exp.test_conf, + nmsthre=exp.nmsthre, + num_classes=exp.num_classes, + ) + + torch.cuda.set_device(rank) + model.cuda(rank) + model.eval() + + if not args.speed and not args.trt: + if args.ckpt is None: + ckpt_file = os.path.join(file_name, "best_ckpt.pth.tar") + else: + ckpt_file = args.ckpt + logger.info("loading checkpoint") + loc = "cuda:{}".format(rank) + ckpt = torch.load(ckpt_file, map_location=loc) + # load the model state dict + model.load_state_dict(ckpt["model"]) + logger.info("loaded checkpoint done.") + + if is_distributed: + model = DDP(model, device_ids=[rank]) + + if args.fuse: + logger.info("\tFusing model...") + model = fuse_model(model) + + if args.trt: + assert ( + not args.fuse and not is_distributed and args.batch_size == 1 + ), "TensorRT model is not support model fusing and distributed inferencing!" + trt_file = os.path.join(file_name, "model_trt.pth") + assert os.path.exists( + trt_file + ), "TensorRT model is not found!\n Run tools/trt.py first!" 
+ model.head.decode_in_inference = False + decoder = model.head.decode_outputs + else: + trt_file = None + decoder = None + + # start evaluate + *_, summary = evaluator.evaluate_deepsort( + model, is_distributed, args.fp16, trt_file, decoder, exp.test_size, results_folder, model_folder + ) + logger.info("\n" + summary) + + # evaluate MOTA + mm.lap.default_solver = 'lap' + + gt_type = '_val_half' + #gt_type = '' + print('gt_type', gt_type) + gtfiles = glob.glob( + os.path.join('datasets/mot/train', '*/gt/gt{}.txt'.format(gt_type))) + print('gt_files', gtfiles) + tsfiles = [f for f in glob.glob(os.path.join(results_folder, '*.txt')) if not os.path.basename(f).startswith('eval')] + + logger.info('Found {} groundtruths and {} test files.'.format(len(gtfiles), len(tsfiles))) + logger.info('Available LAP solvers {}'.format(mm.lap.available_solvers)) + logger.info('Default LAP solver \'{}\''.format(mm.lap.default_solver)) + logger.info('Loading files.') + + gt = OrderedDict([(Path(f).parts[-3], mm.io.loadtxt(f, fmt='mot15-2D', min_confidence=1)) for f in gtfiles]) + ts = OrderedDict([(os.path.splitext(Path(f).parts[-1])[0], mm.io.loadtxt(f, fmt='mot15-2D', min_confidence=-1)) for f in tsfiles]) + + mh = mm.metrics.create() + accs, names = compare_dataframes(gt, ts) + + logger.info('Running metrics') + metrics = ['recall', 'precision', 'num_unique_objects', 'mostly_tracked', + 'partially_tracked', 'mostly_lost', 'num_false_positives', 'num_misses', + 'num_switches', 'num_fragmentations', 'mota', 'motp', 'num_objects'] + summary = mh.compute_many(accs, names=names, metrics=metrics, generate_overall=True) + # summary = mh.compute_many(accs, names=names, metrics=mm.metrics.motchallenge_metrics, generate_overall=True) + # print(mm.io.render_summary( + # summary, formatters=mh.formatters, + # namemap=mm.io.motchallenge_metric_names)) + div_dict = { + 'num_objects': ['num_false_positives', 'num_misses', 'num_switches', 'num_fragmentations'], + 'num_unique_objects': ['mostly_tracked', 'partially_tracked', 'mostly_lost']} + for divisor in div_dict: + for divided in div_dict[divisor]: + summary[divided] = (summary[divided] / summary[divisor]) + fmt = mh.formatters + change_fmt_list = ['num_false_positives', 'num_misses', 'num_switches', 'num_fragmentations', 'mostly_tracked', + 'partially_tracked', 'mostly_lost'] + for k in change_fmt_list: + fmt[k] = fmt['mota'] + print(mm.io.render_summary(summary, formatters=fmt, namemap=mm.io.motchallenge_metric_names)) + + metrics = mm.metrics.motchallenge_metrics + ['num_objects'] + summary = mh.compute_many(accs, names=names, metrics=metrics, generate_overall=True) + print(mm.io.render_summary(summary, formatters=mh.formatters, namemap=mm.io.motchallenge_metric_names)) + logger.info('Completed') + + +if __name__ == "__main__": + args = make_parser().parse_args() + exp = get_exp(args.exp_file, args.name) + exp.merge(args.opts) + + if not args.experiment_name: + args.experiment_name = exp.exp_name + + num_gpu = torch.cuda.device_count() if args.devices is None else args.devices + assert num_gpu <= torch.cuda.device_count() + + launch( + main, + num_gpu, + args.num_machines, + args.machine_rank, + backend=args.dist_backend, + dist_url=args.dist_url, + args=(exp, args, num_gpu), + ) diff --git a/tracking/docker-build-context/byte_track/tools/track_motdt.py b/tracking/docker-build-context/byte_track/tools/track_motdt.py new file mode 100644 index 0000000000000000000000000000000000000000..303815dca938c66147ac0cfd301bb7bb11e240ae --- /dev/null +++ 
b/tracking/docker-build-context/byte_track/tools/track_motdt.py @@ -0,0 +1,293 @@ +from loguru import logger + +import torch +import torch.backends.cudnn as cudnn +from torch.nn.parallel import DistributedDataParallel as DDP + +from yolox.core import launch +from yolox.exp import get_exp +from yolox.utils import configure_nccl, fuse_model, get_local_rank, get_model_info, setup_logger +from yolox.evaluators import MOTEvaluator + +import argparse +import os +import random +import warnings +import glob +import motmetrics as mm +from collections import OrderedDict +from pathlib import Path + + +def make_parser(): + parser = argparse.ArgumentParser("YOLOX Eval") + parser.add_argument("-expn", "--experiment-name", type=str, default=None) + parser.add_argument("-n", "--name", type=str, default=None, help="model name") + + # distributed + parser.add_argument( + "--dist-backend", default="nccl", type=str, help="distributed backend" + ) + parser.add_argument( + "--dist-url", + default=None, + type=str, + help="url used to set up distributed training", + ) + parser.add_argument("-b", "--batch-size", type=int, default=64, help="batch size") + parser.add_argument( + "-d", "--devices", default=None, type=int, help="device for training" + ) + parser.add_argument( + "--local_rank", default=0, type=int, help="local rank for dist training" + ) + parser.add_argument( + "--num_machines", default=1, type=int, help="num of node for training" + ) + parser.add_argument( + "--machine_rank", default=0, type=int, help="node rank for multi-node training" + ) + parser.add_argument( + "-f", + "--exp_file", + default=None, + type=str, + help="pls input your expriment description file", + ) + parser.add_argument( + "--fp16", + dest="fp16", + default=False, + action="store_true", + help="Adopting mix precision evaluating.", + ) + parser.add_argument( + "--fuse", + dest="fuse", + default=False, + action="store_true", + help="Fuse conv and bn for testing.", + ) + parser.add_argument( + "--trt", + dest="trt", + default=False, + action="store_true", + help="Using TensorRT model for testing.", + ) + parser.add_argument( + "--test", + dest="test", + default=False, + action="store_true", + help="Evaluating on test-dev set.", + ) + parser.add_argument( + "--speed", + dest="speed", + default=False, + action="store_true", + help="speed test only.", + ) + parser.add_argument( + "opts", + help="Modify config options using the command-line", + default=None, + nargs=argparse.REMAINDER, + ) + # det args + parser.add_argument("-c", "--ckpt", default=None, type=str, help="ckpt for eval") + parser.add_argument("--conf", default=0.1, type=float, help="test conf") + parser.add_argument("--nms", default=0.7, type=float, help="test nms threshold") + parser.add_argument("--tsize", default=None, type=int, help="test img size") + parser.add_argument("--seed", default=None, type=int, help="eval seed") + # tracking args + parser.add_argument("--track_thresh", type=float, default=0.6, help="tracking confidence threshold") + parser.add_argument("--track_buffer", type=int, default=30, help="the frames for keep lost tracks") + parser.add_argument("--match_thresh", type=int, default=0.9, help="matching threshold for tracking") + parser.add_argument('--min-box-area', type=float, default=100, help='filter out tiny boxes') + # deepsort args + parser.add_argument("--model_folder", type=str, default='pretrained/googlenet_part8_all_xavier_ckpt_56.h5', help="reid model folder") + return parser + + +def compare_dataframes(gts, ts): + accs = [] + names = [] + 
for k, tsacc in ts.items(): + if k in gts: + logger.info('Comparing {}...'.format(k)) + accs.append(mm.utils.compare_to_groundtruth(gts[k], tsacc, 'iou', distth=0.5)) + names.append(k) + else: + logger.warning('No ground truth for {}, skipping.'.format(k)) + + return accs, names + + +@logger.catch +def main(exp, args, num_gpu): + if args.seed is not None: + random.seed(args.seed) + torch.manual_seed(args.seed) + cudnn.deterministic = True + warnings.warn( + "You have chosen to seed testing. This will turn on the CUDNN deterministic setting, " + ) + + is_distributed = num_gpu > 1 + + # set environment variables for distributed training + cudnn.benchmark = True + + rank = args.local_rank + # rank = get_local_rank() + + file_name = os.path.join(exp.output_dir, args.experiment_name) + + if rank == 0: + os.makedirs(file_name, exist_ok=True) + + results_folder = os.path.join(file_name, "track_results_motdt") + os.makedirs(results_folder, exist_ok=True) + model_folder = args.model_folder + + setup_logger(file_name, distributed_rank=rank, filename="val_log.txt", mode="a") + logger.info("Args: {}".format(args)) + + if args.conf is not None: + exp.test_conf = args.conf + if args.nms is not None: + exp.nmsthre = args.nms + if args.tsize is not None: + exp.test_size = (args.tsize, args.tsize) + + model = exp.get_model() + logger.info("Model Summary: {}".format(get_model_info(model, exp.test_size))) + #logger.info("Model Structure:\n{}".format(str(model))) + + #evaluator = exp.get_evaluator(args.batch_size, is_distributed, args.test) + + val_loader = exp.get_eval_loader(args.batch_size, is_distributed, args.test) + evaluator = MOTEvaluator( + args=args, + dataloader=val_loader, + img_size=exp.test_size, + confthre=exp.test_conf, + nmsthre=exp.nmsthre, + num_classes=exp.num_classes, + ) + + torch.cuda.set_device(rank) + model.cuda(rank) + model.eval() + + if not args.speed and not args.trt: + if args.ckpt is None: + ckpt_file = os.path.join(file_name, "best_ckpt.pth.tar") + else: + ckpt_file = args.ckpt + logger.info("loading checkpoint") + loc = "cuda:{}".format(rank) + ckpt = torch.load(ckpt_file, map_location=loc) + # load the model state dict + model.load_state_dict(ckpt["model"]) + logger.info("loaded checkpoint done.") + + if is_distributed: + model = DDP(model, device_ids=[rank]) + + if args.fuse: + logger.info("\tFusing model...") + model = fuse_model(model) + + if args.trt: + assert ( + not args.fuse and not is_distributed and args.batch_size == 1 + ), "TensorRT model is not support model fusing and distributed inferencing!" + trt_file = os.path.join(file_name, "model_trt.pth") + assert os.path.exists( + trt_file + ), "TensorRT model is not found!\n Run tools/trt.py first!" 
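Further down, main() converts the raw count metrics into rates by dividing them through `num_objects` / `num_unique_objects` before rendering the summary. A standalone sketch of that normalization on a made-up one-row summary table; the numbers are illustrative only.

```python
# Standalone sketch of the div_dict normalization applied to the metrics
# summary below: raw counts become rates relative to the object totals.
import pandas as pd

summary = pd.DataFrame(
    {'num_false_positives': [120], 'num_misses': [300], 'num_objects': [6000],
     'mostly_tracked': [20], 'mostly_lost': [5], 'num_unique_objects': [50]},
    index=['OVERALL'],
)
div_dict = {
    'num_objects': ['num_false_positives', 'num_misses'],
    'num_unique_objects': ['mostly_tracked', 'mostly_lost'],
}
for divisor, columns in div_dict.items():
    for col in columns:
        summary[col] = summary[col] / summary[divisor]
print(summary)  # e.g. num_false_positives becomes 0.02 = 120 / 6000
```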
+ model.head.decode_in_inference = False + decoder = model.head.decode_outputs + else: + trt_file = None + decoder = None + + # start evaluate + *_, summary = evaluator.evaluate_motdt( + model, is_distributed, args.fp16, trt_file, decoder, exp.test_size, results_folder, model_folder + ) + logger.info("\n" + summary) + + # evaluate MOTA + mm.lap.default_solver = 'lap' + + gt_type = '_val_half' + #gt_type = '' + print('gt_type', gt_type) + gtfiles = glob.glob( + os.path.join('datasets/mot/train', '*/gt/gt{}.txt'.format(gt_type))) + print('gt_files', gtfiles) + tsfiles = [f for f in glob.glob(os.path.join(results_folder, '*.txt')) if not os.path.basename(f).startswith('eval')] + + logger.info('Found {} groundtruths and {} test files.'.format(len(gtfiles), len(tsfiles))) + logger.info('Available LAP solvers {}'.format(mm.lap.available_solvers)) + logger.info('Default LAP solver \'{}\''.format(mm.lap.default_solver)) + logger.info('Loading files.') + + gt = OrderedDict([(Path(f).parts[-3], mm.io.loadtxt(f, fmt='mot15-2D', min_confidence=1)) for f in gtfiles]) + ts = OrderedDict([(os.path.splitext(Path(f).parts[-1])[0], mm.io.loadtxt(f, fmt='mot15-2D', min_confidence=-1)) for f in tsfiles]) + + mh = mm.metrics.create() + accs, names = compare_dataframes(gt, ts) + + logger.info('Running metrics') + metrics = ['recall', 'precision', 'num_unique_objects', 'mostly_tracked', + 'partially_tracked', 'mostly_lost', 'num_false_positives', 'num_misses', + 'num_switches', 'num_fragmentations', 'mota', 'motp', 'num_objects'] + summary = mh.compute_many(accs, names=names, metrics=metrics, generate_overall=True) + # summary = mh.compute_many(accs, names=names, metrics=mm.metrics.motchallenge_metrics, generate_overall=True) + # print(mm.io.render_summary( + # summary, formatters=mh.formatters, + # namemap=mm.io.motchallenge_metric_names)) + div_dict = { + 'num_objects': ['num_false_positives', 'num_misses', 'num_switches', 'num_fragmentations'], + 'num_unique_objects': ['mostly_tracked', 'partially_tracked', 'mostly_lost']} + for divisor in div_dict: + for divided in div_dict[divisor]: + summary[divided] = (summary[divided] / summary[divisor]) + fmt = mh.formatters + change_fmt_list = ['num_false_positives', 'num_misses', 'num_switches', 'num_fragmentations', 'mostly_tracked', + 'partially_tracked', 'mostly_lost'] + for k in change_fmt_list: + fmt[k] = fmt['mota'] + print(mm.io.render_summary(summary, formatters=fmt, namemap=mm.io.motchallenge_metric_names)) + + metrics = mm.metrics.motchallenge_metrics + ['num_objects'] + summary = mh.compute_many(accs, names=names, metrics=metrics, generate_overall=True) + print(mm.io.render_summary(summary, formatters=mh.formatters, namemap=mm.io.motchallenge_metric_names)) + logger.info('Completed') + + +if __name__ == "__main__": + args = make_parser().parse_args() + exp = get_exp(args.exp_file, args.name) + exp.merge(args.opts) + + if not args.experiment_name: + args.experiment_name = exp.exp_name + + num_gpu = torch.cuda.device_count() if args.devices is None else args.devices + assert num_gpu <= torch.cuda.device_count() + + launch( + main, + num_gpu, + args.num_machines, + args.machine_rank, + backend=args.dist_backend, + dist_url=args.dist_url, + args=(exp, args, num_gpu), + ) diff --git a/tracking/docker-build-context/byte_track/tools/track_sort.py b/tracking/docker-build-context/byte_track/tools/track_sort.py new file mode 100644 index 0000000000000000000000000000000000000000..7a50527d30558918f121e75402ad8ea44093c5ec --- /dev/null +++ 
b/tracking/docker-build-context/byte_track/tools/track_sort.py
@@ -0,0 +1,290 @@
+from loguru import logger
+
+import torch
+import torch.backends.cudnn as cudnn
+from torch.nn.parallel import DistributedDataParallel as DDP
+
+from yolox.core import launch
+from yolox.exp import get_exp
+from yolox.utils import configure_nccl, fuse_model, get_local_rank, get_model_info, setup_logger
+from yolox.evaluators import MOTEvaluator
+
+import argparse
+import os
+import random
+import warnings
+import glob
+import motmetrics as mm
+from collections import OrderedDict
+from pathlib import Path
+
+
+def make_parser():
+    parser = argparse.ArgumentParser("YOLOX Eval")
+    parser.add_argument("-expn", "--experiment-name", type=str, default=None)
+    parser.add_argument("-n", "--name", type=str, default=None, help="model name")
+
+    # distributed
+    parser.add_argument(
+        "--dist-backend", default="nccl", type=str, help="distributed backend"
+    )
+    parser.add_argument(
+        "--dist-url",
+        default=None,
+        type=str,
+        help="url used to set up distributed training",
+    )
+    parser.add_argument("-b", "--batch-size", type=int, default=64, help="batch size")
+    parser.add_argument(
+        "-d", "--devices", default=None, type=int, help="device for training"
+    )
+    parser.add_argument(
+        "--local_rank", default=0, type=int, help="local rank for dist training"
+    )
+    parser.add_argument(
+        "--num_machines", default=1, type=int, help="number of nodes for training"
+    )
+    parser.add_argument(
+        "--machine_rank", default=0, type=int, help="node rank for multi-node training"
+    )
+    parser.add_argument(
+        "-f",
+        "--exp_file",
+        default=None,
+        type=str,
+        help="please input your experiment description file",
+    )
+    parser.add_argument(
+        "--fp16",
+        dest="fp16",
+        default=False,
+        action="store_true",
+        help="Adopt mixed-precision evaluation.",
+    )
+    parser.add_argument(
+        "--fuse",
+        dest="fuse",
+        default=False,
+        action="store_true",
+        help="Fuse conv and bn for testing.",
+    )
+    parser.add_argument(
+        "--trt",
+        dest="trt",
+        default=False,
+        action="store_true",
+        help="Use a TensorRT model for testing.",
+    )
+    parser.add_argument(
+        "--test",
+        dest="test",
+        default=False,
+        action="store_true",
+        help="Evaluate on the test-dev set.",
+    )
+    parser.add_argument(
+        "--speed",
+        dest="speed",
+        default=False,
+        action="store_true",
+        help="speed test only.",
+    )
+    parser.add_argument(
+        "opts",
+        help="Modify config options using the command-line",
+        default=None,
+        nargs=argparse.REMAINDER,
+    )
+    # det args
+    parser.add_argument("-c", "--ckpt", default=None, type=str, help="ckpt for eval")
+    parser.add_argument("--conf", default=0.1, type=float, help="test conf")
+    parser.add_argument("--nms", default=0.7, type=float, help="test nms threshold")
+    parser.add_argument("--tsize", default=None, type=int, help="test img size")
+    parser.add_argument("--seed", default=None, type=int, help="eval seed")
+    # tracking args
+    parser.add_argument("--track_thresh", type=float, default=0.4, help="tracking confidence threshold")
+    parser.add_argument("--track_buffer", type=int, default=30, help="number of frames to keep lost tracks")
+    parser.add_argument("--match_thresh", type=float, default=0.9, help="matching threshold for tracking")
+    parser.add_argument('--min-box-area', type=float, default=100, help='filter out tiny boxes')
+    return parser
+
+
+def compare_dataframes(gts, ts):
+    accs = []
+    names = []
+    for k, tsacc in ts.items():
+        if k in gts:
+            logger.info('Comparing {}...'.format(k))
+            accs.append(mm.utils.compare_to_groundtruth(gts[k], tsacc, 'iou', distth=0.5))
+            names.append(k)
+        else:
+            logger.warning('No ground truth for {}, skipping.'.format(k))
+
+    return accs, names
+
+
+@logger.catch
+def main(exp, args, num_gpu):
+    if args.seed is not None:
+        random.seed(args.seed)
+        torch.manual_seed(args.seed)
+        cudnn.deterministic = True
+        warnings.warn(
+            "You have chosen to seed testing. This will turn on the CUDNN deterministic setting, "
+            "which can slow down your testing considerably!"
+        )
+
+    is_distributed = num_gpu > 1
+
+    # set environment variables for distributed training
+    cudnn.benchmark = True
+
+    rank = args.local_rank
+    # rank = get_local_rank()
+
+    file_name = os.path.join(exp.output_dir, args.experiment_name)
+
+    if rank == 0:
+        os.makedirs(file_name, exist_ok=True)
+
+    results_folder = os.path.join(file_name, "track_results_sort")
+    os.makedirs(results_folder, exist_ok=True)
+
+    setup_logger(file_name, distributed_rank=rank, filename="val_log.txt", mode="a")
+    logger.info("Args: {}".format(args))
+
+    if args.conf is not None:
+        exp.test_conf = args.conf
+    if args.nms is not None:
+        exp.nmsthre = args.nms
+    if args.tsize is not None:
+        exp.test_size = (args.tsize, args.tsize)
+
+    model = exp.get_model()
+    logger.info("Model Summary: {}".format(get_model_info(model, exp.test_size)))
+    #logger.info("Model Structure:\n{}".format(str(model)))
+
+    #evaluator = exp.get_evaluator(args.batch_size, is_distributed, args.test)
+
+    val_loader = exp.get_eval_loader(args.batch_size, is_distributed, args.test)
+    evaluator = MOTEvaluator(
+        args=args,
+        dataloader=val_loader,
+        img_size=exp.test_size,
+        confthre=exp.test_conf,
+        nmsthre=exp.nmsthre,
+        num_classes=exp.num_classes,
+    )
+
+    torch.cuda.set_device(rank)
+    model.cuda(rank)
+    model.eval()
+
+    if not args.speed and not args.trt:
+        if args.ckpt is None:
+            ckpt_file = os.path.join(file_name, "best_ckpt.pth.tar")
+        else:
+            ckpt_file = args.ckpt
+        logger.info("loading checkpoint")
+        loc = "cuda:{}".format(rank)
+        ckpt = torch.load(ckpt_file, map_location=loc)
+        # load the model state dict
+        model.load_state_dict(ckpt["model"])
+        logger.info("loaded checkpoint done.")
+
+    if is_distributed:
+        model = DDP(model, device_ids=[rank])
+
+    if args.fuse:
+        logger.info("\tFusing model...")
+        model = fuse_model(model)
+
+    if args.trt:
+        assert (
+            not args.fuse and not is_distributed and args.batch_size == 1
+        ), "TensorRT model does not support model fusing or distributed inference!"
+        trt_file = os.path.join(file_name, "model_trt.pth")
+        assert os.path.exists(
+            trt_file
+        ), "TensorRT model is not found!\n Run tools/trt.py first!"
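+        # The TRT engine contains no in-graph decoding (tools/trt.py sets
+        # decode_in_inference=False before conversion), so the head's
+        # decode_outputs is handed to the evaluator as an explicit decoder.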
+ model.head.decode_in_inference = False + decoder = model.head.decode_outputs + else: + trt_file = None + decoder = None + + # start evaluate + *_, summary = evaluator.evaluate_sort( + model, is_distributed, args.fp16, trt_file, decoder, exp.test_size, results_folder + ) + logger.info("\n" + summary) + + # evaluate MOTA + mm.lap.default_solver = 'lap' + + gt_type = '_val_half' + #gt_type = '' + print('gt_type', gt_type) + gtfiles = glob.glob( + os.path.join('datasets/mot/train', '*/gt/gt{}.txt'.format(gt_type))) + print('gt_files', gtfiles) + tsfiles = [f for f in glob.glob(os.path.join(results_folder, '*.txt')) if not os.path.basename(f).startswith('eval')] + + logger.info('Found {} groundtruths and {} test files.'.format(len(gtfiles), len(tsfiles))) + logger.info('Available LAP solvers {}'.format(mm.lap.available_solvers)) + logger.info('Default LAP solver \'{}\''.format(mm.lap.default_solver)) + logger.info('Loading files.') + + gt = OrderedDict([(Path(f).parts[-3], mm.io.loadtxt(f, fmt='mot15-2D', min_confidence=1)) for f in gtfiles]) + ts = OrderedDict([(os.path.splitext(Path(f).parts[-1])[0], mm.io.loadtxt(f, fmt='mot15-2D', min_confidence=-1)) for f in tsfiles]) + + mh = mm.metrics.create() + accs, names = compare_dataframes(gt, ts) + + logger.info('Running metrics') + metrics = ['recall', 'precision', 'num_unique_objects', 'mostly_tracked', + 'partially_tracked', 'mostly_lost', 'num_false_positives', 'num_misses', + 'num_switches', 'num_fragmentations', 'mota', 'motp', 'num_objects'] + summary = mh.compute_many(accs, names=names, metrics=metrics, generate_overall=True) + # summary = mh.compute_many(accs, names=names, metrics=mm.metrics.motchallenge_metrics, generate_overall=True) + # print(mm.io.render_summary( + # summary, formatters=mh.formatters, + # namemap=mm.io.motchallenge_metric_names)) + div_dict = { + 'num_objects': ['num_false_positives', 'num_misses', 'num_switches', 'num_fragmentations'], + 'num_unique_objects': ['mostly_tracked', 'partially_tracked', 'mostly_lost']} + for divisor in div_dict: + for divided in div_dict[divisor]: + summary[divided] = (summary[divided] / summary[divisor]) + fmt = mh.formatters + change_fmt_list = ['num_false_positives', 'num_misses', 'num_switches', 'num_fragmentations', 'mostly_tracked', + 'partially_tracked', 'mostly_lost'] + for k in change_fmt_list: + fmt[k] = fmt['mota'] + print(mm.io.render_summary(summary, formatters=fmt, namemap=mm.io.motchallenge_metric_names)) + + metrics = mm.metrics.motchallenge_metrics + ['num_objects'] + summary = mh.compute_many(accs, names=names, metrics=metrics, generate_overall=True) + print(mm.io.render_summary(summary, formatters=mh.formatters, namemap=mm.io.motchallenge_metric_names)) + logger.info('Completed') + + +if __name__ == "__main__": + args = make_parser().parse_args() + exp = get_exp(args.exp_file, args.name) + exp.merge(args.opts) + + if not args.experiment_name: + args.experiment_name = exp.exp_name + + num_gpu = torch.cuda.device_count() if args.devices is None else args.devices + assert num_gpu <= torch.cuda.device_count() + + launch( + main, + num_gpu, + args.num_machines, + args.machine_rank, + backend=args.dist_backend, + dist_url=args.dist_url, + args=(exp, args, num_gpu), + ) diff --git a/tracking/docker-build-context/byte_track/tools/train.py b/tracking/docker-build-context/byte_track/tools/train.py new file mode 100644 index 0000000000000000000000000000000000000000..d05360911952b3f5a8e4c0e8dbd51dfff34f8ea8 --- /dev/null +++ 
b/tracking/docker-build-context/byte_track/tools/train.py
@@ -0,0 +1,122 @@
+from loguru import logger
+
+import torch
+import torch.backends.cudnn as cudnn
+
+from yolox.core import Trainer, launch
+from yolox.exp import get_exp
+
+import argparse
+import random
+import warnings
+
+
+def make_parser():
+    parser = argparse.ArgumentParser("YOLOX train parser")
+    parser.add_argument("-expn", "--experiment-name", type=str, default=None)
+    parser.add_argument("-n", "--name", type=str, default=None, help="model name")
+
+    # distributed
+    parser.add_argument(
+        "--dist-backend", default="nccl", type=str, help="distributed backend"
+    )
+    parser.add_argument(
+        "--dist-url",
+        default=None,
+        type=str,
+        help="url used to set up distributed training",
+    )
+    parser.add_argument("-b", "--batch-size", type=int, default=64, help="batch size")
+    parser.add_argument(
+        "-d", "--devices", default=None, type=int, help="device for training"
+    )
+    parser.add_argument(
+        "--local_rank", default=0, type=int, help="local rank for dist training"
+    )
+    parser.add_argument(
+        "-f",
+        "--exp_file",
+        default=None,
+        type=str,
+        help="please input your experiment description file",
+    )
+    parser.add_argument(
+        "--resume", default=False, action="store_true", help="resume training"
+    )
+    parser.add_argument("-c", "--ckpt", default=None, type=str, help="checkpoint file")
+    parser.add_argument(
+        "-e",
+        "--start_epoch",
+        default=None,
+        type=int,
+        help="resume training start epoch",
+    )
+    parser.add_argument(
+        "--num_machines", default=1, type=int, help="number of nodes for training"
+    )
+    parser.add_argument(
+        "--machine_rank", default=0, type=int, help="node rank for multi-node training"
+    )
+    parser.add_argument(
+        "--fp16",
+        dest="fp16",
+        default=True,
+        action="store_true",
+        help="Adopt mixed-precision training (enabled by default).",
+    )
+    parser.add_argument(
+        "-o",
+        "--occupy",
+        dest="occupy",
+        default=False,
+        action="store_true",
+        help="occupy GPU memory first for training.",
+    )
+    parser.add_argument(
+        "opts",
+        help="Modify config options using the command-line",
+        default=None,
+        nargs=argparse.REMAINDER,
+    )
+    return parser
+
+
+@logger.catch
+def main(exp, args):
+    if exp.seed is not None:
+        random.seed(exp.seed)
+        torch.manual_seed(exp.seed)
+        cudnn.deterministic = True
+        warnings.warn(
+            "You have chosen to seed training. This will turn on the CUDNN deterministic setting, "
+            "which can slow down your training considerably! You may see unexpected behavior "
+            "when restarting from checkpoints."
+ ) + + # set environment variables for distributed training + cudnn.benchmark = True + + trainer = Trainer(exp, args) + trainer.train() + + +if __name__ == "__main__": + args = make_parser().parse_args() + exp = get_exp(args.exp_file, args.name) + exp.merge(args.opts) + + if not args.experiment_name: + args.experiment_name = exp.exp_name + + num_gpu = torch.cuda.device_count() if args.devices is None else args.devices + assert num_gpu <= torch.cuda.device_count() + + launch( + main, + num_gpu, + args.num_machines, + args.machine_rank, + backend=args.dist_backend, + dist_url=args.dist_url, + args=(exp, args), + ) diff --git a/tracking/docker-build-context/byte_track/tools/trt.py b/tracking/docker-build-context/byte_track/tools/trt.py new file mode 100644 index 0000000000000000000000000000000000000000..f4673e9b961cb051229fad92a32641af22e05dc9 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/trt.py @@ -0,0 +1,74 @@ +from loguru import logger + +import tensorrt as trt +import torch +from torch2trt import torch2trt + +from yolox.exp import get_exp + +import argparse +import os +import shutil + + +def make_parser(): + parser = argparse.ArgumentParser("YOLOX ncnn deploy") + parser.add_argument("-expn", "--experiment-name", type=str, default=None) + parser.add_argument("-n", "--name", type=str, default=None, help="model name") + + parser.add_argument( + "-f", + "--exp_file", + default=None, + type=str, + help="pls input your expriment description file", + ) + parser.add_argument("-c", "--ckpt", default=None, type=str, help="ckpt path") + return parser + + +@logger.catch +def main(): + args = make_parser().parse_args() + exp = get_exp(args.exp_file, args.name) + if not args.experiment_name: + args.experiment_name = exp.exp_name + + model = exp.get_model() + file_name = os.path.join(exp.output_dir, args.experiment_name) + os.makedirs(file_name, exist_ok=True) + if args.ckpt is None: + ckpt_file = os.path.join(file_name, "best_ckpt.pth.tar") + else: + ckpt_file = args.ckpt + + ckpt = torch.load(ckpt_file, map_location="cpu") + # load the model state dict + + model.load_state_dict(ckpt["model"]) + logger.info("loaded checkpoint done.") + model.eval() + model.cuda() + model.head.decode_in_inference = False + x = torch.ones(1, 3, exp.test_size[0], exp.test_size[1]).cuda() + model_trt = torch2trt( + model, + [x], + fp16_mode=True, + log_level=trt.Logger.INFO, + max_workspace_size=(1 << 32), + ) + torch.save(model_trt.state_dict(), os.path.join(file_name, "model_trt.pth")) + logger.info("Converted TensorRT model done.") + engine_file = os.path.join(file_name, "model_trt.engine") + engine_file_demo = os.path.join("deploy", "TensorRT", "cpp", "model_trt.engine") + with open(engine_file, "wb") as f: + f.write(model_trt.engine.serialize()) + + shutil.copyfile(engine_file, engine_file_demo) + + logger.info("Converted TensorRT model engine file is saved for C++ inference.") + + +if __name__ == "__main__": + main() diff --git a/tracking/docker-build-context/byte_track/tools/txt2video.py b/tracking/docker-build-context/byte_track/tools/txt2video.py new file mode 100644 index 0000000000000000000000000000000000000000..23ddd8ba8ba75e4dc46114351177aabccacd1ccb --- /dev/null +++ b/tracking/docker-build-context/byte_track/tools/txt2video.py @@ -0,0 +1,211 @@ +import os +import sys +import json +import cv2 +import glob as gb +import numpy as np + + +def colormap(rgb=False): + color_list = np.array( + [ + 0.000, 0.447, 0.741, + 0.850, 0.325, 0.098, + 0.929, 0.694, 0.125, + 0.494, 0.184, 0.556, + 0.466, 
0.674, 0.188, + 0.301, 0.745, 0.933, + 0.635, 0.078, 0.184, + 0.300, 0.300, 0.300, + 0.600, 0.600, 0.600, + 1.000, 0.000, 0.000, + 1.000, 0.500, 0.000, + 0.749, 0.749, 0.000, + 0.000, 1.000, 0.000, + 0.000, 0.000, 1.000, + 0.667, 0.000, 1.000, + 0.333, 0.333, 0.000, + 0.333, 0.667, 0.000, + 0.333, 1.000, 0.000, + 0.667, 0.333, 0.000, + 0.667, 0.667, 0.000, + 0.667, 1.000, 0.000, + 1.000, 0.333, 0.000, + 1.000, 0.667, 0.000, + 1.000, 1.000, 0.000, + 0.000, 0.333, 0.500, + 0.000, 0.667, 0.500, + 0.000, 1.000, 0.500, + 0.333, 0.000, 0.500, + 0.333, 0.333, 0.500, + 0.333, 0.667, 0.500, + 0.333, 1.000, 0.500, + 0.667, 0.000, 0.500, + 0.667, 0.333, 0.500, + 0.667, 0.667, 0.500, + 0.667, 1.000, 0.500, + 1.000, 0.000, 0.500, + 1.000, 0.333, 0.500, + 1.000, 0.667, 0.500, + 1.000, 1.000, 0.500, + 0.000, 0.333, 1.000, + 0.000, 0.667, 1.000, + 0.000, 1.000, 1.000, + 0.333, 0.000, 1.000, + 0.333, 0.333, 1.000, + 0.333, 0.667, 1.000, + 0.333, 1.000, 1.000, + 0.667, 0.000, 1.000, + 0.667, 0.333, 1.000, + 0.667, 0.667, 1.000, + 0.667, 1.000, 1.000, + 1.000, 0.000, 1.000, + 1.000, 0.333, 1.000, + 1.000, 0.667, 1.000, + 0.167, 0.000, 0.000, + 0.333, 0.000, 0.000, + 0.500, 0.000, 0.000, + 0.667, 0.000, 0.000, + 0.833, 0.000, 0.000, + 1.000, 0.000, 0.000, + 0.000, 0.167, 0.000, + 0.000, 0.333, 0.000, + 0.000, 0.500, 0.000, + 0.000, 0.667, 0.000, + 0.000, 0.833, 0.000, + 0.000, 1.000, 0.000, + 0.000, 0.000, 0.167, + 0.000, 0.000, 0.333, + 0.000, 0.000, 0.500, + 0.000, 0.000, 0.667, + 0.000, 0.000, 0.833, + 0.000, 0.000, 1.000, + 0.000, 0.000, 0.000, + 0.143, 0.143, 0.143, + 0.286, 0.286, 0.286, + 0.429, 0.429, 0.429, + 0.571, 0.571, 0.571, + 0.714, 0.714, 0.714, + 0.857, 0.857, 0.857, + 1.000, 1.000, 1.000 + ] + ).astype(np.float32) + color_list = color_list.reshape((-1, 3)) * 255 + if not rgb: + color_list = color_list[:, ::-1] + return color_list + + +def txt2img(visual_path="visual_val_gt"): + print("Starting txt2img") + + valid_labels = {1} + ignore_labels = {2, 7, 8, 12} + + if not os.path.exists(visual_path): + os.makedirs(visual_path) + color_list = colormap() + + gt_json_path = 'datasets/mot/annotations/val_half.json' + img_path = 'datasets/mot/train/' + show_video_names = ['MOT17-02-FRCNN', + 'MOT17-04-FRCNN', + 'MOT17-05-FRCNN', + 'MOT17-09-FRCNN', + 'MOT17-10-FRCNN', + 'MOT17-11-FRCNN', + 'MOT17-13-FRCNN'] + + + test_json_path = 'datasets/mot/annotations/test.json' + test_img_path = 'datasets/mot/test/' + test_show_video_names = ['MOT17-01-FRCNN', + 'MOT17-03-FRCNN', + 'MOT17-06-FRCNN', + 'MOT17-07-FRCNN', + 'MOT17-08-FRCNN', + 'MOT17-12-FRCNN', + 'MOT17-14-FRCNN'] + if visual_path == "visual_test_predict": + show_video_names = test_show_video_names + img_path = test_img_path + gt_json_path = test_json_path + for show_video_name in show_video_names: + img_dict = dict() + + if visual_path == "visual_val_gt": + txt_path = 'datasets/mot/train/' + show_video_name + '/gt/gt_val_half.txt' + elif visual_path == "visual_yolox_x": + txt_path = 'YOLOX_outputs/yolox_mot_x_1088/track_results/'+ show_video_name + '.txt' + elif visual_path == "visual_test_predict": + txt_path = 'test/tracks/'+ show_video_name + '.txt' + else: + raise NotImplementedError + + with open(gt_json_path, 'r') as f: + gt_json = json.load(f) + + for ann in gt_json["images"]: + file_name = ann['file_name'] + video_name = file_name.split('/')[0] + if video_name == show_video_name: + img_dict[ann['frame_id']] = img_path + file_name + + + txt_dict = dict() + with open(txt_path, 'r') as f: + for line in f.readlines(): + linelist = 
line.split(',') + + mark = int(float(linelist[6])) + label = int(float(linelist[7])) + vis_ratio = float(linelist[8]) + + if visual_path == "visual_val_gt": + if mark == 0 or label not in valid_labels or label in ignore_labels or vis_ratio <= 0: + continue + + img_id = linelist[0] + obj_id = linelist[1] + bbox = [float(linelist[2]), float(linelist[3]), + float(linelist[2]) + float(linelist[4]), + float(linelist[3]) + float(linelist[5]), int(obj_id)] + if int(img_id) in txt_dict: + txt_dict[int(img_id)].append(bbox) + else: + txt_dict[int(img_id)] = list() + txt_dict[int(img_id)].append(bbox) + + for img_id in sorted(txt_dict.keys()): + img = cv2.imread(img_dict[img_id]) + for bbox in txt_dict[img_id]: + cv2.rectangle(img, (int(bbox[0]), int(bbox[1])), (int(bbox[2]), int(bbox[3])), color_list[bbox[4]%79].tolist(), thickness=2) + cv2.putText(img, "{}".format(int(bbox[4])), (int(bbox[0]), int(bbox[1])), cv2.FONT_HERSHEY_SIMPLEX, 0.8, color_list[bbox[4]%79].tolist(), 2) + cv2.imwrite(visual_path + "/" + show_video_name + "{:0>6d}.png".format(img_id), img) + print(show_video_name, "Done") + print("txt2img Done") + + +def img2video(visual_path="visual_val_gt"): + print("Starting img2video") + + img_paths = gb.glob(visual_path + "/*.png") + fps = 16 + size = (1920,1080) + videowriter = cv2.VideoWriter(visual_path + "_video.avi",cv2.VideoWriter_fourcc('M','J','P','G'), fps, size) + + for img_path in sorted(img_paths): + img = cv2.imread(img_path) + img = cv2.resize(img, size) + videowriter.write(img) + + videowriter.release() + print("img2video Done") + + +if __name__ == '__main__': + visual_path="visual_yolox_x" + if len(sys.argv) > 1: + visual_path =sys.argv[1] + txt2img(visual_path) + #img2video(visual_path) diff --git a/tracking/docker-build-context/byte_track/tutorials/centertrack/README.md b/tracking/docker-build-context/byte_track/tutorials/centertrack/README.md new file mode 100644 index 0000000000000000000000000000000000000000..b46bb2f0412c260c53d90bb5f8e5f2c387f748a5 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/centertrack/README.md @@ -0,0 +1,42 @@ +# CenterTrack + +Step1. git clone https://github.com/xingyizhou/CenterTrack.git + + +Step2. + +replace https://github.com/xingyizhou/CenterTrack/blob/master/src/lib/utils/tracker.py + +replace https://github.com/xingyizhou/CenterTrack/blob/master/src/lib/opts.py + + +Step3. run +``` +python3 test.py tracking --exp_id mot17_half --dataset mot --dataset_version 17halfval --pre_hm --ltrb_amodal --load_model ../models/mot17_half.pth --track_thresh 0.4 --new_thresh 0.5 --out_thresh 0.2 --pre_thresh 0.5 +``` + + +# CenterTrack_BYTE + +Step1. git clone https://github.com/xingyizhou/CenterTrack.git + + +Step2. + +replace https://github.com/xingyizhou/CenterTrack/blob/master/src/lib/utils/tracker.py by byte_tracker.py + +replace https://github.com/xingyizhou/CenterTrack/blob/master/src/lib/opts.py + +add mot_online to https://github.com/xingyizhou/CenterTrack/blob/master/src/lib/utils + +Step3. 
run +``` +python3 test.py tracking --exp_id mot17_half --dataset mot --dataset_version 17halfval --pre_hm --ltrb_amodal --load_model ../models/mot17_half.pth --track_thresh 0.4 --new_thresh 0.5 --out_thresh 0.2 --pre_thresh 0.5 +``` + + +## Notes +tracker.py: only motion + +byte_tracker.py: motion with kalman filter + diff --git a/tracking/docker-build-context/byte_track/tutorials/centertrack/byte_tracker.py b/tracking/docker-build-context/byte_track/tutorials/centertrack/byte_tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..8cb757e0f1e62f3ec4f2e9ab57cef2b509298dbc --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/centertrack/byte_tracker.py @@ -0,0 +1,363 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import numpy as np +from sklearn.utils.linear_assignment_ import linear_assignment +import copy +from .mot_online.kalman_filter import KalmanFilter +from .mot_online.basetrack import BaseTrack, TrackState +from .mot_online import matching + + +class STrack(BaseTrack): + shared_kalman = KalmanFilter() + def __init__(self, tlwh, score): + + # wait activate + self._tlwh = np.asarray(tlwh, dtype=np.float) + self.kalman_filter = None + self.mean, self.covariance = None, None + self.is_activated = False + + self.score = score + self.tracklet_len = 0 + + def predict(self): + mean_state = self.mean.copy() + if self.state != TrackState.Tracked: + mean_state[7] = 0 + self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance) + + @staticmethod + def multi_predict(stracks): + if len(stracks) > 0: + multi_mean = np.asarray([st.mean.copy() for st in stracks]) + multi_covariance = np.asarray([st.covariance for st in stracks]) + for i, st in enumerate(stracks): + if st.state != TrackState.Tracked: + multi_mean[i][7] = 0 + multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(multi_mean, multi_covariance) + for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)): + stracks[i].mean = mean + stracks[i].covariance = cov + + def activate(self, kalman_filter, frame_id): + """Start a new tracklet""" + self.kalman_filter = kalman_filter + self.track_id = self.next_id() + self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh)) + + self.tracklet_len = 0 + self.state = TrackState.Tracked + if frame_id == 1: + self.is_activated = True + # self.is_activated = True + self.frame_id = frame_id + self.start_frame = frame_id + + def re_activate(self, new_track, frame_id, new_id=False): + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) + ) + self.tracklet_len = 0 + self.state = TrackState.Tracked + self.is_activated = True + self.frame_id = frame_id + if new_id: + self.track_id = self.next_id() + self.score = new_track.score + + def update(self, new_track, frame_id): + """ + Update a matched track + :type new_track: STrack + :type frame_id: int + :type update_feature: bool + :return: + """ + self.frame_id = frame_id + self.tracklet_len += 1 + + new_tlwh = new_track.tlwh + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh)) + self.state = TrackState.Tracked + self.is_activated = True + + self.score = new_track.score + + @property + # @jit(nopython=True) + def tlwh(self): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. 
+ """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + # @jit(nopython=True) + def tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_xyah(tlwh): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. + """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self): + return self.tlwh_to_xyah(self.tlwh) + + @staticmethod + # @jit(nopython=True) + def tlbr_to_tlwh(tlbr): + ret = np.asarray(tlbr).copy() + ret[2:] -= ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_tlbr(tlwh): + ret = np.asarray(tlwh).copy() + ret[2:] += ret[:2] + return ret + + def __repr__(self): + return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame) + + + +class BYTETracker(object): + def __init__(self, args, frame_rate=30): + self.args = args + self.det_thresh = args.new_thresh + self.buffer_size = int(frame_rate / 30.0 * args.track_buffer) + self.max_time_lost = self.buffer_size + self.reset() + + # below has no effect to final output, just to be compatible to codebase + def init_track(self, results): + for item in results: + if item['score'] > self.opt.new_thresh and item['class'] == 1: + self.id_count += 1 + item['active'] = 1 + item['age'] = 1 + item['tracking_id'] = self.id_count + if not ('ct' in item): + bbox = item['bbox'] + item['ct'] = [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2] + self.tracks.append(item) + + def reset(self): + self.frame_id = 0 + self.kalman_filter = KalmanFilter() + self.tracked_stracks = [] # type: list[STrack] + self.lost_stracks = [] # type: list[STrack] + self.removed_stracks = [] # type: list[STrack] + self.tracks = [] + + # below has no effect to final output, just to be compatible to codebase + self.id_count = 0 + + def step(self, results, public_det=None): + self.frame_id += 1 + activated_starcks = [] + refind_stracks = [] + lost_stracks = [] + removed_stracks = [] + detections = [] + detections_second = [] + + scores = np.array([item['score'] for item in results if item['class'] == 1], np.float32) + bboxes = np.vstack([item['bbox'] for item in results if item['class'] == 1]) # N x 4, x1y1x2y2 + + remain_inds = scores >= self.args.track_thresh + dets = bboxes[remain_inds] + scores_keep = scores[remain_inds] + + + inds_low = scores > self.args.out_thresh + inds_high = scores < self.args.track_thresh + inds_second = np.logical_and(inds_low, inds_high) + dets_second = bboxes[inds_second] + scores_second = scores[inds_second] + + if len(dets) > 0: + '''Detections''' + detections = [STrack(STrack.tlbr_to_tlwh(tlbr), s) for + (tlbr, s) in zip(dets, scores_keep)] + else: + detections = [] + + ''' Add newly detected tracklets to tracked_stracks''' + unconfirmed = [] + tracked_stracks = [] # type: list[STrack] + for track in self.tracked_stracks: + if not track.is_activated: + unconfirmed.append(track) + else: + tracked_stracks.append(track) + + ''' Step 2: First association, with Kalman and IOU''' + strack_pool = joint_stracks(tracked_stracks, self.lost_stracks) + # Predict the current location with KF + STrack.multi_predict(strack_pool) + dists = matching.iou_distance(strack_pool, detections) + #dists = 
matching.fuse_motion(self.kalman_filter, dists, strack_pool, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=self.args.match_thresh) + + for itracked, idet in matches: + track = strack_pool[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + track.update(detections[idet], self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + ''' Step 3: Second association, association the untrack to the low score detections, with IOU''' + if len(dets_second) > 0: + '''Detections''' + detections_second = [STrack(STrack.tlbr_to_tlwh(tlbr), s) for + (tlbr, s) in zip(dets_second, scores_second)] + else: + detections_second = [] + r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked] + dists = matching.iou_distance(r_tracked_stracks, detections_second) + matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.5) + for itracked, idet in matches: + track = r_tracked_stracks[itracked] + det = detections_second[idet] + if track.state == TrackState.Tracked: + track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + for it in u_track: + track = r_tracked_stracks[it] + if not track.state == TrackState.Lost: + track.mark_lost() + lost_stracks.append(track) + + '''Deal with unconfirmed tracks, usually tracks with only one beginning frame''' + detections = [detections[i] for i in u_detection] + dists = matching.iou_distance(unconfirmed, detections) + matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7) + for itracked, idet in matches: + unconfirmed[itracked].update(detections[idet], self.frame_id) + activated_starcks.append(unconfirmed[itracked]) + for it in u_unconfirmed: + track = unconfirmed[it] + track.mark_removed() + removed_stracks.append(track) + + """ Step 4: Init new stracks""" + for inew in u_detection: + track = detections[inew] + if track.score < self.det_thresh: + continue + track.activate(self.kalman_filter, self.frame_id) + activated_starcks.append(track) + """ Step 5: Update state""" + for track in self.lost_stracks: + if self.frame_id - track.end_frame > self.max_time_lost: + track.mark_removed() + removed_stracks.append(track) + + self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked] + self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks) + self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks) + self.lost_stracks.extend(lost_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks) + self.removed_stracks.extend(removed_stracks) + self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks) + output_stracks = [track for track in self.tracked_stracks if track.is_activated] + + ret = [] + for track in output_stracks: + track_dict = {} + track_dict['score'] = track.score + track_dict['bbox'] = track.tlbr + bbox = track_dict['bbox'] + track_dict['ct'] = [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2] + track_dict['active'] = 1 if track.is_activated else 0 + track_dict['tracking_id'] = track.track_id + track_dict['class'] = 1 + ret.append(track_dict) + + self.tracks = ret + return ret + + +def joint_stracks(tlista, 
tlistb): + exists = {} + res = [] + for t in tlista: + exists[t.track_id] = 1 + res.append(t) + for t in tlistb: + tid = t.track_id + if not exists.get(tid, 0): + exists[tid] = 1 + res.append(t) + return res + + +def sub_stracks(tlista, tlistb): + stracks = {} + for t in tlista: + stracks[t.track_id] = t + for t in tlistb: + tid = t.track_id + if stracks.get(tid, 0): + del stracks[tid] + return list(stracks.values()) + + +def remove_duplicate_stracks(stracksa, stracksb): + pdist = matching.iou_distance(stracksa, stracksb) + pairs = np.where(pdist < 0.15) + dupa, dupb = list(), list() + for p, q in zip(*pairs): + timep = stracksa[p].frame_id - stracksa[p].start_frame + timeq = stracksb[q].frame_id - stracksb[q].start_frame + if timep > timeq: + dupb.append(q) + else: + dupa.append(p) + resa = [t for i, t in enumerate(stracksa) if not i in dupa] + resb = [t for i, t in enumerate(stracksb) if not i in dupb] + return resa, resb + + +def remove_fp_stracks(stracksa, n_frame=10): + remain = [] + for t in stracksa: + score_5 = t.score_list[-n_frame:] + score_5 = np.array(score_5, dtype=np.float32) + index = score_5 < 0.45 + num = np.sum(index) + if num < n_frame: + remain.append(t) + return remain + diff --git a/tracking/docker-build-context/byte_track/tutorials/centertrack/mot_online/basetrack.py b/tracking/docker-build-context/byte_track/tutorials/centertrack/mot_online/basetrack.py new file mode 100644 index 0000000000000000000000000000000000000000..4fe2233607f6d4ed28b11a0ae6c0303c8ca19098 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/centertrack/mot_online/basetrack.py @@ -0,0 +1,52 @@ +import numpy as np +from collections import OrderedDict + + +class TrackState(object): + New = 0 + Tracked = 1 + Lost = 2 + Removed = 3 + + +class BaseTrack(object): + _count = 0 + + track_id = 0 + is_activated = False + state = TrackState.New + + history = OrderedDict() + features = [] + curr_feature = None + score = 0 + start_frame = 0 + frame_id = 0 + time_since_update = 0 + + # multi-camera + location = (np.inf, np.inf) + + @property + def end_frame(self): + return self.frame_id + + @staticmethod + def next_id(): + BaseTrack._count += 1 + return BaseTrack._count + + def activate(self, *args): + raise NotImplementedError + + def predict(self): + raise NotImplementedError + + def update(self, *args, **kwargs): + raise NotImplementedError + + def mark_lost(self): + self.state = TrackState.Lost + + def mark_removed(self): + self.state = TrackState.Removed diff --git a/tracking/docker-build-context/byte_track/tutorials/centertrack/mot_online/kalman_filter.py b/tracking/docker-build-context/byte_track/tutorials/centertrack/mot_online/kalman_filter.py new file mode 100644 index 0000000000000000000000000000000000000000..b4c4e9854d8abd2fea75ad6b1fe8cd6846c43680 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/centertrack/mot_online/kalman_filter.py @@ -0,0 +1,269 @@ +# vim: expandtab:ts=4:sw=4 +import numpy as np +import scipy.linalg + +""" +Table for the 0.95 quantile of the chi-square distribution with N degrees of +freedom (contains values for N=1, ..., 9). Taken from MATLAB/Octave's chi2inv +function and used as Mahalanobis gating threshold. +""" +chi2inv95 = { + 1: 3.8415, + 2: 5.9915, + 3: 7.8147, + 4: 9.4877, + 5: 11.070, + 6: 12.592, + 7: 14.067, + 8: 15.507, + 9: 16.919} + + +class KalmanFilter(object): + """ + A simple Kalman filter for tracking bounding boxes in image space. 
+ + The 8-dimensional state space + + x, y, a, h, vx, vy, va, vh + + contains the bounding box center position (x, y), aspect ratio a, height h, + and their respective velocities. + + Object motion follows a constant velocity model. The bounding box location + (x, y, a, h) is taken as direct observation of the state space (linear + observation model). + + """ + + def __init__(self): + ndim, dt = 4, 1. + + # Create Kalman filter model matrices. + self._motion_mat = np.eye(2 * ndim, 2 * ndim) + for i in range(ndim): + self._motion_mat[i, ndim + i] = dt + self._update_mat = np.eye(ndim, 2 * ndim) + + # Motion and observation uncertainty are chosen relative to the current + # state estimate. These weights control the amount of uncertainty in + # the model. This is a bit hacky. + self._std_weight_position = 1. / 20 + self._std_weight_velocity = 1. / 160 + + def initiate(self, measurement): + """Create track from unassociated measurement. + + Parameters + ---------- + measurement : ndarray + Bounding box coordinates (x, y, a, h) with center position (x, y), + aspect ratio a, and height h. + + Returns + ------- + (ndarray, ndarray) + Returns the mean vector (8 dimensional) and covariance matrix (8x8 + dimensional) of the new track. Unobserved velocities are initialized + to 0 mean. + + """ + mean_pos = measurement + mean_vel = np.zeros_like(mean_pos) + mean = np.r_[mean_pos, mean_vel] + + std = [ + 2 * self._std_weight_position * measurement[3], + 2 * self._std_weight_position * measurement[3], + 1e-2, + 2 * self._std_weight_position * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 1e-5, + 10 * self._std_weight_velocity * measurement[3]] + covariance = np.diag(np.square(std)) + return mean, covariance + + def predict(self, mean, covariance): + """Run Kalman filter prediction step. + + Parameters + ---------- + mean : ndarray + The 8 dimensional mean vector of the object state at the previous + time step. + covariance : ndarray + The 8x8 dimensional covariance matrix of the object state at the + previous time step. + + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + + """ + std_pos = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-2, + self._std_weight_position * mean[3]] + std_vel = [ + self._std_weight_velocity * mean[3], + self._std_weight_velocity * mean[3], + 1e-5, + self._std_weight_velocity * mean[3]] + motion_cov = np.diag(np.square(np.r_[std_pos, std_vel])) + + #mean = np.dot(self._motion_mat, mean) + mean = np.dot(mean, self._motion_mat.T) + covariance = np.linalg.multi_dot(( + self._motion_mat, covariance, self._motion_mat.T)) + motion_cov + + return mean, covariance + + def project(self, mean, covariance): + """Project state distribution to measurement space. + + Parameters + ---------- + mean : ndarray + The state's mean vector (8 dimensional array). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + + Returns + ------- + (ndarray, ndarray) + Returns the projected mean and covariance matrix of the given state + estimate. 
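+            The returned covariance already includes the measurement noise
+            (innovation covariance) term.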
+ + """ + std = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-1, + self._std_weight_position * mean[3]] + innovation_cov = np.diag(np.square(std)) + + mean = np.dot(self._update_mat, mean) + covariance = np.linalg.multi_dot(( + self._update_mat, covariance, self._update_mat.T)) + return mean, covariance + innovation_cov + + def multi_predict(self, mean, covariance): + """Run Kalman filter prediction step (Vectorized version). + Parameters + ---------- + mean : ndarray + The Nx8 dimensional mean matrix of the object states at the previous + time step. + covariance : ndarray + The Nx8x8 dimensional covariance matrics of the object states at the + previous time step. + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + """ + std_pos = [ + self._std_weight_position * mean[:, 3], + self._std_weight_position * mean[:, 3], + 1e-2 * np.ones_like(mean[:, 3]), + self._std_weight_position * mean[:, 3]] + std_vel = [ + self._std_weight_velocity * mean[:, 3], + self._std_weight_velocity * mean[:, 3], + 1e-5 * np.ones_like(mean[:, 3]), + self._std_weight_velocity * mean[:, 3]] + sqr = np.square(np.r_[std_pos, std_vel]).T + + motion_cov = [] + for i in range(len(mean)): + motion_cov.append(np.diag(sqr[i])) + motion_cov = np.asarray(motion_cov) + + mean = np.dot(mean, self._motion_mat.T) + left = np.dot(self._motion_mat, covariance).transpose((1, 0, 2)) + covariance = np.dot(left, self._motion_mat.T) + motion_cov + + return mean, covariance + + def update(self, mean, covariance, measurement): + """Run Kalman filter correction step. + + Parameters + ---------- + mean : ndarray + The predicted state's mean vector (8 dimensional). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + measurement : ndarray + The 4 dimensional measurement vector (x, y, a, h), where (x, y) + is the center position, a the aspect ratio, and h the height of the + bounding box. + + Returns + ------- + (ndarray, ndarray) + Returns the measurement-corrected state distribution. + + """ + projected_mean, projected_cov = self.project(mean, covariance) + + chol_factor, lower = scipy.linalg.cho_factor( + projected_cov, lower=True, check_finite=False) + kalman_gain = scipy.linalg.cho_solve( + (chol_factor, lower), np.dot(covariance, self._update_mat.T).T, + check_finite=False).T + innovation = measurement - projected_mean + + new_mean = mean + np.dot(innovation, kalman_gain.T) + new_covariance = covariance - np.linalg.multi_dot(( + kalman_gain, projected_cov, kalman_gain.T)) + return new_mean, new_covariance + + def gating_distance(self, mean, covariance, measurements, + only_position=False, metric='maha'): + """Compute gating distance between state distribution and measurements. + A suitable distance threshold can be obtained from `chi2inv95`. If + `only_position` is False, the chi-square distribution has 4 degrees of + freedom, otherwise 2. + Parameters + ---------- + mean : ndarray + Mean vector over the state distribution (8 dimensional). + covariance : ndarray + Covariance of the state distribution (8x8 dimensional). + measurements : ndarray + An Nx4 dimensional matrix of N measurements, each in + format (x, y, a, h) where (x, y) is the bounding box center + position, a the aspect ratio, and h the height. + only_position : Optional[bool] + If True, distance computation is done with respect to the bounding + box center position only. 
+ Returns + ------- + ndarray + Returns an array of length N, where the i-th element contains the + squared Mahalanobis distance between (mean, covariance) and + `measurements[i]`. + """ + mean, covariance = self.project(mean, covariance) + if only_position: + mean, covariance = mean[:2], covariance[:2, :2] + measurements = measurements[:, :2] + + d = measurements - mean + if metric == 'gaussian': + return np.sum(d * d, axis=1) + elif metric == 'maha': + cholesky_factor = np.linalg.cholesky(covariance) + z = scipy.linalg.solve_triangular( + cholesky_factor, d.T, lower=True, check_finite=False, + overwrite_b=True) + squared_maha = np.sum(z * z, axis=0) + return squared_maha + else: + raise ValueError('invalid distance metric') diff --git a/tracking/docker-build-context/byte_track/tutorials/centertrack/mot_online/matching.py b/tracking/docker-build-context/byte_track/tutorials/centertrack/mot_online/matching.py new file mode 100644 index 0000000000000000000000000000000000000000..54cb4be09624cdb68581508bdbdeecdc63539b7c --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/centertrack/mot_online/matching.py @@ -0,0 +1,198 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import lap +import numpy as np +import scipy +from cython_bbox import bbox_overlaps as bbox_ious +from scipy.spatial.distance import cdist + +chi2inv95 = { + 1: 3.8415, + 2: 5.9915, + 3: 7.8147, + 4: 9.4877, + 5: 11.070, + 6: 12.592, + 7: 14.067, + 8: 15.507, + 9: 16.919} + +def merge_matches(m1, m2, shape): + O,P,Q = shape + m1 = np.asarray(m1) + m2 = np.asarray(m2) + + M1 = scipy.sparse.coo_matrix((np.ones(len(m1)), (m1[:, 0], m1[:, 1])), shape=(O, P)) + M2 = scipy.sparse.coo_matrix((np.ones(len(m2)), (m2[:, 0], m2[:, 1])), shape=(P, Q)) + + mask = M1*M2 + match = mask.nonzero() + match = list(zip(match[0], match[1])) + unmatched_O = tuple(set(range(O)) - set([i for i, j in match])) + unmatched_Q = tuple(set(range(Q)) - set([j for i, j in match])) + + return match, unmatched_O, unmatched_Q + + +def _indices_to_matches(cost_matrix, indices, thresh): + matched_cost = cost_matrix[tuple(zip(*indices))] + matched_mask = (matched_cost <= thresh) + + matches = indices[matched_mask] + unmatched_a = tuple(set(range(cost_matrix.shape[0])) - set(matches[:, 0])) + unmatched_b = tuple(set(range(cost_matrix.shape[1])) - set(matches[:, 1])) + + return matches, unmatched_a, unmatched_b + + +def linear_assignment(cost_matrix, thresh): + if cost_matrix.size == 0: + return np.empty((0, 2), dtype=int), tuple(range(cost_matrix.shape[0])), tuple(range(cost_matrix.shape[1])) + matches, unmatched_a, unmatched_b = [], [], [] + cost, x, y = lap.lapjv(cost_matrix, extend_cost=True, cost_limit=thresh) + for ix, mx in enumerate(x): + if mx >= 0: + matches.append([ix, mx]) + unmatched_a = np.where(x < 0)[0] + unmatched_b = np.where(y < 0)[0] + matches = np.asarray(matches) + return matches, unmatched_a, unmatched_b + + +def ious(atlbrs, btlbrs): + """ + Compute cost based on IoU + :type atlbrs: list[tlbr] | np.ndarray + :type atlbrs: list[tlbr] | np.ndarray + + :rtype ious np.ndarray + """ + ious = np.zeros((len(atlbrs), len(btlbrs)), dtype=np.float) + if ious.size == 0: + return ious + + ious = bbox_ious( + np.ascontiguousarray(atlbrs, dtype=np.float), + np.ascontiguousarray(btlbrs, dtype=np.float) + ) + + return ious + + +def iou_distance(atracks, btracks): + """ + Compute cost based on IoU + :type atracks: list[STrack] + :type btracks: list[STrack] + + :rtype 
cost_matrix np.ndarray + """ + + if (len(atracks)>0 and isinstance(atracks[0], np.ndarray)) or (len(btracks) > 0 and isinstance(btracks[0], np.ndarray)): + atlbrs = atracks + btlbrs = btracks + else: + atlbrs = [track.tlbr for track in atracks] + btlbrs = [track.tlbr for track in btracks] + _ious = ious(atlbrs, btlbrs) + cost_matrix = 1 - _ious + + return cost_matrix + +def embedding_distance(tracks, detections, metric='cosine'): + """ + :param tracks: list[STrack] + :param detections: list[BaseTrack] + :param metric: + :return: cost_matrix np.ndarray + """ + + cost_matrix = np.zeros((len(tracks), len(detections)), dtype=np.float) + if cost_matrix.size == 0: + return cost_matrix + det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float) + #for i, track in enumerate(tracks): + #cost_matrix[i, :] = np.maximum(0.0, cdist(track.smooth_feat.reshape(1,-1), det_features, metric)) + track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float) + cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric)) # Nomalized features + return cost_matrix + +def embedding_distance2(tracks, detections, metric='cosine'): + """ + :param tracks: list[STrack] + :param detections: list[BaseTrack] + :param metric: + :return: cost_matrix np.ndarray + """ + + cost_matrix = np.zeros((len(tracks), len(detections)), dtype=np.float) + if cost_matrix.size == 0: + return cost_matrix + det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float) + #for i, track in enumerate(tracks): + #cost_matrix[i, :] = np.maximum(0.0, cdist(track.smooth_feat.reshape(1,-1), det_features, metric)) + track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float) + cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric)) # Nomalized features + track_features = np.asarray([track.features[0] for track in tracks], dtype=np.float) + cost_matrix2 = np.maximum(0.0, cdist(track_features, det_features, metric)) # Nomalized features + track_features = np.asarray([track.features[len(track.features)-1] for track in tracks], dtype=np.float) + cost_matrix3 = np.maximum(0.0, cdist(track_features, det_features, metric)) # Nomalized features + for row in range(len(cost_matrix)): + cost_matrix[row] = (cost_matrix[row]+cost_matrix2[row]+cost_matrix3[row])/3 + return cost_matrix + + +def vis_id_feature_A_distance(tracks, detections, metric='cosine'): + track_features = [] + det_features = [] + leg1 = len(tracks) + leg2 = len(detections) + cost_matrix = np.zeros((leg1, leg2), dtype=np.float) + cost_matrix_det = np.zeros((leg1, leg2), dtype=np.float) + cost_matrix_track = np.zeros((leg1, leg2), dtype=np.float) + det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float) + track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float) + if leg2 != 0: + cost_matrix_det = np.maximum(0.0, cdist(det_features, det_features, metric)) + if leg1 != 0: + cost_matrix_track = np.maximum(0.0, cdist(track_features, track_features, metric)) + if cost_matrix.size == 0: + return track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track + cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric)) + if leg1 > 10: + leg1 = 10 + tracks = tracks[:10] + if leg2 > 10: + leg2 = 10 + detections = detections[:10] + det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float) + track_features = np.asarray([track.smooth_feat for track in tracks], 
dtype=np.float) + return track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track + +def gate_cost_matrix(kf, cost_matrix, tracks, detections, only_position=False): + if cost_matrix.size == 0: + return cost_matrix + gating_dim = 2 if only_position else 4 + gating_threshold = chi2inv95[gating_dim] + measurements = np.asarray([det.to_xyah() for det in detections]) + for row, track in enumerate(tracks): + gating_distance = kf.gating_distance( + track.mean, track.covariance, measurements, only_position) + cost_matrix[row, gating_distance > gating_threshold] = np.inf + return cost_matrix + + +def fuse_motion(kf, cost_matrix, tracks, detections, only_position=False, lambda_=0.98): + if cost_matrix.size == 0: + return cost_matrix + gating_dim = 2 if only_position else 4 + gating_threshold = chi2inv95[gating_dim] + measurements = np.asarray([det.to_xyah() for det in detections]) + for row, track in enumerate(tracks): + gating_distance = kf.gating_distance( + track.mean, track.covariance, measurements, only_position, metric='maha') + cost_matrix[row, gating_distance > gating_threshold] = np.inf + cost_matrix[row] = lambda_ * cost_matrix[row] + (1 - lambda_) * gating_distance + return cost_matrix diff --git a/tracking/docker-build-context/byte_track/tutorials/centertrack/opts.py b/tracking/docker-build-context/byte_track/tutorials/centertrack/opts.py new file mode 100644 index 0000000000000000000000000000000000000000..5d54fe39ff696933e1391c531868f8b73865b690 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/centertrack/opts.py @@ -0,0 +1,406 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import argparse +import os +import sys + +class opts(object): + def __init__(self): + self.parser = argparse.ArgumentParser() + # basic experiment setting + self.parser.add_argument('task', default='', + help='ctdet | ddd | multi_pose ' + '| tracking or combined with ,') + self.parser.add_argument('--dataset', default='coco', + help='see lib/dataset/dataset_facotry for ' + + 'available datasets') + self.parser.add_argument('--test_dataset', default='', + help='coco | kitti | coco_hp | pascal') + self.parser.add_argument('--exp_id', default='default') + self.parser.add_argument('--test', action='store_true') + self.parser.add_argument('--debug', type=int, default=0, + help='level of visualization.' + '1: only show the final detection results' + '2: show the network output features' + '3: use matplot to display' # useful when lunching training with ipython notebook + '4: save all visualizations to disk') + self.parser.add_argument('--no_pause', action='store_true') + self.parser.add_argument('--demo', default='', + help='path to image/ image folders/ video. ' + 'or "webcam"') + self.parser.add_argument('--load_model', default='', + help='path to pretrained model') + self.parser.add_argument('--resume', action='store_true', + help='resume an experiment. ' + 'Reloaded the optimizer parameter and ' + 'set load_model to model_last.pth ' + 'in the exp dir if load_model is empty.') + + # system + self.parser.add_argument('--gpus', default='0', + help='-1 for CPU, use comma for multiple gpus') + self.parser.add_argument('--num_workers', type=int, default=4, + help='dataloader threads. 
0 for single-thread.') + self.parser.add_argument('--not_cuda_benchmark', action='store_true', + help='disable when the input size is not fixed.') + self.parser.add_argument('--seed', type=int, default=317, + help='random seed') # from CornerNet + self.parser.add_argument('--not_set_cuda_env', action='store_true', + help='used when training in slurm clusters.') + + # log + self.parser.add_argument('--print_iter', type=int, default=0, + help='disable progress bar and print to screen.') + self.parser.add_argument('--save_all', action='store_true', + help='save model to disk every 5 epochs.') + self.parser.add_argument('--vis_thresh', type=float, default=0.3, + help='visualization threshold.') + self.parser.add_argument('--debugger_theme', default='white', + choices=['white', 'black']) + self.parser.add_argument('--eval_val', action='store_true') + self.parser.add_argument('--save_imgs', default='', help='') + self.parser.add_argument('--save_img_suffix', default='', help='') + self.parser.add_argument('--skip_first', type=int, default=-1, help='') + self.parser.add_argument('--save_video', action='store_true') + self.parser.add_argument('--save_framerate', type=int, default=30) + self.parser.add_argument('--resize_video', action='store_true') + self.parser.add_argument('--video_h', type=int, default=512, help='') + self.parser.add_argument('--video_w', type=int, default=512, help='') + self.parser.add_argument('--transpose_video', action='store_true') + self.parser.add_argument('--show_track_color', action='store_true') + self.parser.add_argument('--not_show_bbox', action='store_true') + self.parser.add_argument('--not_show_number', action='store_true') + self.parser.add_argument('--not_show_txt', action='store_true') + self.parser.add_argument('--qualitative', action='store_true') + self.parser.add_argument('--tango_color', action='store_true') + self.parser.add_argument('--only_show_dots', action='store_true') + self.parser.add_argument('--show_trace', action='store_true') + + # model + self.parser.add_argument('--arch', default='dla_34', + help='model architecture. Currently tested' + 'res_18 | res_101 | resdcn_18 | resdcn_101 |' + 'dlav0_34 | dla_34 | hourglass') + self.parser.add_argument('--dla_node', default='dcn') + self.parser.add_argument('--head_conv', type=int, default=-1, + help='conv layer channels for output head' + '0 for no conv layer' + '-1 for default setting: ' + '64 for resnets and 256 for dla.') + self.parser.add_argument('--num_head_conv', type=int, default=1) + self.parser.add_argument('--head_kernel', type=int, default=3, help='') + self.parser.add_argument('--down_ratio', type=int, default=4, + help='output stride. Currently only supports 4.') + self.parser.add_argument('--not_idaup', action='store_true') + self.parser.add_argument('--num_classes', type=int, default=-1) + self.parser.add_argument('--num_layers', type=int, default=101) + self.parser.add_argument('--backbone', default='dla34') + self.parser.add_argument('--neck', default='dlaup') + self.parser.add_argument('--msra_outchannel', type=int, default=256) + self.parser.add_argument('--efficient_level', type=int, default=0) + self.parser.add_argument('--prior_bias', type=float, default=-4.6) # -2.19 + + # input + self.parser.add_argument('--input_res', type=int, default=-1, + help='input height and width. -1 for default from ' + 'dataset. Will be overriden by input_h | input_w') + self.parser.add_argument('--input_h', type=int, default=-1, + help='input height. 
+    self.parser.add_argument('--input_w', type=int, default=-1,
+                             help='input width. -1 for default from dataset.')
+    self.parser.add_argument('--dataset_version', default='')
+
+    # train
+    self.parser.add_argument('--optim', default='adam')
+    self.parser.add_argument('--lr', type=float, default=1.25e-4,
+                             help='learning rate for batch size 32.')
+    self.parser.add_argument('--lr_step', type=str, default='60',
+                             help='drop learning rate by 10.')
+    self.parser.add_argument('--save_point', type=str, default='90',
+                             help='when to save the model to disk.')
+    self.parser.add_argument('--num_epochs', type=int, default=70,
+                             help='total training epochs.')
+    self.parser.add_argument('--batch_size', type=int, default=32,
+                             help='batch size')
+    self.parser.add_argument('--master_batch_size', type=int, default=-1,
+                             help='batch size on the master gpu.')
+    self.parser.add_argument('--num_iters', type=int, default=-1,
+                             help='default: #samples / batch_size.')
+    self.parser.add_argument('--val_intervals', type=int, default=10000,
+                             help='number of epochs to run validation.')
+    self.parser.add_argument('--trainval', action='store_true',
+                             help='include validation in training and '
+                                  'test on test set')
+    self.parser.add_argument('--ltrb', action='store_true',
+                             help='')
+    self.parser.add_argument('--ltrb_weight', type=float, default=0.1,
+                             help='')
+    self.parser.add_argument('--reset_hm', action='store_true')
+    self.parser.add_argument('--reuse_hm', action='store_true')
+    self.parser.add_argument('--use_kpt_center', action='store_true')
+    self.parser.add_argument('--add_05', action='store_true')
+    self.parser.add_argument('--dense_reg', type=int, default=1, help='')
+
+    # test
+    self.parser.add_argument('--flip_test', action='store_true',
+                             help='flip data augmentation.')
+    self.parser.add_argument('--test_scales', type=str, default='1',
+                             help='multi scale test augmentation.')
+    self.parser.add_argument('--nms', action='store_true',
+                             help='run nms in testing.')
+    self.parser.add_argument('--K', type=int, default=100,
+                             help='max number of output objects.')
+    self.parser.add_argument('--not_prefetch_test', action='store_true',
+                             help='do not use parallel data pre-processing.')
+    self.parser.add_argument('--fix_short', type=int, default=-1)
+    self.parser.add_argument('--keep_res', action='store_true',
+                             help='keep the original resolution'
+                                  ' during validation.')
+    self.parser.add_argument('--map_argoverse_id', action='store_true',
+                             help='if trained on nuscenes and eval on kitti')
+    self.parser.add_argument('--out_thresh', type=float, default=-1,
+                             help='')
+    self.parser.add_argument('--depth_scale', type=float, default=1,
+                             help='')
+    self.parser.add_argument('--save_results', action='store_true')
+    self.parser.add_argument('--load_results', default='')
+    self.parser.add_argument('--use_loaded_results', action='store_true')
+    self.parser.add_argument('--ignore_loaded_cats', default='')
+    self.parser.add_argument('--model_output_list', action='store_true',
+                             help='used when converting to onnx')
+    self.parser.add_argument('--non_block_test', action='store_true')
+    self.parser.add_argument('--vis_gt_bev', default='', help='')
+    self.parser.add_argument('--kitti_split', default='3dop',
+                             help='different validation split for kitti: '
+                                  '3dop | subcnn')
+    self.parser.add_argument('--test_focal_length', type=int, default=-1)
+
+    # dataset
+    self.parser.add_argument('--not_rand_crop', action='store_true',
+                             help='do not use the random crop data augmentation '
+                                  'from CornerNet.')
+    self.parser.add_argument('--not_max_crop', action='store_true',
+                             help='used when the training dataset has '
+                                  'imbalanced aspect ratios.')
+    self.parser.add_argument('--shift', type=float, default=0,
+                             help='when not using random crop, '
+                                  'apply shift augmentation (e.g. 0.1).')
+    self.parser.add_argument('--scale', type=float, default=0,
+                             help='when not using random crop, '
+                                  'apply scale augmentation (e.g. 0.4).')
+    self.parser.add_argument('--aug_rot', type=float, default=0,
+                             help='probability of applying '
+                                  'rotation augmentation.')
+    self.parser.add_argument('--rotate', type=float, default=0,
+                             help='when not using random crop, '
+                                  'apply rotation augmentation.')
+    self.parser.add_argument('--flip', type=float, default=0.5,
+                             help='probability of applying flip augmentation.')
+    self.parser.add_argument('--no_color_aug', action='store_true',
+                             help='do not use the color augmentation '
+                                  'from CornerNet')
+
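+    # Note on the tracking thresholds below: BYTE-style association (see
+    # tracker.py in this folder) runs two rounds. Detections scoring at
+    # least track_thresh enter the first round; lower-scoring ones are kept
+    # for a second rescue round against still-unmatched tracks. Illustration
+    # only: scores = [0.95, 0.62, 0.28] with track_thresh = 0.3 puts 0.95
+    # and 0.62 in the first round and 0.28 in the second.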
+    # Tracking
+    self.parser.add_argument('--tracking', action='store_true')
+    self.parser.add_argument('--pre_hm', action='store_true')
+    self.parser.add_argument('--same_aug_pre', action='store_true')
+    self.parser.add_argument('--zero_pre_hm', action='store_true')
+    self.parser.add_argument('--hm_disturb', type=float, default=0)
+    self.parser.add_argument('--lost_disturb', type=float, default=0)
+    self.parser.add_argument('--fp_disturb', type=float, default=0)
+    self.parser.add_argument('--pre_thresh', type=float, default=-1)
+    self.parser.add_argument('--track_thresh', type=float, default=0.3)
+    self.parser.add_argument('--match_thresh', type=float, default=0.8)
+    self.parser.add_argument('--track_buffer', type=int, default=30)
+    self.parser.add_argument('--new_thresh', type=float, default=0.3)
+    self.parser.add_argument('--max_frame_dist', type=int, default=3)
+    self.parser.add_argument('--ltrb_amodal', action='store_true')
+    self.parser.add_argument('--ltrb_amodal_weight', type=float, default=0.1)
+    self.parser.add_argument('--public_det', action='store_true')
+    self.parser.add_argument('--no_pre_img', action='store_true')
+    self.parser.add_argument('--zero_tracking', action='store_true')
+    self.parser.add_argument('--hungarian', action='store_true')
+    self.parser.add_argument('--max_age', type=int, default=-1)
+
+
+    # loss
+    self.parser.add_argument('--tracking_weight', type=float, default=1)
+    self.parser.add_argument('--reg_loss', default='l1',
+                             help='regression loss: sl1 | l1 | l2')
+    self.parser.add_argument('--hm_weight', type=float, default=1,
+                             help='loss weight for keypoint heatmaps.')
+    self.parser.add_argument('--off_weight', type=float, default=1,
+                             help='loss weight for keypoint local offsets.')
+    self.parser.add_argument('--wh_weight', type=float, default=0.1,
+                             help='loss weight for bounding box size.')
+    self.parser.add_argument('--hp_weight', type=float, default=1,
+                             help='loss weight for human pose offset.')
+    self.parser.add_argument('--hm_hp_weight', type=float, default=1,
+                             help='loss weight for human keypoint heatmap.')
+    self.parser.add_argument('--amodel_offset_weight', type=float, default=1,
+                             help='Please forgive the typo.')
+    self.parser.add_argument('--dep_weight', type=float, default=1,
+                             help='loss weight for depth.')
+    self.parser.add_argument('--dim_weight', type=float, default=1,
+                             help='loss weight for 3d bounding box size.')
+    self.parser.add_argument('--rot_weight', type=float, default=1,
+                             help='loss weight for orientation.')
+    self.parser.add_argument('--nuscenes_att', action='store_true')
+    self.parser.add_argument('--nuscenes_att_weight', type=float, 
default=1) + self.parser.add_argument('--velocity', action='store_true') + self.parser.add_argument('--velocity_weight', type=float, default=1) + + # custom dataset + self.parser.add_argument('--custom_dataset_img_path', default='') + self.parser.add_argument('--custom_dataset_ann_path', default='') + self.parser.add_argument('--bird_view_world_size', type=int, default=64) + + def parse(self, args=''): + if args == '': + opt = self.parser.parse_args() + else: + opt = self.parser.parse_args(args) + + if opt.test_dataset == '': + opt.test_dataset = opt.dataset + + opt.gpus_str = opt.gpus + opt.gpus = [int(gpu) for gpu in opt.gpus.split(',')] + opt.gpus = [i for i in range(len(opt.gpus))] if opt.gpus[0] >=0 else [-1] + opt.lr_step = [int(i) for i in opt.lr_step.split(',')] + opt.save_point = [int(i) for i in opt.save_point.split(',')] + opt.test_scales = [float(i) for i in opt.test_scales.split(',')] + opt.save_imgs = [i for i in opt.save_imgs.split(',')] \ + if opt.save_imgs != '' else [] + opt.ignore_loaded_cats = \ + [int(i) for i in opt.ignore_loaded_cats.split(',')] \ + if opt.ignore_loaded_cats != '' else [] + + opt.num_workers = max(opt.num_workers, 2 * len(opt.gpus)) + opt.pre_img = False + if 'tracking' in opt.task: + print('Running tracking') + opt.tracking = True +# opt.out_thresh = max(opt.track_thresh, opt.out_thresh) +# opt.pre_thresh = max(opt.track_thresh, opt.pre_thresh) +# opt.new_thresh = max(opt.track_thresh, opt.new_thresh) + opt.pre_img = not opt.no_pre_img + print('Using tracking threshold for out threshold!', opt.track_thresh) + if 'ddd' in opt.task: + opt.show_track_color = True + + opt.fix_res = not opt.keep_res + print('Fix size testing.' if opt.fix_res else 'Keep resolution testing.') + + if opt.head_conv == -1: # init default head_conv + opt.head_conv = 256 if 'dla' in opt.arch else 64 + + opt.pad = 127 if 'hourglass' in opt.arch else 31 + opt.num_stacks = 2 if opt.arch == 'hourglass' else 1 + + if opt.master_batch_size == -1: + opt.master_batch_size = opt.batch_size // len(opt.gpus) + rest_batch_size = (opt.batch_size - opt.master_batch_size) + opt.chunk_sizes = [opt.master_batch_size] + for i in range(len(opt.gpus) - 1): + slave_chunk_size = rest_batch_size // (len(opt.gpus) - 1) + if i < rest_batch_size % (len(opt.gpus) - 1): + slave_chunk_size += 1 + opt.chunk_sizes.append(slave_chunk_size) + print('training chunk_sizes:', opt.chunk_sizes) + + if opt.debug > 0: + opt.num_workers = 0 + opt.batch_size = 1 + opt.gpus = [opt.gpus[0]] + opt.master_batch_size = -1 + + # log dirs + opt.root_dir = os.path.join(os.path.dirname(__file__), '..', '..') + opt.data_dir = os.path.join(opt.root_dir, 'data') + opt.exp_dir = os.path.join(opt.root_dir, 'exp', opt.task) + opt.save_dir = os.path.join(opt.exp_dir, opt.exp_id) + opt.debug_dir = os.path.join(opt.save_dir, 'debug') + + if opt.resume and opt.load_model == '': + opt.load_model = os.path.join(opt.save_dir, 'model_last.pth') + return opt + + + def update_dataset_info_and_set_heads(self, opt, dataset): + opt.num_classes = dataset.num_categories \ + if opt.num_classes < 0 else opt.num_classes + # input_h(w): opt.input_h overrides opt.input_res overrides dataset default + input_h, input_w = dataset.default_resolution + input_h = opt.input_res if opt.input_res > 0 else input_h + input_w = opt.input_res if opt.input_res > 0 else input_w + opt.input_h = opt.input_h if opt.input_h > 0 else input_h + opt.input_w = opt.input_w if opt.input_w > 0 else input_w + opt.output_h = opt.input_h // opt.down_ratio + opt.output_w = 
opt.input_w // opt.down_ratio + opt.input_res = max(opt.input_h, opt.input_w) + opt.output_res = max(opt.output_h, opt.output_w) + + opt.heads = {'hm': opt.num_classes, 'reg': 2, 'wh': 2} + + if 'tracking' in opt.task: + opt.heads.update({'tracking': 2}) + + if 'ddd' in opt.task: + opt.heads.update({'dep': 1, 'rot': 8, 'dim': 3, 'amodel_offset': 2}) + + if 'multi_pose' in opt.task: + opt.heads.update({ + 'hps': dataset.num_joints * 2, 'hm_hp': dataset.num_joints, + 'hp_offset': 2}) + + if opt.ltrb: + opt.heads.update({'ltrb': 4}) + if opt.ltrb_amodal: + opt.heads.update({'ltrb_amodal': 4}) + if opt.nuscenes_att: + opt.heads.update({'nuscenes_att': 8}) + if opt.velocity: + opt.heads.update({'velocity': 3}) + + weight_dict = {'hm': opt.hm_weight, 'wh': opt.wh_weight, + 'reg': opt.off_weight, 'hps': opt.hp_weight, + 'hm_hp': opt.hm_hp_weight, 'hp_offset': opt.off_weight, + 'dep': opt.dep_weight, 'rot': opt.rot_weight, + 'dim': opt.dim_weight, + 'amodel_offset': opt.amodel_offset_weight, + 'ltrb': opt.ltrb_weight, + 'tracking': opt.tracking_weight, + 'ltrb_amodal': opt.ltrb_amodal_weight, + 'nuscenes_att': opt.nuscenes_att_weight, + 'velocity': opt.velocity_weight} + opt.weights = {head: weight_dict[head] for head in opt.heads} + for head in opt.weights: + if opt.weights[head] == 0: + del opt.heads[head] + opt.head_conv = {head: [opt.head_conv \ + for i in range(opt.num_head_conv if head != 'reg' else 1)] for head in opt.heads} + + print('input h w:', opt.input_h, opt.input_w) + print('heads', opt.heads) + print('weights', opt.weights) + print('head conv', opt.head_conv) + + return opt + + def init(self, args=''): + # only used in demo + default_dataset_info = { + 'ctdet': 'coco', 'multi_pose': 'coco_hp', 'ddd': 'nuscenes', + 'tracking,ctdet': 'coco', 'tracking,multi_pose': 'coco_hp', + 'tracking,ddd': 'nuscenes' + } + opt = self.parse() + from dataset.dataset_factory import dataset_factory + train_dataset = default_dataset_info[opt.task] \ + if opt.task in default_dataset_info else 'coco' + dataset = dataset_factory[train_dataset] + opt = self.update_dataset_info_and_set_heads(opt, dataset) + return opt diff --git a/tracking/docker-build-context/byte_track/tutorials/centertrack/tracker.py b/tracking/docker-build-context/byte_track/tutorials/centertrack/tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..22a746528ae84416423d7e1ec5b7d93429560b5d --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/centertrack/tracker.py @@ -0,0 +1,198 @@ +import numpy as np +from sklearn.utils.linear_assignment_ import linear_assignment +# from numba import jit +import copy + + +class Tracker(object): + def __init__(self, opt): + self.opt = opt + self.reset() + + def init_track(self, results): + for item in results: + if item['score'] > self.opt.new_thresh: + self.id_count += 1 + # active and age are never used in the paper + item['active'] = 1 + item['age'] = 1 + item['tracking_id'] = self.id_count + if not ('ct' in item): + bbox = item['bbox'] + item['ct'] = [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2] + self.tracks.append(item) + + def reset(self): + self.id_count = 0 + self.tracks = [] + + def step(self, results_with_low, public_det=None): + + results = [item for item in results_with_low if item['score'] >= self.opt.track_thresh] + + # first association + N = len(results) + M = len(self.tracks) + + dets = np.array( + [det['ct'] + det['tracking'] for det in results], np.float32) # N x 2 + track_size = np.array([((track['bbox'][2] - track['bbox'][0]) * \ + 
(track['bbox'][3] - track['bbox'][1])) \ + for track in self.tracks], np.float32) # M + track_cat = np.array([track['class'] for track in self.tracks], np.int32) # M + item_size = np.array([((item['bbox'][2] - item['bbox'][0]) * \ + (item['bbox'][3] - item['bbox'][1])) \ + for item in results], np.float32) # N + item_cat = np.array([item['class'] for item in results], np.int32) # N + tracks = np.array( + [pre_det['ct'] for pre_det in self.tracks], np.float32) # M x 2 + dist = (((tracks.reshape(1, -1, 2) - \ + dets.reshape(-1, 1, 2)) ** 2).sum(axis=2)) # N x M + + invalid = ((dist > track_size.reshape(1, M)) + \ + (dist > item_size.reshape(N, 1)) + \ + (item_cat.reshape(N, 1) != track_cat.reshape(1, M))) > 0 + dist = dist + invalid * 1e18 + + if self.opt.hungarian: + assert not self.opt.hungarian, 'we only verify centertrack with greedy_assignment' + item_score = np.array([item['score'] for item in results], np.float32) # N + dist[dist > 1e18] = 1e18 + matched_indices = linear_assignment(dist) + else: + matched_indices = greedy_assignment(copy.deepcopy(dist)) + + unmatched_dets = [d for d in range(dets.shape[0]) \ + if not (d in matched_indices[:, 0])] + unmatched_tracks = [d for d in range(tracks.shape[0]) \ + if not (d in matched_indices[:, 1])] + + if self.opt.hungarian: + assert not self.opt.hungarian, 'we only verify centertrack with greedy_assignment' + matches = [] + for m in matched_indices: + if dist[m[0], m[1]] > 1e16: + unmatched_dets.append(m[0]) + unmatched_tracks.append(m[1]) + else: + matches.append(m) + matches = np.array(matches).reshape(-1, 2) + else: + matches = matched_indices + + ret = [] + for m in matches: + track = results[m[0]] + track['tracking_id'] = self.tracks[m[1]]['tracking_id'] + track['age'] = 1 + track['active'] = self.tracks[m[1]]['active'] + 1 + ret.append(track) + + if self.opt.public_det and len(unmatched_dets) > 0: + assert not self.opt.public_det, 'we only verify centertrack with private detection' + # Public detection: only create tracks from provided detections + pub_dets = np.array([d['ct'] for d in public_det], np.float32) + dist3 = ((dets.reshape(-1, 1, 2) - pub_dets.reshape(1, -1, 2)) ** 2).sum( + axis=2) + matched_dets = [d for d in range(dets.shape[0]) \ + if not (d in unmatched_dets)] + dist3[matched_dets] = 1e18 + for j in range(len(pub_dets)): + i = dist3[:, j].argmin() + if dist3[i, j] < item_size[i]: + dist3[i, :] = 1e18 + track = results[i] + if track['score'] > self.opt.new_thresh: + self.id_count += 1 + track['tracking_id'] = self.id_count + track['age'] = 1 + track['active'] = 1 + ret.append(track) + else: + # Private detection: create tracks for all un-matched detections + for i in unmatched_dets: + track = results[i] + if track['score'] > self.opt.new_thresh: + self.id_count += 1 + track['tracking_id'] = self.id_count + track['age'] = 1 + track['active'] = 1 + ret.append(track) + + # second association + results_second = [item for item in results_with_low if item['score'] < self.opt.track_thresh] + + self_tracks_second = [self.tracks[i] for i in unmatched_tracks if self.tracks[i]['active'] > 0] + second2original = [i for i in unmatched_tracks if self.tracks[i]['active'] > 0] + + N = len(results_second) + M = len(self_tracks_second) + + if N > 0 and M > 0: + dets = np.array( + [det['ct'] + det['tracking'] for det in results_second], np.float32) # N x 2 + track_size = np.array([((track['bbox'][2] - track['bbox'][0]) * \ + (track['bbox'][3] - track['bbox'][1])) \ + for track in self_tracks_second], np.float32) # M + track_cat = 
np.array([track['class'] for track in self_tracks_second], np.int32)  # M
+      item_size = np.array([((item['bbox'][2] - item['bbox'][0]) * \
+        (item['bbox'][3] - item['bbox'][1])) \
+        for item in results_second], np.float32)  # N
+      item_cat = np.array([item['class'] for item in results_second], np.int32)  # N
+      tracks_second = np.array(
+        [pre_det['ct'] for pre_det in self_tracks_second], np.float32)  # M x 2
+      dist = (((tracks_second.reshape(1, -1, 2) - \
+        dets.reshape(-1, 1, 2)) ** 2).sum(axis=2))  # N x M
+
+      invalid = ((dist > track_size.reshape(1, M)) + \
+        (dist > item_size.reshape(N, 1)) + \
+        (item_cat.reshape(N, 1) != track_cat.reshape(1, M))) > 0
+      dist = dist + invalid * 1e18
+
+      matched_indices_second = greedy_assignment(copy.deepcopy(dist), 1e8)
+
+      unmatched_tracks_second = [d for d in range(tracks_second.shape[0]) \
+        if not (d in matched_indices_second[:, 1])]
+      matches_second = matched_indices_second
+
+      for m in matches_second:
+        track = results_second[m[0]]
+        track['tracking_id'] = self_tracks_second[m[1]]['tracking_id']
+        track['age'] = 1
+        track['active'] = self_tracks_second[m[1]]['active'] + 1
+        ret.append(track)
+
+      unmatched_tracks = [second2original[i] for i in unmatched_tracks_second] + \
+        [i for i in unmatched_tracks if self.tracks[i]['active'] == 0]
+
+    # for debug:
+    # unmatched_tracks = [i for i in unmatched_tracks if self.tracks[i]['active'] > 0] + \
+    #   [i for i in unmatched_tracks if self.tracks[i]['active'] == 0]
+
+    for i in unmatched_tracks:
+      track = self.tracks[i]
+      if track['age'] < self.opt.max_age:
+        track['age'] += 1
+        track['active'] = 0
+        bbox = track['bbox']
+        ct = track['ct']
+        v = [0, 0]
+        track['bbox'] = [
+          bbox[0] + v[0], bbox[1] + v[1],
+          bbox[2] + v[0], bbox[3] + v[1]]
+        track['ct'] = [ct[0] + v[0], ct[1] + v[1]]
+        ret.append(track)
+    self.tracks = ret
+    return ret
+
+
+def greedy_assignment(dist, thresh=1e16):
+  matched_indices = []
+  if dist.shape[1] == 0:
+    return np.array(matched_indices, np.int32).reshape(-1, 2)
+  for i in range(dist.shape[0]):
+    j = dist[i].argmin()
+    if dist[i][j] < thresh:
+      dist[:, j] = 1e18
+      matched_indices.append([i, j])
+  return np.array(matched_indices, np.int32).reshape(-1, 2)
diff --git a/tracking/docker-build-context/byte_track/tutorials/cstrack/README.md b/tracking/docker-build-context/byte_track/tutorials/cstrack/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..fdb66c1955791274891905aa620ec10636c86d6f
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/tutorials/cstrack/README.md
@@ -0,0 +1,28 @@
+# CSTrack
+
+Step1. git clone https://github.com/JudasDie/SOTS.git
+
+
+Step2. replace https://github.com/JudasDie/SOTS/blob/master/lib/tracker/cstrack.py with the byte_tracker.py (or tracker.py) from this folder
+
+
+Step3. download the cstrack model trained on MIX and MOT17_half (mix_mot17_half_cstrack.pt): [google](https://drive.google.com/file/d/1OG5PDj_CYmMiw3dN6pZ0FsgqY__CIDx1/view?usp=sharing), [baidu(code:0bsu)](https://pan.baidu.com/s/1Z2VnE-OhZIPmgX6-4r9Z1Q)
+
+
+Step4. run the BYTE tracker example:
+```
+python3 test_cstrack.py --val_mot17 True --val_hf 2 --weights weights/mix_mot17_half_cstrack.pt --conf_thres 0.7 --data_cfg ../src/lib/cfg/mot17_hf.json --data_dir your/data/path
+```
+
+
+## Notes
+byte_tracker.py: motion only (Kalman prediction + IoU association)
+
+tracker.py: motion + ReID embeddings
+
diff --git a/tracking/docker-build-context/byte_track/tutorials/cstrack/byte_tracker.py b/tracking/docker-build-context/byte_track/tutorials/cstrack/byte_tracker.py
new file mode 100644
index 0000000000000000000000000000000000000000..ae2af402f2780e77ef0fa7ef24cbee8bf62c4a94
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/tutorials/cstrack/byte_tracker.py
@@ -0,0 +1,500 @@
+from collections import deque
+import os
+import cv2
+import numpy as np
+import torch
+import torch.nn.functional as F
+from torchsummary import summary
+
+from core.mot.general import non_max_suppression_and_inds, non_max_suppression_jde, non_max_suppression, scale_coords
+from core.mot.torch_utils import intersect_dicts
+from models.mot.cstrack import Model
+
+from mot_online import matching
+from mot_online.kalman_filter import KalmanFilter
+from mot_online.log import logger
+from mot_online.utils import *
+
+from mot_online.basetrack import BaseTrack, TrackState
+
+
+class STrack(BaseTrack):
+    shared_kalman = KalmanFilter()
+    def __init__(self, tlwh, score):
+
+        # wait activate
+        self._tlwh = np.asarray(tlwh, dtype=float)
+        self.kalman_filter = None
+        self.mean, self.covariance = None, None
+        self.is_activated = False
+
+        self.score = score
+        self.tracklet_len = 0
+
+    def predict(self):
+        mean_state = self.mean.copy()
+        if self.state != TrackState.Tracked:
+            mean_state[7] = 0
+        self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance)
+
+    @staticmethod
+    def multi_predict(stracks):
+        if len(stracks) > 0:
+            multi_mean = np.asarray([st.mean.copy() for st in stracks])
+            multi_covariance = np.asarray([st.covariance for st in stracks])
+            for i, st in enumerate(stracks):
+                if st.state != TrackState.Tracked:
+                    multi_mean[i][7] = 0
+            multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(multi_mean, multi_covariance)
+            for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)):
+                stracks[i].mean = mean
+                stracks[i].covariance = cov
+
+    def activate(self, kalman_filter, frame_id):
+        """Start a new tracklet"""
+        self.kalman_filter = kalman_filter
+        self.track_id = self.next_id()
+        self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh))
+
+        self.tracklet_len = 0
+        self.state = TrackState.Tracked
+        # self.is_activated = True
+        self.frame_id = frame_id
+        self.start_frame = frame_id
+
+    def re_activate(self, new_track, frame_id, new_id=False):
+        self.mean, self.covariance = self.kalman_filter.update(
+            self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh)
+        )
+
+        self.tracklet_len = 0
+        self.state = TrackState.Tracked
+        self.is_activated = True
+        self.frame_id = frame_id
+        if new_id:
+            self.track_id = self.next_id()
+
+    def update(self, new_track, frame_id):
+        """
+        Update a matched track
+        :type new_track: STrack
+        :type frame_id: int
+        :return:
+        """
+        self.frame_id = frame_id
+        self.tracklet_len += 1
+
+        new_tlwh = new_track.tlwh
+        self.mean, self.covariance = self.kalman_filter.update(
+            self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh))
+        self.state = TrackState.Tracked
+        self.is_activated = True
+
+        self.score = new_track.score
+
+    @property
+    # @jit(nopython=True)
+    def tlwh(self):
+        """Get current 
position in bounding box format `(top left x, top left y, + width, height)`. + """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + # @jit(nopython=True) + def tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_xyah(tlwh): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. + """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self): + return self.tlwh_to_xyah(self.tlwh) + + @staticmethod + # @jit(nopython=True) + def tlbr_to_tlwh(tlbr): + ret = np.asarray(tlbr).copy() + ret[2:] -= ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_tlbr(tlwh): + ret = np.asarray(tlwh).copy() + ret[2:] += ret[:2] + return ret + + def __repr__(self): + return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame) + + +class BYTETracker(object): + def __init__(self, opt, frame_rate=30): + self.opt = opt + if int(opt.gpus[0]) >= 0: + opt.device = torch.device('cuda') + else: + opt.device = torch.device('cpu') + print('Creating model...') + + ckpt = torch.load(opt.weights, map_location=opt.device) # load checkpoint + self.model = Model(opt.cfg or ckpt['model'].yaml, ch=3, nc=1).to(opt.device) # create + exclude = ['anchor'] if opt.cfg else [] # exclude keys + if type(ckpt['model']).__name__ == "OrderedDict": + state_dict = ckpt['model'] + else: + state_dict = ckpt['model'].float().state_dict() # to FP32 + state_dict = intersect_dicts(state_dict, self.model.state_dict(), exclude=exclude) # intersect + self.model.load_state_dict(state_dict, strict=False) # load + self.model.cuda().eval() + total_params = sum(p.numel() for p in self.model.parameters()) + print(f'{total_params:,} total parameters.') + + + self.tracked_stracks = [] # type: list[STrack] + self.lost_stracks = [] # type: list[STrack] + self.removed_stracks = [] # type: list[STrack] + + self.frame_id = 0 + self.det_thresh = opt.conf_thres + self.buffer_size = int(frame_rate / 30.0 * opt.track_buffer) + self.max_time_lost = self.buffer_size + self.mean = np.array(opt.mean, dtype=np.float32).reshape(1, 1, 3) + self.std = np.array(opt.std, dtype=np.float32).reshape(1, 1, 3) + + self.kalman_filter = KalmanFilter() + self.low_thres = 0.2 + self.high_thres = self.opt.conf_thres + 0.1 + + def update(self, im_blob, img0,seq_num, save_dir): + self.frame_id += 1 + activated_starcks = [] + refind_stracks = [] + lost_stracks = [] + removed_stracks = [] + dets = [] + + ''' Step 1: Network forward, get detections & embeddings''' + with torch.no_grad(): + output = self.model(im_blob, augment=False) + pred, train_out = output[1] + + pred = pred[pred[:, :, 4] > self.low_thres] + detections = [] + if len(pred) > 0: + dets,x_inds,y_inds = non_max_suppression_and_inds(pred[:,:6].unsqueeze(0), 0.1, self.opt.nms_thres,method='cluster_diou') + if len(dets) != 0: + scale_coords(self.opt.img_size, dets[:, :4], img0.shape).round() + + remain_inds = dets[:, 4] > self.opt.conf_thres + inds_low = dets[:, 4] > self.low_thres + inds_high = dets[:, 4] < self.opt.conf_thres + inds_second = np.logical_and(inds_low, inds_high) + dets_second = dets[inds_second] + dets = dets[remain_inds] + + detections = 
[STrack(STrack.tlbr_to_tlwh(tlbrs[:4]), tlbrs[4]) for + tlbrs in dets[:, :5]] + + else: + detections = [] + dets_second = [] + id_feature_second = [] + + ''' Add newly detected tracklets to tracked_stracks''' + unconfirmed = [] + tracked_stracks = [] # type: list[STrack] + for track in self.tracked_stracks: + if not track.is_activated: + unconfirmed.append(track) + else: + tracked_stracks.append(track) + + ''' Step 2: First association, with embedding''' + strack_pool = joint_stracks(tracked_stracks, self.lost_stracks) + # Predict the current location with KF + STrack.multi_predict(strack_pool) + dists = matching.iou_distance(strack_pool, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.8) + + for itracked, idet in matches: + track = strack_pool[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + track.update(detections[idet], self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + # vis + track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track = [],[],[],[],[] + if self.opt.vis_state == 1 and self.frame_id % 20 == 0: + if len(dets) != 0: + for i in range(0, dets.shape[0]): + bbox = dets[i][0:4] + cv2.rectangle(img0, (int(bbox[0]), int(bbox[1])),(int(bbox[2]), int(bbox[3])),(0, 255, 0), 2) + track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track = matching.vis_id_feature_A_distance(strack_pool, detections) + vis_feature(self.frame_id,seq_num,img0,track_features, + det_features, cost_matrix, cost_matrix_det, cost_matrix_track, max_num=5, out_path=save_dir) + + ''' Step 3: Second association, with IOU''' + + # association the untrack to the low score detections + if len(dets_second) > 0: + detections_second = [STrack(STrack.tlbr_to_tlwh(tlbrs[:4]), tlbrs[4]) for + tlbrs in dets_second[:, :5]] + else: + detections_second = [] + r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked] + dists = matching.iou_distance(r_tracked_stracks, detections_second) + matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.4) + for itracked, idet in matches: + track = r_tracked_stracks[itracked] + det = detections_second[idet] + if track.state == TrackState.Tracked: + track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + for it in u_track: + track = r_tracked_stracks[it] + if not track.state == TrackState.Lost: + track.mark_lost() + lost_stracks.append(track) + + '''Deal with unconfirmed tracks, usually tracks with only one beginning frame''' + detections = [detections[i] for i in u_detection] + dists = matching.iou_distance(unconfirmed, detections) + matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7) + for itracked, idet in matches: + unconfirmed[itracked].update(detections[idet], self.frame_id) + activated_starcks.append(unconfirmed[itracked]) + for it in u_unconfirmed: + track = unconfirmed[it] + track.mark_removed() + removed_stracks.append(track) + + """ Step 4: Init new stracks""" + for inew in u_detection: + track = detections[inew] + if track.score < self.high_thres: + continue + track.activate(self.kalman_filter, self.frame_id) + activated_starcks.append(track) + """ Step 5: Update state""" + for track in self.lost_stracks: + if self.frame_id - track.end_frame > self.max_time_lost: + 
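+                # Prune: a track lost for more than max_time_lost frames
+                # (track_buffer scaled by the sequence frame rate) is removed
+                # for good below.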
track.mark_removed() + removed_stracks.append(track) + + # print('Ramained match {} s'.format(t4-t3)) + + self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked] + self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks) + self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks) + self.lost_stracks.extend(lost_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks) + self.removed_stracks.extend(removed_stracks) + self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks) + # get scores of lost tracks + output_stracks = [track for track in self.tracked_stracks if track.is_activated] + + logger.debug('===========Frame {}=========='.format(self.frame_id)) + logger.debug('Activated: {}'.format([track.track_id for track in activated_starcks])) + logger.debug('Refind: {}'.format([track.track_id for track in refind_stracks])) + logger.debug('Lost: {}'.format([track.track_id for track in lost_stracks])) + logger.debug('Removed: {}'.format([track.track_id for track in removed_stracks])) + + return output_stracks + + +def joint_stracks(tlista, tlistb): + exists = {} + res = [] + for t in tlista: + exists[t.track_id] = 1 + res.append(t) + for t in tlistb: + tid = t.track_id + if not exists.get(tid, 0): + exists[tid] = 1 + res.append(t) + return res + + +def sub_stracks(tlista, tlistb): + stracks = {} + for t in tlista: + stracks[t.track_id] = t + for t in tlistb: + tid = t.track_id + if stracks.get(tid, 0): + del stracks[tid] + return list(stracks.values()) + + +def remove_duplicate_stracks(stracksa, stracksb): + pdist = matching.iou_distance(stracksa, stracksb) + pairs = np.where(pdist < 0.15) + dupa, dupb = list(), list() + for p, q in zip(*pairs): + timep = stracksa[p].frame_id - stracksa[p].start_frame + timeq = stracksb[q].frame_id - stracksb[q].start_frame + if timep > timeq: + dupb.append(q) + else: + dupa.append(p) + resa = [t for i, t in enumerate(stracksa) if not i in dupa] + resb = [t for i, t in enumerate(stracksb) if not i in dupb] + return resa, resb + +def vis_feature(frame_id,seq_num,img,track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track,max_num=5, out_path='/home/XX/'): + num_zero = ["0000","000","00","0"] + img = cv2.resize(img, (778, 435)) + + if len(det_features) != 0: + max_f = det_features.max() + min_f = det_features.min() + det_features = np.round((det_features - min_f) / (max_f - min_f) * 255) + det_features = det_features.astype(np.uint8) + d_F_M = [] + cutpff_line = [40]*512 + for d_f in det_features: + for row in range(45): + d_F_M += [[40]*3+d_f.tolist()+[40]*3] + for row in range(3): + d_F_M += [[40]*3+cutpff_line+[40]*3] + d_F_M = np.array(d_F_M) + d_F_M = d_F_M.astype(np.uint8) + det_features_img = cv2.applyColorMap(d_F_M, cv2.COLORMAP_JET) + feature_img2 = cv2.resize(det_features_img, (435, 435)) + #cv2.putText(feature_img2, "det_features", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + else: + feature_img2 = np.zeros((435, 435)) + feature_img2 = feature_img2.astype(np.uint8) + feature_img2 = cv2.applyColorMap(feature_img2, cv2.COLORMAP_JET) + #cv2.putText(feature_img2, "det_features", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + feature_img = np.concatenate((img, feature_img2), axis=1) + + if len(cost_matrix_det) != 0 and len(cost_matrix_det[0]) != 0: + max_f = cost_matrix_det.max() + min_f = 
cost_matrix_det.min() + cost_matrix_det = np.round((cost_matrix_det - min_f) / (max_f - min_f) * 255) + d_F_M = [] + cutpff_line = [40]*len(cost_matrix_det)*10 + for c_m in cost_matrix_det: + add = [] + for row in range(len(c_m)): + add += [255-c_m[row]]*10 + for row in range(10): + d_F_M += [[40]+add+[40]] + d_F_M = np.array(d_F_M) + d_F_M = d_F_M.astype(np.uint8) + cost_matrix_det_img = cv2.applyColorMap(d_F_M, cv2.COLORMAP_JET) + feature_img2 = cv2.resize(cost_matrix_det_img, (435, 435)) + #cv2.putText(feature_img2, "cost_matrix_det", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + else: + feature_img2 = np.zeros((435, 435)) + feature_img2 = feature_img2.astype(np.uint8) + feature_img2 = cv2.applyColorMap(feature_img2, cv2.COLORMAP_JET) + #cv2.putText(feature_img2, "cost_matrix_det", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + feature_img = np.concatenate((feature_img, feature_img2), axis=1) + + if len(track_features) != 0: + max_f = track_features.max() + min_f = track_features.min() + track_features = np.round((track_features - min_f) / (max_f - min_f) * 255) + track_features = track_features.astype(np.uint8) + d_F_M = [] + cutpff_line = [40]*512 + for d_f in track_features: + for row in range(45): + d_F_M += [[40]*3+d_f.tolist()+[40]*3] + for row in range(3): + d_F_M += [[40]*3+cutpff_line+[40]*3] + d_F_M = np.array(d_F_M) + d_F_M = d_F_M.astype(np.uint8) + track_features_img = cv2.applyColorMap(d_F_M, cv2.COLORMAP_JET) + feature_img2 = cv2.resize(track_features_img, (435, 435)) + #cv2.putText(feature_img2, "track_features", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + else: + feature_img2 = np.zeros((435, 435)) + feature_img2 = feature_img2.astype(np.uint8) + feature_img2 = cv2.applyColorMap(feature_img2, cv2.COLORMAP_JET) + #cv2.putText(feature_img2, "track_features", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + feature_img = np.concatenate((feature_img, feature_img2), axis=1) + + if len(cost_matrix_track) != 0 and len(cost_matrix_track[0]) != 0: + max_f = cost_matrix_track.max() + min_f = cost_matrix_track.min() + cost_matrix_track = np.round((cost_matrix_track - min_f) / (max_f - min_f) * 255) + d_F_M = [] + cutpff_line = [40]*len(cost_matrix_track)*10 + for c_m in cost_matrix_track: + add = [] + for row in range(len(c_m)): + add += [255-c_m[row]]*10 + for row in range(10): + d_F_M += [[40]+add+[40]] + d_F_M = np.array(d_F_M) + d_F_M = d_F_M.astype(np.uint8) + cost_matrix_track_img = cv2.applyColorMap(d_F_M, cv2.COLORMAP_JET) + feature_img2 = cv2.resize(cost_matrix_track_img, (435, 435)) + #cv2.putText(feature_img2, "cost_matrix_track", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + else: + feature_img2 = np.zeros((435, 435)) + feature_img2 = feature_img2.astype(np.uint8) + feature_img2 = cv2.applyColorMap(feature_img2, cv2.COLORMAP_JET) + #cv2.putText(feature_img2, "cost_matrix_track", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + feature_img = np.concatenate((feature_img, feature_img2), axis=1) + + if len(cost_matrix) != 0 and len(cost_matrix[0]) != 0: + max_f = cost_matrix.max() + min_f = cost_matrix.min() + cost_matrix = np.round((cost_matrix - min_f) / (max_f - min_f) * 255) + d_F_M = [] + cutpff_line = [40]*len(cost_matrix[0])*10 + for c_m in cost_matrix: + add = [] + for row in range(len(c_m)): + add += [255-c_m[row]]*10 + for row in range(10): + d_F_M += [[40]+add+[40]] + d_F_M = np.array(d_F_M) + d_F_M = d_F_M.astype(np.uint8) + cost_matrix_img = cv2.applyColorMap(d_F_M, cv2.COLORMAP_JET) + 
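+        # The matrix was min-max scaled to 0..255 and inverted (255 - c) above,
+        # with each cell drawn as a 10x10 pixel block, so low-cost (likely)
+        # matches render as warm colors under the JET colormap.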
feature_img2 = cv2.resize(cost_matrix_img, (435, 435))
+        #cv2.putText(feature_img2, "cost_matrix", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
+    else:
+        feature_img2 = np.zeros((435, 435))
+        feature_img2 = feature_img2.astype(np.uint8)
+        feature_img2 = cv2.applyColorMap(feature_img2, cv2.COLORMAP_JET)
+        #cv2.putText(feature_img2, "cost_matrix", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
+    feature_img = np.concatenate((feature_img, feature_img2), axis=1)
+
+    dst_path = out_path + "/" + seq_num + "_" + num_zero[len(str(frame_id))-1] + str(frame_id) + '.png'
+    cv2.imwrite(dst_path, feature_img)
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/tutorials/cstrack/tracker.py b/tracking/docker-build-context/byte_track/tutorials/cstrack/tracker.py
new file mode 100644
index 0000000000000000000000000000000000000000..67b7b49600993b016cc877abf0ceabd1a7942520
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/tutorials/cstrack/tracker.py
@@ -0,0 +1,542 @@
+from collections import deque
+import os
+import cv2
+import numpy as np
+import torch
+import torch.nn.functional as F
+from torchsummary import summary
+
+from core.mot.general import non_max_suppression_and_inds, non_max_suppression_jde, non_max_suppression, scale_coords
+from core.mot.torch_utils import intersect_dicts
+from models.mot.cstrack import Model
+
+from mot_online import matching
+from mot_online.kalman_filter import KalmanFilter
+from mot_online.log import logger
+from mot_online.utils import *
+
+from mot_online.basetrack import BaseTrack, TrackState
+
+
+class STrack(BaseTrack):
+    shared_kalman = KalmanFilter()
+    def __init__(self, tlwh, score, temp_feat, buffer_size=30):
+
+        # wait activate
+        self._tlwh = np.asarray(tlwh, dtype=float)
+        self.kalman_filter = None
+        self.mean, self.covariance = None, None
+        self.is_activated = False
+
+        self.score = score
+        self.tracklet_len = 0
+
+        self.smooth_feat = None
+        self.update_features(temp_feat)
+        self.features = deque([], maxlen=buffer_size)
+        self.alpha = 0.9
+
+    def update_features(self, feat):
+        feat /= np.linalg.norm(feat)
+        self.curr_feat = feat
+        if self.smooth_feat is None:
+            self.smooth_feat = feat
+        else:
+            self.smooth_feat = self.alpha * self.smooth_feat + (1 - self.alpha) * feat
+        self.features.append(feat)
+        self.smooth_feat /= np.linalg.norm(self.smooth_feat)
+
+    def predict(self):
+        mean_state = self.mean.copy()
+        if self.state != TrackState.Tracked:
+            mean_state[7] = 0
+        self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance)
+
+    @staticmethod
+    def multi_predict(stracks):
+        if len(stracks) > 0:
+            multi_mean = np.asarray([st.mean.copy() for st in stracks])
+            multi_covariance = np.asarray([st.covariance for st in stracks])
+            for i, st in enumerate(stracks):
+                if st.state != TrackState.Tracked:
+                    multi_mean[i][7] = 0
+            multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(multi_mean, multi_covariance)
+            for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)):
+                stracks[i].mean = mean
+                stracks[i].covariance = cov
+
+    def activate(self, kalman_filter, frame_id):
+        """Start a new tracklet"""
+        self.kalman_filter = kalman_filter
+        self.track_id = self.next_id()
+        self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh))
+
+        self.tracklet_len = 0
+        self.state = TrackState.Tracked
+        # self.is_activated = True
+        self.frame_id = frame_id
+        self.start_frame = frame_id
+
+    def re_activate(self, new_track, frame_id, new_id=False):
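+        # Revive a lost track with a newly matched detection: Kalman update on
+        # the new box, refresh the smoothed ReID feature, and optionally assign
+        # a brand-new id.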
self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) + ) + + self.update_features(new_track.curr_feat) + self.tracklet_len = 0 + self.state = TrackState.Tracked + self.is_activated = True + self.frame_id = frame_id + if new_id: + self.track_id = self.next_id() + + def update(self, new_track, frame_id, update_feature=True): + """ + Update a matched track + :type new_track: STrack + :type frame_id: int + :type update_feature: bool + :return: + """ + self.frame_id = frame_id + self.tracklet_len += 1 + + new_tlwh = new_track.tlwh + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh)) + self.state = TrackState.Tracked + self.is_activated = True + + self.score = new_track.score + if update_feature: + self.update_features(new_track.curr_feat) + + @property + # @jit(nopython=True) + def tlwh(self): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. + """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + # @jit(nopython=True) + def tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_xyah(tlwh): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. + """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self): + return self.tlwh_to_xyah(self.tlwh) + + @staticmethod + # @jit(nopython=True) + def tlbr_to_tlwh(tlbr): + ret = np.asarray(tlbr).copy() + ret[2:] -= ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_tlbr(tlwh): + ret = np.asarray(tlwh).copy() + ret[2:] += ret[:2] + return ret + + def __repr__(self): + return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame) + + +class JDETracker(object): + def __init__(self, opt, frame_rate=30): + self.opt = opt + if int(opt.gpus[0]) >= 0: + opt.device = torch.device('cuda') + else: + opt.device = torch.device('cpu') + print('Creating model...') + + ckpt = torch.load(opt.weights, map_location=opt.device) # load checkpoint + self.model = Model(opt.cfg or ckpt['model'].yaml, ch=3, nc=1).to(opt.device) # create + exclude = ['anchor'] if opt.cfg else [] # exclude keys + if type(ckpt['model']).__name__ == "OrderedDict": + state_dict = ckpt['model'] + else: + state_dict = ckpt['model'].float().state_dict() # to FP32 + state_dict = intersect_dicts(state_dict, self.model.state_dict(), exclude=exclude) # intersect + self.model.load_state_dict(state_dict, strict=False) # load + self.model.cuda().eval() + total_params = sum(p.numel() for p in self.model.parameters()) + print(f'{total_params:,} total parameters.') + + + self.tracked_stracks = [] # type: list[STrack] + self.lost_stracks = [] # type: list[STrack] + self.removed_stracks = [] # type: list[STrack] + + self.frame_id = 0 + self.det_thresh = opt.conf_thres + self.buffer_size = int(frame_rate / 30.0 * opt.track_buffer) + self.max_time_lost = self.buffer_size + self.mean = np.array(opt.mean, dtype=np.float32).reshape(1, 1, 3) + self.std = np.array(opt.std, dtype=np.float32).reshape(1, 1, 3) + + self.kalman_filter = KalmanFilter() + self.low_thres = 0.2 + self.high_thres = 
self.opt.conf_thres + 0.1 + + def update(self, im_blob, img0,seq_num, save_dir): + self.frame_id += 1 + activated_starcks = [] + refind_stracks = [] + lost_stracks = [] + removed_stracks = [] + dets = [] + + ''' Step 1: Network forward, get detections & embeddings''' + with torch.no_grad(): + output = self.model(im_blob, augment=False) + pred, train_out = output[1] + + pred = pred[pred[:, :, 4] > self.low_thres] + detections = [] + if len(pred) > 0: + dets,x_inds,y_inds = non_max_suppression_and_inds(pred[:,:6].unsqueeze(0), 0.1, self.opt.nms_thres,method='cluster_diou') + if len(dets) != 0: + scale_coords(self.opt.img_size, dets[:, :4], img0.shape).round() + id_feature = output[0][0, y_inds, x_inds, :].cpu().numpy() + + remain_inds = dets[:, 4] > self.opt.conf_thres + inds_low = dets[:, 4] > self.low_thres + inds_high = dets[:, 4] < self.opt.conf_thres + inds_second = np.logical_and(inds_low, inds_high) + dets_second = dets[inds_second] + if id_feature.shape[0] == 1: + id_feature_second = id_feature + else: + id_feature_second = id_feature[inds_second] + dets = dets[remain_inds] + id_feature = id_feature[remain_inds] + + detections = [STrack(STrack.tlbr_to_tlwh(tlbrs[:4]), tlbrs[4], f, 30) for + (tlbrs, f) in zip(dets[:, :5], id_feature)] + + else: + detections = [] + dets_second = [] + id_feature_second = [] + + ''' Add newly detected tracklets to tracked_stracks''' + unconfirmed = [] + tracked_stracks = [] # type: list[STrack] + for track in self.tracked_stracks: + if not track.is_activated: + unconfirmed.append(track) + else: + tracked_stracks.append(track) + + ''' Step 2: First association, with embedding''' + strack_pool = joint_stracks(tracked_stracks, self.lost_stracks) + # Predict the current location with KF + #for strack in strack_pool: + #strack.predict() + STrack.multi_predict(strack_pool) + dists = matching.embedding_distance(strack_pool, detections) + dists = matching.fuse_motion(self.kalman_filter, dists, strack_pool, detections) + #dists = matching.iou_distance(strack_pool, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.4) + + for itracked, idet in matches: + track = strack_pool[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + track.update(detections[idet], self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + # vis + track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track = [],[],[],[],[] + if self.opt.vis_state == 1 and self.frame_id % 20 == 0: + if len(dets) != 0: + for i in range(0, dets.shape[0]): + bbox = dets[i][0:4] + cv2.rectangle(img0, (int(bbox[0]), int(bbox[1])),(int(bbox[2]), int(bbox[3])),(0, 255, 0), 2) + track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track = matching.vis_id_feature_A_distance(strack_pool, detections) + vis_feature(self.frame_id,seq_num,img0,track_features, + det_features, cost_matrix, cost_matrix_det, cost_matrix_track, max_num=5, out_path=save_dir) + + ''' Step 3: Second association, with IOU''' + detections = [detections[i] for i in u_detection] + r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked] + dists = matching.iou_distance(r_tracked_stracks, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.5) + + for itracked, idet in matches: + track = r_tracked_stracks[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + 
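+                # Still Tracked: normal Kalman + feature update; otherwise the
+                # track had been lost and is re-activated with this detection.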
track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + # association the untrack to the low score detections + if len(dets_second) > 0: + detections_second = [STrack(STrack.tlbr_to_tlwh(tlbrs[:4]), tlbrs[4], f, 30) for + (tlbrs, f) in zip(dets_second[:, :5], id_feature_second)] + else: + detections_second = [] + second_tracked_stracks = [r_tracked_stracks[i] for i in u_track if r_tracked_stracks[i].state == TrackState.Tracked] + dists = matching.iou_distance(second_tracked_stracks, detections_second) + matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.4) + for itracked, idet in matches: + track = second_tracked_stracks[itracked] + det = detections_second[idet] + if track.state == TrackState.Tracked: + track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + for it in u_track: + track = second_tracked_stracks[it] + if not track.state == TrackState.Lost: + track.mark_lost() + lost_stracks.append(track) + + '''Deal with unconfirmed tracks, usually tracks with only one beginning frame''' + detections = [detections[i] for i in u_detection] + dists = matching.iou_distance(unconfirmed, detections) + matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7) + for itracked, idet in matches: + unconfirmed[itracked].update(detections[idet], self.frame_id) + activated_starcks.append(unconfirmed[itracked]) + for it in u_unconfirmed: + track = unconfirmed[it] + track.mark_removed() + removed_stracks.append(track) + + """ Step 4: Init new stracks""" + for inew in u_detection: + track = detections[inew] + if track.score < self.high_thres: + continue + track.activate(self.kalman_filter, self.frame_id) + activated_starcks.append(track) + """ Step 5: Update state""" + for track in self.lost_stracks: + if self.frame_id - track.end_frame > self.max_time_lost: + track.mark_removed() + removed_stracks.append(track) + + # print('Ramained match {} s'.format(t4-t3)) + + self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked] + self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks) + self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks) + self.lost_stracks.extend(lost_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks) + self.removed_stracks.extend(removed_stracks) + self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks) + # get scores of lost tracks + output_stracks = [track for track in self.tracked_stracks if track.is_activated] + + logger.debug('===========Frame {}=========='.format(self.frame_id)) + logger.debug('Activated: {}'.format([track.track_id for track in activated_starcks])) + logger.debug('Refind: {}'.format([track.track_id for track in refind_stracks])) + logger.debug('Lost: {}'.format([track.track_id for track in lost_stracks])) + logger.debug('Removed: {}'.format([track.track_id for track in removed_stracks])) + + return output_stracks + + +def joint_stracks(tlista, tlistb): + exists = {} + res = [] + for t in tlista: + exists[t.track_id] = 1 + res.append(t) + for t in tlistb: + tid = t.track_id + if not exists.get(tid, 0): + exists[tid] = 1 + res.append(t) + return res + + +def 
sub_stracks(tlista, tlistb): + stracks = {} + for t in tlista: + stracks[t.track_id] = t + for t in tlistb: + tid = t.track_id + if stracks.get(tid, 0): + del stracks[tid] + return list(stracks.values()) + + +def remove_duplicate_stracks(stracksa, stracksb): + pdist = matching.iou_distance(stracksa, stracksb) + pairs = np.where(pdist < 0.15) + dupa, dupb = list(), list() + for p, q in zip(*pairs): + timep = stracksa[p].frame_id - stracksa[p].start_frame + timeq = stracksb[q].frame_id - stracksb[q].start_frame + if timep > timeq: + dupb.append(q) + else: + dupa.append(p) + resa = [t for i, t in enumerate(stracksa) if not i in dupa] + resb = [t for i, t in enumerate(stracksb) if not i in dupb] + return resa, resb + +def vis_feature(frame_id,seq_num,img,track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track,max_num=5, out_path='/home/XX/'): + num_zero = ["0000","000","00","0"] + img = cv2.resize(img, (778, 435)) + + if len(det_features) != 0: + max_f = det_features.max() + min_f = det_features.min() + det_features = np.round((det_features - min_f) / (max_f - min_f) * 255) + det_features = det_features.astype(np.uint8) + d_F_M = [] + cutpff_line = [40]*512 + for d_f in det_features: + for row in range(45): + d_F_M += [[40]*3+d_f.tolist()+[40]*3] + for row in range(3): + d_F_M += [[40]*3+cutpff_line+[40]*3] + d_F_M = np.array(d_F_M) + d_F_M = d_F_M.astype(np.uint8) + det_features_img = cv2.applyColorMap(d_F_M, cv2.COLORMAP_JET) + feature_img2 = cv2.resize(det_features_img, (435, 435)) + #cv2.putText(feature_img2, "det_features", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + else: + feature_img2 = np.zeros((435, 435)) + feature_img2 = feature_img2.astype(np.uint8) + feature_img2 = cv2.applyColorMap(feature_img2, cv2.COLORMAP_JET) + #cv2.putText(feature_img2, "det_features", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + feature_img = np.concatenate((img, feature_img2), axis=1) + + if len(cost_matrix_det) != 0 and len(cost_matrix_det[0]) != 0: + max_f = cost_matrix_det.max() + min_f = cost_matrix_det.min() + cost_matrix_det = np.round((cost_matrix_det - min_f) / (max_f - min_f) * 255) + d_F_M = [] + cutpff_line = [40]*len(cost_matrix_det)*10 + for c_m in cost_matrix_det: + add = [] + for row in range(len(c_m)): + add += [255-c_m[row]]*10 + for row in range(10): + d_F_M += [[40]+add+[40]] + d_F_M = np.array(d_F_M) + d_F_M = d_F_M.astype(np.uint8) + cost_matrix_det_img = cv2.applyColorMap(d_F_M, cv2.COLORMAP_JET) + feature_img2 = cv2.resize(cost_matrix_det_img, (435, 435)) + #cv2.putText(feature_img2, "cost_matrix_det", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + else: + feature_img2 = np.zeros((435, 435)) + feature_img2 = feature_img2.astype(np.uint8) + feature_img2 = cv2.applyColorMap(feature_img2, cv2.COLORMAP_JET) + #cv2.putText(feature_img2, "cost_matrix_det", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + feature_img = np.concatenate((feature_img, feature_img2), axis=1) + + if len(track_features) != 0: + max_f = track_features.max() + min_f = track_features.min() + track_features = np.round((track_features - min_f) / (max_f - min_f) * 255) + track_features = track_features.astype(np.uint8) + d_F_M = [] + cutpff_line = [40]*512 + for d_f in track_features: + for row in range(45): + d_F_M += [[40]*3+d_f.tolist()+[40]*3] + for row in range(3): + d_F_M += [[40]*3+cutpff_line+[40]*3] + d_F_M = np.array(d_F_M) + d_F_M = d_F_M.astype(np.uint8) + track_features_img = cv2.applyColorMap(d_F_M, cv2.COLORMAP_JET) + feature_img2 = 
cv2.resize(track_features_img, (435, 435)) + #cv2.putText(feature_img2, "track_features", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + else: + feature_img2 = np.zeros((435, 435)) + feature_img2 = feature_img2.astype(np.uint8) + feature_img2 = cv2.applyColorMap(feature_img2, cv2.COLORMAP_JET) + #cv2.putText(feature_img2, "track_features", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + feature_img = np.concatenate((feature_img, feature_img2), axis=1) + + if len(cost_matrix_track) != 0 and len(cost_matrix_track[0]) != 0: + max_f = cost_matrix_track.max() + min_f = cost_matrix_track.min() + cost_matrix_track = np.round((cost_matrix_track - min_f) / (max_f - min_f) * 255) + d_F_M = [] + cutpff_line = [40]*len(cost_matrix_track)*10 + for c_m in cost_matrix_track: + add = [] + for row in range(len(c_m)): + add += [255-c_m[row]]*10 + for row in range(10): + d_F_M += [[40]+add+[40]] + d_F_M = np.array(d_F_M) + d_F_M = d_F_M.astype(np.uint8) + cost_matrix_track_img = cv2.applyColorMap(d_F_M, cv2.COLORMAP_JET) + feature_img2 = cv2.resize(cost_matrix_track_img, (435, 435)) + #cv2.putText(feature_img2, "cost_matrix_track", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + else: + feature_img2 = np.zeros((435, 435)) + feature_img2 = feature_img2.astype(np.uint8) + feature_img2 = cv2.applyColorMap(feature_img2, cv2.COLORMAP_JET) + #cv2.putText(feature_img2, "cost_matrix_track", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + feature_img = np.concatenate((feature_img, feature_img2), axis=1) + + if len(cost_matrix) != 0 and len(cost_matrix[0]) != 0: + max_f = cost_matrix.max() + min_f = cost_matrix.min() + cost_matrix = np.round((cost_matrix - min_f) / (max_f - min_f) * 255) + d_F_M = [] + cutpff_line = [40]*len(cost_matrix[0])*10 + for c_m in cost_matrix: + add = [] + for row in range(len(c_m)): + add += [255-c_m[row]]*10 + for row in range(10): + d_F_M += [[40]+add+[40]] + d_F_M = np.array(d_F_M) + d_F_M = d_F_M.astype(np.uint8) + cost_matrix_img = cv2.applyColorMap(d_F_M, cv2.COLORMAP_JET) + feature_img2 = cv2.resize(cost_matrix_img, (435, 435)) + #cv2.putText(feature_img2, "cost_matrix", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + else: + feature_img2 = np.zeros((435, 435)) + feature_img2 = feature_img2.astype(np.uint8) + feature_img2 = cv2.applyColorMap(feature_img2, cv2.COLORMAP_JET) + #cv2.putText(feature_img2, "cost_matrix", (5, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2) + feature_img = np.concatenate((feature_img, feature_img2), axis=1) + + dst_path = out_path + "/" + seq_num + "_" + num_zero[len(str(frame_id))-1] + str(frame_id) + '.png' + cv2.imwrite(dst_path, feature_img) \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/tutorials/ctracker/README.md b/tracking/docker-build-context/byte_track/tutorials/ctracker/README.md new file mode 100644 index 0000000000000000000000000000000000000000..00d9e3d18abbf7137382e64352405d082def7dc3 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/ctracker/README.md @@ -0,0 +1,65 @@ +# CTracker + +#### Step1 +git clone https://github.com/pjl1995/CTracker.git and preapare dataset + + +#### Step2 + +add generate_half_csv.py to https://github.com/pjl1995/CTracker + +run generate_half_csv.py and put train_half_annots.csv in MOT17 + +run +``` +python3 train.py --root_path MOT17 --csv_train train_half_annots.csv --model_dir ctracker/ --depth 50 --epochs 50 +``` +You can also download the CTracker model trained by us: 
[google](https://drive.google.com/file/d/1TwBDomJx8pxD-e96mGIiTduLenUvmf1t/view?usp=sharing), [baidu(code:6p3w)](https://pan.baidu.com/s/1MaCvnHynX2Wzg81hWkqzeg) + +#### Step3 + +replace https://github.com/pjl1995/CTracker/blob/master/test.py + +run +``` +python3 test.py --dataset_path MOT17 --model_dir ctracker --model_path ctracker/mot17_half_ctracker.pt +``` + +#### Step4 + +add eval_motchallenge.py to https://github.com/pjl1995/CTracker + +prepare gt_half_val.txt as CenterTrack [DATA.md](https://github.com/xingyizhou/CenterTrack/blob/master/readme/DATA.md) + + +#### Step5 + +run +``` +python3 eval_motchallenge.py --groundtruths MOT17/train --tests ctracker/results --gt_type half_val --eval_official --score_threshold -1 +``` + + + +# CTracker_BYTE + +#### Step3 + +add mot_online to https://github.com/pjl1995/CTracker + +add byte_tracker.py to https://github.com/pjl1995/CTracker + +add test_byte.py to https://github.com/pjl1995/CTracker + +run +``` +python3 test_byte.py --dataset_path MOT17 --model_dir ctracker --model_path ctracker/mot17_half_ctracker.pt +``` + + +#### Step5 + +run +``` +python3 eval_motchallenge.py --groundtruths MOT17/train --tests ctracker/results --gt_type half_val --eval_official --score_threshold -1 +``` diff --git a/tracking/docker-build-context/byte_track/tutorials/ctracker/byte_tracker.py b/tracking/docker-build-context/byte_track/tutorials/ctracker/byte_tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..0a6ae80119025c0b9b35419ab4ccb5a107b25c0e --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/ctracker/byte_tracker.py @@ -0,0 +1,343 @@ +import numpy as np +from collections import deque +import os +import os.path as osp +import copy +import torch +import torch.nn.functional as F + +from mot_online.kalman_filter import KalmanFilter +from mot_online.basetrack import BaseTrack, TrackState +from mot_online import matching + + + +class STrack(BaseTrack): + shared_kalman = KalmanFilter() + def __init__(self, tlwh, score): + + # wait activate + self._tlwh = np.asarray(tlwh, dtype=np.float) + self.kalman_filter = None + self.mean, self.covariance = None, None + self.is_activated = False + + self.score = score + self.tracklet_len = 0 + + def predict(self): + mean_state = self.mean.copy() + if self.state != TrackState.Tracked: + mean_state[7] = 0 + self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance) + + @staticmethod + def multi_predict(stracks): + if len(stracks) > 0: + multi_mean = np.asarray([st.mean.copy() for st in stracks]) + multi_covariance = np.asarray([st.covariance for st in stracks]) + for i, st in enumerate(stracks): + if st.state != TrackState.Tracked: + multi_mean[i][7] = 0 + multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(multi_mean, multi_covariance) + for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)): + stracks[i].mean = mean + stracks[i].covariance = cov + + def activate(self, kalman_filter, frame_id): + """Start a new tracklet""" + self.kalman_filter = kalman_filter + self.track_id = self.next_id() + self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh)) + + self.tracklet_len = 0 + self.state = TrackState.Tracked + if frame_id == 1: + self.is_activated = True + # self.is_activated = True + self.frame_id = frame_id + self.start_frame = frame_id + + def re_activate(self, new_track, frame_id, new_id=False): + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, 
self.tlwh_to_xyah(new_track.tlwh) + ) + self.tracklet_len = 0 + self.state = TrackState.Tracked + self.is_activated = True + self.frame_id = frame_id + if new_id: + self.track_id = self.next_id() + self.score = new_track.score + + def update(self, new_track, frame_id): + """ + Update a matched track + :type new_track: STrack + :type frame_id: int + :type update_feature: bool + :return: + """ + self.frame_id = frame_id + self.tracklet_len += 1 + + new_tlwh = new_track.tlwh + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh)) + self.state = TrackState.Tracked + self.is_activated = True + + self.score = new_track.score + + @property + # @jit(nopython=True) + def tlwh(self): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. + """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + # @jit(nopython=True) + def tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_xyah(tlwh): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. + """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self): + return self.tlwh_to_xyah(self.tlwh) + + @staticmethod + # @jit(nopython=True) + def tlbr_to_tlwh(tlbr): + ret = np.asarray(tlbr).copy() + ret[2:] -= ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_tlbr(tlwh): + ret = np.asarray(tlwh).copy() + ret[2:] += ret[:2] + return ret + + def __repr__(self): + return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame) + + +class BYTETracker(object): + def __init__(self, frame_rate=30): + self.tracked_stracks = [] # type: list[STrack] + self.lost_stracks = [] # type: list[STrack] + self.removed_stracks = [] # type: list[STrack] + + self.frame_id = 0 + + self.low_thresh = 0.2 + self.track_thresh = 0.4 + self.det_thresh = self.track_thresh + 0.1 + + + self.buffer_size = int(frame_rate / 30.0 * 30) + self.max_time_lost = self.buffer_size + self.kalman_filter = KalmanFilter() + +# def update(self, output_results): + def update(self, det_bboxes, scores): + + self.frame_id += 1 + activated_starcks = [] + refind_stracks = [] + lost_stracks = [] + removed_stracks = [] + +# scores = output_results[:, 4] +# bboxes = output_results[:, :4] # x1y1x2y2 + scores = scores + bboxes = det_bboxes + + remain_inds = scores > self.track_thresh + dets = bboxes[remain_inds] + scores_keep = scores[remain_inds] + + + inds_low = scores > self.low_thresh + inds_high = scores < self.track_thresh + inds_second = np.logical_and(inds_low, inds_high) + dets_second = bboxes[inds_second] + scores_second = scores[inds_second] + + + if len(dets) > 0: + '''Detections''' + detections = [STrack(STrack.tlbr_to_tlwh(tlbr), s) for + (tlbr, s) in zip(dets, scores_keep)] + else: + detections = [] + + ''' Add newly detected tracklets to tracked_stracks''' + unconfirmed = [] + tracked_stracks = [] # type: list[STrack] + for track in self.tracked_stracks: + if not track.is_activated: + unconfirmed.append(track) + else: + tracked_stracks.append(track) + + ''' Step 2: First association, with Kalman and IOU''' + strack_pool = joint_stracks(tracked_stracks, 
self.lost_stracks)
+        # Predict the current location with KF
+        STrack.multi_predict(strack_pool)
+        dists = matching.iou_distance(strack_pool, detections)
+        matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.8)
+
+        for itracked, idet in matches:
+            track = strack_pool[itracked]
+            det = detections[idet]
+            if track.state == TrackState.Tracked:
+                track.update(detections[idet], self.frame_id)
+                activated_starcks.append(track)
+            else:
+                track.re_activate(det, self.frame_id, new_id=False)
+                refind_stracks.append(track)
+
+        ''' Step 3: Second association, with IOU'''
+        # associate the unmatched tracks with the low score detections
+        if len(dets_second) > 0:
+            '''Detections'''
+            detections_second = [STrack(STrack.tlbr_to_tlwh(tlbr), s) for
+                                 (tlbr, s) in zip(dets_second, scores_second)]
+        else:
+            detections_second = []
+        r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked]
+        dists = matching.iou_distance(r_tracked_stracks, detections_second)
+        matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.5)
+        for itracked, idet in matches:
+            track = r_tracked_stracks[itracked]
+            det = detections_second[idet]
+            if track.state == TrackState.Tracked:
+                track.update(det, self.frame_id)
+                activated_starcks.append(track)
+            else:
+                track.re_activate(det, self.frame_id, new_id=False)
+                refind_stracks.append(track)
+
+        for it in u_track:
+            #track = strack_pool[it]
+            track = r_tracked_stracks[it]
+            if not track.state == TrackState.Lost:
+                track.mark_lost()
+                lost_stracks.append(track)
+
+        '''Deal with unconfirmed tracks, usually tracks with only one beginning frame'''
+        detections = [detections[i] for i in u_detection]
+        dists = matching.iou_distance(unconfirmed, detections)
+        matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7)
+        for itracked, idet in matches:
+            unconfirmed[itracked].update(detections[idet], self.frame_id)
+            activated_starcks.append(unconfirmed[itracked])
+        for it in u_unconfirmed:
+            track = unconfirmed[it]
+            track.mark_removed()
+            removed_stracks.append(track)
+
+        """ Step 4: Init new stracks"""
+        for inew in u_detection:
+            track = detections[inew]
+            if track.score < self.det_thresh:
+                continue
+            track.activate(self.kalman_filter, self.frame_id)
+            activated_starcks.append(track)
+        """ Step 5: Update state"""
+        for track in self.lost_stracks:
+            if self.frame_id - track.end_frame > self.max_time_lost:
+                track.mark_removed()
+                removed_stracks.append(track)
+
+        # print('Remained match {} s'.format(t4-t3))
+
+        self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked]
+        self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks)
+        self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks)
+        self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks)
+        self.lost_stracks.extend(lost_stracks)
+        self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks)
+        self.removed_stracks.extend(removed_stracks)
+        self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks)
+        # get scores of lost tracks
+        output_stracks = [track for track in self.tracked_stracks if track.is_activated]
+
+        return output_stracks
+
+
+
+def joint_stracks(tlista, tlistb):
+    exists = {}
+    res = []
+    for t in tlista:
+        exists[t.track_id] = 1
+        res.append(t)
+    for t in tlistb:
+        tid = t.track_id
+        if not exists.get(tid, 0):
+            exists[tid] = 1
+            res.append(t)
+    return res
+
+
+def 
sub_stracks(tlista, tlistb):
+    stracks = {}
+    for t in tlista:
+        stracks[t.track_id] = t
+    for t in tlistb:
+        tid = t.track_id
+        if stracks.get(tid, 0):
+            del stracks[tid]
+    return list(stracks.values())
+
+
+def remove_duplicate_stracks(stracksa, stracksb):
+    pdist = matching.iou_distance(stracksa, stracksb)
+    pairs = np.where(pdist < 0.15)
+    dupa, dupb = list(), list()
+    for p, q in zip(*pairs):
+        timep = stracksa[p].frame_id - stracksa[p].start_frame
+        timeq = stracksb[q].frame_id - stracksb[q].start_frame
+        if timep > timeq:
+            dupb.append(q)
+        else:
+            dupa.append(p)
+    resa = [t for i, t in enumerate(stracksa) if i not in dupa]
+    resb = [t for i, t in enumerate(stracksb) if i not in dupb]
+    return resa, resb
+
+
+def remove_fp_stracks(stracksa, n_frame=10):
+    remain = []
+    for t in stracksa:
+        score_5 = t.score_list[-n_frame:]
+        score_5 = np.array(score_5, dtype=np.float32)
+        index = score_5 < 0.45
+        num = np.sum(index)
+        if num < n_frame:
+            remain.append(t)
+    return remain
diff --git a/tracking/docker-build-context/byte_track/tutorials/ctracker/eval_motchallenge.py b/tracking/docker-build-context/byte_track/tutorials/ctracker/eval_motchallenge.py
new file mode 100644
index 0000000000000000000000000000000000000000..a2b51388a77bd76bfc16a0ac2740e6fcd3d86aac
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/tutorials/ctracker/eval_motchallenge.py
@@ -0,0 +1,122 @@
+"""py-motmetrics - metrics for multiple object tracker (MOT) benchmarking.
+Christoph Heindl, 2017
+https://github.com/cheind/py-motmetrics
+Modified by Rufeng Zhang
+"""
+
+import argparse
+import glob
+import os
+import logging
+import motmetrics as mm
+import pandas as pd
+from collections import OrderedDict
+from pathlib import Path
+
+
+def parse_args():
+    parser = argparse.ArgumentParser(description="""
+Compute metrics for trackers using MOTChallenge ground-truth data.
+Files
+-----
+All file content, ground truth and test files, have to comply with the
+format described in
+Milan, Anton, et al.
+"Mot16: A benchmark for multi-object tracking."
+arXiv preprint arXiv:1603.00831 (2016).
+https://motchallenge.net/
+Structure
+---------
+Layout for ground truth data
+    <GT_ROOT>/<SEQUENCE_1>/gt/gt.txt
+    <GT_ROOT>/<SEQUENCE_2>/gt/gt.txt
+    ...
+Layout for test data
+    <TEST_ROOT>/<SEQUENCE_1>.txt
+    <TEST_ROOT>/<SEQUENCE_2>.txt
+    ...
+Sequences of ground truth and test will be matched according to the `<SEQUENCE>`
+string.""", formatter_class=argparse.RawTextHelpFormatter)
+
+    parser.add_argument('--groundtruths', type=str, help='Directory containing ground truth files.')
+    parser.add_argument('--tests', type=str, help='Directory containing tracker result files')
+    parser.add_argument('--score_threshold', type=float, help='Score threshold', default=0.5)
+    parser.add_argument('--gt_type', type=str, default='')
+    parser.add_argument('--eval_official', action='store_true')
+    parser.add_argument('--loglevel', type=str, help='Log level', default='info')
+    parser.add_argument('--fmt', type=str, help='Data format', default='mot15-2D')
+    parser.add_argument('--solver', type=str, help='LAP solver to use')
+    return parser.parse_args()
+
+
+def compare_dataframes(gts, ts):
+    accs = []
+    names = []
+    for k, tsacc in ts.items():
+        if k in gts:
+            logging.info('Comparing {}...'.format(k))
+            accs.append(mm.utils.compare_to_groundtruth(gts[k], tsacc, 'iou', distth=0.5))
+            names.append(k)
+        else:
+            logging.warning('No ground truth for {}, skipping.'.format(k))
+
+    return accs, names
+
+
+if __name__ == '__main__':
+
+    args = parse_args()
+
+    loglevel = getattr(logging, args.loglevel.upper(), None)
+    if not isinstance(loglevel, int):
+        raise ValueError('Invalid log level: {}'.format(args.loglevel))
+    logging.basicConfig(level=loglevel, format='%(asctime)s %(levelname)s - %(message)s', datefmt='%I:%M:%S')
+
+    if args.solver:
+        mm.lap.default_solver = args.solver
+
+    gt_type = args.gt_type
+    print('gt_type', gt_type)
+    gtfiles = glob.glob(
+        os.path.join(args.groundtruths, '*/gt/gt_{}.txt'.format(gt_type)))
+    print('gt_files', gtfiles)
+    tsfiles = [f for f in glob.glob(os.path.join(args.tests, '*.txt')) if not os.path.basename(f).startswith('eval')]
+
+    logging.info('Found {} groundtruths and {} test files.'.format(len(gtfiles), len(tsfiles)))
+    logging.info('Available LAP solvers {}'.format(mm.lap.available_solvers))
+    logging.info('Default LAP solver \'{}\''.format(mm.lap.default_solver))
+    logging.info('Loading files.')
+
+    gt = OrderedDict([(Path(f).parts[-3], mm.io.loadtxt(f, fmt=args.fmt, min_confidence=1)) for f in gtfiles])
+    ts = OrderedDict([(os.path.splitext(Path(f).parts[-1])[0], mm.io.loadtxt(f, fmt=args.fmt, min_confidence=args.score_threshold)) for f in tsfiles])
+# ts = gt
+
+    mh = mm.metrics.create()
+    accs, names = compare_dataframes(gt, ts)
+
+    logging.info('Running metrics')
+    metrics = ['recall', 'precision', 'num_unique_objects', 'mostly_tracked',
+               'partially_tracked', 'mostly_lost', 'num_false_positives', 'num_misses',
+               'num_switches', 'num_fragmentations', 'mota', 'motp', 'num_objects']
+    summary = mh.compute_many(accs, names=names, metrics=metrics, generate_overall=True)
+    # summary = mh.compute_many(accs, names=names, metrics=mm.metrics.motchallenge_metrics, generate_overall=True)
+    # print(mm.io.render_summary(
+    #     summary, formatters=mh.formatters,
+    #     namemap=mm.io.motchallenge_metric_names))
+    div_dict = {
+        'num_objects': ['num_false_positives', 'num_misses', 'num_switches', 'num_fragmentations'],
+        'num_unique_objects': ['mostly_tracked', 'partially_tracked', 'mostly_lost']}
+    for divisor in div_dict:
+        for divided in div_dict[divisor]:
+            summary[divided] = (summary[divided] / summary[divisor])
+    fmt = mh.formatters
+    change_fmt_list = ['num_false_positives', 'num_misses', 'num_switches', 'num_fragmentations', 'mostly_tracked',
+                       'partially_tracked', 'mostly_lost']
+    for k in change_fmt_list:
+        fmt[k] = fmt['mota']
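+    # Worked example of the normalization above (illustrative numbers): with
+    # num_objects = 1000 and num_misses = 150, summary['num_misses'] becomes 0.15,
+    # so reusing the 'mota' formatter renders it in the same percent style as MOTA.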
+ print(mm.io.render_summary(summary, formatters=fmt, namemap=mm.io.motchallenge_metric_names)) + if args.eval_official: + metrics = mm.metrics.motchallenge_metrics + ['num_objects'] + summary = mh.compute_many(accs, names=names, metrics=metrics, generate_overall=True) + print(mm.io.render_summary(summary, formatters=mh.formatters, namemap=mm.io.motchallenge_metric_names)) + logging.info('Completed') diff --git a/tracking/docker-build-context/byte_track/tutorials/ctracker/generate_half_csv.py b/tracking/docker-build-context/byte_track/tutorials/ctracker/generate_half_csv.py new file mode 100644 index 0000000000000000000000000000000000000000..12ca75bca486e8187cfc45bea50311e8decfdfaf --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/ctracker/generate_half_csv.py @@ -0,0 +1,37 @@ +import os +import numpy as np +prefix_dir = 'MOT17/' +root_dir = 'train/' +result_csv = 'train_half_annots.csv' +train_half_set = {2: 301, 4: 526, 5:419, 9:263, 10:328, 11:451, 13:376} +fout = open(result_csv, 'w') + +for data_name in sorted(os.listdir(prefix_dir + root_dir)): + print(data_name) + gt_path = os.path.join(prefix_dir, root_dir, data_name, 'gt', 'gt.txt') + # print(gt_path) + data_raw = np.loadtxt(gt_path, delimiter=',', dtype='float', usecols=(0,1,2,3,4,5,6,7,8)) + + data_sort = data_raw[np.lexsort(data_raw[:,::-1].T)] + visible_raw = data_sort[:,8] + # print(data_sort) + # print(data_sort[-1, 0]) + img_num = data_sort[-1, 0] + + # print(data_sort.shape[0]) + box_num = data_sort.shape[0] + + person_box_num = np.sum(data_sort[:,6] == 1) + # print(person_box_num) +# import ipdb; ipdb.set_trace() + for i in range(box_num): + c = int(data_sort[i, 6]) + v = visible_raw[i] + img_index = int(data_sort[i, 0]) + if c == 1 and v > 0.1 and img_index < train_half_set[int(data_name[-2:])]: + img_index = int(data_sort[i, 0]) + img_name = data_name + '/img1/' + str(img_index).zfill(6) + '.jpg' + print(root_dir + img_name + ', ' + str(int(data_sort[i, 1])) + ', ' + str(data_sort[i, 2]) + ', ' + str(data_sort[i, 3]) + ', ' + str(data_sort[i, 2] + data_sort[i, 4]) + ', ' + str(data_sort[i, 3] + data_sort[i, 5]) + ', person\n') + fout.write(root_dir + img_name + ', ' + str(int(data_sort[i, 1])) + ', ' + str(data_sort[i, 2]) + ', ' + str(data_sort[i, 3]) + ', ' + str(data_sort[i, 2] + data_sort[i, 4]) + ', ' + str(data_sort[i, 3] + data_sort[i, 5]) + ', person\n') + +fout.close() diff --git a/tracking/docker-build-context/byte_track/tutorials/ctracker/mot_online/basetrack.py b/tracking/docker-build-context/byte_track/tutorials/ctracker/mot_online/basetrack.py new file mode 100644 index 0000000000000000000000000000000000000000..4fe2233607f6d4ed28b11a0ae6c0303c8ca19098 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/ctracker/mot_online/basetrack.py @@ -0,0 +1,52 @@ +import numpy as np +from collections import OrderedDict + + +class TrackState(object): + New = 0 + Tracked = 1 + Lost = 2 + Removed = 3 + + +class BaseTrack(object): + _count = 0 + + track_id = 0 + is_activated = False + state = TrackState.New + + history = OrderedDict() + features = [] + curr_feature = None + score = 0 + start_frame = 0 + frame_id = 0 + time_since_update = 0 + + # multi-camera + location = (np.inf, np.inf) + + @property + def end_frame(self): + return self.frame_id + + @staticmethod + def next_id(): + BaseTrack._count += 1 + return BaseTrack._count + + def activate(self, *args): + raise NotImplementedError + + def predict(self): + raise NotImplementedError + + def update(self, *args, **kwargs): + 
raise NotImplementedError + + def mark_lost(self): + self.state = TrackState.Lost + + def mark_removed(self): + self.state = TrackState.Removed diff --git a/tracking/docker-build-context/byte_track/tutorials/ctracker/mot_online/kalman_filter.py b/tracking/docker-build-context/byte_track/tutorials/ctracker/mot_online/kalman_filter.py new file mode 100644 index 0000000000000000000000000000000000000000..b4c4e9854d8abd2fea75ad6b1fe8cd6846c43680 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/ctracker/mot_online/kalman_filter.py @@ -0,0 +1,269 @@ +# vim: expandtab:ts=4:sw=4 +import numpy as np +import scipy.linalg + +""" +Table for the 0.95 quantile of the chi-square distribution with N degrees of +freedom (contains values for N=1, ..., 9). Taken from MATLAB/Octave's chi2inv +function and used as Mahalanobis gating threshold. +""" +chi2inv95 = { + 1: 3.8415, + 2: 5.9915, + 3: 7.8147, + 4: 9.4877, + 5: 11.070, + 6: 12.592, + 7: 14.067, + 8: 15.507, + 9: 16.919} + + +class KalmanFilter(object): + """ + A simple Kalman filter for tracking bounding boxes in image space. + + The 8-dimensional state space + + x, y, a, h, vx, vy, va, vh + + contains the bounding box center position (x, y), aspect ratio a, height h, + and their respective velocities. + + Object motion follows a constant velocity model. The bounding box location + (x, y, a, h) is taken as direct observation of the state space (linear + observation model). + + """ + + def __init__(self): + ndim, dt = 4, 1. + + # Create Kalman filter model matrices. + self._motion_mat = np.eye(2 * ndim, 2 * ndim) + for i in range(ndim): + self._motion_mat[i, ndim + i] = dt + self._update_mat = np.eye(ndim, 2 * ndim) + + # Motion and observation uncertainty are chosen relative to the current + # state estimate. These weights control the amount of uncertainty in + # the model. This is a bit hacky. + self._std_weight_position = 1. / 20 + self._std_weight_velocity = 1. / 160 + + def initiate(self, measurement): + """Create track from unassociated measurement. + + Parameters + ---------- + measurement : ndarray + Bounding box coordinates (x, y, a, h) with center position (x, y), + aspect ratio a, and height h. + + Returns + ------- + (ndarray, ndarray) + Returns the mean vector (8 dimensional) and covariance matrix (8x8 + dimensional) of the new track. Unobserved velocities are initialized + to 0 mean. + + """ + mean_pos = measurement + mean_vel = np.zeros_like(mean_pos) + mean = np.r_[mean_pos, mean_vel] + + std = [ + 2 * self._std_weight_position * measurement[3], + 2 * self._std_weight_position * measurement[3], + 1e-2, + 2 * self._std_weight_position * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 1e-5, + 10 * self._std_weight_velocity * measurement[3]] + covariance = np.diag(np.square(std)) + return mean, covariance + + def predict(self, mean, covariance): + """Run Kalman filter prediction step. + + Parameters + ---------- + mean : ndarray + The 8 dimensional mean vector of the object state at the previous + time step. + covariance : ndarray + The 8x8 dimensional covariance matrix of the object state at the + previous time step. + + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. 
+ + """ + std_pos = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-2, + self._std_weight_position * mean[3]] + std_vel = [ + self._std_weight_velocity * mean[3], + self._std_weight_velocity * mean[3], + 1e-5, + self._std_weight_velocity * mean[3]] + motion_cov = np.diag(np.square(np.r_[std_pos, std_vel])) + + #mean = np.dot(self._motion_mat, mean) + mean = np.dot(mean, self._motion_mat.T) + covariance = np.linalg.multi_dot(( + self._motion_mat, covariance, self._motion_mat.T)) + motion_cov + + return mean, covariance + + def project(self, mean, covariance): + """Project state distribution to measurement space. + + Parameters + ---------- + mean : ndarray + The state's mean vector (8 dimensional array). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + + Returns + ------- + (ndarray, ndarray) + Returns the projected mean and covariance matrix of the given state + estimate. + + """ + std = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-1, + self._std_weight_position * mean[3]] + innovation_cov = np.diag(np.square(std)) + + mean = np.dot(self._update_mat, mean) + covariance = np.linalg.multi_dot(( + self._update_mat, covariance, self._update_mat.T)) + return mean, covariance + innovation_cov + + def multi_predict(self, mean, covariance): + """Run Kalman filter prediction step (Vectorized version). + Parameters + ---------- + mean : ndarray + The Nx8 dimensional mean matrix of the object states at the previous + time step. + covariance : ndarray + The Nx8x8 dimensional covariance matrics of the object states at the + previous time step. + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + """ + std_pos = [ + self._std_weight_position * mean[:, 3], + self._std_weight_position * mean[:, 3], + 1e-2 * np.ones_like(mean[:, 3]), + self._std_weight_position * mean[:, 3]] + std_vel = [ + self._std_weight_velocity * mean[:, 3], + self._std_weight_velocity * mean[:, 3], + 1e-5 * np.ones_like(mean[:, 3]), + self._std_weight_velocity * mean[:, 3]] + sqr = np.square(np.r_[std_pos, std_vel]).T + + motion_cov = [] + for i in range(len(mean)): + motion_cov.append(np.diag(sqr[i])) + motion_cov = np.asarray(motion_cov) + + mean = np.dot(mean, self._motion_mat.T) + left = np.dot(self._motion_mat, covariance).transpose((1, 0, 2)) + covariance = np.dot(left, self._motion_mat.T) + motion_cov + + return mean, covariance + + def update(self, mean, covariance, measurement): + """Run Kalman filter correction step. + + Parameters + ---------- + mean : ndarray + The predicted state's mean vector (8 dimensional). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + measurement : ndarray + The 4 dimensional measurement vector (x, y, a, h), where (x, y) + is the center position, a the aspect ratio, and h the height of the + bounding box. + + Returns + ------- + (ndarray, ndarray) + Returns the measurement-corrected state distribution. 
+ + """ + projected_mean, projected_cov = self.project(mean, covariance) + + chol_factor, lower = scipy.linalg.cho_factor( + projected_cov, lower=True, check_finite=False) + kalman_gain = scipy.linalg.cho_solve( + (chol_factor, lower), np.dot(covariance, self._update_mat.T).T, + check_finite=False).T + innovation = measurement - projected_mean + + new_mean = mean + np.dot(innovation, kalman_gain.T) + new_covariance = covariance - np.linalg.multi_dot(( + kalman_gain, projected_cov, kalman_gain.T)) + return new_mean, new_covariance + + def gating_distance(self, mean, covariance, measurements, + only_position=False, metric='maha'): + """Compute gating distance between state distribution and measurements. + A suitable distance threshold can be obtained from `chi2inv95`. If + `only_position` is False, the chi-square distribution has 4 degrees of + freedom, otherwise 2. + Parameters + ---------- + mean : ndarray + Mean vector over the state distribution (8 dimensional). + covariance : ndarray + Covariance of the state distribution (8x8 dimensional). + measurements : ndarray + An Nx4 dimensional matrix of N measurements, each in + format (x, y, a, h) where (x, y) is the bounding box center + position, a the aspect ratio, and h the height. + only_position : Optional[bool] + If True, distance computation is done with respect to the bounding + box center position only. + Returns + ------- + ndarray + Returns an array of length N, where the i-th element contains the + squared Mahalanobis distance between (mean, covariance) and + `measurements[i]`. + """ + mean, covariance = self.project(mean, covariance) + if only_position: + mean, covariance = mean[:2], covariance[:2, :2] + measurements = measurements[:, :2] + + d = measurements - mean + if metric == 'gaussian': + return np.sum(d * d, axis=1) + elif metric == 'maha': + cholesky_factor = np.linalg.cholesky(covariance) + z = scipy.linalg.solve_triangular( + cholesky_factor, d.T, lower=True, check_finite=False, + overwrite_b=True) + squared_maha = np.sum(z * z, axis=0) + return squared_maha + else: + raise ValueError('invalid distance metric') diff --git a/tracking/docker-build-context/byte_track/tutorials/ctracker/mot_online/matching.py b/tracking/docker-build-context/byte_track/tutorials/ctracker/mot_online/matching.py new file mode 100644 index 0000000000000000000000000000000000000000..54cb4be09624cdb68581508bdbdeecdc63539b7c --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/ctracker/mot_online/matching.py @@ -0,0 +1,198 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import lap +import numpy as np +import scipy +from cython_bbox import bbox_overlaps as bbox_ious +from scipy.spatial.distance import cdist + +chi2inv95 = { + 1: 3.8415, + 2: 5.9915, + 3: 7.8147, + 4: 9.4877, + 5: 11.070, + 6: 12.592, + 7: 14.067, + 8: 15.507, + 9: 16.919} + +def merge_matches(m1, m2, shape): + O,P,Q = shape + m1 = np.asarray(m1) + m2 = np.asarray(m2) + + M1 = scipy.sparse.coo_matrix((np.ones(len(m1)), (m1[:, 0], m1[:, 1])), shape=(O, P)) + M2 = scipy.sparse.coo_matrix((np.ones(len(m2)), (m2[:, 0], m2[:, 1])), shape=(P, Q)) + + mask = M1*M2 + match = mask.nonzero() + match = list(zip(match[0], match[1])) + unmatched_O = tuple(set(range(O)) - set([i for i, j in match])) + unmatched_Q = tuple(set(range(Q)) - set([j for i, j in match])) + + return match, unmatched_O, unmatched_Q + + +def _indices_to_matches(cost_matrix, indices, thresh): + matched_cost = 
cost_matrix[tuple(zip(*indices))]
+    matched_mask = (matched_cost <= thresh)
+
+    matches = indices[matched_mask]
+    unmatched_a = tuple(set(range(cost_matrix.shape[0])) - set(matches[:, 0]))
+    unmatched_b = tuple(set(range(cost_matrix.shape[1])) - set(matches[:, 1]))
+
+    return matches, unmatched_a, unmatched_b
+
+
+def linear_assignment(cost_matrix, thresh):
+    if cost_matrix.size == 0:
+        return np.empty((0, 2), dtype=int), tuple(range(cost_matrix.shape[0])), tuple(range(cost_matrix.shape[1]))
+    matches, unmatched_a, unmatched_b = [], [], []
+    cost, x, y = lap.lapjv(cost_matrix, extend_cost=True, cost_limit=thresh)
+    for ix, mx in enumerate(x):
+        if mx >= 0:
+            matches.append([ix, mx])
+    unmatched_a = np.where(x < 0)[0]
+    unmatched_b = np.where(y < 0)[0]
+    matches = np.asarray(matches)
+    return matches, unmatched_a, unmatched_b
+
+
+def ious(atlbrs, btlbrs):
+    """
+    Compute cost based on IoU
+    :type atlbrs: list[tlbr] | np.ndarray
+    :type btlbrs: list[tlbr] | np.ndarray
+
+    :rtype ious np.ndarray
+    """
+    ious = np.zeros((len(atlbrs), len(btlbrs)), dtype=np.float)
+    if ious.size == 0:
+        return ious
+
+    ious = bbox_ious(
+        np.ascontiguousarray(atlbrs, dtype=np.float),
+        np.ascontiguousarray(btlbrs, dtype=np.float)
+    )
+
+    return ious
+
+
+def iou_distance(atracks, btracks):
+    """
+    Compute cost based on IoU
+    :type atracks: list[STrack]
+    :type btracks: list[STrack]
+
+    :rtype cost_matrix np.ndarray
+    """
+
+    if (len(atracks)>0 and isinstance(atracks[0], np.ndarray)) or (len(btracks) > 0 and isinstance(btracks[0], np.ndarray)):
+        atlbrs = atracks
+        btlbrs = btracks
+    else:
+        atlbrs = [track.tlbr for track in atracks]
+        btlbrs = [track.tlbr for track in btracks]
+    _ious = ious(atlbrs, btlbrs)
+    cost_matrix = 1 - _ious
+
+    return cost_matrix
+
+def embedding_distance(tracks, detections, metric='cosine'):
+    """
+    :param tracks: list[STrack]
+    :param detections: list[BaseTrack]
+    :param metric:
+    :return: cost_matrix np.ndarray
+    """
+
+    cost_matrix = np.zeros((len(tracks), len(detections)), dtype=np.float)
+    if cost_matrix.size == 0:
+        return cost_matrix
+    det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float)
+    #for i, track in enumerate(tracks):
+        #cost_matrix[i, :] = np.maximum(0.0, cdist(track.smooth_feat.reshape(1,-1), det_features, metric))
+    track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float)
+    cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric))  # Normalized features
+    return cost_matrix
+
+def embedding_distance2(tracks, detections, metric='cosine'):
+    """
+    :param tracks: list[STrack]
+    :param detections: list[BaseTrack]
+    :param metric:
+    :return: cost_matrix np.ndarray
+    """
+
+    cost_matrix = np.zeros((len(tracks), len(detections)), dtype=np.float)
+    if cost_matrix.size == 0:
+        return cost_matrix
+    det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float)
+    #for i, track in enumerate(tracks):
+        #cost_matrix[i, :] = np.maximum(0.0, cdist(track.smooth_feat.reshape(1,-1), det_features, metric))
+    track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float)
+    cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric))  # Normalized features
+    track_features = np.asarray([track.features[0] for track in tracks], dtype=np.float)
+    cost_matrix2 = np.maximum(0.0, cdist(track_features, det_features, metric))  # Normalized features
+    track_features = np.asarray([track.features[len(track.features)-1] for track in tracks], dtype=np.float)
+    cost_matrix3 = 
np.maximum(0.0, cdist(track_features, det_features, metric))  # Normalized features
+    for row in range(len(cost_matrix)):
+        cost_matrix[row] = (cost_matrix[row]+cost_matrix2[row]+cost_matrix3[row])/3
+    return cost_matrix
+
+
+def vis_id_feature_A_distance(tracks, detections, metric='cosine'):
+    track_features = []
+    det_features = []
+    leg1 = len(tracks)
+    leg2 = len(detections)
+    cost_matrix = np.zeros((leg1, leg2), dtype=np.float)
+    cost_matrix_det = np.zeros((leg1, leg2), dtype=np.float)
+    cost_matrix_track = np.zeros((leg1, leg2), dtype=np.float)
+    det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float)
+    track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float)
+    if leg2 != 0:
+        cost_matrix_det = np.maximum(0.0, cdist(det_features, det_features, metric))
+    if leg1 != 0:
+        cost_matrix_track = np.maximum(0.0, cdist(track_features, track_features, metric))
+    if cost_matrix.size == 0:
+        return track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track
+    cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric))
+    if leg1 > 10:
+        leg1 = 10
+        tracks = tracks[:10]
+    if leg2 > 10:
+        leg2 = 10
+        detections = detections[:10]
+    det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float)
+    track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float)
+    return track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track
+
+def gate_cost_matrix(kf, cost_matrix, tracks, detections, only_position=False):
+    if cost_matrix.size == 0:
+        return cost_matrix
+    gating_dim = 2 if only_position else 4
+    gating_threshold = chi2inv95[gating_dim]
+    measurements = np.asarray([det.to_xyah() for det in detections])
+    for row, track in enumerate(tracks):
+        gating_distance = kf.gating_distance(
+            track.mean, track.covariance, measurements, only_position)
+        cost_matrix[row, gating_distance > gating_threshold] = np.inf
+    return cost_matrix
+
+
+def fuse_motion(kf, cost_matrix, tracks, detections, only_position=False, lambda_=0.98):
+    if cost_matrix.size == 0:
+        return cost_matrix
+    gating_dim = 2 if only_position else 4
+    gating_threshold = chi2inv95[gating_dim]
+    measurements = np.asarray([det.to_xyah() for det in detections])
+    for row, track in enumerate(tracks):
+        gating_distance = kf.gating_distance(
+            track.mean, track.covariance, measurements, only_position, metric='maha')
+        cost_matrix[row, gating_distance > gating_threshold] = np.inf
+        cost_matrix[row] = lambda_ * cost_matrix[row] + (1 - lambda_) * gating_distance
+    return cost_matrix
diff --git a/tracking/docker-build-context/byte_track/tutorials/ctracker/test.py b/tracking/docker-build-context/byte_track/tutorials/ctracker/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..772d9169975cd51f4aad5830fde363f776e97b4b
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/tutorials/ctracker/test.py
@@ -0,0 +1,337 @@
+import numpy as np
+import torchvision
+import time
+import math
+import os
+import copy
+import pdb
+import argparse
+import sys
+import cv2
+import skimage.io
+import skimage.transform
+import skimage.color
+import skimage
+import torch
+import model
+
+from torch.utils.data import Dataset, DataLoader
+from torchvision import datasets, models, transforms
+from dataloader import CSVDataset, collater, Resizer, AspectRatioBasedSampler, Augmenter, UnNormalizer, Normalizer, RGB_MEAN, RGB_STD
+from scipy.optimize import linear_sum_assignment
+
+# assert 
torch.__version__.split('.')[1] == '4' + +print('CUDA available: {}'.format(torch.cuda.is_available())) + +color_list = [(0, 0, 255), (255, 0, 0), (0, 255, 0), (255, 0, 255), (0, 255, 255), (255, 255, 0), (128, 0, 255), +(0, 128, 255), (128, 255, 0), (0, 255, 128), (255, 128, 0), (255, 0, 128), (128, 128, 255), (128, 255, 128), (255, 128, 128), (128, 128, 0), (128, 0, 128)] + +class detect_rect: + def __init__(self): + self.curr_frame = 0 + self.curr_rect = np.array([0, 0, 1, 1]) + self.next_rect = np.array([0, 0, 1, 1]) + self.conf = 0 + self.id = 0 + + @property + def position(self): + x = (self.curr_rect[0] + self.curr_rect[2])/2 + y = (self.curr_rect[1] + self.curr_rect[3])/2 + return np.array([x, y]) + + @property + def size(self): + w = self.curr_rect[2] - self.curr_rect[0] + h = self.curr_rect[3] - self.curr_rect[1] + return np.array([w, h]) + +class tracklet: + def __init__(self, det_rect): + self.id = det_rect.id + self.rect_list = [det_rect] + self.rect_num = 1 + self.last_rect = det_rect + self.last_frame = det_rect.curr_frame + self.no_match_frame = 0 + + def add_rect(self, det_rect): + self.rect_list.append(det_rect) + self.rect_num = self.rect_num + 1 + self.last_rect = det_rect + self.last_frame = det_rect.curr_frame + + @property + def velocity(self): + if(self.rect_num < 2): + return (0, 0) + elif(self.rect_num < 6): + return (self.rect_list[self.rect_num - 1].position - self.rect_list[self.rect_num - 2].position) / (self.rect_list[self.rect_num - 1].curr_frame - self.rect_list[self.rect_num - 2].curr_frame) + else: + v1 = (self.rect_list[self.rect_num - 1].position - self.rect_list[self.rect_num - 4].position) / (self.rect_list[self.rect_num - 1].curr_frame - self.rect_list[self.rect_num - 4].curr_frame) + v2 = (self.rect_list[self.rect_num - 2].position - self.rect_list[self.rect_num - 5].position) / (self.rect_list[self.rect_num - 2].curr_frame - self.rect_list[self.rect_num - 5].curr_frame) + v3 = (self.rect_list[self.rect_num - 3].position - self.rect_list[self.rect_num - 6].position) / (self.rect_list[self.rect_num - 3].curr_frame - self.rect_list[self.rect_num - 6].curr_frame) + return (v1 + v2 + v3) / 3 + + +def cal_iou(rect1, rect2): + x1, y1, x2, y2 = rect1 + x3, y3, x4, y4 = rect2 + i_w = min(x2, x4) - max(x1, x3) + i_h = min(y2, y4) - max(y1, y3) + if(i_w <= 0 or i_h <= 0): + return 0 + i_s = i_w * i_h + s_1 = (x2 - x1) * (y2 - y1) + s_2 = (x4 - x3) * (y4 - y3) + return float(i_s) / (s_1 + s_2 - i_s) + +def cal_simi(det_rect1, det_rect2): + return cal_iou(det_rect1.next_rect, det_rect2.curr_rect) + +def cal_simi_track_det(track, det_rect): + if(det_rect.curr_frame <= track.last_frame): + print("cal_simi_track_det error") + return 0 + elif(det_rect.curr_frame - track.last_frame == 1): + return cal_iou(track.last_rect.next_rect, det_rect.curr_rect) + else: + pred_rect = track.last_rect.curr_rect + np.append(track.velocity, track.velocity) * (det_rect.curr_frame - track.last_frame) + return cal_iou(pred_rect, det_rect.curr_rect) + +def track_det_match(tracklet_list, det_rect_list, min_iou = 0.5): + num1 = len(tracklet_list) + num2 = len(det_rect_list) + cost_mat = np.zeros((num1, num2)) + for i in range(num1): + for j in range(num2): + cost_mat[i, j] = -cal_simi_track_det(tracklet_list[i], det_rect_list[j]) + + match_result = linear_sum_assignment(cost_mat) + match_result = np.asarray(match_result) + match_result = np.transpose(match_result) + + matches, unmatched1, unmatched2 = [], [], [] + for i in range(num1): + if i not in match_result[:, 0]: + 
unmatched1.append(i) + for j in range(num2): + if j not in match_result[:, 1]: + unmatched2.append(j) + for i, j in match_result: + if cost_mat[i, j] > -min_iou: + unmatched1.append(i) + unmatched2.append(j) + else: + matches.append((i, j)) + return matches, unmatched1, unmatched2 + +def draw_caption(image, box, caption, color): + b = np.array(box).astype(int) + cv2.putText(image, caption, (b[0], b[1] - 8), cv2.FONT_HERSHEY_PLAIN, 2, color, 2) + + +def run_each_dataset(model_dir, retinanet, dataset_path, subset, cur_dataset): + print(cur_dataset) + + img_list = os.listdir(os.path.join(dataset_path, subset, cur_dataset, 'img1')) + img_list = [os.path.join(dataset_path, subset, cur_dataset, 'img1', _) for _ in img_list if ('jpg' in _) or ('png' in _)] + img_list = sorted(img_list) + + img_len = len(img_list) + last_feat = None + + confidence_threshold = 0.4 + IOU_threshold = 0.5 + retention_threshold = 10 + + det_list_all = [] + tracklet_all = [] + max_id = 0 + max_draw_len = 100 + draw_interval = 5 + img_width = 1920 + img_height = 1080 + fps = 30 + + for i in range(img_len): + det_list_all.append([]) + + for idx in range((int(img_len / 2)), img_len + 1): + i = idx - 1 + print('tracking: ', i) + with torch.no_grad(): + data_path1 = img_list[min(idx, img_len - 1)] + img_origin1 = skimage.io.imread(data_path1) + img_h, img_w, _ = img_origin1.shape + img_height, img_width = img_h, img_w + resize_h, resize_w = math.ceil(img_h / 32) * 32, math.ceil(img_w / 32) * 32 + img1 = np.zeros((resize_h, resize_w, 3), dtype=img_origin1.dtype) + img1[:img_h, :img_w, :] = img_origin1 + img1 = (img1.astype(np.float32) / 255.0 - np.array([[RGB_MEAN]])) / np.array([[RGB_STD]]) + img1 = torch.from_numpy(img1).permute(2, 0, 1).view(1, 3, resize_h, resize_w) + scores, transformed_anchors, last_feat = retinanet(img1.cuda().float(), last_feat=last_feat) +# if idx > 0: + if idx > (int(img_len / 2)): + idxs = np.where(scores>0.1) + + for j in range(idxs[0].shape[0]): + bbox = transformed_anchors[idxs[0][j], :] + x1 = int(bbox[0]) + y1 = int(bbox[1]) + x2 = int(bbox[2]) + y2 = int(bbox[3]) + + x3 = int(bbox[4]) + y3 = int(bbox[5]) + x4 = int(bbox[6]) + y4 = int(bbox[7]) + + det_conf = float(scores[idxs[0][j]]) + + det_rect = detect_rect() + det_rect.curr_frame = idx + det_rect.curr_rect = np.array([x1, y1, x2, y2]) + det_rect.next_rect = np.array([x3, y3, x4, y4]) + det_rect.conf = det_conf + + if det_rect.conf > confidence_threshold: + det_list_all[det_rect.curr_frame - 1].append(det_rect) +# if i == 0: + if i == int(img_len / 2): + for j in range(len(det_list_all[i])): + det_list_all[i][j].id = j + 1 + max_id = max(max_id, j + 1) + track = tracklet(det_list_all[i][j]) + tracklet_all.append(track) + continue + + matches, unmatched1, unmatched2 = track_det_match(tracklet_all, det_list_all[i], IOU_threshold) + + for j in range(len(matches)): + det_list_all[i][matches[j][1]].id = tracklet_all[matches[j][0]].id + det_list_all[i][matches[j][1]].id = tracklet_all[matches[j][0]].id + tracklet_all[matches[j][0]].add_rect(det_list_all[i][matches[j][1]]) + + delete_track_list = [] + for j in range(len(unmatched1)): + tracklet_all[unmatched1[j]].no_match_frame = tracklet_all[unmatched1[j]].no_match_frame + 1 + if(tracklet_all[unmatched1[j]].no_match_frame >= retention_threshold): + delete_track_list.append(unmatched1[j]) + + origin_index = set([k for k in range(len(tracklet_all))]) + delete_index = set(delete_track_list) + left_index = list(origin_index - delete_index) + tracklet_all = [tracklet_all[k] for k in left_index] + + + 
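+        # Example: if max_id is 7 and two detections remain unmatched here, the
+        # loop below assigns them ids 8 and 9 and advances max_id accordingly.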
for j in range(len(unmatched2)): + det_list_all[i][unmatched2[j]].id = max_id + 1 + max_id = max_id + 1 + track = tracklet(det_list_all[i][unmatched2[j]]) + tracklet_all.append(track) + + + + #**************visualize tracking result and save evaluate file**************** + + fout_tracking = open(os.path.join(model_dir, 'results', cur_dataset + '.txt'), 'w') + + save_img_dir = os.path.join(model_dir, 'results', cur_dataset) + if not os.path.exists(save_img_dir): + os.makedirs(save_img_dir) + + out_video = os.path.join(model_dir, 'results', cur_dataset + '.mp4') + videoWriter = cv2.VideoWriter(out_video, cv2.VideoWriter_fourcc('m', 'p', '4', 'v'), fps, (img_width, img_height)) + + id_dict = {} + + + for i in range((int(img_len / 2)), img_len): + print('saving: ', i) + img = cv2.imread(img_list[i]) + + for j in range(len(det_list_all[i])): + + x1, y1, x2, y2 = det_list_all[i][j].curr_rect.astype(int) + trace_id = det_list_all[i][j].id + + id_dict.setdefault(str(trace_id),[]).append((int((x1+x2)/2), y2)) + draw_trace_id = str(trace_id) + draw_caption(img, (x1, y1, x2, y2), draw_trace_id, color=color_list[trace_id % len(color_list)]) + cv2.rectangle(img, (x1, y1), (x2, y2), color=color_list[trace_id % len(color_list)], thickness=2) + + trace_len = len(id_dict[str(trace_id)]) + trace_len_draw = min(max_draw_len, trace_len) + + for k in range(trace_len_draw - draw_interval): + if(k % draw_interval == 0): + draw_point1 = id_dict[str(trace_id)][trace_len - k - 1] + draw_point2 = id_dict[str(trace_id)][trace_len - k - 1 - draw_interval] + cv2.line(img, draw_point1, draw_point2, color=color_list[trace_id % len(color_list)], thickness=2) + + fout_tracking.write(str(i+1) + ',' + str(trace_id) + ',' + str(x1) + ',' + str(y1) + ',' + str(x2 - x1) + ',' + str(y2 - y1) + ',-1,-1,-1,-1\n') + + cv2.imwrite(os.path.join(save_img_dir, str(i + 1).zfill(6) + '.jpg'), img) + videoWriter.write(img) +# cv2.waitKey(0) + + fout_tracking.close() + videoWriter.release() + +def run_from_train(model_dir, root_path): + if not os.path.exists(os.path.join(model_dir, 'results')): + os.makedirs(os.path.join(model_dir, 'results')) + retinanet = torch.load(os.path.join(model_dir, 'model_final.pt')) + + use_gpu = True + + if use_gpu: retinanet = retinanet.cuda() + + retinanet.eval() + + for seq_num in [2, 4, 5, 9, 10, 11, 13]: + run_each_dataset(model_dir, retinanet, root_path, 'train', 'MOT17-{:02d}'.format(seq_num)) + for seq_num in [1, 3, 6, 7, 8, 12, 14]: + run_each_dataset(model_dir, retinanet, root_path, 'test', 'MOT17-{:02d}'.format(seq_num)) + +def main(args=None): + parser = argparse.ArgumentParser(description='Simple script for testing a CTracker network.') + parser.add_argument('--dataset_path', default='/dockerdata/home/jeromepeng/data/MOT/MOT17/', type=str, help='Dataset path, location of the images sequence.') + parser.add_argument('--model_dir', default='./trained_model/', help='Path to model (.pt) file.') + parser.add_argument('--model_path', default='./trained_model/model_final.pth', help='Path to model (.pt) file.') + parser = parser.parse_args(args) + + if not os.path.exists(os.path.join(parser.model_dir, 'results')): + os.makedirs(os.path.join(parser.model_dir, 'results')) + + retinanet = model.resnet50(num_classes=1, pretrained=True) +# retinanet_save = torch.load(os.path.join(parser.model_dir, 'model_final.pth')) + retinanet_save = torch.load(os.path.join(parser.model_path)) + + # rename moco pre-trained keys + state_dict = retinanet_save.state_dict() + for k in list(state_dict.keys()): + # retain only 
encoder up to before the embedding layer + if k.startswith('module.'): + # remove prefix + state_dict[k[len("module."):]] = state_dict[k] + # delete renamed or unused k + del state_dict[k] + + retinanet.load_state_dict(state_dict) + + use_gpu = True + + if use_gpu: retinanet = retinanet.cuda() + + retinanet.eval() + + for seq_num in [2, 4, 5, 9, 10, 11, 13]: + run_each_dataset(parser.model_dir, retinanet, parser.dataset_path, 'train', 'MOT17-{:02d}'.format(seq_num)) +# for seq_num in [1, 3, 6, 7, 8, 12, 14]: +# run_each_dataset(parser.model_dir, retinanet, parser.dataset_path, 'test', 'MOT17-{:02d}'.format(seq_num)) + +if __name__ == '__main__': + main() diff --git a/tracking/docker-build-context/byte_track/tutorials/ctracker/test_byte.py b/tracking/docker-build-context/byte_track/tutorials/ctracker/test_byte.py new file mode 100644 index 0000000000000000000000000000000000000000..bbb8a53b7a98de5e1c4c5fcffa1a546cc36f0e4b --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/ctracker/test_byte.py @@ -0,0 +1,156 @@ +import numpy as np +import torchvision +import time +import math +import os +import copy +import pdb +import argparse +import sys +import cv2 +import skimage.io +import skimage.transform +import skimage.color +import skimage +import torch +import model + +from torch.utils.data import Dataset, DataLoader +from torchvision import datasets, models, transforms +from dataloader import CSVDataset, collater, Resizer, AspectRatioBasedSampler, Augmenter, UnNormalizer, Normalizer, RGB_MEAN, RGB_STD +from scipy.optimize import linear_sum_assignment +from tracker import BYTETracker + + +def write_results(filename, results): + save_format = '{frame},{id},{x1},{y1},{w},{h},{s},-1,-1,-1\n' + with open(filename, 'w') as f: + for frame_id, tlwhs, track_ids, scores in results: + for tlwh, track_id, score in zip(tlwhs, track_ids, scores): + if track_id < 0: + continue + x1, y1, w, h = tlwh + line = save_format.format(frame=frame_id, id=track_id, x1=round(x1, 1), y1=round(y1, 1), w=round(w, 1), h=round(h, 1), s=round(score, 2)) + f.write(line) + +def write_results_no_score(filename, results): + save_format = '{frame},{id},{x1},{y1},{w},{h},-1,-1,-1,-1\n' + with open(filename, 'w') as f: + for frame_id, tlwhs, track_ids in results: + for tlwh, track_id in zip(tlwhs, track_ids): + if track_id < 0: + continue + x1, y1, w, h = tlwh + line = save_format.format(frame=frame_id, id=track_id, x1=round(x1, 1), y1=round(y1, 1), w=round(w, 1), h=round(h, 1)) + f.write(line) + +def run_each_dataset(model_dir, retinanet, dataset_path, subset, cur_dataset): + print(cur_dataset) + + img_list = os.listdir(os.path.join(dataset_path, subset, cur_dataset, 'img1')) + img_list = [os.path.join(dataset_path, subset, cur_dataset, 'img1', _) for _ in img_list if ('jpg' in _) or ('png' in _)] + img_list = sorted(img_list) + + img_len = len(img_list) + last_feat = None + + confidence_threshold = 0.6 + IOU_threshold = 0.5 + retention_threshold = 10 + + det_list_all = [] + tracklet_all = [] + results = [] + max_id = 0 + max_draw_len = 100 + draw_interval = 5 + img_width = 1920 + img_height = 1080 + fps = 30 + + tracker = BYTETracker() + + for idx in range((int(img_len / 2)), img_len + 1): + i = idx - 1 + print('tracking: ', i) + with torch.no_grad(): + data_path1 = img_list[min(idx, img_len - 1)] + img_origin1 = skimage.io.imread(data_path1) + img_h, img_w, _ = img_origin1.shape + img_height, img_width = img_h, img_w + resize_h, resize_w = math.ceil(img_h / 32) * 32, math.ceil(img_w / 32) * 32 + img1 = 
np.zeros((resize_h, resize_w, 3), dtype=img_origin1.dtype) + img1[:img_h, :img_w, :] = img_origin1 + img1 = (img1.astype(np.float32) / 255.0 - np.array([[RGB_MEAN]])) / np.array([[RGB_STD]]) + img1 = torch.from_numpy(img1).permute(2, 0, 1).view(1, 3, resize_h, resize_w) + scores, transformed_anchors, last_feat = retinanet(img1.cuda().float(), last_feat=last_feat) + + if idx > (int(img_len / 2)): + idxs = np.where(scores > 0.1) + # run tracking + online_targets = tracker.update(transformed_anchors[idxs[0], :4], scores[idxs[0]]) + online_tlwhs = [] + online_ids = [] + online_scores = [] + for t in online_targets: + tlwh = t.tlwh + tid = t.track_id + online_tlwhs.append(tlwh) + online_ids.append(tid) + online_scores.append(t.score) + results.append((idx, online_tlwhs, online_ids, online_scores)) + + fout_tracking = os.path.join(model_dir, 'results', cur_dataset + '.txt') + write_results(fout_tracking, results) + + + +def main(args=None): + parser = argparse.ArgumentParser(description='Simple script for testing a CTracker network.') + parser.add_argument('--dataset_path', default='/dockerdata/home/jeromepeng/data/MOT/MOT17/', type=str, + help='Dataset path, location of the images sequence.') + parser.add_argument('--model_dir', default='./trained_model/', help='Path to model (.pt) file.') + parser.add_argument('--model_path', default='./trained_model/model_final.pth', help='Path to model (.pt) file.') + parser.add_argument('--seq_nums', default=0, type=int) + + parser = parser.parse_args(args) + + if not os.path.exists(os.path.join(parser.model_dir, 'results')): + os.makedirs(os.path.join(parser.model_dir, 'results')) + + retinanet = model.resnet50(num_classes=1, pretrained=True) + # retinanet_save = torch.load(os.path.join(parser.model_dir, 'model_final.pth')) + retinanet_save = torch.load(os.path.join(parser.model_path)) + + # rename moco pre-trained keys + state_dict = retinanet_save.state_dict() + for k in list(state_dict.keys()): + # retain only encoder up to before the embedding layer + if k.startswith('module.'): + # remove prefix + state_dict[k[len("module."):]] = state_dict[k] + # delete renamed or unused k + del state_dict[k] + + retinanet.load_state_dict(state_dict) + + use_gpu = True + + if use_gpu: retinanet = retinanet.cuda() + + retinanet.eval() + seq_nums = [] + if parser.seq_nums > 0: + seq_nums.append(parser.seq_nums) + else: + seq_nums = [2, 4, 5, 9, 10, 11, 13] + + for seq_num in seq_nums: + run_each_dataset(parser.model_dir, retinanet, parser.dataset_path, 'train', 'MOT17-{:02d}'.format(seq_num)) + + +# for seq_num in [1, 3, 6, 7, 8, 12, 14]: +# run_each_dataset(parser.model_dir, retinanet, parser.dataset_path, 'test', 'MOT17-{:02d}'.format(seq_num)) + +if __name__ == '__main__': + main() diff --git a/tracking/docker-build-context/byte_track/tutorials/fairmot/README.md b/tracking/docker-build-context/byte_track/tutorials/fairmot/README.md new file mode 100644 index 0000000000000000000000000000000000000000..28d18577464ca29dccd75144d19e9e1810e60519 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/fairmot/README.md @@ -0,0 +1,20 @@ +# FairMOT + +Step1. git clone https://github.com/ifzhang/FairMOT.git + + +Step2. replace https://github.com/ifzhang/FairMOT/blob/master/src/lib/tracker/multitracker.py + + +Step3. 
run motion + reid tracker using tracker.py (set --match_thres 0.4), run BYTE tracker using byte_tracker.py (set --match_thres 0.8) + +run BYTE tracker example: +``` +python3 track_half.py mot --load_model ../exp/mot/mot17_half_dla34/model_last.pth --match_thres 0.8 +``` + + +## Notes +byte_tracker: only motion + +tracker: motion + reid diff --git a/tracking/docker-build-context/byte_track/tutorials/fairmot/byte_tracker.py b/tracking/docker-build-context/byte_track/tutorials/fairmot/byte_tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..7bb384dcb7e09c3f17f87860ee5d30f48e18ba9d --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/fairmot/byte_tracker.py @@ -0,0 +1,403 @@ +import numpy as np +from collections import deque +import itertools +import os +import os.path as osp +import time +import torch +import cv2 +import torch.nn.functional as F + +from models.model import create_model, load_model +from models.decode import mot_decode +from tracking_utils.utils import * +from tracking_utils.log import logger +from tracking_utils.kalman_filter import KalmanFilter +from models import * +from tracker import matching +from .basetrack import BaseTrack, TrackState +from utils.post_process import ctdet_post_process +from utils.image import get_affine_transform +from models.utils import _tranpose_and_gather_feat + +class STrack(BaseTrack): + shared_kalman = KalmanFilter() + def __init__(self, tlwh, score): + + # wait activate + self._tlwh = np.asarray(tlwh, dtype=np.float) + self.kalman_filter = None + self.mean, self.covariance = None, None + self.is_activated = False + + self.score = score + self.tracklet_len = 0 + + def predict(self): + mean_state = self.mean.copy() + if self.state != TrackState.Tracked: + mean_state[7] = 0 + self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance) + + @staticmethod + def multi_predict(stracks): + if len(stracks) > 0: + multi_mean = np.asarray([st.mean.copy() for st in stracks]) + multi_covariance = np.asarray([st.covariance for st in stracks]) + for i, st in enumerate(stracks): + if st.state != TrackState.Tracked: + multi_mean[i][7] = 0 + multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(multi_mean, multi_covariance) + for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)): + stracks[i].mean = mean + stracks[i].covariance = cov + + def activate(self, kalman_filter, frame_id): + """Start a new tracklet""" + self.kalman_filter = kalman_filter + self.track_id = self.next_id() + self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh)) + + self.tracklet_len = 0 + self.state = TrackState.Tracked + if frame_id == 1: + self.is_activated = True + #self.is_activated = True + self.frame_id = frame_id + self.start_frame = frame_id + + def re_activate(self, new_track, frame_id, new_id=False): + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) + ) + + self.tracklet_len = 0 + self.state = TrackState.Tracked + self.is_activated = True + self.frame_id = frame_id + if new_id: + self.track_id = self.next_id() + self.score = new_track.score + + def update(self, new_track, frame_id): + """ + Update a matched track + :type new_track: STrack + :type frame_id: int + :type update_feature: bool + :return: + """ + self.frame_id = frame_id + self.tracklet_len += 1 + + new_tlwh = new_track.tlwh + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, 
self.tlwh_to_xyah(new_tlwh)) + self.state = TrackState.Tracked + self.is_activated = True + + self.score = new_track.score + + @property + # @jit(nopython=True) + def tlwh(self): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. + """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + # @jit(nopython=True) + def tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_xyah(tlwh): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. + """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self): + return self.tlwh_to_xyah(self.tlwh) + + @staticmethod + # @jit(nopython=True) + def tlbr_to_tlwh(tlbr): + ret = np.asarray(tlbr).copy() + ret[2:] -= ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_tlbr(tlwh): + ret = np.asarray(tlwh).copy() + ret[2:] += ret[:2] + return ret + + def __repr__(self): + return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame) + + +class BYTETracker(object): + def __init__(self, opt, frame_rate=30): + self.opt = opt + if opt.gpus[0] >= 0: + opt.device = torch.device('cuda') + else: + opt.device = torch.device('cpu') + print('Creating model...') + self.model = create_model(opt.arch, opt.heads, opt.head_conv) + self.model = load_model(self.model, opt.load_model) + self.model = self.model.to(opt.device) + self.model.eval() + + self.tracked_stracks = [] # type: list[STrack] + self.lost_stracks = [] # type: list[STrack] + self.removed_stracks = [] # type: list[STrack] + + self.frame_id = 0 + #self.det_thresh = opt.conf_thres + self.det_thresh = opt.conf_thres + 0.1 + self.buffer_size = int(frame_rate / 30.0 * opt.track_buffer) + self.max_time_lost = self.buffer_size + self.max_per_image = opt.K + self.mean = np.array(opt.mean, dtype=np.float32).reshape(1, 1, 3) + self.std = np.array(opt.std, dtype=np.float32).reshape(1, 1, 3) + + self.kalman_filter = KalmanFilter() + + def post_process(self, dets, meta): + dets = dets.detach().cpu().numpy() + dets = dets.reshape(1, -1, dets.shape[2]) + dets = ctdet_post_process( + dets.copy(), [meta['c']], [meta['s']], + meta['out_height'], meta['out_width'], self.opt.num_classes) + for j in range(1, self.opt.num_classes + 1): + dets[0][j] = np.array(dets[0][j], dtype=np.float32).reshape(-1, 5) + return dets[0] + + def merge_outputs(self, detections): + results = {} + for j in range(1, self.opt.num_classes + 1): + results[j] = np.concatenate( + [detection[j] for detection in detections], axis=0).astype(np.float32) + + scores = np.hstack( + [results[j][:, 4] for j in range(1, self.opt.num_classes + 1)]) + if len(scores) > self.max_per_image: + kth = len(scores) - self.max_per_image + thresh = np.partition(scores, kth)[kth] + for j in range(1, self.opt.num_classes + 1): + keep_inds = (results[j][:, 4] >= thresh) + results[j] = results[j][keep_inds] + return results + + def update(self, im_blob, img0): + self.frame_id += 1 + activated_starcks = [] + refind_stracks = [] + lost_stracks = [] + removed_stracks = [] + + width = img0.shape[1] + height = img0.shape[0] + inp_height = im_blob.shape[2] + inp_width = im_blob.shape[3] + c = np.array([width / 
2., height / 2.], dtype=np.float32) + s = max(float(inp_width) / float(inp_height) * height, width) * 1.0 + meta = {'c': c, 's': s, + 'out_height': inp_height // self.opt.down_ratio, + 'out_width': inp_width // self.opt.down_ratio} + + ''' Step 1: Network forward, get detections & embeddings''' + with torch.no_grad(): + output = self.model(im_blob)[-1] + hm = output['hm'].sigmoid_() + wh = output['wh'] + + reg = output['reg'] if self.opt.reg_offset else None + dets, inds = mot_decode(hm, wh, reg=reg, ltrb=self.opt.ltrb, K=self.opt.K) + + dets = self.post_process(dets, meta) + dets = self.merge_outputs([dets])[1] + + remain_inds = dets[:, 4] > self.opt.conf_thres + inds_low = dets[:, 4] > 0.2 + inds_high = dets[:, 4] < self.opt.conf_thres + inds_second = np.logical_and(inds_low, inds_high) + dets_second = dets[inds_second] + dets = dets[remain_inds] + + if len(dets) > 0: + '''Detections''' + detections = [STrack(STrack.tlbr_to_tlwh(tlbrs[:4]), tlbrs[4]) for + tlbrs in dets[:, :5]] + else: + detections = [] + + ''' Add newly detected tracklets to tracked_stracks''' + unconfirmed = [] + tracked_stracks = [] # type: list[STrack] + for track in self.tracked_stracks: + if not track.is_activated: + unconfirmed.append(track) + else: + tracked_stracks.append(track) + + ''' Step 2: First association, with IOU''' + strack_pool = joint_stracks(tracked_stracks, self.lost_stracks) + # Predict the current location with KF + STrack.multi_predict(strack_pool) + dists = matching.iou_distance(strack_pool, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=self.opt.match_thres) + + for itracked, idet in matches: + track = strack_pool[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + track.update(detections[idet], self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + # association the untrack to the low score detections + if len(dets_second) > 0: + '''Detections''' + detections_second = [STrack(STrack.tlbr_to_tlwh(tlbrs[:4]), tlbrs[4]) for + tlbrs in dets_second[:, :5]] + else: + detections_second = [] + r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked] + dists = matching.iou_distance(r_tracked_stracks, detections_second) + matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.4) + for itracked, idet in matches: + track = r_tracked_stracks[itracked] + det = detections_second[idet] + if track.state == TrackState.Tracked: + track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + for it in u_track: + track = r_tracked_stracks[it] + if not track.state == TrackState.Lost: + track.mark_lost() + lost_stracks.append(track) + + '''Deal with unconfirmed tracks, usually tracks with only one beginning frame''' + detections = [detections[i] for i in u_detection] + dists = matching.iou_distance(unconfirmed, detections) + matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7) + for itracked, idet in matches: + unconfirmed[itracked].update(detections[idet], self.frame_id) + activated_starcks.append(unconfirmed[itracked]) + for it in u_unconfirmed: + track = unconfirmed[it] + track.mark_removed() + removed_stracks.append(track) + + """ Step 4: Init new stracks""" + for inew in u_detection: + track = detections[inew] + if track.score < self.det_thresh: + continue + 
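+                # BYTE never starts a track from a low-score box: an unmatched
+                # detection seeds a new tracklet only if its score clears
+                # det_thresh (conf_thres + 0.1, set in __init__).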
track.activate(self.kalman_filter, self.frame_id) + activated_starcks.append(track) + """ Step 5: Update state""" + for track in self.lost_stracks: + if self.frame_id - track.end_frame > self.max_time_lost: + track.mark_removed() + removed_stracks.append(track) + + # print('Ramained match {} s'.format(t4-t3)) + + self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked] + self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks) + self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks) + self.lost_stracks.extend(lost_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks) + self.removed_stracks.extend(removed_stracks) + self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks) + #self.tracked_stracks = remove_fp_stracks(self.tracked_stracks) + # get scores of lost tracks + output_stracks = [track for track in self.tracked_stracks if track.is_activated] + + logger.debug('===========Frame {}=========='.format(self.frame_id)) + logger.debug('Activated: {}'.format([track.track_id for track in activated_starcks])) + logger.debug('Refind: {}'.format([track.track_id for track in refind_stracks])) + logger.debug('Lost: {}'.format([track.track_id for track in lost_stracks])) + logger.debug('Removed: {}'.format([track.track_id for track in removed_stracks])) + + return output_stracks + + +def joint_stracks(tlista, tlistb): + exists = {} + res = [] + for t in tlista: + exists[t.track_id] = 1 + res.append(t) + for t in tlistb: + tid = t.track_id + if not exists.get(tid, 0): + exists[tid] = 1 + res.append(t) + return res + + +def sub_stracks(tlista, tlistb): + stracks = {} + for t in tlista: + stracks[t.track_id] = t + for t in tlistb: + tid = t.track_id + if stracks.get(tid, 0): + del stracks[tid] + return list(stracks.values()) + + +def remove_duplicate_stracks(stracksa, stracksb): + pdist = matching.iou_distance(stracksa, stracksb) + pairs = np.where(pdist < 0.15) + dupa, dupb = list(), list() + for p, q in zip(*pairs): + timep = stracksa[p].frame_id - stracksa[p].start_frame + timeq = stracksb[q].frame_id - stracksb[q].start_frame + if timep > timeq: + dupb.append(q) + else: + dupa.append(p) + resa = [t for i, t in enumerate(stracksa) if not i in dupa] + resb = [t for i, t in enumerate(stracksb) if not i in dupb] + return resa, resb + + +def remove_fp_stracks(stracksa, n_frame=10): + remain = [] + for t in stracksa: + score_5 = t.score_list[-n_frame:] + score_5 = np.array(score_5, dtype=np.float32) + index = score_5 < 0.45 + num = np.sum(index) + if num < n_frame: + remain.append(t) + return remain diff --git a/tracking/docker-build-context/byte_track/tutorials/fairmot/tracker.py b/tracking/docker-build-context/byte_track/tutorials/fairmot/tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..b3af90ee066585c846735914cd7bb50ede767e2d --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/fairmot/tracker.py @@ -0,0 +1,465 @@ +import numpy as np +from collections import deque +import itertools +import os +import os.path as osp +import time +import torch +import cv2 +import torch.nn.functional as F + +from models.model import create_model, load_model +from models.decode import mot_decode +from tracking_utils.utils import * +from tracking_utils.log import logger +from tracking_utils.kalman_filter import KalmanFilter +from models import * 
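+# Note: unlike byte_tracker.py (motion/IoU only), this variant also carries
+# appearance: each STrack keeps an EMA-smoothed ReID embedding, and the first
+# association fuses embedding distance with Kalman motion gating
+# (matching.fuse_motion) before falling back to IoU matching.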
+from tracker import matching +from .basetrack import BaseTrack, TrackState +from utils.post_process import ctdet_post_process +from utils.image import get_affine_transform +from models.utils import _tranpose_and_gather_feat + +class STrack(BaseTrack): + shared_kalman = KalmanFilter() + def __init__(self, tlwh, score, temp_feat, buffer_size=30): + + # wait activate + self._tlwh = np.asarray(tlwh, dtype=np.float) + self.kalman_filter = None + self.mean, self.covariance = None, None + self.is_activated = False + + self.score = score + self.score_list = [] + self.tracklet_len = 0 + + self.smooth_feat = None + self.update_features(temp_feat) + self.features = deque([], maxlen=buffer_size) + self.alpha = 0.9 + + def update_features(self, feat): + feat /= np.linalg.norm(feat) + self.curr_feat = feat + if self.smooth_feat is None: + self.smooth_feat = feat + else: + self.smooth_feat = self.alpha * self.smooth_feat + (1 - self.alpha) * feat + self.features.append(feat) + self.smooth_feat /= np.linalg.norm(self.smooth_feat) + + def predict(self): + mean_state = self.mean.copy() + if self.state != TrackState.Tracked: + mean_state[7] = 0 + self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance) + + @staticmethod + def multi_predict(stracks): + if len(stracks) > 0: + multi_mean = np.asarray([st.mean.copy() for st in stracks]) + multi_covariance = np.asarray([st.covariance for st in stracks]) + for i, st in enumerate(stracks): + if st.state != TrackState.Tracked: + multi_mean[i][7] = 0 + multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(multi_mean, multi_covariance) + for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)): + stracks[i].mean = mean + stracks[i].covariance = cov + + def activate(self, kalman_filter, frame_id): + """Start a new tracklet""" + self.kalman_filter = kalman_filter + self.track_id = self.next_id() + self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh)) + + self.tracklet_len = 0 + self.state = TrackState.Tracked + if frame_id == 1: + self.is_activated = True + #self.is_activated = True + self.frame_id = frame_id + self.start_frame = frame_id + self.score_list.append(self.score) + + def re_activate(self, new_track, frame_id, new_id=False): + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) + ) + + self.update_features(new_track.curr_feat) + self.tracklet_len = 0 + self.state = TrackState.Tracked + self.is_activated = True + self.frame_id = frame_id + if new_id: + self.track_id = self.next_id() + self.score = new_track.score + self.score_list.append(self.score) + + def update(self, new_track, frame_id, update_feature=True): + """ + Update a matched track + :type new_track: STrack + :type frame_id: int + :type update_feature: bool + :return: + """ + self.frame_id = frame_id + self.tracklet_len += 1 + + new_tlwh = new_track.tlwh + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh)) + self.state = TrackState.Tracked + self.is_activated = True + + self.score = new_track.score + self.score_list.append(self.score) + if update_feature: + self.update_features(new_track.curr_feat) + + @property + # @jit(nopython=True) + def tlwh(self): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. 
+ """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + # @jit(nopython=True) + def tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_xyah(tlwh): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. + """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self): + return self.tlwh_to_xyah(self.tlwh) + + @staticmethod + # @jit(nopython=True) + def tlbr_to_tlwh(tlbr): + ret = np.asarray(tlbr).copy() + ret[2:] -= ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_tlbr(tlwh): + ret = np.asarray(tlwh).copy() + ret[2:] += ret[:2] + return ret + + def __repr__(self): + return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame) + + +class JDETracker(object): + def __init__(self, opt, frame_rate=30): + self.opt = opt + if opt.gpus[0] >= 0: + opt.device = torch.device('cuda') + else: + opt.device = torch.device('cpu') + print('Creating model...') + self.model = create_model(opt.arch, opt.heads, opt.head_conv) + self.model = load_model(self.model, opt.load_model) + self.model = self.model.to(opt.device) + self.model.eval() + + self.tracked_stracks = [] # type: list[STrack] + self.lost_stracks = [] # type: list[STrack] + self.removed_stracks = [] # type: list[STrack] + + self.frame_id = 0 + #self.det_thresh = opt.conf_thres + self.det_thresh = opt.conf_thres + 0.1 + self.buffer_size = int(frame_rate / 30.0 * opt.track_buffer) + self.max_time_lost = self.buffer_size + self.max_per_image = opt.K + self.mean = np.array(opt.mean, dtype=np.float32).reshape(1, 1, 3) + self.std = np.array(opt.std, dtype=np.float32).reshape(1, 1, 3) + + self.kalman_filter = KalmanFilter() + + def post_process(self, dets, meta): + dets = dets.detach().cpu().numpy() + dets = dets.reshape(1, -1, dets.shape[2]) + dets = ctdet_post_process( + dets.copy(), [meta['c']], [meta['s']], + meta['out_height'], meta['out_width'], self.opt.num_classes) + for j in range(1, self.opt.num_classes + 1): + dets[0][j] = np.array(dets[0][j], dtype=np.float32).reshape(-1, 5) + return dets[0] + + def merge_outputs(self, detections): + results = {} + for j in range(1, self.opt.num_classes + 1): + results[j] = np.concatenate( + [detection[j] for detection in detections], axis=0).astype(np.float32) + + scores = np.hstack( + [results[j][:, 4] for j in range(1, self.opt.num_classes + 1)]) + if len(scores) > self.max_per_image: + kth = len(scores) - self.max_per_image + thresh = np.partition(scores, kth)[kth] + for j in range(1, self.opt.num_classes + 1): + keep_inds = (results[j][:, 4] >= thresh) + results[j] = results[j][keep_inds] + return results + + def update(self, im_blob, img0): + self.frame_id += 1 + activated_starcks = [] + refind_stracks = [] + lost_stracks = [] + removed_stracks = [] + + width = img0.shape[1] + height = img0.shape[0] + inp_height = im_blob.shape[2] + inp_width = im_blob.shape[3] + c = np.array([width / 2., height / 2.], dtype=np.float32) + s = max(float(inp_width) / float(inp_height) * height, width) * 1.0 + meta = {'c': c, 's': s, + 'out_height': inp_height // self.opt.down_ratio, + 'out_width': inp_width // self.opt.down_ratio} + + ''' Step 1: Network forward, get 
detections & embeddings''' + with torch.no_grad(): + output = self.model(im_blob)[-1] + hm = output['hm'].sigmoid_() + wh = output['wh'] + id_feature = output['id'] + id_feature = F.normalize(id_feature, dim=1) + + reg = output['reg'] if self.opt.reg_offset else None + dets, inds = mot_decode(hm, wh, reg=reg, ltrb=self.opt.ltrb, K=self.opt.K) + id_feature = _tranpose_and_gather_feat(id_feature, inds) + id_feature = id_feature.squeeze(0) + id_feature = id_feature.cpu().numpy() + + dets = self.post_process(dets, meta) + dets = self.merge_outputs([dets])[1] + + remain_inds = dets[:, 4] > self.opt.conf_thres + inds_low = dets[:, 4] > 0.2 + #inds_low = dets[:, 4] > self.opt.conf_thres + inds_high = dets[:, 4] < self.opt.conf_thres + inds_second = np.logical_and(inds_low, inds_high) + dets_second = dets[inds_second] + id_feature_second = id_feature[inds_second] + dets = dets[remain_inds] + id_feature = id_feature[remain_inds] + + # vis + ''' + for i in range(0, dets.shape[0]): + bbox = dets[i][0:4] + cv2.rectangle(img0, (bbox[0], bbox[1]), + (bbox[2], bbox[3]), + (0, 255, 0), 2) + cv2.imshow('dets', img0) + cv2.waitKey(0) + id0 = id0-1 + ''' + + if len(dets) > 0: + '''Detections''' + detections = [STrack(STrack.tlbr_to_tlwh(tlbrs[:4]), tlbrs[4], f, 30) for + (tlbrs, f) in zip(dets[:, :5], id_feature)] + else: + detections = [] + + ''' Add newly detected tracklets to tracked_stracks''' + unconfirmed = [] + tracked_stracks = [] # type: list[STrack] + for track in self.tracked_stracks: + if not track.is_activated: + unconfirmed.append(track) + else: + tracked_stracks.append(track) + + ''' Step 2: First association, with embedding''' + strack_pool = joint_stracks(tracked_stracks, self.lost_stracks) + # Predict the current location with KF + STrack.multi_predict(strack_pool) + dists = matching.embedding_distance(strack_pool, detections) + #dists = matching.fuse_iou(dists, strack_pool, detections) + #dists = matching.iou_distance(strack_pool, detections) + dists = matching.fuse_motion(self.kalman_filter, dists, strack_pool, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=self.opt.match_thres) + + for itracked, idet in matches: + track = strack_pool[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + track.update(detections[idet], self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + ''' Step 3: Second association, with IOU''' + detections = [detections[i] for i in u_detection] + r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked] + dists = matching.iou_distance(r_tracked_stracks, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.5) + + for itracked, idet in matches: + track = r_tracked_stracks[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + # association the untrack to the low score detections + if len(dets_second) > 0: + '''Detections''' + detections_second = [STrack(STrack.tlbr_to_tlwh(tlbrs[:4]), tlbrs[4], f, 30) for + (tlbrs, f) in zip(dets_second[:, :5], id_feature_second)] + else: + detections_second = [] + second_tracked_stracks = [r_tracked_stracks[i] for i in u_track if r_tracked_stracks[i].state == TrackState.Tracked] + dists = 
matching.iou_distance(second_tracked_stracks, detections_second) + matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.4) + for itracked, idet in matches: + track = second_tracked_stracks[itracked] + det = detections_second[idet] + if track.state == TrackState.Tracked: + track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + for it in u_track: + #track = r_tracked_stracks[it] + track = second_tracked_stracks[it] + if not track.state == TrackState.Lost: + track.mark_lost() + lost_stracks.append(track) + + '''Deal with unconfirmed tracks, usually tracks with only one beginning frame''' + detections = [detections[i] for i in u_detection] + dists = matching.iou_distance(unconfirmed, detections) + matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7) + for itracked, idet in matches: + unconfirmed[itracked].update(detections[idet], self.frame_id) + activated_starcks.append(unconfirmed[itracked]) + for it in u_unconfirmed: + track = unconfirmed[it] + track.mark_removed() + removed_stracks.append(track) + + """ Step 4: Init new stracks""" + for inew in u_detection: + track = detections[inew] + if track.score < self.det_thresh: + continue + track.activate(self.kalman_filter, self.frame_id) + activated_starcks.append(track) + """ Step 5: Update state""" + for track in self.lost_stracks: + if self.frame_id - track.end_frame > self.max_time_lost: + track.mark_removed() + removed_stracks.append(track) + + # print('Ramained match {} s'.format(t4-t3)) + + self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked] + self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks) + self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks) + self.lost_stracks.extend(lost_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks) + self.removed_stracks.extend(removed_stracks) + self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks) + #self.tracked_stracks = remove_fp_stracks(self.tracked_stracks) + # get scores of lost tracks + output_stracks = [track for track in self.tracked_stracks if track.is_activated] + + logger.debug('===========Frame {}=========='.format(self.frame_id)) + logger.debug('Activated: {}'.format([track.track_id for track in activated_starcks])) + logger.debug('Refind: {}'.format([track.track_id for track in refind_stracks])) + logger.debug('Lost: {}'.format([track.track_id for track in lost_stracks])) + logger.debug('Removed: {}'.format([track.track_id for track in removed_stracks])) + + return output_stracks + + +def joint_stracks(tlista, tlistb): + exists = {} + res = [] + for t in tlista: + exists[t.track_id] = 1 + res.append(t) + for t in tlistb: + tid = t.track_id + if not exists.get(tid, 0): + exists[tid] = 1 + res.append(t) + return res + + +def sub_stracks(tlista, tlistb): + stracks = {} + for t in tlista: + stracks[t.track_id] = t + for t in tlistb: + tid = t.track_id + if stracks.get(tid, 0): + del stracks[tid] + return list(stracks.values()) + + +def remove_duplicate_stracks(stracksa, stracksb): + pdist = matching.iou_distance(stracksa, stracksb) + pairs = np.where(pdist < 0.15) + dupa, dupb = list(), list() + for p, q in zip(*pairs): + timep = stracksa[p].frame_id - stracksa[p].start_frame + timeq = 
stracksb[q].frame_id - stracksb[q].start_frame + if timep > timeq: + dupb.append(q) + else: + dupa.append(p) + resa = [t for i, t in enumerate(stracksa) if not i in dupa] + resb = [t for i, t in enumerate(stracksb) if not i in dupb] + return resa, resb + + +def remove_fp_stracks(stracksa, n_frame=10): + remain = [] + for t in stracksa: + score_5 = t.score_list[-n_frame:] + score_5 = np.array(score_5, dtype=np.float32) + index = score_5 < 0.45 + num = np.sum(index) + if num < n_frame: + remain.append(t) + return remain diff --git a/tracking/docker-build-context/byte_track/tutorials/jde/README.md b/tracking/docker-build-context/byte_track/tutorials/jde/README.md new file mode 100644 index 0000000000000000000000000000000000000000..cd915a2225a09b013b2c3ab55b5b2d7e19c66ec0 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/jde/README.md @@ -0,0 +1,19 @@ +# JDE + +Step1. git clone https://github.com/Zhongdao/Towards-Realtime-MOT.git + + +Step2. replace https://github.com/Zhongdao/Towards-Realtime-MOT/blob/master/tracker/multitracker.py + +Step3. download JDE model trained on MIX and MOT17_half (mix_mot17_half_jde.pt): [google](https://drive.google.com/file/d/1jUiIbaHFf75Jq6thOGI3CPygMMBy6850/view?usp=sharing), [baidu(code:ccdd)](https://pan.baidu.com/s/10se81ZktkUDUWn2dZzkk_Q) + +Step4. put track_half.py under https://github.com/Zhongdao/Towards-Realtime-MOT and run: +``` +python3 track_half.py --cfg ./cfg/yolov3_1088x608.cfg --weights weights/mix_mot17_half_jde.pt +``` + + +## Notes +byte_tracker: only motion + +tracker: motion + reid diff --git a/tracking/docker-build-context/byte_track/tutorials/jde/byte_tracker.py b/tracking/docker-build-context/byte_track/tutorials/jde/byte_tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..63baccebb1e7bd710d863984426bb94c2770d95d --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/jde/byte_tracker.py @@ -0,0 +1,369 @@ +from collections import deque +import torch +import numpy as np +from utils.kalman_filter import KalmanFilter +from utils.log import logger +from models import * +from tracker import matching +from .basetrack import BaseTrack, TrackState + + +class STrack(BaseTrack): + + def __init__(self, tlwh, score): + + # wait activate + self._tlwh = np.asarray(tlwh, dtype=np.float) + self.kalman_filter = None + self.mean, self.covariance = None, None + self.is_activated = False + + self.score = score + self.tracklet_len = 0 + + def predict(self): + mean_state = self.mean.copy() + if self.state != TrackState.Tracked: + mean_state[7] = 0 + self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance) + + @staticmethod + def multi_predict(stracks, kalman_filter): + if len(stracks) > 0: + multi_mean = np.asarray([st.mean.copy() for st in stracks]) + multi_covariance = np.asarray([st.covariance for st in stracks]) + for i, st in enumerate(stracks): + if st.state != TrackState.Tracked: + multi_mean[i][7] = 0 +# multi_mean, multi_covariance = STrack.kalman_filter.multi_predict(multi_mean, multi_covariance) + multi_mean, multi_covariance = kalman_filter.multi_predict(multi_mean, multi_covariance) + for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)): + stracks[i].mean = mean + stracks[i].covariance = cov + + def activate(self, kalman_filter, frame_id): + """Start a new tracklet""" + self.kalman_filter = kalman_filter + self.track_id = self.next_id() + self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh)) + + 
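+        # initiate() seeds the Kalman state from the (center x, center y,
+        # aspect ratio, height) parameterization returned by tlwh_to_xyah.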
self.tracklet_len = 0 + self.state = TrackState.Tracked + #self.is_activated = True + self.frame_id = frame_id + self.start_frame = frame_id + + def re_activate(self, new_track, frame_id, new_id=False): + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) + ) + + self.tracklet_len = 0 + self.state = TrackState.Tracked + self.is_activated = True + self.frame_id = frame_id + if new_id: + self.track_id = self.next_id() + + def update(self, new_track, frame_id, update_feature=True): + """ + Update a matched track + :type new_track: STrack + :type frame_id: int + :type update_feature: bool + :return: + """ + self.frame_id = frame_id + self.tracklet_len += 1 + + new_tlwh = new_track.tlwh + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh)) + self.state = TrackState.Tracked + self.is_activated = True + + self.score = new_track.score + + @property + def tlwh(self): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. + """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + def tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + def tlwh_to_xyah(tlwh): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. + """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self): + return self.tlwh_to_xyah(self.tlwh) + + @staticmethod + def tlbr_to_tlwh(tlbr): + ret = np.asarray(tlbr).copy() + ret[2:] -= ret[:2] + return ret + + @staticmethod + def tlwh_to_tlbr(tlwh): + ret = np.asarray(tlwh).copy() + ret[2:] += ret[:2] + return ret + + def __repr__(self): + return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame) + + +class BYTETracker(object): + def __init__(self, opt, frame_rate=30): + self.opt = opt + self.model = Darknet(opt.cfg, nID=14455) + # load_darknet_weights(self.model, opt.weights) + self.model.load_state_dict(torch.load(opt.weights, map_location='cpu')['model'], strict=False) + self.model.cuda().eval() + + self.tracked_stracks = [] # type: list[STrack] + self.lost_stracks = [] # type: list[STrack] + self.removed_stracks = [] # type: list[STrack] + + self.frame_id = 0 + self.det_thresh = opt.conf_thres + self.init_thresh = self.det_thresh + 0.2 + self.low_thresh = 0.3 + self.buffer_size = int(frame_rate / 30.0 * opt.track_buffer) + self.max_time_lost = self.buffer_size + + self.kalman_filter = KalmanFilter() + + def update(self, im_blob, img0): + """ + Processes the image frame and finds bounding box(detections). + + Associates the detection with corresponding tracklets and also handles lost, removed, refound and active tracklets + + Parameters + ---------- + im_blob : torch.float32 + Tensor of shape depending upon the size of image. By default, shape of this tensor is [1, 3, 608, 1088] + + img0 : ndarray + ndarray of shape depending on the input image sequence. By default, shape is [608, 1080, 3] + + Returns + ------- + output_stracks : list of Strack(instances) + The list contains information regarding the online_tracklets for the recieved image tensor. 
+ + """ + + self.frame_id += 1 + activated_starcks = [] # for storing active tracks, for the current frame + refind_stracks = [] # Lost Tracks whose detections are obtained in the current frame + lost_stracks = [] # The tracks which are not obtained in the current frame but are not removed.(Lost for some time lesser than the threshold for removing) + removed_stracks = [] + + t1 = time.time() + ''' Step 1: Network forward, get detections & embeddings''' + with torch.no_grad(): + pred = self.model(im_blob) + # pred is tensor of all the proposals (default number of proposals: 54264). Proposals have information associated with the bounding box and embeddings + pred = pred[pred[:, :, 4] > self.low_thresh] + # pred now has lesser number of proposals. Proposals rejected on basis of object confidence score + if len(pred) > 0: + dets = non_max_suppression(pred.unsqueeze(0), self.low_thresh, self.opt.nms_thres)[0].cpu() + # Final proposals are obtained in dets. Information of bounding box and embeddings also included + # Next step changes the detection scales + scale_coords(self.opt.img_size, dets[:, :4], img0.shape).round() + '''Detections is list of (x1, y1, x2, y2, object_conf, class_score, class_pred)''' + # class_pred is the embeddings. + + dets = dets.numpy() + remain_inds = dets[:, 4] > self.det_thresh + inds_low = dets[:, 4] > self.low_thresh + inds_high = dets[:, 4] < self.det_thresh + inds_second = np.logical_and(inds_low, inds_high) + dets_second = dets[inds_second] + dets = dets[remain_inds] + + detections = [STrack(STrack.tlbr_to_tlwh(tlbrs[:4]), tlbrs[4]) for + tlbrs in dets[:, :5]] + else: + detections = [] + dets_second = [] + + t2 = time.time() + # print('Forward: {} s'.format(t2-t1)) + + ''' Add newly detected tracklets to tracked_stracks''' + unconfirmed = [] + tracked_stracks = [] # type: list[STrack] + for track in self.tracked_stracks: + if not track.is_activated: + # previous tracks which are not active in the current frame are added in unconfirmed list + unconfirmed.append(track) + # print("Should not be here, in unconfirmed") + else: + # Active tracks are added to the local list 'tracked_stracks' + tracked_stracks.append(track) + + ''' Step 2: First association, with embedding''' + # Combining currently tracked_stracks and lost_stracks + strack_pool = joint_stracks(tracked_stracks, self.lost_stracks) + # Predict the current location with KF + STrack.multi_predict(strack_pool, self.kalman_filter) + dists = matching.iou_distance(strack_pool, detections) + # The dists is the list of distances of the detection with the tracks in strack_pool + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.8) + # The matches is the array for corresponding matches of the detection with the corresponding strack_pool + + for itracked, idet in matches: + # itracked is the id of the track and idet is the detection + track = strack_pool[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + # If the track is active, add the detection to the track + track.update(detections[idet], self.frame_id) + activated_starcks.append(track) + else: + # We have obtained a detection from a track which is not active, hence put the track in refind_stracks list + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + # association the untrack to the low score detections + if len(dets_second) > 0: + detections_second = [STrack(STrack.tlbr_to_tlwh(tlbrs[:4]), tlbrs[4]) for + tlbrs in dets_second[:, :5]] + else: + detections_second = [] + 
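+        # Second association, the core BYTE step: tracks still unmatched get
+        # one more chance against the low-score detections, under a stricter
+        # IoU gate (thresh=0.4).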
r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked] + dists = matching.iou_distance(r_tracked_stracks, detections_second) + matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.4) + for itracked, idet in matches: + track = r_tracked_stracks[itracked] + det = detections_second[idet] + if track.state == TrackState.Tracked: + track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + for it in u_track: + track = r_tracked_stracks[it] + if not track.state == TrackState.Lost: + track.mark_lost() + lost_stracks.append(track) + # If no detections are obtained for tracks (u_track), the tracks are added to lost_tracks list and are marked lost + + '''Deal with unconfirmed tracks, usually tracks with only one beginning frame''' + detections = [detections[i] for i in u_detection] + dists = matching.iou_distance(unconfirmed, detections) + matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7) + for itracked, idet in matches: + unconfirmed[itracked].update(detections[idet], self.frame_id) + activated_starcks.append(unconfirmed[itracked]) + + # The tracks which are yet not matched + for it in u_unconfirmed: + track = unconfirmed[it] + track.mark_removed() + removed_stracks.append(track) + + # after all these confirmation steps, if a new detection is found, it is initialized for a new track + """ Step 4: Init new stracks""" + for inew in u_detection: + track = detections[inew] + if track.score < self.init_thresh: + continue + track.activate(self.kalman_filter, self.frame_id) + activated_starcks.append(track) + + """ Step 5: Update state""" + # If the tracks are lost for more frames than the threshold number, the tracks are removed. + for track in self.lost_stracks: + if self.frame_id - track.end_frame > self.max_time_lost: + track.mark_removed() + removed_stracks.append(track) + # print('Remained match {} s'.format(t4-t3)) + + # Update the self.tracked_stracks and self.lost_stracks using the updates in this step. 
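+        # Bookkeeping: fold activated/refound tracks back in, prune anything
+        # re-tracked or removed from lost_stracks, and resolve IoU-duplicate
+        # pairs in favour of the longer-lived track.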
+ self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked] + self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks) + self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks) + # self.lost_stracks = [t for t in self.lost_stracks if t.state == TrackState.Lost] # type: list[STrack] + self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks) + self.lost_stracks.extend(lost_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks) + self.removed_stracks.extend(removed_stracks) + self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks) + + # get scores of lost tracks + output_stracks = [track for track in self.tracked_stracks if track.is_activated] + + logger.debug('===========Frame {}=========='.format(self.frame_id)) + logger.debug('Activated: {}'.format([track.track_id for track in activated_starcks])) + logger.debug('Refind: {}'.format([track.track_id for track in refind_stracks])) + logger.debug('Lost: {}'.format([track.track_id for track in lost_stracks])) + logger.debug('Removed: {}'.format([track.track_id for track in removed_stracks])) + # print('Final {} s'.format(t5-t4)) + return output_stracks + +def joint_stracks(tlista, tlistb): + exists = {} + res = [] + for t in tlista: + exists[t.track_id] = 1 + res.append(t) + for t in tlistb: + tid = t.track_id + if not exists.get(tid, 0): + exists[tid] = 1 + res.append(t) + return res + +def sub_stracks(tlista, tlistb): + stracks = {} + for t in tlista: + stracks[t.track_id] = t + for t in tlistb: + tid = t.track_id + if stracks.get(tid, 0): + del stracks[tid] + return list(stracks.values()) + +def remove_duplicate_stracks(stracksa, stracksb): + pdist = matching.iou_distance(stracksa, stracksb) + pairs = np.where(pdist<0.15) + dupa, dupb = list(), list() + for p,q in zip(*pairs): + timep = stracksa[p].frame_id - stracksa[p].start_frame + timeq = stracksb[q].frame_id - stracksb[q].start_frame + if timep > timeq: + dupb.append(q) + else: + dupa.append(p) + resa = [t for i,t in enumerate(stracksa) if not i in dupa] + resb = [t for i,t in enumerate(stracksb) if not i in dupb] + return resa, resb + + diff --git a/tracking/docker-build-context/byte_track/tutorials/jde/track_half.py b/tracking/docker-build-context/byte_track/tutorials/jde/track_half.py new file mode 100644 index 0000000000000000000000000000000000000000..55d6c13d6eca83359c88e22f261b95533ddb05f5 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/jde/track_half.py @@ -0,0 +1,222 @@ +import os +import os.path as osp +import cv2 +import logging +import argparse +import motmetrics as mm + +import torch +#from tracker.multitracker import JDETracker +from tracker.byte_tracker import BYTETracker +from utils import visualization as vis +from utils.log import logger +from utils.timer import Timer +from utils.evaluation import Evaluator +from utils.parse_config import parse_model_cfg +import utils.datasets as datasets +from utils.utils import * + + +def write_results(filename, results, data_type): + if data_type == 'mot': + save_format = '{frame},{id},{x1},{y1},{w},{h},1,-1,-1,-1\n' + elif data_type == 'kitti': + save_format = '{frame} {id} pedestrian 0 0 -10 {x1} {y1} {x2} {y2} -10 -10 -10 -1000 -1000 -1000 -10\n' + else: + raise ValueError(data_type) + + with open(filename, 'w') as f: + for frame_id, tlwhs, track_ids in results: + if data_type == 'kitti': + frame_id -= 1 + for tlwh, track_id in zip(tlwhs, 
track_ids):
+                if track_id < 0:
+                    continue
+                x1, y1, w, h = tlwh
+                x2, y2 = x1 + w, y1 + h
+                line = save_format.format(frame=frame_id, id=track_id, x1=x1, y1=y1, x2=x2, y2=y2, w=w, h=h)
+                f.write(line)
+    logger.info('save results to {}'.format(filename))
+
+
+def eval_seq(opt, dataloader, data_type, result_filename, save_dir=None, show_image=True, frame_rate=30):
+    '''
+       Processes the given video sequence and writes the tracking results to file.
+
+       It uses the JDE model to obtain the online targets present in each frame.
+
+       Parameters
+       ----------
+       opt : Namespace
+             Contains information passed as command-line arguments.
+
+       dataloader : LoadVideo
+                    Instance of the LoadVideo class used for fetching the image sequence and associated data.
+
+       data_type : String
+                   Type of dataset corresponding (similar) to the given video.
+
+       result_filename : String
+                         The name (path) of the file for storing results.
+
+       save_dir : String
+                  Path to the folder for storing the frames containing bounding-box information (result frames).
+
+       show_image : bool
+                    Option for showing individual frames during run-time.
+
+       frame_rate : int
+                    Frame rate of the given video.
+
+       Returns
+       -------
+       frame_id : int
+                  Index of the last processed frame.
+       '''
+
+    if save_dir:
+        mkdir_if_missing(save_dir)
+    tracker = BYTETracker(opt, frame_rate=frame_rate)
+    timer = Timer()
+    results = []
+    len_all = len(dataloader)
+    start_frame = int(len_all / 2)
+    frame_id = int(len_all / 2)
+    for i, (path, img, img0) in enumerate(dataloader):
+        if i < start_frame:
+            continue
+        if frame_id % 20 == 0:
+            logger.info('Processing frame {} ({:.2f} fps)'.format(frame_id, 1. / max(1e-5, timer.average_time)))
+
+        # run tracking
+        timer.tic()
+        blob = torch.from_numpy(img).cuda().unsqueeze(0)
+        online_targets = tracker.update(blob, img0)
+        online_tlwhs = []
+        online_ids = []
+        for t in online_targets:
+            tlwh = t.tlwh
+            tid = t.track_id
+            vertical = tlwh[2] / tlwh[3] > 1.6
+            if tlwh[2] * tlwh[3] > opt.min_box_area and not vertical:
+                online_tlwhs.append(tlwh)
+                online_ids.append(tid)
+        timer.toc()
+        # save results
+        results.append((frame_id + 1, online_tlwhs, online_ids))
+        if show_image or save_dir is not None:
+            online_im = vis.plot_tracking(img0, online_tlwhs, online_ids, frame_id=frame_id,
+                                          fps=1. 
/ timer.average_time) + if show_image: + cv2.imshow('online_im', online_im) + if save_dir is not None: + cv2.imwrite(os.path.join(save_dir, '{:05d}.jpg'.format(frame_id)), online_im) + frame_id += 1 + # save results + write_results(result_filename, results, data_type) + return frame_id, timer.average_time, timer.calls + + +def main(opt, data_root='/data/MOT16/train', det_root=None, seqs=('MOT16-05',), exp_name='demo', + save_images=False, save_videos=False, show_image=True): + logger.setLevel(logging.INFO) + result_root = os.path.join(data_root, '..', 'results', exp_name) + mkdir_if_missing(result_root) + data_type = 'mot' + + # Read config + cfg_dict = parse_model_cfg(opt.cfg) + opt.img_size = [int(cfg_dict[0]['width']), int(cfg_dict[0]['height'])] + + # run tracking + accs = [] + n_frame = 0 + timer_avgs, timer_calls = [], [] + for seq in seqs: + output_dir = os.path.join(data_root, '..','outputs', exp_name, seq) if save_images or save_videos else None + + logger.info('start seq: {}'.format(seq)) + dataloader = datasets.LoadImages(osp.join(data_root, seq, 'img1'), opt.img_size) + result_filename = os.path.join(result_root, '{}.txt'.format(seq)) + meta_info = open(os.path.join(data_root, seq, 'seqinfo.ini')).read() + frame_rate = int(meta_info[meta_info.find('frameRate')+10:meta_info.find('\nseqLength')]) + nf, ta, tc = eval_seq(opt, dataloader, data_type, result_filename, + save_dir=output_dir, show_image=show_image, frame_rate=frame_rate) + n_frame += nf + timer_avgs.append(ta) + timer_calls.append(tc) + + # eval + logger.info('Evaluate seq: {}'.format(seq)) + evaluator = Evaluator(data_root, seq, data_type) + accs.append(evaluator.eval_file(result_filename)) + if save_videos: + output_video_path = osp.join(output_dir, '{}.mp4'.format(seq)) + cmd_str = 'ffmpeg -f image2 -i {}/%05d.jpg -c:v copy {}'.format(output_dir, output_video_path) + os.system(cmd_str) + timer_avgs = np.asarray(timer_avgs) + timer_calls = np.asarray(timer_calls) + all_time = np.dot(timer_avgs, timer_calls) + avg_time = all_time / np.sum(timer_calls) + logger.info('Time elapsed: {:.2f} seconds, FPS: {:.2f}'.format(all_time, 1.0 / avg_time)) + + # get summary + metrics = mm.metrics.motchallenge_metrics + mh = mm.metrics.create() + summary = Evaluator.get_summary(accs, seqs, metrics) + strsummary = mm.io.render_summary( + summary, + formatters=mh.formatters, + namemap=mm.io.motchallenge_metric_names + ) + print(strsummary) + Evaluator.save_summary(summary, os.path.join(result_root, 'summary_{}.xlsx'.format(exp_name))) + + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(prog='track.py') + parser.add_argument('--cfg', type=str, default='cfg/yolov3_1088x608.cfg', help='cfg file path') + parser.add_argument('--weights', type=str, default='weights/latest.pt', help='path to weights file') + parser.add_argument('--iou-thres', type=float, default=0.5, help='iou threshold required to qualify as detected') + parser.add_argument('--conf-thres', type=float, default=0.7, help='object confidence threshold') + parser.add_argument('--nms-thres', type=float, default=0.4, help='iou threshold for non-maximum suppression') + parser.add_argument('--min-box-area', type=float, default=200, help='filter out tiny boxes') + parser.add_argument('--track-buffer', type=int, default=30, help='tracking buffer') + parser.add_argument('--test-mot16', action='store_true', help='tracking buffer') + parser.add_argument('--val-mot17', default=True, help='validation on MOT17') + parser.add_argument('--save-images', action='store_true', 
help='save tracking results (image)') + parser.add_argument('--save-videos', action='store_true', help='save tracking results (video)') + opt = parser.parse_args() + print(opt, end='\n\n') + + if not opt.test_mot16: + seqs_str = '''MOT17-02-SDP + MOT17-04-SDP + MOT17-05-SDP + MOT17-09-SDP + MOT17-10-SDP + MOT17-11-SDP + MOT17-13-SDP + ''' + #seqs_str = '''MOT17-02-SDP''' + data_root = '/opt/tiger/demo/datasets/MOT17/images/train' + else: + seqs_str = '''MOT16-01 + MOT16-03 + MOT16-06 + MOT16-07 + MOT16-08 + MOT16-12 + MOT16-14''' + data_root = '/home/wangzd/datasets/MOT/MOT16/images/test' + seqs = [seq.strip() for seq in seqs_str.split()] + + main(opt, + data_root=data_root, + seqs=seqs, + exp_name=opt.weights.split('/')[-2], + show_image=False, + save_images=opt.save_images, + save_videos=opt.save_videos) diff --git a/tracking/docker-build-context/byte_track/tutorials/jde/tracker.py b/tracking/docker-build-context/byte_track/tutorials/jde/tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..81b9653f94571a36e813b1ec938c42f9f0c01f67 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/jde/tracker.py @@ -0,0 +1,414 @@ + +from collections import deque +import torch +import numpy as np +from utils.kalman_filter import KalmanFilter +from utils.log import logger +from models import * +from tracker import matching +from .basetrack import BaseTrack, TrackState + + +class STrack(BaseTrack): + + def __init__(self, tlwh, score, temp_feat, buffer_size=30): + + # wait activate + self._tlwh = np.asarray(tlwh, dtype=np.float) + self.kalman_filter = None + self.mean, self.covariance = None, None + self.is_activated = False + + self.score = score + self.tracklet_len = 0 + + self.smooth_feat = None + self.update_features(temp_feat) + self.features = deque([], maxlen=buffer_size) + self.alpha = 0.9 + + def update_features(self, feat): + feat /= np.linalg.norm(feat) + self.curr_feat = feat + if self.smooth_feat is None: + self.smooth_feat = feat + else: + self.smooth_feat = self.alpha *self.smooth_feat + (1-self.alpha) * feat + self.features.append(feat) + self.smooth_feat /= np.linalg.norm(self.smooth_feat) + + def predict(self): + mean_state = self.mean.copy() + if self.state != TrackState.Tracked: + mean_state[7] = 0 + self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance) + + @staticmethod + def multi_predict(stracks, kalman_filter): + if len(stracks) > 0: + multi_mean = np.asarray([st.mean.copy() for st in stracks]) + multi_covariance = np.asarray([st.covariance for st in stracks]) + for i, st in enumerate(stracks): + if st.state != TrackState.Tracked: + multi_mean[i][7] = 0 +# multi_mean, multi_covariance = STrack.kalman_filter.multi_predict(multi_mean, multi_covariance) + multi_mean, multi_covariance = kalman_filter.multi_predict(multi_mean, multi_covariance) + for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)): + stracks[i].mean = mean + stracks[i].covariance = cov + + def activate(self, kalman_filter, frame_id): + """Start a new tracklet""" + self.kalman_filter = kalman_filter + self.track_id = self.next_id() + self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh)) + + self.tracklet_len = 0 + self.state = TrackState.Tracked + #self.is_activated = True + self.frame_id = frame_id + self.start_frame = frame_id + + def re_activate(self, new_track, frame_id, new_id=False): + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, 
self.tlwh_to_xyah(new_track.tlwh) + ) + + self.update_features(new_track.curr_feat) + self.tracklet_len = 0 + self.state = TrackState.Tracked + self.is_activated = True + self.frame_id = frame_id + if new_id: + self.track_id = self.next_id() + + def update(self, new_track, frame_id, update_feature=True): + """ + Update a matched track + :type new_track: STrack + :type frame_id: int + :type update_feature: bool + :return: + """ + self.frame_id = frame_id + self.tracklet_len += 1 + + new_tlwh = new_track.tlwh + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh)) + self.state = TrackState.Tracked + self.is_activated = True + + self.score = new_track.score + if update_feature: + self.update_features(new_track.curr_feat) + + @property + def tlwh(self): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. + """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + def tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + def tlwh_to_xyah(tlwh): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. + """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self): + return self.tlwh_to_xyah(self.tlwh) + + @staticmethod + def tlbr_to_tlwh(tlbr): + ret = np.asarray(tlbr).copy() + ret[2:] -= ret[:2] + return ret + + @staticmethod + def tlwh_to_tlbr(tlwh): + ret = np.asarray(tlwh).copy() + ret[2:] += ret[:2] + return ret + + def __repr__(self): + return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame) + + +class JDETracker(object): + def __init__(self, opt, frame_rate=30): + self.opt = opt + self.model = Darknet(opt.cfg, nID=14455) + # load_darknet_weights(self.model, opt.weights) + self.model.load_state_dict(torch.load(opt.weights, map_location='cpu')['model'], strict=False) + self.model.cuda().eval() + + self.tracked_stracks = [] # type: list[STrack] + self.lost_stracks = [] # type: list[STrack] + self.removed_stracks = [] # type: list[STrack] + + self.frame_id = 0 + self.det_thresh = opt.conf_thres + self.init_thresh = self.det_thresh + 0.2 + self.low_thresh = 0.4 + self.buffer_size = int(frame_rate / 30.0 * opt.track_buffer) + self.max_time_lost = self.buffer_size + + self.kalman_filter = KalmanFilter() + + def update(self, im_blob, img0): + """ + Processes the image frame and finds bounding box(detections). + + Associates the detection with corresponding tracklets and also handles lost, removed, refound and active tracklets + + Parameters + ---------- + im_blob : torch.float32 + Tensor of shape depending upon the size of image. By default, shape of this tensor is [1, 3, 608, 1088] + + img0 : ndarray + ndarray of shape depending on the input image sequence. By default, shape is [608, 1080, 3] + + Returns + ------- + output_stracks : list of Strack(instances) + The list contains information regarding the online_tracklets for the recieved image tensor. 
+ + """ + + self.frame_id += 1 + activated_starcks = [] # for storing active tracks, for the current frame + refind_stracks = [] # Lost Tracks whose detections are obtained in the current frame + lost_stracks = [] # The tracks which are not obtained in the current frame but are not removed.(Lost for some time lesser than the threshold for removing) + removed_stracks = [] + + t1 = time.time() + ''' Step 1: Network forward, get detections & embeddings''' + with torch.no_grad(): + pred = self.model(im_blob) + # pred is tensor of all the proposals (default number of proposals: 54264). Proposals have information associated with the bounding box and embeddings + pred = pred[pred[:, :, 4] > self.low_thresh] + # pred now has lesser number of proposals. Proposals rejected on basis of object confidence score + if len(pred) > 0: + dets = non_max_suppression(pred.unsqueeze(0), self.low_thresh, self.opt.nms_thres)[0].cpu() + # Final proposals are obtained in dets. Information of bounding box and embeddings also included + # Next step changes the detection scales + scale_coords(self.opt.img_size, dets[:, :4], img0.shape).round() + '''Detections is list of (x1, y1, x2, y2, object_conf, class_score, class_pred)''' + # class_pred is the embeddings. + + dets = dets.numpy() + remain_inds = dets[:, 4] > self.det_thresh + inds_low = dets[:, 4] > self.low_thresh + inds_high = dets[:, 4] < self.det_thresh + inds_second = np.logical_and(inds_low, inds_high) + dets_second = dets[inds_second] + dets = dets[remain_inds] + + detections = [STrack(STrack.tlbr_to_tlwh(tlbrs[:4]), tlbrs[4], f, 30) for + (tlbrs, f) in zip(dets[:, :5], dets[:, 6:])] + else: + detections = [] + dets_second = [] + + t2 = time.time() + # print('Forward: {} s'.format(t2-t1)) + + ''' Add newly detected tracklets to tracked_stracks''' + unconfirmed = [] + tracked_stracks = [] # type: list[STrack] + for track in self.tracked_stracks: + if not track.is_activated: + # previous tracks which are not active in the current frame are added in unconfirmed list + unconfirmed.append(track) + # print("Should not be here, in unconfirmed") + else: + # Active tracks are added to the local list 'tracked_stracks' + tracked_stracks.append(track) + + ''' Step 2: First association, with embedding''' + # Combining currently tracked_stracks and lost_stracks + strack_pool = joint_stracks(tracked_stracks, self.lost_stracks) + # Predict the current location with KF + STrack.multi_predict(strack_pool, self.kalman_filter) + + dists = matching.embedding_distance(strack_pool, detections) + dists = matching.fuse_motion(self.kalman_filter, dists, strack_pool, detections) + #dists = matching.iou_distance(strack_pool, detections) + # The dists is the list of distances of the detection with the tracks in strack_pool + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.7) + # The matches is the array for corresponding matches of the detection with the corresponding strack_pool + + for itracked, idet in matches: + # itracked is the id of the track and idet is the detection + track = strack_pool[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + # If the track is active, add the detection to the track + track.update(detections[idet], self.frame_id) + activated_starcks.append(track) + else: + # We have obtained a detection from a track which is not active, hence put the track in refind_stracks list + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + # None of the steps below happen if there are no 
+        ''' Step 3: Second association, with IOU'''
+        detections = [detections[i] for i in u_detection]
+        # detections is now the list of unmatched detections
+        r_tracked_stracks = []  # tracks that were tracked up to the previous frame
+        # but have no detection in the current frame
+        for i in u_track:
+            if strack_pool[i].state == TrackState.Tracked:
+                r_tracked_stracks.append(strack_pool[i])
+        dists = matching.iou_distance(r_tracked_stracks, detections)
+        matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.5)
+        # matches now pairs the remaining detections with tracks by IoU distance
+        for itracked, idet in matches:
+            track = r_tracked_stracks[itracked]
+            det = detections[idet]
+            if track.state == TrackState.Tracked:
+                track.update(det, self.frame_id)
+                activated_starcks.append(track)
+            else:
+                track.re_activate(det, self.frame_id, new_id=False)
+                refind_stracks.append(track)
+        # Same association as above for the remaining detections, but with IoU distance as the measure
+
+        # Associate the remaining tracks with the low-score detections
+        if len(dets_second) > 0:
+            detections_second = [STrack(STrack.tlbr_to_tlwh(tlbrs[:4]), tlbrs[4], f, 30) for
+                                 (tlbrs, f) in zip(dets_second[:, :5], dets_second[:, 6:])]
+        else:
+            detections_second = []
+        second_tracked_stracks = [r_tracked_stracks[i] for i in u_track if r_tracked_stracks[i].state == TrackState.Tracked]
+        dists = matching.iou_distance(second_tracked_stracks, detections_second)
+        matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.4)
+        for itracked, idet in matches:
+            track = second_tracked_stracks[itracked]
+            det = detections_second[idet]
+            if track.state == TrackState.Tracked:
+                track.update(det, self.frame_id)
+                activated_starcks.append(track)
+            else:
+                track.re_activate(det, self.frame_id, new_id=False)
+                refind_stracks.append(track)
+
+        for it in u_track:
+            track = second_tracked_stracks[it]
+            if not track.state == TrackState.Lost:
+                track.mark_lost()
+                lost_stracks.append(track)
+        # Tracks with no detection in this frame (u_track) are marked lost and added to lost_stracks
+
+        '''Deal with unconfirmed tracks, usually tracks with only one beginning frame'''
+        detections = [detections[i] for i in u_detection]
+        dists = matching.iou_distance(unconfirmed, detections)
+        matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7)
+        for itracked, idet in matches:
+            unconfirmed[itracked].update(detections[idet], self.frame_id)
+            activated_starcks.append(unconfirmed[itracked])
+
+        # unconfirmed tracks that are still unmatched are removed
+        for it in u_unconfirmed:
+            track = unconfirmed[it]
+            track.mark_removed()
+            removed_stracks.append(track)
+
+        # After all association steps, any detection that is still unmatched starts a new track
+        """ Step 4: Init new stracks"""
+        for inew in u_detection:
+            track = detections[inew]
+            if track.score < self.init_thresh:
+                continue
+            track.activate(self.kalman_filter, self.frame_id)
+            activated_starcks.append(track)
+
+        """ Step 5: Update state"""
+        # Tracks lost for more frames than the threshold are removed
+        for track in self.lost_stracks:
+            if self.frame_id - track.end_frame > self.max_time_lost:
+                track.mark_removed()
+                removed_stracks.append(track)
+        # print('Remained match {} s'.format(t4-t3))
+
+        # Update self.tracked_stracks and self.lost_stracks with this frame's results.
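+        # The bookkeeping below is set algebra on the track lists:
+        #   tracked = (still Tracked) + activated + refound
+        #   lost = (old lost - re-tracked - removed) + newly lost this frame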
+        self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked]
+        self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks)
+        self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks)
+        # self.lost_stracks = [t for t in self.lost_stracks if t.state == TrackState.Lost]  # type: list[STrack]
+        self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks)
+        self.lost_stracks.extend(lost_stracks)
+        self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks)
+        self.removed_stracks.extend(removed_stracks)
+        self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks)
+
+        # collect the currently activated tracks as this frame's output
+        output_stracks = [track for track in self.tracked_stracks if track.is_activated]
+
+        logger.debug('===========Frame {}=========='.format(self.frame_id))
+        logger.debug('Activated: {}'.format([track.track_id for track in activated_starcks]))
+        logger.debug('Refind: {}'.format([track.track_id for track in refind_stracks]))
+        logger.debug('Lost: {}'.format([track.track_id for track in lost_stracks]))
+        logger.debug('Removed: {}'.format([track.track_id for track in removed_stracks]))
+        # print('Final {} s'.format(t5-t4))
+        return output_stracks
+
+def joint_stracks(tlista, tlistb):
+    exists = {}
+    res = []
+    for t in tlista:
+        exists[t.track_id] = 1
+        res.append(t)
+    for t in tlistb:
+        tid = t.track_id
+        if not exists.get(tid, 0):
+            exists[tid] = 1
+            res.append(t)
+    return res
+
+def sub_stracks(tlista, tlistb):
+    stracks = {}
+    for t in tlista:
+        stracks[t.track_id] = t
+    for t in tlistb:
+        tid = t.track_id
+        if stracks.get(tid, 0):
+            del stracks[tid]
+    return list(stracks.values())
+
+def remove_duplicate_stracks(stracksa, stracksb):
+    pdist = matching.iou_distance(stracksa, stracksb)
+    pairs = np.where(pdist < 0.15)
+    dupa, dupb = list(), list()
+    for p, q in zip(*pairs):
+        timep = stracksa[p].frame_id - stracksa[p].start_frame
+        timeq = stracksb[q].frame_id - stracksb[q].start_frame
+        if timep > timeq:
+            dupb.append(q)
+        else:
+            dupa.append(p)
+    resa = [t for i, t in enumerate(stracksa) if i not in dupa]
+    resb = [t for i, t in enumerate(stracksb) if i not in dupb]
+    return resa, resb
+
+
diff --git a/tracking/docker-build-context/byte_track/tutorials/motr/README.md b/tracking/docker-build-context/byte_track/tutorials/motr/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..3fcc6ca471912eba104c258cc8a152f14673d813
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/tutorials/motr/README.md
@@ -0,0 +1,100 @@
+# MOTR
+
+Step 1.
+
+Clone https://github.com/megvii-model/MOTR.git and install its dependencies.
+
+Replace https://github.com/megvii-model/MOTR/blob/main/datasets/joint.py
+
+Replace https://github.com/megvii-model/MOTR/blob/main/datasets/transforms.py
+
+
+Train:
+
+```
+python3 -m torch.distributed.launch --nproc_per_node=8 \
+    --use_env main.py \
+    --meta_arch motr \
+    --dataset_file e2e_joint \
+    --epoch 50 \
+    --with_box_refine \
+    --lr_drop 40 \
+    --lr 2e-4 \
+    --lr_backbone 2e-5 \
+    --pretrained coco_model_final.pth \
+    --output_dir exps/e2e_motr_r50_mot17trainhalf \
+    --batch_size 1 \
+    --sample_mode 'random_interval' \
+    --sample_interval 10 \
+    --sampler_steps 10 20 30 \
+    --sampler_lengths 2 3 4 5 \
+    --update_query_pos \
+    --merger_dropout 0 \
+    --dropout 0 \
+    --random_drop 0.1 \
+    --fp_ratio 0.3 \
+    --query_interaction_layer 'QIM' \
+    --extra_track_attn \
+    --mot_path . \
+    --data_txt_path_train ./datasets/data_path/mot17.half \
+    --data_txt_path_val ./datasets/data_path/mot17.val
+```
+mot17.half and mot17.val are from https://github.com/ifzhang/FairMOT/tree/master/src/data
+
+You can also download the MOTR model trained by us: [google](https://drive.google.com/file/d/1pzGi53VooppQqhKf3TSxLK99LERsVyTw/view?usp=sharing), [baidu(code:t87h)](https://pan.baidu.com/s/1OrcR3L9Bf2xXIo8RQl3zyA)
+
+
+Step 2.
+
+Replace https://github.com/megvii-model/MOTR/blob/main/util/evaluation.py
+
+Replace https://github.com/megvii-model/MOTR/blob/main/eval.py
+
+Replace https://github.com/megvii-model/MOTR/blob/main/models/motr.py
+
+Add byte_tracker.py to https://github.com/megvii-model/MOTR
+
+Add mot_online to https://github.com/megvii-model/MOTR
+
+
+Step 3.
+
+
+Validate:
+
+```
+python3 eval.py \
+    --meta_arch motr \
+    --dataset_file e2e_joint \
+    --epoch 200 \
+    --with_box_refine \
+    --lr_drop 100 \
+    --lr 2e-4 \
+    --lr_backbone 2e-5 \
+    --pretrained exps/e2e_motr_r50_mot17val/motr_final.pth \
+    --output_dir exps/e2e_motr_r50_mot17val \
+    --batch_size 1 \
+    --sample_mode 'random_interval' \
+    --sample_interval 10 \
+    --sampler_steps 50 90 120 \
+    --sampler_lengths 2 3 4 5 \
+    --update_query_pos \
+    --merger_dropout 0 \
+    --dropout 0 \
+    --random_drop 0.1 \
+    --fp_ratio 0.3 \
+    --query_interaction_layer 'QIM' \
+    --extra_track_attn \
+    --mot_path ./MOT17/images/train \
+    --data_txt_path_train ./datasets/data_path/mot17.half \
+    --data_txt_path_val ./datasets/data_path/mot17.val \
+    --resume model_final.pth
+```
+
+
+
+# MOTR det
+
+In Step 2, replace https://github.com/megvii-model/MOTR/blob/main/models/motr.py with motr_det.py
+
+Everything else is the same as MOTR.
diff --git a/tracking/docker-build-context/byte_track/tutorials/motr/byte_tracker.py b/tracking/docker-build-context/byte_track/tutorials/motr/byte_tracker.py
new file mode 100644
index 0000000000000000000000000000000000000000..d5bc6dd479441e78e92bd07ce496314d8de13d38
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/tutorials/motr/byte_tracker.py
@@ -0,0 +1,339 @@
+import numpy as np
+from collections import deque
+import os
+import os.path as osp
+import copy
+import torch
+import torch.nn.functional as F
+
+from mot_online.kalman_filter import KalmanFilter
+from mot_online.basetrack import BaseTrack, TrackState
+from mot_online import matching
+
+
+
+class STrack(BaseTrack):
+    shared_kalman = KalmanFilter()
+    def __init__(self, tlwh, score):
+
+        # waiting to be activated
+        self._tlwh = np.asarray(tlwh, dtype=np.float64)  # np.float is removed in NumPy >= 1.24
+        self.kalman_filter = None
+        self.mean, self.covariance = None, None
+        self.is_activated = False
+
+        self.score = score
+        self.tracklet_len = 0
+
+    def predict(self):
+        mean_state = self.mean.copy()
+        if self.state != TrackState.Tracked:
+            mean_state[7] = 0
+        self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance)
+
+    @staticmethod
+    def multi_predict(stracks):
+        if len(stracks) > 0:
+            multi_mean = np.asarray([st.mean.copy() for st in stracks])
+            multi_covariance = np.asarray([st.covariance for st in stracks])
+            for i, st in enumerate(stracks):
+                if st.state != TrackState.Tracked:
+                    multi_mean[i][7] = 0
+            multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(multi_mean, multi_covariance)
+            for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)):
+                stracks[i].mean = mean
+                stracks[i].covariance = cov
+
+    def activate(self, kalman_filter, frame_id):
+        """Start a new tracklet"""
+        self.kalman_filter = kalman_filter
+        self.track_id = self.next_id()
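+        # Initialize the Kalman state (mean and covariance) from the first observation (xyah).
+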
self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh)) + + self.tracklet_len = 0 + self.state = TrackState.Tracked + if frame_id == 1: + self.is_activated = True + # self.is_activated = True + self.frame_id = frame_id + self.start_frame = frame_id + + def re_activate(self, new_track, frame_id, new_id=False): + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) + ) + self.tracklet_len = 0 + self.state = TrackState.Tracked + self.is_activated = True + self.frame_id = frame_id + if new_id: + self.track_id = self.next_id() + self.score = new_track.score + + def update(self, new_track, frame_id): + """ + Update a matched track + :type new_track: STrack + :type frame_id: int + :type update_feature: bool + :return: + """ + self.frame_id = frame_id + self.tracklet_len += 1 + + new_tlwh = new_track.tlwh + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh)) + self.state = TrackState.Tracked + self.is_activated = True + + self.score = new_track.score + + @property + # @jit(nopython=True) + def tlwh(self): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. + """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + # @jit(nopython=True) + def tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_xyah(tlwh): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. 
+ """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self): + return self.tlwh_to_xyah(self.tlwh) + + @staticmethod + # @jit(nopython=True) + def tlbr_to_tlwh(tlbr): + ret = np.asarray(tlbr).copy() + ret[2:] -= ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_tlbr(tlwh): + ret = np.asarray(tlwh).copy() + ret[2:] += ret[:2] + return ret + + def __repr__(self): + return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame) + + +class BYTETracker(object): + def __init__(self, frame_rate=30): + self.tracked_stracks = [] # type: list[STrack] + self.lost_stracks = [] # type: list[STrack] + self.removed_stracks = [] # type: list[STrack] + + self.frame_id = 0 + + self.low_thresh = 0.2 + self.track_thresh = 0.8 + self.det_thresh = self.track_thresh + 0.1 + + + self.buffer_size = int(frame_rate / 30.0 * 30) + self.max_time_lost = self.buffer_size + self.kalman_filter = KalmanFilter() + + def update(self, output_results): + self.frame_id += 1 + activated_starcks = [] + refind_stracks = [] + lost_stracks = [] + removed_stracks = [] + + + scores = output_results[:, 4] + bboxes = output_results[:, :4] # x1y1x2y2 + + remain_inds = scores > self.track_thresh + dets = bboxes[remain_inds] + scores_keep = scores[remain_inds] + + + inds_low = scores > self.low_thresh + inds_high = scores < self.track_thresh + inds_second = np.logical_and(inds_low, inds_high) + dets_second = bboxes[inds_second] + scores_second = scores[inds_second] + + + if len(dets) > 0: + '''Detections''' + detections = [STrack(STrack.tlbr_to_tlwh(tlbr), s) for + (tlbr, s) in zip(dets, scores_keep)] + else: + detections = [] + + ''' Add newly detected tracklets to tracked_stracks''' + unconfirmed = [] + tracked_stracks = [] # type: list[STrack] + for track in self.tracked_stracks: + if not track.is_activated: + unconfirmed.append(track) + else: + tracked_stracks.append(track) + + ''' Step 2: First association, with Kalman and IOU''' + strack_pool = joint_stracks(tracked_stracks, self.lost_stracks) + # Predict the current location with KF + STrack.multi_predict(strack_pool) + dists = matching.iou_distance(strack_pool, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.8) + + for itracked, idet in matches: + track = strack_pool[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + track.update(detections[idet], self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + ''' Step 3: Second association, with IOU''' + # association the untrack to the low score detections + if len(dets_second) > 0: + '''Detections''' + detections_second = [STrack(STrack.tlbr_to_tlwh(tlbr), s) for + (tlbr, s) in zip(dets_second, scores_second)] + else: + detections_second = [] + r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked] + dists = matching.iou_distance(r_tracked_stracks, detections_second) + matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.5) + for itracked, idet in matches: + track = r_tracked_stracks[itracked] + det = detections_second[idet] + if track.state == TrackState.Tracked: + track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + for it in u_track: + #track = r_tracked_stracks[it] + track = r_tracked_stracks[it] + 
if not track.state == TrackState.Lost: + track.mark_lost() + lost_stracks.append(track) + + '''Deal with unconfirmed tracks, usually tracks with only one beginning frame''' + detections = [detections[i] for i in u_detection] + dists = matching.iou_distance(unconfirmed, detections) + matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7) + for itracked, idet in matches: + unconfirmed[itracked].update(detections[idet], self.frame_id) + activated_starcks.append(unconfirmed[itracked]) + for it in u_unconfirmed: + track = unconfirmed[it] + track.mark_removed() + removed_stracks.append(track) + + """ Step 4: Init new stracks""" + for inew in u_detection: + track = detections[inew] + if track.score < self.det_thresh: + continue + track.activate(self.kalman_filter, self.frame_id) + activated_starcks.append(track) + """ Step 5: Update state""" + for track in self.lost_stracks: + if self.frame_id - track.end_frame > self.max_time_lost: + track.mark_removed() + removed_stracks.append(track) + + # print('Ramained match {} s'.format(t4-t3)) + + self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked] + self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks) + self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks) + self.lost_stracks.extend(lost_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks) + self.removed_stracks.extend(removed_stracks) + self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks) + # get scores of lost tracks + output_stracks = [track for track in self.tracked_stracks if track.is_activated] + + return output_stracks + + +def joint_stracks(tlista, tlistb): + exists = {} + res = [] + for t in tlista: + exists[t.track_id] = 1 + res.append(t) + for t in tlistb: + tid = t.track_id + if not exists.get(tid, 0): + exists[tid] = 1 + res.append(t) + return res + + +def sub_stracks(tlista, tlistb): + stracks = {} + for t in tlista: + stracks[t.track_id] = t + for t in tlistb: + tid = t.track_id + if stracks.get(tid, 0): + del stracks[tid] + return list(stracks.values()) + + +def remove_duplicate_stracks(stracksa, stracksb): + pdist = matching.iou_distance(stracksa, stracksb) + pairs = np.where(pdist < 0.15) + dupa, dupb = list(), list() + for p, q in zip(*pairs): + timep = stracksa[p].frame_id - stracksa[p].start_frame + timeq = stracksb[q].frame_id - stracksb[q].start_frame + if timep > timeq: + dupb.append(q) + else: + dupa.append(p) + resa = [t for i, t in enumerate(stracksa) if not i in dupa] + resb = [t for i, t in enumerate(stracksb) if not i in dupb] + return resa, resb + + +def remove_fp_stracks(stracksa, n_frame=10): + remain = [] + for t in stracksa: + score_5 = t.score_list[-n_frame:] + score_5 = np.array(score_5, dtype=np.float32) + index = score_5 < 0.45 + num = np.sum(index) + if num < n_frame: + remain.append(t) + return remain diff --git a/tracking/docker-build-context/byte_track/tutorials/motr/eval.py b/tracking/docker-build-context/byte_track/tutorials/motr/eval.py new file mode 100644 index 0000000000000000000000000000000000000000..fbbb8e5600fb762fa586d898c4477ebb82eae374 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/motr/eval.py @@ -0,0 +1,470 @@ +# ------------------------------------------------------------------------ +# Copyright (c) 2021 megvii-model. All Rights Reserved. 
+# ------------------------------------------------------------------------ +# Modified from Deformable DETR (https://github.com/fundamentalvision/Deformable-DETR) +# Copyright (c) 2020 SenseTime. All Rights Reserved. +# ------------------------------------------------------------------------ +# Modified from DETR (https://github.com/facebookresearch/detr) +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +# ------------------------------------------------------------------------ + +""" + SORT: A Simple, Online and Realtime Tracker + Copyright (C) 2016-2020 Alex Bewley alex@bewley.ai + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + You should have received a copy of the GNU General Public License + along with this program. If not, see . +""" +from __future__ import print_function + +import os +import numpy as np +import random +import argparse +import torchvision.transforms.functional as F +import torch +import cv2 +from tqdm import tqdm +from pathlib import Path +from PIL import Image, ImageDraw +from models import build_model +from util.tool import load_model +from main import get_args_parser +from torch.nn.functional import interpolate +from typing import List +from util.evaluation import Evaluator +import motmetrics as mm +import shutil + +from detectron2.structures import Instances + +from tracker import BYTETracker + +np.random.seed(2020) + +COLORS_10 = [(144, 238, 144), (178, 34, 34), (221, 160, 221), (0, 255, 0), (0, 128, 0), (210, 105, 30), (220, 20, 60), + (192, 192, 192), (255, 228, 196), (50, 205, 50), (139, 0, 139), (100, 149, 237), (138, 43, 226), + (238, 130, 238), + (255, 0, 255), (0, 100, 0), (127, 255, 0), (255, 0, 255), (0, 0, 205), (255, 140, 0), (255, 239, 213), + (199, 21, 133), (124, 252, 0), (147, 112, 219), (106, 90, 205), (176, 196, 222), (65, 105, 225), + (173, 255, 47), + (255, 20, 147), (219, 112, 147), (186, 85, 211), (199, 21, 133), (148, 0, 211), (255, 99, 71), + (144, 238, 144), + (255, 255, 0), (230, 230, 250), (0, 0, 255), (128, 128, 0), (189, 183, 107), (255, 255, 224), + (128, 128, 128), + (105, 105, 105), (64, 224, 208), (205, 133, 63), (0, 128, 128), (72, 209, 204), (139, 69, 19), + (255, 245, 238), + (250, 240, 230), (152, 251, 152), (0, 255, 255), (135, 206, 235), (0, 191, 255), (176, 224, 230), + (0, 250, 154), + (245, 255, 250), (240, 230, 140), (245, 222, 179), (0, 139, 139), (143, 188, 143), (255, 0, 0), + (240, 128, 128), + (102, 205, 170), (60, 179, 113), (46, 139, 87), (165, 42, 42), (178, 34, 34), (175, 238, 238), + (255, 248, 220), + (218, 165, 32), (255, 250, 240), (253, 245, 230), (244, 164, 96), (210, 105, 30)] + + +def plot_one_box(x, img, color=None, label=None, score=None, line_thickness=None): + # Plots one bounding box on image img + + tl = line_thickness or round( + 0.002 * max(img.shape[0:2])) + 1 # line thickness + color = color or [random.randint(0, 255) for _ in range(3)] + c1, c2 = (int(x[0]), int(x[1])), (int(x[2]), int(x[3])) + cv2.rectangle(img, c1, c2, color, thickness=tl) + # if label: + # tf = max(tl - 1, 1) # font thickness + # t_size = cv2.getTextSize(label, 0, 
fontScale=tl / 3, thickness=tf)[0]
+    #     c2 = c1[0] + t_size[0], c1[1] - t_size[1] - 3
+    #     cv2.rectangle(img, c1, c2, color, -1)  # filled
+    #     cv2.putText(img,
+    #                 label, (c1[0], c1[1] - 2),
+    #                 0,
+    #                 tl / 3, [225, 255, 255],
+    #                 thickness=tf,
+    #                 lineType=cv2.LINE_AA)
+    # if score is not None:
+    #     cv2.putText(img, score, (c1[0], c1[1] + 30), 0, tl / 3, [225, 255, 255], thickness=tf, lineType=cv2.LINE_AA)
+    return img
+
+
+def draw_bboxes(ori_img, bbox, identities=None, offset=(0, 0), cvt_color=False):
+    if cvt_color:
+        ori_img = cv2.cvtColor(np.asarray(ori_img), cv2.COLOR_RGB2BGR)
+    img = ori_img
+    for i, box in enumerate(bbox):
+        x1, y1, x2, y2 = [int(i) for i in box[:4]]
+        x1 += offset[0]
+        x2 += offset[0]
+        y1 += offset[1]
+        y2 += offset[1]
+        if len(box) > 4:
+            score = '{:.2f}'.format(box[4])
+        else:
+            score = None
+        # box text and bar
+        id = int(identities[i]) if identities is not None else 0
+        color = COLORS_10[id % len(COLORS_10)]
+        label = '{:d}'.format(id)
+        # t_size = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, 2 , 2)[0]
+        img = plot_one_box([x1, y1, x2, y2], img, color, label, score=score)
+    return img
+
+
+def draw_points(img: np.ndarray, points: np.ndarray, color=(255, 255, 255)) -> np.ndarray:
+    assert len(points.shape) == 2 and points.shape[1] == 2, 'invalid points shape: {}'.format(points.shape)
+    for i, (x, y) in enumerate(points):
+        if i >= 300:
+            color = (0, 255, 0)
+        cv2.circle(img, (int(x), int(y)), 2, color=color, thickness=2)
+    return img
+
+
+def tensor_to_numpy(tensor: torch.Tensor) -> np.ndarray:
+    return tensor.detach().cpu().numpy()
+
+
+class Track(object):
+    track_cnt = 0
+
+    def __init__(self, box):
+        self.box = box
+        self.time_since_update = 0
+        self.id = Track.track_cnt
+        Track.track_cnt += 1
+        self.miss = 0
+
+    def miss_one_frame(self):
+        self.miss += 1
+
+    def clear_miss(self):
+        self.miss = 0
+
+    def update(self, box):
+        self.box = box
+        self.clear_miss()
+
+
+def write_results(filename, results):
+    save_format = '{frame},{id},{x1},{y1},{w},{h},{s},-1,-1,-1\n'
+    with open(filename, 'w') as f:
+        for frame_id, tlwhs, track_ids, scores in results:
+            for tlwh, track_id, score in zip(tlwhs, track_ids, scores):
+                if track_id < 0:
+                    continue
+                x1, y1, w, h = tlwh
+                line = save_format.format(frame=frame_id, id=track_id, x1=round(x1, 1), y1=round(y1, 1), w=round(w, 1), h=round(h, 1), s=round(score, 2))
+                f.write(line)
+    print('save results to {}'.format(filename))  # this module does not configure a logger, so use print
+
+
+class MOTR(object):
+    def __init__(self, max_age=1, min_hits=3, iou_threshold=0.3):
+        self.tracker = BYTETracker()
+
+    def update(self, dt_instances: Instances):
+        ret = []
+        for i in range(len(dt_instances)):
+            label = dt_instances.labels[i]
+            if label == 0:
+                id = dt_instances.obj_idxes[i]
+                box_with_score = np.concatenate([dt_instances.boxes[i], dt_instances.scores[i:i+1]], axis=-1)
+                ret.append(np.concatenate((box_with_score, [id + 1])).reshape(1, -1))  # +1 as MOT benchmark requires positive
+
+        if len(ret) > 0:
+            online_targets = self.tracker.update(np.concatenate(ret))
+
+            online_ret = []
+            for t in online_targets:
+                online_ret.append(np.array([t.tlbr[0], t.tlbr[1], t.tlbr[2], t.tlbr[3], t.score, t.track_id]).reshape(1, -1))
+
+            if len(online_ret) > 0:
+                return np.concatenate(online_ret)
+
+        return np.empty((0, 6))
+
+
+def load_label(label_path: str, img_size: tuple) -> dict:
+    labels0 = np.loadtxt(label_path, dtype=np.float32).reshape(-1, 6)
+    h, w = img_size
+    # Normalized cx,cy,w,h to pixel xyxy format
+    labels = labels0.copy()
+    labels[:, 2] = w * (labels0[:, 2] - labels0[:, 4] / 2)
+ labels[:, 3] = h * (labels0[:, 3] - labels0[:, 5] / 2) + labels[:, 4] = w * (labels0[:, 2] + labels0[:, 4] / 2) + labels[:, 5] = h * (labels0[:, 3] + labels0[:, 5] / 2) + targets = {'boxes': [], 'labels': [], 'area': []} + num_boxes = len(labels) + + visited_ids = set() + for label in labels[:num_boxes]: + obj_id = label[1] + if obj_id in visited_ids: + continue + visited_ids.add(obj_id) + targets['boxes'].append(label[2:6].tolist()) + targets['area'].append(label[4] * label[5]) + targets['labels'].append(0) + targets['boxes'] = np.asarray(targets['boxes']) + targets['area'] = np.asarray(targets['area']) + targets['labels'] = np.asarray(targets['labels']) + return targets + + +def filter_pub_det(res_file, pub_det_file, filter_iou=False): + frame_boxes = {} + with open(pub_det_file, 'r') as f: + lines = f.readlines() + for line in lines: + if len(line) == 0: + continue + elements = line.strip().split(',') + frame_id = int(elements[0]) + x1, y1, w, h = elements[2:6] + x1, y1, w, h = float(x1), float(y1), float(w), float(h) + x2 = x1 + w - 1 + y2 = y1 + h - 1 + if frame_id not in frame_boxes: + frame_boxes[frame_id] = [] + frame_boxes[frame_id].append([x1, y1, x2, y2]) + + for frame, boxes in frame_boxes.items(): + frame_boxes[frame] = np.array(boxes) + + ids = {} + num_filter_box = 0 + with open(res_file, 'r') as f: + lines = list(f.readlines()) + with open(res_file, 'w') as f: + for line in lines: + if len(line) == 0: + continue + elements = line.strip().split(',') + frame_id, obj_id = elements[:2] + frame_id = int(frame_id) + obj_id = int(obj_id) + x1, y1, w, h = elements[2:6] + x1, y1, w, h = float(x1), float(y1), float(w), float(h) + x2 = x1 + w - 1 + y2 = y1 + h - 1 + if obj_id not in ids: + # track initialization. + if frame_id not in frame_boxes: + num_filter_box += 1 + print("filter init box {} {}".format(frame_id, obj_id)) + continue + pub_dt_boxes = frame_boxes[frame_id] + dt_box = np.array([[x1, y1, x2, y2]]) + if filter_iou: + max_iou = bbox_iou(dt_box, pub_dt_boxes).max() + if max_iou < 0.5: + num_filter_box += 1 + print("filter init box {} {}".format(frame_id, obj_id)) + continue + else: + pub_dt_centers = (pub_dt_boxes[:, :2] + pub_dt_boxes[:, 2:4]) * 0.5 + x_inside = (dt_box[0, 0] <= pub_dt_centers[:, 0]) & (dt_box[0, 2] >= pub_dt_centers[:, 0]) + y_inside = (dt_box[0, 1] <= pub_dt_centers[:, 1]) & (dt_box[0, 3] >= pub_dt_centers[:, 1]) + center_inside: np.ndarray = x_inside & y_inside + if not center_inside.any(): + num_filter_box += 1 + print("filter init box {} {}".format(frame_id, obj_id)) + continue + print("save init track {} {}".format(frame_id, obj_id)) + ids[obj_id] = True + f.write(line) + + print("totally {} boxes are filtered.".format(num_filter_box)) + + +class Detector(object): + def __init__(self, args, model=None, seq_num=2): + + self.args = args + self.detr = model + + self.seq_num = seq_num + img_list = os.listdir(os.path.join(self.args.mot_path, self.seq_num, 'img1')) + img_list = [os.path.join(self.args.mot_path, self.seq_num, 'img1', _) for _ in img_list if + ('jpg' in _) or ('png' in _)] + + self.img_list = sorted(img_list) + self.img_len = len(self.img_list) + self.tr_tracker = MOTR() + + ''' + common settings + ''' + self.img_height = 800 + self.img_width = 1536 + self.mean = [0.485, 0.456, 0.406] + self.std = [0.229, 0.224, 0.225] + + self.save_path = os.path.join(self.args.output_dir, 'results/{}'.format(seq_num)) + os.makedirs(self.save_path, exist_ok=True) + + self.predict_path = os.path.join(self.args.output_dir, 'preds', self.seq_num) + 
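        # tracking results are written to <predict_path>/gt.txt and later scored against the sequence GT
+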
os.makedirs(self.predict_path, exist_ok=True) + if os.path.exists(os.path.join(self.predict_path, 'gt.txt')): + os.remove(os.path.join(self.predict_path, 'gt.txt')) + + def load_img_from_file(self,f_path): + label_path = f_path.replace('images', 'labels_with_ids').replace('.png', '.txt').replace('.jpg', '.txt') + cur_img = cv2.imread(f_path) + cur_img = cv2.cvtColor(cur_img, cv2.COLOR_BGR2RGB) + targets = load_label(label_path, cur_img.shape[:2]) if os.path.exists(label_path) else None + return cur_img, targets + + def init_img(self, img): + ori_img = img.copy() + self.seq_h, self.seq_w = img.shape[:2] + scale = self.img_height / min(self.seq_h, self.seq_w) + if max(self.seq_h, self.seq_w) * scale > self.img_width: + scale = self.img_width / max(self.seq_h, self.seq_w) + target_h = int(self.seq_h * scale) + target_w = int(self.seq_w * scale) + img = cv2.resize(img, (target_w, target_h)) + img = F.normalize(F.to_tensor(img), self.mean, self.std) + img = img.unsqueeze(0) + return img, ori_img + + @staticmethod + def filter_dt_by_score(dt_instances: Instances, prob_threshold: float) -> Instances: + keep = dt_instances.scores > prob_threshold + return dt_instances[keep] + + @staticmethod + def filter_dt_by_area(dt_instances: Instances, area_threshold: float) -> Instances: + wh = dt_instances.boxes[:, 2:4] - dt_instances.boxes[:, 0:2] + areas = wh[:, 0] * wh[:, 1] + keep = areas > area_threshold + return dt_instances[keep] + + @staticmethod + def write_results(txt_path, frame_id, bbox_xyxy, identities): + save_format = '{frame},{id},{x1},{y1},{w},{h},1,-1,-1,-1\n' + with open(txt_path, 'a') as f: + for xyxy, track_id in zip(bbox_xyxy, identities): + if track_id < 0 or track_id is None: + continue + x1, y1, x2, y2 = xyxy + w, h = x2 - x1, y2 - y1 + line = save_format.format(frame=int(frame_id), id=int(track_id), x1=x1, y1=y1, w=w, h=h) + f.write(line) + + def eval_seq(self): + data_root = os.path.join(self.args.mot_path) + result_filename = os.path.join(self.predict_path, 'gt.txt') + evaluator = Evaluator(data_root, self.seq_num) + accs = evaluator.eval_file(result_filename) + return accs + + @staticmethod + def visualize_img_with_bbox(img_path, img, dt_instances: Instances, ref_pts=None, gt_boxes=None): + img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR) + if dt_instances.has('scores'): + img_show = draw_bboxes(img, np.concatenate([dt_instances.boxes, dt_instances.scores.reshape(-1, 1)], axis=-1), dt_instances.obj_idxes) + else: + img_show = draw_bboxes(img, dt_instances.boxes, dt_instances.obj_idxes) +# if ref_pts is not None: +# img_show = draw_points(img_show, ref_pts) +# if gt_boxes is not None: +# img_show = draw_bboxes(img_show, gt_boxes, identities=np.ones((len(gt_boxes), )) * -1) + cv2.imwrite(img_path, img_show) + + def detect(self, prob_threshold=0.2, area_threshold=100, vis=False): + total_dts = 0 + track_instances = None + max_id = 0 + + # we only consider val split (second half images) + for i in tqdm(range((int(self.img_len / 2)), self.img_len)): +# for i in tqdm(range(0, self.img_len)): + img, targets = self.load_img_from_file(self.img_list[i]) + cur_img, ori_img = self.init_img(img) + + # track_instances = None + if track_instances is not None: + track_instances.remove('boxes') + track_instances.remove('labels') + + res = self.detr.inference_single_image(cur_img.cuda().float(), (self.seq_h, self.seq_w), track_instances) + track_instances = res['track_instances'] + max_id = max(max_id, track_instances.obj_idxes.max().item()) + + print("ref points.shape={}".format(res['ref_pts'].shape)) 
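+            # keep only the (x, y) part of the reference points for optional visualization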
+ all_ref_pts = tensor_to_numpy(res['ref_pts'][0, :, :2]) + dt_instances = track_instances.to(torch.device('cpu')) + + # filter det instances by score. + dt_instances = self.filter_dt_by_score(dt_instances, prob_threshold) + dt_instances = self.filter_dt_by_area(dt_instances, area_threshold) + + total_dts += len(dt_instances) + + if vis: + # for visual + cur_vis_img_path = os.path.join(self.save_path, 'frame_{:0>8d}.jpg'.format(i)) + gt_boxes = None + self.visualize_img_with_bbox(cur_vis_img_path, ori_img, dt_instances, ref_pts=all_ref_pts, gt_boxes=gt_boxes) + + tracker_outputs = self.tr_tracker.update(dt_instances) + + self.write_results(txt_path=os.path.join(self.predict_path, 'gt.txt'), + frame_id=(i + 1), + bbox_xyxy=tracker_outputs[:, :4], + identities=tracker_outputs[:, 5]) + print("totally {} dts max_id={}".format(total_dts, max_id)) + + +if __name__ == '__main__': + + parser = argparse.ArgumentParser('DETR training and evaluation script', parents=[get_args_parser()]) + args = parser.parse_args() + if args.output_dir: + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + + # load model and weights + detr, _, _ = build_model(args) + checkpoint = torch.load(args.resume, map_location='cpu') + detr = load_model(detr, args.resume) + detr = detr.cuda() + detr.eval() + +# seq_nums = ['ADL-Rundle-6', 'ETH-Bahnhof', 'KITTI-13', 'PETS09-S2L1', 'TUD-Stadtmitte', 'ADL-Rundle-8', 'KITTI-17', +# 'ETH-Pedcross2', 'ETH-Sunnyday', 'TUD-Campus', 'Venice-2'] + seq_nums = ['MOT17-02-SDP', + 'MOT17-04-SDP', + 'MOT17-05-SDP', + 'MOT17-09-SDP', + 'MOT17-10-SDP', + 'MOT17-11-SDP', + 'MOT17-13-SDP'] + accs = [] + seqs = [] + + for seq_num in seq_nums: + print("solve {}".format(seq_num)) + det = Detector(args, model=detr, seq_num=seq_num) + det.detect(vis=False) + accs.append(det.eval_seq()) + seqs.append(seq_num) + + metrics = mm.metrics.motchallenge_metrics + mh = mm.metrics.create() + summary = Evaluator.get_summary(accs, seqs, metrics) + strsummary = mm.io.render_summary( + summary, + formatters=mh.formatters, + namemap=mm.io.motchallenge_metric_names + ) + print(strsummary) + with open("eval_log.txt", 'a') as f: + print(strsummary, file=f) diff --git a/tracking/docker-build-context/byte_track/tutorials/motr/evaluation.py b/tracking/docker-build-context/byte_track/tutorials/motr/evaluation.py new file mode 100644 index 0000000000000000000000000000000000000000..2be0d672e160e78361f94916e319cd5ee5f2310d --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/motr/evaluation.py @@ -0,0 +1,207 @@ +# ------------------------------------------------------------------------ +# Copyright (c) 2021 megvii-model. All Rights Reserved. +# ------------------------------------------------------------------------ +# Modified from Deformable DETR (https://github.com/fundamentalvision/Deformable-DETR) +# Copyright (c) 2020 SenseTime. All Rights Reserved. +# ------------------------------------------------------------------------ +# Modified from DETR (https://github.com/facebookresearch/detr) +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +# ------------------------------------------------------------------------ + + +import os +import numpy as np +import copy +import motmetrics as mm +mm.lap.default_solver = 'lap' +import os +from typing import Dict +import numpy as np +import logging + +def read_results(filename, data_type: str, is_gt=False, is_ignore=False): + if data_type in ('mot', 'lab'): + read_fun = read_mot_results + else: + raise ValueError('Unknown data type: {}'.format(data_type)) + + return read_fun(filename, is_gt, is_ignore) + +# def read_mot_results(filename, is_gt, is_ignore): +# results_dict = dict() +# if os.path.isfile(filename): +# with open(filename, 'r') as f: +# for line in f.readlines(): +# linelist = line.split(',') +# if len(linelist) < 7: +# continue +# fid = int(linelist[0]) +# if fid < 1: +# continue +# results_dict.setdefault(fid, list()) + +# if is_gt: +# mark = int(float(linelist[6])) +# if mark == 0 : +# continue +# score = 1 +# elif is_ignore: +# score = 1 +# else: +# score = float(linelist[6]) + +# tlwh = tuple(map(float, linelist[2:6])) +# target_id = int(float(linelist[1])) +# results_dict[fid].append((tlwh, target_id, score)) + +# return results_dict + +def read_mot_results(filename, is_gt, is_ignore): + valid_labels = {1} + ignore_labels = {0, 2, 7, 8, 12} + results_dict = dict() + if os.path.isfile(filename): + with open(filename, 'r') as f: + for line in f.readlines(): + linelist = line.split(',') + if len(linelist) < 7: + continue + fid = int(linelist[0]) + if fid < 1: + continue + results_dict.setdefault(fid, list()) + + if is_gt: + if 'MOT16-' in filename or 'MOT17-' in filename: + label = int(float(linelist[7])) + mark = int(float(linelist[6])) + if mark == 0 or label not in valid_labels: + continue + score = 1 + elif is_ignore: + if 'MOT16-' in filename or 'MOT17-' in filename: + label = int(float(linelist[7])) + vis_ratio = float(linelist[8]) + if label not in ignore_labels and vis_ratio >= 0: + continue + elif 'MOT15' in filename: + label = int(float(linelist[6])) + if label not in ignore_labels: + continue + else: + continue + score = 1 + else: + score = float(linelist[6]) + + tlwh = tuple(map(float, linelist[2:6])) + target_id = int(linelist[1]) + + results_dict[fid].append((tlwh, target_id, score)) + + return results_dict + +def unzip_objs(objs): + if len(objs) > 0: + tlwhs, ids, scores = zip(*objs) + else: + tlwhs, ids, scores = [], [], [] + tlwhs = np.asarray(tlwhs, dtype=float).reshape(-1, 4) + return tlwhs, ids, scores + + +class Evaluator(object): + def __init__(self, data_root, seq_name, data_type='mot'): + + self.data_root = data_root + self.seq_name = seq_name + self.data_type = data_type + + self.load_annotations() + self.reset_accumulator() + + def load_annotations(self): + assert self.data_type == 'mot' + + gt_filename = os.path.join(self.data_root, self.seq_name, 'gt', 'gt.txt') + self.gt_frame_dict = read_results(gt_filename, self.data_type, is_gt=True) + self.gt_ignore_frame_dict = read_results(gt_filename, self.data_type, is_ignore=True) + + def reset_accumulator(self): + self.acc = mm.MOTAccumulator(auto_id=True) + + def eval_frame(self, frame_id, trk_tlwhs, trk_ids, rtn_events=False): + # results + trk_tlwhs = np.copy(trk_tlwhs) + trk_ids = np.copy(trk_ids) + + # gts + gt_objs = self.gt_frame_dict.get(frame_id, []) + gt_tlwhs, gt_ids = unzip_objs(gt_objs)[:2] + + # ignore boxes + ignore_objs = self.gt_ignore_frame_dict.get(frame_id, []) + ignore_tlwhs = unzip_objs(ignore_objs)[0] + # remove ignored results + keep = 
np.ones(len(trk_tlwhs), dtype=bool) + iou_distance = mm.distances.iou_matrix(ignore_tlwhs, trk_tlwhs, max_iou=0.5) + if len(iou_distance) > 0: + match_is, match_js = mm.lap.linear_sum_assignment(iou_distance) + match_is, match_js = map(lambda a: np.asarray(a, dtype=int), [match_is, match_js]) + match_ious = iou_distance[match_is, match_js] + + match_js = np.asarray(match_js, dtype=int) + match_js = match_js[np.logical_not(np.isnan(match_ious))] + keep[match_js] = False + trk_tlwhs = trk_tlwhs[keep] + trk_ids = trk_ids[keep] + + # get distance matrix + iou_distance = mm.distances.iou_matrix(gt_tlwhs, trk_tlwhs, max_iou=0.5) + + # acc + self.acc.update(gt_ids, trk_ids, iou_distance) + + if rtn_events and iou_distance.size > 0 and hasattr(self.acc, 'last_mot_events'): + events = self.acc.last_mot_events # only supported by https://github.com/longcw/py-motmetrics + else: + events = None + return events + + def eval_file(self, filename): + self.reset_accumulator() + + result_frame_dict = read_results(filename, self.data_type, is_gt=False) + #frames = sorted(list(set(self.gt_frame_dict.keys()) | set(result_frame_dict.keys()))) + frames = sorted(list(set(result_frame_dict.keys()))) + + for frame_id in frames: + trk_objs = result_frame_dict.get(frame_id, []) + trk_tlwhs, trk_ids = unzip_objs(trk_objs)[:2] + self.eval_frame(frame_id, trk_tlwhs, trk_ids, rtn_events=False) + + return self.acc + + @staticmethod + def get_summary(accs, names, metrics=('mota', 'num_switches', 'idp', 'idr', 'idf1', 'precision', 'recall')): + names = copy.deepcopy(names) + if metrics is None: + metrics = mm.metrics.motchallenge_metrics + metrics = copy.deepcopy(metrics) + + mh = mm.metrics.create() + summary = mh.compute_many( + accs, + metrics=metrics, + names=names, + generate_overall=True + ) + + return summary + + @staticmethod + def save_summary(summary, filename): + import pandas as pd + writer = pd.ExcelWriter(filename) + summary.to_excel(writer) + writer.save() diff --git a/tracking/docker-build-context/byte_track/tutorials/motr/joint.py b/tracking/docker-build-context/byte_track/tutorials/motr/joint.py new file mode 100644 index 0000000000000000000000000000000000000000..65800a9ceff315f8733adbe967c9d8c0daa0f0e5 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/motr/joint.py @@ -0,0 +1,292 @@ +# ------------------------------------------------------------------------ +# Copyright (c) 2021 megvii-model. All Rights Reserved. +# ------------------------------------------------------------------------ +# Modified from Deformable DETR (https://github.com/fundamentalvision/Deformable-DETR) +# Copyright (c) 2020 SenseTime. All Rights Reserved. +# ------------------------------------------------------------------------ +# Modified from DETR (https://github.com/facebookresearch/detr) +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +# ------------------------------------------------------------------------ + +""" +MOT dataset which returns image_id for evaluation. 
+""" +from pathlib import Path +import cv2 +import numpy as np +import torch +import torch.utils.data +import os.path as osp +from PIL import Image, ImageDraw +import copy +import datasets.transforms as T +from models.structures import Instances + + +class DetMOTDetection: + def __init__(self, args, data_txt_path: str, seqs_folder, dataset2transform): + self.args = args + self.dataset2transform = dataset2transform + self.num_frames_per_batch = max(args.sampler_lengths) + self.sample_mode = args.sample_mode + self.sample_interval = args.sample_interval + self.vis = args.vis + self.video_dict = {} + + with open(data_txt_path, 'r') as file: + self.img_files = file.readlines() + self.img_files = [osp.join(seqs_folder, x.strip()) for x in self.img_files] + self.img_files = list(filter(lambda x: len(x) > 0, self.img_files)) + + self.label_files = [(x.replace('images', 'labels_with_ids').replace('.png', '.txt').replace('.jpg', '.txt')) + for x in self.img_files] + # The number of images per sample: 1 + (num_frames - 1) * interval. + # The number of valid samples: num_images - num_image_per_sample + 1. + self.item_num = len(self.img_files) - (self.num_frames_per_batch - 1) * self.sample_interval + + self._register_videos() + + # video sampler. + self.sampler_steps: list = args.sampler_steps + self.lengths: list = args.sampler_lengths + print("sampler_steps={} lenghts={}".format(self.sampler_steps, self.lengths)) + if self.sampler_steps is not None and len(self.sampler_steps) > 0: + # Enable sampling length adjustment. + assert len(self.lengths) > 0 + assert len(self.lengths) == len(self.sampler_steps) + 1 + for i in range(len(self.sampler_steps) - 1): + assert self.sampler_steps[i] < self.sampler_steps[i + 1] + self.item_num = len(self.img_files) - (self.lengths[-1] - 1) * self.sample_interval + self.period_idx = 0 + self.num_frames_per_batch = self.lengths[0] + self.current_epoch = 0 + + def _register_videos(self): + for label_name in self.label_files: + video_name = '/'.join(label_name.split('/')[:-1]) + if video_name not in self.video_dict: + print("register {}-th video: {} ".format(len(self.video_dict) + 1, video_name)) + self.video_dict[video_name] = len(self.video_dict) + # assert len(self.video_dict) <= 300 + + def set_epoch(self, epoch): + self.current_epoch = epoch + if self.sampler_steps is None or len(self.sampler_steps) == 0: + # fixed sampling length. + return + + for i in range(len(self.sampler_steps)): + if epoch >= self.sampler_steps[i]: + self.period_idx = i + 1 + print("set epoch: epoch {} period_idx={}".format(epoch, self.period_idx)) + self.num_frames_per_batch = self.lengths[self.period_idx] + + def step_epoch(self): + # one epoch finishes. 
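+        # advancing the epoch may bump period_idx, so longer clips can be sampled from now on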
+ print("Dataset: epoch {} finishes".format(self.current_epoch)) + self.set_epoch(self.current_epoch + 1) + + @staticmethod + def _targets_to_instances(targets: dict, img_shape) -> Instances: + gt_instances = Instances(tuple(img_shape)) + gt_instances.boxes = targets['boxes'] + gt_instances.labels = targets['labels'] + gt_instances.obj_ids = targets['obj_ids'] + gt_instances.area = targets['area'] + return gt_instances + + def _pre_single_frame(self, idx: int): + img_path = self.img_files[idx] + label_path = self.label_files[idx] + if 'crowdhuman' in img_path: + img_path = img_path.replace('.jpg', '.png') + img = Image.open(img_path) + targets = {} + w, h = img._size + assert w > 0 and h > 0, "invalid image {} with shape {} {}".format(img_path, w, h) + if osp.isfile(label_path): + labels0 = np.loadtxt(label_path, dtype=np.float32).reshape(-1, 6) + + # normalized cewh to pixel xyxy format + labels = labels0.copy() + labels[:, 2] = w * (labels0[:, 2] - labels0[:, 4] / 2) + labels[:, 3] = h * (labels0[:, 3] - labels0[:, 5] / 2) + labels[:, 4] = w * (labels0[:, 2] + labels0[:, 4] / 2) + labels[:, 5] = h * (labels0[:, 3] + labels0[:, 5] / 2) + else: + raise ValueError('invalid label path: {}'.format(label_path)) + video_name = '/'.join(label_path.split('/')[:-1]) + obj_idx_offset = self.video_dict[video_name] * 1000000 # 1000000 unique ids is enough for a video. + if 'crowdhuman' in img_path: + targets['dataset'] = 'CrowdHuman' + elif 'MOT17' in img_path: + targets['dataset'] = 'MOT17' + else: + raise NotImplementedError() + targets['boxes'] = [] + targets['area'] = [] + targets['iscrowd'] = [] + targets['labels'] = [] + targets['obj_ids'] = [] + targets['image_id'] = torch.as_tensor(idx) + targets['size'] = torch.as_tensor([h, w]) + targets['orig_size'] = torch.as_tensor([h, w]) + for label in labels: + targets['boxes'].append(label[2:6].tolist()) + targets['area'].append(label[4] * label[5]) + targets['iscrowd'].append(0) + targets['labels'].append(0) + obj_id = label[1] + obj_idx_offset if label[1] >= 0 else label[1] + targets['obj_ids'].append(obj_id) # relative id + + targets['area'] = torch.as_tensor(targets['area']) + targets['iscrowd'] = torch.as_tensor(targets['iscrowd']) + targets['labels'] = torch.as_tensor(targets['labels']) + targets['obj_ids'] = torch.as_tensor(targets['obj_ids']) + targets['boxes'] = torch.as_tensor(targets['boxes'], dtype=torch.float32).reshape(-1, 4) +# targets['boxes'][:, 0::2].clamp_(min=0, max=w) +# targets['boxes'][:, 1::2].clamp_(min=0, max=h) + return img, targets + + def _get_sample_range(self, start_idx): + + # take default sampling method for normal dataset. 
+ assert self.sample_mode in ['fixed_interval', 'random_interval'], 'invalid sample mode: {}'.format(self.sample_mode) + if self.sample_mode == 'fixed_interval': + sample_interval = self.sample_interval + elif self.sample_mode == 'random_interval': + sample_interval = np.random.randint(1, self.sample_interval + 1) + default_range = start_idx, start_idx + (self.num_frames_per_batch - 1) * sample_interval + 1, sample_interval + return default_range + + def pre_continuous_frames(self, start, end, interval=1): + targets = [] + images = [] + for i in range(start, end, interval): + img_i, targets_i = self._pre_single_frame(i) + images.append(img_i) + targets.append(targets_i) + return images, targets + + def __getitem__(self, idx): + sample_start, sample_end, sample_interval = self._get_sample_range(idx) + images, targets = self.pre_continuous_frames(sample_start, sample_end, sample_interval) + data = {} + dataset_name = targets[0]['dataset'] + transform = self.dataset2transform[dataset_name] + if transform is not None: + images, targets = transform(images, targets) + gt_instances = [] + for img_i, targets_i in zip(images, targets): + gt_instances_i = self._targets_to_instances(targets_i, img_i.shape[1:3]) + gt_instances.append(gt_instances_i) + data.update({ + 'imgs': images, + 'gt_instances': gt_instances, + }) + if self.args.vis: + data['ori_img'] = [target_i['ori_img'] for target_i in targets] + return data + + def __len__(self): + return self.item_num + + +class DetMOTDetectionValidation(DetMOTDetection): + def __init__(self, args, seqs_folder, dataset2transform): + args.data_txt_path = args.val_data_txt_path + super().__init__(args, seqs_folder, dataset2transform) + + + +def make_transforms_for_mot17(image_set, args=None): + + normalize = T.MotCompose([ + T.MotToTensor(), + T.MotNormalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) + ]) + scales = [608, 640, 672, 704, 736, 768, 800, 832, 864, 896, 928, 960, 992] + + if image_set == 'train': + return T.MotCompose([ + T.MotRandomHorizontalFlip(), + T.MotRandomSelect( + T.MotRandomResize(scales, max_size=1536), + T.MotCompose([ + T.MotRandomResize([400, 500, 600]), + T.FixedMotRandomCrop(384, 600), + T.MotRandomResize(scales, max_size=1536), + ]) + ), + normalize, + ]) + + if image_set == 'val': + return T.MotCompose([ + T.MotRandomResize([800], max_size=1333), + normalize, + ]) + + raise ValueError(f'unknown {image_set}') + + +def make_transforms_for_crowdhuman(image_set, args=None): + + normalize = T.MotCompose([ + T.MotToTensor(), + T.MotNormalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) + ]) + scales = [608, 640, 672, 704, 736, 768, 800, 832, 864, 896, 928, 960, 992] + + if image_set == 'train': + return T.MotCompose([ + T.MotRandomHorizontalFlip(), + T.FixedMotRandomShift(bs=1), + T.MotRandomSelect( + T.MotRandomResize(scales, max_size=1536), + T.MotCompose([ + T.MotRandomResize([400, 500, 600]), + T.FixedMotRandomCrop(384, 600), + T.MotRandomResize(scales, max_size=1536), + ]) + ), + normalize, + + ]) + + if image_set == 'val': + return T.MotCompose([ + T.MotRandomResize([800], max_size=1333), + normalize, + ]) + + raise ValueError(f'unknown {image_set}') + + +def build_dataset2transform(args, image_set): + mot17_train = make_transforms_for_mot17('train', args) + mot17_test = make_transforms_for_mot17('val', args) + + crowdhuman_train = make_transforms_for_crowdhuman('train', args) + dataset2transform_train = {'MOT17': mot17_train, 'CrowdHuman': crowdhuman_train} + dataset2transform_val = {'MOT17': mot17_test, 'CrowdHuman': 
mot17_test} + if image_set == 'train': + return dataset2transform_train + elif image_set == 'val': + return dataset2transform_val + else: + raise NotImplementedError() + + +def build(image_set, args): + root = Path(args.mot_path) + assert root.exists(), f'provided MOT path {root} does not exist' + dataset2transform = build_dataset2transform(args, image_set) + if image_set == 'train': + data_txt_path = args.data_txt_path_train + dataset = DetMOTDetection(args, data_txt_path=data_txt_path, seqs_folder=root, dataset2transform=dataset2transform) + if image_set == 'val': + data_txt_path = args.data_txt_path_val + dataset = DetMOTDetection(args, data_txt_path=data_txt_path, seqs_folder=root, dataset2transform=dataset2transform) + return dataset + diff --git a/tracking/docker-build-context/byte_track/tutorials/motr/mot_online/basetrack.py b/tracking/docker-build-context/byte_track/tutorials/motr/mot_online/basetrack.py new file mode 100644 index 0000000000000000000000000000000000000000..4fe2233607f6d4ed28b11a0ae6c0303c8ca19098 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/motr/mot_online/basetrack.py @@ -0,0 +1,52 @@ +import numpy as np +from collections import OrderedDict + + +class TrackState(object): + New = 0 + Tracked = 1 + Lost = 2 + Removed = 3 + + +class BaseTrack(object): + _count = 0 + + track_id = 0 + is_activated = False + state = TrackState.New + + history = OrderedDict() + features = [] + curr_feature = None + score = 0 + start_frame = 0 + frame_id = 0 + time_since_update = 0 + + # multi-camera + location = (np.inf, np.inf) + + @property + def end_frame(self): + return self.frame_id + + @staticmethod + def next_id(): + BaseTrack._count += 1 + return BaseTrack._count + + def activate(self, *args): + raise NotImplementedError + + def predict(self): + raise NotImplementedError + + def update(self, *args, **kwargs): + raise NotImplementedError + + def mark_lost(self): + self.state = TrackState.Lost + + def mark_removed(self): + self.state = TrackState.Removed diff --git a/tracking/docker-build-context/byte_track/tutorials/motr/mot_online/kalman_filter.py b/tracking/docker-build-context/byte_track/tutorials/motr/mot_online/kalman_filter.py new file mode 100644 index 0000000000000000000000000000000000000000..82111a336d4d94bece171f2f95d9147bb7456285 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/motr/mot_online/kalman_filter.py @@ -0,0 +1,252 @@ +# vim: expandtab:ts=4:sw=4 +import numpy as np +import scipy.linalg + +""" +Table for the 0.95 quantile of the chi-square distribution with N degrees of +freedom (contains values for N=1, ..., 9). Taken from MATLAB/Octave's chi2inv +function and used as Mahalanobis gating threshold. +""" +chi2inv95 = { + 1: 3.8415, + 2: 5.9915, + 3: 7.8147, + 4: 9.4877, + 5: 11.070, + 6: 12.592, + 7: 14.067, + 8: 15.507, + 9: 16.919} + + +class KalmanFilter(object): + """ + A simple Kalman filter for tracking bounding boxes in image space. + The 8-dimensional state space + x, y, a, h, vx, vy, va, vh + contains the bounding box center position (x, y), aspect ratio a, height h, + and their respective velocities. + Object motion follows a constant velocity model. The bounding box location + (x, y, a, h) is taken as direct observation of the state space (linear + observation model). + """ + + def __init__(self): + ndim, dt = 4, 1. + + # Create Kalman filter model matrices. 
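+        # constant-velocity transition: each of (x, y, a, h) gains dt * its velocity per step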
+ self._motion_mat = np.eye(2 * ndim, 2 * ndim) + for i in range(ndim): + self._motion_mat[i, ndim + i] = dt + self._update_mat = np.eye(ndim, 2 * ndim) + + # Motion and observation uncertainty are chosen relative to the current + # state estimate. These weights control the amount of uncertainty in + # the model. This is a bit hacky. + self._std_weight_position = 1. / 20 + self._std_weight_velocity = 1. / 160 + + def initiate(self, measurement): + """Create track from unassociated measurement. + Parameters + ---------- + measurement : ndarray + Bounding box coordinates (x, y, a, h) with center position (x, y), + aspect ratio a, and height h. + Returns + ------- + (ndarray, ndarray) + Returns the mean vector (8 dimensional) and covariance matrix (8x8 + dimensional) of the new track. Unobserved velocities are initialized + to 0 mean. + """ + mean_pos = measurement + mean_vel = np.zeros_like(mean_pos) + mean = np.r_[mean_pos, mean_vel] + + std = [ + 2 * self._std_weight_position * measurement[3], + 2 * self._std_weight_position * measurement[3], + 1e-2, + 2 * self._std_weight_position * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 1e-5, + 10 * self._std_weight_velocity * measurement[3]] + covariance = np.diag(np.square(std)) + return mean, covariance + + def predict(self, mean, covariance): + """Run Kalman filter prediction step. + Parameters + ---------- + mean : ndarray + The 8 dimensional mean vector of the object state at the previous + time step. + covariance : ndarray + The 8x8 dimensional covariance matrix of the object state at the + previous time step. + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + """ + std_pos = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-2, + self._std_weight_position * mean[3]] + std_vel = [ + self._std_weight_velocity * mean[3], + self._std_weight_velocity * mean[3], + 1e-5, + self._std_weight_velocity * mean[3]] + motion_cov = np.diag(np.square(np.r_[std_pos, std_vel])) + + #mean = np.dot(self._motion_mat, mean) + mean = np.dot(mean, self._motion_mat.T) + covariance = np.linalg.multi_dot(( + self._motion_mat, covariance, self._motion_mat.T)) + motion_cov + + return mean, covariance + + def project(self, mean, covariance): + """Project state distribution to measurement space. + Parameters + ---------- + mean : ndarray + The state's mean vector (8 dimensional array). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + Returns + ------- + (ndarray, ndarray) + Returns the projected mean and covariance matrix of the given state + estimate. + """ + std = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-1, + self._std_weight_position * mean[3]] + innovation_cov = np.diag(np.square(std)) + + mean = np.dot(self._update_mat, mean) + covariance = np.linalg.multi_dot(( + self._update_mat, covariance, self._update_mat.T)) + return mean, covariance + innovation_cov + + def multi_predict(self, mean, covariance): + """Run Kalman filter prediction step (Vectorized version). + Parameters + ---------- + mean : ndarray + The Nx8 dimensional mean matrix of the object states at the previous + time step. + covariance : ndarray + The Nx8x8 dimensional covariance matrics of the object states at the + previous time step. 
+ Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + """ + std_pos = [ + self._std_weight_position * mean[:, 3], + self._std_weight_position * mean[:, 3], + 1e-2 * np.ones_like(mean[:, 3]), + self._std_weight_position * mean[:, 3]] + std_vel = [ + self._std_weight_velocity * mean[:, 3], + self._std_weight_velocity * mean[:, 3], + 1e-5 * np.ones_like(mean[:, 3]), + self._std_weight_velocity * mean[:, 3]] + sqr = np.square(np.r_[std_pos, std_vel]).T + + motion_cov = [] + for i in range(len(mean)): + motion_cov.append(np.diag(sqr[i])) + motion_cov = np.asarray(motion_cov) + + mean = np.dot(mean, self._motion_mat.T) + left = np.dot(self._motion_mat, covariance).transpose((1, 0, 2)) + covariance = np.dot(left, self._motion_mat.T) + motion_cov + + return mean, covariance + + def update(self, mean, covariance, measurement): + """Run Kalman filter correction step. + Parameters + ---------- + mean : ndarray + The predicted state's mean vector (8 dimensional). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + measurement : ndarray + The 4 dimensional measurement vector (x, y, a, h), where (x, y) + is the center position, a the aspect ratio, and h the height of the + bounding box. + Returns + ------- + (ndarray, ndarray) + Returns the measurement-corrected state distribution. + """ + projected_mean, projected_cov = self.project(mean, covariance) + + chol_factor, lower = scipy.linalg.cho_factor( + projected_cov, lower=True, check_finite=False) + kalman_gain = scipy.linalg.cho_solve( + (chol_factor, lower), np.dot(covariance, self._update_mat.T).T, + check_finite=False).T + innovation = measurement - projected_mean + + new_mean = mean + np.dot(innovation, kalman_gain.T) + new_covariance = covariance - np.linalg.multi_dot(( + kalman_gain, projected_cov, kalman_gain.T)) + return new_mean, new_covariance + + def gating_distance(self, mean, covariance, measurements, + only_position=False, metric='maha'): + """Compute gating distance between state distribution and measurements. + A suitable distance threshold can be obtained from `chi2inv95`. If + `only_position` is False, the chi-square distribution has 4 degrees of + freedom, otherwise 2. + Parameters + ---------- + mean : ndarray + Mean vector over the state distribution (8 dimensional). + covariance : ndarray + Covariance of the state distribution (8x8 dimensional). + measurements : ndarray + An Nx4 dimensional matrix of N measurements, each in + format (x, y, a, h) where (x, y) is the bounding box center + position, a the aspect ratio, and h the height. + only_position : Optional[bool] + If True, distance computation is done with respect to the bounding + box center position only. + Returns + ------- + ndarray + Returns an array of length N, where the i-th element contains the + squared Mahalanobis distance between (mean, covariance) and + `measurements[i]`. 
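+        A minimal gating sketch, assuming `measurements` is an Nx4 array of
+        (x, y, a, h) candidates (illustrative values only)::
+            kf = KalmanFilter()
+            mean, cov = kf.initiate(np.array([320., 240., 0.5, 120.]))
+            d2 = kf.gating_distance(mean, cov, measurements)  # squared Mahalanobis
+            keep = d2 <= chi2inv95[4]  # 4 DOF since only_position=False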
+ """ + mean, covariance = self.project(mean, covariance) + if only_position: + mean, covariance = mean[:2], covariance[:2, :2] + measurements = measurements[:, :2] + + d = measurements - mean + if metric == 'gaussian': + return np.sum(d * d, axis=1) + elif metric == 'maha': + cholesky_factor = np.linalg.cholesky(covariance) + z = scipy.linalg.solve_triangular( + cholesky_factor, d.T, lower=True, check_finite=False, + overwrite_b=True) + squared_maha = np.sum(z * z, axis=0) + return squared_maha + else: + raise ValueError('invalid distance metric') diff --git a/tracking/docker-build-context/byte_track/tutorials/motr/mot_online/matching.py b/tracking/docker-build-context/byte_track/tutorials/motr/mot_online/matching.py new file mode 100644 index 0000000000000000000000000000000000000000..cc7abab60f86e5e84994071fc0ec0dd2f89c0377 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/motr/mot_online/matching.py @@ -0,0 +1,196 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import lap +import numpy as np +import scipy +from cython_bbox import bbox_overlaps as bbox_ious +from scipy.spatial.distance import cdist + +chi2inv95 = { + 1: 3.8415, + 2: 5.9915, + 3: 7.8147, + 4: 9.4877, + 5: 11.070, + 6: 12.592, + 7: 14.067, + 8: 15.507, + 9: 16.919} + +def merge_matches(m1, m2, shape): + O,P,Q = shape + m1 = np.asarray(m1) + m2 = np.asarray(m2) + + M1 = scipy.sparse.coo_matrix((np.ones(len(m1)), (m1[:, 0], m1[:, 1])), shape=(O, P)) + M2 = scipy.sparse.coo_matrix((np.ones(len(m2)), (m2[:, 0], m2[:, 1])), shape=(P, Q)) + + mask = M1*M2 + match = mask.nonzero() + match = list(zip(match[0], match[1])) + unmatched_O = tuple(set(range(O)) - set([i for i, j in match])) + unmatched_Q = tuple(set(range(Q)) - set([j for i, j in match])) + + return match, unmatched_O, unmatched_Q + + +def _indices_to_matches(cost_matrix, indices, thresh): + matched_cost = cost_matrix[tuple(zip(*indices))] + matched_mask = (matched_cost <= thresh) + + matches = indices[matched_mask] + unmatched_a = tuple(set(range(cost_matrix.shape[0])) - set(matches[:, 0])) + unmatched_b = tuple(set(range(cost_matrix.shape[1])) - set(matches[:, 1])) + + return matches, unmatched_a, unmatched_b + + +def linear_assignment(cost_matrix, thresh): + if cost_matrix.size == 0: + return np.empty((0, 2), dtype=int), tuple(range(cost_matrix.shape[0])), tuple(range(cost_matrix.shape[1])) + matches, unmatched_a, unmatched_b = [], [], [] + cost, x, y = lap.lapjv(cost_matrix, extend_cost=True, cost_limit=thresh) + for ix, mx in enumerate(x): + if mx >= 0: + matches.append([ix, mx]) + unmatched_a = np.where(x < 0)[0] + unmatched_b = np.where(y < 0)[0] + matches = np.asarray(matches) + return matches, unmatched_a, unmatched_b + + +def ious(atlbrs, btlbrs): + """ + Compute cost based on IoU + :type atlbrs: list[tlbr] | np.ndarray + :type atlbrs: list[tlbr] | np.ndarray + :rtype ious np.ndarray + """ + ious = np.zeros((len(atlbrs), len(btlbrs)), dtype=np.float) + if ious.size == 0: + return ious + + ious = bbox_ious( + np.ascontiguousarray(atlbrs, dtype=np.float), + np.ascontiguousarray(btlbrs, dtype=np.float) + ) + + return ious + + +def iou_distance(atracks, btracks): + """ + Compute cost based on IoU + :type atracks: list[STrack] + :type btracks: list[STrack] + :rtype cost_matrix np.ndarray + """ + + if (len(atracks)>0 and isinstance(atracks[0], np.ndarray)) or (len(btracks) > 0 and isinstance(btracks[0], np.ndarray)): + atlbrs = atracks + btlbrs = btracks + else: + atlbrs = 
[track.tlbr for track in atracks] + btlbrs = [track.tlbr for track in btracks] + _ious = ious(atlbrs, btlbrs) + cost_matrix = 1 - _ious + + return cost_matrix + +def embedding_distance(tracks, detections, metric='cosine'): + """ + :param tracks: list[STrack] + :param detections: list[BaseTrack] + :param metric: + :return: cost_matrix np.ndarray + """ + + cost_matrix = np.zeros((len(tracks), len(detections)), dtype=np.float) + if cost_matrix.size == 0: + return cost_matrix + det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float) + #for i, track in enumerate(tracks): + #cost_matrix[i, :] = np.maximum(0.0, cdist(track.smooth_feat.reshape(1,-1), det_features, metric)) + track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float) + cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric)) # Nomalized features + return cost_matrix + +def embedding_distance2(tracks, detections, metric='cosine'): + """ + :param tracks: list[STrack] + :param detections: list[BaseTrack] + :param metric: + :return: cost_matrix np.ndarray + """ + + cost_matrix = np.zeros((len(tracks), len(detections)), dtype=np.float) + if cost_matrix.size == 0: + return cost_matrix + det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float) + #for i, track in enumerate(tracks): + #cost_matrix[i, :] = np.maximum(0.0, cdist(track.smooth_feat.reshape(1,-1), det_features, metric)) + track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float) + cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric)) # Nomalized features + track_features = np.asarray([track.features[0] for track in tracks], dtype=np.float) + cost_matrix2 = np.maximum(0.0, cdist(track_features, det_features, metric)) # Nomalized features + track_features = np.asarray([track.features[len(track.features)-1] for track in tracks], dtype=np.float) + cost_matrix3 = np.maximum(0.0, cdist(track_features, det_features, metric)) # Nomalized features + for row in range(len(cost_matrix)): + cost_matrix[row] = (cost_matrix[row]+cost_matrix2[row]+cost_matrix3[row])/3 + return cost_matrix + + +def vis_id_feature_A_distance(tracks, detections, metric='cosine'): + track_features = [] + det_features = [] + leg1 = len(tracks) + leg2 = len(detections) + cost_matrix = np.zeros((leg1, leg2), dtype=np.float) + cost_matrix_det = np.zeros((leg1, leg2), dtype=np.float) + cost_matrix_track = np.zeros((leg1, leg2), dtype=np.float) + det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float) + track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float) + if leg2 != 0: + cost_matrix_det = np.maximum(0.0, cdist(det_features, det_features, metric)) + if leg1 != 0: + cost_matrix_track = np.maximum(0.0, cdist(track_features, track_features, metric)) + if cost_matrix.size == 0: + return track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track + cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric)) + if leg1 > 10: + leg1 = 10 + tracks = tracks[:10] + if leg2 > 10: + leg2 = 10 + detections = detections[:10] + det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float) + track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float) + return track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track + +def gate_cost_matrix(kf, cost_matrix, tracks, detections, only_position=False): + if cost_matrix.size == 0: + 
return cost_matrix + gating_dim = 2 if only_position else 4 + gating_threshold = chi2inv95[gating_dim] + measurements = np.asarray([det.to_xyah() for det in detections]) + for row, track in enumerate(tracks): + gating_distance = kf.gating_distance( + track.mean, track.covariance, measurements, only_position) + cost_matrix[row, gating_distance > gating_threshold] = np.inf + return cost_matrix + + +def fuse_motion(kf, cost_matrix, tracks, detections, only_position=False, lambda_=0.98): + if cost_matrix.size == 0: + return cost_matrix + gating_dim = 2 if only_position else 4 + gating_threshold = chi2inv95[gating_dim] + measurements = np.asarray([det.to_xyah() for det in detections]) + for row, track in enumerate(tracks): + gating_distance = kf.gating_distance( + track.mean, track.covariance, measurements, only_position, metric='maha') + cost_matrix[row, gating_distance > gating_threshold] = np.inf + cost_matrix[row] = lambda_ * cost_matrix[row] + (1 - lambda_) * gating_distance + return cost_matrix diff --git a/tracking/docker-build-context/byte_track/tutorials/motr/motr.py b/tracking/docker-build-context/byte_track/tutorials/motr/motr.py new file mode 100644 index 0000000000000000000000000000000000000000..3e24b1d26318cd7d33a473198d743e9a9a69548f --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/motr/motr.py @@ -0,0 +1,676 @@ +# ------------------------------------------------------------------------ +# Copyright (c) 2021 megvii-model. All Rights Reserved. +# ------------------------------------------------------------------------ +# Modified from Deformable DETR (https://github.com/fundamentalvision/Deformable-DETR) +# Copyright (c) 2020 SenseTime. All Rights Reserved. +# ------------------------------------------------------------------------ +# Modified from DETR (https://github.com/facebookresearch/detr) +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +# ------------------------------------------------------------------------ + +""" +DETR model and criterion classes. +""" +import copy +import math +import numpy as np +import torch +import torch.nn.functional as F +from torch import nn, Tensor +from typing import List + +from util import box_ops +from util.misc import (NestedTensor, nested_tensor_from_tensor_list, + accuracy, get_world_size, interpolate, get_rank, + is_dist_avail_and_initialized, inverse_sigmoid) + +from models.structures import Instances, Boxes, pairwise_iou, matched_boxlist_iou + +from .backbone import build_backbone +from .matcher import build_matcher +from .deformable_transformer_plus import build_deforamble_transformer +from .qim import build as build_query_interaction_layer +from .memory_bank import build_memory_bank +from .deformable_detr import SetCriterion, MLP +from .segmentation import sigmoid_focal_loss + + +class ClipMatcher(SetCriterion): + def __init__(self, num_classes, + matcher, + weight_dict, + losses): + """ Create the criterion. + Parameters: + num_classes: number of object categories, omitting the special no-object category + matcher: module able to compute a matching between targets and proposals + weight_dict: dict containing as key the names of the losses and as values their relative weight. + eos_coef: relative classification weight applied to the no-object category + losses: list of all the losses to be applied. See get_loss for list of available losses. 
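+        Note: this subclass always scores classification with sigmoid focal
+        loss (`self.focal_loss` is set to True below), so the `eos_coef`
+        mentioned above is not consumed here.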
+ """ + super().__init__(num_classes, matcher, weight_dict, losses) + self.num_classes = num_classes + self.matcher = matcher + self.weight_dict = weight_dict + self.losses = losses + self.focal_loss = True + self.losses_dict = {} + self._current_frame_idx = 0 + + def initialize_for_single_clip(self, gt_instances: List[Instances]): + self.gt_instances = gt_instances + self.num_samples = 0 + self.sample_device = None + self._current_frame_idx = 0 + self.losses_dict = {} + + def _step(self): + self._current_frame_idx += 1 + + def calc_loss_for_track_scores(self, track_instances: Instances): + frame_id = self._current_frame_idx - 1 + gt_instances = self.gt_instances[frame_id] + outputs = { + 'pred_logits': track_instances.track_scores[None], + } + device = track_instances.track_scores.device + + num_tracks = len(track_instances) + src_idx = torch.arange(num_tracks, dtype=torch.long, device=device) + tgt_idx = track_instances.matched_gt_idxes # -1 for FP tracks and disappeared tracks + + track_losses = self.get_loss('labels', + outputs=outputs, + gt_instances=[gt_instances], + indices=[(src_idx, tgt_idx)], + num_boxes=1) + self.losses_dict.update( + {'frame_{}_track_{}'.format(frame_id, key): value for key, value in + track_losses.items()}) + + def get_num_boxes(self, num_samples): + num_boxes = torch.as_tensor(num_samples, dtype=torch.float, device=self.sample_device) + if is_dist_avail_and_initialized(): + torch.distributed.all_reduce(num_boxes) + num_boxes = torch.clamp(num_boxes / get_world_size(), min=1).item() + return num_boxes + + def get_loss(self, loss, outputs, gt_instances, indices, num_boxes, **kwargs): + loss_map = { + 'labels': self.loss_labels, + 'cardinality': self.loss_cardinality, + 'boxes': self.loss_boxes, + } + assert loss in loss_map, f'do you really want to compute {loss} loss?' + return loss_map[loss](outputs, gt_instances, indices, num_boxes, **kwargs) + + def loss_boxes(self, outputs, gt_instances: List[Instances], indices: List[tuple], num_boxes): + """Compute the losses related to the bounding boxes, the L1 regression loss and the GIoU loss + targets dicts must contain the key "boxes" containing a tensor of dim [nb_target_boxes, 4] + The target boxes are expected in format (center_x, center_y, h, w), normalized by the image size. + """ + # We ignore the regression loss of the track-disappear slots. + #TODO: Make this filter process more elegant. 
+ filtered_idx = [] + for src_per_img, tgt_per_img in indices: + keep = tgt_per_img != -1 + filtered_idx.append((src_per_img[keep], tgt_per_img[keep])) + indices = filtered_idx + idx = self._get_src_permutation_idx(indices) + src_boxes = outputs['pred_boxes'][idx] + target_boxes = torch.cat([gt_per_img.boxes[i] for gt_per_img, (_, i) in zip(gt_instances, indices)], dim=0) + + # for pad target, don't calculate regression loss, judged by whether obj_id=-1 + target_obj_ids = torch.cat([gt_per_img.obj_ids[i] for gt_per_img, (_, i) in zip(gt_instances, indices)], dim=0) # size(16) + mask = (target_obj_ids != -1) + + loss_bbox = F.l1_loss(src_boxes[mask], target_boxes[mask], reduction='none') + loss_giou = 1 - torch.diag(box_ops.generalized_box_iou( + box_ops.box_cxcywh_to_xyxy(src_boxes[mask]), + box_ops.box_cxcywh_to_xyxy(target_boxes[mask]))) + + losses = {} + losses['loss_bbox'] = loss_bbox.sum() / num_boxes + losses['loss_giou'] = loss_giou.sum() / num_boxes + + return losses + + def loss_labels(self, outputs, gt_instances: List[Instances], indices, num_boxes, log=False): + """Classification loss (NLL) + targets dicts must contain the key "labels" containing a tensor of dim [nb_target_boxes] + """ + src_logits = outputs['pred_logits'] + idx = self._get_src_permutation_idx(indices) + target_classes = torch.full(src_logits.shape[:2], self.num_classes, + dtype=torch.int64, device=src_logits.device) + # The matched gt for disappear track query is set -1. + labels = [] + for gt_per_img, (_, J) in zip(gt_instances, indices): + labels_per_img = torch.ones_like(J) + # set labels of track-appear slots to 0. + if len(gt_per_img) > 0: + labels_per_img[J != -1] = gt_per_img.labels[J[J != -1]] + labels.append(labels_per_img) + target_classes_o = torch.cat(labels) + target_classes[idx] = target_classes_o + if self.focal_loss: + gt_labels_target = F.one_hot(target_classes, num_classes=self.num_classes + 1)[:, :, :-1] # no loss for the last (background) class + gt_labels_target = gt_labels_target.to(src_logits) + loss_ce = sigmoid_focal_loss(src_logits.flatten(1), + gt_labels_target.flatten(1), + alpha=0.25, + gamma=2, + num_boxes=num_boxes, mean_in_dim1=False) + loss_ce = loss_ce.sum() + else: + loss_ce = F.cross_entropy(src_logits.transpose(1, 2), target_classes, self.empty_weight) + losses = {'loss_ce': loss_ce} + + if log: + # TODO this should probably be a separate loss, not hacked in this one here + losses['class_error'] = 100 - accuracy(src_logits[idx], target_classes_o)[0] + + return losses + + def match_for_single_frame(self, outputs: dict): + outputs_without_aux = {k: v for k, v in outputs.items() if k != 'aux_outputs'} + + gt_instances_i = self.gt_instances[self._current_frame_idx] # gt instances of i-th image. + track_instances: Instances = outputs_without_aux['track_instances'] + pred_logits_i = track_instances.pred_logits # predicted logits of i-th image. + pred_boxes_i = track_instances.pred_boxes # predicted boxes of i-th image. + + obj_idxes = gt_instances_i.obj_ids + obj_idxes_list = obj_idxes.detach().cpu().numpy().tolist() + obj_idx_to_gt_idx = {obj_idx: gt_idx for gt_idx, obj_idx in enumerate(obj_idxes_list)} + outputs_i = { + 'pred_logits': pred_logits_i.unsqueeze(0), + 'pred_boxes': pred_boxes_i.unsqueeze(0), + } + + # step1. inherit and update the previous tracks. + num_disappear_track = 0 + for j in range(len(track_instances)): + obj_id = track_instances.obj_idxes[j].item() + # set new target idx. 
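+            # obj_id >= 0 : slot already bound to a ground-truth identity; look
+            #               it up again among this frame's objects.
+            # obj_id == -1: free slot, to be matched in step4 below.
+            # obj_id == -2: false-positive track; left unmatched (see step2).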
+ if obj_id >= 0: + if obj_id in obj_idx_to_gt_idx: + track_instances.matched_gt_idxes[j] = obj_idx_to_gt_idx[obj_id] + else: + num_disappear_track += 1 + track_instances.matched_gt_idxes[j] = -1 # track-disappear case. + else: + track_instances.matched_gt_idxes[j] = -1 + + full_track_idxes = torch.arange(len(track_instances), dtype=torch.long).to(pred_logits_i.device) + matched_track_idxes = (track_instances.obj_idxes >= 0) # occu + prev_matched_indices = torch.stack( + [full_track_idxes[matched_track_idxes], track_instances.matched_gt_idxes[matched_track_idxes]], dim=1).to( + pred_logits_i.device) + + # step2. select the unmatched slots. + # note that the FP tracks whose obj_idxes are -2 will not be selected here. + unmatched_track_idxes = full_track_idxes[track_instances.obj_idxes == -1] + + # step3. select the untracked gt instances (new tracks). + tgt_indexes = track_instances.matched_gt_idxes + tgt_indexes = tgt_indexes[tgt_indexes != -1] + + tgt_state = torch.zeros(len(gt_instances_i)).to(pred_logits_i.device) + tgt_state[tgt_indexes] = 1 + untracked_tgt_indexes = torch.arange(len(gt_instances_i)).to(pred_logits_i.device)[tgt_state == 0] + # untracked_tgt_indexes = select_unmatched_indexes(tgt_indexes, len(gt_instances_i)) + untracked_gt_instances = gt_instances_i[untracked_tgt_indexes] + + def match_for_single_decoder_layer(unmatched_outputs, matcher): + new_track_indices = matcher(unmatched_outputs, + [untracked_gt_instances]) # list[tuple(src_idx, tgt_idx)] + + src_idx = new_track_indices[0][0] + tgt_idx = new_track_indices[0][1] + # concat src and tgt. + new_matched_indices = torch.stack([unmatched_track_idxes[src_idx], untracked_tgt_indexes[tgt_idx]], + dim=1).to(pred_logits_i.device) + return new_matched_indices + + # step4. do matching between the unmatched slots and GTs. + unmatched_outputs = { + 'pred_logits': track_instances.pred_logits[unmatched_track_idxes].unsqueeze(0), + 'pred_boxes': track_instances.pred_boxes[unmatched_track_idxes].unsqueeze(0), + } + new_matched_indices = match_for_single_decoder_layer(unmatched_outputs, self.matcher) + + # step5. update obj_idxes according to the new matching result. + track_instances.obj_idxes[new_matched_indices[:, 0]] = gt_instances_i.obj_ids[new_matched_indices[:, 1]].long() + track_instances.matched_gt_idxes[new_matched_indices[:, 0]] = new_matched_indices[:, 1] + + # step6. calculate iou. + active_idxes = (track_instances.obj_idxes >= 0) & (track_instances.matched_gt_idxes >= 0) + active_track_boxes = track_instances.pred_boxes[active_idxes] + if len(active_track_boxes) > 0: + gt_boxes = gt_instances_i.boxes[track_instances.matched_gt_idxes[active_idxes]] + active_track_boxes = box_ops.box_cxcywh_to_xyxy(active_track_boxes) + gt_boxes = box_ops.box_cxcywh_to_xyxy(gt_boxes) + track_instances.iou[active_idxes] = matched_boxlist_iou(Boxes(active_track_boxes), Boxes(gt_boxes)) + + # step7. merge the unmatched pairs and the matched pairs. + matched_indices = torch.cat([new_matched_indices, prev_matched_indices], dim=0) + + # step8. calculate losses. 
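+        # Per-frame losses are accumulated into self.losses_dict under keys like
+        # 'frame_{i}_loss_ce'; normalization by the total sample count happens
+        # only later, in ClipMatcher.forward, after the whole clip is processed.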
+ self.num_samples += len(gt_instances_i) + num_disappear_track + self.sample_device = pred_logits_i.device + for loss in self.losses: + new_track_loss = self.get_loss(loss, + outputs=outputs_i, + gt_instances=[gt_instances_i], + indices=[(matched_indices[:, 0], matched_indices[:, 1])], + num_boxes=1) + self.losses_dict.update( + {'frame_{}_{}'.format(self._current_frame_idx, key): value for key, value in new_track_loss.items()}) + + if 'aux_outputs' in outputs: + for i, aux_outputs in enumerate(outputs['aux_outputs']): + unmatched_outputs_layer = { + 'pred_logits': aux_outputs['pred_logits'][0, unmatched_track_idxes].unsqueeze(0), + 'pred_boxes': aux_outputs['pred_boxes'][0, unmatched_track_idxes].unsqueeze(0), + } + new_matched_indices_layer = match_for_single_decoder_layer(unmatched_outputs_layer, self.matcher) + matched_indices_layer = torch.cat([new_matched_indices_layer, prev_matched_indices], dim=0) + for loss in self.losses: + if loss == 'masks': + # Intermediate masks losses are too costly to compute, we ignore them. + continue + l_dict = self.get_loss(loss, + aux_outputs, + gt_instances=[gt_instances_i], + indices=[(matched_indices_layer[:, 0], matched_indices_layer[:, 1])], + num_boxes=1, ) + self.losses_dict.update( + {'frame_{}_aux{}_{}'.format(self._current_frame_idx, i, key): value for key, value in + l_dict.items()}) + self._step() + return track_instances + + def forward(self, outputs, input_data: dict): + # losses of each frame are calculated during the model's forwarding and are outputted by the model as outputs['losses_dict]. + losses = outputs.pop("losses_dict") + num_samples = self.get_num_boxes(self.num_samples) + for loss_name, loss in losses.items(): + losses[loss_name] /= num_samples + return losses + + +class RuntimeTrackerBase(object): + def __init__(self, score_thresh=0.8, filter_score_thresh=0.6, miss_tolerance=5): + self.score_thresh = score_thresh + self.filter_score_thresh = filter_score_thresh + self.miss_tolerance = miss_tolerance + self.max_obj_id = 0 + + def clear(self): + self.max_obj_id = 0 + + def update(self, track_instances: Instances): + track_instances.disappear_time[track_instances.scores >= self.score_thresh] = 0 + for i in range(len(track_instances)): + if track_instances.obj_idxes[i] == -1 and track_instances.scores[i] >= self.score_thresh: + # print("track {} has score {}, assign obj_id {}".format(i, track_instances.scores[i], self.max_obj_id)) + track_instances.obj_idxes[i] = self.max_obj_id + self.max_obj_id += 1 + elif track_instances.obj_idxes[i] >= 0 and track_instances.scores[i] < self.filter_score_thresh: + track_instances.disappear_time[i] += 1 + if track_instances.disappear_time[i] >= self.miss_tolerance: + # Set the obj_id to -1. + # Then this track will be removed by TrackEmbeddingLayer. 
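+                    # Spawning and killing use different thresholds (hysteresis):
+                    # a slot needs score >= score_thresh (default 0.8) to get an
+                    # id, but loses it only after miss_tolerance (default 5)
+                    # consecutive frames below filter_score_thresh (default 0.6).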
+ track_instances.obj_idxes[i] = -1 + + +class TrackerPostProcess(nn.Module): + """ This module converts the model's output into the format expected by the coco api""" + def __init__(self): + super().__init__() + + @torch.no_grad() + def forward(self, track_instances: Instances, target_size) -> Instances: + """ Perform the computation + Parameters: + outputs: raw outputs of the model + target_sizes: tensor of dimension [batch_size x 2] containing the size of each images of the batch + For evaluation, this must be the original image size (before any data augmentation) + For visualization, this should be the image size after data augment, but before padding + """ + out_logits = track_instances.pred_logits + out_bbox = track_instances.pred_boxes + + prob = out_logits.sigmoid() + # prob = out_logits[...,:1].sigmoid() + scores, labels = prob.max(-1) + + # convert to [x0, y0, x1, y1] format + boxes = box_ops.box_cxcywh_to_xyxy(out_bbox) + # and from relative [0, 1] to absolute [0, height] coordinates + img_h, img_w = target_size + scale_fct = torch.Tensor([img_w, img_h, img_w, img_h]).to(boxes) + boxes = boxes * scale_fct[None, :] + + track_instances.boxes = boxes + track_instances.scores = scores + track_instances.labels = labels +# track_instances.remove('pred_logits') +# track_instances.remove('pred_boxes') + return track_instances + + +def _get_clones(module, N): + return nn.ModuleList([copy.deepcopy(module) for i in range(N)]) + + +class MOTR(nn.Module): + def __init__(self, backbone, transformer, num_classes, num_queries, num_feature_levels, criterion, track_embed, + aux_loss=True, with_box_refine=False, two_stage=False, memory_bank=None): + """ Initializes the model. + Parameters: + backbone: torch module of the backbone to be used. See backbone.py + transformer: torch module of the transformer architecture. See transformer.py + num_classes: number of object classes + num_queries: number of object queries, ie detection slot. This is the maximal number of objects + DETR can detect in a single image. For COCO, we recommend 100 queries. + aux_loss: True if auxiliary decoding losses (loss at each decoder layer) are to be used. 
+ with_box_refine: iterative bounding box refinement + two_stage: two-stage Deformable DETR + """ + super().__init__() + self.num_queries = num_queries + self.track_embed = track_embed + self.transformer = transformer + hidden_dim = transformer.d_model + self.num_classes = num_classes + self.class_embed = nn.Linear(hidden_dim, num_classes) + self.bbox_embed = MLP(hidden_dim, hidden_dim, 4, 3) + self.num_feature_levels = num_feature_levels + if not two_stage: + self.query_embed = nn.Embedding(num_queries, hidden_dim * 2) + if num_feature_levels > 1: + num_backbone_outs = len(backbone.strides) + input_proj_list = [] + for _ in range(num_backbone_outs): + in_channels = backbone.num_channels[_] + input_proj_list.append(nn.Sequential( + nn.Conv2d(in_channels, hidden_dim, kernel_size=1), + nn.GroupNorm(32, hidden_dim), + )) + for _ in range(num_feature_levels - num_backbone_outs): + input_proj_list.append(nn.Sequential( + nn.Conv2d(in_channels, hidden_dim, kernel_size=3, stride=2, padding=1), + nn.GroupNorm(32, hidden_dim), + )) + in_channels = hidden_dim + self.input_proj = nn.ModuleList(input_proj_list) + else: + self.input_proj = nn.ModuleList([ + nn.Sequential( + nn.Conv2d(backbone.num_channels[0], hidden_dim, kernel_size=1), + nn.GroupNorm(32, hidden_dim), + )]) + self.backbone = backbone + self.aux_loss = aux_loss + self.with_box_refine = with_box_refine + self.two_stage = two_stage + + prior_prob = 0.01 + bias_value = -math.log((1 - prior_prob) / prior_prob) + self.class_embed.bias.data = torch.ones(num_classes) * bias_value + nn.init.constant_(self.bbox_embed.layers[-1].weight.data, 0) + nn.init.constant_(self.bbox_embed.layers[-1].bias.data, 0) + for proj in self.input_proj: + nn.init.xavier_uniform_(proj[0].weight, gain=1) + nn.init.constant_(proj[0].bias, 0) + + # if two-stage, the last class_embed and bbox_embed is for region proposal generation + num_pred = (transformer.decoder.num_layers + 1) if two_stage else transformer.decoder.num_layers + if with_box_refine: + self.class_embed = _get_clones(self.class_embed, num_pred) + self.bbox_embed = _get_clones(self.bbox_embed, num_pred) + nn.init.constant_(self.bbox_embed[0].layers[-1].bias.data[2:], -2.0) + # hack implementation for iterative bounding box refinement + self.transformer.decoder.bbox_embed = self.bbox_embed + else: + nn.init.constant_(self.bbox_embed.layers[-1].bias.data[2:], -2.0) + self.class_embed = nn.ModuleList([self.class_embed for _ in range(num_pred)]) + self.bbox_embed = nn.ModuleList([self.bbox_embed for _ in range(num_pred)]) + self.transformer.decoder.bbox_embed = None + if two_stage: + # hack implementation for two-stage + self.transformer.decoder.class_embed = self.class_embed + for box_embed in self.bbox_embed: + nn.init.constant_(box_embed.layers[-1].bias.data[2:], 0.0) + self.post_process = TrackerPostProcess() + self.track_base = RuntimeTrackerBase() + self.criterion = criterion + self.memory_bank = memory_bank + self.mem_bank_len = 0 if memory_bank is None else memory_bank.max_his_length + + def _generate_empty_tracks(self): + track_instances = Instances((1, 1)) + num_queries, dim = self.query_embed.weight.shape # (300, 512) + device = self.query_embed.weight.device + track_instances.ref_pts = self.transformer.reference_points(self.query_embed.weight[:, :dim // 2]) + track_instances.query_pos = self.query_embed.weight + track_instances.output_embedding = torch.zeros((num_queries, dim >> 1), device=device) + track_instances.obj_idxes = torch.full((len(track_instances),), -1, dtype=torch.long, 
device=device) + track_instances.matched_gt_idxes = torch.full((len(track_instances),), -1, dtype=torch.long, device=device) + track_instances.disappear_time = torch.zeros((len(track_instances), ), dtype=torch.long, device=device) + track_instances.iou = torch.zeros((len(track_instances),), dtype=torch.float, device=device) + track_instances.scores = torch.zeros((len(track_instances),), dtype=torch.float, device=device) + track_instances.track_scores = torch.zeros((len(track_instances),), dtype=torch.float, device=device) + track_instances.pred_boxes = torch.zeros((len(track_instances), 4), dtype=torch.float, device=device) + track_instances.pred_logits = torch.zeros((len(track_instances), self.num_classes), dtype=torch.float, device=device) + + mem_bank_len = self.mem_bank_len + track_instances.mem_bank = torch.zeros((len(track_instances), mem_bank_len, dim // 2), dtype=torch.float32, device=device) + track_instances.mem_padding_mask = torch.ones((len(track_instances), mem_bank_len), dtype=torch.bool, device=device) + track_instances.save_period = torch.zeros((len(track_instances), ), dtype=torch.float32, device=device) + + return track_instances.to(self.query_embed.weight.device) + + def clear(self): + self.track_base.clear() + + @torch.jit.unused + def _set_aux_loss(self, outputs_class, outputs_coord): + # this is a workaround to make torchscript happy, as torchscript + # doesn't support dictionary with non-homogeneous values, such + # as a dict having both a Tensor and a list. + return [{'pred_logits': a, 'pred_boxes': b, } + for a, b in zip(outputs_class[:-1], outputs_coord[:-1])] + + def _forward_single_image(self, samples, track_instances: Instances): + features, pos = self.backbone(samples) + src, mask = features[-1].decompose() + assert mask is not None + + srcs = [] + masks = [] + for l, feat in enumerate(features): + src, mask = feat.decompose() + srcs.append(self.input_proj[l](src)) + masks.append(mask) + assert mask is not None + + if self.num_feature_levels > len(srcs): + _len_srcs = len(srcs) + for l in range(_len_srcs, self.num_feature_levels): + if l == _len_srcs: + src = self.input_proj[l](features[-1].tensors) + else: + src = self.input_proj[l](srcs[-1]) + m = samples.mask + mask = F.interpolate(m[None].float(), size=src.shape[-2:]).to(torch.bool)[0] + pos_l = self.backbone[1](NestedTensor(src, mask)).to(src.dtype) + srcs.append(src) + masks.append(mask) + pos.append(pos_l) + + hs, init_reference, inter_references, enc_outputs_class, enc_outputs_coord_unact = self.transformer(srcs, masks, pos, track_instances.query_pos, ref_pts=track_instances.ref_pts) + + outputs_classes = [] + outputs_coords = [] + for lvl in range(hs.shape[0]): + if lvl == 0: + reference = init_reference + else: + reference = inter_references[lvl - 1] + reference = inverse_sigmoid(reference) + outputs_class = self.class_embed[lvl](hs[lvl]) + tmp = self.bbox_embed[lvl](hs[lvl]) + if reference.shape[-1] == 4: + tmp += reference + else: + assert reference.shape[-1] == 2 + tmp[..., :2] += reference + outputs_coord = tmp.sigmoid() + outputs_classes.append(outputs_class) + outputs_coords.append(outputs_coord) + outputs_class = torch.stack(outputs_classes) + outputs_coord = torch.stack(outputs_coords) + + ref_pts_all = torch.cat([init_reference[None], inter_references[:, :, :, :2]], dim=0) + out = {'pred_logits': outputs_class[-1], 'pred_boxes': outputs_coord[-1], 'ref_pts': ref_pts_all[5]} + if self.aux_loss: + out['aux_outputs'] = self._set_aux_loss(outputs_class, outputs_coord) + + with torch.no_grad(): + 
+            if self.training:
+                track_scores = outputs_class[-1, 0, :].sigmoid().max(dim=-1).values
+            else:
+                track_scores = outputs_class[-1, 0, :, 0].sigmoid()
+
+        track_instances.scores = track_scores
+        track_instances.pred_logits = outputs_class[-1, 0]
+        track_instances.pred_boxes = outputs_coord[-1, 0]
+        track_instances.output_embedding = hs[-1, 0]
+        if self.training:
+            # the track id will be assigned by the matcher.
+            out['track_instances'] = track_instances
+            track_instances = self.criterion.match_for_single_frame(out)
+        else:
+            # each track will be assigned a unique global id by the track base.
+            self.track_base.update(track_instances)
+        if self.memory_bank is not None:
+            track_instances = self.memory_bank(track_instances)
+            # track_instances.track_scores = track_instances.track_scores[..., 0]
+            # track_instances.scores = track_instances.track_scores.sigmoid()
+            if self.training:
+                self.criterion.calc_loss_for_track_scores(track_instances)
+        tmp = {}
+        tmp['init_track_instances'] = self._generate_empty_tracks()
+        tmp['track_instances'] = track_instances
+        out_track_instances = self.track_embed(tmp)
+        out['track_instances'] = out_track_instances
+        return out
+
+    @torch.no_grad()
+    def inference_single_image(self, img, ori_img_size, track_instances=None):
+        if not isinstance(img, NestedTensor):
+            img = nested_tensor_from_tensor_list(img)
+        if track_instances is None:
+            track_instances = self._generate_empty_tracks()
+
+        res = self._forward_single_image(img, track_instances=track_instances)
+
+        track_instances = res['track_instances']
+        track_instances = self.post_process(track_instances, ori_img_size)
+        ret = {'track_instances': track_instances}
+        if 'ref_pts' in res:
+            ref_pts = res['ref_pts']
+            img_h, img_w = ori_img_size
+            scale_fct = torch.Tensor([img_w, img_h]).to(ref_pts)
+            ref_pts = ref_pts * scale_fct[None]
+            ret['ref_pts'] = ref_pts
+        return ret
+
+    def forward(self, data: dict):
+        if self.training:
+            self.criterion.initialize_for_single_clip(data['gt_instances'])
+        frames = data['imgs']  # list of Tensor.
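+        # track_instances threads tracker state across the clip: slots matched
+        # in frame t re-enter frame t+1 as track queries via self.track_embed,
+        # so identities persist without any post-hoc association step.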
+ outputs = { + 'pred_logits': [], + 'pred_boxes': [], + } + + track_instances = self._generate_empty_tracks() + for frame in frames: + if not isinstance(frame, NestedTensor): + frame = nested_tensor_from_tensor_list([frame]) + frame_res = self._forward_single_image(frame, track_instances) + track_instances = frame_res['track_instances'] + outputs['pred_logits'].append(frame_res['pred_logits']) + outputs['pred_boxes'].append(frame_res['pred_boxes']) + + if not self.training: + outputs['track_instances'] = track_instances + else: + outputs['losses_dict'] = self.criterion.losses_dict + return outputs + + +def build(args): + dataset_to_num_classes = { + 'coco': 91, + 'coco_panoptic': 250, + 'e2e_mot': 1, + 'e2e_joint': 1, + 'e2e_static_mot': 1 + } + assert args.dataset_file in dataset_to_num_classes + num_classes = dataset_to_num_classes[args.dataset_file] + device = torch.device(args.device) + + backbone = build_backbone(args) + + transformer = build_deforamble_transformer(args) + d_model = transformer.d_model + hidden_dim = args.dim_feedforward + query_interaction_layer = build_query_interaction_layer(args, args.query_interaction_layer, d_model, hidden_dim, d_model*2) + + img_matcher = build_matcher(args) + num_frames_per_batch = max(args.sampler_lengths) + weight_dict = {} + for i in range(num_frames_per_batch): + weight_dict.update({"frame_{}_loss_ce".format(i): args.cls_loss_coef, + 'frame_{}_loss_bbox'.format(i): args.bbox_loss_coef, + 'frame_{}_loss_giou'.format(i): args.giou_loss_coef, + }) + + # TODO this is a hack + if args.aux_loss: + for i in range(num_frames_per_batch): + for j in range(args.dec_layers - 1): + weight_dict.update({"frame_{}_aux{}_loss_ce".format(i, j): args.cls_loss_coef, + 'frame_{}_aux{}_loss_bbox'.format(i, j): args.bbox_loss_coef, + 'frame_{}_aux{}_loss_giou'.format(i, j): args.giou_loss_coef, + }) + if args.memory_bank_type is not None and len(args.memory_bank_type) > 0: + memory_bank = build_memory_bank(args, d_model, hidden_dim, d_model * 2) + for i in range(num_frames_per_batch): + weight_dict.update({"frame_{}_track_loss_ce".format(i): args.cls_loss_coef}) + else: + memory_bank = None + losses = ['labels', 'boxes'] + criterion = ClipMatcher(num_classes, matcher=img_matcher, weight_dict=weight_dict, losses=losses) + criterion.to(device) + postprocessors = {} + model = MOTR( + backbone, + transformer, + track_embed=query_interaction_layer, + num_feature_levels=args.num_feature_levels, + num_classes=num_classes, + num_queries=args.num_queries, + aux_loss=args.aux_loss, + criterion=criterion, + with_box_refine=args.with_box_refine, + two_stage=args.two_stage, + memory_bank=memory_bank, + ) + return model, criterion, postprocessors diff --git a/tracking/docker-build-context/byte_track/tutorials/motr/motr_det.py b/tracking/docker-build-context/byte_track/tutorials/motr/motr_det.py new file mode 100644 index 0000000000000000000000000000000000000000..b9f74fdf8520385a79653a557631fa4a9ac1b9fc --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/motr/motr_det.py @@ -0,0 +1,677 @@ +# ------------------------------------------------------------------------ +# Copyright (c) 2021 megvii-model. All Rights Reserved. +# ------------------------------------------------------------------------ +# Modified from Deformable DETR (https://github.com/fundamentalvision/Deformable-DETR) +# Copyright (c) 2020 SenseTime. All Rights Reserved. 
+# ------------------------------------------------------------------------ +# Modified from DETR (https://github.com/facebookresearch/detr) +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +# ------------------------------------------------------------------------ + +""" +DETR model and criterion classes. +""" +import copy +import math +import numpy as np +import torch +import torch.nn.functional as F +from torch import nn, Tensor +from typing import List + +from util import box_ops +from util.misc import (NestedTensor, nested_tensor_from_tensor_list, + accuracy, get_world_size, interpolate, get_rank, + is_dist_avail_and_initialized, inverse_sigmoid) + +from models.structures import Instances, Boxes, pairwise_iou, matched_boxlist_iou + +from .backbone import build_backbone +from .matcher import build_matcher +from .deformable_transformer_plus import build_deforamble_transformer +from .qim import build as build_query_interaction_layer +from .memory_bank import build_memory_bank +from .deformable_detr import SetCriterion, MLP +from .segmentation import sigmoid_focal_loss + + +class ClipMatcher(SetCriterion): + def __init__(self, num_classes, + matcher, + weight_dict, + losses): + """ Create the criterion. + Parameters: + num_classes: number of object categories, omitting the special no-object category + matcher: module able to compute a matching between targets and proposals + weight_dict: dict containing as key the names of the losses and as values their relative weight. + eos_coef: relative classification weight applied to the no-object category + losses: list of all the losses to be applied. See get_loss for list of available losses. + """ + super().__init__(num_classes, matcher, weight_dict, losses) + self.num_classes = num_classes + self.matcher = matcher + self.weight_dict = weight_dict + self.losses = losses + self.focal_loss = True + self.losses_dict = {} + self._current_frame_idx = 0 + + def initialize_for_single_clip(self, gt_instances: List[Instances]): + self.gt_instances = gt_instances + self.num_samples = 0 + self.sample_device = None + self._current_frame_idx = 0 + self.losses_dict = {} + + def _step(self): + self._current_frame_idx += 1 + + def calc_loss_for_track_scores(self, track_instances: Instances): + frame_id = self._current_frame_idx - 1 + gt_instances = self.gt_instances[frame_id] + outputs = { + 'pred_logits': track_instances.track_scores[None], + } + device = track_instances.track_scores.device + + num_tracks = len(track_instances) + src_idx = torch.arange(num_tracks, dtype=torch.long, device=device) + tgt_idx = track_instances.matched_gt_idxes # -1 for FP tracks and disappeared tracks + + track_losses = self.get_loss('labels', + outputs=outputs, + gt_instances=[gt_instances], + indices=[(src_idx, tgt_idx)], + num_boxes=1) + self.losses_dict.update( + {'frame_{}_track_{}'.format(frame_id, key): value for key, value in + track_losses.items()}) + + def get_num_boxes(self, num_samples): + num_boxes = torch.as_tensor(num_samples, dtype=torch.float, device=self.sample_device) + if is_dist_avail_and_initialized(): + torch.distributed.all_reduce(num_boxes) + num_boxes = torch.clamp(num_boxes / get_world_size(), min=1).item() + return num_boxes + + def get_loss(self, loss, outputs, gt_instances, indices, num_boxes, **kwargs): + loss_map = { + 'labels': self.loss_labels, + 'cardinality': self.loss_cardinality, + 'boxes': self.loss_boxes, + } + assert loss in loss_map, f'do you really want to compute {loss} loss?' 
+ return loss_map[loss](outputs, gt_instances, indices, num_boxes, **kwargs) + + def loss_boxes(self, outputs, gt_instances: List[Instances], indices: List[tuple], num_boxes): + """Compute the losses related to the bounding boxes, the L1 regression loss and the GIoU loss + targets dicts must contain the key "boxes" containing a tensor of dim [nb_target_boxes, 4] + The target boxes are expected in format (center_x, center_y, h, w), normalized by the image size. + """ + # We ignore the regression loss of the track-disappear slots. + #TODO: Make this filter process more elegant. + filtered_idx = [] + for src_per_img, tgt_per_img in indices: + keep = tgt_per_img != -1 + filtered_idx.append((src_per_img[keep], tgt_per_img[keep])) + indices = filtered_idx + idx = self._get_src_permutation_idx(indices) + src_boxes = outputs['pred_boxes'][idx] + target_boxes = torch.cat([gt_per_img.boxes[i] for gt_per_img, (_, i) in zip(gt_instances, indices)], dim=0) + + # for pad target, don't calculate regression loss, judged by whether obj_id=-1 + target_obj_ids = torch.cat([gt_per_img.obj_ids[i] for gt_per_img, (_, i) in zip(gt_instances, indices)], dim=0) # size(16) + mask = (target_obj_ids != -1) + + loss_bbox = F.l1_loss(src_boxes[mask], target_boxes[mask], reduction='none') + loss_giou = 1 - torch.diag(box_ops.generalized_box_iou( + box_ops.box_cxcywh_to_xyxy(src_boxes[mask]), + box_ops.box_cxcywh_to_xyxy(target_boxes[mask]))) + + losses = {} + losses['loss_bbox'] = loss_bbox.sum() / num_boxes + losses['loss_giou'] = loss_giou.sum() / num_boxes + + return losses + + def loss_labels(self, outputs, gt_instances: List[Instances], indices, num_boxes, log=False): + """Classification loss (NLL) + targets dicts must contain the key "labels" containing a tensor of dim [nb_target_boxes] + """ + src_logits = outputs['pred_logits'] + idx = self._get_src_permutation_idx(indices) + target_classes = torch.full(src_logits.shape[:2], self.num_classes, + dtype=torch.int64, device=src_logits.device) + # The matched gt for disappear track query is set -1. + labels = [] + for gt_per_img, (_, J) in zip(gt_instances, indices): + labels_per_img = torch.ones_like(J) + # set labels of track-appear slots to 0. + if len(gt_per_img) > 0: + labels_per_img[J != -1] = gt_per_img.labels[J[J != -1]] + labels.append(labels_per_img) + target_classes_o = torch.cat(labels) + target_classes[idx] = target_classes_o + if self.focal_loss: + gt_labels_target = F.one_hot(target_classes, num_classes=self.num_classes + 1)[:, :, :-1] # no loss for the last (background) class + gt_labels_target = gt_labels_target.to(src_logits) + loss_ce = sigmoid_focal_loss(src_logits.flatten(1), + gt_labels_target.flatten(1), + alpha=0.25, + gamma=2, + num_boxes=num_boxes, mean_in_dim1=False) + loss_ce = loss_ce.sum() + else: + loss_ce = F.cross_entropy(src_logits.transpose(1, 2), target_classes, self.empty_weight) + losses = {'loss_ce': loss_ce} + + if log: + # TODO this should probably be a separate loss, not hacked in this one here + losses['class_error'] = 100 - accuracy(src_logits[idx], target_classes_o)[0] + + return losses + + def match_for_single_frame(self, outputs: dict): + outputs_without_aux = {k: v for k, v in outputs.items() if k != 'aux_outputs'} + + gt_instances_i = self.gt_instances[self._current_frame_idx] # gt instances of i-th image. + track_instances: Instances = outputs_without_aux['track_instances'] + pred_logits_i = track_instances.pred_logits # predicted logits of i-th image. 
+ pred_boxes_i = track_instances.pred_boxes # predicted boxes of i-th image. + + obj_idxes = gt_instances_i.obj_ids + obj_idxes_list = obj_idxes.detach().cpu().numpy().tolist() + obj_idx_to_gt_idx = {obj_idx: gt_idx for gt_idx, obj_idx in enumerate(obj_idxes_list)} + outputs_i = { + 'pred_logits': pred_logits_i.unsqueeze(0), + 'pred_boxes': pred_boxes_i.unsqueeze(0), + } + + # step1. inherit and update the previous tracks. + num_disappear_track = 0 + for j in range(len(track_instances)): + obj_id = track_instances.obj_idxes[j].item() + # set new target idx. + if obj_id >= 0: + if obj_id in obj_idx_to_gt_idx: + track_instances.matched_gt_idxes[j] = obj_idx_to_gt_idx[obj_id] + else: + num_disappear_track += 1 + track_instances.matched_gt_idxes[j] = -1 # track-disappear case. + else: + track_instances.matched_gt_idxes[j] = -1 + + full_track_idxes = torch.arange(len(track_instances), dtype=torch.long).to(pred_logits_i.device) + matched_track_idxes = (track_instances.obj_idxes >= 0) # occu + prev_matched_indices = torch.stack( + [full_track_idxes[matched_track_idxes], track_instances.matched_gt_idxes[matched_track_idxes]], dim=1).to( + pred_logits_i.device) + + # step2. select the unmatched slots. + # note that the FP tracks whose obj_idxes are -2 will not be selected here. + unmatched_track_idxes = full_track_idxes[track_instances.obj_idxes == -1] + + # step3. select the untracked gt instances (new tracks). + tgt_indexes = track_instances.matched_gt_idxes + tgt_indexes = tgt_indexes[tgt_indexes != -1] + + tgt_state = torch.zeros(len(gt_instances_i)).to(pred_logits_i.device) + tgt_state[tgt_indexes] = 1 + untracked_tgt_indexes = torch.arange(len(gt_instances_i)).to(pred_logits_i.device)[tgt_state == 0] + # untracked_tgt_indexes = select_unmatched_indexes(tgt_indexes, len(gt_instances_i)) + untracked_gt_instances = gt_instances_i[untracked_tgt_indexes] + + def match_for_single_decoder_layer(unmatched_outputs, matcher): + new_track_indices = matcher(unmatched_outputs, + [untracked_gt_instances]) # list[tuple(src_idx, tgt_idx)] + + src_idx = new_track_indices[0][0] + tgt_idx = new_track_indices[0][1] + # concat src and tgt. + new_matched_indices = torch.stack([unmatched_track_idxes[src_idx], untracked_tgt_indexes[tgt_idx]], + dim=1).to(pred_logits_i.device) + return new_matched_indices + + # step4. do matching between the unmatched slots and GTs. + unmatched_outputs = { + 'pred_logits': track_instances.pred_logits[unmatched_track_idxes].unsqueeze(0), + 'pred_boxes': track_instances.pred_boxes[unmatched_track_idxes].unsqueeze(0), + } + new_matched_indices = match_for_single_decoder_layer(unmatched_outputs, self.matcher) + + # step5. update obj_idxes according to the new matching result. + track_instances.obj_idxes[new_matched_indices[:, 0]] = gt_instances_i.obj_ids[new_matched_indices[:, 1]].long() + track_instances.matched_gt_idxes[new_matched_indices[:, 0]] = new_matched_indices[:, 1] + + # step6. calculate iou. + active_idxes = (track_instances.obj_idxes >= 0) & (track_instances.matched_gt_idxes >= 0) + active_track_boxes = track_instances.pred_boxes[active_idxes] + if len(active_track_boxes) > 0: + gt_boxes = gt_instances_i.boxes[track_instances.matched_gt_idxes[active_idxes]] + active_track_boxes = box_ops.box_cxcywh_to_xyxy(active_track_boxes) + gt_boxes = box_ops.box_cxcywh_to_xyxy(gt_boxes) + track_instances.iou[active_idxes] = matched_boxlist_iou(Boxes(active_track_boxes), Boxes(gt_boxes)) + + # step7. merge the unmatched pairs and the matched pairs. 
+ matched_indices = torch.cat([new_matched_indices, prev_matched_indices], dim=0) + + # step8. calculate losses. + self.num_samples += len(gt_instances_i) + num_disappear_track + self.sample_device = pred_logits_i.device + for loss in self.losses: + new_track_loss = self.get_loss(loss, + outputs=outputs_i, + gt_instances=[gt_instances_i], + indices=[(matched_indices[:, 0], matched_indices[:, 1])], + num_boxes=1) + self.losses_dict.update( + {'frame_{}_{}'.format(self._current_frame_idx, key): value for key, value in new_track_loss.items()}) + + if 'aux_outputs' in outputs: + for i, aux_outputs in enumerate(outputs['aux_outputs']): + unmatched_outputs_layer = { + 'pred_logits': aux_outputs['pred_logits'][0, unmatched_track_idxes].unsqueeze(0), + 'pred_boxes': aux_outputs['pred_boxes'][0, unmatched_track_idxes].unsqueeze(0), + } + new_matched_indices_layer = match_for_single_decoder_layer(unmatched_outputs_layer, self.matcher) + matched_indices_layer = torch.cat([new_matched_indices_layer, prev_matched_indices], dim=0) + for loss in self.losses: + if loss == 'masks': + # Intermediate masks losses are too costly to compute, we ignore them. + continue + l_dict = self.get_loss(loss, + aux_outputs, + gt_instances=[gt_instances_i], + indices=[(matched_indices_layer[:, 0], matched_indices_layer[:, 1])], + num_boxes=1, ) + self.losses_dict.update( + {'frame_{}_aux{}_{}'.format(self._current_frame_idx, i, key): value for key, value in + l_dict.items()}) + self._step() + return track_instances + + def forward(self, outputs, input_data: dict): + # losses of each frame are calculated during the model's forwarding and are outputted by the model as outputs['losses_dict]. + losses = outputs.pop("losses_dict") + num_samples = self.get_num_boxes(self.num_samples) + for loss_name, loss in losses.items(): + losses[loss_name] /= num_samples + return losses + + +class RuntimeTrackerBase(object): + def __init__(self, score_thresh=0.8, filter_score_thresh=0.6, miss_tolerance=5): + self.score_thresh = score_thresh + self.filter_score_thresh = filter_score_thresh + self.miss_tolerance = miss_tolerance + self.max_obj_id = 0 + + def clear(self): + self.max_obj_id = 0 + + def update(self, track_instances: Instances): + track_instances.disappear_time[track_instances.scores >= self.score_thresh] = 0 + for i in range(len(track_instances)): + if track_instances.obj_idxes[i] == -1 and track_instances.scores[i] >= self.score_thresh: + # print("track {} has score {}, assign obj_id {}".format(i, track_instances.scores[i], self.max_obj_id)) + track_instances.obj_idxes[i] = self.max_obj_id + self.max_obj_id += 1 + elif track_instances.obj_idxes[i] >= 0 and track_instances.scores[i] < self.filter_score_thresh: + track_instances.disappear_time[i] += 1 + if track_instances.disappear_time[i] >= self.miss_tolerance: + # Set the obj_id to -1. + # Then this track will be removed by TrackEmbeddingLayer. 
+ track_instances.obj_idxes[i] = -1 + + +class TrackerPostProcess(nn.Module): + """ This module converts the model's output into the format expected by the coco api""" + def __init__(self): + super().__init__() + + @torch.no_grad() + def forward(self, track_instances: Instances, target_size) -> Instances: + """ Perform the computation + Parameters: + outputs: raw outputs of the model + target_sizes: tensor of dimension [batch_size x 2] containing the size of each images of the batch + For evaluation, this must be the original image size (before any data augmentation) + For visualization, this should be the image size after data augment, but before padding + """ + out_logits = track_instances.pred_logits + out_bbox = track_instances.pred_boxes + + prob = out_logits.sigmoid() + # prob = out_logits[...,:1].sigmoid() + scores, labels = prob.max(-1) + + # convert to [x0, y0, x1, y1] format + boxes = box_ops.box_cxcywh_to_xyxy(out_bbox) + # and from relative [0, 1] to absolute [0, height] coordinates + img_h, img_w = target_size + scale_fct = torch.Tensor([img_w, img_h, img_w, img_h]).to(boxes) + boxes = boxes * scale_fct[None, :] + + track_instances.boxes = boxes + track_instances.scores = scores + track_instances.labels = labels +# track_instances.remove('pred_logits') +# track_instances.remove('pred_boxes') + return track_instances + + +def _get_clones(module, N): + return nn.ModuleList([copy.deepcopy(module) for i in range(N)]) + + +class MOTR(nn.Module): + def __init__(self, backbone, transformer, num_classes, num_queries, num_feature_levels, criterion, track_embed, + aux_loss=True, with_box_refine=False, two_stage=False, memory_bank=None): + """ Initializes the model. + Parameters: + backbone: torch module of the backbone to be used. See backbone.py + transformer: torch module of the transformer architecture. See transformer.py + num_classes: number of object classes + num_queries: number of object queries, ie detection slot. This is the maximal number of objects + DETR can detect in a single image. For COCO, we recommend 100 queries. + aux_loss: True if auxiliary decoding losses (loss at each decoder layer) are to be used. 
+ with_box_refine: iterative bounding box refinement + two_stage: two-stage Deformable DETR + """ + super().__init__() + self.num_queries = num_queries + self.track_embed = track_embed + self.transformer = transformer + hidden_dim = transformer.d_model + self.num_classes = num_classes + self.class_embed = nn.Linear(hidden_dim, num_classes) + self.bbox_embed = MLP(hidden_dim, hidden_dim, 4, 3) + self.num_feature_levels = num_feature_levels + if not two_stage: + self.query_embed = nn.Embedding(num_queries, hidden_dim * 2) + if num_feature_levels > 1: + num_backbone_outs = len(backbone.strides) + input_proj_list = [] + for _ in range(num_backbone_outs): + in_channels = backbone.num_channels[_] + input_proj_list.append(nn.Sequential( + nn.Conv2d(in_channels, hidden_dim, kernel_size=1), + nn.GroupNorm(32, hidden_dim), + )) + for _ in range(num_feature_levels - num_backbone_outs): + input_proj_list.append(nn.Sequential( + nn.Conv2d(in_channels, hidden_dim, kernel_size=3, stride=2, padding=1), + nn.GroupNorm(32, hidden_dim), + )) + in_channels = hidden_dim + self.input_proj = nn.ModuleList(input_proj_list) + else: + self.input_proj = nn.ModuleList([ + nn.Sequential( + nn.Conv2d(backbone.num_channels[0], hidden_dim, kernel_size=1), + nn.GroupNorm(32, hidden_dim), + )]) + self.backbone = backbone + self.aux_loss = aux_loss + self.with_box_refine = with_box_refine + self.two_stage = two_stage + + prior_prob = 0.01 + bias_value = -math.log((1 - prior_prob) / prior_prob) + self.class_embed.bias.data = torch.ones(num_classes) * bias_value + nn.init.constant_(self.bbox_embed.layers[-1].weight.data, 0) + nn.init.constant_(self.bbox_embed.layers[-1].bias.data, 0) + for proj in self.input_proj: + nn.init.xavier_uniform_(proj[0].weight, gain=1) + nn.init.constant_(proj[0].bias, 0) + + # if two-stage, the last class_embed and bbox_embed is for region proposal generation + num_pred = (transformer.decoder.num_layers + 1) if two_stage else transformer.decoder.num_layers + if with_box_refine: + self.class_embed = _get_clones(self.class_embed, num_pred) + self.bbox_embed = _get_clones(self.bbox_embed, num_pred) + nn.init.constant_(self.bbox_embed[0].layers[-1].bias.data[2:], -2.0) + # hack implementation for iterative bounding box refinement + self.transformer.decoder.bbox_embed = self.bbox_embed + else: + nn.init.constant_(self.bbox_embed.layers[-1].bias.data[2:], -2.0) + self.class_embed = nn.ModuleList([self.class_embed for _ in range(num_pred)]) + self.bbox_embed = nn.ModuleList([self.bbox_embed for _ in range(num_pred)]) + self.transformer.decoder.bbox_embed = None + if two_stage: + # hack implementation for two-stage + self.transformer.decoder.class_embed = self.class_embed + for box_embed in self.bbox_embed: + nn.init.constant_(box_embed.layers[-1].bias.data[2:], 0.0) + self.post_process = TrackerPostProcess() + self.track_base = RuntimeTrackerBase() + self.criterion = criterion + self.memory_bank = memory_bank + self.mem_bank_len = 0 if memory_bank is None else memory_bank.max_his_length + + def _generate_empty_tracks(self): + track_instances = Instances((1, 1)) + num_queries, dim = self.query_embed.weight.shape # (300, 512) + device = self.query_embed.weight.device + track_instances.ref_pts = self.transformer.reference_points(self.query_embed.weight[:, :dim // 2]) + track_instances.query_pos = self.query_embed.weight + track_instances.output_embedding = torch.zeros((num_queries, dim >> 1), device=device) + track_instances.obj_idxes = torch.full((len(track_instances),), -1, dtype=torch.long, 
device=device) + track_instances.matched_gt_idxes = torch.full((len(track_instances),), -1, dtype=torch.long, device=device) + track_instances.disappear_time = torch.zeros((len(track_instances), ), dtype=torch.long, device=device) + track_instances.iou = torch.zeros((len(track_instances),), dtype=torch.float, device=device) + track_instances.scores = torch.zeros((len(track_instances),), dtype=torch.float, device=device) + track_instances.track_scores = torch.zeros((len(track_instances),), dtype=torch.float, device=device) + track_instances.pred_boxes = torch.zeros((len(track_instances), 4), dtype=torch.float, device=device) + track_instances.pred_logits = torch.zeros((len(track_instances), self.num_classes), dtype=torch.float, device=device) + + mem_bank_len = self.mem_bank_len + track_instances.mem_bank = torch.zeros((len(track_instances), mem_bank_len, dim // 2), dtype=torch.float32, device=device) + track_instances.mem_padding_mask = torch.ones((len(track_instances), mem_bank_len), dtype=torch.bool, device=device) + track_instances.save_period = torch.zeros((len(track_instances), ), dtype=torch.float32, device=device) + + return track_instances.to(self.query_embed.weight.device) + + def clear(self): + self.track_base.clear() + + @torch.jit.unused + def _set_aux_loss(self, outputs_class, outputs_coord): + # this is a workaround to make torchscript happy, as torchscript + # doesn't support dictionary with non-homogeneous values, such + # as a dict having both a Tensor and a list. + return [{'pred_logits': a, 'pred_boxes': b, } + for a, b in zip(outputs_class[:-1], outputs_coord[:-1])] + + def _forward_single_image(self, samples, track_instances: Instances): + features, pos = self.backbone(samples) + src, mask = features[-1].decompose() + assert mask is not None + + srcs = [] + masks = [] + for l, feat in enumerate(features): + src, mask = feat.decompose() + srcs.append(self.input_proj[l](src)) + masks.append(mask) + assert mask is not None + + if self.num_feature_levels > len(srcs): + _len_srcs = len(srcs) + for l in range(_len_srcs, self.num_feature_levels): + if l == _len_srcs: + src = self.input_proj[l](features[-1].tensors) + else: + src = self.input_proj[l](srcs[-1]) + m = samples.mask + mask = F.interpolate(m[None].float(), size=src.shape[-2:]).to(torch.bool)[0] + pos_l = self.backbone[1](NestedTensor(src, mask)).to(src.dtype) + srcs.append(src) + masks.append(mask) + pos.append(pos_l) + + hs, init_reference, inter_references, enc_outputs_class, enc_outputs_coord_unact = self.transformer(srcs, masks, pos, track_instances.query_pos, ref_pts=track_instances.ref_pts) + + outputs_classes = [] + outputs_coords = [] + for lvl in range(hs.shape[0]): + if lvl == 0: + reference = init_reference + else: + reference = inter_references[lvl - 1] + reference = inverse_sigmoid(reference) + outputs_class = self.class_embed[lvl](hs[lvl]) + tmp = self.bbox_embed[lvl](hs[lvl]) + if reference.shape[-1] == 4: + tmp += reference + else: + assert reference.shape[-1] == 2 + tmp[..., :2] += reference + outputs_coord = tmp.sigmoid() + outputs_classes.append(outputs_class) + outputs_coords.append(outputs_coord) + outputs_class = torch.stack(outputs_classes) + outputs_coord = torch.stack(outputs_coords) + + ref_pts_all = torch.cat([init_reference[None], inter_references[:, :, :, :2]], dim=0) + out = {'pred_logits': outputs_class[-1], 'pred_boxes': outputs_coord[-1], 'ref_pts': ref_pts_all[5]} + if self.aux_loss: + out['aux_outputs'] = self._set_aux_loss(outputs_class, outputs_coord) + + with torch.no_grad(): + 
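+ # per-query confidence: max class probability during training; at inference
+ # the model is single-class, so the probability of class 0 is read directly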
+ if self.training:
+ track_scores = outputs_class[-1, 0, :].sigmoid().max(dim=-1).values
+ else:
+ track_scores = outputs_class[-1, 0, :, 0].sigmoid()
+
+ track_instances.scores = track_scores
+ track_instances.pred_logits = outputs_class[-1, 0]
+ track_instances.pred_boxes = outputs_coord[-1, 0]
+ track_instances.output_embedding = hs[-1, 0]
+ if self.training:
+ # the track id will be assigned by the matcher.
+ out['track_instances'] = track_instances
+ track_instances = self.criterion.match_for_single_frame(out)
+ else:
+ # each track will be assigned a unique global id by the track base.
+ self.track_base.update(track_instances)
+ if self.memory_bank is not None:
+ track_instances = self.memory_bank(track_instances)
+ # track_instances.track_scores = track_instances.track_scores[..., 0]
+ # track_instances.scores = track_instances.track_scores.sigmoid()
+ if self.training:
+ self.criterion.calc_loss_for_track_scores(track_instances)
+ tmp = {}
+ tmp['init_track_instances'] = self._generate_empty_tracks()
+ tmp['track_instances'] = track_instances
+ out_track_instances = self.track_embed(tmp)
+ out['track_instances'] = out_track_instances
+ return out
+
+ @torch.no_grad()
+ def inference_single_image(self, img, ori_img_size, track_instances=None):
+ if not isinstance(img, NestedTensor):
+ img = nested_tensor_from_tensor_list(img)
+# if track_instances is None:
+# track_instances = self._generate_empty_tracks()
+ track_instances = self._generate_empty_tracks()
+
+ res = self._forward_single_image(img, track_instances=track_instances)
+
+ track_instances = res['track_instances']
+ track_instances = self.post_process(track_instances, ori_img_size)
+ ret = {'track_instances': track_instances}
+ if 'ref_pts' in res:
+ ref_pts = res['ref_pts']
+ img_h, img_w = ori_img_size
+ scale_fct = torch.Tensor([img_w, img_h]).to(ref_pts)
+ ref_pts = ref_pts * scale_fct[None]
+ ret['ref_pts'] = ref_pts
+ return ret
+
+ def forward(self, data: dict):
+ if self.training:
+ self.criterion.initialize_for_single_clip(data['gt_instances'])
+ frames = data['imgs'] # list of Tensor.
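+ # the clip is processed frame by frame: the track_instances returned for
+ # frame t (detections plus surviving tracks) are re-used as queries at t+1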
+ outputs = { + 'pred_logits': [], + 'pred_boxes': [], + } + + track_instances = self._generate_empty_tracks() + for frame in frames: + if not isinstance(frame, NestedTensor): + frame = nested_tensor_from_tensor_list([frame]) + frame_res = self._forward_single_image(frame, track_instances) + track_instances = frame_res['track_instances'] + outputs['pred_logits'].append(frame_res['pred_logits']) + outputs['pred_boxes'].append(frame_res['pred_boxes']) + + if not self.training: + outputs['track_instances'] = track_instances + else: + outputs['losses_dict'] = self.criterion.losses_dict + return outputs + + +def build(args): + dataset_to_num_classes = { + 'coco': 91, + 'coco_panoptic': 250, + 'e2e_mot': 1, + 'e2e_joint': 1, + 'e2e_static_mot': 1 + } + assert args.dataset_file in dataset_to_num_classes + num_classes = dataset_to_num_classes[args.dataset_file] + device = torch.device(args.device) + + backbone = build_backbone(args) + + transformer = build_deforamble_transformer(args) + d_model = transformer.d_model + hidden_dim = args.dim_feedforward + query_interaction_layer = build_query_interaction_layer(args, args.query_interaction_layer, d_model, hidden_dim, d_model*2) + + img_matcher = build_matcher(args) + num_frames_per_batch = max(args.sampler_lengths) + weight_dict = {} + for i in range(num_frames_per_batch): + weight_dict.update({"frame_{}_loss_ce".format(i): args.cls_loss_coef, + 'frame_{}_loss_bbox'.format(i): args.bbox_loss_coef, + 'frame_{}_loss_giou'.format(i): args.giou_loss_coef, + }) + + # TODO this is a hack + if args.aux_loss: + for i in range(num_frames_per_batch): + for j in range(args.dec_layers - 1): + weight_dict.update({"frame_{}_aux{}_loss_ce".format(i, j): args.cls_loss_coef, + 'frame_{}_aux{}_loss_bbox'.format(i, j): args.bbox_loss_coef, + 'frame_{}_aux{}_loss_giou'.format(i, j): args.giou_loss_coef, + }) + if args.memory_bank_type is not None and len(args.memory_bank_type) > 0: + memory_bank = build_memory_bank(args, d_model, hidden_dim, d_model * 2) + for i in range(num_frames_per_batch): + weight_dict.update({"frame_{}_track_loss_ce".format(i): args.cls_loss_coef}) + else: + memory_bank = None + losses = ['labels', 'boxes'] + criterion = ClipMatcher(num_classes, matcher=img_matcher, weight_dict=weight_dict, losses=losses) + criterion.to(device) + postprocessors = {} + model = MOTR( + backbone, + transformer, + track_embed=query_interaction_layer, + num_feature_levels=args.num_feature_levels, + num_classes=num_classes, + num_queries=args.num_queries, + aux_loss=args.aux_loss, + criterion=criterion, + with_box_refine=args.with_box_refine, + two_stage=args.two_stage, + memory_bank=memory_bank, + ) + return model, criterion, postprocessors diff --git a/tracking/docker-build-context/byte_track/tutorials/motr/transforms.py b/tracking/docker-build-context/byte_track/tutorials/motr/transforms.py new file mode 100644 index 0000000000000000000000000000000000000000..064d1f057a7084153db597ba9b723a8f2c14f243 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/motr/transforms.py @@ -0,0 +1,650 @@ +# ------------------------------------------------------------------------ +# Copyright (c) 2021 megvii-model. All Rights Reserved. +# ------------------------------------------------------------------------ +# Modified from Deformable DETR (https://github.com/fundamentalvision/Deformable-DETR) +# Copyright (c) 2020 SenseTime. All Rights Reserved. 
+# ------------------------------------------------------------------------ +# Modified from DETR (https://github.com/facebookresearch/detr) +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +# ------------------------------------------------------------------------ + +""" +Transforms and data augmentation for both image + bbox. +""" +import copy +import random +import PIL +import torch +import torchvision.transforms as T +import torchvision.transforms.functional as F +from PIL import Image, ImageDraw +from util.box_ops import box_xyxy_to_cxcywh +from util.misc import interpolate +import numpy as np +import os + + + +def crop_mot(image, target, region): + cropped_image = F.crop(image, *region) + + target = target.copy() + i, j, h, w = region + + # should we do something wrt the original size? + target["size"] = torch.tensor([h, w]) + + fields = ["labels", "area", "iscrowd"] + if 'obj_ids' in target: + fields.append('obj_ids') + + if "boxes" in target: + boxes = target["boxes"] + max_size = torch.as_tensor([w, h], dtype=torch.float32) + cropped_boxes = boxes - torch.as_tensor([j, i, j, i]) + + for i, box in enumerate(cropped_boxes): + l, t, r, b = box +# if l < 0: +# l = 0 +# if r < 0: +# r = 0 +# if l > w: +# l = w +# if r > w: +# r = w +# if t < 0: +# t = 0 +# if b < 0: +# b = 0 +# if t > h: +# t = h +# if b > h: +# b = h + if l < 0 and r < 0: + l = r = 0 + if l > w and r > w: + l = r = w + if t < 0 and b < 0: + t = b = 0 + if t > h and b > h: + t = b = h + cropped_boxes[i] = torch.tensor([l, t, r, b], dtype=box.dtype) + + cropped_boxes = torch.min(cropped_boxes.reshape(-1, 2, 2), max_size) + cropped_boxes = cropped_boxes.clamp(min=0) + area = (cropped_boxes[:, 1, :] - cropped_boxes[:, 0, :]).prod(dim=1) + target["boxes"] = cropped_boxes.reshape(-1, 4) + target["area"] = area + fields.append("boxes") + + if "masks" in target: + # FIXME should we update the area here if there are no boxes? + target['masks'] = target['masks'][:, i:i + h, j:j + w] + fields.append("masks") + + # remove elements for which the boxes or masks that have zero area + if "boxes" in target or "masks" in target: + # favor boxes selection when defining which elements to keep + # this is compatible with previous implementation + if "boxes" in target: + cropped_boxes = target['boxes'].reshape(-1, 2, 2) + keep = torch.all(cropped_boxes[:, 1, :] > cropped_boxes[:, 0, :], dim=1) + else: + keep = target['masks'].flatten(1).any(1) + + for field in fields: + target[field] = target[field][keep] + + return cropped_image, target + + +def random_shift(image, target, region, sizes): + oh, ow = sizes + # step 1, shift crop and re-scale image firstly + cropped_image = F.crop(image, *region) + cropped_image = F.resize(cropped_image, sizes) + + target = target.copy() + i, j, h, w = region + + # should we do something wrt the original size? 
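+ # boxes below are translated into the crop window, clamped to it, and then
+ # re-scaled by (oh/h, ow/w) in step 2 to follow the image resize above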
+ target["size"] = torch.tensor([h, w]) + + fields = ["labels", "area", "iscrowd"] + if 'obj_ids' in target: + fields.append('obj_ids') + + if "boxes" in target: + boxes = target["boxes"] + max_size = torch.as_tensor([w, h], dtype=torch.float32) + cropped_boxes = boxes - torch.as_tensor([j, i, j, i]) + + for i, box in enumerate(cropped_boxes): + l, t, r, b = box + if l < 0: + l = 0 + if r < 0: + r = 0 + if l > w: + l = w + if r > w: + r = w + if t < 0: + t = 0 + if b < 0: + b = 0 + if t > h: + t = h + if b > h: + b = h + # step 2, re-scale coords secondly + ratio_h = 1.0 * oh / h + ratio_w = 1.0 * ow / w + cropped_boxes[i] = torch.tensor([ratio_w * l, ratio_h * t, ratio_w * r, ratio_h * b], dtype=box.dtype) + + cropped_boxes = cropped_boxes.reshape(-1, 2, 2) + area = (cropped_boxes[:, 1, :] - cropped_boxes[:, 0, :]).prod(dim=1) + target["boxes"] = cropped_boxes.reshape(-1, 4) + target["area"] = area + fields.append("boxes") + + if "masks" in target: + # FIXME should we update the area here if there are no boxes? + target['masks'] = target['masks'][:, i:i + h, j:j + w] + fields.append("masks") + + # remove elements for which the boxes or masks that have zero area + if "boxes" in target or "masks" in target: + # favor boxes selection when defining which elements to keep + # this is compatible with previous implementation + if "boxes" in target: + cropped_boxes = target['boxes'].reshape(-1, 2, 2) + keep = torch.all(cropped_boxes[:, 1, :] > cropped_boxes[:, 0, :], dim=1) + else: + keep = target['masks'].flatten(1).any(1) + + for field in fields: + target[field] = target[field][keep] + + return cropped_image, target + + +def crop(image, target, region): + cropped_image = F.crop(image, *region) + + target = target.copy() + i, j, h, w = region + + # should we do something wrt the original size? + target["size"] = torch.tensor([h, w]) + + fields = ["labels", "area", "iscrowd"] + if 'obj_ids' in target: + fields.append('obj_ids') + + if "boxes" in target: + boxes = target["boxes"] + max_size = torch.as_tensor([w, h], dtype=torch.float32) + cropped_boxes = boxes - torch.as_tensor([j, i, j, i]) + cropped_boxes = torch.min(cropped_boxes.reshape(-1, 2, 2), max_size) + cropped_boxes = cropped_boxes.clamp(min=0) + + area = (cropped_boxes[:, 1, :] - cropped_boxes[:, 0, :]).prod(dim=1) + target["boxes"] = cropped_boxes.reshape(-1, 4) + target["area"] = area + fields.append("boxes") + + if "masks" in target: + # FIXME should we update the area here if there are no boxes? 
+ target['masks'] = target['masks'][:, i:i + h, j:j + w] + fields.append("masks") + + # remove elements for which the boxes or masks that have zero area + if "boxes" in target or "masks" in target: + # favor boxes selection when defining which elements to keep + # this is compatible with previous implementation + if "boxes" in target: + cropped_boxes = target['boxes'].reshape(-1, 2, 2) + keep = torch.all(cropped_boxes[:, 1, :] > cropped_boxes[:, 0, :], dim=1) + else: + keep = target['masks'].flatten(1).any(1) + + for field in fields: + target[field] = target[field][keep] + + return cropped_image, target + + +def hflip(image, target): + flipped_image = F.hflip(image) + + w, h = image.size + + target = target.copy() + if "boxes" in target: + boxes = target["boxes"] + boxes = boxes[:, [2, 1, 0, 3]] * torch.as_tensor([-1, 1, -1, 1]) + torch.as_tensor([w, 0, w, 0]) + target["boxes"] = boxes + + if "masks" in target: + target['masks'] = target['masks'].flip(-1) + + return flipped_image, target + + +def resize(image, target, size, max_size=None): + # size can be min_size (scalar) or (w, h) tuple + + def get_size_with_aspect_ratio(image_size, size, max_size=None): + w, h = image_size + if max_size is not None: + min_original_size = float(min((w, h))) + max_original_size = float(max((w, h))) + if max_original_size / min_original_size * size > max_size: + size = int(round(max_size * min_original_size / max_original_size)) + + if (w <= h and w == size) or (h <= w and h == size): + return (h, w) + + if w < h: + ow = size + oh = int(size * h / w) + else: + oh = size + ow = int(size * w / h) + + return (oh, ow) + + def get_size(image_size, size, max_size=None): + if isinstance(size, (list, tuple)): + return size[::-1] + else: + return get_size_with_aspect_ratio(image_size, size, max_size) + + size = get_size(image.size, size, max_size) + rescaled_image = F.resize(image, size) + + if target is None: + return rescaled_image, None + + ratios = tuple(float(s) / float(s_orig) for s, s_orig in zip(rescaled_image.size, image.size)) + ratio_width, ratio_height = ratios + + target = target.copy() + if "boxes" in target: + boxes = target["boxes"] + scaled_boxes = boxes * torch.as_tensor([ratio_width, ratio_height, ratio_width, ratio_height]) + target["boxes"] = scaled_boxes + + if "area" in target: + area = target["area"] + scaled_area = area * (ratio_width * ratio_height) + target["area"] = scaled_area + + h, w = size + target["size"] = torch.tensor([h, w]) + + if "masks" in target: + target['masks'] = interpolate( + target['masks'][:, None].float(), size, mode="nearest")[:, 0] > 0.5 + + return rescaled_image, target + + +def pad(image, target, padding): + # assumes that we only pad on the bottom right corners + padded_image = F.pad(image, (0, 0, padding[0], padding[1])) + if target is None: + return padded_image, None + target = target.copy() + # should we do something wrt the original size? 
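+ # PIL's Image.size is (w, h); store it reversed as (h, w) like the other transforms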
+ target["size"] = torch.tensor(padded_image[::-1]) + if "masks" in target: + target['masks'] = torch.nn.functional.pad(target['masks'], (0, padding[0], 0, padding[1])) + return padded_image, target + + +class RandomCrop(object): + def __init__(self, size): + self.size = size + + def __call__(self, img, target): + region = T.RandomCrop.get_params(img, self.size) + return crop(img, target, region) + + +class MotRandomCrop(RandomCrop): + def __call__(self, imgs: list, targets: list): + ret_imgs = [] + ret_targets = [] + region = T.RandomCrop.get_params(imgs[0], self.size) + for img_i, targets_i in zip(imgs, targets): + img_i, targets_i = crop(img_i, targets_i, region) + ret_imgs.append(img_i) + ret_targets.append(targets_i) + return ret_imgs, ret_targets + +class FixedMotRandomCrop(object): + def __init__(self, min_size: int, max_size: int): + self.min_size = min_size + self.max_size = max_size + + def __call__(self, imgs: list, targets: list): + ret_imgs = [] + ret_targets = [] + w = random.randint(self.min_size, min(imgs[0].width, self.max_size)) + h = random.randint(self.min_size, min(imgs[0].height, self.max_size)) + region = T.RandomCrop.get_params(imgs[0], [h, w]) + for img_i, targets_i in zip(imgs, targets): + img_i, targets_i = crop_mot(img_i, targets_i, region) + ret_imgs.append(img_i) + ret_targets.append(targets_i) + return ret_imgs, ret_targets + +class MotRandomShift(object): + def __init__(self, bs=1): + self.bs = bs + + def __call__(self, imgs: list, targets: list): + ret_imgs = copy.deepcopy(imgs) + ret_targets = copy.deepcopy(targets) + + n_frames = len(imgs) + select_i = random.choice(list(range(n_frames))) + w, h = imgs[select_i].size + + xshift = (100 * torch.rand(self.bs)).int() + xshift *= (torch.randn(self.bs) > 0.0).int() * 2 - 1 + yshift = (100 * torch.rand(self.bs)).int() + yshift *= (torch.randn(self.bs) > 0.0).int() * 2 - 1 + ymin = max(0, -yshift[0]) + ymax = min(h, h - yshift[0]) + xmin = max(0, -xshift[0]) + xmax = min(w, w - xshift[0]) + + region = (int(ymin), int(xmin), int(ymax-ymin), int(xmax-xmin)) + ret_imgs[select_i], ret_targets[select_i] = random_shift(imgs[select_i], targets[select_i], region, (h,w)) + + return ret_imgs, ret_targets + + +class FixedMotRandomShift(object): + def __init__(self, bs=1, padding=50): + self.bs = bs + self.padding = padding + + def __call__(self, imgs: list, targets: list): + ret_imgs = [] + ret_targets = [] + + n_frames = len(imgs) + w, h = imgs[0].size + xshift = (self.padding * torch.rand(self.bs)).int() + 1 + xshift *= (torch.randn(self.bs) > 0.0).int() * 2 - 1 + yshift = (self.padding * torch.rand(self.bs)).int() + 1 + yshift *= (torch.randn(self.bs) > 0.0).int() * 2 - 1 + ret_imgs.append(imgs[0]) + ret_targets.append(targets[0]) + for i in range(1, n_frames): + ymin = max(0, -yshift[0]) + ymax = min(h, h - yshift[0]) + xmin = max(0, -xshift[0]) + xmax = min(w, w - xshift[0]) + prev_img = ret_imgs[i-1].copy() + prev_target = copy.deepcopy(ret_targets[i-1]) + region = (int(ymin), int(xmin), int(ymax - ymin), int(xmax - xmin)) + img_i, target_i = random_shift(prev_img, prev_target, region, (h, w)) + ret_imgs.append(img_i) + ret_targets.append(target_i) + + return ret_imgs, ret_targets + + +class RandomSizeCrop(object): + def __init__(self, min_size: int, max_size: int): + self.min_size = min_size + self.max_size = max_size + + def __call__(self, img: PIL.Image.Image, target: dict): + w = random.randint(self.min_size, min(img.width, self.max_size)) + h = random.randint(self.min_size, min(img.height, self.max_size)) + 
+ region = T.RandomCrop.get_params(img, [h, w])
+ return crop(img, target, region)
+
+
+class MotRandomSizeCrop(RandomSizeCrop):
+ def __call__(self, imgs, targets):
+ w = random.randint(self.min_size, min(imgs[0].width, self.max_size))
+ h = random.randint(self.min_size, min(imgs[0].height, self.max_size))
+ region = T.RandomCrop.get_params(imgs[0], [h, w])
+ ret_imgs = []
+ ret_targets = []
+ for img_i, targets_i in zip(imgs, targets):
+ img_i, targets_i = crop(img_i, targets_i, region)
+ ret_imgs.append(img_i)
+ ret_targets.append(targets_i)
+ return ret_imgs, ret_targets
+
+
+class CenterCrop(object):
+ def __init__(self, size):
+ self.size = size
+
+ def __call__(self, img, target):
+ image_width, image_height = img.size
+ crop_height, crop_width = self.size
+ crop_top = int(round((image_height - crop_height) / 2.))
+ crop_left = int(round((image_width - crop_width) / 2.))
+ return crop(img, target, (crop_top, crop_left, crop_height, crop_width))
+
+
+class MotCenterCrop(CenterCrop):
+ def __call__(self, imgs, targets):
+ image_width, image_height = imgs[0].size
+ crop_height, crop_width = self.size
+ crop_top = int(round((image_height - crop_height) / 2.))
+ crop_left = int(round((image_width - crop_width) / 2.))
+ ret_imgs = []
+ ret_targets = []
+ for img_i, targets_i in zip(imgs, targets):
+ img_i, targets_i = crop(img_i, targets_i, (crop_top, crop_left, crop_height, crop_width))
+ ret_imgs.append(img_i)
+ ret_targets.append(targets_i)
+ return ret_imgs, ret_targets
+
+
+class RandomHorizontalFlip(object):
+ def __init__(self, p=0.5):
+ self.p = p
+
+ def __call__(self, img, target):
+ if random.random() < self.p:
+ return hflip(img, target)
+ return img, target
+
+
+class MotRandomHorizontalFlip(RandomHorizontalFlip):
+ def __call__(self, imgs, targets):
+ if random.random() < self.p:
+ ret_imgs = []
+ ret_targets = []
+ for img_i, targets_i in zip(imgs, targets):
+ img_i, targets_i = hflip(img_i, targets_i)
+ ret_imgs.append(img_i)
+ ret_targets.append(targets_i)
+ return ret_imgs, ret_targets
+ return imgs, targets
+
+
+class RandomResize(object):
+ def __init__(self, sizes, max_size=None):
+ assert isinstance(sizes, (list, tuple))
+ self.sizes = sizes
+ self.max_size = max_size
+
+ def __call__(self, img, target=None):
+ size = random.choice(self.sizes)
+ return resize(img, target, size, self.max_size)
+
+
+class MotRandomResize(RandomResize):
+ def __call__(self, imgs, targets):
+ size = random.choice(self.sizes)
+ ret_imgs = []
+ ret_targets = []
+ for img_i, targets_i in zip(imgs, targets):
+ img_i, targets_i = resize(img_i, targets_i, size, self.max_size)
+ ret_imgs.append(img_i)
+ ret_targets.append(targets_i)
+ return ret_imgs, ret_targets
+
+
+class RandomPad(object):
+ def __init__(self, max_pad):
+ self.max_pad = max_pad
+
+ def __call__(self, img, target):
+ pad_x = random.randint(0, self.max_pad)
+ pad_y = random.randint(0, self.max_pad)
+ return pad(img, target, (pad_x, pad_y))
+
+
+class MotRandomPad(RandomPad):
+ def __call__(self, imgs, targets):
+ pad_x = random.randint(0, self.max_pad)
+ pad_y = random.randint(0, self.max_pad)
+ ret_imgs = []
+ ret_targets = []
+ for img_i, targets_i in zip(imgs, targets):
+ img_i, target_i = pad(img_i, targets_i, (pad_x, pad_y))
+ ret_imgs.append(img_i)
+ ret_targets.append(target_i)
+ return ret_imgs, ret_targets
+
+
+class RandomSelect(object):
+ """
+ Randomly selects between transforms1 and transforms2,
+ with probability p for transforms1 and (1 - p) for transforms2
+ """
+ def __init__(self, transforms1, transforms2,
p=0.5): + self.transforms1 = transforms1 + self.transforms2 = transforms2 + self.p = p + + def __call__(self, img, target): + if random.random() < self.p: + return self.transforms1(img, target) + return self.transforms2(img, target) + + +class MotRandomSelect(RandomSelect): + """ + Randomly selects between transforms1 and transforms2, + with probability p for transforms1 and (1 - p) for transforms2 + """ + def __call__(self, imgs, targets): + if random.random() < self.p: + return self.transforms1(imgs, targets) + return self.transforms2(imgs, targets) + + +class ToTensor(object): + def __call__(self, img, target): + return F.to_tensor(img), target + + +class MotToTensor(ToTensor): + def __call__(self, imgs, targets): + ret_imgs = [] + for img in imgs: + ret_imgs.append(F.to_tensor(img)) + return ret_imgs, targets + + +class RandomErasing(object): + + def __init__(self, *args, **kwargs): + self.eraser = T.RandomErasing(*args, **kwargs) + + def __call__(self, img, target): + return self.eraser(img), target + + +class MotRandomErasing(RandomErasing): + def __call__(self, imgs, targets): + # TODO: Rewrite this part to ensure the data augmentation is same to each image. + ret_imgs = [] + for img_i, targets_i in zip(imgs, targets): + ret_imgs.append(self.eraser(img_i)) + return ret_imgs, targets + + +class MoTColorJitter(T.ColorJitter): + def __call__(self, imgs, targets): + transform = self.get_params(self.brightness, self.contrast, + self.saturation, self.hue) + ret_imgs = [] + for img_i, targets_i in zip(imgs, targets): + ret_imgs.append(transform(img_i)) + return ret_imgs, targets + + +class Normalize(object): + def __init__(self, mean, std): + self.mean = mean + self.std = std + + def __call__(self, image, target=None): + if target is not None: + target['ori_img'] = image.clone() + image = F.normalize(image, mean=self.mean, std=self.std) + if target is None: + return image, None + target = target.copy() + h, w = image.shape[-2:] + if "boxes" in target: + boxes = target["boxes"] + boxes = box_xyxy_to_cxcywh(boxes) + boxes = boxes / torch.tensor([w, h, w, h], dtype=torch.float32) + target["boxes"] = boxes + return image, target + + +class MotNormalize(Normalize): + def __call__(self, imgs, targets=None): + ret_imgs = [] + ret_targets = [] + for i in range(len(imgs)): + img_i = imgs[i] + targets_i = targets[i] if targets is not None else None + img_i, targets_i = super().__call__(img_i, targets_i) + ret_imgs.append(img_i) + ret_targets.append(targets_i) + return ret_imgs, ret_targets + + +class Compose(object): + def __init__(self, transforms): + self.transforms = transforms + + def __call__(self, image, target): + for t in self.transforms: + image, target = t(image, target) + return image, target + + def __repr__(self): + format_string = self.__class__.__name__ + "(" + for t in self.transforms: + format_string += "\n" + format_string += " {0}".format(t) + format_string += "\n)" + return format_string + + +class MotCompose(Compose): + def __call__(self, imgs, targets): + for t in self.transforms: + imgs, targets = t(imgs, targets) + return imgs, targets diff --git a/tracking/docker-build-context/byte_track/tutorials/qdtrack/README.md b/tracking/docker-build-context/byte_track/tutorials/qdtrack/README.md new file mode 100644 index 0000000000000000000000000000000000000000..47578f2014bdcea0e723584584b9e268f012e1b3 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/qdtrack/README.md @@ -0,0 +1,39 @@ +# QDTrack_reid_motion + +Step1. 
git clone https://github.com/SysCV/qdtrack.git and train the model.


Step2.

Replace https://github.com/SysCV/qdtrack/blob/master/qdtrack/models/mot/qdtrack.py with the qdtrack.py provided in this directory.

Add the mot_online folder from this directory to https://github.com/SysCV/qdtrack

Add tracker_reid_motion.py from this directory to https://github.com/SysCV/qdtrack and rename it to tracker.py

Step3. Download the qdtrack model trained on the MOT17 half training set: [google](https://drive.google.com/file/d/1IfM8i0R0lF_4NOgeloMPFo5d52dqhaHW/view?usp=sharing), [baidu(code:whcc)](https://pan.baidu.com/s/1IYRD3V2YOa6-YNFgMQyv7w)

Step4. Run
```
python3 -m torch.distributed.launch --nproc_per_node=8 --master_port=29501 tools/test.py configs/mot17/qdtrack-frcnn_r50_fpn_4e_mot17.py work_dirs/mot17_half_qdtrack.pth --launcher pytorch --eval track --eval-options resfile_path=output
```


# QDTrack_BYTE

Step1. git clone https://github.com/SysCV/qdtrack.git and train the model.


Step2.

Replace https://github.com/SysCV/qdtrack/blob/master/qdtrack/models/mot/qdtrack.py with the qdtrack.py provided in this directory.

Add the mot_online folder from this directory to https://github.com/SysCV/qdtrack

Add byte_tracker.py from this directory to https://github.com/SysCV/qdtrack


Step3. Run
```
python3 -m torch.distributed.launch --nproc_per_node=8 --master_port=29501 tools/test.py configs/mot17/qdtrack-frcnn_r50_fpn_4e_mot17.py work_dirs/mot17_half_qdtrack.pth --launcher pytorch --eval track --eval-options resfile_path=output
```
diff --git a/tracking/docker-build-context/byte_track/tutorials/qdtrack/byte_tracker.py b/tracking/docker-build-context/byte_track/tutorials/qdtrack/byte_tracker.py
new file mode 100644
index 0000000000000000000000000000000000000000..0a292293e52327be6b6f2ff8f227c87f1da0527b
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/tutorials/qdtrack/byte_tracker.py
@@ -0,0 +1,353 @@
+import numpy as np
+from collections import deque
+import os
+import os.path as osp
+import copy
+import torch
+import torch.nn.functional as F
+
+from mot_online.kalman_filter import KalmanFilter
+from mot_online.basetrack import BaseTrack, TrackState
+from mot_online import matching
+
+
+
+class STrack(BaseTrack):
+ shared_kalman = KalmanFilter()
+ def __init__(self, tlwh, score):
+
+ # waiting to be activated
+ self._tlwh = np.asarray(tlwh, dtype=np.float64)
+ self.kalman_filter = None
+ self.mean, self.covariance = None, None
+ self.is_activated = False
+
+ self.score = score
+ self.tracklet_len = 0
+
+ def predict(self):
+ mean_state = self.mean.copy()
+ if self.state != TrackState.Tracked:
+ mean_state[7] = 0
+ self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance)
+
+ @staticmethod
+ def multi_predict(stracks):
+ if len(stracks) > 0:
+ multi_mean = np.asarray([st.mean.copy() for st in stracks])
+ multi_covariance = np.asarray([st.covariance for st in stracks])
+ for i, st in enumerate(stracks):
+ if st.state != TrackState.Tracked:
+ multi_mean[i][7] = 0
+ multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(multi_mean, multi_covariance)
+ for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)):
+ stracks[i].mean = mean
+ stracks[i].covariance = cov
+
+ def activate(self, kalman_filter, frame_id):
+ """Start a new tracklet"""
+ self.kalman_filter = kalman_filter
+ self.track_id = self.next_id()
+ self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh))
+
+ self.tracklet_len = 0
+ self.state = TrackState.Tracked
+ if frame_id == 1:
+ self.is_activated = True
+ # self.is_activated = True
+ self.frame_id = frame_id
+ self.start_frame = frame_id
+
+ def re_activate(self, new_track, frame_id, new_id=False):
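+ """Re-activate a lost track with a matched detection, optionally assigning a new id."""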
+ self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) + ) + self.tracklet_len = 0 + self.state = TrackState.Tracked + self.is_activated = True + self.frame_id = frame_id + if new_id: + self.track_id = self.next_id() + self.score = new_track.score + + def update(self, new_track, frame_id): + """ + Update a matched track + :type new_track: STrack + :type frame_id: int + :type update_feature: bool + :return: + """ + self.frame_id = frame_id + self.tracklet_len += 1 + + new_tlwh = new_track.tlwh + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh)) + self.state = TrackState.Tracked + self.is_activated = True + + self.score = new_track.score + + @property + # @jit(nopython=True) + def tlwh(self): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. + """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + # @jit(nopython=True) + def tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_xyah(tlwh): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. + """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self): + return self.tlwh_to_xyah(self.tlwh) + + @staticmethod + # @jit(nopython=True) + def tlbr_to_tlwh(tlbr): + ret = np.asarray(tlbr).copy() + ret[2:] -= ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_tlbr(tlwh): + ret = np.asarray(tlwh).copy() + ret[2:] += ret[:2] + return ret + + def __repr__(self): + return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame) + + +class BYTETracker(object): + def __init__(self, frame_rate=30): + self.tracked_stracks = [] # type: list[STrack] + self.lost_stracks = [] # type: list[STrack] + self.removed_stracks = [] # type: list[STrack] + + self.frame_id = 0 + + self.low_thresh = 0.2 + self.track_thresh = 0.8 + self.det_thresh = self.track_thresh + 0.1 + + + self.buffer_size = int(frame_rate / 30.0 * 30) + self.max_time_lost = self.buffer_size + self.kalman_filter = KalmanFilter() + +# def update(self, output_results): + def update(self, det_bboxes, det_labels, frame_id, track_feats=None): + +# self.frame_id += 1 + self.frame_id = frame_id + 1 + activated_starcks = [] + refind_stracks = [] + lost_stracks = [] + removed_stracks = [] + +# scores = output_results[:, 4] +# bboxes = output_results[:, :4] # x1y1x2y2 + scores = det_bboxes[:, 4].cpu().numpy() + bboxes = det_bboxes[:, :4].cpu().numpy() + + remain_inds = scores > self.track_thresh + dets = bboxes[remain_inds] + scores_keep = scores[remain_inds] + + + inds_low = scores > self.low_thresh + inds_high = scores < self.track_thresh + inds_second = np.logical_and(inds_low, inds_high) + dets_second = bboxes[inds_second] + scores_second = scores[inds_second] + + + if len(dets) > 0: + '''Detections''' + detections = [STrack(STrack.tlbr_to_tlwh(tlbr), s) for + (tlbr, s) in zip(dets, scores_keep)] + else: + detections = [] + + ''' Add newly detected tracklets to tracked_stracks''' + unconfirmed = [] + tracked_stracks = [] # type: list[STrack] + for track in self.tracked_stracks: + if not 
track.is_activated: + unconfirmed.append(track) + else: + tracked_stracks.append(track) + + ''' Step 2: First association, with Kalman and IOU''' + strack_pool = joint_stracks(tracked_stracks, self.lost_stracks) + # Predict the current location with KF + STrack.multi_predict(strack_pool) + dists = matching.iou_distance(strack_pool, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.8) + + for itracked, idet in matches: + track = strack_pool[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + track.update(detections[idet], self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + ''' Step 3: Second association, with IOU''' + # association the untrack to the low score detections + if len(dets_second) > 0: + '''Detections''' + detections_second = [STrack(STrack.tlbr_to_tlwh(tlbr), s) for + (tlbr, s) in zip(dets_second, scores_second)] + else: + detections_second = [] + r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked] + dists = matching.iou_distance(r_tracked_stracks, detections_second) + matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.5) + for itracked, idet in matches: + track = r_tracked_stracks[itracked] + det = detections_second[idet] + if track.state == TrackState.Tracked: + track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + for it in u_track: + #track = strack_pool[it] + track = r_tracked_stracks[it] + if not track.state == TrackState.Lost: + track.mark_lost() + lost_stracks.append(track) + + '''Deal with unconfirmed tracks, usually tracks with only one beginning frame''' + detections = [detections[i] for i in u_detection] + dists = matching.iou_distance(unconfirmed, detections) + matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7) + for itracked, idet in matches: + unconfirmed[itracked].update(detections[idet], self.frame_id) + activated_starcks.append(unconfirmed[itracked]) + for it in u_unconfirmed: + track = unconfirmed[it] + track.mark_removed() + removed_stracks.append(track) + + """ Step 4: Init new stracks""" + for inew in u_detection: + track = detections[inew] + if track.score < self.det_thresh: + continue + track.activate(self.kalman_filter, self.frame_id) + activated_starcks.append(track) + """ Step 5: Update state""" + for track in self.lost_stracks: + if self.frame_id - track.end_frame > self.max_time_lost: + track.mark_removed() + removed_stracks.append(track) + + # print('Ramained match {} s'.format(t4-t3)) + + self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked] + self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks) + self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks) + self.lost_stracks.extend(lost_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks) + self.removed_stracks.extend(removed_stracks) + self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks) + # get scores of lost tracks + output_stracks = [track for track in self.tracked_stracks if track.is_activated] + +# return output_stracks + + bboxes = [] + labels = [] + ids = [] + for track in 
output_stracks: + if track.is_activated: + track_bbox = track.tlbr + bboxes.append([track_bbox[0], track_bbox[1], track_bbox[2], track_bbox[3], track.score]) + labels.append(0) + ids.append(track.track_id) + return torch.tensor(bboxes), torch.tensor(labels), torch.tensor(ids) + +def joint_stracks(tlista, tlistb): + exists = {} + res = [] + for t in tlista: + exists[t.track_id] = 1 + res.append(t) + for t in tlistb: + tid = t.track_id + if not exists.get(tid, 0): + exists[tid] = 1 + res.append(t) + return res + + +def sub_stracks(tlista, tlistb): + stracks = {} + for t in tlista: + stracks[t.track_id] = t + for t in tlistb: + tid = t.track_id + if stracks.get(tid, 0): + del stracks[tid] + return list(stracks.values()) + + +def remove_duplicate_stracks(stracksa, stracksb): + pdist = matching.iou_distance(stracksa, stracksb) + pairs = np.where(pdist < 0.15) + dupa, dupb = list(), list() + for p, q in zip(*pairs): + timep = stracksa[p].frame_id - stracksa[p].start_frame + timeq = stracksb[q].frame_id - stracksb[q].start_frame + if timep > timeq: + dupb.append(q) + else: + dupa.append(p) + resa = [t for i, t in enumerate(stracksa) if not i in dupa] + resb = [t for i, t in enumerate(stracksb) if not i in dupb] + return resa, resb + + +def remove_fp_stracks(stracksa, n_frame=10): + remain = [] + for t in stracksa: + score_5 = t.score_list[-n_frame:] + score_5 = np.array(score_5, dtype=np.float32) + index = score_5 < 0.45 + num = np.sum(index) + if num < n_frame: + remain.append(t) + return remain diff --git a/tracking/docker-build-context/byte_track/tutorials/qdtrack/mot_online/basetrack.py b/tracking/docker-build-context/byte_track/tutorials/qdtrack/mot_online/basetrack.py new file mode 100644 index 0000000000000000000000000000000000000000..4fe2233607f6d4ed28b11a0ae6c0303c8ca19098 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/qdtrack/mot_online/basetrack.py @@ -0,0 +1,52 @@ +import numpy as np +from collections import OrderedDict + + +class TrackState(object): + New = 0 + Tracked = 1 + Lost = 2 + Removed = 3 + + +class BaseTrack(object): + _count = 0 + + track_id = 0 + is_activated = False + state = TrackState.New + + history = OrderedDict() + features = [] + curr_feature = None + score = 0 + start_frame = 0 + frame_id = 0 + time_since_update = 0 + + # multi-camera + location = (np.inf, np.inf) + + @property + def end_frame(self): + return self.frame_id + + @staticmethod + def next_id(): + BaseTrack._count += 1 + return BaseTrack._count + + def activate(self, *args): + raise NotImplementedError + + def predict(self): + raise NotImplementedError + + def update(self, *args, **kwargs): + raise NotImplementedError + + def mark_lost(self): + self.state = TrackState.Lost + + def mark_removed(self): + self.state = TrackState.Removed diff --git a/tracking/docker-build-context/byte_track/tutorials/qdtrack/mot_online/kalman_filter.py b/tracking/docker-build-context/byte_track/tutorials/qdtrack/mot_online/kalman_filter.py new file mode 100644 index 0000000000000000000000000000000000000000..b4c4e9854d8abd2fea75ad6b1fe8cd6846c43680 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/qdtrack/mot_online/kalman_filter.py @@ -0,0 +1,269 @@ +# vim: expandtab:ts=4:sw=4 +import numpy as np +import scipy.linalg + +""" +Table for the 0.95 quantile of the chi-square distribution with N degrees of +freedom (contains values for N=1, ..., 9). Taken from MATLAB/Octave's chi2inv +function and used as Mahalanobis gating threshold. 
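+
+A minimal usage sketch (illustrative values only): a track is initiated from a
+single (x, y, a, h) measurement, then predicted and corrected once per frame:
+
+    kf = KalmanFilter()
+    mean, cov = kf.initiate(np.array([320., 240., 0.5, 80.]))
+    mean, cov = kf.predict(mean, cov)
+    mean, cov = kf.update(mean, cov, np.array([322., 241., 0.5, 81.]))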
+""" +chi2inv95 = { + 1: 3.8415, + 2: 5.9915, + 3: 7.8147, + 4: 9.4877, + 5: 11.070, + 6: 12.592, + 7: 14.067, + 8: 15.507, + 9: 16.919} + + +class KalmanFilter(object): + """ + A simple Kalman filter for tracking bounding boxes in image space. + + The 8-dimensional state space + + x, y, a, h, vx, vy, va, vh + + contains the bounding box center position (x, y), aspect ratio a, height h, + and their respective velocities. + + Object motion follows a constant velocity model. The bounding box location + (x, y, a, h) is taken as direct observation of the state space (linear + observation model). + + """ + + def __init__(self): + ndim, dt = 4, 1. + + # Create Kalman filter model matrices. + self._motion_mat = np.eye(2 * ndim, 2 * ndim) + for i in range(ndim): + self._motion_mat[i, ndim + i] = dt + self._update_mat = np.eye(ndim, 2 * ndim) + + # Motion and observation uncertainty are chosen relative to the current + # state estimate. These weights control the amount of uncertainty in + # the model. This is a bit hacky. + self._std_weight_position = 1. / 20 + self._std_weight_velocity = 1. / 160 + + def initiate(self, measurement): + """Create track from unassociated measurement. + + Parameters + ---------- + measurement : ndarray + Bounding box coordinates (x, y, a, h) with center position (x, y), + aspect ratio a, and height h. + + Returns + ------- + (ndarray, ndarray) + Returns the mean vector (8 dimensional) and covariance matrix (8x8 + dimensional) of the new track. Unobserved velocities are initialized + to 0 mean. + + """ + mean_pos = measurement + mean_vel = np.zeros_like(mean_pos) + mean = np.r_[mean_pos, mean_vel] + + std = [ + 2 * self._std_weight_position * measurement[3], + 2 * self._std_weight_position * measurement[3], + 1e-2, + 2 * self._std_weight_position * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 1e-5, + 10 * self._std_weight_velocity * measurement[3]] + covariance = np.diag(np.square(std)) + return mean, covariance + + def predict(self, mean, covariance): + """Run Kalman filter prediction step. + + Parameters + ---------- + mean : ndarray + The 8 dimensional mean vector of the object state at the previous + time step. + covariance : ndarray + The 8x8 dimensional covariance matrix of the object state at the + previous time step. + + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + + """ + std_pos = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-2, + self._std_weight_position * mean[3]] + std_vel = [ + self._std_weight_velocity * mean[3], + self._std_weight_velocity * mean[3], + 1e-5, + self._std_weight_velocity * mean[3]] + motion_cov = np.diag(np.square(np.r_[std_pos, std_vel])) + + #mean = np.dot(self._motion_mat, mean) + mean = np.dot(mean, self._motion_mat.T) + covariance = np.linalg.multi_dot(( + self._motion_mat, covariance, self._motion_mat.T)) + motion_cov + + return mean, covariance + + def project(self, mean, covariance): + """Project state distribution to measurement space. + + Parameters + ---------- + mean : ndarray + The state's mean vector (8 dimensional array). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + + Returns + ------- + (ndarray, ndarray) + Returns the projected mean and covariance matrix of the given state + estimate. 
+ + """ + std = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-1, + self._std_weight_position * mean[3]] + innovation_cov = np.diag(np.square(std)) + + mean = np.dot(self._update_mat, mean) + covariance = np.linalg.multi_dot(( + self._update_mat, covariance, self._update_mat.T)) + return mean, covariance + innovation_cov + + def multi_predict(self, mean, covariance): + """Run Kalman filter prediction step (Vectorized version). + Parameters + ---------- + mean : ndarray + The Nx8 dimensional mean matrix of the object states at the previous + time step. + covariance : ndarray + The Nx8x8 dimensional covariance matrics of the object states at the + previous time step. + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + """ + std_pos = [ + self._std_weight_position * mean[:, 3], + self._std_weight_position * mean[:, 3], + 1e-2 * np.ones_like(mean[:, 3]), + self._std_weight_position * mean[:, 3]] + std_vel = [ + self._std_weight_velocity * mean[:, 3], + self._std_weight_velocity * mean[:, 3], + 1e-5 * np.ones_like(mean[:, 3]), + self._std_weight_velocity * mean[:, 3]] + sqr = np.square(np.r_[std_pos, std_vel]).T + + motion_cov = [] + for i in range(len(mean)): + motion_cov.append(np.diag(sqr[i])) + motion_cov = np.asarray(motion_cov) + + mean = np.dot(mean, self._motion_mat.T) + left = np.dot(self._motion_mat, covariance).transpose((1, 0, 2)) + covariance = np.dot(left, self._motion_mat.T) + motion_cov + + return mean, covariance + + def update(self, mean, covariance, measurement): + """Run Kalman filter correction step. + + Parameters + ---------- + mean : ndarray + The predicted state's mean vector (8 dimensional). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + measurement : ndarray + The 4 dimensional measurement vector (x, y, a, h), where (x, y) + is the center position, a the aspect ratio, and h the height of the + bounding box. + + Returns + ------- + (ndarray, ndarray) + Returns the measurement-corrected state distribution. + + """ + projected_mean, projected_cov = self.project(mean, covariance) + + chol_factor, lower = scipy.linalg.cho_factor( + projected_cov, lower=True, check_finite=False) + kalman_gain = scipy.linalg.cho_solve( + (chol_factor, lower), np.dot(covariance, self._update_mat.T).T, + check_finite=False).T + innovation = measurement - projected_mean + + new_mean = mean + np.dot(innovation, kalman_gain.T) + new_covariance = covariance - np.linalg.multi_dot(( + kalman_gain, projected_cov, kalman_gain.T)) + return new_mean, new_covariance + + def gating_distance(self, mean, covariance, measurements, + only_position=False, metric='maha'): + """Compute gating distance between state distribution and measurements. + A suitable distance threshold can be obtained from `chi2inv95`. If + `only_position` is False, the chi-square distribution has 4 degrees of + freedom, otherwise 2. + Parameters + ---------- + mean : ndarray + Mean vector over the state distribution (8 dimensional). + covariance : ndarray + Covariance of the state distribution (8x8 dimensional). + measurements : ndarray + An Nx4 dimensional matrix of N measurements, each in + format (x, y, a, h) where (x, y) is the bounding box center + position, a the aspect ratio, and h the height. + only_position : Optional[bool] + If True, distance computation is done with respect to the bounding + box center position only. 
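+ metric : str
+ Distance metric: 'gaussian' for the squared Euclidean distance, or
+ 'maha' (default) for the squared Mahalanobis distance.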
+ Returns
+ -------
+ ndarray
+ Returns an array of length N, where the i-th element contains the
+ squared Mahalanobis distance between (mean, covariance) and
+ `measurements[i]`.
+ """
+ mean, covariance = self.project(mean, covariance)
+ if only_position:
+ mean, covariance = mean[:2], covariance[:2, :2]
+ measurements = measurements[:, :2]
+
+ d = measurements - mean
+ if metric == 'gaussian':
+ return np.sum(d * d, axis=1)
+ elif metric == 'maha':
+ cholesky_factor = np.linalg.cholesky(covariance)
+ z = scipy.linalg.solve_triangular(
+ cholesky_factor, d.T, lower=True, check_finite=False,
+ overwrite_b=True)
+ squared_maha = np.sum(z * z, axis=0)
+ return squared_maha
+ else:
+ raise ValueError('invalid distance metric')
diff --git a/tracking/docker-build-context/byte_track/tutorials/qdtrack/mot_online/matching.py b/tracking/docker-build-context/byte_track/tutorials/qdtrack/mot_online/matching.py
new file mode 100644
index 0000000000000000000000000000000000000000..54cb4be09624cdb68581508bdbdeecdc63539b7c
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/tutorials/qdtrack/mot_online/matching.py
@@ -0,0 +1,198 @@
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import lap
+import numpy as np
+import scipy
+from cython_bbox import bbox_overlaps as bbox_ious
+from scipy.spatial.distance import cdist
+
+chi2inv95 = {
+ 1: 3.8415,
+ 2: 5.9915,
+ 3: 7.8147,
+ 4: 9.4877,
+ 5: 11.070,
+ 6: 12.592,
+ 7: 14.067,
+ 8: 15.507,
+ 9: 16.919}
+
+def merge_matches(m1, m2, shape):
+ O,P,Q = shape
+ m1 = np.asarray(m1)
+ m2 = np.asarray(m2)
+
+ M1 = scipy.sparse.coo_matrix((np.ones(len(m1)), (m1[:, 0], m1[:, 1])), shape=(O, P))
+ M2 = scipy.sparse.coo_matrix((np.ones(len(m2)), (m2[:, 0], m2[:, 1])), shape=(P, Q))
+
+ mask = M1*M2
+ match = mask.nonzero()
+ match = list(zip(match[0], match[1]))
+ unmatched_O = tuple(set(range(O)) - set([i for i, j in match]))
+ unmatched_Q = tuple(set(range(Q)) - set([j for i, j in match]))
+
+ return match, unmatched_O, unmatched_Q
+
+
+def _indices_to_matches(cost_matrix, indices, thresh):
+ matched_cost = cost_matrix[tuple(zip(*indices))]
+ matched_mask = (matched_cost <= thresh)
+
+ matches = indices[matched_mask]
+ unmatched_a = tuple(set(range(cost_matrix.shape[0])) - set(matches[:, 0]))
+ unmatched_b = tuple(set(range(cost_matrix.shape[1])) - set(matches[:, 1]))
+
+ return matches, unmatched_a, unmatched_b
+
+
+def linear_assignment(cost_matrix, thresh):
+ if cost_matrix.size == 0:
+ return np.empty((0, 2), dtype=int), tuple(range(cost_matrix.shape[0])), tuple(range(cost_matrix.shape[1]))
+ matches, unmatched_a, unmatched_b = [], [], []
+ cost, x, y = lap.lapjv(cost_matrix, extend_cost=True, cost_limit=thresh)
+ for ix, mx in enumerate(x):
+ if mx >= 0:
+ matches.append([ix, mx])
+ unmatched_a = np.where(x < 0)[0]
+ unmatched_b = np.where(y < 0)[0]
+ matches = np.asarray(matches)
+ return matches, unmatched_a, unmatched_b
+
+
+def ious(atlbrs, btlbrs):
+ """
+ Compute cost based on IoU
+ :type atlbrs: list[tlbr] | np.ndarray
+ :type btlbrs: list[tlbr] | np.ndarray
+
+ :rtype ious np.ndarray
+ """
+ ious = np.zeros((len(atlbrs), len(btlbrs)), dtype=np.float64)
+ if ious.size == 0:
+ return ious
+
+ ious = bbox_ious(
+ np.ascontiguousarray(atlbrs, dtype=np.float64),
+ np.ascontiguousarray(btlbrs, dtype=np.float64)
+ )
+
+ return ious
+
+
+def iou_distance(atracks, btracks):
+ """
+ Compute cost based on IoU
+ :type atracks: list[STrack]
+ :type btracks: list[STrack]
+
+ :rtype cost_matrix np.ndarray
+ """
+
+ if (len(atracks)>0 and isinstance(atracks[0], np.ndarray)) or (len(btracks) > 0 and isinstance(btracks[0], np.ndarray)):
+ atlbrs = atracks
+ btlbrs = btracks
+ else:
+ atlbrs = [track.tlbr for track in atracks]
+ btlbrs = [track.tlbr for track in btracks]
+ _ious = ious(atlbrs, btlbrs)
+ cost_matrix = 1 - _ious
+
+ return cost_matrix
+
+def embedding_distance(tracks, detections, metric='cosine'):
+ """
+ :param tracks: list[STrack]
+ :param detections: list[BaseTrack]
+ :param metric:
+ :return: cost_matrix np.ndarray
+ """
+
+ cost_matrix = np.zeros((len(tracks), len(detections)), dtype=np.float64)
+ if cost_matrix.size == 0:
+ return cost_matrix
+ det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float64)
+ #for i, track in enumerate(tracks):
+ #cost_matrix[i, :] = np.maximum(0.0, cdist(track.smooth_feat.reshape(1,-1), det_features, metric))
+ track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float64)
+ cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric)) # Normalized features
+ return cost_matrix
+
+def embedding_distance2(tracks, detections, metric='cosine'):
+ """
+ :param tracks: list[STrack]
+ :param detections: list[BaseTrack]
+ :param metric:
+ :return: cost_matrix np.ndarray
+ """
+
+ cost_matrix = np.zeros((len(tracks), len(detections)), dtype=np.float64)
+ if cost_matrix.size == 0:
+ return cost_matrix
+ det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float64)
+ #for i, track in enumerate(tracks):
+ #cost_matrix[i, :] = np.maximum(0.0, cdist(track.smooth_feat.reshape(1,-1), det_features, metric))
+ track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float64)
+ cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric)) # Normalized features
+ track_features = np.asarray([track.features[0] for track in tracks], dtype=np.float64)
+ cost_matrix2 = np.maximum(0.0, cdist(track_features, det_features, metric)) # Normalized features
+ track_features = np.asarray([track.features[len(track.features)-1] for track in tracks], dtype=np.float64)
+ cost_matrix3 = np.maximum(0.0, cdist(track_features, det_features, metric)) # Normalized features
+ for row in range(len(cost_matrix)):
+ cost_matrix[row] = (cost_matrix[row]+cost_matrix2[row]+cost_matrix3[row])/3
+ return cost_matrix
+
+
+def vis_id_feature_A_distance(tracks, detections, metric='cosine'):
+ track_features = []
+ det_features = []
+ leg1 = len(tracks)
+ leg2 = len(detections)
+ cost_matrix = np.zeros((leg1, leg2), dtype=np.float64)
+ cost_matrix_det = np.zeros((leg1, leg2), dtype=np.float64)
+ cost_matrix_track = np.zeros((leg1, leg2), dtype=np.float64)
+ det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float64)
+ track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float64)
+ if leg2 != 0:
+ cost_matrix_det = np.maximum(0.0, cdist(det_features, det_features, metric))
+ if leg1 != 0:
+ cost_matrix_track = np.maximum(0.0, cdist(track_features, track_features, metric))
+ if cost_matrix.size == 0:
+ return track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track
+ cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric))
+ if leg1 > 10:
+ leg1 = 10
+ tracks = tracks[:10]
+ if leg2 > 10:
+ leg2 = 10
+ detections = detections[:10]
+ det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float64)
+ track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float64)
+
return track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track + +def gate_cost_matrix(kf, cost_matrix, tracks, detections, only_position=False): + if cost_matrix.size == 0: + return cost_matrix + gating_dim = 2 if only_position else 4 + gating_threshold = chi2inv95[gating_dim] + measurements = np.asarray([det.to_xyah() for det in detections]) + for row, track in enumerate(tracks): + gating_distance = kf.gating_distance( + track.mean, track.covariance, measurements, only_position) + cost_matrix[row, gating_distance > gating_threshold] = np.inf + return cost_matrix + + +def fuse_motion(kf, cost_matrix, tracks, detections, only_position=False, lambda_=0.98): + if cost_matrix.size == 0: + return cost_matrix + gating_dim = 2 if only_position else 4 + gating_threshold = chi2inv95[gating_dim] + measurements = np.asarray([det.to_xyah() for det in detections]) + for row, track in enumerate(tracks): + gating_distance = kf.gating_distance( + track.mean, track.covariance, measurements, only_position, metric='maha') + cost_matrix[row, gating_distance > gating_threshold] = np.inf + cost_matrix[row] = lambda_ * cost_matrix[row] + (1 - lambda_) * gating_distance + return cost_matrix diff --git a/tracking/docker-build-context/byte_track/tutorials/qdtrack/qdtrack.py b/tracking/docker-build-context/byte_track/tutorials/qdtrack/qdtrack.py new file mode 100644 index 0000000000000000000000000000000000000000..bbecbc04828e8af0bad0c2fa46a88ee76c7c9473 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/qdtrack/qdtrack.py @@ -0,0 +1,164 @@ +import numpy as np +from mmdet.core import bbox2result +from mmdet.models import TwoStageDetector + +from qdtrack.core import track2result +from ..builder import MODELS, build_tracker +from qdtrack.core import imshow_tracks, restore_result +from tracker import BYTETracker + + +@MODELS.register_module() +class QDTrack(TwoStageDetector): + + def __init__(self, tracker=None, freeze_detector=False, *args, **kwargs): + self.prepare_cfg(kwargs) + super().__init__(*args, **kwargs) + self.tracker_cfg = tracker + + self.freeze_detector = freeze_detector + if self.freeze_detector: + self._freeze_detector() + + def _freeze_detector(self): + + self.detector = [ + self.backbone, self.neck, self.rpn_head, self.roi_head.bbox_head + ] + for model in self.detector: + model.eval() + for param in model.parameters(): + param.requires_grad = False + + def prepare_cfg(self, kwargs): + if kwargs.get('train_cfg', False): + kwargs['roi_head']['track_train_cfg'] = kwargs['train_cfg'].get( + 'embed', None) + + def init_tracker(self): +# self.tracker = build_tracker(self.tracker_cfg) + self.tracker = BYTETracker() + + def forward_train(self, + img, + img_metas, + gt_bboxes, + gt_labels, + gt_match_indices, + ref_img, + ref_img_metas, + ref_gt_bboxes, + ref_gt_labels, + ref_gt_match_indices, + gt_bboxes_ignore=None, + gt_masks=None, + ref_gt_bboxes_ignore=None, + ref_gt_masks=None, + **kwargs): + x = self.extract_feat(img) + + losses = dict() + + # RPN forward and loss + proposal_cfg = self.train_cfg.get('rpn_proposal', self.test_cfg.rpn) + rpn_losses, proposal_list = self.rpn_head.forward_train( + x, + img_metas, + gt_bboxes, + gt_labels=None, + gt_bboxes_ignore=gt_bboxes_ignore, + proposal_cfg=proposal_cfg) + losses.update(rpn_losses) + + ref_x = self.extract_feat(ref_img) + ref_proposals = self.rpn_head.simple_test_rpn(ref_x, ref_img_metas) + + roi_losses = self.roi_head.forward_train( + x, img_metas, proposal_list, gt_bboxes, gt_labels, + gt_match_indices, 
ref_x, ref_img_metas, ref_proposals, + ref_gt_bboxes, ref_gt_labels, gt_bboxes_ignore, gt_masks, + ref_gt_bboxes_ignore, **kwargs) + losses.update(roi_losses) + + return losses + + def simple_test(self, img, img_metas, rescale=False): + # TODO inherit from a base tracker + assert self.roi_head.with_track, 'Track head must be implemented.' + frame_id = img_metas[0].get('frame_id', -1) + if frame_id == 0: + self.init_tracker() + + x = self.extract_feat(img) + proposal_list = self.rpn_head.simple_test_rpn(x, img_metas) + det_bboxes, det_labels, track_feats = self.roi_head.simple_test(x, img_metas, proposal_list, rescale) + + bboxes, labels, ids = self.tracker.update(det_bboxes, det_labels, frame_id, track_feats) + +# if track_feats is not None: +# bboxes, labels, ids = self.tracker.match( +# bboxes=det_bboxes, +# labels=det_labels, +# track_feats=track_feats, +# frame_id=frame_id) + + bbox_result = bbox2result(det_bboxes, det_labels, + self.roi_head.bbox_head.num_classes) + + if track_feats is not None: + track_result = track2result(bboxes, labels, ids, + self.roi_head.bbox_head.num_classes) + else: + track_result = [ + np.zeros((0, 6), dtype=np.float32) + for i in range(self.roi_head.bbox_head.num_classes) + ] + return dict(bbox_results=bbox_result, track_results=track_result) + + def show_result(self, + img, + result, + thickness=1, + font_scale=0.5, + show=False, + out_file=None, + wait_time=0, + backend='cv2', + **kwargs): + """Visualize tracking results. + + Args: + img (str | ndarray): Filename of loaded image. + result (dict): Tracking result. + The value of key 'track_results' is ndarray with shape (n, 6) + in [id, tl_x, tl_y, br_x, br_y, score] format. + The value of key 'bbox_results' is ndarray with shape (n, 5) + in [tl_x, tl_y, br_x, br_y, score] format. + thickness (int, optional): Thickness of lines. Defaults to 1. + font_scale (float, optional): Font scales of texts. Defaults + to 0.5. + show (bool, optional): Whether show the visualizations on the + fly. Defaults to False. + out_file (str | None, optional): Output filename. Defaults to None. + backend (str, optional): Backend to draw the bounding boxes, + options are `cv2` and `plt`. Defaults to 'cv2'. + + Returns: + ndarray: Visualized image. 
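+
+        Example (a hedged sketch; assumes ``model`` is a built QDTrack
+        instance, ``result`` is the dict returned by ``simple_test``, and
+        the file names are placeholders):
+
+        >>> vis = model.show_result('frame_0001.jpg', result,
+        ...                         show=False, out_file='vis/frame_0001.jpg')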
+ """ + assert isinstance(result, dict) + track_result = result.get('track_results', None) + bboxes, labels, ids = restore_result(track_result, return_ids=True) + img = imshow_tracks( + img, + bboxes, + labels, + ids, + classes=self.CLASSES, + thickness=thickness, + font_scale=font_scale, + show=show, + out_file=out_file, + wait_time=wait_time, + backend=backend) + return img diff --git a/tracking/docker-build-context/byte_track/tutorials/qdtrack/tracker_reid_motion.py b/tracking/docker-build-context/byte_track/tutorials/qdtrack/tracker_reid_motion.py new file mode 100644 index 0000000000000000000000000000000000000000..406a0a413fe5d5682497ea2bef6a1148a8650cb6 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/qdtrack/tracker_reid_motion.py @@ -0,0 +1,397 @@ +import numpy as np +from collections import deque +import os +import os.path as osp +import copy +import torch +import torch.nn.functional as F + +from mot_online.kalman_filter import KalmanFilter +from mot_online.basetrack import BaseTrack, TrackState +from mot_online import matching + + + +class STrack(BaseTrack): + shared_kalman = KalmanFilter() + def __init__(self, tlwh, score, temp_feat, buffer_size=30): + + # wait activate + self._tlwh = np.asarray(tlwh, dtype=np.float) + self.kalman_filter = None + self.mean, self.covariance = None, None + self.is_activated = False + + self.score = score + self.tracklet_len = 0 + + self.smooth_feat = None + self.update_features(temp_feat) + self.features = deque([], maxlen=buffer_size) + self.alpha = 0.9 + + def update_features(self, feat): + feat /= np.linalg.norm(feat) + self.curr_feat = feat + if self.smooth_feat is None: + self.smooth_feat = feat + else: + self.smooth_feat = self.alpha * self.smooth_feat + (1 - self.alpha) * feat + self.features.append(feat) + self.smooth_feat /= np.linalg.norm(self.smooth_feat) + + def predict(self): + mean_state = self.mean.copy() + if self.state != TrackState.Tracked: + mean_state[7] = 0 + self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance) + + @staticmethod + def multi_predict(stracks): + if len(stracks) > 0: + multi_mean = np.asarray([st.mean.copy() for st in stracks]) + multi_covariance = np.asarray([st.covariance for st in stracks]) + for i, st in enumerate(stracks): + if st.state != TrackState.Tracked: + multi_mean[i][7] = 0 + multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(multi_mean, multi_covariance) + for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)): + stracks[i].mean = mean + stracks[i].covariance = cov + + def activate(self, kalman_filter, frame_id): + """Start a new tracklet""" + self.kalman_filter = kalman_filter + self.track_id = self.next_id() + self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh)) + + self.tracklet_len = 0 + self.state = TrackState.Tracked + if frame_id == 1: + self.is_activated = True + # self.is_activated = True + self.frame_id = frame_id + self.start_frame = frame_id + + def re_activate(self, new_track, frame_id, new_id=False): + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) + ) + + self.update_features(new_track.curr_feat) + self.tracklet_len = 0 + self.state = TrackState.Tracked + self.is_activated = True + self.frame_id = frame_id + if new_id: + self.track_id = self.next_id() + + def update(self, new_track, frame_id, update_feature=True): + """ + Update a matched track + :type new_track: STrack + :type frame_id: int + :type 
update_feature: bool + :return: + """ + self.frame_id = frame_id + self.tracklet_len += 1 + + new_tlwh = new_track.tlwh + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh)) + self.state = TrackState.Tracked + self.is_activated = True + + self.score = new_track.score + if update_feature: + self.update_features(new_track.curr_feat) + + @property + # @jit(nopython=True) + def tlwh(self): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. + """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + # @jit(nopython=True) + def tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_xyah(tlwh): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. + """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self): + return self.tlwh_to_xyah(self.tlwh) + + @staticmethod + # @jit(nopython=True) + def tlbr_to_tlwh(tlbr): + ret = np.asarray(tlbr).copy() + ret[2:] -= ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_tlbr(tlwh): + ret = np.asarray(tlwh).copy() + ret[2:] += ret[:2] + return ret + + def __repr__(self): + return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame) + + +class BYTETracker(object): + def __init__(self, frame_rate=30): + self.tracked_stracks = [] # type: list[STrack] + self.lost_stracks = [] # type: list[STrack] + self.removed_stracks = [] # type: list[STrack] + + self.frame_id = 0 + + self.low_thresh = 0.2 + self.track_thresh = 0.8 + self.det_thresh = self.track_thresh + 0.1 + + + self.buffer_size = int(frame_rate / 30.0 * 30) + self.max_time_lost = self.buffer_size + self.kalman_filter = KalmanFilter() + +# def update(self, output_results): + def update(self, det_bboxes, det_labels, frame_id, track_feats): + +# self.frame_id += 1 + self.frame_id = frame_id + 1 + activated_starcks = [] + refind_stracks = [] + lost_stracks = [] + removed_stracks = [] + +# scores = output_results[:, 4] +# bboxes = output_results[:, :4] # x1y1x2y2 + scores = det_bboxes[:, 4].cpu().numpy() + bboxes = det_bboxes[:, :4].cpu().numpy() + + track_feature = F.normalize(track_feats).cpu().numpy() + + remain_inds = scores > self.track_thresh + dets = bboxes[remain_inds] + scores_keep = scores[remain_inds] + id_feature = track_feature[remain_inds] + + + inds_low = scores > self.low_thresh + inds_high = scores < self.track_thresh + inds_second = np.logical_and(inds_low, inds_high) + dets_second = bboxes[inds_second] + scores_second = scores[inds_second] + id_feature_second = track_feature[inds_second] + + if len(dets) > 0: + '''Detections''' + detections = [STrack(STrack.tlbr_to_tlwh(tlbr), s, f) for + (tlbr, s, f) in zip(dets, scores_keep, id_feature)] + else: + detections = [] + + + ''' Add newly detected tracklets to tracked_stracks''' + unconfirmed = [] + tracked_stracks = [] # type: list[STrack] + for track in self.tracked_stracks: + if not track.is_activated: + unconfirmed.append(track) + else: + tracked_stracks.append(track) + + ''' Step 2: First association, with Kalman and IOU''' + strack_pool = joint_stracks(tracked_stracks, self.lost_stracks) + # 
Predict the current location with KF + STrack.multi_predict(strack_pool) + + dists = matching.embedding_distance(strack_pool, detections) + dists = matching.fuse_motion(self.kalman_filter, dists, strack_pool, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.6) +# dists = matching.iou_distance(strack_pool, detections) +# matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.8) + + for itracked, idet in matches: + track = strack_pool[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + track.update(detections[idet], self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + ''' Step 3: Second association, with IOU''' + detections = [detections[i] for i in u_detection] + r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked] + dists = matching.iou_distance(r_tracked_stracks, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.5) + + for itracked, idet in matches: + track = r_tracked_stracks[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + + ''' Step 3.5: Second association, with IOU''' + # association the untrack to the low score detections + if len(dets_second) > 0: + '''Detections''' + detections_second = [STrack(STrack.tlbr_to_tlwh(tlbr), s, f) for + (tlbr, s, f) in zip(dets_second, scores_second, id_feature_second)] + else: + detections_second = [] + + second_tracked_stracks = [r_tracked_stracks[i] for i in u_track if r_tracked_stracks[i].state == TrackState.Tracked] + dists = matching.iou_distance(second_tracked_stracks, detections_second) + matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.5) + for itracked, idet in matches: + track = second_tracked_stracks[itracked] + det = detections_second[idet] + if track.state == TrackState.Tracked: + track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + for it in u_track: + #track = r_tracked_stracks[it] + track = second_tracked_stracks[it] + if not track.state == TrackState.Lost: + track.mark_lost() + lost_stracks.append(track) + + '''Deal with unconfirmed tracks, usually tracks with only one beginning frame''' + detections = [detections[i] for i in u_detection] + dists = matching.iou_distance(unconfirmed, detections) + matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7) + for itracked, idet in matches: + unconfirmed[itracked].update(detections[idet], self.frame_id) + activated_starcks.append(unconfirmed[itracked]) + for it in u_unconfirmed: + track = unconfirmed[it] + track.mark_removed() + removed_stracks.append(track) + + """ Step 4: Init new stracks""" + for inew in u_detection: + track = detections[inew] + if track.score < self.det_thresh: + continue + track.activate(self.kalman_filter, self.frame_id) + activated_starcks.append(track) + """ Step 5: Update state""" + for track in self.lost_stracks: + if self.frame_id - track.end_frame > self.max_time_lost: + track.mark_removed() + removed_stracks.append(track) + + # print('Ramained match {} s'.format(t4-t3)) + + self.tracked_stracks = [t for t in self.tracked_stracks if t.state == 
TrackState.Tracked] + self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks) + self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks) + self.lost_stracks.extend(lost_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks) + self.removed_stracks.extend(removed_stracks) + self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks) + # get scores of lost tracks + output_stracks = [track for track in self.tracked_stracks if track.is_activated] + +# return output_stracks + + bboxes = [] + labels = [] + ids = [] + for track in output_stracks: + if track.is_activated: + track_bbox = track.tlbr + bboxes.append([track_bbox[0], track_bbox[1], track_bbox[2], track_bbox[3], track.score]) + labels.append(0) + ids.append(track.track_id) + return torch.tensor(bboxes), torch.tensor(labels), torch.tensor(ids) + +def joint_stracks(tlista, tlistb): + exists = {} + res = [] + for t in tlista: + exists[t.track_id] = 1 + res.append(t) + for t in tlistb: + tid = t.track_id + if not exists.get(tid, 0): + exists[tid] = 1 + res.append(t) + return res + + +def sub_stracks(tlista, tlistb): + stracks = {} + for t in tlista: + stracks[t.track_id] = t + for t in tlistb: + tid = t.track_id + if stracks.get(tid, 0): + del stracks[tid] + return list(stracks.values()) + + +def remove_duplicate_stracks(stracksa, stracksb): + pdist = matching.iou_distance(stracksa, stracksb) + pairs = np.where(pdist < 0.15) + dupa, dupb = list(), list() + for p, q in zip(*pairs): + timep = stracksa[p].frame_id - stracksa[p].start_frame + timeq = stracksb[q].frame_id - stracksb[q].start_frame + if timep > timeq: + dupb.append(q) + else: + dupa.append(p) + resa = [t for i, t in enumerate(stracksa) if not i in dupa] + resb = [t for i, t in enumerate(stracksb) if not i in dupb] + return resa, resb + + +def remove_fp_stracks(stracksa, n_frame=10): + remain = [] + for t in stracksa: + score_5 = t.score_list[-n_frame:] + score_5 = np.array(score_5, dtype=np.float32) + index = score_5 < 0.45 + num = np.sum(index) + if num < n_frame: + remain.append(t) + return remain diff --git a/tracking/docker-build-context/byte_track/tutorials/trades/README.md b/tracking/docker-build-context/byte_track/tutorials/trades/README.md new file mode 100644 index 0000000000000000000000000000000000000000..95afad0195f6230b7ca593dfd088ea7953ff2ed6 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/trades/README.md @@ -0,0 +1,41 @@ +# TraDeS + +Step1. git clone https://github.com/JialianW/TraDeS.git + + +Step2. + +replace https://github.com/JialianW/TraDeS/blob/master/src/lib/utils/tracker.py + +replace https://github.com/JialianW/TraDeS/blob/master/src/lib/opts.py + + +Step3. run +``` +python3 test.py tracking --exp_id mot17_half --dataset mot --dataset_version 17halfval --pre_hm --ltrb_amodal --inference --load_model ../models/mot_half.pth --gpus 0 --clip_len 3 --trades --track_thresh 0.4 --new_thresh 0.4 --out_thresh 0.2 --pre_thresh 0.5 +``` + + +# TraDeS_BYTE + +Step1. git clone https://github.com/JialianW/TraDeS.git + + +Step2. + +replace https://github.com/JialianW/TraDeS/blob/master/src/lib/utils/tracker.py by byte_tracker.py + +replace https://github.com/JialianW/TraDeS/blob/master/src/lib/opts.py + +add mot_online to https://github.com/JialianW/TraDeS/blob/master/src/lib/utils + +Step3. 
run
+```
+python3 test.py tracking --exp_id mot17_half --dataset mot --dataset_version 17halfval --pre_hm --ltrb_amodal --inference --load_model ../models/mot_half.pth --gpus 0 --clip_len 3 --trades --track_thresh 0.4 --new_thresh 0.5 --out_thresh 0.1 --pre_thresh 0.5
+```
+
+
+## Notes
+tracker.py: motion + reid
+
+byte_tracker.py: motion with kalman filter
diff --git a/tracking/docker-build-context/byte_track/tutorials/trades/byte_tracker.py b/tracking/docker-build-context/byte_track/tutorials/trades/byte_tracker.py
new file mode 100644
index 0000000000000000000000000000000000000000..d154045b6c86ceebdf941a5e735b47f4542d7908
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/tutorials/trades/byte_tracker.py
@@ -0,0 +1,352 @@
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import numpy as np
+# from numba import jit
+import copy
+from .mot_online.kalman_filter import KalmanFilter
+from .mot_online.basetrack import BaseTrack, TrackState
+from .mot_online import matching
+
+
+class STrack(BaseTrack):
+    shared_kalman = KalmanFilter()
+
+    def __init__(self, tlwh, score):
+
+        # waiting to be activated
+        self._tlwh = np.asarray(tlwh, dtype=float)
+        self.kalman_filter = None
+        self.mean, self.covariance = None, None
+        self.is_activated = False
+
+        self.score = score
+        self.tracklet_len = 0
+
+    def predict(self):
+        mean_state = self.mean.copy()
+        if self.state != TrackState.Tracked:
+            mean_state[7] = 0
+        self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance)
+
+    @staticmethod
+    def multi_predict(stracks):
+        if len(stracks) > 0:
+            multi_mean = np.asarray([st.mean.copy() for st in stracks])
+            multi_covariance = np.asarray([st.covariance for st in stracks])
+            for i, st in enumerate(stracks):
+                if st.state != TrackState.Tracked:
+                    multi_mean[i][7] = 0
+            multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(multi_mean, multi_covariance)
+            for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)):
+                stracks[i].mean = mean
+                stracks[i].covariance = cov
+
+    def activate(self, kalman_filter, frame_id):
+        """Start a new tracklet"""
+        self.kalman_filter = kalman_filter
+        self.track_id = self.next_id()
+        self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh))
+
+        self.tracklet_len = 0
+        self.state = TrackState.Tracked
+        if frame_id == 1:
+            self.is_activated = True
+        # self.is_activated = True
+        self.frame_id = frame_id
+        self.start_frame = frame_id
+
+    def re_activate(self, new_track, frame_id, new_id=False):
+        self.mean, self.covariance = self.kalman_filter.update(
+            self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh)
+        )
+        self.tracklet_len = 0
+        self.state = TrackState.Tracked
+        self.is_activated = True
+        self.frame_id = frame_id
+        if new_id:
+            self.track_id = self.next_id()
+        self.score = new_track.score
+
+    def update(self, new_track, frame_id):
+        """
+        Update a matched track
+        :type new_track: STrack
+        :type frame_id: int
+        :return:
+        """
+        self.frame_id = frame_id
+        self.tracklet_len += 1
+
+        new_tlwh = new_track.tlwh
+        self.mean, self.covariance = self.kalman_filter.update(
+            self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh))
+        self.state = TrackState.Tracked
+        self.is_activated = True
+
+        self.score = new_track.score
+
+    @property
+    # @jit(nopython=True)
+    def tlwh(self):
+        """Get current position in bounding box format `(top left x, top left y,
+        width, height)`.
+        """
+        if self.mean is None:
+            return self._tlwh.copy()
+        ret = self.mean[:4].copy()
+        ret[2] *= ret[3]
+        ret[:2] -= ret[2:] / 2
+        return ret
+
+    @property
+    # @jit(nopython=True)
+    def tlbr(self):
+        """Convert bounding box to format `(min x, min y, max x, max y)`, i.e.,
+        `(top left, bottom right)`.
+        """
+        ret = self.tlwh.copy()
+        ret[2:] += ret[:2]
+        return ret
+
+    @staticmethod
+    # @jit(nopython=True)
+    def tlwh_to_xyah(tlwh):
+        """Convert bounding box to format `(center x, center y, aspect ratio,
+        height)`, where the aspect ratio is `width / height`.
+        """
+        ret = np.asarray(tlwh).copy()
+        ret[:2] += ret[2:] / 2
+        ret[2] /= ret[3]
+        return ret
+
+    def to_xyah(self):
+        return self.tlwh_to_xyah(self.tlwh)
+
+    @staticmethod
+    # @jit(nopython=True)
+    def tlbr_to_tlwh(tlbr):
+        ret = np.asarray(tlbr).copy()
+        ret[2:] -= ret[:2]
+        return ret
+
+    @staticmethod
+    # @jit(nopython=True)
+    def tlwh_to_tlbr(tlwh):
+        ret = np.asarray(tlwh).copy()
+        ret[2:] += ret[:2]
+        return ret
+
+    def __repr__(self):
+        return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame)
+
+
+class BYTETracker(object):
+    def __init__(self, args, frame_rate=30):
+        self.args = args
+        self.det_thresh = args.new_thresh
+        self.buffer_size = int(frame_rate / 30.0 * args.track_buffer)
+        self.max_time_lost = self.buffer_size
+        self.reset()
+
+    # has no effect on the final output; kept only to stay compatible with the codebase
+    def init_track(self, results):
+        for item in results:
+            if item['score'] > self.args.new_thresh and item['class'] == 1:
+                self.id_count += 1
+                item['active'] = 1
+                item['age'] = 1
+                item['tracking_id'] = self.id_count
+                if 'ct' not in item:
+                    bbox = item['bbox']
+                    item['ct'] = [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2]
+                self.tracks.append(item)
+
+    def reset(self):
+        self.frame_id = 0
+        self.kalman_filter = KalmanFilter()
+        self.tracked_stracks = []  # type: list[STrack]
+        self.lost_stracks = []  # type: list[STrack]
+        self.removed_stracks = []  # type: list[STrack]
+        self.tracks = []
+
+        # has no effect on the final output; kept only to stay compatible with the codebase
+        self.id_count = 0
+
+    def step(self, results, public_det=None):
+        self.frame_id += 1
+        activated_starcks = []
+        refind_stracks = []
+        lost_stracks = []
+        removed_stracks = []
+        detections = []
+        detections_second = []
+
+        scores = np.array([item['score'] for item in results if item['class'] == 1], np.float32)
+        bboxes = np.vstack([item['bbox'] for item in results if item['class'] == 1])  # N x 4, x1y1x2y2
+
+        remain_inds = scores >= self.args.track_thresh
+        dets = bboxes[remain_inds]
+        scores_keep = scores[remain_inds]
+
+        inds_low = scores > self.args.out_thresh
+        inds_high = scores < self.args.track_thresh
+        inds_second = np.logical_and(inds_low, inds_high)
+        dets_second = bboxes[inds_second]
+        scores_second = scores[inds_second]
+
+        if len(dets) > 0:
+            '''Detections'''
+            detections = [STrack(STrack.tlbr_to_tlwh(tlbr), s) for
+                          (tlbr, s) in zip(dets, scores_keep)]
+        else:
+            detections = []
+
+        ''' Add newly detected tracklets to tracked_stracks'''
+        unconfirmed = []
+        tracked_stracks = []  # type: list[STrack]
+        for track in self.tracked_stracks:
+            if not track.is_activated:
+                unconfirmed.append(track)
+            else:
+                tracked_stracks.append(track)
+
+        ''' Step 2: First association, with Kalman and IOU'''
+        strack_pool = joint_stracks(tracked_stracks, self.lost_stracks)
+        # Predict the current location with KF
+        STrack.multi_predict(strack_pool)
+        dists = matching.iou_distance(strack_pool, detections)
+        #dists = 
matching.fuse_motion(self.kalman_filter, dists, strack_pool, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.9) + + for itracked, idet in matches: + track = strack_pool[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + track.update(detections[idet], self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + ''' Step 3: Second association, association the untrack to the low score detections, with IOU''' + if len(dets_second) > 0: + '''Detections''' + detections_second = [STrack(STrack.tlbr_to_tlwh(tlbr), s) for + (tlbr, s) in zip(dets_second, scores_second)] + else: + detections_second = [] + r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked] + dists = matching.iou_distance(r_tracked_stracks, detections_second) + matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.4) + for itracked, idet in matches: + track = r_tracked_stracks[itracked] + det = detections_second[idet] + if track.state == TrackState.Tracked: + track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + for it in u_track: + #track = r_tracked_stracks[it] + track = r_tracked_stracks[it] + if not track.state == TrackState.Lost: + track.mark_lost() + lost_stracks.append(track) + + '''Deal with unconfirmed tracks, usually tracks with only one beginning frame''' + detections = [detections[i] for i in u_detection] + dists = matching.iou_distance(unconfirmed, detections) + matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7) + for itracked, idet in matches: + unconfirmed[itracked].update(detections[idet], self.frame_id) + activated_starcks.append(unconfirmed[itracked]) + for it in u_unconfirmed: + track = unconfirmed[it] + track.mark_removed() + removed_stracks.append(track) + + """ Step 4: Init new stracks""" + for inew in u_detection: + track = detections[inew] + if track.score < self.det_thresh: + continue + track.activate(self.kalman_filter, self.frame_id) + activated_starcks.append(track) + """ Step 5: Update state""" + for track in self.lost_stracks: + if self.frame_id - track.end_frame > self.max_time_lost: + track.mark_removed() + removed_stracks.append(track) + + self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked] + self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks) + self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks) + self.lost_stracks.extend(lost_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks) + self.removed_stracks.extend(removed_stracks) + self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks) + output_stracks = [track for track in self.tracked_stracks if track.is_activated] + + ret = [] + for track in output_stracks: + track_dict = {} + track_dict['score'] = track.score + track_dict['bbox'] = track.tlbr + bbox = track_dict['bbox'] + track_dict['ct'] = [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2] + track_dict['active'] = 1 if track.is_activated else 0 + track_dict['tracking_id'] = track.track_id + track_dict['class'] = 1 + ret.append(track_dict) + + self.tracks = ret + return ret + + +def 
joint_stracks(tlista, tlistb): + exists = {} + res = [] + for t in tlista: + exists[t.track_id] = 1 + res.append(t) + for t in tlistb: + tid = t.track_id + if not exists.get(tid, 0): + exists[tid] = 1 + res.append(t) + return res + + +def sub_stracks(tlista, tlistb): + stracks = {} + for t in tlista: + stracks[t.track_id] = t + for t in tlistb: + tid = t.track_id + if stracks.get(tid, 0): + del stracks[tid] + return list(stracks.values()) + + +def remove_duplicate_stracks(stracksa, stracksb): + pdist = matching.iou_distance(stracksa, stracksb) + pairs = np.where(pdist < 0.15) + dupa, dupb = list(), list() + for p, q in zip(*pairs): + timep = stracksa[p].frame_id - stracksa[p].start_frame + timeq = stracksb[q].frame_id - stracksb[q].start_frame + if timep > timeq: + dupb.append(q) + else: + dupa.append(p) + resa = [t for i, t in enumerate(stracksa) if not i in dupa] + resb = [t for i, t in enumerate(stracksb) if not i in dupb] + return resa, resb diff --git a/tracking/docker-build-context/byte_track/tutorials/trades/mot_online/basetrack.py b/tracking/docker-build-context/byte_track/tutorials/trades/mot_online/basetrack.py new file mode 100644 index 0000000000000000000000000000000000000000..4fe2233607f6d4ed28b11a0ae6c0303c8ca19098 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/trades/mot_online/basetrack.py @@ -0,0 +1,52 @@ +import numpy as np +from collections import OrderedDict + + +class TrackState(object): + New = 0 + Tracked = 1 + Lost = 2 + Removed = 3 + + +class BaseTrack(object): + _count = 0 + + track_id = 0 + is_activated = False + state = TrackState.New + + history = OrderedDict() + features = [] + curr_feature = None + score = 0 + start_frame = 0 + frame_id = 0 + time_since_update = 0 + + # multi-camera + location = (np.inf, np.inf) + + @property + def end_frame(self): + return self.frame_id + + @staticmethod + def next_id(): + BaseTrack._count += 1 + return BaseTrack._count + + def activate(self, *args): + raise NotImplementedError + + def predict(self): + raise NotImplementedError + + def update(self, *args, **kwargs): + raise NotImplementedError + + def mark_lost(self): + self.state = TrackState.Lost + + def mark_removed(self): + self.state = TrackState.Removed diff --git a/tracking/docker-build-context/byte_track/tutorials/trades/mot_online/kalman_filter.py b/tracking/docker-build-context/byte_track/tutorials/trades/mot_online/kalman_filter.py new file mode 100644 index 0000000000000000000000000000000000000000..82111a336d4d94bece171f2f95d9147bb7456285 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/trades/mot_online/kalman_filter.py @@ -0,0 +1,252 @@ +# vim: expandtab:ts=4:sw=4 +import numpy as np +import scipy.linalg + +""" +Table for the 0.95 quantile of the chi-square distribution with N degrees of +freedom (contains values for N=1, ..., 9). Taken from MATLAB/Octave's chi2inv +function and used as Mahalanobis gating threshold. +""" +chi2inv95 = { + 1: 3.8415, + 2: 5.9915, + 3: 7.8147, + 4: 9.4877, + 5: 11.070, + 6: 12.592, + 7: 14.067, + 8: 15.507, + 9: 16.919} + + +class KalmanFilter(object): + """ + A simple Kalman filter for tracking bounding boxes in image space. + The 8-dimensional state space + x, y, a, h, vx, vy, va, vh + contains the bounding box center position (x, y), aspect ratio a, height h, + and their respective velocities. + Object motion follows a constant velocity model. The bounding box location + (x, y, a, h) is taken as direct observation of the state space (linear + observation model). 
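+
+    A minimal usage sketch (illustrative only; the (x, y, a, h) numbers are
+    made-up measurements, not values from this repository):
+
+    >>> kf = KalmanFilter()
+    >>> mean, cov = kf.initiate(np.array([320., 240., 0.5, 80.]))
+    >>> mean, cov = kf.predict(mean, cov)      # propagate one frame
+    >>> mean, cov = kf.update(mean, cov, np.array([324., 238., 0.5, 82.]))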
+ """ + + def __init__(self): + ndim, dt = 4, 1. + + # Create Kalman filter model matrices. + self._motion_mat = np.eye(2 * ndim, 2 * ndim) + for i in range(ndim): + self._motion_mat[i, ndim + i] = dt + self._update_mat = np.eye(ndim, 2 * ndim) + + # Motion and observation uncertainty are chosen relative to the current + # state estimate. These weights control the amount of uncertainty in + # the model. This is a bit hacky. + self._std_weight_position = 1. / 20 + self._std_weight_velocity = 1. / 160 + + def initiate(self, measurement): + """Create track from unassociated measurement. + Parameters + ---------- + measurement : ndarray + Bounding box coordinates (x, y, a, h) with center position (x, y), + aspect ratio a, and height h. + Returns + ------- + (ndarray, ndarray) + Returns the mean vector (8 dimensional) and covariance matrix (8x8 + dimensional) of the new track. Unobserved velocities are initialized + to 0 mean. + """ + mean_pos = measurement + mean_vel = np.zeros_like(mean_pos) + mean = np.r_[mean_pos, mean_vel] + + std = [ + 2 * self._std_weight_position * measurement[3], + 2 * self._std_weight_position * measurement[3], + 1e-2, + 2 * self._std_weight_position * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 1e-5, + 10 * self._std_weight_velocity * measurement[3]] + covariance = np.diag(np.square(std)) + return mean, covariance + + def predict(self, mean, covariance): + """Run Kalman filter prediction step. + Parameters + ---------- + mean : ndarray + The 8 dimensional mean vector of the object state at the previous + time step. + covariance : ndarray + The 8x8 dimensional covariance matrix of the object state at the + previous time step. + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + """ + std_pos = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-2, + self._std_weight_position * mean[3]] + std_vel = [ + self._std_weight_velocity * mean[3], + self._std_weight_velocity * mean[3], + 1e-5, + self._std_weight_velocity * mean[3]] + motion_cov = np.diag(np.square(np.r_[std_pos, std_vel])) + + #mean = np.dot(self._motion_mat, mean) + mean = np.dot(mean, self._motion_mat.T) + covariance = np.linalg.multi_dot(( + self._motion_mat, covariance, self._motion_mat.T)) + motion_cov + + return mean, covariance + + def project(self, mean, covariance): + """Project state distribution to measurement space. + Parameters + ---------- + mean : ndarray + The state's mean vector (8 dimensional array). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + Returns + ------- + (ndarray, ndarray) + Returns the projected mean and covariance matrix of the given state + estimate. + """ + std = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-1, + self._std_weight_position * mean[3]] + innovation_cov = np.diag(np.square(std)) + + mean = np.dot(self._update_mat, mean) + covariance = np.linalg.multi_dot(( + self._update_mat, covariance, self._update_mat.T)) + return mean, covariance + innovation_cov + + def multi_predict(self, mean, covariance): + """Run Kalman filter prediction step (Vectorized version). + Parameters + ---------- + mean : ndarray + The Nx8 dimensional mean matrix of the object states at the previous + time step. 
+ covariance : ndarray + The Nx8x8 dimensional covariance matrics of the object states at the + previous time step. + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + """ + std_pos = [ + self._std_weight_position * mean[:, 3], + self._std_weight_position * mean[:, 3], + 1e-2 * np.ones_like(mean[:, 3]), + self._std_weight_position * mean[:, 3]] + std_vel = [ + self._std_weight_velocity * mean[:, 3], + self._std_weight_velocity * mean[:, 3], + 1e-5 * np.ones_like(mean[:, 3]), + self._std_weight_velocity * mean[:, 3]] + sqr = np.square(np.r_[std_pos, std_vel]).T + + motion_cov = [] + for i in range(len(mean)): + motion_cov.append(np.diag(sqr[i])) + motion_cov = np.asarray(motion_cov) + + mean = np.dot(mean, self._motion_mat.T) + left = np.dot(self._motion_mat, covariance).transpose((1, 0, 2)) + covariance = np.dot(left, self._motion_mat.T) + motion_cov + + return mean, covariance + + def update(self, mean, covariance, measurement): + """Run Kalman filter correction step. + Parameters + ---------- + mean : ndarray + The predicted state's mean vector (8 dimensional). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + measurement : ndarray + The 4 dimensional measurement vector (x, y, a, h), where (x, y) + is the center position, a the aspect ratio, and h the height of the + bounding box. + Returns + ------- + (ndarray, ndarray) + Returns the measurement-corrected state distribution. + """ + projected_mean, projected_cov = self.project(mean, covariance) + + chol_factor, lower = scipy.linalg.cho_factor( + projected_cov, lower=True, check_finite=False) + kalman_gain = scipy.linalg.cho_solve( + (chol_factor, lower), np.dot(covariance, self._update_mat.T).T, + check_finite=False).T + innovation = measurement - projected_mean + + new_mean = mean + np.dot(innovation, kalman_gain.T) + new_covariance = covariance - np.linalg.multi_dot(( + kalman_gain, projected_cov, kalman_gain.T)) + return new_mean, new_covariance + + def gating_distance(self, mean, covariance, measurements, + only_position=False, metric='maha'): + """Compute gating distance between state distribution and measurements. + A suitable distance threshold can be obtained from `chi2inv95`. If + `only_position` is False, the chi-square distribution has 4 degrees of + freedom, otherwise 2. + Parameters + ---------- + mean : ndarray + Mean vector over the state distribution (8 dimensional). + covariance : ndarray + Covariance of the state distribution (8x8 dimensional). + measurements : ndarray + An Nx4 dimensional matrix of N measurements, each in + format (x, y, a, h) where (x, y) is the bounding box center + position, a the aspect ratio, and h the height. + only_position : Optional[bool] + If True, distance computation is done with respect to the bounding + box center position only. + Returns + ------- + ndarray + Returns an array of length N, where the i-th element contains the + squared Mahalanobis distance between (mean, covariance) and + `measurements[i]`. 
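+
+        A hedged sketch of the usual gating pattern (``kf``, ``mean``,
+        ``covariance`` and ``measurements`` are assumed to be set up as
+        documented above; ``chi2inv95`` is the module-level table):
+
+        >>> d2 = kf.gating_distance(mean, covariance, measurements)
+        >>> gate = d2 <= chi2inv95[4]   # 4 dof when only_position is False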
+ """ + mean, covariance = self.project(mean, covariance) + if only_position: + mean, covariance = mean[:2], covariance[:2, :2] + measurements = measurements[:, :2] + + d = measurements - mean + if metric == 'gaussian': + return np.sum(d * d, axis=1) + elif metric == 'maha': + cholesky_factor = np.linalg.cholesky(covariance) + z = scipy.linalg.solve_triangular( + cholesky_factor, d.T, lower=True, check_finite=False, + overwrite_b=True) + squared_maha = np.sum(z * z, axis=0) + return squared_maha + else: + raise ValueError('invalid distance metric') diff --git a/tracking/docker-build-context/byte_track/tutorials/trades/mot_online/matching.py b/tracking/docker-build-context/byte_track/tutorials/trades/mot_online/matching.py new file mode 100644 index 0000000000000000000000000000000000000000..cc7abab60f86e5e84994071fc0ec0dd2f89c0377 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/trades/mot_online/matching.py @@ -0,0 +1,196 @@ +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import lap +import numpy as np +import scipy +from cython_bbox import bbox_overlaps as bbox_ious +from scipy.spatial.distance import cdist + +chi2inv95 = { + 1: 3.8415, + 2: 5.9915, + 3: 7.8147, + 4: 9.4877, + 5: 11.070, + 6: 12.592, + 7: 14.067, + 8: 15.507, + 9: 16.919} + +def merge_matches(m1, m2, shape): + O,P,Q = shape + m1 = np.asarray(m1) + m2 = np.asarray(m2) + + M1 = scipy.sparse.coo_matrix((np.ones(len(m1)), (m1[:, 0], m1[:, 1])), shape=(O, P)) + M2 = scipy.sparse.coo_matrix((np.ones(len(m2)), (m2[:, 0], m2[:, 1])), shape=(P, Q)) + + mask = M1*M2 + match = mask.nonzero() + match = list(zip(match[0], match[1])) + unmatched_O = tuple(set(range(O)) - set([i for i, j in match])) + unmatched_Q = tuple(set(range(Q)) - set([j for i, j in match])) + + return match, unmatched_O, unmatched_Q + + +def _indices_to_matches(cost_matrix, indices, thresh): + matched_cost = cost_matrix[tuple(zip(*indices))] + matched_mask = (matched_cost <= thresh) + + matches = indices[matched_mask] + unmatched_a = tuple(set(range(cost_matrix.shape[0])) - set(matches[:, 0])) + unmatched_b = tuple(set(range(cost_matrix.shape[1])) - set(matches[:, 1])) + + return matches, unmatched_a, unmatched_b + + +def linear_assignment(cost_matrix, thresh): + if cost_matrix.size == 0: + return np.empty((0, 2), dtype=int), tuple(range(cost_matrix.shape[0])), tuple(range(cost_matrix.shape[1])) + matches, unmatched_a, unmatched_b = [], [], [] + cost, x, y = lap.lapjv(cost_matrix, extend_cost=True, cost_limit=thresh) + for ix, mx in enumerate(x): + if mx >= 0: + matches.append([ix, mx]) + unmatched_a = np.where(x < 0)[0] + unmatched_b = np.where(y < 0)[0] + matches = np.asarray(matches) + return matches, unmatched_a, unmatched_b + + +def ious(atlbrs, btlbrs): + """ + Compute cost based on IoU + :type atlbrs: list[tlbr] | np.ndarray + :type atlbrs: list[tlbr] | np.ndarray + :rtype ious np.ndarray + """ + ious = np.zeros((len(atlbrs), len(btlbrs)), dtype=np.float) + if ious.size == 0: + return ious + + ious = bbox_ious( + np.ascontiguousarray(atlbrs, dtype=np.float), + np.ascontiguousarray(btlbrs, dtype=np.float) + ) + + return ious + + +def iou_distance(atracks, btracks): + """ + Compute cost based on IoU + :type atracks: list[STrack] + :type btracks: list[STrack] + :rtype cost_matrix np.ndarray + """ + + if (len(atracks)>0 and isinstance(atracks[0], np.ndarray)) or (len(btracks) > 0 and isinstance(btracks[0], np.ndarray)): + atlbrs = atracks + btlbrs = btracks + else: + 
+        atlbrs = [track.tlbr for track in atracks]
+        btlbrs = [track.tlbr for track in btracks]
+    _ious = ious(atlbrs, btlbrs)
+    cost_matrix = 1 - _ious
+
+    return cost_matrix
+
+
+def embedding_distance(tracks, detections, metric='cosine'):
+    """
+    :param tracks: list[STrack]
+    :param detections: list[BaseTrack]
+    :param metric:
+    :return: cost_matrix np.ndarray
+    """
+
+    cost_matrix = np.zeros((len(tracks), len(detections)), dtype=float)
+    if cost_matrix.size == 0:
+        return cost_matrix
+    det_features = np.asarray([track.curr_feat for track in detections], dtype=float)
+    #for i, track in enumerate(tracks):
+        #cost_matrix[i, :] = np.maximum(0.0, cdist(track.smooth_feat.reshape(1,-1), det_features, metric))
+    track_features = np.asarray([track.smooth_feat for track in tracks], dtype=float)
+    cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric))  # normalized features
+    return cost_matrix
+
+
+def embedding_distance2(tracks, detections, metric='cosine'):
+    """
+    :param tracks: list[STrack]
+    :param detections: list[BaseTrack]
+    :param metric:
+    :return: cost_matrix np.ndarray
+    """
+
+    cost_matrix = np.zeros((len(tracks), len(detections)), dtype=float)
+    if cost_matrix.size == 0:
+        return cost_matrix
+    det_features = np.asarray([track.curr_feat for track in detections], dtype=float)
+    #for i, track in enumerate(tracks):
+        #cost_matrix[i, :] = np.maximum(0.0, cdist(track.smooth_feat.reshape(1,-1), det_features, metric))
+    track_features = np.asarray([track.smooth_feat for track in tracks], dtype=float)
+    cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric))  # normalized smoothed features
+    track_features = np.asarray([track.features[0] for track in tracks], dtype=float)
+    cost_matrix2 = np.maximum(0.0, cdist(track_features, det_features, metric))  # oldest buffered feature
+    track_features = np.asarray([track.features[len(track.features) - 1] for track in tracks], dtype=float)
+    cost_matrix3 = np.maximum(0.0, cdist(track_features, det_features, metric))  # newest buffered feature
+    for row in range(len(cost_matrix)):
+        cost_matrix[row] = (cost_matrix[row] + cost_matrix2[row] + cost_matrix3[row]) / 3
+    return cost_matrix
+
+
+def vis_id_feature_A_distance(tracks, detections, metric='cosine'):
+    track_features = []
+    det_features = []
+    leg1 = len(tracks)
+    leg2 = len(detections)
+    cost_matrix = np.zeros((leg1, leg2), dtype=float)
+    cost_matrix_det = np.zeros((leg1, leg2), dtype=float)
+    cost_matrix_track = np.zeros((leg1, leg2), dtype=float)
+    det_features = np.asarray([track.curr_feat for track in detections], dtype=float)
+    track_features = np.asarray([track.smooth_feat for track in tracks], dtype=float)
+    if leg2 != 0:
+        cost_matrix_det = np.maximum(0.0, cdist(det_features, det_features, metric))
+    if leg1 != 0:
+        cost_matrix_track = np.maximum(0.0, cdist(track_features, track_features, metric))
+    if cost_matrix.size == 0:
+        return track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track
+    cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric))
+    if leg1 > 10:
+        leg1 = 10
+        tracks = tracks[:10]
+    if leg2 > 10:
+        leg2 = 10
+        detections = detections[:10]
+    det_features = np.asarray([track.curr_feat for track in detections], dtype=float)
+    track_features = np.asarray([track.smooth_feat for track in tracks], dtype=float)
+    return track_features, det_features, cost_matrix, cost_matrix_det, cost_matrix_track
+
+
+def gate_cost_matrix(kf, cost_matrix, tracks, detections, only_position=False):
+    if cost_matrix.size == 0:
+        return cost_matrix
+    gating_dim = 2 if only_position else 4
+    gating_threshold = chi2inv95[gating_dim]
+    measurements = np.asarray([det.to_xyah() for det in detections])
+    for row, track in enumerate(tracks):
+        gating_distance = kf.gating_distance(
+            track.mean, track.covariance, measurements, only_position)
+        cost_matrix[row, gating_distance > gating_threshold] = np.inf
+    return cost_matrix
+
+
+def fuse_motion(kf, cost_matrix, tracks, detections, only_position=False, lambda_=0.98):
+    if cost_matrix.size == 0:
+        return cost_matrix
+    gating_dim = 2 if only_position else 4
+    gating_threshold = chi2inv95[gating_dim]
+    measurements = np.asarray([det.to_xyah() for det in detections])
+    for row, track in enumerate(tracks):
+        gating_distance = kf.gating_distance(
+            track.mean, track.covariance, measurements, only_position, metric='maha')
+        cost_matrix[row, gating_distance > gating_threshold] = np.inf
+        cost_matrix[row] = lambda_ * cost_matrix[row] + (1 - lambda_) * gating_distance
+    return cost_matrix
diff --git a/tracking/docker-build-context/byte_track/tutorials/trades/opts.py b/tracking/docker-build-context/byte_track/tutorials/trades/opts.py
new file mode 100644
index 0000000000000000000000000000000000000000..3993c793ac9afd6b666563328cb7be8fdcc871b1
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/tutorials/trades/opts.py
@@ -0,0 +1,439 @@
+from __future__ import absolute_import
+from __future__ import division
+from __future__ import print_function
+
+import argparse
+import os
+import sys
+import json
+
+
+class opts(object):
+  def __init__(self):
+    self.parser = argparse.ArgumentParser()
+    # basic experiment setting
+    self.parser.add_argument('task', default='',
+                             help='ctdet | ddd | multi_pose '
+                                  '| tracking or combined with ,')
+    self.parser.add_argument('--dataset', default='coco',
+                             help='see lib/dataset/dataset_factory for '
+                                  'available datasets')
+    self.parser.add_argument('--test_dataset', default='',
+                             help='coco | kitti | coco_hp | pascal')
+    self.parser.add_argument('--exp_id', default='default')
+    self.parser.add_argument('--test', action='store_true')
+    self.parser.add_argument('--debug', type=int, default=0,
+                             help='level of visualization.'
+                                  '1: only show the final detection results'
+                                  '2: show the network output features'
+                                  '3: use matplotlib to display'  # useful when launching training with ipython notebook
+                                  '4: save all visualizations to disk')
+    self.parser.add_argument('--no_pause', action='store_true')
+    self.parser.add_argument('--demo', default='',
+                             help='path to image/ image folders/ video. '
+                                  'or "webcam"')
+    self.parser.add_argument('--load_model', default='',
+                             help='path to pretrained model')
+    self.parser.add_argument('--resume', action='store_true',
+                             help='resume an experiment. '
+                                  'Reloads the optimizer parameters and '
+                                  'sets load_model to model_last.pth '
+                                  'in the exp dir if load_model is empty.')
+
+    # system
+    self.parser.add_argument('--gpus', default='0',
+                             help='-1 for CPU, use comma for multiple gpus')
+    self.parser.add_argument('--num_workers', type=int, default=4,
+                             help='dataloader threads. 
0 for single-thread.') + self.parser.add_argument('--not_cuda_benchmark', action='store_true', + help='disable when the input size is not fixed.') + self.parser.add_argument('--seed', type=int, default=317, + help='random seed') # from CornerNet + self.parser.add_argument('--not_set_cuda_env', action='store_true', + help='used when training in slurm clusters.') + + # log + self.parser.add_argument('--print_iter', type=int, default=0, + help='disable progress bar and print to screen.') + self.parser.add_argument('--save_all', action='store_true', + help='save model to disk every 5 epochs.') + self.parser.add_argument('--vis_thresh', type=float, default=0.3, + help='visualization threshold.') + self.parser.add_argument('--debugger_theme', default='white', + choices=['white', 'black']) + self.parser.add_argument('--eval_val', action='store_true') + self.parser.add_argument('--save_imgs', default='', help='') + self.parser.add_argument('--save_img_suffix', default='', help='') + self.parser.add_argument('--skip_first', type=int, default=-1, help='') + self.parser.add_argument('--save_video', action='store_true') + self.parser.add_argument('--save_framerate', type=int, default=30) + self.parser.add_argument('--resize_video', action='store_true') + self.parser.add_argument('--video_h', type=int, default=512, help='') + self.parser.add_argument('--video_w', type=int, default=512, help='') + self.parser.add_argument('--transpose_video', action='store_true') + self.parser.add_argument('--show_track_color', action='store_true') + self.parser.add_argument('--not_show_bbox', action='store_true') + self.parser.add_argument('--not_show_number', action='store_true') + self.parser.add_argument('--qualitative', action='store_true') + self.parser.add_argument('--tango_color', action='store_true') + + # model + self.parser.add_argument('--arch', default='dla_34', + help='model architecture. Currently tested' + 'res_18 | res_101 | resdcn_18 | resdcn_101 |' + 'dlav0_34 | dla_34 | hourglass') + self.parser.add_argument('--dla_node', default='dcn') + self.parser.add_argument('--head_conv', type=int, default=-1, + help='conv layer channels for output head' + '0 for no conv layer' + '-1 for default setting: ' + '64 for resnets and 256 for dla.') + self.parser.add_argument('--num_head_conv', type=int, default=1) + self.parser.add_argument('--head_kernel', type=int, default=3, help='') + self.parser.add_argument('--down_ratio', type=int, default=4, + help='output stride. 
Currently only supports 4.') + self.parser.add_argument('--not_idaup', action='store_true') + self.parser.add_argument('--num_classes', type=int, default=-1) + self.parser.add_argument('--num_layers', type=int, default=101) + self.parser.add_argument('--backbone', default='dla34') + self.parser.add_argument('--neck', default='dlaup') + self.parser.add_argument('--msra_outchannel', type=int, default=256) + self.parser.add_argument('--efficient_level', type=int, default=0) + self.parser.add_argument('--prior_bias', type=float, default=-4.6) # -2.19 + self.parser.add_argument('--embedding', action='store_true') + self.parser.add_argument('--box_nms', type=float, default=-1) + self.parser.add_argument('--inference', action='store_true') + self.parser.add_argument('--clip_len', type=int, default=1, help='number of images used in trades' + 'including the current image') + self.parser.add_argument('--no_repeat', action='store_true', default=True) + self.parser.add_argument('--seg', action='store_true', default=False) + self.parser.add_argument('--seg_feat_channel', default=8, type=int, help='.') + self.parser.add_argument('--deform_kernel_size', type=int, default=3) + self.parser.add_argument('--trades', action='store_true', help='Track to Detect and Segment:' + 'An Online Multi Object Tracker') + + # input + self.parser.add_argument('--input_res', type=int, default=-1, + help='input height and width. -1 for default from ' + 'dataset. Will be overriden by input_h | input_w') + self.parser.add_argument('--input_h', type=int, default=-1, + help='input height. -1 for default from dataset.') + self.parser.add_argument('--input_w', type=int, default=-1, + help='input width. -1 for default from dataset.') + self.parser.add_argument('--dataset_version', default='') + + # train + self.parser.add_argument('--optim', default='adam') + self.parser.add_argument('--lr', type=float, default=1.25e-4, + help='learning rate for batch size 32.') + self.parser.add_argument('--lr_step', type=str, default='60', + help='drop learning rate by 10.') + self.parser.add_argument('--save_point', type=str, default='90', + help='when to save the model to disk.') + self.parser.add_argument('--num_epochs', type=int, default=70, + help='total training epochs.') + self.parser.add_argument('--batch_size', type=int, default=32, + help='batch size') + self.parser.add_argument('--master_batch_size', type=int, default=-1, + help='batch size on the master gpu.') + self.parser.add_argument('--num_iters', type=int, default=-1, + help='default: #samples / batch_size.') + self.parser.add_argument('--val_intervals', type=int, default=10000, + help='number of epochs to run validation.') + self.parser.add_argument('--trainval', action='store_true', + help='include validation in training and ' + 'test on test set') + self.parser.add_argument('--ltrb', action='store_true', + help='') + self.parser.add_argument('--ltrb_weight', type=float, default=0.1, + help='') + self.parser.add_argument('--reset_hm', action='store_true') + self.parser.add_argument('--reuse_hm', action='store_true') + self.parser.add_argument('--use_kpt_center', action='store_true') + self.parser.add_argument('--add_05', action='store_true') + self.parser.add_argument('--dense_reg', type=int, default=1, help='') + + # test + self.parser.add_argument('--flip_test', action='store_true', + help='flip data augmentation.') + self.parser.add_argument('--test_scales', type=str, default='1', + help='multi scale test augmentation.') + self.parser.add_argument('--nms', 
action='store_true', + help='run nms in testing.') + self.parser.add_argument('--K', type=int, default=100, + help='max number of output objects.') + self.parser.add_argument('--not_prefetch_test', action='store_true', + help='not use parallal data pre-processing.') + self.parser.add_argument('--fix_short', type=int, default=-1) + self.parser.add_argument('--keep_res', action='store_true', + help='keep the original resolution' + ' during validation.') + self.parser.add_argument('--map_argoverse_id', action='store_true', + help='if trained on nuscenes and eval on kitti') + self.parser.add_argument('--out_thresh', type=float, default=-1, + help='') + self.parser.add_argument('--depth_scale', type=float, default=1, + help='') + self.parser.add_argument('--save_results', action='store_true') + self.parser.add_argument('--load_results', default='') + self.parser.add_argument('--use_loaded_results', action='store_true') + self.parser.add_argument('--ignore_loaded_cats', default='') + self.parser.add_argument('--model_output_list', action='store_true', + help='Used when convert to onnx') + self.parser.add_argument('--non_block_test', action='store_true') + self.parser.add_argument('--vis_gt_bev', default='', help='') + self.parser.add_argument('--kitti_split', default='3dop', + help='different validation split for kitti: ' + '3dop | subcnn') + self.parser.add_argument('--test_focal_length', type=int, default=-1) + + # dataset + self.parser.add_argument('--not_rand_crop', action='store_true', + help='not use the random crop data augmentation' + 'from CornerNet.') + self.parser.add_argument('--not_max_crop', action='store_true', + help='used when the training dataset has' + 'inbalanced aspect ratios.') + self.parser.add_argument('--shift', type=float, default=0, + help='when not using random crop, 0.1' + 'apply shift augmentation.') + self.parser.add_argument('--scale', type=float, default=0, + help='when not using random crop, 0.4' + 'apply scale augmentation.') + self.parser.add_argument('--aug_rot', type=float, default=0, + help='probability of applying ' + 'rotation augmentation.') + self.parser.add_argument('--rotate', type=float, default=0, + help='when not using random crop' + 'apply rotation augmentation.') + self.parser.add_argument('--flip', type=float, default=0.5, + help='probability of applying flip augmentation.') + self.parser.add_argument('--no_color_aug', action='store_true', + help='not use the color augmenation ' + 'from CornerNet') + + # Tracking + self.parser.add_argument('--tracking', action='store_true') + self.parser.add_argument('--pre_hm', action='store_true') + self.parser.add_argument('--same_aug_pre', action='store_true') + self.parser.add_argument('--zero_pre_hm', action='store_true') + self.parser.add_argument('--hm_disturb', type=float, default=0) + self.parser.add_argument('--lost_disturb', type=float, default=0) + self.parser.add_argument('--fp_disturb', type=float, default=0) + self.parser.add_argument('--pre_thresh', type=float, default=-1) + self.parser.add_argument('--track_thresh', type=float, default=0.3) + self.parser.add_argument('--match_thresh', type=float, default=0.8) + self.parser.add_argument('--track_buffer', type=int, default=30) + self.parser.add_argument('--new_thresh', type=float, default=0.0) + self.parser.add_argument('--max_frame_dist', type=int, default=3) + self.parser.add_argument('--ltrb_amodal', action='store_true') + self.parser.add_argument('--ltrb_amodal_weight', type=float, default=0.1) + self.parser.add_argument('--window_size', 
type=int, default=20) + self.parser.add_argument('--public_det', action='store_true') + self.parser.add_argument('--no_pre_img', action='store_true') + self.parser.add_argument('--zero_tracking', action='store_true') + self.parser.add_argument('--hungarian', action='store_true') + self.parser.add_argument('--max_age', type=int, default=-1) + + + # loss + self.parser.add_argument('--tracking_weight', type=float, default=1) + self.parser.add_argument('--reg_loss', default='l1', + help='regression loss: sl1 | l1 | l2') + self.parser.add_argument('--hm_weight', type=float, default=1, + help='loss weight for keypoint heatmaps.') + self.parser.add_argument('--off_weight', type=float, default=1, + help='loss weight for keypoint local offsets.') + self.parser.add_argument('--wh_weight', type=float, default=0.1, + help='loss weight for bounding box size.') + self.parser.add_argument('--hp_weight', type=float, default=1, + help='loss weight for human pose offset.') + self.parser.add_argument('--hm_hp_weight', type=float, default=1, + help='loss weight for human keypoint heatmap.') + self.parser.add_argument('--amodel_offset_weight', type=float, default=1, + help='Please forgive the typo.') + self.parser.add_argument('--dep_weight', type=float, default=1, + help='loss weight for depth.') + self.parser.add_argument('--dim_weight', type=float, default=1, + help='loss weight for 3d bounding box size.') + self.parser.add_argument('--rot_weight', type=float, default=1, + help='loss weight for orientation.') + self.parser.add_argument('--nuscenes_att', action='store_true') + self.parser.add_argument('--nuscenes_att_weight', type=float, default=1) + self.parser.add_argument('--velocity', action='store_true') + self.parser.add_argument('--velocity_weight', type=float, default=1) + self.parser.add_argument('--nID', type=int, default=-1) + + # custom dataset + self.parser.add_argument('--custom_dataset_img_path', default='') + self.parser.add_argument('--custom_dataset_ann_path', default='') + + def parse(self, args=''): + if args == '': + opt = self.parser.parse_args() + else: + opt = self.parser.parse_args(args) + + if opt.test_dataset == '': + opt.test_dataset = opt.dataset + + opt.gpus_str = opt.gpus + opt.gpus = [int(gpu) for gpu in opt.gpus.split(',')] + opt.gpus = [i for i in range(len(opt.gpus))] if opt.gpus[0] >=0 else [-1] + opt.lr_step = [int(i) for i in opt.lr_step.split(',')] + opt.save_point = [int(i) for i in opt.save_point.split(',')] + opt.test_scales = [float(i) for i in opt.test_scales.split(',')] + opt.save_imgs = [i for i in opt.save_imgs.split(',')] \ + if opt.save_imgs != '' else [] + opt.ignore_loaded_cats = \ + [int(i) for i in opt.ignore_loaded_cats.split(',')] \ + if opt.ignore_loaded_cats != '' else [] + + opt.num_workers = max(opt.num_workers, 2 * len(opt.gpus)) + opt.pre_img = False + if 'tracking' in opt.task: + print('Running tracking') + opt.tracking = True +# opt.out_thresh = max(opt.track_thresh, opt.out_thresh) +# opt.pre_thresh = max(opt.track_thresh, opt.pre_thresh) +# opt.new_thresh = max(opt.track_thresh, opt.new_thresh) + opt.pre_img = not opt.no_pre_img + print('Using tracking threshold for out threshold!', opt.track_thresh) + # if 'ddd' in opt.task: + opt.show_track_color = True + if opt.dataset in ['mot', 'mots', 'youtube_vis']: + opt.overlap_thresh = 0.05 + elif opt.dataset == 'nuscenes': + opt.window_size = 7 + opt.overlap_thresh = -1 + else: + opt.overlap_thresh = 0.05 + + opt.fix_res = not opt.keep_res + print('Fix size testing.' 
if opt.fix_res else 'Keep resolution testing.') + + if opt.head_conv == -1: # init default head_conv + opt.head_conv = 256 if 'dla' in opt.arch else 64 + + opt.pad = 127 if 'hourglass' in opt.arch else 31 + opt.num_stacks = 2 if opt.arch == 'hourglass' else 1 + + if opt.master_batch_size == -1: + opt.master_batch_size = opt.batch_size // len(opt.gpus) + rest_batch_size = (opt.batch_size - opt.master_batch_size) + opt.chunk_sizes = [opt.master_batch_size] + for i in range(len(opt.gpus) - 1): + slave_chunk_size = rest_batch_size // (len(opt.gpus) - 1) + if i < rest_batch_size % (len(opt.gpus) - 1): + slave_chunk_size += 1 + opt.chunk_sizes.append(slave_chunk_size) + print('training chunk_sizes:', opt.chunk_sizes) + + if opt.debug > 0: + opt.num_workers = 0 + opt.batch_size = 1 + opt.gpus = [opt.gpus[0]] + opt.master_batch_size = -1 + + # log dirs + opt.root_dir = os.path.join(os.path.dirname(__file__), '..', '..') + opt.data_dir = os.path.join(opt.root_dir, 'data') + opt.exp_dir = os.path.join(opt.root_dir, 'exp', opt.task) + opt.save_dir = os.path.join(opt.exp_dir, opt.exp_id) + opt.debug_dir = os.path.join(opt.save_dir, 'debug') + + if opt.resume and opt.load_model == '': + opt.load_model = os.path.join(opt.save_dir, 'model_last.pth') + return opt + + + def update_dataset_info_and_set_heads(self, opt, dataset): + opt.num_classes = dataset.num_categories \ + if opt.num_classes < 0 else opt.num_classes + # input_h(w): opt.input_h overrides opt.input_res overrides dataset default + input_h, input_w = dataset.default_resolution + input_h = opt.input_res if opt.input_res > 0 else input_h + input_w = opt.input_res if opt.input_res > 0 else input_w + opt.input_h = opt.input_h if opt.input_h > 0 else input_h + opt.input_w = opt.input_w if opt.input_w > 0 else input_w + opt.output_h = opt.input_h // opt.down_ratio + opt.output_w = opt.input_w // opt.down_ratio + opt.input_res = max(opt.input_h, opt.input_w) + opt.output_res = max(opt.output_h, opt.output_w) + + opt.heads = {'hm': opt.num_classes, 'reg': 2, 'wh': 2} + + if not opt.trades: + if 'tracking' in opt.task: + opt.heads.update({'tracking': 2}) + + if 'ddd' in opt.task: + opt.heads.update({'dep': 1, 'rot': 8, 'dim': 3, 'amodel_offset': 2}) + + if 'multi_pose' in opt.task: + opt.heads.update({ + 'hps': dataset.num_joints * 2, 'hm_hp': dataset.num_joints, + 'hp_offset': 2}) + + if opt.ltrb: + opt.heads.update({'ltrb': 4}) + if opt.ltrb_amodal: + opt.heads.update({'ltrb_amodal': 4}) + if opt.nuscenes_att: + opt.heads.update({'nuscenes_att': 8}) + if opt.velocity: + opt.heads.update({'velocity': 3}) + + if opt.embedding: + opt.heads.update({'embedding': 128}) + if opt.seg: + opt.heads.update({'conv_weight': 2*opt.seg_feat_channel**2 + 5*opt.seg_feat_channel + 1}) + opt.heads.update({'seg_feat': opt.seg_feat_channel}) + weight_dict = {'hm': opt.hm_weight, 'wh': opt.wh_weight, + 'reg': opt.off_weight, 'hps': opt.hp_weight, + 'hm_hp': opt.hm_hp_weight, 'hp_offset': opt.off_weight, + 'dep': opt.dep_weight, 'rot': opt.rot_weight, + 'dim': opt.dim_weight, + 'amodel_offset': opt.amodel_offset_weight, + 'ltrb': opt.ltrb_weight, + 'tracking': opt.tracking_weight, + 'ltrb_amodal': opt.ltrb_amodal_weight, + 'nuscenes_att': opt.nuscenes_att_weight, + 'velocity': opt.velocity_weight, + 'embedding': 1.0, + 'conv_weight': 1.0, + 'seg_feat':1.0} + opt.weights = {head: weight_dict[head] for head in opt.heads} + if opt.trades: + opt.weights['cost_volume'] = 1.0 + if opt.seg: + opt.weights['mask_loss'] = 1.0 + for head in opt.weights: + if opt.weights[head] == 0: 
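+        # NOTE (editor): illustrative comment, not in the original patch.
+        # Zero-weighted heads are deleted here, so the model is built without
+        # that output branch entirely (e.g. --wh_weight 0 would drop the 'wh'
+        # head rather than train it with a zero loss).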
+ del opt.heads[head] + opt.head_conv = {head: [opt.head_conv \ + for i in range(opt.num_head_conv if head != 'reg' else 1)] for head in opt.heads} + + print('input h w:', opt.input_h, opt.input_w) + print('heads', opt.heads) + print('weights', opt.weights) + print('head conv', opt.head_conv) + + return opt + + def init(self, args=''): + # only used in demo + default_dataset_info = { + 'ctdet': 'coco', 'multi_pose': 'coco_hp', 'ddd': 'nuscenes', + 'tracking,ctdet': 'coco', 'tracking,multi_pose': 'coco_hp', + 'tracking,ddd': 'nuscenes' + } + opt = self.parse() + from dataset.dataset_factory import dataset_factory + train_dataset = default_dataset_info[opt.task] \ + if opt.task in default_dataset_info else 'coco' + if opt.dataset != 'coco': + dataset = dataset_factory[opt.dataset] + else: + dataset = dataset_factory[train_dataset] + opt = self.update_dataset_info_and_set_heads(opt, dataset) + return opt diff --git a/tracking/docker-build-context/byte_track/tutorials/trades/tracker.py b/tracking/docker-build-context/byte_track/tutorials/trades/tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..a607935cc335d48e784448f74a040175890a23f4 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/trades/tracker.py @@ -0,0 +1,299 @@ +import numpy as np +from sklearn.utils.linear_assignment_ import linear_assignment +import copy +from sklearn.metrics.pairwise import cosine_similarity as cosine + + +class Tracker(object): + def __init__(self, opt): + self.opt = opt + self.reset() + self.nID = 10000 + self.alpha = 0.1 + + def init_track(self, results): + for item in results: + if item['score'] > self.opt.new_thresh: + self.id_count += 1 + # active and age are never used in the paper + item['active'] = 1 + item['age'] = 1 + item['tracking_id'] = self.id_count + if not ('ct' in item): + bbox = item['bbox'] + item['ct'] = [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2] + self.tracks.append(item) + self.nID = 10000 + self.embedding_bank = np.zeros((self.nID, 128)) + self.cat_bank = np.zeros((self.nID), dtype=np.int) + + def reset(self): + self.id_count = 0 + self.nID = 10000 + self.tracks = [] + self.embedding_bank = np.zeros((self.nID, 128)) + self.cat_bank = np.zeros((self.nID), dtype=np.int) + self.tracklet_ages = np.zeros((self.nID), dtype=np.int) + self.alive = [] + + def step(self, results_with_low, public_det=None): + results = [item for item in results_with_low if item['score'] >= self.opt.track_thresh] + + # first association + N = len(results) + M = len(self.tracks) + self.alive = [] + + track_boxes = np.array([[track['bbox'][0], track['bbox'][1], + track['bbox'][2], track['bbox'][3]] for track in self.tracks], np.float32) # M x 4 + det_boxes = np.array([[item['bbox'][0], item['bbox'][1], + item['bbox'][2], item['bbox'][3]] for item in results], np.float32) # N x 4 + box_ious = self.bbox_overlaps_py(det_boxes, track_boxes) + + dets = np.array( + [det['ct'] + det['tracking'] for det in results], np.float32) # N x 2 + track_size = np.array([((track['bbox'][2] - track['bbox'][0]) * \ + (track['bbox'][3] - track['bbox'][1])) \ + for track in self.tracks], np.float32) # M + track_cat = np.array([track['class'] for track in self.tracks], np.int32) # M + item_size = np.array([((item['bbox'][2] - item['bbox'][0]) * \ + (item['bbox'][3] - item['bbox'][1])) \ + for item in results], np.float32) # N + item_cat = np.array([item['class'] for item in results], np.int32) # N + tracks = np.array( + [pre_det['ct'] for pre_det in self.tracks], np.float32) # M x 2 + dist 
= (((tracks.reshape(1, -1, 2) - \ + dets.reshape(-1, 1, 2)) ** 2).sum(axis=2)) # N x M + + if self.opt.dataset == 'youtube_vis': + invalid = ((dist > track_size.reshape(1, M)) + \ + (dist > item_size.reshape(N, 1)) + (box_ious < self.opt.overlap_thresh)) > 0 + else: + invalid = ((dist > track_size.reshape(1, M)) + \ + (dist > item_size.reshape(N, 1)) + \ + (item_cat.reshape(N, 1) != track_cat.reshape(1, M)) + (box_ious < self.opt.overlap_thresh)) > 0 + dist = dist + invalid * 1e18 + + if self.opt.hungarian: + item_score = np.array([item['score'] for item in results], np.float32) # N + dist[dist > 1e18] = 1e18 + matched_indices = linear_assignment(dist) + else: + matched_indices = greedy_assignment(copy.deepcopy(dist)) + unmatched_dets = [d for d in range(dets.shape[0]) \ + if not (d in matched_indices[:, 0])] + unmatched_tracks = [d for d in range(tracks.shape[0]) \ + if not (d in matched_indices[:, 1])] + + if self.opt.hungarian: + matches = [] + for m in matched_indices: + if dist[m[0], m[1]] > 1e16: + unmatched_dets.append(m[0]) + unmatched_tracks.append(m[1]) + else: + matches.append(m) + matches = np.array(matches).reshape(-1, 2) + else: + matches = matched_indices + + ret = [] + for m in matches: + track = results[m[0]] + track['tracking_id'] = self.tracks[m[1]]['tracking_id'] + track['age'] = 1 + track['active'] = self.tracks[m[1]]['active'] + 1 + if 'embedding' in track: + self.alive.append(track['tracking_id']) + self.embedding_bank[self.tracks[m[1]]['tracking_id'] - 1, :] = self.alpha * track['embedding'] \ + + (1 - self.alpha) * self.embedding_bank[ + self.tracks[m[1]][ + 'tracking_id'] - 1, + :] + self.cat_bank[self.tracks[m[1]]['tracking_id'] - 1] = track['class'] + ret.append(track) + + if self.opt.public_det and len(unmatched_dets) > 0: + # Public detection: only create tracks from provided detections + pub_dets = np.array([d['ct'] for d in public_det], np.float32) + dist3 = ((dets.reshape(-1, 1, 2) - pub_dets.reshape(1, -1, 2)) ** 2).sum( + axis=2) + matched_dets = [d for d in range(dets.shape[0]) \ + if not (d in unmatched_dets)] + dist3[matched_dets] = 1e18 + for j in range(len(pub_dets)): + i = dist3[:, j].argmin() + if dist3[i, j] < item_size[i]: + dist3[i, :] = 1e18 + track = results[i] + if track['score'] > self.opt.new_thresh: + self.id_count += 1 + track['tracking_id'] = self.id_count + track['age'] = 1 + track['active'] = 1 + ret.append(track) + else: + # Private detection: create tracks for all un-matched detections + for i in unmatched_dets: + track = results[i] + if track['score'] > self.opt.new_thresh: + if 'embedding' in track: + max_id, max_cos = self.get_similarity(track['embedding'], False, track['class']) + if max_cos >= 0.3 and self.tracklet_ages[max_id - 1] < self.opt.window_size: + track['tracking_id'] = max_id + track['age'] = 1 + track['active'] = 1 + self.embedding_bank[track['tracking_id'] - 1, :] = self.alpha * track['embedding'] \ + + (1 - self.alpha) * self.embedding_bank[track['tracking_id'] - 1,:] + else: + self.id_count += 1 + track['tracking_id'] = self.id_count + track['age'] = 1 + track['active'] = 1 + self.embedding_bank[self.id_count - 1, :] = track['embedding'] + self.cat_bank[self.id_count - 1] = track['class'] + self.alive.append(track['tracking_id']) + ret.append(track) + else: + self.id_count += 1 + track['tracking_id'] = self.id_count + track['age'] = 1 + track['active'] = 1 + ret.append(track) + + self.tracklet_ages[:self.id_count] = self.tracklet_ages[:self.id_count] + 1 + for track in ret: + 
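+            # NOTE (editor): every assigned ID ages by one frame above; IDs
+            # matched in this frame are reset to age 1 below. The embedding-based
+            # revival earlier in step() only accepts IDs younger than
+            # opt.window_size.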
self.tracklet_ages[track['tracking_id'] - 1] = 1 + + + # second association + results_second = [item for item in results_with_low if item['score'] < self.opt.track_thresh] + self_tracks_second = [self.tracks[i] for i in unmatched_tracks if self.tracks[i]['active'] > 0] + second2original = [i for i in unmatched_tracks if self.tracks[i]['active'] > 0] + + N = len(results_second) + M = len(self_tracks_second) + + if N > 0 and M > 0: + + track_boxes_second = np.array([[track['bbox'][0], track['bbox'][1], + track['bbox'][2], track['bbox'][3]] for track in self_tracks_second], np.float32) # M x 4 + det_boxes_second = np.array([[item['bbox'][0], item['bbox'][1], + item['bbox'][2], item['bbox'][3]] for item in results_second], np.float32) # N x 4 + box_ious_second = self.bbox_overlaps_py(det_boxes_second, track_boxes_second) + + dets = np.array( + [det['ct'] + det['tracking'] for det in results_second], np.float32) # N x 2 + track_size = np.array([((track['bbox'][2] - track['bbox'][0]) * \ + (track['bbox'][3] - track['bbox'][1])) \ + for track in self_tracks_second], np.float32) # M + track_cat = np.array([track['class'] for track in self_tracks_second], np.int32) # M + item_size = np.array([((item['bbox'][2] - item['bbox'][0]) * \ + (item['bbox'][3] - item['bbox'][1])) \ + for item in results_second], np.float32) # N + item_cat = np.array([item['class'] for item in results_second], np.int32) # N + tracks_second = np.array( + [pre_det['ct'] for pre_det in self_tracks_second], np.float32) # M x 2 + dist = (((tracks_second.reshape(1, -1, 2) - \ + dets.reshape(-1, 1, 2)) ** 2).sum(axis=2)) # N x M + + invalid = ((dist > track_size.reshape(1, M)) + \ + (dist > item_size.reshape(N, 1)) + \ + (item_cat.reshape(N, 1) != track_cat.reshape(1, M)) + (box_ious_second < 0.3)) > 0 + dist = dist + invalid * 1e18 + + matched_indices_second = greedy_assignment(copy.deepcopy(dist), 1e8) + unmatched_tracks_second = [d for d in range(tracks_second.shape[0]) \ + if not (d in matched_indices_second[:, 1])] + matches_second = matched_indices_second + + for m in matches_second: + track = results_second[m[0]] + track['tracking_id'] = self_tracks_second[m[1]]['tracking_id'] + track['age'] = 1 + track['active'] = self_tracks_second[m[1]]['active'] + 1 + if 'embedding' in track: + self.alive.append(track['tracking_id']) + self.embedding_bank[self_tracks_second[m[1]]['tracking_id'] - 1, :] = self.alpha * track['embedding'] \ + + (1 - self.alpha) * self.embedding_bank[self_tracks_second[m[1]]['tracking_id'] - 1,:] + self.cat_bank[self_tracks_second[m[1]]['tracking_id'] - 1] = track['class'] + ret.append(track) + + unmatched_tracks = [second2original[i] for i in unmatched_tracks_second] + \ + [i for i in unmatched_tracks if self.tracks[i]['active'] == 0] + + + # Never used + for i in unmatched_tracks: + track = self.tracks[i] + if track['age'] < self.opt.max_age: + track['age'] += 1 + track['active'] = 1 # 0 + bbox = track['bbox'] + ct = track['ct'] + v = [0, 0] + track['bbox'] = [ + bbox[0] + v[0], bbox[1] + v[1], + bbox[2] + v[0], bbox[3] + v[1]] + track['ct'] = [ct[0] + v[0], ct[1] + v[1]] + ret.append(track) + for r_ in ret: + del r_['embedding'] + self.tracks = ret + return ret + + def get_similarity(self, feat, stat, cls): + max_id = -1 + max_cos = -1 + if stat: + nID = self.id_count + else: + nID = self.id_count + + a = feat[None, :] + b = self.embedding_bank[:nID, :] + if len(b) > 0: + alive = np.array(self.alive, dtype=np.int) - 1 + cosim = cosine(a, b) + cosim = np.reshape(cosim, newshape=(-1)) + cosim[alive] = -2 + 
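+            # NOTE (editor): cosine similarity lies in [-1, 1], so -2 acts as a
+            # hard mask: IDs already alive in this frame (above), the most recent
+            # ID, and IDs of a different class (below) can never win the argmax.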
cosim[nID - 1] = -2 + cosim[np.where(self.cat_bank[:nID] != cls)[0]] = -2 + max_id = int(np.argmax(cosim) + 1) + max_cos = np.max(cosim) + return max_id, max_cos + + def bbox_overlaps_py(self, boxes, query_boxes): + """ + determine overlaps between boxes and query_boxes + :param boxes: n * 4 bounding boxes + :param query_boxes: k * 4 bounding boxes + :return: overlaps: n * k overlaps + """ + n_ = boxes.shape[0] + k_ = query_boxes.shape[0] + overlaps = np.zeros((n_, k_), dtype=float) + for k in range(k_): + query_box_area = (query_boxes[k, 2] - query_boxes[k, 0] + 1) * (query_boxes[k, 3] - query_boxes[k, 1] + 1) + for n in range(n_): + iw = min(boxes[n, 2], query_boxes[k, 2]) - max(boxes[n, 0], query_boxes[k, 0]) + 1 + if iw > 0: + ih = min(boxes[n, 3], query_boxes[k, 3]) - max(boxes[n, 1], query_boxes[k, 1]) + 1 + if ih > 0: + box_area = (boxes[n, 2] - boxes[n, 0] + 1) * (boxes[n, 3] - boxes[n, 1] + 1) + all_area = float(box_area + query_box_area - iw * ih) + overlaps[n, k] = iw * ih / all_area + return overlaps + + + +def greedy_assignment(dist, thresh=1e16): + matched_indices = [] + if dist.shape[1] == 0: + return np.array(matched_indices, np.int32).reshape(-1, 2) + for i in range(dist.shape[0]): + j = dist[i].argmin() + if dist[i][j] < thresh: + dist[:, j] = 1e18 + matched_indices.append([i, j]) + return np.array(matched_indices, np.int32).reshape(-1, 2) diff --git a/tracking/docker-build-context/byte_track/tutorials/transtrack/README.md b/tracking/docker-build-context/byte_track/tutorials/transtrack/README.md new file mode 100644 index 0000000000000000000000000000000000000000..193965abc7c18906bf8072e034448c8fd6e5aab3 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/transtrack/README.md @@ -0,0 +1,45 @@ +# TransTrack + +Step1. git clone https://github.com/PeizeSun/TransTrack.git + + +Step2. + +replace https://github.com/PeizeSun/TransTrack/blob/main/models/tracker.py + +Step3. + +Download the TransTrack pretrained model: [671mot17_crowdhuman_mot17.pth](https://drive.google.com/drive/folders/1DjPL8xWoXDASrxgsA3O06EspJRdUXFQ-?usp=sharing) + + +Step4. run +``` +python3 main_track.py --output_dir . --dataset_file mot --coco_path mot --batch_size 1 --resume pretrained/671mot17_crowdhuman_mot17.pth --eval --with_box_refine --num_queries 500 +``` + + +# TransTrack_BYTE + +Step1. git clone https://github.com/PeizeSun/TransTrack.git + +Step2. + +replace https://github.com/PeizeSun/TransTrack/blob/main/models/save_track.py + +replace https://github.com/PeizeSun/TransTrack/blob/main/engine_track.py + +replace https://github.com/PeizeSun/TransTrack/blob/main/main_track.py + +add mot_online to https://github.com/PeizeSun/TransTrack + +Step3. run +``` +python3 main_track.py --output_dir . 
--dataset_file mot --coco_path mot --batch_size 1 --resume pretrained/671mot17_crowdhuman_mot17.pth --eval --with_box_refine --num_queries 500 +``` + + +## Notes +tracker.py: motion-only association (no Kalman filter) + +mot_online/byte_tracker.py: motion association with a Kalman filter + diff --git a/tracking/docker-build-context/byte_track/tutorials/transtrack/engine_track.py b/tracking/docker-build-context/byte_track/tutorials/transtrack/engine_track.py new file mode 100644 index 0000000000000000000000000000000000000000..925a4f2ba1b6ee4e7daaed7c0a901362ee223ddb --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/transtrack/engine_track.py @@ -0,0 +1,277 @@ +# Modified by Peize Sun, Rufeng Zhang +# ------------------------------------------------------------------------ +# Deformable DETR +# Copyright (c) 2020 SenseTime. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 [see LICENSE for details] +# ------------------------------------------------------------------------ +# Modified from DETR (https://github.com/facebookresearch/detr) +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +# ------------------------------------------------------------------------ +""" +Train and eval functions used in main.py +""" +import math +import os +import sys +from typing import Iterable + +import torch +import util.misc as utils +from datasets.coco_eval import CocoEvaluator +from datasets.panoptic_eval import PanopticEvaluator +from datasets.data_prefetcher import data_prefetcher +from mot_online.byte_tracker import BYTETracker + + +def train_one_epoch(model: torch.nn.Module, criterion: torch.nn.Module, + data_loader: Iterable, optimizer: torch.optim.Optimizer, + device: torch.device, epoch: int, max_norm: float = 0): + model.train() + criterion.train() + metric_logger = utils.MetricLogger(delimiter="  ") + metric_logger.add_meter('lr', utils.SmoothedValue(window_size=1, fmt='{value:.6f}')) + metric_logger.add_meter('class_error', utils.SmoothedValue(window_size=1, fmt='{value:.2f}')) + metric_logger.add_meter('grad_norm', utils.SmoothedValue(window_size=1, fmt='{value:.2f}')) + header = 'Epoch: [{}]'.format(epoch) + print_freq = 10 + + prefetcher = data_prefetcher(data_loader, device, prefetch=True) + samples, targets = prefetcher.next() + + # for samples, targets in metric_logger.log_every(data_loader, print_freq, header): + for _ in metric_logger.log_every(range(len(data_loader)), print_freq, header): + outputs, pre_outputs, pre_targets = model([samples, targets]) + loss_dict = criterion(outputs, targets, pre_outputs, pre_targets) + weight_dict = criterion.weight_dict + losses = sum(loss_dict[k] * weight_dict[k] for k in loss_dict.keys() if k in weight_dict) + + # reduce losses over all GPUs for logging purposes + loss_dict_reduced = utils.reduce_dict(loss_dict) + loss_dict_reduced_unscaled = {f'{k}_unscaled': v + for k, v in loss_dict_reduced.items()} + loss_dict_reduced_scaled = {k: v * weight_dict[k] + for k, v in loss_dict_reduced.items() if k in weight_dict} + losses_reduced_scaled = sum(loss_dict_reduced_scaled.values()) + + loss_value = losses_reduced_scaled.item() + + if not math.isfinite(loss_value): + print("Loss is {}, stopping training".format(loss_value)) + print(loss_dict_reduced) + sys.exit(1) + + optimizer.zero_grad() + losses.backward() + if max_norm > 0: + grad_total_norm = torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm) + else: + grad_total_norm = utils.get_total_grad_norm(model.parameters(), max_norm) + optimizer.step() + + 
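+        # NOTE (editor): the reduced loss dicts logged below are averaged across
+        # processes by utils.reduce_dict purely for logging; gradient averaging
+        # itself is handled by DDP during backward().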
metric_logger.update(loss=loss_value, **loss_dict_reduced_scaled, **loss_dict_reduced_unscaled) + metric_logger.update(class_error=loss_dict_reduced['class_error']) + metric_logger.update(lr=optimizer.param_groups[0]["lr"]) + metric_logger.update(grad_norm=grad_total_norm) + + samples, targets = prefetcher.next() + # gather the stats from all processes + metric_logger.synchronize_between_processes() + print("Averaged stats:", metric_logger) + return {k: meter.global_avg for k, meter in metric_logger.meters.items()} + + +@torch.no_grad() +def evaluate(model, criterion, postprocessors, data_loader, base_ds, device, output_dir, tracker=None, + phase='train', det_val=False): + model.eval() + criterion.eval() + + metric_logger = utils.MetricLogger(delimiter=" ") + metric_logger.add_meter('class_error', utils.SmoothedValue(window_size=1, fmt='{value:.2f}')) + header = 'Test:' + + iou_types = tuple(k for k in ('segm', 'bbox') if k in postprocessors.keys()) + coco_evaluator = CocoEvaluator(base_ds, iou_types) + # coco_evaluator.coco_eval[iou_types[0]].params.iouThrs = [0, 0.1, 0.5, 0.75] + + panoptic_evaluator = None + if 'panoptic' in postprocessors.keys(): + panoptic_evaluator = PanopticEvaluator( + data_loader.dataset.ann_file, + data_loader.dataset.ann_folder, + output_dir=os.path.join(output_dir, "panoptic_eval"), + ) + + res_tracks = dict() + pre_embed = None + for samples, targets in metric_logger.log_every(data_loader, 10, header): + # pre process for track. + if tracker is not None: + if phase != 'train': + assert samples.tensors.shape[0] == 1, "Now only support inference of batchsize 1." + frame_id = targets[0].get("frame_id", None) + assert frame_id is not None + frame_id = frame_id.item() + if frame_id == 1: + tracker.reset_all() + pre_embed = None + + samples = samples.to(device) + targets = [{k: v.to(device) for k, v in t.items()} for t in targets] + + if det_val: + outputs = model(samples) + else: + outputs, pre_embed = model(samples, pre_embed) + loss_dict = criterion(outputs, targets) + weight_dict = criterion.weight_dict + +# reduce losses over all GPUs for logging purposes + loss_dict_reduced = utils.reduce_dict(loss_dict) + loss_dict_reduced_scaled = {k: v * weight_dict[k] + for k, v in loss_dict_reduced.items() if k in weight_dict} + loss_dict_reduced_unscaled = {f'{k}_unscaled': v + for k, v in loss_dict_reduced.items()} + metric_logger.update(loss=sum(loss_dict_reduced_scaled.values()), + **loss_dict_reduced_scaled, + **loss_dict_reduced_unscaled) + metric_logger.update(class_error=loss_dict_reduced['class_error']) + + orig_target_sizes = torch.stack([t["orig_size"] for t in targets], dim=0) + results = postprocessors['bbox'](outputs, orig_target_sizes) + + if 'segm' in postprocessors.keys(): + target_sizes = torch.stack([t["size"] for t in targets], dim=0) + results = postprocessors['segm'](results, outputs, orig_target_sizes, target_sizes) + res = {target['image_id'].item(): output for target, output in zip(targets, results)} + + # post process for track. 
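+            # NOTE (editor): frame_id == 1 marks a new sequence, so the tracker
+            # seeds fresh IDs via init_track(); every later frame is associated
+            # incrementally through step().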
+ if tracker is not None: + if frame_id == 1: + res_track = tracker.init_track(results[0]) + else: + res_track = tracker.step(results[0]) + res_tracks[targets[0]['image_id'].item()] = res_track + + if coco_evaluator is not None: + coco_evaluator.update(res) + + if panoptic_evaluator is not None: + res_pano = postprocessors["panoptic"](outputs, target_sizes, orig_target_sizes) + for i, target in enumerate(targets): + image_id = target["image_id"].item() + file_name = f"{image_id:012d}.png" + res_pano[i]["image_id"] = image_id + res_pano[i]["file_name"] = file_name + + panoptic_evaluator.update(res_pano) + + # gather the stats from all processes + metric_logger.synchronize_between_processes() + print("Averaged stats:", metric_logger) + if coco_evaluator is not None: + coco_evaluator.synchronize_between_processes() + if panoptic_evaluator is not None: + panoptic_evaluator.synchronize_between_processes() + + # accumulate predictions from all images + if coco_evaluator is not None: + coco_evaluator.accumulate() + coco_evaluator.summarize() + panoptic_res = None + if panoptic_evaluator is not None: + panoptic_res = panoptic_evaluator.summarize() + stats = {k: meter.global_avg for k, meter in metric_logger.meters.items()} + if coco_evaluator is not None: + if 'bbox' in postprocessors.keys(): + stats['coco_eval_bbox'] = coco_evaluator.coco_eval['bbox'].stats.tolist() + if 'segm' in postprocessors.keys(): + stats['coco_eval_masks'] = coco_evaluator.coco_eval['segm'].stats.tolist() + if panoptic_res is not None: + stats['PQ_all'] = panoptic_res["All"] + stats['PQ_th'] = panoptic_res["Things"] + stats['PQ_st'] = panoptic_res["Stuff"] + return stats, coco_evaluator, res_tracks + + +@torch.no_grad() +def evaluate_track(args, model, criterion, postprocessors, data_loader, base_ds, device, output_dir, tracker=None, + phase='train', det_val=False): + model.eval() + criterion.eval() + + metric_logger = utils.MetricLogger(delimiter=" ") + metric_logger.add_meter('class_error', utils.SmoothedValue(window_size=1, fmt='{value:.2f}')) + header = 'Test:' + + iou_types = tuple(k for k in ('segm', 'bbox') if k in postprocessors.keys()) + coco_evaluator = CocoEvaluator(base_ds, iou_types) + # coco_evaluator.coco_eval[iou_types[0]].params.iouThrs = [0, 0.1, 0.5, 0.75] + + res_tracks = dict() + pre_embed = None + for samples, targets in metric_logger.log_every(data_loader, 50, header): + # pre process for track. 
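+        # NOTE (editor): unlike evaluate() above, evaluate_track rebuilds the
+        # BYTETracker whenever frame_id == 1, so track IDs and Kalman state
+        # cannot leak across video sequences.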
+ if tracker is not None: + frame_id = targets[0].get("frame_id", None) + assert frame_id is not None + frame_id = frame_id.item() + if frame_id == 1: + tracker = BYTETracker(args) + pre_embed = None + + samples = samples.to(device) + targets = [{k: v.to(device) for k, v in t.items()} for t in targets] + + if det_val: + outputs = model(samples) + else: + outputs, pre_embed = model(samples, pre_embed) + loss_dict = criterion(outputs, targets) + weight_dict = criterion.weight_dict + +# reduce losses over all GPUs for logging purposes + loss_dict_reduced = utils.reduce_dict(loss_dict) + loss_dict_reduced_scaled = {k: v * weight_dict[k] + for k, v in loss_dict_reduced.items() if k in weight_dict} + loss_dict_reduced_unscaled = {f'{k}_unscaled': v + for k, v in loss_dict_reduced.items()} + metric_logger.update(loss=sum(loss_dict_reduced_scaled.values()), + **loss_dict_reduced_scaled, + **loss_dict_reduced_unscaled) + metric_logger.update(class_error=loss_dict_reduced['class_error']) + + orig_target_sizes = torch.stack([t["orig_size"] for t in targets], dim=0) + results = postprocessors['bbox'](outputs, orig_target_sizes) + + if 'segm' in postprocessors.keys(): + target_sizes = torch.stack([t["size"] for t in targets], dim=0) + results = postprocessors['segm'](results, outputs, orig_target_sizes, target_sizes) + res = {target['image_id'].item(): output for target, output in zip(targets, results)} + + # post process for track. + if tracker is not None: + res_track = tracker.update(results[0]) + res_tracks[targets[0]['image_id'].item()] = res_track + + if coco_evaluator is not None: + coco_evaluator.update(res) + + # gather the stats from all processes + metric_logger.synchronize_between_processes() + print("Averaged stats:", metric_logger) + if coco_evaluator is not None: + coco_evaluator.synchronize_between_processes() + + # accumulate predictions from all images + if coco_evaluator is not None: + coco_evaluator.accumulate() + coco_evaluator.summarize() + + stats = {k: meter.global_avg for k, meter in metric_logger.meters.items()} + if coco_evaluator is not None: + if 'bbox' in postprocessors.keys(): + stats['coco_eval_bbox'] = coco_evaluator.coco_eval['bbox'].stats.tolist() + if 'segm' in postprocessors.keys(): + stats['coco_eval_masks'] = coco_evaluator.coco_eval['segm'].stats.tolist() + return stats, coco_evaluator, res_tracks \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/tutorials/transtrack/main_track.py b/tracking/docker-build-context/byte_track/tutorials/transtrack/main_track.py new file mode 100644 index 0000000000000000000000000000000000000000..efe1a0ac1fd5b26aee2d9d6582ce25e441786080 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/transtrack/main_track.py @@ -0,0 +1,375 @@ +# Modified by Peize Sun, Rufeng Zhang +# ------------------------------------------------------------------------ +# Deformable DETR +# Copyright (c) 2020 SenseTime. All Rights Reserved. +# Licensed under the Apache License, Version 2.0 [see LICENSE for details] +# ------------------------------------------------------------------------ +# Modified from DETR (https://github.com/facebookresearch/detr) +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved +# ------------------------------------------------------------------------ +import argparse +import datetime +import json +import random +import time +from pathlib import Path + +import numpy as np +import torch +from torch.utils.data import DataLoader +import datasets +import util.misc as utils +import datasets.samplers as samplers +from datasets import build_dataset, get_coco_api_from_dataset +from engine_track import evaluate, train_one_epoch, evaluate_track +from models import build_tracktrain_model, build_tracktest_model, build_model +from models import Tracker +from models import save_track +from mot_online.byte_tracker import BYTETracker + +from collections import defaultdict + + +def get_args_parser(): + parser = argparse.ArgumentParser('Deformable DETR Detector', add_help=False) + parser.add_argument('--lr', default=2e-4, type=float) + parser.add_argument('--lr_backbone_names', default=["backbone.0"], type=str, nargs='+') + parser.add_argument('--lr_backbone', default=2e-5, type=float) + parser.add_argument('--lr_linear_proj_names', default=['reference_points', 'sampling_offsets'], type=str, nargs='+') + parser.add_argument('--lr_linear_proj_mult', default=0.1, type=float) + parser.add_argument('--batch_size', default=1, type=int) + parser.add_argument('--weight_decay', default=1e-4, type=float) + parser.add_argument('--epochs', default=50, type=int) + parser.add_argument('--lr_drop', default=40, type=int) + parser.add_argument('--lr_drop_epochs', default=None, type=int, nargs='+') + parser.add_argument('--clip_max_norm', default=0.1, type=float, + help='gradient clipping max norm') + + parser.add_argument('--sgd', action='store_true') + + # Variants of Deformable DETR + parser.add_argument('--with_box_refine', default=True, action='store_true') + parser.add_argument('--two_stage', default=False, action='store_true') + + # Model parameters + parser.add_argument('--frozen_weights', type=str, default=None, + help="Path to the pretrained model. 
If set, only the mask head will be trained") + + # * Backbone + parser.add_argument('--backbone', default='resnet50', type=str, + help="Name of the convolutional backbone to use") + parser.add_argument('--dilation', action='store_true', + help="If true, we replace stride with dilation in the last convolutional block (DC5)") + parser.add_argument('--position_embedding', default='sine', type=str, choices=('sine', 'learned'), + help="Type of positional embedding to use on top of the image features") + parser.add_argument('--position_embedding_scale', default=2 * np.pi, type=float, + help="position / size * scale") + parser.add_argument('--num_feature_levels', default=4, type=int, help='number of feature levels') + + # * Transformer + parser.add_argument('--enc_layers', default=6, type=int, + help="Number of encoding layers in the transformer") + parser.add_argument('--dec_layers', default=6, type=int, + help="Number of decoding layers in the transformer") + parser.add_argument('--dim_feedforward', default=1024, type=int, + help="Intermediate size of the feedforward layers in the transformer blocks") + parser.add_argument('--hidden_dim', default=256, type=int, + help="Size of the embeddings (dimension of the transformer)") + parser.add_argument('--dropout', default=0.1, type=float, + help="Dropout applied in the transformer") + parser.add_argument('--nheads', default=8, type=int, + help="Number of attention heads inside the transformer's attentions") + parser.add_argument('--num_queries', default=500, type=int, + help="Number of query slots") + parser.add_argument('--dec_n_points', default=4, type=int) + parser.add_argument('--enc_n_points', default=4, type=int) + + # * Segmentation + parser.add_argument('--masks', action='store_true', + help="Train segmentation head if the flag is provided") + + # Loss + parser.add_argument('--no_aux_loss', dest='aux_loss', action='store_false', + help="Disables auxiliary decoding losses (loss at each layer)") + + # * Matcher + parser.add_argument('--set_cost_class', default=2, type=float, + help="Class coefficient in the matching cost") + parser.add_argument('--set_cost_bbox', default=5, type=float, + help="L1 box coefficient in the matching cost") + parser.add_argument('--set_cost_giou', default=2, type=float, + help="giou box coefficient in the matching cost") + + # * Loss coefficients + parser.add_argument('--mask_loss_coef', default=1, type=float) + parser.add_argument('--dice_loss_coef', default=1, type=float) + parser.add_argument('--cls_loss_coef', default=2, type=float) + parser.add_argument('--bbox_loss_coef', default=5, type=float) + parser.add_argument('--giou_loss_coef', default=2, type=float) + parser.add_argument('--focal_alpha', default=0.25, type=float) + parser.add_argument('--id_loss_coef', default=1, type=float) + + # dataset parameters + parser.add_argument('--dataset_file', default='coco') + parser.add_argument('--coco_path', default='./data/coco', type=str) + parser.add_argument('--coco_panoptic_path', type=str) + parser.add_argument('--remove_difficult', action='store_true') + + parser.add_argument('--output_dir', default='', + help='path where to save, empty for no saving') + parser.add_argument('--device', default='cuda', + help='device to use for training / testing') + parser.add_argument('--seed', default=42, type=int) + parser.add_argument('--resume', default='', help='resume from checkpoint') + parser.add_argument('--start_epoch', default=0, type=int, metavar='N', + help='start epoch') + parser.add_argument('--eval', 
action='store_true') + parser.add_argument('--num_workers', default=2, type=int) + parser.add_argument('--cache_mode', default=False, action='store_true', help='whether to cache images on memory') + + # PyTorch checkpointing for saving memory (torch.utils.checkpoint.checkpoint) + parser.add_argument('--checkpoint_enc_ffn', default=False, action='store_true') + parser.add_argument('--checkpoint_dec_ffn', default=False, action='store_true') + + # appended for track. + parser.add_argument('--track_train_split', default='train', type=str) + parser.add_argument('--track_eval_split', default='val', type=str) + parser.add_argument('--track_thresh', default=0.4, type=float) + parser.add_argument('--reid_shared', default=False, type=bool) + parser.add_argument('--reid_dim', default=128, type=int) + parser.add_argument('--num_ids', default=360, type=int) + + + # detector for track. + parser.add_argument('--det_val', default=False, action='store_true') + + + return parser + + +def main(args): + utils.init_distributed_mode(args) + print("git:\n {}\n".format(utils.get_sha())) + + if args.frozen_weights is not None: + assert args.masks, "Frozen training is meant for segmentation only" + print(args) + + device = torch.device(args.device) + + # fix the seed for reproducibility + seed = args.seed + utils.get_rank() + torch.manual_seed(seed) + np.random.seed(seed) + random.seed(seed) + + if args.det_val: + assert args.eval, 'only support eval mode of detector for track' + model, criterion, postprocessors = build_model(args) + elif args.eval: + model, criterion, postprocessors = build_tracktest_model(args) + else: + model, criterion, postprocessors = build_tracktrain_model(args) + + model.to(device) + + model_without_ddp = model + n_parameters = sum(p.numel() for p in model.parameters() if p.requires_grad) + print('number of params:', n_parameters) + + dataset_train = build_dataset(image_set=args.track_train_split, args=args) + dataset_val = build_dataset(image_set=args.track_eval_split, args=args) + + if args.distributed: + if args.cache_mode: + sampler_train = samplers.NodeDistributedSampler(dataset_train) + sampler_val = samplers.NodeDistributedSampler(dataset_val, shuffle=False) + else: + sampler_train = samplers.DistributedSampler(dataset_train) + sampler_val = samplers.DistributedSampler(dataset_val, shuffle=False) + else: + sampler_train = torch.utils.data.RandomSampler(dataset_train) + sampler_val = torch.utils.data.SequentialSampler(dataset_val) + + batch_sampler_train = torch.utils.data.BatchSampler( + sampler_train, args.batch_size, drop_last=True) + + data_loader_train = DataLoader(dataset_train, batch_sampler=batch_sampler_train, + collate_fn=utils.collate_fn, num_workers=args.num_workers, + pin_memory=True) + data_loader_val = DataLoader(dataset_val, args.batch_size, sampler=sampler_val, + drop_last=False, collate_fn=utils.collate_fn, num_workers=args.num_workers, + pin_memory=True) + + # lr_backbone_names = ["backbone.0", "backbone.neck", "input_proj", "transformer.encoder"] + def match_name_keywords(n, name_keywords): + out = False + for b in name_keywords: + if b in n: + out = True + break + return out + + for n, p in model_without_ddp.named_parameters(): + print(n) + + param_dicts = [ + { + "params": + [p for n, p in model_without_ddp.named_parameters() + if not match_name_keywords(n, args.lr_backbone_names) and not match_name_keywords(n, args.lr_linear_proj_names) and p.requires_grad], + "lr": args.lr, + }, + { + "params": [p for n, p in model_without_ddp.named_parameters() if 
match_name_keywords(n, args.lr_backbone_names) and p.requires_grad], + "lr": args.lr_backbone, + }, + { + "params": [p for n, p in model_without_ddp.named_parameters() if match_name_keywords(n, args.lr_linear_proj_names) and p.requires_grad], + "lr": args.lr * args.lr_linear_proj_mult, + } + ] + if args.sgd: + optimizer = torch.optim.SGD(param_dicts, lr=args.lr, momentum=0.9, + weight_decay=args.weight_decay) + else: + optimizer = torch.optim.AdamW(param_dicts, lr=args.lr, + weight_decay=args.weight_decay) + lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, args.lr_drop) + + if args.distributed: + model = torch.nn.parallel.DistributedDataParallel(model, device_ids=[args.gpu], find_unused_parameters=True) + model_without_ddp = model.module + + if args.dataset_file == "coco_panoptic": + # We also evaluate AP during panoptic training, on original coco DS + coco_val = datasets.coco.build("val", args) + base_ds = get_coco_api_from_dataset(coco_val) + else: + base_ds = get_coco_api_from_dataset(dataset_val) + + if args.frozen_weights is not None: + checkpoint = torch.load(args.frozen_weights, map_location='cpu') + model_without_ddp.detr.load_state_dict(checkpoint['model']) + + output_dir = Path(args.output_dir) + if args.resume: + if args.resume.startswith('https'): + checkpoint = torch.hub.load_state_dict_from_url( + args.resume, map_location='cpu', check_hash=True) + else: + checkpoint = torch.load(args.resume, map_location='cpu') + missing_keys, unexpected_keys = model_without_ddp.load_state_dict(checkpoint['model'], strict=False) + unexpected_keys = [k for k in unexpected_keys if not (k.endswith('total_params') or k.endswith('total_ops'))] + if len(missing_keys) > 0: + print('Missing Keys: {}'.format(missing_keys)) + if len(unexpected_keys) > 0: + print('Unexpected Keys: {}'.format(unexpected_keys)) + if not args.eval and 'optimizer' in checkpoint and 'lr_scheduler' in checkpoint and 'epoch' in checkpoint: + import copy + p_groups = copy.deepcopy(optimizer.param_groups) + optimizer.load_state_dict(checkpoint['optimizer']) + for pg, pg_old in zip(optimizer.param_groups, p_groups): + pg['lr'] = pg_old['lr'] + pg['initial_lr'] = pg_old['initial_lr'] + print(optimizer.param_groups) + lr_scheduler.load_state_dict(checkpoint['lr_scheduler']) + # todo: this is a hack for doing experiment that resume from checkpoint and also modify lr scheduler (e.g., decrease lr in advance). 
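+            # NOTE (editor): the hack below rewrites the resumed StepLR in place:
+            # step_size takes the current --lr_drop, base_lrs are rebuilt from the
+            # restored param groups, and step(last_epoch) recomputes the learning
+            # rate as if the new drop schedule had been in effect from epoch 0.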
+ args.override_resumed_lr_drop = True + if args.override_resumed_lr_drop: + print('Warning: (hack) args.override_resumed_lr_drop is set to True, so args.lr_drop would override lr_drop in resumed lr_scheduler.') + lr_scheduler.step_size = args.lr_drop + lr_scheduler.base_lrs = list(map(lambda group: group['initial_lr'], optimizer.param_groups)) + lr_scheduler.step(lr_scheduler.last_epoch) + args.start_epoch = checkpoint['epoch'] + 1 + # check the resumed model +# if not args.eval: +# test_stats, coco_evaluator, _ = evaluate( +# model, criterion, postprocessors, data_loader_val, base_ds, device, args.output_dir +# ) + + if args.eval: + assert args.batch_size == 1, "Only batch size 1 is supported in eval mode." + # tracker = MOTXTracker(score_thresh=args.track_thresh) + # test_stats, coco_evaluator, res_tracks = evaluate(model, criterion, postprocessors, data_loader_val, + # base_ds, device, args.output_dir, tracker=tracker, + # phase='eval', det_val=args.det_val) + tracker = BYTETracker(args) + test_stats, coco_evaluator, res_tracks = evaluate_track(args, model, criterion, postprocessors, data_loader_val, + base_ds, device, args.output_dir, tracker=tracker, + phase='eval', det_val=args.det_val) + if args.output_dir: + utils.save_on_master(coco_evaluator.coco_eval["bbox"].eval, output_dir / "eval.pth") + if res_tracks is not None: + print("Creating video index for {}.".format(args.dataset_file)) + video_to_images = defaultdict(list) + video_names = defaultdict() + for _, info in dataset_val.coco.imgs.items(): + video_to_images[info["video_id"]].append({"image_id": info["id"], + "frame_id": info["frame_id"]}) + video_name = info["file_name"].split("/")[0] + if video_name not in video_names: + video_names[info["video_id"]] = video_name + assert len(video_to_images) == len(video_names) + # save mot results. 
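+            # NOTE (editor): save_track (the replaced models/save_track.py from
+            # this tutorial) presumably writes one MOT-format result file per
+            # video, using the video index built above.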
+ save_track(res_tracks, args.output_dir, video_to_images, video_names, args.track_eval_split) + + return + + print("Start training") + start_time = time.time() + for epoch in range(args.start_epoch, args.epochs): + if args.distributed: + sampler_train.set_epoch(epoch) + train_stats = train_one_epoch( + model, criterion, data_loader_train, optimizer, device, epoch, args.clip_max_norm) + lr_scheduler.step() + if args.output_dir: + checkpoint_paths = [output_dir / 'checkpoint.pth'] + # extra checkpoint before LR drop and every 5 epochs + if (epoch + 1) % args.lr_drop == 0 or (epoch + 1) % 5 == 0: + checkpoint_paths.append(output_dir / f'checkpoint{epoch:04}.pth') + for checkpoint_path in checkpoint_paths: + utils.save_on_master({ + 'model': model_without_ddp.state_dict(), + 'optimizer': optimizer.state_dict(), + 'lr_scheduler': lr_scheduler.state_dict(), + 'epoch': epoch, + 'args': args, + }, checkpoint_path) + if epoch % 10 == 0 or epoch > args.epochs - 5: + test_stats, coco_evaluator, _ = evaluate( + model, criterion, postprocessors, data_loader_val, base_ds, device, args.output_dir, + ) + + log_stats = {**{f'train_{k}': v for k, v in train_stats.items()}, + **{f'test_{k}': v for k, v in test_stats.items()}, + 'epoch': epoch, + 'n_parameters': n_parameters} + + if args.output_dir and utils.is_main_process(): + with (output_dir / "log.txt").open("a") as f: + f.write(json.dumps(log_stats) + "\n") + + # for evaluation logs + if coco_evaluator is not None: + (output_dir / 'eval').mkdir(exist_ok=True) + if "bbox" in coco_evaluator.coco_eval: + filenames = ['latest.pth'] + if epoch % 50 == 0: + filenames.append(f'{epoch:03}.pth') + for name in filenames: + torch.save(coco_evaluator.coco_eval["bbox"].eval, + output_dir / "eval" / name) + + total_time = time.time() - start_time + total_time_str = str(datetime.timedelta(seconds=int(total_time))) + print('Training time {}'.format(total_time_str)) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser('Deformable DETR training and evaluation script', parents=[get_args_parser()]) + args = parser.parse_args() + if args.output_dir: + Path(args.output_dir).mkdir(parents=True, exist_ok=True) + main(args) diff --git a/tracking/docker-build-context/byte_track/tutorials/transtrack/mot_online/basetrack.py b/tracking/docker-build-context/byte_track/tutorials/transtrack/mot_online/basetrack.py new file mode 100644 index 0000000000000000000000000000000000000000..a7130b5cc08ac55705c155594d0f2a1d09f96774 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/transtrack/mot_online/basetrack.py @@ -0,0 +1,52 @@ +import numpy as np +from collections import OrderedDict + + +class TrackState(object): + New = 0 + Tracked = 1 + Lost = 2 + Removed = 3 + + +class BaseTrack(object): + _count = 0 + + track_id = 0 + is_activated = False + state = TrackState.New + + history = OrderedDict() + features = [] + curr_feature = None + score = 0 + start_frame = 0 + frame_id = 0 + time_since_update = 0 + + # multi-camera + location = (np.inf, np.inf) + + @property + def end_frame(self): + return self.frame_id + + @staticmethod + def next_id(): + BaseTrack._count += 1 + return BaseTrack._count + + def activate(self, *args): + raise NotImplementedError + + def predict(self): + raise NotImplementedError + + def update(self, *args, **kwargs): + raise NotImplementedError + + def mark_lost(self): + self.state = TrackState.Lost + + def mark_removed(self): + self.state = TrackState.Removed \ No newline at end of file diff --git 
a/tracking/docker-build-context/byte_track/tutorials/transtrack/mot_online/byte_tracker.py b/tracking/docker-build-context/byte_track/tutorials/transtrack/mot_online/byte_tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..be6e179a5d7ab5fb7a276cda82679ce9656f29f1 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/transtrack/mot_online/byte_tracker.py @@ -0,0 +1,354 @@ +import numpy as np +from collections import deque +import os +import os.path as osp +import copy +import torch +import torch.nn.functional as F + +from .kalman_filter import KalmanFilter +from mot_online import matching +from .basetrack import BaseTrack, TrackState + +class STrack(BaseTrack): + shared_kalman = KalmanFilter() + def __init__(self, tlwh, score, buffer_size=30): + + # wait activate + self._tlwh = np.asarray(tlwh, dtype=np.float) + self.kalman_filter = None + self.mean, self.covariance = None, None + self.is_activated = False + + self.score = score + self.tracklet_len = 0 + + def predict(self): + mean_state = self.mean.copy() + if self.state != TrackState.Tracked: + mean_state[7] = 0 + self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance) + + @staticmethod + def multi_predict(stracks): + if len(stracks) > 0: + multi_mean = np.asarray([st.mean.copy() for st in stracks]) + multi_covariance = np.asarray([st.covariance for st in stracks]) + for i, st in enumerate(stracks): + if st.state != TrackState.Tracked: + multi_mean[i][7] = 0 + multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(multi_mean, multi_covariance) + for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)): + stracks[i].mean = mean + stracks[i].covariance = cov + + def activate(self, kalman_filter, frame_id): + """Start a new tracklet""" + self.kalman_filter = kalman_filter + self.track_id = self.next_id() + self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh)) + + self.tracklet_len = 0 + self.state = TrackState.Tracked + if frame_id == 1: + self.is_activated = True + #self.is_activated = True + self.frame_id = frame_id + self.start_frame = frame_id + + def re_activate(self, new_track, frame_id, new_id=False): + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) + ) + self.tracklet_len = 0 + self.state = TrackState.Tracked + self.is_activated = True + self.frame_id = frame_id + if new_id: + self.track_id = self.next_id() + self.score = new_track.score + + def update(self, new_track, frame_id): + """ + Update a matched track + :type new_track: STrack + :type frame_id: int + :type update_feature: bool + :return: + """ + self.frame_id = frame_id + self.tracklet_len += 1 + + new_tlwh = new_track.tlwh + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh)) + self.state = TrackState.Tracked + self.is_activated = True + + self.score = new_track.score + + @property + # @jit(nopython=True) + def tlwh(self): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. + """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + # @jit(nopython=True) + def tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. 
+ """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_xyah(tlwh): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. + """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self): + return self.tlwh_to_xyah(self.tlwh) + + @staticmethod + # @jit(nopython=True) + def tlbr_to_tlwh(tlbr): + ret = np.asarray(tlbr).copy() + ret[2:] -= ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_tlbr(tlwh): + ret = np.asarray(tlwh).copy() + ret[2:] += ret[:2] + return ret + + def __repr__(self): + return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame) + + +class BYTETracker(object): + def __init__(self, args, frame_rate=30): + self.tracked_stracks = [] # type: list[STrack] + self.lost_stracks = [] # type: list[STrack] + self.removed_stracks = [] # type: list[STrack] + + self.frame_id = 0 + self.args = args + #self.det_thresh = args.track_thresh + self.det_thresh = args.track_thresh + 0.1 + self.buffer_size = int(frame_rate / 30.0 * 30) + self.max_time_lost = self.buffer_size + self.max_per_image = args.num_queries + self.kalman_filter = KalmanFilter() + + def update(self, output_results): + self.frame_id += 1 + activated_starcks = [] + refind_stracks = [] + lost_stracks = [] + removed_stracks = [] + + scores = output_results["scores"].cpu().numpy() + classes = output_results["labels"].cpu().numpy() + bboxes = output_results["boxes"].cpu().numpy() # x1y1x2y2 + + remain_inds = scores > self.args.track_thresh + inds_low = scores > 0.2 + inds_high = scores < self.args.track_thresh + inds_second = np.logical_and(inds_low, inds_high) + dets_second = bboxes[inds_second] + dets = bboxes[remain_inds] + scores_keep = scores[remain_inds] + scores_second = scores[inds_second] + + # vis + ''' + for i in range(0, dets.shape[0]): + bbox = dets[i][0:4] + cv2.rectangle(img0, (bbox[0], bbox[1]), + (bbox[2], bbox[3]), + (0, 255, 0), 2) + cv2.imshow('dets', img0) + cv2.waitKey(0) + id0 = id0-1 + ''' + + if len(dets) > 0: + '''Detections''' + detections = [STrack(STrack.tlbr_to_tlwh(tlbr), s, 30) for + (tlbr, s) in zip(dets, scores_keep)] + else: + detections = [] + + ''' Add newly detected tracklets to tracked_stracks''' + unconfirmed = [] + tracked_stracks = [] # type: list[STrack] + for track in self.tracked_stracks: + if not track.is_activated: + unconfirmed.append(track) + else: + tracked_stracks.append(track) + + ''' Step 2: First association, with Kalman and IOU''' + strack_pool = joint_stracks(tracked_stracks, self.lost_stracks) + # Predict the current location with KF + STrack.multi_predict(strack_pool) + dists = matching.iou_distance(strack_pool, detections) + #dists = matching.fuse_motion(self.kalman_filter, dists, strack_pool, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.8) + + for itracked, idet in matches: + track = strack_pool[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + track.update(detections[idet], self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + ''' Step 3: Second association, with IOU''' + # association the untrack to the low score detections + if len(dets_second) > 0: + '''Detections''' + detections_second = [STrack(STrack.tlbr_to_tlwh(tlbr), s, 30) for + (tlbr, s) in zip(dets_second, 
scores_second)] + else: + detections_second = [] + r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked] + dists = matching.iou_distance(r_tracked_stracks, detections_second) + matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.4) + for itracked, idet in matches: + track = r_tracked_stracks[itracked] + det = detections_second[idet] + if track.state == TrackState.Tracked: + track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + for it in u_track: + #track = r_tracked_stracks[it] + track = r_tracked_stracks[it] + if not track.state == TrackState.Lost: + track.mark_lost() + lost_stracks.append(track) + + '''Deal with unconfirmed tracks, usually tracks with only one beginning frame''' + detections = [detections[i] for i in u_detection] + dists = matching.iou_distance(unconfirmed, detections) + matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7) + for itracked, idet in matches: + unconfirmed[itracked].update(detections[idet], self.frame_id) + activated_starcks.append(unconfirmed[itracked]) + for it in u_unconfirmed: + track = unconfirmed[it] + track.mark_removed() + removed_stracks.append(track) + + """ Step 4: Init new stracks""" + for inew in u_detection: + track = detections[inew] + if track.score < self.det_thresh: + continue + track.activate(self.kalman_filter, self.frame_id) + activated_starcks.append(track) + """ Step 5: Update state""" + for track in self.lost_stracks: + if self.frame_id - track.end_frame > self.max_time_lost: + track.mark_removed() + removed_stracks.append(track) + + # print('Ramained match {} s'.format(t4-t3)) + + self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked] + self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks) + self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks) + self.lost_stracks.extend(lost_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks) + self.removed_stracks.extend(removed_stracks) + self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks) + # get scores of lost tracks + #output_stracks = [track for track in self.tracked_stracks if track.is_activated] + ret = list() + for track in self.tracked_stracks: + if track.is_activated: + track_dict = {} + track_dict['tracking_id'] = track.track_id + track_dict['active'] = 1 + track_dict['bbox'] = track.tlbr + track_dict['score'] = track.score + ret.append(track_dict) + + return copy.deepcopy(ret) + + +def joint_stracks(tlista, tlistb): + exists = {} + res = [] + for t in tlista: + exists[t.track_id] = 1 + res.append(t) + for t in tlistb: + tid = t.track_id + if not exists.get(tid, 0): + exists[tid] = 1 + res.append(t) + return res + + +def sub_stracks(tlista, tlistb): + stracks = {} + for t in tlista: + stracks[t.track_id] = t + for t in tlistb: + tid = t.track_id + if stracks.get(tid, 0): + del stracks[tid] + return list(stracks.values()) + + +def remove_duplicate_stracks(stracksa, stracksb): + pdist = matching.iou_distance(stracksa, stracksb) + pairs = np.where(pdist < 0.15) + dupa, dupb = list(), list() + for p, q in zip(*pairs): + timep = stracksa[p].frame_id - stracksa[p].start_frame + timeq = stracksb[q].frame_id - stracksb[q].start_frame + if timep 
> timeq: + dupb.append(q) + else: + dupa.append(p) + resa = [t for i, t in enumerate(stracksa) if not i in dupa] + resb = [t for i, t in enumerate(stracksb) if not i in dupb] + return resa, resb + + +def remove_fp_stracks(stracksa, n_frame=10): + remain = [] + for t in stracksa: + score_5 = t.score_list[-n_frame:] + score_5 = np.array(score_5, dtype=np.float32) + index = score_5 < 0.45 + num = np.sum(index) + if num < n_frame: + remain.append(t) + return remain \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/tutorials/transtrack/mot_online/kalman_filter.py b/tracking/docker-build-context/byte_track/tutorials/transtrack/mot_online/kalman_filter.py new file mode 100644 index 0000000000000000000000000000000000000000..deda8a26292b81bc6512a8f6145afabde6c16d7a --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/transtrack/mot_online/kalman_filter.py @@ -0,0 +1,270 @@ +# vim: expandtab:ts=4:sw=4 +import numpy as np +import scipy.linalg + + +""" +Table for the 0.95 quantile of the chi-square distribution with N degrees of +freedom (contains values for N=1, ..., 9). Taken from MATLAB/Octave's chi2inv +function and used as Mahalanobis gating threshold. +""" +chi2inv95 = { + 1: 3.8415, + 2: 5.9915, + 3: 7.8147, + 4: 9.4877, + 5: 11.070, + 6: 12.592, + 7: 14.067, + 8: 15.507, + 9: 16.919} + + +class KalmanFilter(object): + """ + A simple Kalman filter for tracking bounding boxes in image space. + + The 8-dimensional state space + + x, y, a, h, vx, vy, va, vh + + contains the bounding box center position (x, y), aspect ratio a, height h, + and their respective velocities. + + Object motion follows a constant velocity model. The bounding box location + (x, y, a, h) is taken as direct observation of the state space (linear + observation model). + + """ + + def __init__(self): + ndim, dt = 4, 1. + + # Create Kalman filter model matrices. + self._motion_mat = np.eye(2 * ndim, 2 * ndim) + for i in range(ndim): + self._motion_mat[i, ndim + i] = dt + self._update_mat = np.eye(ndim, 2 * ndim) + + # Motion and observation uncertainty are chosen relative to the current + # state estimate. These weights control the amount of uncertainty in + # the model. This is a bit hacky. + self._std_weight_position = 1. / 20 + self._std_weight_velocity = 1. / 160 + + def initiate(self, measurement): + """Create track from unassociated measurement. + + Parameters + ---------- + measurement : ndarray + Bounding box coordinates (x, y, a, h) with center position (x, y), + aspect ratio a, and height h. + + Returns + ------- + (ndarray, ndarray) + Returns the mean vector (8 dimensional) and covariance matrix (8x8 + dimensional) of the new track. Unobserved velocities are initialized + to 0 mean. + + """ + mean_pos = measurement + mean_vel = np.zeros_like(mean_pos) + mean = np.r_[mean_pos, mean_vel] + + std = [ + 2 * self._std_weight_position * measurement[3], + 2 * self._std_weight_position * measurement[3], + 1e-2, + 2 * self._std_weight_position * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 1e-5, + 10 * self._std_weight_velocity * measurement[3]] + covariance = np.diag(np.square(std)) + return mean, covariance + + def predict(self, mean, covariance): + """Run Kalman filter prediction step. + + Parameters + ---------- + mean : ndarray + The 8 dimensional mean vector of the object state at the previous + time step. 
+ covariance : ndarray + The 8x8 dimensional covariance matrix of the object state at the + previous time step. + + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + + """ + std_pos = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-2, + self._std_weight_position * mean[3]] + std_vel = [ + self._std_weight_velocity * mean[3], + self._std_weight_velocity * mean[3], + 1e-5, + self._std_weight_velocity * mean[3]] + motion_cov = np.diag(np.square(np.r_[std_pos, std_vel])) + + #mean = np.dot(self._motion_mat, mean) + mean = np.dot(mean, self._motion_mat.T) + covariance = np.linalg.multi_dot(( + self._motion_mat, covariance, self._motion_mat.T)) + motion_cov + + return mean, covariance + + def project(self, mean, covariance): + """Project state distribution to measurement space. + + Parameters + ---------- + mean : ndarray + The state's mean vector (8 dimensional array). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + + Returns + ------- + (ndarray, ndarray) + Returns the projected mean and covariance matrix of the given state + estimate. + + """ + std = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-1, + self._std_weight_position * mean[3]] + innovation_cov = np.diag(np.square(std)) + + mean = np.dot(self._update_mat, mean) + covariance = np.linalg.multi_dot(( + self._update_mat, covariance, self._update_mat.T)) + return mean, covariance + innovation_cov + + def multi_predict(self, mean, covariance): + """Run Kalman filter prediction step (Vectorized version). + Parameters + ---------- + mean : ndarray + The Nx8 dimensional mean matrix of the object states at the previous + time step. + covariance : ndarray + The Nx8x8 dimensional covariance matrics of the object states at the + previous time step. + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + """ + std_pos = [ + self._std_weight_position * mean[:, 3], + self._std_weight_position * mean[:, 3], + 1e-2 * np.ones_like(mean[:, 3]), + self._std_weight_position * mean[:, 3]] + std_vel = [ + self._std_weight_velocity * mean[:, 3], + self._std_weight_velocity * mean[:, 3], + 1e-5 * np.ones_like(mean[:, 3]), + self._std_weight_velocity * mean[:, 3]] + sqr = np.square(np.r_[std_pos, std_vel]).T + + motion_cov = [] + for i in range(len(mean)): + motion_cov.append(np.diag(sqr[i])) + motion_cov = np.asarray(motion_cov) + + mean = np.dot(mean, self._motion_mat.T) + left = np.dot(self._motion_mat, covariance).transpose((1, 0, 2)) + covariance = np.dot(left, self._motion_mat.T) + motion_cov + + return mean, covariance + + def update(self, mean, covariance, measurement): + """Run Kalman filter correction step. + + Parameters + ---------- + mean : ndarray + The predicted state's mean vector (8 dimensional). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + measurement : ndarray + The 4 dimensional measurement vector (x, y, a, h), where (x, y) + is the center position, a the aspect ratio, and h the height of the + bounding box. + + Returns + ------- + (ndarray, ndarray) + Returns the measurement-corrected state distribution. 
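For orientation, here is a minimal usage sketch of the filter defined above: initiate a track from one (x, y, a, h) measurement, run the constant-velocity predict step, then correct with the next frame's measurement. The numeric values are made up, and the import assumes `mot_online` is on the Python path:

```python
import numpy as np
from mot_online.kalman_filter import KalmanFilter  # the module added above

kf = KalmanFilter()

# A detection in (center-x, center-y, aspect-ratio, height) form; values illustrative.
z0 = np.array([320.0, 240.0, 0.5, 120.0])
mean, cov = kf.initiate(z0)            # 8-dim state; velocities start at zero

mean, cov = kf.predict(mean, cov)      # constant-velocity time update

z1 = np.array([324.0, 242.0, 0.5, 121.0])   # next frame's measurement
mean, cov = kf.update(mean, cov, z1)   # Kalman correction step

print(mean[:4])  # corrected (x, y, a, h)
print(mean[4:])  # estimated velocities
```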
+ + """ + projected_mean, projected_cov = self.project(mean, covariance) + + chol_factor, lower = scipy.linalg.cho_factor( + projected_cov, lower=True, check_finite=False) + kalman_gain = scipy.linalg.cho_solve( + (chol_factor, lower), np.dot(covariance, self._update_mat.T).T, + check_finite=False).T + innovation = measurement - projected_mean + + new_mean = mean + np.dot(innovation, kalman_gain.T) + new_covariance = covariance - np.linalg.multi_dot(( + kalman_gain, projected_cov, kalman_gain.T)) + return new_mean, new_covariance + + def gating_distance(self, mean, covariance, measurements, + only_position=False, metric='maha'): + """Compute gating distance between state distribution and measurements. + A suitable distance threshold can be obtained from `chi2inv95`. If + `only_position` is False, the chi-square distribution has 4 degrees of + freedom, otherwise 2. + Parameters + ---------- + mean : ndarray + Mean vector over the state distribution (8 dimensional). + covariance : ndarray + Covariance of the state distribution (8x8 dimensional). + measurements : ndarray + An Nx4 dimensional matrix of N measurements, each in + format (x, y, a, h) where (x, y) is the bounding box center + position, a the aspect ratio, and h the height. + only_position : Optional[bool] + If True, distance computation is done with respect to the bounding + box center position only. + Returns + ------- + ndarray + Returns an array of length N, where the i-th element contains the + squared Mahalanobis distance between (mean, covariance) and + `measurements[i]`. + """ + mean, covariance = self.project(mean, covariance) + if only_position: + mean, covariance = mean[:2], covariance[:2, :2] + measurements = measurements[:, :2] + + d = measurements - mean + if metric == 'gaussian': + return np.sum(d * d, axis=1) + elif metric == 'maha': + cholesky_factor = np.linalg.cholesky(covariance) + z = scipy.linalg.solve_triangular( + cholesky_factor, d.T, lower=True, check_finite=False, + overwrite_b=True) + squared_maha = np.sum(z * z, axis=0) + return squared_maha + else: + raise ValueError('invalid distance metric') \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/tutorials/transtrack/mot_online/matching.py b/tracking/docker-build-context/byte_track/tutorials/transtrack/mot_online/matching.py new file mode 100644 index 0000000000000000000000000000000000000000..d21c958237a64abf185f5298a62d2bcb9270e254 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/transtrack/mot_online/matching.py @@ -0,0 +1,156 @@ +import cv2 +import numpy as np +import scipy +import lap +from scipy.spatial.distance import cdist + +from cython_bbox import bbox_overlaps as bbox_ious +from mot_online import kalman_filter +import time + +def merge_matches(m1, m2, shape): + O,P,Q = shape + m1 = np.asarray(m1) + m2 = np.asarray(m2) + + M1 = scipy.sparse.coo_matrix((np.ones(len(m1)), (m1[:, 0], m1[:, 1])), shape=(O, P)) + M2 = scipy.sparse.coo_matrix((np.ones(len(m2)), (m2[:, 0], m2[:, 1])), shape=(P, Q)) + + mask = M1*M2 + match = mask.nonzero() + match = list(zip(match[0], match[1])) + unmatched_O = tuple(set(range(O)) - set([i for i, j in match])) + unmatched_Q = tuple(set(range(Q)) - set([j for i, j in match])) + + return match, unmatched_O, unmatched_Q + + +def _indices_to_matches(cost_matrix, indices, thresh): + matched_cost = cost_matrix[tuple(zip(*indices))] + matched_mask = (matched_cost <= thresh) + + matches = indices[matched_mask] + unmatched_a = tuple(set(range(cost_matrix.shape[0])) - 
set(matches[:, 0])) + unmatched_b = tuple(set(range(cost_matrix.shape[1])) - set(matches[:, 1])) + + return matches, unmatched_a, unmatched_b + + +def linear_assignment(cost_matrix, thresh): + if cost_matrix.size == 0: + return np.empty((0, 2), dtype=int), tuple(range(cost_matrix.shape[0])), tuple(range(cost_matrix.shape[1])) + matches, unmatched_a, unmatched_b = [], [], [] + cost, x, y = lap.lapjv(cost_matrix, extend_cost=True, cost_limit=thresh) + for ix, mx in enumerate(x): + if mx >= 0: + matches.append([ix, mx]) + unmatched_a = np.where(x < 0)[0] + unmatched_b = np.where(y < 0)[0] + matches = np.asarray(matches) + return matches, unmatched_a, unmatched_b + + +def ious(atlbrs, btlbrs): + """ + Compute cost based on IoU + :type atlbrs: list[tlbr] | np.ndarray + :type atlbrs: list[tlbr] | np.ndarray + + :rtype ious np.ndarray + """ + ious = np.zeros((len(atlbrs), len(btlbrs)), dtype=np.float) + if ious.size == 0: + return ious + + ious = bbox_ious( + np.ascontiguousarray(atlbrs, dtype=np.float), + np.ascontiguousarray(btlbrs, dtype=np.float) + ) + + return ious + + +def iou_distance(atracks, btracks): + """ + Compute cost based on IoU + :type atracks: list[STrack] + :type btracks: list[STrack] + + :rtype cost_matrix np.ndarray + """ + + if (len(atracks)>0 and isinstance(atracks[0], np.ndarray)) or (len(btracks) > 0 and isinstance(btracks[0], np.ndarray)): + atlbrs = atracks + btlbrs = btracks + else: + atlbrs = [track.tlbr for track in atracks] + btlbrs = [track.tlbr for track in btracks] + _ious = ious(atlbrs, btlbrs) + cost_matrix = 1 - _ious + + return cost_matrix + +def embedding_distance(tracks, detections, metric='cosine'): + """ + :param tracks: list[STrack] + :param detections: list[BaseTrack] + :param metric: + :return: cost_matrix np.ndarray + """ + + cost_matrix = np.zeros((len(tracks), len(detections)), dtype=np.float) + if cost_matrix.size == 0: + return cost_matrix + det_features = np.asarray([track.curr_feat for track in detections], dtype=np.float) + track_features = np.asarray([track.smooth_feat for track in tracks], dtype=np.float) + cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric)) # Nomalized features + return cost_matrix + + +def gate_cost_matrix(kf, cost_matrix, tracks, detections, only_position=False): + if cost_matrix.size == 0: + return cost_matrix + gating_dim = 2 if only_position else 4 + gating_threshold = kalman_filter.chi2inv95[gating_dim] + measurements = np.asarray([det.to_xyah() for det in detections]) + for row, track in enumerate(tracks): + gating_distance = kf.gating_distance( + track.mean, track.covariance, measurements, only_position) + cost_matrix[row, gating_distance > gating_threshold] = np.inf + return cost_matrix + + +def fuse_motion(kf, cost_matrix, tracks, detections, only_position=False, lambda_=0.98): + if cost_matrix.size == 0: + return cost_matrix + gating_dim = 2 if only_position else 4 + gating_threshold = kalman_filter.chi2inv95[gating_dim] + measurements = np.asarray([det.to_xyah() for det in detections]) + for row, track in enumerate(tracks): + gating_distance = kf.gating_distance( + track.mean, track.covariance, measurements, only_position, metric='maha') + cost_matrix[row, gating_distance > gating_threshold] = np.inf + cost_matrix[row] = lambda_ * cost_matrix[row] + (1 - lambda_) * gating_distance + return cost_matrix + + +def fuse_iou(cost_matrix, tracks, detections): + if cost_matrix.size == 0: + return cost_matrix + reid_sim = 1 - cost_matrix + iou_dist = iou_distance(tracks, detections) + iou_sim = 1 - 
iou_dist + fuse_sim = reid_sim * (1 + iou_sim) / 2 + det_scores = np.array([det.score for det in detections]) + det_scores = np.expand_dims(det_scores, axis=0).repeat(cost_matrix.shape[0], axis=0) + #fuse_sim = fuse_sim * (1 + det_scores) / 2 + fuse_cost = 1 - fuse_sim + return fuse_cost + + +def fuse_iou_add(cost_matrix, tracks, detections, weight=0.5): + if cost_matrix.size == 0: + return cost_matrix + iou_dist = iou_distance(tracks, detections) + fuse_dist = weight * iou_dist + (1 - weight) * cost_matrix + return fuse_dist \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/tutorials/transtrack/save_track.py b/tracking/docker-build-context/byte_track/tutorials/transtrack/save_track.py new file mode 100644 index 0000000000000000000000000000000000000000..7a0517c8620d2868b056b7b84c3e5c41713d06f3 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/transtrack/save_track.py @@ -0,0 +1,52 @@ +""" +Copyright (c) https://github.com/xingyizhou/CenterTrack +Modified by Peize Sun, Rufeng Zhang +""" +# coding: utf-8 +import os +import json +import logging +from collections import defaultdict + + +def save_track(results, out_root, video_to_images, video_names, data_split='val'): + assert out_root is not None + out_dir = os.path.join(out_root, data_split) + if not os.path.exists(out_dir): + os.mkdir(out_dir) + + # save json. + # json_path = os.path.join(out_dir, "track_results.json") + # with open(json_path, "w") as f: + # f.write(json.dumps(results)) + # f.flush() + + # save it in standard format. + track_dir = os.path.join(out_dir, "tracks") + if not os.path.exists(track_dir): + os.mkdir(track_dir) + for video_id in video_to_images.keys(): + video_infos = video_to_images[video_id] + video_name = video_names[video_id] + file_path = os.path.join(track_dir, "{}.txt".format(video_name)) + f = open(file_path, "w") + tracks = defaultdict(list) + for video_info in video_infos: + image_id, frame_id = video_info["image_id"], video_info["frame_id"] + result = results[image_id] + for item in result: + if not ("tracking_id" in item): + raise NotImplementedError + tracking_id = item["tracking_id"] + bbox = item["bbox"] + bbox = [bbox[0], bbox[1], bbox[2], bbox[3], item['score'], item['active']] + tracks[tracking_id].append([frame_id] + bbox) + + rename_track_id = 0 + for track_id in sorted(tracks): + rename_track_id += 1 + for t in tracks[track_id]: + if t[6] > 0: + f.write("{},{},{:.2f},{:.2f},{:.2f},{:.2f},-1,-1,-1,-1\n".format( + t[0], rename_track_id, t[1], t[2], t[3] - t[1], t[4] - t[2])) + f.close() diff --git a/tracking/docker-build-context/byte_track/tutorials/transtrack/tracker.py b/tracking/docker-build-context/byte_track/tutorials/transtrack/tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..3e1f300bb35dc3b20f59dc912a35db3d8a07fd40 --- /dev/null +++ b/tracking/docker-build-context/byte_track/tutorials/transtrack/tracker.py @@ -0,0 +1,191 @@ +""" +Copyright (c) https://github.com/xingyizhou/CenterTrack +Modified by Peize Sun, Rufeng Zhang +""" +# coding: utf-8 +import torch +from scipy.optimize import linear_sum_assignment +from util import box_ops +import copy + +class Tracker(object): + def __init__(self, score_thresh, max_age=32): + self.score_thresh = score_thresh + self.low_thresh = 0.2 + self.high_thresh = score_thresh + 0.1 + self.max_age = max_age + self.id_count = 0 + self.tracks_dict = dict() + self.tracks = list() + self.unmatched_tracks = list() + self.reset_all() + + def reset_all(self): + self.id_count = 0 + 
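A quick illustration of the `linear_assignment` helper from `matching.py` above: `lap.lapjv` with `extend_cost=True` and `cost_limit=thresh` pads the cost matrix so that any pairing more expensive than the threshold is simply left unmatched rather than forced. A sketch with synthetic costs:

```python
import numpy as np
import lap

# 3 tracks x 2 detections; cost values are synthetic.
cost = np.array([[0.1, 0.9],
                 [0.8, 0.2],
                 [0.9, 0.9]])

# Same call that matching.linear_assignment makes internally.
_, x, y = lap.lapjv(cost, extend_cost=True, cost_limit=0.5)

matches = [[i, int(j)] for i, j in enumerate(x) if j >= 0]
print(matches)             # [[0, 0], [1, 1]]; row 2 exceeds the cost limit
print(np.where(x < 0)[0])  # unmatched rows: [2]
print(np.where(y < 0)[0])  # unmatched columns: []
```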
self.tracks_dict = dict() + self.tracks = list() + self.unmatched_tracks = list() + + def init_track(self, results): + + scores = results["scores"] + classes = results["labels"] + bboxes = results["boxes"] # x1y1x2y2 + + ret = list() + ret_dict = dict() + for idx in range(scores.shape[0]): + if scores[idx] >= self.score_thresh: + self.id_count += 1 + obj = dict() + obj["score"] = float(scores[idx]) + obj["bbox"] = bboxes[idx, :].cpu().numpy().tolist() + obj["tracking_id"] = self.id_count + obj['active'] = 1 + obj['age'] = 1 + ret.append(obj) + ret_dict[idx] = obj + + self.tracks = ret + self.tracks_dict = ret_dict + return copy.deepcopy(ret) + + + def step(self, output_results): + scores = output_results["scores"] + bboxes = output_results["boxes"] # x1y1x2y2 + track_bboxes = output_results["track_boxes"] if "track_boxes" in output_results else None # x1y1x2y2 + + results = list() + results_dict = dict() + results_second = list() + + tracks = list() + + for idx in range(scores.shape[0]): + if idx in self.tracks_dict and track_bboxes is not None: + self.tracks_dict[idx]["bbox"] = track_bboxes[idx, :].cpu().numpy().tolist() + + if scores[idx] >= self.score_thresh: + obj = dict() + obj["score"] = float(scores[idx]) + obj["bbox"] = bboxes[idx, :].cpu().numpy().tolist() + results.append(obj) + results_dict[idx] = obj + elif scores[idx] >= self.low_thresh: + second_obj = dict() + second_obj["score"] = float(scores[idx]) + second_obj["bbox"] = bboxes[idx, :].cpu().numpy().tolist() + results_second.append(second_obj) + results_dict[idx] = second_obj + + tracks = [v for v in self.tracks_dict.values()] + self.unmatched_tracks + # for trackss in tracks: + # print(trackss.keys()) + N = len(results) + M = len(tracks) + + ret = list() + unmatched_tracks = [t for t in range(M)] + unmatched_dets = [d for d in range(N)] + + if N > 0 and M > 0: + det_box = torch.stack([torch.tensor(obj['bbox']) for obj in results], dim=0) # N x 4 + track_box = torch.stack([torch.tensor(obj['bbox']) for obj in tracks], dim=0) # M x 4 + cost_bbox = 1.0 - box_ops.generalized_box_iou(det_box, track_box) # N x M + + matched_indices = linear_sum_assignment(cost_bbox) + unmatched_dets = [d for d in range(N) if not (d in matched_indices[0])] + unmatched_tracks = [d for d in range(M) if not (d in matched_indices[1])] + + matches = [[],[]] + for (m0, m1) in zip(matched_indices[0], matched_indices[1]): + if cost_bbox[m0, m1] > 1.2: + unmatched_dets.append(m0) + unmatched_tracks.append(m1) + else: + matches[0].append(m0) + matches[1].append(m1) + + for (m0, m1) in zip(matches[0], matches[1]): + track = results[m0] + track['tracking_id'] = tracks[m1]['tracking_id'] + track['age'] = 1 + track['active'] = 1 + ret.append(track) + + # second association + N_second = len(results_second) + unmatched_tracks_obj = list() + for i in unmatched_tracks: + #print(tracks[i].keys()) + track = tracks[i] + if track['active'] == 1: + unmatched_tracks_obj.append(track) + M_second = len(unmatched_tracks_obj) + unmatched_tracks_second = [t for t in range(M_second)] + + if N_second > 0 and M_second > 0: + det_box_second = torch.stack([torch.tensor(obj['bbox']) for obj in results_second], dim=0) # N_second x 4 + track_box_second = torch.stack([torch.tensor(obj['bbox']) for obj in unmatched_tracks_obj], dim=0) # M_second x 4 + cost_bbox_second = 1.0 - box_ops.generalized_box_iou(det_box_second, track_box_second) # N_second x M_second + + matched_indices_second = linear_sum_assignment(cost_bbox_second) + unmatched_tracks_second = [d for d in range(M_second) if 
not (d in matched_indices_second[1])] + + matches_second = [[],[]] + for (m0, m1) in zip(matched_indices_second[0], matched_indices_second[1]): + if cost_bbox_second[m0, m1] > 0.8: + unmatched_tracks_second.append(m1) + else: + matches_second[0].append(m0) + matches_second[1].append(m1) + + for (m0, m1) in zip(matches_second[0], matches_second[1]): + track = results_second[m0] + track['tracking_id'] = unmatched_tracks_obj[m1]['tracking_id'] + track['age'] = 1 + track['active'] = 1 + ret.append(track) + + for i in unmatched_dets: + trackd = results[i] + if trackd["score"] >= self.high_thresh: + self.id_count += 1 + trackd['tracking_id'] = self.id_count + trackd['age'] = 1 + trackd['active'] = 1 + ret.append(trackd) + + # ------------------------------------------------------ # + ret_unmatched_tracks = [] + + for j in unmatched_tracks: + track = tracks[j] + if track['active'] == 0 and track['age'] < self.max_age: + track['age'] += 1 + track['active'] = 0 + ret.append(track) + ret_unmatched_tracks.append(track) + + for i in unmatched_tracks_second: + track = unmatched_tracks_obj[i] + if track['age'] < self.max_age: + track['age'] += 1 + track['active'] = 0 + ret.append(track) + ret_unmatched_tracks.append(track) + + # for i in unmatched_tracks: + # track = tracks[i] + # if track['age'] < self.max_age: + # track['age'] += 1 + # track['active'] = 0 + # ret.append(track) + # ret_unmatched_tracks.append(track) + #print(len(ret_unmatched_tracks)) + + self.tracks = ret + self.tracks_dict = {red_ind:red for red_ind, red in results_dict.items() if 'tracking_id' in red} + self.unmatched_tracks = ret_unmatched_tracks + return copy.deepcopy(ret) diff --git a/tracking/docker-build-context/byte_track/videos/palace.mp4 b/tracking/docker-build-context/byte_track/videos/palace.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..db93ce2b41d82e049d1b407a79bb34b3dce97c13 Binary files /dev/null and b/tracking/docker-build-context/byte_track/videos/palace.mp4 differ diff --git a/tracking/docker-build-context/byte_track/yolox/__init__.py b/tracking/docker-build-context/byte_track/yolox/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..1cbc411d419c55098e7d4e24ff0f21caaaf10a1f --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/__init__.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- + +from .utils import configure_module + +configure_module() + +__version__ = "0.1.0" diff --git a/tracking/docker-build-context/byte_track/yolox/core/__init__.py b/tracking/docker-build-context/byte_track/yolox/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c2379c704ec6320066cbb45a6b8dacca548662a0 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/core/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) Megvii, Inc. and its affiliates. + +from .launch import launch +from .trainer import Trainer diff --git a/tracking/docker-build-context/byte_track/yolox/core/launch.py b/tracking/docker-build-context/byte_track/yolox/core/launch.py new file mode 100644 index 0000000000000000000000000000000000000000..2fd5eaa765d7da2193f16f0fc463d001f6c4d5c5 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/core/launch.py @@ -0,0 +1,219 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Code are based on +# https://github.com/facebookresearch/detectron2/blob/master/detectron2/engine/launch.py +# Copyright (c) Facebook, Inc. and its affiliates. 
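`Tracker.step` above applies the BYTE association pattern in two rounds: high-score detections are matched against all tracks with a generalized-IoU cost and `scipy.optimize.linear_sum_assignment` (pairs costing over 1.2 are rejected), then the low-score leftovers (score >= 0.2) get a second, stricter pass (cutoff 0.8) against still-active unmatched tracks. A condensed sketch of one such gated round; it uses plain IoU instead of the repo's `box_ops.generalized_box_iou` to stay dependency-free, and `gated_match` is a name introduced here:

```python
import numpy as np
from scipy.optimize import linear_sum_assignment

def iou(a, b):
    # a, b: [x1, y1, x2, y2]
    x1, y1 = max(a[0], b[0]), max(a[1], b[1])
    x2, y2 = min(a[2], b[2]), min(a[3], b[3])
    inter = max(0.0, x2 - x1) * max(0.0, y2 - y1)
    area_a = (a[2] - a[0]) * (a[3] - a[1])
    area_b = (b[2] - b[0]) * (b[3] - b[1])
    return inter / (area_a + area_b - inter + 1e-9)

def gated_match(det_boxes, track_boxes, cost_gate):
    """One Hungarian round; pairs whose cost exceeds cost_gate are rejected."""
    cost = np.array([[1.0 - iou(d, t) for t in track_boxes] for d in det_boxes])
    rows, cols = linear_sum_assignment(cost)
    matches = [(r, c) for r, c in zip(rows, cols) if cost[r, c] <= cost_gate]
    unmatched_dets = set(range(len(det_boxes))) - {r for r, _ in matches}
    unmatched_tracks = set(range(len(track_boxes))) - {c for _, c in matches}
    return matches, unmatched_dets, unmatched_tracks

dets = [[0, 0, 10, 10], [100, 100, 120, 130]]   # synthetic boxes
tracks = [[1, 1, 11, 11], [50, 50, 60, 60]]
print(gated_match(dets, tracks, cost_gate=0.8))
# -> ([(0, 0)], {1}, {1}): det 1 and track 1 fall through to the next round
```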
+# Copyright (c) Megvii, Inc. and its affiliates. + +from loguru import logger + +import torch +import torch.distributed as dist +import torch.multiprocessing as mp + +import yolox.utils.dist as comm +from yolox.utils import configure_nccl + +import os +import subprocess +import sys +import time + +__all__ = ["launch"] + + +def _find_free_port(): + """ + Find an available port of current machine / node. + """ + import socket + + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + # Binding to port 0 will cause the OS to find an available port for us + sock.bind(("", 0)) + port = sock.getsockname()[1] + sock.close() + # NOTE: there is still a chance the port could be taken by other processes. + return port + + +def launch( + main_func, + num_gpus_per_machine, + num_machines=1, + machine_rank=0, + backend="nccl", + dist_url=None, + args=(), +): + """ + Args: + main_func: a function that will be called by `main_func(*args)` + num_machines (int): the total number of machines + machine_rank (int): the rank of this machine (one per machine) + dist_url (str): url to connect to for distributed training, including protocol + e.g. "tcp://127.0.0.1:8686". + Can be set to auto to automatically select a free port on localhost + args (tuple): arguments passed to main_func + """ + world_size = num_machines * num_gpus_per_machine + if world_size > 1: + if int(os.environ.get("WORLD_SIZE", "1")) > 1: + dist_url = "{}:{}".format( + os.environ.get("MASTER_ADDR", None), + os.environ.get("MASTER_PORT", "None"), + ) + local_rank = int(os.environ.get("LOCAL_RANK", "0")) + world_size = int(os.environ.get("WORLD_SIZE", "1")) + _distributed_worker( + local_rank, + main_func, + world_size, + num_gpus_per_machine, + num_machines, + machine_rank, + backend, + dist_url, + args, + ) + exit() + launch_by_subprocess( + sys.argv, + world_size, + num_machines, + machine_rank, + num_gpus_per_machine, + dist_url, + args, + ) + else: + main_func(*args) + + +def launch_by_subprocess( + raw_argv, + world_size, + num_machines, + machine_rank, + num_gpus_per_machine, + dist_url, + args, +): + assert ( + world_size > 1 + ), "subprocess mode doesn't support single GPU, use spawn mode instead" + + if dist_url is None: + # ------------------------hack for multi-machine training -------------------- # + if num_machines > 1: + master_ip = subprocess.check_output(["hostname", "--fqdn"]).decode("utf-8") + master_ip = str(master_ip).strip() + dist_url = "tcp://{}".format(master_ip) + ip_add_file = "./" + args[1].experiment_name + "_ip_add.txt" + if machine_rank == 0: + port = _find_free_port() + with open(ip_add_file, "w") as ip_add: + ip_add.write(dist_url+'\n') + ip_add.write(str(port)) + else: + while not os.path.exists(ip_add_file): + time.sleep(0.5) + + with open(ip_add_file, "r") as ip_add: + dist_url = ip_add.readline().strip() + port = ip_add.readline() + else: + dist_url = "tcp://127.0.0.1" + port = _find_free_port() + + # set PyTorch distributed related environmental variables + current_env = os.environ.copy() + current_env["MASTER_ADDR"] = dist_url + current_env["MASTER_PORT"] = str(port) + current_env["WORLD_SIZE"] = str(world_size) + assert num_gpus_per_machine <= torch.cuda.device_count() + + if "OMP_NUM_THREADS" not in os.environ and num_gpus_per_machine > 1: + current_env["OMP_NUM_THREADS"] = str(1) + logger.info( + "\n*****************************************\n" + "Setting OMP_NUM_THREADS environment variable for each process " + "to be {} in default, to avoid your system being overloaded, " + "please further tune 
the variable for optimal performance in " + "your application as needed. \n" + "*****************************************".format( + current_env["OMP_NUM_THREADS"] + ) + ) + + processes = [] + for local_rank in range(0, num_gpus_per_machine): + # each process's rank + dist_rank = machine_rank * num_gpus_per_machine + local_rank + current_env["RANK"] = str(dist_rank) + current_env["LOCAL_RANK"] = str(local_rank) + + # spawn the processes + cmd = ["python3", *raw_argv] + + process = subprocess.Popen(cmd, env=current_env) + processes.append(process) + + for process in processes: + process.wait() + if process.returncode != 0: + raise subprocess.CalledProcessError(returncode=process.returncode, cmd=cmd) + + +def _distributed_worker( + local_rank, + main_func, + world_size, + num_gpus_per_machine, + num_machines, + machine_rank, + backend, + dist_url, + args, +): + assert ( + torch.cuda.is_available() + ), "cuda is not available. Please check your installation." + configure_nccl() + global_rank = machine_rank * num_gpus_per_machine + local_rank + logger.info("Rank {} initialization finished.".format(global_rank)) + try: + dist.init_process_group( + backend=backend, + init_method=dist_url, + world_size=world_size, + rank=global_rank, + ) + except Exception: + logger.error("Process group URL: {}".format(dist_url)) + raise + # synchronize is needed here to prevent a possible timeout after calling init_process_group + # See: https://github.com/facebookresearch/maskrcnn-benchmark/issues/172 + comm.synchronize() + + if global_rank == 0 and os.path.exists( + "./" + args[1].experiment_name + "_ip_add.txt" + ): + os.remove("./" + args[1].experiment_name + "_ip_add.txt") + + assert num_gpus_per_machine <= torch.cuda.device_count() + torch.cuda.set_device(local_rank) + + args[1].local_rank = local_rank + args[1].num_machines = num_machines + + # Setup the local process group (which contains ranks within the same machine) + # assert comm._LOCAL_PROCESS_GROUP is None + # num_machines = world_size // num_gpus_per_machine + # for i in range(num_machines): + # ranks_on_i = list(range(i * num_gpus_per_machine, (i + 1) * num_gpus_per_machine)) + # pg = dist.new_group(ranks_on_i) + # if i == machine_rank: + # comm._LOCAL_PROCESS_GROUP = pg + + main_func(*args) diff --git a/tracking/docker-build-context/byte_track/yolox/core/trainer.py b/tracking/docker-build-context/byte_track/yolox/core/trainer.py new file mode 100644 index 0000000000000000000000000000000000000000..c6a85e6c543bc7b4af5847f78c3194f17a873ce4 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/core/trainer.py @@ -0,0 +1,327 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) Megvii, Inc. and its affiliates. + +from loguru import logger + +import torch + +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.utils.tensorboard import SummaryWriter + +from yolox.data import DataPrefetcher +from yolox.utils import ( + MeterBuffer, + ModelEMA, + all_reduce_norm, + get_model_info, + get_rank, + get_world_size, + gpu_mem_usage, + load_ckpt, + occupy_mem, + save_checkpoint, + setup_logger, + synchronize +) + +import datetime +import os +import time + + +class Trainer: + def __init__(self, exp, args): + # init function only defines some basic attr, other attrs like model, optimizer are built in + # before_train methods. 
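`launch()` / `launch_by_subprocess()` above coordinate workers purely through environment variables: the parent picks a master address and a free port, respawns `python3 sys.argv` once per GPU with `RANK`, `LOCAL_RANK`, and `WORLD_SIZE` set, and each child re-enters `launch()` and drops into `_distributed_worker`. A minimal sketch of the worker-side half; `worker_init` is a name introduced here, and `env://` initialization stands in for the explicit `tcp://` URL the real code builds:

```python
# Sketch of the per-worker setup, assuming the parent exported the
# MASTER_ADDR / MASTER_PORT / RANK / LOCAL_RANK / WORLD_SIZE variables.
import os
import torch
import torch.distributed as dist

def worker_init():
    rank = int(os.environ.get("RANK", "0"))
    local_rank = int(os.environ.get("LOCAL_RANK", "0"))
    world_size = int(os.environ.get("WORLD_SIZE", "1"))

    if world_size > 1:
        # env:// reads MASTER_ADDR / MASTER_PORT from the environment.
        dist.init_process_group(backend="nccl", init_method="env://",
                                world_size=world_size, rank=rank)
    torch.cuda.set_device(local_rank)  # one GPU per local rank
    return rank, local_rank, world_size
```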
+ self.exp = exp + self.args = args + + # training related attr + self.max_epoch = exp.max_epoch + self.amp_training = args.fp16 + self.scaler = torch.cuda.amp.GradScaler(enabled=args.fp16) + self.is_distributed = get_world_size() > 1 + self.rank = get_rank() + self.local_rank = args.local_rank + self.device = "cuda:{}".format(self.local_rank) + self.use_model_ema = exp.ema + + # data/dataloader related attr + self.data_type = torch.float16 if args.fp16 else torch.float32 + self.input_size = exp.input_size + self.best_ap = 0 + + # metric record + self.meter = MeterBuffer(window_size=exp.print_interval) + self.file_name = os.path.join(exp.output_dir, args.experiment_name) + + if self.rank == 0: + os.makedirs(self.file_name, exist_ok=True) + + setup_logger( + self.file_name, + distributed_rank=self.rank, + filename="train_log.txt", + mode="a", + ) + + def train(self): + self.before_train() + try: + self.train_in_epoch() + except Exception: + raise + finally: + self.after_train() + + def train_in_epoch(self): + for self.epoch in range(self.start_epoch, self.max_epoch): + self.before_epoch() + self.train_in_iter() + self.after_epoch() + + def train_in_iter(self): + for self.iter in range(self.max_iter): + self.before_iter() + self.train_one_iter() + self.after_iter() + + def train_one_iter(self): + iter_start_time = time.time() + + inps, targets = self.prefetcher.next() + inps = inps.to(self.data_type) + targets = targets.to(self.data_type) + targets.requires_grad = False + data_end_time = time.time() + + with torch.cuda.amp.autocast(enabled=self.amp_training): + outputs = self.model(inps, targets) + loss = outputs["total_loss"] + + self.optimizer.zero_grad() + self.scaler.scale(loss).backward() + self.scaler.step(self.optimizer) + self.scaler.update() + + if self.use_model_ema: + self.ema_model.update(self.model) + + lr = self.lr_scheduler.update_lr(self.progress_in_iter + 1) + for param_group in self.optimizer.param_groups: + param_group["lr"] = lr + + iter_end_time = time.time() + self.meter.update( + iter_time=iter_end_time - iter_start_time, + data_time=data_end_time - iter_start_time, + lr=lr, + **outputs, + ) + + def before_train(self): + logger.info("args: {}".format(self.args)) + logger.info("exp value:\n{}".format(self.exp)) + + # model related init + torch.cuda.set_device(self.local_rank) + model = self.exp.get_model() + logger.info( + "Model Summary: {}".format(get_model_info(model, self.exp.test_size)) + ) + model.to(self.device) + + # solver related init + self.optimizer = self.exp.get_optimizer(self.args.batch_size) + + # value of epoch will be set in `resume_train` + model = self.resume_train(model) + + # data related init + self.no_aug = self.start_epoch >= self.max_epoch - self.exp.no_aug_epochs + self.train_loader = self.exp.get_data_loader( + batch_size=self.args.batch_size, + is_distributed=self.is_distributed, + no_aug=self.no_aug, + ) + logger.info("init prefetcher, this might take one minute or less...") + self.prefetcher = DataPrefetcher(self.train_loader) + # max_iter means iters per epoch + self.max_iter = len(self.train_loader) + + self.lr_scheduler = self.exp.get_lr_scheduler( + self.exp.basic_lr_per_img * self.args.batch_size, self.max_iter + ) + if self.args.occupy: + occupy_mem(self.local_rank) + + if self.is_distributed: + model = DDP(model, device_ids=[self.local_rank], broadcast_buffers=False) + + if self.use_model_ema: + self.ema_model = ModelEMA(model, 0.9998) + self.ema_model.updates = self.max_iter * self.start_epoch + + self.model = model + 
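`train_one_iter` above follows the standard mixed-precision pattern: forward under `autocast`, backward through a `GradScaler`, then `step`/`update`. A self-contained toy version of that loop body (assumes a CUDA device; the model and data are stand-ins):

```python
import torch

model = torch.nn.Linear(8, 1).cuda()
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
scaler = torch.cuda.amp.GradScaler(enabled=True)

inps = torch.randn(4, 8, device="cuda")
targets = torch.randn(4, 1, device="cuda")

with torch.cuda.amp.autocast(enabled=True):
    loss = torch.nn.functional.mse_loss(model(inps), targets)

optimizer.zero_grad()
scaler.scale(loss).backward()  # scale the loss to avoid fp16 underflow
scaler.step(optimizer)         # unscales grads; skips the step on inf/nan
scaler.update()                # adapts the scale factor for the next iter
```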
self.model.train() + + self.evaluator = self.exp.get_evaluator( + batch_size=self.args.batch_size, is_distributed=self.is_distributed + ) + # Tensorboard logger + if self.rank == 0: + self.tblogger = SummaryWriter(self.file_name) + + logger.info("Training start...") + #logger.info("\n{}".format(model)) + + def after_train(self): + logger.info( + "Training of experiment is done and the best AP is {:.2f}".format( + self.best_ap * 100 + ) + ) + + def before_epoch(self): + logger.info("---> start train epoch{}".format(self.epoch + 1)) + + if self.epoch + 1 == self.max_epoch - self.exp.no_aug_epochs or self.no_aug: + + logger.info("--->No mosaic aug now!") + self.train_loader.close_mosaic() + logger.info("--->Add additional L1 loss now!") + if self.is_distributed: + self.model.module.head.use_l1 = True + else: + self.model.head.use_l1 = True + + self.exp.eval_interval = 1 + if not self.no_aug: + self.save_ckpt(ckpt_name="last_mosaic_epoch") + + def after_epoch(self): + if self.use_model_ema: + self.ema_model.update_attr(self.model) + + self.save_ckpt(ckpt_name="latest") + + if (self.epoch + 1) % self.exp.eval_interval == 0: + all_reduce_norm(self.model) + self.evaluate_and_save_model() + + def before_iter(self): + pass + + def after_iter(self): + """ + `after_iter` contains two parts of logic: + * log information + * reset setting of resize + """ + # log needed information + if (self.iter + 1) % self.exp.print_interval == 0: + # TODO check ETA logic + left_iters = self.max_iter * self.max_epoch - (self.progress_in_iter + 1) + eta_seconds = self.meter["iter_time"].global_avg * left_iters + eta_str = "ETA: {}".format(datetime.timedelta(seconds=int(eta_seconds))) + + progress_str = "epoch: {}/{}, iter: {}/{}".format( + self.epoch + 1, self.max_epoch, self.iter + 1, self.max_iter + ) + loss_meter = self.meter.get_filtered_meter("loss") + loss_str = ", ".join( + ["{}: {:.3f}".format(k, v.latest) for k, v in loss_meter.items()] + ) + + time_meter = self.meter.get_filtered_meter("time") + time_str = ", ".join( + ["{}: {:.3f}s".format(k, v.avg) for k, v in time_meter.items()] + ) + + logger.info( + "{}, mem: {:.0f}Mb, {}, {}, lr: {:.3e}".format( + progress_str, + gpu_mem_usage(), + time_str, + loss_str, + self.meter["lr"].latest, + ) + + (", size: {:d}, {}".format(self.input_size[0], eta_str)) + ) + self.meter.clear_meters() + + # random resizing + if self.exp.random_size is not None and (self.progress_in_iter + 1) % 10 == 0: + self.input_size = self.exp.random_resize( + self.train_loader, self.epoch, self.rank, self.is_distributed + ) + + @property + def progress_in_iter(self): + return self.epoch * self.max_iter + self.iter + + def resume_train(self, model): + if self.args.resume: + logger.info("resume training") + if self.args.ckpt is None: + ckpt_file = os.path.join(self.file_name, "latest" + "_ckpt.pth.tar") + else: + ckpt_file = self.args.ckpt + + ckpt = torch.load(ckpt_file, map_location=self.device) + # resume the model/optimizer state dict + model.load_state_dict(ckpt["model"]) + self.optimizer.load_state_dict(ckpt["optimizer"]) + start_epoch = ( + self.args.start_epoch - 1 + if self.args.start_epoch is not None + else ckpt["start_epoch"] + ) + self.start_epoch = start_epoch + logger.info( + "loaded checkpoint '{}' (epoch {})".format( + self.args.resume, self.start_epoch + ) + ) # noqa + else: + if self.args.ckpt is not None: + logger.info("loading checkpoint for fine tuning") + ckpt_file = self.args.ckpt + ckpt = torch.load(ckpt_file, map_location=self.device)["model"] + model = 
load_ckpt(model, ckpt) + self.start_epoch = 0 + + return model + + def evaluate_and_save_model(self): + evalmodel = self.ema_model.ema if self.use_model_ema else self.model + ap50_95, ap50, summary = self.exp.eval( + evalmodel, self.evaluator, self.is_distributed + ) + self.model.train() + if self.rank == 0: + self.tblogger.add_scalar("val/COCOAP50", ap50, self.epoch + 1) + self.tblogger.add_scalar("val/COCOAP50_95", ap50_95, self.epoch + 1) + logger.info("\n" + summary) + synchronize() + + #self.best_ap = max(self.best_ap, ap50_95) + self.save_ckpt("last_epoch", ap50 > self.best_ap) + self.best_ap = max(self.best_ap, ap50) + + def save_ckpt(self, ckpt_name, update_best_ckpt=False): + if self.rank == 0: + save_model = self.ema_model.ema if self.use_model_ema else self.model + logger.info("Save weights to {}".format(self.file_name)) + ckpt_state = { + "start_epoch": self.epoch + 1, + "model": save_model.state_dict(), + "optimizer": self.optimizer.state_dict(), + } + save_checkpoint( + ckpt_state, + update_best_ckpt, + self.file_name, + ckpt_name, + ) diff --git a/tracking/docker-build-context/byte_track/yolox/data/__init__.py b/tracking/docker-build-context/byte_track/yolox/data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..258effe73ee63b9d4296ea1180379ec69c1cf550 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/data/__init__.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) Megvii, Inc. and its affiliates. + +from .data_augment import TrainTransform, ValTransform +from .data_prefetcher import DataPrefetcher +from .dataloading import DataLoader, get_yolox_datadir +from .datasets import * +from .samplers import InfiniteSampler, YoloBatchSampler diff --git a/tracking/docker-build-context/byte_track/yolox/data/data_augment.py b/tracking/docker-build-context/byte_track/yolox/data/data_augment.py new file mode 100644 index 0000000000000000000000000000000000000000..99fb30a284eeb5851e4c776aafd61b44d485196b --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/data/data_augment.py @@ -0,0 +1,299 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) Megvii, Inc. and its affiliates. +""" +Data augmentation functionality. Passed as callable transformations to +Dataset classes. 
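The checkpoint that `save_ckpt` writes and `resume_train` reads back (above) is a plain dict with `start_epoch`, `model`, and `optimizer` keys. A minimal round-trip with a toy model; note the actual file naming and best-checkpoint copying go through the `save_checkpoint` util, which this sketch bypasses:

```python
import torch

model = torch.nn.Linear(4, 2)
optimizer = torch.optim.SGD(model.parameters(), lr=0.1)

# Same keys that save_ckpt() writes and resume_train() reads back.
ckpt_state = {
    "start_epoch": 7,
    "model": model.state_dict(),
    "optimizer": optimizer.state_dict(),
}
torch.save(ckpt_state, "latest_ckpt.pth.tar")

ckpt = torch.load("latest_ckpt.pth.tar", map_location="cpu")
model.load_state_dict(ckpt["model"])
optimizer.load_state_dict(ckpt["optimizer"])
start_epoch = ckpt["start_epoch"]   # training resumes from here
```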
+ +The data augmentation procedures were interpreted from @weiliu89's SSD paper +http://arxiv.org/abs/1512.02325 +""" + +import cv2 +import numpy as np + +import torch + +from yolox.utils import xyxy2cxcywh + +import math +import random + + +def augment_hsv(img, hgain=0.015, sgain=0.7, vgain=0.4): + r = np.random.uniform(-1, 1, 3) * [hgain, sgain, vgain] + 1 # random gains + hue, sat, val = cv2.split(cv2.cvtColor(img, cv2.COLOR_BGR2HSV)) + dtype = img.dtype # uint8 + + x = np.arange(0, 256, dtype=np.int16) + lut_hue = ((x * r[0]) % 180).astype(dtype) + lut_sat = np.clip(x * r[1], 0, 255).astype(dtype) + lut_val = np.clip(x * r[2], 0, 255).astype(dtype) + + img_hsv = cv2.merge( + (cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat), cv2.LUT(val, lut_val)) + ).astype(dtype) + cv2.cvtColor(img_hsv, cv2.COLOR_HSV2BGR, dst=img) # no return needed + + +def box_candidates(box1, box2, wh_thr=2, ar_thr=20, area_thr=0.2): + # box1(4,n), box2(4,n) + # Compute candidate boxes which include follwing 5 things: + # box1 before augment, box2 after augment, wh_thr (pixels), aspect_ratio_thr, area_ratio + w1, h1 = box1[2] - box1[0], box1[3] - box1[1] + w2, h2 = box2[2] - box2[0], box2[3] - box2[1] + ar = np.maximum(w2 / (h2 + 1e-16), h2 / (w2 + 1e-16)) # aspect ratio + return ( + (w2 > wh_thr) + & (h2 > wh_thr) + & (w2 * h2 / (w1 * h1 + 1e-16) > area_thr) + & (ar < ar_thr) + ) # candidates + + +def random_perspective( + img, + targets=(), + degrees=10, + translate=0.1, + scale=0.1, + shear=10, + perspective=0.0, + border=(0, 0), +): + # targets = [cls, xyxy] + height = img.shape[0] + border[0] * 2 # shape(h,w,c) + width = img.shape[1] + border[1] * 2 + + # Center + C = np.eye(3) + C[0, 2] = -img.shape[1] / 2 # x translation (pixels) + C[1, 2] = -img.shape[0] / 2 # y translation (pixels) + + # Rotation and Scale + R = np.eye(3) + a = random.uniform(-degrees, degrees) + # a += random.choice([-180, -90, 0, 90]) # add 90deg rotations to small rotations + s = random.uniform(scale[0], scale[1]) + # s = 2 ** random.uniform(-scale, scale) + R[:2] = cv2.getRotationMatrix2D(angle=a, center=(0, 0), scale=s) + + # Shear + S = np.eye(3) + S[0, 1] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # x shear (deg) + S[1, 0] = math.tan(random.uniform(-shear, shear) * math.pi / 180) # y shear (deg) + + # Translation + T = np.eye(3) + T[0, 2] = ( + random.uniform(0.5 - translate, 0.5 + translate) * width + ) # x translation (pixels) + T[1, 2] = ( + random.uniform(0.5 - translate, 0.5 + translate) * height + ) # y translation (pixels) + + # Combined rotation matrix + M = T @ S @ R @ C # order of operations (right to left) is IMPORTANT + + ########################### + # For Aug out of Mosaic + # s = 1. 
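`random_perspective` above builds a single 3x3 affine as `M = T @ S @ R @ C` (rightmost factor applied first) and pushes each box's four corners through it before re-fitting an axis-aligned box. A standalone sketch with fixed numbers in place of the random draws:

```python
import math
import cv2
import numpy as np

width = height = 640
C = np.eye(3); C[0, 2], C[1, 2] = -320, -320             # move origin to image center
R = np.eye(3); R[:2] = cv2.getRotationMatrix2D(angle=5, center=(0, 0), scale=1.1)
S = np.eye(3); S[0, 1] = math.tan(math.radians(2))       # x shear
T = np.eye(3); T[0, 2], T[1, 2] = 0.5 * width, 0.5 * height
M = T @ S @ R @ C                                        # combined transform

box = np.array([100, 100, 200, 180])                     # x1, y1, x2, y2
corners = np.ones((4, 3))
corners[:, :2] = box[[0, 1, 2, 3, 0, 3, 2, 1]].reshape(4, 2)  # all 4 corners
warped = (corners @ M.T)[:, :2]                          # affine: drop the w row
x, y = warped[:, 0], warped[:, 1]
print([x.min(), y.min(), x.max(), y.max()])              # new axis-aligned box
```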
+ # M = np.eye(3) + ########################### + + if (border[0] != 0) or (border[1] != 0) or (M != np.eye(3)).any(): # image changed + if perspective: + img = cv2.warpPerspective( + img, M, dsize=(width, height), borderValue=(114, 114, 114) + ) + else: # affine + img = cv2.warpAffine( + img, M[:2], dsize=(width, height), borderValue=(114, 114, 114) + ) + + # Transform label coordinates + n = len(targets) + if n: + # warp points + xy = np.ones((n * 4, 3)) + xy[:, :2] = targets[:, [0, 1, 2, 3, 0, 3, 2, 1]].reshape( + n * 4, 2 + ) # x1y1, x2y2, x1y2, x2y1 + xy = xy @ M.T # transform + if perspective: + xy = (xy[:, :2] / xy[:, 2:3]).reshape(n, 8) # rescale + else: # affine + xy = xy[:, :2].reshape(n, 8) + + # create new boxes + x = xy[:, [0, 2, 4, 6]] + y = xy[:, [1, 3, 5, 7]] + xy = np.concatenate((x.min(1), y.min(1), x.max(1), y.max(1))).reshape(4, n).T + + # clip boxes + #xy[:, [0, 2]] = xy[:, [0, 2]].clip(0, width) + #xy[:, [1, 3]] = xy[:, [1, 3]].clip(0, height) + + # filter candidates + i = box_candidates(box1=targets[:, :4].T * s, box2=xy.T) + targets = targets[i] + targets[:, :4] = xy[i] + + targets = targets[targets[:, 0] < width] + targets = targets[targets[:, 2] > 0] + targets = targets[targets[:, 1] < height] + targets = targets[targets[:, 3] > 0] + + return img, targets + + +def _distort(image): + def _convert(image, alpha=1, beta=0): + tmp = image.astype(float) * alpha + beta + tmp[tmp < 0] = 0 + tmp[tmp > 255] = 255 + image[:] = tmp + + image = image.copy() + + if random.randrange(2): + _convert(image, beta=random.uniform(-32, 32)) + + if random.randrange(2): + _convert(image, alpha=random.uniform(0.5, 1.5)) + + image = cv2.cvtColor(image, cv2.COLOR_BGR2HSV) + + if random.randrange(2): + tmp = image[:, :, 0].astype(int) + random.randint(-18, 18) + tmp %= 180 + image[:, :, 0] = tmp + + if random.randrange(2): + _convert(image[:, :, 1], alpha=random.uniform(0.5, 1.5)) + + image = cv2.cvtColor(image, cv2.COLOR_HSV2BGR) + + return image + + +def _mirror(image, boxes): + _, width, _ = image.shape + if random.randrange(2): + image = image[:, ::-1] + boxes = boxes.copy() + boxes[:, 0::2] = width - boxes[:, 2::-2] + return image, boxes + + +def preproc(image, input_size, mean, std, swap=(2, 0, 1)): + if len(image.shape) == 3: + padded_img = np.ones((input_size[0], input_size[1], 3)) * 114.0 + else: + padded_img = np.ones(input_size) * 114.0 + img = np.array(image) + r = min(input_size[0] / img.shape[0], input_size[1] / img.shape[1]) + resized_img = cv2.resize( + img, + (int(img.shape[1] * r), int(img.shape[0] * r)), + interpolation=cv2.INTER_LINEAR, + ).astype(np.float32) + padded_img[: int(img.shape[0] * r), : int(img.shape[1] * r)] = resized_img + + padded_img = padded_img[:, :, ::-1] + padded_img /= 255.0 + if mean is not None: + padded_img -= mean + if std is not None: + padded_img /= std + padded_img = padded_img.transpose(swap) + padded_img = np.ascontiguousarray(padded_img, dtype=np.float32) + return padded_img, r + + +class TrainTransform: + def __init__(self, p=0.5, rgb_means=None, std=None, max_labels=100): + self.means = rgb_means + self.std = std + self.p = p + self.max_labels = max_labels + + def __call__(self, image, targets, input_dim): + boxes = targets[:, :4].copy() + labels = targets[:, 4].copy() + ids = targets[:, 5].copy() + if len(boxes) == 0: + targets = np.zeros((self.max_labels, 6), dtype=np.float32) + image, r_o = preproc(image, input_dim, self.means, self.std) + image = np.ascontiguousarray(image, dtype=np.float32) + return image, targets + + image_o = 
image.copy() + targets_o = targets.copy() + height_o, width_o, _ = image_o.shape + boxes_o = targets_o[:, :4] + labels_o = targets_o[:, 4] + ids_o = targets_o[:, 5] + # bbox_o: [xyxy] to [c_x,c_y,w,h] + boxes_o = xyxy2cxcywh(boxes_o) + + image_t = _distort(image) + image_t, boxes = _mirror(image_t, boxes) + height, width, _ = image_t.shape + image_t, r_ = preproc(image_t, input_dim, self.means, self.std) + # boxes [xyxy] 2 [cx,cy,w,h] + boxes = xyxy2cxcywh(boxes) + boxes *= r_ + + mask_b = np.minimum(boxes[:, 2], boxes[:, 3]) > 1 + boxes_t = boxes[mask_b] + labels_t = labels[mask_b] + ids_t = ids[mask_b] + + if len(boxes_t) == 0: + image_t, r_o = preproc(image_o, input_dim, self.means, self.std) + boxes_o *= r_o + boxes_t = boxes_o + labels_t = labels_o + ids_t = ids_o + + labels_t = np.expand_dims(labels_t, 1) + ids_t = np.expand_dims(ids_t, 1) + + targets_t = np.hstack((labels_t, boxes_t, ids_t)) + padded_labels = np.zeros((self.max_labels, 6)) + padded_labels[range(len(targets_t))[: self.max_labels]] = targets_t[ + : self.max_labels + ] + padded_labels = np.ascontiguousarray(padded_labels, dtype=np.float32) + image_t = np.ascontiguousarray(image_t, dtype=np.float32) + return image_t, padded_labels + + +class ValTransform: + """ + Defines the transformations that should be applied to test PIL image + for input into the network + + dimension -> tensorize -> color adj + + Arguments: + resize (int): input dimension to SSD + rgb_means ((int,int,int)): average RGB of the dataset + (104,117,123) + swap ((int,int,int)): final order of channels + + Returns: + transform (transform) : callable transform to be applied to test/val + data + """ + + def __init__(self, rgb_means=None, std=None, swap=(2, 0, 1)): + self.means = rgb_means + self.swap = swap + self.std = std + + # assume input is cv2 img for now + def __call__(self, img, res, input_size): + img, _ = preproc(img, input_size, self.means, self.std, self.swap) + return img, np.zeros((1, 5)) diff --git a/tracking/docker-build-context/byte_track/yolox/data/data_prefetcher.py b/tracking/docker-build-context/byte_track/yolox/data/data_prefetcher.py new file mode 100644 index 0000000000000000000000000000000000000000..0f5d2b5eeec2b552f381239a16117a5c98255041 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/data/data_prefetcher.py @@ -0,0 +1,77 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) Megvii, Inc. and its affiliates. + +import torch +import torch.distributed as dist + +from yolox.utils import synchronize + +import random + + +class DataPrefetcher: + """ + DataPrefetcher is inspired by code of following file: + https://github.com/NVIDIA/apex/blob/master/examples/imagenet/main_amp.py + It could speedup your pytorch dataloader. For more information, please check + https://github.com/NVIDIA/apex/issues/304#issuecomment-493562789. 
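`preproc` above is a letterbox resize: scale by `r = min(H_in/H, W_in/W)`, paste into a 114-gray canvas, flip BGR to RGB, normalize, and move channels first, returning `r` so the labels can be rescaled the same way. A condensed restatement (`letterbox` is a name introduced here; the grayscale branch and the optional mean/std normalization are omitted):

```python
import cv2
import numpy as np

def letterbox(image, input_size=(608, 1088)):
    padded = np.ones((input_size[0], input_size[1], 3), dtype=np.float32) * 114.0
    r = min(input_size[0] / image.shape[0], input_size[1] / image.shape[1])
    resized = cv2.resize(image, (int(image.shape[1] * r), int(image.shape[0] * r)),
                         interpolation=cv2.INTER_LINEAR).astype(np.float32)
    padded[: resized.shape[0], : resized.shape[1]] = resized  # top-left paste
    padded = padded[:, :, ::-1] / 255.0                       # BGR -> RGB, [0, 1]
    return np.ascontiguousarray(padded.transpose(2, 0, 1)), r # HWC -> CHW

img = np.random.randint(0, 255, (480, 640, 3), dtype=np.uint8)  # dummy frame
chw, ratio = letterbox(img)
print(chw.shape, round(ratio, 3))  # (3, 608, 1088) 1.267
```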
+ """ + + def __init__(self, loader): + self.loader = iter(loader) + self.stream = torch.cuda.Stream() + self.input_cuda = self._input_cuda_for_image + self.record_stream = DataPrefetcher._record_stream_for_image + self.preload() + + def preload(self): + try: + self.next_input, self.next_target, _, _ = next(self.loader) + except StopIteration: + self.next_input = None + self.next_target = None + return + + with torch.cuda.stream(self.stream): + self.input_cuda() + self.next_target = self.next_target.cuda(non_blocking=True) + + def next(self): + torch.cuda.current_stream().wait_stream(self.stream) + input = self.next_input + target = self.next_target + if input is not None: + self.record_stream(input) + if target is not None: + target.record_stream(torch.cuda.current_stream()) + self.preload() + return input, target + + def _input_cuda_for_image(self): + self.next_input = self.next_input.cuda(non_blocking=True) + + @staticmethod + def _record_stream_for_image(input): + input.record_stream(torch.cuda.current_stream()) + + +def random_resize(data_loader, exp, epoch, rank, is_distributed): + tensor = torch.LongTensor(1).cuda() + if is_distributed: + synchronize() + + if rank == 0: + if epoch > exp.max_epoch - 10: + size = exp.input_size + else: + size = random.randint(*exp.random_size) + size = int(32 * size) + tensor.fill_(size) + + if is_distributed: + synchronize() + dist.broadcast(tensor, 0) + + input_size = data_loader.change_input_dim(multiple=tensor.item(), random_range=None) + return input_size diff --git a/tracking/docker-build-context/byte_track/yolox/data/dataloading.py b/tracking/docker-build-context/byte_track/yolox/data/dataloading.py new file mode 100644 index 0000000000000000000000000000000000000000..2756f2fc8b77ea6d02f2b0dd03ff99de08efba95 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/data/dataloading.py @@ -0,0 +1,178 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) Megvii, Inc. and its affiliates. + +import torch +from torch.utils.data.dataloader import DataLoader as torchDataLoader +from torch.utils.data.dataloader import default_collate + +import os +import random + +from .samplers import YoloBatchSampler + + +def get_yolox_datadir(): + """ + get dataset dir of YOLOX. If environment variable named `YOLOX_DATADIR` is set, + this function will return value of the environment variable. Otherwise, use data + """ + yolox_datadir = os.getenv("YOLOX_DATADIR", None) + if yolox_datadir is None: + import yolox + + yolox_path = os.path.dirname(os.path.dirname(yolox.__file__)) + yolox_datadir = os.path.join(yolox_path, "datasets") + return yolox_datadir + + +class DataLoader(torchDataLoader): + """ + Lightnet dataloader that enables on the fly resizing of the images. + See :class:`torch.utils.data.DataLoader` for more information on the arguments. + Check more on the following website: + https://gitlab.com/EAVISE/lightnet/-/blob/master/lightnet/data/_dataloading.py + + Note: + This dataloader only works with :class:`lightnet.data.Dataset` based datasets. + + Example: + >>> class CustomSet(ln.data.Dataset): + ... def __len__(self): + ... return 4 + ... @ln.data.Dataset.resize_getitem + ... def __getitem__(self, index): + ... # Should return (image, anno) but here we return (input_dim,) + ... return (self.input_dim,) + >>> dl = ln.data.DataLoader( + ... CustomSet((200,200)), + ... batch_size = 2, + ... collate_fn = ln.data.list_collate # We want the data to be grouped as a list + ... 
) + >>> dl.dataset.input_dim # Default input_dim + (200, 200) + >>> for d in dl: + ... d + [[(200, 200), (200, 200)]] + [[(200, 200), (200, 200)]] + >>> dl.change_input_dim(320, random_range=None) + (320, 320) + >>> for d in dl: + ... d + [[(320, 320), (320, 320)]] + [[(320, 320), (320, 320)]] + >>> dl.change_input_dim((480, 320), random_range=None) + (480, 320) + >>> for d in dl: + ... d + [[(480, 320), (480, 320)]] + [[(480, 320), (480, 320)]] + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.__initialized = False + shuffle = False + batch_sampler = None + if len(args) > 5: + shuffle = args[2] + sampler = args[3] + batch_sampler = args[4] + elif len(args) > 4: + shuffle = args[2] + sampler = args[3] + if "batch_sampler" in kwargs: + batch_sampler = kwargs["batch_sampler"] + elif len(args) > 3: + shuffle = args[2] + if "sampler" in kwargs: + sampler = kwargs["sampler"] + if "batch_sampler" in kwargs: + batch_sampler = kwargs["batch_sampler"] + else: + if "shuffle" in kwargs: + shuffle = kwargs["shuffle"] + if "sampler" in kwargs: + sampler = kwargs["sampler"] + if "batch_sampler" in kwargs: + batch_sampler = kwargs["batch_sampler"] + + # Use custom BatchSampler + if batch_sampler is None: + if sampler is None: + if shuffle: + sampler = torch.utils.data.sampler.RandomSampler(self.dataset) + # sampler = torch.utils.data.DistributedSampler(self.dataset) + else: + sampler = torch.utils.data.sampler.SequentialSampler(self.dataset) + batch_sampler = YoloBatchSampler( + sampler, + self.batch_size, + self.drop_last, + input_dimension=self.dataset.input_dim, + ) + # batch_sampler = IterationBasedBatchSampler(batch_sampler, num_iterations = + + self.batch_sampler = batch_sampler + + self.__initialized = True + + def close_mosaic(self): + self.batch_sampler.mosaic = False + + def change_input_dim(self, multiple=32, random_range=(10, 19)): + """This function will compute a new size and update it on the next mini_batch. + + Args: + multiple (int or tuple, optional): values to multiply the randomly generated range by. + Default **32** + random_range (tuple, optional): This (min, max) tuple sets the range + for the randomisation; Default **(10, 19)** + + Return: + tuple: width, height tuple with new dimension + + Note: + The new size is generated as follows: |br| + First we compute a random integer inside ``[random_range]``. + We then multiply that number with the ``multiple`` argument, + which gives our final new input size. |br| + If ``multiple`` is an integer we generate a square size. If you give a tuple + of **(width, height)**, the size is computed + as :math:`rng * multiple[0], rng * multiple[1]`. + + Note: + You can set the ``random_range`` argument to **None** to set + an exact size of multiply. |br| + See the example above for how this works. + """ + if random_range is None: + size = 1 + else: + size = random.randint(*random_range) + + if isinstance(multiple, int): + size = (size * multiple, size * multiple) + else: + size = (size * multiple[0], size * multiple[1]) + + self.batch_sampler.new_input_dim = size + + return size + + +def list_collate(batch): + """ + Function that collates lists or tuples together into one list (of lists/tuples). + Use this as the collate function in a Dataloader, if you want to have a list of + items as an output, as opposed to tensors (eg. Brambox.boxes). 
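`change_input_dim` above reduces to a small computation: draw an integer from `random_range`, multiply by `multiple` (per-axis when a tuple), or use `multiple` exactly when `random_range` is None. Restated as a pure function for clarity (`next_input_dim` is a name introduced here):

```python
import random

def next_input_dim(multiple=32, random_range=(10, 19), seed=0):
    rng = random.Random(seed)  # seeded only so the sketch is reproducible
    size = 1 if random_range is None else rng.randint(*random_range)
    if isinstance(multiple, int):
        return (size * multiple, size * multiple)        # square size
    return (size * multiple[0], size * multiple[1])      # (w, h) multiples

print(next_input_dim())                                  # e.g. (416, 416) for a draw of 13
print(next_input_dim(multiple=(480, 320), random_range=None))  # exact size: (480, 320)
```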
+ """ + items = list(zip(*batch)) + + for i in range(len(items)): + if isinstance(items[i][0], (list, tuple)): + items[i] = list(items[i]) + else: + items[i] = default_collate(items[i]) + + return items diff --git a/tracking/docker-build-context/byte_track/yolox/data/datasets/__init__.py b/tracking/docker-build-context/byte_track/yolox/data/datasets/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..61065a88874f8da6a92542801114ca9a5afe8eac --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/data/datasets/__init__.py @@ -0,0 +1,7 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) Megvii, Inc. and its affiliates. + +from .datasets_wrapper import ConcatDataset, Dataset, MixConcatDataset +from .mosaicdetection import MosaicDetection +from .mot import MOTDataset diff --git a/tracking/docker-build-context/byte_track/yolox/data/datasets/datasets_wrapper.py b/tracking/docker-build-context/byte_track/yolox/data/datasets/datasets_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..a262e6a6755e7fa69132d3f59fbe20b1bb17a6cf --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/data/datasets/datasets_wrapper.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) Megvii, Inc. and its affiliates. + +from torch.utils.data.dataset import ConcatDataset as torchConcatDataset +from torch.utils.data.dataset import Dataset as torchDataset + +import bisect +from functools import wraps + + +class ConcatDataset(torchConcatDataset): + def __init__(self, datasets): + super(ConcatDataset, self).__init__(datasets) + if hasattr(self.datasets[0], "input_dim"): + self._input_dim = self.datasets[0].input_dim + self.input_dim = self.datasets[0].input_dim + + def pull_item(self, idx): + if idx < 0: + if -idx > len(self): + raise ValueError( + "absolute value of index should not exceed dataset length" + ) + idx = len(self) + idx + dataset_idx = bisect.bisect_right(self.cumulative_sizes, idx) + if dataset_idx == 0: + sample_idx = idx + else: + sample_idx = idx - self.cumulative_sizes[dataset_idx - 1] + return self.datasets[dataset_idx].pull_item(sample_idx) + + +class MixConcatDataset(torchConcatDataset): + def __init__(self, datasets): + super(MixConcatDataset, self).__init__(datasets) + if hasattr(self.datasets[0], "input_dim"): + self._input_dim = self.datasets[0].input_dim + self.input_dim = self.datasets[0].input_dim + + def __getitem__(self, index): + + if not isinstance(index, int): + idx = index[1] + if idx < 0: + if -idx > len(self): + raise ValueError( + "absolute value of index should not exceed dataset length" + ) + idx = len(self) + idx + dataset_idx = bisect.bisect_right(self.cumulative_sizes, idx) + if dataset_idx == 0: + sample_idx = idx + else: + sample_idx = idx - self.cumulative_sizes[dataset_idx - 1] + if not isinstance(index, int): + index = (index[0], sample_idx, index[2]) + + return self.datasets[dataset_idx][index] + + +class Dataset(torchDataset): + """ This class is a subclass of the base :class:`torch.utils.data.Dataset`, + that enables on the fly resizing of the ``input_dim``. + + Args: + input_dimension (tuple): (width,height) tuple with default dimensions of the network + """ + + def __init__(self, input_dimension, mosaic=True): + super().__init__() + self.__input_dim = input_dimension[:2] + self.enable_mosaic = mosaic + + @property + def input_dim(self): + """ + Dimension that can be used by transforms to set the correct image size, etc. 
+ This allows transforms to have a single source of truth + for the input dimension of the network. + + Return: + list: Tuple containing the current width,height + """ + if hasattr(self, "_input_dim"): + return self._input_dim + return self.__input_dim + + @staticmethod + def resize_getitem(getitem_fn): + """ + Decorator method that needs to be used around the ``__getitem__`` method. |br| + This decorator enables the on the fly resizing of + the ``input_dim`` with our :class:`~lightnet.data.DataLoader` class. + + Example: + >>> class CustomSet(ln.data.Dataset): + ... def __len__(self): + ... return 10 + ... @ln.data.Dataset.resize_getitem + ... def __getitem__(self, index): + ... # Should return (image, anno) but here we return input_dim + ... return self.input_dim + >>> data = CustomSet((200,200)) + >>> data[0] + (200, 200) + >>> data[(480,320), 0] + (480, 320) + """ + + @wraps(getitem_fn) + def wrapper(self, index): + if not isinstance(index, int): + has_dim = True + self._input_dim = index[0] + self.enable_mosaic = index[2] + index = index[1] + else: + has_dim = False + + ret_val = getitem_fn(self, index) + + if has_dim: + del self._input_dim + + return ret_val + + return wrapper diff --git a/tracking/docker-build-context/byte_track/yolox/data/datasets/mosaicdetection.py b/tracking/docker-build-context/byte_track/yolox/data/datasets/mosaicdetection.py new file mode 100644 index 0000000000000000000000000000000000000000..d2bf39f7a4ebe0cdea59e01ad20dc0695d2d51fa --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/data/datasets/mosaicdetection.py @@ -0,0 +1,242 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) Megvii, Inc. and its affiliates. + +import cv2 +import numpy as np + +from yolox.utils import adjust_box_anns + +import random + +from ..data_augment import box_candidates, random_perspective, augment_hsv +from .datasets_wrapper import Dataset + + +def get_mosaic_coordinate(mosaic_image, mosaic_index, xc, yc, w, h, input_h, input_w): + # TODO update doc + # index0 to top left part of image + if mosaic_index == 0: + x1, y1, x2, y2 = max(xc - w, 0), max(yc - h, 0), xc, yc + small_coord = w - (x2 - x1), h - (y2 - y1), w, h + # index1 to top right part of image + elif mosaic_index == 1: + x1, y1, x2, y2 = xc, max(yc - h, 0), min(xc + w, input_w * 2), yc + small_coord = 0, h - (y2 - y1), min(w, x2 - x1), h + # index2 to bottom left part of image + elif mosaic_index == 2: + x1, y1, x2, y2 = max(xc - w, 0), yc, xc, min(input_h * 2, yc + h) + small_coord = w - (x2 - x1), 0, w, min(y2 - y1, h) + # index2 to bottom right part of image + elif mosaic_index == 3: + x1, y1, x2, y2 = xc, yc, min(xc + w, input_w * 2), min(input_h * 2, yc + h) # noqa + small_coord = 0, 0, min(w, x2 - x1), min(y2 - y1, h) + return (x1, y1, x2, y2), small_coord + + +class MosaicDetection(Dataset): + """Detection dataset wrapper that performs mixup for normal dataset.""" + + def __init__( + self, dataset, img_size, mosaic=True, preproc=None, + degrees=10.0, translate=0.1, scale=(0.5, 1.5), mscale=(0.5, 1.5), + shear=2.0, perspective=0.0, enable_mixup=True, *args + ): + """ + + Args: + dataset(Dataset) : Pytorch dataset object. + img_size (tuple): + mosaic (bool): enable mosaic augmentation or not. + preproc (func): + degrees (float): + translate (float): + scale (tuple): + mscale (tuple): + shear (float): + perspective (float): + enable_mixup (bool): + *args(tuple) : Additional arguments for mixup random sampler. 
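+
+        Note:
+            A typical setup (illustrative, not the only configuration) wraps a
+            dataset such as ``MOTDataset``::
+
+                MosaicDetection(dataset, img_size=(608, 1088), mosaic=True,
+                                preproc=preproc, enable_mixup=True)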
+ """ + super().__init__(img_size, mosaic=mosaic) + self._dataset = dataset + self.preproc = preproc + self.degrees = degrees + self.translate = translate + self.scale = scale + self.shear = shear + self.perspective = perspective + self.mixup_scale = mscale + self.enable_mosaic = mosaic + self.enable_mixup = enable_mixup + + def __len__(self): + return len(self._dataset) + + @Dataset.resize_getitem + def __getitem__(self, idx): + if self.enable_mosaic: + mosaic_labels = [] + input_dim = self._dataset.input_dim + input_h, input_w = input_dim[0], input_dim[1] + + # yc, xc = s, s # mosaic center x, y + yc = int(random.uniform(0.5 * input_h, 1.5 * input_h)) + xc = int(random.uniform(0.5 * input_w, 1.5 * input_w)) + + # 3 additional image indices + indices = [idx] + [random.randint(0, len(self._dataset) - 1) for _ in range(3)] + + for i_mosaic, index in enumerate(indices): + img, _labels, _, _ = self._dataset.pull_item(index) + h0, w0 = img.shape[:2] # orig hw + scale = min(1. * input_h / h0, 1. * input_w / w0) + img = cv2.resize( + img, (int(w0 * scale), int(h0 * scale)), interpolation=cv2.INTER_LINEAR + ) + # generate output mosaic image + (h, w, c) = img.shape[:3] + if i_mosaic == 0: + mosaic_img = np.full((input_h * 2, input_w * 2, c), 114, dtype=np.uint8) + + # suffix l means large image, while s means small image in mosaic aug. + (l_x1, l_y1, l_x2, l_y2), (s_x1, s_y1, s_x2, s_y2) = get_mosaic_coordinate( + mosaic_img, i_mosaic, xc, yc, w, h, input_h, input_w + ) + + mosaic_img[l_y1:l_y2, l_x1:l_x2] = img[s_y1:s_y2, s_x1:s_x2] + padw, padh = l_x1 - s_x1, l_y1 - s_y1 + + labels = _labels.copy() + # Normalized xywh to pixel xyxy format + if _labels.size > 0: + labels[:, 0] = scale * _labels[:, 0] + padw + labels[:, 1] = scale * _labels[:, 1] + padh + labels[:, 2] = scale * _labels[:, 2] + padw + labels[:, 3] = scale * _labels[:, 3] + padh + mosaic_labels.append(labels) + + if len(mosaic_labels): + mosaic_labels = np.concatenate(mosaic_labels, 0) + ''' + np.clip(mosaic_labels[:, 0], 0, 2 * input_w, out=mosaic_labels[:, 0]) + np.clip(mosaic_labels[:, 1], 0, 2 * input_h, out=mosaic_labels[:, 1]) + np.clip(mosaic_labels[:, 2], 0, 2 * input_w, out=mosaic_labels[:, 2]) + np.clip(mosaic_labels[:, 3], 0, 2 * input_h, out=mosaic_labels[:, 3]) + ''' + + mosaic_labels = mosaic_labels[mosaic_labels[:, 0] < 2 * input_w] + mosaic_labels = mosaic_labels[mosaic_labels[:, 2] > 0] + mosaic_labels = mosaic_labels[mosaic_labels[:, 1] < 2 * input_h] + mosaic_labels = mosaic_labels[mosaic_labels[:, 3] > 0] + + #augment_hsv(mosaic_img) + mosaic_img, mosaic_labels = random_perspective( + mosaic_img, + mosaic_labels, + degrees=self.degrees, + translate=self.translate, + scale=self.scale, + shear=self.shear, + perspective=self.perspective, + border=[-input_h // 2, -input_w // 2], + ) # border to remove + + # ----------------------------------------------------------------- + # CopyPaste: https://arxiv.org/abs/2012.07177 + # ----------------------------------------------------------------- + if self.enable_mixup and not len(mosaic_labels) == 0: + mosaic_img, mosaic_labels = self.mixup(mosaic_img, mosaic_labels, self.input_dim) + + mix_img, padded_labels = self.preproc(mosaic_img, mosaic_labels, self.input_dim) + img_info = (mix_img.shape[1], mix_img.shape[0]) + + return mix_img, padded_labels, img_info, np.array([idx]) + + else: + self._dataset._input_dim = self.input_dim + img, label, img_info, id_ = self._dataset.pull_item(idx) + img, label = self.preproc(img, label, self.input_dim) + return img, label, img_info, 
id_ + + def mixup(self, origin_img, origin_labels, input_dim): + jit_factor = random.uniform(*self.mixup_scale) + FLIP = random.uniform(0, 1) > 0.5 + cp_labels = [] + while len(cp_labels) == 0: + cp_index = random.randint(0, self.__len__() - 1) + cp_labels = self._dataset.load_anno(cp_index) + img, cp_labels, _, _ = self._dataset.pull_item(cp_index) + + if len(img.shape) == 3: + cp_img = np.ones((input_dim[0], input_dim[1], 3)) * 114.0 + else: + cp_img = np.ones(input_dim) * 114.0 + cp_scale_ratio = min(input_dim[0] / img.shape[0], input_dim[1] / img.shape[1]) + resized_img = cv2.resize( + img, + (int(img.shape[1] * cp_scale_ratio), int(img.shape[0] * cp_scale_ratio)), + interpolation=cv2.INTER_LINEAR, + ).astype(np.float32) + cp_img[ + : int(img.shape[0] * cp_scale_ratio), : int(img.shape[1] * cp_scale_ratio) + ] = resized_img + cp_img = cv2.resize( + cp_img, + (int(cp_img.shape[1] * jit_factor), int(cp_img.shape[0] * jit_factor)), + ) + cp_scale_ratio *= jit_factor + if FLIP: + cp_img = cp_img[:, ::-1, :] + + origin_h, origin_w = cp_img.shape[:2] + target_h, target_w = origin_img.shape[:2] + padded_img = np.zeros( + (max(origin_h, target_h), max(origin_w, target_w), 3) + ).astype(np.uint8) + padded_img[:origin_h, :origin_w] = cp_img + + x_offset, y_offset = 0, 0 + if padded_img.shape[0] > target_h: + y_offset = random.randint(0, padded_img.shape[0] - target_h - 1) + if padded_img.shape[1] > target_w: + x_offset = random.randint(0, padded_img.shape[1] - target_w - 1) + padded_cropped_img = padded_img[ + y_offset: y_offset + target_h, x_offset: x_offset + target_w + ] + + cp_bboxes_origin_np = adjust_box_anns( + cp_labels[:, :4].copy(), cp_scale_ratio, 0, 0, origin_w, origin_h + ) + if FLIP: + cp_bboxes_origin_np[:, 0::2] = ( + origin_w - cp_bboxes_origin_np[:, 0::2][:, ::-1] + ) + cp_bboxes_transformed_np = cp_bboxes_origin_np.copy() + ''' + cp_bboxes_transformed_np[:, 0::2] = np.clip( + cp_bboxes_transformed_np[:, 0::2] - x_offset, 0, target_w + ) + cp_bboxes_transformed_np[:, 1::2] = np.clip( + cp_bboxes_transformed_np[:, 1::2] - y_offset, 0, target_h + ) + ''' + cp_bboxes_transformed_np[:, 0::2] = cp_bboxes_transformed_np[:, 0::2] - x_offset + cp_bboxes_transformed_np[:, 1::2] = cp_bboxes_transformed_np[:, 1::2] - y_offset + keep_list = box_candidates(cp_bboxes_origin_np.T, cp_bboxes_transformed_np.T, 5) + + if keep_list.sum() >= 1.0: + cls_labels = cp_labels[keep_list, 4:5].copy() + id_labels = cp_labels[keep_list, 5:6].copy() + box_labels = cp_bboxes_transformed_np[keep_list] + labels = np.hstack((box_labels, cls_labels, id_labels)) + # remove outside bbox + labels = labels[labels[:, 0] < target_w] + labels = labels[labels[:, 2] > 0] + labels = labels[labels[:, 1] < target_h] + labels = labels[labels[:, 3] > 0] + origin_labels = np.vstack((origin_labels, labels)) + origin_img = origin_img.astype(np.float32) + origin_img = 0.5 * origin_img + 0.5 * padded_cropped_img.astype(np.float32) + + return origin_img, origin_labels diff --git a/tracking/docker-build-context/byte_track/yolox/data/datasets/mot.py b/tracking/docker-build-context/byte_track/yolox/data/datasets/mot.py new file mode 100644 index 0000000000000000000000000000000000000000..d52febcbbe816bdd3d1e07f2d042e115ae330442 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/data/datasets/mot.py @@ -0,0 +1,132 @@ +import cv2 +import numpy as np +from pycocotools.coco import COCO + +import os + +from ..dataloading import get_yolox_datadir +from .datasets_wrapper import Dataset + + +class MOTDataset(Dataset): + """ + 
COCO dataset class. + """ + + def __init__( + self, + data_dir=None, + json_file="train_half.json", + name="train", + img_size=(608, 1088), + preproc=None, + ): + """ + COCO dataset initialization. Annotation data are read into memory by COCO API. + Args: + data_dir (str): dataset root directory + json_file (str): COCO json file name + name (str): COCO data name (e.g. 'train2017' or 'val2017') + img_size (int): target image size after pre-processing + preproc: data augmentation strategy + """ + super().__init__(img_size) + if data_dir is None: + data_dir = os.path.join(get_yolox_datadir(), "mot") + self.data_dir = data_dir + self.json_file = json_file + + self.coco = COCO(os.path.join(self.data_dir, "annotations", self.json_file)) + self.ids = self.coco.getImgIds() + self.class_ids = sorted(self.coco.getCatIds()) + cats = self.coco.loadCats(self.coco.getCatIds()) + self._classes = tuple([c["name"] for c in cats]) + self.annotations = self._load_coco_annotations() + self.name = name + self.img_size = img_size + self.preproc = preproc + + def __len__(self): + return len(self.ids) + + def _load_coco_annotations(self): + return [self.load_anno_from_ids(_ids) for _ids in self.ids] + + def load_anno_from_ids(self, id_): + im_ann = self.coco.loadImgs(id_)[0] + width = im_ann["width"] + height = im_ann["height"] + frame_id = im_ann["frame_id"] + video_id = im_ann["video_id"] + anno_ids = self.coco.getAnnIds(imgIds=[int(id_)], iscrowd=False) + annotations = self.coco.loadAnns(anno_ids) + objs = [] + for obj in annotations: + x1 = obj["bbox"][0] + y1 = obj["bbox"][1] + x2 = x1 + obj["bbox"][2] + y2 = y1 + obj["bbox"][3] + if obj["area"] > 0 and x2 >= x1 and y2 >= y1: + obj["clean_bbox"] = [x1, y1, x2, y2] + objs.append(obj) + + num_objs = len(objs) + + res = np.zeros((num_objs, 6)) + + for ix, obj in enumerate(objs): + cls = self.class_ids.index(obj["category_id"]) + res[ix, 0:4] = obj["clean_bbox"] + res[ix, 4] = cls + res[ix, 5] = obj["track_id"] + + file_name = im_ann["file_name"] if "file_name" in im_ann else "{:012}".format(id_) + ".jpg" + img_info = (height, width, frame_id, video_id, file_name) + + del im_ann, annotations + + return (res, img_info, file_name) + + def load_anno(self, index): + return self.annotations[index][0] + + def pull_item(self, index): + id_ = self.ids[index] + + res, img_info, file_name = self.annotations[index] + # load image and preprocess + img_file = os.path.join( + self.data_dir, self.name, file_name + ) + img = cv2.imread(img_file) + assert img is not None + + return img, res.copy(), img_info, np.array([id_]) + + @Dataset.resize_getitem + def __getitem__(self, index): + """ + One image / label pair for the given index is picked up and pre-processed. + + Args: + index (int): data index + + Returns: + img (numpy.ndarray): pre-processed image + padded_labels (torch.Tensor): pre-processed label data. + The shape is :math:`[max_labels, 5]`. + each label consists of [class, xc, yc, w, h]: + class (float): class index. + xc, yc (float) : center of bbox whose values range from 0 to 1. + w, h (float) : size of bbox whose values range from 0 to 1. + info_img : tuple of h, w, nh, nw, dx, dy. + h, w (int): original shape of the image + nh, nw (int): shape of the resized image without padding + dx, dy (int): pad size + img_id (int): same as the input index. Used for evaluation. 
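+
+        Example (illustrative)::
+
+            dataset = MOTDataset(json_file="train_half.json", name="train")
+            img, target, img_info, img_id = dataset[0]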
+ """ + img, target, img_info, img_id = self.pull_item(index) + + if self.preproc is not None: + img, target = self.preproc(img, target, self.input_dim) + return img, target, img_info, img_id diff --git a/tracking/docker-build-context/byte_track/yolox/data/samplers.py b/tracking/docker-build-context/byte_track/yolox/data/samplers.py new file mode 100644 index 0000000000000000000000000000000000000000..064b13376b3c813ad6f9e5745496dd5027b65f0f --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/data/samplers.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) Megvii, Inc. and its affiliates. + +import torch +import torch.distributed as dist +from torch.utils.data.sampler import BatchSampler as torchBatchSampler +from torch.utils.data.sampler import Sampler + +import itertools +from typing import Optional + + +class YoloBatchSampler(torchBatchSampler): + """ + This batch sampler will generate mini-batches of (dim, index) tuples from another sampler. + It works just like the :class:`torch.utils.data.sampler.BatchSampler`, + but it will prepend a dimension, whilst ensuring it stays the same across one mini-batch. + """ + + def __init__(self, *args, input_dimension=None, mosaic=True, **kwargs): + super().__init__(*args, **kwargs) + self.input_dim = input_dimension + self.new_input_dim = None + self.mosaic = mosaic + + def __iter__(self): + self.__set_input_dim() + for batch in super().__iter__(): + yield [(self.input_dim, idx, self.mosaic) for idx in batch] + self.__set_input_dim() + + def __set_input_dim(self): + """ This function randomly changes the the input dimension of the dataset. """ + if self.new_input_dim is not None: + self.input_dim = (self.new_input_dim[0], self.new_input_dim[1]) + self.new_input_dim = None + + +class InfiniteSampler(Sampler): + """ + In training, we only care about the "infinite stream" of training data. + So this sampler produces an infinite stream of indices and + all workers cooperate to correctly shuffle the indices and sample different indices. + The samplers in each worker effectively produces `indices[worker_id::num_workers]` + where `indices` is an infinite stream of indices consisting of + `shuffle(range(size)) + shuffle(range(size)) + ...` (if shuffle is True) + or `range(size) + range(size) + ...` (if shuffle is False) + """ + + def __init__( + self, + size: int, + shuffle: bool = True, + seed: Optional[int] = 0, + rank=0, + world_size=1, + ): + """ + Args: + size (int): the total number of data of the underlying dataset to sample from + shuffle (bool): whether to shuffle the indices or not + seed (int): the initial seed of the shuffle. Must be the same + across all workers. If None, will use a random seed shared + among workers (require synchronization among all workers). 
+ """ + self._size = size + assert size > 0 + self._shuffle = shuffle + self._seed = int(seed) + + if dist.is_available() and dist.is_initialized(): + self._rank = dist.get_rank() + self._world_size = dist.get_world_size() + else: + self._rank = rank + self._world_size = world_size + + def __iter__(self): + start = self._rank + yield from itertools.islice( + self._infinite_indices(), start, None, self._world_size + ) + + def _infinite_indices(self): + g = torch.Generator() + g.manual_seed(self._seed) + while True: + if self._shuffle: + yield from torch.randperm(self._size, generator=g) + else: + yield from torch.arange(self._size) + + def __len__(self): + return self._size // self._world_size diff --git a/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/deepsort.py b/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/deepsort.py new file mode 100644 index 0000000000000000000000000000000000000000..198f3b0dd7bf34bde4bf16eb9c80f068a8635ced --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/deepsort.py @@ -0,0 +1,295 @@ +import numpy as np +import torch +import cv2 +import os + +from .reid_model import Extractor +from yolox.deepsort_tracker import kalman_filter, linear_assignment, iou_matching +from yolox.data.dataloading import get_yolox_datadir +from .detection import Detection +from .track import Track + + +def _cosine_distance(a, b, data_is_normalized=False): + if not data_is_normalized: + a = np.asarray(a) / np.linalg.norm(a, axis=1, keepdims=True) + b = np.asarray(b) / np.linalg.norm(b, axis=1, keepdims=True) + return 1. - np.dot(a, b.T) + + +def _nn_cosine_distance(x, y): + distances = _cosine_distance(x, y) + return distances.min(axis=0) + + +class Tracker: + def __init__(self, metric, max_iou_distance=0.7, max_age=70, n_init=3): + self.metric = metric + self.max_iou_distance = max_iou_distance + self.max_age = max_age + self.n_init = n_init + + self.kf = kalman_filter.KalmanFilter() + self.tracks = [] + self._next_id = 1 + + def predict(self): + """Propagate track state distributions one time step forward. + This function should be called once every time step, before `update`. + """ + for track in self.tracks: + track.predict(self.kf) + + def increment_ages(self): + for track in self.tracks: + track.increment_age() + track.mark_missed() + + def update(self, detections, classes): + """Perform measurement update and track management. + Parameters + ---------- + detections : List[deep_sort.detection.Detection] + A list of detections at the current time step. + """ + # Run matching cascade. + matches, unmatched_tracks, unmatched_detections = \ + self._match(detections) + + # Update track set. + for track_idx, detection_idx in matches: + self.tracks[track_idx].update( + self.kf, detections[detection_idx]) + for track_idx in unmatched_tracks: + self.tracks[track_idx].mark_missed() + for detection_idx in unmatched_detections: + self._initiate_track(detections[detection_idx], classes[detection_idx].item()) + self.tracks = [t for t in self.tracks if not t.is_deleted()] + + # Update distance metric. 
+ active_targets = [t.track_id for t in self.tracks if t.is_confirmed()] + features, targets = [], [] + for track in self.tracks: + if not track.is_confirmed(): + continue + features += track.features + targets += [track.track_id for _ in track.features] + track.features = [] + self.metric.partial_fit( + np.asarray(features), np.asarray(targets), active_targets) + + def _match(self, detections): + + def gated_metric(tracks, dets, track_indices, detection_indices): + features = np.array([dets[i].feature for i in detection_indices]) + targets = np.array([tracks[i].track_id for i in track_indices]) + cost_matrix = self.metric.distance(features, targets) + cost_matrix = linear_assignment.gate_cost_matrix( + self.kf, cost_matrix, tracks, dets, track_indices, + detection_indices) + + return cost_matrix + + # Split track set into confirmed and unconfirmed tracks. + confirmed_tracks = [ + i for i, t in enumerate(self.tracks) if t.is_confirmed()] + unconfirmed_tracks = [ + i for i, t in enumerate(self.tracks) if not t.is_confirmed()] + + # Associate confirmed tracks using appearance features. + matches_a, unmatched_tracks_a, unmatched_detections = \ + linear_assignment.matching_cascade( + gated_metric, self.metric.matching_threshold, self.max_age, + self.tracks, detections, confirmed_tracks) + + # Associate remaining tracks together with unconfirmed tracks using IOU. + iou_track_candidates = unconfirmed_tracks + [ + k for k in unmatched_tracks_a if + self.tracks[k].time_since_update == 1] + unmatched_tracks_a = [ + k for k in unmatched_tracks_a if + self.tracks[k].time_since_update != 1] + matches_b, unmatched_tracks_b, unmatched_detections = \ + linear_assignment.min_cost_matching( + iou_matching.iou_cost, self.max_iou_distance, self.tracks, + detections, iou_track_candidates, unmatched_detections) + + matches = matches_a + matches_b + unmatched_tracks = list(set(unmatched_tracks_a + unmatched_tracks_b)) + return matches, unmatched_tracks, unmatched_detections + + def _initiate_track(self, detection, class_id): + mean, covariance = self.kf.initiate(detection.to_xyah()) + self.tracks.append(Track( + mean, covariance, self._next_id, class_id, self.n_init, self.max_age, + detection.feature)) + self._next_id += 1 + + +class NearestNeighborDistanceMetric(object): + def __init__(self, metric, matching_threshold, budget=None): + + if metric == "cosine": + self._metric = _nn_cosine_distance + else: + raise ValueError( + "Invalid metric; must be either 'euclidean' or 'cosine'") + self.matching_threshold = matching_threshold + self.budget = budget + self.samples = {} + + def partial_fit(self, features, targets, active_targets): + for feature, target in zip(features, targets): + self.samples.setdefault(target, []).append(feature) + if self.budget is not None: + self.samples[target] = self.samples[target][-self.budget:] + self.samples = {k: self.samples[k] for k in active_targets} + + def distance(self, features, targets): + cost_matrix = np.zeros((len(targets), len(features))) + for i, target in enumerate(targets): + cost_matrix[i, :] = self._metric(self.samples[target], features) + return cost_matrix + + +class DeepSort(object): + def __init__(self, model_path, max_dist=0.1, min_confidence=0.3, nms_max_overlap=1.0, max_iou_distance=0.7, max_age=30, n_init=3, nn_budget=100, use_cuda=True): + self.min_confidence = min_confidence + self.nms_max_overlap = nms_max_overlap + + self.extractor = Extractor(model_path, use_cuda=use_cuda) + + max_cosine_distance = max_dist + metric = NearestNeighborDistanceMetric( + 
"cosine", max_cosine_distance, nn_budget) + self.tracker = Tracker( + metric, max_iou_distance=max_iou_distance, max_age=max_age, n_init=n_init) + + def update(self, output_results, img_info, img_size, img_file_name): + img_file_name = os.path.join(get_yolox_datadir(), 'mot', 'train', img_file_name) + ori_img = cv2.imread(img_file_name) + self.height, self.width = ori_img.shape[:2] + # post process detections + output_results = output_results.cpu().numpy() + confidences = output_results[:, 4] * output_results[:, 5] + + bboxes = output_results[:, :4] # x1y1x2y2 + img_h, img_w = img_info[0], img_info[1] + scale = min(img_size[0] / float(img_h), img_size[1] / float(img_w)) + bboxes /= scale + bbox_xyxy = bboxes + bbox_tlwh = self._xyxy_to_tlwh_array(bbox_xyxy) + remain_inds = confidences > self.min_confidence + bbox_tlwh = bbox_tlwh[remain_inds] + confidences = confidences[remain_inds] + + # generate detections + features = self._get_features(bbox_tlwh, ori_img) + detections = [Detection(bbox_tlwh[i], conf, features[i]) for i, conf in enumerate( + confidences) if conf > self.min_confidence] + classes = np.zeros((len(detections), )) + + # run on non-maximum supression + boxes = np.array([d.tlwh for d in detections]) + scores = np.array([d.confidence for d in detections]) + + # update tracker + self.tracker.predict() + self.tracker.update(detections, classes) + + # output bbox identities + outputs = [] + for track in self.tracker.tracks: + if not track.is_confirmed() or track.time_since_update > 1: + continue + box = track.to_tlwh() + x1, y1, x2, y2 = self._tlwh_to_xyxy_noclip(box) + track_id = track.track_id + class_id = track.class_id + outputs.append(np.array([x1, y1, x2, y2, track_id, class_id], dtype=np.int)) + if len(outputs) > 0: + outputs = np.stack(outputs, axis=0) + return outputs + + """ + TODO: + Convert bbox from xc_yc_w_h to xtl_ytl_w_h + Thanks JieChen91@github.com for reporting this bug! + """ + @staticmethod + def _xywh_to_tlwh(bbox_xywh): + if isinstance(bbox_xywh, np.ndarray): + bbox_tlwh = bbox_xywh.copy() + elif isinstance(bbox_xywh, torch.Tensor): + bbox_tlwh = bbox_xywh.clone() + bbox_tlwh[:, 0] = bbox_xywh[:, 0] - bbox_xywh[:, 2] / 2. + bbox_tlwh[:, 1] = bbox_xywh[:, 1] - bbox_xywh[:, 3] / 2. + return bbox_tlwh + + @staticmethod + def _xyxy_to_tlwh_array(bbox_xyxy): + if isinstance(bbox_xyxy, np.ndarray): + bbox_tlwh = bbox_xyxy.copy() + elif isinstance(bbox_xyxy, torch.Tensor): + bbox_tlwh = bbox_xyxy.clone() + bbox_tlwh[:, 2] = bbox_xyxy[:, 2] - bbox_xyxy[:, 0] + bbox_tlwh[:, 3] = bbox_xyxy[:, 3] - bbox_xyxy[:, 1] + return bbox_tlwh + + def _xywh_to_xyxy(self, bbox_xywh): + x, y, w, h = bbox_xywh + x1 = max(int(x - w / 2), 0) + x2 = min(int(x + w / 2), self.width - 1) + y1 = max(int(y - h / 2), 0) + y2 = min(int(y + h / 2), self.height - 1) + return x1, y1, x2, y2 + + def _tlwh_to_xyxy(self, bbox_tlwh): + """ + TODO: + Convert bbox from xtl_ytl_w_h to xc_yc_w_h + Thanks JieChen91@github.com for reporting this bug! + """ + x, y, w, h = bbox_tlwh + x1 = max(int(x), 0) + x2 = min(int(x+w), self.width - 1) + y1 = max(int(y), 0) + y2 = min(int(y+h), self.height - 1) + return x1, y1, x2, y2 + + def _tlwh_to_xyxy_noclip(self, bbox_tlwh): + """ + TODO: + Convert bbox from xtl_ytl_w_h to xc_yc_w_h + Thanks JieChen91@github.com for reporting this bug! 
+ """ + x, y, w, h = bbox_tlwh + x1 = x + x2 = x + w + y1 = y + y2 = y + h + return x1, y1, x2, y2 + + def increment_ages(self): + self.tracker.increment_ages() + + def _xyxy_to_tlwh(self, bbox_xyxy): + x1, y1, x2, y2 = bbox_xyxy + + t = x1 + l = y1 + w = int(x2 - x1) + h = int(y2 - y1) + return t, l, w, h + + def _get_features(self, bbox_xywh, ori_img): + im_crops = [] + for box in bbox_xywh: + x1, y1, x2, y2 = self._tlwh_to_xyxy(box) + im = ori_img[y1:y2, x1:x2] + im_crops.append(im) + if im_crops: + features = self.extractor(im_crops) + else: + features = np.array([]) + return features diff --git a/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/detection.py b/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/detection.py new file mode 100644 index 0000000000000000000000000000000000000000..9bd84977d840bfd7a63daf8efafdcb8f7615c6a3 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/detection.py @@ -0,0 +1,46 @@ +# vim: expandtab:ts=4:sw=4 +import numpy as np + + +class Detection(object): + """ + This class represents a bounding box detection in a single image. + Parameters + ---------- + tlwh : array_like + Bounding box in format `(x, y, w, h)`. + confidence : float + Detector confidence score. + feature : array_like + A feature vector that describes the object contained in this image. + Attributes + ---------- + tlwh : ndarray + Bounding box in format `(top left x, top left y, width, height)`. + confidence : ndarray + Detector confidence score. + feature : ndarray | NoneType + A feature vector that describes the object contained in this image. + """ + + def __init__(self, tlwh, confidence, feature): + self.tlwh = np.asarray(tlwh, dtype=np.float) + self.confidence = float(confidence) + self.feature = np.asarray(feature, dtype=np.float32) + + def to_tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + def to_xyah(self): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. + """ + ret = self.tlwh.copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/iou_matching.py b/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/iou_matching.py new file mode 100644 index 0000000000000000000000000000000000000000..7a27a4dbff2360a09943442140d52743cd66d8c4 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/iou_matching.py @@ -0,0 +1,76 @@ +# vim: expandtab:ts=4:sw=4 +from __future__ import absolute_import +import numpy as np +from yolox.deepsort_tracker import linear_assignment + + +def iou(bbox, candidates): + """Computer intersection over union. + Parameters + ---------- + bbox : ndarray + A bounding box in format `(top left x, top left y, width, height)`. + candidates : ndarray + A matrix of candidate bounding boxes (one per row) in the same format + as `bbox`. + Returns + ------- + ndarray + The intersection over union in [0, 1] between the `bbox` and each + candidate. A higher score means a larger fraction of the `bbox` is + occluded by the candidate. 
+ """ + bbox_tl, bbox_br = bbox[:2], bbox[:2] + bbox[2:] + candidates_tl = candidates[:, :2] + candidates_br = candidates[:, :2] + candidates[:, 2:] + + tl = np.c_[np.maximum(bbox_tl[0], candidates_tl[:, 0])[:, np.newaxis], + np.maximum(bbox_tl[1], candidates_tl[:, 1])[:, np.newaxis]] + br = np.c_[np.minimum(bbox_br[0], candidates_br[:, 0])[:, np.newaxis], + np.minimum(bbox_br[1], candidates_br[:, 1])[:, np.newaxis]] + wh = np.maximum(0., br - tl) + + area_intersection = wh.prod(axis=1) + area_bbox = bbox[2:].prod() + area_candidates = candidates[:, 2:].prod(axis=1) + return area_intersection / (area_bbox + area_candidates - area_intersection) + + +def iou_cost(tracks, detections, track_indices=None, + detection_indices=None): + """An intersection over union distance metric. + Parameters + ---------- + tracks : List[deep_sort.track.Track] + A list of tracks. + detections : List[deep_sort.detection.Detection] + A list of detections. + track_indices : Optional[List[int]] + A list of indices to tracks that should be matched. Defaults to + all `tracks`. + detection_indices : Optional[List[int]] + A list of indices to detections that should be matched. Defaults + to all `detections`. + Returns + ------- + ndarray + Returns a cost matrix of shape + len(track_indices), len(detection_indices) where entry (i, j) is + `1 - iou(tracks[track_indices[i]], detections[detection_indices[j]])`. + """ + if track_indices is None: + track_indices = np.arange(len(tracks)) + if detection_indices is None: + detection_indices = np.arange(len(detections)) + + cost_matrix = np.zeros((len(track_indices), len(detection_indices))) + for row, track_idx in enumerate(track_indices): + if tracks[track_idx].time_since_update > 1: + cost_matrix[row, :] = linear_assignment.INFTY_COST + continue + + bbox = tracks[track_idx].to_tlwh() + candidates = np.asarray( + [detections[i].tlwh for i in detection_indices]) + cost_matrix[row, :] = 1. - iou(bbox, candidates) + return cost_matrix \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/kalman_filter.py b/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/kalman_filter.py new file mode 100644 index 0000000000000000000000000000000000000000..1d4bcc36b9ab44eb08fe41161839b8d7e604b6fe --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/kalman_filter.py @@ -0,0 +1,208 @@ +# vim: expandtab:ts=4:sw=4 +import numpy as np +import scipy.linalg + + +""" +Table for the 0.95 quantile of the chi-square distribution with N degrees of +freedom (contains values for N=1, ..., 9). Taken from MATLAB/Octave's chi2inv +function and used as Mahalanobis gating threshold. +""" +chi2inv95 = { + 1: 3.8415, + 2: 5.9915, + 3: 7.8147, + 4: 9.4877, + 5: 11.070, + 6: 12.592, + 7: 14.067, + 8: 15.507, + 9: 16.919} + + +class KalmanFilter(object): + """ + A simple Kalman filter for tracking bounding boxes in image space. + The 8-dimensional state space + x, y, a, h, vx, vy, va, vh + contains the bounding box center position (x, y), aspect ratio a, height h, + and their respective velocities. + Object motion follows a constant velocity model. The bounding box location + (x, y, a, h) is taken as direct observation of the state space (linear + observation model). + """ + + def __init__(self): + ndim, dt = 4, 1. + + # Create Kalman filter model matrices. 
+ self._motion_mat = np.eye(2 * ndim, 2 * ndim) + for i in range(ndim): + self._motion_mat[i, ndim + i] = dt + self._update_mat = np.eye(ndim, 2 * ndim) + + # Motion and observation uncertainty are chosen relative to the current + # state estimate. These weights control the amount of uncertainty in + # the model. This is a bit hacky. + self._std_weight_position = 1. / 20 + self._std_weight_velocity = 1. / 160 + + def initiate(self, measurement): + """Create track from unassociated measurement. + Parameters + ---------- + measurement : ndarray + Bounding box coordinates (x, y, a, h) with center position (x, y), + aspect ratio a, and height h. + Returns + ------- + (ndarray, ndarray) + Returns the mean vector (8 dimensional) and covariance matrix (8x8 + dimensional) of the new track. Unobserved velocities are initialized + to 0 mean. + """ + mean_pos = measurement + mean_vel = np.zeros_like(mean_pos) + mean = np.r_[mean_pos, mean_vel] + + std = [ + 2 * self._std_weight_position * measurement[3], + 2 * self._std_weight_position * measurement[3], + 1e-2, + 2 * self._std_weight_position * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 1e-5, + 10 * self._std_weight_velocity * measurement[3]] + covariance = np.diag(np.square(std)) + return mean, covariance + + def predict(self, mean, covariance): + """Run Kalman filter prediction step. + Parameters + ---------- + mean : ndarray + The 8 dimensional mean vector of the object state at the previous + time step. + covariance : ndarray + The 8x8 dimensional covariance matrix of the object state at the + previous time step. + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + """ + std_pos = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-2, + self._std_weight_position * mean[3]] + std_vel = [ + self._std_weight_velocity * mean[3], + self._std_weight_velocity * mean[3], + 1e-5, + self._std_weight_velocity * mean[3]] + motion_cov = np.diag(np.square(np.r_[std_pos, std_vel])) + + mean = np.dot(self._motion_mat, mean) + covariance = np.linalg.multi_dot(( + self._motion_mat, covariance, self._motion_mat.T)) + motion_cov + + return mean, covariance + + def project(self, mean, covariance): + """Project state distribution to measurement space. + Parameters + ---------- + mean : ndarray + The state's mean vector (8 dimensional array). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + Returns + ------- + (ndarray, ndarray) + Returns the projected mean and covariance matrix of the given state + estimate. + """ + std = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-1, + self._std_weight_position * mean[3]] + innovation_cov = np.diag(np.square(std)) + + mean = np.dot(self._update_mat, mean) + covariance = np.linalg.multi_dot(( + self._update_mat, covariance, self._update_mat.T)) + return mean, covariance + innovation_cov + + def update(self, mean, covariance, measurement): + """Run Kalman filter correction step. + Parameters + ---------- + mean : ndarray + The predicted state's mean vector (8 dimensional). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + measurement : ndarray + The 4 dimensional measurement vector (x, y, a, h), where (x, y) + is the center position, a the aspect ratio, and h the height of the + bounding box. 
+ Returns + ------- + (ndarray, ndarray) + Returns the measurement-corrected state distribution. + """ + projected_mean, projected_cov = self.project(mean, covariance) + + chol_factor, lower = scipy.linalg.cho_factor( + projected_cov, lower=True, check_finite=False) + kalman_gain = scipy.linalg.cho_solve( + (chol_factor, lower), np.dot(covariance, self._update_mat.T).T, + check_finite=False).T + innovation = measurement - projected_mean + + new_mean = mean + np.dot(innovation, kalman_gain.T) + new_covariance = covariance - np.linalg.multi_dot(( + kalman_gain, projected_cov, kalman_gain.T)) + return new_mean, new_covariance + + def gating_distance(self, mean, covariance, measurements, + only_position=False): + """Compute gating distance between state distribution and measurements. + A suitable distance threshold can be obtained from `chi2inv95`. If + `only_position` is False, the chi-square distribution has 4 degrees of + freedom, otherwise 2. + Parameters + ---------- + mean : ndarray + Mean vector over the state distribution (8 dimensional). + covariance : ndarray + Covariance of the state distribution (8x8 dimensional). + measurements : ndarray + An Nx4 dimensional matrix of N measurements, each in + format (x, y, a, h) where (x, y) is the bounding box center + position, a the aspect ratio, and h the height. + only_position : Optional[bool] + If True, distance computation is done with respect to the bounding + box center position only. + Returns + ------- + ndarray + Returns an array of length N, where the i-th element contains the + squared Mahalanobis distance between (mean, covariance) and + `measurements[i]`. + """ + mean, covariance = self.project(mean, covariance) + if only_position: + mean, covariance = mean[:2], covariance[:2, :2] + measurements = measurements[:, :2] + + cholesky_factor = np.linalg.cholesky(covariance) + d = measurements - mean + z = scipy.linalg.solve_triangular( + cholesky_factor, d.T, lower=True, check_finite=False, + overwrite_b=True) + squared_maha = np.sum(z * z, axis=0) + return squared_maha \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/linear_assignment.py b/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/linear_assignment.py new file mode 100644 index 0000000000000000000000000000000000000000..5651893225d410b0a2144f9624810e4a98fac75c --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/linear_assignment.py @@ -0,0 +1,182 @@ +from __future__ import absolute_import +import numpy as np +# from sklearn.utils.linear_assignment_ import linear_assignment +from scipy.optimize import linear_sum_assignment as linear_assignment +from yolox.deepsort_tracker import kalman_filter + + +INFTY_COST = 1e+5 + + +def min_cost_matching( + distance_metric, max_distance, tracks, detections, track_indices=None, + detection_indices=None): + """Solve linear assignment problem. + Parameters + ---------- + distance_metric : Callable[List[Track], List[Detection], List[int], List[int]) -> ndarray + The distance metric is given a list of tracks and detections as well as + a list of N track indices and M detection indices. The metric should + return the NxM dimensional cost matrix, where element (i, j) is the + association cost between the i-th track in the given track indices and + the j-th detection in the given detection_indices. + max_distance : float + Gating threshold. Associations with cost larger than this value are + disregarded. 
+    tracks : List[track.Track]
+        A list of predicted tracks at the current time step.
+    detections : List[detection.Detection]
+        A list of detections at the current time step.
+    track_indices : List[int]
+        List of track indices that maps rows in `cost_matrix` to tracks in
+        `tracks` (see description above).
+    detection_indices : List[int]
+        List of detection indices that maps columns in `cost_matrix` to
+        detections in `detections` (see description above).
+    Returns
+    -------
+    (List[(int, int)], List[int], List[int])
+        Returns a tuple with the following three entries:
+        * A list of matched track and detection indices.
+        * A list of unmatched track indices.
+        * A list of unmatched detection indices.
+    """
+    if track_indices is None:
+        track_indices = np.arange(len(tracks))
+    if detection_indices is None:
+        detection_indices = np.arange(len(detections))
+
+    if len(detection_indices) == 0 or len(track_indices) == 0:
+        return [], track_indices, detection_indices  # Nothing to match.
+
+    cost_matrix = distance_metric(
+        tracks, detections, track_indices, detection_indices)
+    cost_matrix[cost_matrix > max_distance] = max_distance + 1e-5
+
+    row_indices, col_indices = linear_assignment(cost_matrix)
+
+    matches, unmatched_tracks, unmatched_detections = [], [], []
+    for col, detection_idx in enumerate(detection_indices):
+        if col not in col_indices:
+            unmatched_detections.append(detection_idx)
+    for row, track_idx in enumerate(track_indices):
+        if row not in row_indices:
+            unmatched_tracks.append(track_idx)
+    for row, col in zip(row_indices, col_indices):
+        track_idx = track_indices[row]
+        detection_idx = detection_indices[col]
+        if cost_matrix[row, col] > max_distance:
+            unmatched_tracks.append(track_idx)
+            unmatched_detections.append(detection_idx)
+        else:
+            matches.append((track_idx, detection_idx))
+    return matches, unmatched_tracks, unmatched_detections
+
+
+def matching_cascade(
+        distance_metric, max_distance, cascade_depth, tracks, detections,
+        track_indices=None, detection_indices=None):
+    """Run matching cascade.
+    Parameters
+    ----------
+    distance_metric : Callable[List[Track], List[Detection], List[int], List[int]] -> ndarray
+        The distance metric is given a list of tracks and detections as well as
+        a list of N track indices and M detection indices. The metric should
+        return the NxM dimensional cost matrix, where element (i, j) is the
+        association cost between the i-th track in the given track indices and
+        the j-th detection in the given detection indices.
+    max_distance : float
+        Gating threshold. Associations with cost larger than this value are
+        disregarded.
+    cascade_depth: int
+        The cascade depth, should be set to the maximum track age.
+    tracks : List[track.Track]
+        A list of predicted tracks at the current time step.
+    detections : List[detection.Detection]
+        A list of detections at the current time step.
+    track_indices : Optional[List[int]]
+        List of track indices that maps rows in `cost_matrix` to tracks in
+        `tracks` (see description above). Defaults to all tracks.
+    detection_indices : Optional[List[int]]
+        List of detection indices that maps columns in `cost_matrix` to
+        detections in `detections` (see description above). Defaults to all
+        detections.
+    Returns
+    -------
+    (List[(int, int)], List[int], List[int])
+        Returns a tuple with the following three entries:
+        * A list of matched track and detection indices.
+        * A list of unmatched track indices.
+        * A list of unmatched detection indices.
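+
+    Note:
+        Tracks are matched in increasing order of `time_since_update`
+        (most recently updated first), so fresher tracks get priority.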
+ """ + if track_indices is None: + track_indices = list(range(len(tracks))) + if detection_indices is None: + detection_indices = list(range(len(detections))) + + unmatched_detections = detection_indices + matches = [] + for level in range(cascade_depth): + if len(unmatched_detections) == 0: # No detections left + break + + track_indices_l = [ + k for k in track_indices + if tracks[k].time_since_update == 1 + level + ] + if len(track_indices_l) == 0: # Nothing to match at this level + continue + + matches_l, _, unmatched_detections = \ + min_cost_matching( + distance_metric, max_distance, tracks, detections, + track_indices_l, unmatched_detections) + matches += matches_l + unmatched_tracks = list(set(track_indices) - set(k for k, _ in matches)) + return matches, unmatched_tracks, unmatched_detections + + +def gate_cost_matrix( + kf, cost_matrix, tracks, detections, track_indices, detection_indices, + gated_cost=INFTY_COST, only_position=False): + """Invalidate infeasible entries in cost matrix based on the state + distributions obtained by Kalman filtering. + Parameters + ---------- + kf : The Kalman filter. + cost_matrix : ndarray + The NxM dimensional cost matrix, where N is the number of track indices + and M is the number of detection indices, such that entry (i, j) is the + association cost between `tracks[track_indices[i]]` and + `detections[detection_indices[j]]`. + tracks : List[track.Track] + A list of predicted tracks at the current time step. + detections : List[detection.Detection] + A list of detections at the current time step. + track_indices : List[int] + List of track indices that maps rows in `cost_matrix` to tracks in + `tracks` (see description above). + detection_indices : List[int] + List of detection indices that maps columns in `cost_matrix` to + detections in `detections` (see description above). + gated_cost : Optional[float] + Entries in the cost matrix corresponding to infeasible associations are + set this value. Defaults to a very large value. + only_position : Optional[bool] + If True, only the x, y position of the state distribution is considered + during gating. Defaults to False. + Returns + ------- + ndarray + Returns the modified cost matrix. 
+ """ + gating_dim = 2 if only_position else 4 + gating_threshold = kalman_filter.chi2inv95[gating_dim] + measurements = np.asarray( + [detections[i].to_xyah() for i in detection_indices]) + for row, track_idx in enumerate(track_indices): + track = tracks[track_idx] + gating_distance = kf.gating_distance( + track.mean, track.covariance, measurements, only_position) + cost_matrix[row, gating_distance > gating_threshold] = gated_cost + return cost_matrix \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/reid_model.py b/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/reid_model.py new file mode 100644 index 0000000000000000000000000000000000000000..6aff8853f0859b16b33b178a3ada445f755a7027 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/reid_model.py @@ -0,0 +1,145 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +import numpy as np +import cv2 +import logging +import torchvision.transforms as transforms + + +class BasicBlock(nn.Module): + def __init__(self, c_in, c_out, is_downsample=False): + super(BasicBlock, self).__init__() + self.is_downsample = is_downsample + if is_downsample: + self.conv1 = nn.Conv2d( + c_in, c_out, 3, stride=2, padding=1, bias=False) + else: + self.conv1 = nn.Conv2d( + c_in, c_out, 3, stride=1, padding=1, bias=False) + self.bn1 = nn.BatchNorm2d(c_out) + self.relu = nn.ReLU(True) + self.conv2 = nn.Conv2d(c_out, c_out, 3, stride=1, + padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(c_out) + if is_downsample: + self.downsample = nn.Sequential( + nn.Conv2d(c_in, c_out, 1, stride=2, bias=False), + nn.BatchNorm2d(c_out) + ) + elif c_in != c_out: + self.downsample = nn.Sequential( + nn.Conv2d(c_in, c_out, 1, stride=1, bias=False), + nn.BatchNorm2d(c_out) + ) + self.is_downsample = True + + def forward(self, x): + y = self.conv1(x) + y = self.bn1(y) + y = self.relu(y) + y = self.conv2(y) + y = self.bn2(y) + if self.is_downsample: + x = self.downsample(x) + return F.relu(x.add(y), True) + + +def make_layers(c_in, c_out, repeat_times, is_downsample=False): + blocks = [] + for i in range(repeat_times): + if i == 0: + blocks += [BasicBlock(c_in, c_out, is_downsample=is_downsample), ] + else: + blocks += [BasicBlock(c_out, c_out), ] + return nn.Sequential(*blocks) + + +class Net(nn.Module): + def __init__(self, num_classes=751, reid=False): + super(Net, self).__init__() + # 3 128 64 + self.conv = nn.Sequential( + nn.Conv2d(3, 64, 3, stride=1, padding=1), + nn.BatchNorm2d(64), + nn.ReLU(inplace=True), + # nn.Conv2d(32,32,3,stride=1,padding=1), + # nn.BatchNorm2d(32), + # nn.ReLU(inplace=True), + nn.MaxPool2d(3, 2, padding=1), + ) + # 32 64 32 + self.layer1 = make_layers(64, 64, 2, False) + # 32 64 32 + self.layer2 = make_layers(64, 128, 2, True) + # 64 32 16 + self.layer3 = make_layers(128, 256, 2, True) + # 128 16 8 + self.layer4 = make_layers(256, 512, 2, True) + # 256 8 4 + self.avgpool = nn.AvgPool2d((8, 4), 1) + # 256 1 1 + self.reid = reid + self.classifier = nn.Sequential( + nn.Linear(512, 256), + nn.BatchNorm1d(256), + nn.ReLU(inplace=True), + nn.Dropout(), + nn.Linear(256, num_classes), + ) + + def forward(self, x): + x = self.conv(x) + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + x = self.avgpool(x) + x = x.view(x.size(0), -1) + # B x 128 + if self.reid: + x = x.div(x.norm(p=2, dim=1, keepdim=True)) + return x + # classifier + x = self.classifier(x) + return x + + +class Extractor(object): + def 
__init__(self, model_path, use_cuda=True):
+        self.net = Net(reid=True)
+        self.device = "cuda" if torch.cuda.is_available() and use_cuda else "cpu"
+        state_dict = torch.load(model_path, map_location=torch.device(self.device))[
+            'net_dict']
+        self.net.load_state_dict(state_dict)
+        logger = logging.getLogger("root.tracker")
+        logger.info("Loading weights from {}... Done!".format(model_path))
+        self.net.to(self.device)
+        self.size = (64, 128)
+        self.norm = transforms.Compose([
+            transforms.ToTensor(),
+            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
+        ])
+
+    def _preprocess(self, im_crops):
+        """
+        Preprocessing steps:
+            1. convert to float with scale from 0 to 1
+            2. resize to (64, 128) as the Market1501 dataset did
+            3. concatenate to a numpy array
+            4. convert to a torch Tensor
+            5. normalize
+        """
+        def _resize(im, size):
+            return cv2.resize(im.astype(np.float32)/255., size)
+
+        im_batch = torch.cat([self.norm(_resize(im, self.size)).unsqueeze(
+            0) for im in im_crops], dim=0).float()
+        return im_batch
+
+    def __call__(self, im_crops):
+        im_batch = self._preprocess(im_crops)
+        with torch.no_grad():
+            im_batch = im_batch.to(self.device)
+            features = self.net(im_batch)
+        return features.cpu().numpy()
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/track.py b/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/track.py
new file mode 100644
index 0000000000000000000000000000000000000000..6867441e016e80224fda6ecf3e0c7e8072be4e57
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/deepsort_tracker/track.py
@@ -0,0 +1,158 @@
+# vim: expandtab:ts=4:sw=4
+
+
+class TrackState:
+    """
+    Enumeration type for the single target track state. Newly created tracks are
+    classified as `tentative` until enough evidence has been collected. Then,
+    the track state is changed to `confirmed`. Tracks that are no longer alive
+    are classified as `deleted` to mark them for removal from the set of active
+    tracks.
+    """
+
+    Tentative = 1
+    Confirmed = 2
+    Deleted = 3
+
+
+class Track:
+    """
+    A single target track with state space `(x, y, a, h)` and associated
+    velocities, where `(x, y)` is the center of the bounding box, `a` is the
+    aspect ratio and `h` is the height.
+    Parameters
+    ----------
+    mean : ndarray
+        Mean vector of the initial state distribution.
+    covariance : ndarray
+        Covariance matrix of the initial state distribution.
+    track_id : int
+        A unique track identifier.
+    n_init : int
+        Number of consecutive detections before the track is confirmed. The
+        track state is set to `Deleted` if a miss occurs within the first
+        `n_init` frames.
+    max_age : int
+        The maximum number of consecutive misses before the track state is
+        set to `Deleted`.
+    feature : Optional[ndarray]
+        Feature vector of the detection this track originates from. If not None,
+        this feature is added to the `features` cache.
+    Attributes
+    ----------
+    mean : ndarray
+        Mean vector of the initial state distribution.
+    covariance : ndarray
+        Covariance matrix of the initial state distribution.
+    track_id : int
+        A unique track identifier.
+    hits : int
+        Total number of measurement updates.
+    age : int
+        Total number of frames since first occurrence.
+    time_since_update : int
+        Total number of frames since last measurement update.
+    state : TrackState
+        The current track state.
+    features : List[ndarray]
+        A cache of features. On each measurement update, the associated feature
+        vector is added to this list.
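+
+    Note:
+        A track starts out `Tentative` and is promoted to `Confirmed` after
+        `n_init` consecutive hits; a miss while tentative, or
+        `time_since_update` exceeding `max_age`, marks it `Deleted`.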
+ """ + + def __init__(self, mean, covariance, track_id, class_id, n_init, max_age, + feature=None): + self.mean = mean + self.covariance = covariance + self.track_id = track_id + self.class_id = class_id + self.hits = 1 + self.age = 1 + self.time_since_update = 0 + + self.state = TrackState.Tentative + self.features = [] + if feature is not None: + self.features.append(feature) + + self._n_init = n_init + self._max_age = max_age + + def to_tlwh(self): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. + Returns + ------- + ndarray + The bounding box. + """ + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + def to_tlbr(self): + """Get current position in bounding box format `(min x, miny, max x, + max y)`. + Returns + ------- + ndarray + The bounding box. + """ + ret = self.to_tlwh() + ret[2:] = ret[:2] + ret[2:] + return ret + + def increment_age(self): + self.age += 1 + self.time_since_update += 1 + + def predict(self, kf): + """Propagate the state distribution to the current time step using a + Kalman filter prediction step. + Parameters + ---------- + kf : kalman_filter.KalmanFilter + The Kalman filter. + """ + self.mean, self.covariance = kf.predict(self.mean, self.covariance) + self.increment_age() + + def update(self, kf, detection): + """Perform Kalman filter measurement update step and update the feature + cache. + Parameters + ---------- + kf : kalman_filter.KalmanFilter + The Kalman filter. + detection : Detection + The associated detection. + """ + self.mean, self.covariance = kf.update( + self.mean, self.covariance, detection.to_xyah()) + self.features.append(detection.feature) + + self.hits += 1 + self.time_since_update = 0 + if self.state == TrackState.Tentative and self.hits >= self._n_init: + self.state = TrackState.Confirmed + + def mark_missed(self): + """Mark this track as missed (no association at the current time step). + """ + if self.state == TrackState.Tentative: + self.state = TrackState.Deleted + elif self.time_since_update > self._max_age: + self.state = TrackState.Deleted + + def is_tentative(self): + """Returns True if this track is tentative (unconfirmed). + """ + return self.state == TrackState.Tentative + + def is_confirmed(self): + """Returns True if this track is confirmed.""" + return self.state == TrackState.Confirmed + + def is_deleted(self): + """Returns True if this track is dead and should be deleted.""" + return self.state == TrackState.Deleted \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/yolox/evaluators/__init__.py b/tracking/docker-build-context/byte_track/yolox/evaluators/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..5d704e05c79409fb053be1a8f8ce4676a015b054 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/evaluators/__init__.py @@ -0,0 +1,6 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) Megvii, Inc. and its affiliates. 
diff --git a/tracking/docker-build-context/byte_track/yolox/evaluators/__init__.py b/tracking/docker-build-context/byte_track/yolox/evaluators/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..5d704e05c79409fb053be1a8f8ce4676a015b054
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/evaluators/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) Megvii, Inc. and its affiliates.
+
+from .coco_evaluator import COCOEvaluator
+from .mot_evaluator import MOTEvaluator
diff --git a/tracking/docker-build-context/byte_track/yolox/evaluators/coco_evaluator.py b/tracking/docker-build-context/byte_track/yolox/evaluators/coco_evaluator.py
new file mode 100644
index 0000000000000000000000000000000000000000..24dce235307cfe52062da31b0e06506b77b32b36
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/evaluators/coco_evaluator.py
@@ -0,0 +1,224 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) Megvii, Inc. and its affiliates.
+
+from loguru import logger
+from tqdm import tqdm
+
+import torch
+
+from yolox.utils import (
+    gather,
+    is_main_process,
+    postprocess,
+    synchronize,
+    time_synchronized,
+    xyxy2xywh
+)
+
+import contextlib
+import io
+import itertools
+import json
+import tempfile
+import time
+
+
+class COCOEvaluator:
+    """
+    COCO AP Evaluation class. All the data in the val2017 dataset are processed
+    and evaluated by COCO API.
+    """
+
+    def __init__(
+        self, dataloader, img_size, confthre, nmsthre, num_classes, testdev=False
+    ):
+        """
+        Args:
+            dataloader (Dataloader): evaluate dataloader.
+            img_size (int): image size after preprocess. images are resized
+                to squares whose shape is (img_size, img_size).
+            confthre (float): confidence threshold ranging from 0 to 1, which
+                is defined in the config file.
+            nmsthre (float): IoU threshold of non-maximum suppression ranging from 0 to 1.
+        """
+        self.dataloader = dataloader
+        self.img_size = img_size
+        self.confthre = confthre
+        self.nmsthre = nmsthre
+        self.num_classes = num_classes
+        self.testdev = testdev
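COCOEvaluator (and MOTEvaluator below) slice fixed columns out of the per-image tensors that postprocess() returns. A sketch of the assumed per-detection layout, with invented numbers:

    import torch
    # one detection: [x1, y1, x2, y2, obj_conf, class_conf, class_idx]
    output = torch.tensor([[100.0, 50.0, 300.0, 250.0, 0.9, 0.8, 0.0]])
    bboxes = output[:, 0:4]               # xyxy in network-input coordinates
    scores = output[:, 4] * output[:, 5]  # 0.72: objectness * class confidence
    cls = output[:, 6]                    # index into dataloader.dataset.class_ids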
+
+    def evaluate(
+        self,
+        model,
+        distributed=False,
+        half=False,
+        trt_file=None,
+        decoder=None,
+        test_size=None,
+    ):
+        """
+        COCO average precision (AP) Evaluation. Iterate inference on the test dataset
+        and the results are evaluated by COCO API.
+
+        NOTE: This function will change training mode to False, please save states if needed.
+
+        Args:
+            model : model to evaluate.
+
+        Returns:
+            ap50_95 (float) : COCO AP of IoU=50:95
+            ap50 (float) : COCO AP of IoU=50
+            summary (str): summary info of evaluation.
+        """
+        # TODO half to amp_test
+        tensor_type = torch.cuda.HalfTensor if half else torch.cuda.FloatTensor
+        model = model.eval()
+        if half:
+            model = model.half()
+        ids = []
+        data_list = []
+        progress_bar = tqdm if is_main_process() else iter
+
+        inference_time = 0
+        nms_time = 0
+        n_samples = len(self.dataloader) - 1
+
+        if trt_file is not None:
+            from torch2trt import TRTModule
+
+            model_trt = TRTModule()
+            model_trt.load_state_dict(torch.load(trt_file))
+
+            x = torch.ones(1, 3, test_size[0], test_size[1]).cuda()
+            model(x)
+            model = model_trt
+
+        for cur_iter, (imgs, _, info_imgs, ids) in enumerate(
+            progress_bar(self.dataloader)
+        ):
+            with torch.no_grad():
+                imgs = imgs.type(tensor_type)
+
+                # skip the last iters since the batch size might not be enough for batch inference
+                is_time_record = cur_iter < len(self.dataloader) - 1
+                if is_time_record:
+                    start = time.time()
+
+                outputs = model(imgs)
+                if decoder is not None:
+                    outputs = decoder(outputs, dtype=outputs.type())
+
+                if is_time_record:
+                    infer_end = time_synchronized()
+                    inference_time += infer_end - start
+
+                outputs = postprocess(
+                    outputs, self.num_classes, self.confthre, self.nmsthre
+                )
+                if is_time_record:
+                    nms_end = time_synchronized()
+                    nms_time += nms_end - infer_end
+
+            data_list.extend(self.convert_to_coco_format(outputs, info_imgs, ids))
+
+        statistics = torch.cuda.FloatTensor([inference_time, nms_time, n_samples])
+        if distributed:
+            data_list = gather(data_list, dst=0)
+            data_list = list(itertools.chain(*data_list))
+            torch.distributed.reduce(statistics, dst=0)
+
+        eval_results = self.evaluate_prediction(data_list, statistics)
+        synchronize()
+        return eval_results
+
+    def convert_to_coco_format(self, outputs, info_imgs, ids):
+        data_list = []
+        for (output, img_h, img_w, img_id) in zip(
+            outputs, info_imgs[0], info_imgs[1], ids
+        ):
+            if output is None:
+                continue
+            output = output.cpu()
+
+            bboxes = output[:, 0:4]
+
+            # preprocessing: resize
+            scale = min(
+                self.img_size[0] / float(img_h), self.img_size[1] / float(img_w)
+            )
+            bboxes /= scale
+            bboxes = xyxy2xywh(bboxes)
+
+            cls = output[:, 6]
+            scores = output[:, 4] * output[:, 5]
+            for ind in range(bboxes.shape[0]):
+                label = self.dataloader.dataset.class_ids[int(cls[ind])]
+                pred_data = {
+                    "image_id": int(img_id),
+                    "category_id": label,
+                    "bbox": bboxes[ind].numpy().tolist(),
+                    "score": scores[ind].numpy().item(),
+                    "segmentation": [],
+                }  # COCO json format
+                data_list.append(pred_data)
+        return data_list
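convert_to_coco_format above maps boxes from network-input space back to the original image by dividing by the same min-ratio scale used during preprocessing, then converts xyxy to COCO xywh. A worked example with invented sizes:

    img_h, img_w = 720, 1280              # original image
    test_h, test_w = 640, 640             # self.img_size fed to the network
    scale = min(test_h / float(img_h), test_w / float(img_w))   # 0.5
    x1, y1, x2, y2 = (v / scale for v in (100.0, 50.0, 300.0, 250.0))
    xywh = [x1, y1, x2 - x1, y2 - y1]     # xyxy2xywh: [200.0, 100.0, 400.0, 400.0]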
+
+    def evaluate_prediction(self, data_dict, statistics):
+        if not is_main_process():
+            return 0, 0, None
+
+        logger.info("Evaluate in main process...")
+
+        annType = ["segm", "bbox", "keypoints"]
+
+        inference_time = statistics[0].item()
+        nms_time = statistics[1].item()
+        n_samples = statistics[2].item()
+
+        a_infer_time = 1000 * inference_time / (n_samples * self.dataloader.batch_size)
+        a_nms_time = 1000 * nms_time / (n_samples * self.dataloader.batch_size)
+
+        time_info = ", ".join(
+            [
+                "Average {} time: {:.2f} ms".format(k, v)
+                for k, v in zip(
+                    ["forward", "NMS", "inference"],
+                    [a_infer_time, a_nms_time, (a_infer_time + a_nms_time)],
+                )
+            ]
+        )
+
+        info = time_info + "\n"
+
+        # Evaluate the Dt (detection) json comparing with the ground truth
+        if len(data_dict) > 0:
+            cocoGt = self.dataloader.dataset.coco
+            # TODO: since pycocotools can't process dict in py36, write data to json file.
+            if self.testdev:
+                json.dump(data_dict, open("./yolox_testdev_2017.json", "w"))
+                cocoDt = cocoGt.loadRes("./yolox_testdev_2017.json")
+            else:
+                _, tmp = tempfile.mkstemp()
+                json.dump(data_dict, open(tmp, "w"))
+                cocoDt = cocoGt.loadRes(tmp)
+            '''
+            try:
+                from yolox.layers import COCOeval_opt as COCOeval
+            except ImportError:
+                from pycocotools import cocoeval as COCOeval
+                logger.warning("Use standard COCOeval.")
+            '''
+            #from pycocotools.cocoeval import COCOeval
+            from yolox.layers import COCOeval_opt as COCOeval
+            cocoEval = COCOeval(cocoGt, cocoDt, annType[1])
+            cocoEval.evaluate()
+            cocoEval.accumulate()
+            redirect_string = io.StringIO()
+            with contextlib.redirect_stdout(redirect_string):
+                cocoEval.summarize()
+            info += redirect_string.getvalue()
+            return cocoEval.stats[0], cocoEval.stats[1], info
+        else:
+            return 0, 0, info
diff --git a/tracking/docker-build-context/byte_track/yolox/evaluators/evaluation.py b/tracking/docker-build-context/byte_track/yolox/evaluators/evaluation.py
new file mode 100644
index 0000000000000000000000000000000000000000..fd72f82adf4bacc73b564a855ce10082d89f76af
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/evaluators/evaluation.py
@@ -0,0 +1,200 @@
+import os
+import numpy as np
+import copy
+import motmetrics as mm
+mm.lap.default_solver = 'lap'
+
+
+class Evaluator(object):
+
+    def __init__(self, data_root, seq_name, data_type):
+        self.data_root = data_root
+        self.seq_name = seq_name
+        self.data_type = data_type
+
+        self.load_annotations()
+        self.reset_accumulator()
+
+    def load_annotations(self):
+        assert self.data_type == 'mot'
+
+        gt_filename = os.path.join(self.data_root, self.seq_name, 'gt', 'gt.txt')
+        self.gt_frame_dict = read_results(gt_filename, self.data_type, is_gt=True)
+        self.gt_ignore_frame_dict = read_results(gt_filename, self.data_type, is_ignore=True)
+
+    def reset_accumulator(self):
+        self.acc = mm.MOTAccumulator(auto_id=True)
+
+    def eval_frame(self, frame_id, trk_tlwhs, trk_ids, rtn_events=False):
+        # results
+        trk_tlwhs = np.copy(trk_tlwhs)
+        trk_ids = np.copy(trk_ids)
+
+        # gts
+        gt_objs = self.gt_frame_dict.get(frame_id, [])
+        gt_tlwhs, gt_ids = unzip_objs(gt_objs)[:2]
+
+        # ignore boxes
+        ignore_objs = self.gt_ignore_frame_dict.get(frame_id, [])
+        ignore_tlwhs = unzip_objs(ignore_objs)[0]
+
+        # remove ignored results
+        keep = np.ones(len(trk_tlwhs), dtype=bool)
+        iou_distance = mm.distances.iou_matrix(ignore_tlwhs, trk_tlwhs, max_iou=0.5)
+        if len(iou_distance) > 0:
+            match_is, match_js = mm.lap.linear_sum_assignment(iou_distance)
+            match_is, match_js = map(lambda a: np.asarray(a, dtype=int), [match_is, match_js])
+            match_ious = iou_distance[match_is, match_js]
+
+            match_js = np.asarray(match_js, dtype=int)
+            match_js = match_js[np.logical_not(np.isnan(match_ious))]
+            keep[match_js] = False
+            trk_tlwhs = trk_tlwhs[keep]
+            trk_ids = trk_ids[keep]
+        #match_is, match_js = mm.lap.linear_sum_assignment(iou_distance)
+        #match_is, match_js = map(lambda a: np.asarray(a, dtype=int), [match_is, match_js])
+        #match_ious = iou_distance[match_is, match_js]
+
+        #match_js = np.asarray(match_js, dtype=int)
+        #match_js = match_js[np.logical_not(np.isnan(match_ious))]
+        #keep[match_js] = False
+        #trk_tlwhs = trk_tlwhs[keep]
+        #trk_ids = trk_ids[keep]
+
+        # get distance matrix
+        iou_distance = mm.distances.iou_matrix(gt_tlwhs, trk_tlwhs, max_iou=0.5)
+
+        # acc
+        self.acc.update(gt_ids, trk_ids, iou_distance)
+
+        if rtn_events and iou_distance.size > 0 and hasattr(self.acc, 'last_mot_events'):
+            events = self.acc.last_mot_events  # only supported by https://github.com/longcw/py-motmetrics
+        else:
+            events = None
+        return events
+
+    def eval_file(self, filename):
+        self.reset_accumulator()
+
+        result_frame_dict = read_results(filename, self.data_type, is_gt=False)
+        #frames = sorted(list(set(self.gt_frame_dict.keys()) | set(result_frame_dict.keys())))
+        frames = sorted(list(set(result_frame_dict.keys())))
+        for frame_id in frames:
+            trk_objs = result_frame_dict.get(frame_id, [])
+            trk_tlwhs, trk_ids = unzip_objs(trk_objs)[:2]
+            self.eval_frame(frame_id, trk_tlwhs, trk_ids, rtn_events=False)
+
+        return self.acc
+
+    @staticmethod
+    def get_summary(accs, names, metrics=('mota', 'num_switches', 'idp', 'idr', 'idf1', 'precision', 'recall')):
+        names = copy.deepcopy(names)
+        if metrics is None:
+            metrics = mm.metrics.motchallenge_metrics
+        metrics = copy.deepcopy(metrics)
+
+        mh = mm.metrics.create()
+        summary = mh.compute_many(
+            accs,
+            metrics=metrics,
+            names=names,
+            generate_overall=True
+        )
+
+        return summary
+
+    @staticmethod
+    def save_summary(summary, filename):
+        import pandas as pd
+        writer = pd.ExcelWriter(filename)
+        summary.to_excel(writer)
+        writer.save()
+
+
+
+
+def read_results(filename, data_type: str, is_gt=False, is_ignore=False):
+    if data_type in ('mot', 'lab'):
+        read_fun = read_mot_results
+    else:
+        raise ValueError('Unknown data type: {}'.format(data_type))
+
+    return read_fun(filename, is_gt, is_ignore)
+
+
+"""
+labels={'ped', ... % 1
+'person_on_vhcl', ... % 2
+'car', ... % 3
+'bicycle', ... % 4
+'mbike', ... % 5
+'non_mot_vhcl', ... % 6
+'static_person', ... % 7
+'distractor', ... % 8
+'occluder', ... % 9
+'occluder_on_grnd', ... %10
+'occluder_full', ... % 11
+'reflection', ... % 12
+'crowd' ... % 13
+};
+"""
+
+
+def read_mot_results(filename, is_gt, is_ignore):
+    valid_labels = {1}
+    ignore_labels = {2, 7, 8, 12}
+    results_dict = dict()
+    if os.path.isfile(filename):
+        with open(filename, 'r') as f:
+            for line in f.readlines():
+                linelist = line.split(',')
+                if len(linelist) < 7:
+                    continue
+                fid = int(linelist[0])
+                if fid < 1:
+                    continue
+                results_dict.setdefault(fid, list())
+
+                box_size = float(linelist[4]) * float(linelist[5])
+
+                if is_gt:
+                    if 'MOT16-' in filename or 'MOT17-' in filename:
+                        label = int(float(linelist[7]))
+                        mark = int(float(linelist[6]))
+                        if mark == 0 or label not in valid_labels:
+                            continue
+                    score = 1
+                elif is_ignore:
+                    if 'MOT16-' in filename or 'MOT17-' in filename:
+                        label = int(float(linelist[7]))
+                        vis_ratio = float(linelist[8])
+                        if label not in ignore_labels and vis_ratio >= 0:
+                            continue
+                    else:
+                        continue
+                    score = 1
+                else:
+                    score = float(linelist[6])
+
+                #if box_size > 7000:
+                #if box_size <= 7000 or box_size >= 15000:
+                #if box_size < 15000:
+                    #continue
+
+                tlwh = tuple(map(float, linelist[2:6]))
+                target_id = int(linelist[1])
+
+                results_dict[fid].append((tlwh, target_id, score))
+
+    return results_dict
+
+
+def unzip_objs(objs):
+    if len(objs) > 0:
+        tlwhs, ids, scores = zip(*objs)
+    else:
+        tlwhs, ids, scores = [], [], []
+    tlwhs = np.asarray(tlwhs, dtype=float).reshape(-1, 4)
+
+    return tlwhs, ids, scores
\ No newline at end of file
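For reference, a sketch of the comma-separated MOT layout that read_mot_results above expects; the example line is invented:

    line = "1,3,794.27,247.59,71.25,174.00,0.92"   # frame,id,x,y,w,h,score(,...)
    fields = line.split(',')
    fid = int(fields[0])                     # frame id (1-based)
    target_id = int(fields[1])
    tlwh = tuple(map(float, fields[2:6]))    # top-left x, y, width, height
    score = float(fields[6])                 # gt files carry mark/label/visibility columns here instead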
diff --git a/tracking/docker-build-context/byte_track/yolox/evaluators/mot_evaluator.py b/tracking/docker-build-context/byte_track/yolox/evaluators/mot_evaluator.py
new file mode 100644
index 0000000000000000000000000000000000000000..becec47deadf7fd8345b477df9bac151bab7241d
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/evaluators/mot_evaluator.py
@@ -0,0 +1,679 @@
+from collections import defaultdict
+from loguru import logger
+from tqdm import tqdm
+
+import torch
+
+from yolox.utils import (
+    gather,
+    is_main_process,
+    postprocess,
+    synchronize,
+    time_synchronized,
+    xyxy2xywh
+)
+from yolox.tracker.byte_tracker import BYTETracker
+from yolox.sort_tracker.sort import Sort
+from yolox.deepsort_tracker.deepsort import DeepSort
+from yolox.motdt_tracker.motdt_tracker import OnlineTracker
+
+import contextlib
+import io
+import os
+import itertools
+import json
+import tempfile
+import time
+
+
+def write_results(filename, results):
+    save_format = '{frame},{id},{x1},{y1},{w},{h},{s},-1,-1,-1\n'
+    with open(filename, 'w') as f:
+        for frame_id, tlwhs, track_ids, scores in results:
+            for tlwh, track_id, score in zip(tlwhs, track_ids, scores):
+                if track_id < 0:
+                    continue
+                x1, y1, w, h = tlwh
+                line = save_format.format(frame=frame_id, id=track_id, x1=round(x1, 1), y1=round(y1, 1), w=round(w, 1), h=round(h, 1), s=round(score, 2))
+                f.write(line)
+    logger.info('save results to {}'.format(filename))
+
+
+def write_results_no_score(filename, results):
+    save_format = '{frame},{id},{x1},{y1},{w},{h},-1,-1,-1,-1\n'
+    with open(filename, 'w') as f:
+        for frame_id, tlwhs, track_ids in results:
+            for tlwh, track_id in zip(tlwhs, track_ids):
+                if track_id < 0:
+                    continue
+                x1, y1, w, h = tlwh
+                line = save_format.format(frame=frame_id, id=track_id, x1=round(x1, 1), y1=round(y1, 1), w=round(w, 1), h=round(h, 1))
+                f.write(line)
+    logger.info('save results to {}'.format(filename))
+
+
+class MOTEvaluator:
+    """
+    COCO AP Evaluation class. All the data in the val2017 dataset are processed
+    and evaluated by COCO API.
+    """
+
+    def __init__(
+        self, args, dataloader, img_size, confthre, nmsthre, num_classes):
+        """
+        Args:
+            dataloader (Dataloader): evaluate dataloader.
+            img_size (int): image size after preprocess. images are resized
+                to squares whose shape is (img_size, img_size).
+            confthre (float): confidence threshold ranging from 0 to 1, which
+                is defined in the config file.
+            nmsthre (float): IoU threshold of non-maximum suppression ranging from 0 to 1.
+        """
+        self.dataloader = dataloader
+        self.img_size = img_size
+        self.confthre = confthre
+        self.nmsthre = nmsthre
+        self.num_classes = num_classes
+        self.args = args
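Each evaluate_* method below applies the same box filter before storing a track: the box must exceed min_box_area and must not be wider than 1.6 times its height. A small sketch with invented values:

    tlwh = (50.0, 40.0, 30.0, 80.0)          # x, y, w, h
    min_box_area = 100                       # self.args.min_box_area
    vertical = tlwh[2] / tlwh[3] > 1.6       # aspect-ratio gate: 0.375 -> False
    keep = tlwh[2] * tlwh[3] > min_box_area and not vertical   # 2400 > 100 -> True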
+
+    def evaluate(
+        self,
+        model,
+        distributed=False,
+        half=False,
+        trt_file=None,
+        decoder=None,
+        test_size=None,
+        result_folder=None
+    ):
+        """
+        COCO average precision (AP) Evaluation. Iterate inference on the test dataset
+        and the results are evaluated by COCO API.
+
+        NOTE: This function will change training mode to False, please save states if needed.
+
+        Args:
+            model : model to evaluate.
+
+        Returns:
+            ap50_95 (float) : COCO AP of IoU=50:95
+            ap50 (float) : COCO AP of IoU=50
+            summary (str): summary info of evaluation.
+        """
+        # TODO half to amp_test
+        tensor_type = torch.cuda.HalfTensor if half else torch.cuda.FloatTensor
+        model = model.eval()
+        if half:
+            model = model.half()
+        ids = []
+        data_list = []
+        results = []
+        video_names = defaultdict()
+        progress_bar = tqdm if is_main_process() else iter
+
+        inference_time = 0
+        track_time = 0
+        n_samples = len(self.dataloader) - 1
+
+        if trt_file is not None:
+            from torch2trt import TRTModule
+
+            model_trt = TRTModule()
+            model_trt.load_state_dict(torch.load(trt_file))
+
+            x = torch.ones(1, 3, test_size[0], test_size[1]).cuda()
+            model(x)
+            model = model_trt
+
+        tracker = BYTETracker(self.args)
+        ori_thresh = self.args.track_thresh
+        for cur_iter, (imgs, _, info_imgs, ids) in enumerate(
+            progress_bar(self.dataloader)
+        ):
+            with torch.no_grad():
+                # init tracker
+                frame_id = info_imgs[2].item()
+                video_id = info_imgs[3].item()
+                img_file_name = info_imgs[4]
+                video_name = img_file_name[0].split('/')[0]
+                if video_name == 'MOT17-05-FRCNN' or video_name == 'MOT17-06-FRCNN':
+                    self.args.track_buffer = 14
+                elif video_name == 'MOT17-13-FRCNN' or video_name == 'MOT17-14-FRCNN':
+                    self.args.track_buffer = 25
+                else:
+                    self.args.track_buffer = 30
+
+                if video_name == 'MOT17-01-FRCNN':
+                    self.args.track_thresh = 0.65
+                elif video_name == 'MOT17-06-FRCNN':
+                    self.args.track_thresh = 0.65
+                elif video_name == 'MOT17-12-FRCNN':
+                    self.args.track_thresh = 0.7
+                elif video_name == 'MOT17-14-FRCNN':
+                    self.args.track_thresh = 0.67
+                else:
+                    self.args.track_thresh = ori_thresh
+
+                if video_name == 'MOT20-06' or video_name == 'MOT20-08':
+                    self.args.track_thresh = 0.3
+                else:
+                    self.args.track_thresh = ori_thresh
+
+                if video_name not in video_names:
+                    video_names[video_id] = video_name
+                if frame_id == 1:
+                    tracker = BYTETracker(self.args)
+                    if len(results) != 0:
+                        result_filename = os.path.join(result_folder, '{}.txt'.format(video_names[video_id - 1]))
+                        write_results(result_filename, results)
+                        results = []
+
+                imgs = imgs.type(tensor_type)
+
+                # skip the last iters since the batch size might not be enough for batch inference
+                is_time_record = cur_iter < len(self.dataloader) - 1
+                if is_time_record:
+                    start = time.time()
+
+                outputs = model(imgs)
+                if decoder is not None:
+                    outputs = decoder(outputs, dtype=outputs.type())
+
+                outputs = postprocess(outputs, self.num_classes, self.confthre, self.nmsthre)
+
+                if is_time_record:
+                    infer_end = time_synchronized()
+                    inference_time += infer_end - start
+
+            output_results = self.convert_to_coco_format(outputs, info_imgs, ids)
+            data_list.extend(output_results)
+
+            # run tracking
+            if outputs[0] is not None:
+                online_targets = tracker.update(outputs[0], info_imgs, self.img_size)
+                online_tlwhs = []
+                online_ids = []
+                online_scores = []
+                for t in online_targets:
+                    tlwh = t.tlwh
+                    tid = t.track_id
+                    vertical = tlwh[2] / tlwh[3] > 1.6
+                    if tlwh[2] * tlwh[3] > self.args.min_box_area and not vertical:
+                        online_tlwhs.append(tlwh)
+                        online_ids.append(tid)
+                        online_scores.append(t.score)
+                # save results
+                results.append((frame_id, online_tlwhs, online_ids, online_scores))
+
+            if is_time_record:
+                track_end = time_synchronized()
+                track_time += track_end - infer_end
+
+            if cur_iter == len(self.dataloader) - 1:
+                result_filename = os.path.join(result_folder, '{}.txt'.format(video_names[video_id]))
+                write_results(result_filename, results)
+
+        statistics = torch.cuda.FloatTensor([inference_time, track_time, n_samples])
+        if distributed:
+            data_list = gather(data_list, dst=0)
+            data_list = list(itertools.chain(*data_list))
+            torch.distributed.reduce(statistics, dst=0)
+
+        eval_results = self.evaluate_prediction(data_list, statistics)
+        synchronize()
+        return eval_results
+
+    def evaluate_sort(
+        self,
+        model,
+        distributed=False,
+        half=False,
+        trt_file=None,
+        decoder=None,
+        test_size=None,
+        result_folder=None
+    ):
+        """
+        COCO average precision (AP) Evaluation. Iterate inference on the test dataset
+        and the results are evaluated by COCO API.
+
+        NOTE: This function will change training mode to False, please save states if needed.
+
+        Args:
+            model : model to evaluate.
+
+        Returns:
+            ap50_95 (float) : COCO AP of IoU=50:95
+            ap50 (float) : COCO AP of IoU=50
+            summary (str): summary info of evaluation.
+        """
+        # TODO half to amp_test
+        tensor_type = torch.cuda.HalfTensor if half else torch.cuda.FloatTensor
+        model = model.eval()
+        if half:
+            model = model.half()
+        ids = []
+        data_list = []
+        results = []
+        video_names = defaultdict()
+        progress_bar = tqdm if is_main_process() else iter
+
+        inference_time = 0
+        track_time = 0
+        n_samples = len(self.dataloader) - 1
+
+        if trt_file is not None:
+            from torch2trt import TRTModule
+
+            model_trt = TRTModule()
+            model_trt.load_state_dict(torch.load(trt_file))
+
+            x = torch.ones(1, 3, test_size[0], test_size[1]).cuda()
+            model(x)
+            model = model_trt
+
+        tracker = Sort(self.args.track_thresh)
+
+        for cur_iter, (imgs, _, info_imgs, ids) in enumerate(
+            progress_bar(self.dataloader)
+        ):
+            with torch.no_grad():
+                # init tracker
+                frame_id = info_imgs[2].item()
+                video_id = info_imgs[3].item()
+                img_file_name = info_imgs[4]
+                video_name = img_file_name[0].split('/')[0]
+
+                if video_name not in video_names:
+                    video_names[video_id] = video_name
+                if frame_id == 1:
+                    tracker = Sort(self.args.track_thresh)
+                    if len(results) != 0:
+                        result_filename = os.path.join(result_folder, '{}.txt'.format(video_names[video_id - 1]))
+                        write_results_no_score(result_filename, results)
+                        results = []
+
+                imgs = imgs.type(tensor_type)
+
+                # skip the last iters since the batch size might not be enough for batch inference
+                is_time_record = cur_iter < len(self.dataloader) - 1
+                if is_time_record:
+                    start = time.time()
+
+                outputs = model(imgs)
+                if decoder is not None:
+                    outputs = decoder(outputs, dtype=outputs.type())
+
+                outputs = postprocess(outputs, self.num_classes, self.confthre, self.nmsthre)
+
+                if is_time_record:
+                    infer_end = time_synchronized()
+                    inference_time += infer_end - start
+
+            output_results = self.convert_to_coco_format(outputs, info_imgs, ids)
+            data_list.extend(output_results)
+
+            # run tracking
+            online_targets = tracker.update(outputs[0], info_imgs, self.img_size)
+            online_tlwhs = []
+            online_ids = []
+            for t in online_targets:
+                tlwh = [t[0], t[1], t[2] - t[0], t[3] - t[1]]
+                tid = t[4]
+                vertical = tlwh[2] / tlwh[3] > 1.6
+                if tlwh[2] * tlwh[3] > self.args.min_box_area and not vertical:
+                    online_tlwhs.append(tlwh)
+                    online_ids.append(tid)
+            # save results
+            results.append((frame_id, online_tlwhs, online_ids))
+
+            if is_time_record:
+                track_end = time_synchronized()
+                track_time += track_end - infer_end
+
+            if cur_iter == len(self.dataloader) - 1:
+                result_filename = os.path.join(result_folder, '{}.txt'.format(video_names[video_id]))
+                write_results_no_score(result_filename, results)
+
+        statistics = torch.cuda.FloatTensor([inference_time, track_time, n_samples])
+        if distributed:
+            data_list = gather(data_list, dst=0)
+            data_list = list(itertools.chain(*data_list))
+            torch.distributed.reduce(statistics, dst=0)
+
+        eval_results = self.evaluate_prediction(data_list, statistics)
+        synchronize()
+        return eval_results
+
+    def evaluate_deepsort(
+        self,
+        model,
+        distributed=False,
+        half=False,
+        trt_file=None,
+        decoder=None,
+        test_size=None,
+        result_folder=None,
+        model_folder=None
+    ):
+        """
+        COCO average precision (AP) Evaluation. Iterate inference on the test dataset
+        and the results are evaluated by COCO API.
+
+        NOTE: This function will change training mode to False, please save states if needed.
+
+        Args:
+            model : model to evaluate.
+
+        Returns:
+            ap50_95 (float) : COCO AP of IoU=50:95
+            ap50 (float) : COCO AP of IoU=50
+            summary (str): summary info of evaluation.
+        """
+        # TODO half to amp_test
+        tensor_type = torch.cuda.HalfTensor if half else torch.cuda.FloatTensor
+        model = model.eval()
+        if half:
+            model = model.half()
+        ids = []
+        data_list = []
+        results = []
+        video_names = defaultdict()
+        progress_bar = tqdm if is_main_process() else iter
+
+        inference_time = 0
+        track_time = 0
+        n_samples = len(self.dataloader) - 1
+
+        if trt_file is not None:
+            from torch2trt import TRTModule
+
+            model_trt = TRTModule()
+            model_trt.load_state_dict(torch.load(trt_file))
+
+            x = torch.ones(1, 3, test_size[0], test_size[1]).cuda()
+            model(x)
+            model = model_trt
+
+        tracker = DeepSort(model_folder, min_confidence=self.args.track_thresh)
+
+        for cur_iter, (imgs, _, info_imgs, ids) in enumerate(
+            progress_bar(self.dataloader)
+        ):
+            with torch.no_grad():
+                # init tracker
+                frame_id = info_imgs[2].item()
+                video_id = info_imgs[3].item()
+                img_file_name = info_imgs[4]
+                video_name = img_file_name[0].split('/')[0]
+
+                if video_name not in video_names:
+                    video_names[video_id] = video_name
+                if frame_id == 1:
+                    tracker = DeepSort(model_folder, min_confidence=self.args.track_thresh)
+                    if len(results) != 0:
+                        result_filename = os.path.join(result_folder, '{}.txt'.format(video_names[video_id - 1]))
+                        write_results_no_score(result_filename, results)
+                        results = []
+
+                imgs = imgs.type(tensor_type)
+
+                # skip the last iters since the batch size might not be enough for batch inference
+                is_time_record = cur_iter < len(self.dataloader) - 1
+                if is_time_record:
+                    start = time.time()
+
+                outputs = model(imgs)
+                if decoder is not None:
+                    outputs = decoder(outputs, dtype=outputs.type())
+
+                outputs = postprocess(outputs, self.num_classes, self.confthre, self.nmsthre)
+
+                if is_time_record:
+                    infer_end = time_synchronized()
+                    inference_time += infer_end - start
+
+            output_results = self.convert_to_coco_format(outputs, info_imgs, ids)
+            data_list.extend(output_results)
+
+            # run tracking
+            online_targets = tracker.update(outputs[0], info_imgs, self.img_size, img_file_name[0])
+            online_tlwhs = []
+            online_ids = []
+            for t in online_targets:
+                tlwh = [t[0], t[1], t[2] - t[0], t[3] - t[1]]
+                tid = t[4]
+                vertical = tlwh[2] / tlwh[3] > 1.6
+                if tlwh[2] * tlwh[3] > self.args.min_box_area and not vertical:
+                    online_tlwhs.append(tlwh)
+                    online_ids.append(tid)
+            # save results
+            results.append((frame_id, online_tlwhs, online_ids))
+
+            if is_time_record:
+                track_end = time_synchronized()
+                track_time += track_end - infer_end
+
+            if cur_iter == len(self.dataloader) - 1:
+                result_filename = os.path.join(result_folder, '{}.txt'.format(video_names[video_id]))
+                write_results_no_score(result_filename, results)
+
+        statistics = torch.cuda.FloatTensor([inference_time, track_time, n_samples])
+        if distributed:
+            data_list = gather(data_list, dst=0)
+            data_list = list(itertools.chain(*data_list))
+            torch.distributed.reduce(statistics, dst=0)
+
+        eval_results = self.evaluate_prediction(data_list, statistics)
+        synchronize()
+        return eval_results
+
+    def evaluate_motdt(
+        self,
+        model,
+        distributed=False,
+        half=False,
+        trt_file=None,
+        decoder=None,
+        test_size=None,
+        result_folder=None,
+        model_folder=None
+    ):
+        """
+        COCO average precision (AP) Evaluation. Iterate inference on the test dataset
+        and the results are evaluated by COCO API.
+
+        NOTE: This function will change training mode to False, please save states if needed.
+
+        Args:
+            model : model to evaluate.
+
+        Returns:
+            ap50_95 (float) : COCO AP of IoU=50:95
+            ap50 (float) : COCO AP of IoU=50
+            summary (str): summary info of evaluation.
+        """
+        # TODO half to amp_test
+        tensor_type = torch.cuda.HalfTensor if half else torch.cuda.FloatTensor
+        model = model.eval()
+        if half:
+            model = model.half()
+        ids = []
+        data_list = []
+        results = []
+        video_names = defaultdict()
+        progress_bar = tqdm if is_main_process() else iter
+
+        inference_time = 0
+        track_time = 0
+        n_samples = len(self.dataloader) - 1
+
+        if trt_file is not None:
+            from torch2trt import TRTModule
+
+            model_trt = TRTModule()
+            model_trt.load_state_dict(torch.load(trt_file))
+
+            x = torch.ones(1, 3, test_size[0], test_size[1]).cuda()
+            model(x)
+            model = model_trt
+
+        tracker = OnlineTracker(model_folder, min_cls_score=self.args.track_thresh)
+        for cur_iter, (imgs, _, info_imgs, ids) in enumerate(
+            progress_bar(self.dataloader)
+        ):
+            with torch.no_grad():
+                # init tracker
+                frame_id = info_imgs[2].item()
+                video_id = info_imgs[3].item()
+                img_file_name = info_imgs[4]
+                video_name = img_file_name[0].split('/')[0]
+
+                if video_name not in video_names:
+                    video_names[video_id] = video_name
+                if frame_id == 1:
+                    tracker = OnlineTracker(model_folder, min_cls_score=self.args.track_thresh)
+                    if len(results) != 0:
+                        result_filename = os.path.join(result_folder, '{}.txt'.format(video_names[video_id - 1]))
+                        write_results(result_filename, results)
+                        results = []
+
+                imgs = imgs.type(tensor_type)
+
+                # skip the last iters since the batch size might not be enough for batch inference
+                is_time_record = cur_iter < len(self.dataloader) - 1
+                if is_time_record:
+                    start = time.time()
+
+                outputs = model(imgs)
+                if decoder is not None:
+                    outputs = decoder(outputs, dtype=outputs.type())
+
+                outputs = postprocess(outputs, self.num_classes, self.confthre, self.nmsthre)
+
+                if is_time_record:
+                    infer_end = time_synchronized()
+                    inference_time += infer_end - start
+
+            output_results = self.convert_to_coco_format(outputs, info_imgs, ids)
+            data_list.extend(output_results)
+
+            # run tracking
+            online_targets = tracker.update(outputs[0], info_imgs, self.img_size, img_file_name[0])
+            online_tlwhs = []
+            online_ids = []
+            online_scores = []
+            for t in online_targets:
+                tlwh = t.tlwh
+                tid = t.track_id
+                vertical = tlwh[2] / tlwh[3] > 1.6
+                if tlwh[2] * tlwh[3] > self.args.min_box_area and not vertical:
+                    online_tlwhs.append(tlwh)
+                    online_ids.append(tid)
+                    online_scores.append(t.score)
+            # save results
+            results.append((frame_id, online_tlwhs, online_ids, online_scores))
+
+            if is_time_record:
+                track_end = time_synchronized()
+                track_time += track_end - infer_end
+
+            if cur_iter == len(self.dataloader) - 1:
+                result_filename = os.path.join(result_folder, '{}.txt'.format(video_names[video_id]))
+                write_results(result_filename, results)
+
+        statistics = torch.cuda.FloatTensor([inference_time, track_time, n_samples])
+        if distributed:
+            data_list = gather(data_list, dst=0)
+            data_list = list(itertools.chain(*data_list))
+            torch.distributed.reduce(statistics, dst=0)
+
+        eval_results = self.evaluate_prediction(data_list, statistics)
+        synchronize()
+        return eval_results
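The per-sequence result files flushed by the four methods above use the MOTChallenge submission layout from write_results at the top of this file; one emitted line (invented values):

    save_format = '{frame},{id},{x1},{y1},{w},{h},{s},-1,-1,-1\n'
    line = save_format.format(frame=1, id=7, x1=102.5, y1=48.3, w=55.1, h=131.9, s=0.87)
    # -> "1,7,102.5,48.3,55.1,131.9,0.87,-1,-1,-1"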
+
+    def convert_to_coco_format(self, outputs, info_imgs, ids):
+        data_list = []
+        for (output, img_h, img_w, img_id) in zip(
+            outputs, info_imgs[0], info_imgs[1], ids
+        ):
+            if output is None:
+                continue
+            output = output.cpu()
+
+            bboxes = output[:, 0:4]
+
+            # preprocessing: resize
+            scale = min(
+                self.img_size[0] / float(img_h), self.img_size[1] / float(img_w)
+            )
+            bboxes /= scale
+            bboxes = xyxy2xywh(bboxes)
+
+            cls = output[:, 6]
+            scores = output[:, 4] * output[:, 5]
+            for ind in range(bboxes.shape[0]):
+                label = self.dataloader.dataset.class_ids[int(cls[ind])]
+                pred_data = {
+                    "image_id": int(img_id),
+                    "category_id": label,
+                    "bbox": bboxes[ind].numpy().tolist(),
+                    "score": scores[ind].numpy().item(),
+                    "segmentation": [],
+                }  # COCO json format
+                data_list.append(pred_data)
+        return data_list
+
+    def evaluate_prediction(self, data_dict, statistics):
+        if not is_main_process():
+            return 0, 0, None
+
+        logger.info("Evaluate in main process...")
+
+        annType = ["segm", "bbox", "keypoints"]
+
+        inference_time = statistics[0].item()
+        track_time = statistics[1].item()
+        n_samples = statistics[2].item()
+
+        a_infer_time = 1000 * inference_time / (n_samples * self.dataloader.batch_size)
+        a_track_time = 1000 * track_time / (n_samples * self.dataloader.batch_size)
+
+        time_info = ", ".join(
+            [
+                "Average {} time: {:.2f} ms".format(k, v)
+                for k, v in zip(
+                    ["forward", "track", "inference"],
+                    [a_infer_time, a_track_time, (a_infer_time + a_track_time)],
+                )
+            ]
+        )
+
+        info = time_info + "\n"
+
+        # Evaluate the Dt (detection) json comparing with the ground truth
+        if len(data_dict) > 0:
+            cocoGt = self.dataloader.dataset.coco
+            # TODO: since pycocotools can't process dict in py36, write data to json file.
+            _, tmp = tempfile.mkstemp()
+            json.dump(data_dict, open(tmp, "w"))
+            cocoDt = cocoGt.loadRes(tmp)
+            '''
+            try:
+                from yolox.layers import COCOeval_opt as COCOeval
+            except ImportError:
+                from pycocotools import cocoeval as COCOeval
+                logger.warning("Use standard COCOeval.")
+            '''
+            #from pycocotools.cocoeval import COCOeval
+            from yolox.layers import COCOeval_opt as COCOeval
+            cocoEval = COCOeval(cocoGt, cocoDt, annType[1])
+            cocoEval.evaluate()
+            cocoEval.accumulate()
+            redirect_string = io.StringIO()
+            with contextlib.redirect_stdout(redirect_string):
+                cocoEval.summarize()
+            info += redirect_string.getvalue()
+            return cocoEval.stats[0], cocoEval.stats[1], info
+        else:
+            return 0, 0, info
diff --git a/tracking/docker-build-context/byte_track/yolox/exp/__init__.py b/tracking/docker-build-context/byte_track/yolox/exp/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..951195cb905195145ac10a6b9aefd84f9d9c3b03
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/exp/__init__.py
@@ -0,0 +1,7 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
+
+from .base_exp import BaseExp
+from .build import get_exp
+from .yolox_base import Exp
diff --git a/tracking/docker-build-context/byte_track/yolox/exp/base_exp.py b/tracking/docker-build-context/byte_track/yolox/exp/base_exp.py
new file mode 100644
index 0000000000000000000000000000000000000000..c4aed89f55ad742cf32d1ba6c7bd7363609a222e
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/exp/base_exp.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
+
+import torch
+from torch.nn import Module
+
+from yolox.utils import LRScheduler
+
+import ast
+import pprint
+from abc import ABCMeta, abstractmethod
+from tabulate import tabulate
+from typing import Dict
+
+
+class BaseExp(metaclass=ABCMeta):
+    """Basic class for any experiment."""
+
+    def __init__(self):
+        self.seed = None
+        self.output_dir = "./YOLOX_outputs"
+        self.print_interval = 100
+        self.eval_interval = 10
+
+    @abstractmethod
+    def get_model(self) -> Module:
+        pass
+
+    @abstractmethod
+    def get_data_loader(
+        self, batch_size: int, is_distributed: bool
+    ) -> Dict[str, torch.utils.data.DataLoader]:
+        pass
+
+    @abstractmethod
+    def get_optimizer(self, batch_size: int) -> torch.optim.Optimizer:
+        pass
+
+    @abstractmethod
+    def get_lr_scheduler(
+        self, lr: float, iters_per_epoch: int, **kwargs
+    ) -> LRScheduler:
+        pass
+
+    @abstractmethod
+    def get_evaluator(self):
+        pass
+
+    @abstractmethod
+    def eval(self, model, evaluator, weights):
+        pass
+
+    def __repr__(self):
+        table_header = ["keys", "values"]
+        exp_table = [
+            (str(k), pprint.pformat(v))
+            for k, v in vars(self).items()
+            if not k.startswith("_")
+        ]
+        return tabulate(exp_table, headers=table_header, tablefmt="fancy_grid")
+
+    def merge(self, cfg_list):
+        assert len(cfg_list) % 2 == 0
+        for k, v in zip(cfg_list[0::2], cfg_list[1::2]):
+            # only update value with same key
+            if hasattr(self, k):
+                src_value = getattr(self, k)
+                src_type = type(src_value)
+                if src_value is not None and src_type != type(v):
+                    try:
+                        v = src_type(v)
+                    except Exception:
+                        v = ast.literal_eval(v)
+                setattr(self, k, v)
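BaseExp.merge consumes flat KEY VALUE pairs (typically argparse opts) and casts each value to the type of the attribute it overrides. A sketch using the Exp subclass from yolox_base.py below:

    exp = Exp()                                   # any concrete BaseExp subclass
    exp.merge(["num_classes", "1", "test_conf", "0.4"])
    print(exp.num_classes, exp.test_conf)         # 1 0.4 (cast via int()/float())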
diff --git a/tracking/docker-build-context/byte_track/yolox/exp/build.py b/tracking/docker-build-context/byte_track/yolox/exp/build.py
new file mode 100644
index 0000000000000000000000000000000000000000..411b09cdc6b65dd4f9e4bcec1a54dc00bdfe347b
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/exp/build.py
@@ -0,0 +1,53 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
+
+import importlib
+import os
+import sys
+
+
+def get_exp_by_file(exp_file):
+    try:
+        sys.path.append(os.path.dirname(exp_file))
+        current_exp = importlib.import_module(os.path.basename(exp_file).split(".")[0])
+        exp = current_exp.Exp()
+    except Exception:
+        raise ImportError("{} doesn't contain a class named 'Exp'".format(exp_file))
+    return exp
+
+
+def get_exp_by_name(exp_name):
+    import yolox
+
+    yolox_path = os.path.dirname(os.path.dirname(yolox.__file__))
+    filedict = {
+        "yolox-s": "yolox_s.py",
+        "yolox-m": "yolox_m.py",
+        "yolox-l": "yolox_l.py",
+        "yolox-x": "yolox_x.py",
+        "yolox-tiny": "yolox_tiny.py",
+        "yolox-nano": "nano.py",
+        "yolov3": "yolov3.py",
+    }
+    filename = filedict[exp_name]
+    exp_path = os.path.join(yolox_path, "exps", "default", filename)
+    return get_exp_by_file(exp_path)
+
+
+def get_exp(exp_file, exp_name):
+    """
+    get Exp object by file or name. If exp_file and exp_name
+    are both provided, get Exp by exp_file.
+
+    Args:
+        exp_file (str): file path of experiment.
+        exp_name (str): name of experiment, e.g. "yolox-s".
+    """
+    assert (
+        exp_file is not None or exp_name is not None
+    ), "please provide exp file or exp name."
+    if exp_file is not None:
+        return get_exp_by_file(exp_file)
+    else:
+        return get_exp_by_name(exp_name)
diff --git a/tracking/docker-build-context/byte_track/yolox/exp/yolox_base.py b/tracking/docker-build-context/byte_track/yolox/exp/yolox_base.py
new file mode 100644
index 0000000000000000000000000000000000000000..a1b1b03f6e79a246bea5644b030149b70dab81a6
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/exp/yolox_base.py
@@ -0,0 +1,248 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
+
+import torch
+import torch.distributed as dist
+import torch.nn as nn
+
+import os
+import random
+
+from .base_exp import BaseExp
+
+
+class Exp(BaseExp):
+    def __init__(self):
+        super().__init__()
+
+        # ---------------- model config ---------------- #
+        self.num_classes = 80
+        self.depth = 1.00
+        self.width = 1.00
+
+        # ---------------- dataloader config ---------------- #
+        # set worker to 4 for shorter dataloader init time
+        self.data_num_workers = 4
+        self.input_size = (640, 640)
+        self.random_size = (14, 26)
+        self.train_ann = "instances_train2017.json"
+        self.val_ann = "instances_val2017.json"
+
+        # --------------- transform config ----------------- #
+        self.degrees = 10.0
+        self.translate = 0.1
+        self.scale = (0.1, 2)
+        self.mscale = (0.8, 1.6)
+        self.shear = 2.0
+        self.perspective = 0.0
+        self.enable_mixup = True
+
+        # -------------- training config --------------------- #
+        self.warmup_epochs = 5
+        self.max_epoch = 300
+        self.warmup_lr = 0
+        self.basic_lr_per_img = 0.01 / 64.0
+        self.scheduler = "yoloxwarmcos"
+        self.no_aug_epochs = 15
+        self.min_lr_ratio = 0.05
+        self.ema = True
+
+        self.weight_decay = 5e-4
+        self.momentum = 0.9
+        self.print_interval = 10
+        self.eval_interval = 10
+        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
+
+        # ----------------- testing config ------------------ #
+        self.test_size = (640, 640)
+        self.test_conf = 0.001
+        self.nmsthre = 0.65
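The learning rate above is expressed per image; get_optimizer below scales it by the actual batch size, e.g.:

    basic_lr_per_img = 0.01 / 64.0
    batch_size = 16
    lr = basic_lr_per_img * batch_size    # 0.0025; used after warmup (warmup starts from warmup_lr)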
+
+    def get_model(self):
+        from yolox.models import YOLOPAFPN, YOLOX, YOLOXHead
+
+        def init_yolo(M):
+            for m in M.modules():
+                if isinstance(m, nn.BatchNorm2d):
+                    m.eps = 1e-3
+                    m.momentum = 0.03
+
+        if getattr(self, "model", None) is None:
+            in_channels = [256, 512, 1024]
+            backbone = YOLOPAFPN(self.depth, self.width, in_channels=in_channels)
+            head = YOLOXHead(self.num_classes, self.width, in_channels=in_channels)
+            self.model = YOLOX(backbone, head)
+
+        self.model.apply(init_yolo)
+        self.model.head.initialize_biases(1e-2)
+        return self.model
+
+    def get_data_loader(self, batch_size, is_distributed, no_aug=False):
+        from yolox.data import (
+            COCODataset,
+            DataLoader,
+            InfiniteSampler,
+            MosaicDetection,
+            TrainTransform,
+            YoloBatchSampler
+        )
+
+        dataset = COCODataset(
+            data_dir=None,
+            json_file=self.train_ann,
+            img_size=self.input_size,
+            preproc=TrainTransform(
+                rgb_means=(0.485, 0.456, 0.406),
+                std=(0.229, 0.224, 0.225),
+                max_labels=50,
+            ),
+        )
+
+        dataset = MosaicDetection(
+            dataset,
+            mosaic=not no_aug,
+            img_size=self.input_size,
+            preproc=TrainTransform(
+                rgb_means=(0.485, 0.456, 0.406),
+                std=(0.229, 0.224, 0.225),
+                max_labels=120,
+            ),
+            degrees=self.degrees,
+            translate=self.translate,
+            scale=self.scale,
+            shear=self.shear,
+            perspective=self.perspective,
+            enable_mixup=self.enable_mixup,
+        )
+
+        self.dataset = dataset
+
+        if is_distributed:
+            batch_size = batch_size // dist.get_world_size()
+
+        sampler = InfiniteSampler(len(self.dataset), seed=self.seed if self.seed else 0)
+
+        batch_sampler = YoloBatchSampler(
+            sampler=sampler,
+            batch_size=batch_size,
+            drop_last=False,
+            input_dimension=self.input_size,
+            mosaic=not no_aug,
+        )
+
+        dataloader_kwargs = {"num_workers": self.data_num_workers, "pin_memory": True}
+        dataloader_kwargs["batch_sampler"] = batch_sampler
+        train_loader = DataLoader(self.dataset, **dataloader_kwargs)
+
+        return train_loader
+
+    def random_resize(self, data_loader, epoch, rank, is_distributed):
+        tensor = torch.LongTensor(2).cuda()
+
+        if rank == 0:
+            size_factor = self.input_size[1] * 1.0 / self.input_size[0]
+            size = random.randint(*self.random_size)
+            size = (int(32 * size), 32 * int(size * size_factor))
+            tensor[0] = size[0]
+            tensor[1] = size[1]
+
+        if is_distributed:
+            dist.barrier()
+            dist.broadcast(tensor, 0)
+
+        input_size = data_loader.change_input_dim(
+            multiple=(tensor[0].item(), tensor[1].item()), random_range=None
+        )
+        return input_size
+
+    def get_optimizer(self, batch_size):
+        if "optimizer" not in self.__dict__:
+            if self.warmup_epochs > 0:
+                lr = self.warmup_lr
+            else:
+                lr = self.basic_lr_per_img * batch_size
+
+            pg0, pg1, pg2 = [], [], []  # optimizer parameter groups
+
+            for k, v in self.model.named_modules():
+                if hasattr(v, "bias") and isinstance(v.bias, nn.Parameter):
+                    pg2.append(v.bias)  # biases
+                if isinstance(v, nn.BatchNorm2d) or "bn" in k:
+                    pg0.append(v.weight)  # no decay
+                elif hasattr(v, "weight") and isinstance(v.weight, nn.Parameter):
+                    pg1.append(v.weight)  # apply decay
+
+            optimizer = torch.optim.SGD(
+                pg0, lr=lr, momentum=self.momentum, nesterov=True
+            )
+            optimizer.add_param_group(
+                {"params": pg1, "weight_decay": self.weight_decay}
+            )  # add pg1 with weight_decay
+            optimizer.add_param_group({"params": pg2})
+            self.optimizer = optimizer
+
+        return self.optimizer
+
+    def get_lr_scheduler(self, lr, iters_per_epoch):
+        from yolox.utils import LRScheduler
+
+        scheduler = LRScheduler(
+            self.scheduler,
+            lr,
+            iters_per_epoch,
+            self.max_epoch,
+            warmup_epochs=self.warmup_epochs,
+            warmup_lr_start=self.warmup_lr,
+            no_aug_epochs=self.no_aug_epochs,
+            min_lr_ratio=self.min_lr_ratio,
+        )
+        return scheduler
+
+    def get_eval_loader(self, batch_size, is_distributed, testdev=False):
+        from yolox.data import COCODataset, ValTransform
+
+        valdataset = COCODataset(
+            data_dir=None,
+            json_file=self.val_ann if not testdev else "image_info_test-dev2017.json",
+            name="val2017" if not testdev else "test2017",
+            img_size=self.test_size,
+            preproc=ValTransform(
+                rgb_means=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)
+            ),
+        )
+
+        if is_distributed:
+            batch_size = batch_size // dist.get_world_size()
+            sampler = torch.utils.data.distributed.DistributedSampler(
+                valdataset, shuffle=False
+            )
+        else:
+            sampler = torch.utils.data.SequentialSampler(valdataset)
+
+        dataloader_kwargs = {
+            "num_workers": self.data_num_workers,
+            "pin_memory": True,
+            "sampler": sampler,
+        }
+        dataloader_kwargs["batch_size"] = batch_size
+        val_loader = torch.utils.data.DataLoader(valdataset, **dataloader_kwargs)
+
+        return val_loader
nmsthre=self.nmsthre, + num_classes=self.num_classes, + testdev=testdev, + ) + return evaluator + + def eval(self, model, evaluator, is_distributed, half=False): + return evaluator.evaluate(model, is_distributed, half) diff --git a/tracking/docker-build-context/byte_track/yolox/layers/__init__.py b/tracking/docker-build-context/byte_track/yolox/layers/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..833947c3dfb9b8e50a7b5d478628bb681afb25bb --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/layers/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. + +from .fast_coco_eval_api import COCOeval_opt diff --git a/tracking/docker-build-context/byte_track/yolox/layers/csrc/cocoeval/cocoeval.cpp b/tracking/docker-build-context/byte_track/yolox/layers/csrc/cocoeval/cocoeval.cpp new file mode 100644 index 0000000000000000000000000000000000000000..2e63bc9952918060f55999ec100b283d83616b46 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/layers/csrc/cocoeval/cocoeval.cpp @@ -0,0 +1,502 @@ +// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +#include "cocoeval.h" +#include +#include +#include +#include + +using namespace pybind11::literals; + +namespace COCOeval { + +// Sort detections from highest score to lowest, such that +// detection_instances[detection_sorted_indices[t]] >= +// detection_instances[detection_sorted_indices[t+1]]. Use stable_sort to match +// original COCO API +void SortInstancesByDetectionScore( + const std::vector& detection_instances, + std::vector* detection_sorted_indices) { + detection_sorted_indices->resize(detection_instances.size()); + std::iota( + detection_sorted_indices->begin(), detection_sorted_indices->end(), 0); + std::stable_sort( + detection_sorted_indices->begin(), + detection_sorted_indices->end(), + [&detection_instances](size_t j1, size_t j2) { + return detection_instances[j1].score > detection_instances[j2].score; + }); +} + +// Partition the ground truth objects based on whether or not to ignore them +// based on area +void SortInstancesByIgnore( + const std::array& area_range, + const std::vector& ground_truth_instances, + std::vector* ground_truth_sorted_indices, + std::vector* ignores) { + ignores->clear(); + ignores->reserve(ground_truth_instances.size()); + for (auto o : ground_truth_instances) { + ignores->push_back( + o.ignore || o.area < area_range[0] || o.area > area_range[1]); + } + + ground_truth_sorted_indices->resize(ground_truth_instances.size()); + std::iota( + ground_truth_sorted_indices->begin(), + ground_truth_sorted_indices->end(), + 0); + std::stable_sort( + ground_truth_sorted_indices->begin(), + ground_truth_sorted_indices->end(), + [&ignores](size_t j1, size_t j2) { + return (int)(*ignores)[j1] < (int)(*ignores)[j2]; + }); +} + +// For each IOU threshold, greedily match each detected instance to a ground +// truth instance (if possible) and store the results +void MatchDetectionsToGroundTruth( + const std::vector& detection_instances, + const std::vector& detection_sorted_indices, + const std::vector& ground_truth_instances, + const std::vector& ground_truth_sorted_indices, + const std::vector& ignores, + const std::vector>& ious, + const std::vector& iou_thresholds, + const std::array& area_range, + ImageEvaluation* results) { + // Initialize memory to store return data matches and ignore + const int num_iou_thresholds = iou_thresholds.size(); + const int 
num_ground_truth = ground_truth_sorted_indices.size(); + const int num_detections = detection_sorted_indices.size(); + std::vector ground_truth_matches( + num_iou_thresholds * num_ground_truth, 0); + std::vector& detection_matches = results->detection_matches; + std::vector& detection_ignores = results->detection_ignores; + std::vector& ground_truth_ignores = results->ground_truth_ignores; + detection_matches.resize(num_iou_thresholds * num_detections, 0); + detection_ignores.resize(num_iou_thresholds * num_detections, false); + ground_truth_ignores.resize(num_ground_truth); + for (auto g = 0; g < num_ground_truth; ++g) { + ground_truth_ignores[g] = ignores[ground_truth_sorted_indices[g]]; + } + + for (auto t = 0; t < num_iou_thresholds; ++t) { + for (auto d = 0; d < num_detections; ++d) { + // information about best match so far (match=-1 -> unmatched) + double best_iou = std::min(iou_thresholds[t], 1 - 1e-10); + int match = -1; + for (auto g = 0; g < num_ground_truth; ++g) { + // if this ground truth instance is already matched and not a + // crowd, it cannot be matched to another detection + if (ground_truth_matches[t * num_ground_truth + g] > 0 && + !ground_truth_instances[ground_truth_sorted_indices[g]].is_crowd) { + continue; + } + + // if detected instance matched to a regular ground truth + // instance, we can break on the first ground truth instance + // tagged as ignore (because they are sorted by the ignore tag) + if (match >= 0 && !ground_truth_ignores[match] && + ground_truth_ignores[g]) { + break; + } + + // if IOU overlap is the best so far, store the match appropriately + if (ious[d][ground_truth_sorted_indices[g]] >= best_iou) { + best_iou = ious[d][ground_truth_sorted_indices[g]]; + match = g; + } + } + // if match was made, store id of match for both detection and + // ground truth + if (match >= 0) { + detection_ignores[t * num_detections + d] = ground_truth_ignores[match]; + detection_matches[t * num_detections + d] = + ground_truth_instances[ground_truth_sorted_indices[match]].id; + ground_truth_matches[t * num_ground_truth + match] = + detection_instances[detection_sorted_indices[d]].id; + } + + // set unmatched detections outside of area range to ignore + const InstanceAnnotation& detection = + detection_instances[detection_sorted_indices[d]]; + detection_ignores[t * num_detections + d] = + detection_ignores[t * num_detections + d] || + (detection_matches[t * num_detections + d] == 0 && + (detection.area < area_range[0] || detection.area > area_range[1])); + } + } + + // store detection score results + results->detection_scores.resize(detection_sorted_indices.size()); + for (size_t d = 0; d < detection_sorted_indices.size(); ++d) { + results->detection_scores[d] = + detection_instances[detection_sorted_indices[d]].score; + } +} + +std::vector EvaluateImages( + const std::vector>& area_ranges, + int max_detections, + const std::vector& iou_thresholds, + const ImageCategoryInstances>& image_category_ious, + const ImageCategoryInstances& + image_category_ground_truth_instances, + const ImageCategoryInstances& + image_category_detection_instances) { + const int num_area_ranges = area_ranges.size(); + const int num_images = image_category_ground_truth_instances.size(); + const int num_categories = + image_category_ious.size() > 0 ? 
image_category_ious[0].size() : 0; + std::vector detection_sorted_indices; + std::vector ground_truth_sorted_indices; + std::vector ignores; + std::vector results_all( + num_images * num_area_ranges * num_categories); + + // Store results for each image, category, and area range combination. Results + // for each IOU threshold are packed into the same ImageEvaluation object + for (auto i = 0; i < num_images; ++i) { + for (auto c = 0; c < num_categories; ++c) { + const std::vector& ground_truth_instances = + image_category_ground_truth_instances[i][c]; + const std::vector& detection_instances = + image_category_detection_instances[i][c]; + + SortInstancesByDetectionScore( + detection_instances, &detection_sorted_indices); + if ((int)detection_sorted_indices.size() > max_detections) { + detection_sorted_indices.resize(max_detections); + } + + for (size_t a = 0; a < area_ranges.size(); ++a) { + SortInstancesByIgnore( + area_ranges[a], + ground_truth_instances, + &ground_truth_sorted_indices, + &ignores); + + MatchDetectionsToGroundTruth( + detection_instances, + detection_sorted_indices, + ground_truth_instances, + ground_truth_sorted_indices, + ignores, + image_category_ious[i][c], + iou_thresholds, + area_ranges[a], + &results_all + [c * num_area_ranges * num_images + a * num_images + i]); + } + } + } + + return results_all; +} + +// Convert a python list to a vector +template +std::vector list_to_vec(const py::list& l) { + std::vector v(py::len(l)); + for (int i = 0; i < (int)py::len(l); ++i) { + v[i] = l[i].cast(); + } + return v; +} + +// Helper function to Accumulate() +// Considers the evaluation results applicable to a particular category, area +// range, and max_detections parameter setting, which begin at +// evaluations[evaluation_index]. Extracts a sorted list of length n of all +// applicable detection instances concatenated across all images in the dataset, +// which are represented by the outputs evaluation_indices, detection_scores, +// image_detection_indices, and detection_sorted_indices--all of which are +// length n. evaluation_indices[i] stores the applicable index into +// evaluations[] for instance i, which has detection score detection_score[i], +// and is the image_detection_indices[i]'th of the list of detections +// for the image containing i. 
detection_sorted_indices[] defines a sorted +// permutation of the 3 other outputs +int BuildSortedDetectionList( + const std::vector& evaluations, + const int64_t evaluation_index, + const int64_t num_images, + const int max_detections, + std::vector* evaluation_indices, + std::vector* detection_scores, + std::vector* detection_sorted_indices, + std::vector* image_detection_indices) { + assert(evaluations.size() >= evaluation_index + num_images); + + // Extract a list of object instances of the applicable category, area + // range, and max detections requirements such that they can be sorted + image_detection_indices->clear(); + evaluation_indices->clear(); + detection_scores->clear(); + image_detection_indices->reserve(num_images * max_detections); + evaluation_indices->reserve(num_images * max_detections); + detection_scores->reserve(num_images * max_detections); + int num_valid_ground_truth = 0; + for (auto i = 0; i < num_images; ++i) { + const ImageEvaluation& evaluation = evaluations[evaluation_index + i]; + + for (int d = 0; + d < (int)evaluation.detection_scores.size() && d < max_detections; + ++d) { // detected instances + evaluation_indices->push_back(evaluation_index + i); + image_detection_indices->push_back(d); + detection_scores->push_back(evaluation.detection_scores[d]); + } + for (auto ground_truth_ignore : evaluation.ground_truth_ignores) { + if (!ground_truth_ignore) { + ++num_valid_ground_truth; + } + } + } + + // Sort detections by decreasing score, using stable sort to match + // python implementation + detection_sorted_indices->resize(detection_scores->size()); + std::iota( + detection_sorted_indices->begin(), detection_sorted_indices->end(), 0); + std::stable_sort( + detection_sorted_indices->begin(), + detection_sorted_indices->end(), + [&detection_scores](size_t j1, size_t j2) { + return (*detection_scores)[j1] > (*detection_scores)[j2]; + }); + + return num_valid_ground_truth; +} + +// Helper function to Accumulate() +// Compute a precision recall curve given a sorted list of detected instances +// encoded in evaluations, evaluation_indices, detection_scores, +// detection_sorted_indices, image_detection_indices (see +// BuildSortedDetectionList()). Using vectors precisions and recalls +// and temporary storage, output the results into precisions_out, recalls_out, +// and scores_out, which are large buffers containing many precion/recall curves +// for all possible parameter settings, with precisions_out_index and +// recalls_out_index defining the applicable indices to store results. 
+void ComputePrecisionRecallCurve( + const int64_t precisions_out_index, + const int64_t precisions_out_stride, + const int64_t recalls_out_index, + const std::vector& recall_thresholds, + const int iou_threshold_index, + const int num_iou_thresholds, + const int num_valid_ground_truth, + const std::vector& evaluations, + const std::vector& evaluation_indices, + const std::vector& detection_scores, + const std::vector& detection_sorted_indices, + const std::vector& image_detection_indices, + std::vector* precisions, + std::vector* recalls, + std::vector* precisions_out, + std::vector* scores_out, + std::vector* recalls_out) { + assert(recalls_out->size() > recalls_out_index); + + // Compute precision/recall for each instance in the sorted list of detections + int64_t true_positives_sum = 0, false_positives_sum = 0; + precisions->clear(); + recalls->clear(); + precisions->reserve(detection_sorted_indices.size()); + recalls->reserve(detection_sorted_indices.size()); + assert(!evaluations.empty() || detection_sorted_indices.empty()); + for (auto detection_sorted_index : detection_sorted_indices) { + const ImageEvaluation& evaluation = + evaluations[evaluation_indices[detection_sorted_index]]; + const auto num_detections = + evaluation.detection_matches.size() / num_iou_thresholds; + const auto detection_index = iou_threshold_index * num_detections + + image_detection_indices[detection_sorted_index]; + assert(evaluation.detection_matches.size() > detection_index); + assert(evaluation.detection_ignores.size() > detection_index); + const int64_t detection_match = + evaluation.detection_matches[detection_index]; + const bool detection_ignores = + evaluation.detection_ignores[detection_index]; + const auto true_positive = detection_match > 0 && !detection_ignores; + const auto false_positive = detection_match == 0 && !detection_ignores; + if (true_positive) { + ++true_positives_sum; + } + if (false_positive) { + ++false_positives_sum; + } + + const double recall = + static_cast(true_positives_sum) / num_valid_ground_truth; + recalls->push_back(recall); + const int64_t num_valid_detections = + true_positives_sum + false_positives_sum; + const double precision = num_valid_detections > 0 + ? static_cast(true_positives_sum) / num_valid_detections + : 0.0; + precisions->push_back(precision); + } + + (*recalls_out)[recalls_out_index] = !recalls->empty() ? 
recalls->back() : 0;
+
+  for (int64_t i = static_cast<int64_t>(precisions->size()) - 1; i > 0; --i) {
+    if ((*precisions)[i] > (*precisions)[i - 1]) {
+      (*precisions)[i - 1] = (*precisions)[i];
+    }
+  }
+
+  // Sample the per instance precision/recall list at each recall threshold
+  for (size_t r = 0; r < recall_thresholds.size(); ++r) {
+    // first index in recalls >= recall_thresholds[r]
+    std::vector<double>::iterator low = std::lower_bound(
+        recalls->begin(), recalls->end(), recall_thresholds[r]);
+    size_t precisions_index = low - recalls->begin();
+
+    const auto results_ind = precisions_out_index + r * precisions_out_stride;
+    assert(results_ind < precisions_out->size());
+    assert(results_ind < scores_out->size());
+    if (precisions_index < precisions->size()) {
+      (*precisions_out)[results_ind] = (*precisions)[precisions_index];
+      (*scores_out)[results_ind] =
+          detection_scores[detection_sorted_indices[precisions_index]];
+    } else {
+      (*precisions_out)[results_ind] = 0;
+      (*scores_out)[results_ind] = 0;
+    }
+  }
+}
+
+py::dict Accumulate(
+    const py::object& params,
+    const std::vector<ImageEvaluation>& evaluations) {
+  const std::vector<double> recall_thresholds =
+      list_to_vec<double>(params.attr("recThrs"));
+  const std::vector<int> max_detections =
+      list_to_vec<int>(params.attr("maxDets"));
+  const int num_iou_thresholds = py::len(params.attr("iouThrs"));
+  const int num_recall_thresholds = py::len(params.attr("recThrs"));
+  const int num_categories = params.attr("useCats").cast<int>() == 1
+      ? py::len(params.attr("catIds"))
+      : 1;
+  const int num_area_ranges = py::len(params.attr("areaRng"));
+  const int num_max_detections = py::len(params.attr("maxDets"));
+  const int num_images = py::len(params.attr("imgIds"));
+
+  std::vector<double> precisions_out(
+      num_iou_thresholds * num_recall_thresholds * num_categories *
+          num_area_ranges * num_max_detections,
+      -1);
+  std::vector<double> recalls_out(
+      num_iou_thresholds * num_categories * num_area_ranges *
+          num_max_detections,
+      -1);
+  std::vector<double> scores_out(
+      num_iou_thresholds * num_recall_thresholds * num_categories *
+          num_area_ranges * num_max_detections,
+      -1);
+
+  // Consider the list of all detected instances in the entire dataset in one
+  // large list. evaluation_indices, detection_scores,
+  // image_detection_indices, and detection_sorted_indices all have the same
+  // length as this list, such that each entry corresponds to one detected
+  // instance
+  std::vector<uint64_t> evaluation_indices; // indices into evaluations[]
+  std::vector<double> detection_scores; // detection scores of each instance
+  std::vector<uint64_t> detection_sorted_indices; // sorted indices of all
+                                                  // instances in the dataset
+  std::vector<uint64_t>
+      image_detection_indices; // indices into the list of detected instances
+                               // in the same image as each instance
+  std::vector<double> precisions, recalls;
+
+  for (auto c = 0; c < num_categories; ++c) {
+    for (auto a = 0; a < num_area_ranges; ++a) {
+      for (auto m = 0; m < num_max_detections; ++m) {
+        // The COCO PythonAPI assumes evaluations[] (the return value of
+        // COCOeval::EvaluateImages()) is one long list storing results for
+        // each combination of category, area range, and image id, with
+        // categories in the outermost loop and images in the innermost loop.
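+        // For example, writing I = num_images and A = num_area_ranges, the
+        // results for category c and area range a are the I consecutive
+        // entries starting at c * A * I + a * I, which is exactly the
+        // evaluations_index computed below.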
+        const int64_t evaluations_index =
+            c * num_area_ranges * num_images + a * num_images;
+        int num_valid_ground_truth = BuildSortedDetectionList(
+            evaluations,
+            evaluations_index,
+            num_images,
+            max_detections[m],
+            &evaluation_indices,
+            &detection_scores,
+            &detection_sorted_indices,
+            &image_detection_indices);
+
+        if (num_valid_ground_truth == 0) {
+          continue;
+        }
+
+        for (auto t = 0; t < num_iou_thresholds; ++t) {
+          // recalls_out is a flattened vector representing a
+          // num_iou_thresholds X num_categories X num_area_ranges X
+          // num_max_detections matrix
+          const int64_t recalls_out_index =
+              t * num_categories * num_area_ranges * num_max_detections +
+              c * num_area_ranges * num_max_detections +
+              a * num_max_detections + m;
+
+          // precisions_out and scores_out are flattened vectors
+          // representing a num_iou_thresholds X num_recall_thresholds X
+          // num_categories X num_area_ranges X num_max_detections matrix
+          const int64_t precisions_out_stride =
+              num_categories * num_area_ranges * num_max_detections;
+          const int64_t precisions_out_index = t * num_recall_thresholds *
+                  num_categories * num_area_ranges * num_max_detections +
+              c * num_area_ranges * num_max_detections +
+              a * num_max_detections + m;
+
+          ComputePrecisionRecallCurve(
+              precisions_out_index,
+              precisions_out_stride,
+              recalls_out_index,
+              recall_thresholds,
+              t,
+              num_iou_thresholds,
+              num_valid_ground_truth,
+              evaluations,
+              evaluation_indices,
+              detection_scores,
+              detection_sorted_indices,
+              image_detection_indices,
+              &precisions,
+              &recalls,
+              &precisions_out,
+              &scores_out,
+              &recalls_out);
+        }
+      }
+    }
+  }
+
+  time_t rawtime;
+  struct tm local_time;
+  std::array<char, 200> buffer;
+  time(&rawtime);
+#ifdef _WIN32
+  localtime_s(&local_time, &rawtime);
+#else
+  localtime_r(&rawtime, &local_time);
+#endif
+  strftime(buffer.data(), 200, "%Y-%m-%d %H:%M:%S", &local_time);
+  return py::dict(
+      "params"_a = params,
+      "counts"_a = std::vector<int64_t>({num_iou_thresholds,
+                                         num_recall_thresholds,
+                                         num_categories,
+                                         num_area_ranges,
+                                         num_max_detections}),
+      "date"_a = buffer,
+      "precision"_a = precisions_out,
+      "recall"_a = recalls_out,
+      "scores"_a = scores_out);
+}
+
+} // namespace COCOeval
diff --git a/tracking/docker-build-context/byte_track/yolox/layers/csrc/cocoeval/cocoeval.h b/tracking/docker-build-context/byte_track/yolox/layers/csrc/cocoeval/cocoeval.h
new file mode 100644
index 0000000000000000000000000000000000000000..f9def4151102d1c493dc88186384342565798d05
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/layers/csrc/cocoeval/cocoeval.h
@@ -0,0 +1,85 @@
+// Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+#pragma once
+
+#include <pybind11/numpy.h>
+#include <pybind11/pybind11.h>
+#include <pybind11/stl.h>
+#include <pybind11/stl_bind.h>
+#include <vector>
+
+namespace py = pybind11;
+
+namespace COCOeval {
+
+// Annotation data for a single object instance in an image
+struct InstanceAnnotation {
+  InstanceAnnotation(
+      uint64_t id,
+      double score,
+      double area,
+      bool is_crowd,
+      bool ignore)
+      : id{id}, score{score}, area{area}, is_crowd{is_crowd}, ignore{ignore} {}
+  uint64_t id;
+  double score = 0.;
+  double area = 0.;
+  bool is_crowd = false;
+  bool ignore = false;
+};
+
+// Stores intermediate results for evaluating detection results for a single
+// image that has D detected instances and G ground truth instances. This
+// stores matches between detected and ground truth instances
+struct ImageEvaluation {
+  // For each of the D detected instances, the id of the matched ground truth
+  // instance, or 0 if unmatched
+  std::vector<uint64_t> detection_matches;
+
+  // The detection score of each of the D detected instances
+  std::vector<double> detection_scores;
+
+  // Marks whether or not each of G instances was ignored from evaluation
+  // (e.g., because it's outside area_range)
+  std::vector<bool> ground_truth_ignores;
+
+  // Marks whether or not each of D instances was ignored from evaluation
+  // (e.g., because it's outside aRng)
+  std::vector<bool> detection_ignores;
+};
+
+template <class T>
+using ImageCategoryInstances = std::vector<std::vector<std::vector<T>>>;
+
+// C++ implementation of COCO API cocoeval.py::COCOeval.evaluateImg(). For
+// each combination of image, category, area range settings, and IOU
+// thresholds to evaluate, it matches detected instances to ground truth
+// instances and stores the results into a vector of ImageEvaluation results,
+// which will be interpreted by the COCOeval::Accumulate() function to produce
+// precision-recall curves. The parameters of nested vectors have the
+// following semantics:
+//   image_category_ious[i][c][d][g] is the intersection over union of the
+//     d'th detected instance and g'th ground truth instance of
+//     category category_ids[c] in image image_ids[i]
+//   image_category_ground_truth_instances[i][c] is a vector of ground truth
+//     instances in image image_ids[i] of category category_ids[c]
+//   image_category_detection_instances[i][c] is a vector of detected
+//     instances in image image_ids[i] of category category_ids[c]
+std::vector<ImageEvaluation> EvaluateImages(
+    const std::vector<std::array<double, 2>>& area_ranges, // vector of
+                                                           // 2-tuples
+    int max_detections,
+    const std::vector<double>& iou_thresholds,
+    const ImageCategoryInstances<std::vector<double>>& image_category_ious,
+    const ImageCategoryInstances<InstanceAnnotation>&
+        image_category_ground_truth_instances,
+    const ImageCategoryInstances<InstanceAnnotation>&
+        image_category_detection_instances);
+
+// C++ implementation of COCOeval.accumulate(), which generates precision
+// recall curves for each set of category, IOU threshold, detection area
+// range, and max number of detections parameters. It is assumed that the
+// parameter evaluations is the return value of the function
+// COCOeval::EvaluateImages(), which was called with the same parameter
+// settings params
+py::dict Accumulate(
+    const py::object& params,
+    const std::vector<ImageEvaluation>& evaluations);
+
+} // namespace COCOeval
diff --git a/tracking/docker-build-context/byte_track/yolox/layers/csrc/vision.cpp b/tracking/docker-build-context/byte_track/yolox/layers/csrc/vision.cpp
new file mode 100644
index 0000000000000000000000000000000000000000..7663d0faf5c58542624d2f01730618b9aa9d4a25
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/layers/csrc/vision.cpp
@@ -0,0 +1,13 @@
+#include "cocoeval/cocoeval.h"
+
+PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
+  m.def("COCOevalAccumulate", &COCOeval::Accumulate, "COCOeval::Accumulate");
+  m.def(
+      "COCOevalEvaluateImages",
+      &COCOeval::EvaluateImages,
+      "COCOeval::EvaluateImages");
+  pybind11::class_<COCOeval::InstanceAnnotation>(m, "InstanceAnnotation")
+      .def(pybind11::init<uint64_t, double, double, bool, bool>());
+  pybind11::class_<COCOeval::ImageEvaluation>(m, "ImageEvaluation")
+      .def(pybind11::init<>());
+}
diff --git a/tracking/docker-build-context/byte_track/yolox/layers/fast_coco_eval_api.py b/tracking/docker-build-context/byte_track/yolox/layers/fast_coco_eval_api.py
new file mode 100644
index 0000000000000000000000000000000000000000..442c97eed233eb6a1ccf05dd6ea6b94e35ca4c9d
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/layers/fast_coco_eval_api.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# This file comes from
+# https://github.com/facebookresearch/detectron2/blob/master/detectron2/evaluation/fast_eval_api.py
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
+
+import numpy as np
+from pycocotools.cocoeval import COCOeval
+
+# import torch first to make yolox._C work without ImportError of libc10.so
+# in YOLOX, env is already set in __init__.py.
+from yolox import _C
+
+import copy
+import time
+
+
+class COCOeval_opt(COCOeval):
+    """
+    This is a slightly modified version of the original COCO API, where the
+    functions evaluateImg() and accumulate() are implemented in C++ to speed
+    up evaluation
+    """
+
+    def evaluate(self):
+        """
+        Run per image evaluation on given images and store results in
+        self._evalImgs_cpp, a datastructure that isn't readable from Python
+        but is used by a C++ implementation of accumulate(). Unlike the
+        original COCO PythonAPI, we don't populate the datastructure
+        self.evalImgs because this datastructure is a computational
+        bottleneck.
+        :return: None
+        """
+        tic = time.time()
+
+        print("Running per image evaluation...")
+        p = self.params
+        # add backward compatibility if useSegm is specified in params
+        if p.useSegm is not None:
+            p.iouType = "segm" if p.useSegm == 1 else "bbox"
+            print(
+                "useSegm (deprecated) is not None.
Running {} evaluation".format( + p.iouType + ) + ) + print("Evaluate annotation type *{}*".format(p.iouType)) + p.imgIds = list(np.unique(p.imgIds)) + if p.useCats: + p.catIds = list(np.unique(p.catIds)) + p.maxDets = sorted(p.maxDets) + self.params = p + + self._prepare() + + # loop through images, area range, max detection number + catIds = p.catIds if p.useCats else [-1] + + if p.iouType == "segm" or p.iouType == "bbox": + computeIoU = self.computeIoU + elif p.iouType == "keypoints": + computeIoU = self.computeOks + self.ious = { + (imgId, catId): computeIoU(imgId, catId) + for imgId in p.imgIds + for catId in catIds + } + + maxDet = p.maxDets[-1] + + # <<<< Beginning of code differences with original COCO API + def convert_instances_to_cpp(instances, is_det=False): + # Convert annotations for a list of instances in an image to a format that's fast + # to access in C++ + instances_cpp = [] + for instance in instances: + instance_cpp = _C.InstanceAnnotation( + int(instance["id"]), + instance["score"] if is_det else instance.get("score", 0.0), + instance["area"], + bool(instance.get("iscrowd", 0)), + bool(instance.get("ignore", 0)), + ) + instances_cpp.append(instance_cpp) + return instances_cpp + + # Convert GT annotations, detections, and IOUs to a format that's fast to access in C++ + ground_truth_instances = [ + [convert_instances_to_cpp(self._gts[imgId, catId]) for catId in p.catIds] + for imgId in p.imgIds + ] + detected_instances = [ + [ + convert_instances_to_cpp(self._dts[imgId, catId], is_det=True) + for catId in p.catIds + ] + for imgId in p.imgIds + ] + ious = [[self.ious[imgId, catId] for catId in catIds] for imgId in p.imgIds] + + if not p.useCats: + # For each image, flatten per-category lists into a single list + ground_truth_instances = [ + [[o for c in i for o in c]] for i in ground_truth_instances + ] + detected_instances = [ + [[o for c in i for o in c]] for i in detected_instances + ] + + # Call C++ implementation of self.evaluateImgs() + self._evalImgs_cpp = _C.COCOevalEvaluateImages( + p.areaRng, + maxDet, + p.iouThrs, + ious, + ground_truth_instances, + detected_instances, + ) + self._evalImgs = None + + self._paramsEval = copy.deepcopy(self.params) + toc = time.time() + print("COCOeval_opt.evaluate() finished in {:0.2f} seconds.".format(toc - tic)) + # >>>> End of code differences with original COCO API + + def accumulate(self): + """ + Accumulate per image evaluation results and store the result in self.eval. 
Does not + support changing parameter settings from those used by self.evaluate() + """ + print("Accumulating evaluation results...") + tic = time.time() + if not hasattr(self, "_evalImgs_cpp"): + print("Please run evaluate() first") + + self.eval = _C.COCOevalAccumulate(self._paramsEval, self._evalImgs_cpp) + + # recall is num_iou_thresholds X num_categories X num_area_ranges X num_max_detections + self.eval["recall"] = np.array(self.eval["recall"]).reshape( + self.eval["counts"][:1] + self.eval["counts"][2:] + ) + + # precision and scores are num_iou_thresholds X num_recall_thresholds X num_categories X + # num_area_ranges X num_max_detections + self.eval["precision"] = np.array(self.eval["precision"]).reshape( + self.eval["counts"] + ) + self.eval["scores"] = np.array(self.eval["scores"]).reshape(self.eval["counts"]) + toc = time.time() + print( + "COCOeval_opt.accumulate() finished in {:0.2f} seconds.".format(toc - tic) + ) diff --git a/tracking/docker-build-context/byte_track/yolox/models/__init__.py b/tracking/docker-build-context/byte_track/yolox/models/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..c4641a61bf466259c88e0a0b92e4ff55b2abcd61 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/models/__init__.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. + +from .darknet import CSPDarknet, Darknet +from .losses import IOUloss +from .yolo_fpn import YOLOFPN +from .yolo_head import YOLOXHead +from .yolo_pafpn import YOLOPAFPN +from .yolox import YOLOX diff --git a/tracking/docker-build-context/byte_track/yolox/models/darknet.py b/tracking/docker-build-context/byte_track/yolox/models/darknet.py new file mode 100644 index 0000000000000000000000000000000000000000..70c79f86a0f444d5325329b5e8c9b50c864d48f0 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/models/darknet.py @@ -0,0 +1,179 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. + +from torch import nn + +from .network_blocks import BaseConv, CSPLayer, DWConv, Focus, ResLayer, SPPBottleneck + + +class Darknet(nn.Module): + # number of blocks from dark2 to dark5. + depth2blocks = {21: [1, 2, 2, 1], 53: [2, 8, 8, 4]} + + def __init__( + self, + depth, + in_channels=3, + stem_out_channels=32, + out_features=("dark3", "dark4", "dark5"), + ): + """ + Args: + depth (int): depth of darknet used in model, usually use [21, 53] for this param. + in_channels (int): number of input channels, for example, use 3 for RGB image. + stem_out_channels (int): number of output chanels of darknet stem. + It decides channels of darknet layer2 to layer5. + out_features (Tuple[str]): desired output layer name. + """ + super().__init__() + assert out_features, "please provide output features of Darknet" + self.out_features = out_features + self.stem = nn.Sequential( + BaseConv(in_channels, stem_out_channels, ksize=3, stride=1, act="lrelu"), + *self.make_group_layer(stem_out_channels, num_blocks=1, stride=2), + ) + in_channels = stem_out_channels * 2 # 64 + + num_blocks = Darknet.depth2blocks[depth] + # create darknet with `stem_out_channels` and `num_blocks` layers. + # to make model structure more clear, we don't use `for` statement in python. 
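+        # each dark2..dark5 group below halves the spatial resolution
+        # (stride=2) and doubles the channel count, as tracked by the
+        # in_channels comments (64 -> 128 -> 256 -> 512).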
+ self.dark2 = nn.Sequential( + *self.make_group_layer(in_channels, num_blocks[0], stride=2) + ) + in_channels *= 2 # 128 + self.dark3 = nn.Sequential( + *self.make_group_layer(in_channels, num_blocks[1], stride=2) + ) + in_channels *= 2 # 256 + self.dark4 = nn.Sequential( + *self.make_group_layer(in_channels, num_blocks[2], stride=2) + ) + in_channels *= 2 # 512 + + self.dark5 = nn.Sequential( + *self.make_group_layer(in_channels, num_blocks[3], stride=2), + *self.make_spp_block([in_channels, in_channels * 2], in_channels * 2), + ) + + def make_group_layer(self, in_channels: int, num_blocks: int, stride: int = 1): + "starts with conv layer then has `num_blocks` `ResLayer`" + return [ + BaseConv(in_channels, in_channels * 2, ksize=3, stride=stride, act="lrelu"), + *[(ResLayer(in_channels * 2)) for _ in range(num_blocks)], + ] + + def make_spp_block(self, filters_list, in_filters): + m = nn.Sequential( + *[ + BaseConv(in_filters, filters_list[0], 1, stride=1, act="lrelu"), + BaseConv(filters_list[0], filters_list[1], 3, stride=1, act="lrelu"), + SPPBottleneck( + in_channels=filters_list[1], + out_channels=filters_list[0], + activation="lrelu", + ), + BaseConv(filters_list[0], filters_list[1], 3, stride=1, act="lrelu"), + BaseConv(filters_list[1], filters_list[0], 1, stride=1, act="lrelu"), + ] + ) + return m + + def forward(self, x): + outputs = {} + x = self.stem(x) + outputs["stem"] = x + x = self.dark2(x) + outputs["dark2"] = x + x = self.dark3(x) + outputs["dark3"] = x + x = self.dark4(x) + outputs["dark4"] = x + x = self.dark5(x) + outputs["dark5"] = x + return {k: v for k, v in outputs.items() if k in self.out_features} + + +class CSPDarknet(nn.Module): + def __init__( + self, + dep_mul, + wid_mul, + out_features=("dark3", "dark4", "dark5"), + depthwise=False, + act="silu", + ): + super().__init__() + assert out_features, "please provide output features of Darknet" + self.out_features = out_features + Conv = DWConv if depthwise else BaseConv + + base_channels = int(wid_mul * 64) # 64 + base_depth = max(round(dep_mul * 3), 1) # 3 + + # stem + self.stem = Focus(3, base_channels, ksize=3, act=act) + + # dark2 + self.dark2 = nn.Sequential( + Conv(base_channels, base_channels * 2, 3, 2, act=act), + CSPLayer( + base_channels * 2, + base_channels * 2, + n=base_depth, + depthwise=depthwise, + act=act, + ), + ) + + # dark3 + self.dark3 = nn.Sequential( + Conv(base_channels * 2, base_channels * 4, 3, 2, act=act), + CSPLayer( + base_channels * 4, + base_channels * 4, + n=base_depth * 3, + depthwise=depthwise, + act=act, + ), + ) + + # dark4 + self.dark4 = nn.Sequential( + Conv(base_channels * 4, base_channels * 8, 3, 2, act=act), + CSPLayer( + base_channels * 8, + base_channels * 8, + n=base_depth * 3, + depthwise=depthwise, + act=act, + ), + ) + + # dark5 + self.dark5 = nn.Sequential( + Conv(base_channels * 8, base_channels * 16, 3, 2, act=act), + SPPBottleneck(base_channels * 16, base_channels * 16, activation=act), + CSPLayer( + base_channels * 16, + base_channels * 16, + n=base_depth, + shortcut=False, + depthwise=depthwise, + act=act, + ), + ) + + def forward(self, x): + outputs = {} + x = self.stem(x) + outputs["stem"] = x + x = self.dark2(x) + outputs["dark2"] = x + x = self.dark3(x) + outputs["dark3"] = x + x = self.dark4(x) + outputs["dark4"] = x + x = self.dark5(x) + outputs["dark5"] = x + return {k: v for k, v in outputs.items() if k in self.out_features} diff --git a/tracking/docker-build-context/byte_track/yolox/models/losses.py 
b/tracking/docker-build-context/byte_track/yolox/models/losses.py
new file mode 100644
index 0000000000000000000000000000000000000000..a789ebab8ba28a3927d467947c7d918fe4f2478b
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/models/losses.py
@@ -0,0 +1,81 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+
+class IOUloss(nn.Module):
+    def __init__(self, reduction="none", loss_type="iou"):
+        super(IOUloss, self).__init__()
+        self.reduction = reduction
+        self.loss_type = loss_type
+
+    def forward(self, pred, target):
+        assert pred.shape[0] == target.shape[0]
+
+        pred = pred.view(-1, 4)
+        target = target.view(-1, 4)
+        tl = torch.max(
+            (pred[:, :2] - pred[:, 2:] / 2), (target[:, :2] - target[:, 2:] / 2)
+        )
+        br = torch.min(
+            (pred[:, :2] + pred[:, 2:] / 2), (target[:, :2] + target[:, 2:] / 2)
+        )
+
+        area_p = torch.prod(pred[:, 2:], 1)
+        area_g = torch.prod(target[:, 2:], 1)
+
+        en = (tl < br).type(tl.type()).prod(dim=1)
+        area_i = torch.prod(br - tl, 1) * en
+        iou = (area_i) / (area_p + area_g - area_i + 1e-16)
+
+        if self.loss_type == "iou":
+            loss = 1 - iou ** 2
+        elif self.loss_type == "giou":
+            c_tl = torch.min(
+                (pred[:, :2] - pred[:, 2:] / 2), (target[:, :2] - target[:, 2:] / 2)
+            )
+            c_br = torch.max(
+                (pred[:, :2] + pred[:, 2:] / 2), (target[:, :2] + target[:, 2:] / 2)
+            )
+            area_c = torch.prod(c_br - c_tl, 1)
+            giou = iou - (area_c - area_i) / area_c.clamp(1e-16)
+            loss = 1 - giou.clamp(min=-1.0, max=1.0)
+
+        if self.reduction == "mean":
+            loss = loss.mean()
+        elif self.reduction == "sum":
+            loss = loss.sum()
+
+        return loss
+
+
+def sigmoid_focal_loss(inputs, targets, num_boxes, alpha: float = 0.25, gamma: float = 2):
+    """
+    Loss used in RetinaNet for dense detection: https://arxiv.org/abs/1708.02002.
+    Args:
+        inputs: A float tensor of arbitrary shape.
+                The predictions for each example.
+        targets: A float tensor with the same shape as inputs. Stores the binary
+                 classification label for each element in inputs
+                 (0 for the negative class and 1 for the positive class).
+        alpha: (optional) Weighting factor in range (0,1) to balance
+               positive vs negative examples. Default = 0.25; a negative
+               value disables the weighting.
+        gamma: Exponent of the modulating factor (1 - p_t) to
+               balance easy vs hard examples.
+    Returns:
+        Loss tensor
+    """
+    prob = inputs.sigmoid()
+    ce_loss = F.binary_cross_entropy_with_logits(inputs, targets, reduction="none")
+    p_t = prob * targets + (1 - prob) * (1 - targets)
+    loss = ce_loss * ((1 - p_t) ** gamma)
+
+    if alpha >= 0:
+        alpha_t = alpha * targets + (1 - alpha) * (1 - targets)
+        loss = alpha_t * loss
+    #return loss.mean(0).sum() / num_boxes
+    return loss.sum() / num_boxes
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/yolox/models/network_blocks.py b/tracking/docker-build-context/byte_track/yolox/models/network_blocks.py
new file mode 100644
index 0000000000000000000000000000000000000000..4bdb2ca731a07aa9e5e6b68c652467f28fe96079
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/models/network_blocks.py
@@ -0,0 +1,210 @@
+#!/usr/bin/env python
+# -*- encoding: utf-8 -*-
+# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
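+# Building blocks shared by the YOLOX models: conv-bn-activation units
+# (BaseConv, DWConv), residual and CSP blocks, SPP pooling, and the Focus
+# stem that rearranges pixels into channels.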
+ +import torch +import torch.nn as nn + + +class SiLU(nn.Module): + """export-friendly version of nn.SiLU()""" + + @staticmethod + def forward(x): + return x * torch.sigmoid(x) + + +def get_activation(name="silu", inplace=True): + if name == "silu": + module = nn.SiLU(inplace=inplace) + elif name == "relu": + module = nn.ReLU(inplace=inplace) + elif name == "lrelu": + module = nn.LeakyReLU(0.1, inplace=inplace) + else: + raise AttributeError("Unsupported act type: {}".format(name)) + return module + + +class BaseConv(nn.Module): + """A Conv2d -> Batchnorm -> silu/leaky relu block""" + + def __init__( + self, in_channels, out_channels, ksize, stride, groups=1, bias=False, act="silu" + ): + super().__init__() + # same padding + pad = (ksize - 1) // 2 + self.conv = nn.Conv2d( + in_channels, + out_channels, + kernel_size=ksize, + stride=stride, + padding=pad, + groups=groups, + bias=bias, + ) + self.bn = nn.BatchNorm2d(out_channels) + self.act = get_activation(act, inplace=True) + + def forward(self, x): + return self.act(self.bn(self.conv(x))) + + def fuseforward(self, x): + return self.act(self.conv(x)) + + +class DWConv(nn.Module): + """Depthwise Conv + Conv""" + + def __init__(self, in_channels, out_channels, ksize, stride=1, act="silu"): + super().__init__() + self.dconv = BaseConv( + in_channels, + in_channels, + ksize=ksize, + stride=stride, + groups=in_channels, + act=act, + ) + self.pconv = BaseConv( + in_channels, out_channels, ksize=1, stride=1, groups=1, act=act + ) + + def forward(self, x): + x = self.dconv(x) + return self.pconv(x) + + +class Bottleneck(nn.Module): + # Standard bottleneck + def __init__( + self, + in_channels, + out_channels, + shortcut=True, + expansion=0.5, + depthwise=False, + act="silu", + ): + super().__init__() + hidden_channels = int(out_channels * expansion) + Conv = DWConv if depthwise else BaseConv + self.conv1 = BaseConv(in_channels, hidden_channels, 1, stride=1, act=act) + self.conv2 = Conv(hidden_channels, out_channels, 3, stride=1, act=act) + self.use_add = shortcut and in_channels == out_channels + + def forward(self, x): + y = self.conv2(self.conv1(x)) + if self.use_add: + y = y + x + return y + + +class ResLayer(nn.Module): + "Residual layer with `in_channels` inputs." 
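+    # a 1x1 conv halves the channels and a 3x3 conv restores them; the
+    # input is added back as a residual connection (see forward()).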
+ + def __init__(self, in_channels: int): + super().__init__() + mid_channels = in_channels // 2 + self.layer1 = BaseConv( + in_channels, mid_channels, ksize=1, stride=1, act="lrelu" + ) + self.layer2 = BaseConv( + mid_channels, in_channels, ksize=3, stride=1, act="lrelu" + ) + + def forward(self, x): + out = self.layer2(self.layer1(x)) + return x + out + + +class SPPBottleneck(nn.Module): + """Spatial pyramid pooling layer used in YOLOv3-SPP""" + + def __init__( + self, in_channels, out_channels, kernel_sizes=(5, 9, 13), activation="silu" + ): + super().__init__() + hidden_channels = in_channels // 2 + self.conv1 = BaseConv(in_channels, hidden_channels, 1, stride=1, act=activation) + self.m = nn.ModuleList( + [ + nn.MaxPool2d(kernel_size=ks, stride=1, padding=ks // 2) + for ks in kernel_sizes + ] + ) + conv2_channels = hidden_channels * (len(kernel_sizes) + 1) + self.conv2 = BaseConv(conv2_channels, out_channels, 1, stride=1, act=activation) + + def forward(self, x): + x = self.conv1(x) + x = torch.cat([x] + [m(x) for m in self.m], dim=1) + x = self.conv2(x) + return x + + +class CSPLayer(nn.Module): + """C3 in yolov5, CSP Bottleneck with 3 convolutions""" + + def __init__( + self, + in_channels, + out_channels, + n=1, + shortcut=True, + expansion=0.5, + depthwise=False, + act="silu", + ): + """ + Args: + in_channels (int): input channels. + out_channels (int): output channels. + n (int): number of Bottlenecks. Default value: 1. + """ + # ch_in, ch_out, number, shortcut, groups, expansion + super().__init__() + hidden_channels = int(out_channels * expansion) # hidden channels + self.conv1 = BaseConv(in_channels, hidden_channels, 1, stride=1, act=act) + self.conv2 = BaseConv(in_channels, hidden_channels, 1, stride=1, act=act) + self.conv3 = BaseConv(2 * hidden_channels, out_channels, 1, stride=1, act=act) + module_list = [ + Bottleneck( + hidden_channels, hidden_channels, shortcut, 1.0, depthwise, act=act + ) + for _ in range(n) + ] + self.m = nn.Sequential(*module_list) + + def forward(self, x): + x_1 = self.conv1(x) + x_2 = self.conv2(x) + x_1 = self.m(x_1) + x = torch.cat((x_1, x_2), dim=1) + return self.conv3(x) + + +class Focus(nn.Module): + """Focus width and height information into channel space.""" + + def __init__(self, in_channels, out_channels, ksize=1, stride=1, act="silu"): + super().__init__() + self.conv = BaseConv(in_channels * 4, out_channels, ksize, stride, act=act) + + def forward(self, x): + # shape of x (b,c,w,h) -> y(b,4c,w/2,h/2) + patch_top_left = x[..., ::2, ::2] + patch_top_right = x[..., ::2, 1::2] + patch_bot_left = x[..., 1::2, ::2] + patch_bot_right = x[..., 1::2, 1::2] + x = torch.cat( + ( + patch_top_left, + patch_bot_left, + patch_top_right, + patch_bot_right, + ), + dim=1, + ) + return self.conv(x) diff --git a/tracking/docker-build-context/byte_track/yolox/models/yolo_fpn.py b/tracking/docker-build-context/byte_track/yolox/models/yolo_fpn.py new file mode 100644 index 0000000000000000000000000000000000000000..8b3ba1473c005a57187247fd276ee5920750add8 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/models/yolo_fpn.py @@ -0,0 +1,84 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. + +import torch +import torch.nn as nn + +from .darknet import Darknet +from .network_blocks import BaseConv + + +class YOLOFPN(nn.Module): + """ + YOLOFPN module. Darknet 53 is the default backbone of this model. 
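+    Features from the "dark3", "dark4" and "dark5" backbone stages are fused
+    top-down with nearest-neighbour upsampling and concatenation.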
+ """ + + def __init__( + self, + depth=53, + in_features=["dark3", "dark4", "dark5"], + ): + super().__init__() + + self.backbone = Darknet(depth) + self.in_features = in_features + + # out 1 + self.out1_cbl = self._make_cbl(512, 256, 1) + self.out1 = self._make_embedding([256, 512], 512 + 256) + + # out 2 + self.out2_cbl = self._make_cbl(256, 128, 1) + self.out2 = self._make_embedding([128, 256], 256 + 128) + + # upsample + self.upsample = nn.Upsample(scale_factor=2, mode="nearest") + + def _make_cbl(self, _in, _out, ks): + return BaseConv(_in, _out, ks, stride=1, act="lrelu") + + def _make_embedding(self, filters_list, in_filters): + m = nn.Sequential( + *[ + self._make_cbl(in_filters, filters_list[0], 1), + self._make_cbl(filters_list[0], filters_list[1], 3), + self._make_cbl(filters_list[1], filters_list[0], 1), + self._make_cbl(filters_list[0], filters_list[1], 3), + self._make_cbl(filters_list[1], filters_list[0], 1), + ] + ) + return m + + def load_pretrained_model(self, filename="./weights/darknet53.mix.pth"): + with open(filename, "rb") as f: + state_dict = torch.load(f, map_location="cpu") + print("loading pretrained weights...") + self.backbone.load_state_dict(state_dict) + + def forward(self, inputs): + """ + Args: + inputs (Tensor): input image. + + Returns: + Tuple[Tensor]: FPN output features.. + """ + # backbone + out_features = self.backbone(inputs) + x2, x1, x0 = [out_features[f] for f in self.in_features] + + # yolo branch 1 + x1_in = self.out1_cbl(x0) + x1_in = self.upsample(x1_in) + x1_in = torch.cat([x1_in, x1], 1) + out_dark4 = self.out1(x1_in) + + # yolo branch 2 + x2_in = self.out2_cbl(out_dark4) + x2_in = self.upsample(x2_in) + x2_in = torch.cat([x2_in, x2], 1) + out_dark3 = self.out2(x2_in) + + outputs = (out_dark3, out_dark4, x0) + return outputs diff --git a/tracking/docker-build-context/byte_track/yolox/models/yolo_head.py b/tracking/docker-build-context/byte_track/yolox/models/yolo_head.py new file mode 100644 index 0000000000000000000000000000000000000000..ba8238f17d317eeb4f6c4dc7470f3a6db3ce3ece --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/models/yolo_head.py @@ -0,0 +1,660 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. + +from loguru import logger + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from yolox.utils import bboxes_iou + +import math + +from .losses import IOUloss +from .network_blocks import BaseConv, DWConv + + +class YOLOXHead(nn.Module): + def __init__( + self, + num_classes, + width=1.0, + strides=[8, 16, 32], + in_channels=[256, 512, 1024], + act="silu", + depthwise=False, + ): + """ + Args: + act (str): activation type of conv. Defalut value: "silu". + depthwise (bool): wheather apply depthwise conv in conv branch. Defalut value: False. 
+ """ + super().__init__() + + self.n_anchors = 1 + self.num_classes = num_classes + self.decode_in_inference = True # for deploy, set to False + + self.cls_convs = nn.ModuleList() + self.reg_convs = nn.ModuleList() + self.cls_preds = nn.ModuleList() + self.reg_preds = nn.ModuleList() + self.obj_preds = nn.ModuleList() + self.stems = nn.ModuleList() + Conv = DWConv if depthwise else BaseConv + + for i in range(len(in_channels)): + self.stems.append( + BaseConv( + in_channels=int(in_channels[i] * width), + out_channels=int(256 * width), + ksize=1, + stride=1, + act=act, + ) + ) + self.cls_convs.append( + nn.Sequential( + *[ + Conv( + in_channels=int(256 * width), + out_channels=int(256 * width), + ksize=3, + stride=1, + act=act, + ), + Conv( + in_channels=int(256 * width), + out_channels=int(256 * width), + ksize=3, + stride=1, + act=act, + ), + ] + ) + ) + self.reg_convs.append( + nn.Sequential( + *[ + Conv( + in_channels=int(256 * width), + out_channels=int(256 * width), + ksize=3, + stride=1, + act=act, + ), + Conv( + in_channels=int(256 * width), + out_channels=int(256 * width), + ksize=3, + stride=1, + act=act, + ), + ] + ) + ) + self.cls_preds.append( + nn.Conv2d( + in_channels=int(256 * width), + out_channels=self.n_anchors * self.num_classes, + kernel_size=1, + stride=1, + padding=0, + ) + ) + self.reg_preds.append( + nn.Conv2d( + in_channels=int(256 * width), + out_channels=4, + kernel_size=1, + stride=1, + padding=0, + ) + ) + self.obj_preds.append( + nn.Conv2d( + in_channels=int(256 * width), + out_channels=self.n_anchors * 1, + kernel_size=1, + stride=1, + padding=0, + ) + ) + + self.use_l1 = False + self.l1_loss = nn.L1Loss(reduction="none") + self.bcewithlog_loss = nn.BCEWithLogitsLoss(reduction="none") + self.iou_loss = IOUloss(reduction="none") + self.strides = strides + self.grids = [torch.zeros(1)] * len(in_channels) + self.expanded_strides = [None] * len(in_channels) + + def initialize_biases(self, prior_prob): + for conv in self.cls_preds: + b = conv.bias.view(self.n_anchors, -1) + b.data.fill_(-math.log((1 - prior_prob) / prior_prob)) + conv.bias = torch.nn.Parameter(b.view(-1), requires_grad=True) + + for conv in self.obj_preds: + b = conv.bias.view(self.n_anchors, -1) + b.data.fill_(-math.log((1 - prior_prob) / prior_prob)) + conv.bias = torch.nn.Parameter(b.view(-1), requires_grad=True) + + def forward(self, xin, labels=None, imgs=None): + outputs = [] + origin_preds = [] + x_shifts = [] + y_shifts = [] + expanded_strides = [] + + for k, (cls_conv, reg_conv, stride_this_level, x) in enumerate( + zip(self.cls_convs, self.reg_convs, self.strides, xin) + ): + x = self.stems[k](x) + cls_x = x + reg_x = x + + cls_feat = cls_conv(cls_x) + cls_output = self.cls_preds[k](cls_feat) + + reg_feat = reg_conv(reg_x) + reg_output = self.reg_preds[k](reg_feat) + obj_output = self.obj_preds[k](reg_feat) + + if self.training: + output = torch.cat([reg_output, obj_output, cls_output], 1) + output, grid = self.get_output_and_grid( + output, k, stride_this_level, xin[0].type() + ) + x_shifts.append(grid[:, :, 0]) + y_shifts.append(grid[:, :, 1]) + expanded_strides.append( + torch.zeros(1, grid.shape[1]) + .fill_(stride_this_level) + .type_as(xin[0]) + ) + if self.use_l1: + batch_size = reg_output.shape[0] + hsize, wsize = reg_output.shape[-2:] + reg_output = reg_output.view( + batch_size, self.n_anchors, 4, hsize, wsize + ) + reg_output = reg_output.permute(0, 1, 3, 4, 2).reshape( + batch_size, -1, 4 + ) + origin_preds.append(reg_output.clone()) + + else: + output = torch.cat( + 
[reg_output, obj_output.sigmoid(), cls_output.sigmoid()], 1 + ) + + outputs.append(output) + + if self.training: + return self.get_losses( + imgs, + x_shifts, + y_shifts, + expanded_strides, + labels, + torch.cat(outputs, 1), + origin_preds, + dtype=xin[0].dtype, + ) + else: + self.hw = [x.shape[-2:] for x in outputs] + # [batch, n_anchors_all, 85] + outputs = torch.cat( + [x.flatten(start_dim=2) for x in outputs], dim=2 + ).permute(0, 2, 1) + if self.decode_in_inference: + return self.decode_outputs(outputs, dtype=xin[0].type()) + else: + return outputs + + def get_output_and_grid(self, output, k, stride, dtype): + grid = self.grids[k] + + batch_size = output.shape[0] + n_ch = 5 + self.num_classes + hsize, wsize = output.shape[-2:] + if grid.shape[2:4] != output.shape[2:4]: + yv, xv = torch.meshgrid([torch.arange(hsize), torch.arange(wsize)]) + grid = torch.stack((xv, yv), 2).view(1, 1, hsize, wsize, 2).type(dtype) + self.grids[k] = grid + + output = output.view(batch_size, self.n_anchors, n_ch, hsize, wsize) + output = output.permute(0, 1, 3, 4, 2).reshape( + batch_size, self.n_anchors * hsize * wsize, -1 + ) + grid = grid.view(1, -1, 2) + output[..., :2] = (output[..., :2] + grid) * stride + output[..., 2:4] = torch.exp(output[..., 2:4]) * stride + return output, grid + + def decode_outputs(self, outputs, dtype): + grids = [] + strides = [] + for (hsize, wsize), stride in zip(self.hw, self.strides): + yv, xv = torch.meshgrid([torch.arange(hsize), torch.arange(wsize)]) + grid = torch.stack((xv, yv), 2).view(1, -1, 2) + grids.append(grid) + shape = grid.shape[:2] + strides.append(torch.full((*shape, 1), stride)) + + grids = torch.cat(grids, dim=1).type(dtype) + strides = torch.cat(strides, dim=1).type(dtype) + + outputs[..., :2] = (outputs[..., :2] + grids) * strides + outputs[..., 2:4] = torch.exp(outputs[..., 2:4]) * strides + return outputs + + def get_losses( + self, + imgs, + x_shifts, + y_shifts, + expanded_strides, + labels, + outputs, + origin_preds, + dtype, + ): + bbox_preds = outputs[:, :, :4] # [batch, n_anchors_all, 4] + obj_preds = outputs[:, :, 4].unsqueeze(-1) # [batch, n_anchors_all, 1] + cls_preds = outputs[:, :, 5:] # [batch, n_anchors_all, n_cls] + + # calculate targets + mixup = labels.shape[2] > 5 + if mixup: + label_cut = labels[..., :5] + else: + label_cut = labels + nlabel = (label_cut.sum(dim=2) > 0).sum(dim=1) # number of objects + + total_num_anchors = outputs.shape[1] + x_shifts = torch.cat(x_shifts, 1) # [1, n_anchors_all] + y_shifts = torch.cat(y_shifts, 1) # [1, n_anchors_all] + expanded_strides = torch.cat(expanded_strides, 1) + if self.use_l1: + origin_preds = torch.cat(origin_preds, 1) + + cls_targets = [] + reg_targets = [] + l1_targets = [] + obj_targets = [] + fg_masks = [] + + num_fg = 0.0 + num_gts = 0.0 + + for batch_idx in range(outputs.shape[0]): + num_gt = int(nlabel[batch_idx]) + num_gts += num_gt + if num_gt == 0: + cls_target = outputs.new_zeros((0, self.num_classes)) + reg_target = outputs.new_zeros((0, 4)) + l1_target = outputs.new_zeros((0, 4)) + obj_target = outputs.new_zeros((total_num_anchors, 1)) + fg_mask = outputs.new_zeros(total_num_anchors).bool() + else: + gt_bboxes_per_image = labels[batch_idx, :num_gt, 1:5] + gt_classes = labels[batch_idx, :num_gt, 0] + bboxes_preds_per_image = bbox_preds[batch_idx] + + try: + ( + gt_matched_classes, + fg_mask, + pred_ious_this_matching, + matched_gt_inds, + num_fg_img, + ) = self.get_assignments( # noqa + batch_idx, + num_gt, + total_num_anchors, + gt_bboxes_per_image, + gt_classes, + 
bboxes_preds_per_image, + expanded_strides, + x_shifts, + y_shifts, + cls_preds, + bbox_preds, + obj_preds, + labels, + imgs, + ) + except RuntimeError: + logger.info( + "OOM RuntimeError is raised due to the huge memory cost during label assignment. \ + CPU mode is applied in this batch. If you want to avoid this issue, \ + try to reduce the batch size or image size." + ) + print("OOM RuntimeError is raised due to the huge memory cost during label assignment. \ + CPU mode is applied in this batch. If you want to avoid this issue, \ + try to reduce the batch size or image size.") + torch.cuda.empty_cache() + ( + gt_matched_classes, + fg_mask, + pred_ious_this_matching, + matched_gt_inds, + num_fg_img, + ) = self.get_assignments( # noqa + batch_idx, + num_gt, + total_num_anchors, + gt_bboxes_per_image, + gt_classes, + bboxes_preds_per_image, + expanded_strides, + x_shifts, + y_shifts, + cls_preds, + bbox_preds, + obj_preds, + labels, + imgs, + "cpu", + ) + + + torch.cuda.empty_cache() + num_fg += num_fg_img + + cls_target = F.one_hot( + gt_matched_classes.to(torch.int64), self.num_classes + ) * pred_ious_this_matching.unsqueeze(-1) + obj_target = fg_mask.unsqueeze(-1) + reg_target = gt_bboxes_per_image[matched_gt_inds] + + if self.use_l1: + l1_target = self.get_l1_target( + outputs.new_zeros((num_fg_img, 4)), + gt_bboxes_per_image[matched_gt_inds], + expanded_strides[0][fg_mask], + x_shifts=x_shifts[0][fg_mask], + y_shifts=y_shifts[0][fg_mask], + ) + + cls_targets.append(cls_target) + reg_targets.append(reg_target) + obj_targets.append(obj_target.to(dtype)) + fg_masks.append(fg_mask) + if self.use_l1: + l1_targets.append(l1_target) + + cls_targets = torch.cat(cls_targets, 0) + reg_targets = torch.cat(reg_targets, 0) + obj_targets = torch.cat(obj_targets, 0) + fg_masks = torch.cat(fg_masks, 0) + if self.use_l1: + l1_targets = torch.cat(l1_targets, 0) + + num_fg = max(num_fg, 1) + loss_iou = ( + self.iou_loss(bbox_preds.view(-1, 4)[fg_masks], reg_targets) + ).sum() / num_fg + loss_obj = ( + self.bcewithlog_loss(obj_preds.view(-1, 1), obj_targets) + ).sum() / num_fg + loss_cls = ( + self.bcewithlog_loss( + cls_preds.view(-1, self.num_classes)[fg_masks], cls_targets + ) + ).sum() / num_fg + if self.use_l1: + loss_l1 = ( + self.l1_loss(origin_preds.view(-1, 4)[fg_masks], l1_targets) + ).sum() / num_fg + else: + loss_l1 = 0.0 + + reg_weight = 5.0 + loss = reg_weight * loss_iou + loss_obj + loss_cls + loss_l1 + + return ( + loss, + reg_weight * loss_iou, + loss_obj, + loss_cls, + loss_l1, + num_fg / max(num_gts, 1), + ) + + def get_l1_target(self, l1_target, gt, stride, x_shifts, y_shifts, eps=1e-8): + l1_target[:, 0] = gt[:, 0] / stride - x_shifts + l1_target[:, 1] = gt[:, 1] / stride - y_shifts + l1_target[:, 2] = torch.log(gt[:, 2] / stride + eps) + l1_target[:, 3] = torch.log(gt[:, 3] / stride + eps) + return l1_target + + @torch.no_grad() + def get_assignments( + self, + batch_idx, + num_gt, + total_num_anchors, + gt_bboxes_per_image, + gt_classes, + bboxes_preds_per_image, + expanded_strides, + x_shifts, + y_shifts, + cls_preds, + bbox_preds, + obj_preds, + labels, + imgs, + mode="gpu", + ): + + if mode == "cpu": + print("------------CPU Mode for This Batch-------------") + gt_bboxes_per_image = gt_bboxes_per_image.cpu().float() + bboxes_preds_per_image = bboxes_preds_per_image.cpu().float() + gt_classes = gt_classes.cpu().float() + expanded_strides = expanded_strides.cpu().float() + x_shifts = x_shifts.cpu() + y_shifts = y_shifts.cpu() + + img_size = imgs.shape[2:] + fg_mask, 
is_in_boxes_and_center = self.get_in_boxes_info( + gt_bboxes_per_image, + expanded_strides, + x_shifts, + y_shifts, + total_num_anchors, + num_gt, + img_size + ) + + bboxes_preds_per_image = bboxes_preds_per_image[fg_mask] + cls_preds_ = cls_preds[batch_idx][fg_mask] + obj_preds_ = obj_preds[batch_idx][fg_mask] + num_in_boxes_anchor = bboxes_preds_per_image.shape[0] + + if mode == "cpu": + gt_bboxes_per_image = gt_bboxes_per_image.cpu() + bboxes_preds_per_image = bboxes_preds_per_image.cpu() + + pair_wise_ious = bboxes_iou(gt_bboxes_per_image, bboxes_preds_per_image, False) + + gt_cls_per_image = ( + F.one_hot(gt_classes.to(torch.int64), self.num_classes) + .float() + .unsqueeze(1) + .repeat(1, num_in_boxes_anchor, 1) + ) + pair_wise_ious_loss = -torch.log(pair_wise_ious + 1e-8) + + if mode == "cpu": + cls_preds_, obj_preds_ = cls_preds_.cpu(), obj_preds_.cpu() + + with torch.cuda.amp.autocast(enabled=False): + cls_preds_ = ( + cls_preds_.float().unsqueeze(0).repeat(num_gt, 1, 1).sigmoid_() + * obj_preds_.float().unsqueeze(0).repeat(num_gt, 1, 1).sigmoid_() + ) + pair_wise_cls_loss = F.binary_cross_entropy( + cls_preds_.sqrt_(), gt_cls_per_image, reduction="none" + ).sum(-1) + del cls_preds_ + + cost = ( + pair_wise_cls_loss + + 3.0 * pair_wise_ious_loss + + 100000.0 * (~is_in_boxes_and_center) + ) + + ( + num_fg, + gt_matched_classes, + pred_ious_this_matching, + matched_gt_inds, + ) = self.dynamic_k_matching(cost, pair_wise_ious, gt_classes, num_gt, fg_mask) + del pair_wise_cls_loss, cost, pair_wise_ious, pair_wise_ious_loss + + if mode == "cpu": + gt_matched_classes = gt_matched_classes.cuda() + fg_mask = fg_mask.cuda() + pred_ious_this_matching = pred_ious_this_matching.cuda() + matched_gt_inds = matched_gt_inds.cuda() + + return ( + gt_matched_classes, + fg_mask, + pred_ious_this_matching, + matched_gt_inds, + num_fg, + ) + + def get_in_boxes_info( + self, + gt_bboxes_per_image, + expanded_strides, + x_shifts, + y_shifts, + total_num_anchors, + num_gt, + img_size + ): + expanded_strides_per_image = expanded_strides[0] + x_shifts_per_image = x_shifts[0] * expanded_strides_per_image + y_shifts_per_image = y_shifts[0] * expanded_strides_per_image + x_centers_per_image = ( + (x_shifts_per_image + 0.5 * expanded_strides_per_image) + .unsqueeze(0) + .repeat(num_gt, 1) + ) # [n_anchor] -> [n_gt, n_anchor] + y_centers_per_image = ( + (y_shifts_per_image + 0.5 * expanded_strides_per_image) + .unsqueeze(0) + .repeat(num_gt, 1) + ) + + gt_bboxes_per_image_l = ( + (gt_bboxes_per_image[:, 0] - 0.5 * gt_bboxes_per_image[:, 2]) + .unsqueeze(1) + .repeat(1, total_num_anchors) + ) + gt_bboxes_per_image_r = ( + (gt_bboxes_per_image[:, 0] + 0.5 * gt_bboxes_per_image[:, 2]) + .unsqueeze(1) + .repeat(1, total_num_anchors) + ) + gt_bboxes_per_image_t = ( + (gt_bboxes_per_image[:, 1] - 0.5 * gt_bboxes_per_image[:, 3]) + .unsqueeze(1) + .repeat(1, total_num_anchors) + ) + gt_bboxes_per_image_b = ( + (gt_bboxes_per_image[:, 1] + 0.5 * gt_bboxes_per_image[:, 3]) + .unsqueeze(1) + .repeat(1, total_num_anchors) + ) + + b_l = x_centers_per_image - gt_bboxes_per_image_l + b_r = gt_bboxes_per_image_r - x_centers_per_image + b_t = y_centers_per_image - gt_bboxes_per_image_t + b_b = gt_bboxes_per_image_b - y_centers_per_image + bbox_deltas = torch.stack([b_l, b_t, b_r, b_b], 2) + + is_in_boxes = bbox_deltas.min(dim=-1).values > 0.0 + is_in_boxes_all = is_in_boxes.sum(dim=0) > 0 + # in fixed center + + center_radius = 2.5 + # clip center inside image + gt_bboxes_per_image_clip = gt_bboxes_per_image[:, 0:2].clone() + 
gt_bboxes_per_image_clip[:, 0] = torch.clamp(gt_bboxes_per_image_clip[:, 0], min=0, max=img_size[1]) + gt_bboxes_per_image_clip[:, 1] = torch.clamp(gt_bboxes_per_image_clip[:, 1], min=0, max=img_size[0]) + + gt_bboxes_per_image_l = (gt_bboxes_per_image_clip[:, 0]).unsqueeze(1).repeat( + 1, total_num_anchors + ) - center_radius * expanded_strides_per_image.unsqueeze(0) + gt_bboxes_per_image_r = (gt_bboxes_per_image_clip[:, 0]).unsqueeze(1).repeat( + 1, total_num_anchors + ) + center_radius * expanded_strides_per_image.unsqueeze(0) + gt_bboxes_per_image_t = (gt_bboxes_per_image_clip[:, 1]).unsqueeze(1).repeat( + 1, total_num_anchors + ) - center_radius * expanded_strides_per_image.unsqueeze(0) + gt_bboxes_per_image_b = (gt_bboxes_per_image_clip[:, 1]).unsqueeze(1).repeat( + 1, total_num_anchors + ) + center_radius * expanded_strides_per_image.unsqueeze(0) + + c_l = x_centers_per_image - gt_bboxes_per_image_l + c_r = gt_bboxes_per_image_r - x_centers_per_image + c_t = y_centers_per_image - gt_bboxes_per_image_t + c_b = gt_bboxes_per_image_b - y_centers_per_image + center_deltas = torch.stack([c_l, c_t, c_r, c_b], 2) + is_in_centers = center_deltas.min(dim=-1).values > 0.0 + is_in_centers_all = is_in_centers.sum(dim=0) > 0 + + # in boxes and in centers + is_in_boxes_anchor = is_in_boxes_all | is_in_centers_all + + is_in_boxes_and_center = ( + is_in_boxes[:, is_in_boxes_anchor] & is_in_centers[:, is_in_boxes_anchor] + ) + del gt_bboxes_per_image_clip + return is_in_boxes_anchor, is_in_boxes_and_center + + def dynamic_k_matching(self, cost, pair_wise_ious, gt_classes, num_gt, fg_mask): + # Dynamic K + # --------------------------------------------------------------- + matching_matrix = torch.zeros_like(cost) + + ious_in_boxes_matrix = pair_wise_ious + n_candidate_k = min(10, ious_in_boxes_matrix.size(1)) + topk_ious, _ = torch.topk(ious_in_boxes_matrix, n_candidate_k, dim=1) + dynamic_ks = torch.clamp(topk_ious.sum(1).int(), min=1) + for gt_idx in range(num_gt): + _, pos_idx = torch.topk( + cost[gt_idx], k=dynamic_ks[gt_idx].item(), largest=False + ) + matching_matrix[gt_idx][pos_idx] = 1.0 + + del topk_ious, dynamic_ks, pos_idx + + anchor_matching_gt = matching_matrix.sum(0) + if (anchor_matching_gt > 1).sum() > 0: + cost_min, cost_argmin = torch.min(cost[:, anchor_matching_gt > 1], dim=0) + matching_matrix[:, anchor_matching_gt > 1] *= 0.0 + matching_matrix[cost_argmin, anchor_matching_gt > 1] = 1.0 + fg_mask_inboxes = matching_matrix.sum(0) > 0.0 + num_fg = fg_mask_inboxes.sum().item() + + fg_mask[fg_mask.clone()] = fg_mask_inboxes + + matched_gt_inds = matching_matrix[:, fg_mask_inboxes].argmax(0) + gt_matched_classes = gt_classes[matched_gt_inds] + + pred_ious_this_matching = (matching_matrix * pair_wise_ious).sum(0)[ + fg_mask_inboxes + ] + return num_fg, gt_matched_classes, pred_ious_this_matching, matched_gt_inds diff --git a/tracking/docker-build-context/byte_track/yolox/models/yolo_pafpn.py b/tracking/docker-build-context/byte_track/yolox/models/yolo_pafpn.py new file mode 100644 index 0000000000000000000000000000000000000000..c419de3204f466c81f7e50fe5c7ffd17e51d63b3 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/models/yolo_pafpn.py @@ -0,0 +1,116 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. + +import torch +import torch.nn as nn + +from .darknet import CSPDarknet +from .network_blocks import BaseConv, CSPLayer, DWConv + + +class YOLOPAFPN(nn.Module): + """ + YOLOv3 model. 
Darknet 53 is the default backbone of this model. + """ + + def __init__( + self, + depth=1.0, + width=1.0, + in_features=("dark3", "dark4", "dark5"), + in_channels=[256, 512, 1024], + depthwise=False, + act="silu", + ): + super().__init__() + self.backbone = CSPDarknet(depth, width, depthwise=depthwise, act=act) + self.in_features = in_features + self.in_channels = in_channels + Conv = DWConv if depthwise else BaseConv + + self.upsample = nn.Upsample(scale_factor=2, mode="nearest") + self.lateral_conv0 = BaseConv( + int(in_channels[2] * width), int(in_channels[1] * width), 1, 1, act=act + ) + self.C3_p4 = CSPLayer( + int(2 * in_channels[1] * width), + int(in_channels[1] * width), + round(3 * depth), + False, + depthwise=depthwise, + act=act, + ) # cat + + self.reduce_conv1 = BaseConv( + int(in_channels[1] * width), int(in_channels[0] * width), 1, 1, act=act + ) + self.C3_p3 = CSPLayer( + int(2 * in_channels[0] * width), + int(in_channels[0] * width), + round(3 * depth), + False, + depthwise=depthwise, + act=act, + ) + + # bottom-up conv + self.bu_conv2 = Conv( + int(in_channels[0] * width), int(in_channels[0] * width), 3, 2, act=act + ) + self.C3_n3 = CSPLayer( + int(2 * in_channels[0] * width), + int(in_channels[1] * width), + round(3 * depth), + False, + depthwise=depthwise, + act=act, + ) + + # bottom-up conv + self.bu_conv1 = Conv( + int(in_channels[1] * width), int(in_channels[1] * width), 3, 2, act=act + ) + self.C3_n4 = CSPLayer( + int(2 * in_channels[1] * width), + int(in_channels[2] * width), + round(3 * depth), + False, + depthwise=depthwise, + act=act, + ) + + def forward(self, input): + """ + Args: + inputs: input images. + + Returns: + Tuple[Tensor]: FPN feature. + """ + + # backbone + out_features = self.backbone(input) + features = [out_features[f] for f in self.in_features] + [x2, x1, x0] = features + + fpn_out0 = self.lateral_conv0(x0) # 1024->512/32 + f_out0 = self.upsample(fpn_out0) # 512/16 + f_out0 = torch.cat([f_out0, x1], 1) # 512->1024/16 + f_out0 = self.C3_p4(f_out0) # 1024->512/16 + + fpn_out1 = self.reduce_conv1(f_out0) # 512->256/16 + f_out1 = self.upsample(fpn_out1) # 256/8 + f_out1 = torch.cat([f_out1, x2], 1) # 256->512/8 + pan_out2 = self.C3_p3(f_out1) # 512->256/8 + + p_out1 = self.bu_conv2(pan_out2) # 256->256/16 + p_out1 = torch.cat([p_out1, fpn_out1], 1) # 256->512/16 + pan_out1 = self.C3_n3(p_out1) # 512->512/16 + + p_out0 = self.bu_conv1(pan_out1) # 512->512/32 + p_out0 = torch.cat([p_out0, fpn_out0], 1) # 512->1024/32 + pan_out0 = self.C3_n4(p_out0) # 1024->1024/32 + + outputs = (pan_out2, pan_out1, pan_out0) + return outputs diff --git a/tracking/docker-build-context/byte_track/yolox/models/yolox.py b/tracking/docker-build-context/byte_track/yolox/models/yolox.py new file mode 100644 index 0000000000000000000000000000000000000000..2f1fa1b34baaf6e0241cf289a2f73db48b33d914 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/models/yolox.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python +# -*- encoding: utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. + +import torch.nn as nn + +from .yolo_head import YOLOXHead +from .yolo_pafpn import YOLOPAFPN + + +class YOLOX(nn.Module): + """ + YOLOX model module. The module list is defined by create_yolov3_modules function. + The network returns loss values from three YOLO layers during training + and detection results during test. 
+ """ + + def __init__(self, backbone=None, head=None): + super().__init__() + if backbone is None: + backbone = YOLOPAFPN() + if head is None: + head = YOLOXHead(80) + + self.backbone = backbone + self.head = head + + def forward(self, x, targets=None): + # fpn output content features of [dark3, dark4, dark5] + fpn_outs = self.backbone(x) + + if self.training: + assert targets is not None + loss, iou_loss, conf_loss, cls_loss, l1_loss, num_fg = self.head( + fpn_outs, targets, x + ) + outputs = { + "total_loss": loss, + "iou_loss": iou_loss, + "l1_loss": l1_loss, + "conf_loss": conf_loss, + "cls_loss": cls_loss, + "num_fg": num_fg, + } + else: + outputs = self.head(fpn_outs) + + return outputs diff --git a/tracking/docker-build-context/byte_track/yolox/motdt_tracker/basetrack.py b/tracking/docker-build-context/byte_track/yolox/motdt_tracker/basetrack.py new file mode 100644 index 0000000000000000000000000000000000000000..88b16eea14d1cc6f238b7c56becd6e754fbea55c --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/motdt_tracker/basetrack.py @@ -0,0 +1,56 @@ +import numpy as np +from collections import OrderedDict + + +class TrackState(object): + New = 0 + Tracked = 1 + Lost = 2 + Removed = 3 + Replaced = 4 + + +class BaseTrack(object): + _count = 0 + + track_id = 0 + is_activated = False + state = TrackState.New + + history = OrderedDict() + features = [] + curr_feature = None + score = 0 + start_frame = 0 + frame_id = 0 + time_since_update = 0 + + # multi-camera + location = (np.inf, np.inf) + + @property + def end_frame(self): + return self.frame_id + + @staticmethod + def next_id(): + BaseTrack._count += 1 + return BaseTrack._count + + def activate(self, *args): + raise NotImplementedError + + def predict(self): + raise NotImplementedError + + def update(self, *args, **kwargs): + raise NotImplementedError + + def mark_lost(self): + self.state = TrackState.Lost + + def mark_removed(self): + self.state = TrackState.Removed + + def mark_replaced(self): + self.state = TrackState.Replaced diff --git a/tracking/docker-build-context/byte_track/yolox/motdt_tracker/kalman_filter.py b/tracking/docker-build-context/byte_track/yolox/motdt_tracker/kalman_filter.py new file mode 100644 index 0000000000000000000000000000000000000000..deda8a26292b81bc6512a8f6145afabde6c16d7a --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/motdt_tracker/kalman_filter.py @@ -0,0 +1,270 @@ +# vim: expandtab:ts=4:sw=4 +import numpy as np +import scipy.linalg + + +""" +Table for the 0.95 quantile of the chi-square distribution with N degrees of +freedom (contains values for N=1, ..., 9). Taken from MATLAB/Octave's chi2inv +function and used as Mahalanobis gating threshold. +""" +chi2inv95 = { + 1: 3.8415, + 2: 5.9915, + 3: 7.8147, + 4: 9.4877, + 5: 11.070, + 6: 12.592, + 7: 14.067, + 8: 15.507, + 9: 16.919} + + +class KalmanFilter(object): + """ + A simple Kalman filter for tracking bounding boxes in image space. + + The 8-dimensional state space + + x, y, a, h, vx, vy, va, vh + + contains the bounding box center position (x, y), aspect ratio a, height h, + and their respective velocities. + + Object motion follows a constant velocity model. The bounding box location + (x, y, a, h) is taken as direct observation of the state space (linear + observation model). + + """ + + def __init__(self): + ndim, dt = 4, 1. + + # Create Kalman filter model matrices. 
+ self._motion_mat = np.eye(2 * ndim, 2 * ndim) + for i in range(ndim): + self._motion_mat[i, ndim + i] = dt + self._update_mat = np.eye(ndim, 2 * ndim) + + # Motion and observation uncertainty are chosen relative to the current + # state estimate. These weights control the amount of uncertainty in + # the model. This is a bit hacky. + self._std_weight_position = 1. / 20 + self._std_weight_velocity = 1. / 160 + + def initiate(self, measurement): + """Create track from unassociated measurement. + + Parameters + ---------- + measurement : ndarray + Bounding box coordinates (x, y, a, h) with center position (x, y), + aspect ratio a, and height h. + + Returns + ------- + (ndarray, ndarray) + Returns the mean vector (8 dimensional) and covariance matrix (8x8 + dimensional) of the new track. Unobserved velocities are initialized + to 0 mean. + + """ + mean_pos = measurement + mean_vel = np.zeros_like(mean_pos) + mean = np.r_[mean_pos, mean_vel] + + std = [ + 2 * self._std_weight_position * measurement[3], + 2 * self._std_weight_position * measurement[3], + 1e-2, + 2 * self._std_weight_position * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 1e-5, + 10 * self._std_weight_velocity * measurement[3]] + covariance = np.diag(np.square(std)) + return mean, covariance + + def predict(self, mean, covariance): + """Run Kalman filter prediction step. + + Parameters + ---------- + mean : ndarray + The 8 dimensional mean vector of the object state at the previous + time step. + covariance : ndarray + The 8x8 dimensional covariance matrix of the object state at the + previous time step. + + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + + """ + std_pos = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-2, + self._std_weight_position * mean[3]] + std_vel = [ + self._std_weight_velocity * mean[3], + self._std_weight_velocity * mean[3], + 1e-5, + self._std_weight_velocity * mean[3]] + motion_cov = np.diag(np.square(np.r_[std_pos, std_vel])) + + #mean = np.dot(self._motion_mat, mean) + mean = np.dot(mean, self._motion_mat.T) + covariance = np.linalg.multi_dot(( + self._motion_mat, covariance, self._motion_mat.T)) + motion_cov + + return mean, covariance + + def project(self, mean, covariance): + """Project state distribution to measurement space. + + Parameters + ---------- + mean : ndarray + The state's mean vector (8 dimensional array). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + + Returns + ------- + (ndarray, ndarray) + Returns the projected mean and covariance matrix of the given state + estimate. + + """ + std = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-1, + self._std_weight_position * mean[3]] + innovation_cov = np.diag(np.square(std)) + + mean = np.dot(self._update_mat, mean) + covariance = np.linalg.multi_dot(( + self._update_mat, covariance, self._update_mat.T)) + return mean, covariance + innovation_cov + + def multi_predict(self, mean, covariance): + """Run Kalman filter prediction step (Vectorized version). + Parameters + ---------- + mean : ndarray + The Nx8 dimensional mean matrix of the object states at the previous + time step. + covariance : ndarray + The Nx8x8 dimensional covariance matrics of the object states at the + previous time step. 
+ Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + """ + std_pos = [ + self._std_weight_position * mean[:, 3], + self._std_weight_position * mean[:, 3], + 1e-2 * np.ones_like(mean[:, 3]), + self._std_weight_position * mean[:, 3]] + std_vel = [ + self._std_weight_velocity * mean[:, 3], + self._std_weight_velocity * mean[:, 3], + 1e-5 * np.ones_like(mean[:, 3]), + self._std_weight_velocity * mean[:, 3]] + sqr = np.square(np.r_[std_pos, std_vel]).T + + motion_cov = [] + for i in range(len(mean)): + motion_cov.append(np.diag(sqr[i])) + motion_cov = np.asarray(motion_cov) + + mean = np.dot(mean, self._motion_mat.T) + left = np.dot(self._motion_mat, covariance).transpose((1, 0, 2)) + covariance = np.dot(left, self._motion_mat.T) + motion_cov + + return mean, covariance + + def update(self, mean, covariance, measurement): + """Run Kalman filter correction step. + + Parameters + ---------- + mean : ndarray + The predicted state's mean vector (8 dimensional). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + measurement : ndarray + The 4 dimensional measurement vector (x, y, a, h), where (x, y) + is the center position, a the aspect ratio, and h the height of the + bounding box. + + Returns + ------- + (ndarray, ndarray) + Returns the measurement-corrected state distribution. + + """ + projected_mean, projected_cov = self.project(mean, covariance) + + chol_factor, lower = scipy.linalg.cho_factor( + projected_cov, lower=True, check_finite=False) + kalman_gain = scipy.linalg.cho_solve( + (chol_factor, lower), np.dot(covariance, self._update_mat.T).T, + check_finite=False).T + innovation = measurement - projected_mean + + new_mean = mean + np.dot(innovation, kalman_gain.T) + new_covariance = covariance - np.linalg.multi_dot(( + kalman_gain, projected_cov, kalman_gain.T)) + return new_mean, new_covariance + + def gating_distance(self, mean, covariance, measurements, + only_position=False, metric='maha'): + """Compute gating distance between state distribution and measurements. + A suitable distance threshold can be obtained from `chi2inv95`. If + `only_position` is False, the chi-square distribution has 4 degrees of + freedom, otherwise 2. + Parameters + ---------- + mean : ndarray + Mean vector over the state distribution (8 dimensional). + covariance : ndarray + Covariance of the state distribution (8x8 dimensional). + measurements : ndarray + An Nx4 dimensional matrix of N measurements, each in + format (x, y, a, h) where (x, y) is the bounding box center + position, a the aspect ratio, and h the height. + only_position : Optional[bool] + If True, distance computation is done with respect to the bounding + box center position only. + Returns + ------- + ndarray + Returns an array of length N, where the i-th element contains the + squared Mahalanobis distance between (mean, covariance) and + `measurements[i]`. 
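+
+        Notes
+        -----
+        `metric` selects the distance: 'maha' (the default) gives the
+        squared Mahalanobis distance, 'gaussian' the plain squared
+        Euclidean distance on the projected mean.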
+ """ + mean, covariance = self.project(mean, covariance) + if only_position: + mean, covariance = mean[:2], covariance[:2, :2] + measurements = measurements[:, :2] + + d = measurements - mean + if metric == 'gaussian': + return np.sum(d * d, axis=1) + elif metric == 'maha': + cholesky_factor = np.linalg.cholesky(covariance) + z = scipy.linalg.solve_triangular( + cholesky_factor, d.T, lower=True, check_finite=False, + overwrite_b=True) + squared_maha = np.sum(z * z, axis=0) + return squared_maha + else: + raise ValueError('invalid distance metric') \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/yolox/motdt_tracker/matching.py b/tracking/docker-build-context/byte_track/yolox/motdt_tracker/matching.py new file mode 100644 index 0000000000000000000000000000000000000000..01d07da874a793c06eecba172d1e44c7a368234b --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/motdt_tracker/matching.py @@ -0,0 +1,116 @@ +import cv2 +import numpy as np +import lap +from scipy.spatial.distance import cdist + +from cython_bbox import bbox_overlaps as bbox_ious +from yolox.motdt_tracker import kalman_filter + + +def _indices_to_matches(cost_matrix, indices, thresh): + matched_cost = cost_matrix[tuple(zip(*indices))] + matched_mask = (matched_cost <= thresh) + + matches = indices[matched_mask] + unmatched_a = tuple(set(range(cost_matrix.shape[0])) - set(matches[:, 0])) + unmatched_b = tuple(set(range(cost_matrix.shape[1])) - set(matches[:, 1])) + + return matches, unmatched_a, unmatched_b + + +def linear_assignment(cost_matrix, thresh): + if cost_matrix.size == 0: + return np.empty((0, 2), dtype=int), tuple(range(cost_matrix.shape[0])), tuple(range(cost_matrix.shape[1])) + matches, unmatched_a, unmatched_b = [], [], [] + cost, x, y = lap.lapjv(cost_matrix, extend_cost=True, cost_limit=thresh) + for ix, mx in enumerate(x): + if mx >= 0: + matches.append([ix, mx]) + unmatched_a = np.where(x < 0)[0] + unmatched_b = np.where(y < 0)[0] + matches = np.asarray(matches) + return matches, unmatched_a, unmatched_b + + +def ious(atlbrs, btlbrs): + """ + Compute cost based on IoU + :type atlbrs: list[tlbr] | np.ndarray + :type atlbrs: list[tlbr] | np.ndarray + :rtype ious np.ndarray + """ + ious = np.zeros((len(atlbrs), len(btlbrs)), dtype=np.float) + if ious.size == 0: + return ious + + ious = bbox_ious( + np.ascontiguousarray(atlbrs, dtype=np.float), + np.ascontiguousarray(btlbrs, dtype=np.float) + ) + + return ious + + +def iou_distance(atracks, btracks): + """ + Compute cost based on IoU + :type atracks: list[STrack] + :type btracks: list[STrack] + :rtype cost_matrix np.ndarray + """ + atlbrs = [track.tlbr for track in atracks] + btlbrs = [track.tlbr for track in btracks] + _ious = ious(atlbrs, btlbrs) + cost_matrix = 1 - _ious + + return cost_matrix + + +def nearest_reid_distance(tracks, detections, metric='cosine'): + """ + Compute cost based on ReID features + :type tracks: list[STrack] + :type detections: list[BaseTrack] + :rtype cost_matrix np.ndarray + """ + cost_matrix = np.zeros((len(tracks), len(detections)), dtype=np.float) + if cost_matrix.size == 0: + return cost_matrix + + det_features = np.asarray([track.curr_feature for track in detections], dtype=np.float32) + for i, track in enumerate(tracks): + cost_matrix[i, :] = np.maximum(0.0, cdist(track.features, det_features, metric).min(axis=0)) + + return cost_matrix + + +def mean_reid_distance(tracks, detections, metric='cosine'): + """ + Compute cost based on ReID features + :type tracks: list[STrack] + :type 
detections: list[BaseTrack] + :type metric: str + :rtype cost_matrix np.ndarray + """ + cost_matrix = np.empty((len(tracks), len(detections)), dtype=np.float) + if cost_matrix.size == 0: + return cost_matrix + + track_features = np.asarray([track.curr_feature for track in tracks], dtype=np.float32) + det_features = np.asarray([track.curr_feature for track in detections], dtype=np.float32) + cost_matrix = cdist(track_features, det_features, metric) + + return cost_matrix + + +def gate_cost_matrix(kf, cost_matrix, tracks, detections, only_position=False): + if cost_matrix.size == 0: + return cost_matrix + gating_dim = 2 if only_position else 4 + gating_threshold = kalman_filter.chi2inv95[gating_dim] + measurements = np.asarray([det.to_xyah() for det in detections]) + for row, track in enumerate(tracks): + gating_distance = kf.gating_distance( + track.mean, track.covariance, measurements, only_position) + cost_matrix[row, gating_distance > gating_threshold] = np.inf + return cost_matrix \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/yolox/motdt_tracker/motdt_tracker.py b/tracking/docker-build-context/byte_track/yolox/motdt_tracker/motdt_tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..b3521f2ff7f5c937a3af05e92126388d960caf24 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/motdt_tracker/motdt_tracker.py @@ -0,0 +1,358 @@ +import numpy as np +#from numba import jit +from collections import OrderedDict, deque +import itertools +import os +import cv2 +import torch +from torch._C import dtype +import torchvision + +from yolox.motdt_tracker import matching +from .kalman_filter import KalmanFilter +from .reid_model import load_reid_model, extract_reid_features +from yolox.data.dataloading import get_yolox_datadir + +from .basetrack import BaseTrack, TrackState + + +class STrack(BaseTrack): + + def __init__(self, tlwh, score, max_n_features=100, from_det=True): + + # wait activate + self._tlwh = np.asarray(tlwh, dtype=np.float) + self.kalman_filter = None + self.mean, self.covariance = None, None + self.is_activated = False + + self.score = score + self.max_n_features = max_n_features + self.curr_feature = None + self.last_feature = None + self.features = deque([], maxlen=self.max_n_features) + + # classification + self.from_det = from_det + self.tracklet_len = 0 + self.time_by_tracking = 0 + + # self-tracking + self.tracker = None + + def set_feature(self, feature): + if feature is None: + return False + self.features.append(feature) + self.curr_feature = feature + self.last_feature = feature + # self._p_feature = 0 + return True + + def predict(self): + if self.time_since_update > 0: + self.tracklet_len = 0 + + self.time_since_update += 1 + + mean_state = self.mean.copy() + if self.state != TrackState.Tracked: + mean_state[7] = 0 + self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance) + + if self.tracker: + self.tracker.update_roi(self.tlwh) + + def self_tracking(self, image): + tlwh = self.tracker.predict(image) if self.tracker else self.tlwh + return tlwh + + def activate(self, kalman_filter, frame_id, image): + """Start a new tracklet""" + self.kalman_filter = kalman_filter # type: KalmanFilter + self.track_id = self.next_id() + # cx, cy, aspect_ratio, height, dx, dy, da, dh + self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh)) + + # self.tracker = sot.SingleObjectTracker() + # self.tracker.init(image, self.tlwh) + + del self._tlwh + + 
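+        # reset bookkeeping; is_activated intentionally stays False here, so a
+        # brand-new track must be matched again before it is reported
+        # (OnlineTracker.update only outputs tracks with is_activated == True).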
self.time_since_update = 0 + self.time_by_tracking = 0 + self.tracklet_len = 0 + self.state = TrackState.Tracked + # self.is_activated = True + self.frame_id = frame_id + self.start_frame = frame_id + + def re_activate(self, new_track, frame_id, image, new_id=False): + # self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(new_track.tlwh)) + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) + ) + self.time_since_update = 0 + self.time_by_tracking = 0 + self.tracklet_len = 0 + self.state = TrackState.Tracked + self.is_activated = True + self.frame_id = frame_id + if new_id: + self.track_id = self.next_id() + + self.set_feature(new_track.curr_feature) + + def update(self, new_track, frame_id, image, update_feature=True): + """ + Update a matched track + :type new_track: STrack + :type frame_id: int + :type update_feature: bool + :return: + """ + self.frame_id = frame_id + self.time_since_update = 0 + if new_track.from_det: + self.time_by_tracking = 0 + else: + self.time_by_tracking += 1 + self.tracklet_len += 1 + + new_tlwh = new_track.tlwh + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh)) + self.state = TrackState.Tracked + self.is_activated = True + + self.score = new_track.score + + if update_feature: + self.set_feature(new_track.curr_feature) + if self.tracker: + self.tracker.update(image, self.tlwh) + + @property + #@jit + def tlwh(self): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. + """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + #@jit + def tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + #@jit + def tlwh_to_xyah(tlwh): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. 
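+
+        For example, tlwh (10, 20, 50, 100) maps to xyah (35.0, 70.0, 0.5, 100.0).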
+ """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self): + return self.tlwh_to_xyah(self.tlwh) + + def tracklet_score(self): + # score = (1 - np.exp(-0.6 * self.hit_streak)) * np.exp(-0.03 * self.time_by_tracking) + + score = max(0, 1 - np.log(1 + 0.05 * self.time_by_tracking)) * (self.tracklet_len - self.time_by_tracking > 2) + # score = max(0, 1 - np.log(1 + 0.05 * self.n_tracking)) * (1 - np.exp(-0.6 * self.hit_streak)) + return score + + def __repr__(self): + return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame) + + +class OnlineTracker(object): + + def __init__(self, model_folder, min_cls_score=0.4, min_ap_dist=0.8, max_time_lost=30, use_tracking=True, use_refind=True): + + self.min_cls_score = min_cls_score + self.min_ap_dist = min_ap_dist + self.max_time_lost = max_time_lost + + self.kalman_filter = KalmanFilter() + + self.tracked_stracks = [] # type: list[STrack] + self.lost_stracks = [] # type: list[STrack] + self.removed_stracks = [] # type: list[STrack] + + self.use_refind = use_refind + self.use_tracking = use_tracking + self.classifier = None + self.reid_model = load_reid_model(model_folder) + + self.frame_id = 0 + + def update(self, output_results, img_info, img_size, img_file_name): + img_file_name = os.path.join(get_yolox_datadir(), 'mot', 'train', img_file_name) + image = cv2.imread(img_file_name) + # post process detections + output_results = output_results.cpu().numpy() + confidences = output_results[:, 4] * output_results[:, 5] + + bboxes = output_results[:, :4] # x1y1x2y2 + img_h, img_w = img_info[0], img_info[1] + scale = min(img_size[0] / float(img_h), img_size[1] / float(img_w)) + bboxes /= scale + bbox_xyxy = bboxes + tlwhs = self._xyxy_to_tlwh_array(bbox_xyxy) + remain_inds = confidences > self.min_cls_score + tlwhs = tlwhs[remain_inds] + det_scores = confidences[remain_inds] + + self.frame_id += 1 + + activated_starcks = [] + refind_stracks = [] + lost_stracks = [] + removed_stracks = [] + + """step 1: prediction""" + for strack in itertools.chain(self.tracked_stracks, self.lost_stracks): + strack.predict() + + """step 2: scoring and selection""" + if det_scores is None: + det_scores = np.ones(len(tlwhs), dtype=float) + detections = [STrack(tlwh, score, from_det=True) for tlwh, score in zip(tlwhs, det_scores)] + if self.use_tracking: + tracks = [STrack(t.self_tracking(image), 0.6 * t.tracklet_score(), from_det=False) + for t in itertools.chain(self.tracked_stracks, self.lost_stracks) if t.is_activated] + detections.extend(tracks) + rois = np.asarray([d.tlbr for d in detections], dtype=np.float32) + scores = np.asarray([d.score for d in detections], dtype=np.float32) + # nms + if len(detections) > 0: + nms_out_index = torchvision.ops.batched_nms( + torch.from_numpy(rois), + torch.from_numpy(scores.reshape(-1)).to(torch.from_numpy(rois).dtype), + torch.zeros_like(torch.from_numpy(scores.reshape(-1))), + 0.7, + ) + keep = nms_out_index.numpy() + mask = np.zeros(len(rois), dtype=np.bool) + mask[keep] = True + keep = np.where(mask & (scores >= self.min_cls_score))[0] + detections = [detections[i] for i in keep] + scores = scores[keep] + for d, score in zip(detections, scores): + d.score = score + pred_dets = [d for d in detections if not d.from_det] + detections = [d for d in detections if d.from_det] + + # set features + tlbrs = [det.tlbr for det in detections] + features = extract_reid_features(self.reid_model, image, tlbrs) + features = features.cpu().numpy() + for i, det in 
enumerate(detections): + det.set_feature(features[i]) + + """step 3: association for tracked""" + # matching for tracked targets + unconfirmed = [] + tracked_stracks = [] # type: list[STrack] + for track in self.tracked_stracks: + if not track.is_activated: + unconfirmed.append(track) + else: + tracked_stracks.append(track) + + dists = matching.nearest_reid_distance(tracked_stracks, detections, metric='euclidean') + dists = matching.gate_cost_matrix(self.kalman_filter, dists, tracked_stracks, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=self.min_ap_dist) + for itracked, idet in matches: + tracked_stracks[itracked].update(detections[idet], self.frame_id, image) + + # matching for missing targets + detections = [detections[i] for i in u_detection] + dists = matching.nearest_reid_distance(self.lost_stracks, detections, metric='euclidean') + dists = matching.gate_cost_matrix(self.kalman_filter, dists, self.lost_stracks, detections) + matches, u_lost, u_detection = matching.linear_assignment(dists, thresh=self.min_ap_dist) + for ilost, idet in matches: + track = self.lost_stracks[ilost] # type: STrack + det = detections[idet] + track.re_activate(det, self.frame_id, image, new_id=not self.use_refind) + refind_stracks.append(track) + + # remaining tracked + # tracked + len_det = len(u_detection) + detections = [detections[i] for i in u_detection] + pred_dets + r_tracked_stracks = [tracked_stracks[i] for i in u_track] + dists = matching.iou_distance(r_tracked_stracks, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=0.5) + for itracked, idet in matches: + r_tracked_stracks[itracked].update(detections[idet], self.frame_id, image, update_feature=True) + for it in u_track: + track = r_tracked_stracks[it] + track.mark_lost() + lost_stracks.append(track) + + # unconfirmed + detections = [detections[i] for i in u_detection if i < len_det] + dists = matching.iou_distance(unconfirmed, detections) + matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7) + for itracked, idet in matches: + unconfirmed[itracked].update(detections[idet], self.frame_id, image, update_feature=True) + for it in u_unconfirmed: + track = unconfirmed[it] + track.mark_removed() + removed_stracks.append(track) + + """step 4: init new stracks""" + for inew in u_detection: + track = detections[inew] + if not track.from_det or track.score < 0.6: + continue + track.activate(self.kalman_filter, self.frame_id, image) + activated_starcks.append(track) + + """step 6: update state""" + for track in self.lost_stracks: + if self.frame_id - track.end_frame > self.max_time_lost: + track.mark_removed() + removed_stracks.append(track) + + self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked] + self.lost_stracks = [t for t in self.lost_stracks if t.state == TrackState.Lost] # type: list[STrack] + self.tracked_stracks.extend(activated_starcks) + self.tracked_stracks.extend(refind_stracks) + self.lost_stracks.extend(lost_stracks) + self.removed_stracks.extend(removed_stracks) + + # output_stracks = self.tracked_stracks + self.lost_stracks + + # get scores of lost tracks + output_tracked_stracks = [track for track in self.tracked_stracks if track.is_activated] + + output_stracks = output_tracked_stracks + + return output_stracks + + @staticmethod + def _xyxy_to_tlwh_array(bbox_xyxy): + if isinstance(bbox_xyxy, np.ndarray): + bbox_tlwh = bbox_xyxy.copy() + elif isinstance(bbox_xyxy, torch.Tensor): + bbox_tlwh = 
bbox_xyxy.clone() + bbox_tlwh[:, 2] = bbox_xyxy[:, 2] - bbox_xyxy[:, 0] + bbox_tlwh[:, 3] = bbox_xyxy[:, 3] - bbox_xyxy[:, 1] + return bbox_tlwh diff --git a/tracking/docker-build-context/byte_track/yolox/motdt_tracker/reid_model.py b/tracking/docker-build-context/byte_track/yolox/motdt_tracker/reid_model.py new file mode 100644 index 0000000000000000000000000000000000000000..6ad49e340584452da3eeea487b0f8bf7d7e5db5d --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/motdt_tracker/reid_model.py @@ -0,0 +1,270 @@ +import cv2 +import numpy as np +import torch +from torch.autograd import Variable +import torch.nn.functional as F +import torch.nn as nn +import pickle +import os +from torch.nn.modules import CrossMapLRN2d as SpatialCrossMapLRN +#from torch.legacy.nn import SpatialCrossMapLRN as SpatialCrossMapLRNOld +from torch.autograd import Function, Variable +from torch.nn import Module + + +def clip_boxes(boxes, im_shape): + """ + Clip boxes to image boundaries. + """ + boxes = np.asarray(boxes) + if boxes.shape[0] == 0: + return boxes + boxes = np.copy(boxes) + # x1 >= 0 + boxes[:, 0::4] = np.maximum(np.minimum(boxes[:, 0::4], im_shape[1] - 1), 0) + # y1 >= 0 + boxes[:, 1::4] = np.maximum(np.minimum(boxes[:, 1::4], im_shape[0] - 1), 0) + # x2 < im_shape[1] + boxes[:, 2::4] = np.maximum(np.minimum(boxes[:, 2::4], im_shape[1] - 1), 0) + # y2 < im_shape[0] + boxes[:, 3::4] = np.maximum(np.minimum(boxes[:, 3::4], im_shape[0] - 1), 0) + return boxes + + +def load_net(fname, net, prefix='', load_state_dict=False): + import h5py + with h5py.File(fname, mode='r') as h5f: + h5f_is_module = True + for k in h5f.keys(): + if not str(k).startswith('module.'): + h5f_is_module = False + break + if prefix == '' and not isinstance(net, nn.DataParallel) and h5f_is_module: + prefix = 'module.' 
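+        # Copy tensors from the HDF5 file into the network key by key below;
+        # shape mismatches are reported and skipped rather than raising.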
+
+        for k, v in net.state_dict().items():
+            k = prefix + k
+            if k in h5f:
+                param = torch.from_numpy(np.asarray(h5f[k]))
+                if v.size() != param.size():
+                    print('Inconsistent shape: {}, {}'.format(v.size(), param.size()))
+                else:
+                    v.copy_(param)
+            else:
+                print('No layer: {}'.format(k))
+
+        epoch = h5f.attrs['epoch'] if 'epoch' in h5f.attrs else -1
+
+        if not load_state_dict:
+            if 'learning_rates' in h5f.attrs:
+                lr = h5f.attrs['learning_rates']
+            else:
+                lr = h5f.attrs.get('lr', -1)
+                lr = np.asarray([lr] if lr > 0 else [], dtype=np.float)
+
+            return epoch, lr
+
+    state_file = fname + '.optimizer_state.pk'
+    if os.path.isfile(state_file):
+        with open(state_file, 'rb') as f:
+            state_dicts = pickle.load(f)
+        if not isinstance(state_dicts, list):
+            state_dicts = [state_dicts]
+    else:
+        state_dicts = None
+    return epoch, state_dicts
+
+
+# class SpatialCrossMapLRNFunc(Function):
+
+#     def __init__(self, size, alpha=1e-4, beta=0.75, k=1):
+#         self.size = size
+#         self.alpha = alpha
+#         self.beta = beta
+#         self.k = k
+
+#     def forward(self, input):
+#         self.save_for_backward(input)
+#         self.lrn = SpatialCrossMapLRNOld(self.size, self.alpha, self.beta, self.k)
+#         self.lrn.type(input.type())
+#         return self.lrn.forward(input)
+
+#     def backward(self, grad_output):
+#         input, = self.saved_tensors
+#         return self.lrn.backward(input, grad_output)
+
+
+# # use this one instead
+# class SpatialCrossMapLRN(Module):
+#     def __init__(self, size, alpha=1e-4, beta=0.75, k=1):
+#         super(SpatialCrossMapLRN, self).__init__()
+#         self.size = size
+#         self.alpha = alpha
+#         self.beta = beta
+#         self.k = k
+
+#     def forward(self, input):
+#         return SpatialCrossMapLRNFunc(self.size, self.alpha, self.beta, self.k)(input)
+
+
+class Inception(nn.Module):
+    def __init__(self, in_planes, n1x1, n3x3red, n3x3, n5x5red, n5x5, pool_planes):
+        super(Inception, self).__init__()
+        # 1x1 conv branch
+        self.b1 = nn.Sequential(
+            nn.Conv2d(in_planes, n1x1, kernel_size=1),
+            nn.ReLU(True),
+        )
+
+        # 1x1 conv -> 3x3 conv branch
+        self.b2 = nn.Sequential(
+            nn.Conv2d(in_planes, n3x3red, kernel_size=1),
+            nn.ReLU(True),
+            nn.Conv2d(n3x3red, n3x3, kernel_size=3, padding=1),
+            nn.ReLU(True),
+        )
+
+        # 1x1 conv -> 5x5 conv branch
+        self.b3 = nn.Sequential(
+            nn.Conv2d(in_planes, n5x5red, kernel_size=1),
+            nn.ReLU(True),
+
+            nn.Conv2d(n5x5red, n5x5, kernel_size=5, padding=2),
+            nn.ReLU(True),
+        )
+
+        # 3x3 pool -> 1x1 conv branch
+        self.b4 = nn.Sequential(
+            nn.MaxPool2d(3, stride=1, padding=1),
+
+            nn.Conv2d(in_planes, pool_planes, kernel_size=1),
+            nn.ReLU(True),
+        )
+
+    def forward(self, x):
+        y1 = self.b1(x)
+        y2 = self.b2(x)
+        y3 = self.b3(x)
+        y4 = self.b4(x)
+        return torch.cat([y1,y2,y3,y4], 1)
+
+
+class GoogLeNet(nn.Module):
+
+    output_channels = 832
+
+    def __init__(self):
+        super(GoogLeNet, self).__init__()
+        self.pre_layers = nn.Sequential(
+            nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3),
+            nn.ReLU(True),
+
+            nn.MaxPool2d(3, stride=2, ceil_mode=True),
+            SpatialCrossMapLRN(5),
+
+            nn.Conv2d(64, 64, 1),
+            nn.ReLU(True),
+
+            nn.Conv2d(64, 192, 3, padding=1),
+            nn.ReLU(True),
+
+            SpatialCrossMapLRN(5),
+            nn.MaxPool2d(3, stride=2, ceil_mode=True),
+        )
+
+        self.a3 = Inception(192, 64, 96, 128, 16, 32, 32)
+        self.b3 = Inception(256, 128, 128, 192, 32, 96, 64)
+
+        self.maxpool = nn.MaxPool2d(3, stride=2, ceil_mode=True)
+
+        self.a4 = Inception(480, 192, 96, 208, 16, 48, 64)
+        self.b4 = Inception(512, 160, 112, 224, 24, 64, 64)
+        self.c4 = Inception(512, 128, 128, 256, 24, 64, 64)
+        self.d4 = Inception(512, 112, 144, 288, 32, 64, 64)
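+        # Each Inception block outputs n1x1 + n3x3 + n5x5 + pool_planes channels,
+        # so d4 yields 112 + 288 + 64 + 64 = 528 channels (e4's in_planes) and
+        # e4 yields 256 + 320 + 128 + 128 = 832 = output_channels.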
self.e4 = Inception(528, 256, 160, 320, 32, 128, 128) + + def forward(self, x): + out = self.pre_layers(x) + out = self.a3(out) + out = self.b3(out) + out = self.maxpool(out) + out = self.a4(out) + out = self.b4(out) + out = self.c4(out) + out = self.d4(out) + out = self.e4(out) + + return out + + +class Model(nn.Module): + def __init__(self, n_parts=8): + super(Model, self).__init__() + self.n_parts = n_parts + + self.feat_conv = GoogLeNet() + self.conv_input_feat = nn.Conv2d(self.feat_conv.output_channels, 512, 1) + + # part net + self.conv_att = nn.Conv2d(512, self.n_parts, 1) + + for i in range(self.n_parts): + setattr(self, 'linear_feature{}'.format(i+1), nn.Linear(512, 64)) + + def forward(self, x): + feature = self.feat_conv(x) + feature = self.conv_input_feat(feature) + + att_weights = torch.sigmoid(self.conv_att(feature)) + + linear_feautres = [] + for i in range(self.n_parts): + masked_feature = feature * torch.unsqueeze(att_weights[:, i], 1) + pooled_feature = F.avg_pool2d(masked_feature, masked_feature.size()[2:4]) + linear_feautres.append( + getattr(self, 'linear_feature{}'.format(i+1))(pooled_feature.view(pooled_feature.size(0), -1)) + ) + + concat_features = torch.cat(linear_feautres, 1) + normed_feature = concat_features / torch.clamp(torch.norm(concat_features, 2, 1, keepdim=True), min=1e-6) + + return normed_feature + + +def load_reid_model(ckpt): + model = Model(n_parts=8) + model.inp_size = (80, 160) + load_net(ckpt, model) + print('Load ReID model from {}'.format(ckpt)) + + model = model.cuda() + model.eval() + return model + + +def im_preprocess(image): + image = np.asarray(image, np.float32) + image -= np.array([104, 117, 123], dtype=np.float32).reshape(1, 1, -1) + image = image.transpose((2, 0, 1)) + return image + + +def extract_image_patches(image, bboxes): + bboxes = np.round(bboxes).astype(np.int) + bboxes = clip_boxes(bboxes, image.shape) + patches = [image[box[1]:box[3], box[0]:box[2]] for box in bboxes] + return patches + + +def extract_reid_features(reid_model, image, tlbrs): + if len(tlbrs) == 0: + return torch.FloatTensor() + + patches = extract_image_patches(image, tlbrs) + patches = np.asarray([im_preprocess(cv2.resize(p, reid_model.inp_size)) for p in patches], dtype=np.float32) + + with torch.no_grad(): + im_var = Variable(torch.from_numpy(patches)) + im_var = im_var.cuda() + features = reid_model(im_var).data + return features \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/yolox/sort_tracker/sort.py b/tracking/docker-build-context/byte_track/yolox/sort_tracker/sort.py new file mode 100644 index 0000000000000000000000000000000000000000..9b708ba85f392f1097fec6aaa916ca571b09e3df --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/sort_tracker/sort.py @@ -0,0 +1,251 @@ +""" + SORT: A Simple, Online and Realtime Tracker + Copyright (C) 2016-2020 Alex Bewley alex@bewley.ai + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + You should have received a copy of the GNU General Public License + along with this program. If not, see . 
+    <http://www.gnu.org/licenses/>
+""" +from __future__ import print_function + +import os +import numpy as np + +from filterpy.kalman import KalmanFilter + +np.random.seed(0) + + +def linear_assignment(cost_matrix): + try: + import lap + _, x, y = lap.lapjv(cost_matrix, extend_cost=True) + return np.array([[y[i],i] for i in x if i >= 0]) # + except ImportError: + from scipy.optimize import linear_sum_assignment + x, y = linear_sum_assignment(cost_matrix) + return np.array(list(zip(x, y))) + + +def iou_batch(bb_test, bb_gt): + """ + From SORT: Computes IOU between two bboxes in the form [x1,y1,x2,y2] + """ + bb_gt = np.expand_dims(bb_gt, 0) + bb_test = np.expand_dims(bb_test, 1) + + xx1 = np.maximum(bb_test[..., 0], bb_gt[..., 0]) + yy1 = np.maximum(bb_test[..., 1], bb_gt[..., 1]) + xx2 = np.minimum(bb_test[..., 2], bb_gt[..., 2]) + yy2 = np.minimum(bb_test[..., 3], bb_gt[..., 3]) + w = np.maximum(0., xx2 - xx1) + h = np.maximum(0., yy2 - yy1) + wh = w * h + o = wh / ((bb_test[..., 2] - bb_test[..., 0]) * (bb_test[..., 3] - bb_test[..., 1]) + + (bb_gt[..., 2] - bb_gt[..., 0]) * (bb_gt[..., 3] - bb_gt[..., 1]) - wh) + return(o) + + +def convert_bbox_to_z(bbox): + """ + Takes a bounding box in the form [x1,y1,x2,y2] and returns z in the form + [x,y,s,r] where x,y is the centre of the box and s is the scale/area and r is + the aspect ratio + """ + w = bbox[2] - bbox[0] + h = bbox[3] - bbox[1] + x = bbox[0] + w/2. + y = bbox[1] + h/2. + s = w * h #scale is just area + r = w / float(h) + return np.array([x, y, s, r]).reshape((4, 1)) + + +def convert_x_to_bbox(x,score=None): + """ + Takes a bounding box in the centre form [x,y,s,r] and returns it in the form + [x1,y1,x2,y2] where x1,y1 is the top left and x2,y2 is the bottom right + """ + w = np.sqrt(x[2] * x[3]) + h = x[2] / w + if(score==None): + return np.array([x[0]-w/2.,x[1]-h/2.,x[0]+w/2.,x[1]+h/2.]).reshape((1,4)) + else: + return np.array([x[0]-w/2.,x[1]-h/2.,x[0]+w/2.,x[1]+h/2.,score]).reshape((1,5)) + + +class KalmanBoxTracker(object): + """ + This class represents the internal state of individual tracked objects observed as bbox. + """ + count = 0 + def __init__(self,bbox): + """ + Initialises a tracker using initial bounding box. + """ + #define constant velocity model + self.kf = KalmanFilter(dim_x=7, dim_z=4) + self.kf.F = np.array([[1,0,0,0,1,0,0],[0,1,0,0,0,1,0],[0,0,1,0,0,0,1],[0,0,0,1,0,0,0], [0,0,0,0,1,0,0],[0,0,0,0,0,1,0],[0,0,0,0,0,0,1]]) + self.kf.H = np.array([[1,0,0,0,0,0,0],[0,1,0,0,0,0,0],[0,0,1,0,0,0,0],[0,0,0,1,0,0,0]]) + + self.kf.R[2:,2:] *= 10. + self.kf.P[4:,4:] *= 1000. #give high uncertainty to the unobservable initial velocities + self.kf.P *= 10. + self.kf.Q[-1,-1] *= 0.01 + self.kf.Q[4:,4:] *= 0.01 + + self.kf.x[:4] = convert_bbox_to_z(bbox) + self.time_since_update = 0 + self.id = KalmanBoxTracker.count + KalmanBoxTracker.count += 1 + self.history = [] + self.hits = 0 + self.hit_streak = 0 + self.age = 0 + + def update(self,bbox): + """ + Updates the state vector with observed bbox. + """ + self.time_since_update = 0 + self.history = [] + self.hits += 1 + self.hit_streak += 1 + self.kf.update(convert_bbox_to_z(bbox)) + + def predict(self): + """ + Advances the state vector and returns the predicted bounding box estimate. 
+ """ + if((self.kf.x[6]+self.kf.x[2])<=0): + self.kf.x[6] *= 0.0 + self.kf.predict() + self.age += 1 + if(self.time_since_update>0): + self.hit_streak = 0 + self.time_since_update += 1 + self.history.append(convert_x_to_bbox(self.kf.x)) + return self.history[-1] + + def get_state(self): + """ + Returns the current bounding box estimate. + """ + return convert_x_to_bbox(self.kf.x) + + +def associate_detections_to_trackers(detections,trackers,iou_threshold = 0.3): + """ + Assigns detections to tracked object (both represented as bounding boxes) + Returns 3 lists of matches, unmatched_detections and unmatched_trackers + """ + if(len(trackers)==0): + return np.empty((0,2),dtype=int), np.arange(len(detections)), np.empty((0,5),dtype=int) + + iou_matrix = iou_batch(detections, trackers) + + if min(iou_matrix.shape) > 0: + a = (iou_matrix > iou_threshold).astype(np.int32) + if a.sum(1).max() == 1 and a.sum(0).max() == 1: + matched_indices = np.stack(np.where(a), axis=1) + else: + matched_indices = linear_assignment(-iou_matrix) + else: + matched_indices = np.empty(shape=(0,2)) + + unmatched_detections = [] + for d, det in enumerate(detections): + if(d not in matched_indices[:,0]): + unmatched_detections.append(d) + unmatched_trackers = [] + for t, trk in enumerate(trackers): + if(t not in matched_indices[:,1]): + unmatched_trackers.append(t) + + #filter out matched with low IOU + matches = [] + for m in matched_indices: + if(iou_matrix[m[0], m[1]] self.det_thresh + dets = dets[remain_inds] + # get predicted locations from existing trackers. + trks = np.zeros((len(self.trackers), 5)) + to_del = [] + ret = [] + for t, trk in enumerate(trks): + pos = self.trackers[t].predict()[0] + trk[:] = [pos[0], pos[1], pos[2], pos[3], 0] + if np.any(np.isnan(pos)): + to_del.append(t) + trks = np.ma.compress_rows(np.ma.masked_invalid(trks)) + for t in reversed(to_del): + self.trackers.pop(t) + matched, unmatched_dets, unmatched_trks = associate_detections_to_trackers(dets, trks, self.iou_threshold) + + # update matched trackers with assigned detections + for m in matched: + self.trackers[m[1]].update(dets[m[0], :]) + + # create and initialise new trackers for unmatched detections + for i in unmatched_dets: + trk = KalmanBoxTracker(dets[i,:]) + self.trackers.append(trk) + i = len(self.trackers) + for trk in reversed(self.trackers): + d = trk.get_state()[0] + if (trk.time_since_update < 1) and (trk.hit_streak >= self.min_hits or self.frame_count <= self.min_hits): + ret.append(np.concatenate((d,[trk.id+1])).reshape(1,-1)) # +1 as MOT benchmark requires positive + i -= 1 + # remove dead tracklet + if(trk.time_since_update > self.max_age): + self.trackers.pop(i) + if(len(ret)>0): + return np.concatenate(ret) + return np.empty((0,5)) \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/yolox/tracker/basetrack.py b/tracking/docker-build-context/byte_track/yolox/tracker/basetrack.py new file mode 100644 index 0000000000000000000000000000000000000000..a7130b5cc08ac55705c155594d0f2a1d09f96774 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/tracker/basetrack.py @@ -0,0 +1,52 @@ +import numpy as np +from collections import OrderedDict + + +class TrackState(object): + New = 0 + Tracked = 1 + Lost = 2 + Removed = 3 + + +class BaseTrack(object): + _count = 0 + + track_id = 0 + is_activated = False + state = TrackState.New + + history = OrderedDict() + features = [] + curr_feature = None + score = 0 + start_frame = 0 + frame_id = 0 + time_since_update = 0 + + # multi-camera + 
location = (np.inf, np.inf) + + @property + def end_frame(self): + return self.frame_id + + @staticmethod + def next_id(): + BaseTrack._count += 1 + return BaseTrack._count + + def activate(self, *args): + raise NotImplementedError + + def predict(self): + raise NotImplementedError + + def update(self, *args, **kwargs): + raise NotImplementedError + + def mark_lost(self): + self.state = TrackState.Lost + + def mark_removed(self): + self.state = TrackState.Removed \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/yolox/tracker/byte_tracker.py b/tracking/docker-build-context/byte_track/yolox/tracker/byte_tracker.py new file mode 100644 index 0000000000000000000000000000000000000000..d995082827d36ee037473ad2e07c140d87824681 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/tracker/byte_tracker.py @@ -0,0 +1,355 @@ +from typing import Iterable + +import numpy as np +from collections import deque +import os +import os.path as osp +import copy +import torch +import torch.nn.functional as F + +from .kalman_filter import KalmanFilter +from yolox.tracker import matching +from .basetrack import BaseTrack, TrackState + +class STrack(BaseTrack): + shared_kalman = KalmanFilter() + def __init__(self, tlwh, score, class_id, next_id_supplier): + + # wait activate + self._tlwh = np.asarray(tlwh, dtype=np.float) + self.kalman_filter = None + self.mean, self.covariance = None, None + self.is_activated = False + + self.score = score + self.class_id = class_id + self.tracklet_len = 0 + self.next_id_supplier = next_id_supplier + + def predict(self): + mean_state = self.mean.copy() + if self.state != TrackState.Tracked: + mean_state[7] = 0 + self.mean, self.covariance = self.kalman_filter.predict(mean_state, self.covariance) + + @staticmethod + def multi_predict(stracks): + if len(stracks) > 0: + multi_mean = np.asarray([st.mean.copy() for st in stracks]) + multi_covariance = np.asarray([st.covariance for st in stracks]) + for i, st in enumerate(stracks): + if st.state != TrackState.Tracked: + multi_mean[i][7] = 0 + multi_mean, multi_covariance = STrack.shared_kalman.multi_predict(multi_mean, multi_covariance) + for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)): + stracks[i].mean = mean + stracks[i].covariance = cov + + def activate(self, kalman_filter, frame_id): + """Start a new tracklet""" + self.kalman_filter = kalman_filter + self.track_id = self.next_id_supplier() + self.mean, self.covariance = self.kalman_filter.initiate(self.tlwh_to_xyah(self._tlwh)) + + self.tracklet_len = 0 + self.state = TrackState.Tracked + if frame_id == 1: + self.is_activated = True + # self.is_activated = True + self.frame_id = frame_id + self.start_frame = frame_id + + def re_activate(self, new_track, frame_id, new_id=False): + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) + ) + self.tracklet_len = 0 + self.state = TrackState.Tracked + self.is_activated = True + self.frame_id = frame_id + if new_id: + self.track_id = self.next_id_supplier() + self.score = new_track.score + + def update(self, new_track, frame_id): + """ + Update a matched track + :type new_track: STrack + :type frame_id: int + :type update_feature: bool + :return: + """ + self.frame_id = frame_id + self.tracklet_len += 1 + + new_tlwh = new_track.tlwh + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh)) + self.state = TrackState.Tracked + self.is_activated = True + 
+ self.score = new_track.score + + @property + # @jit(nopython=True) + def tlwh(self): + """Get current position in bounding box format `(top left x, top left y, + width, height)`. + """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + # @jit(nopython=True) + def tlbr(self): + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_xyah(tlwh): + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. + """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self): + return self.tlwh_to_xyah(self.tlwh) + + @staticmethod + # @jit(nopython=True) + def tlbr_to_tlwh(tlbr): + ret = np.asarray(tlbr).copy() + ret[2:] -= ret[:2] + return ret + + @staticmethod + # @jit(nopython=True) + def tlwh_to_tlbr(tlwh): + ret = np.asarray(tlwh).copy() + ret[2:] += ret[:2] + return ret + + def __repr__(self): + return 'OT_{}_({}-{})'.format(self.track_id, self.start_frame, self.end_frame) + + +class BYTETracker(object): + def __init__(self, args, frame_rate=30): + self.tracked_stracks = [] # type: list[STrack] + self.lost_stracks = [] # type: list[STrack] + self.removed_stracks = [] # type: list[STrack] + + self.frame_id = 0 + self.args = args + #self.det_thresh = args.track_thresh + self.det_thresh = args.track_thresh + 0.1 + self.buffer_size = int(frame_rate / 30.0 * args.track_buffer) + self.max_time_lost = self.buffer_size + self.kalman_filter = KalmanFilter() + self.id_counter = 1 + + def next_id(self): + t_id = self.id_counter + self.id_counter += 1 + return t_id + + def update(self, output_results, img_info, img_size, classes=None): + self.frame_id += 1 + activated_starcks = [] + refind_stracks = [] + lost_stracks = [] + removed_stracks = [] + + if output_results.shape[1] == 5: + scores = output_results[:, 4] + bboxes = output_results[:, :4] + else: + output_results = output_results.cpu().numpy() + scores = output_results[:, 4] * output_results[:, 5] + bboxes = output_results[:, :4] # x1y1x2y2 + img_h, img_w = img_info[0], img_info[1] + scale = min(img_size[0] / float(img_h), img_size[1] / float(img_w)) + bboxes /= scale + + remain_inds = scores > self.args.track_thresh + inds_low = scores > 0.1 + inds_high = scores < self.args.track_thresh + + inds_second = np.logical_and(inds_low, inds_high) + dets_second = bboxes[inds_second] + dets = bboxes[remain_inds] + scores_keep = scores[remain_inds] + scores_second = scores[inds_second] + if classes is None: + classes = np.ones((output_results.shape[0])) + classes_keep = classes[remain_inds] + classes_second = classes[inds_second] + + if len(dets) > 0: + '''Detections''' + detections = [STrack(STrack.tlbr_to_tlwh(tlbr), s, cl, self.next_id) for + (tlbr, s, cl) in zip(dets, scores_keep, classes_keep)] + else: + detections = [] + + ''' Add newly detected tracklets to tracked_stracks''' + unconfirmed = [] + tracked_stracks = [] # type: list[STrack] + for track in self.tracked_stracks: + if not track.is_activated: + unconfirmed.append(track) + else: + tracked_stracks.append(track) + + ''' Step 2: First association, with high score detection boxes''' + strack_pool = joint_stracks(tracked_stracks, self.lost_stracks) + # Predict the current location with KF + 
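+        # (one batched predict for every pooled track)
+        # BYTE's first association: high-score detections vs. all pooled
+        # tracks, using IoU fused with detection scores; tracks left
+        # unmatched here get a second chance against the low-score
+        # detections (0.1 < score < track_thresh) in Step 3 below.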
STrack.multi_predict(strack_pool) + dists = matching.iou_distance(strack_pool, detections) + if not self.args.mot20: + dists = matching.fuse_score(dists, detections) + dists = correct_different_classes(dists, strack_pool, detections) + matches, u_track, u_detection = matching.linear_assignment(dists, thresh=self.args.match_thresh) + + for itracked, idet in matches: + track = strack_pool[itracked] + det = detections[idet] + if track.state == TrackState.Tracked: + track.update(detections[idet], self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + ''' Step 3: Second association, with low score detection boxes''' + # association the untrack to the low score detections + if len(dets_second) > 0: + '''Detections''' + detections_second = [STrack(STrack.tlbr_to_tlwh(tlbr), s, cl, self.next_id) for + (tlbr, s, cl) in zip(dets_second, scores_second, classes_second)] + else: + detections_second = [] + r_tracked_stracks = [strack_pool[i] for i in u_track if strack_pool[i].state == TrackState.Tracked] + dists = matching.iou_distance(r_tracked_stracks, detections_second) + dists = correct_different_classes(dists, r_tracked_stracks, detections_second) + matches, u_track, u_detection_second = matching.linear_assignment(dists, thresh=0.5) + for itracked, idet in matches: + track = r_tracked_stracks[itracked] + det = detections_second[idet] + if track.state == TrackState.Tracked: + track.update(det, self.frame_id) + activated_starcks.append(track) + else: + track.re_activate(det, self.frame_id, new_id=False) + refind_stracks.append(track) + + for it in u_track: + track = r_tracked_stracks[it] + if not track.state == TrackState.Lost: + track.mark_lost() + lost_stracks.append(track) + + '''Deal with unconfirmed tracks, usually tracks with only one beginning frame''' + detections = [detections[i] for i in u_detection] + dists = matching.iou_distance(unconfirmed, detections) + if not self.args.mot20: + dists = matching.fuse_score(dists, detections) + dists = correct_different_classes(dists, unconfirmed, detections) + matches, u_unconfirmed, u_detection = matching.linear_assignment(dists, thresh=0.7) + for itracked, idet in matches: + unconfirmed[itracked].update(detections[idet], self.frame_id) + activated_starcks.append(unconfirmed[itracked]) + for it in u_unconfirmed: + track = unconfirmed[it] + track.mark_removed() + removed_stracks.append(track) + + """ Step 4: Init new stracks""" + for inew in u_detection: + track = detections[inew] + if track.score < self.det_thresh: + continue + track.activate(self.kalman_filter, self.frame_id) + activated_starcks.append(track) + """ Step 5: Update state""" + for track in self.lost_stracks: + if self.frame_id - track.end_frame > self.max_time_lost: + track.mark_removed() + removed_stracks.append(track) + + # print('Ramained match {} s'.format(t4-t3)) + + self.tracked_stracks = [t for t in self.tracked_stracks if t.state == TrackState.Tracked] + self.tracked_stracks = joint_stracks(self.tracked_stracks, activated_starcks) + self.tracked_stracks = joint_stracks(self.tracked_stracks, refind_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.tracked_stracks) + self.lost_stracks.extend(lost_stracks) + self.lost_stracks = sub_stracks(self.lost_stracks, self.removed_stracks) + self.removed_stracks.extend(removed_stracks) + self.tracked_stracks, self.lost_stracks = remove_duplicate_stracks(self.tracked_stracks, self.lost_stracks) + # get scores of lost tracks + 
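+        # note: despite the comment above, no scores are gathered here; only
+        # confirmed (is_activated) tracks are returned to the caller.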
output_stracks = [track for track in self.tracked_stracks if track.is_activated] + + return output_stracks + + +def correct_different_classes(dists, st1_iter: Iterable[STrack], st2_iter: Iterable[STrack]): + for i, st1 in enumerate(st1_iter): + for j, st2 in enumerate(st2_iter): + if st1.class_id != st2.class_id: + dists[i,j] = np.inf + return dists + + +def joint_stracks(tlista, tlistb): + exists = {} + res = [] + for t in tlista: + exists[t.track_id] = 1 + res.append(t) + for t in tlistb: + tid = t.track_id + if not exists.get(tid, 0): + exists[tid] = 1 + res.append(t) + return res + + +def sub_stracks(tlista, tlistb): + stracks = {} + for t in tlista: + stracks[t.track_id] = t + for t in tlistb: + tid = t.track_id + if stracks.get(tid, 0): + del stracks[tid] + return list(stracks.values()) + + +def remove_duplicate_stracks(stracksa, stracksb): + pdist = matching.iou_distance(stracksa, stracksb) + pairs = np.where(pdist < 0.15) + dupa, dupb = list(), list() + for p, q in zip(*pairs): + timep = stracksa[p].frame_id - stracksa[p].start_frame + timeq = stracksb[q].frame_id - stracksb[q].start_frame + if timep > timeq: + dupb.append(q) + else: + dupa.append(p) + resa = [t for i, t in enumerate(stracksa) if not i in dupa] + resb = [t for i, t in enumerate(stracksb) if not i in dupb] + return resa, resb diff --git a/tracking/docker-build-context/byte_track/yolox/tracker/kalman_filter.py b/tracking/docker-build-context/byte_track/yolox/tracker/kalman_filter.py new file mode 100644 index 0000000000000000000000000000000000000000..deda8a26292b81bc6512a8f6145afabde6c16d7a --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/tracker/kalman_filter.py @@ -0,0 +1,270 @@ +# vim: expandtab:ts=4:sw=4 +import numpy as np +import scipy.linalg + + +""" +Table for the 0.95 quantile of the chi-square distribution with N degrees of +freedom (contains values for N=1, ..., 9). Taken from MATLAB/Octave's chi2inv +function and used as Mahalanobis gating threshold. +""" +chi2inv95 = { + 1: 3.8415, + 2: 5.9915, + 3: 7.8147, + 4: 9.4877, + 5: 11.070, + 6: 12.592, + 7: 14.067, + 8: 15.507, + 9: 16.919} + + +class KalmanFilter(object): + """ + A simple Kalman filter for tracking bounding boxes in image space. + + The 8-dimensional state space + + x, y, a, h, vx, vy, va, vh + + contains the bounding box center position (x, y), aspect ratio a, height h, + and their respective velocities. + + Object motion follows a constant velocity model. The bounding box location + (x, y, a, h) is taken as direct observation of the state space (linear + observation model). + + """ + + def __init__(self): + ndim, dt = 4, 1. + + # Create Kalman filter model matrices. + self._motion_mat = np.eye(2 * ndim, 2 * ndim) + for i in range(ndim): + self._motion_mat[i, ndim + i] = dt + self._update_mat = np.eye(ndim, 2 * ndim) + + # Motion and observation uncertainty are chosen relative to the current + # state estimate. These weights control the amount of uncertainty in + # the model. This is a bit hacky. + self._std_weight_position = 1. / 20 + self._std_weight_velocity = 1. / 160 + + def initiate(self, measurement): + """Create track from unassociated measurement. + + Parameters + ---------- + measurement : ndarray + Bounding box coordinates (x, y, a, h) with center position (x, y), + aspect ratio a, and height h. + + Returns + ------- + (ndarray, ndarray) + Returns the mean vector (8 dimensional) and covariance matrix (8x8 + dimensional) of the new track. Unobserved velocities are initialized + to 0 mean. 
+ + """ + mean_pos = measurement + mean_vel = np.zeros_like(mean_pos) + mean = np.r_[mean_pos, mean_vel] + + std = [ + 2 * self._std_weight_position * measurement[3], + 2 * self._std_weight_position * measurement[3], + 1e-2, + 2 * self._std_weight_position * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 10 * self._std_weight_velocity * measurement[3], + 1e-5, + 10 * self._std_weight_velocity * measurement[3]] + covariance = np.diag(np.square(std)) + return mean, covariance + + def predict(self, mean, covariance): + """Run Kalman filter prediction step. + + Parameters + ---------- + mean : ndarray + The 8 dimensional mean vector of the object state at the previous + time step. + covariance : ndarray + The 8x8 dimensional covariance matrix of the object state at the + previous time step. + + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. + + """ + std_pos = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-2, + self._std_weight_position * mean[3]] + std_vel = [ + self._std_weight_velocity * mean[3], + self._std_weight_velocity * mean[3], + 1e-5, + self._std_weight_velocity * mean[3]] + motion_cov = np.diag(np.square(np.r_[std_pos, std_vel])) + + #mean = np.dot(self._motion_mat, mean) + mean = np.dot(mean, self._motion_mat.T) + covariance = np.linalg.multi_dot(( + self._motion_mat, covariance, self._motion_mat.T)) + motion_cov + + return mean, covariance + + def project(self, mean, covariance): + """Project state distribution to measurement space. + + Parameters + ---------- + mean : ndarray + The state's mean vector (8 dimensional array). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + + Returns + ------- + (ndarray, ndarray) + Returns the projected mean and covariance matrix of the given state + estimate. + + """ + std = [ + self._std_weight_position * mean[3], + self._std_weight_position * mean[3], + 1e-1, + self._std_weight_position * mean[3]] + innovation_cov = np.diag(np.square(std)) + + mean = np.dot(self._update_mat, mean) + covariance = np.linalg.multi_dot(( + self._update_mat, covariance, self._update_mat.T)) + return mean, covariance + innovation_cov + + def multi_predict(self, mean, covariance): + """Run Kalman filter prediction step (Vectorized version). + Parameters + ---------- + mean : ndarray + The Nx8 dimensional mean matrix of the object states at the previous + time step. + covariance : ndarray + The Nx8x8 dimensional covariance matrics of the object states at the + previous time step. + Returns + ------- + (ndarray, ndarray) + Returns the mean vector and covariance matrix of the predicted + state. Unobserved velocities are initialized to 0 mean. 
+ """ + std_pos = [ + self._std_weight_position * mean[:, 3], + self._std_weight_position * mean[:, 3], + 1e-2 * np.ones_like(mean[:, 3]), + self._std_weight_position * mean[:, 3]] + std_vel = [ + self._std_weight_velocity * mean[:, 3], + self._std_weight_velocity * mean[:, 3], + 1e-5 * np.ones_like(mean[:, 3]), + self._std_weight_velocity * mean[:, 3]] + sqr = np.square(np.r_[std_pos, std_vel]).T + + motion_cov = [] + for i in range(len(mean)): + motion_cov.append(np.diag(sqr[i])) + motion_cov = np.asarray(motion_cov) + + mean = np.dot(mean, self._motion_mat.T) + left = np.dot(self._motion_mat, covariance).transpose((1, 0, 2)) + covariance = np.dot(left, self._motion_mat.T) + motion_cov + + return mean, covariance + + def update(self, mean, covariance, measurement): + """Run Kalman filter correction step. + + Parameters + ---------- + mean : ndarray + The predicted state's mean vector (8 dimensional). + covariance : ndarray + The state's covariance matrix (8x8 dimensional). + measurement : ndarray + The 4 dimensional measurement vector (x, y, a, h), where (x, y) + is the center position, a the aspect ratio, and h the height of the + bounding box. + + Returns + ------- + (ndarray, ndarray) + Returns the measurement-corrected state distribution. + + """ + projected_mean, projected_cov = self.project(mean, covariance) + + chol_factor, lower = scipy.linalg.cho_factor( + projected_cov, lower=True, check_finite=False) + kalman_gain = scipy.linalg.cho_solve( + (chol_factor, lower), np.dot(covariance, self._update_mat.T).T, + check_finite=False).T + innovation = measurement - projected_mean + + new_mean = mean + np.dot(innovation, kalman_gain.T) + new_covariance = covariance - np.linalg.multi_dot(( + kalman_gain, projected_cov, kalman_gain.T)) + return new_mean, new_covariance + + def gating_distance(self, mean, covariance, measurements, + only_position=False, metric='maha'): + """Compute gating distance between state distribution and measurements. + A suitable distance threshold can be obtained from `chi2inv95`. If + `only_position` is False, the chi-square distribution has 4 degrees of + freedom, otherwise 2. + Parameters + ---------- + mean : ndarray + Mean vector over the state distribution (8 dimensional). + covariance : ndarray + Covariance of the state distribution (8x8 dimensional). + measurements : ndarray + An Nx4 dimensional matrix of N measurements, each in + format (x, y, a, h) where (x, y) is the bounding box center + position, a the aspect ratio, and h the height. + only_position : Optional[bool] + If True, distance computation is done with respect to the bounding + box center position only. + Returns + ------- + ndarray + Returns an array of length N, where the i-th element contains the + squared Mahalanobis distance between (mean, covariance) and + `measurements[i]`. 
+ """ + mean, covariance = self.project(mean, covariance) + if only_position: + mean, covariance = mean[:2], covariance[:2, :2] + measurements = measurements[:, :2] + + d = measurements - mean + if metric == 'gaussian': + return np.sum(d * d, axis=1) + elif metric == 'maha': + cholesky_factor = np.linalg.cholesky(covariance) + z = scipy.linalg.solve_triangular( + cholesky_factor, d.T, lower=True, check_finite=False, + overwrite_b=True) + squared_maha = np.sum(z * z, axis=0) + return squared_maha + else: + raise ValueError('invalid distance metric') \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/yolox/tracker/matching.py b/tracking/docker-build-context/byte_track/yolox/tracker/matching.py new file mode 100644 index 0000000000000000000000000000000000000000..d36a6cf5bf758a49bd414f63f402fef3fdd2e18c --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/tracker/matching.py @@ -0,0 +1,181 @@ +import cv2 +import numpy as np +import scipy +import lap +from scipy.spatial.distance import cdist + +from cython_bbox import bbox_overlaps as bbox_ious +from yolox.tracker import kalman_filter +import time + +def merge_matches(m1, m2, shape): + O,P,Q = shape + m1 = np.asarray(m1) + m2 = np.asarray(m2) + + M1 = scipy.sparse.coo_matrix((np.ones(len(m1)), (m1[:, 0], m1[:, 1])), shape=(O, P)) + M2 = scipy.sparse.coo_matrix((np.ones(len(m2)), (m2[:, 0], m2[:, 1])), shape=(P, Q)) + + mask = M1*M2 + match = mask.nonzero() + match = list(zip(match[0], match[1])) + unmatched_O = tuple(set(range(O)) - set([i for i, j in match])) + unmatched_Q = tuple(set(range(Q)) - set([j for i, j in match])) + + return match, unmatched_O, unmatched_Q + + +def _indices_to_matches(cost_matrix, indices, thresh): + matched_cost = cost_matrix[tuple(zip(*indices))] + matched_mask = (matched_cost <= thresh) + + matches = indices[matched_mask] + unmatched_a = tuple(set(range(cost_matrix.shape[0])) - set(matches[:, 0])) + unmatched_b = tuple(set(range(cost_matrix.shape[1])) - set(matches[:, 1])) + + return matches, unmatched_a, unmatched_b + + +def linear_assignment(cost_matrix, thresh): + if cost_matrix.size == 0: + return np.empty((0, 2), dtype=int), tuple(range(cost_matrix.shape[0])), tuple(range(cost_matrix.shape[1])) + matches, unmatched_a, unmatched_b = [], [], [] + cost, x, y = lap.lapjv(cost_matrix, extend_cost=True, cost_limit=thresh) + for ix, mx in enumerate(x): + if mx >= 0: + matches.append([ix, mx]) + unmatched_a = np.where(x < 0)[0] + unmatched_b = np.where(y < 0)[0] + matches = np.asarray(matches) + return matches, unmatched_a, unmatched_b + + +def ious(atlbrs, btlbrs): + """ + Compute cost based on IoU + :type atlbrs: list[tlbr] | np.ndarray + :type atlbrs: list[tlbr] | np.ndarray + + :rtype ious np.ndarray + """ + ious = np.zeros((len(atlbrs), len(btlbrs)), dtype=np.float) + if ious.size == 0: + return ious + + ious = bbox_ious( + np.ascontiguousarray(atlbrs, dtype=np.float), + np.ascontiguousarray(btlbrs, dtype=np.float) + ) + + return ious + + +def iou_distance(atracks, btracks): + """ + Compute cost based on IoU + :type atracks: list[STrack] + :type btracks: list[STrack] + + :rtype cost_matrix np.ndarray + """ + + if (len(atracks)>0 and isinstance(atracks[0], np.ndarray)) or (len(btracks) > 0 and isinstance(btracks[0], np.ndarray)): + atlbrs = atracks + btlbrs = btracks + else: + atlbrs = [track.tlbr for track in atracks] + btlbrs = [track.tlbr for track in btracks] + _ious = ious(atlbrs, btlbrs) + cost_matrix = 1 - _ious + + return cost_matrix + +def 
+def v_iou_distance(atracks, btracks):
+    """
+    Compute cost based on IoU
+    :type atracks: list[STrack]
+    :type btracks: list[STrack]
+
+    :rtype cost_matrix np.ndarray
+    """
+
+    if (len(atracks) > 0 and isinstance(atracks[0], np.ndarray)) or (len(btracks) > 0 and isinstance(btracks[0], np.ndarray)):
+        atlbrs = atracks
+        btlbrs = btracks
+    else:
+        atlbrs = [track.tlwh_to_tlbr(track.pred_bbox) for track in atracks]
+        btlbrs = [track.tlwh_to_tlbr(track.pred_bbox) for track in btracks]
+    _ious = ious(atlbrs, btlbrs)
+    cost_matrix = 1 - _ious
+
+    return cost_matrix
+
+
+def embedding_distance(tracks, detections, metric='cosine'):
+    """
+    :param tracks: list[STrack]
+    :param detections: list[BaseTrack]
+    :param metric:
+    :return: cost_matrix np.ndarray
+    """
+
+    cost_matrix = np.zeros((len(tracks), len(detections)), dtype=float)
+    if cost_matrix.size == 0:
+        return cost_matrix
+    det_features = np.asarray([track.curr_feat for track in detections], dtype=float)
+    track_features = np.asarray([track.smooth_feat for track in tracks], dtype=float)
+    cost_matrix = np.maximum(0.0, cdist(track_features, det_features, metric))  # Normalized features
+    return cost_matrix
+
+
+def gate_cost_matrix(kf, cost_matrix, tracks, detections, only_position=False):
+    if cost_matrix.size == 0:
+        return cost_matrix
+    gating_dim = 2 if only_position else 4
+    gating_threshold = kalman_filter.chi2inv95[gating_dim]
+    measurements = np.asarray([det.to_xyah() for det in detections])
+    for row, track in enumerate(tracks):
+        gating_distance = kf.gating_distance(
+            track.mean, track.covariance, measurements, only_position)
+        cost_matrix[row, gating_distance > gating_threshold] = np.inf
+    return cost_matrix
+
+
+def fuse_motion(kf, cost_matrix, tracks, detections, only_position=False, lambda_=0.98):
+    if cost_matrix.size == 0:
+        return cost_matrix
+    gating_dim = 2 if only_position else 4
+    gating_threshold = kalman_filter.chi2inv95[gating_dim]
+    measurements = np.asarray([det.to_xyah() for det in detections])
+    for row, track in enumerate(tracks):
+        gating_distance = kf.gating_distance(
+            track.mean, track.covariance, measurements, only_position, metric='maha')
+        cost_matrix[row, gating_distance > gating_threshold] = np.inf
+        cost_matrix[row] = lambda_ * cost_matrix[row] + (1 - lambda_) * gating_distance
+    return cost_matrix
+
+
+def fuse_iou(cost_matrix, tracks, detections):
+    if cost_matrix.size == 0:
+        return cost_matrix
+    reid_sim = 1 - cost_matrix
+    iou_dist = iou_distance(tracks, detections)
+    iou_sim = 1 - iou_dist
+    fuse_sim = reid_sim * (1 + iou_sim) / 2
+    det_scores = np.array([det.score for det in detections])
+    det_scores = np.expand_dims(det_scores, axis=0).repeat(cost_matrix.shape[0], axis=0)
+    #fuse_sim = fuse_sim * (1 + det_scores) / 2
+    fuse_cost = 1 - fuse_sim
+    return fuse_cost
+
+
+def fuse_score(cost_matrix, detections):
+    if cost_matrix.size == 0:
+        return cost_matrix
+    iou_sim = 1 - cost_matrix
+    det_scores = np.array([det.score for det in detections])
+    det_scores = np.expand_dims(det_scores, axis=0).repeat(cost_matrix.shape[0], axis=0)
+    fuse_sim = iou_sim * det_scores
+    fuse_cost = 1 - fuse_sim
+    return fuse_cost
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/yolox/tracking_utils/evaluation.py b/tracking/docker-build-context/byte_track/yolox/tracking_utils/evaluation.py
new file mode 100644
index 0000000000000000000000000000000000000000..6e2f063e43a529da934e897390bfa0daf8c19610
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/tracking_utils/evaluation.py
@@ -0,0 +1,113 @@
+import os
+import numpy as np
+import copy
+import motmetrics as mm
+mm.lap.default_solver = 'lap'
+
+from yolox.tracking_utils.io import read_results, unzip_objs
+
+
+class Evaluator(object):
+
+    def __init__(self, data_root, seq_name, data_type):
+        self.data_root = data_root
+        self.seq_name = seq_name
+        self.data_type = data_type
+
+        self.load_annotations()
+        self.reset_accumulator()
+
+    def load_annotations(self):
+        assert self.data_type == 'mot'
+
+        gt_filename = os.path.join(self.data_root, self.seq_name, 'gt', 'gt.txt')
+        self.gt_frame_dict = read_results(gt_filename, self.data_type, is_gt=True)
+        self.gt_ignore_frame_dict = read_results(gt_filename, self.data_type, is_ignore=True)
+
+    def reset_accumulator(self):
+        self.acc = mm.MOTAccumulator(auto_id=True)
+
+    def eval_frame(self, frame_id, trk_tlwhs, trk_ids, rtn_events=False):
+        # results
+        trk_tlwhs = np.copy(trk_tlwhs)
+        trk_ids = np.copy(trk_ids)
+
+        # gts
+        gt_objs = self.gt_frame_dict.get(frame_id, [])
+        gt_tlwhs, gt_ids = unzip_objs(gt_objs)[:2]
+
+        # ignore boxes
+        ignore_objs = self.gt_ignore_frame_dict.get(frame_id, [])
+        ignore_tlwhs = unzip_objs(ignore_objs)[0]
+
+        # remove ignored results
+        keep = np.ones(len(trk_tlwhs), dtype=bool)
+        iou_distance = mm.distances.iou_matrix(ignore_tlwhs, trk_tlwhs, max_iou=0.5)
+        if len(iou_distance) > 0:
+            match_is, match_js = mm.lap.linear_sum_assignment(iou_distance)
+            match_is, match_js = map(lambda a: np.asarray(a, dtype=int), [match_is, match_js])
+            match_ious = iou_distance[match_is, match_js]
+
+            match_js = np.asarray(match_js, dtype=int)
+            match_js = match_js[np.logical_not(np.isnan(match_ious))]
+            keep[match_js] = False
+            trk_tlwhs = trk_tlwhs[keep]
+            trk_ids = trk_ids[keep]
+
+        # get distance matrix
+        iou_distance = mm.distances.iou_matrix(gt_tlwhs, trk_tlwhs, max_iou=0.5)
+
+        # acc
+        self.acc.update(gt_ids, trk_ids, iou_distance)
+
+        if rtn_events and iou_distance.size > 0 and hasattr(self.acc, 'last_mot_events'):
+            events = self.acc.last_mot_events  # only supported by https://github.com/longcw/py-motmetrics
+        else:
+            events = None
+        return events
+
+    def eval_file(self, filename):
+        self.reset_accumulator()
+
+        result_frame_dict = read_results(filename, self.data_type, is_gt=False)
+        #frames = sorted(list(set(self.gt_frame_dict.keys()) | set(result_frame_dict.keys())))
+        frames = sorted(list(set(result_frame_dict.keys())))
+        for frame_id in frames:
+            trk_objs = result_frame_dict.get(frame_id, [])
+            trk_tlwhs, trk_ids = unzip_objs(trk_objs)[:2]
+            self.eval_frame(frame_id, trk_tlwhs, trk_ids, rtn_events=False)
+
+        return self.acc
+
+    @staticmethod
+    def get_summary(accs, names, metrics=('mota', 'num_switches', 'idp', 'idr', 'idf1', 'precision', 'recall')):
+        names = copy.deepcopy(names)
+        if metrics is None:
+            metrics = mm.metrics.motchallenge_metrics
+        metrics = copy.deepcopy(metrics)
+
+        mh = mm.metrics.create()
+        summary = mh.compute_many(
+            accs,
+            metrics=metrics,
+            names=names,
            generate_overall=True
+        )
+
+        return summary
+
+    @staticmethod
+    def save_summary(summary, filename):
+        import pandas as pd
+        writer = pd.ExcelWriter(filename)
+        summary.to_excel(writer)
+        writer.close()
\ No newline at end of file
diff --git a/tracking/docker-build-context/byte_track/yolox/tracking_utils/io.py b/tracking/docker-build-context/byte_track/yolox/tracking_utils/io.py
new file mode 100644
index 0000000000000000000000000000000000000000..5c8b053c9bc868d645b7ce0bec057879ba51f2d8
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/tracking_utils/io.py
@@ -0,0 +1,116 @@
+import os
+from typing import Dict
+import numpy as np
+
+
+def write_results(filename, results_dict: Dict, data_type: str):
+    if not filename:
+        return
+    path = os.path.dirname(filename)
+    if not os.path.exists(path):
+        os.makedirs(path)
+
+    if data_type in ('mot', 'mcmot', 'lab'):
+        save_format = '{frame},{id},{x1},{y1},{w},{h},1,-1,-1,-1\n'
+    elif data_type == 'kitti':
+        save_format = '{frame} {id} pedestrian -1 -1 -10 {x1} {y1} {x2} {y2} -1 -1 -1 -1000 -1000 -1000 -10 {score}\n'
+    else:
+        raise ValueError(data_type)
+
+    with open(filename, 'w') as f:
+        for frame_id, frame_data in results_dict.items():
+            if data_type == 'kitti':
+                frame_id -= 1
+            for tlwh, track_id in frame_data:
+                if track_id < 0:
+                    continue
+                x1, y1, w, h = tlwh
+                x2, y2 = x1 + w, y1 + h
+                line = save_format.format(frame=frame_id, id=track_id, x1=x1, y1=y1, x2=x2, y2=y2, w=w, h=h, score=1.0)
+                f.write(line)
+
+
+def read_results(filename, data_type: str, is_gt=False, is_ignore=False):
+    if data_type in ('mot', 'lab'):
+        read_fun = read_mot_results
+    else:
+        raise ValueError('Unknown data type: {}'.format(data_type))
+
+    return read_fun(filename, is_gt, is_ignore)
+
+
+"""
+labels={'ped', ...          % 1
+'person_on_vhcl', ...       % 2
+'car', ...                  % 3
+'bicycle', ...              % 4
+'mbike', ...                % 5
+'non_mot_vhcl', ...         % 6
+'static_person', ...        % 7
+'distractor', ...           % 8
+'occluder', ...             % 9
+'occluder_on_grnd', ...     % 10
+'occluder_full', ...        % 11
+'reflection', ...           % 12
+'crowd' ...
% 13 +}; +""" + + +def read_mot_results(filename, is_gt, is_ignore): + valid_labels = {1} + ignore_labels = {2, 7, 8, 12} + results_dict = dict() + if os.path.isfile(filename): + with open(filename, 'r') as f: + for line in f.readlines(): + linelist = line.split(',') + if len(linelist) < 7: + continue + fid = int(linelist[0]) + if fid < 1: + continue + results_dict.setdefault(fid, list()) + + box_size = float(linelist[4]) * float(linelist[5]) + + if is_gt: + if 'MOT16-' in filename or 'MOT17-' in filename: + label = int(float(linelist[7])) + mark = int(float(linelist[6])) + if mark == 0 or label not in valid_labels: + continue + score = 1 + elif is_ignore: + if 'MOT16-' in filename or 'MOT17-' in filename: + label = int(float(linelist[7])) + vis_ratio = float(linelist[8]) + if label not in ignore_labels and vis_ratio >= 0: + continue + else: + continue + score = 1 + else: + score = float(linelist[6]) + + #if box_size > 7000: + #if box_size <= 7000 or box_size >= 15000: + #if box_size < 15000: + #continue + + tlwh = tuple(map(float, linelist[2:6])) + target_id = int(linelist[1]) + + results_dict[fid].append((tlwh, target_id, score)) + + return results_dict + + +def unzip_objs(objs): + if len(objs) > 0: + tlwhs, ids, scores = zip(*objs) + else: + tlwhs, ids, scores = [], [], [] + tlwhs = np.asarray(tlwhs, dtype=float).reshape(-1, 4) + + return tlwhs, ids, scores \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/yolox/tracking_utils/timer.py b/tracking/docker-build-context/byte_track/yolox/tracking_utils/timer.py new file mode 100644 index 0000000000000000000000000000000000000000..c9b15fb969bce7b31a1613a6401141dcc9cf180a --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/tracking_utils/timer.py @@ -0,0 +1,37 @@ +import time + + +class Timer(object): + """A simple timer.""" + def __init__(self): + self.total_time = 0. + self.calls = 0 + self.start_time = 0. + self.diff = 0. + self.average_time = 0. + + self.duration = 0. + + def tic(self): + # using time.time instead of time.clock because time time.clock + # does not normalize for multithreading + self.start_time = time.time() + + def toc(self, average=True): + self.diff = time.time() - self.start_time + self.total_time += self.diff + self.calls += 1 + self.average_time = self.total_time / self.calls + if average: + self.duration = self.average_time + else: + self.duration = self.diff + return self.duration + + def clear(self): + self.total_time = 0. + self.calls = 0 + self.start_time = 0. + self.diff = 0. + self.average_time = 0. + self.duration = 0. \ No newline at end of file diff --git a/tracking/docker-build-context/byte_track/yolox/utils/__init__.py b/tracking/docker-build-context/byte_track/yolox/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a268c1a4538ce568c8f9ef1c0d10511fdac34be1 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/utils/__init__.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. 
+ +from .allreduce_norm import * +from .boxes import * +from .checkpoint import load_ckpt, save_checkpoint +from .demo_utils import * +from .dist import * +from .ema import ModelEMA +from .logger import setup_logger +from .lr_scheduler import LRScheduler +from .metric import * +from .model_utils import * +from .setup_env import * +from .visualize import * diff --git a/tracking/docker-build-context/byte_track/yolox/utils/allreduce_norm.py b/tracking/docker-build-context/byte_track/yolox/utils/allreduce_norm.py new file mode 100644 index 0000000000000000000000000000000000000000..d9b51e2608e3ae3b82355cfc4593edb96213b520 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/utils/allreduce_norm.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. + +import torch +from torch import distributed as dist +from torch import nn + +import pickle +from collections import OrderedDict + +from .dist import _get_global_gloo_group, get_world_size + +ASYNC_NORM = ( + nn.BatchNorm1d, + nn.BatchNorm2d, + nn.BatchNorm3d, + nn.InstanceNorm1d, + nn.InstanceNorm2d, + nn.InstanceNorm3d, +) + +__all__ = [ + "get_async_norm_states", + "pyobj2tensor", + "tensor2pyobj", + "all_reduce", + "all_reduce_norm", +] + + +def get_async_norm_states(module): + async_norm_states = OrderedDict() + for name, child in module.named_modules(): + if isinstance(child, ASYNC_NORM): + for k, v in child.state_dict().items(): + async_norm_states[".".join([name, k])] = v + return async_norm_states + + +def pyobj2tensor(pyobj, device="cuda"): + """serialize picklable python object to tensor""" + storage = torch.ByteStorage.from_buffer(pickle.dumps(pyobj)) + return torch.ByteTensor(storage).to(device=device) + + +def tensor2pyobj(tensor): + """deserialize tensor to picklable python object""" + return pickle.loads(tensor.cpu().numpy().tobytes()) + + +def _get_reduce_op(op_name): + return { + "sum": dist.ReduceOp.SUM, + "mean": dist.ReduceOp.SUM, + }[op_name.lower()] + + +def all_reduce(py_dict, op="sum", group=None): + """ + Apply all reduce function for python dict object. + NOTE: make sure that every py_dict has the same keys and values are in the same shape. + + Args: + py_dict (dict): dict to apply all reduce op. + op (str): operator, could be "sum" or "mean". + """ + world_size = get_world_size() + if world_size == 1: + return py_dict + if group is None: + group = _get_global_gloo_group() + if dist.get_world_size(group) == 1: + return py_dict + + # all reduce logic across different devices. + py_key = list(py_dict.keys()) + py_key_tensor = pyobj2tensor(py_key) + dist.broadcast(py_key_tensor, src=0) + py_key = tensor2pyobj(py_key_tensor) + + tensor_shapes = [py_dict[k].shape for k in py_key] + tensor_numels = [py_dict[k].numel() for k in py_key] + + flatten_tensor = torch.cat([py_dict[k].flatten() for k in py_key]) + dist.all_reduce(flatten_tensor, op=_get_reduce_op(op)) + if op == "mean": + flatten_tensor /= world_size + + split_tensors = [ + x.reshape(shape) + for x, shape in zip(torch.split(flatten_tensor, tensor_numels), tensor_shapes) + ] + return OrderedDict({k: v for k, v in zip(py_key, split_tensors)}) + + +def all_reduce_norm(module): + """ + All reduce norm statistics in different devices. 
+ """ + states = get_async_norm_states(module) + states = all_reduce(states, op="mean") + module.load_state_dict(states, strict=False) diff --git a/tracking/docker-build-context/byte_track/yolox/utils/boxes.py b/tracking/docker-build-context/byte_track/yolox/utils/boxes.py new file mode 100644 index 0000000000000000000000000000000000000000..ac262b9608f85151e4bbeac3c7b02779dc63de75 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/utils/boxes.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. + +import numpy as np + +import torch +import torchvision +import torch.nn.functional as F + +__all__ = [ + "filter_box", + "postprocess", + "bboxes_iou", + "matrix_iou", + "adjust_box_anns", + "xyxy2xywh", + "xyxy2cxcywh", +] + + +def filter_box(output, scale_range): + """ + output: (N, 5+class) shape + """ + min_scale, max_scale = scale_range + w = output[:, 2] - output[:, 0] + h = output[:, 3] - output[:, 1] + keep = (w * h > min_scale * min_scale) & (w * h < max_scale * max_scale) + return output[keep] + + +def postprocess(prediction, num_classes, conf_thre=0.7, nms_thre=0.45): + box_corner = prediction.new(prediction.shape) + box_corner[:, :, 0] = prediction[:, :, 0] - prediction[:, :, 2] / 2 + box_corner[:, :, 1] = prediction[:, :, 1] - prediction[:, :, 3] / 2 + box_corner[:, :, 2] = prediction[:, :, 0] + prediction[:, :, 2] / 2 + box_corner[:, :, 3] = prediction[:, :, 1] + prediction[:, :, 3] / 2 + prediction[:, :, :4] = box_corner[:, :, :4] + + output = [None for _ in range(len(prediction))] + for i, image_pred in enumerate(prediction): + + # If none are remaining => process next image + if not image_pred.size(0): + continue + # Get score and class with highest confidence + class_conf, class_pred = torch.max( + image_pred[:, 5 : 5 + num_classes], 1, keepdim=True + ) + + conf_mask = (image_pred[:, 4] * class_conf.squeeze() >= conf_thre).squeeze() + # _, conf_mask = torch.topk((image_pred[:, 4] * class_conf.squeeze()), 1000) + # Detections ordered as (x1, y1, x2, y2, obj_conf, class_conf, class_pred) + detections = torch.cat((image_pred[:, :5], class_conf, class_pred.float()), 1) + detections = detections[conf_mask] + if not detections.size(0): + continue + + nms_out_index = torchvision.ops.batched_nms( + detections[:, :4], + detections[:, 4] * detections[:, 5], + detections[:, 6], + nms_thre, + ) + detections = detections[nms_out_index] + if output[i] is None: + output[i] = detections + else: + output[i] = torch.cat((output[i], detections)) + + return output + + +def bboxes_iou(bboxes_a, bboxes_b, xyxy=True): + if bboxes_a.shape[1] != 4 or bboxes_b.shape[1] != 4: + raise IndexError + + if xyxy: + tl = torch.max(bboxes_a[:, None, :2], bboxes_b[:, :2]) + br = torch.min(bboxes_a[:, None, 2:], bboxes_b[:, 2:]) + area_a = torch.prod(bboxes_a[:, 2:] - bboxes_a[:, :2], 1) + area_b = torch.prod(bboxes_b[:, 2:] - bboxes_b[:, :2], 1) + else: + tl = torch.max( + (bboxes_a[:, None, :2] - bboxes_a[:, None, 2:] / 2), + (bboxes_b[:, :2] - bboxes_b[:, 2:] / 2), + ) + br = torch.min( + (bboxes_a[:, None, :2] + bboxes_a[:, None, 2:] / 2), + (bboxes_b[:, :2] + bboxes_b[:, 2:] / 2), + ) + + area_a = torch.prod(bboxes_a[:, 2:], 1) + area_b = torch.prod(bboxes_b[:, 2:], 1) + en = (tl < br).type(tl.type()).prod(dim=2) + area_i = torch.prod(br - tl, 2) * en # * ((tl < br).all()) + return area_i / (area_a[:, None] + area_b - area_i) + + +def matrix_iou(a, b): + """ + return iou of a and b, numpy version for data augenmentation 
+ """ + lt = np.maximum(a[:, np.newaxis, :2], b[:, :2]) + rb = np.minimum(a[:, np.newaxis, 2:], b[:, 2:]) + + area_i = np.prod(rb - lt, axis=2) * (lt < rb).all(axis=2) + area_a = np.prod(a[:, 2:] - a[:, :2], axis=1) + area_b = np.prod(b[:, 2:] - b[:, :2], axis=1) + return area_i / (area_a[:, np.newaxis] + area_b - area_i + 1e-12) + + +def adjust_box_anns(bbox, scale_ratio, padw, padh, w_max, h_max): + #bbox[:, 0::2] = np.clip(bbox[:, 0::2] * scale_ratio + padw, 0, w_max) + #bbox[:, 1::2] = np.clip(bbox[:, 1::2] * scale_ratio + padh, 0, h_max) + bbox[:, 0::2] = bbox[:, 0::2] * scale_ratio + padw + bbox[:, 1::2] = bbox[:, 1::2] * scale_ratio + padh + return bbox + + +def xyxy2xywh(bboxes): + bboxes[:, 2] = bboxes[:, 2] - bboxes[:, 0] + bboxes[:, 3] = bboxes[:, 3] - bboxes[:, 1] + return bboxes + + +def xyxy2cxcywh(bboxes): + bboxes[:, 2] = bboxes[:, 2] - bboxes[:, 0] + bboxes[:, 3] = bboxes[:, 3] - bboxes[:, 1] + bboxes[:, 0] = bboxes[:, 0] + bboxes[:, 2] * 0.5 + bboxes[:, 1] = bboxes[:, 1] + bboxes[:, 3] * 0.5 + return bboxes diff --git a/tracking/docker-build-context/byte_track/yolox/utils/checkpoint.py b/tracking/docker-build-context/byte_track/yolox/utils/checkpoint.py new file mode 100644 index 0000000000000000000000000000000000000000..55903b4695b1926f76ced732797893702cf6387c --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/utils/checkpoint.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. +from loguru import logger + +import torch + +import os +import shutil + + +def load_ckpt(model, ckpt): + model_state_dict = model.state_dict() + load_dict = {} + for key_model, v in model_state_dict.items(): + if key_model not in ckpt: + logger.warning( + "{} is not in the ckpt. Please double check and see if this is desired.".format( + key_model + ) + ) + continue + v_ckpt = ckpt[key_model] + if v.shape != v_ckpt.shape: + logger.warning( + "Shape of {} in checkpoint is {}, while shape of {} in model is {}.".format( + key_model, v_ckpt.shape, key_model, v.shape + ) + ) + continue + load_dict[key_model] = v_ckpt + + model.load_state_dict(load_dict, strict=False) + return model + + +def save_checkpoint(state, is_best, save_dir, model_name=""): + if not os.path.exists(save_dir): + os.makedirs(save_dir) + filename = os.path.join(save_dir, model_name + "_ckpt.pth.tar") + torch.save(state, filename) + if is_best: + best_filename = os.path.join(save_dir, "best_ckpt.pth.tar") + shutil.copyfile(filename, best_filename) diff --git a/tracking/docker-build-context/byte_track/yolox/utils/demo_utils.py b/tracking/docker-build-context/byte_track/yolox/utils/demo_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..093443cd568a2b0421fa707eb8fda97ec154b142 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/utils/demo_utils.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. 
+ +import numpy as np + +import os + +__all__ = ["mkdir", "nms", "multiclass_nms", "demo_postprocess"] + + +def mkdir(path): + if not os.path.exists(path): + os.makedirs(path) + + +def nms(boxes, scores, nms_thr): + """Single class NMS implemented in Numpy.""" + x1 = boxes[:, 0] + y1 = boxes[:, 1] + x2 = boxes[:, 2] + y2 = boxes[:, 3] + + areas = (x2 - x1 + 1) * (y2 - y1 + 1) + order = scores.argsort()[::-1] + + keep = [] + while order.size > 0: + i = order[0] + keep.append(i) + xx1 = np.maximum(x1[i], x1[order[1:]]) + yy1 = np.maximum(y1[i], y1[order[1:]]) + xx2 = np.minimum(x2[i], x2[order[1:]]) + yy2 = np.minimum(y2[i], y2[order[1:]]) + + w = np.maximum(0.0, xx2 - xx1 + 1) + h = np.maximum(0.0, yy2 - yy1 + 1) + inter = w * h + ovr = inter / (areas[i] + areas[order[1:]] - inter) + + inds = np.where(ovr <= nms_thr)[0] + order = order[inds + 1] + + return keep + + +def multiclass_nms(boxes, scores, nms_thr, score_thr): + """Multiclass NMS implemented in Numpy""" + final_dets = [] + num_classes = scores.shape[1] + for cls_ind in range(num_classes): + cls_scores = scores[:, cls_ind] + valid_score_mask = cls_scores > score_thr + if valid_score_mask.sum() == 0: + continue + else: + valid_scores = cls_scores[valid_score_mask] + valid_boxes = boxes[valid_score_mask] + keep = nms(valid_boxes, valid_scores, nms_thr) + if len(keep) > 0: + cls_inds = np.ones((len(keep), 1)) * cls_ind + dets = np.concatenate( + [valid_boxes[keep], valid_scores[keep, None], cls_inds], 1 + ) + final_dets.append(dets) + if len(final_dets) == 0: + return None + return np.concatenate(final_dets, 0) + + +def demo_postprocess(outputs, img_size, p6=False): + + grids = [] + expanded_strides = [] + + if not p6: + strides = [8, 16, 32] + else: + strides = [8, 16, 32, 64] + + hsizes = [img_size[0] // stride for stride in strides] + wsizes = [img_size[1] // stride for stride in strides] + + for hsize, wsize, stride in zip(hsizes, wsizes, strides): + xv, yv = np.meshgrid(np.arange(wsize), np.arange(hsize)) + grid = np.stack((xv, yv), 2).reshape(1, -1, 2) + grids.append(grid) + shape = grid.shape[:2] + expanded_strides.append(np.full((*shape, 1), stride)) + + grids = np.concatenate(grids, 1) + expanded_strides = np.concatenate(expanded_strides, 1) + outputs[..., :2] = (outputs[..., :2] + grids) * expanded_strides + outputs[..., 2:4] = np.exp(outputs[..., 2:4]) * expanded_strides + + return outputs diff --git a/tracking/docker-build-context/byte_track/yolox/utils/dist.py b/tracking/docker-build-context/byte_track/yolox/utils/dist.py new file mode 100644 index 0000000000000000000000000000000000000000..691c30690a5b4237cab23b9547cb106a1bd31dd7 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/utils/dist.py @@ -0,0 +1,255 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# This file mainly comes from +# https://github.com/facebookresearch/detectron2/blob/master/detectron2/utils/comm.py +# Copyright (c) Facebook, Inc. and its affiliates. +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. +""" +This file contains primitives for multi-gpu communication. +This is useful when doing distributed training. 
+""" + +import numpy as np + +import torch +from torch import distributed as dist + +import functools +import logging +import pickle +import time + +__all__ = [ + "is_main_process", + "synchronize", + "get_world_size", + "get_rank", + "get_local_rank", + "get_local_size", + "time_synchronized", + "gather", + "all_gather", +] + +_LOCAL_PROCESS_GROUP = None + + +def synchronize(): + """ + Helper function to synchronize (barrier) among all processes when using distributed training + """ + if not dist.is_available(): + return + if not dist.is_initialized(): + return + world_size = dist.get_world_size() + if world_size == 1: + return + dist.barrier() + + +def get_world_size() -> int: + if not dist.is_available(): + return 1 + if not dist.is_initialized(): + return 1 + return dist.get_world_size() + + +def get_rank() -> int: + if not dist.is_available(): + return 0 + if not dist.is_initialized(): + return 0 + return dist.get_rank() + + +def get_local_rank() -> int: + """ + Returns: + The rank of the current process within the local (per-machine) process group. + """ + if not dist.is_available(): + return 0 + if not dist.is_initialized(): + return 0 + assert _LOCAL_PROCESS_GROUP is not None + return dist.get_rank(group=_LOCAL_PROCESS_GROUP) + + +def get_local_size() -> int: + """ + Returns: + The size of the per-machine process group, i.e. the number of processes per machine. + """ + if not dist.is_available(): + return 1 + if not dist.is_initialized(): + return 1 + return dist.get_world_size(group=_LOCAL_PROCESS_GROUP) + + +def is_main_process() -> bool: + return get_rank() == 0 + + +@functools.lru_cache() +def _get_global_gloo_group(): + """ + Return a process group based on gloo backend, containing all the ranks + The result is cached. + """ + if dist.get_backend() == "nccl": + return dist.new_group(backend="gloo") + else: + return dist.group.WORLD + + +def _serialize_to_tensor(data, group): + backend = dist.get_backend(group) + assert backend in ["gloo", "nccl"] + device = torch.device("cpu" if backend == "gloo" else "cuda") + + buffer = pickle.dumps(data) + if len(buffer) > 1024 ** 3: + logger = logging.getLogger(__name__) + logger.warning( + "Rank {} trying to all-gather {:.2f} GB of data on device {}".format( + get_rank(), len(buffer) / (1024 ** 3), device + ) + ) + storage = torch.ByteStorage.from_buffer(buffer) + tensor = torch.ByteTensor(storage).to(device=device) + return tensor + + +def _pad_to_largest_tensor(tensor, group): + """ + Returns: + list[int]: size of the tensor, on each rank + Tensor: padded tensor that has the max size + """ + world_size = dist.get_world_size(group=group) + assert ( + world_size >= 1 + ), "comm.gather/all_gather must be called from ranks within the given group!" + local_size = torch.tensor([tensor.numel()], dtype=torch.int64, device=tensor.device) + size_list = [ + torch.zeros([1], dtype=torch.int64, device=tensor.device) + for _ in range(world_size) + ] + dist.all_gather(size_list, local_size, group=group) + size_list = [int(size.item()) for size in size_list] + + max_size = max(size_list) + + # we pad the tensor because torch all_gather does not support + # gathering tensors of different shapes + if local_size != max_size: + padding = torch.zeros( + (max_size - local_size,), dtype=torch.uint8, device=tensor.device + ) + tensor = torch.cat((tensor, padding), dim=0) + return size_list, tensor + + +def all_gather(data, group=None): + """ + Run all_gather on arbitrary picklable data (not necessarily tensors). 
+ + Args: + data: any picklable object + group: a torch process group. By default, will use a group which + contains all ranks on gloo backend. + Returns: + list[data]: list of data gathered from each rank + """ + if get_world_size() == 1: + return [data] + if group is None: + group = _get_global_gloo_group() + if dist.get_world_size(group) == 1: + return [data] + + tensor = _serialize_to_tensor(data, group) + + size_list, tensor = _pad_to_largest_tensor(tensor, group) + max_size = max(size_list) + + # receiving Tensor from all ranks + tensor_list = [ + torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) + for _ in size_list + ] + dist.all_gather(tensor_list, tensor, group=group) + + data_list = [] + for size, tensor in zip(size_list, tensor_list): + buffer = tensor.cpu().numpy().tobytes()[:size] + data_list.append(pickle.loads(buffer)) + + return data_list + + +def gather(data, dst=0, group=None): + """ + Run gather on arbitrary picklable data (not necessarily tensors). + + Args: + data: any picklable object + dst (int): destination rank + group: a torch process group. By default, will use a group which + contains all ranks on gloo backend. + + Returns: + list[data]: on dst, a list of data gathered from each rank. Otherwise, + an empty list. + """ + if get_world_size() == 1: + return [data] + if group is None: + group = _get_global_gloo_group() + if dist.get_world_size(group=group) == 1: + return [data] + rank = dist.get_rank(group=group) + + tensor = _serialize_to_tensor(data, group) + size_list, tensor = _pad_to_largest_tensor(tensor, group) + + # receiving Tensor from all ranks + if rank == dst: + max_size = max(size_list) + tensor_list = [ + torch.empty((max_size,), dtype=torch.uint8, device=tensor.device) + for _ in size_list + ] + dist.gather(tensor, tensor_list, dst=dst, group=group) + + data_list = [] + for size, tensor in zip(size_list, tensor_list): + buffer = tensor.cpu().numpy().tobytes()[:size] + data_list.append(pickle.loads(buffer)) + return data_list + else: + dist.gather(tensor, [], dst=dst, group=group) + return [] + + +def shared_random_seed(): + """ + Returns: + int: a random number that is the same across all workers. + If workers need a shared RNG, they can use this shared seed to + create one. + All workers must call this function, otherwise it will deadlock. + """ + ints = np.random.randint(2 ** 31) + all_ints = all_gather(ints) + return all_ints[0] + + +def time_synchronized(): + """pytorch-accurate time""" + if torch.cuda.is_available(): + torch.cuda.synchronize() + return time.time() diff --git a/tracking/docker-build-context/byte_track/yolox/utils/ema.py b/tracking/docker-build-context/byte_track/yolox/utils/ema.py new file mode 100644 index 0000000000000000000000000000000000000000..e0d09baf32e590aba97fc2b5aabf41a40549e55d --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/utils/ema.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. +import torch +import torch.nn as nn + +import math +from copy import deepcopy + + +def is_parallel(model): + """check if model is in parallel mode.""" + + parallel_type = ( + nn.parallel.DataParallel, + nn.parallel.DistributedDataParallel, + ) + return isinstance(model, parallel_type) + + +def copy_attr(a, b, include=(), exclude=()): + # Copy attributes from b to a, options to only include [...] and to exclude [...] 
+ for k, v in b.__dict__.items(): + if (len(include) and k not in include) or k.startswith("_") or k in exclude: + continue + else: + setattr(a, k, v) + + +class ModelEMA: + """ + Model Exponential Moving Average from https://github.com/rwightman/pytorch-image-models + Keep a moving average of everything in the model state_dict (parameters and buffers). + This is intended to allow functionality like + https://www.tensorflow.org/api_docs/python/tf/train/ExponentialMovingAverage + A smoothed version of the weights is necessary for some training schemes to perform well. + This class is sensitive where it is initialized in the sequence of model init, + GPU assignment and distributed training wrappers. + """ + + def __init__(self, model, decay=0.9999, updates=0): + """ + Args: + model (nn.Module): model to apply EMA. + decay (float): ema decay reate. + updates (int): counter of EMA updates. + """ + # Create EMA(FP32) + self.ema = deepcopy(model.module if is_parallel(model) else model).eval() + self.updates = updates + # decay exponential ramp (to help early epochs) + self.decay = lambda x: decay * (1 - math.exp(-x / 2000)) + for p in self.ema.parameters(): + p.requires_grad_(False) + + def update(self, model): + # Update EMA parameters + with torch.no_grad(): + self.updates += 1 + d = self.decay(self.updates) + + msd = ( + model.module.state_dict() if is_parallel(model) else model.state_dict() + ) # model state_dict + for k, v in self.ema.state_dict().items(): + if v.dtype.is_floating_point: + v *= d + v += (1.0 - d) * msd[k].detach() + + def update_attr(self, model, include=(), exclude=("process_group", "reducer")): + # Update EMA attributes + copy_attr(self.ema, model, include, exclude) diff --git a/tracking/docker-build-context/byte_track/yolox/utils/logger.py b/tracking/docker-build-context/byte_track/yolox/utils/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..4bd51d9ec6569c452b34c1cf60ff03044842c2ee --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/utils/logger.py @@ -0,0 +1,96 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. + +from loguru import logger + +import inspect +import os +import sys + + +def get_caller_name(depth=0): + """ + Args: + depth (int): Depth of caller conext, use 0 for caller depth. Default value: 0. + + Returns: + str: module name of the caller + """ + # the following logic is a little bit faster than inspect.stack() logic + frame = inspect.currentframe().f_back + for _ in range(depth): + frame = frame.f_back + + return frame.f_globals["__name__"] + + +class StreamToLoguru: + """ + stream object that redirects writes to a logger instance. + """ + + def __init__(self, level="INFO", caller_names=("apex", "pycocotools")): + """ + Args: + level(str): log level string of loguru. Default value: "INFO". + caller_names(tuple): caller names of redirected module. + Default value: (apex, pycocotools). 
+ """ + self.level = level + self.linebuf = "" + self.caller_names = caller_names + + def write(self, buf): + full_name = get_caller_name(depth=1) + module_name = full_name.rsplit(".", maxsplit=-1)[0] + if module_name in self.caller_names: + for line in buf.rstrip().splitlines(): + # use caller level log + logger.opt(depth=2).log(self.level, line.rstrip()) + else: + sys.__stdout__.write(buf) + + def flush(self): + pass + + +def redirect_sys_output(log_level="INFO"): + redirect_logger = StreamToLoguru(log_level) + sys.stderr = redirect_logger + sys.stdout = redirect_logger + + +def setup_logger(save_dir, distributed_rank=0, filename="log.txt", mode="a"): + """setup logger for training and testing. + Args: + save_dir(str): location to save log file + distributed_rank(int): device rank when multi-gpu environment + filename (string): log save name. + mode(str): log file write mode, `append` or `override`. default is `a`. + + Return: + logger instance. + """ + loguru_format = ( + "{time:YYYY-MM-DD HH:mm:ss} | " + "{level: <8} | " + "{name}:{line} - {message}" + ) + + logger.remove() + save_file = os.path.join(save_dir, filename) + if mode == "o" and os.path.exists(save_file): + os.remove(save_file) + # only keep logger in rank0 process + if distributed_rank == 0: + logger.add( + sys.stderr, + format=loguru_format, + level="INFO", + enqueue=True, + ) + logger.add(save_file) + + # redirect stdout/stderr to loguru + redirect_sys_output("INFO") diff --git a/tracking/docker-build-context/byte_track/yolox/utils/lr_scheduler.py b/tracking/docker-build-context/byte_track/yolox/utils/lr_scheduler.py new file mode 100644 index 0000000000000000000000000000000000000000..8f85c230d26d82b667843aac82d795b3d3b7526a --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/utils/lr_scheduler.py @@ -0,0 +1,205 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. + +import math +from functools import partial + + +class LRScheduler: + def __init__(self, name, lr, iters_per_epoch, total_epochs, **kwargs): + """ + Supported lr schedulers: [cos, warmcos, multistep] + + Args: + lr (float): learning rate. + iters_per_peoch (int): number of iterations in one epoch. + total_epochs (int): number of epochs in training. 
+ kwargs (dict): + - cos: None + - warmcos: [warmup_epochs, warmup_lr_start (default 1e-6)] + - multistep: [milestones (epochs), gamma (default 0.1)] + """ + + self.lr = lr + self.iters_per_epoch = iters_per_epoch + self.total_epochs = total_epochs + self.total_iters = iters_per_epoch * total_epochs + + self.__dict__.update(kwargs) + + self.lr_func = self._get_lr_func(name) + + def update_lr(self, iters): + return self.lr_func(iters) + + def _get_lr_func(self, name): + if name == "cos": # cosine lr schedule + lr_func = partial(cos_lr, self.lr, self.total_iters) + elif name == "warmcos": + warmup_total_iters = self.iters_per_epoch * self.warmup_epochs + warmup_lr_start = getattr(self, "warmup_lr_start", 1e-6) + lr_func = partial( + warm_cos_lr, + self.lr, + self.total_iters, + warmup_total_iters, + warmup_lr_start, + ) + elif name == "yoloxwarmcos": + warmup_total_iters = self.iters_per_epoch * self.warmup_epochs + no_aug_iters = self.iters_per_epoch * self.no_aug_epochs + warmup_lr_start = getattr(self, "warmup_lr_start", 0) + min_lr_ratio = getattr(self, "min_lr_ratio", 0.2) + lr_func = partial( + yolox_warm_cos_lr, + self.lr, + min_lr_ratio, + self.total_iters, + warmup_total_iters, + warmup_lr_start, + no_aug_iters, + ) + elif name == "yoloxsemiwarmcos": + warmup_lr_start = getattr(self, "warmup_lr_start", 0) + min_lr_ratio = getattr(self, "min_lr_ratio", 0.2) + warmup_total_iters = self.iters_per_epoch * self.warmup_epochs + no_aug_iters = self.iters_per_epoch * self.no_aug_epochs + normal_iters = self.iters_per_epoch * self.semi_epoch + semi_iters = self.iters_per_epoch_semi * ( + self.total_epochs - self.semi_epoch - self.no_aug_epochs + ) + lr_func = partial( + yolox_semi_warm_cos_lr, + self.lr, + min_lr_ratio, + warmup_lr_start, + self.total_iters, + normal_iters, + no_aug_iters, + warmup_total_iters, + semi_iters, + self.iters_per_epoch, + self.iters_per_epoch_semi, + ) + elif name == "multistep": # stepwise lr schedule + milestones = [ + int(self.total_iters * milestone / self.total_epochs) + for milestone in self.milestones + ] + gamma = getattr(self, "gamma", 0.1) + lr_func = partial(multistep_lr, self.lr, milestones, gamma) + else: + raise ValueError("Scheduler version {} not supported.".format(name)) + return lr_func + + +def cos_lr(lr, total_iters, iters): + """Cosine learning rate""" + lr *= 0.5 * (1.0 + math.cos(math.pi * iters / total_iters)) + return lr + + +def warm_cos_lr(lr, total_iters, warmup_total_iters, warmup_lr_start, iters): + """Cosine learning rate with warm up.""" + if iters <= warmup_total_iters: + lr = (lr - warmup_lr_start) * iters / float( + warmup_total_iters + ) + warmup_lr_start + else: + lr *= 0.5 * ( + 1.0 + + math.cos( + math.pi + * (iters - warmup_total_iters) + / (total_iters - warmup_total_iters) + ) + ) + return lr + + +def yolox_warm_cos_lr( + lr, + min_lr_ratio, + total_iters, + warmup_total_iters, + warmup_lr_start, + no_aug_iter, + iters, +): + """Cosine learning rate with warm up.""" + min_lr = lr * min_lr_ratio + if iters <= warmup_total_iters: + # lr = (lr - warmup_lr_start) * iters / float(warmup_total_iters) + warmup_lr_start + lr = (lr - warmup_lr_start) * pow( + iters / float(warmup_total_iters), 2 + ) + warmup_lr_start + elif iters >= total_iters - no_aug_iter: + lr = min_lr + else: + lr = min_lr + 0.5 * (lr - min_lr) * ( + 1.0 + + math.cos( + math.pi + * (iters - warmup_total_iters) + / (total_iters - warmup_total_iters - no_aug_iter) + ) + ) + return lr + + +def yolox_semi_warm_cos_lr( + lr, + min_lr_ratio, + warmup_lr_start, + 
total_iters, + normal_iters, + no_aug_iters, + warmup_total_iters, + semi_iters, + iters_per_epoch, + iters_per_epoch_semi, + iters, +): + """Cosine learning rate with warm up.""" + min_lr = lr * min_lr_ratio + if iters <= warmup_total_iters: + # lr = (lr - warmup_lr_start) * iters / float(warmup_total_iters) + warmup_lr_start + lr = (lr - warmup_lr_start) * pow( + iters / float(warmup_total_iters), 2 + ) + warmup_lr_start + elif iters >= normal_iters + semi_iters: + lr = min_lr + elif iters <= normal_iters: + lr = min_lr + 0.5 * (lr - min_lr) * ( + 1.0 + + math.cos( + math.pi + * (iters - warmup_total_iters) + / (total_iters - warmup_total_iters - no_aug_iters) + ) + ) + else: + lr = min_lr + 0.5 * (lr - min_lr) * ( + 1.0 + + math.cos( + math.pi + * ( + normal_iters + - warmup_total_iters + + (iters - normal_iters) + * iters_per_epoch + * 1.0 + / iters_per_epoch_semi + ) + / (total_iters - warmup_total_iters - no_aug_iters) + ) + ) + return lr + + +def multistep_lr(lr, milestones, gamma, iters): + """MultiStep learning rate""" + for milestone in milestones: + lr *= gamma if iters >= milestone else 1.0 + return lr diff --git a/tracking/docker-build-context/byte_track/yolox/utils/metric.py b/tracking/docker-build-context/byte_track/yolox/utils/metric.py new file mode 100644 index 0000000000000000000000000000000000000000..4840b8dd0e97d26891fb8c515b6999cf35bd9544 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/utils/metric.py @@ -0,0 +1,123 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. +import numpy as np + +import torch + +import functools +import os +import time +from collections import defaultdict, deque + +__all__ = [ + "AverageMeter", + "MeterBuffer", + "get_total_and_free_memory_in_Mb", + "occupy_mem", + "gpu_mem_usage", +] + + +def get_total_and_free_memory_in_Mb(cuda_device): + devices_info_str = os.popen( + "nvidia-smi --query-gpu=memory.total,memory.used --format=csv,nounits,noheader" + ) + devices_info = devices_info_str.read().strip().split("\n") + total, used = devices_info[int(cuda_device)].split(",") + return int(total), int(used) + + +def occupy_mem(cuda_device, mem_ratio=0.95): + """ + pre-allocate gpu memory for training to avoid memory Fragmentation. + """ + total, used = get_total_and_free_memory_in_Mb(cuda_device) + max_mem = int(total * mem_ratio) + block_mem = max_mem - used + x = torch.cuda.FloatTensor(256, 1024, block_mem) + del x + time.sleep(5) + + +def gpu_mem_usage(): + """ + Compute the GPU memory usage for the current device (MB). + """ + mem_usage_bytes = torch.cuda.max_memory_allocated() + return mem_usage_bytes / (1024 * 1024) + + +class AverageMeter: + """Track a series of values and provide access to smoothed values over a + window or the global series average. + """ + + def __init__(self, window_size=50): + self._deque = deque(maxlen=window_size) + self._total = 0.0 + self._count = 0 + + def update(self, value): + self._deque.append(value) + self._count += 1 + self._total += value + + @property + def median(self): + d = np.array(list(self._deque)) + return np.median(d) + + @property + def avg(self): + # if deque is empty, nan will be returned. 
+ d = np.array(list(self._deque)) + return d.mean() + + @property + def global_avg(self): + return self._total / max(self._count, 1e-5) + + @property + def latest(self): + return self._deque[-1] if len(self._deque) > 0 else None + + @property + def total(self): + return self._total + + def reset(self): + self._deque.clear() + self._total = 0.0 + self._count = 0 + + def clear(self): + self._deque.clear() + + +class MeterBuffer(defaultdict): + """Computes and stores the average and current value""" + + def __init__(self, window_size=20): + factory = functools.partial(AverageMeter, window_size=window_size) + super().__init__(factory) + + def reset(self): + for v in self.values(): + v.reset() + + def get_filtered_meter(self, filter_key="time"): + return {k: v for k, v in self.items() if filter_key in k} + + def update(self, values=None, **kwargs): + if values is None: + values = {} + values.update(kwargs) + for k, v in values.items(): + if isinstance(v, torch.Tensor): + v = v.detach() + self[k].update(v) + + def clear_meters(self): + for v in self.values(): + v.clear() diff --git a/tracking/docker-build-context/byte_track/yolox/utils/model_utils.py b/tracking/docker-build-context/byte_track/yolox/utils/model_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b4e581211e7842cfffdd4c977577e8110b8d05b5 --- /dev/null +++ b/tracking/docker-build-context/byte_track/yolox/utils/model_utils.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python3 +# -*- coding:utf-8 -*- +# Copyright (c) 2014-2021 Megvii Inc. All rights reserved. + +import torch +import torch.nn as nn +from thop import profile + +from copy import deepcopy + +__all__ = [ + "fuse_conv_and_bn", + "fuse_model", + "get_model_info", + "replace_module", +] + + +def get_model_info(model, tsize): + + stride = 64 + img = torch.zeros((1, 3, stride, stride), device=next(model.parameters()).device) + flops, params = profile(deepcopy(model), inputs=(img,), verbose=False) + params /= 1e6 + flops /= 1e9 + flops *= tsize[0] * tsize[1] / stride / stride * 2 # Gflops + info = "Params: {:.2f}M, Gflops: {:.2f}".format(params, flops) + return info + + +def fuse_conv_and_bn(conv, bn): + # Fuse convolution and batchnorm layers https://tehnokv.com/posts/fusing-batchnorm-and-conv/ + fusedconv = ( + nn.Conv2d( + conv.in_channels, + conv.out_channels, + kernel_size=conv.kernel_size, + stride=conv.stride, + padding=conv.padding, + groups=conv.groups, + bias=True, + ) + .requires_grad_(False) + .to(conv.weight.device) + ) + + # prepare filters + w_conv = conv.weight.clone().view(conv.out_channels, -1) + w_bn = torch.diag(bn.weight.div(torch.sqrt(bn.eps + bn.running_var))) + fusedconv.weight.copy_(torch.mm(w_bn, w_conv).view(fusedconv.weight.shape)) + + # prepare spatial bias + b_conv = ( + torch.zeros(conv.weight.size(0), device=conv.weight.device) + if conv.bias is None + else conv.bias + ) + b_bn = bn.bias - bn.weight.mul(bn.running_mean).div( + torch.sqrt(bn.running_var + bn.eps) + ) + fusedconv.bias.copy_(torch.mm(w_bn, b_conv.reshape(-1, 1)).reshape(-1) + b_bn) + + return fusedconv + + +def fuse_model(model): + from yolox.models.network_blocks import BaseConv + + for m in model.modules(): + if type(m) is BaseConv and hasattr(m, "bn"): + m.conv = fuse_conv_and_bn(m.conv, m.bn) # update conv + delattr(m, "bn") # remove batchnorm + m.forward = m.fuseforward # update forward + return model + + +def replace_module(module, replaced_module_type, new_module_type, replace_func=None): + """ + Replace given type in module to a new type. 
Mostly used for deployment.
+
+    Args:
+        module (nn.Module): model to apply replace operation.
+        replaced_module_type (Type): module type to be replaced.
+        new_module_type (Type): module type to replace with.
+        replace_func (function): python function to describe replace logic. Default: None.
+
+    Returns:
+        model (nn.Module): module that has already been replaced.
+    """
+
+    def default_replace_func(replaced_module_type, new_module_type):
+        return new_module_type()
+
+    if replace_func is None:
+        replace_func = default_replace_func
+
+    model = module
+    if isinstance(module, replaced_module_type):
+        model = replace_func(replaced_module_type, new_module_type)
+    else:  # recursively replace
+        for name, child in module.named_children():
+            new_child = replace_module(child, replaced_module_type, new_module_type, replace_func)
+            if new_child is not child:  # child is already replaced
+                model.add_module(name, new_child)
+
+    return model
diff --git a/tracking/docker-build-context/byte_track/yolox/utils/setup_env.py b/tracking/docker-build-context/byte_track/yolox/utils/setup_env.py
new file mode 100644
index 0000000000000000000000000000000000000000..f282b1f6dc6f5c2d7a4e8de468e624a721bca94c
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/utils/setup_env.py
@@ -0,0 +1,51 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
+
+import cv2
+
+import os
+import subprocess
+
+__all__ = ["configure_nccl", "configure_module"]
+
+
+def configure_nccl():
+    """Configure multi-machine environment variables of NCCL."""
+    os.environ["NCCL_LAUNCH_MODE"] = "PARALLEL"
+    os.environ["NCCL_IB_HCA"] = subprocess.getoutput(
+        "pushd /sys/class/infiniband/ > /dev/null; for i in mlx5_*; "
+        "do cat $i/ports/1/gid_attrs/types/* 2>/dev/null "
+        "| grep v >/dev/null && echo $i ; done; popd > /dev/null"
+    )
+    os.environ["NCCL_IB_GID_INDEX"] = "3"
+    os.environ["NCCL_IB_TC"] = "106"
+
+
+def configure_module(ulimit_value=8192):
+    """
+    Configure the pytorch runtime environment: raise the open-file ulimit and
+    disable cv2 threading/OpenCL.
+
+    Args:
+        ulimit_value(int): default open file number on linux. Default value: 8192.
+    """
+    # system setting
+    try:
+        import resource
+
+        rlimit = resource.getrlimit(resource.RLIMIT_NOFILE)
+        resource.setrlimit(resource.RLIMIT_NOFILE, (ulimit_value, rlimit[1]))
+    except Exception:
+        # An exception might be raised on Windows or when the rlimit already
+        # equals the hard limit; raising the rlimit is not strictly necessary.
+        pass
+
+    # cv2
+    # multiprocess might be harmful on performance of torch dataloader
+    os.environ["OPENCV_OPENCL_RUNTIME"] = "disabled"
+    try:
+        cv2.setNumThreads(0)
+        cv2.ocl.setUseOpenCL(False)
+    except Exception:
+        # a cv2 version mismatch might raise exceptions.
+        pass
diff --git a/tracking/docker-build-context/byte_track/yolox/utils/visualize.py b/tracking/docker-build-context/byte_track/yolox/utils/visualize.py
new file mode 100644
index 0000000000000000000000000000000000000000..1d02d474d289df7bf3a9c43a707f403c1858f950
--- /dev/null
+++ b/tracking/docker-build-context/byte_track/yolox/utils/visualize.py
@@ -0,0 +1,166 @@
+#!/usr/bin/env python3
+# -*- coding:utf-8 -*-
+# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
+ +import cv2 +import numpy as np + +__all__ = ["vis"] + + +def vis(img, boxes, scores, cls_ids, conf=0.5, class_names=None): + + for i in range(len(boxes)): + box = boxes[i] + cls_id = int(cls_ids[i]) + score = scores[i] + if score < conf: + continue + x0 = int(box[0]) + y0 = int(box[1]) + x1 = int(box[2]) + y1 = int(box[3]) + + color = (_COLORS[cls_id] * 255).astype(np.uint8).tolist() + text = '{}:{:.1f}%'.format(class_names[cls_id], score * 100) + txt_color = (0, 0, 0) if np.mean(_COLORS[cls_id]) > 0.5 else (255, 255, 255) + font = cv2.FONT_HERSHEY_SIMPLEX + + txt_size = cv2.getTextSize(text, font, 0.4, 1)[0] + cv2.rectangle(img, (x0, y0), (x1, y1), color, 2) + + txt_bk_color = (_COLORS[cls_id] * 255 * 0.7).astype(np.uint8).tolist() + cv2.rectangle( + img, + (x0, y0 + 1), + (x0 + txt_size[0] + 1, y0 + int(1.5*txt_size[1])), + txt_bk_color, + -1 + ) + cv2.putText(img, text, (x0, y0 + txt_size[1]), font, 0.4, txt_color, thickness=1) + + return img + + +def get_color(idx): + idx = idx * 3 + color = ((37 * idx) % 255, (17 * idx) % 255, (29 * idx) % 255) + + return color + + +def plot_tracking(image, tlwhs, obj_ids, scores=None, frame_id=0, fps=0., ids2=None): + im = np.ascontiguousarray(np.copy(image)) + im_h, im_w = im.shape[:2] + + top_view = np.zeros([im_w, im_w, 3], dtype=np.uint8) + 255 + + #text_scale = max(1, image.shape[1] / 1600.) + #text_thickness = 2 + #line_thickness = max(1, int(image.shape[1] / 500.)) + text_scale = 2 + text_thickness = 2 + line_thickness = 3 + + radius = max(5, int(im_w/140.)) + cv2.putText(im, 'frame: %d fps: %.2f num: %d' % (frame_id, fps, len(tlwhs)), + (0, int(15 * text_scale)), cv2.FONT_HERSHEY_PLAIN, 2, (0, 0, 255), thickness=2) + + for i, tlwh in enumerate(tlwhs): + x1, y1, w, h = tlwh + intbox = tuple(map(int, (x1, y1, x1 + w, y1 + h))) + obj_id = int(obj_ids[i]) + id_text = '{}'.format(int(obj_id)) + if ids2 is not None: + id_text = id_text + ', {}'.format(int(ids2[i])) + color = get_color(abs(obj_id)) + cv2.rectangle(im, intbox[0:2], intbox[2:4], color=color, thickness=line_thickness) + cv2.putText(im, id_text, (intbox[0], intbox[1]), cv2.FONT_HERSHEY_PLAIN, text_scale, (0, 0, 255), + thickness=text_thickness) + return im + + +_COLORS = np.array( + [ + 0.000, 0.447, 0.741, + 0.850, 0.325, 0.098, + 0.929, 0.694, 0.125, + 0.494, 0.184, 0.556, + 0.466, 0.674, 0.188, + 0.301, 0.745, 0.933, + 0.635, 0.078, 0.184, + 0.300, 0.300, 0.300, + 0.600, 0.600, 0.600, + 1.000, 0.000, 0.000, + 1.000, 0.500, 0.000, + 0.749, 0.749, 0.000, + 0.000, 1.000, 0.000, + 0.000, 0.000, 1.000, + 0.667, 0.000, 1.000, + 0.333, 0.333, 0.000, + 0.333, 0.667, 0.000, + 0.333, 1.000, 0.000, + 0.667, 0.333, 0.000, + 0.667, 0.667, 0.000, + 0.667, 1.000, 0.000, + 1.000, 0.333, 0.000, + 1.000, 0.667, 0.000, + 1.000, 1.000, 0.000, + 0.000, 0.333, 0.500, + 0.000, 0.667, 0.500, + 0.000, 1.000, 0.500, + 0.333, 0.000, 0.500, + 0.333, 0.333, 0.500, + 0.333, 0.667, 0.500, + 0.333, 1.000, 0.500, + 0.667, 0.000, 0.500, + 0.667, 0.333, 0.500, + 0.667, 0.667, 0.500, + 0.667, 1.000, 0.500, + 1.000, 0.000, 0.500, + 1.000, 0.333, 0.500, + 1.000, 0.667, 0.500, + 1.000, 1.000, 0.500, + 0.000, 0.333, 1.000, + 0.000, 0.667, 1.000, + 0.000, 1.000, 1.000, + 0.333, 0.000, 1.000, + 0.333, 0.333, 1.000, + 0.333, 0.667, 1.000, + 0.333, 1.000, 1.000, + 0.667, 0.000, 1.000, + 0.667, 0.333, 1.000, + 0.667, 0.667, 1.000, + 0.667, 1.000, 1.000, + 1.000, 0.000, 1.000, + 1.000, 0.333, 1.000, + 1.000, 0.667, 1.000, + 0.333, 0.000, 0.000, + 0.500, 0.000, 0.000, + 0.667, 0.000, 0.000, + 0.833, 0.000, 0.000, + 1.000, 
+
+
+_COLORS = np.array(
+    [
+        0.000, 0.447, 0.741,
+        0.850, 0.325, 0.098,
+        0.929, 0.694, 0.125,
+        0.494, 0.184, 0.556,
+        0.466, 0.674, 0.188,
+        0.301, 0.745, 0.933,
+        0.635, 0.078, 0.184,
+        0.300, 0.300, 0.300,
+        0.600, 0.600, 0.600,
+        1.000, 0.000, 0.000,
+        1.000, 0.500, 0.000,
+        0.749, 0.749, 0.000,
+        0.000, 1.000, 0.000,
+        0.000, 0.000, 1.000,
+        0.667, 0.000, 1.000,
+        0.333, 0.333, 0.000,
+        0.333, 0.667, 0.000,
+        0.333, 1.000, 0.000,
+        0.667, 0.333, 0.000,
+        0.667, 0.667, 0.000,
+        0.667, 1.000, 0.000,
+        1.000, 0.333, 0.000,
+        1.000, 0.667, 0.000,
+        1.000, 1.000, 0.000,
+        0.000, 0.333, 0.500,
+        0.000, 0.667, 0.500,
+        0.000, 1.000, 0.500,
+        0.333, 0.000, 0.500,
+        0.333, 0.333, 0.500,
+        0.333, 0.667, 0.500,
+        0.333, 1.000, 0.500,
+        0.667, 0.000, 0.500,
+        0.667, 0.333, 0.500,
+        0.667, 0.667, 0.500,
+        0.667, 1.000, 0.500,
+        1.000, 0.000, 0.500,
+        1.000, 0.333, 0.500,
+        1.000, 0.667, 0.500,
+        1.000, 1.000, 0.500,
+        0.000, 0.333, 1.000,
+        0.000, 0.667, 1.000,
+        0.000, 1.000, 1.000,
+        0.333, 0.000, 1.000,
+        0.333, 0.333, 1.000,
+        0.333, 0.667, 1.000,
+        0.333, 1.000, 1.000,
+        0.667, 0.000, 1.000,
+        0.667, 0.333, 1.000,
+        0.667, 0.667, 1.000,
+        0.667, 1.000, 1.000,
+        1.000, 0.000, 1.000,
+        1.000, 0.333, 1.000,
+        1.000, 0.667, 1.000,
+        0.333, 0.000, 0.000,
+        0.500, 0.000, 0.000,
+        0.667, 0.000, 0.000,
+        0.833, 0.000, 0.000,
+        1.000, 0.000, 0.000,
+        0.000, 0.167, 0.000,
+        0.000, 0.333, 0.000,
+        0.000, 0.500, 0.000,
+        0.000, 0.667, 0.000,
+        0.000, 0.833, 0.000,
+        0.000, 1.000, 0.000,
+        0.000, 0.000, 0.167,
+        0.000, 0.000, 0.333,
+        0.000, 0.000, 0.500,
+        0.000, 0.000, 0.667,
+        0.000, 0.000, 0.833,
+        0.000, 0.000, 1.000,
+        0.000, 0.000, 0.000,
+        0.143, 0.143, 0.143,
+        0.286, 0.286, 0.286,
+        0.429, 0.429, 0.429,
+        0.571, 0.571, 0.571,
+        0.714, 0.714, 0.714,
+        0.857, 0.857, 0.857,
+        0.000, 0.447, 0.741,
+        0.314, 0.717, 0.741,
+        0.50, 0.5, 0
+    ]
+).astype(np.float32).reshape(-1, 3)
diff --git a/tracking/run.py b/tracking/run.py
index 0f3930f1776936996d267360772f4e25f982eba8..de1d6b6c1b92231a7c81d877abef28cddca20546
--- a/tracking/run.py
+++ b/tracking/run.py
@@ -1,14 +1,160 @@
 import argparse
+import logging
+import os
+from typing import Iterable
+from unittest import mock
+import cv2
+import numpy as np
+from yolox.tracker.byte_tracker import BYTETracker, STrack
+
+from common import download_file, logging_levels
+from common.communication.kafka_common import KafkaRequestProcessor
+from common.communication.messages_pb2 import PerceptionRequest, PerceptionResponse, TrackingEntry, DetectionEntry
+
+CURRENT_DIR_PATH = os.path.dirname(os.path.abspath(__file__))
+
+
+# noinspection DuplicatedCode
+def process_tracking_requests(requests: Iterable[PerceptionRequest], results: Iterable[PerceptionResponse],
+                              tracker_factory, trackers):
+    for req, res in zip(requests, results):
+        if req.finished:
+            track(req.manager_id, req.video_id, tracker_factory, trackers, None, None)
+        else:
+            # np.frombuffer replaces the deprecated np.fromstring
+            image = np.frombuffer(req.tracking.image, np.uint8)
+            image = cv2.imdecode(image, cv2.IMREAD_ANYCOLOR)
+            logging.info('Processing image of shape %s', image.shape)
+            tracking_entries = track(req.manager_id, req.video_id, tracker_factory, trackers, image,
+                                     req.tracking.entries)
+            for tracking_entry in tracking_entries:
+                res.tracking.entries.append(tracking_entry)
+
+
+def track(manager_id, video_id, tracker_factory, trackers, image, detections):
+    """
+    Processes the next frame of a video stream, matching objects against the previous frame.
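+
+    A BYTETracker instance is kept per (manager_id, video_id) stream and is
+    created lazily on the stream's first frame; a call with image=None closes
+    the stream and discards its tracker.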
+
+    :param manager_id: id of the manager transmitting the video stream
+    :param video_id: id of the video stream
+    :param tracker_factory: factory for creating trackers
+    :param trackers: holds the trackers per video stream, {(manager_id, video_id): tracker}
+    :param image: the frame image, or None if the video stream has finished
+    :param detections: a collection of DetectionEntry
+    :return: a collection of TrackingEntry
+    """
+    tracker_key = (manager_id, video_id)
+    if image is None or detections is None:
+        logging.info("Closing video stream %s", tracker_key)
+        if tracker_key in trackers:
+            del trackers[tracker_key]
+        return []
+
+    if tracker_key not in trackers:
+        trackers[tracker_key] = tracker_factory()
+
+    detection_info = np.zeros((len(detections), 5), dtype=float)
+    detection_classes = np.zeros(len(detections))
+    for i, detection in enumerate(detections):
+        detection_info[i, 0] = detection.box_top_left_x
+        detection_info[i, 1] = detection.box_top_left_y
+        detection_info[i, 2] = detection.box_bottom_right_x
+        detection_info[i, 3] = detection.box_bottom_right_y
+        detection_info[i, 4] = detection.score
+        detection_classes[i] = detection.class_id
+
+    tracker: BYTETracker = trackers[tracker_key]
+    height_width = (image.shape[0], image.shape[1])
+    tracks: Iterable[STrack] = tracker.update(detection_info, height_width, height_width, classes=detection_classes)
+
+    result = []
+    for t in tracks:
+        tlbr = t.tlbr
+        entry = TrackingEntry(id=t.track_id)
+        entry.detection.CopyFrom(DetectionEntry(box_top_left_x=int(tlbr[0]), box_top_left_y=int(tlbr[1]),
+                                                box_bottom_right_x=int(tlbr[2]), box_bottom_right_y=int(tlbr[3]),
+                                                class_id=int(t.class_id), score=t.score))
+        result.append(entry)
+    return result
+
+
+def get_tracker_factory(weights_path, fps):
+    return lambda: create_tracker(fps)
+
+
+def create_tracker(fps):
+    args = mock.Mock()
+    # todo: try different parameter values
+    args.track_thresh = 0.7
+    args.track_buffer = 150
+    args.mot20 = False  # todo: try True as well
+    args.match_thresh = 0.9
+    return BYTETracker(args, fps)
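+
+# BYTETracker reads its thresholds from an argparse-style namespace, which is
+# why a Mock stands in for parsed CLI args above. Roughly (per the ByteTrack
+# sources): track_thresh separates high- from low-confidence detections,
+# track_buffer is how many frames a lost track is kept alive, and match_thresh
+# caps the IoU distance accepted when matching tracks to detections.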
+
+
+# noinspection DuplicatedCode
 def run(parser: argparse.ArgumentParser):
+    parser.add_argument('--name', default='tracking', help='instance name used as part of the client id')
+    parser.add_argument('--log_dir', help='logs dir path', required=True)
+    parser.add_argument('--logging_level', dest='logging_level', choices=logging_levels.keys(), default='INFO',
+                        help='logging level. One of ' + str(logging_levels.keys()))
+    parser.add_argument('--weights_path', default='',  # todo
+                        help='path to the model weights file. Default: '
+                             'PROJECT_DIR_PATH/tracking/models/bytetrack_x_mot20.tar')
+    parser.add_argument('--weights_url', default='', help='url to the model weights file that '
+                                                          'should be downloaded and placed at the '
+                                                          'weights_path if it does not already exist')
+    parser.add_argument('--bootstrap_servers', type=str, required=True,
+                        help='comma separated kafka bootstrap servers. '
+                             'Example: kafka1.local:9092,kafka2.local:9092')
+    parser.add_argument('--username', type=str, default='', help='kafka SASL username or empty str')
+    parser.add_argument('--password', type=str, default='', help='kafka SASL password or empty str')
+    parser.add_argument('--topic', default='tracking-requests', help='requests topic name')
+    parser.add_argument('--group_id', default='tracking', help='kafka clients group id')
+    parser.add_argument('--result_topic_prefix', default='tracking-results-', help='prefix of the result topic names')
+    parser.add_argument('--batch_size', type=int, default=1, help='batch size for frames processing')
+    parser.add_argument('--frames_commit_latency', type=int, default=10, help='commit lag measured in frames')
+    parser.add_argument('--fps', type=int, default=1, help='frame rate of the incoming video streams')
+    parser.add_argument('--commit_period_ms', type=int, default=1000,
+                        help='period for committing processed and delivered request offsets')
     args = parser.parse_args()
-    # todo: the context problem here could be solved by committing not immediately but with a constant lag. Then,
-    # after a rebalance, the model could be trained first. A rebalance would still be a problem, though; we should
-    # check how rare they are. The commit could perhaps also carry metainformation - the id up to which responses
-    # have already been sent - so that they are not re-sent but simply ignored. This could become a KafkaProcessor
-    # parameter - the commit lag measured in ids. But some kind of rebalance handler would be needed,
-    # and state would have to be kept for each video as well...
+
+    logging.basicConfig(format="%(asctime)s: %(levelname)s - %(message)s",
+                        filename=os.path.join(args.log_dir, 'tracking.log'), level=logging_levels[args.logging_level])
+
+    weights_path = args.weights_path if args.weights_path else \
+        os.path.join(CURRENT_DIR_PATH, 'models/bytetrack_x_mot20.tar')
+
+    if os.path.exists(weights_path) and not os.path.isfile(weights_path):
+        raise ValueError("weights_path '{}' must denote a file or mustn't exist".format(weights_path))
+
+    if not os.path.exists(weights_path):
+        logging.info("Downloading model weights from '{}' to '{}'".format(args.weights_url, weights_path))
+        download_file(args.weights_url, weights_path)
+        logging.info('Model weights download finished')
+
+    logging.info("Initializing tracking model")
+    tracker_factory = get_tracker_factory(weights_path, args.fps)
+    logging.info("Model was initialized")
+    trackers = {}
+
+    bootstrap_servers = args.bootstrap_servers.split(',')
+    logging.info('Using bootstrap servers: %s', bootstrap_servers)
+
+    processor = KafkaRequestProcessor(name=args.name, bootstrap_servers=bootstrap_servers,
+                                      username=args.username, password=args.password, topic_name=args.topic,
+                                      group_id=args.group_id, result_topic_prefix=args.result_topic_prefix,
+                                      req_batch_size=args.batch_size, commit_period_ms=args.commit_period_ms,
+                                      stream_max_idle_time_ms=600_000, frames_commit_latency=args.frames_commit_latency,
+                                      request_handler=lambda req, res: process_tracking_requests(req, res,
+                                                                                                 tracker_factory,
+                                                                                                 trackers))
+
+    logging.info("Starting tracking service '%s'", args.name)
+    try:
+        processor.run_sync()
+    except KeyboardInterrupt:
+        pass
+    logging.info("Stopping tracking service")
 
 
 if __name__ == '__main__':